diff --git a/.gitignore b/.gitignore index 7eaef69873..ea5b20eb69 100644 --- a/.gitignore +++ b/.gitignore @@ -102,5 +102,8 @@ website/.docusaurus .poetry/ .python-version +.editorconfig +.pre-commit-config.yaml +mypy.ini tools/run_eventserver.* diff --git a/.gitmodules b/.gitmodules index dfd89cdb3c..fe93791c4e 100644 --- a/.gitmodules +++ b/.gitmodules @@ -4,4 +4,4 @@ [submodule "tools/modules/powershell/PSWriteColor"] path = tools/modules/powershell/PSWriteColor - url = https://github.com/EvotecIT/PSWriteColor.git + url = https://github.com/EvotecIT/PSWriteColor.git \ No newline at end of file diff --git a/CHANGELOG.md b/CHANGELOG.md index e8da885473..2a8e962085 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,129 +1,154 @@ # Changelog -## [3.12.2-nightly.2](https://github.com/pypeclub/OpenPype/tree/HEAD) +## [3.14.1-nightly.3](https://github.com/pypeclub/OpenPype/tree/HEAD) -[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.12.1...HEAD) +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.14.0...HEAD) + +### 📖 Documentation + +- Documentation: Few updates [\#3698](https://github.com/pypeclub/OpenPype/pull/3698) +- Documentation: Settings development [\#3660](https://github.com/pypeclub/OpenPype/pull/3660) + +**🆕 New features** + +- Webpublisher:change create flatten image into tri state [\#3678](https://github.com/pypeclub/OpenPype/pull/3678) **🚀 Enhancements** -- General: Interactive console in cli [\#3526](https://github.com/pypeclub/OpenPype/pull/3526) -- Ftrack: Automatic daily review session creation can define trigger hour [\#3516](https://github.com/pypeclub/OpenPype/pull/3516) -- Ftrack: add source into Note [\#3509](https://github.com/pypeclub/OpenPype/pull/3509) -- Ftrack: Trigger custom ftrack topic of project structure creation [\#3506](https://github.com/pypeclub/OpenPype/pull/3506) -- Settings UI: Add extract to file action on project view [\#3505](https://github.com/pypeclub/OpenPype/pull/3505) -- Add pack and unpack convenience scripts [\#3502](https://github.com/pypeclub/OpenPype/pull/3502) -- General: Event system [\#3499](https://github.com/pypeclub/OpenPype/pull/3499) -- NewPublisher: Keep plugins with mismatch target in report [\#3498](https://github.com/pypeclub/OpenPype/pull/3498) -- Nuke: load clip with options from settings [\#3497](https://github.com/pypeclub/OpenPype/pull/3497) -- TrayPublisher: implemented render\_mov\_batch [\#3486](https://github.com/pypeclub/OpenPype/pull/3486) -- Migrate basic families to the new Tray Publisher [\#3469](https://github.com/pypeclub/OpenPype/pull/3469) +- Settings: Remove settings lock on tray exit [\#3720](https://github.com/pypeclub/OpenPype/pull/3720) +- General: Added helper getters to modules manager [\#3712](https://github.com/pypeclub/OpenPype/pull/3712) +- Unreal: Define unreal as module and use host class [\#3701](https://github.com/pypeclub/OpenPype/pull/3701) +- Settings: Lock settings UI session [\#3700](https://github.com/pypeclub/OpenPype/pull/3700) +- General: Benevolent context label collector [\#3686](https://github.com/pypeclub/OpenPype/pull/3686) +- Ftrack: Store ftrack entities on hierarchy integration to instances [\#3677](https://github.com/pypeclub/OpenPype/pull/3677) +- Ftrack: More logs related to auto sync value change [\#3671](https://github.com/pypeclub/OpenPype/pull/3671) +- Blender: ops refresh manager after process events [\#3663](https://github.com/pypeclub/OpenPype/pull/3663) **🐛 Bug fixes** -- Additional fixes for powershell scripts 
[\#3525](https://github.com/pypeclub/OpenPype/pull/3525) -- Maya: Added wrapper around cmds.setAttr [\#3523](https://github.com/pypeclub/OpenPype/pull/3523) -- General: Fix hash of centos oiio archive [\#3519](https://github.com/pypeclub/OpenPype/pull/3519) -- Maya: Renderman display output fix [\#3514](https://github.com/pypeclub/OpenPype/pull/3514) -- TrayPublisher: Simple creation enhancements and fixes [\#3513](https://github.com/pypeclub/OpenPype/pull/3513) -- NewPublisher: Publish attributes are properly collected [\#3510](https://github.com/pypeclub/OpenPype/pull/3510) -- TrayPublisher: Make sure host name is filled [\#3504](https://github.com/pypeclub/OpenPype/pull/3504) -- NewPublisher: Groups work and enum multivalue [\#3501](https://github.com/pypeclub/OpenPype/pull/3501) +- General: Logger tweaks [\#3741](https://github.com/pypeclub/OpenPype/pull/3741) +- Nuke: color-space settings from anatomy is working [\#3721](https://github.com/pypeclub/OpenPype/pull/3721) +- Settings: Fix studio default anatomy save [\#3716](https://github.com/pypeclub/OpenPype/pull/3716) +- Maya: Use project name instead of project code [\#3709](https://github.com/pypeclub/OpenPype/pull/3709) +- Settings: Fix project overrides save [\#3708](https://github.com/pypeclub/OpenPype/pull/3708) +- Workfiles tool: Fix published workfile filtering [\#3704](https://github.com/pypeclub/OpenPype/pull/3704) +- PS, AE: Provide default variant value for workfile subset [\#3703](https://github.com/pypeclub/OpenPype/pull/3703) +- RoyalRender: handle host name that is not set [\#3695](https://github.com/pypeclub/OpenPype/pull/3695) +- Flame: retime is working on clip publishing [\#3684](https://github.com/pypeclub/OpenPype/pull/3684) +- Webpublisher: added check for empty context [\#3682](https://github.com/pypeclub/OpenPype/pull/3682) **🔀 Refactored code** -- General: Client docstrings cleanup [\#3529](https://github.com/pypeclub/OpenPype/pull/3529) -- TimersManager: Use query functions [\#3495](https://github.com/pypeclub/OpenPype/pull/3495) +- General: Host addons cleanup [\#3744](https://github.com/pypeclub/OpenPype/pull/3744) +- Webpublisher: Webpublisher is used as addon [\#3740](https://github.com/pypeclub/OpenPype/pull/3740) +- Photoshop: Defined photoshop as addon [\#3736](https://github.com/pypeclub/OpenPype/pull/3736) +- Harmony: Defined harmony as addon [\#3734](https://github.com/pypeclub/OpenPype/pull/3734) +- General: Module interfaces cleanup [\#3731](https://github.com/pypeclub/OpenPype/pull/3731) +- AfterEffects: Move AE functions from general lib [\#3730](https://github.com/pypeclub/OpenPype/pull/3730) +- Blender: Define blender as module [\#3729](https://github.com/pypeclub/OpenPype/pull/3729) +- AfterEffects: Define AfterEffects as module [\#3728](https://github.com/pypeclub/OpenPype/pull/3728) +- General: Replace PypeLogger with Logger [\#3725](https://github.com/pypeclub/OpenPype/pull/3725) +- Nuke: Define nuke as module [\#3724](https://github.com/pypeclub/OpenPype/pull/3724) +- General: Move subset name functionality [\#3723](https://github.com/pypeclub/OpenPype/pull/3723) +- General: Move creators plugin getter [\#3714](https://github.com/pypeclub/OpenPype/pull/3714) +- General: Move constants from lib to client [\#3713](https://github.com/pypeclub/OpenPype/pull/3713) +- Loader: Subset groups using client operations [\#3710](https://github.com/pypeclub/OpenPype/pull/3710) +- TVPaint: Defined as module [\#3707](https://github.com/pypeclub/OpenPype/pull/3707) +- StandalonePublisher: Define 
StandalonePublisher as module [\#3706](https://github.com/pypeclub/OpenPype/pull/3706) +- TrayPublisher: Define TrayPublisher as module [\#3705](https://github.com/pypeclub/OpenPype/pull/3705) +- General: Move context specific functions to context tools [\#3702](https://github.com/pypeclub/OpenPype/pull/3702) + +**Merged pull requests:** + +- Hiero: Define hiero as module [\#3717](https://github.com/pypeclub/OpenPype/pull/3717) +- Deadline: better logging for DL webservice failures [\#3694](https://github.com/pypeclub/OpenPype/pull/3694) +- Photoshop: resize saved images in ExtractReview for ffmpeg [\#3676](https://github.com/pypeclub/OpenPype/pull/3676) + +## [3.14.0](https://github.com/pypeclub/OpenPype/tree/3.14.0) (2022-08-18) + +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.14.0-nightly.1...3.14.0) + +**🚀 Enhancements** + +- Ftrack: Addiotional component metadata [\#3685](https://github.com/pypeclub/OpenPype/pull/3685) +- Ftrack: Set task status on farm publishing [\#3680](https://github.com/pypeclub/OpenPype/pull/3680) +- Ftrack: Set task status on task creation in integrate hierarchy [\#3675](https://github.com/pypeclub/OpenPype/pull/3675) +- Maya: Disable rendering of all lights for render instances submitted through Deadline. [\#3661](https://github.com/pypeclub/OpenPype/pull/3661) +- General: Optimized OCIO configs [\#3650](https://github.com/pypeclub/OpenPype/pull/3650) + +**🐛 Bug fixes** + +- General: Switch from hero version to versioned works [\#3691](https://github.com/pypeclub/OpenPype/pull/3691) +- General: Fix finding of last version [\#3656](https://github.com/pypeclub/OpenPype/pull/3656) +- General: Extract Review can scale with pixel aspect ratio [\#3644](https://github.com/pypeclub/OpenPype/pull/3644) +- Maya: Refactor moved usage of CreateRender settings [\#3643](https://github.com/pypeclub/OpenPype/pull/3643) +- General: Hero version representations have full context [\#3638](https://github.com/pypeclub/OpenPype/pull/3638) +- Nuke: color settings for render write node is working now [\#3632](https://github.com/pypeclub/OpenPype/pull/3632) +- Maya: FBX support for update in reference loader [\#3631](https://github.com/pypeclub/OpenPype/pull/3631) + +**🔀 Refactored code** + +- General: Use client projects getter [\#3673](https://github.com/pypeclub/OpenPype/pull/3673) +- Resolve: Match folder structure to other hosts [\#3653](https://github.com/pypeclub/OpenPype/pull/3653) +- Maya: Hosts as modules [\#3647](https://github.com/pypeclub/OpenPype/pull/3647) +- TimersManager: Plugins are in timers manager module [\#3639](https://github.com/pypeclub/OpenPype/pull/3639) +- General: Move workfiles functions into pipeline [\#3637](https://github.com/pypeclub/OpenPype/pull/3637) + +**Merged pull requests:** + +- Deadline: Global job pre load is not Pype 2 compatible [\#3666](https://github.com/pypeclub/OpenPype/pull/3666) +- Maya: Remove unused get current renderer logic [\#3645](https://github.com/pypeclub/OpenPype/pull/3645) +- Kitsu|Fix: Movie project type fails & first loop children names [\#3636](https://github.com/pypeclub/OpenPype/pull/3636) +- fix the bug of failing to extract look when UDIMs format used in AiImage [\#3628](https://github.com/pypeclub/OpenPype/pull/3628) + +## [3.13.0](https://github.com/pypeclub/OpenPype/tree/3.13.0) (2022-08-09) + +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.13.0-nightly.1...3.13.0) + +**🆕 New features** + +- Support for mutliple installed versions - 3.13 
[\#3605](https://github.com/pypeclub/OpenPype/pull/3605) + +**🚀 Enhancements** + +- Editorial: Mix audio use side file for ffmpeg filters [\#3630](https://github.com/pypeclub/OpenPype/pull/3630) +- Ftrack: Comment template can contain optional keys [\#3615](https://github.com/pypeclub/OpenPype/pull/3615) +- Ftrack: Add more metadata to ftrack components [\#3612](https://github.com/pypeclub/OpenPype/pull/3612) + +**🐛 Bug fixes** + +- Maya: fix aov separator in Redshift [\#3625](https://github.com/pypeclub/OpenPype/pull/3625) +- Fix for multi-version build on Mac [\#3622](https://github.com/pypeclub/OpenPype/pull/3622) +- Ftrack: Sync hierarchical attributes can handle new created entities [\#3621](https://github.com/pypeclub/OpenPype/pull/3621) +- General: Extract review aspect ratio scale is calculated by ffmpeg [\#3620](https://github.com/pypeclub/OpenPype/pull/3620) +- Maya: Fix types of default settings [\#3617](https://github.com/pypeclub/OpenPype/pull/3617) +- Integrator: Don't force to have dot before frame [\#3611](https://github.com/pypeclub/OpenPype/pull/3611) +- AfterEffects: refactored integrate doesnt work formulti frame publishes [\#3610](https://github.com/pypeclub/OpenPype/pull/3610) +- Maya look data contents fails with custom attribute on group [\#3607](https://github.com/pypeclub/OpenPype/pull/3607) +- TrayPublisher: Fix wrong conflict merge [\#3600](https://github.com/pypeclub/OpenPype/pull/3600) + +**🔀 Refactored code** + +- General: Plugin settings handled by plugins [\#3623](https://github.com/pypeclub/OpenPype/pull/3623) +- General: Naive implementation of document create, update, delete [\#3601](https://github.com/pypeclub/OpenPype/pull/3601) + +**Merged pull requests:** + +- Webpublisher: timeout for PS studio processing [\#3619](https://github.com/pypeclub/OpenPype/pull/3619) +- Core: translated validate\_containers.py into New publisher style [\#3614](https://github.com/pypeclub/OpenPype/pull/3614) + +## [3.12.2](https://github.com/pypeclub/OpenPype/tree/3.12.2) (2022-07-27) + +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.12.2-nightly.4...3.12.2) ## [3.12.1](https://github.com/pypeclub/OpenPype/tree/3.12.1) (2022-07-13) [Full Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.12.1-nightly.6...3.12.1) -### 📖 Documentation - -- Docs: Added minimal permissions for MongoDB [\#3441](https://github.com/pypeclub/OpenPype/pull/3441) - -**🆕 New features** - -- Maya: Add VDB to Arnold loader [\#3433](https://github.com/pypeclub/OpenPype/pull/3433) - -**🚀 Enhancements** - -- TrayPublisher: Added more options for grouping of instances [\#3494](https://github.com/pypeclub/OpenPype/pull/3494) -- NewPublisher: Align creator attributes from top to bottom [\#3487](https://github.com/pypeclub/OpenPype/pull/3487) -- NewPublisher: Added ability to use label of instance [\#3484](https://github.com/pypeclub/OpenPype/pull/3484) -- General: Creator Plugins have access to project [\#3476](https://github.com/pypeclub/OpenPype/pull/3476) -- General: Better arguments order in creator init [\#3475](https://github.com/pypeclub/OpenPype/pull/3475) -- Ftrack: Trigger custom ftrack events on project creation and preparation [\#3465](https://github.com/pypeclub/OpenPype/pull/3465) -- Windows installer: Clean old files and add version subfolder [\#3445](https://github.com/pypeclub/OpenPype/pull/3445) -- Blender: Bugfix - Set fps properly on open [\#3426](https://github.com/pypeclub/OpenPype/pull/3426) -- Hiero: Add custom scripts menu 
[\#3425](https://github.com/pypeclub/OpenPype/pull/3425) -- Blender: pre pyside install for all platforms [\#3400](https://github.com/pypeclub/OpenPype/pull/3400) - -**🐛 Bug fixes** - -- TrayPublisher: Keep use instance label in list view [\#3493](https://github.com/pypeclub/OpenPype/pull/3493) -- General: Extract review use first frame of input sequence [\#3491](https://github.com/pypeclub/OpenPype/pull/3491) -- General: Fix Plist loading for application launch [\#3485](https://github.com/pypeclub/OpenPype/pull/3485) -- Nuke: Workfile tools open on start [\#3479](https://github.com/pypeclub/OpenPype/pull/3479) -- New Publisher: Disabled context change allows creation [\#3478](https://github.com/pypeclub/OpenPype/pull/3478) -- General: thumbnail extractor fix [\#3474](https://github.com/pypeclub/OpenPype/pull/3474) -- Kitsu: bugfix with sync-service ans publish plugins [\#3473](https://github.com/pypeclub/OpenPype/pull/3473) -- Flame: solved problem with multi-selected loading [\#3470](https://github.com/pypeclub/OpenPype/pull/3470) -- General: Fix query function in update logic [\#3468](https://github.com/pypeclub/OpenPype/pull/3468) -- Resolve: removed few bugs [\#3464](https://github.com/pypeclub/OpenPype/pull/3464) -- General: Delete old versions is safer when ftrack is disabled [\#3462](https://github.com/pypeclub/OpenPype/pull/3462) -- Nuke: fixing metadata slate TC difference [\#3455](https://github.com/pypeclub/OpenPype/pull/3455) -- Nuke: prerender reviewable fails [\#3450](https://github.com/pypeclub/OpenPype/pull/3450) -- Maya: fix hashing in Python 3 for tile rendering [\#3447](https://github.com/pypeclub/OpenPype/pull/3447) -- LogViewer: Escape html characters in log message [\#3443](https://github.com/pypeclub/OpenPype/pull/3443) -- Nuke: Slate frame is integrated [\#3427](https://github.com/pypeclub/OpenPype/pull/3427) - -**🔀 Refactored code** - -- Maya: Merge animation + pointcache extractor logic [\#3461](https://github.com/pypeclub/OpenPype/pull/3461) -- Maya: Re-use `maintained\_time` from lib [\#3460](https://github.com/pypeclub/OpenPype/pull/3460) -- General: Use query functions in global plugins [\#3459](https://github.com/pypeclub/OpenPype/pull/3459) -- Clockify: Use query functions in clockify actions [\#3458](https://github.com/pypeclub/OpenPype/pull/3458) -- General: Use query functions in rest api calls [\#3457](https://github.com/pypeclub/OpenPype/pull/3457) -- General: Use query functions in openpype lib functions [\#3454](https://github.com/pypeclub/OpenPype/pull/3454) -- General: Use query functions in load utils [\#3446](https://github.com/pypeclub/OpenPype/pull/3446) -- General: Move publish plugin and publish render abstractions [\#3442](https://github.com/pypeclub/OpenPype/pull/3442) -- General: Use Anatomy after move to pipeline [\#3436](https://github.com/pypeclub/OpenPype/pull/3436) -- General: Anatomy moved to pipeline [\#3435](https://github.com/pypeclub/OpenPype/pull/3435) - ## [3.12.0](https://github.com/pypeclub/OpenPype/tree/3.12.0) (2022-06-28) [Full Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.12.0-nightly.3...3.12.0) -### 📖 Documentation - -- Fix typo in documentation: pyenv on mac [\#3417](https://github.com/pypeclub/OpenPype/pull/3417) -- Linux: update OIIO package [\#3401](https://github.com/pypeclub/OpenPype/pull/3401) - -**🚀 Enhancements** - -- Webserver: Added CORS middleware [\#3422](https://github.com/pypeclub/OpenPype/pull/3422) -- Attribute Defs UI: Files widget show what is allowed to drop in 
[\#3411](https://github.com/pypeclub/OpenPype/pull/3411) - -**🐛 Bug fixes** - -- NewPublisher: Fix subset name change on change of creator plugin [\#3420](https://github.com/pypeclub/OpenPype/pull/3420) -- Bug: fix invalid avalon import [\#3418](https://github.com/pypeclub/OpenPype/pull/3418) -- Nuke: Fix keyword argument in query function [\#3414](https://github.com/pypeclub/OpenPype/pull/3414) -- Houdini: fix loading and updating vbd/bgeo sequences [\#3408](https://github.com/pypeclub/OpenPype/pull/3408) -- Nuke: Collect representation files based on Write [\#3407](https://github.com/pypeclub/OpenPype/pull/3407) -- General: Filter representations before integration start [\#3398](https://github.com/pypeclub/OpenPype/pull/3398) -- Maya: look collector typo [\#3392](https://github.com/pypeclub/OpenPype/pull/3392) - -**🔀 Refactored code** - -- Unreal: Use client query functions [\#3421](https://github.com/pypeclub/OpenPype/pull/3421) -- General: Move editorial lib to pipeline [\#3419](https://github.com/pypeclub/OpenPype/pull/3419) -- Kitsu: renaming to plural func sync\_all\_projects [\#3397](https://github.com/pypeclub/OpenPype/pull/3397) -- Houdini: Use client query functions [\#3395](https://github.com/pypeclub/OpenPype/pull/3395) -- Hiero: Use client query functions [\#3393](https://github.com/pypeclub/OpenPype/pull/3393) -- Nuke: Use client query functions [\#3391](https://github.com/pypeclub/OpenPype/pull/3391) - ## [3.11.1](https://github.com/pypeclub/OpenPype/tree/3.11.1) (2022-06-20) [Full Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.11.1-nightly.1...3.11.1) diff --git a/igniter/bootstrap_repos.py b/igniter/bootstrap_repos.py index 08333885c0..c5003b062e 100644 --- a/igniter/bootstrap_repos.py +++ b/igniter/bootstrap_repos.py @@ -122,7 +122,7 @@ class OpenPypeVersion(semver.VersionInfo): if self.staging: if kwargs.get("build"): if "staging" not in kwargs.get("build"): - kwargs["build"] = "{}-staging".format(kwargs.get("build")) + kwargs["build"] = f"{kwargs.get('build')}-staging" else: kwargs["build"] = "staging" @@ -136,8 +136,7 @@ class OpenPypeVersion(semver.VersionInfo): return bool(result and self.staging == other.staging) def __repr__(self): - return "<{}: {} - path={}>".format( - self.__class__.__name__, str(self), self.path) + return f"<{self.__class__.__name__}: {str(self)} - path={self.path}>" def __lt__(self, other: OpenPypeVersion): result = super().__lt__(other) @@ -232,10 +231,7 @@ class OpenPypeVersion(semver.VersionInfo): return openpype_version def __hash__(self): - if self.path: - return hash(self.path) - else: - return hash(str(self)) + return hash(self.path) if self.path else hash(str(self)) @staticmethod def is_version_in_dir( @@ -384,7 +380,8 @@ class OpenPypeVersion(semver.VersionInfo): @classmethod def get_local_versions( - cls, production: bool = None, staging: bool = None + cls, production: bool = None, + staging: bool = None ) -> List: """Get all versions available on this machine. @@ -394,6 +391,10 @@ class OpenPypeVersion(semver.VersionInfo): Args: production (bool): Return production versions. staging (bool): Return staging versions. + + Returns: + list: of compatible versions available on the machine. 
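(Reviewer's aside.) A minimal usage sketch of the version getters documented here, assuming a built OpenPype where `igniter` is importable; the call semantics follow the docstring above (both arguments `None` returns everything, both `False` returns nothing):

```python
from igniter.bootstrap_repos import OpenPypeVersion

all_local = OpenPypeVersion.get_local_versions()                                   # no filtering
staging_only = OpenPypeVersion.get_local_versions(production=False, staging=True)
production_only = OpenPypeVersion.get_local_versions(production=True, staging=False)
```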
+ """ # Return all local versions if arguments are set to None if production is None and staging is None: @@ -410,10 +411,10 @@ class OpenPypeVersion(semver.VersionInfo): if not production and not staging: return [] + # DEPRECATED: backwards compatible way to look for versions in root dir_to_search = Path(user_data_dir("openpype", "pypeclub")) - versions = OpenPypeVersion.get_versions_from_directory( - dir_to_search - ) + versions = OpenPypeVersion.get_versions_from_directory(dir_to_search) + filtered_versions = [] for version in versions: if version.is_staging(): @@ -425,7 +426,8 @@ class OpenPypeVersion(semver.VersionInfo): @classmethod def get_remote_versions( - cls, production: bool = None, staging: bool = None + cls, production: bool = None, + staging: bool = None ) -> List: """Get all versions available in OpenPype Path. @@ -435,6 +437,7 @@ class OpenPypeVersion(semver.VersionInfo): Args: production (bool): Return production versions. staging (bool): Return staging versions. + """ # Return all local versions if arguments are set to None if production is None and staging is None: @@ -469,6 +472,7 @@ class OpenPypeVersion(semver.VersionInfo): return [] versions = cls.get_versions_from_directory(dir_to_search) + filtered_versions = [] for version in versions: if version.is_staging(): @@ -479,7 +483,8 @@ class OpenPypeVersion(semver.VersionInfo): return list(sorted(set(filtered_versions))) @staticmethod - def get_versions_from_directory(openpype_dir: Path) -> List: + def get_versions_from_directory( + openpype_dir: Path) -> List: """Get all detected OpenPype versions in directory. Args: @@ -492,15 +497,22 @@ class OpenPypeVersion(semver.VersionInfo): ValueError: if invalid path is specified. """ + openpype_versions = [] if not openpype_dir.exists() and not openpype_dir.is_dir(): - raise ValueError("specified directory is invalid") + return openpype_versions - _openpype_versions = [] # iterate over directory in first level and find all that might # contain OpenPype. for item in openpype_dir.iterdir(): + # if the item is directory with major.minor version, dive deeper - # if file, strip extension, in case of dir not. + if item.is_dir() and re.match(r"^\d+\.\d+$", item.name): + _versions = OpenPypeVersion.get_versions_from_directory( + item) + if _versions: + openpype_versions += _versions + + # if file exists, strip extension, in case of dir don't. name = item.name if item.is_dir() else item.stem result = OpenPypeVersion.version_in_str(name) @@ -519,9 +531,9 @@ class OpenPypeVersion(semver.VersionInfo): continue detected_version.path = item - _openpype_versions.append(detected_version) + openpype_versions.append(detected_version) - return sorted(_openpype_versions) + return sorted(openpype_versions) @staticmethod def get_installed_version_str() -> str: @@ -550,13 +562,13 @@ class OpenPypeVersion(semver.VersionInfo): staging: bool = False, local: bool = None, remote: bool = None - ) -> OpenPypeVersion: - """Get latest available version. + ) -> Union[OpenPypeVersion, None]: + """Get the latest available version. The version does not contain information about path and source. - This is utility version to get latest version from all found. Build - version is not listed if staging is enabled. + This is utility version to get the latest version from all found. + Build version is not listed if staging is enabled. Arguments 'local' and 'remote' define if local and remote repository versions are used. 
All versions are used if both are not set (or set @@ -568,6 +580,10 @@ class OpenPypeVersion(semver.VersionInfo): staging (bool, optional): List staging versions if True. local (bool, optional): List local versions if True. remote (bool, optional): List remote versions if True. + + Returns: + Latest OpenPypeVersion or None + """ if local is None and remote is None: local = True @@ -621,6 +637,21 @@ class OpenPypeVersion(semver.VersionInfo): return None return OpenPypeVersion(version=result) + def is_compatible(self, version: OpenPypeVersion): + """Test build compatibility. + + This will simply compare major and minor versions (ignoring patch + and the rest). + + Args: + version (OpenPypeVersion): Version to check compatibility with. + + Returns: + bool: if the version is compatible + + """ + return self.major == version.major and self.minor == version.minor + class BootstrapRepos: """Class for bootstrapping local OpenPype installation. @@ -714,9 +745,9 @@ class BootstrapRepos: self, repo_dir: Path = None) -> Union[OpenPypeVersion, None]: """Copy zip created from OpenPype repositories to user data dir. - This detect OpenPype version either in local "live" OpenPype + This detects OpenPype version either in local "live" OpenPype repository or in user provided path. Then it will zip it in temporary - directory and finally it will move it to destination which is user + directory, and finally it will move it to destination which is user data directory. Existing files will be replaced. Args: @@ -727,7 +758,7 @@ class BootstrapRepos: """ # if repo dir is not set, we detect local "live" OpenPype repository - # version and use it as a source. Otherwise repo_dir is user + # version and use it as a source. Otherwise, repo_dir is user # entered location. if repo_dir: version = self.get_version(repo_dir) @@ -741,8 +772,9 @@ class BootstrapRepos: return # create destination directory - if not self.data_dir.exists(): - self.data_dir.mkdir(parents=True) + destination = self.data_dir / f"{installed_version.major}.{installed_version.minor}" # noqa + if not destination.exists(): + destination.mkdir(parents=True) # create zip inside temporary directory. with tempfile.TemporaryDirectory() as temp_dir: @@ -770,7 +802,9 @@ class BootstrapRepos: Path to moved zip on success. """ - destination = self.data_dir / zip_file.name + version = OpenPypeVersion.version_in_str(zip_file.name) + destination_dir = self.data_dir / f"{version.major}.{version.minor}" + destination = destination_dir / zip_file.name if destination.exists(): self._print( @@ -782,7 +816,7 @@ class BootstrapRepos: self._print(str(e), LOG_ERROR, exc_info=True) return None try: - shutil.move(zip_file.as_posix(), self.data_dir.as_posix()) + shutil.move(zip_file.as_posix(), destination_dir.as_posix()) except shutil.Error as e: self._print(str(e), LOG_ERROR, exc_info=True) return None @@ -995,6 +1029,16 @@ class BootstrapRepos: @staticmethod def _validate_dir(path: Path) -> tuple: + """Validate checksums in a given path. + + Args: + path (Path): path to folder to validate. + + Returns: + tuple(bool, str): returns status and reason as a bool + and str in a tuple. 
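(Reviewer's aside.) The new `OpenPypeVersion.is_compatible` added above reduces build compatibility to a major.minor comparison. A small illustration with made-up version strings:

```python
from igniter.bootstrap_repos import OpenPypeVersion

installed = OpenPypeVersion(version="3.14.1")

# Same major.minor -> compatible, regardless of patch or prerelease.
assert installed.is_compatible(OpenPypeVersion(version="3.14.9-nightly.3"))

# Different minor -> incompatible.
assert not installed.is_compatible(OpenPypeVersion(version="3.13.0"))
```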
+ + """ checksums_file = Path(path / "checksums") if not checksums_file.exists(): # FIXME: This should be set to False sometimes in the future @@ -1076,11 +1120,24 @@ class BootstrapRepos: sys.path.insert(0, directory.as_posix()) @staticmethod - def find_openpype_version(version, staging): + def find_openpype_version( + version: Union[str, OpenPypeVersion], + staging: bool + ) -> Union[OpenPypeVersion, None]: + """Find location of specified OpenPype version. + + Args: + version (Union[str, OpenPypeVersion): Version to find. + staging (bool): Filter staging versions. + + Returns: + requested OpenPypeVersion. + + """ + installed_version = OpenPypeVersion.get_installed_version() if isinstance(version, str): version = OpenPypeVersion(version=version) - installed_version = OpenPypeVersion.get_installed_version() if installed_version == version: return installed_version @@ -1107,7 +1164,18 @@ class BootstrapRepos: return None @staticmethod - def find_latest_openpype_version(staging): + def find_latest_openpype_version( + staging: bool + ) -> Union[OpenPypeVersion, None]: + """Find the latest available OpenPype version in all location. + + Args: + staging (bool): True to look for staging versions. + + Returns: + Latest OpenPype version on None if nothing was found. + + """ installed_version = OpenPypeVersion.get_installed_version() local_versions = OpenPypeVersion.get_local_versions( staging=staging @@ -1138,7 +1206,8 @@ class BootstrapRepos: self, openpype_path: Union[Path, str] = None, staging: bool = False, - include_zips: bool = False) -> Union[List[OpenPypeVersion], None]: + include_zips: bool = False + ) -> Union[List[OpenPypeVersion], None]: """Get ordered dict of detected OpenPype version. Resolution order for OpenPype is following: @@ -1172,30 +1241,38 @@ class BootstrapRepos: ("Finding OpenPype in non-filesystem locations is" " not implemented yet.")) - dir_to_search = self.data_dir - user_versions = self.get_openpype_versions(self.data_dir, staging) - # if we have openpype_path specified, search only there. + # if checks bellow for OPENPYPE_PATH and registry fails, use data_dir + # DEPRECATED: lookup in root of this folder is deprecated in favour + # of major.minor sub-folders. + dirs_to_search = [self.data_dir] + if openpype_path: - dir_to_search = openpype_path + dirs_to_search = [openpype_path] + elif os.getenv("OPENPYPE_PATH") \ + and Path(os.getenv("OPENPYPE_PATH")).exists(): + # first try OPENPYPE_PATH and if that is not available, + # try registry. + dirs_to_search = [Path(os.getenv("OPENPYPE_PATH"))] else: - if os.getenv("OPENPYPE_PATH"): - if Path(os.getenv("OPENPYPE_PATH")).exists(): - dir_to_search = Path(os.getenv("OPENPYPE_PATH")) - else: - try: - registry_dir = Path( - str(self.registry.get_item("openPypePath"))) - if registry_dir.exists(): - dir_to_search = registry_dir + try: + registry_dir = Path( + str(self.registry.get_item("openPypePath"))) + if registry_dir.exists(): + dirs_to_search = [registry_dir] - except ValueError: - # nothing found in registry, we'll use data dir - pass + except ValueError: + # nothing found in registry, we'll use data dir + pass - openpype_versions = self.get_openpype_versions(dir_to_search, staging) - openpype_versions += user_versions + openpype_versions = [] + for dir_to_search in dirs_to_search: + try: + openpype_versions += self.get_openpype_versions( + dir_to_search, staging) + except ValueError: + # location is invalid, skip it + pass - # remove zip file version if needed. 
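(Reviewer's aside.) The `find_openpype` rewrite above establishes a clear priority for where versions are searched. A hypothetical stand-alone mirror of that order (the function name and signature are illustrative, not part of the PR):

```python
import os
from pathlib import Path

def resolve_search_dirs(data_dir, openpype_path=None, registry_dir=None):
    """Hypothetical mirror of the search-root priority used by find_openpype()."""
    if openpype_path:
        return [Path(openpype_path)]
    env_path = os.getenv("OPENPYPE_PATH")
    if env_path and Path(env_path).exists():
        return [Path(env_path)]
    if registry_dir and Path(registry_dir).exists():
        return [Path(registry_dir)]
    # DEPRECATED: root of the user data dir, kept for backwards compatibility.
    return [data_dir]
```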
if not include_zips: openpype_versions = [ v for v in openpype_versions if v.path.suffix != ".zip" @@ -1308,9 +1385,8 @@ class BootstrapRepos: raise ValueError( f"version {version} is not associated with any file") - destination = self.data_dir / version.path.stem - if destination.exists(): - assert destination.is_dir() + destination = self.data_dir / f"{version.major}.{version.minor}" / version.path.stem # noqa + if destination.exists() and destination.is_dir(): try: shutil.rmtree(destination) except OSError as e: @@ -1379,7 +1455,7 @@ class BootstrapRepos: else: dir_name = openpype_version.path.stem - destination = self.data_dir / dir_name + destination = self.data_dir / f"{openpype_version.major}.{openpype_version.minor}" / dir_name # noqa # test if destination directory already exist, if so lets delete it. if destination.exists() and force: @@ -1557,9 +1633,10 @@ class BootstrapRepos: return False return True - def get_openpype_versions(self, - openpype_dir: Path, - staging: bool = False) -> list: + def get_openpype_versions( + self, + openpype_dir: Path, + staging: bool = False) -> list: """Get all detected OpenPype versions in directory. Args: @@ -1574,14 +1651,20 @@ class BootstrapRepos: """ if not openpype_dir.exists() and not openpype_dir.is_dir(): - raise ValueError("specified directory is invalid") + raise ValueError(f"specified directory {openpype_dir} is invalid") - _openpype_versions = [] + openpype_versions = [] # iterate over directory in first level and find all that might # contain OpenPype. for item in openpype_dir.iterdir(): + # if the item is directory with major.minor version, dive deeper + if item.is_dir() and re.match(r"^\d+\.\d+$", item.name): + _versions = self.get_openpype_versions( + item, staging=staging) + if _versions: + openpype_versions += _versions - # if file, strip extension, in case of dir not. + # if it is file, strip extension, in case of dir don't. name = item.name if item.is_dir() else item.stem result = OpenPypeVersion.version_in_str(name) @@ -1601,12 +1684,12 @@ class BootstrapRepos: detected_version.path = item if staging and detected_version.is_staging(): - _openpype_versions.append(detected_version) + openpype_versions.append(detected_version) if not staging and not detected_version.is_staging(): - _openpype_versions.append(detected_version) + openpype_versions.append(detected_version) - return sorted(_openpype_versions) + return sorted(openpype_versions) class OpenPypeVersionExists(Exception): diff --git a/igniter/install_thread.py b/igniter/install_thread.py index 8e31f8cb8f..0cccf664e7 100644 --- a/igniter/install_thread.py +++ b/igniter/install_thread.py @@ -62,7 +62,7 @@ class InstallThread(QThread): progress_callback=self.set_progress, message=self.message) local_version = OpenPypeVersion.get_installed_version_str() - # if user did entered nothing, we install OpenPype from local version. + # if user did enter nothing, we install OpenPype from local version. # zip content of `repos`, copy it to user data dir and append # version to it. if not self._path: @@ -93,6 +93,23 @@ class InstallThread(QThread): detected = bs.find_openpype(include_zips=True) if detected: + if not OpenPypeVersion.get_installed_version().is_compatible( + detected[-1]): + self.message.emit(( + f"Latest detected version {detected[-1]} " + "is not compatible with the currently running " + f"{local_version}" + ), True) + self.message.emit(( + "Filtering detected versions to compatible ones..." 
+ ), False) + + detected = [ + version for version in detected + if version.is_compatible( + OpenPypeVersion.get_installed_version()) + ] + if OpenPypeVersion( version=local_version, path=Path()) < detected[-1]: self.message.emit(( diff --git a/igniter/tools.py b/igniter/tools.py index 57159b5e52..a9d592acf0 100644 --- a/igniter/tools.py +++ b/igniter/tools.py @@ -21,6 +21,11 @@ class OpenPypeVersionNotFound(Exception): pass +class OpenPypeVersionIncompatible(Exception): + """OpenPype version is not compatible with the installed one (build).""" + pass + + def should_add_certificate_path_to_mongo_url(mongo_url): """Check if should add ca certificate to mongo url. diff --git a/openpype/api.py b/openpype/api.py index fac2ae572b..c2227c1a52 100644 --- a/openpype/api.py +++ b/openpype/api.py @@ -9,6 +9,7 @@ from .settings import ( ) from .lib import ( PypeLogger, + Logger, Anatomy, config, execute, @@ -58,8 +59,6 @@ from .action import ( RepairContextAction ) -# for backward compatibility with Pype 2 -Logger = PypeLogger __all__ = [ "get_system_settings", diff --git a/openpype/cli.py b/openpype/cli.py index 9a2dfaa141..398d1a94c0 100644 --- a/openpype/cli.py +++ b/openpype/cli.py @@ -40,18 +40,6 @@ def settings(dev): PypeCommands().launch_settings_gui(dev) -@main.command() -def standalonepublisher(): - """Show Pype Standalone publisher UI.""" - PypeCommands().launch_standalone_publisher() - - -@main.command() -def traypublisher(): - """Show new OpenPype Standalone publisher UI.""" - PypeCommands().launch_traypublisher() - - @main.command() def tray(): """Launch pype tray. @@ -443,3 +431,26 @@ def interactive(): __version__, sys.version, sys.platform ) code.interact(banner) + + +@main.command() +@click.option("--build", help="Print only build version", + is_flag=True, default=False) +def version(build): + """Print OpenPype version.""" + + from openpype.version import __version__ + from igniter.bootstrap_repos import BootstrapRepos, OpenPypeVersion + from pathlib import Path + import os + + if getattr(sys, 'frozen', False): + local_version = BootstrapRepos.get_version( + Path(os.getenv("OPENPYPE_ROOT"))) + else: + local_version = OpenPypeVersion.get_installed_version_str() + + if build: + print(local_version) + return + print(f"{__version__} (booted: {local_version})") diff --git a/openpype/client/__init__.py b/openpype/client/__init__.py index 97e6755d09..64a82334d9 100644 --- a/openpype/client/__init__.py +++ b/openpype/client/__init__.py @@ -1,3 +1,7 @@ +from .mongo import ( + OpenPypeMongoConnection, +) + from .entities import ( get_projects, get_project, @@ -25,6 +29,8 @@ from .entities import ( get_last_version_by_subset_name, get_output_link_versions, + version_is_latest, + get_representation_by_id, get_representation_by_name, get_representations, @@ -40,6 +46,8 @@ from .entities import ( ) __all__ = ( + "OpenPypeMongoConnection", + "get_projects", "get_project", "get_whole_project", @@ -66,6 +74,8 @@ __all__ = ( "get_last_version_by_subset_name", "get_output_link_versions", + "version_is_latest", + "get_representation_by_id", "get_representation_by_name", "get_representations", diff --git a/openpype/client/entities.py b/openpype/client/entities.py index e7eeadcf48..3d2730a17c 100644 --- a/openpype/client/entities.py +++ b/openpype/client/entities.py @@ -6,24 +6,13 @@ that has project name as a context (e.g. on 'ProjectEntity'?). + We will need more specific functions doing wery specific queires really fast. 
""" -import os +import re import collections import six from bson.objectid import ObjectId -from openpype.lib.mongo import OpenPypeMongoConnection - - -def _get_project_database(): - db_name = os.environ.get("AVALON_DB") or "avalon" - return OpenPypeMongoConnection.get_mongo_client()[db_name] - - -def _get_project_connection(project_name): - if not project_name: - raise ValueError("Invalid project name {}".format(str(project_name))) - return _get_project_database()[project_name] +from .mongo import get_project_database, get_project_connection def _prepare_fields(fields, required_fields=None): @@ -58,7 +47,7 @@ def _convert_ids(in_ids): def get_projects(active=True, inactive=False, fields=None): - mongodb = _get_project_database() + mongodb = get_project_database() for project_name in mongodb.collection_names(): if project_name in ("system.indexes",): continue @@ -93,7 +82,7 @@ def get_project(project_name, active=True, inactive=False, fields=None): {"data.active": False}, ] - conn = _get_project_connection(project_name) + conn = get_project_connection(project_name) return conn.find_one(query_filter, _prepare_fields(fields)) @@ -108,7 +97,7 @@ def get_whole_project(project_name): project collection. """ - conn = _get_project_connection(project_name) + conn = get_project_connection(project_name) return conn.find({}) @@ -131,7 +120,7 @@ def get_asset_by_id(project_name, asset_id, fields=None): return None query_filter = {"type": "asset", "_id": asset_id} - conn = _get_project_connection(project_name) + conn = get_project_connection(project_name) return conn.find_one(query_filter, _prepare_fields(fields)) @@ -153,7 +142,7 @@ def get_asset_by_name(project_name, asset_name, fields=None): return None query_filter = {"type": "asset", "name": asset_name} - conn = _get_project_connection(project_name) + conn = get_project_connection(project_name) return conn.find_one(query_filter, _prepare_fields(fields)) @@ -223,7 +212,7 @@ def _get_assets( return [] query_filter["data.visualParent"] = {"$in": parent_ids} - conn = _get_project_connection(project_name) + conn = get_project_connection(project_name) return conn.find(query_filter, _prepare_fields(fields)) @@ -323,7 +312,7 @@ def get_asset_ids_with_subsets(project_name, asset_ids=None): return [] subset_query["parent"] = {"$in": asset_ids} - conn = _get_project_connection(project_name) + conn = get_project_connection(project_name) result = conn.aggregate([ { "$match": subset_query @@ -363,7 +352,7 @@ def get_subset_by_id(project_name, subset_id, fields=None): return None query_filters = {"type": "subset", "_id": subset_id} - conn = _get_project_connection(project_name) + conn = get_project_connection(project_name) return conn.find_one(query_filters, _prepare_fields(fields)) @@ -394,7 +383,7 @@ def get_subset_by_name(project_name, subset_name, asset_id, fields=None): "name": subset_name, "parent": asset_id } - conn = _get_project_connection(project_name) + conn = get_project_connection(project_name) return conn.find_one(query_filters, _prepare_fields(fields)) @@ -467,7 +456,7 @@ def get_subsets( return [] query_filter["$or"] = or_query - conn = _get_project_connection(project_name) + conn = get_project_connection(project_name) return conn.find(query_filter, _prepare_fields(fields)) @@ -491,7 +480,7 @@ def get_subset_families(project_name, subset_ids=None): return set() subset_filter["_id"] = {"$in": list(subset_ids)} - conn = _get_project_connection(project_name) + conn = get_project_connection(project_name) result = list(conn.aggregate([ {"$match": 
subset_filter}, {"$project": { @@ -529,7 +518,7 @@ def get_version_by_id(project_name, version_id, fields=None): "type": {"$in": ["version", "hero_version"]}, "_id": version_id } - conn = _get_project_connection(project_name) + conn = get_project_connection(project_name) return conn.find_one(query_filter, _prepare_fields(fields)) @@ -552,7 +541,7 @@ def get_version_by_name(project_name, version, subset_id, fields=None): if not subset_id: return None - conn = _get_project_connection(project_name) + conn = get_project_connection(project_name) query_filter = { "type": "version", "parent": subset_id, @@ -561,6 +550,42 @@ def get_version_by_name(project_name, version, subset_id, fields=None): return conn.find_one(query_filter, _prepare_fields(fields)) +def version_is_latest(project_name, version_id): + """Is version the latest from it's subset. + + Note: + Hero versions are considered as latest. + + Todo: + Maybe raise exception when version was not found? + + Args: + project_name (str):Name of project where to look for queried entities. + version_id (Union[str, ObjectId]): Version id which is checked. + + Returns: + bool: True if is latest version from subset else False. + """ + + version_id = _convert_id(version_id) + if not version_id: + return False + version_doc = get_version_by_id( + project_name, version_id, fields=["_id", "type", "parent"] + ) + # What to do when version is not found? + if not version_doc: + return False + + if version_doc["type"] == "hero_version": + return True + + last_version = get_last_version_by_subset_id( + project_name, version_doc["parent"], fields=["_id"] + ) + return last_version["_id"] == version_id + + def _get_versions( project_name, subset_ids=None, @@ -606,7 +631,7 @@ def _get_versions( else: query_filter["name"] = {"$in": versions} - conn = _get_project_connection(project_name) + conn = get_project_connection(project_name) return conn.find(query_filter, _prepare_fields(fields)) @@ -765,11 +790,11 @@ def get_output_link_versions(project_name, version_id, fields=None): if not version_id: return [] - conn = _get_project_connection(project_name) + conn = get_project_connection(project_name) # Does make sense to look for hero versions? 
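(Reviewer's aside.) A usage sketch for the new `version_is_latest` helper added above; it assumes a configured `OPENPYPE_MONGO` connection, and the project name and id below are placeholders:

```python
from bson.objectid import ObjectId
from openpype.client import version_is_latest

version_id = ObjectId("507f1f77bcf86cd799439011")  # placeholder id
if version_is_latest("my_project", version_id):
    print("Version is still the latest of its subset (hero versions always pass).")
else:
    print("A newer version exists, or the id was not found.")
```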
query_filter = { "type": "version", - "data.inputLinks.input": version_id + "data.inputLinks.id": version_id } return conn.find(query_filter, _prepare_fields(fields)) @@ -830,7 +855,7 @@ def get_last_versions(project_name, subset_ids, fields=None): {"$group": group_item} ] - conn = _get_project_connection(project_name) + conn = get_project_connection(project_name) aggregate_result = conn.aggregate(aggregation_pipeline) if limit_query: output = {} @@ -948,7 +973,7 @@ def get_representation_by_id(project_name, representation_id, fields=None): if representation_id is not None: query_filter["_id"] = _convert_id(representation_id) - conn = _get_project_connection(project_name) + conn = get_project_connection(project_name) return conn.find_one(query_filter, _prepare_fields(fields)) @@ -981,21 +1006,74 @@ def get_representation_by_name( "parent": version_id } - conn = _get_project_connection(project_name) + conn = get_project_connection(project_name) return conn.find_one(query_filter, _prepare_fields(fields)) +def _flatten_dict(data): + flatten_queue = collections.deque() + flatten_queue.append(data) + output = {} + while flatten_queue: + item = flatten_queue.popleft() + for key, value in item.items(): + if not isinstance(value, dict): + output[key] = value + continue + + tmp = {} + for subkey, subvalue in value.items(): + new_key = "{}.{}".format(key, subkey) + tmp[new_key] = subvalue + flatten_queue.append(tmp) + return output + + +def _regex_filters(filters): + output = [] + for key, value in filters.items(): + regexes = [] + a_values = [] + if isinstance(value, re.Pattern): + regexes.append(value) + elif isinstance(value, (list, tuple, set)): + for item in value: + if isinstance(item, re.Pattern): + regexes.append(item) + else: + a_values.append(item) + else: + a_values.append(value) + + key_filters = [] + if len(a_values) == 1: + key_filters.append({key: a_values[0]}) + elif a_values: + key_filters.append({key: {"$in": a_values}}) + + for regex in regexes: + key_filters.append({key: {"$regex": regex}}) + + if len(key_filters) == 1: + output.append(key_filters[0]) + else: + output.append({"$or": key_filters}) + + return output + + def _get_representations( project_name, representation_ids, representation_names, version_ids, - extensions, + context_filters, names_by_version_ids, standard, archived, fields ): + default_output = [] repre_types = [] if standard: repre_types.append("representation") @@ -1003,7 +1081,7 @@ def _get_representations( repre_types.append("archived_representation") if not repre_types: - return [] + return default_output if len(repre_types) == 1: query_filter = {"type": repre_types[0]} @@ -1013,25 +1091,21 @@ def _get_representations( if representation_ids is not None: representation_ids = _convert_ids(representation_ids) if not representation_ids: - return [] + return default_output query_filter["_id"] = {"$in": representation_ids} if representation_names is not None: if not representation_names: - return [] + return default_output query_filter["name"] = {"$in": list(representation_names)} if version_ids is not None: version_ids = _convert_ids(version_ids) if not version_ids: - return [] + return default_output query_filter["parent"] = {"$in": version_ids} - if extensions is not None: - if not extensions: - return [] - query_filter["context.ext"] = {"$in": list(extensions)} - + or_queries = [] if names_by_version_ids is not None: or_query = [] for version_id, names in names_by_version_ids.items(): @@ -1041,10 +1115,38 @@ def _get_representations( "name": {"$in": 
list(names)} }) if not or_query: - return [] - query_filter["$or"] = or_query + return default_output + or_queries.append(or_query) - conn = _get_project_connection(project_name) + if context_filters is not None: + if not context_filters: + return [] + _flatten_filters = _flatten_dict(context_filters) + flatten_filters = {} + for key, value in _flatten_filters.items(): + if not key.startswith("context"): + key = "context.{}".format(key) + flatten_filters[key] = value + + for item in _regex_filters(flatten_filters): + for key, value in item.items(): + if key != "$or": + query_filter[key] = value + + elif value: + or_queries.append(value) + + if len(or_queries) == 1: + query_filter["$or"] = or_queries[0] + elif or_queries: + and_query = [] + for or_query in or_queries: + if isinstance(or_query, list): + or_query = {"$or": or_query} + and_query.append(or_query) + query_filter["$and"] = and_query + + conn = get_project_connection(project_name) return conn.find(query_filter, _prepare_fields(fields)) @@ -1054,7 +1156,7 @@ def get_representations( representation_ids=None, representation_names=None, version_ids=None, - extensions=None, + context_filters=None, names_by_version_ids=None, archived=False, standard=True, @@ -1072,8 +1174,8 @@ def get_representations( as filter. Filter ignored if 'None' is passed. version_ids (Iterable[str]): Subset ids used as parent filter. Filter ignored if 'None' is passed. - extensions (Iterable[str]): Filter by extension of main representation - file (without dot). + context_filters (Dict[str, List[str, re.Pattern]]): Filter by + representation context fields. names_by_version_ids (dict[ObjectId, list[str]]): Complex filtering using version ids and list of names under the version. archived (bool): Output will also contain archived representations. @@ -1089,7 +1191,7 @@ def get_representations( representation_ids=representation_ids, representation_names=representation_names, version_ids=version_ids, - extensions=extensions, + context_filters=context_filters, names_by_version_ids=names_by_version_ids, standard=True, archived=archived, @@ -1102,7 +1204,7 @@ def get_archived_representations( representation_ids=None, representation_names=None, version_ids=None, - extensions=None, + context_filters=None, names_by_version_ids=None, fields=None ): @@ -1118,8 +1220,8 @@ def get_archived_representations( as filter. Filter ignored if 'None' is passed. version_ids (Iterable[str]): Subset ids used as parent filter. Filter ignored if 'None' is passed. - extensions (Iterable[str]): Filter by extension of main representation - file (without dot). + context_filters (Dict[str, List[str, re.Pattern]]): Filter by + representation context fields. names_by_version_ids (dict[ObjectId, List[str]]): Complex filtering using version ids and list of names under the version. fields (Iterable[str]): Fields that should be returned. All fields are @@ -1134,7 +1236,7 @@ def get_archived_representations( representation_ids=representation_ids, representation_names=representation_names, version_ids=version_ids, - extensions=extensions, + context_filters=context_filters, names_by_version_ids=names_by_version_ids, standard=False, archived=True, @@ -1157,58 +1259,64 @@ def get_representations_parents(project_name, representations): dict[ObjectId, tuple]: Parents by representation id. 
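(Reviewer's aside.) The `context_filters` argument replacing `extensions` above is more general: nested dicts are flattened to dotted `context.*` keys, plain values become `$in` queries and `re.Pattern` values become `$regex` queries. A sketch with made-up project and filter values:

```python
import re
from openpype.client import get_representations

repre_docs = get_representations(
    "my_project",
    context_filters={
        "ext": ["exr", "mov"],                  # -> {"context.ext": {"$in": [...]}}
        "task": {"name": re.compile("^comp")},  # -> {"context.task.name": {"$regex": ...}}
    }
)
```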
""" - repres_by_version_id = collections.defaultdict(list) - versions_by_version_id = {} - versions_by_subset_id = collections.defaultdict(list) - subsets_by_subset_id = {} - subsets_by_asset_id = collections.defaultdict(list) + repre_docs_by_version_id = collections.defaultdict(list) + version_docs_by_version_id = {} + version_docs_by_subset_id = collections.defaultdict(list) + subset_docs_by_subset_id = {} + subset_docs_by_asset_id = collections.defaultdict(list) output = {} - for representation in representations: - repre_id = representation["_id"] + for repre_doc in representations: + repre_id = repre_doc["_id"] + version_id = repre_doc["parent"] output[repre_id] = (None, None, None, None) - version_id = representation["parent"] - repres_by_version_id[version_id].append(representation) + repre_docs_by_version_id[version_id].append(repre_doc) - versions = get_versions( - project_name, version_ids=repres_by_version_id.keys() + version_docs = get_versions( + project_name, + version_ids=repre_docs_by_version_id.keys(), + hero=True ) - for version in versions: - version_id = version["_id"] - subset_id = version["parent"] - versions_by_version_id[version_id] = version - versions_by_subset_id[subset_id].append(version) + for version_doc in version_docs: + version_id = version_doc["_id"] + subset_id = version_doc["parent"] + version_docs_by_version_id[version_id] = version_doc + version_docs_by_subset_id[subset_id].append(version_doc) - subsets = get_subsets( - project_name, subset_ids=versions_by_subset_id.keys() + subset_docs = get_subsets( + project_name, subset_ids=version_docs_by_subset_id.keys() ) - for subset in subsets: - subset_id = subset["_id"] - asset_id = subset["parent"] - subsets_by_subset_id[subset_id] = subset - subsets_by_asset_id[asset_id].append(subset) + for subset_doc in subset_docs: + subset_id = subset_doc["_id"] + asset_id = subset_doc["parent"] + subset_docs_by_subset_id[subset_id] = subset_doc + subset_docs_by_asset_id[asset_id].append(subset_doc) - assets = get_assets(project_name, asset_ids=subsets_by_asset_id.keys()) - assets_by_id = { - asset["_id"]: asset - for asset in assets + asset_docs = get_assets( + project_name, asset_ids=subset_docs_by_asset_id.keys() + ) + asset_docs_by_id = { + asset_doc["_id"]: asset_doc + for asset_doc in asset_docs } - project = get_project(project_name) + project_doc = get_project(project_name) - for version_id, representations in repres_by_version_id.items(): - asset = None - subset = None - version = versions_by_version_id.get(version_id) - if version: - subset_id = version["parent"] - subset = subsets_by_subset_id.get(subset_id) - if subset: - asset_id = subset["parent"] - asset = assets_by_id.get(asset_id) + for version_id, repre_docs in repre_docs_by_version_id.items(): + asset_doc = None + subset_doc = None + version_doc = version_docs_by_version_id.get(version_id) + if version_doc: + subset_id = version_doc["parent"] + subset_doc = subset_docs_by_subset_id.get(subset_id) + if subset_doc: + asset_id = subset_doc["parent"] + asset_doc = asset_docs_by_id.get(asset_id) - for representation in representations: - repre_id = representation["_id"] - output[repre_id] = (version, subset, asset, project) + for repre_doc in repre_docs: + repre_id = repre_doc["_id"] + output[repre_id] = ( + version_doc, subset_doc, asset_doc, project_doc + ) return output @@ -1255,7 +1363,7 @@ def get_thumbnail_id_from_source(project_name, src_type, src_id): query_filter = {"_id": _convert_id(src_id)} - conn = _get_project_connection(project_name) + conn 
= get_project_connection(project_name) src_doc = conn.find_one(query_filter, {"data.thumbnail_id"}) if src_doc: return src_doc.get("data", {}).get("thumbnail_id") @@ -1288,7 +1396,7 @@ def get_thumbnails(project_name, thumbnail_ids, fields=None): "type": "thumbnail", "_id": {"$in": thumbnail_ids} } - conn = _get_project_connection(project_name) + conn = get_project_connection(project_name) return conn.find(query_filter, _prepare_fields(fields)) @@ -1309,7 +1417,7 @@ def get_thumbnail(project_name, thumbnail_id, fields=None): if not thumbnail_id: return None query_filter = {"type": "thumbnail", "_id": _convert_id(thumbnail_id)} - conn = _get_project_connection(project_name) + conn = get_project_connection(project_name) return conn.find_one(query_filter, _prepare_fields(fields)) @@ -1340,14 +1448,14 @@ def get_workfile_info( "task_name": task_name, "filename": filename } - conn = _get_project_connection(project_name) + conn = get_project_connection(project_name) return conn.find_one(query_filter, _prepare_fields(fields)) """ ## Custom data storage: - Settings - OP settings overrides and local settings -- Logging - logs from PypeLogger +- Logging - logs from Logger - Webpublisher - jobs - Ftrack - events - Maya - Shaders diff --git a/openpype/client/mongo.py b/openpype/client/mongo.py new file mode 100644 index 0000000000..72acbc5476 --- /dev/null +++ b/openpype/client/mongo.py @@ -0,0 +1,235 @@ +import os +import sys +import time +import logging +import pymongo +import certifi + +if sys.version_info[0] == 2: + from urlparse import urlparse, parse_qs +else: + from urllib.parse import urlparse, parse_qs + + +class MongoEnvNotSet(Exception): + pass + + +def _decompose_url(url): + """Decompose mongo url to basic components. + + Used for creation of MongoHandler which expect mongo url components as + separated kwargs. Components are at the end not used as we're setting + connection directly this is just a dumb components for MongoHandler + validation pass. + """ + + # Use first url from passed url + # - this is because it is possible to pass multiple urls for multiple + # replica sets which would crash on urlparse otherwise + # - please don't use comma in username of password + url = url.split(",")[0] + components = { + "scheme": None, + "host": None, + "port": None, + "username": None, + "password": None, + "auth_db": None + } + + result = urlparse(url) + if result.scheme is None: + _url = "mongodb://{}".format(url) + result = urlparse(_url) + + components["scheme"] = result.scheme + components["host"] = result.hostname + try: + components["port"] = result.port + except ValueError: + raise RuntimeError("invalid port specified") + components["username"] = result.username + components["password"] = result.password + + try: + components["auth_db"] = parse_qs(result.query)['authSource'][0] + except KeyError: + # no auth db provided, mongo will use the one we are connecting to + pass + + return components + + +def get_default_components(): + mongo_url = os.environ.get("OPENPYPE_MONGO") + if mongo_url is None: + raise MongoEnvNotSet( + "URL for Mongo logging connection is not set." + ) + return _decompose_url(mongo_url) + + +def should_add_certificate_path_to_mongo_url(mongo_url): + """Check if should add ca certificate to mongo url. + + Since 30.9.2021 cloud mongo requires newer certificates that are not + available on most of workstation. This adds path to certifi certificate + which is valid for it. 
To add the certificate path url must have scheme + 'mongodb+srv' or has 'ssl=true' or 'tls=true' in url query. + """ + + parsed = urlparse(mongo_url) + query = parse_qs(parsed.query) + lowered_query_keys = set(key.lower() for key in query.keys()) + add_certificate = False + # Check if url 'ssl' or 'tls' are set to 'true' + for key in ("ssl", "tls"): + if key in query and "true" in query["ssl"]: + add_certificate = True + break + + # Check if url contains 'mongodb+srv' + if not add_certificate and parsed.scheme == "mongodb+srv": + add_certificate = True + + # Check if url does already contain certificate path + if add_certificate and "tlscafile" in lowered_query_keys: + add_certificate = False + + return add_certificate + + +def validate_mongo_connection(mongo_uri): + """Check if provided mongodb URL is valid. + + Args: + mongo_uri (str): URL to validate. + + Raises: + ValueError: When port in mongo uri is not valid. + pymongo.errors.InvalidURI: If passed mongo is invalid. + pymongo.errors.ServerSelectionTimeoutError: If connection timeout + passed so probably couldn't connect to mongo server. + + """ + + client = OpenPypeMongoConnection.create_connection( + mongo_uri, retry_attempts=1 + ) + client.close() + + +class OpenPypeMongoConnection: + """Singleton MongoDB connection. + + Keeps MongoDB connections by url. + """ + + mongo_clients = {} + log = logging.getLogger("OpenPypeMongoConnection") + + @staticmethod + def get_default_mongo_url(): + return os.environ["OPENPYPE_MONGO"] + + @classmethod + def get_mongo_client(cls, mongo_url=None): + if mongo_url is None: + mongo_url = cls.get_default_mongo_url() + + connection = cls.mongo_clients.get(mongo_url) + if connection: + # Naive validation of existing connection + try: + connection.server_info() + with connection.start_session(): + pass + except Exception: + connection = None + + if not connection: + cls.log.debug("Creating mongo connection to {}".format(mongo_url)) + connection = cls.create_connection(mongo_url) + cls.mongo_clients[mongo_url] = connection + + return connection + + @classmethod + def create_connection(cls, mongo_url, timeout=None, retry_attempts=None): + parsed = urlparse(mongo_url) + # Force validation of scheme + if parsed.scheme not in ["mongodb", "mongodb+srv"]: + raise pymongo.errors.InvalidURI(( + "Invalid URI scheme:" + " URI must begin with 'mongodb://' or 'mongodb+srv://'" + )) + + if timeout is None: + timeout = int(os.environ.get("AVALON_TIMEOUT") or 1000) + + kwargs = { + "serverSelectionTimeoutMS": timeout + } + if should_add_certificate_path_to_mongo_url(mongo_url): + kwargs["ssl_ca_certs"] = certifi.where() + + mongo_client = pymongo.MongoClient(mongo_url, **kwargs) + + if retry_attempts is None: + retry_attempts = 3 + + elif not retry_attempts: + retry_attempts = 1 + + last_exc = None + valid = False + t1 = time.time() + for attempt in range(1, retry_attempts + 1): + try: + mongo_client.server_info() + with mongo_client.start_session(): + pass + valid = True + break + + except Exception as exc: + last_exc = exc + if attempt < retry_attempts: + cls.log.warning( + "Attempt {} failed. Retrying... ".format(attempt) + ) + time.sleep(1) + + if not valid: + raise last_exc + + cls.log.info("Connected to {}, delay {:.3f}s".format( + mongo_url, time.time() - t1 + )) + return mongo_client + + +def get_project_database(): + db_name = os.environ.get("AVALON_DB") or "avalon" + return OpenPypeMongoConnection.get_mongo_client()[db_name] + + +def get_project_connection(project_name): + """Direct access to mongo collection. 
+ + We're trying to avoid using direct access to mongo. This should be used + only for Create, Update and Remove operations until there are implemented + api calls for that. + + Args: + project_name(str): Project name for which collection should be + returned. + + Returns: + pymongo.Collection: Collection realated to passed project. + """ + + if not project_name: + raise ValueError("Invalid project name {}".format(str(project_name))) + return get_project_database()[project_name] diff --git a/openpype/client/notes.md b/openpype/client/notes.md new file mode 100644 index 0000000000..a261b86eca --- /dev/null +++ b/openpype/client/notes.md @@ -0,0 +1,39 @@ +# Client functionality +## Reason +Preparation for OpenPype v4 server. Goal is to remove direct mongo calls in code to prepare a little bit for different source of data for code before. To start think about database calls less as mongo calls but more universally. To do so was implemented simple wrapper around database calls to not use pymongo specific code. + +Current goal is not to make universal database model which can be easily replaced with any different source of data but to make it close as possible. Current implementation of OpenPype is too tighly connected to pymongo and it's abilities so we're trying to get closer with long term changes that can be used even in current state. + +## Queries +Query functions don't use full potential of mongo queries like very specific queries based on subdictionaries or unknown structures. We try to avoid these calls as much as possible because they'll probably won't be available in future. If it's really necessary a new function can be added but only if it's reasonable for overall logic. All query functions were moved to `~/client/entities.py`. Each function has arguments with available filters and possible reduce of returned keys for each entity. + +## Changes +Changes are a little bit complicated. Mongo has many options how update can happen which had to be reduced also it would be at this stage complicated to validate values which are created or updated thus automation is at this point almost none. Changes can be made using operations available in `~/client/operations.py`. Each operation require project name and entity type, but may require operation specific data. + +### Create +Create operations expect already prepared document data, for that are prepared functions creating skeletal structures of documents (do not fill all required data), except `_id` all data should be right. Existence of entity is not validated so if the same creation operation is send n times it will create the entity n times which can cause issues. + +### Update +Update operation require entity id and keys that should be changed, update dictionary must have {"key": value}. If value should be set in nested dictionary the key must have also all subkeys joined with dot `.` (e.g. `{"data": {"fps": 25}}` -> `{"data.fps": 25}`). To simplify update dictionaries were prepared functions which does that for you, their name has template `prepare__update_data` - they work on comparison of previous document and new document. If there is missing function for requested entity type it is because we didn't need it yet and require implementaion. + +### Delete +Delete operation need entity id. Entity will be deleted from mongo. + + +## What (probably) won't be replaced +Some parts of code are still using direct mongo calls. 
In most of cases it is for very specific calls that are module specific or their usage will completely change in future. +- Mongo calls that are not project specific (out of `avalon` collection) will be removed or will have to use different mechanism how the data are stored. At this moment it is related to OpenPype settings and logs, ftrack server events, some other data. +- Sync server queries. They're complex and very specific for sync server module. Their replacement will require specific calls to OpenPype server in v4 thus their abstraction with wrapper is irrelevant and would complicate production in v3. +- Project managers (ftrack, kitsu, shotgrid, embedded Project Manager, etc.). Project managers are creating, updating or removing assets in v3, but in v4 will create folders with different structure. Wrapping creation of assets would not help to prepare for v4 because of new data structures. The same can be said about editorial Extract Hierarchy Avalon plugin which create project structure. +- Code parts that is marked as deprecated in v3 or will be deprecated in v4. + - integrate asset legacy publish plugin - already is legacy kept for safety + - integrate thumbnail - thumbnails will be stored in different way in v4 + - input links - link will be stored in different way and will have different mechanism of linking. In v3 are links limited to same entity type "asset <-> asset" or "representation <-> representation". + +## Known missing replacements +- change subset group in loader tool +- integrate subset group +- query input links in openpype lib +- create project in openpype lib +- save/create workfile doc in openpype lib +- integrate hero version diff --git a/openpype/client/operations.py b/openpype/client/operations.py new file mode 100644 index 0000000000..c0716ee109 --- /dev/null +++ b/openpype/client/operations.py @@ -0,0 +1,640 @@ +import re +import uuid +import copy +import collections +from abc import ABCMeta, abstractmethod, abstractproperty + +import six +from bson.objectid import ObjectId +from pymongo import DeleteOne, InsertOne, UpdateOne + +from .mongo import get_project_connection + +REMOVED_VALUE = object() + +PROJECT_NAME_ALLOWED_SYMBOLS = "a-zA-Z0-9_" +PROJECT_NAME_REGEX = re.compile( + "^[{}]+$".format(PROJECT_NAME_ALLOWED_SYMBOLS) +) + +CURRENT_PROJECT_SCHEMA = "openpype:project-3.0" +CURRENT_PROJECT_CONFIG_SCHEMA = "openpype:config-2.0" +CURRENT_ASSET_DOC_SCHEMA = "openpype:asset-3.0" +CURRENT_SUBSET_SCHEMA = "openpype:subset-3.0" +CURRENT_VERSION_SCHEMA = "openpype:version-3.0" +CURRENT_REPRESENTATION_SCHEMA = "openpype:representation-2.0" +CURRENT_WORKFILE_INFO_SCHEMA = "openpype:workfile-1.0" + + +def _create_or_convert_to_mongo_id(mongo_id): + if mongo_id is None: + return ObjectId() + return ObjectId(mongo_id) + + +def new_project_document( + project_name, project_code, config, data=None, entity_id=None +): + """Create skeleton data of project document. + + Args: + project_name (str): Name of project. Used as identifier of a project. + project_code (str): Shorter version of projet without spaces and + special characters (in most of cases). Should be also considered + as unique name across projects. + config (Dic[str, Any]): Project config consist of roots, templates, + applications and other project Anatomy related data. + data (Dict[str, Any]): Project data with information about it's + attributes (e.g. 'fps' etc.) or integration specific keys. + entity_id (Union[str, ObjectId]): Predefined id of document. New id is + created if not passed. 
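+
+    Example:
+        A minimal illustrative sketch; the config and data values below
+        are hypothetical and not taken from this patch:
+
+            project_doc = new_project_document(
+                "MyProject",
+                "myprj",
+                config={"roots": {}, "templates": {}},
+                data={"fps": 25},
+            )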
+ + Returns: + Dict[str, Any]: Skeleton of project document. + """ + + if data is None: + data = {} + + data["code"] = project_code + + return { + "_id": _create_or_convert_to_mongo_id(entity_id), + "name": project_name, + "type": CURRENT_PROJECT_SCHEMA, + "entity_data": data, + "config": config + } + + +def new_asset_document( + name, project_id, parent_id, parents, data=None, entity_id=None +): + """Create skeleton data of asset document. + + Args: + name (str): Is considered as unique identifier of asset in project. + project_id (Union[str, ObjectId]): Id of project doument. + parent_id (Union[str, ObjectId]): Id of parent asset. + parents (List[str]): List of parent assets names. + data (Dict[str, Any]): Asset document data. Empty dictionary is used + if not passed. Value of 'parent_id' is used to fill 'visualParent'. + entity_id (Union[str, ObjectId]): Predefined id of document. New id is + created if not passed. + + Returns: + Dict[str, Any]: Skeleton of asset document. + """ + + if data is None: + data = {} + if parent_id is not None: + parent_id = ObjectId(parent_id) + data["visualParent"] = parent_id + data["parents"] = parents + + return { + "_id": _create_or_convert_to_mongo_id(entity_id), + "type": "asset", + "name": name, + "parent": ObjectId(project_id), + "data": data, + "schema": CURRENT_ASSET_DOC_SCHEMA + } + + +def new_subset_document(name, family, asset_id, data=None, entity_id=None): + """Create skeleton data of subset document. + + Args: + name (str): Is considered as unique identifier of subset under asset. + family (str): Subset's family. + asset_id (Union[str, ObjectId]): Id of parent asset. + data (Dict[str, Any]): Subset document data. Empty dictionary is used + if not passed. Value of 'family' is used to fill 'family'. + entity_id (Union[str, ObjectId]): Predefined id of document. New id is + created if not passed. + + Returns: + Dict[str, Any]: Skeleton of subset document. + """ + + if data is None: + data = {} + data["family"] = family + return { + "_id": _create_or_convert_to_mongo_id(entity_id), + "schema": CURRENT_SUBSET_SCHEMA, + "type": "subset", + "name": name, + "data": data, + "parent": asset_id + } + + +def new_version_doc(version, subset_id, data=None, entity_id=None): + """Create skeleton data of version document. + + Args: + version (int): Is considered as unique identifier of version + under subset. + subset_id (Union[str, ObjectId]): Id of parent subset. + data (Dict[str, Any]): Version document data. + entity_id (Union[str, ObjectId]): Predefined id of document. New id is + created if not passed. + + Returns: + Dict[str, Any]: Skeleton of version document. + """ + + if data is None: + data = {} + + return { + "_id": _create_or_convert_to_mongo_id(entity_id), + "schema": CURRENT_VERSION_SCHEMA, + "type": "version", + "name": int(version), + "parent": subset_id, + "data": data + } + + +def new_representation_doc( + name, version_id, context, data=None, entity_id=None +): + """Create skeleton data of asset document. + + Args: + version (int): Is considered as unique identifier of version + under subset. + version_id (Union[str, ObjectId]): Id of parent version. + context (Dict[str, Any]): Representation context used for fill template + of to query. + data (Dict[str, Any]): Representation document data. + entity_id (Union[str, ObjectId]): Predefined id of document. New id is + created if not passed. + + Returns: + Dict[str, Any]: Skeleton of version document. 
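+
+    Example:
+        Illustrative sketch only; 'version_doc' and the context keys shown
+        are hypothetical, not mandated by this function:
+
+            repre_doc = new_representation_doc(
+                "exr",
+                version_doc["_id"],
+                context={"asset": "sh010", "subset": "renderMain", "ext": "exr"},
+            )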
+ """ + + if data is None: + data = {} + + return { + "_id": _create_or_convert_to_mongo_id(entity_id), + "schema": CURRENT_REPRESENTATION_SCHEMA, + "type": "representation", + "parent": version_id, + "name": name, + "data": data, + + # Imprint shortcut to context for performance reasons. + "context": context + } + + +def new_workfile_info_doc( + filename, asset_id, task_name, files, data=None, entity_id=None +): + """Create skeleton data of workfile info document. + + Workfile document is at this moment used primarily for artist notes. + + Args: + filename (str): Filename of workfile. + asset_id (Union[str, ObjectId]): Id of asset under which workfile live. + task_name (str): Task under which was workfile created. + files (List[str]): List of rootless filepaths related to workfile. + data (Dict[str, Any]): Additional metadata. + + Returns: + Dict[str, Any]: Skeleton of workfile info document. + """ + + if not data: + data = {} + + return { + "_id": _create_or_convert_to_mongo_id(entity_id), + "type": "workfile", + "parent": ObjectId(asset_id), + "task_name": task_name, + "filename": filename, + "data": data, + "files": files + } + + +def _prepare_update_data(old_doc, new_doc, replace): + changes = {} + for key, value in new_doc.items(): + if key not in old_doc or value != old_doc[key]: + changes[key] = value + + if replace: + for key in old_doc.keys(): + if key not in new_doc: + changes[key] = REMOVED_VALUE + return changes + + +def prepare_subset_update_data(old_doc, new_doc, replace=True): + """Compare two subset documents and prepare update data. + + Based on compared values will create update data for 'UpdateOperation'. + + Empty output means that documents are identical. + + Returns: + Dict[str, Any]: Changes between old and new document. + """ + + return _prepare_update_data(old_doc, new_doc, replace) + + +def prepare_version_update_data(old_doc, new_doc, replace=True): + """Compare two version documents and prepare update data. + + Based on compared values will create update data for 'UpdateOperation'. + + Empty output means that documents are identical. + + Returns: + Dict[str, Any]: Changes between old and new document. + """ + + return _prepare_update_data(old_doc, new_doc, replace) + + +def prepare_representation_update_data(old_doc, new_doc, replace=True): + """Compare two representation documents and prepare update data. + + Based on compared values will create update data for 'UpdateOperation'. + + Empty output means that documents are identical. + + Returns: + Dict[str, Any]: Changes between old and new document. + """ + + return _prepare_update_data(old_doc, new_doc, replace) + + +def prepare_workfile_info_update_data(old_doc, new_doc, replace=True): + """Compare two workfile info documents and prepare update data. + + Based on compared values will create update data for 'UpdateOperation'. + + Empty output means that documents are identical. + + Returns: + Dict[str, Any]: Changes between old and new document. + """ + + return _prepare_update_data(old_doc, new_doc, replace) + + +@six.add_metaclass(ABCMeta) +class AbstractOperation(object): + """Base operation class. + + Opration represent a call into database. The call can create, change or + remove data. + + Args: + project_name (str): On which project operation will happen. + entity_type (str): Type of entity on which change happens. + e.g. 'asset', 'representation' etc. 
+ """ + + def __init__(self, project_name, entity_type): + self._project_name = project_name + self._entity_type = entity_type + self._id = str(uuid.uuid4()) + + @property + def project_name(self): + return self._project_name + + @property + def id(self): + """Identifier of operation.""" + + return self._id + + @property + def entity_type(self): + return self._entity_type + + @abstractproperty + def operation_name(self): + """Stringified type of operation.""" + + pass + + @abstractmethod + def to_mongo_operation(self): + """Convert operation to Mongo batch operation.""" + + pass + + def to_data(self): + """Convert opration to data that can be converted to json or others. + + Warning: + Current state returns ObjectId objects which cannot be parsed by + json. + + Returns: + Dict[str, Any]: Description of operation. + """ + + return { + "id": self._id, + "entity_type": self.entity_type, + "project_name": self.project_name, + "operation": self.operation_name + } + + +class CreateOperation(AbstractOperation): + """Opeartion to create an entity. + + Args: + project_name (str): On which project operation will happen. + entity_type (str): Type of entity on which change happens. + e.g. 'asset', 'representation' etc. + data (Dict[str, Any]): Data of entity that will be created. + """ + + operation_name = "create" + + def __init__(self, project_name, entity_type, data): + super(CreateOperation, self).__init__(project_name, entity_type) + + if not data: + data = {} + else: + data = copy.deepcopy(dict(data)) + + if "_id" not in data: + data["_id"] = ObjectId() + else: + data["_id"] = ObjectId(data["_id"]) + + self._entity_id = data["_id"] + self._data = data + + def __setitem__(self, key, value): + self.set_value(key, value) + + def __getitem__(self, key): + return self.data[key] + + def set_value(self, key, value): + self.data[key] = value + + def get(self, key, *args, **kwargs): + return self.data.get(key, *args, **kwargs) + + @property + def entity_id(self): + return self._entity_id + + @property + def data(self): + return self._data + + def to_mongo_operation(self): + return InsertOne(copy.deepcopy(self._data)) + + def to_data(self): + output = super(CreateOperation, self).to_data() + output["data"] = copy.deepcopy(self.data) + return output + + +class UpdateOperation(AbstractOperation): + """Opeartion to update an entity. + + Args: + project_name (str): On which project operation will happen. + entity_type (str): Type of entity on which change happens. + e.g. 'asset', 'representation' etc. + entity_id (Union[str, ObjectId]): Identifier of an entity. + update_data (Dict[str, Any]): Key -> value changes that will be set in + database. If value is set to 'REMOVED_VALUE' the key will be + removed. Only first level of dictionary is checked (on purpose). 
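+
+    Example:
+        A minimal sketch of the expected 'update_data' shape; the document
+        and values are hypothetical. Nested keys are joined with a dot and
+        'REMOVED_VALUE' unsets a key:
+
+            op = UpdateOperation(
+                "my_project",
+                "asset",
+                asset_doc["_id"],
+                {"data.fps": 25, "data.obsolete_key": REMOVED_VALUE},
+            )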
+ """ + + operation_name = "update" + + def __init__(self, project_name, entity_type, entity_id, update_data): + super(UpdateOperation, self).__init__(project_name, entity_type) + + self._entity_id = ObjectId(entity_id) + self._update_data = update_data + + @property + def entity_id(self): + return self._entity_id + + @property + def update_data(self): + return self._update_data + + def to_mongo_operation(self): + unset_data = {} + set_data = {} + for key, value in self._update_data.items(): + if value is REMOVED_VALUE: + unset_data[key] = None + else: + set_data[key] = value + + op_data = {} + if unset_data: + op_data["$unset"] = unset_data + if set_data: + op_data["$set"] = set_data + + if not op_data: + return None + + return UpdateOne( + {"_id": self.entity_id}, + op_data + ) + + def to_data(self): + changes = {} + for key, value in self._update_data.items(): + if value is REMOVED_VALUE: + value = None + changes[key] = value + + output = super(UpdateOperation, self).to_data() + output.update({ + "entity_id": self.entity_id, + "changes": changes + }) + return output + + +class DeleteOperation(AbstractOperation): + """Opeartion to delete an entity. + + Args: + project_name (str): On which project operation will happen. + entity_type (str): Type of entity on which change happens. + e.g. 'asset', 'representation' etc. + entity_id (Union[str, ObjectId]): Entity id that will be removed. + """ + + operation_name = "delete" + + def __init__(self, project_name, entity_type, entity_id): + super(DeleteOperation, self).__init__(project_name, entity_type) + + self._entity_id = ObjectId(entity_id) + + @property + def entity_id(self): + return self._entity_id + + def to_mongo_operation(self): + return DeleteOne({"_id": self.entity_id}) + + def to_data(self): + output = super(DeleteOperation, self).to_data() + output["entity_id"] = self.entity_id + return output + + +class OperationsSession(object): + """Session storing operations that should happen in an order. + + At this moment does not handle anything special can be sonsidered as + stupid list of operations that will happen after each other. If creation + of same entity is there multiple times it's handled in any way and document + values are not validated. + + All operations must be related to single project. + + Args: + project_name (str): Project name to which are operations related. + """ + + def __init__(self): + self._operations = [] + + def add(self, operation): + """Add operation to be processed. + + Args: + operation (BaseOperation): Operation that should be processed. + """ + if not isinstance( + operation, + (CreateOperation, UpdateOperation, DeleteOperation) + ): + raise TypeError("Expected Operation object got {}".format( + str(type(operation)) + )) + + self._operations.append(operation) + + def append(self, operation): + """Add operation to be processed. + + Args: + operation (BaseOperation): Operation that should be processed. + """ + + self.add(operation) + + def extend(self, operations): + """Add operations to be processed. + + Args: + operations (List[BaseOperation]): Operations that should be + processed. 
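+
+        Example:
+            Hypothetical sketch of batching operations through a session;
+            'new_subset_doc' and 'old_version_id' are made-up values:
+
+                session = OperationsSession()
+                session.extend([
+                    CreateOperation("my_project", "subset", new_subset_doc),
+                    DeleteOperation("my_project", "version", old_version_id),
+                ])
+                session.commit()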
+ """ + + for operation in operations: + self.add(operation) + + def remove(self, operation): + """Remove operation.""" + + self._operations.remove(operation) + + def clear(self): + """Clear all registered operations.""" + + self._operations = [] + + def to_data(self): + return [ + operation.to_data() + for operation in self._operations + ] + + def commit(self): + """Commit session operations.""" + + operations, self._operations = self._operations, [] + if not operations: + return + + operations_by_project = collections.defaultdict(list) + for operation in operations: + operations_by_project[operation.project_name].append(operation) + + for project_name, operations in operations_by_project.items(): + bulk_writes = [] + for operation in operations: + mongo_op = operation.to_mongo_operation() + if mongo_op is not None: + bulk_writes.append(mongo_op) + + if bulk_writes: + collection = get_project_connection(project_name) + collection.bulk_write(bulk_writes) + + def create_entity(self, project_name, entity_type, data): + """Fast access to 'CreateOperation'. + + Returns: + CreateOperation: Object of update operation. + """ + + operation = CreateOperation(project_name, entity_type, data) + self.add(operation) + return operation + + def update_entity(self, project_name, entity_type, entity_id, update_data): + """Fast access to 'UpdateOperation'. + + Returns: + UpdateOperation: Object of update operation. + """ + + operation = UpdateOperation( + project_name, entity_type, entity_id, update_data + ) + self.add(operation) + return operation + + def delete_entity(self, project_name, entity_type, entity_id): + """Fast access to 'DeleteOperation'. + + Returns: + DeleteOperation: Object of delete operation. + """ + + operation = DeleteOperation(project_name, entity_type, entity_id) + self.add(operation) + return operation diff --git a/openpype/hooks/pre_copy_template_workfile.py b/openpype/hooks/pre_copy_template_workfile.py index dffac22ee2..70c549919f 100644 --- a/openpype/hooks/pre_copy_template_workfile.py +++ b/openpype/hooks/pre_copy_template_workfile.py @@ -1,11 +1,11 @@ import os import shutil -from openpype.lib import ( - PreLaunchHook, - get_custom_workfile_template_by_context, +from openpype.lib import PreLaunchHook +from openpype.settings import get_project_settings +from openpype.pipeline.workfile import ( + get_custom_workfile_template, get_custom_workfile_template_by_string_context ) -from openpype.settings import get_project_settings class CopyTemplateWorkfile(PreLaunchHook): @@ -54,41 +54,22 @@ class CopyTemplateWorkfile(PreLaunchHook): project_name = self.data["project_name"] asset_name = self.data["asset_name"] task_name = self.data["task_name"] + host_name = self.application.host_name project_settings = get_project_settings(project_name) - host_settings = project_settings[self.application.host_name] - - workfile_builder_settings = host_settings.get("workfile_builder") - if not workfile_builder_settings: - # TODO remove warning when deprecated - self.log.warning(( - "Seems like old version of settings is used." - " Can't access custom templates in host \"{}\"." 
- ).format(self.application.full_label)) - return - - if not workfile_builder_settings["create_first_version"]: - self.log.info(( - "Project \"{}\" has turned off to create first workfile for" - " application \"{}\"" - ).format(project_name, self.application.full_label)) - return - - # Backwards compatibility - template_profiles = workfile_builder_settings.get("custom_templates") - if not template_profiles: - self.log.info( - "Custom templates are not filled. Skipping template copy." - ) - return project_doc = self.data.get("project_doc") asset_doc = self.data.get("asset_doc") anatomy = self.data.get("anatomy") if project_doc and asset_doc: self.log.debug("Started filtering of custom template paths.") - template_path = get_custom_workfile_template_by_context( - template_profiles, project_doc, asset_doc, task_name, anatomy + template_path = get_custom_workfile_template( + project_doc, + asset_doc, + task_name, + host_name, + anatomy, + project_settings ) else: @@ -96,10 +77,13 @@ class CopyTemplateWorkfile(PreLaunchHook): "Global data collection probably did not execute." " Using backup solution." )) - dbcon = self.data.get("dbcon") template_path = get_custom_workfile_template_by_string_context( - template_profiles, project_name, asset_name, task_name, - dbcon, anatomy + project_name, + asset_name, + task_name, + host_name, + anatomy, + project_settings ) if not template_path: diff --git a/openpype/hooks/pre_global_host_data.py b/openpype/hooks/pre_global_host_data.py index 6577e37cbe..8a178915fb 100644 --- a/openpype/hooks/pre_global_host_data.py +++ b/openpype/hooks/pre_global_host_data.py @@ -1,3 +1,4 @@ +from openpype.client import get_project, get_asset_by_name from openpype.lib import ( PreLaunchHook, EnvironmentPrepData, @@ -69,7 +70,7 @@ class GlobalHostDataHook(PreLaunchHook): self.data["dbcon"] = dbcon # Project document - project_doc = dbcon.find_one({"type": "project"}) + project_doc = get_project(project_name) self.data["project_doc"] = project_doc asset_name = self.data.get("asset_name") @@ -79,8 +80,5 @@ class GlobalHostDataHook(PreLaunchHook): ) return - asset_doc = dbcon.find_one({ - "type": "asset", - "name": asset_name - }) + asset_doc = get_asset_by_name(project_name, asset_name) self.data["asset_doc"] = asset_doc diff --git a/openpype/host/host.py b/openpype/host/host.py index 48907e7ec7..9cdbb819e1 100644 --- a/openpype/host/host.py +++ b/openpype/host/host.py @@ -19,8 +19,15 @@ class MissingMethodsError(ValueError): joined_missing = ", ".join( ['"{}"'.format(item) for item in missing_methods] ) + if isinstance(host, HostBase): + host_name = host.name + else: + try: + host_name = host.__file__.replace("\\", "/").split("/")[-3] + except Exception: + host_name = str(host) message = ( - "Host \"{}\" miss methods {}".format(host.name, joined_missing) + "Host \"{}\" miss methods {}".format(host_name, joined_missing) ) super(MissingMethodsError, self).__init__(message) diff --git a/openpype/hosts/aftereffects/__init__.py b/openpype/hosts/aftereffects/__init__.py index deae48d122..ae750d05b6 100644 --- a/openpype/hosts/aftereffects/__init__.py +++ b/openpype/hosts/aftereffects/__init__.py @@ -1,9 +1,6 @@ -def add_implementation_envs(env, _app): - """Modify environments to contain all required for implementation.""" - defaults = { - "OPENPYPE_LOG_NO_COLORS": "True", - "WEBSOCKET_URL": "ws://localhost:8097/ws/" - } - for key, value in defaults.items(): - if not env.get(key): - env[key] = value +from .addon import AfterEffectsAddon + + +__all__ = ( + "AfterEffectsAddon", +) diff 
--git a/openpype/hosts/aftereffects/addon.py b/openpype/hosts/aftereffects/addon.py new file mode 100644 index 0000000000..94843e7dc5 --- /dev/null +++ b/openpype/hosts/aftereffects/addon.py @@ -0,0 +1,23 @@ +from openpype.modules import OpenPypeModule +from openpype.modules.interfaces import IHostAddon + + +class AfterEffectsAddon(OpenPypeModule, IHostAddon): + name = "aftereffects" + host_name = "aftereffects" + + def initialize(self, module_settings): + self.enabled = True + + def add_implementation_envs(self, env, _app): + """Modify environments to contain all required for implementation.""" + defaults = { + "OPENPYPE_LOG_NO_COLORS": "True", + "WEBSOCKET_URL": "ws://localhost:8097/ws/" + } + for key, value in defaults.items(): + if not env.get(key): + env[key] = value + + def get_workfile_extensions(self): + return [".aep"] diff --git a/openpype/hosts/aftereffects/api/lib.py b/openpype/hosts/aftereffects/api/lib.py index ce4cbf09af..8cdf9c407e 100644 --- a/openpype/hosts/aftereffects/api/lib.py +++ b/openpype/hosts/aftereffects/api/lib.py @@ -1,13 +1,16 @@ import os import sys +import re +import json import contextlib import traceback import logging +from functools import partial from Qt import QtWidgets from openpype.pipeline import install_host -from openpype.lib.remote_publish import headless_publish +from openpype.modules import ModulesManager from openpype.tools.utils import host_tools from .launch_logic import ProcessLauncher, get_stub @@ -35,10 +38,18 @@ def main(*subprocess_args): launcher.start() if os.environ.get("HEADLESS_PUBLISH"): - launcher.execute_in_main_thread(lambda: headless_publish( - log, - "CloseAE", - os.environ.get("IS_TEST"))) + manager = ModulesManager() + webpublisher_addon = manager["webpublisher"] + + launcher.execute_in_main_thread( + partial( + webpublisher_addon.headless_publish, + log, + "CloseAE", + os.environ.get("IS_TEST") + ) + ) + elif os.environ.get("AVALON_PHOTOSHOP_WORKFILES_ON_LAUNCH", True): save = False if os.getenv("WORKFILES_SAVE_AS"): @@ -68,3 +79,57 @@ def get_extension_manifest_path(): "CSXS", "manifest.xml" ) + + +def get_unique_layer_name(layers, name): + """ + Gets all layer names and if 'name' is present in them, increases + suffix by 1 (eg. creates unique layer name - for Loader) + Args: + layers (list): of strings, names only + name (string): checked value + + Returns: + (string): name_00X (without version) + """ + names = {} + for layer in layers: + layer_name = re.sub(r'_\d{3}$', '', layer) + if layer_name in names.keys(): + names[layer_name] = names[layer_name] + 1 + else: + names[layer_name] = 1 + occurrences = names.get(name, 0) + + return "{}_{:0>3d}".format(name, occurrences + 1) + + +def get_background_layers(file_url): + """ + Pulls file name from background json file, enrich with folder url for + AE to be able import files. + + Order is important, follows order in json. + + Args: + file_url (str): abs url of background json + + Returns: + (list): of abs paths to images + """ + with open(file_url) as json_file: + data = json.load(json_file) + + layers = list() + bg_folder = os.path.dirname(file_url) + for child in data['children']: + if child.get("filename"): + layers.append(os.path.join(bg_folder, child.get("filename")). + replace("\\", "/")) + else: + for layer in child['children']: + if layer.get("filename"): + layers.append(os.path.join(bg_folder, + layer.get("filename")). 
+ replace("\\", "/")) + return layers diff --git a/openpype/hosts/aftereffects/api/pipeline.py b/openpype/hosts/aftereffects/api/pipeline.py index 0bc47665b0..c13c22ced5 100644 --- a/openpype/hosts/aftereffects/api/pipeline.py +++ b/openpype/hosts/aftereffects/api/pipeline.py @@ -1,5 +1,4 @@ import os -import sys from Qt import QtWidgets @@ -15,6 +14,7 @@ from openpype.pipeline import ( AVALON_CONTAINER_ID, legacy_io, ) +from openpype.pipeline.load import any_outdated_containers import openpype.hosts.aftereffects from openpype.lib import register_event_callback @@ -136,7 +136,7 @@ def ls(): def check_inventory(): """Checks loaded containers if they are of highest version""" - if not lib.any_outdated(): + if not any_outdated_containers(): return # Warn about outdated containers. diff --git a/openpype/hosts/aftereffects/api/workio.py b/openpype/hosts/aftereffects/api/workio.py index d6c732285a..18b40af5dc 100644 --- a/openpype/hosts/aftereffects/api/workio.py +++ b/openpype/hosts/aftereffects/api/workio.py @@ -1,12 +1,11 @@ """Host API required Work Files tool""" import os -from openpype.pipeline import HOST_WORKFILE_EXTENSIONS from .launch_logic import get_stub def file_extensions(): - return HOST_WORKFILE_EXTENSIONS["aftereffects"] + return [".aep"] def has_unsaved_changes(): diff --git a/openpype/hosts/aftereffects/plugins/create/workfile_creator.py b/openpype/hosts/aftereffects/plugins/create/workfile_creator.py index badb3675fd..f82d15b3c9 100644 --- a/openpype/hosts/aftereffects/plugins/create/workfile_creator.py +++ b/openpype/hosts/aftereffects/plugins/create/workfile_creator.py @@ -11,6 +11,8 @@ class AEWorkfileCreator(AutoCreator): identifier = "workfile" family = "workfile" + default_variant = "Main" + def get_instance_attr_defs(self): return [] @@ -35,7 +37,6 @@ class AEWorkfileCreator(AutoCreator): existing_instance = instance break - variant = '' project_name = legacy_io.Session["AVALON_PROJECT"] asset_name = legacy_io.Session["AVALON_ASSET"] task_name = legacy_io.Session["AVALON_TASK"] @@ -44,15 +45,17 @@ class AEWorkfileCreator(AutoCreator): if existing_instance is None: asset_doc = get_asset_by_name(project_name, asset_name) subset_name = self.get_subset_name( - variant, task_name, asset_doc, project_name, host_name + self.default_variant, task_name, asset_doc, + project_name, host_name ) data = { "asset": asset_name, "task": task_name, - "variant": variant + "variant": self.default_variant } data.update(self.get_dynamic_data( - variant, task_name, asset_doc, project_name, host_name + self.default_variant, task_name, asset_doc, + project_name, host_name )) new_instance = CreatedInstance( @@ -69,7 +72,9 @@ class AEWorkfileCreator(AutoCreator): ): asset_doc = get_asset_by_name(project_name, asset_name) subset_name = self.get_subset_name( - variant, task_name, asset_doc, project_name, host_name + self.default_variant, task_name, asset_doc, + project_name, host_name ) existing_instance["asset"] = asset_name existing_instance["task"] = task_name + existing_instance["subset"] = subset_name diff --git a/openpype/hosts/aftereffects/plugins/load/load_background.py b/openpype/hosts/aftereffects/plugins/load/load_background.py index d346df504a..260e780be0 100644 --- a/openpype/hosts/aftereffects/plugins/load/load_background.py +++ b/openpype/hosts/aftereffects/plugins/load/load_background.py @@ -1,14 +1,14 @@ import re -from openpype.lib import ( - get_background_layers, - get_unique_layer_name -) from openpype.pipeline import get_representation_path from 
openpype.hosts.aftereffects.api import ( AfterEffectsLoader, containerise ) +from openpype.hosts.aftereffects.api.lib import ( + get_background_layers, + get_unique_layer_name, +) class BackgroundLoader(AfterEffectsLoader): diff --git a/openpype/hosts/aftereffects/plugins/load/load_file.py b/openpype/hosts/aftereffects/plugins/load/load_file.py index 6ab69c6bfa..2ddc9825e5 100644 --- a/openpype/hosts/aftereffects/plugins/load/load_file.py +++ b/openpype/hosts/aftereffects/plugins/load/load_file.py @@ -1,12 +1,11 @@ import re -from openpype import lib - from openpype.pipeline import get_representation_path from openpype.hosts.aftereffects.api import ( AfterEffectsLoader, containerise ) +from openpype.hosts.aftereffects.api.lib import get_unique_layer_name class FileLoader(AfterEffectsLoader): @@ -28,7 +27,7 @@ class FileLoader(AfterEffectsLoader): stub = self.get_stub() layers = stub.get_items(comps=True, folders=True, footages=True) existing_layers = [layer.name for layer in layers] - comp_name = lib.get_unique_layer_name( + comp_name = get_unique_layer_name( existing_layers, "{}_{}".format(context["asset"]["name"], name)) import_options = {} @@ -87,7 +86,7 @@ class FileLoader(AfterEffectsLoader): if namespace_from_container != layer_name: layers = stub.get_items(comps=True) existing_layers = [layer.name for layer in layers] - layer_name = lib.get_unique_layer_name( + layer_name = get_unique_layer_name( existing_layers, "{}_{}".format(context["asset"], context["subset"])) else: # switching version - keep same name diff --git a/openpype/hosts/aftereffects/plugins/publish/collect_render.py b/openpype/hosts/aftereffects/plugins/publish/collect_render.py index bb199a61f7..d444ead6dc 100644 --- a/openpype/hosts/aftereffects/plugins/publish/collect_render.py +++ b/openpype/hosts/aftereffects/plugins/publish/collect_render.py @@ -102,7 +102,6 @@ class CollectAERender(publish.AbstractCollectRender): attachTo=False, setMembers='', publish=True, - renderer='aerender', name=subset_name, resolutionWidth=render_q.width, resolutionHeight=render_q.height, @@ -113,7 +112,6 @@ class CollectAERender(publish.AbstractCollectRender): frameStart=frame_start, frameEnd=frame_end, frameStep=1, - toBeRenderedOn='deadline', fps=fps, app_version=app_version, publish_attributes=inst.data.get("publish_attributes", {}), @@ -138,6 +136,9 @@ class CollectAERender(publish.AbstractCollectRender): fam = "render.farm" if fam not in instance.families: instance.families.append(fam) + instance.toBeRenderedOn = "deadline" + instance.renderer = "aerender" + instance.farm = True # to skip integrate instances.append(instance) instances_to_remove.append(inst) diff --git a/openpype/hosts/aftereffects/plugins/publish/collect_workfile.py b/openpype/hosts/aftereffects/plugins/publish/collect_workfile.py index 9cb6900b0a..3c5013b3bd 100644 --- a/openpype/hosts/aftereffects/plugins/publish/collect_workfile.py +++ b/openpype/hosts/aftereffects/plugins/publish/collect_workfile.py @@ -1,8 +1,8 @@ import os import pyblish.api -from openpype.lib import get_subset_name_with_asset_doc from openpype.pipeline import legacy_io +from openpype.pipeline.create import get_subset_name class CollectWorkfile(pyblish.api.ContextPlugin): @@ -11,6 +11,8 @@ class CollectWorkfile(pyblish.api.ContextPlugin): label = "Collect After Effects Workfile Instance" order = pyblish.api.CollectorOrder + 0.1 + default_variant = "Main" + def process(self, context): existing_instance = None for instance in context: @@ -69,13 +71,14 @@ class 
CollectWorkfile(pyblish.api.ContextPlugin): # workfile instance family = "workfile" - subset = get_subset_name_with_asset_doc( + subset = get_subset_name( family, - "", + self.default_variant, context.data["anatomyData"]["task"]["name"], context.data["assetEntity"], context.data["anatomyData"]["project"]["name"], - host_name=context.data["hostName"] + host_name=context.data["hostName"], + project_settings=context.data["project_settings"] ) # Create instance instance = context.create_instance(subset) diff --git a/openpype/hosts/blender/__init__.py b/openpype/hosts/blender/__init__.py index 0f27882c7e..2a6603606a 100644 --- a/openpype/hosts/blender/__init__.py +++ b/openpype/hosts/blender/__init__.py @@ -1,52 +1,6 @@ -import os +from .addon import BlenderAddon -def add_implementation_envs(env, _app): - """Modify environments to contain all required for implementation.""" - # Prepare path to implementation script - implementation_user_script_path = os.path.join( - os.path.dirname(os.path.abspath(__file__)), - "blender_addon" - ) - - # Add blender implementation script path to PYTHONPATH - python_path = env.get("PYTHONPATH") or "" - python_path_parts = [ - path - for path in python_path.split(os.pathsep) - if path - ] - python_path_parts.insert(0, implementation_user_script_path) - env["PYTHONPATH"] = os.pathsep.join(python_path_parts) - - # Modify Blender user scripts path - previous_user_scripts = set() - # Implementation path is added to set for easier paths check inside loops - # - will be removed at the end - previous_user_scripts.add(implementation_user_script_path) - - openpype_blender_user_scripts = ( - env.get("OPENPYPE_BLENDER_USER_SCRIPTS") or "" - ) - for path in openpype_blender_user_scripts.split(os.pathsep): - if path: - previous_user_scripts.add(os.path.normpath(path)) - - blender_user_scripts = env.get("BLENDER_USER_SCRIPTS") or "" - for path in blender_user_scripts.split(os.pathsep): - if path: - previous_user_scripts.add(os.path.normpath(path)) - - # Remove implementation path from user script paths as is set to - # `BLENDER_USER_SCRIPTS` - previous_user_scripts.remove(implementation_user_script_path) - env["BLENDER_USER_SCRIPTS"] = implementation_user_script_path - - # Set custom user scripts env - env["OPENPYPE_BLENDER_USER_SCRIPTS"] = os.pathsep.join( - previous_user_scripts - ) - - # Define Qt binding if not defined - if not env.get("QT_PREFERRED_BINDING"): - env["QT_PREFERRED_BINDING"] = "PySide2" +__all__ = ( + "BlenderAddon", +) diff --git a/openpype/hosts/blender/addon.py b/openpype/hosts/blender/addon.py new file mode 100644 index 0000000000..3ee638a5bb --- /dev/null +++ b/openpype/hosts/blender/addon.py @@ -0,0 +1,73 @@ +import os +from openpype.modules import OpenPypeModule +from openpype.modules.interfaces import IHostAddon + +BLENDER_ROOT_DIR = os.path.dirname(os.path.abspath(__file__)) + + +class BlenderAddon(OpenPypeModule, IHostAddon): + name = "blender" + host_name = "blender" + + def initialize(self, module_settings): + self.enabled = True + + def add_implementation_envs(self, env, _app): + """Modify environments to contain all required for implementation.""" + # Prepare path to implementation script + implementation_user_script_path = os.path.join( + BLENDER_ROOT_DIR, + "blender_addon" + ) + + # Add blender implementation script path to PYTHONPATH + python_path = env.get("PYTHONPATH") or "" + python_path_parts = [ + path + for path in python_path.split(os.pathsep) + if path + ] + python_path_parts.insert(0, implementation_user_script_path) + 
env["PYTHONPATH"] = os.pathsep.join(python_path_parts) + + # Modify Blender user scripts path + previous_user_scripts = set() + # Implementation path is added to set for easier paths check inside + # loops - will be removed at the end + previous_user_scripts.add(implementation_user_script_path) + + openpype_blender_user_scripts = ( + env.get("OPENPYPE_BLENDER_USER_SCRIPTS") or "" + ) + for path in openpype_blender_user_scripts.split(os.pathsep): + if path: + previous_user_scripts.add(os.path.normpath(path)) + + blender_user_scripts = env.get("BLENDER_USER_SCRIPTS") or "" + for path in blender_user_scripts.split(os.pathsep): + if path: + previous_user_scripts.add(os.path.normpath(path)) + + # Remove implementation path from user script paths as is set to + # `BLENDER_USER_SCRIPTS` + previous_user_scripts.remove(implementation_user_script_path) + env["BLENDER_USER_SCRIPTS"] = implementation_user_script_path + + # Set custom user scripts env + env["OPENPYPE_BLENDER_USER_SCRIPTS"] = os.pathsep.join( + previous_user_scripts + ) + + # Define Qt binding if not defined + if not env.get("QT_PREFERRED_BINDING"): + env["QT_PREFERRED_BINDING"] = "PySide2" + + def get_launch_hook_paths(self, app): + if app.host_name != self.host_name: + return [] + return [ + os.path.join(BLENDER_ROOT_DIR, "hooks") + ] + + def get_workfile_extensions(self): + return [".blend"] diff --git a/openpype/hosts/blender/api/lib.py b/openpype/hosts/blender/api/lib.py index 20098c0fe8..9cd1ace821 100644 --- a/openpype/hosts/blender/api/lib.py +++ b/openpype/hosts/blender/api/lib.py @@ -234,7 +234,7 @@ def lsattrs(attrs: Dict) -> List: def read(node: bpy.types.bpy_struct_meta_idprop): """Return user-defined attributes from `node`""" - data = dict(node.get(pipeline.AVALON_PROPERTY)) + data = dict(node.get(pipeline.AVALON_PROPERTY, {})) # Ignore hidden/internal data data = { diff --git a/openpype/hosts/blender/api/ops.py b/openpype/hosts/blender/api/ops.py index c1b5add518..e0e09277df 100644 --- a/openpype/hosts/blender/api/ops.py +++ b/openpype/hosts/blender/api/ops.py @@ -26,7 +26,7 @@ PREVIEW_COLLECTIONS: Dict = dict() # This seems like a good value to keep the Qt app responsive and doesn't slow # down Blender. At least on macOS I the interace of Blender gets very laggy if # you make it smaller. -TIMER_INTERVAL: float = 0.01 +TIMER_INTERVAL: float = 0.01 if platform.system() == "Windows" else 0.1 class BlenderApplication(QtWidgets.QApplication): @@ -164,6 +164,12 @@ def _process_app_events() -> Optional[float]: dialog.setDetailedText(detail) dialog.exec_() + # Refresh Manager + if GlobalClass.app: + manager = GlobalClass.app.get_window("WM_OT_avalon_manager") + if manager: + manager.refresh() + if not GlobalClass.is_windows: if OpenFileCacher.opening_file: return TIMER_INTERVAL @@ -192,10 +198,11 @@ class LaunchQtApp(bpy.types.Operator): self._app = BlenderApplication.get_app() GlobalClass.app = self._app - bpy.app.timers.register( - _process_app_events, - persistent=True - ) + if not bpy.app.timers.is_registered(_process_app_events): + bpy.app.timers.register( + _process_app_events, + persistent=True + ) def execute(self, context): """Execute the operator. @@ -220,12 +227,9 @@ class LaunchQtApp(bpy.types.Operator): self._app.store_window(self.bl_idname, window) self._window = window - if not isinstance( - self._window, - (QtWidgets.QMainWindow, QtWidgets.QDialog, ModuleType) - ): + if not isinstance(self._window, (QtWidgets.QWidget, ModuleType)): raise AttributeError( - "`window` should be a `QDialog or module`. 
Got: {}".format( + "`window` should be a `QWidget or module`. Got: {}".format( str(type(window)) ) ) @@ -249,9 +253,9 @@ class LaunchQtApp(bpy.types.Operator): self._window.setWindowFlags(on_top_flags) self._window.show() - if on_top_flags != origin_flags: - self._window.setWindowFlags(origin_flags) - self._window.show() + # if on_top_flags != origin_flags: + # self._window.setWindowFlags(origin_flags) + # self._window.show() return {'FINISHED'} diff --git a/openpype/hosts/blender/api/workio.py b/openpype/hosts/blender/api/workio.py index 5eb9f82999..a8f6193abc 100644 --- a/openpype/hosts/blender/api/workio.py +++ b/openpype/hosts/blender/api/workio.py @@ -5,8 +5,6 @@ from typing import List, Optional import bpy -from openpype.pipeline import HOST_WORKFILE_EXTENSIONS - class OpenFileCacher: """Store information about opening file. @@ -78,7 +76,7 @@ def has_unsaved_changes() -> bool: def file_extensions() -> List[str]: """Return the supported file extensions for Blender scene files.""" - return HOST_WORKFILE_EXTENSIONS["blender"] + return [".blend"] def work_root(session: dict) -> str: diff --git a/openpype/hosts/blender/blender_addon/startup/init.py b/openpype/hosts/blender/blender_addon/startup/init.py index 13a4b8a7a1..8dbff8a91d 100644 --- a/openpype/hosts/blender/blender_addon/startup/init.py +++ b/openpype/hosts/blender/blender_addon/startup/init.py @@ -1,4 +1,10 @@ from openpype.pipeline import install_host from openpype.hosts.blender import api -install_host(api) + +def register(): + install_host(api) + + +def unregister(): + pass diff --git a/openpype/hosts/blender/plugins/load/load_layout_blend.py b/openpype/hosts/blender/plugins/load/load_layout_blend.py index cf8e89ed1f..e0124053bf 100644 --- a/openpype/hosts/blender/plugins/load/load_layout_blend.py +++ b/openpype/hosts/blender/plugins/load/load_layout_blend.py @@ -6,12 +6,12 @@ from typing import Dict, List, Optional import bpy -from openpype import lib from openpype.pipeline import ( legacy_create, get_representation_path, AVALON_CONTAINER_ID, ) +from openpype.pipeline.create import get_legacy_creator_by_name from openpype.hosts.blender.api import plugin from openpype.hosts.blender.api.pipeline import ( AVALON_CONTAINERS, @@ -157,7 +157,7 @@ class BlendLayoutLoader(plugin.AssetLoader): t.id = local_obj elif local_obj.type == 'EMPTY': - creator_plugin = lib.get_creator_by_name("CreateAnimation") + creator_plugin = get_legacy_creator_by_name("CreateAnimation") if not creator_plugin: raise ValueError("Creator plugin \"CreateAnimation\" was " "not found.") diff --git a/openpype/hosts/blender/plugins/load/load_layout_json.py b/openpype/hosts/blender/plugins/load/load_layout_json.py index a0580af4a0..eca098627e 100644 --- a/openpype/hosts/blender/plugins/load/load_layout_json.py +++ b/openpype/hosts/blender/plugins/load/load_layout_json.py @@ -118,7 +118,7 @@ class JsonLayoutLoader(plugin.AssetLoader): # Camera creation when loading a layout is not necessary for now, # but the code is worth keeping in case we need it in the future. 
# # Create the camera asset and the camera instance - # creator_plugin = lib.get_creator_by_name("CreateCamera") + # creator_plugin = get_legacy_creator_by_name("CreateCamera") # if not creator_plugin: # raise ValueError("Creator plugin \"CreateCamera\" was " # "not found.") diff --git a/openpype/hosts/blender/plugins/load/load_rig.py b/openpype/hosts/blender/plugins/load/load_rig.py index 4dfa96167f..1d23a70061 100644 --- a/openpype/hosts/blender/plugins/load/load_rig.py +++ b/openpype/hosts/blender/plugins/load/load_rig.py @@ -6,12 +6,12 @@ from typing import Dict, List, Optional import bpy -from openpype import lib from openpype.pipeline import ( legacy_create, get_representation_path, AVALON_CONTAINER_ID, ) +from openpype.pipeline.create import get_legacy_creator_by_name from openpype.hosts.blender.api import ( plugin, get_selection, @@ -244,7 +244,7 @@ class BlendRigLoader(plugin.AssetLoader): objects = self._process(libpath, asset_group, group_name, action) if create_animation: - creator_plugin = lib.get_creator_by_name("CreateAnimation") + creator_plugin = get_legacy_creator_by_name("CreateAnimation") if not creator_plugin: raise ValueError("Creator plugin \"CreateAnimation\" was " "not found.") diff --git a/openpype/hosts/blender/plugins/publish/extract_layout.py b/openpype/hosts/blender/plugins/publish/extract_layout.py index 75d9cf440d..8502c6fbd4 100644 --- a/openpype/hosts/blender/plugins/publish/extract_layout.py +++ b/openpype/hosts/blender/plugins/publish/extract_layout.py @@ -180,7 +180,7 @@ class ExtractLayout(openpype.api.Extractor): "rotation": { "x": asset.rotation_euler.x, "y": asset.rotation_euler.y, - "z": asset.rotation_euler.z, + "z": asset.rotation_euler.z }, "scale": { "x": asset.scale.x, @@ -189,6 +189,18 @@ class ExtractLayout(openpype.api.Extractor): } } + json_element["transform_matrix"] = [] + + for row in list(asset.matrix_world.transposed()): + json_element["transform_matrix"].append(list(row)) + + json_element["basis"] = [ + [1, 0, 0, 0], + [0, -1, 0, 0], + [0, 0, 1, 0], + [0, 0, 0, 1] + ] + # Extract the animation as well if family == "rig": f, n = self._export_animation( diff --git a/openpype/hosts/celaction/api/cli.py b/openpype/hosts/celaction/api/cli.py index 8c7b3a2e74..eb91def090 100644 --- a/openpype/hosts/celaction/api/cli.py +++ b/openpype/hosts/celaction/api/cli.py @@ -14,7 +14,7 @@ from openpype.tools.utils import host_tools from openpype.pipeline import install_openpype_plugins -log = Logger().get_logger("Celaction_cli_publisher") +log = Logger.get_logger("Celaction_cli_publisher") publish_host = "celaction" diff --git a/openpype/hosts/flame/api/__init__.py b/openpype/hosts/flame/api/__init__.py index 2c461e5f16..76c1c93379 100644 --- a/openpype/hosts/flame/api/__init__.py +++ b/openpype/hosts/flame/api/__init__.py @@ -30,7 +30,8 @@ from .lib import ( maintained_temp_file_path, get_clip_segment, get_batch_group_from_desktop, - MediaInfoFile + MediaInfoFile, + TimeEffectMetadata ) from .utils import ( setup, @@ -107,6 +108,7 @@ __all__ = [ "get_clip_segment", "get_batch_group_from_desktop", "MediaInfoFile", + "TimeEffectMetadata", # pipeline "install", diff --git a/openpype/hosts/flame/api/lib.py b/openpype/hosts/flame/api/lib.py index d59308ad6c..94c46fe937 100644 --- a/openpype/hosts/flame/api/lib.py +++ b/openpype/hosts/flame/api/lib.py @@ -5,10 +5,11 @@ import json import pickle import clique import tempfile +import traceback import itertools import contextlib import xml.etree.cElementTree as cET -from copy import deepcopy +from copy 
import deepcopy, copy from xml.etree import ElementTree as ET from pprint import pformat from .constants import ( @@ -266,7 +267,7 @@ def get_current_sequence(selection): def rescan_hooks(): import flame try: - flame.execute_shortcut('Rescan Python Hooks') + flame.execute_shortcut("Rescan Python Hooks") except Exception: pass @@ -1082,21 +1083,21 @@ class MediaInfoFile(object): xml_data (ET.Element): clip data """ try: - for out_track in xml_data.iter('track'): - for out_feed in out_track.iter('feed'): + for out_track in xml_data.iter("track"): + for out_feed in out_track.iter("feed"): # start frame out_feed_nb_ticks_obj = out_feed.find( - 'startTimecode/nbTicks') + "startTimecode/nbTicks") self.start_frame = out_feed_nb_ticks_obj.text # fps out_feed_fps_obj = out_feed.find( - 'startTimecode/rate') + "startTimecode/rate") self.fps = out_feed_fps_obj.text # drop frame mode out_feed_drop_mode_obj = out_feed.find( - 'startTimecode/dropMode') + "startTimecode/dropMode") self.drop_mode = out_feed_drop_mode_obj.text break except Exception as msg: @@ -1118,8 +1119,153 @@ class MediaInfoFile(object): tree = cET.ElementTree(xml_element_data) tree.write( fpath, xml_declaration=True, - method='xml', encoding='UTF-8' + method="xml", encoding="UTF-8" ) except IOError as error: raise IOError( "Not able to write data to file: {}".format(error)) + + +class TimeEffectMetadata(object): + log = log + _data = {} + _retime_modes = { + 0: "speed", + 1: "timewarp", + 2: "duration" + } + + def __init__(self, segment, logger=None): + if logger: + self.log = logger + + self._data = self._get_metadata(segment) + + @property + def data(self): + """ Returns timewarp effect data + + Returns: + dict: retime data + """ + return self._data + + def _get_metadata(self, segment): + effects = segment.effects or [] + for effect in effects: + if effect.type == "Timewarp": + with maintained_temp_file_path(".timewarp_node") as tmp_path: + self.log.info("Temp File: {}".format(tmp_path)) + effect.save_setup(tmp_path) + return self._get_attributes_from_xml(tmp_path) + + return {} + + def _get_attributes_from_xml(self, tmp_path): + with open(tmp_path, "r") as tw_setup_file: + tw_setup_string = tw_setup_file.read() + tw_setup_file.close() + + tw_setup_xml = ET.fromstring(tw_setup_string) + tw_setup = self._dictify(tw_setup_xml) + # pprint(tw_setup) + try: + tw_setup_state = tw_setup["Setup"]["State"][0] + mode = int( + tw_setup_state["TW_RetimerMode"][0]["_text"] + ) + r_data = { + "type": self._retime_modes[mode], + "effectStart": int( + tw_setup["Setup"]["Base"][0]["Range"][0]["Start"]), + "effectEnd": int( + tw_setup["Setup"]["Base"][0]["Range"][0]["End"]) + } + + if mode == 0: # speed + r_data[self._retime_modes[mode]] = float( + tw_setup_state["TW_Speed"] + [0]["Channel"][0]["Value"][0]["_text"] + ) / 100 + elif mode == 1: # timewarp + print("timing") + r_data[self._retime_modes[mode]] = self._get_anim_keys( + tw_setup_state["TW_Timing"] + ) + elif mode == 2: # duration + r_data[self._retime_modes[mode]] = { + "start": { + "source": int( + tw_setup_state["TW_DurationTiming"][0]["Channel"] + [0]["KFrames"][0]["Key"][0]["Value"][0]["_text"] + ), + "timeline": int( + tw_setup_state["TW_DurationTiming"][0]["Channel"] + [0]["KFrames"][0]["Key"][0]["Frame"][0]["_text"] + ) + }, + "end": { + "source": int( + tw_setup_state["TW_DurationTiming"][0]["Channel"] + [0]["KFrames"][0]["Key"][1]["Value"][0]["_text"] + ), + "timeline": int( + tw_setup_state["TW_DurationTiming"][0]["Channel"] + [0]["KFrames"][0]["Key"][1]["Frame"][0]["_text"] + ) 
+ } + } + except Exception: + lines = traceback.format_exception(*sys.exc_info()) + self.log.error("\n".join(lines)) + return + + return r_data + + def _get_anim_keys(self, setup_cat, index=None): + return_data = { + "extrapolation": ( + setup_cat[0]["Channel"][0]["Extrap"][0]["_text"] + ), + "animKeys": [] + } + for key in setup_cat[0]["Channel"][0]["KFrames"][0]["Key"]: + if index and int(key["Index"]) != index: + continue + key_data = { + "source": float(key["Value"][0]["_text"]), + "timeline": float(key["Frame"][0]["_text"]), + "index": int(key["Index"]), + "curveMode": key["CurveMode"][0]["_text"], + "curveOrder": key["CurveOrder"][0]["_text"] + } + if key.get("TangentMode"): + key_data["tangentMode"] = key["TangentMode"][0]["_text"] + + return_data["animKeys"].append(key_data) + + return return_data + + def _dictify(self, xml_, root=True): + """ Convert xml object to dictionary + + Args: + xml_ (xml.etree.ElementTree.Element): xml data + root (bool, optional): is root available. Defaults to True. + + Returns: + dict: dictionarized xml + """ + + if root: + return {xml_.tag: self._dictify(xml_, False)} + + d = copy(xml_.attrib) + if xml_.text: + d["_text"] = xml_.text + + for x in xml_.findall("./*"): + if x.tag not in d: + d[x.tag] = [] + d[x.tag].append(self._dictify(x, False)) + return d diff --git a/openpype/hosts/flame/otio/flame_export.py b/openpype/hosts/flame/otio/flame_export.py index 1e4ef866ed..6d6b33d2a1 100644 --- a/openpype/hosts/flame/otio/flame_export.py +++ b/openpype/hosts/flame/otio/flame_export.py @@ -275,7 +275,7 @@ def create_otio_reference(clip_data, fps=None): def create_otio_clip(clip_data): - from openpype.hosts.flame.api import MediaInfoFile + from openpype.hosts.flame.api import MediaInfoFile, TimeEffectMetadata segment = clip_data["PySegment"] @@ -284,14 +284,31 @@ def create_otio_clip(clip_data): media_timecode_start = media_info.start_frame media_fps = media_info.fps + # Timewarp metadata + tw_data = TimeEffectMetadata(segment, logger=log).data + log.debug("__ tw_data: {}".format(tw_data)) + # define first frame - first_frame = media_timecode_start or utils.get_frame_from_filename( - clip_data["fpath"]) or 0 + file_first_frame = utils.get_frame_from_filename( + clip_data["fpath"]) + if file_first_frame: + file_first_frame = int(file_first_frame) + + first_frame = media_timecode_start or file_first_frame or 0 _clip_source_in = int(clip_data["source_in"]) _clip_source_out = int(clip_data["source_out"]) + _clip_record_in = clip_data["record_in"] + _clip_record_out = clip_data["record_out"] _clip_record_duration = int(clip_data["record_duration"]) + log.debug("_ file_first_frame: {}".format(file_first_frame)) + log.debug("_ first_frame: {}".format(first_frame)) + log.debug("_ _clip_source_in: {}".format(_clip_source_in)) + log.debug("_ _clip_source_out: {}".format(_clip_source_out)) + log.debug("_ _clip_record_in: {}".format(_clip_record_in)) + log.debug("_ _clip_record_out: {}".format(_clip_record_out)) + # first solve if the reverse timing speed = 1 if clip_data["source_in"] > clip_data["source_out"]: @@ -302,16 +319,28 @@ def create_otio_clip(clip_data): source_in = _clip_source_in - int(first_frame) source_out = _clip_source_out - int(first_frame) + log.debug("_ source_in: {}".format(source_in)) + log.debug("_ source_out: {}".format(source_out)) + + if file_first_frame: + log.debug("_ file_source_in: {}".format( + file_first_frame + source_in)) + log.debug("_ file_source_in: {}".format( + file_first_frame + source_out)) + source_duration = (source_out - 
source_in + 1) # secondly check if any change of speed if source_duration != _clip_record_duration: retime_speed = float(source_duration) / float(_clip_record_duration) - log.debug("_ retime_speed: {}".format(retime_speed)) + log.debug("_ calculated speed: {}".format(retime_speed)) speed *= retime_speed - log.debug("_ source_in: {}".format(source_in)) - log.debug("_ source_out: {}".format(source_out)) + # get speed from metadata if available + if tw_data.get("speed"): + speed = tw_data["speed"] + log.debug("_ metadata speed: {}".format(speed)) + log.debug("_ speed: {}".format(speed)) log.debug("_ source_duration: {}".format(source_duration)) log.debug("_ _clip_record_duration: {}".format(_clip_record_duration)) diff --git a/openpype/hosts/flame/plugins/publish/collect_timeline_instances.py b/openpype/hosts/flame/plugins/publish/collect_timeline_instances.py index 5db89a0ab9..992db62c75 100644 --- a/openpype/hosts/flame/plugins/publish/collect_timeline_instances.py +++ b/openpype/hosts/flame/plugins/publish/collect_timeline_instances.py @@ -136,7 +136,8 @@ class CollectTimelineInstances(pyblish.api.ContextPlugin): "tasks": { task["name"]: {"type": task["type"]} for task in self.add_tasks}, - "representations": [] + "representations": [], + "newAssetPublishing": True }) self.log.debug("__ inst_data: {}".format(pformat(inst_data))) diff --git a/openpype/hosts/flame/plugins/publish/collect_timeline_otio.py b/openpype/hosts/flame/plugins/publish/collect_timeline_otio.py index 0a9b0db334..917041e053 100644 --- a/openpype/hosts/flame/plugins/publish/collect_timeline_otio.py +++ b/openpype/hosts/flame/plugins/publish/collect_timeline_otio.py @@ -1,9 +1,9 @@ import pyblish.api -import openpype.lib as oplib -from openpype.pipeline import legacy_io import openpype.hosts.flame.api as opfapi from openpype.hosts.flame.otio import flame_export +from openpype.pipeline import legacy_io +from openpype.pipeline.create import get_subset_name class CollecTimelineOTIO(pyblish.api.ContextPlugin): @@ -24,11 +24,14 @@ class CollecTimelineOTIO(pyblish.api.ContextPlugin): sequence = opfapi.get_current_sequence(opfapi.CTX.selection) # create subset name - subset_name = oplib.get_subset_name_with_asset_doc( + subset_name = get_subset_name( family, variant, task_name, asset_doc, + context.data["projectName"], + context.data["hostName"], + project_settings=context.data["project_settings"] ) # adding otio timeline to context diff --git a/openpype/hosts/flame/plugins/publish/extract_subset_resources.py b/openpype/hosts/flame/plugins/publish/extract_subset_resources.py index d34f5d5854..3e1e8db986 100644 --- a/openpype/hosts/flame/plugins/publish/extract_subset_resources.py +++ b/openpype/hosts/flame/plugins/publish/extract_subset_resources.py @@ -8,6 +8,9 @@ import pyblish.api import openpype.api from openpype.hosts.flame import api as opfapi from openpype.hosts.flame.api import MediaInfoFile +from openpype.pipeline.editorial import ( + get_media_range_with_retimes +) import flame @@ -47,7 +50,6 @@ class ExtractSubsetResources(openpype.api.Extractor): export_presets_mapping = {} def process(self, instance): - if not self.keep_original_representation: # remove previeous representation if not needed instance.data["representations"] = [] @@ -67,18 +69,60 @@ class ExtractSubsetResources(openpype.api.Extractor): # get media source first frame source_first_frame = instance.data["sourceFirstFrame"] + self.log.debug("_ frame_start: {}".format(frame_start)) + self.log.debug("_ source_first_frame: {}".format(source_first_frame)) + 
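For orientation, the retime handling above derives a playback speed from the ratio of source duration to record duration and lets any Timewarp metadata override it; the extractor below reuses that speed to widen or shrink the source range. A minimal sketch of the arithmetic, not part of the patch, with made-up frame numbers (none of these values come from a real Flame segment):

# Hypothetical values, only to illustrate the speed calculation used above.
source_in, source_out = 1001, 1050            # 50 source frames
record_duration = 25                          # segment spans 25 timeline frames

speed = 1.0
source_duration = (source_out - source_in) + 1
if source_duration != record_duration:
    speed *= float(source_duration) / float(record_duration)   # 2.0

# Timewarp metadata, when present, wins over the derived value.
tw_data = {"speed": 0.5}                      # e.g. TimeEffectMetadata(segment).data
if tw_data.get("speed"):
    speed = tw_data["speed"]

print(speed)                                  # 0.5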
# get timeline in/out of segment clip_in = instance.data["clipIn"] clip_out = instance.data["clipOut"] + # get retimed attributres + retimed_data = self._get_retimed_attributes(instance) + + # get individual keys + r_handle_start = retimed_data["handle_start"] + r_handle_end = retimed_data["handle_end"] + r_source_dur = retimed_data["source_duration"] + r_speed = retimed_data["speed"] + # get handles value - take only the max from both handle_start = instance.data["handleStart"] - handle_end = instance.data["handleStart"] + handle_end = instance.data["handleEnd"] handles = max(handle_start, handle_end) + include_handles = instance.data.get("includeHandles") # get media source range with handles source_start_handles = instance.data["sourceStartH"] source_end_handles = instance.data["sourceEndH"] + # retime if needed + if r_speed != 1.0: + source_start_handles = ( + instance.data["sourceStart"] - r_handle_start) + source_end_handles = ( + source_start_handles + + (r_source_dur - 1) + + r_handle_start + + r_handle_end + ) + + # get frame range with handles for representation range + frame_start_handle = frame_start - handle_start + repre_frame_start = frame_start_handle + if include_handles: + if r_speed == 1.0: + frame_start_handle = frame_start + else: + frame_start_handle = ( + frame_start - handle_start) + r_handle_start + + self.log.debug("_ frame_start_handle: {}".format( + frame_start_handle)) + self.log.debug("_ repre_frame_start: {}".format( + repre_frame_start)) + + # calculate duration with handles + source_duration_handles = ( + source_end_handles - source_start_handles) + 1 # create staging dir path staging_dir = self.staging_dir(instance) @@ -93,6 +137,28 @@ class ExtractSubsetResources(openpype.api.Extractor): } export_presets.update(self.export_presets_mapping) + if not instance.data.get("versionData"): + instance.data["versionData"] = {} + + # set versiondata if any retime + version_data = retimed_data.get("version_data") + self.log.debug("_ version_data: {}".format(version_data)) + + if version_data: + instance.data["versionData"].update(version_data) + + if r_speed != 1.0: + instance.data["versionData"].update({ + "frameStart": frame_start_handle, + "frameEnd": ( + (frame_start_handle + source_duration_handles - 1) + - (r_handle_start + r_handle_end) + ) + }) + self.log.debug("_ i_version_data: {}".format( + instance.data["versionData"] + )) + # loop all preset names and for unique_name, preset_config in export_presets.items(): modify_xml_data = {} @@ -115,20 +181,10 @@ class ExtractSubsetResources(openpype.api.Extractor): ) ) - # get frame range with handles for representation range - frame_start_handle = frame_start - handle_start - - # calculate duration with handles - source_duration_handles = ( - source_end_handles - source_start_handles) - - # define in/out marks - in_mark = (source_start_handles - source_first_frame) + 1 - out_mark = in_mark + source_duration_handles - exporting_clip = None name_patern_xml = "_{}.".format( unique_name) + if export_type == "Sequence Publish": # change export clip to sequence exporting_clip = flame.duplicate(sequence_clip) @@ -142,19 +198,25 @@ class ExtractSubsetResources(openpype.api.Extractor): "__{}.").format( unique_name) - # change in/out marks to timeline in/out + # only for h264 with baked retime in_mark = clip_in - out_mark = clip_out + out_mark = clip_out + 1 + modify_xml_data.update({ + "exportHandles": True, + "nbHandles": handles + }) else: + in_mark = (source_start_handles - source_first_frame) + 1 + out_mark = in_mark + 
source_duration_handles exporting_clip = self.import_clip(clip_path) exporting_clip.name.set_value("{}_{}".format( asset_name, segment_name)) # add xml tags modifications modify_xml_data.update({ - "exportHandles": True, - "nbHandles": handles, - "startFrame": frame_start, + # enum position low start from 0 + "frameIndex": 0, + "startFrame": repre_frame_start, "namePattern": name_patern_xml }) @@ -162,6 +224,9 @@ class ExtractSubsetResources(openpype.api.Extractor): # add any xml overrides collected form segment.comment modify_xml_data.update(instance.data["xml_overrides"]) + self.log.debug("_ in_mark: {}".format(in_mark)) + self.log.debug("_ out_mark: {}".format(out_mark)) + export_kwargs = {} # validate xml preset file is filled if preset_file == "": @@ -196,9 +261,8 @@ class ExtractSubsetResources(openpype.api.Extractor): "namePattern": "__thumbnail" }) thumb_frame_number = int(in_mark + ( - source_duration_handles / 2)) + (out_mark - in_mark + 1) / 2)) - self.log.debug("__ in_mark: {}".format(in_mark)) self.log.debug("__ thumb_frame_number: {}".format( thumb_frame_number )) @@ -210,9 +274,6 @@ class ExtractSubsetResources(openpype.api.Extractor): "out_mark": out_mark }) - self.log.debug("__ modify_xml_data: {}".format( - pformat(modify_xml_data) - )) preset_path = opfapi.modify_preset_file( preset_orig_xml_path, staging_dir, modify_xml_data) @@ -281,9 +342,9 @@ class ExtractSubsetResources(openpype.api.Extractor): # add frame range if preset_config["representation_add_range"]: representation_data.update({ - "frameStart": frame_start_handle, + "frameStart": repre_frame_start, "frameEnd": ( - frame_start_handle + source_duration_handles), + repre_frame_start + source_duration_handles) - 1, "fps": instance.data["fps"] }) @@ -300,8 +361,32 @@ class ExtractSubsetResources(openpype.api.Extractor): # at the end remove the duplicated clip flame.delete(exporting_clip) - self.log.debug("All representations: {}".format( - pformat(instance.data["representations"]))) + def _get_retimed_attributes(self, instance): + handle_start = instance.data["handleStart"] + handle_end = instance.data["handleEnd"] + + # get basic variables + otio_clip = instance.data["otioClip"] + + # get available range trimmed with processed retimes + retimed_attributes = get_media_range_with_retimes( + otio_clip, handle_start, handle_end) + self.log.debug( + ">> retimed_attributes: {}".format(retimed_attributes)) + + r_media_in = int(retimed_attributes["mediaIn"]) + r_media_out = int(retimed_attributes["mediaOut"]) + version_data = retimed_attributes.get("versionData") + + return { + "version_data": version_data, + "handle_start": int(retimed_attributes["handleStart"]), + "handle_end": int(retimed_attributes["handleEnd"]), + "source_duration": ( + (r_media_out - r_media_in) + 1 + ), + "speed": float(retimed_attributes["speed"]) + } def _should_skip(self, preset_config, clip_path, unique_name): # get activating attributes @@ -313,8 +398,6 @@ class ExtractSubsetResources(openpype.api.Extractor): unique_name, activated_preset, filter_path_regex ) ) - self.log.debug( - "__ clip_path: `{}`".format(clip_path)) # skip if not activated presete if not activated_preset: diff --git a/openpype/hosts/flame/plugins/publish/integrate_batch_group.py b/openpype/hosts/flame/plugins/publish/integrate_batch_group.py index b59107f155..4d45f67ded 100644 --- a/openpype/hosts/flame/plugins/publish/integrate_batch_group.py +++ b/openpype/hosts/flame/plugins/publish/integrate_batch_group.py @@ -3,9 +3,9 @@ import copy from collections import 
OrderedDict from pprint import pformat import pyblish -from openpype.lib import get_workdir import openpype.hosts.flame.api as opfapi import openpype.pipeline as op_pipeline +from openpype.pipeline.workfile import get_workdir class IntegrateBatchGroup(pyblish.api.InstancePlugin): @@ -324,7 +324,13 @@ class IntegrateBatchGroup(pyblish.api.InstancePlugin): project_doc = instance.data["projectEntity"] asset_entity = instance.data["assetEntity"] anatomy = instance.context.data["anatomy"] + project_settings = instance.context.data["project_settings"] return get_workdir( - project_doc, asset_entity, task_data["name"], "flame", anatomy + project_doc, + asset_entity, + task_data["name"], + "flame", + anatomy, + project_settings=project_settings ) diff --git a/openpype/hosts/fusion/api/pipeline.py b/openpype/hosts/fusion/api/pipeline.py index 54002f9f51..54a6c94b60 100644 --- a/openpype/hosts/fusion/api/pipeline.py +++ b/openpype/hosts/fusion/api/pipeline.py @@ -8,7 +8,7 @@ import contextlib import pyblish.api -from openpype.api import Logger +from openpype.lib import Logger from openpype.pipeline import ( register_loader_plugin_path, register_creator_plugin_path, @@ -20,7 +20,7 @@ from openpype.pipeline import ( ) import openpype.hosts.fusion -log = Logger().get_logger(__name__) +log = Logger.get_logger(__name__) HOST_DIR = os.path.dirname(os.path.abspath(openpype.hosts.fusion.__file__)) PLUGINS_DIR = os.path.join(HOST_DIR, "plugins") diff --git a/openpype/hosts/fusion/scripts/fusion_switch_shot.py b/openpype/hosts/fusion/scripts/fusion_switch_shot.py index 52a157c56e..49ef340679 100644 --- a/openpype/hosts/fusion/scripts/fusion_switch_shot.py +++ b/openpype/hosts/fusion/scripts/fusion_switch_shot.py @@ -3,9 +3,7 @@ import re import sys import logging -# Pipeline imports from openpype.client import ( - get_project, get_asset_by_name, get_versions, ) @@ -17,13 +15,10 @@ from openpype.pipeline import ( from openpype.lib import version_up from openpype.hosts.fusion import api from openpype.hosts.fusion.api import lib -from openpype.lib.avalon_context import get_workdir_from_session +from openpype.pipeline.context_tools import get_workdir_from_session log = logging.getLogger("Update Slap Comp") -self = sys.modules[__name__] -self._project = None - def _format_version_folder(folder): """Format a version folder based on the filepath @@ -212,9 +207,6 @@ def switch(asset_name, filepath=None, new=True): asset = get_asset_by_name(project_name, asset_name) assert asset, "Could not find '%s' in the database" % asset_name - # Get current project - self._project = get_project(project_name) - # Go to comp if not filepath: current_comp = api.get_current_comp() diff --git a/openpype/hosts/fusion/utility_scripts/__OpenPype_Menu__.py b/openpype/hosts/fusion/utility_scripts/__OpenPype_Menu__.py index de8fc4b3b4..870e74280a 100644 --- a/openpype/hosts/fusion/utility_scripts/__OpenPype_Menu__.py +++ b/openpype/hosts/fusion/utility_scripts/__OpenPype_Menu__.py @@ -1,14 +1,12 @@ import os import sys -from openpype.api import Logger +from openpype.lib import Logger from openpype.pipeline import ( install_host, registered_host, ) -log = Logger().get_logger(__name__) - def main(env): from openpype.hosts.fusion import api @@ -17,6 +15,7 @@ def main(env): # activate resolve from pype install_host(api) + log = Logger.get_logger(__name__) log.info(f"Registered host: {registered_host()}") menu.launch_openpype_menu() diff --git a/openpype/hosts/fusion/utility_scripts/switch_ui.py 
b/openpype/hosts/fusion/utility_scripts/switch_ui.py index 01d55db647..93f775b24b 100644 --- a/openpype/hosts/fusion/utility_scripts/switch_ui.py +++ b/openpype/hosts/fusion/utility_scripts/switch_ui.py @@ -14,7 +14,7 @@ from openpype.pipeline import ( legacy_io, ) from openpype.hosts.fusion import api -from openpype.lib.avalon_context import get_workdir_from_session +from openpype.pipeline.context_tools import get_workdir_from_session log = logging.getLogger("Fusion Switch Shot") diff --git a/openpype/hosts/harmony/__init__.py b/openpype/hosts/harmony/__init__.py index d2f710d83d..9177eaa285 100644 --- a/openpype/hosts/harmony/__init__.py +++ b/openpype/hosts/harmony/__init__.py @@ -1,11 +1,10 @@ -import os +from .addon import ( + HARMONY_HOST_DIR, + HarmonyAddon, +) -def add_implementation_envs(env, _app): - """Modify environments to contain all required for implementation.""" - openharmony_path = os.path.join( - os.environ["OPENPYPE_REPOS_ROOT"], "openpype", "hosts", - "harmony", "vendor", "OpenHarmony" - ) - # TODO check if is already set? What to do if is already set? - env["LIB_OPENHARMONY_PATH"] = openharmony_path +__all__ = ( + "HARMONY_HOST_DIR", + "HarmonyAddon", +) diff --git a/openpype/hosts/harmony/addon.py b/openpype/hosts/harmony/addon.py new file mode 100644 index 0000000000..872a7490b5 --- /dev/null +++ b/openpype/hosts/harmony/addon.py @@ -0,0 +1,24 @@ +import os +from openpype.modules import OpenPypeModule +from openpype.modules.interfaces import IHostAddon + +HARMONY_HOST_DIR = os.path.dirname(os.path.abspath(__file__)) + + +class HarmonyAddon(OpenPypeModule, IHostAddon): + name = "harmony" + host_name = "harmony" + + def initialize(self, module_settings): + self.enabled = True + + def add_implementation_envs(self, env, _app): + """Modify environments to contain all required for implementation.""" + openharmony_path = os.path.join( + HARMONY_HOST_DIR, "vendor", "OpenHarmony" + ) + # TODO check if is already set? What to do if is already set? + env["LIB_OPENHARMONY_PATH"] = openharmony_path + + def get_workfile_extensions(self): + return [".zip"] diff --git a/openpype/hosts/harmony/api/pipeline.py b/openpype/hosts/harmony/api/pipeline.py index 86b5753f7e..4b9849c190 100644 --- a/openpype/hosts/harmony/api/pipeline.py +++ b/openpype/hosts/harmony/api/pipeline.py @@ -4,25 +4,24 @@ import logging import pyblish.api -from openpype import lib -from openpype.client import get_representation_by_id from openpype.lib import register_event_callback from openpype.pipeline import ( - legacy_io, register_loader_plugin_path, register_creator_plugin_path, deregister_loader_plugin_path, deregister_creator_plugin_path, AVALON_CONTAINER_ID, ) -import openpype.hosts.harmony +from openpype.pipeline.load import get_outdated_containers +from openpype.pipeline.context_tools import get_current_project_asset + +from openpype.hosts.harmony import HARMONY_HOST_DIR import openpype.hosts.harmony.api as harmony log = logging.getLogger("openpype.hosts.harmony") -HOST_DIR = os.path.dirname(os.path.abspath(openpype.hosts.harmony.__file__)) -PLUGINS_DIR = os.path.join(HOST_DIR, "plugins") +PLUGINS_DIR = os.path.join(HARMONY_HOST_DIR, "plugins") PUBLISH_PATH = os.path.join(PLUGINS_DIR, "publish") LOAD_PATH = os.path.join(PLUGINS_DIR, "load") CREATE_PATH = os.path.join(PLUGINS_DIR, "create") @@ -50,7 +49,9 @@ def get_asset_settings(): dict: Scene data. 
""" - asset_data = lib.get_asset()["data"] + + asset_doc = get_current_project_asset() + asset_data = asset_doc["data"] fps = asset_data.get("fps") frame_start = asset_data.get("frameStart") frame_end = asset_data.get("frameEnd") @@ -105,16 +106,7 @@ def check_inventory(): in Harmony. """ - project_name = legacy_io.active_project() - outdated_containers = [] - for container in ls(): - representation_id = container['representation'] - representation_doc = get_representation_by_id( - project_name, representation_id, fields=["parent"] - ) - if representation_doc and not lib.is_latest(representation_doc): - outdated_containers.append(container) - + outdated_containers = get_outdated_containers() if not outdated_containers: return diff --git a/openpype/hosts/harmony/api/workio.py b/openpype/hosts/harmony/api/workio.py index ab1cb9b1a9..8df5ede917 100644 --- a/openpype/hosts/harmony/api/workio.py +++ b/openpype/hosts/harmony/api/workio.py @@ -2,8 +2,6 @@ import os import shutil -from openpype.pipeline import HOST_WORKFILE_EXTENSIONS - from .lib import ( ProcessContext, get_local_harmony_path, @@ -16,7 +14,7 @@ save_disabled = False def file_extensions(): - return HOST_WORKFILE_EXTENSIONS["harmony"] + return [".zip"] def has_unsaved_changes(): diff --git a/openpype/hosts/harmony/plugins/load/load_background.py b/openpype/hosts/harmony/plugins/load/load_background.py index 9c01fe3cd8..c28a87791e 100644 --- a/openpype/hosts/harmony/plugins/load/load_background.py +++ b/openpype/hosts/harmony/plugins/load/load_background.py @@ -5,8 +5,8 @@ from openpype.pipeline import ( load, get_representation_path, ) +from openpype.pipeline.context_tools import is_representation_from_latest import openpype.hosts.harmony.api as harmony -import openpype.lib copy_files = """function copyFile(srcFilename, dstFilename) @@ -280,9 +280,7 @@ class BackgroundLoader(load.LoaderPlugin): ) def update(self, container, representation): - path = get_representation_path(representation) - with open(path) as json_file: data = json.load(json_file) @@ -300,10 +298,9 @@ class BackgroundLoader(load.LoaderPlugin): bg_folder = os.path.dirname(path) - path = get_representation_path(representation) - print(container) + is_latest = is_representation_from_latest(representation) for layer in sorted(layers): file_to_import = [ os.path.join(bg_folder, layer).replace("\\", "/") @@ -347,7 +344,7 @@ class BackgroundLoader(load.LoaderPlugin): } %s """ % (sig, sig) - if openpype.lib.is_latest(representation): + if is_latest: harmony.send({"function": func, "args": [node, "green"]}) else: harmony.send({"function": func, "args": [node, "red"]}) diff --git a/openpype/hosts/harmony/plugins/load/load_imagesequence.py b/openpype/hosts/harmony/plugins/load/load_imagesequence.py index 18695438d5..1b64aff595 100644 --- a/openpype/hosts/harmony/plugins/load/load_imagesequence.py +++ b/openpype/hosts/harmony/plugins/load/load_imagesequence.py @@ -10,8 +10,8 @@ from openpype.pipeline import ( load, get_representation_path, ) +from openpype.pipeline.context_tools import is_representation_from_latest import openpype.hosts.harmony.api as harmony -import openpype.lib class ImageSequenceLoader(load.LoaderPlugin): @@ -109,7 +109,7 @@ class ImageSequenceLoader(load.LoaderPlugin): ) # Colour node. 
- if openpype.lib.is_latest(representation): + if is_representation_from_latest(representation): harmony.send( { "function": "PypeHarmony.setColor", diff --git a/openpype/hosts/harmony/plugins/load/load_template.py b/openpype/hosts/harmony/plugins/load/load_template.py index c6dc9d913b..f3c69a9104 100644 --- a/openpype/hosts/harmony/plugins/load/load_template.py +++ b/openpype/hosts/harmony/plugins/load/load_template.py @@ -10,8 +10,8 @@ from openpype.pipeline import ( load, get_representation_path, ) +from openpype.pipeline.context_tools import is_representation_from_latest import openpype.hosts.harmony.api as harmony -import openpype.lib class TemplateLoader(load.LoaderPlugin): @@ -83,7 +83,7 @@ class TemplateLoader(load.LoaderPlugin): self_name = self.__class__.__name__ update_and_replace = False - if openpype.lib.is_latest(representation): + if is_representation_from_latest(representation): self._set_green(node) else: self._set_red(node) diff --git a/openpype/hosts/harmony/plugins/publish/collect_workfile.py b/openpype/hosts/harmony/plugins/publish/collect_workfile.py index c0493315a4..3624147435 100644 --- a/openpype/hosts/harmony/plugins/publish/collect_workfile.py +++ b/openpype/hosts/harmony/plugins/publish/collect_workfile.py @@ -1,9 +1,9 @@ # -*- coding: utf-8 -*- """Collect current workfile from Harmony.""" -import pyblish.api import os +import pyblish.api -from openpype.lib import get_subset_name_with_asset_doc +from openpype.pipeline.create import get_subset_name class CollectWorkfile(pyblish.api.ContextPlugin): @@ -17,13 +17,14 @@ class CollectWorkfile(pyblish.api.ContextPlugin): """Plugin entry point.""" family = "workfile" basename = os.path.basename(context.data["currentFile"]) - subset = get_subset_name_with_asset_doc( + subset = get_subset_name( family, "", context.data["anatomyData"]["task"]["name"], context.data["assetEntity"], context.data["anatomyData"]["project"]["name"], - host_name=context.data["hostName"] + host_name=context.data["hostName"], + project_settings=context.data["project_settings"] ) # Create instance diff --git a/openpype/hosts/harmony/plugins/publish/validate_scene_settings.py b/openpype/hosts/harmony/plugins/publish/validate_scene_settings.py index 4c3a6c4465..936533abd6 100644 --- a/openpype/hosts/harmony/plugins/publish/validate_scene_settings.py +++ b/openpype/hosts/harmony/plugins/publish/validate_scene_settings.py @@ -55,6 +55,10 @@ class ValidateSceneSettings(pyblish.api.InstancePlugin): def process(self, instance): """Plugin entry point.""" + + # TODO 'get_asset_settings' could expect asset document as argument + # which is available on 'context.data["assetEntity"]' + # - the same approach can be used in 'ValidateSceneSettingsRepair' expected_settings = harmony.get_asset_settings() self.log.info("scene settings from DB:".format(expected_settings)) diff --git a/openpype/hosts/hiero/__init__.py b/openpype/hosts/hiero/__init__.py index d2ac82391b..e6744d5aec 100644 --- a/openpype/hosts/hiero/__init__.py +++ b/openpype/hosts/hiero/__init__.py @@ -1,41 +1,10 @@ -import os -import platform +from .addon import ( + HIERO_ROOT_DIR, + HieroAddon, +) -def add_implementation_envs(env, _app): - # Add requirements to HIERO_PLUGIN_PATH - pype_root = os.environ["OPENPYPE_REPOS_ROOT"] - new_hiero_paths = [ - os.path.join(pype_root, "openpype", "hosts", "hiero", "api", "startup") - ] - old_hiero_path = env.get("HIERO_PLUGIN_PATH") or "" - for path in old_hiero_path.split(os.pathsep): - if not path: - continue - - norm_path = os.path.normpath(path) - if 
norm_path not in new_hiero_paths: - new_hiero_paths.append(norm_path) - - env["HIERO_PLUGIN_PATH"] = os.pathsep.join(new_hiero_paths) - env.pop("QT_AUTO_SCREEN_SCALE_FACTOR", None) - - # Try to add QuickTime to PATH - quick_time_path = "C:/Program Files (x86)/QuickTime/QTSystem" - if platform.system() == "windows" and os.path.exists(quick_time_path): - path_value = env.get("PATH") or "" - path_paths = [ - path - for path in path_value.split(os.pathsep) - if path - ] - path_paths.append(quick_time_path) - env["PATH"] = os.pathsep.join(path_paths) - - # Set default values if are not already set via settings - defaults = { - "LOGLEVEL": "DEBUG" - } - for key, value in defaults.items(): - if not env.get(key): - env[key] = value +__all__ = ( + "HIERO_ROOT_DIR", + "HieroAddon", +) diff --git a/openpype/hosts/hiero/addon.py b/openpype/hosts/hiero/addon.py new file mode 100644 index 0000000000..3523e9aed7 --- /dev/null +++ b/openpype/hosts/hiero/addon.py @@ -0,0 +1,63 @@ +import os +import platform +from openpype.modules import OpenPypeModule +from openpype.modules.interfaces import IHostAddon + +HIERO_ROOT_DIR = os.path.dirname(os.path.abspath(__file__)) + + +class HieroAddon(OpenPypeModule, IHostAddon): + name = "hiero" + host_name = "hiero" + + def initialize(self, module_settings): + self.enabled = True + + def add_implementation_envs(self, env, _app): + # Add requirements to HIERO_PLUGIN_PATH + new_hiero_paths = [ + os.path.join(HIERO_ROOT_DIR, "api", "startup") + ] + old_hiero_path = env.get("HIERO_PLUGIN_PATH") or "" + for path in old_hiero_path.split(os.pathsep): + if not path: + continue + + norm_path = os.path.normpath(path) + if norm_path not in new_hiero_paths: + new_hiero_paths.append(norm_path) + + env["HIERO_PLUGIN_PATH"] = os.pathsep.join(new_hiero_paths) + env.pop("QT_AUTO_SCREEN_SCALE_FACTOR", None) + + # Add vendor to PYTHONPATH + python_path = env["PYTHONPATH"] + python_path_parts = [] + if python_path: + python_path_parts = python_path.split(os.pathsep) + vendor_path = os.path.join(HIERO_ROOT_DIR, "vendor") + python_path_parts.insert(0, vendor_path) + env["PYTHONPATH"] = os.pathsep.join(python_path_parts) + + # Set default values if are not already set via settings + defaults = { + "LOGLEVEL": "DEBUG" + } + for key, value in defaults.items(): + if not env.get(key): + env[key] = value + + # Try to add QuickTime to PATH + quick_time_path = "C:/Program Files (x86)/QuickTime/QTSystem" + if platform.system() == "windows" and os.path.exists(quick_time_path): + path_value = env.get("PATH") or "" + path_paths = [ + path + for path in path_value.split(os.pathsep) + if path + ] + path_paths.append(quick_time_path) + env["PATH"] = os.pathsep.join(path_paths) + + def get_workfile_extensions(self): + return [".hrox"] diff --git a/openpype/hosts/hiero/api/events.py b/openpype/hosts/hiero/api/events.py index 59fd278a81..862a2607c1 100644 --- a/openpype/hosts/hiero/api/events.py +++ b/openpype/hosts/hiero/api/events.py @@ -1,7 +1,6 @@ import os import hiero.core.events -from openpype.api import Logger -from openpype.lib import register_event_callback +from openpype.lib import Logger, register_event_callback from .lib import ( sync_avalon_data_to_workfile, launch_workfiles_app, @@ -11,7 +10,7 @@ from .lib import ( from .tags import add_tags_to_workfile from .menu import update_menu_task_label -log = Logger().get_logger(__name__) +log = Logger.get_logger(__name__) def startupCompleted(event): diff --git a/openpype/hosts/hiero/api/lib.py b/openpype/hosts/hiero/api/lib.py index 
2f66f3ddd7..e288cea2b1 100644 --- a/openpype/hosts/hiero/api/lib.py +++ b/openpype/hosts/hiero/api/lib.py @@ -21,7 +21,7 @@ from openpype.client import ( ) from openpype.settings import get_anatomy_settings from openpype.pipeline import legacy_io, Anatomy -from openpype.api import Logger +from openpype.lib import Logger from . import tags try: @@ -34,7 +34,7 @@ except ImportError: # from opentimelineio import opentime # from pprint import pformat -log = Logger().get_logger(__name__) +log = Logger.get_logger(__name__) self = sys.modules[__name__] self._has_been_setup = False diff --git a/openpype/hosts/hiero/api/pipeline.py b/openpype/hosts/hiero/api/pipeline.py index b243a38b06..dacfd338bb 100644 --- a/openpype/hosts/hiero/api/pipeline.py +++ b/openpype/hosts/hiero/api/pipeline.py @@ -6,7 +6,7 @@ import contextlib from collections import OrderedDict from pyblish import api as pyblish -from openpype.api import Logger +from openpype.lib import Logger from openpype.pipeline import ( schema, register_creator_plugin_path, @@ -18,7 +18,7 @@ from openpype.pipeline import ( from openpype.tools.utils import host_tools from . import lib, menu, events -log = Logger().get_logger(__name__) +log = Logger.get_logger(__name__) # plugin paths API_DIR = os.path.dirname(os.path.abspath(__file__)) diff --git a/openpype/hosts/hiero/api/plugin.py b/openpype/hosts/hiero/api/plugin.py index add416d04e..77fedbbbdc 100644 --- a/openpype/hosts/hiero/api/plugin.py +++ b/openpype/hosts/hiero/api/plugin.py @@ -9,10 +9,12 @@ from Qt import QtWidgets, QtCore import qargparse import openpype.api as openpype +from openpype.lib import Logger from openpype.pipeline import LoaderPlugin, LegacyCreator +from openpype.pipeline.context_tools import get_current_project_asset from . import lib -log = openpype.Logger().get_logger(__name__) +log = Logger.get_logger(__name__) def load_stylesheet(): @@ -484,7 +486,7 @@ class ClipLoader: """ asset_name = self.context["representation"]["context"]["asset"] - asset_doc = openpype.get_asset(asset_name) + asset_doc = get_current_project_asset(asset_name) log.debug("__ asset_doc: {}".format(pformat(asset_doc))) self.data["assetData"] = asset_doc["data"] diff --git a/openpype/hosts/hiero/api/workio.py b/openpype/hosts/hiero/api/workio.py index 394cb5e2ab..762e22804f 100644 --- a/openpype/hosts/hiero/api/workio.py +++ b/openpype/hosts/hiero/api/workio.py @@ -2,13 +2,12 @@ import os import hiero from openpype.api import Logger -from openpype.pipeline import HOST_WORKFILE_EXTENSIONS log = Logger.get_logger(__name__) def file_extensions(): - return HOST_WORKFILE_EXTENSIONS["hiero"] + return [".hrox"] def has_unsaved_changes(): diff --git a/openpype/hosts/hiero/plugins/publish/precollect_instances.py b/openpype/hosts/hiero/plugins/publish/precollect_instances.py index 2d0ec6fc99..0c7dbc1f22 100644 --- a/openpype/hosts/hiero/plugins/publish/precollect_instances.py +++ b/openpype/hosts/hiero/plugins/publish/precollect_instances.py @@ -109,7 +109,8 @@ class PrecollectInstances(pyblish.api.ContextPlugin): "clipAnnotations": annotations, # add all additional tags - "tags": phiero.get_track_item_tags(track_item) + "tags": phiero.get_track_item_tags(track_item), + "newAssetPublishing": True }) # otio clip data diff --git a/openpype/hosts/hiero/vendor/google/protobuf/__init__.py b/openpype/hosts/hiero/vendor/google/protobuf/__init__.py new file mode 100644 index 0000000000..03f3b29ee7 --- /dev/null +++ b/openpype/hosts/hiero/vendor/google/protobuf/__init__.py @@ -0,0 +1,33 @@ +# Protocol Buffers - Google's 
data interchange format +# Copyright 2008 Google Inc. All rights reserved. +# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +# Copyright 2007 Google Inc. All Rights Reserved. + +__version__ = '3.20.1' diff --git a/openpype/hosts/hiero/vendor/google/protobuf/any_pb2.py b/openpype/hosts/hiero/vendor/google/protobuf/any_pb2.py new file mode 100644 index 0000000000..9121193d11 --- /dev/null +++ b/openpype/hosts/hiero/vendor/google/protobuf/any_pb2.py @@ -0,0 +1,26 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: google/protobuf/any.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x19google/protobuf/any.proto\x12\x0fgoogle.protobuf\"&\n\x03\x41ny\x12\x10\n\x08type_url\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\x0c\x42v\n\x13\x63om.google.protobufB\x08\x41nyProtoP\x01Z,google.golang.org/protobuf/types/known/anypb\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3') + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.any_pb2', globals()) +if _descriptor._USE_C_DESCRIPTORS == False: + + DESCRIPTOR._options = None + DESCRIPTOR._serialized_options = b'\n\023com.google.protobufB\010AnyProtoP\001Z,google.golang.org/protobuf/types/known/anypb\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes' + _ANY._serialized_start=46 + _ANY._serialized_end=84 +# @@protoc_insertion_point(module_scope) diff --git a/openpype/hosts/hiero/vendor/google/protobuf/api_pb2.py b/openpype/hosts/hiero/vendor/google/protobuf/api_pb2.py new file mode 100644 index 0000000000..1721b10a75 --- /dev/null +++ b/openpype/hosts/hiero/vendor/google/protobuf/api_pb2.py @@ -0,0 +1,32 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: google/protobuf/api.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.protobuf import source_context_pb2 as google_dot_protobuf_dot_source__context__pb2 +from google.protobuf import type_pb2 as google_dot_protobuf_dot_type__pb2 + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x19google/protobuf/api.proto\x12\x0fgoogle.protobuf\x1a$google/protobuf/source_context.proto\x1a\x1agoogle/protobuf/type.proto\"\x81\x02\n\x03\x41pi\x12\x0c\n\x04name\x18\x01 \x01(\t\x12(\n\x07methods\x18\x02 \x03(\x0b\x32\x17.google.protobuf.Method\x12(\n\x07options\x18\x03 \x03(\x0b\x32\x17.google.protobuf.Option\x12\x0f\n\x07version\x18\x04 \x01(\t\x12\x36\n\x0esource_context\x18\x05 \x01(\x0b\x32\x1e.google.protobuf.SourceContext\x12&\n\x06mixins\x18\x06 \x03(\x0b\x32\x16.google.protobuf.Mixin\x12\'\n\x06syntax\x18\x07 \x01(\x0e\x32\x17.google.protobuf.Syntax\"\xd5\x01\n\x06Method\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x18\n\x10request_type_url\x18\x02 \x01(\t\x12\x19\n\x11request_streaming\x18\x03 \x01(\x08\x12\x19\n\x11response_type_url\x18\x04 \x01(\t\x12\x1a\n\x12response_streaming\x18\x05 \x01(\x08\x12(\n\x07options\x18\x06 \x03(\x0b\x32\x17.google.protobuf.Option\x12\'\n\x06syntax\x18\x07 \x01(\x0e\x32\x17.google.protobuf.Syntax\"#\n\x05Mixin\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0c\n\x04root\x18\x02 \x01(\tBv\n\x13\x63om.google.protobufB\x08\x41piProtoP\x01Z,google.golang.org/protobuf/types/known/apipb\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3') + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) 
+_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.api_pb2', globals()) +if _descriptor._USE_C_DESCRIPTORS == False: + + DESCRIPTOR._options = None + DESCRIPTOR._serialized_options = b'\n\023com.google.protobufB\010ApiProtoP\001Z,google.golang.org/protobuf/types/known/apipb\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes' + _API._serialized_start=113 + _API._serialized_end=370 + _METHOD._serialized_start=373 + _METHOD._serialized_end=586 + _MIXIN._serialized_start=588 + _MIXIN._serialized_end=623 +# @@protoc_insertion_point(module_scope) diff --git a/openpype/hosts/hiero/vendor/google/protobuf/compiler/__init__.py b/openpype/hosts/hiero/vendor/google/protobuf/compiler/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/openpype/hosts/hiero/vendor/google/protobuf/compiler/plugin_pb2.py b/openpype/hosts/hiero/vendor/google/protobuf/compiler/plugin_pb2.py new file mode 100644 index 0000000000..715a891370 --- /dev/null +++ b/openpype/hosts/hiero/vendor/google/protobuf/compiler/plugin_pb2.py @@ -0,0 +1,35 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: google/protobuf/compiler/plugin.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.protobuf import descriptor_pb2 as google_dot_protobuf_dot_descriptor__pb2 + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n%google/protobuf/compiler/plugin.proto\x12\x18google.protobuf.compiler\x1a google/protobuf/descriptor.proto\"F\n\x07Version\x12\r\n\x05major\x18\x01 \x01(\x05\x12\r\n\x05minor\x18\x02 \x01(\x05\x12\r\n\x05patch\x18\x03 \x01(\x05\x12\x0e\n\x06suffix\x18\x04 \x01(\t\"\xba\x01\n\x14\x43odeGeneratorRequest\x12\x18\n\x10\x66ile_to_generate\x18\x01 \x03(\t\x12\x11\n\tparameter\x18\x02 \x01(\t\x12\x38\n\nproto_file\x18\x0f \x03(\x0b\x32$.google.protobuf.FileDescriptorProto\x12;\n\x10\x63ompiler_version\x18\x03 \x01(\x0b\x32!.google.protobuf.compiler.Version\"\xc1\x02\n\x15\x43odeGeneratorResponse\x12\r\n\x05\x65rror\x18\x01 \x01(\t\x12\x1a\n\x12supported_features\x18\x02 \x01(\x04\x12\x42\n\x04\x66ile\x18\x0f \x03(\x0b\x32\x34.google.protobuf.compiler.CodeGeneratorResponse.File\x1a\x7f\n\x04\x46ile\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x17\n\x0finsertion_point\x18\x02 \x01(\t\x12\x0f\n\x07\x63ontent\x18\x0f \x01(\t\x12?\n\x13generated_code_info\x18\x10 \x01(\x0b\x32\".google.protobuf.GeneratedCodeInfo\"8\n\x07\x46\x65\x61ture\x12\x10\n\x0c\x46\x45\x41TURE_NONE\x10\x00\x12\x1b\n\x17\x46\x45\x41TURE_PROTO3_OPTIONAL\x10\x01\x42W\n\x1c\x63om.google.protobuf.compilerB\x0cPluginProtosZ)google.golang.org/protobuf/types/pluginpb') + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.compiler.plugin_pb2', globals()) +if _descriptor._USE_C_DESCRIPTORS == False: + + DESCRIPTOR._options = None + DESCRIPTOR._serialized_options = b'\n\034com.google.protobuf.compilerB\014PluginProtosZ)google.golang.org/protobuf/types/pluginpb' + _VERSION._serialized_start=101 + _VERSION._serialized_end=171 + _CODEGENERATORREQUEST._serialized_start=174 + _CODEGENERATORREQUEST._serialized_end=360 + _CODEGENERATORRESPONSE._serialized_start=363 + 
_CODEGENERATORRESPONSE._serialized_end=684 + _CODEGENERATORRESPONSE_FILE._serialized_start=499 + _CODEGENERATORRESPONSE_FILE._serialized_end=626 + _CODEGENERATORRESPONSE_FEATURE._serialized_start=628 + _CODEGENERATORRESPONSE_FEATURE._serialized_end=684 +# @@protoc_insertion_point(module_scope) diff --git a/openpype/hosts/hiero/vendor/google/protobuf/descriptor.py b/openpype/hosts/hiero/vendor/google/protobuf/descriptor.py new file mode 100644 index 0000000000..ad70be9a11 --- /dev/null +++ b/openpype/hosts/hiero/vendor/google/protobuf/descriptor.py @@ -0,0 +1,1224 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. +# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +"""Descriptors essentially contain exactly the information found in a .proto +file, in types that make this information accessible in Python. +""" + +__author__ = 'robinson@google.com (Will Robinson)' + +import threading +import warnings + +from google.protobuf.internal import api_implementation + +_USE_C_DESCRIPTORS = False +if api_implementation.Type() == 'cpp': + # Used by MakeDescriptor in cpp mode + import binascii + import os + from google.protobuf.pyext import _message + _USE_C_DESCRIPTORS = True + + +class Error(Exception): + """Base error for this module.""" + + +class TypeTransformationError(Error): + """Error transforming between python proto type and corresponding C++ type.""" + + +if _USE_C_DESCRIPTORS: + # This metaclass allows to override the behavior of code like + # isinstance(my_descriptor, FieldDescriptor) + # and make it return True when the descriptor is an instance of the extension + # type written in C++. + class DescriptorMetaclass(type): + def __instancecheck__(cls, obj): + if super(DescriptorMetaclass, cls).__instancecheck__(obj): + return True + if isinstance(obj, cls._C_DESCRIPTOR_CLASS): + return True + return False +else: + # The standard metaclass; nothing changes. 
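The vendored descriptor module above relies on a metaclass __instancecheck__ so that isinstance() also accepts the C++-backed descriptor objects as instances of the Python classes. Stripped of protobuf specifics, the mechanism looks roughly like this sketch (FastDescriptor is an arbitrary stand-in for the extension type):

class FastDescriptor:                 # stand-in for the C++ extension class
    pass


class _VirtualSubclassMeta(type):
    def __instancecheck__(cls, obj):
        # Regular check first, then accept the C++ twin as a virtual instance.
        if super().__instancecheck__(obj):
            return True
        return isinstance(obj, cls._C_DESCRIPTOR_CLASS)


class Descriptor(metaclass=_VirtualSubclassMeta):
    _C_DESCRIPTOR_CLASS = FastDescriptor


print(isinstance(FastDescriptor(), Descriptor))   # True without inheritance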
+ DescriptorMetaclass = type + + +class _Lock(object): + """Wrapper class of threading.Lock(), which is allowed by 'with'.""" + + def __new__(cls): + self = object.__new__(cls) + self._lock = threading.Lock() # pylint: disable=protected-access + return self + + def __enter__(self): + self._lock.acquire() + + def __exit__(self, exc_type, exc_value, exc_tb): + self._lock.release() + + +_lock = threading.Lock() + + +def _Deprecated(name): + if _Deprecated.count > 0: + _Deprecated.count -= 1 + warnings.warn( + 'Call to deprecated create function %s(). Note: Create unlinked ' + 'descriptors is going to go away. Please use get/find descriptors from ' + 'generated code or query the descriptor_pool.' + % name, + category=DeprecationWarning, stacklevel=3) + + +# Deprecated warnings will print 100 times at most which should be enough for +# users to notice and do not cause timeout. +_Deprecated.count = 100 + + +_internal_create_key = object() + + +class DescriptorBase(metaclass=DescriptorMetaclass): + + """Descriptors base class. + + This class is the base of all descriptor classes. It provides common options + related functionality. + + Attributes: + has_options: True if the descriptor has non-default options. Usually it + is not necessary to read this -- just call GetOptions() which will + happily return the default instance. However, it's sometimes useful + for efficiency, and also useful inside the protobuf implementation to + avoid some bootstrapping issues. + """ + + if _USE_C_DESCRIPTORS: + # The class, or tuple of classes, that are considered as "virtual + # subclasses" of this descriptor class. + _C_DESCRIPTOR_CLASS = () + + def __init__(self, options, serialized_options, options_class_name): + """Initialize the descriptor given its options message and the name of the + class of the options message. The name of the class is required in case + the options message is None and has to be created. + """ + self._options = options + self._options_class_name = options_class_name + self._serialized_options = serialized_options + + # Does this descriptor have non-default options? + self.has_options = (options is not None) or (serialized_options is not None) + + def _SetOptions(self, options, options_class_name): + """Sets the descriptor's options + + This function is used in generated proto2 files to update descriptor + options. It must not be used outside proto2. + """ + self._options = options + self._options_class_name = options_class_name + + # Does this descriptor have non-default options? + self.has_options = options is not None + + def GetOptions(self): + """Retrieves descriptor options. + + This method returns the options set or creates the default options for the + descriptor. + """ + if self._options: + return self._options + + from google.protobuf import descriptor_pb2 + try: + options_class = getattr(descriptor_pb2, + self._options_class_name) + except AttributeError: + raise RuntimeError('Unknown options class name %s!' % + (self._options_class_name)) + + with _lock: + if self._serialized_options is None: + self._options = options_class() + else: + self._options = _ParseOptions(options_class(), + self._serialized_options) + + return self._options + + +class _NestedDescriptorBase(DescriptorBase): + """Common class for descriptors that can be nested.""" + + def __init__(self, options, options_class_name, name, full_name, + file, containing_type, serialized_start=None, + serialized_end=None, serialized_options=None): + """Constructor. 
+ + Args: + options: Protocol message options or None + to use default message options. + options_class_name (str): The class name of the above options. + name (str): Name of this protocol message type. + full_name (str): Fully-qualified name of this protocol message type, + which will include protocol "package" name and the name of any + enclosing types. + file (FileDescriptor): Reference to file info. + containing_type: if provided, this is a nested descriptor, with this + descriptor as parent, otherwise None. + serialized_start: The start index (inclusive) in block in the + file.serialized_pb that describes this descriptor. + serialized_end: The end index (exclusive) in block in the + file.serialized_pb that describes this descriptor. + serialized_options: Protocol message serialized options or None. + """ + super(_NestedDescriptorBase, self).__init__( + options, serialized_options, options_class_name) + + self.name = name + # TODO(falk): Add function to calculate full_name instead of having it in + # memory? + self.full_name = full_name + self.file = file + self.containing_type = containing_type + + self._serialized_start = serialized_start + self._serialized_end = serialized_end + + def CopyToProto(self, proto): + """Copies this to the matching proto in descriptor_pb2. + + Args: + proto: An empty proto instance from descriptor_pb2. + + Raises: + Error: If self couldn't be serialized, due to to few constructor + arguments. + """ + if (self.file is not None and + self._serialized_start is not None and + self._serialized_end is not None): + proto.ParseFromString(self.file.serialized_pb[ + self._serialized_start:self._serialized_end]) + else: + raise Error('Descriptor does not contain serialization.') + + +class Descriptor(_NestedDescriptorBase): + + """Descriptor for a protocol message type. + + Attributes: + name (str): Name of this protocol message type. + full_name (str): Fully-qualified name of this protocol message type, + which will include protocol "package" name and the name of any + enclosing types. + containing_type (Descriptor): Reference to the descriptor of the type + containing us, or None if this is top-level. + fields (list[FieldDescriptor]): Field descriptors for all fields in + this type. + fields_by_number (dict(int, FieldDescriptor)): Same + :class:`FieldDescriptor` objects as in :attr:`fields`, but indexed + by "number" attribute in each FieldDescriptor. + fields_by_name (dict(str, FieldDescriptor)): Same + :class:`FieldDescriptor` objects as in :attr:`fields`, but indexed by + "name" attribute in each :class:`FieldDescriptor`. + nested_types (list[Descriptor]): Descriptor references + for all protocol message types nested within this one. + nested_types_by_name (dict(str, Descriptor)): Same Descriptor + objects as in :attr:`nested_types`, but indexed by "name" attribute + in each Descriptor. + enum_types (list[EnumDescriptor]): :class:`EnumDescriptor` references + for all enums contained within this type. + enum_types_by_name (dict(str, EnumDescriptor)): Same + :class:`EnumDescriptor` objects as in :attr:`enum_types`, but + indexed by "name" attribute in each EnumDescriptor. + enum_values_by_name (dict(str, EnumValueDescriptor)): Dict mapping + from enum value name to :class:`EnumValueDescriptor` for that value. + extensions (list[FieldDescriptor]): All extensions defined directly + within this message type (NOT within a nested type). 
+ extensions_by_name (dict(str, FieldDescriptor)): Same FieldDescriptor + objects as :attr:`extensions`, but indexed by "name" attribute of each + FieldDescriptor. + is_extendable (bool): Does this type define any extension ranges? + oneofs (list[OneofDescriptor]): The list of descriptors for oneof fields + in this message. + oneofs_by_name (dict(str, OneofDescriptor)): Same objects as in + :attr:`oneofs`, but indexed by "name" attribute. + file (FileDescriptor): Reference to file descriptor. + + """ + + if _USE_C_DESCRIPTORS: + _C_DESCRIPTOR_CLASS = _message.Descriptor + + def __new__( + cls, + name=None, + full_name=None, + filename=None, + containing_type=None, + fields=None, + nested_types=None, + enum_types=None, + extensions=None, + options=None, + serialized_options=None, + is_extendable=True, + extension_ranges=None, + oneofs=None, + file=None, # pylint: disable=redefined-builtin + serialized_start=None, + serialized_end=None, + syntax=None, + create_key=None): + _message.Message._CheckCalledFromGeneratedFile() + return _message.default_pool.FindMessageTypeByName(full_name) + + # NOTE(tmarek): The file argument redefining a builtin is nothing we can + # fix right now since we don't know how many clients already rely on the + # name of the argument. + def __init__(self, name, full_name, filename, containing_type, fields, + nested_types, enum_types, extensions, options=None, + serialized_options=None, + is_extendable=True, extension_ranges=None, oneofs=None, + file=None, serialized_start=None, serialized_end=None, # pylint: disable=redefined-builtin + syntax=None, create_key=None): + """Arguments to __init__() are as described in the description + of Descriptor fields above. + + Note that filename is an obsolete argument, that is not used anymore. + Please use file.name to access this as an attribute. + """ + if create_key is not _internal_create_key: + _Deprecated('Descriptor') + + super(Descriptor, self).__init__( + options, 'MessageOptions', name, full_name, file, + containing_type, serialized_start=serialized_start, + serialized_end=serialized_end, serialized_options=serialized_options) + + # We have fields in addition to fields_by_name and fields_by_number, + # so that: + # 1. Clients can index fields by "order in which they're listed." + # 2. Clients can easily iterate over all fields with the terse + # syntax: for f in descriptor.fields: ... 
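Descriptor.__init__ below keeps the ordered fields list plus eager by-name and by-number indexes, and builds the camelCase index lazily on first access. The same pattern in isolation, a sketch assuming field objects with name/number/camelcase_name attributes:

class _FieldIndex:
    def __init__(self, fields):
        self.fields = list(fields)                            # declaration order
        self.fields_by_name = {f.name: f for f in fields}
        self.fields_by_number = {f.number: f for f in fields}
        self._by_camelcase = None                             # filled lazily

    @property
    def fields_by_camelcase_name(self):
        if self._by_camelcase is None:
            self._by_camelcase = {
                f.camelcase_name: f for f in self.fields}
        return self._by_camelcase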
+ self.fields = fields + for field in self.fields: + field.containing_type = self + self.fields_by_number = dict((f.number, f) for f in fields) + self.fields_by_name = dict((f.name, f) for f in fields) + self._fields_by_camelcase_name = None + + self.nested_types = nested_types + for nested_type in nested_types: + nested_type.containing_type = self + self.nested_types_by_name = dict((t.name, t) for t in nested_types) + + self.enum_types = enum_types + for enum_type in self.enum_types: + enum_type.containing_type = self + self.enum_types_by_name = dict((t.name, t) for t in enum_types) + self.enum_values_by_name = dict( + (v.name, v) for t in enum_types for v in t.values) + + self.extensions = extensions + for extension in self.extensions: + extension.extension_scope = self + self.extensions_by_name = dict((f.name, f) for f in extensions) + self.is_extendable = is_extendable + self.extension_ranges = extension_ranges + self.oneofs = oneofs if oneofs is not None else [] + self.oneofs_by_name = dict((o.name, o) for o in self.oneofs) + for oneof in self.oneofs: + oneof.containing_type = self + self.syntax = syntax or "proto2" + + @property + def fields_by_camelcase_name(self): + """Same FieldDescriptor objects as in :attr:`fields`, but indexed by + :attr:`FieldDescriptor.camelcase_name`. + """ + if self._fields_by_camelcase_name is None: + self._fields_by_camelcase_name = dict( + (f.camelcase_name, f) for f in self.fields) + return self._fields_by_camelcase_name + + def EnumValueName(self, enum, value): + """Returns the string name of an enum value. + + This is just a small helper method to simplify a common operation. + + Args: + enum: string name of the Enum. + value: int, value of the enum. + + Returns: + string name of the enum value. + + Raises: + KeyError if either the Enum doesn't exist or the value is not a valid + value for the enum. + """ + return self.enum_types_by_name[enum].values_by_number[value].name + + def CopyToProto(self, proto): + """Copies this to a descriptor_pb2.DescriptorProto. + + Args: + proto: An empty descriptor_pb2.DescriptorProto. + """ + # This function is overridden to give a better doc comment. + super(Descriptor, self).CopyToProto(proto) + + +# TODO(robinson): We should have aggressive checking here, +# for example: +# * If you specify a repeated field, you should not be allowed +# to specify a default value. +# * [Other examples here as needed]. +# +# TODO(robinson): for this and other *Descriptor classes, we +# might also want to lock things down aggressively (e.g., +# prevent clients from setting the attributes). Having +# stronger invariants here in general will reduce the number +# of runtime checks we must do in reflection.py... +class FieldDescriptor(DescriptorBase): + + """Descriptor for a single field in a .proto file. + + Attributes: + name (str): Name of this field, exactly as it appears in .proto. + full_name (str): Name of this field, including containing scope. This is + particularly relevant for extensions. + index (int): Dense, 0-indexed index giving the order that this + field textually appears within its message in the .proto file. + number (int): Tag number declared for this field in the .proto file. + + type (int): (One of the TYPE_* constants below) Declared type. + cpp_type (int): (One of the CPPTYPE_* constants below) C++ type used to + represent this field. + + label (int): (One of the LABEL_* constants below) Tells whether this + field is optional, required, or repeated. 
+ has_default_value (bool): True if this field has a default value defined, + otherwise false. + default_value (Varies): Default value of this field. Only + meaningful for non-repeated scalar fields. Repeated fields + should always set this to [], and non-repeated composite + fields should always set this to None. + + containing_type (Descriptor): Descriptor of the protocol message + type that contains this field. Set by the Descriptor constructor + if we're passed into one. + Somewhat confusingly, for extension fields, this is the + descriptor of the EXTENDED message, not the descriptor + of the message containing this field. (See is_extension and + extension_scope below). + message_type (Descriptor): If a composite field, a descriptor + of the message type contained in this field. Otherwise, this is None. + enum_type (EnumDescriptor): If this field contains an enum, a + descriptor of that enum. Otherwise, this is None. + + is_extension: True iff this describes an extension field. + extension_scope (Descriptor): Only meaningful if is_extension is True. + Gives the message that immediately contains this extension field. + Will be None iff we're a top-level (file-level) extension field. + + options (descriptor_pb2.FieldOptions): Protocol message field options or + None to use default field options. + + containing_oneof (OneofDescriptor): If the field is a member of a oneof + union, contains its descriptor. Otherwise, None. + + file (FileDescriptor): Reference to file descriptor. + """ + + # Must be consistent with C++ FieldDescriptor::Type enum in + # descriptor.h. + # + # TODO(robinson): Find a way to eliminate this repetition. + TYPE_DOUBLE = 1 + TYPE_FLOAT = 2 + TYPE_INT64 = 3 + TYPE_UINT64 = 4 + TYPE_INT32 = 5 + TYPE_FIXED64 = 6 + TYPE_FIXED32 = 7 + TYPE_BOOL = 8 + TYPE_STRING = 9 + TYPE_GROUP = 10 + TYPE_MESSAGE = 11 + TYPE_BYTES = 12 + TYPE_UINT32 = 13 + TYPE_ENUM = 14 + TYPE_SFIXED32 = 15 + TYPE_SFIXED64 = 16 + TYPE_SINT32 = 17 + TYPE_SINT64 = 18 + MAX_TYPE = 18 + + # Must be consistent with C++ FieldDescriptor::CppType enum in + # descriptor.h. + # + # TODO(robinson): Find a way to eliminate this repetition. + CPPTYPE_INT32 = 1 + CPPTYPE_INT64 = 2 + CPPTYPE_UINT32 = 3 + CPPTYPE_UINT64 = 4 + CPPTYPE_DOUBLE = 5 + CPPTYPE_FLOAT = 6 + CPPTYPE_BOOL = 7 + CPPTYPE_ENUM = 8 + CPPTYPE_STRING = 9 + CPPTYPE_MESSAGE = 10 + MAX_CPPTYPE = 10 + + _PYTHON_TO_CPP_PROTO_TYPE_MAP = { + TYPE_DOUBLE: CPPTYPE_DOUBLE, + TYPE_FLOAT: CPPTYPE_FLOAT, + TYPE_ENUM: CPPTYPE_ENUM, + TYPE_INT64: CPPTYPE_INT64, + TYPE_SINT64: CPPTYPE_INT64, + TYPE_SFIXED64: CPPTYPE_INT64, + TYPE_UINT64: CPPTYPE_UINT64, + TYPE_FIXED64: CPPTYPE_UINT64, + TYPE_INT32: CPPTYPE_INT32, + TYPE_SFIXED32: CPPTYPE_INT32, + TYPE_SINT32: CPPTYPE_INT32, + TYPE_UINT32: CPPTYPE_UINT32, + TYPE_FIXED32: CPPTYPE_UINT32, + TYPE_BYTES: CPPTYPE_STRING, + TYPE_STRING: CPPTYPE_STRING, + TYPE_BOOL: CPPTYPE_BOOL, + TYPE_MESSAGE: CPPTYPE_MESSAGE, + TYPE_GROUP: CPPTYPE_MESSAGE + } + + # Must be consistent with C++ FieldDescriptor::Label enum in + # descriptor.h. + # + # TODO(robinson): Find a way to eliminate this repetition. 
+ LABEL_OPTIONAL = 1 + LABEL_REQUIRED = 2 + LABEL_REPEATED = 3 + MAX_LABEL = 3 + + # Must be consistent with C++ constants kMaxNumber, kFirstReservedNumber, + # and kLastReservedNumber in descriptor.h + MAX_FIELD_NUMBER = (1 << 29) - 1 + FIRST_RESERVED_FIELD_NUMBER = 19000 + LAST_RESERVED_FIELD_NUMBER = 19999 + + if _USE_C_DESCRIPTORS: + _C_DESCRIPTOR_CLASS = _message.FieldDescriptor + + def __new__(cls, name, full_name, index, number, type, cpp_type, label, + default_value, message_type, enum_type, containing_type, + is_extension, extension_scope, options=None, + serialized_options=None, + has_default_value=True, containing_oneof=None, json_name=None, + file=None, create_key=None): # pylint: disable=redefined-builtin + _message.Message._CheckCalledFromGeneratedFile() + if is_extension: + return _message.default_pool.FindExtensionByName(full_name) + else: + return _message.default_pool.FindFieldByName(full_name) + + def __init__(self, name, full_name, index, number, type, cpp_type, label, + default_value, message_type, enum_type, containing_type, + is_extension, extension_scope, options=None, + serialized_options=None, + has_default_value=True, containing_oneof=None, json_name=None, + file=None, create_key=None): # pylint: disable=redefined-builtin + """The arguments are as described in the description of FieldDescriptor + attributes above. + + Note that containing_type may be None, and may be set later if necessary + (to deal with circular references between message types, for example). + Likewise for extension_scope. + """ + if create_key is not _internal_create_key: + _Deprecated('FieldDescriptor') + + super(FieldDescriptor, self).__init__( + options, serialized_options, 'FieldOptions') + self.name = name + self.full_name = full_name + self.file = file + self._camelcase_name = None + if json_name is None: + self.json_name = _ToJsonName(name) + else: + self.json_name = json_name + self.index = index + self.number = number + self.type = type + self.cpp_type = cpp_type + self.label = label + self.has_default_value = has_default_value + self.default_value = default_value + self.containing_type = containing_type + self.message_type = message_type + self.enum_type = enum_type + self.is_extension = is_extension + self.extension_scope = extension_scope + self.containing_oneof = containing_oneof + if api_implementation.Type() == 'cpp': + if is_extension: + self._cdescriptor = _message.default_pool.FindExtensionByName(full_name) + else: + self._cdescriptor = _message.default_pool.FindFieldByName(full_name) + else: + self._cdescriptor = None + + @property + def camelcase_name(self): + """Camelcase name of this field. + + Returns: + str: the name in CamelCase. + """ + if self._camelcase_name is None: + self._camelcase_name = _ToCamelCase(self.name) + return self._camelcase_name + + @property + def has_presence(self): + """Whether the field distinguishes between unpopulated and default values. + + Raises: + RuntimeError: singular field that is not linked with message nor file. 
+ """ + if self.label == FieldDescriptor.LABEL_REPEATED: + return False + if (self.cpp_type == FieldDescriptor.CPPTYPE_MESSAGE or + self.containing_oneof): + return True + if hasattr(self.file, 'syntax'): + return self.file.syntax == 'proto2' + if hasattr(self.message_type, 'syntax'): + return self.message_type.syntax == 'proto2' + raise RuntimeError( + 'has_presence is not ready to use because field %s is not' + ' linked with message type nor file' % self.full_name) + + @staticmethod + def ProtoTypeToCppProtoType(proto_type): + """Converts from a Python proto type to a C++ Proto Type. + + The Python ProtocolBuffer classes specify both the 'Python' datatype and the + 'C++' datatype - and they're not the same. This helper method should + translate from one to another. + + Args: + proto_type: the Python proto type (descriptor.FieldDescriptor.TYPE_*) + Returns: + int: descriptor.FieldDescriptor.CPPTYPE_*, the C++ type. + Raises: + TypeTransformationError: when the Python proto type isn't known. + """ + try: + return FieldDescriptor._PYTHON_TO_CPP_PROTO_TYPE_MAP[proto_type] + except KeyError: + raise TypeTransformationError('Unknown proto_type: %s' % proto_type) + + +class EnumDescriptor(_NestedDescriptorBase): + + """Descriptor for an enum defined in a .proto file. + + Attributes: + name (str): Name of the enum type. + full_name (str): Full name of the type, including package name + and any enclosing type(s). + + values (list[EnumValueDescriptor]): List of the values + in this enum. + values_by_name (dict(str, EnumValueDescriptor)): Same as :attr:`values`, + but indexed by the "name" field of each EnumValueDescriptor. + values_by_number (dict(int, EnumValueDescriptor)): Same as :attr:`values`, + but indexed by the "number" field of each EnumValueDescriptor. + containing_type (Descriptor): Descriptor of the immediate containing + type of this enum, or None if this is an enum defined at the + top level in a .proto file. Set by Descriptor's constructor + if we're passed into one. + file (FileDescriptor): Reference to file descriptor. + options (descriptor_pb2.EnumOptions): Enum options message or + None to use default enum options. + """ + + if _USE_C_DESCRIPTORS: + _C_DESCRIPTOR_CLASS = _message.EnumDescriptor + + def __new__(cls, name, full_name, filename, values, + containing_type=None, options=None, + serialized_options=None, file=None, # pylint: disable=redefined-builtin + serialized_start=None, serialized_end=None, create_key=None): + _message.Message._CheckCalledFromGeneratedFile() + return _message.default_pool.FindEnumTypeByName(full_name) + + def __init__(self, name, full_name, filename, values, + containing_type=None, options=None, + serialized_options=None, file=None, # pylint: disable=redefined-builtin + serialized_start=None, serialized_end=None, create_key=None): + """Arguments are as described in the attribute description above. + + Note that filename is an obsolete argument, that is not used anymore. + Please use file.name to access this as an attribute. + """ + if create_key is not _internal_create_key: + _Deprecated('EnumDescriptor') + + super(EnumDescriptor, self).__init__( + options, 'EnumOptions', name, full_name, file, + containing_type, serialized_start=serialized_start, + serialized_end=serialized_end, serialized_options=serialized_options) + + self.values = values + for value in self.values: + value.type = self + self.values_by_name = dict((v.name, v) for v in values) + # Values are reversed to ensure that the first alias is retained. 
+ self.values_by_number = dict((v.number, v) for v in reversed(values)) + + def CopyToProto(self, proto): + """Copies this to a descriptor_pb2.EnumDescriptorProto. + + Args: + proto (descriptor_pb2.EnumDescriptorProto): An empty descriptor proto. + """ + # This function is overridden to give a better doc comment. + super(EnumDescriptor, self).CopyToProto(proto) + + +class EnumValueDescriptor(DescriptorBase): + + """Descriptor for a single value within an enum. + + Attributes: + name (str): Name of this value. + index (int): Dense, 0-indexed index giving the order that this + value appears textually within its enum in the .proto file. + number (int): Actual number assigned to this enum value. + type (EnumDescriptor): :class:`EnumDescriptor` to which this value + belongs. Set by :class:`EnumDescriptor`'s constructor if we're + passed into one. + options (descriptor_pb2.EnumValueOptions): Enum value options message or + None to use default enum value options options. + """ + + if _USE_C_DESCRIPTORS: + _C_DESCRIPTOR_CLASS = _message.EnumValueDescriptor + + def __new__(cls, name, index, number, + type=None, # pylint: disable=redefined-builtin + options=None, serialized_options=None, create_key=None): + _message.Message._CheckCalledFromGeneratedFile() + # There is no way we can build a complete EnumValueDescriptor with the + # given parameters (the name of the Enum is not known, for example). + # Fortunately generated files just pass it to the EnumDescriptor() + # constructor, which will ignore it, so returning None is good enough. + return None + + def __init__(self, name, index, number, + type=None, # pylint: disable=redefined-builtin + options=None, serialized_options=None, create_key=None): + """Arguments are as described in the attribute description above.""" + if create_key is not _internal_create_key: + _Deprecated('EnumValueDescriptor') + + super(EnumValueDescriptor, self).__init__( + options, serialized_options, 'EnumValueOptions') + self.name = name + self.index = index + self.number = number + self.type = type + + +class OneofDescriptor(DescriptorBase): + """Descriptor for a oneof field. + + Attributes: + name (str): Name of the oneof field. + full_name (str): Full name of the oneof field, including package name. + index (int): 0-based index giving the order of the oneof field inside + its containing type. + containing_type (Descriptor): :class:`Descriptor` of the protocol message + type that contains this field. Set by the :class:`Descriptor` constructor + if we're passed into one. + fields (list[FieldDescriptor]): The list of field descriptors this + oneof can contain. + """ + + if _USE_C_DESCRIPTORS: + _C_DESCRIPTOR_CLASS = _message.OneofDescriptor + + def __new__( + cls, name, full_name, index, containing_type, fields, options=None, + serialized_options=None, create_key=None): + _message.Message._CheckCalledFromGeneratedFile() + return _message.default_pool.FindOneofByName(full_name) + + def __init__( + self, name, full_name, index, containing_type, fields, options=None, + serialized_options=None, create_key=None): + """Arguments are as described in the attribute description above.""" + if create_key is not _internal_create_key: + _Deprecated('OneofDescriptor') + + super(OneofDescriptor, self).__init__( + options, serialized_options, 'OneofOptions') + self.name = name + self.full_name = full_name + self.index = index + self.containing_type = containing_type + self.fields = fields + + +class ServiceDescriptor(_NestedDescriptorBase): + + """Descriptor for a service. 
+ + Attributes: + name (str): Name of the service. + full_name (str): Full name of the service, including package name. + index (int): 0-indexed index giving the order that this services + definition appears within the .proto file. + methods (list[MethodDescriptor]): List of methods provided by this + service. + methods_by_name (dict(str, MethodDescriptor)): Same + :class:`MethodDescriptor` objects as in :attr:`methods_by_name`, but + indexed by "name" attribute in each :class:`MethodDescriptor`. + options (descriptor_pb2.ServiceOptions): Service options message or + None to use default service options. + file (FileDescriptor): Reference to file info. + """ + + if _USE_C_DESCRIPTORS: + _C_DESCRIPTOR_CLASS = _message.ServiceDescriptor + + def __new__( + cls, + name=None, + full_name=None, + index=None, + methods=None, + options=None, + serialized_options=None, + file=None, # pylint: disable=redefined-builtin + serialized_start=None, + serialized_end=None, + create_key=None): + _message.Message._CheckCalledFromGeneratedFile() # pylint: disable=protected-access + return _message.default_pool.FindServiceByName(full_name) + + def __init__(self, name, full_name, index, methods, options=None, + serialized_options=None, file=None, # pylint: disable=redefined-builtin + serialized_start=None, serialized_end=None, create_key=None): + if create_key is not _internal_create_key: + _Deprecated('ServiceDescriptor') + + super(ServiceDescriptor, self).__init__( + options, 'ServiceOptions', name, full_name, file, + None, serialized_start=serialized_start, + serialized_end=serialized_end, serialized_options=serialized_options) + self.index = index + self.methods = methods + self.methods_by_name = dict((m.name, m) for m in methods) + # Set the containing service for each method in this service. + for method in self.methods: + method.containing_service = self + + def FindMethodByName(self, name): + """Searches for the specified method, and returns its descriptor. + + Args: + name (str): Name of the method. + Returns: + MethodDescriptor or None: the descriptor for the requested method, if + found. + """ + return self.methods_by_name.get(name, None) + + def CopyToProto(self, proto): + """Copies this to a descriptor_pb2.ServiceDescriptorProto. + + Args: + proto (descriptor_pb2.ServiceDescriptorProto): An empty descriptor proto. + """ + # This function is overridden to give a better doc comment. + super(ServiceDescriptor, self).CopyToProto(proto) + + +class MethodDescriptor(DescriptorBase): + + """Descriptor for a method in a service. + + Attributes: + name (str): Name of the method within the service. + full_name (str): Full name of method. + index (int): 0-indexed index of the method inside the service. + containing_service (ServiceDescriptor): The service that contains this + method. + input_type (Descriptor): The descriptor of the message that this method + accepts. + output_type (Descriptor): The descriptor of the message that this method + returns. + client_streaming (bool): Whether this method uses client streaming. + server_streaming (bool): Whether this method uses server streaming. + options (descriptor_pb2.MethodOptions or None): Method options message, or + None to use default method options. 
+ """ + + if _USE_C_DESCRIPTORS: + _C_DESCRIPTOR_CLASS = _message.MethodDescriptor + + def __new__(cls, + name, + full_name, + index, + containing_service, + input_type, + output_type, + client_streaming=False, + server_streaming=False, + options=None, + serialized_options=None, + create_key=None): + _message.Message._CheckCalledFromGeneratedFile() # pylint: disable=protected-access + return _message.default_pool.FindMethodByName(full_name) + + def __init__(self, + name, + full_name, + index, + containing_service, + input_type, + output_type, + client_streaming=False, + server_streaming=False, + options=None, + serialized_options=None, + create_key=None): + """The arguments are as described in the description of MethodDescriptor + attributes above. + + Note that containing_service may be None, and may be set later if necessary. + """ + if create_key is not _internal_create_key: + _Deprecated('MethodDescriptor') + + super(MethodDescriptor, self).__init__( + options, serialized_options, 'MethodOptions') + self.name = name + self.full_name = full_name + self.index = index + self.containing_service = containing_service + self.input_type = input_type + self.output_type = output_type + self.client_streaming = client_streaming + self.server_streaming = server_streaming + + def CopyToProto(self, proto): + """Copies this to a descriptor_pb2.MethodDescriptorProto. + + Args: + proto (descriptor_pb2.MethodDescriptorProto): An empty descriptor proto. + + Raises: + Error: If self couldn't be serialized, due to too few constructor + arguments. + """ + if self.containing_service is not None: + from google.protobuf import descriptor_pb2 + service_proto = descriptor_pb2.ServiceDescriptorProto() + self.containing_service.CopyToProto(service_proto) + proto.CopyFrom(service_proto.method[self.index]) + else: + raise Error('Descriptor does not contain a service.') + + +class FileDescriptor(DescriptorBase): + """Descriptor for a file. Mimics the descriptor_pb2.FileDescriptorProto. + + Note that :attr:`enum_types_by_name`, :attr:`extensions_by_name`, and + :attr:`dependencies` fields are only set by the + :py:mod:`google.protobuf.message_factory` module, and not by the generated + proto code. + + Attributes: + name (str): Name of file, relative to root of source tree. + package (str): Name of the package + syntax (str): string indicating syntax of the file (can be "proto2" or + "proto3") + serialized_pb (bytes): Byte string of serialized + :class:`descriptor_pb2.FileDescriptorProto`. + dependencies (list[FileDescriptor]): List of other :class:`FileDescriptor` + objects this :class:`FileDescriptor` depends on. + public_dependencies (list[FileDescriptor]): A subset of + :attr:`dependencies`, which were declared as "public". + message_types_by_name (dict(str, Descriptor)): Mapping from message names + to their :class:`Descriptor`. + enum_types_by_name (dict(str, EnumDescriptor)): Mapping from enum names to + their :class:`EnumDescriptor`. + extensions_by_name (dict(str, FieldDescriptor)): Mapping from extension + names declared at file scope to their :class:`FieldDescriptor`. + services_by_name (dict(str, ServiceDescriptor)): Mapping from services' + names to their :class:`ServiceDescriptor`. + pool (DescriptorPool): The pool this descriptor belongs to. When not + passed to the constructor, the global default pool is used. 
+ """ + + if _USE_C_DESCRIPTORS: + _C_DESCRIPTOR_CLASS = _message.FileDescriptor + + def __new__(cls, name, package, options=None, + serialized_options=None, serialized_pb=None, + dependencies=None, public_dependencies=None, + syntax=None, pool=None, create_key=None): + # FileDescriptor() is called from various places, not only from generated + # files, to register dynamic proto files and messages. + # pylint: disable=g-explicit-bool-comparison + if serialized_pb == b'': + # Cpp generated code must be linked in if serialized_pb is '' + try: + return _message.default_pool.FindFileByName(name) + except KeyError: + raise RuntimeError('Please link in cpp generated lib for %s' % (name)) + elif serialized_pb: + return _message.default_pool.AddSerializedFile(serialized_pb) + else: + return super(FileDescriptor, cls).__new__(cls) + + def __init__(self, name, package, options=None, + serialized_options=None, serialized_pb=None, + dependencies=None, public_dependencies=None, + syntax=None, pool=None, create_key=None): + """Constructor.""" + if create_key is not _internal_create_key: + _Deprecated('FileDescriptor') + + super(FileDescriptor, self).__init__( + options, serialized_options, 'FileOptions') + + if pool is None: + from google.protobuf import descriptor_pool + pool = descriptor_pool.Default() + self.pool = pool + self.message_types_by_name = {} + self.name = name + self.package = package + self.syntax = syntax or "proto2" + self.serialized_pb = serialized_pb + + self.enum_types_by_name = {} + self.extensions_by_name = {} + self.services_by_name = {} + self.dependencies = (dependencies or []) + self.public_dependencies = (public_dependencies or []) + + def CopyToProto(self, proto): + """Copies this to a descriptor_pb2.FileDescriptorProto. + + Args: + proto: An empty descriptor_pb2.FileDescriptorProto. + """ + proto.ParseFromString(self.serialized_pb) + + +def _ParseOptions(message, string): + """Parses serialized options. + + This helper function is used to parse serialized options in generated + proto2 files. It must not be used outside proto2. + """ + message.ParseFromString(string) + return message + + +def _ToCamelCase(name): + """Converts name to camel-case and returns it.""" + capitalize_next = False + result = [] + + for c in name: + if c == '_': + if result: + capitalize_next = True + elif capitalize_next: + result.append(c.upper()) + capitalize_next = False + else: + result += c + + # Lower-case the first letter. + if result and result[0].isupper(): + result[0] = result[0].lower() + return ''.join(result) + + +def _OptionsOrNone(descriptor_proto): + """Returns the value of the field `options`, or None if it is not set.""" + if descriptor_proto.HasField('options'): + return descriptor_proto.options + else: + return None + + +def _ToJsonName(name): + """Converts name to Json name and returns it.""" + capitalize_next = False + result = [] + + for c in name: + if c == '_': + capitalize_next = True + elif capitalize_next: + result.append(c.upper()) + capitalize_next = False + else: + result += c + + return ''.join(result) + + +def MakeDescriptor(desc_proto, package='', build_file_if_cpp=True, + syntax=None): + """Make a protobuf Descriptor given a DescriptorProto protobuf. + + Handles nested descriptors. Note that this is limited to the scope of defining + a message inside of another message. Composite fields can currently only be + resolved if the message is defined in the same scope as the field. + + Args: + desc_proto: The descriptor_pb2.DescriptorProto protobuf message. 
+ package: Optional package name for the new message Descriptor (string). + build_file_if_cpp: Update the C++ descriptor pool if api matches. + Set to False on recursion, so no duplicates are created. + syntax: The syntax/semantics that should be used. Set to "proto3" to get + proto3 field presence semantics. + Returns: + A Descriptor for protobuf messages. + """ + if api_implementation.Type() == 'cpp' and build_file_if_cpp: + # The C++ implementation requires all descriptors to be backed by the same + # definition in the C++ descriptor pool. To do this, we build a + # FileDescriptorProto with the same definition as this descriptor and build + # it into the pool. + from google.protobuf import descriptor_pb2 + file_descriptor_proto = descriptor_pb2.FileDescriptorProto() + file_descriptor_proto.message_type.add().MergeFrom(desc_proto) + + # Generate a random name for this proto file to prevent conflicts with any + # imported ones. We need to specify a file name so the descriptor pool + # accepts our FileDescriptorProto, but it is not important what that file + # name is actually set to. + proto_name = binascii.hexlify(os.urandom(16)).decode('ascii') + + if package: + file_descriptor_proto.name = os.path.join(package.replace('.', '/'), + proto_name + '.proto') + file_descriptor_proto.package = package + else: + file_descriptor_proto.name = proto_name + '.proto' + + _message.default_pool.Add(file_descriptor_proto) + result = _message.default_pool.FindFileByName(file_descriptor_proto.name) + + if _USE_C_DESCRIPTORS: + return result.message_types_by_name[desc_proto.name] + + full_message_name = [desc_proto.name] + if package: full_message_name.insert(0, package) + + # Create Descriptors for enum types + enum_types = {} + for enum_proto in desc_proto.enum_type: + full_name = '.'.join(full_message_name + [enum_proto.name]) + enum_desc = EnumDescriptor( + enum_proto.name, full_name, None, [ + EnumValueDescriptor(enum_val.name, ii, enum_val.number, + create_key=_internal_create_key) + for ii, enum_val in enumerate(enum_proto.value)], + create_key=_internal_create_key) + enum_types[full_name] = enum_desc + + # Create Descriptors for nested types + nested_types = {} + for nested_proto in desc_proto.nested_type: + full_name = '.'.join(full_message_name + [nested_proto.name]) + # Nested types are just those defined inside of the message, not all types + # used by fields in the message, so no loops are possible here. 
+ nested_desc = MakeDescriptor(nested_proto, + package='.'.join(full_message_name), + build_file_if_cpp=False, + syntax=syntax) + nested_types[full_name] = nested_desc + + fields = [] + for field_proto in desc_proto.field: + full_name = '.'.join(full_message_name + [field_proto.name]) + enum_desc = None + nested_desc = None + if field_proto.json_name: + json_name = field_proto.json_name + else: + json_name = None + if field_proto.HasField('type_name'): + type_name = field_proto.type_name + full_type_name = '.'.join(full_message_name + + [type_name[type_name.rfind('.')+1:]]) + if full_type_name in nested_types: + nested_desc = nested_types[full_type_name] + elif full_type_name in enum_types: + enum_desc = enum_types[full_type_name] + # Else type_name references a non-local type, which isn't implemented + field = FieldDescriptor( + field_proto.name, full_name, field_proto.number - 1, + field_proto.number, field_proto.type, + FieldDescriptor.ProtoTypeToCppProtoType(field_proto.type), + field_proto.label, None, nested_desc, enum_desc, None, False, None, + options=_OptionsOrNone(field_proto), has_default_value=False, + json_name=json_name, create_key=_internal_create_key) + fields.append(field) + + desc_name = '.'.join(full_message_name) + return Descriptor(desc_proto.name, desc_name, None, None, fields, + list(nested_types.values()), list(enum_types.values()), [], + options=_OptionsOrNone(desc_proto), + create_key=_internal_create_key) diff --git a/openpype/hosts/hiero/vendor/google/protobuf/descriptor_database.py b/openpype/hosts/hiero/vendor/google/protobuf/descriptor_database.py new file mode 100644 index 0000000000..073eddc711 --- /dev/null +++ b/openpype/hosts/hiero/vendor/google/protobuf/descriptor_database.py @@ -0,0 +1,177 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. +# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
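For orientation, a minimal usage sketch of the DescriptorDatabase API that the vendored descriptor_database.py below provides (Add, FindFileByName, FindFileContainingSymbol). This is illustrative only: the file name 'example.proto' and message name 'ExampleMessage' are hypothetical, and the sketch assumes the vendored package is importable as google.protobuf in the hiero host environment.

    from google.protobuf import descriptor_pb2
    from google.protobuf import descriptor_database

    # Build a FileDescriptorProto by hand; the names used here are hypothetical.
    file_proto = descriptor_pb2.FileDescriptorProto()
    file_proto.name = 'example.proto'
    file_proto.package = 'example'
    file_proto.message_type.add().name = 'ExampleMessage'

    # Register the proto, then look it up by file name or by contained symbol.
    db = descriptor_database.DescriptorDatabase()
    db.Add(file_proto)

    # Both lookups return the same FileDescriptorProto that was added above.
    assert db.FindFileByName('example.proto') is file_proto
    assert db.FindFileContainingSymbol('example.ExampleMessage') is file_proto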
+ +"""Provides a container for DescriptorProtos.""" + +__author__ = 'matthewtoia@google.com (Matt Toia)' + +import warnings + + +class Error(Exception): + pass + + +class DescriptorDatabaseConflictingDefinitionError(Error): + """Raised when a proto is added with the same name & different descriptor.""" + + +class DescriptorDatabase(object): + """A container accepting FileDescriptorProtos and maps DescriptorProtos.""" + + def __init__(self): + self._file_desc_protos_by_file = {} + self._file_desc_protos_by_symbol = {} + + def Add(self, file_desc_proto): + """Adds the FileDescriptorProto and its types to this database. + + Args: + file_desc_proto: The FileDescriptorProto to add. + Raises: + DescriptorDatabaseConflictingDefinitionError: if an attempt is made to + add a proto with the same name but different definition than an + existing proto in the database. + """ + proto_name = file_desc_proto.name + if proto_name not in self._file_desc_protos_by_file: + self._file_desc_protos_by_file[proto_name] = file_desc_proto + elif self._file_desc_protos_by_file[proto_name] != file_desc_proto: + raise DescriptorDatabaseConflictingDefinitionError( + '%s already added, but with different descriptor.' % proto_name) + else: + return + + # Add all the top-level descriptors to the index. + package = file_desc_proto.package + for message in file_desc_proto.message_type: + for name in _ExtractSymbols(message, package): + self._AddSymbol(name, file_desc_proto) + for enum in file_desc_proto.enum_type: + self._AddSymbol(('.'.join((package, enum.name))), file_desc_proto) + for enum_value in enum.value: + self._file_desc_protos_by_symbol[ + '.'.join((package, enum_value.name))] = file_desc_proto + for extension in file_desc_proto.extension: + self._AddSymbol(('.'.join((package, extension.name))), file_desc_proto) + for service in file_desc_proto.service: + self._AddSymbol(('.'.join((package, service.name))), file_desc_proto) + + def FindFileByName(self, name): + """Finds the file descriptor proto by file name. + + Typically the file name is a relative path ending to a .proto file. The + proto with the given name will have to have been added to this database + using the Add method or else an error will be raised. + + Args: + name: The file name to find. + + Returns: + The file descriptor proto matching the name. + + Raises: + KeyError if no file by the given name was added. + """ + + return self._file_desc_protos_by_file[name] + + def FindFileContainingSymbol(self, symbol): + """Finds the file descriptor proto containing the specified symbol. + + The symbol should be a fully qualified name including the file descriptor's + package and any containing messages. Some examples: + + 'some.package.name.Message' + 'some.package.name.Message.NestedEnum' + 'some.package.name.Message.some_field' + + The file descriptor proto containing the specified symbol must be added to + this database using the Add method or else an error will be raised. + + Args: + symbol: The fully qualified symbol name. + + Returns: + The file descriptor proto containing the symbol. + + Raises: + KeyError if no file contains the specified symbol. + """ + try: + return self._file_desc_protos_by_symbol[symbol] + except KeyError: + # Fields, enum values, and nested extensions are not in + # _file_desc_protos_by_symbol. Try to find the top level + # descriptor. Non-existent nested symbol under a valid top level + # descriptor can also be found. The behavior is the same with + # protobuf C++. 
+ top_level, _, _ = symbol.rpartition('.') + try: + return self._file_desc_protos_by_symbol[top_level] + except KeyError: + # Raise the original symbol as a KeyError for better diagnostics. + raise KeyError(symbol) + + def FindFileContainingExtension(self, extendee_name, extension_number): + # TODO(jieluo): implement this API. + return None + + def FindAllExtensionNumbers(self, extendee_name): + # TODO(jieluo): implement this API. + return [] + + def _AddSymbol(self, name, file_desc_proto): + if name in self._file_desc_protos_by_symbol: + warn_msg = ('Conflict register for file "' + file_desc_proto.name + + '": ' + name + + ' is already defined in file "' + + self._file_desc_protos_by_symbol[name].name + '"') + warnings.warn(warn_msg, RuntimeWarning) + self._file_desc_protos_by_symbol[name] = file_desc_proto + + +def _ExtractSymbols(desc_proto, package): + """Pulls out all the symbols from a descriptor proto. + + Args: + desc_proto: The proto to extract symbols from. + package: The package containing the descriptor type. + + Yields: + The fully qualified name found in the descriptor. + """ + message_name = package + '.' + desc_proto.name if package else desc_proto.name + yield message_name + for nested_type in desc_proto.nested_type: + for symbol in _ExtractSymbols(nested_type, message_name): + yield symbol + for enum_type in desc_proto.enum_type: + yield '.'.join((message_name, enum_type.name)) diff --git a/openpype/hosts/hiero/vendor/google/protobuf/descriptor_pb2.py b/openpype/hosts/hiero/vendor/google/protobuf/descriptor_pb2.py new file mode 100644 index 0000000000..f570386432 --- /dev/null +++ b/openpype/hosts/hiero/vendor/google/protobuf/descriptor_pb2.py @@ -0,0 +1,1925 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: google/protobuf/descriptor.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +if _descriptor._USE_C_DESCRIPTORS == False: + DESCRIPTOR = _descriptor.FileDescriptor( + name='google/protobuf/descriptor.proto', + package='google.protobuf', + syntax='proto2', + serialized_options=None, + create_key=_descriptor._internal_create_key, + serialized_pb=b'\n google/protobuf/descriptor.proto\x12\x0fgoogle.protobuf\"G\n\x11\x46ileDescriptorSet\x12\x32\n\x04\x66ile\x18\x01 \x03(\x0b\x32$.google.protobuf.FileDescriptorProto\"\xdb\x03\n\x13\x46ileDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0f\n\x07package\x18\x02 \x01(\t\x12\x12\n\ndependency\x18\x03 \x03(\t\x12\x19\n\x11public_dependency\x18\n \x03(\x05\x12\x17\n\x0fweak_dependency\x18\x0b \x03(\x05\x12\x36\n\x0cmessage_type\x18\x04 \x03(\x0b\x32 .google.protobuf.DescriptorProto\x12\x37\n\tenum_type\x18\x05 \x03(\x0b\x32$.google.protobuf.EnumDescriptorProto\x12\x38\n\x07service\x18\x06 \x03(\x0b\x32\'.google.protobuf.ServiceDescriptorProto\x12\x38\n\textension\x18\x07 \x03(\x0b\x32%.google.protobuf.FieldDescriptorProto\x12-\n\x07options\x18\x08 \x01(\x0b\x32\x1c.google.protobuf.FileOptions\x12\x39\n\x10source_code_info\x18\t \x01(\x0b\x32\x1f.google.protobuf.SourceCodeInfo\x12\x0e\n\x06syntax\x18\x0c \x01(\t\"\xa9\x05\n\x0f\x44\x65scriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x34\n\x05\x66ield\x18\x02 \x03(\x0b\x32%.google.protobuf.FieldDescriptorProto\x12\x38\n\textension\x18\x06 \x03(\x0b\x32%.google.protobuf.FieldDescriptorProto\x12\x35\n\x0bnested_type\x18\x03 \x03(\x0b\x32 .google.protobuf.DescriptorProto\x12\x37\n\tenum_type\x18\x04 \x03(\x0b\x32$.google.protobuf.EnumDescriptorProto\x12H\n\x0f\x65xtension_range\x18\x05 \x03(\x0b\x32/.google.protobuf.DescriptorProto.ExtensionRange\x12\x39\n\noneof_decl\x18\x08 \x03(\x0b\x32%.google.protobuf.OneofDescriptorProto\x12\x30\n\x07options\x18\x07 \x01(\x0b\x32\x1f.google.protobuf.MessageOptions\x12\x46\n\x0ereserved_range\x18\t \x03(\x0b\x32..google.protobuf.DescriptorProto.ReservedRange\x12\x15\n\rreserved_name\x18\n \x03(\t\x1a\x65\n\x0e\x45xtensionRange\x12\r\n\x05start\x18\x01 \x01(\x05\x12\x0b\n\x03\x65nd\x18\x02 \x01(\x05\x12\x37\n\x07options\x18\x03 \x01(\x0b\x32&.google.protobuf.ExtensionRangeOptions\x1a+\n\rReservedRange\x12\r\n\x05start\x18\x01 \x01(\x05\x12\x0b\n\x03\x65nd\x18\x02 \x01(\x05\"g\n\x15\x45xtensionRangeOptions\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\xd5\x05\n\x14\x46ieldDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0e\n\x06number\x18\x03 \x01(\x05\x12:\n\x05label\x18\x04 \x01(\x0e\x32+.google.protobuf.FieldDescriptorProto.Label\x12\x38\n\x04type\x18\x05 \x01(\x0e\x32*.google.protobuf.FieldDescriptorProto.Type\x12\x11\n\ttype_name\x18\x06 \x01(\t\x12\x10\n\x08\x65xtendee\x18\x02 \x01(\t\x12\x15\n\rdefault_value\x18\x07 \x01(\t\x12\x13\n\x0boneof_index\x18\t \x01(\x05\x12\x11\n\tjson_name\x18\n \x01(\t\x12.\n\x07options\x18\x08 \x01(\x0b\x32\x1d.google.protobuf.FieldOptions\x12\x17\n\x0fproto3_optional\x18\x11 
\x01(\x08\"\xb6\x02\n\x04Type\x12\x0f\n\x0bTYPE_DOUBLE\x10\x01\x12\x0e\n\nTYPE_FLOAT\x10\x02\x12\x0e\n\nTYPE_INT64\x10\x03\x12\x0f\n\x0bTYPE_UINT64\x10\x04\x12\x0e\n\nTYPE_INT32\x10\x05\x12\x10\n\x0cTYPE_FIXED64\x10\x06\x12\x10\n\x0cTYPE_FIXED32\x10\x07\x12\r\n\tTYPE_BOOL\x10\x08\x12\x0f\n\x0bTYPE_STRING\x10\t\x12\x0e\n\nTYPE_GROUP\x10\n\x12\x10\n\x0cTYPE_MESSAGE\x10\x0b\x12\x0e\n\nTYPE_BYTES\x10\x0c\x12\x0f\n\x0bTYPE_UINT32\x10\r\x12\r\n\tTYPE_ENUM\x10\x0e\x12\x11\n\rTYPE_SFIXED32\x10\x0f\x12\x11\n\rTYPE_SFIXED64\x10\x10\x12\x0f\n\x0bTYPE_SINT32\x10\x11\x12\x0f\n\x0bTYPE_SINT64\x10\x12\"C\n\x05Label\x12\x12\n\x0eLABEL_OPTIONAL\x10\x01\x12\x12\n\x0eLABEL_REQUIRED\x10\x02\x12\x12\n\x0eLABEL_REPEATED\x10\x03\"T\n\x14OneofDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12.\n\x07options\x18\x02 \x01(\x0b\x32\x1d.google.protobuf.OneofOptions\"\xa4\x02\n\x13\x45numDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x38\n\x05value\x18\x02 \x03(\x0b\x32).google.protobuf.EnumValueDescriptorProto\x12-\n\x07options\x18\x03 \x01(\x0b\x32\x1c.google.protobuf.EnumOptions\x12N\n\x0ereserved_range\x18\x04 \x03(\x0b\x32\x36.google.protobuf.EnumDescriptorProto.EnumReservedRange\x12\x15\n\rreserved_name\x18\x05 \x03(\t\x1a/\n\x11\x45numReservedRange\x12\r\n\x05start\x18\x01 \x01(\x05\x12\x0b\n\x03\x65nd\x18\x02 \x01(\x05\"l\n\x18\x45numValueDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0e\n\x06number\x18\x02 \x01(\x05\x12\x32\n\x07options\x18\x03 \x01(\x0b\x32!.google.protobuf.EnumValueOptions\"\x90\x01\n\x16ServiceDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x36\n\x06method\x18\x02 \x03(\x0b\x32&.google.protobuf.MethodDescriptorProto\x12\x30\n\x07options\x18\x03 \x01(\x0b\x32\x1f.google.protobuf.ServiceOptions\"\xc1\x01\n\x15MethodDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x12\n\ninput_type\x18\x02 \x01(\t\x12\x13\n\x0boutput_type\x18\x03 \x01(\t\x12/\n\x07options\x18\x04 \x01(\x0b\x32\x1e.google.protobuf.MethodOptions\x12\x1f\n\x10\x63lient_streaming\x18\x05 \x01(\x08:\x05\x66\x61lse\x12\x1f\n\x10server_streaming\x18\x06 \x01(\x08:\x05\x66\x61lse\"\xa5\x06\n\x0b\x46ileOptions\x12\x14\n\x0cjava_package\x18\x01 \x01(\t\x12\x1c\n\x14java_outer_classname\x18\x08 \x01(\t\x12\"\n\x13java_multiple_files\x18\n \x01(\x08:\x05\x66\x61lse\x12)\n\x1djava_generate_equals_and_hash\x18\x14 \x01(\x08\x42\x02\x18\x01\x12%\n\x16java_string_check_utf8\x18\x1b \x01(\x08:\x05\x66\x61lse\x12\x46\n\x0coptimize_for\x18\t \x01(\x0e\x32).google.protobuf.FileOptions.OptimizeMode:\x05SPEED\x12\x12\n\ngo_package\x18\x0b \x01(\t\x12\"\n\x13\x63\x63_generic_services\x18\x10 \x01(\x08:\x05\x66\x61lse\x12$\n\x15java_generic_services\x18\x11 \x01(\x08:\x05\x66\x61lse\x12\"\n\x13py_generic_services\x18\x12 \x01(\x08:\x05\x66\x61lse\x12#\n\x14php_generic_services\x18* \x01(\x08:\x05\x66\x61lse\x12\x19\n\ndeprecated\x18\x17 \x01(\x08:\x05\x66\x61lse\x12\x1e\n\x10\x63\x63_enable_arenas\x18\x1f \x01(\x08:\x04true\x12\x19\n\x11objc_class_prefix\x18$ \x01(\t\x12\x18\n\x10\x63sharp_namespace\x18% \x01(\t\x12\x14\n\x0cswift_prefix\x18\' \x01(\t\x12\x18\n\x10php_class_prefix\x18( \x01(\t\x12\x15\n\rphp_namespace\x18) \x01(\t\x12\x1e\n\x16php_metadata_namespace\x18, \x01(\t\x12\x14\n\x0cruby_package\x18- \x01(\t\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 
\x03(\x0b\x32$.google.protobuf.UninterpretedOption\":\n\x0cOptimizeMode\x12\t\n\x05SPEED\x10\x01\x12\r\n\tCODE_SIZE\x10\x02\x12\x10\n\x0cLITE_RUNTIME\x10\x03*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02J\x04\x08&\x10\'\"\x84\x02\n\x0eMessageOptions\x12&\n\x17message_set_wire_format\x18\x01 \x01(\x08:\x05\x66\x61lse\x12.\n\x1fno_standard_descriptor_accessor\x18\x02 \x01(\x08:\x05\x66\x61lse\x12\x19\n\ndeprecated\x18\x03 \x01(\x08:\x05\x66\x61lse\x12\x11\n\tmap_entry\x18\x07 \x01(\x08\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02J\x04\x08\x04\x10\x05J\x04\x08\x05\x10\x06J\x04\x08\x06\x10\x07J\x04\x08\x08\x10\tJ\x04\x08\t\x10\n\"\xbe\x03\n\x0c\x46ieldOptions\x12:\n\x05\x63type\x18\x01 \x01(\x0e\x32#.google.protobuf.FieldOptions.CType:\x06STRING\x12\x0e\n\x06packed\x18\x02 \x01(\x08\x12?\n\x06jstype\x18\x06 \x01(\x0e\x32$.google.protobuf.FieldOptions.JSType:\tJS_NORMAL\x12\x13\n\x04lazy\x18\x05 \x01(\x08:\x05\x66\x61lse\x12\x1e\n\x0funverified_lazy\x18\x0f \x01(\x08:\x05\x66\x61lse\x12\x19\n\ndeprecated\x18\x03 \x01(\x08:\x05\x66\x61lse\x12\x13\n\x04weak\x18\n \x01(\x08:\x05\x66\x61lse\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption\"/\n\x05\x43Type\x12\n\n\x06STRING\x10\x00\x12\x08\n\x04\x43ORD\x10\x01\x12\x10\n\x0cSTRING_PIECE\x10\x02\"5\n\x06JSType\x12\r\n\tJS_NORMAL\x10\x00\x12\r\n\tJS_STRING\x10\x01\x12\r\n\tJS_NUMBER\x10\x02*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02J\x04\x08\x04\x10\x05\"^\n\x0cOneofOptions\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\x93\x01\n\x0b\x45numOptions\x12\x13\n\x0b\x61llow_alias\x18\x02 \x01(\x08\x12\x19\n\ndeprecated\x18\x03 \x01(\x08:\x05\x66\x61lse\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02J\x04\x08\x05\x10\x06\"}\n\x10\x45numValueOptions\x12\x19\n\ndeprecated\x18\x01 \x01(\x08:\x05\x66\x61lse\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"{\n\x0eServiceOptions\x12\x19\n\ndeprecated\x18! \x01(\x08:\x05\x66\x61lse\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\xad\x02\n\rMethodOptions\x12\x19\n\ndeprecated\x18! 
\x01(\x08:\x05\x66\x61lse\x12_\n\x11idempotency_level\x18\" \x01(\x0e\x32/.google.protobuf.MethodOptions.IdempotencyLevel:\x13IDEMPOTENCY_UNKNOWN\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption\"P\n\x10IdempotencyLevel\x12\x17\n\x13IDEMPOTENCY_UNKNOWN\x10\x00\x12\x13\n\x0fNO_SIDE_EFFECTS\x10\x01\x12\x0e\n\nIDEMPOTENT\x10\x02*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\x9e\x02\n\x13UninterpretedOption\x12;\n\x04name\x18\x02 \x03(\x0b\x32-.google.protobuf.UninterpretedOption.NamePart\x12\x18\n\x10identifier_value\x18\x03 \x01(\t\x12\x1a\n\x12positive_int_value\x18\x04 \x01(\x04\x12\x1a\n\x12negative_int_value\x18\x05 \x01(\x03\x12\x14\n\x0c\x64ouble_value\x18\x06 \x01(\x01\x12\x14\n\x0cstring_value\x18\x07 \x01(\x0c\x12\x17\n\x0f\x61ggregate_value\x18\x08 \x01(\t\x1a\x33\n\x08NamePart\x12\x11\n\tname_part\x18\x01 \x02(\t\x12\x14\n\x0cis_extension\x18\x02 \x02(\x08\"\xd5\x01\n\x0eSourceCodeInfo\x12:\n\x08location\x18\x01 \x03(\x0b\x32(.google.protobuf.SourceCodeInfo.Location\x1a\x86\x01\n\x08Location\x12\x10\n\x04path\x18\x01 \x03(\x05\x42\x02\x10\x01\x12\x10\n\x04span\x18\x02 \x03(\x05\x42\x02\x10\x01\x12\x18\n\x10leading_comments\x18\x03 \x01(\t\x12\x19\n\x11trailing_comments\x18\x04 \x01(\t\x12!\n\x19leading_detached_comments\x18\x06 \x03(\t\"\xa7\x01\n\x11GeneratedCodeInfo\x12\x41\n\nannotation\x18\x01 \x03(\x0b\x32-.google.protobuf.GeneratedCodeInfo.Annotation\x1aO\n\nAnnotation\x12\x10\n\x04path\x18\x01 \x03(\x05\x42\x02\x10\x01\x12\x13\n\x0bsource_file\x18\x02 \x01(\t\x12\r\n\x05\x62\x65gin\x18\x03 \x01(\x05\x12\x0b\n\x03\x65nd\x18\x04 \x01(\x05\x42~\n\x13\x63om.google.protobufB\x10\x44\x65scriptorProtosH\x01Z-google.golang.org/protobuf/types/descriptorpb\xf8\x01\x01\xa2\x02\x03GPB\xaa\x02\x1aGoogle.Protobuf.Reflection' + ) +else: + DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n google/protobuf/descriptor.proto\x12\x0fgoogle.protobuf\"G\n\x11\x46ileDescriptorSet\x12\x32\n\x04\x66ile\x18\x01 \x03(\x0b\x32$.google.protobuf.FileDescriptorProto\"\xdb\x03\n\x13\x46ileDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0f\n\x07package\x18\x02 \x01(\t\x12\x12\n\ndependency\x18\x03 \x03(\t\x12\x19\n\x11public_dependency\x18\n \x03(\x05\x12\x17\n\x0fweak_dependency\x18\x0b \x03(\x05\x12\x36\n\x0cmessage_type\x18\x04 \x03(\x0b\x32 .google.protobuf.DescriptorProto\x12\x37\n\tenum_type\x18\x05 \x03(\x0b\x32$.google.protobuf.EnumDescriptorProto\x12\x38\n\x07service\x18\x06 \x03(\x0b\x32\'.google.protobuf.ServiceDescriptorProto\x12\x38\n\textension\x18\x07 \x03(\x0b\x32%.google.protobuf.FieldDescriptorProto\x12-\n\x07options\x18\x08 \x01(\x0b\x32\x1c.google.protobuf.FileOptions\x12\x39\n\x10source_code_info\x18\t \x01(\x0b\x32\x1f.google.protobuf.SourceCodeInfo\x12\x0e\n\x06syntax\x18\x0c \x01(\t\"\xa9\x05\n\x0f\x44\x65scriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x34\n\x05\x66ield\x18\x02 \x03(\x0b\x32%.google.protobuf.FieldDescriptorProto\x12\x38\n\textension\x18\x06 \x03(\x0b\x32%.google.protobuf.FieldDescriptorProto\x12\x35\n\x0bnested_type\x18\x03 \x03(\x0b\x32 .google.protobuf.DescriptorProto\x12\x37\n\tenum_type\x18\x04 \x03(\x0b\x32$.google.protobuf.EnumDescriptorProto\x12H\n\x0f\x65xtension_range\x18\x05 \x03(\x0b\x32/.google.protobuf.DescriptorProto.ExtensionRange\x12\x39\n\noneof_decl\x18\x08 \x03(\x0b\x32%.google.protobuf.OneofDescriptorProto\x12\x30\n\x07options\x18\x07 \x01(\x0b\x32\x1f.google.protobuf.MessageOptions\x12\x46\n\x0ereserved_range\x18\t 
\x03(\x0b\x32..google.protobuf.DescriptorProto.ReservedRange\x12\x15\n\rreserved_name\x18\n \x03(\t\x1a\x65\n\x0e\x45xtensionRange\x12\r\n\x05start\x18\x01 \x01(\x05\x12\x0b\n\x03\x65nd\x18\x02 \x01(\x05\x12\x37\n\x07options\x18\x03 \x01(\x0b\x32&.google.protobuf.ExtensionRangeOptions\x1a+\n\rReservedRange\x12\r\n\x05start\x18\x01 \x01(\x05\x12\x0b\n\x03\x65nd\x18\x02 \x01(\x05\"g\n\x15\x45xtensionRangeOptions\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\xd5\x05\n\x14\x46ieldDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0e\n\x06number\x18\x03 \x01(\x05\x12:\n\x05label\x18\x04 \x01(\x0e\x32+.google.protobuf.FieldDescriptorProto.Label\x12\x38\n\x04type\x18\x05 \x01(\x0e\x32*.google.protobuf.FieldDescriptorProto.Type\x12\x11\n\ttype_name\x18\x06 \x01(\t\x12\x10\n\x08\x65xtendee\x18\x02 \x01(\t\x12\x15\n\rdefault_value\x18\x07 \x01(\t\x12\x13\n\x0boneof_index\x18\t \x01(\x05\x12\x11\n\tjson_name\x18\n \x01(\t\x12.\n\x07options\x18\x08 \x01(\x0b\x32\x1d.google.protobuf.FieldOptions\x12\x17\n\x0fproto3_optional\x18\x11 \x01(\x08\"\xb6\x02\n\x04Type\x12\x0f\n\x0bTYPE_DOUBLE\x10\x01\x12\x0e\n\nTYPE_FLOAT\x10\x02\x12\x0e\n\nTYPE_INT64\x10\x03\x12\x0f\n\x0bTYPE_UINT64\x10\x04\x12\x0e\n\nTYPE_INT32\x10\x05\x12\x10\n\x0cTYPE_FIXED64\x10\x06\x12\x10\n\x0cTYPE_FIXED32\x10\x07\x12\r\n\tTYPE_BOOL\x10\x08\x12\x0f\n\x0bTYPE_STRING\x10\t\x12\x0e\n\nTYPE_GROUP\x10\n\x12\x10\n\x0cTYPE_MESSAGE\x10\x0b\x12\x0e\n\nTYPE_BYTES\x10\x0c\x12\x0f\n\x0bTYPE_UINT32\x10\r\x12\r\n\tTYPE_ENUM\x10\x0e\x12\x11\n\rTYPE_SFIXED32\x10\x0f\x12\x11\n\rTYPE_SFIXED64\x10\x10\x12\x0f\n\x0bTYPE_SINT32\x10\x11\x12\x0f\n\x0bTYPE_SINT64\x10\x12\"C\n\x05Label\x12\x12\n\x0eLABEL_OPTIONAL\x10\x01\x12\x12\n\x0eLABEL_REQUIRED\x10\x02\x12\x12\n\x0eLABEL_REPEATED\x10\x03\"T\n\x14OneofDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12.\n\x07options\x18\x02 \x01(\x0b\x32\x1d.google.protobuf.OneofOptions\"\xa4\x02\n\x13\x45numDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x38\n\x05value\x18\x02 \x03(\x0b\x32).google.protobuf.EnumValueDescriptorProto\x12-\n\x07options\x18\x03 \x01(\x0b\x32\x1c.google.protobuf.EnumOptions\x12N\n\x0ereserved_range\x18\x04 \x03(\x0b\x32\x36.google.protobuf.EnumDescriptorProto.EnumReservedRange\x12\x15\n\rreserved_name\x18\x05 \x03(\t\x1a/\n\x11\x45numReservedRange\x12\r\n\x05start\x18\x01 \x01(\x05\x12\x0b\n\x03\x65nd\x18\x02 \x01(\x05\"l\n\x18\x45numValueDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0e\n\x06number\x18\x02 \x01(\x05\x12\x32\n\x07options\x18\x03 \x01(\x0b\x32!.google.protobuf.EnumValueOptions\"\x90\x01\n\x16ServiceDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x36\n\x06method\x18\x02 \x03(\x0b\x32&.google.protobuf.MethodDescriptorProto\x12\x30\n\x07options\x18\x03 \x01(\x0b\x32\x1f.google.protobuf.ServiceOptions\"\xc1\x01\n\x15MethodDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x12\n\ninput_type\x18\x02 \x01(\t\x12\x13\n\x0boutput_type\x18\x03 \x01(\t\x12/\n\x07options\x18\x04 \x01(\x0b\x32\x1e.google.protobuf.MethodOptions\x12\x1f\n\x10\x63lient_streaming\x18\x05 \x01(\x08:\x05\x66\x61lse\x12\x1f\n\x10server_streaming\x18\x06 \x01(\x08:\x05\x66\x61lse\"\xa5\x06\n\x0b\x46ileOptions\x12\x14\n\x0cjava_package\x18\x01 \x01(\t\x12\x1c\n\x14java_outer_classname\x18\x08 \x01(\t\x12\"\n\x13java_multiple_files\x18\n \x01(\x08:\x05\x66\x61lse\x12)\n\x1djava_generate_equals_and_hash\x18\x14 \x01(\x08\x42\x02\x18\x01\x12%\n\x16java_string_check_utf8\x18\x1b 
\x01(\x08:\x05\x66\x61lse\x12\x46\n\x0coptimize_for\x18\t \x01(\x0e\x32).google.protobuf.FileOptions.OptimizeMode:\x05SPEED\x12\x12\n\ngo_package\x18\x0b \x01(\t\x12\"\n\x13\x63\x63_generic_services\x18\x10 \x01(\x08:\x05\x66\x61lse\x12$\n\x15java_generic_services\x18\x11 \x01(\x08:\x05\x66\x61lse\x12\"\n\x13py_generic_services\x18\x12 \x01(\x08:\x05\x66\x61lse\x12#\n\x14php_generic_services\x18* \x01(\x08:\x05\x66\x61lse\x12\x19\n\ndeprecated\x18\x17 \x01(\x08:\x05\x66\x61lse\x12\x1e\n\x10\x63\x63_enable_arenas\x18\x1f \x01(\x08:\x04true\x12\x19\n\x11objc_class_prefix\x18$ \x01(\t\x12\x18\n\x10\x63sharp_namespace\x18% \x01(\t\x12\x14\n\x0cswift_prefix\x18\' \x01(\t\x12\x18\n\x10php_class_prefix\x18( \x01(\t\x12\x15\n\rphp_namespace\x18) \x01(\t\x12\x1e\n\x16php_metadata_namespace\x18, \x01(\t\x12\x14\n\x0cruby_package\x18- \x01(\t\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption\":\n\x0cOptimizeMode\x12\t\n\x05SPEED\x10\x01\x12\r\n\tCODE_SIZE\x10\x02\x12\x10\n\x0cLITE_RUNTIME\x10\x03*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02J\x04\x08&\x10\'\"\x84\x02\n\x0eMessageOptions\x12&\n\x17message_set_wire_format\x18\x01 \x01(\x08:\x05\x66\x61lse\x12.\n\x1fno_standard_descriptor_accessor\x18\x02 \x01(\x08:\x05\x66\x61lse\x12\x19\n\ndeprecated\x18\x03 \x01(\x08:\x05\x66\x61lse\x12\x11\n\tmap_entry\x18\x07 \x01(\x08\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02J\x04\x08\x04\x10\x05J\x04\x08\x05\x10\x06J\x04\x08\x06\x10\x07J\x04\x08\x08\x10\tJ\x04\x08\t\x10\n\"\xbe\x03\n\x0c\x46ieldOptions\x12:\n\x05\x63type\x18\x01 \x01(\x0e\x32#.google.protobuf.FieldOptions.CType:\x06STRING\x12\x0e\n\x06packed\x18\x02 \x01(\x08\x12?\n\x06jstype\x18\x06 \x01(\x0e\x32$.google.protobuf.FieldOptions.JSType:\tJS_NORMAL\x12\x13\n\x04lazy\x18\x05 \x01(\x08:\x05\x66\x61lse\x12\x1e\n\x0funverified_lazy\x18\x0f \x01(\x08:\x05\x66\x61lse\x12\x19\n\ndeprecated\x18\x03 \x01(\x08:\x05\x66\x61lse\x12\x13\n\x04weak\x18\n \x01(\x08:\x05\x66\x61lse\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption\"/\n\x05\x43Type\x12\n\n\x06STRING\x10\x00\x12\x08\n\x04\x43ORD\x10\x01\x12\x10\n\x0cSTRING_PIECE\x10\x02\"5\n\x06JSType\x12\r\n\tJS_NORMAL\x10\x00\x12\r\n\tJS_STRING\x10\x01\x12\r\n\tJS_NUMBER\x10\x02*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02J\x04\x08\x04\x10\x05\"^\n\x0cOneofOptions\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\x93\x01\n\x0b\x45numOptions\x12\x13\n\x0b\x61llow_alias\x18\x02 \x01(\x08\x12\x19\n\ndeprecated\x18\x03 \x01(\x08:\x05\x66\x61lse\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02J\x04\x08\x05\x10\x06\"}\n\x10\x45numValueOptions\x12\x19\n\ndeprecated\x18\x01 \x01(\x08:\x05\x66\x61lse\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"{\n\x0eServiceOptions\x12\x19\n\ndeprecated\x18! \x01(\x08:\x05\x66\x61lse\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\xad\x02\n\rMethodOptions\x12\x19\n\ndeprecated\x18! 
\x01(\x08:\x05\x66\x61lse\x12_\n\x11idempotency_level\x18\" \x01(\x0e\x32/.google.protobuf.MethodOptions.IdempotencyLevel:\x13IDEMPOTENCY_UNKNOWN\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption\"P\n\x10IdempotencyLevel\x12\x17\n\x13IDEMPOTENCY_UNKNOWN\x10\x00\x12\x13\n\x0fNO_SIDE_EFFECTS\x10\x01\x12\x0e\n\nIDEMPOTENT\x10\x02*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\x9e\x02\n\x13UninterpretedOption\x12;\n\x04name\x18\x02 \x03(\x0b\x32-.google.protobuf.UninterpretedOption.NamePart\x12\x18\n\x10identifier_value\x18\x03 \x01(\t\x12\x1a\n\x12positive_int_value\x18\x04 \x01(\x04\x12\x1a\n\x12negative_int_value\x18\x05 \x01(\x03\x12\x14\n\x0c\x64ouble_value\x18\x06 \x01(\x01\x12\x14\n\x0cstring_value\x18\x07 \x01(\x0c\x12\x17\n\x0f\x61ggregate_value\x18\x08 \x01(\t\x1a\x33\n\x08NamePart\x12\x11\n\tname_part\x18\x01 \x02(\t\x12\x14\n\x0cis_extension\x18\x02 \x02(\x08\"\xd5\x01\n\x0eSourceCodeInfo\x12:\n\x08location\x18\x01 \x03(\x0b\x32(.google.protobuf.SourceCodeInfo.Location\x1a\x86\x01\n\x08Location\x12\x10\n\x04path\x18\x01 \x03(\x05\x42\x02\x10\x01\x12\x10\n\x04span\x18\x02 \x03(\x05\x42\x02\x10\x01\x12\x18\n\x10leading_comments\x18\x03 \x01(\t\x12\x19\n\x11trailing_comments\x18\x04 \x01(\t\x12!\n\x19leading_detached_comments\x18\x06 \x03(\t\"\xa7\x01\n\x11GeneratedCodeInfo\x12\x41\n\nannotation\x18\x01 \x03(\x0b\x32-.google.protobuf.GeneratedCodeInfo.Annotation\x1aO\n\nAnnotation\x12\x10\n\x04path\x18\x01 \x03(\x05\x42\x02\x10\x01\x12\x13\n\x0bsource_file\x18\x02 \x01(\t\x12\r\n\x05\x62\x65gin\x18\x03 \x01(\x05\x12\x0b\n\x03\x65nd\x18\x04 \x01(\x05\x42~\n\x13\x63om.google.protobufB\x10\x44\x65scriptorProtosH\x01Z-google.golang.org/protobuf/types/descriptorpb\xf8\x01\x01\xa2\x02\x03GPB\xaa\x02\x1aGoogle.Protobuf.Reflection') + +if _descriptor._USE_C_DESCRIPTORS == False: + _FIELDDESCRIPTORPROTO_TYPE = _descriptor.EnumDescriptor( + name='Type', + full_name='google.protobuf.FieldDescriptorProto.Type', + filename=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + values=[ + _descriptor.EnumValueDescriptor( + name='TYPE_DOUBLE', index=0, number=1, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='TYPE_FLOAT', index=1, number=2, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='TYPE_INT64', index=2, number=3, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='TYPE_UINT64', index=3, number=4, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='TYPE_INT32', index=4, number=5, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='TYPE_FIXED64', index=5, number=6, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='TYPE_FIXED32', index=6, number=7, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='TYPE_BOOL', index=7, number=8, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='TYPE_STRING', index=8, number=9, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + 
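+      # The remaining Type values below (TYPE_GROUP = 10 through TYPE_SINT64 = 18)
+      # follow the numbering declared in descriptor.proto for FieldDescriptorProto.Type.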
_descriptor.EnumValueDescriptor( + name='TYPE_GROUP', index=9, number=10, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='TYPE_MESSAGE', index=10, number=11, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='TYPE_BYTES', index=11, number=12, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='TYPE_UINT32', index=12, number=13, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='TYPE_ENUM', index=13, number=14, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='TYPE_SFIXED32', index=14, number=15, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='TYPE_SFIXED64', index=15, number=16, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='TYPE_SINT32', index=16, number=17, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='TYPE_SINT64', index=17, number=18, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + ], + containing_type=None, + serialized_options=None, + ) + _sym_db.RegisterEnumDescriptor(_FIELDDESCRIPTORPROTO_TYPE) + + _FIELDDESCRIPTORPROTO_LABEL = _descriptor.EnumDescriptor( + name='Label', + full_name='google.protobuf.FieldDescriptorProto.Label', + filename=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + values=[ + _descriptor.EnumValueDescriptor( + name='LABEL_OPTIONAL', index=0, number=1, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='LABEL_REQUIRED', index=1, number=2, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='LABEL_REPEATED', index=2, number=3, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + ], + containing_type=None, + serialized_options=None, + ) + _sym_db.RegisterEnumDescriptor(_FIELDDESCRIPTORPROTO_LABEL) + + _FILEOPTIONS_OPTIMIZEMODE = _descriptor.EnumDescriptor( + name='OptimizeMode', + full_name='google.protobuf.FileOptions.OptimizeMode', + filename=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + values=[ + _descriptor.EnumValueDescriptor( + name='SPEED', index=0, number=1, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='CODE_SIZE', index=1, number=2, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='LITE_RUNTIME', index=2, number=3, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + ], + containing_type=None, + serialized_options=None, + ) + _sym_db.RegisterEnumDescriptor(_FILEOPTIONS_OPTIMIZEMODE) + + _FIELDOPTIONS_CTYPE = _descriptor.EnumDescriptor( + name='CType', + full_name='google.protobuf.FieldOptions.CType', + filename=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + values=[ + _descriptor.EnumValueDescriptor( + 
name='STRING', index=0, number=0, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='CORD', index=1, number=1, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='STRING_PIECE', index=2, number=2, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + ], + containing_type=None, + serialized_options=None, + ) + _sym_db.RegisterEnumDescriptor(_FIELDOPTIONS_CTYPE) + + _FIELDOPTIONS_JSTYPE = _descriptor.EnumDescriptor( + name='JSType', + full_name='google.protobuf.FieldOptions.JSType', + filename=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + values=[ + _descriptor.EnumValueDescriptor( + name='JS_NORMAL', index=0, number=0, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='JS_STRING', index=1, number=1, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='JS_NUMBER', index=2, number=2, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + ], + containing_type=None, + serialized_options=None, + ) + _sym_db.RegisterEnumDescriptor(_FIELDOPTIONS_JSTYPE) + + _METHODOPTIONS_IDEMPOTENCYLEVEL = _descriptor.EnumDescriptor( + name='IdempotencyLevel', + full_name='google.protobuf.MethodOptions.IdempotencyLevel', + filename=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + values=[ + _descriptor.EnumValueDescriptor( + name='IDEMPOTENCY_UNKNOWN', index=0, number=0, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='NO_SIDE_EFFECTS', index=1, number=1, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='IDEMPOTENT', index=2, number=2, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + ], + containing_type=None, + serialized_options=None, + ) + _sym_db.RegisterEnumDescriptor(_METHODOPTIONS_IDEMPOTENCYLEVEL) + + + _FILEDESCRIPTORSET = _descriptor.Descriptor( + name='FileDescriptorSet', + full_name='google.protobuf.FileDescriptorSet', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='file', full_name='google.protobuf.FileDescriptorSet.file', index=0, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + ) + + + _FILEDESCRIPTORPROTO = _descriptor.Descriptor( + name='FileDescriptorProto', + full_name='google.protobuf.FileDescriptorProto', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='google.protobuf.FileDescriptorProto.name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, 
enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='package', full_name='google.protobuf.FileDescriptorProto.package', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='dependency', full_name='google.protobuf.FileDescriptorProto.dependency', index=2, + number=3, type=9, cpp_type=9, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='public_dependency', full_name='google.protobuf.FileDescriptorProto.public_dependency', index=3, + number=10, type=5, cpp_type=1, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='weak_dependency', full_name='google.protobuf.FileDescriptorProto.weak_dependency', index=4, + number=11, type=5, cpp_type=1, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='message_type', full_name='google.protobuf.FileDescriptorProto.message_type', index=5, + number=4, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='enum_type', full_name='google.protobuf.FileDescriptorProto.enum_type', index=6, + number=5, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='service', full_name='google.protobuf.FileDescriptorProto.service', index=7, + number=6, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='extension', full_name='google.protobuf.FileDescriptorProto.extension', index=8, + number=7, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='options', full_name='google.protobuf.FileDescriptorProto.options', index=9, + number=8, type=11, 
cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='source_code_info', full_name='google.protobuf.FileDescriptorProto.source_code_info', index=10, + number=9, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='syntax', full_name='google.protobuf.FileDescriptorProto.syntax', index=11, + number=12, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + ) + + + _DESCRIPTORPROTO_EXTENSIONRANGE = _descriptor.Descriptor( + name='ExtensionRange', + full_name='google.protobuf.DescriptorProto.ExtensionRange', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='start', full_name='google.protobuf.DescriptorProto.ExtensionRange.start', index=0, + number=1, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='end', full_name='google.protobuf.DescriptorProto.ExtensionRange.end', index=1, + number=2, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='options', full_name='google.protobuf.DescriptorProto.ExtensionRange.options', index=2, + number=3, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + ) + + _DESCRIPTORPROTO_RESERVEDRANGE = _descriptor.Descriptor( + name='ReservedRange', + full_name='google.protobuf.DescriptorProto.ReservedRange', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='start', full_name='google.protobuf.DescriptorProto.ReservedRange.start', index=0, + number=1, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, 
create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='end', full_name='google.protobuf.DescriptorProto.ReservedRange.end', index=1, + number=2, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + ) + + _DESCRIPTORPROTO = _descriptor.Descriptor( + name='DescriptorProto', + full_name='google.protobuf.DescriptorProto', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='google.protobuf.DescriptorProto.name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='field', full_name='google.protobuf.DescriptorProto.field', index=1, + number=2, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='extension', full_name='google.protobuf.DescriptorProto.extension', index=2, + number=6, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='nested_type', full_name='google.protobuf.DescriptorProto.nested_type', index=3, + number=3, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='enum_type', full_name='google.protobuf.DescriptorProto.enum_type', index=4, + number=4, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='extension_range', full_name='google.protobuf.DescriptorProto.extension_range', index=5, + number=5, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='oneof_decl', full_name='google.protobuf.DescriptorProto.oneof_decl', index=6, + number=8, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + 
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='options', full_name='google.protobuf.DescriptorProto.options', index=7, + number=7, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='reserved_range', full_name='google.protobuf.DescriptorProto.reserved_range', index=8, + number=9, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='reserved_name', full_name='google.protobuf.DescriptorProto.reserved_name', index=9, + number=10, type=9, cpp_type=9, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[_DESCRIPTORPROTO_EXTENSIONRANGE, _DESCRIPTORPROTO_RESERVEDRANGE, ], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + ) + + + _EXTENSIONRANGEOPTIONS = _descriptor.Descriptor( + name='ExtensionRangeOptions', + full_name='google.protobuf.ExtensionRangeOptions', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='uninterpreted_option', full_name='google.protobuf.ExtensionRangeOptions.uninterpreted_option', index=0, + number=999, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=True, + syntax='proto2', + extension_ranges=[(1000, 536870912), ], + oneofs=[ + ], + ) + + + _FIELDDESCRIPTORPROTO = _descriptor.Descriptor( + name='FieldDescriptorProto', + full_name='google.protobuf.FieldDescriptorProto', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='google.protobuf.FieldDescriptorProto.name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='number', full_name='google.protobuf.FieldDescriptorProto.number', index=1, + number=3, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='label', 
full_name='google.protobuf.FieldDescriptorProto.label', index=2, + number=4, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=1, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='type', full_name='google.protobuf.FieldDescriptorProto.type', index=3, + number=5, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=1, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='type_name', full_name='google.protobuf.FieldDescriptorProto.type_name', index=4, + number=6, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='extendee', full_name='google.protobuf.FieldDescriptorProto.extendee', index=5, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='default_value', full_name='google.protobuf.FieldDescriptorProto.default_value', index=6, + number=7, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='oneof_index', full_name='google.protobuf.FieldDescriptorProto.oneof_index', index=7, + number=9, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='json_name', full_name='google.protobuf.FieldDescriptorProto.json_name', index=8, + number=10, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='options', full_name='google.protobuf.FieldDescriptorProto.options', index=9, + number=8, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='proto3_optional', full_name='google.protobuf.FieldDescriptorProto.proto3_optional', index=10, + number=17, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + 
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + _FIELDDESCRIPTORPROTO_TYPE, + _FIELDDESCRIPTORPROTO_LABEL, + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + ) + + + _ONEOFDESCRIPTORPROTO = _descriptor.Descriptor( + name='OneofDescriptorProto', + full_name='google.protobuf.OneofDescriptorProto', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='google.protobuf.OneofDescriptorProto.name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='options', full_name='google.protobuf.OneofDescriptorProto.options', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + ) + + + _ENUMDESCRIPTORPROTO_ENUMRESERVEDRANGE = _descriptor.Descriptor( + name='EnumReservedRange', + full_name='google.protobuf.EnumDescriptorProto.EnumReservedRange', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='start', full_name='google.protobuf.EnumDescriptorProto.EnumReservedRange.start', index=0, + number=1, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='end', full_name='google.protobuf.EnumDescriptorProto.EnumReservedRange.end', index=1, + number=2, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + ) + + _ENUMDESCRIPTORPROTO = _descriptor.Descriptor( + name='EnumDescriptorProto', + full_name='google.protobuf.EnumDescriptorProto', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='google.protobuf.EnumDescriptorProto.name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='value', 
full_name='google.protobuf.EnumDescriptorProto.value', index=1, + number=2, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='options', full_name='google.protobuf.EnumDescriptorProto.options', index=2, + number=3, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='reserved_range', full_name='google.protobuf.EnumDescriptorProto.reserved_range', index=3, + number=4, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='reserved_name', full_name='google.protobuf.EnumDescriptorProto.reserved_name', index=4, + number=5, type=9, cpp_type=9, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[_ENUMDESCRIPTORPROTO_ENUMRESERVEDRANGE, ], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + ) + + + _ENUMVALUEDESCRIPTORPROTO = _descriptor.Descriptor( + name='EnumValueDescriptorProto', + full_name='google.protobuf.EnumValueDescriptorProto', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='google.protobuf.EnumValueDescriptorProto.name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='number', full_name='google.protobuf.EnumValueDescriptorProto.number', index=1, + number=2, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='options', full_name='google.protobuf.EnumValueDescriptorProto.options', index=2, + number=3, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + ) + + + _SERVICEDESCRIPTORPROTO = _descriptor.Descriptor( + name='ServiceDescriptorProto', + 
full_name='google.protobuf.ServiceDescriptorProto', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='google.protobuf.ServiceDescriptorProto.name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='method', full_name='google.protobuf.ServiceDescriptorProto.method', index=1, + number=2, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='options', full_name='google.protobuf.ServiceDescriptorProto.options', index=2, + number=3, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + ) + + + _METHODDESCRIPTORPROTO = _descriptor.Descriptor( + name='MethodDescriptorProto', + full_name='google.protobuf.MethodDescriptorProto', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='google.protobuf.MethodDescriptorProto.name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='input_type', full_name='google.protobuf.MethodDescriptorProto.input_type', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='output_type', full_name='google.protobuf.MethodDescriptorProto.output_type', index=2, + number=3, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='options', full_name='google.protobuf.MethodDescriptorProto.options', index=3, + number=4, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='client_streaming', 
full_name='google.protobuf.MethodDescriptorProto.client_streaming', index=4, + number=5, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='server_streaming', full_name='google.protobuf.MethodDescriptorProto.server_streaming', index=5, + number=6, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + ) + + + _FILEOPTIONS = _descriptor.Descriptor( + name='FileOptions', + full_name='google.protobuf.FileOptions', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='java_package', full_name='google.protobuf.FileOptions.java_package', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='java_outer_classname', full_name='google.protobuf.FileOptions.java_outer_classname', index=1, + number=8, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='java_multiple_files', full_name='google.protobuf.FileOptions.java_multiple_files', index=2, + number=10, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='java_generate_equals_and_hash', full_name='google.protobuf.FileOptions.java_generate_equals_and_hash', index=3, + number=20, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='java_string_check_utf8', full_name='google.protobuf.FileOptions.java_string_check_utf8', index=4, + number=27, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='optimize_for', full_name='google.protobuf.FileOptions.optimize_for', index=5, + number=9, type=14, cpp_type=8, label=1, + has_default_value=True, default_value=1, + message_type=None, enum_type=None, containing_type=None, + 
is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='go_package', full_name='google.protobuf.FileOptions.go_package', index=6, + number=11, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='cc_generic_services', full_name='google.protobuf.FileOptions.cc_generic_services', index=7, + number=16, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='java_generic_services', full_name='google.protobuf.FileOptions.java_generic_services', index=8, + number=17, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='py_generic_services', full_name='google.protobuf.FileOptions.py_generic_services', index=9, + number=18, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='php_generic_services', full_name='google.protobuf.FileOptions.php_generic_services', index=10, + number=42, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='deprecated', full_name='google.protobuf.FileOptions.deprecated', index=11, + number=23, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='cc_enable_arenas', full_name='google.protobuf.FileOptions.cc_enable_arenas', index=12, + number=31, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=True, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='objc_class_prefix', full_name='google.protobuf.FileOptions.objc_class_prefix', index=13, + number=36, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='csharp_namespace', full_name='google.protobuf.FileOptions.csharp_namespace', index=14, 
+ number=37, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='swift_prefix', full_name='google.protobuf.FileOptions.swift_prefix', index=15, + number=39, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='php_class_prefix', full_name='google.protobuf.FileOptions.php_class_prefix', index=16, + number=40, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='php_namespace', full_name='google.protobuf.FileOptions.php_namespace', index=17, + number=41, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='php_metadata_namespace', full_name='google.protobuf.FileOptions.php_metadata_namespace', index=18, + number=44, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='ruby_package', full_name='google.protobuf.FileOptions.ruby_package', index=19, + number=45, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='uninterpreted_option', full_name='google.protobuf.FileOptions.uninterpreted_option', index=20, + number=999, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + _FILEOPTIONS_OPTIMIZEMODE, + ], + serialized_options=None, + is_extendable=True, + syntax='proto2', + extension_ranges=[(1000, 536870912), ], + oneofs=[ + ], + ) + + + _MESSAGEOPTIONS = _descriptor.Descriptor( + name='MessageOptions', + full_name='google.protobuf.MessageOptions', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='message_set_wire_format', full_name='google.protobuf.MessageOptions.message_set_wire_format', index=0, + number=1, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, 
containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='no_standard_descriptor_accessor', full_name='google.protobuf.MessageOptions.no_standard_descriptor_accessor', index=1, + number=2, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='deprecated', full_name='google.protobuf.MessageOptions.deprecated', index=2, + number=3, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='map_entry', full_name='google.protobuf.MessageOptions.map_entry', index=3, + number=7, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='uninterpreted_option', full_name='google.protobuf.MessageOptions.uninterpreted_option', index=4, + number=999, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=True, + syntax='proto2', + extension_ranges=[(1000, 536870912), ], + oneofs=[ + ], + ) + + + _FIELDOPTIONS = _descriptor.Descriptor( + name='FieldOptions', + full_name='google.protobuf.FieldOptions', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='ctype', full_name='google.protobuf.FieldOptions.ctype', index=0, + number=1, type=14, cpp_type=8, label=1, + has_default_value=True, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='packed', full_name='google.protobuf.FieldOptions.packed', index=1, + number=2, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='jstype', full_name='google.protobuf.FieldOptions.jstype', index=2, + number=6, type=14, cpp_type=8, label=1, + has_default_value=True, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='lazy', full_name='google.protobuf.FieldOptions.lazy', index=3, + number=5, type=8, cpp_type=7, label=1, + has_default_value=True, 
default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='unverified_lazy', full_name='google.protobuf.FieldOptions.unverified_lazy', index=4, + number=15, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='deprecated', full_name='google.protobuf.FieldOptions.deprecated', index=5, + number=3, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='weak', full_name='google.protobuf.FieldOptions.weak', index=6, + number=10, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='uninterpreted_option', full_name='google.protobuf.FieldOptions.uninterpreted_option', index=7, + number=999, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + _FIELDOPTIONS_CTYPE, + _FIELDOPTIONS_JSTYPE, + ], + serialized_options=None, + is_extendable=True, + syntax='proto2', + extension_ranges=[(1000, 536870912), ], + oneofs=[ + ], + ) + + + _ONEOFOPTIONS = _descriptor.Descriptor( + name='OneofOptions', + full_name='google.protobuf.OneofOptions', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='uninterpreted_option', full_name='google.protobuf.OneofOptions.uninterpreted_option', index=0, + number=999, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=True, + syntax='proto2', + extension_ranges=[(1000, 536870912), ], + oneofs=[ + ], + ) + + + _ENUMOPTIONS = _descriptor.Descriptor( + name='EnumOptions', + full_name='google.protobuf.EnumOptions', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='allow_alias', full_name='google.protobuf.EnumOptions.allow_alias', index=0, + number=2, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( 
+ name='deprecated', full_name='google.protobuf.EnumOptions.deprecated', index=1, + number=3, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='uninterpreted_option', full_name='google.protobuf.EnumOptions.uninterpreted_option', index=2, + number=999, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=True, + syntax='proto2', + extension_ranges=[(1000, 536870912), ], + oneofs=[ + ], + ) + + + _ENUMVALUEOPTIONS = _descriptor.Descriptor( + name='EnumValueOptions', + full_name='google.protobuf.EnumValueOptions', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='deprecated', full_name='google.protobuf.EnumValueOptions.deprecated', index=0, + number=1, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='uninterpreted_option', full_name='google.protobuf.EnumValueOptions.uninterpreted_option', index=1, + number=999, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=True, + syntax='proto2', + extension_ranges=[(1000, 536870912), ], + oneofs=[ + ], + ) + + + _SERVICEOPTIONS = _descriptor.Descriptor( + name='ServiceOptions', + full_name='google.protobuf.ServiceOptions', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='deprecated', full_name='google.protobuf.ServiceOptions.deprecated', index=0, + number=33, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='uninterpreted_option', full_name='google.protobuf.ServiceOptions.uninterpreted_option', index=1, + number=999, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=True, + syntax='proto2', + extension_ranges=[(1000, 536870912), ], + oneofs=[ + ], + ) + + + _METHODOPTIONS = _descriptor.Descriptor( + name='MethodOptions', + 
full_name='google.protobuf.MethodOptions', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='deprecated', full_name='google.protobuf.MethodOptions.deprecated', index=0, + number=33, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='idempotency_level', full_name='google.protobuf.MethodOptions.idempotency_level', index=1, + number=34, type=14, cpp_type=8, label=1, + has_default_value=True, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='uninterpreted_option', full_name='google.protobuf.MethodOptions.uninterpreted_option', index=2, + number=999, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + _METHODOPTIONS_IDEMPOTENCYLEVEL, + ], + serialized_options=None, + is_extendable=True, + syntax='proto2', + extension_ranges=[(1000, 536870912), ], + oneofs=[ + ], + ) + + + _UNINTERPRETEDOPTION_NAMEPART = _descriptor.Descriptor( + name='NamePart', + full_name='google.protobuf.UninterpretedOption.NamePart', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='name_part', full_name='google.protobuf.UninterpretedOption.NamePart.name_part', index=0, + number=1, type=9, cpp_type=9, label=2, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='is_extension', full_name='google.protobuf.UninterpretedOption.NamePart.is_extension', index=1, + number=2, type=8, cpp_type=7, label=2, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + ) + + _UNINTERPRETEDOPTION = _descriptor.Descriptor( + name='UninterpretedOption', + full_name='google.protobuf.UninterpretedOption', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='google.protobuf.UninterpretedOption.name', index=0, + number=2, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + 
_descriptor.FieldDescriptor( + name='identifier_value', full_name='google.protobuf.UninterpretedOption.identifier_value', index=1, + number=3, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='positive_int_value', full_name='google.protobuf.UninterpretedOption.positive_int_value', index=2, + number=4, type=4, cpp_type=4, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='negative_int_value', full_name='google.protobuf.UninterpretedOption.negative_int_value', index=3, + number=5, type=3, cpp_type=2, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='double_value', full_name='google.protobuf.UninterpretedOption.double_value', index=4, + number=6, type=1, cpp_type=5, label=1, + has_default_value=False, default_value=float(0), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='string_value', full_name='google.protobuf.UninterpretedOption.string_value', index=5, + number=7, type=12, cpp_type=9, label=1, + has_default_value=False, default_value=b"", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='aggregate_value', full_name='google.protobuf.UninterpretedOption.aggregate_value', index=6, + number=8, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[_UNINTERPRETEDOPTION_NAMEPART, ], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + ) + + + _SOURCECODEINFO_LOCATION = _descriptor.Descriptor( + name='Location', + full_name='google.protobuf.SourceCodeInfo.Location', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='path', full_name='google.protobuf.SourceCodeInfo.Location.path', index=0, + number=1, type=5, cpp_type=1, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='span', full_name='google.protobuf.SourceCodeInfo.Location.span', index=1, + number=2, type=5, cpp_type=1, label=3, + has_default_value=False, default_value=[], + 
message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='leading_comments', full_name='google.protobuf.SourceCodeInfo.Location.leading_comments', index=2, + number=3, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='trailing_comments', full_name='google.protobuf.SourceCodeInfo.Location.trailing_comments', index=3, + number=4, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='leading_detached_comments', full_name='google.protobuf.SourceCodeInfo.Location.leading_detached_comments', index=4, + number=6, type=9, cpp_type=9, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + ) + + _SOURCECODEINFO = _descriptor.Descriptor( + name='SourceCodeInfo', + full_name='google.protobuf.SourceCodeInfo', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='location', full_name='google.protobuf.SourceCodeInfo.location', index=0, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[_SOURCECODEINFO_LOCATION, ], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + ) + + + _GENERATEDCODEINFO_ANNOTATION = _descriptor.Descriptor( + name='Annotation', + full_name='google.protobuf.GeneratedCodeInfo.Annotation', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='path', full_name='google.protobuf.GeneratedCodeInfo.Annotation.path', index=0, + number=1, type=5, cpp_type=1, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='source_file', full_name='google.protobuf.GeneratedCodeInfo.Annotation.source_file', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, 
create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='begin', full_name='google.protobuf.GeneratedCodeInfo.Annotation.begin', index=2, + number=3, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='end', full_name='google.protobuf.GeneratedCodeInfo.Annotation.end', index=3, + number=4, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + ) + + _GENERATEDCODEINFO = _descriptor.Descriptor( + name='GeneratedCodeInfo', + full_name='google.protobuf.GeneratedCodeInfo', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='annotation', full_name='google.protobuf.GeneratedCodeInfo.annotation', index=0, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[_GENERATEDCODEINFO_ANNOTATION, ], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + ) + + _FILEDESCRIPTORSET.fields_by_name['file'].message_type = _FILEDESCRIPTORPROTO + _FILEDESCRIPTORPROTO.fields_by_name['message_type'].message_type = _DESCRIPTORPROTO + _FILEDESCRIPTORPROTO.fields_by_name['enum_type'].message_type = _ENUMDESCRIPTORPROTO + _FILEDESCRIPTORPROTO.fields_by_name['service'].message_type = _SERVICEDESCRIPTORPROTO + _FILEDESCRIPTORPROTO.fields_by_name['extension'].message_type = _FIELDDESCRIPTORPROTO + _FILEDESCRIPTORPROTO.fields_by_name['options'].message_type = _FILEOPTIONS + _FILEDESCRIPTORPROTO.fields_by_name['source_code_info'].message_type = _SOURCECODEINFO + _DESCRIPTORPROTO_EXTENSIONRANGE.fields_by_name['options'].message_type = _EXTENSIONRANGEOPTIONS + _DESCRIPTORPROTO_EXTENSIONRANGE.containing_type = _DESCRIPTORPROTO + _DESCRIPTORPROTO_RESERVEDRANGE.containing_type = _DESCRIPTORPROTO + _DESCRIPTORPROTO.fields_by_name['field'].message_type = _FIELDDESCRIPTORPROTO + _DESCRIPTORPROTO.fields_by_name['extension'].message_type = _FIELDDESCRIPTORPROTO + _DESCRIPTORPROTO.fields_by_name['nested_type'].message_type = _DESCRIPTORPROTO + _DESCRIPTORPROTO.fields_by_name['enum_type'].message_type = _ENUMDESCRIPTORPROTO + _DESCRIPTORPROTO.fields_by_name['extension_range'].message_type = _DESCRIPTORPROTO_EXTENSIONRANGE + _DESCRIPTORPROTO.fields_by_name['oneof_decl'].message_type = _ONEOFDESCRIPTORPROTO + _DESCRIPTORPROTO.fields_by_name['options'].message_type = _MESSAGEOPTIONS + _DESCRIPTORPROTO.fields_by_name['reserved_range'].message_type = _DESCRIPTORPROTO_RESERVEDRANGE + _EXTENSIONRANGEOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION + _FIELDDESCRIPTORPROTO.fields_by_name['label'].enum_type = 
_FIELDDESCRIPTORPROTO_LABEL + _FIELDDESCRIPTORPROTO.fields_by_name['type'].enum_type = _FIELDDESCRIPTORPROTO_TYPE + _FIELDDESCRIPTORPROTO.fields_by_name['options'].message_type = _FIELDOPTIONS + _FIELDDESCRIPTORPROTO_TYPE.containing_type = _FIELDDESCRIPTORPROTO + _FIELDDESCRIPTORPROTO_LABEL.containing_type = _FIELDDESCRIPTORPROTO + _ONEOFDESCRIPTORPROTO.fields_by_name['options'].message_type = _ONEOFOPTIONS + _ENUMDESCRIPTORPROTO_ENUMRESERVEDRANGE.containing_type = _ENUMDESCRIPTORPROTO + _ENUMDESCRIPTORPROTO.fields_by_name['value'].message_type = _ENUMVALUEDESCRIPTORPROTO + _ENUMDESCRIPTORPROTO.fields_by_name['options'].message_type = _ENUMOPTIONS + _ENUMDESCRIPTORPROTO.fields_by_name['reserved_range'].message_type = _ENUMDESCRIPTORPROTO_ENUMRESERVEDRANGE + _ENUMVALUEDESCRIPTORPROTO.fields_by_name['options'].message_type = _ENUMVALUEOPTIONS + _SERVICEDESCRIPTORPROTO.fields_by_name['method'].message_type = _METHODDESCRIPTORPROTO + _SERVICEDESCRIPTORPROTO.fields_by_name['options'].message_type = _SERVICEOPTIONS + _METHODDESCRIPTORPROTO.fields_by_name['options'].message_type = _METHODOPTIONS + _FILEOPTIONS.fields_by_name['optimize_for'].enum_type = _FILEOPTIONS_OPTIMIZEMODE + _FILEOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION + _FILEOPTIONS_OPTIMIZEMODE.containing_type = _FILEOPTIONS + _MESSAGEOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION + _FIELDOPTIONS.fields_by_name['ctype'].enum_type = _FIELDOPTIONS_CTYPE + _FIELDOPTIONS.fields_by_name['jstype'].enum_type = _FIELDOPTIONS_JSTYPE + _FIELDOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION + _FIELDOPTIONS_CTYPE.containing_type = _FIELDOPTIONS + _FIELDOPTIONS_JSTYPE.containing_type = _FIELDOPTIONS + _ONEOFOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION + _ENUMOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION + _ENUMVALUEOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION + _SERVICEOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION + _METHODOPTIONS.fields_by_name['idempotency_level'].enum_type = _METHODOPTIONS_IDEMPOTENCYLEVEL + _METHODOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION + _METHODOPTIONS_IDEMPOTENCYLEVEL.containing_type = _METHODOPTIONS + _UNINTERPRETEDOPTION_NAMEPART.containing_type = _UNINTERPRETEDOPTION + _UNINTERPRETEDOPTION.fields_by_name['name'].message_type = _UNINTERPRETEDOPTION_NAMEPART + _SOURCECODEINFO_LOCATION.containing_type = _SOURCECODEINFO + _SOURCECODEINFO.fields_by_name['location'].message_type = _SOURCECODEINFO_LOCATION + _GENERATEDCODEINFO_ANNOTATION.containing_type = _GENERATEDCODEINFO + _GENERATEDCODEINFO.fields_by_name['annotation'].message_type = _GENERATEDCODEINFO_ANNOTATION + DESCRIPTOR.message_types_by_name['FileDescriptorSet'] = _FILEDESCRIPTORSET + DESCRIPTOR.message_types_by_name['FileDescriptorProto'] = _FILEDESCRIPTORPROTO + DESCRIPTOR.message_types_by_name['DescriptorProto'] = _DESCRIPTORPROTO + DESCRIPTOR.message_types_by_name['ExtensionRangeOptions'] = _EXTENSIONRANGEOPTIONS + DESCRIPTOR.message_types_by_name['FieldDescriptorProto'] = _FIELDDESCRIPTORPROTO + DESCRIPTOR.message_types_by_name['OneofDescriptorProto'] = _ONEOFDESCRIPTORPROTO + DESCRIPTOR.message_types_by_name['EnumDescriptorProto'] = _ENUMDESCRIPTORPROTO + DESCRIPTOR.message_types_by_name['EnumValueDescriptorProto'] = _ENUMVALUEDESCRIPTORPROTO + 
DESCRIPTOR.message_types_by_name['ServiceDescriptorProto'] = _SERVICEDESCRIPTORPROTO + DESCRIPTOR.message_types_by_name['MethodDescriptorProto'] = _METHODDESCRIPTORPROTO + DESCRIPTOR.message_types_by_name['FileOptions'] = _FILEOPTIONS + DESCRIPTOR.message_types_by_name['MessageOptions'] = _MESSAGEOPTIONS + DESCRIPTOR.message_types_by_name['FieldOptions'] = _FIELDOPTIONS + DESCRIPTOR.message_types_by_name['OneofOptions'] = _ONEOFOPTIONS + DESCRIPTOR.message_types_by_name['EnumOptions'] = _ENUMOPTIONS + DESCRIPTOR.message_types_by_name['EnumValueOptions'] = _ENUMVALUEOPTIONS + DESCRIPTOR.message_types_by_name['ServiceOptions'] = _SERVICEOPTIONS + DESCRIPTOR.message_types_by_name['MethodOptions'] = _METHODOPTIONS + DESCRIPTOR.message_types_by_name['UninterpretedOption'] = _UNINTERPRETEDOPTION + DESCRIPTOR.message_types_by_name['SourceCodeInfo'] = _SOURCECODEINFO + DESCRIPTOR.message_types_by_name['GeneratedCodeInfo'] = _GENERATEDCODEINFO + _sym_db.RegisterFileDescriptor(DESCRIPTOR) + +else: + _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.descriptor_pb2', globals()) +if _descriptor._USE_C_DESCRIPTORS == False: + + DESCRIPTOR._options = None + _FILEDESCRIPTORSET._serialized_start=53 + _FILEDESCRIPTORSET._serialized_end=124 + _FILEDESCRIPTORPROTO._serialized_start=127 + _FILEDESCRIPTORPROTO._serialized_end=602 + _DESCRIPTORPROTO._serialized_start=605 + _DESCRIPTORPROTO._serialized_end=1286 + _DESCRIPTORPROTO_EXTENSIONRANGE._serialized_start=1140 + _DESCRIPTORPROTO_EXTENSIONRANGE._serialized_end=1241 + _DESCRIPTORPROTO_RESERVEDRANGE._serialized_start=1243 + _DESCRIPTORPROTO_RESERVEDRANGE._serialized_end=1286 + _EXTENSIONRANGEOPTIONS._serialized_start=1288 + _EXTENSIONRANGEOPTIONS._serialized_end=1391 + _FIELDDESCRIPTORPROTO._serialized_start=1394 + _FIELDDESCRIPTORPROTO._serialized_end=2119 + _FIELDDESCRIPTORPROTO_TYPE._serialized_start=1740 + _FIELDDESCRIPTORPROTO_TYPE._serialized_end=2050 + _FIELDDESCRIPTORPROTO_LABEL._serialized_start=2052 + _FIELDDESCRIPTORPROTO_LABEL._serialized_end=2119 + _ONEOFDESCRIPTORPROTO._serialized_start=2121 + _ONEOFDESCRIPTORPROTO._serialized_end=2205 + _ENUMDESCRIPTORPROTO._serialized_start=2208 + _ENUMDESCRIPTORPROTO._serialized_end=2500 + _ENUMDESCRIPTORPROTO_ENUMRESERVEDRANGE._serialized_start=2453 + _ENUMDESCRIPTORPROTO_ENUMRESERVEDRANGE._serialized_end=2500 + _ENUMVALUEDESCRIPTORPROTO._serialized_start=2502 + _ENUMVALUEDESCRIPTORPROTO._serialized_end=2610 + _SERVICEDESCRIPTORPROTO._serialized_start=2613 + _SERVICEDESCRIPTORPROTO._serialized_end=2757 + _METHODDESCRIPTORPROTO._serialized_start=2760 + _METHODDESCRIPTORPROTO._serialized_end=2953 + _FILEOPTIONS._serialized_start=2956 + _FILEOPTIONS._serialized_end=3761 + _FILEOPTIONS_OPTIMIZEMODE._serialized_start=3686 + _FILEOPTIONS_OPTIMIZEMODE._serialized_end=3744 + _MESSAGEOPTIONS._serialized_start=3764 + _MESSAGEOPTIONS._serialized_end=4024 + _FIELDOPTIONS._serialized_start=4027 + _FIELDOPTIONS._serialized_end=4473 + _FIELDOPTIONS_CTYPE._serialized_start=4354 + _FIELDOPTIONS_CTYPE._serialized_end=4401 + _FIELDOPTIONS_JSTYPE._serialized_start=4403 + _FIELDOPTIONS_JSTYPE._serialized_end=4456 + _ONEOFOPTIONS._serialized_start=4475 + _ONEOFOPTIONS._serialized_end=4569 + _ENUMOPTIONS._serialized_start=4572 + _ENUMOPTIONS._serialized_end=4719 + _ENUMVALUEOPTIONS._serialized_start=4721 + _ENUMVALUEOPTIONS._serialized_end=4846 + _SERVICEOPTIONS._serialized_start=4848 + _SERVICEOPTIONS._serialized_end=4971 + 
_METHODOPTIONS._serialized_start=4974 + _METHODOPTIONS._serialized_end=5275 + _METHODOPTIONS_IDEMPOTENCYLEVEL._serialized_start=5184 + _METHODOPTIONS_IDEMPOTENCYLEVEL._serialized_end=5264 + _UNINTERPRETEDOPTION._serialized_start=5278 + _UNINTERPRETEDOPTION._serialized_end=5564 + _UNINTERPRETEDOPTION_NAMEPART._serialized_start=5513 + _UNINTERPRETEDOPTION_NAMEPART._serialized_end=5564 + _SOURCECODEINFO._serialized_start=5567 + _SOURCECODEINFO._serialized_end=5780 + _SOURCECODEINFO_LOCATION._serialized_start=5646 + _SOURCECODEINFO_LOCATION._serialized_end=5780 + _GENERATEDCODEINFO._serialized_start=5783 + _GENERATEDCODEINFO._serialized_end=5950 + _GENERATEDCODEINFO_ANNOTATION._serialized_start=5871 + _GENERATEDCODEINFO_ANNOTATION._serialized_end=5950 +# @@protoc_insertion_point(module_scope) diff --git a/openpype/hosts/hiero/vendor/google/protobuf/descriptor_pool.py b/openpype/hosts/hiero/vendor/google/protobuf/descriptor_pool.py new file mode 100644 index 0000000000..911372a8b0 --- /dev/null +++ b/openpype/hosts/hiero/vendor/google/protobuf/descriptor_pool.py @@ -0,0 +1,1295 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. +# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +"""Provides DescriptorPool to use as a container for proto2 descriptors. + +The DescriptorPool is used in conjection with a DescriptorDatabase to maintain +a collection of protocol buffer descriptors for use when dynamically creating +message types at runtime. + +For most applications protocol buffers should be used via modules generated by +the protocol buffer compiler tool. This should only be used when the type of +protocol buffers used in an application or library cannot be predetermined. + +Below is a straightforward example on how to use this class:: + + pool = DescriptorPool() + file_descriptor_protos = [ ... 
] + for file_descriptor_proto in file_descriptor_protos: + pool.Add(file_descriptor_proto) + my_message_descriptor = pool.FindMessageTypeByName('some.package.MessageType') + +The message descriptor can be used in conjunction with the message_factory +module in order to create a protocol buffer class that can be encoded and +decoded. + +If you want to get a Python class for the specified proto, use the +helper functions inside google.protobuf.message_factory +directly instead of this class. +""" + +__author__ = 'matthewtoia@google.com (Matt Toia)' + +import collections +import warnings + +from google.protobuf import descriptor +from google.protobuf import descriptor_database +from google.protobuf import text_encoding + + +_USE_C_DESCRIPTORS = descriptor._USE_C_DESCRIPTORS # pylint: disable=protected-access + + +def _Deprecated(func): + """Mark functions as deprecated.""" + + def NewFunc(*args, **kwargs): + warnings.warn( + 'Call to deprecated function %s(). Note: Do add unlinked descriptors ' + 'to descriptor_pool is wrong. Use Add() or AddSerializedFile() ' + 'instead.' % func.__name__, + category=DeprecationWarning) + return func(*args, **kwargs) + NewFunc.__name__ = func.__name__ + NewFunc.__doc__ = func.__doc__ + NewFunc.__dict__.update(func.__dict__) + return NewFunc + + +def _NormalizeFullyQualifiedName(name): + """Remove leading period from fully-qualified type name. + + Due to b/13860351 in descriptor_database.py, types in the root namespace are + generated with a leading period. This function removes that prefix. + + Args: + name (str): The fully-qualified symbol name. + + Returns: + str: The normalized fully-qualified symbol name. + """ + return name.lstrip('.') + + +def _OptionsOrNone(descriptor_proto): + """Returns the value of the field `options`, or None if it is not set.""" + if descriptor_proto.HasField('options'): + return descriptor_proto.options + else: + return None + + +def _IsMessageSetExtension(field): + return (field.is_extension and + field.containing_type.has_options and + field.containing_type.GetOptions().message_set_wire_format and + field.type == descriptor.FieldDescriptor.TYPE_MESSAGE and + field.label == descriptor.FieldDescriptor.LABEL_OPTIONAL) + + +class DescriptorPool(object): + """A collection of protobufs dynamically constructed by descriptor protos.""" + + if _USE_C_DESCRIPTORS: + + def __new__(cls, descriptor_db=None): + # pylint: disable=protected-access + return descriptor._message.DescriptorPool(descriptor_db) + + def __init__(self, descriptor_db=None): + """Initializes a Pool of proto buffs. + + The descriptor_db argument to the constructor is provided to allow + specialized file descriptor proto lookup code to be triggered on demand. An + example would be an implementation which will read and compile a file + specified in a call to FindFileByName() and not require the call to Add() + at all. Results from this database will be cached internally here as well. + + Args: + descriptor_db: A secondary source of file descriptors. + """ + + self._internal_db = descriptor_database.DescriptorDatabase() + self._descriptor_db = descriptor_db + self._descriptors = {} + self._enum_descriptors = {} + self._service_descriptors = {} + self._file_descriptors = {} + self._toplevel_extensions = {} + # TODO(jieluo): Remove _file_desc_by_toplevel_extension after + # maybe year 2020 for compatibility issue (with 3.4.1 only). 
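The module docstring above only outlines the intended workflow. A minimal, self-contained sketch of that flow follows; the file name, package and message names are invented for illustration, and the imports assume the vendored package is importable as google.protobuf (the same assumption these modules make about each other). MessageFactory.GetPrototype is the class-lookup entry point in this generation of protobuf; newer releases expose message_factory.GetMessageClass instead.

from google.protobuf import descriptor_pb2
from google.protobuf import descriptor_pool
from google.protobuf import message_factory

# Describe a tiny .proto file in memory instead of running protoc.
file_proto = descriptor_pb2.FileDescriptorProto()
file_proto.name = 'example/person.proto'   # hypothetical file name
file_proto.package = 'example'

person = file_proto.message_type.add()
person.name = 'Person'
name_field = person.field.add()
name_field.name = 'name'
name_field.number = 1
name_field.type = descriptor_pb2.FieldDescriptorProto.TYPE_STRING
name_field.label = descriptor_pb2.FieldDescriptorProto.LABEL_OPTIONAL

# Register the file with a pool and resolve the descriptor by full name.
pool = descriptor_pool.DescriptorPool()
pool.Add(file_proto)
person_descriptor = pool.FindMessageTypeByName('example.Person')

# message_factory turns the descriptor into a usable message class.
Person = message_factory.MessageFactory(pool).GetPrototype(person_descriptor)
msg = Person(name='Ada')
print(Person.FromString(msg.SerializeToString()).name)   # -> Ada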
+ self._file_desc_by_toplevel_extension = {} + self._top_enum_values = {} + # We store extensions in two two-level mappings: The first key is the + # descriptor of the message being extended, the second key is the extension + # full name or its tag number. + self._extensions_by_name = collections.defaultdict(dict) + self._extensions_by_number = collections.defaultdict(dict) + + def _CheckConflictRegister(self, desc, desc_name, file_name): + """Check if the descriptor name conflicts with another of the same name. + + Args: + desc: Descriptor of a message, enum, service, extension or enum value. + desc_name (str): the full name of desc. + file_name (str): The file name of descriptor. + """ + for register, descriptor_type in [ + (self._descriptors, descriptor.Descriptor), + (self._enum_descriptors, descriptor.EnumDescriptor), + (self._service_descriptors, descriptor.ServiceDescriptor), + (self._toplevel_extensions, descriptor.FieldDescriptor), + (self._top_enum_values, descriptor.EnumValueDescriptor)]: + if desc_name in register: + old_desc = register[desc_name] + if isinstance(old_desc, descriptor.EnumValueDescriptor): + old_file = old_desc.type.file.name + else: + old_file = old_desc.file.name + + if not isinstance(desc, descriptor_type) or ( + old_file != file_name): + error_msg = ('Conflict register for file "' + file_name + + '": ' + desc_name + + ' is already defined in file "' + + old_file + '". Please fix the conflict by adding ' + 'package name on the proto file, or use different ' + 'name for the duplication.') + if isinstance(desc, descriptor.EnumValueDescriptor): + error_msg += ('\nNote: enum values appear as ' + 'siblings of the enum type instead of ' + 'children of it.') + + raise TypeError(error_msg) + + return + + def Add(self, file_desc_proto): + """Adds the FileDescriptorProto and its types to this pool. + + Args: + file_desc_proto (FileDescriptorProto): The file descriptor to add. + """ + + self._internal_db.Add(file_desc_proto) + + def AddSerializedFile(self, serialized_file_desc_proto): + """Adds the FileDescriptorProto and its types to this pool. + + Args: + serialized_file_desc_proto (bytes): A bytes string, serialization of the + :class:`FileDescriptorProto` to add. + + Returns: + FileDescriptor: Descriptor for the added file. + """ + + # pylint: disable=g-import-not-at-top + from google.protobuf import descriptor_pb2 + file_desc_proto = descriptor_pb2.FileDescriptorProto.FromString( + serialized_file_desc_proto) + file_desc = self._ConvertFileProtoToFileDescriptor(file_desc_proto) + file_desc.serialized_pb = serialized_file_desc_proto + return file_desc + + # Add Descriptor to descriptor pool is dreprecated. Please use Add() + # or AddSerializedFile() to add a FileDescriptorProto instead. + @_Deprecated + def AddDescriptor(self, desc): + self._AddDescriptor(desc) + + # Never call this method. It is for internal usage only. + def _AddDescriptor(self, desc): + """Adds a Descriptor to the pool, non-recursively. + + If the Descriptor contains nested messages or enums, the caller must + explicitly register them. This method also registers the FileDescriptor + associated with the message. + + Args: + desc: A Descriptor. + """ + if not isinstance(desc, descriptor.Descriptor): + raise TypeError('Expected instance of descriptor.Descriptor.') + + self._CheckConflictRegister(desc, desc.full_name, desc.file.name) + + self._descriptors[desc.full_name] = desc + self._AddFileDescriptor(desc.file) + + # Add EnumDescriptor to descriptor pool is dreprecated. 
Please use Add() + # or AddSerializedFile() to add a FileDescriptorProto instead. + @_Deprecated + def AddEnumDescriptor(self, enum_desc): + self._AddEnumDescriptor(enum_desc) + + # Never call this method. It is for internal usage only. + def _AddEnumDescriptor(self, enum_desc): + """Adds an EnumDescriptor to the pool. + + This method also registers the FileDescriptor associated with the enum. + + Args: + enum_desc: An EnumDescriptor. + """ + + if not isinstance(enum_desc, descriptor.EnumDescriptor): + raise TypeError('Expected instance of descriptor.EnumDescriptor.') + + file_name = enum_desc.file.name + self._CheckConflictRegister(enum_desc, enum_desc.full_name, file_name) + self._enum_descriptors[enum_desc.full_name] = enum_desc + + # Top enum values need to be indexed. + # Count the number of dots to see whether the enum is toplevel or nested + # in a message. We cannot use enum_desc.containing_type at this stage. + if enum_desc.file.package: + top_level = (enum_desc.full_name.count('.') + - enum_desc.file.package.count('.') == 1) + else: + top_level = enum_desc.full_name.count('.') == 0 + if top_level: + file_name = enum_desc.file.name + package = enum_desc.file.package + for enum_value in enum_desc.values: + full_name = _NormalizeFullyQualifiedName( + '.'.join((package, enum_value.name))) + self._CheckConflictRegister(enum_value, full_name, file_name) + self._top_enum_values[full_name] = enum_value + self._AddFileDescriptor(enum_desc.file) + + # Add ServiceDescriptor to descriptor pool is dreprecated. Please use Add() + # or AddSerializedFile() to add a FileDescriptorProto instead. + @_Deprecated + def AddServiceDescriptor(self, service_desc): + self._AddServiceDescriptor(service_desc) + + # Never call this method. It is for internal usage only. + def _AddServiceDescriptor(self, service_desc): + """Adds a ServiceDescriptor to the pool. + + Args: + service_desc: A ServiceDescriptor. + """ + + if not isinstance(service_desc, descriptor.ServiceDescriptor): + raise TypeError('Expected instance of descriptor.ServiceDescriptor.') + + self._CheckConflictRegister(service_desc, service_desc.full_name, + service_desc.file.name) + self._service_descriptors[service_desc.full_name] = service_desc + + # Add ExtensionDescriptor to descriptor pool is dreprecated. Please use Add() + # or AddSerializedFile() to add a FileDescriptorProto instead. + @_Deprecated + def AddExtensionDescriptor(self, extension): + self._AddExtensionDescriptor(extension) + + # Never call this method. It is for internal usage only. + def _AddExtensionDescriptor(self, extension): + """Adds a FieldDescriptor describing an extension to the pool. + + Args: + extension: A FieldDescriptor. + + Raises: + AssertionError: when another extension with the same number extends the + same message. + TypeError: when the specified extension is not a + descriptor.FieldDescriptor. + """ + if not (isinstance(extension, descriptor.FieldDescriptor) and + extension.is_extension): + raise TypeError('Expected an extension descriptor.') + + if extension.extension_scope is None: + self._toplevel_extensions[extension.full_name] = extension + + try: + existing_desc = self._extensions_by_number[ + extension.containing_type][extension.number] + except KeyError: + pass + else: + if extension is not existing_desc: + raise AssertionError( + 'Extensions "%s" and "%s" both try to extend message type "%s" ' + 'with field number %d.' 
% + (extension.full_name, existing_desc.full_name, + extension.containing_type.full_name, extension.number)) + + self._extensions_by_number[extension.containing_type][ + extension.number] = extension + self._extensions_by_name[extension.containing_type][ + extension.full_name] = extension + + # Also register MessageSet extensions with the type name. + if _IsMessageSetExtension(extension): + self._extensions_by_name[extension.containing_type][ + extension.message_type.full_name] = extension + + @_Deprecated + def AddFileDescriptor(self, file_desc): + self._InternalAddFileDescriptor(file_desc) + + # Never call this method. It is for internal usage only. + def _InternalAddFileDescriptor(self, file_desc): + """Adds a FileDescriptor to the pool, non-recursively. + + If the FileDescriptor contains messages or enums, the caller must explicitly + register them. + + Args: + file_desc: A FileDescriptor. + """ + + self._AddFileDescriptor(file_desc) + # TODO(jieluo): This is a temporary solution for FieldDescriptor.file. + # FieldDescriptor.file is added in code gen. Remove this solution after + # maybe 2020 for compatibility reason (with 3.4.1 only). + for extension in file_desc.extensions_by_name.values(): + self._file_desc_by_toplevel_extension[ + extension.full_name] = file_desc + + def _AddFileDescriptor(self, file_desc): + """Adds a FileDescriptor to the pool, non-recursively. + + If the FileDescriptor contains messages or enums, the caller must explicitly + register them. + + Args: + file_desc: A FileDescriptor. + """ + + if not isinstance(file_desc, descriptor.FileDescriptor): + raise TypeError('Expected instance of descriptor.FileDescriptor.') + self._file_descriptors[file_desc.name] = file_desc + + def FindFileByName(self, file_name): + """Gets a FileDescriptor by file name. + + Args: + file_name (str): The path to the file to get a descriptor for. + + Returns: + FileDescriptor: The descriptor for the named file. + + Raises: + KeyError: if the file cannot be found in the pool. + """ + + try: + return self._file_descriptors[file_name] + except KeyError: + pass + + try: + file_proto = self._internal_db.FindFileByName(file_name) + except KeyError as error: + if self._descriptor_db: + file_proto = self._descriptor_db.FindFileByName(file_name) + else: + raise error + if not file_proto: + raise KeyError('Cannot find a file named %s' % file_name) + return self._ConvertFileProtoToFileDescriptor(file_proto) + + def FindFileContainingSymbol(self, symbol): + """Gets the FileDescriptor for the file containing the specified symbol. + + Args: + symbol (str): The name of the symbol to search for. + + Returns: + FileDescriptor: Descriptor for the file that contains the specified + symbol. + + Raises: + KeyError: if the file cannot be found in the pool. + """ + + symbol = _NormalizeFullyQualifiedName(symbol) + try: + return self._InternalFindFileContainingSymbol(symbol) + except KeyError: + pass + + try: + # Try fallback database. Build and find again if possible. + self._FindFileContainingSymbolInDb(symbol) + return self._InternalFindFileContainingSymbol(symbol) + except KeyError: + raise KeyError('Cannot find a file containing %s' % symbol) + + def _InternalFindFileContainingSymbol(self, symbol): + """Gets the already built FileDescriptor containing the specified symbol. + + Args: + symbol (str): The name of the symbol to search for. + + Returns: + FileDescriptor: Descriptor for the file that contains the specified + symbol. + + Raises: + KeyError: if the file cannot be found in the pool. 
+ """ + try: + return self._descriptors[symbol].file + except KeyError: + pass + + try: + return self._enum_descriptors[symbol].file + except KeyError: + pass + + try: + return self._service_descriptors[symbol].file + except KeyError: + pass + + try: + return self._top_enum_values[symbol].type.file + except KeyError: + pass + + try: + return self._file_desc_by_toplevel_extension[symbol] + except KeyError: + pass + + # Try fields, enum values and nested extensions inside a message. + top_name, _, sub_name = symbol.rpartition('.') + try: + message = self.FindMessageTypeByName(top_name) + assert (sub_name in message.extensions_by_name or + sub_name in message.fields_by_name or + sub_name in message.enum_values_by_name) + return message.file + except (KeyError, AssertionError): + raise KeyError('Cannot find a file containing %s' % symbol) + + def FindMessageTypeByName(self, full_name): + """Loads the named descriptor from the pool. + + Args: + full_name (str): The full name of the descriptor to load. + + Returns: + Descriptor: The descriptor for the named type. + + Raises: + KeyError: if the message cannot be found in the pool. + """ + + full_name = _NormalizeFullyQualifiedName(full_name) + if full_name not in self._descriptors: + self._FindFileContainingSymbolInDb(full_name) + return self._descriptors[full_name] + + def FindEnumTypeByName(self, full_name): + """Loads the named enum descriptor from the pool. + + Args: + full_name (str): The full name of the enum descriptor to load. + + Returns: + EnumDescriptor: The enum descriptor for the named type. + + Raises: + KeyError: if the enum cannot be found in the pool. + """ + + full_name = _NormalizeFullyQualifiedName(full_name) + if full_name not in self._enum_descriptors: + self._FindFileContainingSymbolInDb(full_name) + return self._enum_descriptors[full_name] + + def FindFieldByName(self, full_name): + """Loads the named field descriptor from the pool. + + Args: + full_name (str): The full name of the field descriptor to load. + + Returns: + FieldDescriptor: The field descriptor for the named field. + + Raises: + KeyError: if the field cannot be found in the pool. + """ + full_name = _NormalizeFullyQualifiedName(full_name) + message_name, _, field_name = full_name.rpartition('.') + message_descriptor = self.FindMessageTypeByName(message_name) + return message_descriptor.fields_by_name[field_name] + + def FindOneofByName(self, full_name): + """Loads the named oneof descriptor from the pool. + + Args: + full_name (str): The full name of the oneof descriptor to load. + + Returns: + OneofDescriptor: The oneof descriptor for the named oneof. + + Raises: + KeyError: if the oneof cannot be found in the pool. + """ + full_name = _NormalizeFullyQualifiedName(full_name) + message_name, _, oneof_name = full_name.rpartition('.') + message_descriptor = self.FindMessageTypeByName(message_name) + return message_descriptor.oneofs_by_name[oneof_name] + + def FindExtensionByName(self, full_name): + """Loads the named extension descriptor from the pool. + + Args: + full_name (str): The full name of the extension descriptor to load. + + Returns: + FieldDescriptor: The field descriptor for the named extension. + + Raises: + KeyError: if the extension cannot be found in the pool. + """ + full_name = _NormalizeFullyQualifiedName(full_name) + try: + # The proto compiler does not give any link between the FileDescriptor + # and top-level extensions unless the FileDescriptorProto is added to + # the DescriptorDatabase, but this can impact memory usage. 
+ # So we registered these extensions by name explicitly. + return self._toplevel_extensions[full_name] + except KeyError: + pass + message_name, _, extension_name = full_name.rpartition('.') + try: + # Most extensions are nested inside a message. + scope = self.FindMessageTypeByName(message_name) + except KeyError: + # Some extensions are defined at file scope. + scope = self._FindFileContainingSymbolInDb(full_name) + return scope.extensions_by_name[extension_name] + + def FindExtensionByNumber(self, message_descriptor, number): + """Gets the extension of the specified message with the specified number. + + Extensions have to be registered to this pool by calling :func:`Add` or + :func:`AddExtensionDescriptor`. + + Args: + message_descriptor (Descriptor): descriptor of the extended message. + number (int): Number of the extension field. + + Returns: + FieldDescriptor: The descriptor for the extension. + + Raises: + KeyError: when no extension with the given number is known for the + specified message. + """ + try: + return self._extensions_by_number[message_descriptor][number] + except KeyError: + self._TryLoadExtensionFromDB(message_descriptor, number) + return self._extensions_by_number[message_descriptor][number] + + def FindAllExtensions(self, message_descriptor): + """Gets all the known extensions of a given message. + + Extensions have to be registered to this pool by build related + :func:`Add` or :func:`AddExtensionDescriptor`. + + Args: + message_descriptor (Descriptor): Descriptor of the extended message. + + Returns: + list[FieldDescriptor]: Field descriptors describing the extensions. + """ + # Fallback to descriptor db if FindAllExtensionNumbers is provided. + if self._descriptor_db and hasattr( + self._descriptor_db, 'FindAllExtensionNumbers'): + full_name = message_descriptor.full_name + all_numbers = self._descriptor_db.FindAllExtensionNumbers(full_name) + for number in all_numbers: + if number in self._extensions_by_number[message_descriptor]: + continue + self._TryLoadExtensionFromDB(message_descriptor, number) + + return list(self._extensions_by_number[message_descriptor].values()) + + def _TryLoadExtensionFromDB(self, message_descriptor, number): + """Try to Load extensions from descriptor db. + + Args: + message_descriptor: descriptor of the extended message. + number: the extension number that needs to be loaded. + """ + if not self._descriptor_db: + return + # Only supported when FindFileContainingExtension is provided. + if not hasattr( + self._descriptor_db, 'FindFileContainingExtension'): + return + + full_name = message_descriptor.full_name + file_proto = self._descriptor_db.FindFileContainingExtension( + full_name, number) + + if file_proto is None: + return + + try: + self._ConvertFileProtoToFileDescriptor(file_proto) + except: + warn_msg = ('Unable to load proto file %s for extension number %d.' % + (file_proto.name, number)) + warnings.warn(warn_msg, RuntimeWarning) + + def FindServiceByName(self, full_name): + """Loads the named service descriptor from the pool. + + Args: + full_name (str): The full name of the service descriptor to load. + + Returns: + ServiceDescriptor: The service descriptor for the named service. + + Raises: + KeyError: if the service cannot be found in the pool. 
+ """ + full_name = _NormalizeFullyQualifiedName(full_name) + if full_name not in self._service_descriptors: + self._FindFileContainingSymbolInDb(full_name) + return self._service_descriptors[full_name] + + def FindMethodByName(self, full_name): + """Loads the named service method descriptor from the pool. + + Args: + full_name (str): The full name of the method descriptor to load. + + Returns: + MethodDescriptor: The method descriptor for the service method. + + Raises: + KeyError: if the method cannot be found in the pool. + """ + full_name = _NormalizeFullyQualifiedName(full_name) + service_name, _, method_name = full_name.rpartition('.') + service_descriptor = self.FindServiceByName(service_name) + return service_descriptor.methods_by_name[method_name] + + def _FindFileContainingSymbolInDb(self, symbol): + """Finds the file in descriptor DB containing the specified symbol. + + Args: + symbol (str): The name of the symbol to search for. + + Returns: + FileDescriptor: The file that contains the specified symbol. + + Raises: + KeyError: if the file cannot be found in the descriptor database. + """ + try: + file_proto = self._internal_db.FindFileContainingSymbol(symbol) + except KeyError as error: + if self._descriptor_db: + file_proto = self._descriptor_db.FindFileContainingSymbol(symbol) + else: + raise error + if not file_proto: + raise KeyError('Cannot find a file containing %s' % symbol) + return self._ConvertFileProtoToFileDescriptor(file_proto) + + def _ConvertFileProtoToFileDescriptor(self, file_proto): + """Creates a FileDescriptor from a proto or returns a cached copy. + + This method also has the side effect of loading all the symbols found in + the file into the appropriate dictionaries in the pool. + + Args: + file_proto: The proto to convert. + + Returns: + A FileDescriptor matching the passed in proto. + """ + if file_proto.name not in self._file_descriptors: + built_deps = list(self._GetDeps(file_proto.dependency)) + direct_deps = [self.FindFileByName(n) for n in file_proto.dependency] + public_deps = [direct_deps[i] for i in file_proto.public_dependency] + + file_descriptor = descriptor.FileDescriptor( + pool=self, + name=file_proto.name, + package=file_proto.package, + syntax=file_proto.syntax, + options=_OptionsOrNone(file_proto), + serialized_pb=file_proto.SerializeToString(), + dependencies=direct_deps, + public_dependencies=public_deps, + # pylint: disable=protected-access + create_key=descriptor._internal_create_key) + scope = {} + + # This loop extracts all the message and enum types from all the + # dependencies of the file_proto. This is necessary to create the + # scope of available message types when defining the passed in + # file proto. 
+ for dependency in built_deps: + scope.update(self._ExtractSymbols( + dependency.message_types_by_name.values())) + scope.update((_PrefixWithDot(enum.full_name), enum) + for enum in dependency.enum_types_by_name.values()) + + for message_type in file_proto.message_type: + message_desc = self._ConvertMessageDescriptor( + message_type, file_proto.package, file_descriptor, scope, + file_proto.syntax) + file_descriptor.message_types_by_name[message_desc.name] = ( + message_desc) + + for enum_type in file_proto.enum_type: + file_descriptor.enum_types_by_name[enum_type.name] = ( + self._ConvertEnumDescriptor(enum_type, file_proto.package, + file_descriptor, None, scope, True)) + + for index, extension_proto in enumerate(file_proto.extension): + extension_desc = self._MakeFieldDescriptor( + extension_proto, file_proto.package, index, file_descriptor, + is_extension=True) + extension_desc.containing_type = self._GetTypeFromScope( + file_descriptor.package, extension_proto.extendee, scope) + self._SetFieldType(extension_proto, extension_desc, + file_descriptor.package, scope) + file_descriptor.extensions_by_name[extension_desc.name] = ( + extension_desc) + self._file_desc_by_toplevel_extension[extension_desc.full_name] = ( + file_descriptor) + + for desc_proto in file_proto.message_type: + self._SetAllFieldTypes(file_proto.package, desc_proto, scope) + + if file_proto.package: + desc_proto_prefix = _PrefixWithDot(file_proto.package) + else: + desc_proto_prefix = '' + + for desc_proto in file_proto.message_type: + desc = self._GetTypeFromScope( + desc_proto_prefix, desc_proto.name, scope) + file_descriptor.message_types_by_name[desc_proto.name] = desc + + for index, service_proto in enumerate(file_proto.service): + file_descriptor.services_by_name[service_proto.name] = ( + self._MakeServiceDescriptor(service_proto, index, scope, + file_proto.package, file_descriptor)) + + self._file_descriptors[file_proto.name] = file_descriptor + + # Add extensions to the pool + file_desc = self._file_descriptors[file_proto.name] + for extension in file_desc.extensions_by_name.values(): + self._AddExtensionDescriptor(extension) + for message_type in file_desc.message_types_by_name.values(): + for extension in message_type.extensions: + self._AddExtensionDescriptor(extension) + + return file_desc + + def _ConvertMessageDescriptor(self, desc_proto, package=None, file_desc=None, + scope=None, syntax=None): + """Adds the proto to the pool in the specified package. + + Args: + desc_proto: The descriptor_pb2.DescriptorProto protobuf message. + package: The package the proto should be located in. + file_desc: The file containing this message. + scope: Dict mapping short and full symbols to message and enum types. + syntax: string indicating syntax of the file ("proto2" or "proto3") + + Returns: + The added descriptor. 
+ """ + + if package: + desc_name = '.'.join((package, desc_proto.name)) + else: + desc_name = desc_proto.name + + if file_desc is None: + file_name = None + else: + file_name = file_desc.name + + if scope is None: + scope = {} + + nested = [ + self._ConvertMessageDescriptor( + nested, desc_name, file_desc, scope, syntax) + for nested in desc_proto.nested_type] + enums = [ + self._ConvertEnumDescriptor(enum, desc_name, file_desc, None, + scope, False) + for enum in desc_proto.enum_type] + fields = [self._MakeFieldDescriptor(field, desc_name, index, file_desc) + for index, field in enumerate(desc_proto.field)] + extensions = [ + self._MakeFieldDescriptor(extension, desc_name, index, file_desc, + is_extension=True) + for index, extension in enumerate(desc_proto.extension)] + oneofs = [ + # pylint: disable=g-complex-comprehension + descriptor.OneofDescriptor( + desc.name, + '.'.join((desc_name, desc.name)), + index, + None, + [], + _OptionsOrNone(desc), + # pylint: disable=protected-access + create_key=descriptor._internal_create_key) + for index, desc in enumerate(desc_proto.oneof_decl) + ] + extension_ranges = [(r.start, r.end) for r in desc_proto.extension_range] + if extension_ranges: + is_extendable = True + else: + is_extendable = False + desc = descriptor.Descriptor( + name=desc_proto.name, + full_name=desc_name, + filename=file_name, + containing_type=None, + fields=fields, + oneofs=oneofs, + nested_types=nested, + enum_types=enums, + extensions=extensions, + options=_OptionsOrNone(desc_proto), + is_extendable=is_extendable, + extension_ranges=extension_ranges, + file=file_desc, + serialized_start=None, + serialized_end=None, + syntax=syntax, + # pylint: disable=protected-access + create_key=descriptor._internal_create_key) + for nested in desc.nested_types: + nested.containing_type = desc + for enum in desc.enum_types: + enum.containing_type = desc + for field_index, field_desc in enumerate(desc_proto.field): + if field_desc.HasField('oneof_index'): + oneof_index = field_desc.oneof_index + oneofs[oneof_index].fields.append(fields[field_index]) + fields[field_index].containing_oneof = oneofs[oneof_index] + + scope[_PrefixWithDot(desc_name)] = desc + self._CheckConflictRegister(desc, desc.full_name, desc.file.name) + self._descriptors[desc_name] = desc + return desc + + def _ConvertEnumDescriptor(self, enum_proto, package=None, file_desc=None, + containing_type=None, scope=None, top_level=False): + """Make a protobuf EnumDescriptor given an EnumDescriptorProto protobuf. + + Args: + enum_proto: The descriptor_pb2.EnumDescriptorProto protobuf message. + package: Optional package name for the new message EnumDescriptor. + file_desc: The file containing the enum descriptor. + containing_type: The type containing this enum. + scope: Scope containing available types. + top_level: If True, the enum is a top level symbol. If False, the enum + is defined inside a message. 
+ + Returns: + The added descriptor + """ + + if package: + enum_name = '.'.join((package, enum_proto.name)) + else: + enum_name = enum_proto.name + + if file_desc is None: + file_name = None + else: + file_name = file_desc.name + + values = [self._MakeEnumValueDescriptor(value, index) + for index, value in enumerate(enum_proto.value)] + desc = descriptor.EnumDescriptor(name=enum_proto.name, + full_name=enum_name, + filename=file_name, + file=file_desc, + values=values, + containing_type=containing_type, + options=_OptionsOrNone(enum_proto), + # pylint: disable=protected-access + create_key=descriptor._internal_create_key) + scope['.%s' % enum_name] = desc + self._CheckConflictRegister(desc, desc.full_name, desc.file.name) + self._enum_descriptors[enum_name] = desc + + # Add top level enum values. + if top_level: + for value in values: + full_name = _NormalizeFullyQualifiedName( + '.'.join((package, value.name))) + self._CheckConflictRegister(value, full_name, file_name) + self._top_enum_values[full_name] = value + + return desc + + def _MakeFieldDescriptor(self, field_proto, message_name, index, + file_desc, is_extension=False): + """Creates a field descriptor from a FieldDescriptorProto. + + For message and enum type fields, this method will do a look up + in the pool for the appropriate descriptor for that type. If it + is unavailable, it will fall back to the _source function to + create it. If this type is still unavailable, construction will + fail. + + Args: + field_proto: The proto describing the field. + message_name: The name of the containing message. + index: Index of the field + file_desc: The file containing the field descriptor. + is_extension: Indication that this field is for an extension. + + Returns: + An initialized FieldDescriptor object + """ + + if message_name: + full_name = '.'.join((message_name, field_proto.name)) + else: + full_name = field_proto.name + + if field_proto.json_name: + json_name = field_proto.json_name + else: + json_name = None + + return descriptor.FieldDescriptor( + name=field_proto.name, + full_name=full_name, + index=index, + number=field_proto.number, + type=field_proto.type, + cpp_type=None, + message_type=None, + enum_type=None, + containing_type=None, + label=field_proto.label, + has_default_value=False, + default_value=None, + is_extension=is_extension, + extension_scope=None, + options=_OptionsOrNone(field_proto), + json_name=json_name, + file=file_desc, + # pylint: disable=protected-access + create_key=descriptor._internal_create_key) + + def _SetAllFieldTypes(self, package, desc_proto, scope): + """Sets all the descriptor's fields's types. + + This method also sets the containing types on any extensions. + + Args: + package: The current package of desc_proto. + desc_proto: The message descriptor to update. + scope: Enclosing scope of available types. 
+ """ + + package = _PrefixWithDot(package) + + main_desc = self._GetTypeFromScope(package, desc_proto.name, scope) + + if package == '.': + nested_package = _PrefixWithDot(desc_proto.name) + else: + nested_package = '.'.join([package, desc_proto.name]) + + for field_proto, field_desc in zip(desc_proto.field, main_desc.fields): + self._SetFieldType(field_proto, field_desc, nested_package, scope) + + for extension_proto, extension_desc in ( + zip(desc_proto.extension, main_desc.extensions)): + extension_desc.containing_type = self._GetTypeFromScope( + nested_package, extension_proto.extendee, scope) + self._SetFieldType(extension_proto, extension_desc, nested_package, scope) + + for nested_type in desc_proto.nested_type: + self._SetAllFieldTypes(nested_package, nested_type, scope) + + def _SetFieldType(self, field_proto, field_desc, package, scope): + """Sets the field's type, cpp_type, message_type and enum_type. + + Args: + field_proto: Data about the field in proto format. + field_desc: The descriptor to modify. + package: The package the field's container is in. + scope: Enclosing scope of available types. + """ + if field_proto.type_name: + desc = self._GetTypeFromScope(package, field_proto.type_name, scope) + else: + desc = None + + if not field_proto.HasField('type'): + if isinstance(desc, descriptor.Descriptor): + field_proto.type = descriptor.FieldDescriptor.TYPE_MESSAGE + else: + field_proto.type = descriptor.FieldDescriptor.TYPE_ENUM + + field_desc.cpp_type = descriptor.FieldDescriptor.ProtoTypeToCppProtoType( + field_proto.type) + + if (field_proto.type == descriptor.FieldDescriptor.TYPE_MESSAGE + or field_proto.type == descriptor.FieldDescriptor.TYPE_GROUP): + field_desc.message_type = desc + + if field_proto.type == descriptor.FieldDescriptor.TYPE_ENUM: + field_desc.enum_type = desc + + if field_proto.label == descriptor.FieldDescriptor.LABEL_REPEATED: + field_desc.has_default_value = False + field_desc.default_value = [] + elif field_proto.HasField('default_value'): + field_desc.has_default_value = True + if (field_proto.type == descriptor.FieldDescriptor.TYPE_DOUBLE or + field_proto.type == descriptor.FieldDescriptor.TYPE_FLOAT): + field_desc.default_value = float(field_proto.default_value) + elif field_proto.type == descriptor.FieldDescriptor.TYPE_STRING: + field_desc.default_value = field_proto.default_value + elif field_proto.type == descriptor.FieldDescriptor.TYPE_BOOL: + field_desc.default_value = field_proto.default_value.lower() == 'true' + elif field_proto.type == descriptor.FieldDescriptor.TYPE_ENUM: + field_desc.default_value = field_desc.enum_type.values_by_name[ + field_proto.default_value].number + elif field_proto.type == descriptor.FieldDescriptor.TYPE_BYTES: + field_desc.default_value = text_encoding.CUnescape( + field_proto.default_value) + elif field_proto.type == descriptor.FieldDescriptor.TYPE_MESSAGE: + field_desc.default_value = None + else: + # All other types are of the "int" type. 
+ field_desc.default_value = int(field_proto.default_value) + else: + field_desc.has_default_value = False + if (field_proto.type == descriptor.FieldDescriptor.TYPE_DOUBLE or + field_proto.type == descriptor.FieldDescriptor.TYPE_FLOAT): + field_desc.default_value = 0.0 + elif field_proto.type == descriptor.FieldDescriptor.TYPE_STRING: + field_desc.default_value = u'' + elif field_proto.type == descriptor.FieldDescriptor.TYPE_BOOL: + field_desc.default_value = False + elif field_proto.type == descriptor.FieldDescriptor.TYPE_ENUM: + field_desc.default_value = field_desc.enum_type.values[0].number + elif field_proto.type == descriptor.FieldDescriptor.TYPE_BYTES: + field_desc.default_value = b'' + elif field_proto.type == descriptor.FieldDescriptor.TYPE_MESSAGE: + field_desc.default_value = None + elif field_proto.type == descriptor.FieldDescriptor.TYPE_GROUP: + field_desc.default_value = None + else: + # All other types are of the "int" type. + field_desc.default_value = 0 + + field_desc.type = field_proto.type + + def _MakeEnumValueDescriptor(self, value_proto, index): + """Creates a enum value descriptor object from a enum value proto. + + Args: + value_proto: The proto describing the enum value. + index: The index of the enum value. + + Returns: + An initialized EnumValueDescriptor object. + """ + + return descriptor.EnumValueDescriptor( + name=value_proto.name, + index=index, + number=value_proto.number, + options=_OptionsOrNone(value_proto), + type=None, + # pylint: disable=protected-access + create_key=descriptor._internal_create_key) + + def _MakeServiceDescriptor(self, service_proto, service_index, scope, + package, file_desc): + """Make a protobuf ServiceDescriptor given a ServiceDescriptorProto. + + Args: + service_proto: The descriptor_pb2.ServiceDescriptorProto protobuf message. + service_index: The index of the service in the File. + scope: Dict mapping short and full symbols to message and enum types. + package: Optional package name for the new message EnumDescriptor. + file_desc: The file containing the service descriptor. + + Returns: + The added descriptor. + """ + + if package: + service_name = '.'.join((package, service_proto.name)) + else: + service_name = service_proto.name + + methods = [self._MakeMethodDescriptor(method_proto, service_name, package, + scope, index) + for index, method_proto in enumerate(service_proto.method)] + desc = descriptor.ServiceDescriptor( + name=service_proto.name, + full_name=service_name, + index=service_index, + methods=methods, + options=_OptionsOrNone(service_proto), + file=file_desc, + # pylint: disable=protected-access + create_key=descriptor._internal_create_key) + self._CheckConflictRegister(desc, desc.full_name, desc.file.name) + self._service_descriptors[service_name] = desc + return desc + + def _MakeMethodDescriptor(self, method_proto, service_name, package, scope, + index): + """Creates a method descriptor from a MethodDescriptorProto. + + Args: + method_proto: The proto describing the method. + service_name: The name of the containing service. + package: Optional package name to look up for types. + scope: Scope containing available types. + index: Index of the method in the service. + + Returns: + An initialized MethodDescriptor object. 
+ """ + full_name = '.'.join((service_name, method_proto.name)) + input_type = self._GetTypeFromScope( + package, method_proto.input_type, scope) + output_type = self._GetTypeFromScope( + package, method_proto.output_type, scope) + return descriptor.MethodDescriptor( + name=method_proto.name, + full_name=full_name, + index=index, + containing_service=None, + input_type=input_type, + output_type=output_type, + client_streaming=method_proto.client_streaming, + server_streaming=method_proto.server_streaming, + options=_OptionsOrNone(method_proto), + # pylint: disable=protected-access + create_key=descriptor._internal_create_key) + + def _ExtractSymbols(self, descriptors): + """Pulls out all the symbols from descriptor protos. + + Args: + descriptors: The messages to extract descriptors from. + Yields: + A two element tuple of the type name and descriptor object. + """ + + for desc in descriptors: + yield (_PrefixWithDot(desc.full_name), desc) + for symbol in self._ExtractSymbols(desc.nested_types): + yield symbol + for enum in desc.enum_types: + yield (_PrefixWithDot(enum.full_name), enum) + + def _GetDeps(self, dependencies, visited=None): + """Recursively finds dependencies for file protos. + + Args: + dependencies: The names of the files being depended on. + visited: The names of files already found. + + Yields: + Each direct and indirect dependency. + """ + + visited = visited or set() + for dependency in dependencies: + if dependency not in visited: + visited.add(dependency) + dep_desc = self.FindFileByName(dependency) + yield dep_desc + public_files = [d.name for d in dep_desc.public_dependencies] + yield from self._GetDeps(public_files, visited) + + def _GetTypeFromScope(self, package, type_name, scope): + """Finds a given type name in the current scope. + + Args: + package: The package the proto should be located in. + type_name: The name of the type to be found in the scope. + scope: Dict mapping short and full symbols to message and enum types. + + Returns: + The descriptor for the requested type. + """ + if type_name not in scope: + components = _PrefixWithDot(package).split('.') + while components: + possible_match = '.'.join(components + [type_name]) + if possible_match in scope: + type_name = possible_match + break + else: + components.pop(-1) + return scope[type_name] + + +def _PrefixWithDot(name): + return name if name.startswith('.') else '.%s' % name + + +if _USE_C_DESCRIPTORS: + # TODO(amauryfa): This pool could be constructed from Python code, when we + # support a flag like 'use_cpp_generated_pool=True'. + # pylint: disable=protected-access + _DEFAULT = descriptor._message.default_pool +else: + _DEFAULT = DescriptorPool() + + +def Default(): + return _DEFAULT diff --git a/openpype/hosts/hiero/vendor/google/protobuf/duration_pb2.py b/openpype/hosts/hiero/vendor/google/protobuf/duration_pb2.py new file mode 100644 index 0000000000..a8ecc07bdf --- /dev/null +++ b/openpype/hosts/hiero/vendor/google/protobuf/duration_pb2.py @@ -0,0 +1,26 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
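The vendored descriptor_pool module above ends with the `Default()` accessor; the generated `_pb2` modules being added next (duration, empty, field_mask) register their serialized `FileDescriptorProto` with that default pool at import time. A minimal sketch of querying the pool afterwards, assuming the vendored package resolves as `google.protobuf` (i.e. the vendor directory is on `sys.path`); the names used are standard protobuf identifiers, not something introduced by this diff:

```python
# Minimal sketch: look up descriptors from the default pool after a
# generated module has registered itself.
from google.protobuf import descriptor_pool
from google.protobuf import duration_pb2  # importing registers duration.proto

pool = descriptor_pool.Default()
file_desc = pool.FindFileByName('google/protobuf/duration.proto')
msg_desc = pool.FindMessageTypeByName('google.protobuf.Duration')
print(file_desc.package)                          # google.protobuf
print(msg_desc.fields_by_name['seconds'].number)  # 1
```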
+# source: google/protobuf/duration.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1egoogle/protobuf/duration.proto\x12\x0fgoogle.protobuf\"*\n\x08\x44uration\x12\x0f\n\x07seconds\x18\x01 \x01(\x03\x12\r\n\x05nanos\x18\x02 \x01(\x05\x42\x83\x01\n\x13\x63om.google.protobufB\rDurationProtoP\x01Z1google.golang.org/protobuf/types/known/durationpb\xf8\x01\x01\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3') + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.duration_pb2', globals()) +if _descriptor._USE_C_DESCRIPTORS == False: + + DESCRIPTOR._options = None + DESCRIPTOR._serialized_options = b'\n\023com.google.protobufB\rDurationProtoP\001Z1google.golang.org/protobuf/types/known/durationpb\370\001\001\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes' + _DURATION._serialized_start=51 + _DURATION._serialized_end=93 +# @@protoc_insertion_point(module_scope) diff --git a/openpype/hosts/hiero/vendor/google/protobuf/empty_pb2.py b/openpype/hosts/hiero/vendor/google/protobuf/empty_pb2.py new file mode 100644 index 0000000000..0b4d554db3 --- /dev/null +++ b/openpype/hosts/hiero/vendor/google/protobuf/empty_pb2.py @@ -0,0 +1,26 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: google/protobuf/empty.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1bgoogle/protobuf/empty.proto\x12\x0fgoogle.protobuf\"\x07\n\x05\x45mptyB}\n\x13\x63om.google.protobufB\nEmptyProtoP\x01Z.google.golang.org/protobuf/types/known/emptypb\xf8\x01\x01\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3') + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.empty_pb2', globals()) +if _descriptor._USE_C_DESCRIPTORS == False: + + DESCRIPTOR._options = None + DESCRIPTOR._serialized_options = b'\n\023com.google.protobufB\nEmptyProtoP\001Z.google.golang.org/protobuf/types/known/emptypb\370\001\001\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes' + _EMPTY._serialized_start=48 + _EMPTY._serialized_end=55 +# @@protoc_insertion_point(module_scope) diff --git a/openpype/hosts/hiero/vendor/google/protobuf/field_mask_pb2.py b/openpype/hosts/hiero/vendor/google/protobuf/field_mask_pb2.py new file mode 100644 index 0000000000..80a4e96e59 --- /dev/null +++ b/openpype/hosts/hiero/vendor/google/protobuf/field_mask_pb2.py @@ -0,0 +1,26 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: google/protobuf/field_mask.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n google/protobuf/field_mask.proto\x12\x0fgoogle.protobuf\"\x1a\n\tFieldMask\x12\r\n\x05paths\x18\x01 \x03(\tB\x85\x01\n\x13\x63om.google.protobufB\x0e\x46ieldMaskProtoP\x01Z2google.golang.org/protobuf/types/known/fieldmaskpb\xf8\x01\x01\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3') + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.field_mask_pb2', globals()) +if _descriptor._USE_C_DESCRIPTORS == False: + + DESCRIPTOR._options = None + DESCRIPTOR._serialized_options = b'\n\023com.google.protobufB\016FieldMaskProtoP\001Z2google.golang.org/protobuf/types/known/fieldmaskpb\370\001\001\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes' + _FIELDMASK._serialized_start=53 + _FIELDMASK._serialized_end=79 +# @@protoc_insertion_point(module_scope) diff --git a/openpype/hosts/hiero/vendor/google/protobuf/internal/__init__.py b/openpype/hosts/hiero/vendor/google/protobuf/internal/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/openpype/hosts/hiero/vendor/google/protobuf/internal/_parameterized.py b/openpype/hosts/hiero/vendor/google/protobuf/internal/_parameterized.py new file mode 100644 index 0000000000..afdbb78c36 --- /dev/null +++ b/openpype/hosts/hiero/vendor/google/protobuf/internal/_parameterized.py @@ -0,0 +1,443 @@ +#! /usr/bin/env python +# +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. +# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
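The three generated well-known-type modules above (duration, empty, field_mask) all follow the same pattern: `AddSerializedFile()` registers the file with the default pool and the vendored `builder` helpers (added later in this diff) create the message classes. Before moving on to the test utilities, a hedged usage sketch, again assuming the vendored package imports as `google.protobuf`:

```python
# Hedged sketch: round-tripping the generated well-known types.
from google.protobuf import duration_pb2, field_mask_pb2

d = duration_pb2.Duration(seconds=90, nanos=500000000)  # 90.5 s
mask = field_mask_pb2.FieldMask(paths=['name', 'owner.id'])

data = d.SerializeToString()
parsed = duration_pb2.Duration.FromString(data)
assert parsed.seconds == 90 and parsed.nanos == 500000000
print(list(mask.paths))  # ['name', 'owner.id']
```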
+
+"""Adds support for parameterized tests to Python's unittest TestCase class.
+
+A parameterized test is a method in a test case that is invoked with different
+argument tuples.
+
+A simple example:
+
+  class AdditionExample(parameterized.TestCase):
+    @parameterized.parameters(
+       (1, 2, 3),
+       (4, 5, 9),
+       (1, 1, 3))
+    def testAddition(self, op1, op2, result):
+      self.assertEqual(result, op1 + op2)
+
+
+Each invocation is a separate test case and properly isolated just
+like a normal test method, with its own setUp/tearDown cycle. In the
+example above, there are three separate testcases, one of which will
+fail due to an assertion error (1 + 1 != 3).
+
+Parameters for individual test cases can be tuples (with positional parameters)
+or dictionaries (with named parameters):
+
+  class AdditionExample(parameterized.TestCase):
+    @parameterized.parameters(
+       {'op1': 1, 'op2': 2, 'result': 3},
+       {'op1': 4, 'op2': 5, 'result': 9},
+    )
+    def testAddition(self, op1, op2, result):
+      self.assertEqual(result, op1 + op2)
+
+If a parameterized test fails, the error message will show the
+original test name (which is modified internally) and the arguments
+for the specific invocation, which are part of the string returned by
+the shortDescription() method on test cases.
+
+The id method of the test, used internally by the unittest framework,
+is also modified to show the arguments. To make sure that test names
+stay the same across several invocations, object representations like
+
+  >>> class Foo(object):
+  ...  pass
+  >>> repr(Foo())
+  '<__main__.Foo object at 0x23d8610>'
+
+are turned into '<__main__.Foo>'. For even more descriptive names,
+especially in test logs, you can use the named_parameters decorator. In
+this case, only tuples are supported, and the first parameter has to
+be a string (or an object that returns an apt name when converted via
+str()):
+
+  class NamedExample(parameterized.TestCase):
+    @parameterized.named_parameters(
+       ('Normal', 'aa', 'aaa', True),
+       ('EmptyPrefix', '', 'abc', True),
+       ('BothEmpty', '', '', True))
+    def testStartsWith(self, prefix, string, result):
+      self.assertEqual(result, string.startswith(prefix))
+
+Named tests also have the benefit that they can be run individually
+from the command line:
+
+  $ testmodule.py NamedExample.testStartsWithNormal
+  .
+  --------------------------------------------------------------------
+  Ran 1 test in 0.000s
+
+  OK
+
+Parameterized Classes
+=====================
+If invocation arguments are shared across test methods in a single
+TestCase class, instead of decorating all test methods
+individually, the class itself can be decorated:
+
+  @parameterized.parameters(
+    (1, 2, 3),
+    (4, 5, 9))
+  class ArithmeticTest(parameterized.TestCase):
+    def testAdd(self, arg1, arg2, result):
+      self.assertEqual(arg1 + arg2, result)
+
+    def testSubtract(self, arg1, arg2, result):
+      self.assertEqual(result - arg1, arg2)
+
+Inputs from Iterables
+=====================
+If parameters should be shared across several test cases, or are dynamically
+created from other sources, a single non-tuple iterable can be passed into
+the decorator.
This iterable will be used to obtain the test cases: + + class AdditionExample(parameterized.TestCase): + @parameterized.parameters( + c.op1, c.op2, c.result for c in testcases + ) + def testAddition(self, op1, op2, result): + self.assertEqual(result, op1 + op2) + + +Single-Argument Test Methods +============================ +If a test method takes only one argument, the single argument does not need to +be wrapped into a tuple: + + class NegativeNumberExample(parameterized.TestCase): + @parameterized.parameters( + -1, -3, -4, -5 + ) + def testIsNegative(self, arg): + self.assertTrue(IsNegative(arg)) +""" + +__author__ = 'tmarek@google.com (Torsten Marek)' + +import functools +import re +import types +import unittest +import uuid + +try: + # Since python 3 + import collections.abc as collections_abc +except ImportError: + # Won't work after python 3.8 + import collections as collections_abc + +ADDR_RE = re.compile(r'\<([a-zA-Z0-9_\-\.]+) object at 0x[a-fA-F0-9]+\>') +_SEPARATOR = uuid.uuid1().hex +_FIRST_ARG = object() +_ARGUMENT_REPR = object() + + +def _CleanRepr(obj): + return ADDR_RE.sub(r'<\1>', repr(obj)) + + +# Helper function formerly from the unittest module, removed from it in +# Python 2.7. +def _StrClass(cls): + return '%s.%s' % (cls.__module__, cls.__name__) + + +def _NonStringIterable(obj): + return (isinstance(obj, collections_abc.Iterable) and + not isinstance(obj, str)) + + +def _FormatParameterList(testcase_params): + if isinstance(testcase_params, collections_abc.Mapping): + return ', '.join('%s=%s' % (argname, _CleanRepr(value)) + for argname, value in testcase_params.items()) + elif _NonStringIterable(testcase_params): + return ', '.join(map(_CleanRepr, testcase_params)) + else: + return _FormatParameterList((testcase_params,)) + + +class _ParameterizedTestIter(object): + """Callable and iterable class for producing new test cases.""" + + def __init__(self, test_method, testcases, naming_type): + """Returns concrete test functions for a test and a list of parameters. + + The naming_type is used to determine the name of the concrete + functions as reported by the unittest framework. If naming_type is + _FIRST_ARG, the testcases must be tuples, and the first element must + have a string representation that is a valid Python identifier. + + Args: + test_method: The decorated test method. + testcases: (list of tuple/dict) A list of parameter + tuples/dicts for individual test invocations. + naming_type: The test naming type, either _NAMED or _ARGUMENT_REPR. + """ + self._test_method = test_method + self.testcases = testcases + self._naming_type = naming_type + + def __call__(self, *args, **kwargs): + raise RuntimeError('You appear to be running a parameterized test case ' + 'without having inherited from parameterized.' + 'TestCase. This is bad because none of ' + 'your test cases are actually being run.') + + def __iter__(self): + test_method = self._test_method + naming_type = self._naming_type + + def MakeBoundParamTest(testcase_params): + @functools.wraps(test_method) + def BoundParamTest(self): + if isinstance(testcase_params, collections_abc.Mapping): + test_method(self, **testcase_params) + elif _NonStringIterable(testcase_params): + test_method(self, *testcase_params) + else: + test_method(self, testcase_params) + + if naming_type is _FIRST_ARG: + # Signal the metaclass that the name of the test function is unique + # and descriptive. 
+ BoundParamTest.__x_use_name__ = True + BoundParamTest.__name__ += str(testcase_params[0]) + testcase_params = testcase_params[1:] + elif naming_type is _ARGUMENT_REPR: + # __x_extra_id__ is used to pass naming information to the __new__ + # method of TestGeneratorMetaclass. + # The metaclass will make sure to create a unique, but nondescriptive + # name for this test. + BoundParamTest.__x_extra_id__ = '(%s)' % ( + _FormatParameterList(testcase_params),) + else: + raise RuntimeError('%s is not a valid naming type.' % (naming_type,)) + + BoundParamTest.__doc__ = '%s(%s)' % ( + BoundParamTest.__name__, _FormatParameterList(testcase_params)) + if test_method.__doc__: + BoundParamTest.__doc__ += '\n%s' % (test_method.__doc__,) + return BoundParamTest + return (MakeBoundParamTest(c) for c in self.testcases) + + +def _IsSingletonList(testcases): + """True iff testcases contains only a single non-tuple element.""" + return len(testcases) == 1 and not isinstance(testcases[0], tuple) + + +def _ModifyClass(class_object, testcases, naming_type): + assert not getattr(class_object, '_id_suffix', None), ( + 'Cannot add parameters to %s,' + ' which already has parameterized methods.' % (class_object,)) + class_object._id_suffix = id_suffix = {} + # We change the size of __dict__ while we iterate over it, + # which Python 3.x will complain about, so use copy(). + for name, obj in class_object.__dict__.copy().items(): + if (name.startswith(unittest.TestLoader.testMethodPrefix) + and isinstance(obj, types.FunctionType)): + delattr(class_object, name) + methods = {} + _UpdateClassDictForParamTestCase( + methods, id_suffix, name, + _ParameterizedTestIter(obj, testcases, naming_type)) + for name, meth in methods.items(): + setattr(class_object, name, meth) + + +def _ParameterDecorator(naming_type, testcases): + """Implementation of the parameterization decorators. + + Args: + naming_type: The naming type. + testcases: Testcase parameters. + + Returns: + A function for modifying the decorated object. + """ + def _Apply(obj): + if isinstance(obj, type): + _ModifyClass( + obj, + list(testcases) if not isinstance(testcases, collections_abc.Sequence) + else testcases, + naming_type) + return obj + else: + return _ParameterizedTestIter(obj, testcases, naming_type) + + if _IsSingletonList(testcases): + assert _NonStringIterable(testcases[0]), ( + 'Single parameter argument must be a non-string iterable') + testcases = testcases[0] + + return _Apply + + +def parameters(*testcases): # pylint: disable=invalid-name + """A decorator for creating parameterized tests. + + See the module docstring for a usage example. + Args: + *testcases: Parameters for the decorated method, either a single + iterable, or a list of tuples/dicts/objects (for tests + with only one argument). + + Returns: + A test generator to be handled by TestGeneratorMetaclass. + """ + return _ParameterDecorator(_ARGUMENT_REPR, testcases) + + +def named_parameters(*testcases): # pylint: disable=invalid-name + """A decorator for creating parameterized tests. + + See the module docstring for a usage example. The first element of + each parameter tuple should be a string and will be appended to the + name of the test method. + + Args: + *testcases: Parameters for the decorated method, either a single + iterable, or a list of tuples. + + Returns: + A test generator to be handled by TestGeneratorMetaclass. + """ + return _ParameterDecorator(_FIRST_ARG, testcases) + + +class TestGeneratorMetaclass(type): + """Metaclass for test cases with test generators. 
+ + A test generator is an iterable in a testcase that produces callables. These + callables must be single-argument methods. These methods are injected into + the class namespace and the original iterable is removed. If the name of the + iterable conforms to the test pattern, the injected methods will be picked + up as tests by the unittest framework. + + In general, it is supposed to be used in conjunction with the + parameters decorator. + """ + + def __new__(mcs, class_name, bases, dct): + dct['_id_suffix'] = id_suffix = {} + for name, obj in dct.copy().items(): + if (name.startswith(unittest.TestLoader.testMethodPrefix) and + _NonStringIterable(obj)): + iterator = iter(obj) + dct.pop(name) + _UpdateClassDictForParamTestCase(dct, id_suffix, name, iterator) + + return type.__new__(mcs, class_name, bases, dct) + + +def _UpdateClassDictForParamTestCase(dct, id_suffix, name, iterator): + """Adds individual test cases to a dictionary. + + Args: + dct: The target dictionary. + id_suffix: The dictionary for mapping names to test IDs. + name: The original name of the test case. + iterator: The iterator generating the individual test cases. + """ + for idx, func in enumerate(iterator): + assert callable(func), 'Test generators must yield callables, got %r' % ( + func,) + if getattr(func, '__x_use_name__', False): + new_name = func.__name__ + else: + new_name = '%s%s%d' % (name, _SEPARATOR, idx) + assert new_name not in dct, ( + 'Name of parameterized test case "%s" not unique' % (new_name,)) + dct[new_name] = func + id_suffix[new_name] = getattr(func, '__x_extra_id__', '') + + +class TestCase(unittest.TestCase, metaclass=TestGeneratorMetaclass): + """Base class for test cases using the parameters decorator.""" + + def _OriginalName(self): + return self._testMethodName.split(_SEPARATOR)[0] + + def __str__(self): + return '%s (%s)' % (self._OriginalName(), _StrClass(self.__class__)) + + def id(self): # pylint: disable=invalid-name + """Returns the descriptive ID of the test. + + This is used internally by the unittesting framework to get a name + for the test to be used in reports. + + Returns: + The test id. + """ + return '%s.%s%s' % (_StrClass(self.__class__), + self._OriginalName(), + self._id_suffix.get(self._testMethodName, '')) + + +def CoopTestCase(other_base_class): + """Returns a new base class with a cooperative metaclass base. + + This enables the TestCase to be used in combination + with other base classes that have custom metaclasses, such as + mox.MoxTestBase. + + Only works with metaclasses that do not override type.__new__. + + Example: + + import google3 + import mox + + from google3.testing.pybase import parameterized + + class ExampleTest(parameterized.CoopTestCase(mox.MoxTestBase)): + ... + + Args: + other_base_class: (class) A test case base class. + + Returns: + A new class object. + """ + metaclass = type( + 'CoopMetaclass', + (other_base_class.__metaclass__, + TestGeneratorMetaclass), {}) + return metaclass( + 'CoopTestCase', + (other_base_class, TestCase), {}) diff --git a/openpype/hosts/hiero/vendor/google/protobuf/internal/api_implementation.py b/openpype/hosts/hiero/vendor/google/protobuf/internal/api_implementation.py new file mode 100644 index 0000000000..7fef237670 --- /dev/null +++ b/openpype/hosts/hiero/vendor/google/protobuf/internal/api_implementation.py @@ -0,0 +1,112 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. 
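The vendored `_parameterized` module is self-contained, so it can be exercised directly. A small sketch of what the decorators and metaclass produce; the test class and method names below are illustrative only:

```python
# Each parameter tuple becomes its own test method; with named_parameters
# the first element is appended to the generated method name.
import unittest
from google.protobuf.internal import _parameterized as parameterized


class SquareTest(parameterized.TestCase):

  @parameterized.named_parameters(
      ('Zero', 0, 0),
      ('Two', 2, 4))
  def testSquare(self, value, expected):
    self.assertEqual(expected, value * value)


# The metaclass replaces testSquare with testSquareZero and testSquareTwo,
# so each parameter set runs and reports as a separate test case.
if __name__ == '__main__':
  unittest.main()
```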
+# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +"""Determine which implementation of the protobuf API is used in this process. +""" + +import os +import sys +import warnings + +try: + # pylint: disable=g-import-not-at-top + from google.protobuf.internal import _api_implementation + # The compile-time constants in the _api_implementation module can be used to + # switch to a certain implementation of the Python API at build time. + _api_version = _api_implementation.api_version +except ImportError: + _api_version = -1 # Unspecified by compiler flags. + +if _api_version == 1: + raise ValueError('api_version=1 is no longer supported.') + + +_default_implementation_type = ('cpp' if _api_version > 0 else 'python') + + +# This environment variable can be used to switch to a certain implementation +# of the Python API, overriding the compile-time constants in the +# _api_implementation module. Right now only 'python' and 'cpp' are valid +# values. Any other value will be ignored. +_implementation_type = os.getenv('PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION', + _default_implementation_type) + +if _implementation_type != 'python': + _implementation_type = 'cpp' + +if 'PyPy' in sys.version and _implementation_type == 'cpp': + warnings.warn('PyPy does not work yet with cpp protocol buffers. ' + 'Falling back to the python implementation.') + _implementation_type = 'python' + + +# Detect if serialization should be deterministic by default +try: + # The presence of this module in a build allows the proto implementation to + # be upgraded merely via build deps. + # + # NOTE: Merely importing this automatically enables deterministic proto + # serialization for C++ code, but we still need to export it as a boolean so + # that we can do the same for `_implementation_type == 'python'`. + # + # NOTE2: It is possible for C++ code to enable deterministic serialization by + # default _without_ affecting Python code, if the C++ implementation is not in + # use by this module. 
That is intended behavior, so we don't actually expose + # this boolean outside of this module. + # + # pylint: disable=g-import-not-at-top,unused-import + from google.protobuf import enable_deterministic_proto_serialization + _python_deterministic_proto_serialization = True +except ImportError: + _python_deterministic_proto_serialization = False + + +# Usage of this function is discouraged. Clients shouldn't care which +# implementation of the API is in use. Note that there is no guarantee +# that differences between APIs will be maintained. +# Please don't use this function if possible. +def Type(): + return _implementation_type + + +def _SetType(implementation_type): + """Never use! Only for protobuf benchmark.""" + global _implementation_type + _implementation_type = implementation_type + + +# See comment on 'Type' above. +def Version(): + return 2 + + +# For internal use only +def IsPythonDefaultSerializationDeterministic(): + return _python_deterministic_proto_serialization diff --git a/openpype/hosts/hiero/vendor/google/protobuf/internal/builder.py b/openpype/hosts/hiero/vendor/google/protobuf/internal/builder.py new file mode 100644 index 0000000000..64353ee4af --- /dev/null +++ b/openpype/hosts/hiero/vendor/google/protobuf/internal/builder.py @@ -0,0 +1,130 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. +# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +"""Builds descriptors, message classes and services for generated _pb2.py. + +This file is only called in python generated _pb2.py files. It builds +descriptors, message classes and services that users can directly use +in generated code. 
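The api_implementation module above decides between the pure-Python and C++ backed implementations. The choice can be inspected, or forced through the environment variable it reads, as long as that variable is set before the first protobuf import. A minimal sketch:

```python
# Force the pure-Python implementation (must happen before protobuf imports).
import os
os.environ['PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION'] = 'python'

from google.protobuf.internal import api_implementation

print(api_implementation.Type())     # 'python' (or 'cpp' when available)
print(api_implementation.Version())  # 2 in this vendored copy
```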
+""" + +__author__ = 'jieluo@google.com (Jie Luo)' + +from google.protobuf.internal import enum_type_wrapper +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database + +_sym_db = _symbol_database.Default() + + +def BuildMessageAndEnumDescriptors(file_des, module): + """Builds message and enum descriptors. + + Args: + file_des: FileDescriptor of the .proto file + module: Generated _pb2 module + """ + + def BuildNestedDescriptors(msg_des, prefix): + for (name, nested_msg) in msg_des.nested_types_by_name.items(): + module_name = prefix + name.upper() + module[module_name] = nested_msg + BuildNestedDescriptors(nested_msg, module_name + '_') + for enum_des in msg_des.enum_types: + module[prefix + enum_des.name.upper()] = enum_des + + for (name, msg_des) in file_des.message_types_by_name.items(): + module_name = '_' + name.upper() + module[module_name] = msg_des + BuildNestedDescriptors(msg_des, module_name + '_') + + +def BuildTopDescriptorsAndMessages(file_des, module_name, module): + """Builds top level descriptors and message classes. + + Args: + file_des: FileDescriptor of the .proto file + module_name: str, the name of generated _pb2 module + module: Generated _pb2 module + """ + + def BuildMessage(msg_des): + create_dict = {} + for (name, nested_msg) in msg_des.nested_types_by_name.items(): + create_dict[name] = BuildMessage(nested_msg) + create_dict['DESCRIPTOR'] = msg_des + create_dict['__module__'] = module_name + message_class = _reflection.GeneratedProtocolMessageType( + msg_des.name, (_message.Message,), create_dict) + _sym_db.RegisterMessage(message_class) + return message_class + + # top level enums + for (name, enum_des) in file_des.enum_types_by_name.items(): + module['_' + name.upper()] = enum_des + module[name] = enum_type_wrapper.EnumTypeWrapper(enum_des) + for enum_value in enum_des.values: + module[enum_value.name] = enum_value.number + + # top level extensions + for (name, extension_des) in file_des.extensions_by_name.items(): + module[name.upper() + '_FIELD_NUMBER'] = extension_des.number + module[name] = extension_des + + # services + for (name, service) in file_des.services_by_name.items(): + module['_' + name.upper()] = service + + # Build messages. + for (name, msg_des) in file_des.message_types_by_name.items(): + module[name] = BuildMessage(msg_des) + + +def BuildServices(file_des, module_name, module): + """Builds services classes and services stub class. 
+ + Args: + file_des: FileDescriptor of the .proto file + module_name: str, the name of generated _pb2 module + module: Generated _pb2 module + """ + # pylint: disable=g-import-not-at-top + from google.protobuf import service as _service + from google.protobuf import service_reflection + # pylint: enable=g-import-not-at-top + for (name, service) in file_des.services_by_name.items(): + module[name] = service_reflection.GeneratedServiceType( + name, (_service.Service,), + dict(DESCRIPTOR=service, __module__=module_name)) + stub_name = name + '_Stub' + module[stub_name] = service_reflection.GeneratedServiceStubType( + stub_name, (module[name],), + dict(DESCRIPTOR=service, __module__=module_name)) diff --git a/openpype/hosts/hiero/vendor/google/protobuf/internal/containers.py b/openpype/hosts/hiero/vendor/google/protobuf/internal/containers.py new file mode 100644 index 0000000000..29fbb53d2f --- /dev/null +++ b/openpype/hosts/hiero/vendor/google/protobuf/internal/containers.py @@ -0,0 +1,710 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. +# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +"""Contains container classes to represent different protocol buffer types. + +This file defines container classes which represent categories of protocol +buffer field types which need extra maintenance. Currently these categories +are: + +- Repeated scalar fields - These are all repeated fields which aren't + composite (e.g. they are of simple types like int32, string, etc). +- Repeated composite fields - Repeated fields which are composite. This + includes groups and nested messages. 
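The builder helpers vendored above give a generated module its public surface: `BuildMessageAndEnumDescriptors()` exposes raw descriptors under `_<NAME>` and `BuildTopDescriptorsAndMessages()` adds the usable message classes. A short sketch of the resulting names, using the duration_pb2 module added earlier in this diff:

```python
# Names injected into a generated module by the builder functions.
from google.protobuf import duration_pb2

print(duration_pb2._DURATION.full_name)  # 'google.protobuf.Duration' (descriptor)
print(duration_pb2.Duration(seconds=5))  # message class built on that descriptor
```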
+""" + +import collections.abc +import copy +import pickle +from typing import ( + Any, + Iterable, + Iterator, + List, + MutableMapping, + MutableSequence, + NoReturn, + Optional, + Sequence, + TypeVar, + Union, + overload, +) + + +_T = TypeVar('_T') +_K = TypeVar('_K') +_V = TypeVar('_V') + + +class BaseContainer(Sequence[_T]): + """Base container class.""" + + # Minimizes memory usage and disallows assignment to other attributes. + __slots__ = ['_message_listener', '_values'] + + def __init__(self, message_listener: Any) -> None: + """ + Args: + message_listener: A MessageListener implementation. + The RepeatedScalarFieldContainer will call this object's + Modified() method when it is modified. + """ + self._message_listener = message_listener + self._values = [] + + @overload + def __getitem__(self, key: int) -> _T: + ... + + @overload + def __getitem__(self, key: slice) -> List[_T]: + ... + + def __getitem__(self, key): + """Retrieves item by the specified key.""" + return self._values[key] + + def __len__(self) -> int: + """Returns the number of elements in the container.""" + return len(self._values) + + def __ne__(self, other: Any) -> bool: + """Checks if another instance isn't equal to this one.""" + # The concrete classes should define __eq__. + return not self == other + + __hash__ = None + + def __repr__(self) -> str: + return repr(self._values) + + def sort(self, *args, **kwargs) -> None: + # Continue to support the old sort_function keyword argument. + # This is expected to be a rare occurrence, so use LBYL to avoid + # the overhead of actually catching KeyError. + if 'sort_function' in kwargs: + kwargs['cmp'] = kwargs.pop('sort_function') + self._values.sort(*args, **kwargs) + + def reverse(self) -> None: + self._values.reverse() + + +# TODO(slebedev): Remove this. BaseContainer does *not* conform to +# MutableSequence, only its subclasses do. +collections.abc.MutableSequence.register(BaseContainer) + + +class RepeatedScalarFieldContainer(BaseContainer[_T], MutableSequence[_T]): + """Simple, type-checked, list-like container for holding repeated scalars.""" + + # Disallows assignment to other attributes. + __slots__ = ['_type_checker'] + + def __init__( + self, + message_listener: Any, + type_checker: Any, + ) -> None: + """Args: + + message_listener: A MessageListener implementation. The + RepeatedScalarFieldContainer will call this object's Modified() method + when it is modified. + type_checker: A type_checkers.ValueChecker instance to run on elements + inserted into this container. + """ + super().__init__(message_listener) + self._type_checker = type_checker + + def append(self, value: _T) -> None: + """Appends an item to the list. Similar to list.append().""" + self._values.append(self._type_checker.CheckValue(value)) + if not self._message_listener.dirty: + self._message_listener.Modified() + + def insert(self, key: int, value: _T) -> None: + """Inserts the item at the specified position. Similar to list.insert().""" + self._values.insert(key, self._type_checker.CheckValue(value)) + if not self._message_listener.dirty: + self._message_listener.Modified() + + def extend(self, elem_seq: Iterable[_T]) -> None: + """Extends by appending the given iterable. Similar to list.extend().""" + if elem_seq is None: + return + try: + elem_seq_iter = iter(elem_seq) + except TypeError: + if not elem_seq: + # silently ignore falsy inputs :-/. + # TODO(ptucker): Deprecate this behavior. 
b/18413862 + return + raise + + new_values = [self._type_checker.CheckValue(elem) for elem in elem_seq_iter] + if new_values: + self._values.extend(new_values) + self._message_listener.Modified() + + def MergeFrom( + self, + other: Union['RepeatedScalarFieldContainer[_T]', Iterable[_T]], + ) -> None: + """Appends the contents of another repeated field of the same type to this + one. We do not check the types of the individual fields. + """ + self._values.extend(other) + self._message_listener.Modified() + + def remove(self, elem: _T): + """Removes an item from the list. Similar to list.remove().""" + self._values.remove(elem) + self._message_listener.Modified() + + def pop(self, key: Optional[int] = -1) -> _T: + """Removes and returns an item at a given index. Similar to list.pop().""" + value = self._values[key] + self.__delitem__(key) + return value + + @overload + def __setitem__(self, key: int, value: _T) -> None: + ... + + @overload + def __setitem__(self, key: slice, value: Iterable[_T]) -> None: + ... + + def __setitem__(self, key, value) -> None: + """Sets the item on the specified position.""" + if isinstance(key, slice): + if key.step is not None: + raise ValueError('Extended slices not supported') + self._values[key] = map(self._type_checker.CheckValue, value) + self._message_listener.Modified() + else: + self._values[key] = self._type_checker.CheckValue(value) + self._message_listener.Modified() + + def __delitem__(self, key: Union[int, slice]) -> None: + """Deletes the item at the specified position.""" + del self._values[key] + self._message_listener.Modified() + + def __eq__(self, other: Any) -> bool: + """Compares the current instance with another one.""" + if self is other: + return True + # Special case for the same type which should be common and fast. + if isinstance(other, self.__class__): + return other._values == self._values + # We are presumably comparing against some other sequence type. + return other == self._values + + def __deepcopy__( + self, + unused_memo: Any = None, + ) -> 'RepeatedScalarFieldContainer[_T]': + clone = RepeatedScalarFieldContainer( + copy.deepcopy(self._message_listener), self._type_checker) + clone.MergeFrom(self) + return clone + + def __reduce__(self, **kwargs) -> NoReturn: + raise pickle.PickleError( + "Can't pickle repeated scalar fields, convert to list first") + + +# TODO(slebedev): Constrain T to be a subtype of Message. +class RepeatedCompositeFieldContainer(BaseContainer[_T], MutableSequence[_T]): + """Simple, list-like container for holding repeated composite fields.""" + + # Disallows assignment to other attributes. + __slots__ = ['_message_descriptor'] + + def __init__(self, message_listener: Any, message_descriptor: Any) -> None: + """ + Note that we pass in a descriptor instead of the generated directly, + since at the time we construct a _RepeatedCompositeFieldContainer we + haven't yet necessarily initialized the type that will be contained in the + container. + + Args: + message_listener: A MessageListener implementation. + The RepeatedCompositeFieldContainer will call this object's + Modified() method when it is modified. + message_descriptor: A Descriptor instance describing the protocol type + that should be present in this container. We'll use the + _concrete_class field of this descriptor when the client calls add(). + """ + super().__init__(message_listener) + self._message_descriptor = message_descriptor + + def add(self, **kwargs: Any) -> _T: + """Adds a new element at the end of the list and returns it. 
Keyword + arguments may be used to initialize the element. + """ + new_element = self._message_descriptor._concrete_class(**kwargs) + new_element._SetListener(self._message_listener) + self._values.append(new_element) + if not self._message_listener.dirty: + self._message_listener.Modified() + return new_element + + def append(self, value: _T) -> None: + """Appends one element by copying the message.""" + new_element = self._message_descriptor._concrete_class() + new_element._SetListener(self._message_listener) + new_element.CopyFrom(value) + self._values.append(new_element) + if not self._message_listener.dirty: + self._message_listener.Modified() + + def insert(self, key: int, value: _T) -> None: + """Inserts the item at the specified position by copying.""" + new_element = self._message_descriptor._concrete_class() + new_element._SetListener(self._message_listener) + new_element.CopyFrom(value) + self._values.insert(key, new_element) + if not self._message_listener.dirty: + self._message_listener.Modified() + + def extend(self, elem_seq: Iterable[_T]) -> None: + """Extends by appending the given sequence of elements of the same type + + as this one, copying each individual message. + """ + message_class = self._message_descriptor._concrete_class + listener = self._message_listener + values = self._values + for message in elem_seq: + new_element = message_class() + new_element._SetListener(listener) + new_element.MergeFrom(message) + values.append(new_element) + listener.Modified() + + def MergeFrom( + self, + other: Union['RepeatedCompositeFieldContainer[_T]', Iterable[_T]], + ) -> None: + """Appends the contents of another repeated field of the same type to this + one, copying each individual message. + """ + self.extend(other) + + def remove(self, elem: _T) -> None: + """Removes an item from the list. Similar to list.remove().""" + self._values.remove(elem) + self._message_listener.Modified() + + def pop(self, key: Optional[int] = -1) -> _T: + """Removes and returns an item at a given index. Similar to list.pop().""" + value = self._values[key] + self.__delitem__(key) + return value + + @overload + def __setitem__(self, key: int, value: _T) -> None: + ... + + @overload + def __setitem__(self, key: slice, value: Iterable[_T]) -> None: + ... + + def __setitem__(self, key, value): + # This method is implemented to make RepeatedCompositeFieldContainer + # structurally compatible with typing.MutableSequence. It is + # otherwise unsupported and will always raise an error. + raise TypeError( + f'{self.__class__.__name__} object does not support item assignment') + + def __delitem__(self, key: Union[int, slice]) -> None: + """Deletes the item at the specified position.""" + del self._values[key] + self._message_listener.Modified() + + def __eq__(self, other: Any) -> bool: + """Compares the current instance with another one.""" + if self is other: + return True + if not isinstance(other, self.__class__): + raise TypeError('Can only compare repeated composite fields against ' + 'other repeated composite fields.') + return self._values == other._values + + +class ScalarMap(MutableMapping[_K, _V]): + """Simple, type-checked, dict-like container for holding repeated scalars.""" + + # Disallows assignment to other attributes. 
+ __slots__ = ['_key_checker', '_value_checker', '_values', '_message_listener', + '_entry_descriptor'] + + def __init__( + self, + message_listener: Any, + key_checker: Any, + value_checker: Any, + entry_descriptor: Any, + ) -> None: + """ + Args: + message_listener: A MessageListener implementation. + The ScalarMap will call this object's Modified() method when it + is modified. + key_checker: A type_checkers.ValueChecker instance to run on keys + inserted into this container. + value_checker: A type_checkers.ValueChecker instance to run on values + inserted into this container. + entry_descriptor: The MessageDescriptor of a map entry: key and value. + """ + self._message_listener = message_listener + self._key_checker = key_checker + self._value_checker = value_checker + self._entry_descriptor = entry_descriptor + self._values = {} + + def __getitem__(self, key: _K) -> _V: + try: + return self._values[key] + except KeyError: + key = self._key_checker.CheckValue(key) + val = self._value_checker.DefaultValue() + self._values[key] = val + return val + + def __contains__(self, item: _K) -> bool: + # We check the key's type to match the strong-typing flavor of the API. + # Also this makes it easier to match the behavior of the C++ implementation. + self._key_checker.CheckValue(item) + return item in self._values + + @overload + def get(self, key: _K) -> Optional[_V]: + ... + + @overload + def get(self, key: _K, default: _T) -> Union[_V, _T]: + ... + + # We need to override this explicitly, because our defaultdict-like behavior + # will make the default implementation (from our base class) always insert + # the key. + def get(self, key, default=None): + if key in self: + return self[key] + else: + return default + + def __setitem__(self, key: _K, value: _V) -> _T: + checked_key = self._key_checker.CheckValue(key) + checked_value = self._value_checker.CheckValue(value) + self._values[checked_key] = checked_value + self._message_listener.Modified() + + def __delitem__(self, key: _K) -> None: + del self._values[key] + self._message_listener.Modified() + + def __len__(self) -> int: + return len(self._values) + + def __iter__(self) -> Iterator[_K]: + return iter(self._values) + + def __repr__(self) -> str: + return repr(self._values) + + def MergeFrom(self, other: 'ScalarMap[_K, _V]') -> None: + self._values.update(other._values) + self._message_listener.Modified() + + def InvalidateIterators(self) -> None: + # It appears that the only way to reliably invalidate iterators to + # self._values is to ensure that its size changes. + original = self._values + self._values = original.copy() + original[None] = None + + # This is defined in the abstract base, but we can do it much more cheaply. + def clear(self) -> None: + self._values.clear() + self._message_listener.Modified() + + def GetEntryClass(self) -> Any: + return self._entry_descriptor._concrete_class + + +class MessageMap(MutableMapping[_K, _V]): + """Simple, type-checked, dict-like container for with submessage values.""" + + # Disallows assignment to other attributes. + __slots__ = ['_key_checker', '_values', '_message_listener', + '_message_descriptor', '_entry_descriptor'] + + def __init__( + self, + message_listener: Any, + message_descriptor: Any, + key_checker: Any, + entry_descriptor: Any, + ) -> None: + """ + Args: + message_listener: A MessageListener implementation. + The ScalarMap will call this object's Modified() method when it + is modified. 
+ key_checker: A type_checkers.ValueChecker instance to run on keys + inserted into this container. + value_checker: A type_checkers.ValueChecker instance to run on values + inserted into this container. + entry_descriptor: The MessageDescriptor of a map entry: key and value. + """ + self._message_listener = message_listener + self._message_descriptor = message_descriptor + self._key_checker = key_checker + self._entry_descriptor = entry_descriptor + self._values = {} + + def __getitem__(self, key: _K) -> _V: + key = self._key_checker.CheckValue(key) + try: + return self._values[key] + except KeyError: + new_element = self._message_descriptor._concrete_class() + new_element._SetListener(self._message_listener) + self._values[key] = new_element + self._message_listener.Modified() + return new_element + + def get_or_create(self, key: _K) -> _V: + """get_or_create() is an alias for getitem (ie. map[key]). + + Args: + key: The key to get or create in the map. + + This is useful in cases where you want to be explicit that the call is + mutating the map. This can avoid lint errors for statements like this + that otherwise would appear to be pointless statements: + + msg.my_map[key] + """ + return self[key] + + @overload + def get(self, key: _K) -> Optional[_V]: + ... + + @overload + def get(self, key: _K, default: _T) -> Union[_V, _T]: + ... + + # We need to override this explicitly, because our defaultdict-like behavior + # will make the default implementation (from our base class) always insert + # the key. + def get(self, key, default=None): + if key in self: + return self[key] + else: + return default + + def __contains__(self, item: _K) -> bool: + item = self._key_checker.CheckValue(item) + return item in self._values + + def __setitem__(self, key: _K, value: _V) -> NoReturn: + raise ValueError('May not set values directly, call my_map[key].foo = 5') + + def __delitem__(self, key: _K) -> None: + key = self._key_checker.CheckValue(key) + del self._values[key] + self._message_listener.Modified() + + def __len__(self) -> int: + return len(self._values) + + def __iter__(self) -> Iterator[_K]: + return iter(self._values) + + def __repr__(self) -> str: + return repr(self._values) + + def MergeFrom(self, other: 'MessageMap[_K, _V]') -> None: + # pylint: disable=protected-access + for key in other._values: + # According to documentation: "When parsing from the wire or when merging, + # if there are duplicate map keys the last key seen is used". + if key in self: + del self[key] + self[key].CopyFrom(other[key]) + # self._message_listener.Modified() not required here, because + # mutations to submessages already propagate. + + def InvalidateIterators(self) -> None: + # It appears that the only way to reliably invalidate iterators to + # self._values is to ensure that its size changes. + original = self._values + self._values = original.copy() + original[None] = None + + # This is defined in the abstract base, but we can do it much more cheaply. + def clear(self) -> None: + self._values.clear() + self._message_listener.Modified() + + def GetEntryClass(self) -> Any: + return self._entry_descriptor._concrete_class + + +class _UnknownField: + """A parsed unknown field.""" + + # Disallows assignment to other attributes. 
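`ScalarMap` and `MessageMap` above implement protobuf `map<,>` fields. None of the well-known types vendored in this diff declare a map field, so the sketch below assumes a hypothetical `config_pb2` module generated from `map<string, int32> limits = 1;` and `map<string, Rule> rules = 2;`:

```python
# Hypothetical generated module; illustrates the map container semantics only.
from config_pb2 import Config  # not part of this diff

cfg = Config()
cfg.limits['cpu'] = 4          # ScalarMap: plain assignment, value is type-checked
print(cfg.limits['missing'])   # 0 -- reading a missing key inserts the default
rule = cfg.rules['default']    # MessageMap: access creates an empty Rule entry
rule.max_size = 10             # mutate the created entry in place
# cfg.rules['other'] = Rule()  # ValueError: message map values cannot be assigned
```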
+  __slots__ = ['_field_number', '_wire_type', '_data']
+
+  def __init__(self, field_number, wire_type, data):
+    self._field_number = field_number
+    self._wire_type = wire_type
+    self._data = data
+    return
+
+  def __lt__(self, other):
+    # pylint: disable=protected-access
+    return self._field_number < other._field_number
+
+  def __eq__(self, other):
+    if self is other:
+      return True
+    # pylint: disable=protected-access
+    return (self._field_number == other._field_number and
+            self._wire_type == other._wire_type and
+            self._data == other._data)
+
+
+class UnknownFieldRef:  # pylint: disable=missing-class-docstring
+
+  def __init__(self, parent, index):
+    self._parent = parent
+    self._index = index
+
+  def _check_valid(self):
+    if not self._parent:
+      raise ValueError('UnknownField does not exist. '
+                       'The parent message might be cleared.')
+    if self._index >= len(self._parent):
+      raise ValueError('UnknownField does not exist. '
+                       'The parent message might be cleared.')
+
+  @property
+  def field_number(self):
+    self._check_valid()
+    # pylint: disable=protected-access
+    return self._parent._internal_get(self._index)._field_number
+
+  @property
+  def wire_type(self):
+    self._check_valid()
+    # pylint: disable=protected-access
+    return self._parent._internal_get(self._index)._wire_type
+
+  @property
+  def data(self):
+    self._check_valid()
+    # pylint: disable=protected-access
+    return self._parent._internal_get(self._index)._data
+
+
+class UnknownFieldSet:
+  """UnknownField container"""
+
+  # Disallows assignment to other attributes.
+  __slots__ = ['_values']
+
+  def __init__(self):
+    self._values = []
+
+  def __getitem__(self, index):
+    if self._values is None:
+      raise ValueError('UnknownFields does not exist. '
+                       'The parent message might be cleared.')
+    size = len(self._values)
+    if index < 0:
+      index += size
+    if index < 0 or index >= size:
+      raise IndexError('index %d out of range' % index)
+
+    return UnknownFieldRef(self, index)
+
+  def _internal_get(self, index):
+    return self._values[index]
+
+  def __len__(self):
+    if self._values is None:
+      raise ValueError('UnknownFields does not exist. '
+                       'The parent message might be cleared.')
+    return len(self._values)
+
+  def _add(self, field_number, wire_type, data):
+    unknown_field = _UnknownField(field_number, wire_type, data)
+    self._values.append(unknown_field)
+    return unknown_field
+
+  def __iter__(self):
+    for i in range(len(self)):
+      yield UnknownFieldRef(self, i)
+
+  def _extend(self, other):
+    if other is None:
+      return
+    # pylint: disable=protected-access
+    self._values.extend(other._values)
+
+  def __eq__(self, other):
+    if self is other:
+      return True
+    # Sort unknown fields because their order shouldn't
+    # affect equality test.
+    values = list(self._values)
+    if other is None:
+      return not values
+    values.sort()
+    # pylint: disable=protected-access
+    other_values = sorted(other._values)
+    return values == other_values
+
+  def _clear(self):
+    for value in self._values:
+      # pylint: disable=protected-access
+      if isinstance(value._data, UnknownFieldSet):
+        value._data._clear()  # pylint: disable=protected-access
+    self._values = None
diff --git a/openpype/hosts/hiero/vendor/google/protobuf/internal/decoder.py b/openpype/hosts/hiero/vendor/google/protobuf/internal/decoder.py
new file mode 100644
index 0000000000..bc1b7b785c
--- /dev/null
+++ b/openpype/hosts/hiero/vendor/google/protobuf/internal/decoder.py
@@ -0,0 +1,1029 @@
+# Protocol Buffers - Google's data interchange format
+# Copyright 2008 Google Inc. All rights reserved.
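Unknown fields, parsed from the wire but not declared by the message, are retained in the `UnknownFieldSet` above and can be read back through the message's `UnknownFields()` accessor available in this protobuf generation. A small sketch using `Duration`, whose highest declared field number is 2, so field 15 below ends up in the unknown set:

```python
from google.protobuf import duration_pb2

# Append a varint field number 15 (tag 15 << 3 | 0 == 0x78) with value 7.
data = duration_pb2.Duration(seconds=3).SerializeToString() + b'\x78\x07'

msg = duration_pb2.Duration()
msg.ParseFromString(data)
for field in msg.UnknownFields():
    print(field.field_number, field.wire_type, field.data)  # 15 0 7
```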
+# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +"""Code for decoding protocol buffer primitives. + +This code is very similar to encoder.py -- read the docs for that module first. + +A "decoder" is a function with the signature: + Decode(buffer, pos, end, message, field_dict) +The arguments are: + buffer: The string containing the encoded message. + pos: The current position in the string. + end: The position in the string where the current message ends. May be + less than len(buffer) if we're reading a sub-message. + message: The message object into which we're parsing. + field_dict: message._fields (avoids a hashtable lookup). +The decoder reads the field and stores it into field_dict, returning the new +buffer position. A decoder for a repeated field may proactively decode all of +the elements of that field, if they appear consecutively. + +Note that decoders may throw any of the following: + IndexError: Indicates a truncated message. + struct.error: Unpacking of a fixed-width field failed. + message.DecodeError: Other errors. + +Decoders are expected to raise an exception if they are called with pos > end. +This allows callers to be lax about bounds checking: it's fineto read past +"end" as long as you are sure that someone else will notice and throw an +exception later on. + +Something up the call stack is expected to catch IndexError and struct.error +and convert them to message.DecodeError. + +Decoders are constructed using decoder constructors with the signature: + MakeDecoder(field_number, is_repeated, is_packed, key, new_default) +The arguments are: + field_number: The field number of the field we want to decode. + is_repeated: Is the field a repeated field? (bool) + is_packed: Is the field a packed field? (bool) + key: The key to use when looking up the field within field_dict. + (This is actually the FieldDescriptor but nothing in this + file should depend on that.) + new_default: A function which takes a message object as a parameter and + returns a new instance of the default value for this field. 
+ (This is called for repeated fields and sub-messages, when an + instance does not already exist.) + +As with encoders, we define a decoder constructor for every type of field. +Then, for every field of every message class we construct an actual decoder. +That decoder goes into a dict indexed by tag, so when we decode a message +we repeatedly read a tag, look up the corresponding decoder, and invoke it. +""" + +__author__ = 'kenton@google.com (Kenton Varda)' + +import math +import struct + +from google.protobuf.internal import containers +from google.protobuf.internal import encoder +from google.protobuf.internal import wire_format +from google.protobuf import message + + +# This is not for optimization, but rather to avoid conflicts with local +# variables named "message". +_DecodeError = message.DecodeError + + +def _VarintDecoder(mask, result_type): + """Return an encoder for a basic varint value (does not include tag). + + Decoded values will be bitwise-anded with the given mask before being + returned, e.g. to limit them to 32 bits. The returned decoder does not + take the usual "end" parameter -- the caller is expected to do bounds checking + after the fact (often the caller can defer such checking until later). The + decoder returns a (value, new_pos) pair. + """ + + def DecodeVarint(buffer, pos): + result = 0 + shift = 0 + while 1: + b = buffer[pos] + result |= ((b & 0x7f) << shift) + pos += 1 + if not (b & 0x80): + result &= mask + result = result_type(result) + return (result, pos) + shift += 7 + if shift >= 64: + raise _DecodeError('Too many bytes when decoding varint.') + return DecodeVarint + + +def _SignedVarintDecoder(bits, result_type): + """Like _VarintDecoder() but decodes signed values.""" + + signbit = 1 << (bits - 1) + mask = (1 << bits) - 1 + + def DecodeVarint(buffer, pos): + result = 0 + shift = 0 + while 1: + b = buffer[pos] + result |= ((b & 0x7f) << shift) + pos += 1 + if not (b & 0x80): + result &= mask + result = (result ^ signbit) - signbit + result = result_type(result) + return (result, pos) + shift += 7 + if shift >= 64: + raise _DecodeError('Too many bytes when decoding varint.') + return DecodeVarint + +# All 32-bit and 64-bit values are represented as int. +_DecodeVarint = _VarintDecoder((1 << 64) - 1, int) +_DecodeSignedVarint = _SignedVarintDecoder(64, int) + +# Use these versions for values which must be limited to 32 bits. +_DecodeVarint32 = _VarintDecoder((1 << 32) - 1, int) +_DecodeSignedVarint32 = _SignedVarintDecoder(32, int) + + +def ReadTag(buffer, pos): + """Read a tag from the memoryview, and return a (tag_bytes, new_pos) tuple. + + We return the raw bytes of the tag rather than decoding them. The raw + bytes can then be used to look up the proper decoder. This effectively allows + us to trade some work that would be done in pure-python (decoding a varint) + for work that is done in C (searching for a byte string in a hash table). + In a low-level language it would be much cheaper to decode the varint and + use that, but not in Python. + + Args: + buffer: memoryview object of the encoded bytes + pos: int of the current position to start from + + Returns: + Tuple[bytes, int] of the tag data and new position. + """ + start = pos + while buffer[pos] & 0x80: + pos += 1 + pos += 1 + + tag_bytes = buffer[start:pos].tobytes() + return tag_bytes, pos + + +# -------------------------------------------------------------------- + + +def _SimpleDecoder(wire_type, decode_value): + """Return a constructor for a decoder for fields of a particular type. 
+ + Args: + wire_type: The field's wire type. + decode_value: A function which decodes an individual value, e.g. + _DecodeVarint() + """ + + def SpecificDecoder(field_number, is_repeated, is_packed, key, new_default, + clear_if_default=False): + if is_packed: + local_DecodeVarint = _DecodeVarint + def DecodePackedField(buffer, pos, end, message, field_dict): + value = field_dict.get(key) + if value is None: + value = field_dict.setdefault(key, new_default(message)) + (endpoint, pos) = local_DecodeVarint(buffer, pos) + endpoint += pos + if endpoint > end: + raise _DecodeError('Truncated message.') + while pos < endpoint: + (element, pos) = decode_value(buffer, pos) + value.append(element) + if pos > endpoint: + del value[-1] # Discard corrupt value. + raise _DecodeError('Packed element was truncated.') + return pos + return DecodePackedField + elif is_repeated: + tag_bytes = encoder.TagBytes(field_number, wire_type) + tag_len = len(tag_bytes) + def DecodeRepeatedField(buffer, pos, end, message, field_dict): + value = field_dict.get(key) + if value is None: + value = field_dict.setdefault(key, new_default(message)) + while 1: + (element, new_pos) = decode_value(buffer, pos) + value.append(element) + # Predict that the next tag is another copy of the same repeated + # field. + pos = new_pos + tag_len + if buffer[new_pos:pos] != tag_bytes or new_pos >= end: + # Prediction failed. Return. + if new_pos > end: + raise _DecodeError('Truncated message.') + return new_pos + return DecodeRepeatedField + else: + def DecodeField(buffer, pos, end, message, field_dict): + (new_value, pos) = decode_value(buffer, pos) + if pos > end: + raise _DecodeError('Truncated message.') + if clear_if_default and not new_value: + field_dict.pop(key, None) + else: + field_dict[key] = new_value + return pos + return DecodeField + + return SpecificDecoder + + +def _ModifiedDecoder(wire_type, decode_value, modify_value): + """Like SimpleDecoder but additionally invokes modify_value on every value + before storing it. Usually modify_value is ZigZagDecode. + """ + + # Reusing _SimpleDecoder is slightly slower than copying a bunch of code, but + # not enough to make a significant difference. + + def InnerDecode(buffer, pos): + (result, new_pos) = decode_value(buffer, pos) + return (modify_value(result), new_pos) + return _SimpleDecoder(wire_type, InnerDecode) + + +def _StructPackDecoder(wire_type, format): + """Return a constructor for a decoder for a fixed-width field. + + Args: + wire_type: The field's wire type. + format: The format string to pass to struct.unpack(). + """ + + value_size = struct.calcsize(format) + local_unpack = struct.unpack + + # Reusing _SimpleDecoder is slightly slower than copying a bunch of code, but + # not enough to make a significant difference. + + # Note that we expect someone up-stack to catch struct.error and convert + # it to _DecodeError -- this way we don't have to set up exception- + # handling blocks every time we parse one value. + + def InnerDecode(buffer, pos): + new_pos = pos + value_size + result = local_unpack(format, buffer[pos:new_pos])[0] + return (result, new_pos) + return _SimpleDecoder(wire_type, InnerDecode) + + +def _FloatDecoder(): + """Returns a decoder for a float field. + + This code works around a bug in struct.unpack for non-finite 32-bit + floating-point values. + """ + + local_unpack = struct.unpack + + def InnerDecode(buffer, pos): + """Decode serialized float to a float and new position. 
+
+ Args:
+ buffer: memoryview of the serialized bytes
+ pos: int, position in the memory view to start at.
+
+ Returns:
+ Tuple[float, int] of the deserialized float value and new position
+ in the serialized data.
+ """
+ # We expect a 32-bit value in little-endian byte order. Bit 1 is the sign
+ # bit, bits 2-9 represent the exponent, and bits 10-32 are the significand.
+ new_pos = pos + 4
+ float_bytes = buffer[pos:new_pos].tobytes()
+
+ # If this value has all its exponent bits set, then it's non-finite.
+ # In Python 2.4, struct.unpack will convert it to a finite 64-bit value.
+ # To avoid that, we parse it specially.
+ if (float_bytes[3:4] in b'\x7F\xFF' and float_bytes[2:3] >= b'\x80'):
+ # If at least one significand bit is set...
+ if float_bytes[0:3] != b'\x00\x00\x80':
+ return (math.nan, new_pos)
+ # If sign bit is set...
+ if float_bytes[3:4] == b'\xFF':
+ return (-math.inf, new_pos)
+ return (math.inf, new_pos)
+
+ # Note that we expect someone up-stack to catch struct.error and convert
+ # it to _DecodeError -- this way we don't have to set up exception-
+ # handling blocks every time we parse one value.
+ result = local_unpack('<f', float_bytes)[0]
+ return (result, new_pos)
+ return _SimpleDecoder(wire_format.WIRETYPE_FIXED32, InnerDecode)
+
+
+def _DoubleDecoder():
+ """Returns a decoder for a double field.
+
+ This code works around a bug in struct.unpack for not-a-number.
+ """
+
+ local_unpack = struct.unpack
+
+ def InnerDecode(buffer, pos):
+ """Decode serialized double to a double and new position.
+
+ Args:
+ buffer: memoryview of the serialized bytes.
+ pos: int, position in the memory view to start at.
+
+ Returns:
+ Tuple[float, int] of the deserialized double value and new position
+ in the serialized data.
+ """
+ # We expect a 64-bit value in little-endian byte order. Bit 1 is the sign
+ # bit, bits 2-12 represent the exponent, and bits 13-64 are the significand.
+ new_pos = pos + 8
+ double_bytes = buffer[pos:new_pos].tobytes()
+
+ # If this value has all its exponent bits set and at least one significand
+ # bit set, it's not a number. In Python 2.4, struct.unpack will treat it
+ # as inf or -inf. To avoid that, we treat it specially.
+ if ((double_bytes[7:8] in b'\x7F\xFF')
+ and (double_bytes[6:7] >= b'\xF0')
+ and (double_bytes[0:7] != b'\x00\x00\x00\x00\x00\x00\xF0')):
+ return (math.nan, new_pos)
+
+ # Note that we expect someone up-stack to catch struct.error and convert
+ # it to _DecodeError -- this way we don't have to set up exception-
+ # handling blocks every time we parse one value.
+ result = local_unpack('<d', double_bytes)[0]
+ return (result, new_pos)
+ return _SimpleDecoder(wire_format.WIRETYPE_FIXED64, InnerDecode)
+
+
+def EnumDecoder(field_number, is_repeated, is_packed, key, new_default,
+ clear_if_default=False):
+ """Returns a decoder for enum field."""
+ enum_type = key.enum_type
+
+ if is_packed:
+ local_DecodeVarint = _DecodeVarint
+ def DecodePackedField(buffer, pos, end, message, field_dict):
+ """Decode serialized packed enum to its value and a new position.
+
+ Args:
+ buffer: memoryview of the serialized bytes.
+ pos: int, position in the memory view to start at.
+ end: int, end position of serialized data
+ message: Message object to store unknown fields in
+ field_dict: Map[Descriptor, Any] to store decoded values in.
+
+ Returns:
+ int, new position in serialized data.
+ """
+ value = field_dict.get(key)
+ if value is None:
+ value = field_dict.setdefault(key, new_default(message))
+ (endpoint, pos) = local_DecodeVarint(buffer, pos)
+ endpoint += pos
+ if endpoint > end:
+ raise _DecodeError('Truncated message.')
+ while pos < endpoint:
+ value_start_pos = pos
+ (element, pos) = _DecodeSignedVarint32(buffer, pos)
+ # pylint: disable=protected-access
+ if element in enum_type.values_by_number:
+ value.append(element)
+ else:
+ if not message._unknown_fields:
+ message._unknown_fields = []
+ tag_bytes = encoder.TagBytes(field_number,
+ wire_format.WIRETYPE_VARINT)
+
+ message._unknown_fields.append(
+ (tag_bytes, buffer[value_start_pos:pos].tobytes()))
+ if message._unknown_field_set is None:
+ message._unknown_field_set = containers.UnknownFieldSet()
+ message._unknown_field_set._add(
+ field_number, wire_format.WIRETYPE_VARINT, element)
+ # pylint: enable=protected-access
+ if pos > endpoint:
+ if element in enum_type.values_by_number:
+ del value[-1] # Discard corrupt value.
+ else:
+ del message._unknown_fields[-1]
+ # pylint: disable=protected-access
+ del message._unknown_field_set._values[-1]
+ # pylint: enable=protected-access
+ raise _DecodeError('Packed element was truncated.')
+ return pos
+ return DecodePackedField
+ elif is_repeated:
+ tag_bytes = encoder.TagBytes(field_number, wire_format.WIRETYPE_VARINT)
+ tag_len = len(tag_bytes)
+ def DecodeRepeatedField(buffer, pos, end, message, field_dict):
+ """Decode serialized repeated enum to its value and a new position.
+
+ Args:
+ buffer: memoryview of the serialized bytes.
+ pos: int, position in the memory view to start at.
+ end: int, end position of serialized data
+ message: Message object to store unknown fields in
+ field_dict: Map[Descriptor, Any] to store decoded values in.
+
+ Returns:
+ int, new position in serialized data.
+ """ + value = field_dict.get(key) + if value is None: + value = field_dict.setdefault(key, new_default(message)) + while 1: + (element, new_pos) = _DecodeSignedVarint32(buffer, pos) + # pylint: disable=protected-access + if element in enum_type.values_by_number: + value.append(element) + else: + if not message._unknown_fields: + message._unknown_fields = [] + message._unknown_fields.append( + (tag_bytes, buffer[pos:new_pos].tobytes())) + if message._unknown_field_set is None: + message._unknown_field_set = containers.UnknownFieldSet() + message._unknown_field_set._add( + field_number, wire_format.WIRETYPE_VARINT, element) + # pylint: enable=protected-access + # Predict that the next tag is another copy of the same repeated + # field. + pos = new_pos + tag_len + if buffer[new_pos:pos] != tag_bytes or new_pos >= end: + # Prediction failed. Return. + if new_pos > end: + raise _DecodeError('Truncated message.') + return new_pos + return DecodeRepeatedField + else: + def DecodeField(buffer, pos, end, message, field_dict): + """Decode serialized repeated enum to its value and a new position. + + Args: + buffer: memoryview of the serialized bytes. + pos: int, position in the memory view to start at. + end: int, end position of serialized data + message: Message object to store unknown fields in + field_dict: Map[Descriptor, Any] to store decoded values in. + + Returns: + int, new position in serialized data. + """ + value_start_pos = pos + (enum_value, pos) = _DecodeSignedVarint32(buffer, pos) + if pos > end: + raise _DecodeError('Truncated message.') + if clear_if_default and not enum_value: + field_dict.pop(key, None) + return pos + # pylint: disable=protected-access + if enum_value in enum_type.values_by_number: + field_dict[key] = enum_value + else: + if not message._unknown_fields: + message._unknown_fields = [] + tag_bytes = encoder.TagBytes(field_number, + wire_format.WIRETYPE_VARINT) + message._unknown_fields.append( + (tag_bytes, buffer[value_start_pos:pos].tobytes())) + if message._unknown_field_set is None: + message._unknown_field_set = containers.UnknownFieldSet() + message._unknown_field_set._add( + field_number, wire_format.WIRETYPE_VARINT, enum_value) + # pylint: enable=protected-access + return pos + return DecodeField + + +# -------------------------------------------------------------------- + + +Int32Decoder = _SimpleDecoder( + wire_format.WIRETYPE_VARINT, _DecodeSignedVarint32) + +Int64Decoder = _SimpleDecoder( + wire_format.WIRETYPE_VARINT, _DecodeSignedVarint) + +UInt32Decoder = _SimpleDecoder(wire_format.WIRETYPE_VARINT, _DecodeVarint32) +UInt64Decoder = _SimpleDecoder(wire_format.WIRETYPE_VARINT, _DecodeVarint) + +SInt32Decoder = _ModifiedDecoder( + wire_format.WIRETYPE_VARINT, _DecodeVarint32, wire_format.ZigZagDecode) +SInt64Decoder = _ModifiedDecoder( + wire_format.WIRETYPE_VARINT, _DecodeVarint, wire_format.ZigZagDecode) + +# Note that Python conveniently guarantees that when using the '<' prefix on +# formats, they will also have the same size across all platforms (as opposed +# to without the prefix, where their sizes depend on the C compiler's basic +# type sizes). +Fixed32Decoder = _StructPackDecoder(wire_format.WIRETYPE_FIXED32, ' end: + raise _DecodeError('Truncated string.') + value.append(_ConvertToUnicode(buffer[pos:new_pos])) + # Predict that the next tag is another copy of the same repeated field. + pos = new_pos + tag_len + if buffer[new_pos:pos] != tag_bytes or new_pos == end: + # Prediction failed. Return. 
+ return new_pos + return DecodeRepeatedField + else: + def DecodeField(buffer, pos, end, message, field_dict): + (size, pos) = local_DecodeVarint(buffer, pos) + new_pos = pos + size + if new_pos > end: + raise _DecodeError('Truncated string.') + if clear_if_default and not size: + field_dict.pop(key, None) + else: + field_dict[key] = _ConvertToUnicode(buffer[pos:new_pos]) + return new_pos + return DecodeField + + +def BytesDecoder(field_number, is_repeated, is_packed, key, new_default, + clear_if_default=False): + """Returns a decoder for a bytes field.""" + + local_DecodeVarint = _DecodeVarint + + assert not is_packed + if is_repeated: + tag_bytes = encoder.TagBytes(field_number, + wire_format.WIRETYPE_LENGTH_DELIMITED) + tag_len = len(tag_bytes) + def DecodeRepeatedField(buffer, pos, end, message, field_dict): + value = field_dict.get(key) + if value is None: + value = field_dict.setdefault(key, new_default(message)) + while 1: + (size, pos) = local_DecodeVarint(buffer, pos) + new_pos = pos + size + if new_pos > end: + raise _DecodeError('Truncated string.') + value.append(buffer[pos:new_pos].tobytes()) + # Predict that the next tag is another copy of the same repeated field. + pos = new_pos + tag_len + if buffer[new_pos:pos] != tag_bytes or new_pos == end: + # Prediction failed. Return. + return new_pos + return DecodeRepeatedField + else: + def DecodeField(buffer, pos, end, message, field_dict): + (size, pos) = local_DecodeVarint(buffer, pos) + new_pos = pos + size + if new_pos > end: + raise _DecodeError('Truncated string.') + if clear_if_default and not size: + field_dict.pop(key, None) + else: + field_dict[key] = buffer[pos:new_pos].tobytes() + return new_pos + return DecodeField + + +def GroupDecoder(field_number, is_repeated, is_packed, key, new_default): + """Returns a decoder for a group field.""" + + end_tag_bytes = encoder.TagBytes(field_number, + wire_format.WIRETYPE_END_GROUP) + end_tag_len = len(end_tag_bytes) + + assert not is_packed + if is_repeated: + tag_bytes = encoder.TagBytes(field_number, + wire_format.WIRETYPE_START_GROUP) + tag_len = len(tag_bytes) + def DecodeRepeatedField(buffer, pos, end, message, field_dict): + value = field_dict.get(key) + if value is None: + value = field_dict.setdefault(key, new_default(message)) + while 1: + value = field_dict.get(key) + if value is None: + value = field_dict.setdefault(key, new_default(message)) + # Read sub-message. + pos = value.add()._InternalParse(buffer, pos, end) + # Read end tag. + new_pos = pos+end_tag_len + if buffer[pos:new_pos] != end_tag_bytes or new_pos > end: + raise _DecodeError('Missing group end tag.') + # Predict that the next tag is another copy of the same repeated field. + pos = new_pos + tag_len + if buffer[new_pos:pos] != tag_bytes or new_pos == end: + # Prediction failed. Return. + return new_pos + return DecodeRepeatedField + else: + def DecodeField(buffer, pos, end, message, field_dict): + value = field_dict.get(key) + if value is None: + value = field_dict.setdefault(key, new_default(message)) + # Read sub-message. + pos = value._InternalParse(buffer, pos, end) + # Read end tag. 
+ new_pos = pos+end_tag_len + if buffer[pos:new_pos] != end_tag_bytes or new_pos > end: + raise _DecodeError('Missing group end tag.') + return new_pos + return DecodeField + + +def MessageDecoder(field_number, is_repeated, is_packed, key, new_default): + """Returns a decoder for a message field.""" + + local_DecodeVarint = _DecodeVarint + + assert not is_packed + if is_repeated: + tag_bytes = encoder.TagBytes(field_number, + wire_format.WIRETYPE_LENGTH_DELIMITED) + tag_len = len(tag_bytes) + def DecodeRepeatedField(buffer, pos, end, message, field_dict): + value = field_dict.get(key) + if value is None: + value = field_dict.setdefault(key, new_default(message)) + while 1: + # Read length. + (size, pos) = local_DecodeVarint(buffer, pos) + new_pos = pos + size + if new_pos > end: + raise _DecodeError('Truncated message.') + # Read sub-message. + if value.add()._InternalParse(buffer, pos, new_pos) != new_pos: + # The only reason _InternalParse would return early is if it + # encountered an end-group tag. + raise _DecodeError('Unexpected end-group tag.') + # Predict that the next tag is another copy of the same repeated field. + pos = new_pos + tag_len + if buffer[new_pos:pos] != tag_bytes or new_pos == end: + # Prediction failed. Return. + return new_pos + return DecodeRepeatedField + else: + def DecodeField(buffer, pos, end, message, field_dict): + value = field_dict.get(key) + if value is None: + value = field_dict.setdefault(key, new_default(message)) + # Read length. + (size, pos) = local_DecodeVarint(buffer, pos) + new_pos = pos + size + if new_pos > end: + raise _DecodeError('Truncated message.') + # Read sub-message. + if value._InternalParse(buffer, pos, new_pos) != new_pos: + # The only reason _InternalParse would return early is if it encountered + # an end-group tag. + raise _DecodeError('Unexpected end-group tag.') + return new_pos + return DecodeField + + +# -------------------------------------------------------------------- + +MESSAGE_SET_ITEM_TAG = encoder.TagBytes(1, wire_format.WIRETYPE_START_GROUP) + +def MessageSetItemDecoder(descriptor): + """Returns a decoder for a MessageSet item. + + The parameter is the message Descriptor. + + The message set message looks like this: + message MessageSet { + repeated group Item = 1 { + required int32 type_id = 2; + required string message = 3; + } + } + """ + + type_id_tag_bytes = encoder.TagBytes(2, wire_format.WIRETYPE_VARINT) + message_tag_bytes = encoder.TagBytes(3, wire_format.WIRETYPE_LENGTH_DELIMITED) + item_end_tag_bytes = encoder.TagBytes(1, wire_format.WIRETYPE_END_GROUP) + + local_ReadTag = ReadTag + local_DecodeVarint = _DecodeVarint + local_SkipField = SkipField + + def DecodeItem(buffer, pos, end, message, field_dict): + """Decode serialized message set to its value and new position. + + Args: + buffer: memoryview of the serialized bytes. + pos: int, position in the memory view to start at. + end: int, end position of serialized data + message: Message object to store unknown fields in + field_dict: Map[Descriptor, Any] to store decoded values in. + + Returns: + int, new position in serialized data. + """ + message_set_item_start = pos + type_id = -1 + message_start = -1 + message_end = -1 + + # Technically, type_id and message can appear in any order, so we need + # a little loop here. 
+ while 1: + (tag_bytes, pos) = local_ReadTag(buffer, pos) + if tag_bytes == type_id_tag_bytes: + (type_id, pos) = local_DecodeVarint(buffer, pos) + elif tag_bytes == message_tag_bytes: + (size, message_start) = local_DecodeVarint(buffer, pos) + pos = message_end = message_start + size + elif tag_bytes == item_end_tag_bytes: + break + else: + pos = SkipField(buffer, pos, end, tag_bytes) + if pos == -1: + raise _DecodeError('Missing group end tag.') + + if pos > end: + raise _DecodeError('Truncated message.') + + if type_id == -1: + raise _DecodeError('MessageSet item missing type_id.') + if message_start == -1: + raise _DecodeError('MessageSet item missing message.') + + extension = message.Extensions._FindExtensionByNumber(type_id) + # pylint: disable=protected-access + if extension is not None: + value = field_dict.get(extension) + if value is None: + message_type = extension.message_type + if not hasattr(message_type, '_concrete_class'): + # pylint: disable=protected-access + message._FACTORY.GetPrototype(message_type) + value = field_dict.setdefault( + extension, message_type._concrete_class()) + if value._InternalParse(buffer, message_start,message_end) != message_end: + # The only reason _InternalParse would return early is if it encountered + # an end-group tag. + raise _DecodeError('Unexpected end-group tag.') + else: + if not message._unknown_fields: + message._unknown_fields = [] + message._unknown_fields.append( + (MESSAGE_SET_ITEM_TAG, buffer[message_set_item_start:pos].tobytes())) + if message._unknown_field_set is None: + message._unknown_field_set = containers.UnknownFieldSet() + message._unknown_field_set._add( + type_id, + wire_format.WIRETYPE_LENGTH_DELIMITED, + buffer[message_start:message_end].tobytes()) + # pylint: enable=protected-access + + return pos + + return DecodeItem + +# -------------------------------------------------------------------- + +def MapDecoder(field_descriptor, new_default, is_message_map): + """Returns a decoder for a map field.""" + + key = field_descriptor + tag_bytes = encoder.TagBytes(field_descriptor.number, + wire_format.WIRETYPE_LENGTH_DELIMITED) + tag_len = len(tag_bytes) + local_DecodeVarint = _DecodeVarint + # Can't read _concrete_class yet; might not be initialized. + message_type = field_descriptor.message_type + + def DecodeMap(buffer, pos, end, message, field_dict): + submsg = message_type._concrete_class() + value = field_dict.get(key) + if value is None: + value = field_dict.setdefault(key, new_default(message)) + while 1: + # Read length. + (size, pos) = local_DecodeVarint(buffer, pos) + new_pos = pos + size + if new_pos > end: + raise _DecodeError('Truncated message.') + # Read sub-message. + submsg.Clear() + if submsg._InternalParse(buffer, pos, new_pos) != new_pos: + # The only reason _InternalParse would return early is if it + # encountered an end-group tag. + raise _DecodeError('Unexpected end-group tag.') + + if is_message_map: + value[submsg.key].CopyFrom(submsg.value) + else: + value[submsg.key] = submsg.value + + # Predict that the next tag is another copy of the same repeated field. + pos = new_pos + tag_len + if buffer[new_pos:pos] != tag_bytes or new_pos == end: + # Prediction failed. Return. + return new_pos + + return DecodeMap + +# -------------------------------------------------------------------- +# Optimization is not as heavy here because calls to SkipField() are rare, +# except for handling end-group tags. + +def _SkipVarint(buffer, pos, end): + """Skip a varint value. 
Returns the new position."""
+ # Previously ord(buffer[pos]) raised IndexError when pos is out of range.
+ # With this code, ord(b'') raises TypeError. Both are handled in
+ # python_message.py to generate a 'Truncated message' error.
+ while ord(buffer[pos:pos+1].tobytes()) & 0x80:
+ pos += 1
+ pos += 1
+ if pos > end:
+ raise _DecodeError('Truncated message.')
+ return pos
+
+def _SkipFixed64(buffer, pos, end):
+ """Skip a fixed64 value. Returns the new position."""
+
+ pos += 8
+ if pos > end:
+ raise _DecodeError('Truncated message.')
+ return pos
+
+
+def _DecodeFixed64(buffer, pos):
+ """Decode a fixed64."""
+ new_pos = pos + 8
+ return (struct.unpack('<Q', buffer[pos:new_pos])[0], new_pos)
+
+
+def _SkipLengthDelimited(buffer, pos, end):
+ """Skip a length-delimited value. Returns the new position."""
+
+ (size, pos) = _DecodeVarint(buffer, pos)
+ pos += size
+ if pos > end:
+ raise _DecodeError('Truncated message.')
+ return pos
+
+
+def _SkipGroup(buffer, pos, end):
+ """Skip sub-group. Returns the new position."""
+
+ while 1:
+ (tag_bytes, pos) = ReadTag(buffer, pos)
+ new_pos = SkipField(buffer, pos, end, tag_bytes)
+ if new_pos == -1:
+ return pos
+ pos = new_pos
+
+
+def _DecodeUnknownFieldSet(buffer, pos, end_pos=None):
+ """Decode UnknownFieldSet. Returns the UnknownFieldSet and new position."""
+
+ unknown_field_set = containers.UnknownFieldSet()
+ while end_pos is None or pos < end_pos:
+ (tag_bytes, pos) = ReadTag(buffer, pos)
+ (tag, _) = _DecodeVarint(tag_bytes, 0)
+ field_number, wire_type = wire_format.UnpackTag(tag)
+ if wire_type == wire_format.WIRETYPE_END_GROUP:
+ break
+ (data, pos) = _DecodeUnknownField(buffer, pos, wire_type)
+ # pylint: disable=protected-access
+ unknown_field_set._add(field_number, wire_type, data)
+
+ return (unknown_field_set, pos)
+
+
+def _DecodeUnknownField(buffer, pos, wire_type):
+ """Decode a unknown field. Returns the UnknownField and new position."""
+
+ if wire_type == wire_format.WIRETYPE_VARINT:
+ (data, pos) = _DecodeVarint(buffer, pos)
+ elif wire_type == wire_format.WIRETYPE_FIXED64:
+ (data, pos) = _DecodeFixed64(buffer, pos)
+ elif wire_type == wire_format.WIRETYPE_FIXED32:
+ (data, pos) = _DecodeFixed32(buffer, pos)
+ elif wire_type == wire_format.WIRETYPE_LENGTH_DELIMITED:
+ (size, pos) = _DecodeVarint(buffer, pos)
+ data = buffer[pos:pos+size].tobytes()
+ pos += size
+ elif wire_type == wire_format.WIRETYPE_START_GROUP:
+ (data, pos) = _DecodeUnknownFieldSet(buffer, pos)
+ elif wire_type == wire_format.WIRETYPE_END_GROUP:
+ return (0, -1)
+ else:
+ raise _DecodeError('Wrong wire type in tag.')
+
+ return (data, pos)
+
+
+def _EndGroup(buffer, pos, end):
+ """Skipping an END_GROUP tag returns -1 to tell the parent loop to break."""
+
+ return -1
+
+
+def _SkipFixed32(buffer, pos, end):
+ """Skip a fixed32 value.
Returns the new position.""" + + pos += 4 + if pos > end: + raise _DecodeError('Truncated message.') + return pos + + +def _DecodeFixed32(buffer, pos): + """Decode a fixed32.""" + + new_pos = pos + 4 + return (struct.unpack('B').pack + + def EncodeVarint(write, value, unused_deterministic=None): + bits = value & 0x7f + value >>= 7 + while value: + write(local_int2byte(0x80|bits)) + bits = value & 0x7f + value >>= 7 + return write(local_int2byte(bits)) + + return EncodeVarint + + +def _SignedVarintEncoder(): + """Return an encoder for a basic signed varint value (does not include + tag).""" + + local_int2byte = struct.Struct('>B').pack + + def EncodeSignedVarint(write, value, unused_deterministic=None): + if value < 0: + value += (1 << 64) + bits = value & 0x7f + value >>= 7 + while value: + write(local_int2byte(0x80|bits)) + bits = value & 0x7f + value >>= 7 + return write(local_int2byte(bits)) + + return EncodeSignedVarint + + +_EncodeVarint = _VarintEncoder() +_EncodeSignedVarint = _SignedVarintEncoder() + + +def _VarintBytes(value): + """Encode the given integer as a varint and return the bytes. This is only + called at startup time so it doesn't need to be fast.""" + + pieces = [] + _EncodeVarint(pieces.append, value, True) + return b"".join(pieces) + + +def TagBytes(field_number, wire_type): + """Encode the given tag and return the bytes. Only called at startup.""" + + return bytes(_VarintBytes(wire_format.PackTag(field_number, wire_type))) + +# -------------------------------------------------------------------- +# As with sizers (see above), we have a number of common encoder +# implementations. + + +def _SimpleEncoder(wire_type, encode_value, compute_value_size): + """Return a constructor for an encoder for fields of a particular type. + + Args: + wire_type: The field's wire type, for encoding tags. + encode_value: A function which encodes an individual value, e.g. + _EncodeVarint(). + compute_value_size: A function which computes the size of an individual + value, e.g. _VarintSize(). + """ + + def SpecificEncoder(field_number, is_repeated, is_packed): + if is_packed: + tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED) + local_EncodeVarint = _EncodeVarint + def EncodePackedField(write, value, deterministic): + write(tag_bytes) + size = 0 + for element in value: + size += compute_value_size(element) + local_EncodeVarint(write, size, deterministic) + for element in value: + encode_value(write, element, deterministic) + return EncodePackedField + elif is_repeated: + tag_bytes = TagBytes(field_number, wire_type) + def EncodeRepeatedField(write, value, deterministic): + for element in value: + write(tag_bytes) + encode_value(write, element, deterministic) + return EncodeRepeatedField + else: + tag_bytes = TagBytes(field_number, wire_type) + def EncodeField(write, value, deterministic): + write(tag_bytes) + return encode_value(write, value, deterministic) + return EncodeField + + return SpecificEncoder + + +def _ModifiedEncoder(wire_type, encode_value, compute_value_size, modify_value): + """Like SimpleEncoder but additionally invokes modify_value on every value + before passing it to encode_value. 
Usually modify_value is ZigZagEncode.""" + + def SpecificEncoder(field_number, is_repeated, is_packed): + if is_packed: + tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED) + local_EncodeVarint = _EncodeVarint + def EncodePackedField(write, value, deterministic): + write(tag_bytes) + size = 0 + for element in value: + size += compute_value_size(modify_value(element)) + local_EncodeVarint(write, size, deterministic) + for element in value: + encode_value(write, modify_value(element), deterministic) + return EncodePackedField + elif is_repeated: + tag_bytes = TagBytes(field_number, wire_type) + def EncodeRepeatedField(write, value, deterministic): + for element in value: + write(tag_bytes) + encode_value(write, modify_value(element), deterministic) + return EncodeRepeatedField + else: + tag_bytes = TagBytes(field_number, wire_type) + def EncodeField(write, value, deterministic): + write(tag_bytes) + return encode_value(write, modify_value(value), deterministic) + return EncodeField + + return SpecificEncoder + + +def _StructPackEncoder(wire_type, format): + """Return a constructor for an encoder for a fixed-width field. + + Args: + wire_type: The field's wire type, for encoding tags. + format: The format string to pass to struct.pack(). + """ + + value_size = struct.calcsize(format) + + def SpecificEncoder(field_number, is_repeated, is_packed): + local_struct_pack = struct.pack + if is_packed: + tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED) + local_EncodeVarint = _EncodeVarint + def EncodePackedField(write, value, deterministic): + write(tag_bytes) + local_EncodeVarint(write, len(value) * value_size, deterministic) + for element in value: + write(local_struct_pack(format, element)) + return EncodePackedField + elif is_repeated: + tag_bytes = TagBytes(field_number, wire_type) + def EncodeRepeatedField(write, value, unused_deterministic=None): + for element in value: + write(tag_bytes) + write(local_struct_pack(format, element)) + return EncodeRepeatedField + else: + tag_bytes = TagBytes(field_number, wire_type) + def EncodeField(write, value, unused_deterministic=None): + write(tag_bytes) + return write(local_struct_pack(format, value)) + return EncodeField + + return SpecificEncoder + + +def _FloatingPointEncoder(wire_type, format): + """Return a constructor for an encoder for float fields. + + This is like StructPackEncoder, but catches errors that may be due to + passing non-finite floating-point values to struct.pack, and makes a + second attempt to encode those values. + + Args: + wire_type: The field's wire type, for encoding tags. + format: The format string to pass to struct.pack(). + """ + + value_size = struct.calcsize(format) + if value_size == 4: + def EncodeNonFiniteOrRaise(write, value): + # Remember that the serialized form uses little-endian byte order. 
+ if value == _POS_INF: + write(b'\x00\x00\x80\x7F') + elif value == _NEG_INF: + write(b'\x00\x00\x80\xFF') + elif value != value: # NaN + write(b'\x00\x00\xC0\x7F') + else: + raise + elif value_size == 8: + def EncodeNonFiniteOrRaise(write, value): + if value == _POS_INF: + write(b'\x00\x00\x00\x00\x00\x00\xF0\x7F') + elif value == _NEG_INF: + write(b'\x00\x00\x00\x00\x00\x00\xF0\xFF') + elif value != value: # NaN + write(b'\x00\x00\x00\x00\x00\x00\xF8\x7F') + else: + raise + else: + raise ValueError('Can\'t encode floating-point values that are ' + '%d bytes long (only 4 or 8)' % value_size) + + def SpecificEncoder(field_number, is_repeated, is_packed): + local_struct_pack = struct.pack + if is_packed: + tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED) + local_EncodeVarint = _EncodeVarint + def EncodePackedField(write, value, deterministic): + write(tag_bytes) + local_EncodeVarint(write, len(value) * value_size, deterministic) + for element in value: + # This try/except block is going to be faster than any code that + # we could write to check whether element is finite. + try: + write(local_struct_pack(format, element)) + except SystemError: + EncodeNonFiniteOrRaise(write, element) + return EncodePackedField + elif is_repeated: + tag_bytes = TagBytes(field_number, wire_type) + def EncodeRepeatedField(write, value, unused_deterministic=None): + for element in value: + write(tag_bytes) + try: + write(local_struct_pack(format, element)) + except SystemError: + EncodeNonFiniteOrRaise(write, element) + return EncodeRepeatedField + else: + tag_bytes = TagBytes(field_number, wire_type) + def EncodeField(write, value, unused_deterministic=None): + write(tag_bytes) + try: + write(local_struct_pack(format, value)) + except SystemError: + EncodeNonFiniteOrRaise(write, value) + return EncodeField + + return SpecificEncoder + + +# ==================================================================== +# Here we declare an encoder constructor for each field type. These work +# very similarly to sizer constructors, described earlier. + + +Int32Encoder = Int64Encoder = EnumEncoder = _SimpleEncoder( + wire_format.WIRETYPE_VARINT, _EncodeSignedVarint, _SignedVarintSize) + +UInt32Encoder = UInt64Encoder = _SimpleEncoder( + wire_format.WIRETYPE_VARINT, _EncodeVarint, _VarintSize) + +SInt32Encoder = SInt64Encoder = _ModifiedEncoder( + wire_format.WIRETYPE_VARINT, _EncodeVarint, _VarintSize, + wire_format.ZigZagEncode) + +# Note that Python conveniently guarantees that when using the '<' prefix on +# formats, they will also have the same size across all platforms (as opposed +# to without the prefix, where their sizes depend on the C compiler's basic +# type sizes). +Fixed32Encoder = _StructPackEncoder(wire_format.WIRETYPE_FIXED32, ' str + ValueType = int + + def __init__(self, enum_type): + """Inits EnumTypeWrapper with an EnumDescriptor.""" + self._enum_type = enum_type + self.DESCRIPTOR = enum_type # pylint: disable=invalid-name + + def Name(self, number): # pylint: disable=invalid-name + """Returns a string containing the name of an enum value.""" + try: + return self._enum_type.values_by_number[number].name + except KeyError: + pass # fall out to break exception chaining + + if not isinstance(number, int): + raise TypeError( + 'Enum value for {} must be an int, but got {} {!r}.'.format( + self._enum_type.name, type(number), number)) + else: + # repr here to handle the odd case when you pass in a boolean. 
+ raise ValueError('Enum {} has no name defined for value {!r}'.format( + self._enum_type.name, number)) + + def Value(self, name): # pylint: disable=invalid-name + """Returns the value corresponding to the given enum name.""" + try: + return self._enum_type.values_by_name[name].number + except KeyError: + pass # fall out to break exception chaining + raise ValueError('Enum {} has no value defined for name {!r}'.format( + self._enum_type.name, name)) + + def keys(self): + """Return a list of the string names in the enum. + + Returns: + A list of strs, in the order they were defined in the .proto file. + """ + + return [value_descriptor.name + for value_descriptor in self._enum_type.values] + + def values(self): + """Return a list of the integer values in the enum. + + Returns: + A list of ints, in the order they were defined in the .proto file. + """ + + return [value_descriptor.number + for value_descriptor in self._enum_type.values] + + def items(self): + """Return a list of the (name, value) pairs of the enum. + + Returns: + A list of (str, int) pairs, in the order they were defined + in the .proto file. + """ + return [(value_descriptor.name, value_descriptor.number) + for value_descriptor in self._enum_type.values] + + def __getattr__(self, name): + """Returns the value corresponding to the given enum name.""" + try: + return super( + EnumTypeWrapper, + self).__getattribute__('_enum_type').values_by_name[name].number + except KeyError: + pass # fall out to break exception chaining + raise AttributeError('Enum {} has no value defined for name {!r}'.format( + self._enum_type.name, name)) diff --git a/openpype/hosts/hiero/vendor/google/protobuf/internal/extension_dict.py b/openpype/hosts/hiero/vendor/google/protobuf/internal/extension_dict.py new file mode 100644 index 0000000000..b346cf283e --- /dev/null +++ b/openpype/hosts/hiero/vendor/google/protobuf/internal/extension_dict.py @@ -0,0 +1,213 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. +# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +"""Contains _ExtensionDict class to represent extensions. +""" + +from google.protobuf.internal import type_checkers +from google.protobuf.descriptor import FieldDescriptor + + +def _VerifyExtensionHandle(message, extension_handle): + """Verify that the given extension handle is valid.""" + + if not isinstance(extension_handle, FieldDescriptor): + raise KeyError('HasExtension() expects an extension handle, got: %s' % + extension_handle) + + if not extension_handle.is_extension: + raise KeyError('"%s" is not an extension.' % extension_handle.full_name) + + if not extension_handle.containing_type: + raise KeyError('"%s" is missing a containing_type.' + % extension_handle.full_name) + + if extension_handle.containing_type is not message.DESCRIPTOR: + raise KeyError('Extension "%s" extends message type "%s", but this ' + 'message is of type "%s".' % + (extension_handle.full_name, + extension_handle.containing_type.full_name, + message.DESCRIPTOR.full_name)) + + +# TODO(robinson): Unify error handling of "unknown extension" crap. +# TODO(robinson): Support iteritems()-style iteration over all +# extensions with the "has" bits turned on? +class _ExtensionDict(object): + + """Dict-like container for Extension fields on proto instances. + + Note that in all cases we expect extension handles to be + FieldDescriptors. + """ + + def __init__(self, extended_message): + """ + Args: + extended_message: Message instance for which we are the Extensions dict. + """ + self._extended_message = extended_message + + def __getitem__(self, extension_handle): + """Returns the current value of the given extension handle.""" + + _VerifyExtensionHandle(self._extended_message, extension_handle) + + result = self._extended_message._fields.get(extension_handle) + if result is not None: + return result + + if extension_handle.label == FieldDescriptor.LABEL_REPEATED: + result = extension_handle._default_constructor(self._extended_message) + elif extension_handle.cpp_type == FieldDescriptor.CPPTYPE_MESSAGE: + message_type = extension_handle.message_type + if not hasattr(message_type, '_concrete_class'): + # pylint: disable=protected-access + self._extended_message._FACTORY.GetPrototype(message_type) + assert getattr(extension_handle.message_type, '_concrete_class', None), ( + 'Uninitialized concrete class found for field %r (message type %r)' + % (extension_handle.full_name, + extension_handle.message_type.full_name)) + result = extension_handle.message_type._concrete_class() + try: + result._SetListener(self._extended_message._listener_for_children) + except ReferenceError: + pass + else: + # Singular scalar -- just return the default without inserting into the + # dict. + return extension_handle.default_value + + # Atomically check if another thread has preempted us and, if not, swap + # in the new object we just created. If someone has preempted us, we + # take that object and discard ours. + # WARNING: We are relying on setdefault() being atomic. 
This is true + # in CPython but we haven't investigated others. This warning appears + # in several other locations in this file. + result = self._extended_message._fields.setdefault( + extension_handle, result) + + return result + + def __eq__(self, other): + if not isinstance(other, self.__class__): + return False + + my_fields = self._extended_message.ListFields() + other_fields = other._extended_message.ListFields() + + # Get rid of non-extension fields. + my_fields = [field for field in my_fields if field.is_extension] + other_fields = [field for field in other_fields if field.is_extension] + + return my_fields == other_fields + + def __ne__(self, other): + return not self == other + + def __len__(self): + fields = self._extended_message.ListFields() + # Get rid of non-extension fields. + extension_fields = [field for field in fields if field[0].is_extension] + return len(extension_fields) + + def __hash__(self): + raise TypeError('unhashable object') + + # Note that this is only meaningful for non-repeated, scalar extension + # fields. Note also that we may have to call _Modified() when we do + # successfully set a field this way, to set any necessary "has" bits in the + # ancestors of the extended message. + def __setitem__(self, extension_handle, value): + """If extension_handle specifies a non-repeated, scalar extension + field, sets the value of that field. + """ + + _VerifyExtensionHandle(self._extended_message, extension_handle) + + if (extension_handle.label == FieldDescriptor.LABEL_REPEATED or + extension_handle.cpp_type == FieldDescriptor.CPPTYPE_MESSAGE): + raise TypeError( + 'Cannot assign to extension "%s" because it is a repeated or ' + 'composite type.' % extension_handle.full_name) + + # It's slightly wasteful to lookup the type checker each time, + # but we expect this to be a vanishingly uncommon case anyway. + type_checker = type_checkers.GetTypeChecker(extension_handle) + # pylint: disable=protected-access + self._extended_message._fields[extension_handle] = ( + type_checker.CheckValue(value)) + self._extended_message._Modified() + + def __delitem__(self, extension_handle): + self._extended_message.ClearExtension(extension_handle) + + def _FindExtensionByName(self, name): + """Tries to find a known extension with the specified name. + + Args: + name: Extension full name. + + Returns: + Extension field descriptor. + """ + return self._extended_message._extensions_by_name.get(name, None) + + def _FindExtensionByNumber(self, number): + """Tries to find a known extension with the field number. + + Args: + number: Extension field number. + + Returns: + Extension field descriptor. 
+ """ + return self._extended_message._extensions_by_number.get(number, None) + + def __iter__(self): + # Return a generator over the populated extension fields + return (f[0] for f in self._extended_message.ListFields() + if f[0].is_extension) + + def __contains__(self, extension_handle): + _VerifyExtensionHandle(self._extended_message, extension_handle) + + if extension_handle not in self._extended_message._fields: + return False + + if extension_handle.label == FieldDescriptor.LABEL_REPEATED: + return bool(self._extended_message._fields.get(extension_handle)) + + if extension_handle.cpp_type == FieldDescriptor.CPPTYPE_MESSAGE: + value = self._extended_message._fields.get(extension_handle) + # pylint: disable=protected-access + return value is not None and value._is_present_in_parent + + return True diff --git a/openpype/hosts/hiero/vendor/google/protobuf/internal/message_listener.py b/openpype/hosts/hiero/vendor/google/protobuf/internal/message_listener.py new file mode 100644 index 0000000000..0fc255a774 --- /dev/null +++ b/openpype/hosts/hiero/vendor/google/protobuf/internal/message_listener.py @@ -0,0 +1,78 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. +# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +"""Defines a listener interface for observing certain +state transitions on Message objects. + +Also defines a null implementation of this interface. +""" + +__author__ = 'robinson@google.com (Will Robinson)' + + +class MessageListener(object): + + """Listens for modifications made to a message. Meant to be registered via + Message._SetListener(). + + Attributes: + dirty: If True, then calling Modified() would be a no-op. This can be + used to avoid these calls entirely in the common case. + """ + + def Modified(self): + """Called every time the message is modified in such a way that the parent + message may need to be updated. 
This currently means either: + (a) The message was modified for the first time, so the parent message + should henceforth mark the message as present. + (b) The message's cached byte size became dirty -- i.e. the message was + modified for the first time after a previous call to ByteSize(). + Therefore the parent should also mark its byte size as dirty. + Note that (a) implies (b), since new objects start out with a client cached + size (zero). However, we document (a) explicitly because it is important. + + Modified() will *only* be called in response to one of these two events -- + not every time the sub-message is modified. + + Note that if the listener's |dirty| attribute is true, then calling + Modified at the moment would be a no-op, so it can be skipped. Performance- + sensitive callers should check this attribute directly before calling since + it will be true most of the time. + """ + + raise NotImplementedError + + +class NullMessageListener(object): + + """No-op MessageListener implementation.""" + + def Modified(self): + pass diff --git a/openpype/hosts/hiero/vendor/google/protobuf/internal/message_set_extensions_pb2.py b/openpype/hosts/hiero/vendor/google/protobuf/internal/message_set_extensions_pb2.py new file mode 100644 index 0000000000..63651a3f19 --- /dev/null +++ b/openpype/hosts/hiero/vendor/google/protobuf/internal/message_set_extensions_pb2.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: google/protobuf/internal/message_set_extensions.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n5google/protobuf/internal/message_set_extensions.proto\x12\x18google.protobuf.internal\"\x1e\n\x0eTestMessageSet*\x08\x08\x04\x10\xff\xff\xff\xff\x07:\x02\x08\x01\"\xa5\x01\n\x18TestMessageSetExtension1\x12\t\n\x01i\x18\x0f \x01(\x05\x32~\n\x15message_set_extension\x12(.google.protobuf.internal.TestMessageSet\x18\xab\xff\xf6. \x01(\x0b\x32\x32.google.protobuf.internal.TestMessageSetExtension1\"\xa7\x01\n\x18TestMessageSetExtension2\x12\x0b\n\x03str\x18\x19 \x01(\t2~\n\x15message_set_extension\x12(.google.protobuf.internal.TestMessageSet\x18\xca\xff\xf6. \x01(\x0b\x32\x32.google.protobuf.internal.TestMessageSetExtension2\"(\n\x18TestMessageSetExtension3\x12\x0c\n\x04text\x18# \x01(\t:\x7f\n\x16message_set_extension3\x12(.google.protobuf.internal.TestMessageSet\x18\xdf\xff\xf6. 
\x01(\x0b\x32\x32.google.protobuf.internal.TestMessageSetExtension3') + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.internal.message_set_extensions_pb2', globals()) +if _descriptor._USE_C_DESCRIPTORS == False: + TestMessageSet.RegisterExtension(message_set_extension3) + TestMessageSet.RegisterExtension(_TESTMESSAGESETEXTENSION1.extensions_by_name['message_set_extension']) + TestMessageSet.RegisterExtension(_TESTMESSAGESETEXTENSION2.extensions_by_name['message_set_extension']) + + DESCRIPTOR._options = None + _TESTMESSAGESET._options = None + _TESTMESSAGESET._serialized_options = b'\010\001' + _TESTMESSAGESET._serialized_start=83 + _TESTMESSAGESET._serialized_end=113 + _TESTMESSAGESETEXTENSION1._serialized_start=116 + _TESTMESSAGESETEXTENSION1._serialized_end=281 + _TESTMESSAGESETEXTENSION2._serialized_start=284 + _TESTMESSAGESETEXTENSION2._serialized_end=451 + _TESTMESSAGESETEXTENSION3._serialized_start=453 + _TESTMESSAGESETEXTENSION3._serialized_end=493 +# @@protoc_insertion_point(module_scope) diff --git a/openpype/hosts/hiero/vendor/google/protobuf/internal/missing_enum_values_pb2.py b/openpype/hosts/hiero/vendor/google/protobuf/internal/missing_enum_values_pb2.py new file mode 100644 index 0000000000..5497083197 --- /dev/null +++ b/openpype/hosts/hiero/vendor/google/protobuf/internal/missing_enum_values_pb2.py @@ -0,0 +1,37 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: google/protobuf/internal/missing_enum_values.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n2google/protobuf/internal/missing_enum_values.proto\x12\x1fgoogle.protobuf.python.internal\"\xc1\x02\n\x0eTestEnumValues\x12X\n\x14optional_nested_enum\x18\x01 \x01(\x0e\x32:.google.protobuf.python.internal.TestEnumValues.NestedEnum\x12X\n\x14repeated_nested_enum\x18\x02 \x03(\x0e\x32:.google.protobuf.python.internal.TestEnumValues.NestedEnum\x12Z\n\x12packed_nested_enum\x18\x03 \x03(\x0e\x32:.google.protobuf.python.internal.TestEnumValues.NestedEnumB\x02\x10\x01\"\x1f\n\nNestedEnum\x12\x08\n\x04ZERO\x10\x00\x12\x07\n\x03ONE\x10\x01\"\xd3\x02\n\x15TestMissingEnumValues\x12_\n\x14optional_nested_enum\x18\x01 \x01(\x0e\x32\x41.google.protobuf.python.internal.TestMissingEnumValues.NestedEnum\x12_\n\x14repeated_nested_enum\x18\x02 \x03(\x0e\x32\x41.google.protobuf.python.internal.TestMissingEnumValues.NestedEnum\x12\x61\n\x12packed_nested_enum\x18\x03 \x03(\x0e\x32\x41.google.protobuf.python.internal.TestMissingEnumValues.NestedEnumB\x02\x10\x01\"\x15\n\nNestedEnum\x12\x07\n\x03TWO\x10\x02\"\x1b\n\nJustString\x12\r\n\x05\x64ummy\x18\x01 \x02(\t') + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.internal.missing_enum_values_pb2', globals()) +if _descriptor._USE_C_DESCRIPTORS == False: + + DESCRIPTOR._options = None + _TESTENUMVALUES.fields_by_name['packed_nested_enum']._options = None + _TESTENUMVALUES.fields_by_name['packed_nested_enum']._serialized_options = b'\020\001' + 
_TESTMISSINGENUMVALUES.fields_by_name['packed_nested_enum']._options = None + _TESTMISSINGENUMVALUES.fields_by_name['packed_nested_enum']._serialized_options = b'\020\001' + _TESTENUMVALUES._serialized_start=88 + _TESTENUMVALUES._serialized_end=409 + _TESTENUMVALUES_NESTEDENUM._serialized_start=378 + _TESTENUMVALUES_NESTEDENUM._serialized_end=409 + _TESTMISSINGENUMVALUES._serialized_start=412 + _TESTMISSINGENUMVALUES._serialized_end=751 + _TESTMISSINGENUMVALUES_NESTEDENUM._serialized_start=730 + _TESTMISSINGENUMVALUES_NESTEDENUM._serialized_end=751 + _JUSTSTRING._serialized_start=753 + _JUSTSTRING._serialized_end=780 +# @@protoc_insertion_point(module_scope) diff --git a/openpype/hosts/hiero/vendor/google/protobuf/internal/more_extensions_dynamic_pb2.py b/openpype/hosts/hiero/vendor/google/protobuf/internal/more_extensions_dynamic_pb2.py new file mode 100644 index 0000000000..0953706bac --- /dev/null +++ b/openpype/hosts/hiero/vendor/google/protobuf/internal/more_extensions_dynamic_pb2.py @@ -0,0 +1,29 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: google/protobuf/internal/more_extensions_dynamic.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.protobuf.internal import more_extensions_pb2 as google_dot_protobuf_dot_internal_dot_more__extensions__pb2 + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n6google/protobuf/internal/more_extensions_dynamic.proto\x12\x18google.protobuf.internal\x1a.google/protobuf/internal/more_extensions.proto\"\x1f\n\x12\x44ynamicMessageType\x12\t\n\x01\x61\x18\x01 \x01(\x05:J\n\x17\x64ynamic_int32_extension\x12).google.protobuf.internal.ExtendedMessage\x18\x64 \x01(\x05:z\n\x19\x64ynamic_message_extension\x12).google.protobuf.internal.ExtendedMessage\x18\x65 \x01(\x0b\x32,.google.protobuf.internal.DynamicMessageType:\x83\x01\n\"repeated_dynamic_message_extension\x12).google.protobuf.internal.ExtendedMessage\x18\x66 \x03(\x0b\x32,.google.protobuf.internal.DynamicMessageType') + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.internal.more_extensions_dynamic_pb2', globals()) +if _descriptor._USE_C_DESCRIPTORS == False: + google_dot_protobuf_dot_internal_dot_more__extensions__pb2.ExtendedMessage.RegisterExtension(dynamic_int32_extension) + google_dot_protobuf_dot_internal_dot_more__extensions__pb2.ExtendedMessage.RegisterExtension(dynamic_message_extension) + google_dot_protobuf_dot_internal_dot_more__extensions__pb2.ExtendedMessage.RegisterExtension(repeated_dynamic_message_extension) + + DESCRIPTOR._options = None + _DYNAMICMESSAGETYPE._serialized_start=132 + _DYNAMICMESSAGETYPE._serialized_end=163 +# @@protoc_insertion_point(module_scope) diff --git a/openpype/hosts/hiero/vendor/google/protobuf/internal/more_extensions_pb2.py b/openpype/hosts/hiero/vendor/google/protobuf/internal/more_extensions_pb2.py new file mode 100644 index 0000000000..1cfa1b7c8b --- /dev/null +++ b/openpype/hosts/hiero/vendor/google/protobuf/internal/more_extensions_pb2.py @@ -0,0 +1,41 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: google/protobuf/internal/more_extensions.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n.google/protobuf/internal/more_extensions.proto\x12\x18google.protobuf.internal\"\x99\x01\n\x0fTopLevelMessage\x12\x41\n\nsubmessage\x18\x01 \x01(\x0b\x32).google.protobuf.internal.ExtendedMessageB\x02(\x01\x12\x43\n\x0enested_message\x18\x02 \x01(\x0b\x32\'.google.protobuf.internal.NestedMessageB\x02(\x01\"R\n\rNestedMessage\x12\x41\n\nsubmessage\x18\x01 \x01(\x0b\x32).google.protobuf.internal.ExtendedMessageB\x02(\x01\"K\n\x0f\x45xtendedMessage\x12\x17\n\x0eoptional_int32\x18\xe9\x07 \x01(\x05\x12\x18\n\x0frepeated_string\x18\xea\x07 \x03(\t*\x05\x08\x01\x10\xe8\x07\"-\n\x0e\x46oreignMessage\x12\x1b\n\x13\x66oreign_message_int\x18\x01 \x01(\x05:I\n\x16optional_int_extension\x12).google.protobuf.internal.ExtendedMessage\x18\x01 \x01(\x05:w\n\x1aoptional_message_extension\x12).google.protobuf.internal.ExtendedMessage\x18\x02 \x01(\x0b\x32(.google.protobuf.internal.ForeignMessage:I\n\x16repeated_int_extension\x12).google.protobuf.internal.ExtendedMessage\x18\x03 \x03(\x05:w\n\x1arepeated_message_extension\x12).google.protobuf.internal.ExtendedMessage\x18\x04 \x03(\x0b\x32(.google.protobuf.internal.ForeignMessage') + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.internal.more_extensions_pb2', globals()) +if _descriptor._USE_C_DESCRIPTORS == False: + ExtendedMessage.RegisterExtension(optional_int_extension) + ExtendedMessage.RegisterExtension(optional_message_extension) + ExtendedMessage.RegisterExtension(repeated_int_extension) + ExtendedMessage.RegisterExtension(repeated_message_extension) + + DESCRIPTOR._options = None + _TOPLEVELMESSAGE.fields_by_name['submessage']._options = None + _TOPLEVELMESSAGE.fields_by_name['submessage']._serialized_options = b'(\001' + _TOPLEVELMESSAGE.fields_by_name['nested_message']._options = None + _TOPLEVELMESSAGE.fields_by_name['nested_message']._serialized_options = b'(\001' + _NESTEDMESSAGE.fields_by_name['submessage']._options = None + _NESTEDMESSAGE.fields_by_name['submessage']._serialized_options = b'(\001' + _TOPLEVELMESSAGE._serialized_start=77 + _TOPLEVELMESSAGE._serialized_end=230 + _NESTEDMESSAGE._serialized_start=232 + _NESTEDMESSAGE._serialized_end=314 + _EXTENDEDMESSAGE._serialized_start=316 + _EXTENDEDMESSAGE._serialized_end=391 + _FOREIGNMESSAGE._serialized_start=393 + _FOREIGNMESSAGE._serialized_end=438 +# @@protoc_insertion_point(module_scope) diff --git a/openpype/hosts/hiero/vendor/google/protobuf/internal/more_messages_pb2.py b/openpype/hosts/hiero/vendor/google/protobuf/internal/more_messages_pb2.py new file mode 100644 index 0000000000..d7f7115609 --- /dev/null +++ b/openpype/hosts/hiero/vendor/google/protobuf/internal/more_messages_pb2.py @@ -0,0 +1,556 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: google/protobuf/internal/more_messages.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n,google/protobuf/internal/more_messages.proto\x12\x18google.protobuf.internal\"h\n\x10OutOfOrderFields\x12\x17\n\x0foptional_sint32\x18\x05 \x01(\x11\x12\x17\n\x0foptional_uint32\x18\x03 \x01(\r\x12\x16\n\x0eoptional_int32\x18\x01 \x01(\x05*\x04\x08\x04\x10\x05*\x04\x08\x02\x10\x03\"\xcd\x02\n\x05\x63lass\x12\x1b\n\tint_field\x18\x01 \x01(\x05R\x08json_int\x12\n\n\x02if\x18\x02 \x01(\x05\x12(\n\x02\x61s\x18\x03 \x01(\x0e\x32\x1c.google.protobuf.internal.is\x12\x30\n\nenum_field\x18\x04 \x01(\x0e\x32\x1c.google.protobuf.internal.is\x12>\n\x11nested_enum_field\x18\x05 \x01(\x0e\x32#.google.protobuf.internal.class.for\x12;\n\x0enested_message\x18\x06 \x01(\x0b\x32#.google.protobuf.internal.class.try\x1a\x1c\n\x03try\x12\r\n\x05\x66ield\x18\x01 \x01(\x05*\x06\x08\xe7\x07\x10\x90N\"\x1c\n\x03\x66or\x12\x0b\n\x07\x64\x65\x66\x61ult\x10\x00\x12\x08\n\x04True\x10\x01*\x06\x08\xe7\x07\x10\x90N\"?\n\x0b\x45xtendClass20\n\x06return\x12\x1f.google.protobuf.internal.class\x18\xea\x07 \x01(\x05\"~\n\x0fTestFullKeyword\x12:\n\x06\x66ield1\x18\x01 \x01(\x0b\x32*.google.protobuf.internal.OutOfOrderFields\x12/\n\x06\x66ield2\x18\x02 \x01(\x0b\x32\x1f.google.protobuf.internal.class\"\xa5\x0f\n\x11LotsNestedMessage\x1a\x04\n\x02\x42\x30\x1a\x04\n\x02\x42\x31\x1a\x04\n\x02\x42\x32\x1a\x04\n\x02\x42\x33\x1a\x04\n\x02\x42\x34\x1a\x04\n\x02\x42\x35\x1a\x04\n\x02\x42\x36\x1a\x04\n\x02\x42\x37\x1a\x04\n\x02\x42\x38\x1a\x04\n\x02\x42\x39\x1a\x05\n\x03\x42\x31\x30\x1a\x05\n\x03\x42\x31\x31\x1a\x05\n\x03\x42\x31\x32\x1a\x05\n\x03\x42\x31\x33\x1a\x05\n\x03\x42\x31\x34\x1a\x05\n\x03\x42\x31\x35\x1a\x05\n\x03\x42\x31\x36\x1a\x05\n\x03\x42\x31\x37\x1a\x05\n\x03\x42\x31\x38\x1a\x05\n\x03\x42\x31\x39\x1a\x05\n\x03\x42\x32\x30\x1a\x05\n\x03\x42\x32\x31\x1a\x05\n\x03\x42\x32\x32\x1a\x05\n\x03\x42\x32\x33\x1a\x05\n\x03\x42\x32\x34\x1a\x05\n\x03\x42\x32\x35\x1a\x05\n\x03\x42\x32\x36\x1a\x05\n\x03\x42\x32\x37\x1a\x05\n\x03\x42\x32\x38\x1a\x05\n\x03\x42\x32\x39\x1a\x05\n\x03\x42\x33\x30\x1a\x05\n\x03\x42\x33\x31\x1a\x05\n\x03\x42\x33\x32\x1a\x05\n\x03\x42\x33\x33\x1a\x05\n\x03\x42\x33\x34\x1a\x05\n\x03\x42\x33\x35\x1a\x05\n\x03\x42\x33\x36\x1a\x05\n\x03\x42\x33\x37\x1a\x05\n\x03\x42\x33\x38\x1a\x05\n\x03\x42\x33\x39\x1a\x05\n\x03\x42\x34\x30\x1a\x05\n\x03\x42\x34\x31\x1a\x05\n\x03\x42\x34\x32\x1a\x05\n\x03\x42\x34\x33\x1a\x05\n\x03\x42\x34\x34\x1a\x05\n\x03\x42\x34\x35\x1a\x05\n\x03\x42\x34\x36\x1a\x05\n\x03\x42\x34\x37\x1a\x05\n\x03\x42\x34\x38\x1a\x05\n\x03\x42\x34\x39\x1a\x05\n\x03\x42\x35\x30\x1a\x05\n\x03\x42\x35\x31\x1a\x05\n\x03\x42\x35\x32\x1a\x05\n\x03\x42\x35\x33\x1a\x05\n\x03\x42\x35\x34\x1a\x05\n\x03\x42\x35\x35\x1a\x05\n\x03\x42\x35\x36\x1a\x05\n\x03\x42\x35\x37\x1a\x05\n\x03\x42\x35\x38\x1a\x05\n\x03\x42\x35\x39\x1a\x05\n\x03\x42\x36\x30\x1a\x05\n\x03\x42\x36\x31\x1a\x05\n\x03\x42\x36\x32\x1a\x05\n\x03\x42\x36\x33\x1a\x05\n\x03\x42\x36\x34\x1a\x05\n\x03\x42\x36\x35\x1a\x05\n\x03\x42\x36\x36\x1a\x05\n\x03\x42\x36\x37\x1a\x05\n\x03\x42\x36\x38\x1a\x05\n\x03\x42\x36\x39\x1a\x05\n\x03\x42\x37\x30\x1a\x05\n\x03\x42\x37\x31\x1a\x05\n\x03\x42\x37\
x32\x1a\x05\n\x03\x42\x37\x33\x1a\x05\n\x03\x42\x37\x34\x1a\x05\n\x03\x42\x37\x35\x1a\x05\n\x03\x42\x37\x36\x1a\x05\n\x03\x42\x37\x37\x1a\x05\n\x03\x42\x37\x38\x1a\x05\n\x03\x42\x37\x39\x1a\x05\n\x03\x42\x38\x30\x1a\x05\n\x03\x42\x38\x31\x1a\x05\n\x03\x42\x38\x32\x1a\x05\n\x03\x42\x38\x33\x1a\x05\n\x03\x42\x38\x34\x1a\x05\n\x03\x42\x38\x35\x1a\x05\n\x03\x42\x38\x36\x1a\x05\n\x03\x42\x38\x37\x1a\x05\n\x03\x42\x38\x38\x1a\x05\n\x03\x42\x38\x39\x1a\x05\n\x03\x42\x39\x30\x1a\x05\n\x03\x42\x39\x31\x1a\x05\n\x03\x42\x39\x32\x1a\x05\n\x03\x42\x39\x33\x1a\x05\n\x03\x42\x39\x34\x1a\x05\n\x03\x42\x39\x35\x1a\x05\n\x03\x42\x39\x36\x1a\x05\n\x03\x42\x39\x37\x1a\x05\n\x03\x42\x39\x38\x1a\x05\n\x03\x42\x39\x39\x1a\x06\n\x04\x42\x31\x30\x30\x1a\x06\n\x04\x42\x31\x30\x31\x1a\x06\n\x04\x42\x31\x30\x32\x1a\x06\n\x04\x42\x31\x30\x33\x1a\x06\n\x04\x42\x31\x30\x34\x1a\x06\n\x04\x42\x31\x30\x35\x1a\x06\n\x04\x42\x31\x30\x36\x1a\x06\n\x04\x42\x31\x30\x37\x1a\x06\n\x04\x42\x31\x30\x38\x1a\x06\n\x04\x42\x31\x30\x39\x1a\x06\n\x04\x42\x31\x31\x30\x1a\x06\n\x04\x42\x31\x31\x31\x1a\x06\n\x04\x42\x31\x31\x32\x1a\x06\n\x04\x42\x31\x31\x33\x1a\x06\n\x04\x42\x31\x31\x34\x1a\x06\n\x04\x42\x31\x31\x35\x1a\x06\n\x04\x42\x31\x31\x36\x1a\x06\n\x04\x42\x31\x31\x37\x1a\x06\n\x04\x42\x31\x31\x38\x1a\x06\n\x04\x42\x31\x31\x39\x1a\x06\n\x04\x42\x31\x32\x30\x1a\x06\n\x04\x42\x31\x32\x31\x1a\x06\n\x04\x42\x31\x32\x32\x1a\x06\n\x04\x42\x31\x32\x33\x1a\x06\n\x04\x42\x31\x32\x34\x1a\x06\n\x04\x42\x31\x32\x35\x1a\x06\n\x04\x42\x31\x32\x36\x1a\x06\n\x04\x42\x31\x32\x37\x1a\x06\n\x04\x42\x31\x32\x38\x1a\x06\n\x04\x42\x31\x32\x39\x1a\x06\n\x04\x42\x31\x33\x30\x1a\x06\n\x04\x42\x31\x33\x31\x1a\x06\n\x04\x42\x31\x33\x32\x1a\x06\n\x04\x42\x31\x33\x33\x1a\x06\n\x04\x42\x31\x33\x34\x1a\x06\n\x04\x42\x31\x33\x35\x1a\x06\n\x04\x42\x31\x33\x36\x1a\x06\n\x04\x42\x31\x33\x37\x1a\x06\n\x04\x42\x31\x33\x38\x1a\x06\n\x04\x42\x31\x33\x39\x1a\x06\n\x04\x42\x31\x34\x30\x1a\x06\n\x04\x42\x31\x34\x31\x1a\x06\n\x04\x42\x31\x34\x32\x1a\x06\n\x04\x42\x31\x34\x33\x1a\x06\n\x04\x42\x31\x34\x34\x1a\x06\n\x04\x42\x31\x34\x35\x1a\x06\n\x04\x42\x31\x34\x36\x1a\x06\n\x04\x42\x31\x34\x37\x1a\x06\n\x04\x42\x31\x34\x38\x1a\x06\n\x04\x42\x31\x34\x39\x1a\x06\n\x04\x42\x31\x35\x30\x1a\x06\n\x04\x42\x31\x35\x31\x1a\x06\n\x04\x42\x31\x35\x32\x1a\x06\n\x04\x42\x31\x35\x33\x1a\x06\n\x04\x42\x31\x35\x34\x1a\x06\n\x04\x42\x31\x35\x35\x1a\x06\n\x04\x42\x31\x35\x36\x1a\x06\n\x04\x42\x31\x35\x37\x1a\x06\n\x04\x42\x31\x35\x38\x1a\x06\n\x04\x42\x31\x35\x39\x1a\x06\n\x04\x42\x31\x36\x30\x1a\x06\n\x04\x42\x31\x36\x31\x1a\x06\n\x04\x42\x31\x36\x32\x1a\x06\n\x04\x42\x31\x36\x33\x1a\x06\n\x04\x42\x31\x36\x34\x1a\x06\n\x04\x42\x31\x36\x35\x1a\x06\n\x04\x42\x31\x36\x36\x1a\x06\n\x04\x42\x31\x36\x37\x1a\x06\n\x04\x42\x31\x36\x38\x1a\x06\n\x04\x42\x31\x36\x39\x1a\x06\n\x04\x42\x31\x37\x30\x1a\x06\n\x04\x42\x31\x37\x31\x1a\x06\n\x04\x42\x31\x37\x32\x1a\x06\n\x04\x42\x31\x37\x33\x1a\x06\n\x04\x42\x31\x37\x34\x1a\x06\n\x04\x42\x31\x37\x35\x1a\x06\n\x04\x42\x31\x37\x36\x1a\x06\n\x04\x42\x31\x37\x37\x1a\x06\n\x04\x42\x31\x37\x38\x1a\x06\n\x04\x42\x31\x37\x39\x1a\x06\n\x04\x42\x31\x38\x30\x1a\x06\n\x04\x42\x31\x38\x31\x1a\x06\n\x04\x42\x31\x38\x32\x1a\x06\n\x04\x42\x31\x38\x33\x1a\x06\n\x04\x42\x31\x38\x34\x1a\x06\n\x04\x42\x31\x38\x35\x1a\x06\n\x04\x42\x31\x38\x36\x1a\x06\n\x04\x42\x31\x38\x37\x1a\x06\n\x04\x42\x31\x38\x38\x1a\x06\n\x04\x42\x31\x38\x39\x1a\x06\n\x04\x42\x31\x39\x30\x1a\x06\n\x04\x42\x31\x39\x31\x1a\x06\n\x04\x42\x31\x39\x32\x1a\x06\n\x04\x42\x31\x39\x33\x1a\x06\n\x04\x42\x31\x39\x34
\x1a\x06\n\x04\x42\x31\x39\x35\x1a\x06\n\x04\x42\x31\x39\x36\x1a\x06\n\x04\x42\x31\x39\x37\x1a\x06\n\x04\x42\x31\x39\x38\x1a\x06\n\x04\x42\x31\x39\x39\x1a\x06\n\x04\x42\x32\x30\x30\x1a\x06\n\x04\x42\x32\x30\x31\x1a\x06\n\x04\x42\x32\x30\x32\x1a\x06\n\x04\x42\x32\x30\x33\x1a\x06\n\x04\x42\x32\x30\x34\x1a\x06\n\x04\x42\x32\x30\x35\x1a\x06\n\x04\x42\x32\x30\x36\x1a\x06\n\x04\x42\x32\x30\x37\x1a\x06\n\x04\x42\x32\x30\x38\x1a\x06\n\x04\x42\x32\x30\x39\x1a\x06\n\x04\x42\x32\x31\x30\x1a\x06\n\x04\x42\x32\x31\x31\x1a\x06\n\x04\x42\x32\x31\x32\x1a\x06\n\x04\x42\x32\x31\x33\x1a\x06\n\x04\x42\x32\x31\x34\x1a\x06\n\x04\x42\x32\x31\x35\x1a\x06\n\x04\x42\x32\x31\x36\x1a\x06\n\x04\x42\x32\x31\x37\x1a\x06\n\x04\x42\x32\x31\x38\x1a\x06\n\x04\x42\x32\x31\x39\x1a\x06\n\x04\x42\x32\x32\x30\x1a\x06\n\x04\x42\x32\x32\x31\x1a\x06\n\x04\x42\x32\x32\x32\x1a\x06\n\x04\x42\x32\x32\x33\x1a\x06\n\x04\x42\x32\x32\x34\x1a\x06\n\x04\x42\x32\x32\x35\x1a\x06\n\x04\x42\x32\x32\x36\x1a\x06\n\x04\x42\x32\x32\x37\x1a\x06\n\x04\x42\x32\x32\x38\x1a\x06\n\x04\x42\x32\x32\x39\x1a\x06\n\x04\x42\x32\x33\x30\x1a\x06\n\x04\x42\x32\x33\x31\x1a\x06\n\x04\x42\x32\x33\x32\x1a\x06\n\x04\x42\x32\x33\x33\x1a\x06\n\x04\x42\x32\x33\x34\x1a\x06\n\x04\x42\x32\x33\x35\x1a\x06\n\x04\x42\x32\x33\x36\x1a\x06\n\x04\x42\x32\x33\x37\x1a\x06\n\x04\x42\x32\x33\x38\x1a\x06\n\x04\x42\x32\x33\x39\x1a\x06\n\x04\x42\x32\x34\x30\x1a\x06\n\x04\x42\x32\x34\x31\x1a\x06\n\x04\x42\x32\x34\x32\x1a\x06\n\x04\x42\x32\x34\x33\x1a\x06\n\x04\x42\x32\x34\x34\x1a\x06\n\x04\x42\x32\x34\x35\x1a\x06\n\x04\x42\x32\x34\x36\x1a\x06\n\x04\x42\x32\x34\x37\x1a\x06\n\x04\x42\x32\x34\x38\x1a\x06\n\x04\x42\x32\x34\x39\x1a\x06\n\x04\x42\x32\x35\x30\x1a\x06\n\x04\x42\x32\x35\x31\x1a\x06\n\x04\x42\x32\x35\x32\x1a\x06\n\x04\x42\x32\x35\x33\x1a\x06\n\x04\x42\x32\x35\x34\x1a\x06\n\x04\x42\x32\x35\x35*\x1b\n\x02is\x12\x0b\n\x07\x64\x65\x66\x61ult\x10\x00\x12\x08\n\x04\x65lse\x10\x01:C\n\x0foptional_uint64\x12*.google.protobuf.internal.OutOfOrderFields\x18\x04 \x01(\x04:B\n\x0eoptional_int64\x12*.google.protobuf.internal.OutOfOrderFields\x18\x02 \x01(\x03:2\n\x08\x63ontinue\x12\x1f.google.protobuf.internal.class\x18\xe9\x07 \x01(\x05:2\n\x04with\x12#.google.protobuf.internal.class.try\x18\xe9\x07 \x01(\x05') + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.internal.more_messages_pb2', globals()) +if _descriptor._USE_C_DESCRIPTORS == False: + OutOfOrderFields.RegisterExtension(optional_uint64) + OutOfOrderFields.RegisterExtension(optional_int64) + globals()['class'].RegisterExtension(globals()['continue']) + getattr(globals()['class'], 'try').RegisterExtension(globals()['with']) + globals()['class'].RegisterExtension(_EXTENDCLASS.extensions_by_name['return']) + + DESCRIPTOR._options = None + _IS._serialized_start=2669 + _IS._serialized_end=2696 + _OUTOFORDERFIELDS._serialized_start=74 + _OUTOFORDERFIELDS._serialized_end=178 + _CLASS._serialized_start=181 + _CLASS._serialized_end=514 + _CLASS_TRY._serialized_start=448 + _CLASS_TRY._serialized_end=476 + _CLASS_FOR._serialized_start=478 + _CLASS_FOR._serialized_end=506 + _EXTENDCLASS._serialized_start=516 + _EXTENDCLASS._serialized_end=579 + _TESTFULLKEYWORD._serialized_start=581 + _TESTFULLKEYWORD._serialized_end=707 + _LOTSNESTEDMESSAGE._serialized_start=710 + _LOTSNESTEDMESSAGE._serialized_end=2667 + _LOTSNESTEDMESSAGE_B0._serialized_start=731 + _LOTSNESTEDMESSAGE_B0._serialized_end=735 + _LOTSNESTEDMESSAGE_B1._serialized_start=737 + 
_LOTSNESTEDMESSAGE_B1._serialized_end=741 + _LOTSNESTEDMESSAGE_B2._serialized_start=743 + _LOTSNESTEDMESSAGE_B2._serialized_end=747 + _LOTSNESTEDMESSAGE_B3._serialized_start=749 + _LOTSNESTEDMESSAGE_B3._serialized_end=753 + _LOTSNESTEDMESSAGE_B4._serialized_start=755 + _LOTSNESTEDMESSAGE_B4._serialized_end=759 + _LOTSNESTEDMESSAGE_B5._serialized_start=761 + _LOTSNESTEDMESSAGE_B5._serialized_end=765 + _LOTSNESTEDMESSAGE_B6._serialized_start=767 + _LOTSNESTEDMESSAGE_B6._serialized_end=771 + _LOTSNESTEDMESSAGE_B7._serialized_start=773 + _LOTSNESTEDMESSAGE_B7._serialized_end=777 + _LOTSNESTEDMESSAGE_B8._serialized_start=779 + _LOTSNESTEDMESSAGE_B8._serialized_end=783 + _LOTSNESTEDMESSAGE_B9._serialized_start=785 + _LOTSNESTEDMESSAGE_B9._serialized_end=789 + _LOTSNESTEDMESSAGE_B10._serialized_start=791 + _LOTSNESTEDMESSAGE_B10._serialized_end=796 + _LOTSNESTEDMESSAGE_B11._serialized_start=798 + _LOTSNESTEDMESSAGE_B11._serialized_end=803 + _LOTSNESTEDMESSAGE_B12._serialized_start=805 + _LOTSNESTEDMESSAGE_B12._serialized_end=810 + _LOTSNESTEDMESSAGE_B13._serialized_start=812 + _LOTSNESTEDMESSAGE_B13._serialized_end=817 + _LOTSNESTEDMESSAGE_B14._serialized_start=819 + _LOTSNESTEDMESSAGE_B14._serialized_end=824 + _LOTSNESTEDMESSAGE_B15._serialized_start=826 + _LOTSNESTEDMESSAGE_B15._serialized_end=831 + _LOTSNESTEDMESSAGE_B16._serialized_start=833 + _LOTSNESTEDMESSAGE_B16._serialized_end=838 + _LOTSNESTEDMESSAGE_B17._serialized_start=840 + _LOTSNESTEDMESSAGE_B17._serialized_end=845 + _LOTSNESTEDMESSAGE_B18._serialized_start=847 + _LOTSNESTEDMESSAGE_B18._serialized_end=852 + _LOTSNESTEDMESSAGE_B19._serialized_start=854 + _LOTSNESTEDMESSAGE_B19._serialized_end=859 + _LOTSNESTEDMESSAGE_B20._serialized_start=861 + _LOTSNESTEDMESSAGE_B20._serialized_end=866 + _LOTSNESTEDMESSAGE_B21._serialized_start=868 + _LOTSNESTEDMESSAGE_B21._serialized_end=873 + _LOTSNESTEDMESSAGE_B22._serialized_start=875 + _LOTSNESTEDMESSAGE_B22._serialized_end=880 + _LOTSNESTEDMESSAGE_B23._serialized_start=882 + _LOTSNESTEDMESSAGE_B23._serialized_end=887 + _LOTSNESTEDMESSAGE_B24._serialized_start=889 + _LOTSNESTEDMESSAGE_B24._serialized_end=894 + _LOTSNESTEDMESSAGE_B25._serialized_start=896 + _LOTSNESTEDMESSAGE_B25._serialized_end=901 + _LOTSNESTEDMESSAGE_B26._serialized_start=903 + _LOTSNESTEDMESSAGE_B26._serialized_end=908 + _LOTSNESTEDMESSAGE_B27._serialized_start=910 + _LOTSNESTEDMESSAGE_B27._serialized_end=915 + _LOTSNESTEDMESSAGE_B28._serialized_start=917 + _LOTSNESTEDMESSAGE_B28._serialized_end=922 + _LOTSNESTEDMESSAGE_B29._serialized_start=924 + _LOTSNESTEDMESSAGE_B29._serialized_end=929 + _LOTSNESTEDMESSAGE_B30._serialized_start=931 + _LOTSNESTEDMESSAGE_B30._serialized_end=936 + _LOTSNESTEDMESSAGE_B31._serialized_start=938 + _LOTSNESTEDMESSAGE_B31._serialized_end=943 + _LOTSNESTEDMESSAGE_B32._serialized_start=945 + _LOTSNESTEDMESSAGE_B32._serialized_end=950 + _LOTSNESTEDMESSAGE_B33._serialized_start=952 + _LOTSNESTEDMESSAGE_B33._serialized_end=957 + _LOTSNESTEDMESSAGE_B34._serialized_start=959 + _LOTSNESTEDMESSAGE_B34._serialized_end=964 + _LOTSNESTEDMESSAGE_B35._serialized_start=966 + _LOTSNESTEDMESSAGE_B35._serialized_end=971 + _LOTSNESTEDMESSAGE_B36._serialized_start=973 + _LOTSNESTEDMESSAGE_B36._serialized_end=978 + _LOTSNESTEDMESSAGE_B37._serialized_start=980 + _LOTSNESTEDMESSAGE_B37._serialized_end=985 + _LOTSNESTEDMESSAGE_B38._serialized_start=987 + _LOTSNESTEDMESSAGE_B38._serialized_end=992 + _LOTSNESTEDMESSAGE_B39._serialized_start=994 + _LOTSNESTEDMESSAGE_B39._serialized_end=999 + 
_LOTSNESTEDMESSAGE_B40._serialized_start=1001 + _LOTSNESTEDMESSAGE_B40._serialized_end=1006 + _LOTSNESTEDMESSAGE_B41._serialized_start=1008 + _LOTSNESTEDMESSAGE_B41._serialized_end=1013 + _LOTSNESTEDMESSAGE_B42._serialized_start=1015 + _LOTSNESTEDMESSAGE_B42._serialized_end=1020 + _LOTSNESTEDMESSAGE_B43._serialized_start=1022 + _LOTSNESTEDMESSAGE_B43._serialized_end=1027 + _LOTSNESTEDMESSAGE_B44._serialized_start=1029 + _LOTSNESTEDMESSAGE_B44._serialized_end=1034 + _LOTSNESTEDMESSAGE_B45._serialized_start=1036 + _LOTSNESTEDMESSAGE_B45._serialized_end=1041 + _LOTSNESTEDMESSAGE_B46._serialized_start=1043 + _LOTSNESTEDMESSAGE_B46._serialized_end=1048 + _LOTSNESTEDMESSAGE_B47._serialized_start=1050 + _LOTSNESTEDMESSAGE_B47._serialized_end=1055 + _LOTSNESTEDMESSAGE_B48._serialized_start=1057 + _LOTSNESTEDMESSAGE_B48._serialized_end=1062 + _LOTSNESTEDMESSAGE_B49._serialized_start=1064 + _LOTSNESTEDMESSAGE_B49._serialized_end=1069 + _LOTSNESTEDMESSAGE_B50._serialized_start=1071 + _LOTSNESTEDMESSAGE_B50._serialized_end=1076 + _LOTSNESTEDMESSAGE_B51._serialized_start=1078 + _LOTSNESTEDMESSAGE_B51._serialized_end=1083 + _LOTSNESTEDMESSAGE_B52._serialized_start=1085 + _LOTSNESTEDMESSAGE_B52._serialized_end=1090 + _LOTSNESTEDMESSAGE_B53._serialized_start=1092 + _LOTSNESTEDMESSAGE_B53._serialized_end=1097 + _LOTSNESTEDMESSAGE_B54._serialized_start=1099 + _LOTSNESTEDMESSAGE_B54._serialized_end=1104 + _LOTSNESTEDMESSAGE_B55._serialized_start=1106 + _LOTSNESTEDMESSAGE_B55._serialized_end=1111 + _LOTSNESTEDMESSAGE_B56._serialized_start=1113 + _LOTSNESTEDMESSAGE_B56._serialized_end=1118 + _LOTSNESTEDMESSAGE_B57._serialized_start=1120 + _LOTSNESTEDMESSAGE_B57._serialized_end=1125 + _LOTSNESTEDMESSAGE_B58._serialized_start=1127 + _LOTSNESTEDMESSAGE_B58._serialized_end=1132 + _LOTSNESTEDMESSAGE_B59._serialized_start=1134 + _LOTSNESTEDMESSAGE_B59._serialized_end=1139 + _LOTSNESTEDMESSAGE_B60._serialized_start=1141 + _LOTSNESTEDMESSAGE_B60._serialized_end=1146 + _LOTSNESTEDMESSAGE_B61._serialized_start=1148 + _LOTSNESTEDMESSAGE_B61._serialized_end=1153 + _LOTSNESTEDMESSAGE_B62._serialized_start=1155 + _LOTSNESTEDMESSAGE_B62._serialized_end=1160 + _LOTSNESTEDMESSAGE_B63._serialized_start=1162 + _LOTSNESTEDMESSAGE_B63._serialized_end=1167 + _LOTSNESTEDMESSAGE_B64._serialized_start=1169 + _LOTSNESTEDMESSAGE_B64._serialized_end=1174 + _LOTSNESTEDMESSAGE_B65._serialized_start=1176 + _LOTSNESTEDMESSAGE_B65._serialized_end=1181 + _LOTSNESTEDMESSAGE_B66._serialized_start=1183 + _LOTSNESTEDMESSAGE_B66._serialized_end=1188 + _LOTSNESTEDMESSAGE_B67._serialized_start=1190 + _LOTSNESTEDMESSAGE_B67._serialized_end=1195 + _LOTSNESTEDMESSAGE_B68._serialized_start=1197 + _LOTSNESTEDMESSAGE_B68._serialized_end=1202 + _LOTSNESTEDMESSAGE_B69._serialized_start=1204 + _LOTSNESTEDMESSAGE_B69._serialized_end=1209 + _LOTSNESTEDMESSAGE_B70._serialized_start=1211 + _LOTSNESTEDMESSAGE_B70._serialized_end=1216 + _LOTSNESTEDMESSAGE_B71._serialized_start=1218 + _LOTSNESTEDMESSAGE_B71._serialized_end=1223 + _LOTSNESTEDMESSAGE_B72._serialized_start=1225 + _LOTSNESTEDMESSAGE_B72._serialized_end=1230 + _LOTSNESTEDMESSAGE_B73._serialized_start=1232 + _LOTSNESTEDMESSAGE_B73._serialized_end=1237 + _LOTSNESTEDMESSAGE_B74._serialized_start=1239 + _LOTSNESTEDMESSAGE_B74._serialized_end=1244 + _LOTSNESTEDMESSAGE_B75._serialized_start=1246 + _LOTSNESTEDMESSAGE_B75._serialized_end=1251 + _LOTSNESTEDMESSAGE_B76._serialized_start=1253 + _LOTSNESTEDMESSAGE_B76._serialized_end=1258 + _LOTSNESTEDMESSAGE_B77._serialized_start=1260 + 
_LOTSNESTEDMESSAGE_B77._serialized_end=1265 + _LOTSNESTEDMESSAGE_B78._serialized_start=1267 + _LOTSNESTEDMESSAGE_B78._serialized_end=1272 + _LOTSNESTEDMESSAGE_B79._serialized_start=1274 + _LOTSNESTEDMESSAGE_B79._serialized_end=1279 + _LOTSNESTEDMESSAGE_B80._serialized_start=1281 + _LOTSNESTEDMESSAGE_B80._serialized_end=1286 + _LOTSNESTEDMESSAGE_B81._serialized_start=1288 + _LOTSNESTEDMESSAGE_B81._serialized_end=1293 + _LOTSNESTEDMESSAGE_B82._serialized_start=1295 + _LOTSNESTEDMESSAGE_B82._serialized_end=1300 + _LOTSNESTEDMESSAGE_B83._serialized_start=1302 + _LOTSNESTEDMESSAGE_B83._serialized_end=1307 + _LOTSNESTEDMESSAGE_B84._serialized_start=1309 + _LOTSNESTEDMESSAGE_B84._serialized_end=1314 + _LOTSNESTEDMESSAGE_B85._serialized_start=1316 + _LOTSNESTEDMESSAGE_B85._serialized_end=1321 + _LOTSNESTEDMESSAGE_B86._serialized_start=1323 + _LOTSNESTEDMESSAGE_B86._serialized_end=1328 + _LOTSNESTEDMESSAGE_B87._serialized_start=1330 + _LOTSNESTEDMESSAGE_B87._serialized_end=1335 + _LOTSNESTEDMESSAGE_B88._serialized_start=1337 + _LOTSNESTEDMESSAGE_B88._serialized_end=1342 + _LOTSNESTEDMESSAGE_B89._serialized_start=1344 + _LOTSNESTEDMESSAGE_B89._serialized_end=1349 + _LOTSNESTEDMESSAGE_B90._serialized_start=1351 + _LOTSNESTEDMESSAGE_B90._serialized_end=1356 + _LOTSNESTEDMESSAGE_B91._serialized_start=1358 + _LOTSNESTEDMESSAGE_B91._serialized_end=1363 + _LOTSNESTEDMESSAGE_B92._serialized_start=1365 + _LOTSNESTEDMESSAGE_B92._serialized_end=1370 + _LOTSNESTEDMESSAGE_B93._serialized_start=1372 + _LOTSNESTEDMESSAGE_B93._serialized_end=1377 + _LOTSNESTEDMESSAGE_B94._serialized_start=1379 + _LOTSNESTEDMESSAGE_B94._serialized_end=1384 + _LOTSNESTEDMESSAGE_B95._serialized_start=1386 + _LOTSNESTEDMESSAGE_B95._serialized_end=1391 + _LOTSNESTEDMESSAGE_B96._serialized_start=1393 + _LOTSNESTEDMESSAGE_B96._serialized_end=1398 + _LOTSNESTEDMESSAGE_B97._serialized_start=1400 + _LOTSNESTEDMESSAGE_B97._serialized_end=1405 + _LOTSNESTEDMESSAGE_B98._serialized_start=1407 + _LOTSNESTEDMESSAGE_B98._serialized_end=1412 + _LOTSNESTEDMESSAGE_B99._serialized_start=1414 + _LOTSNESTEDMESSAGE_B99._serialized_end=1419 + _LOTSNESTEDMESSAGE_B100._serialized_start=1421 + _LOTSNESTEDMESSAGE_B100._serialized_end=1427 + _LOTSNESTEDMESSAGE_B101._serialized_start=1429 + _LOTSNESTEDMESSAGE_B101._serialized_end=1435 + _LOTSNESTEDMESSAGE_B102._serialized_start=1437 + _LOTSNESTEDMESSAGE_B102._serialized_end=1443 + _LOTSNESTEDMESSAGE_B103._serialized_start=1445 + _LOTSNESTEDMESSAGE_B103._serialized_end=1451 + _LOTSNESTEDMESSAGE_B104._serialized_start=1453 + _LOTSNESTEDMESSAGE_B104._serialized_end=1459 + _LOTSNESTEDMESSAGE_B105._serialized_start=1461 + _LOTSNESTEDMESSAGE_B105._serialized_end=1467 + _LOTSNESTEDMESSAGE_B106._serialized_start=1469 + _LOTSNESTEDMESSAGE_B106._serialized_end=1475 + _LOTSNESTEDMESSAGE_B107._serialized_start=1477 + _LOTSNESTEDMESSAGE_B107._serialized_end=1483 + _LOTSNESTEDMESSAGE_B108._serialized_start=1485 + _LOTSNESTEDMESSAGE_B108._serialized_end=1491 + _LOTSNESTEDMESSAGE_B109._serialized_start=1493 + _LOTSNESTEDMESSAGE_B109._serialized_end=1499 + _LOTSNESTEDMESSAGE_B110._serialized_start=1501 + _LOTSNESTEDMESSAGE_B110._serialized_end=1507 + _LOTSNESTEDMESSAGE_B111._serialized_start=1509 + _LOTSNESTEDMESSAGE_B111._serialized_end=1515 + _LOTSNESTEDMESSAGE_B112._serialized_start=1517 + _LOTSNESTEDMESSAGE_B112._serialized_end=1523 + _LOTSNESTEDMESSAGE_B113._serialized_start=1525 + _LOTSNESTEDMESSAGE_B113._serialized_end=1531 + _LOTSNESTEDMESSAGE_B114._serialized_start=1533 + _LOTSNESTEDMESSAGE_B114._serialized_end=1539 + 
_LOTSNESTEDMESSAGE_B115._serialized_start=1541 + _LOTSNESTEDMESSAGE_B115._serialized_end=1547 + _LOTSNESTEDMESSAGE_B116._serialized_start=1549 + _LOTSNESTEDMESSAGE_B116._serialized_end=1555 + _LOTSNESTEDMESSAGE_B117._serialized_start=1557 + _LOTSNESTEDMESSAGE_B117._serialized_end=1563 + _LOTSNESTEDMESSAGE_B118._serialized_start=1565 + _LOTSNESTEDMESSAGE_B118._serialized_end=1571 + _LOTSNESTEDMESSAGE_B119._serialized_start=1573 + _LOTSNESTEDMESSAGE_B119._serialized_end=1579 + _LOTSNESTEDMESSAGE_B120._serialized_start=1581 + _LOTSNESTEDMESSAGE_B120._serialized_end=1587 + _LOTSNESTEDMESSAGE_B121._serialized_start=1589 + _LOTSNESTEDMESSAGE_B121._serialized_end=1595 + _LOTSNESTEDMESSAGE_B122._serialized_start=1597 + _LOTSNESTEDMESSAGE_B122._serialized_end=1603 + _LOTSNESTEDMESSAGE_B123._serialized_start=1605 + _LOTSNESTEDMESSAGE_B123._serialized_end=1611 + _LOTSNESTEDMESSAGE_B124._serialized_start=1613 + _LOTSNESTEDMESSAGE_B124._serialized_end=1619 + _LOTSNESTEDMESSAGE_B125._serialized_start=1621 + _LOTSNESTEDMESSAGE_B125._serialized_end=1627 + _LOTSNESTEDMESSAGE_B126._serialized_start=1629 + _LOTSNESTEDMESSAGE_B126._serialized_end=1635 + _LOTSNESTEDMESSAGE_B127._serialized_start=1637 + _LOTSNESTEDMESSAGE_B127._serialized_end=1643 + _LOTSNESTEDMESSAGE_B128._serialized_start=1645 + _LOTSNESTEDMESSAGE_B128._serialized_end=1651 + _LOTSNESTEDMESSAGE_B129._serialized_start=1653 + _LOTSNESTEDMESSAGE_B129._serialized_end=1659 + _LOTSNESTEDMESSAGE_B130._serialized_start=1661 + _LOTSNESTEDMESSAGE_B130._serialized_end=1667 + _LOTSNESTEDMESSAGE_B131._serialized_start=1669 + _LOTSNESTEDMESSAGE_B131._serialized_end=1675 + _LOTSNESTEDMESSAGE_B132._serialized_start=1677 + _LOTSNESTEDMESSAGE_B132._serialized_end=1683 + _LOTSNESTEDMESSAGE_B133._serialized_start=1685 + _LOTSNESTEDMESSAGE_B133._serialized_end=1691 + _LOTSNESTEDMESSAGE_B134._serialized_start=1693 + _LOTSNESTEDMESSAGE_B134._serialized_end=1699 + _LOTSNESTEDMESSAGE_B135._serialized_start=1701 + _LOTSNESTEDMESSAGE_B135._serialized_end=1707 + _LOTSNESTEDMESSAGE_B136._serialized_start=1709 + _LOTSNESTEDMESSAGE_B136._serialized_end=1715 + _LOTSNESTEDMESSAGE_B137._serialized_start=1717 + _LOTSNESTEDMESSAGE_B137._serialized_end=1723 + _LOTSNESTEDMESSAGE_B138._serialized_start=1725 + _LOTSNESTEDMESSAGE_B138._serialized_end=1731 + _LOTSNESTEDMESSAGE_B139._serialized_start=1733 + _LOTSNESTEDMESSAGE_B139._serialized_end=1739 + _LOTSNESTEDMESSAGE_B140._serialized_start=1741 + _LOTSNESTEDMESSAGE_B140._serialized_end=1747 + _LOTSNESTEDMESSAGE_B141._serialized_start=1749 + _LOTSNESTEDMESSAGE_B141._serialized_end=1755 + _LOTSNESTEDMESSAGE_B142._serialized_start=1757 + _LOTSNESTEDMESSAGE_B142._serialized_end=1763 + _LOTSNESTEDMESSAGE_B143._serialized_start=1765 + _LOTSNESTEDMESSAGE_B143._serialized_end=1771 + _LOTSNESTEDMESSAGE_B144._serialized_start=1773 + _LOTSNESTEDMESSAGE_B144._serialized_end=1779 + _LOTSNESTEDMESSAGE_B145._serialized_start=1781 + _LOTSNESTEDMESSAGE_B145._serialized_end=1787 + _LOTSNESTEDMESSAGE_B146._serialized_start=1789 + _LOTSNESTEDMESSAGE_B146._serialized_end=1795 + _LOTSNESTEDMESSAGE_B147._serialized_start=1797 + _LOTSNESTEDMESSAGE_B147._serialized_end=1803 + _LOTSNESTEDMESSAGE_B148._serialized_start=1805 + _LOTSNESTEDMESSAGE_B148._serialized_end=1811 + _LOTSNESTEDMESSAGE_B149._serialized_start=1813 + _LOTSNESTEDMESSAGE_B149._serialized_end=1819 + _LOTSNESTEDMESSAGE_B150._serialized_start=1821 + _LOTSNESTEDMESSAGE_B150._serialized_end=1827 + _LOTSNESTEDMESSAGE_B151._serialized_start=1829 + _LOTSNESTEDMESSAGE_B151._serialized_end=1835 + 
_LOTSNESTEDMESSAGE_B152._serialized_start=1837 + _LOTSNESTEDMESSAGE_B152._serialized_end=1843 + _LOTSNESTEDMESSAGE_B153._serialized_start=1845 + _LOTSNESTEDMESSAGE_B153._serialized_end=1851 + _LOTSNESTEDMESSAGE_B154._serialized_start=1853 + _LOTSNESTEDMESSAGE_B154._serialized_end=1859 + _LOTSNESTEDMESSAGE_B155._serialized_start=1861 + _LOTSNESTEDMESSAGE_B155._serialized_end=1867 + _LOTSNESTEDMESSAGE_B156._serialized_start=1869 + _LOTSNESTEDMESSAGE_B156._serialized_end=1875 + _LOTSNESTEDMESSAGE_B157._serialized_start=1877 + _LOTSNESTEDMESSAGE_B157._serialized_end=1883 + _LOTSNESTEDMESSAGE_B158._serialized_start=1885 + _LOTSNESTEDMESSAGE_B158._serialized_end=1891 + _LOTSNESTEDMESSAGE_B159._serialized_start=1893 + _LOTSNESTEDMESSAGE_B159._serialized_end=1899 + _LOTSNESTEDMESSAGE_B160._serialized_start=1901 + _LOTSNESTEDMESSAGE_B160._serialized_end=1907 + _LOTSNESTEDMESSAGE_B161._serialized_start=1909 + _LOTSNESTEDMESSAGE_B161._serialized_end=1915 + _LOTSNESTEDMESSAGE_B162._serialized_start=1917 + _LOTSNESTEDMESSAGE_B162._serialized_end=1923 + _LOTSNESTEDMESSAGE_B163._serialized_start=1925 + _LOTSNESTEDMESSAGE_B163._serialized_end=1931 + _LOTSNESTEDMESSAGE_B164._serialized_start=1933 + _LOTSNESTEDMESSAGE_B164._serialized_end=1939 + _LOTSNESTEDMESSAGE_B165._serialized_start=1941 + _LOTSNESTEDMESSAGE_B165._serialized_end=1947 + _LOTSNESTEDMESSAGE_B166._serialized_start=1949 + _LOTSNESTEDMESSAGE_B166._serialized_end=1955 + _LOTSNESTEDMESSAGE_B167._serialized_start=1957 + _LOTSNESTEDMESSAGE_B167._serialized_end=1963 + _LOTSNESTEDMESSAGE_B168._serialized_start=1965 + _LOTSNESTEDMESSAGE_B168._serialized_end=1971 + _LOTSNESTEDMESSAGE_B169._serialized_start=1973 + _LOTSNESTEDMESSAGE_B169._serialized_end=1979 + _LOTSNESTEDMESSAGE_B170._serialized_start=1981 + _LOTSNESTEDMESSAGE_B170._serialized_end=1987 + _LOTSNESTEDMESSAGE_B171._serialized_start=1989 + _LOTSNESTEDMESSAGE_B171._serialized_end=1995 + _LOTSNESTEDMESSAGE_B172._serialized_start=1997 + _LOTSNESTEDMESSAGE_B172._serialized_end=2003 + _LOTSNESTEDMESSAGE_B173._serialized_start=2005 + _LOTSNESTEDMESSAGE_B173._serialized_end=2011 + _LOTSNESTEDMESSAGE_B174._serialized_start=2013 + _LOTSNESTEDMESSAGE_B174._serialized_end=2019 + _LOTSNESTEDMESSAGE_B175._serialized_start=2021 + _LOTSNESTEDMESSAGE_B175._serialized_end=2027 + _LOTSNESTEDMESSAGE_B176._serialized_start=2029 + _LOTSNESTEDMESSAGE_B176._serialized_end=2035 + _LOTSNESTEDMESSAGE_B177._serialized_start=2037 + _LOTSNESTEDMESSAGE_B177._serialized_end=2043 + _LOTSNESTEDMESSAGE_B178._serialized_start=2045 + _LOTSNESTEDMESSAGE_B178._serialized_end=2051 + _LOTSNESTEDMESSAGE_B179._serialized_start=2053 + _LOTSNESTEDMESSAGE_B179._serialized_end=2059 + _LOTSNESTEDMESSAGE_B180._serialized_start=2061 + _LOTSNESTEDMESSAGE_B180._serialized_end=2067 + _LOTSNESTEDMESSAGE_B181._serialized_start=2069 + _LOTSNESTEDMESSAGE_B181._serialized_end=2075 + _LOTSNESTEDMESSAGE_B182._serialized_start=2077 + _LOTSNESTEDMESSAGE_B182._serialized_end=2083 + _LOTSNESTEDMESSAGE_B183._serialized_start=2085 + _LOTSNESTEDMESSAGE_B183._serialized_end=2091 + _LOTSNESTEDMESSAGE_B184._serialized_start=2093 + _LOTSNESTEDMESSAGE_B184._serialized_end=2099 + _LOTSNESTEDMESSAGE_B185._serialized_start=2101 + _LOTSNESTEDMESSAGE_B185._serialized_end=2107 + _LOTSNESTEDMESSAGE_B186._serialized_start=2109 + _LOTSNESTEDMESSAGE_B186._serialized_end=2115 + _LOTSNESTEDMESSAGE_B187._serialized_start=2117 + _LOTSNESTEDMESSAGE_B187._serialized_end=2123 + _LOTSNESTEDMESSAGE_B188._serialized_start=2125 + _LOTSNESTEDMESSAGE_B188._serialized_end=2131 + 
_LOTSNESTEDMESSAGE_B189._serialized_start=2133 + _LOTSNESTEDMESSAGE_B189._serialized_end=2139 + _LOTSNESTEDMESSAGE_B190._serialized_start=2141 + _LOTSNESTEDMESSAGE_B190._serialized_end=2147 + _LOTSNESTEDMESSAGE_B191._serialized_start=2149 + _LOTSNESTEDMESSAGE_B191._serialized_end=2155 + _LOTSNESTEDMESSAGE_B192._serialized_start=2157 + _LOTSNESTEDMESSAGE_B192._serialized_end=2163 + _LOTSNESTEDMESSAGE_B193._serialized_start=2165 + _LOTSNESTEDMESSAGE_B193._serialized_end=2171 + _LOTSNESTEDMESSAGE_B194._serialized_start=2173 + _LOTSNESTEDMESSAGE_B194._serialized_end=2179 + _LOTSNESTEDMESSAGE_B195._serialized_start=2181 + _LOTSNESTEDMESSAGE_B195._serialized_end=2187 + _LOTSNESTEDMESSAGE_B196._serialized_start=2189 + _LOTSNESTEDMESSAGE_B196._serialized_end=2195 + _LOTSNESTEDMESSAGE_B197._serialized_start=2197 + _LOTSNESTEDMESSAGE_B197._serialized_end=2203 + _LOTSNESTEDMESSAGE_B198._serialized_start=2205 + _LOTSNESTEDMESSAGE_B198._serialized_end=2211 + _LOTSNESTEDMESSAGE_B199._serialized_start=2213 + _LOTSNESTEDMESSAGE_B199._serialized_end=2219 + _LOTSNESTEDMESSAGE_B200._serialized_start=2221 + _LOTSNESTEDMESSAGE_B200._serialized_end=2227 + _LOTSNESTEDMESSAGE_B201._serialized_start=2229 + _LOTSNESTEDMESSAGE_B201._serialized_end=2235 + _LOTSNESTEDMESSAGE_B202._serialized_start=2237 + _LOTSNESTEDMESSAGE_B202._serialized_end=2243 + _LOTSNESTEDMESSAGE_B203._serialized_start=2245 + _LOTSNESTEDMESSAGE_B203._serialized_end=2251 + _LOTSNESTEDMESSAGE_B204._serialized_start=2253 + _LOTSNESTEDMESSAGE_B204._serialized_end=2259 + _LOTSNESTEDMESSAGE_B205._serialized_start=2261 + _LOTSNESTEDMESSAGE_B205._serialized_end=2267 + _LOTSNESTEDMESSAGE_B206._serialized_start=2269 + _LOTSNESTEDMESSAGE_B206._serialized_end=2275 + _LOTSNESTEDMESSAGE_B207._serialized_start=2277 + _LOTSNESTEDMESSAGE_B207._serialized_end=2283 + _LOTSNESTEDMESSAGE_B208._serialized_start=2285 + _LOTSNESTEDMESSAGE_B208._serialized_end=2291 + _LOTSNESTEDMESSAGE_B209._serialized_start=2293 + _LOTSNESTEDMESSAGE_B209._serialized_end=2299 + _LOTSNESTEDMESSAGE_B210._serialized_start=2301 + _LOTSNESTEDMESSAGE_B210._serialized_end=2307 + _LOTSNESTEDMESSAGE_B211._serialized_start=2309 + _LOTSNESTEDMESSAGE_B211._serialized_end=2315 + _LOTSNESTEDMESSAGE_B212._serialized_start=2317 + _LOTSNESTEDMESSAGE_B212._serialized_end=2323 + _LOTSNESTEDMESSAGE_B213._serialized_start=2325 + _LOTSNESTEDMESSAGE_B213._serialized_end=2331 + _LOTSNESTEDMESSAGE_B214._serialized_start=2333 + _LOTSNESTEDMESSAGE_B214._serialized_end=2339 + _LOTSNESTEDMESSAGE_B215._serialized_start=2341 + _LOTSNESTEDMESSAGE_B215._serialized_end=2347 + _LOTSNESTEDMESSAGE_B216._serialized_start=2349 + _LOTSNESTEDMESSAGE_B216._serialized_end=2355 + _LOTSNESTEDMESSAGE_B217._serialized_start=2357 + _LOTSNESTEDMESSAGE_B217._serialized_end=2363 + _LOTSNESTEDMESSAGE_B218._serialized_start=2365 + _LOTSNESTEDMESSAGE_B218._serialized_end=2371 + _LOTSNESTEDMESSAGE_B219._serialized_start=2373 + _LOTSNESTEDMESSAGE_B219._serialized_end=2379 + _LOTSNESTEDMESSAGE_B220._serialized_start=2381 + _LOTSNESTEDMESSAGE_B220._serialized_end=2387 + _LOTSNESTEDMESSAGE_B221._serialized_start=2389 + _LOTSNESTEDMESSAGE_B221._serialized_end=2395 + _LOTSNESTEDMESSAGE_B222._serialized_start=2397 + _LOTSNESTEDMESSAGE_B222._serialized_end=2403 + _LOTSNESTEDMESSAGE_B223._serialized_start=2405 + _LOTSNESTEDMESSAGE_B223._serialized_end=2411 + _LOTSNESTEDMESSAGE_B224._serialized_start=2413 + _LOTSNESTEDMESSAGE_B224._serialized_end=2419 + _LOTSNESTEDMESSAGE_B225._serialized_start=2421 + _LOTSNESTEDMESSAGE_B225._serialized_end=2427 + 
_LOTSNESTEDMESSAGE_B226._serialized_start=2429 + _LOTSNESTEDMESSAGE_B226._serialized_end=2435 + _LOTSNESTEDMESSAGE_B227._serialized_start=2437 + _LOTSNESTEDMESSAGE_B227._serialized_end=2443 + _LOTSNESTEDMESSAGE_B228._serialized_start=2445 + _LOTSNESTEDMESSAGE_B228._serialized_end=2451 + _LOTSNESTEDMESSAGE_B229._serialized_start=2453 + _LOTSNESTEDMESSAGE_B229._serialized_end=2459 + _LOTSNESTEDMESSAGE_B230._serialized_start=2461 + _LOTSNESTEDMESSAGE_B230._serialized_end=2467 + _LOTSNESTEDMESSAGE_B231._serialized_start=2469 + _LOTSNESTEDMESSAGE_B231._serialized_end=2475 + _LOTSNESTEDMESSAGE_B232._serialized_start=2477 + _LOTSNESTEDMESSAGE_B232._serialized_end=2483 + _LOTSNESTEDMESSAGE_B233._serialized_start=2485 + _LOTSNESTEDMESSAGE_B233._serialized_end=2491 + _LOTSNESTEDMESSAGE_B234._serialized_start=2493 + _LOTSNESTEDMESSAGE_B234._serialized_end=2499 + _LOTSNESTEDMESSAGE_B235._serialized_start=2501 + _LOTSNESTEDMESSAGE_B235._serialized_end=2507 + _LOTSNESTEDMESSAGE_B236._serialized_start=2509 + _LOTSNESTEDMESSAGE_B236._serialized_end=2515 + _LOTSNESTEDMESSAGE_B237._serialized_start=2517 + _LOTSNESTEDMESSAGE_B237._serialized_end=2523 + _LOTSNESTEDMESSAGE_B238._serialized_start=2525 + _LOTSNESTEDMESSAGE_B238._serialized_end=2531 + _LOTSNESTEDMESSAGE_B239._serialized_start=2533 + _LOTSNESTEDMESSAGE_B239._serialized_end=2539 + _LOTSNESTEDMESSAGE_B240._serialized_start=2541 + _LOTSNESTEDMESSAGE_B240._serialized_end=2547 + _LOTSNESTEDMESSAGE_B241._serialized_start=2549 + _LOTSNESTEDMESSAGE_B241._serialized_end=2555 + _LOTSNESTEDMESSAGE_B242._serialized_start=2557 + _LOTSNESTEDMESSAGE_B242._serialized_end=2563 + _LOTSNESTEDMESSAGE_B243._serialized_start=2565 + _LOTSNESTEDMESSAGE_B243._serialized_end=2571 + _LOTSNESTEDMESSAGE_B244._serialized_start=2573 + _LOTSNESTEDMESSAGE_B244._serialized_end=2579 + _LOTSNESTEDMESSAGE_B245._serialized_start=2581 + _LOTSNESTEDMESSAGE_B245._serialized_end=2587 + _LOTSNESTEDMESSAGE_B246._serialized_start=2589 + _LOTSNESTEDMESSAGE_B246._serialized_end=2595 + _LOTSNESTEDMESSAGE_B247._serialized_start=2597 + _LOTSNESTEDMESSAGE_B247._serialized_end=2603 + _LOTSNESTEDMESSAGE_B248._serialized_start=2605 + _LOTSNESTEDMESSAGE_B248._serialized_end=2611 + _LOTSNESTEDMESSAGE_B249._serialized_start=2613 + _LOTSNESTEDMESSAGE_B249._serialized_end=2619 + _LOTSNESTEDMESSAGE_B250._serialized_start=2621 + _LOTSNESTEDMESSAGE_B250._serialized_end=2627 + _LOTSNESTEDMESSAGE_B251._serialized_start=2629 + _LOTSNESTEDMESSAGE_B251._serialized_end=2635 + _LOTSNESTEDMESSAGE_B252._serialized_start=2637 + _LOTSNESTEDMESSAGE_B252._serialized_end=2643 + _LOTSNESTEDMESSAGE_B253._serialized_start=2645 + _LOTSNESTEDMESSAGE_B253._serialized_end=2651 + _LOTSNESTEDMESSAGE_B254._serialized_start=2653 + _LOTSNESTEDMESSAGE_B254._serialized_end=2659 + _LOTSNESTEDMESSAGE_B255._serialized_start=2661 + _LOTSNESTEDMESSAGE_B255._serialized_end=2667 +# @@protoc_insertion_point(module_scope) diff --git a/openpype/hosts/hiero/vendor/google/protobuf/internal/no_package_pb2.py b/openpype/hosts/hiero/vendor/google/protobuf/internal/no_package_pb2.py new file mode 100644 index 0000000000..d46dee080a --- /dev/null +++ b/openpype/hosts/hiero/vendor/google/protobuf/internal/no_package_pb2.py @@ -0,0 +1,27 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: google/protobuf/internal/no_package.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n)google/protobuf/internal/no_package.proto\";\n\x10NoPackageMessage\x12\'\n\x0fno_package_enum\x18\x01 \x01(\x0e\x32\x0e.NoPackageEnum*?\n\rNoPackageEnum\x12\x16\n\x12NO_PACKAGE_VALUE_0\x10\x00\x12\x16\n\x12NO_PACKAGE_VALUE_1\x10\x01') + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.internal.no_package_pb2', globals()) +if _descriptor._USE_C_DESCRIPTORS == False: + + DESCRIPTOR._options = None + _NOPACKAGEENUM._serialized_start=106 + _NOPACKAGEENUM._serialized_end=169 + _NOPACKAGEMESSAGE._serialized_start=45 + _NOPACKAGEMESSAGE._serialized_end=104 +# @@protoc_insertion_point(module_scope) diff --git a/openpype/hosts/hiero/vendor/google/protobuf/internal/python_message.py b/openpype/hosts/hiero/vendor/google/protobuf/internal/python_message.py new file mode 100644 index 0000000000..2921d5cb6e --- /dev/null +++ b/openpype/hosts/hiero/vendor/google/protobuf/internal/python_message.py @@ -0,0 +1,1539 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. +# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +# This code is meant to work on Python 2.4 and above only. +# +# TODO(robinson): Helpers for verbose, common checks like seeing if a +# descriptor's cpp_type is CPPTYPE_MESSAGE. + +"""Contains a metaclass and helper functions used to create +protocol message classes from Descriptor objects at runtime. + +Recall that a metaclass is the "type" of a class. +(A class is to a metaclass what an instance is to a class.) 
+ +In this case, we use the GeneratedProtocolMessageType metaclass +to inject all the useful functionality into the classes +output by the protocol compiler at compile-time. + +The upshot of all this is that the real implementation +details for ALL pure-Python protocol buffers are *here in +this file*. +""" + +__author__ = 'robinson@google.com (Will Robinson)' + +from io import BytesIO +import struct +import sys +import weakref + +# We use "as" to avoid name collisions with variables. +from google.protobuf.internal import api_implementation +from google.protobuf.internal import containers +from google.protobuf.internal import decoder +from google.protobuf.internal import encoder +from google.protobuf.internal import enum_type_wrapper +from google.protobuf.internal import extension_dict +from google.protobuf.internal import message_listener as message_listener_mod +from google.protobuf.internal import type_checkers +from google.protobuf.internal import well_known_types +from google.protobuf.internal import wire_format +from google.protobuf import descriptor as descriptor_mod +from google.protobuf import message as message_mod +from google.protobuf import text_format + +_FieldDescriptor = descriptor_mod.FieldDescriptor +_AnyFullTypeName = 'google.protobuf.Any' +_ExtensionDict = extension_dict._ExtensionDict + +class GeneratedProtocolMessageType(type): + + """Metaclass for protocol message classes created at runtime from Descriptors. + + We add implementations for all methods described in the Message class. We + also create properties to allow getting/setting all fields in the protocol + message. Finally, we create slots to prevent users from accidentally + "setting" nonexistent fields in the protocol message, which then wouldn't get + serialized / deserialized properly. + + The protocol compiler currently uses this metaclass to create protocol + message classes at runtime. Clients can also manually create their own + classes at runtime, as in this example: + + mydescriptor = Descriptor(.....) + factory = symbol_database.Default() + factory.pool.AddDescriptor(mydescriptor) + MyProtoClass = factory.GetPrototype(mydescriptor) + myproto_instance = MyProtoClass() + myproto.foo_field = 23 + ... + """ + + # Must be consistent with the protocol-compiler code in + # proto2/compiler/internal/generator.*. + _DESCRIPTOR_KEY = 'DESCRIPTOR' + + def __new__(cls, name, bases, dictionary): + """Custom allocation for runtime-generated class types. + + We override __new__ because this is apparently the only place + where we can meaningfully set __slots__ on the class we're creating(?). + (The interplay between metaclasses and slots is not very well-documented). + + Args: + name: Name of the class (ignored, but required by the + metaclass protocol). + bases: Base classes of the class we're constructing. + (Should be message.Message). We ignore this field, but + it's required by the metaclass protocol + dictionary: The class dictionary of the class we're + constructing. dictionary[_DESCRIPTOR_KEY] must contain + a Descriptor object describing this protocol message + type. + + Returns: + Newly-allocated class. + + Raises: + RuntimeError: Generated code only work with python cpp extension. + """ + descriptor = dictionary[GeneratedProtocolMessageType._DESCRIPTOR_KEY] + + if isinstance(descriptor, str): + raise RuntimeError('The generated code only work with python cpp ' + 'extension, but it is using pure python runtime.') + + # If a concrete class already exists for this descriptor, don't try to + # create another. 
Doing so will break any messages that already exist with + # the existing class. + # + # The C++ implementation appears to have its own internal `PyMessageFactory` + # to achieve similar results. + # + # This most commonly happens in `text_format.py` when using descriptors from + # a custom pool; it calls symbol_database.Global().getPrototype() on a + # descriptor which already has an existing concrete class. + new_class = getattr(descriptor, '_concrete_class', None) + if new_class: + return new_class + + if descriptor.full_name in well_known_types.WKTBASES: + bases += (well_known_types.WKTBASES[descriptor.full_name],) + _AddClassAttributesForNestedExtensions(descriptor, dictionary) + _AddSlots(descriptor, dictionary) + + superclass = super(GeneratedProtocolMessageType, cls) + new_class = superclass.__new__(cls, name, bases, dictionary) + return new_class + + def __init__(cls, name, bases, dictionary): + """Here we perform the majority of our work on the class. + We add enum getters, an __init__ method, implementations + of all Message methods, and properties for all fields + in the protocol type. + + Args: + name: Name of the class (ignored, but required by the + metaclass protocol). + bases: Base classes of the class we're constructing. + (Should be message.Message). We ignore this field, but + it's required by the metaclass protocol + dictionary: The class dictionary of the class we're + constructing. dictionary[_DESCRIPTOR_KEY] must contain + a Descriptor object describing this protocol message + type. + """ + descriptor = dictionary[GeneratedProtocolMessageType._DESCRIPTOR_KEY] + + # If this is an _existing_ class looked up via `_concrete_class` in the + # __new__ method above, then we don't need to re-initialize anything. + existing_class = getattr(descriptor, '_concrete_class', None) + if existing_class: + assert existing_class is cls, ( + 'Duplicate `GeneratedProtocolMessageType` created for descriptor %r' + % (descriptor.full_name)) + return + + cls._decoders_by_tag = {} + if (descriptor.has_options and + descriptor.GetOptions().message_set_wire_format): + cls._decoders_by_tag[decoder.MESSAGE_SET_ITEM_TAG] = ( + decoder.MessageSetItemDecoder(descriptor), None) + + # Attach stuff to each FieldDescriptor for quick lookup later on. + for field in descriptor.fields: + _AttachFieldHelpers(cls, field) + + descriptor._concrete_class = cls # pylint: disable=protected-access + _AddEnumValues(descriptor, cls) + _AddInitMethod(descriptor, cls) + _AddPropertiesForFields(descriptor, cls) + _AddPropertiesForExtensions(descriptor, cls) + _AddStaticMethods(cls) + _AddMessageMethods(descriptor, cls) + _AddPrivateHelperMethods(descriptor, cls) + + superclass = super(GeneratedProtocolMessageType, cls) + superclass.__init__(name, bases, dictionary) + + +# Stateless helpers for GeneratedProtocolMessageType below. +# Outside clients should not access these directly. +# +# I opted not to make any of these methods on the metaclass, to make it more +# clear that I'm not really using any state there and to keep clients from +# thinking that they have direct access to these construction helpers. + + +def _PropertyName(proto_field_name): + """Returns the name of the public property attribute which + clients can use to get and (in some cases) set the value + of a protocol message field. + + Args: + proto_field_name: The protocol message field name, exactly + as it appears (or would appear) in a .proto file. + """ + # TODO(robinson): Escape Python keywords (e.g., yield), and test this support. 
+ # nnorwitz makes my day by writing: + # """ + # FYI. See the keyword module in the stdlib. This could be as simple as: + # + # if keyword.iskeyword(proto_field_name): + # return proto_field_name + "_" + # return proto_field_name + # """ + # Kenton says: The above is a BAD IDEA. People rely on being able to use + # getattr() and setattr() to reflectively manipulate field values. If we + # rename the properties, then every such user has to also make sure to apply + # the same transformation. Note that currently if you name a field "yield", + # you can still access it just fine using getattr/setattr -- it's not even + # that cumbersome to do so. + # TODO(kenton): Remove this method entirely if/when everyone agrees with my + # position. + return proto_field_name + + +def _AddSlots(message_descriptor, dictionary): + """Adds a __slots__ entry to dictionary, containing the names of all valid + attributes for this message type. + + Args: + message_descriptor: A Descriptor instance describing this message type. + dictionary: Class dictionary to which we'll add a '__slots__' entry. + """ + dictionary['__slots__'] = ['_cached_byte_size', + '_cached_byte_size_dirty', + '_fields', + '_unknown_fields', + '_unknown_field_set', + '_is_present_in_parent', + '_listener', + '_listener_for_children', + '__weakref__', + '_oneofs'] + + +def _IsMessageSetExtension(field): + return (field.is_extension and + field.containing_type.has_options and + field.containing_type.GetOptions().message_set_wire_format and + field.type == _FieldDescriptor.TYPE_MESSAGE and + field.label == _FieldDescriptor.LABEL_OPTIONAL) + + +def _IsMapField(field): + return (field.type == _FieldDescriptor.TYPE_MESSAGE and + field.message_type.has_options and + field.message_type.GetOptions().map_entry) + + +def _IsMessageMapField(field): + value_type = field.message_type.fields_by_name['value'] + return value_type.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE + + +def _AttachFieldHelpers(cls, field_descriptor): + is_repeated = (field_descriptor.label == _FieldDescriptor.LABEL_REPEATED) + is_packable = (is_repeated and + wire_format.IsTypePackable(field_descriptor.type)) + is_proto3 = field_descriptor.containing_type.syntax == 'proto3' + if not is_packable: + is_packed = False + elif field_descriptor.containing_type.syntax == 'proto2': + is_packed = (field_descriptor.has_options and + field_descriptor.GetOptions().packed) + else: + has_packed_false = (field_descriptor.has_options and + field_descriptor.GetOptions().HasField('packed') and + field_descriptor.GetOptions().packed == False) + is_packed = not has_packed_false + is_map_entry = _IsMapField(field_descriptor) + + if is_map_entry: + field_encoder = encoder.MapEncoder(field_descriptor) + sizer = encoder.MapSizer(field_descriptor, + _IsMessageMapField(field_descriptor)) + elif _IsMessageSetExtension(field_descriptor): + field_encoder = encoder.MessageSetItemEncoder(field_descriptor.number) + sizer = encoder.MessageSetItemSizer(field_descriptor.number) + else: + field_encoder = type_checkers.TYPE_TO_ENCODER[field_descriptor.type]( + field_descriptor.number, is_repeated, is_packed) + sizer = type_checkers.TYPE_TO_SIZER[field_descriptor.type]( + field_descriptor.number, is_repeated, is_packed) + + field_descriptor._encoder = field_encoder + field_descriptor._sizer = sizer + field_descriptor._default_constructor = _DefaultValueConstructorForField( + field_descriptor) + + def AddDecoder(wiretype, is_packed): + tag_bytes = encoder.TagBytes(field_descriptor.number, wiretype) + decode_type = 
field_descriptor.type + if (decode_type == _FieldDescriptor.TYPE_ENUM and + type_checkers.SupportsOpenEnums(field_descriptor)): + decode_type = _FieldDescriptor.TYPE_INT32 + + oneof_descriptor = None + clear_if_default = False + if field_descriptor.containing_oneof is not None: + oneof_descriptor = field_descriptor + elif (is_proto3 and not is_repeated and + field_descriptor.cpp_type != _FieldDescriptor.CPPTYPE_MESSAGE): + clear_if_default = True + + if is_map_entry: + is_message_map = _IsMessageMapField(field_descriptor) + + field_decoder = decoder.MapDecoder( + field_descriptor, _GetInitializeDefaultForMap(field_descriptor), + is_message_map) + elif decode_type == _FieldDescriptor.TYPE_STRING: + field_decoder = decoder.StringDecoder( + field_descriptor.number, is_repeated, is_packed, + field_descriptor, field_descriptor._default_constructor, + clear_if_default) + elif field_descriptor.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE: + field_decoder = type_checkers.TYPE_TO_DECODER[decode_type]( + field_descriptor.number, is_repeated, is_packed, + field_descriptor, field_descriptor._default_constructor) + else: + field_decoder = type_checkers.TYPE_TO_DECODER[decode_type]( + field_descriptor.number, is_repeated, is_packed, + # pylint: disable=protected-access + field_descriptor, field_descriptor._default_constructor, + clear_if_default) + + cls._decoders_by_tag[tag_bytes] = (field_decoder, oneof_descriptor) + + AddDecoder(type_checkers.FIELD_TYPE_TO_WIRE_TYPE[field_descriptor.type], + False) + + if is_repeated and wire_format.IsTypePackable(field_descriptor.type): + # To support wire compatibility of adding packed = true, add a decoder for + # packed values regardless of the field's options. + AddDecoder(wire_format.WIRETYPE_LENGTH_DELIMITED, True) + + +def _AddClassAttributesForNestedExtensions(descriptor, dictionary): + extensions = descriptor.extensions_by_name + for extension_name, extension_field in extensions.items(): + assert extension_name not in dictionary + dictionary[extension_name] = extension_field + + +def _AddEnumValues(descriptor, cls): + """Sets class-level attributes for all enum fields defined in this message. + + Also exporting a class-level object that can name enum values. + + Args: + descriptor: Descriptor object for this message type. + cls: Class we're constructing for this message type. + """ + for enum_type in descriptor.enum_types: + setattr(cls, enum_type.name, enum_type_wrapper.EnumTypeWrapper(enum_type)) + for enum_value in enum_type.values: + setattr(cls, enum_value.name, enum_value.number) + + +def _GetInitializeDefaultForMap(field): + if field.label != _FieldDescriptor.LABEL_REPEATED: + raise ValueError('map_entry set on non-repeated field %s' % ( + field.name)) + fields_by_name = field.message_type.fields_by_name + key_checker = type_checkers.GetTypeChecker(fields_by_name['key']) + + value_field = fields_by_name['value'] + if _IsMessageMapField(field): + def MakeMessageMapDefault(message): + return containers.MessageMap( + message._listener_for_children, value_field.message_type, key_checker, + field.message_type) + return MakeMessageMapDefault + else: + value_checker = type_checkers.GetTypeChecker(value_field) + def MakePrimitiveMapDefault(message): + return containers.ScalarMap( + message._listener_for_children, key_checker, value_checker, + field.message_type) + return MakePrimitiveMapDefault + +def _DefaultValueConstructorForField(field): + """Returns a function which returns a default value for a field. 
+ + Args: + field: FieldDescriptor object for this field. + + The returned function has one argument: + message: Message instance containing this field, or a weakref proxy + of same. + + That function in turn returns a default value for this field. The default + value may refer back to |message| via a weak reference. + """ + + if _IsMapField(field): + return _GetInitializeDefaultForMap(field) + + if field.label == _FieldDescriptor.LABEL_REPEATED: + if field.has_default_value and field.default_value != []: + raise ValueError('Repeated field default value not empty list: %s' % ( + field.default_value)) + if field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE: + # We can't look at _concrete_class yet since it might not have + # been set. (Depends on order in which we initialize the classes). + message_type = field.message_type + def MakeRepeatedMessageDefault(message): + return containers.RepeatedCompositeFieldContainer( + message._listener_for_children, field.message_type) + return MakeRepeatedMessageDefault + else: + type_checker = type_checkers.GetTypeChecker(field) + def MakeRepeatedScalarDefault(message): + return containers.RepeatedScalarFieldContainer( + message._listener_for_children, type_checker) + return MakeRepeatedScalarDefault + + if field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE: + # _concrete_class may not yet be initialized. + message_type = field.message_type + def MakeSubMessageDefault(message): + assert getattr(message_type, '_concrete_class', None), ( + 'Uninitialized concrete class found for field %r (message type %r)' + % (field.full_name, message_type.full_name)) + result = message_type._concrete_class() + result._SetListener( + _OneofListener(message, field) + if field.containing_oneof is not None + else message._listener_for_children) + return result + return MakeSubMessageDefault + + def MakeScalarDefault(message): + # TODO(protobuf-team): This may be broken since there may not be + # default_value. Combine with has_default_value somehow. + return field.default_value + return MakeScalarDefault + + +def _ReraiseTypeErrorWithFieldName(message_name, field_name): + """Re-raise the currently-handled TypeError with the field name added.""" + exc = sys.exc_info()[1] + if len(exc.args) == 1 and type(exc) is TypeError: + # simple TypeError; add field name to exception message + exc = TypeError('%s for field %s.%s' % (str(exc), message_name, field_name)) + + # re-raise possibly-amended exception with original traceback: + raise exc.with_traceback(sys.exc_info()[2]) + + +def _AddInitMethod(message_descriptor, cls): + """Adds an __init__ method to cls.""" + + def _GetIntegerEnumValue(enum_type, value): + """Convert a string or integer enum value to an integer. + + If the value is a string, it is converted to the enum value in + enum_type with the same name. If the value is not a string, it's + returned as-is. (No conversion or bounds-checking is done.) + """ + if isinstance(value, str): + try: + return enum_type.values_by_name[value].number + except KeyError: + raise ValueError('Enum type %s: unknown label "%s"' % ( + enum_type.full_name, value)) + return value + + def init(self, **kwargs): + self._cached_byte_size = 0 + self._cached_byte_size_dirty = len(kwargs) > 0 + self._fields = {} + # Contains a mapping from oneof field descriptors to the descriptor + # of the currently set field in that oneof field. + self._oneofs = {} + + # _unknown_fields is () when empty for efficiency, and will be turned into + # a list if fields are added. 
+ self._unknown_fields = () + # _unknown_field_set is None when empty for efficiency, and will be + # turned into UnknownFieldSet struct if fields are added. + self._unknown_field_set = None # pylint: disable=protected-access + self._is_present_in_parent = False + self._listener = message_listener_mod.NullMessageListener() + self._listener_for_children = _Listener(self) + for field_name, field_value in kwargs.items(): + field = _GetFieldByName(message_descriptor, field_name) + if field is None: + raise TypeError('%s() got an unexpected keyword argument "%s"' % + (message_descriptor.name, field_name)) + if field_value is None: + # field=None is the same as no field at all. + continue + if field.label == _FieldDescriptor.LABEL_REPEATED: + copy = field._default_constructor(self) + if field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE: # Composite + if _IsMapField(field): + if _IsMessageMapField(field): + for key in field_value: + copy[key].MergeFrom(field_value[key]) + else: + copy.update(field_value) + else: + for val in field_value: + if isinstance(val, dict): + copy.add(**val) + else: + copy.add().MergeFrom(val) + else: # Scalar + if field.cpp_type == _FieldDescriptor.CPPTYPE_ENUM: + field_value = [_GetIntegerEnumValue(field.enum_type, val) + for val in field_value] + copy.extend(field_value) + self._fields[field] = copy + elif field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE: + copy = field._default_constructor(self) + new_val = field_value + if isinstance(field_value, dict): + new_val = field.message_type._concrete_class(**field_value) + try: + copy.MergeFrom(new_val) + except TypeError: + _ReraiseTypeErrorWithFieldName(message_descriptor.name, field_name) + self._fields[field] = copy + else: + if field.cpp_type == _FieldDescriptor.CPPTYPE_ENUM: + field_value = _GetIntegerEnumValue(field.enum_type, field_value) + try: + setattr(self, field_name, field_value) + except TypeError: + _ReraiseTypeErrorWithFieldName(message_descriptor.name, field_name) + + init.__module__ = None + init.__doc__ = None + cls.__init__ = init + + +def _GetFieldByName(message_descriptor, field_name): + """Returns a field descriptor by field name. + + Args: + message_descriptor: A Descriptor describing all fields in message. + field_name: The name of the field to retrieve. + Returns: + The field descriptor associated with the field name. + """ + try: + return message_descriptor.fields_by_name[field_name] + except KeyError: + raise ValueError('Protocol message %s has no "%s" field.' % + (message_descriptor.name, field_name)) + + +def _AddPropertiesForFields(descriptor, cls): + """Adds properties for all fields in this protocol message type.""" + for field in descriptor.fields: + _AddPropertiesForField(field, cls) + + if descriptor.is_extendable: + # _ExtensionDict is just an adaptor with no state so we allocate a new one + # every time it is accessed. + cls.Extensions = property(lambda self: _ExtensionDict(self)) + + +def _AddPropertiesForField(field, cls): + """Adds a public property for a protocol message field. + Clients can use this property to get and (in the case + of non-repeated scalar fields) directly set the value + of a protocol message field. + + Args: + field: A FieldDescriptor for this field. + cls: The class we're constructing. + """ + # Catch it if we add other types that we should + # handle specially here. 
+ assert _FieldDescriptor.MAX_CPPTYPE == 10 + + constant_name = field.name.upper() + '_FIELD_NUMBER' + setattr(cls, constant_name, field.number) + + if field.label == _FieldDescriptor.LABEL_REPEATED: + _AddPropertiesForRepeatedField(field, cls) + elif field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE: + _AddPropertiesForNonRepeatedCompositeField(field, cls) + else: + _AddPropertiesForNonRepeatedScalarField(field, cls) + + +class _FieldProperty(property): + __slots__ = ('DESCRIPTOR',) + + def __init__(self, descriptor, getter, setter, doc): + property.__init__(self, getter, setter, doc=doc) + self.DESCRIPTOR = descriptor + + +def _AddPropertiesForRepeatedField(field, cls): + """Adds a public property for a "repeated" protocol message field. Clients + can use this property to get the value of the field, which will be either a + RepeatedScalarFieldContainer or RepeatedCompositeFieldContainer (see + below). + + Note that when clients add values to these containers, we perform + type-checking in the case of repeated scalar fields, and we also set any + necessary "has" bits as a side-effect. + + Args: + field: A FieldDescriptor for this field. + cls: The class we're constructing. + """ + proto_field_name = field.name + property_name = _PropertyName(proto_field_name) + + def getter(self): + field_value = self._fields.get(field) + if field_value is None: + # Construct a new object to represent this field. + field_value = field._default_constructor(self) + + # Atomically check if another thread has preempted us and, if not, swap + # in the new object we just created. If someone has preempted us, we + # take that object and discard ours. + # WARNING: We are relying on setdefault() being atomic. This is true + # in CPython but we haven't investigated others. This warning appears + # in several other locations in this file. + field_value = self._fields.setdefault(field, field_value) + return field_value + getter.__module__ = None + getter.__doc__ = 'Getter for %s.' % proto_field_name + + # We define a setter just so we can throw an exception with a more + # helpful error message. + def setter(self, new_value): + raise AttributeError('Assignment not allowed to repeated field ' + '"%s" in protocol message object.' % proto_field_name) + + doc = 'Magic attribute generated for "%s" proto field.' % proto_field_name + setattr(cls, property_name, _FieldProperty(field, getter, setter, doc=doc)) + + +def _AddPropertiesForNonRepeatedScalarField(field, cls): + """Adds a public property for a nonrepeated, scalar protocol message field. + Clients can use this property to get and directly set the value of the field. + Note that when the client sets the value of a field by using this property, + all necessary "has" bits are set as a side-effect, and we also perform + type-checking. + + Args: + field: A FieldDescriptor for this field. + cls: The class we're constructing. + """ + proto_field_name = field.name + property_name = _PropertyName(proto_field_name) + type_checker = type_checkers.GetTypeChecker(field) + default_value = field.default_value + is_proto3 = field.containing_type.syntax == 'proto3' + + def getter(self): + # TODO(protobuf-team): This may be broken since there may not be + # default_value. Combine with has_default_value somehow. + return self._fields.get(field, default_value) + getter.__module__ = None + getter.__doc__ = 'Getter for %s.' 
% proto_field_name + + clear_when_set_to_default = is_proto3 and not field.containing_oneof + + def field_setter(self, new_value): + # pylint: disable=protected-access + # Testing the value for truthiness captures all of the proto3 defaults + # (0, 0.0, enum 0, and False). + try: + new_value = type_checker.CheckValue(new_value) + except TypeError as e: + raise TypeError( + 'Cannot set %s to %.1024r: %s' % (field.full_name, new_value, e)) + if clear_when_set_to_default and not new_value: + self._fields.pop(field, None) + else: + self._fields[field] = new_value + # Check _cached_byte_size_dirty inline to improve performance, since scalar + # setters are called frequently. + if not self._cached_byte_size_dirty: + self._Modified() + + if field.containing_oneof: + def setter(self, new_value): + field_setter(self, new_value) + self._UpdateOneofState(field) + else: + setter = field_setter + + setter.__module__ = None + setter.__doc__ = 'Setter for %s.' % proto_field_name + + # Add a property to encapsulate the getter/setter. + doc = 'Magic attribute generated for "%s" proto field.' % proto_field_name + setattr(cls, property_name, _FieldProperty(field, getter, setter, doc=doc)) + + +def _AddPropertiesForNonRepeatedCompositeField(field, cls): + """Adds a public property for a nonrepeated, composite protocol message field. + A composite field is a "group" or "message" field. + + Clients can use this property to get the value of the field, but cannot + assign to the property directly. + + Args: + field: A FieldDescriptor for this field. + cls: The class we're constructing. + """ + # TODO(robinson): Remove duplication with similar method + # for non-repeated scalars. + proto_field_name = field.name + property_name = _PropertyName(proto_field_name) + + def getter(self): + field_value = self._fields.get(field) + if field_value is None: + # Construct a new object to represent this field. + field_value = field._default_constructor(self) + + # Atomically check if another thread has preempted us and, if not, swap + # in the new object we just created. If someone has preempted us, we + # take that object and discard ours. + # WARNING: We are relying on setdefault() being atomic. This is true + # in CPython but we haven't investigated others. This warning appears + # in several other locations in this file. + field_value = self._fields.setdefault(field, field_value) + return field_value + getter.__module__ = None + getter.__doc__ = 'Getter for %s.' % proto_field_name + + # We define a setter just so we can throw an exception with a more + # helpful error message. + def setter(self, new_value): + raise AttributeError('Assignment not allowed to composite field ' + '"%s" in protocol message object.' % proto_field_name) + + # Add a property to encapsulate the getter. + doc = 'Magic attribute generated for "%s" proto field.' % proto_field_name + setattr(cls, property_name, _FieldProperty(field, getter, setter, doc=doc)) + + +def _AddPropertiesForExtensions(descriptor, cls): + """Adds properties for all fields in this protocol message type.""" + extensions = descriptor.extensions_by_name + for extension_name, extension_field in extensions.items(): + constant_name = extension_name.upper() + '_FIELD_NUMBER' + setattr(cls, constant_name, extension_field.number) + + # TODO(amauryfa): Migrate all users of these attributes to functions like + # pool.FindExtensionByNumber(descriptor). + if descriptor.file is not None: + # TODO(amauryfa): Use cls.MESSAGE_FACTORY.pool when available. 
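+    # Cache the pool's per-message extension registries as class attributes
+    # so this message's extensions can be reached by number or name directly
+    # on the generated class.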
+ pool = descriptor.file.pool + cls._extensions_by_number = pool._extensions_by_number[descriptor] + cls._extensions_by_name = pool._extensions_by_name[descriptor] + +def _AddStaticMethods(cls): + # TODO(robinson): This probably needs to be thread-safe(?) + def RegisterExtension(extension_handle): + extension_handle.containing_type = cls.DESCRIPTOR + # TODO(amauryfa): Use cls.MESSAGE_FACTORY.pool when available. + # pylint: disable=protected-access + cls.DESCRIPTOR.file.pool._AddExtensionDescriptor(extension_handle) + _AttachFieldHelpers(cls, extension_handle) + cls.RegisterExtension = staticmethod(RegisterExtension) + + def FromString(s): + message = cls() + message.MergeFromString(s) + return message + cls.FromString = staticmethod(FromString) + + +def _IsPresent(item): + """Given a (FieldDescriptor, value) tuple from _fields, return true if the + value should be included in the list returned by ListFields().""" + + if item[0].label == _FieldDescriptor.LABEL_REPEATED: + return bool(item[1]) + elif item[0].cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE: + return item[1]._is_present_in_parent + else: + return True + + +def _AddListFieldsMethod(message_descriptor, cls): + """Helper for _AddMessageMethods().""" + + def ListFields(self): + all_fields = [item for item in self._fields.items() if _IsPresent(item)] + all_fields.sort(key = lambda item: item[0].number) + return all_fields + + cls.ListFields = ListFields + +_PROTO3_ERROR_TEMPLATE = \ + ('Protocol message %s has no non-repeated submessage field "%s" ' + 'nor marked as optional') +_PROTO2_ERROR_TEMPLATE = 'Protocol message %s has no non-repeated field "%s"' + +def _AddHasFieldMethod(message_descriptor, cls): + """Helper for _AddMessageMethods().""" + + is_proto3 = (message_descriptor.syntax == "proto3") + error_msg = _PROTO3_ERROR_TEMPLATE if is_proto3 else _PROTO2_ERROR_TEMPLATE + + hassable_fields = {} + for field in message_descriptor.fields: + if field.label == _FieldDescriptor.LABEL_REPEATED: + continue + # For proto3, only submessages and fields inside a oneof have presence. + if (is_proto3 and field.cpp_type != _FieldDescriptor.CPPTYPE_MESSAGE and + not field.containing_oneof): + continue + hassable_fields[field.name] = field + + # Has methods are supported for oneof descriptors. + for oneof in message_descriptor.oneofs: + hassable_fields[oneof.name] = oneof + + def HasField(self, field_name): + try: + field = hassable_fields[field_name] + except KeyError: + raise ValueError(error_msg % (message_descriptor.full_name, field_name)) + + if isinstance(field, descriptor_mod.OneofDescriptor): + try: + return HasField(self, self._oneofs[field].name) + except KeyError: + return False + else: + if field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE: + value = self._fields.get(field) + return value is not None and value._is_present_in_parent + else: + return field in self._fields + + cls.HasField = HasField + + +def _AddClearFieldMethod(message_descriptor, cls): + """Helper for _AddMessageMethods().""" + def ClearField(self, field_name): + try: + field = message_descriptor.fields_by_name[field_name] + except KeyError: + try: + field = message_descriptor.oneofs_by_name[field_name] + if field in self._oneofs: + field = self._oneofs[field] + else: + return + except KeyError: + raise ValueError('Protocol message %s has no "%s" field.' % + (message_descriptor.name, field_name)) + + if field in self._fields: + # To match the C++ implementation, we need to invalidate iterators + # for map fields when ClearField() happens. 
+ if hasattr(self._fields[field], 'InvalidateIterators'): + self._fields[field].InvalidateIterators() + + # Note: If the field is a sub-message, its listener will still point + # at us. That's fine, because the worst than can happen is that it + # will call _Modified() and invalidate our byte size. Big deal. + del self._fields[field] + + if self._oneofs.get(field.containing_oneof, None) is field: + del self._oneofs[field.containing_oneof] + + # Always call _Modified() -- even if nothing was changed, this is + # a mutating method, and thus calling it should cause the field to become + # present in the parent message. + self._Modified() + + cls.ClearField = ClearField + + +def _AddClearExtensionMethod(cls): + """Helper for _AddMessageMethods().""" + def ClearExtension(self, extension_handle): + extension_dict._VerifyExtensionHandle(self, extension_handle) + + # Similar to ClearField(), above. + if extension_handle in self._fields: + del self._fields[extension_handle] + self._Modified() + cls.ClearExtension = ClearExtension + + +def _AddHasExtensionMethod(cls): + """Helper for _AddMessageMethods().""" + def HasExtension(self, extension_handle): + extension_dict._VerifyExtensionHandle(self, extension_handle) + if extension_handle.label == _FieldDescriptor.LABEL_REPEATED: + raise KeyError('"%s" is repeated.' % extension_handle.full_name) + + if extension_handle.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE: + value = self._fields.get(extension_handle) + return value is not None and value._is_present_in_parent + else: + return extension_handle in self._fields + cls.HasExtension = HasExtension + +def _InternalUnpackAny(msg): + """Unpacks Any message and returns the unpacked message. + + This internal method is different from public Any Unpack method which takes + the target message as argument. _InternalUnpackAny method does not have + target message type and need to find the message type in descriptor pool. + + Args: + msg: An Any message to be unpacked. + + Returns: + The unpacked message. + """ + # TODO(amauryfa): Don't use the factory of generated messages. + # To make Any work with custom factories, use the message factory of the + # parent message. + # pylint: disable=g-import-not-at-top + from google.protobuf import symbol_database + factory = symbol_database.Default() + + type_url = msg.type_url + + if not type_url: + return None + + # TODO(haberman): For now we just strip the hostname. Better logic will be + # required. + type_name = type_url.split('/')[-1] + descriptor = factory.pool.FindMessageTypeByName(type_name) + + if descriptor is None: + return None + + message_class = factory.GetPrototype(descriptor) + message = message_class() + + message.ParseFromString(msg.value) + return message + + +def _AddEqualsMethod(message_descriptor, cls): + """Helper for _AddMessageMethods().""" + def __eq__(self, other): + if (not isinstance(other, message_mod.Message) or + other.DESCRIPTOR != self.DESCRIPTOR): + return False + + if self is other: + return True + + if self.DESCRIPTOR.full_name == _AnyFullTypeName: + any_a = _InternalUnpackAny(self) + any_b = _InternalUnpackAny(other) + if any_a and any_b: + return any_a == any_b + + if not self.ListFields() == other.ListFields(): + return False + + # TODO(jieluo): Fix UnknownFieldSet to consider MessageSet extensions, + # then use it for the comparison. 
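+    # The raw unknown fields are (tag_bytes, value_bytes) pairs; both lists
+    # are sorted before comparing so wire ordering does not affect equality.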
+ unknown_fields = list(self._unknown_fields) + unknown_fields.sort() + other_unknown_fields = list(other._unknown_fields) + other_unknown_fields.sort() + return unknown_fields == other_unknown_fields + + cls.__eq__ = __eq__ + + +def _AddStrMethod(message_descriptor, cls): + """Helper for _AddMessageMethods().""" + def __str__(self): + return text_format.MessageToString(self) + cls.__str__ = __str__ + + +def _AddReprMethod(message_descriptor, cls): + """Helper for _AddMessageMethods().""" + def __repr__(self): + return text_format.MessageToString(self) + cls.__repr__ = __repr__ + + +def _AddUnicodeMethod(unused_message_descriptor, cls): + """Helper for _AddMessageMethods().""" + + def __unicode__(self): + return text_format.MessageToString(self, as_utf8=True).decode('utf-8') + cls.__unicode__ = __unicode__ + + +def _BytesForNonRepeatedElement(value, field_number, field_type): + """Returns the number of bytes needed to serialize a non-repeated element. + The returned byte count includes space for tag information and any + other additional space associated with serializing value. + + Args: + value: Value we're serializing. + field_number: Field number of this value. (Since the field number + is stored as part of a varint-encoded tag, this has an impact + on the total bytes required to serialize the value). + field_type: The type of the field. One of the TYPE_* constants + within FieldDescriptor. + """ + try: + fn = type_checkers.TYPE_TO_BYTE_SIZE_FN[field_type] + return fn(field_number, value) + except KeyError: + raise message_mod.EncodeError('Unrecognized field type: %d' % field_type) + + +def _AddByteSizeMethod(message_descriptor, cls): + """Helper for _AddMessageMethods().""" + + def ByteSize(self): + if not self._cached_byte_size_dirty: + return self._cached_byte_size + + size = 0 + descriptor = self.DESCRIPTOR + if descriptor.GetOptions().map_entry: + # Fields of map entry should always be serialized. + size = descriptor.fields_by_name['key']._sizer(self.key) + size += descriptor.fields_by_name['value']._sizer(self.value) + else: + for field_descriptor, field_value in self.ListFields(): + size += field_descriptor._sizer(field_value) + for tag_bytes, value_bytes in self._unknown_fields: + size += len(tag_bytes) + len(value_bytes) + + self._cached_byte_size = size + self._cached_byte_size_dirty = False + self._listener_for_children.dirty = False + return size + + cls.ByteSize = ByteSize + + +def _AddSerializeToStringMethod(message_descriptor, cls): + """Helper for _AddMessageMethods().""" + + def SerializeToString(self, **kwargs): + # Check if the message has all of its required fields set. 
+ if not self.IsInitialized(): + raise message_mod.EncodeError( + 'Message %s is missing required fields: %s' % ( + self.DESCRIPTOR.full_name, ','.join(self.FindInitializationErrors()))) + return self.SerializePartialToString(**kwargs) + cls.SerializeToString = SerializeToString + + +def _AddSerializePartialToStringMethod(message_descriptor, cls): + """Helper for _AddMessageMethods().""" + + def SerializePartialToString(self, **kwargs): + out = BytesIO() + self._InternalSerialize(out.write, **kwargs) + return out.getvalue() + cls.SerializePartialToString = SerializePartialToString + + def InternalSerialize(self, write_bytes, deterministic=None): + if deterministic is None: + deterministic = ( + api_implementation.IsPythonDefaultSerializationDeterministic()) + else: + deterministic = bool(deterministic) + + descriptor = self.DESCRIPTOR + if descriptor.GetOptions().map_entry: + # Fields of map entry should always be serialized. + descriptor.fields_by_name['key']._encoder( + write_bytes, self.key, deterministic) + descriptor.fields_by_name['value']._encoder( + write_bytes, self.value, deterministic) + else: + for field_descriptor, field_value in self.ListFields(): + field_descriptor._encoder(write_bytes, field_value, deterministic) + for tag_bytes, value_bytes in self._unknown_fields: + write_bytes(tag_bytes) + write_bytes(value_bytes) + cls._InternalSerialize = InternalSerialize + + +def _AddMergeFromStringMethod(message_descriptor, cls): + """Helper for _AddMessageMethods().""" + def MergeFromString(self, serialized): + serialized = memoryview(serialized) + length = len(serialized) + try: + if self._InternalParse(serialized, 0, length) != length: + # The only reason _InternalParse would return early is if it + # encountered an end-group tag. + raise message_mod.DecodeError('Unexpected end-group tag.') + except (IndexError, TypeError): + # Now ord(buf[p:p+1]) == ord('') gets TypeError. + raise message_mod.DecodeError('Truncated message.') + except struct.error as e: + raise message_mod.DecodeError(e) + return length # Return this for legacy reasons. + cls.MergeFromString = MergeFromString + + local_ReadTag = decoder.ReadTag + local_SkipField = decoder.SkipField + decoders_by_tag = cls._decoders_by_tag + + def InternalParse(self, buffer, pos, end): + """Create a message from serialized bytes. + + Args: + self: Message, instance of the proto message object. + buffer: memoryview of the serialized data. + pos: int, position to start in the serialized data. + end: int, end position of the serialized data. + + Returns: + Message object. + """ + # Guard against internal misuse, since this function is called internally + # quite extensively, and its easy to accidentally pass bytes. 
+ assert isinstance(buffer, memoryview) + self._Modified() + field_dict = self._fields + # pylint: disable=protected-access + unknown_field_set = self._unknown_field_set + while pos != end: + (tag_bytes, new_pos) = local_ReadTag(buffer, pos) + field_decoder, field_desc = decoders_by_tag.get(tag_bytes, (None, None)) + if field_decoder is None: + if not self._unknown_fields: # pylint: disable=protected-access + self._unknown_fields = [] # pylint: disable=protected-access + if unknown_field_set is None: + # pylint: disable=protected-access + self._unknown_field_set = containers.UnknownFieldSet() + # pylint: disable=protected-access + unknown_field_set = self._unknown_field_set + # pylint: disable=protected-access + (tag, _) = decoder._DecodeVarint(tag_bytes, 0) + field_number, wire_type = wire_format.UnpackTag(tag) + if field_number == 0: + raise message_mod.DecodeError('Field number 0 is illegal.') + # TODO(jieluo): remove old_pos. + old_pos = new_pos + (data, new_pos) = decoder._DecodeUnknownField( + buffer, new_pos, wire_type) # pylint: disable=protected-access + if new_pos == -1: + return pos + # pylint: disable=protected-access + unknown_field_set._add(field_number, wire_type, data) + # TODO(jieluo): remove _unknown_fields. + new_pos = local_SkipField(buffer, old_pos, end, tag_bytes) + if new_pos == -1: + return pos + self._unknown_fields.append( + (tag_bytes, buffer[old_pos:new_pos].tobytes())) + pos = new_pos + else: + pos = field_decoder(buffer, new_pos, end, self, field_dict) + if field_desc: + self._UpdateOneofState(field_desc) + return pos + cls._InternalParse = InternalParse + + +def _AddIsInitializedMethod(message_descriptor, cls): + """Adds the IsInitialized and FindInitializationError methods to the + protocol message class.""" + + required_fields = [field for field in message_descriptor.fields + if field.label == _FieldDescriptor.LABEL_REQUIRED] + + def IsInitialized(self, errors=None): + """Checks if all required fields of a message are set. + + Args: + errors: A list which, if provided, will be populated with the field + paths of all missing required fields. + + Returns: + True iff the specified message has all required fields set. + """ + + # Performance is critical so we avoid HasField() and ListFields(). + + for field in required_fields: + if (field not in self._fields or + (field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE and + not self._fields[field]._is_present_in_parent)): + if errors is not None: + errors.extend(self.FindInitializationErrors()) + return False + + for field, value in list(self._fields.items()): # dict can change size! + if field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE: + if field.label == _FieldDescriptor.LABEL_REPEATED: + if (field.message_type.has_options and + field.message_type.GetOptions().map_entry): + continue + for element in value: + if not element.IsInitialized(): + if errors is not None: + errors.extend(self.FindInitializationErrors()) + return False + elif value._is_present_in_parent and not value.IsInitialized(): + if errors is not None: + errors.extend(self.FindInitializationErrors()) + return False + + return True + + cls.IsInitialized = IsInitialized + + def FindInitializationErrors(self): + """Finds required fields which are not initialized. + + Returns: + A list of strings. Each string is a path to an uninitialized field from + the top-level message, e.g. "foo.bar[5].baz". 
+ """ + + errors = [] # simplify things + + for field in required_fields: + if not self.HasField(field.name): + errors.append(field.name) + + for field, value in self.ListFields(): + if field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE: + if field.is_extension: + name = '(%s)' % field.full_name + else: + name = field.name + + if _IsMapField(field): + if _IsMessageMapField(field): + for key in value: + element = value[key] + prefix = '%s[%s].' % (name, key) + sub_errors = element.FindInitializationErrors() + errors += [prefix + error for error in sub_errors] + else: + # ScalarMaps can't have any initialization errors. + pass + elif field.label == _FieldDescriptor.LABEL_REPEATED: + for i in range(len(value)): + element = value[i] + prefix = '%s[%d].' % (name, i) + sub_errors = element.FindInitializationErrors() + errors += [prefix + error for error in sub_errors] + else: + prefix = name + '.' + sub_errors = value.FindInitializationErrors() + errors += [prefix + error for error in sub_errors] + + return errors + + cls.FindInitializationErrors = FindInitializationErrors + + +def _FullyQualifiedClassName(klass): + module = klass.__module__ + name = getattr(klass, '__qualname__', klass.__name__) + if module in (None, 'builtins', '__builtin__'): + return name + return module + '.' + name + + +def _AddMergeFromMethod(cls): + LABEL_REPEATED = _FieldDescriptor.LABEL_REPEATED + CPPTYPE_MESSAGE = _FieldDescriptor.CPPTYPE_MESSAGE + + def MergeFrom(self, msg): + if not isinstance(msg, cls): + raise TypeError( + 'Parameter to MergeFrom() must be instance of same class: ' + 'expected %s got %s.' % (_FullyQualifiedClassName(cls), + _FullyQualifiedClassName(msg.__class__))) + + assert msg is not self + self._Modified() + + fields = self._fields + + for field, value in msg._fields.items(): + if field.label == LABEL_REPEATED: + field_value = fields.get(field) + if field_value is None: + # Construct a new object to represent this field. + field_value = field._default_constructor(self) + fields[field] = field_value + field_value.MergeFrom(value) + elif field.cpp_type == CPPTYPE_MESSAGE: + if value._is_present_in_parent: + field_value = fields.get(field) + if field_value is None: + # Construct a new object to represent this field. + field_value = field._default_constructor(self) + fields[field] = field_value + field_value.MergeFrom(value) + else: + self._fields[field] = value + if field.containing_oneof: + self._UpdateOneofState(field) + + if msg._unknown_fields: + if not self._unknown_fields: + self._unknown_fields = [] + self._unknown_fields.extend(msg._unknown_fields) + # pylint: disable=protected-access + if self._unknown_field_set is None: + self._unknown_field_set = containers.UnknownFieldSet() + self._unknown_field_set._extend(msg._unknown_field_set) + + cls.MergeFrom = MergeFrom + + +def _AddWhichOneofMethod(message_descriptor, cls): + def WhichOneof(self, oneof_name): + """Returns the name of the currently set field inside a oneof, or None.""" + try: + field = message_descriptor.oneofs_by_name[oneof_name] + except KeyError: + raise ValueError( + 'Protocol message has no oneof "%s" field.' % oneof_name) + + nested_field = self._oneofs.get(field, None) + if nested_field is not None and self.HasField(nested_field.name): + return nested_field.name + else: + return None + + cls.WhichOneof = WhichOneof + + +def _Clear(self): + # Clear fields. 
+ self._fields = {} + self._unknown_fields = () + # pylint: disable=protected-access + if self._unknown_field_set is not None: + self._unknown_field_set._clear() + self._unknown_field_set = None + + self._oneofs = {} + self._Modified() + + +def _UnknownFields(self): + if self._unknown_field_set is None: # pylint: disable=protected-access + # pylint: disable=protected-access + self._unknown_field_set = containers.UnknownFieldSet() + return self._unknown_field_set # pylint: disable=protected-access + + +def _DiscardUnknownFields(self): + self._unknown_fields = [] + self._unknown_field_set = None # pylint: disable=protected-access + for field, value in self.ListFields(): + if field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE: + if _IsMapField(field): + if _IsMessageMapField(field): + for key in value: + value[key].DiscardUnknownFields() + elif field.label == _FieldDescriptor.LABEL_REPEATED: + for sub_message in value: + sub_message.DiscardUnknownFields() + else: + value.DiscardUnknownFields() + + +def _SetListener(self, listener): + if listener is None: + self._listener = message_listener_mod.NullMessageListener() + else: + self._listener = listener + + +def _AddMessageMethods(message_descriptor, cls): + """Adds implementations of all Message methods to cls.""" + _AddListFieldsMethod(message_descriptor, cls) + _AddHasFieldMethod(message_descriptor, cls) + _AddClearFieldMethod(message_descriptor, cls) + if message_descriptor.is_extendable: + _AddClearExtensionMethod(cls) + _AddHasExtensionMethod(cls) + _AddEqualsMethod(message_descriptor, cls) + _AddStrMethod(message_descriptor, cls) + _AddReprMethod(message_descriptor, cls) + _AddUnicodeMethod(message_descriptor, cls) + _AddByteSizeMethod(message_descriptor, cls) + _AddSerializeToStringMethod(message_descriptor, cls) + _AddSerializePartialToStringMethod(message_descriptor, cls) + _AddMergeFromStringMethod(message_descriptor, cls) + _AddIsInitializedMethod(message_descriptor, cls) + _AddMergeFromMethod(cls) + _AddWhichOneofMethod(message_descriptor, cls) + # Adds methods which do not depend on cls. + cls.Clear = _Clear + cls.UnknownFields = _UnknownFields + cls.DiscardUnknownFields = _DiscardUnknownFields + cls._SetListener = _SetListener + + +def _AddPrivateHelperMethods(message_descriptor, cls): + """Adds implementation of private helper methods to cls.""" + + def Modified(self): + """Sets the _cached_byte_size_dirty bit to true, + and propagates this to our listener iff this was a state change. + """ + + # Note: Some callers check _cached_byte_size_dirty before calling + # _Modified() as an extra optimization. So, if this method is ever + # changed such that it does stuff even when _cached_byte_size_dirty is + # already true, the callers need to be updated. + if not self._cached_byte_size_dirty: + self._cached_byte_size_dirty = True + self._listener_for_children.dirty = True + self._is_present_in_parent = True + self._listener.Modified() + + def _UpdateOneofState(self, field): + """Sets field as the active field in its containing oneof. + + Will also delete currently active field in the oneof, if it is different + from the argument. Does not mark the message as modified. 
+ """ + other_field = self._oneofs.setdefault(field.containing_oneof, field) + if other_field is not field: + del self._fields[other_field] + self._oneofs[field.containing_oneof] = field + + cls._Modified = Modified + cls.SetInParent = Modified + cls._UpdateOneofState = _UpdateOneofState + + +class _Listener(object): + + """MessageListener implementation that a parent message registers with its + child message. + + In order to support semantics like: + + foo.bar.baz.qux = 23 + assert foo.HasField('bar') + + ...child objects must have back references to their parents. + This helper class is at the heart of this support. + """ + + def __init__(self, parent_message): + """Args: + parent_message: The message whose _Modified() method we should call when + we receive Modified() messages. + """ + # This listener establishes a back reference from a child (contained) object + # to its parent (containing) object. We make this a weak reference to avoid + # creating cyclic garbage when the client finishes with the 'parent' object + # in the tree. + if isinstance(parent_message, weakref.ProxyType): + self._parent_message_weakref = parent_message + else: + self._parent_message_weakref = weakref.proxy(parent_message) + + # As an optimization, we also indicate directly on the listener whether + # or not the parent message is dirty. This way we can avoid traversing + # up the tree in the common case. + self.dirty = False + + def Modified(self): + if self.dirty: + return + try: + # Propagate the signal to our parents iff this is the first field set. + self._parent_message_weakref._Modified() + except ReferenceError: + # We can get here if a client has kept a reference to a child object, + # and is now setting a field on it, but the child's parent has been + # garbage-collected. This is not an error. + pass + + +class _OneofListener(_Listener): + """Special listener implementation for setting composite oneof fields.""" + + def __init__(self, parent_message, field): + """Args: + parent_message: The message whose _Modified() method we should call when + we receive Modified() messages. + field: The descriptor of the field being set in the parent message. + """ + super(_OneofListener, self).__init__(parent_message) + self._field = field + + def Modified(self): + """Also updates the state of the containing oneof in the parent message.""" + try: + self._parent_message_weakref._UpdateOneofState(self._field) + super(_OneofListener, self).Modified() + except ReferenceError: + pass diff --git a/openpype/hosts/hiero/vendor/google/protobuf/internal/type_checkers.py b/openpype/hosts/hiero/vendor/google/protobuf/internal/type_checkers.py new file mode 100644 index 0000000000..a53e71fe8e --- /dev/null +++ b/openpype/hosts/hiero/vendor/google/protobuf/internal/type_checkers.py @@ -0,0 +1,435 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. +# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. 
nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +"""Provides type checking routines. + +This module defines type checking utilities in the forms of dictionaries: + +VALUE_CHECKERS: A dictionary of field types and a value validation object. +TYPE_TO_BYTE_SIZE_FN: A dictionary with field types and a size computing + function. +TYPE_TO_SERIALIZE_METHOD: A dictionary with field types and serialization + function. +FIELD_TYPE_TO_WIRE_TYPE: A dictionary with field typed and their + corresponding wire types. +TYPE_TO_DESERIALIZE_METHOD: A dictionary with field types and deserialization + function. +""" + +__author__ = 'robinson@google.com (Will Robinson)' + +import ctypes +import numbers + +from google.protobuf.internal import decoder +from google.protobuf.internal import encoder +from google.protobuf.internal import wire_format +from google.protobuf import descriptor + +_FieldDescriptor = descriptor.FieldDescriptor + + +def TruncateToFourByteFloat(original): + return ctypes.c_float(original).value + + +def ToShortestFloat(original): + """Returns the shortest float that has same value in wire.""" + # All 4 byte floats have between 6 and 9 significant digits, so we + # start with 6 as the lower bound. + # It has to be iterative because use '.9g' directly can not get rid + # of the noises for most values. For example if set a float_field=0.9 + # use '.9g' will print 0.899999976. + precision = 6 + rounded = float('{0:.{1}g}'.format(original, precision)) + while TruncateToFourByteFloat(rounded) != original: + precision += 1 + rounded = float('{0:.{1}g}'.format(original, precision)) + return rounded + + +def SupportsOpenEnums(field_descriptor): + return field_descriptor.containing_type.syntax == 'proto3' + + +def GetTypeChecker(field): + """Returns a type checker for a message field of the specified types. + + Args: + field: FieldDescriptor object for this field. + + Returns: + An instance of TypeChecker which can be used to verify the types + of values assigned to a field of the specified type. + """ + if (field.cpp_type == _FieldDescriptor.CPPTYPE_STRING and + field.type == _FieldDescriptor.TYPE_STRING): + return UnicodeValueChecker() + if field.cpp_type == _FieldDescriptor.CPPTYPE_ENUM: + if SupportsOpenEnums(field): + # When open enums are supported, any int32 can be assigned. + return _VALUE_CHECKERS[_FieldDescriptor.CPPTYPE_INT32] + else: + return EnumValueChecker(field.enum_type) + return _VALUE_CHECKERS[field.cpp_type] + + +# None of the typecheckers below make any attempt to guard against people +# subclassing builtin types and doing weird things. 
We're not trying to +# protect against malicious clients here, just people accidentally shooting +# themselves in the foot in obvious ways. +class TypeChecker(object): + + """Type checker used to catch type errors as early as possible + when the client is setting scalar fields in protocol messages. + """ + + def __init__(self, *acceptable_types): + self._acceptable_types = acceptable_types + + def CheckValue(self, proposed_value): + """Type check the provided value and return it. + + The returned value might have been normalized to another type. + """ + if not isinstance(proposed_value, self._acceptable_types): + message = ('%.1024r has type %s, but expected one of: %s' % + (proposed_value, type(proposed_value), self._acceptable_types)) + raise TypeError(message) + return proposed_value + + +class TypeCheckerWithDefault(TypeChecker): + + def __init__(self, default_value, *acceptable_types): + TypeChecker.__init__(self, *acceptable_types) + self._default_value = default_value + + def DefaultValue(self): + return self._default_value + + +class BoolValueChecker(object): + """Type checker used for bool fields.""" + + def CheckValue(self, proposed_value): + if not hasattr(proposed_value, '__index__') or ( + type(proposed_value).__module__ == 'numpy' and + type(proposed_value).__name__ == 'ndarray'): + message = ('%.1024r has type %s, but expected one of: %s' % + (proposed_value, type(proposed_value), (bool, int))) + raise TypeError(message) + return bool(proposed_value) + + def DefaultValue(self): + return False + + +# IntValueChecker and its subclasses perform integer type-checks +# and bounds-checks. +class IntValueChecker(object): + + """Checker used for integer fields. Performs type-check and range check.""" + + def CheckValue(self, proposed_value): + if not hasattr(proposed_value, '__index__') or ( + type(proposed_value).__module__ == 'numpy' and + type(proposed_value).__name__ == 'ndarray'): + message = ('%.1024r has type %s, but expected one of: %s' % + (proposed_value, type(proposed_value), (int,))) + raise TypeError(message) + + if not self._MIN <= int(proposed_value) <= self._MAX: + raise ValueError('Value out of range: %d' % proposed_value) + # We force all values to int to make alternate implementations where the + # distinction is more significant (e.g. the C++ implementation) simpler. + proposed_value = int(proposed_value) + return proposed_value + + def DefaultValue(self): + return 0 + + +class EnumValueChecker(object): + + """Checker used for enum fields. Performs type-check and range check.""" + + def __init__(self, enum_type): + self._enum_type = enum_type + + def CheckValue(self, proposed_value): + if not isinstance(proposed_value, numbers.Integral): + message = ('%.1024r has type %s, but expected one of: %s' % + (proposed_value, type(proposed_value), (int,))) + raise TypeError(message) + if int(proposed_value) not in self._enum_type.values_by_number: + raise ValueError('Unknown enum value: %d' % proposed_value) + return proposed_value + + def DefaultValue(self): + return self._enum_type.values[0].number + + +class UnicodeValueChecker(object): + + """Checker used for string fields. + + Always returns a unicode value, even if the input is of type str. + """ + + def CheckValue(self, proposed_value): + if not isinstance(proposed_value, (bytes, str)): + message = ('%.1024r has type %s, but expected one of: %s' % + (proposed_value, type(proposed_value), (bytes, str))) + raise TypeError(message) + + # If the value is of type 'bytes' make sure that it is valid UTF-8 data. 
+ if isinstance(proposed_value, bytes): + try: + proposed_value = proposed_value.decode('utf-8') + except UnicodeDecodeError: + raise ValueError('%.1024r has type bytes, but isn\'t valid UTF-8 ' + 'encoding. Non-UTF-8 strings must be converted to ' + 'unicode objects before being added.' % + (proposed_value)) + else: + try: + proposed_value.encode('utf8') + except UnicodeEncodeError: + raise ValueError('%.1024r isn\'t a valid unicode string and ' + 'can\'t be encoded in UTF-8.'% + (proposed_value)) + + return proposed_value + + def DefaultValue(self): + return u"" + + +class Int32ValueChecker(IntValueChecker): + # We're sure to use ints instead of longs here since comparison may be more + # efficient. + _MIN = -2147483648 + _MAX = 2147483647 + + +class Uint32ValueChecker(IntValueChecker): + _MIN = 0 + _MAX = (1 << 32) - 1 + + +class Int64ValueChecker(IntValueChecker): + _MIN = -(1 << 63) + _MAX = (1 << 63) - 1 + + +class Uint64ValueChecker(IntValueChecker): + _MIN = 0 + _MAX = (1 << 64) - 1 + + +# The max 4 bytes float is about 3.4028234663852886e+38 +_FLOAT_MAX = float.fromhex('0x1.fffffep+127') +_FLOAT_MIN = -_FLOAT_MAX +_INF = float('inf') +_NEG_INF = float('-inf') + + +class DoubleValueChecker(object): + """Checker used for double fields. + + Performs type-check and range check. + """ + + def CheckValue(self, proposed_value): + """Check and convert proposed_value to float.""" + if (not hasattr(proposed_value, '__float__') and + not hasattr(proposed_value, '__index__')) or ( + type(proposed_value).__module__ == 'numpy' and + type(proposed_value).__name__ == 'ndarray'): + message = ('%.1024r has type %s, but expected one of: int, float' % + (proposed_value, type(proposed_value))) + raise TypeError(message) + return float(proposed_value) + + def DefaultValue(self): + return 0.0 + + +class FloatValueChecker(DoubleValueChecker): + """Checker used for float fields. + + Performs type-check and range check. + + Values exceeding a 32-bit float will be converted to inf/-inf. + """ + + def CheckValue(self, proposed_value): + """Check and convert proposed_value to float.""" + converted_value = super().CheckValue(proposed_value) + # This inf rounding matches the C++ proto SafeDoubleToFloat logic. + if converted_value > _FLOAT_MAX: + return _INF + if converted_value < _FLOAT_MIN: + return _NEG_INF + + return TruncateToFourByteFloat(converted_value) + +# Type-checkers for all scalar CPPTYPEs. +_VALUE_CHECKERS = { + _FieldDescriptor.CPPTYPE_INT32: Int32ValueChecker(), + _FieldDescriptor.CPPTYPE_INT64: Int64ValueChecker(), + _FieldDescriptor.CPPTYPE_UINT32: Uint32ValueChecker(), + _FieldDescriptor.CPPTYPE_UINT64: Uint64ValueChecker(), + _FieldDescriptor.CPPTYPE_DOUBLE: DoubleValueChecker(), + _FieldDescriptor.CPPTYPE_FLOAT: FloatValueChecker(), + _FieldDescriptor.CPPTYPE_BOOL: BoolValueChecker(), + _FieldDescriptor.CPPTYPE_STRING: TypeCheckerWithDefault(b'', bytes), +} + + +# Map from field type to a function F, such that F(field_num, value) +# gives the total byte size for a value of the given type. This +# byte size includes tag information and any other additional space +# associated with serializing "value". 
+TYPE_TO_BYTE_SIZE_FN = { + _FieldDescriptor.TYPE_DOUBLE: wire_format.DoubleByteSize, + _FieldDescriptor.TYPE_FLOAT: wire_format.FloatByteSize, + _FieldDescriptor.TYPE_INT64: wire_format.Int64ByteSize, + _FieldDescriptor.TYPE_UINT64: wire_format.UInt64ByteSize, + _FieldDescriptor.TYPE_INT32: wire_format.Int32ByteSize, + _FieldDescriptor.TYPE_FIXED64: wire_format.Fixed64ByteSize, + _FieldDescriptor.TYPE_FIXED32: wire_format.Fixed32ByteSize, + _FieldDescriptor.TYPE_BOOL: wire_format.BoolByteSize, + _FieldDescriptor.TYPE_STRING: wire_format.StringByteSize, + _FieldDescriptor.TYPE_GROUP: wire_format.GroupByteSize, + _FieldDescriptor.TYPE_MESSAGE: wire_format.MessageByteSize, + _FieldDescriptor.TYPE_BYTES: wire_format.BytesByteSize, + _FieldDescriptor.TYPE_UINT32: wire_format.UInt32ByteSize, + _FieldDescriptor.TYPE_ENUM: wire_format.EnumByteSize, + _FieldDescriptor.TYPE_SFIXED32: wire_format.SFixed32ByteSize, + _FieldDescriptor.TYPE_SFIXED64: wire_format.SFixed64ByteSize, + _FieldDescriptor.TYPE_SINT32: wire_format.SInt32ByteSize, + _FieldDescriptor.TYPE_SINT64: wire_format.SInt64ByteSize + } + + +# Maps from field types to encoder constructors. +TYPE_TO_ENCODER = { + _FieldDescriptor.TYPE_DOUBLE: encoder.DoubleEncoder, + _FieldDescriptor.TYPE_FLOAT: encoder.FloatEncoder, + _FieldDescriptor.TYPE_INT64: encoder.Int64Encoder, + _FieldDescriptor.TYPE_UINT64: encoder.UInt64Encoder, + _FieldDescriptor.TYPE_INT32: encoder.Int32Encoder, + _FieldDescriptor.TYPE_FIXED64: encoder.Fixed64Encoder, + _FieldDescriptor.TYPE_FIXED32: encoder.Fixed32Encoder, + _FieldDescriptor.TYPE_BOOL: encoder.BoolEncoder, + _FieldDescriptor.TYPE_STRING: encoder.StringEncoder, + _FieldDescriptor.TYPE_GROUP: encoder.GroupEncoder, + _FieldDescriptor.TYPE_MESSAGE: encoder.MessageEncoder, + _FieldDescriptor.TYPE_BYTES: encoder.BytesEncoder, + _FieldDescriptor.TYPE_UINT32: encoder.UInt32Encoder, + _FieldDescriptor.TYPE_ENUM: encoder.EnumEncoder, + _FieldDescriptor.TYPE_SFIXED32: encoder.SFixed32Encoder, + _FieldDescriptor.TYPE_SFIXED64: encoder.SFixed64Encoder, + _FieldDescriptor.TYPE_SINT32: encoder.SInt32Encoder, + _FieldDescriptor.TYPE_SINT64: encoder.SInt64Encoder, + } + + +# Maps from field types to sizer constructors. +TYPE_TO_SIZER = { + _FieldDescriptor.TYPE_DOUBLE: encoder.DoubleSizer, + _FieldDescriptor.TYPE_FLOAT: encoder.FloatSizer, + _FieldDescriptor.TYPE_INT64: encoder.Int64Sizer, + _FieldDescriptor.TYPE_UINT64: encoder.UInt64Sizer, + _FieldDescriptor.TYPE_INT32: encoder.Int32Sizer, + _FieldDescriptor.TYPE_FIXED64: encoder.Fixed64Sizer, + _FieldDescriptor.TYPE_FIXED32: encoder.Fixed32Sizer, + _FieldDescriptor.TYPE_BOOL: encoder.BoolSizer, + _FieldDescriptor.TYPE_STRING: encoder.StringSizer, + _FieldDescriptor.TYPE_GROUP: encoder.GroupSizer, + _FieldDescriptor.TYPE_MESSAGE: encoder.MessageSizer, + _FieldDescriptor.TYPE_BYTES: encoder.BytesSizer, + _FieldDescriptor.TYPE_UINT32: encoder.UInt32Sizer, + _FieldDescriptor.TYPE_ENUM: encoder.EnumSizer, + _FieldDescriptor.TYPE_SFIXED32: encoder.SFixed32Sizer, + _FieldDescriptor.TYPE_SFIXED64: encoder.SFixed64Sizer, + _FieldDescriptor.TYPE_SINT32: encoder.SInt32Sizer, + _FieldDescriptor.TYPE_SINT64: encoder.SInt64Sizer, + } + + +# Maps from field type to a decoder constructor. 
+TYPE_TO_DECODER = { + _FieldDescriptor.TYPE_DOUBLE: decoder.DoubleDecoder, + _FieldDescriptor.TYPE_FLOAT: decoder.FloatDecoder, + _FieldDescriptor.TYPE_INT64: decoder.Int64Decoder, + _FieldDescriptor.TYPE_UINT64: decoder.UInt64Decoder, + _FieldDescriptor.TYPE_INT32: decoder.Int32Decoder, + _FieldDescriptor.TYPE_FIXED64: decoder.Fixed64Decoder, + _FieldDescriptor.TYPE_FIXED32: decoder.Fixed32Decoder, + _FieldDescriptor.TYPE_BOOL: decoder.BoolDecoder, + _FieldDescriptor.TYPE_STRING: decoder.StringDecoder, + _FieldDescriptor.TYPE_GROUP: decoder.GroupDecoder, + _FieldDescriptor.TYPE_MESSAGE: decoder.MessageDecoder, + _FieldDescriptor.TYPE_BYTES: decoder.BytesDecoder, + _FieldDescriptor.TYPE_UINT32: decoder.UInt32Decoder, + _FieldDescriptor.TYPE_ENUM: decoder.EnumDecoder, + _FieldDescriptor.TYPE_SFIXED32: decoder.SFixed32Decoder, + _FieldDescriptor.TYPE_SFIXED64: decoder.SFixed64Decoder, + _FieldDescriptor.TYPE_SINT32: decoder.SInt32Decoder, + _FieldDescriptor.TYPE_SINT64: decoder.SInt64Decoder, + } + +# Maps from field type to expected wiretype. +FIELD_TYPE_TO_WIRE_TYPE = { + _FieldDescriptor.TYPE_DOUBLE: wire_format.WIRETYPE_FIXED64, + _FieldDescriptor.TYPE_FLOAT: wire_format.WIRETYPE_FIXED32, + _FieldDescriptor.TYPE_INT64: wire_format.WIRETYPE_VARINT, + _FieldDescriptor.TYPE_UINT64: wire_format.WIRETYPE_VARINT, + _FieldDescriptor.TYPE_INT32: wire_format.WIRETYPE_VARINT, + _FieldDescriptor.TYPE_FIXED64: wire_format.WIRETYPE_FIXED64, + _FieldDescriptor.TYPE_FIXED32: wire_format.WIRETYPE_FIXED32, + _FieldDescriptor.TYPE_BOOL: wire_format.WIRETYPE_VARINT, + _FieldDescriptor.TYPE_STRING: + wire_format.WIRETYPE_LENGTH_DELIMITED, + _FieldDescriptor.TYPE_GROUP: wire_format.WIRETYPE_START_GROUP, + _FieldDescriptor.TYPE_MESSAGE: + wire_format.WIRETYPE_LENGTH_DELIMITED, + _FieldDescriptor.TYPE_BYTES: + wire_format.WIRETYPE_LENGTH_DELIMITED, + _FieldDescriptor.TYPE_UINT32: wire_format.WIRETYPE_VARINT, + _FieldDescriptor.TYPE_ENUM: wire_format.WIRETYPE_VARINT, + _FieldDescriptor.TYPE_SFIXED32: wire_format.WIRETYPE_FIXED32, + _FieldDescriptor.TYPE_SFIXED64: wire_format.WIRETYPE_FIXED64, + _FieldDescriptor.TYPE_SINT32: wire_format.WIRETYPE_VARINT, + _FieldDescriptor.TYPE_SINT64: wire_format.WIRETYPE_VARINT, + } diff --git a/openpype/hosts/hiero/vendor/google/protobuf/internal/well_known_types.py b/openpype/hosts/hiero/vendor/google/protobuf/internal/well_known_types.py new file mode 100644 index 0000000000..b581ab750a --- /dev/null +++ b/openpype/hosts/hiero/vendor/google/protobuf/internal/well_known_types.py @@ -0,0 +1,878 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. +# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. 
+# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +"""Contains well known classes. + +This files defines well known classes which need extra maintenance including: + - Any + - Duration + - FieldMask + - Struct + - Timestamp +""" + +__author__ = 'jieluo@google.com (Jie Luo)' + +import calendar +import collections.abc +import datetime + +from google.protobuf.descriptor import FieldDescriptor + +_TIMESTAMPFOMAT = '%Y-%m-%dT%H:%M:%S' +_NANOS_PER_SECOND = 1000000000 +_NANOS_PER_MILLISECOND = 1000000 +_NANOS_PER_MICROSECOND = 1000 +_MILLIS_PER_SECOND = 1000 +_MICROS_PER_SECOND = 1000000 +_SECONDS_PER_DAY = 24 * 3600 +_DURATION_SECONDS_MAX = 315576000000 + + +class Any(object): + """Class for Any Message type.""" + + __slots__ = () + + def Pack(self, msg, type_url_prefix='type.googleapis.com/', + deterministic=None): + """Packs the specified message into current Any message.""" + if len(type_url_prefix) < 1 or type_url_prefix[-1] != '/': + self.type_url = '%s/%s' % (type_url_prefix, msg.DESCRIPTOR.full_name) + else: + self.type_url = '%s%s' % (type_url_prefix, msg.DESCRIPTOR.full_name) + self.value = msg.SerializeToString(deterministic=deterministic) + + def Unpack(self, msg): + """Unpacks the current Any message into specified message.""" + descriptor = msg.DESCRIPTOR + if not self.Is(descriptor): + return False + msg.ParseFromString(self.value) + return True + + def TypeName(self): + """Returns the protobuf type name of the inner message.""" + # Only last part is to be used: b/25630112 + return self.type_url.split('/')[-1] + + def Is(self, descriptor): + """Checks if this Any represents the given protobuf type.""" + return '/' in self.type_url and self.TypeName() == descriptor.full_name + + +_EPOCH_DATETIME_NAIVE = datetime.datetime.utcfromtimestamp(0) +_EPOCH_DATETIME_AWARE = datetime.datetime.fromtimestamp( + 0, tz=datetime.timezone.utc) + + +class Timestamp(object): + """Class for Timestamp message type.""" + + __slots__ = () + + def ToJsonString(self): + """Converts Timestamp to RFC 3339 date string format. + + Returns: + A string converted from timestamp. The string is always Z-normalized + and uses 3, 6 or 9 fractional digits as required to represent the + exact time. Example of the return format: '1972-01-01T10:00:20.021Z' + """ + nanos = self.nanos % _NANOS_PER_SECOND + total_sec = self.seconds + (self.nanos - nanos) // _NANOS_PER_SECOND + seconds = total_sec % _SECONDS_PER_DAY + days = (total_sec - seconds) // _SECONDS_PER_DAY + dt = datetime.datetime(1970, 1, 1) + datetime.timedelta(days, seconds) + + result = dt.isoformat() + if (nanos % 1e9) == 0: + # If there are 0 fractional digits, the fractional + # point '.' should be omitted when serializing. + return result + 'Z' + if (nanos % 1e6) == 0: + # Serialize 3 fractional digits. 
+ return result + '.%03dZ' % (nanos / 1e6) + if (nanos % 1e3) == 0: + # Serialize 6 fractional digits. + return result + '.%06dZ' % (nanos / 1e3) + # Serialize 9 fractional digits. + return result + '.%09dZ' % nanos + + def FromJsonString(self, value): + """Parse a RFC 3339 date string format to Timestamp. + + Args: + value: A date string. Any fractional digits (or none) and any offset are + accepted as long as they fit into nano-seconds precision. + Example of accepted format: '1972-01-01T10:00:20.021-05:00' + + Raises: + ValueError: On parsing problems. + """ + if not isinstance(value, str): + raise ValueError('Timestamp JSON value not a string: {!r}'.format(value)) + timezone_offset = value.find('Z') + if timezone_offset == -1: + timezone_offset = value.find('+') + if timezone_offset == -1: + timezone_offset = value.rfind('-') + if timezone_offset == -1: + raise ValueError( + 'Failed to parse timestamp: missing valid timezone offset.') + time_value = value[0:timezone_offset] + # Parse datetime and nanos. + point_position = time_value.find('.') + if point_position == -1: + second_value = time_value + nano_value = '' + else: + second_value = time_value[:point_position] + nano_value = time_value[point_position + 1:] + if 't' in second_value: + raise ValueError( + 'time data \'{0}\' does not match format \'%Y-%m-%dT%H:%M:%S\', ' + 'lowercase \'t\' is not accepted'.format(second_value)) + date_object = datetime.datetime.strptime(second_value, _TIMESTAMPFOMAT) + td = date_object - datetime.datetime(1970, 1, 1) + seconds = td.seconds + td.days * _SECONDS_PER_DAY + if len(nano_value) > 9: + raise ValueError( + 'Failed to parse Timestamp: nanos {0} more than ' + '9 fractional digits.'.format(nano_value)) + if nano_value: + nanos = round(float('0.' + nano_value) * 1e9) + else: + nanos = 0 + # Parse timezone offsets. 
+ if value[timezone_offset] == 'Z': + if len(value) != timezone_offset + 1: + raise ValueError('Failed to parse timestamp: invalid trailing' + ' data {0}.'.format(value)) + else: + timezone = value[timezone_offset:] + pos = timezone.find(':') + if pos == -1: + raise ValueError( + 'Invalid timezone offset value: {0}.'.format(timezone)) + if timezone[0] == '+': + seconds -= (int(timezone[1:pos])*60+int(timezone[pos+1:]))*60 + else: + seconds += (int(timezone[1:pos])*60+int(timezone[pos+1:]))*60 + # Set seconds and nanos + self.seconds = int(seconds) + self.nanos = int(nanos) + + def GetCurrentTime(self): + """Get the current UTC into Timestamp.""" + self.FromDatetime(datetime.datetime.utcnow()) + + def ToNanoseconds(self): + """Converts Timestamp to nanoseconds since epoch.""" + return self.seconds * _NANOS_PER_SECOND + self.nanos + + def ToMicroseconds(self): + """Converts Timestamp to microseconds since epoch.""" + return (self.seconds * _MICROS_PER_SECOND + + self.nanos // _NANOS_PER_MICROSECOND) + + def ToMilliseconds(self): + """Converts Timestamp to milliseconds since epoch.""" + return (self.seconds * _MILLIS_PER_SECOND + + self.nanos // _NANOS_PER_MILLISECOND) + + def ToSeconds(self): + """Converts Timestamp to seconds since epoch.""" + return self.seconds + + def FromNanoseconds(self, nanos): + """Converts nanoseconds since epoch to Timestamp.""" + self.seconds = nanos // _NANOS_PER_SECOND + self.nanos = nanos % _NANOS_PER_SECOND + + def FromMicroseconds(self, micros): + """Converts microseconds since epoch to Timestamp.""" + self.seconds = micros // _MICROS_PER_SECOND + self.nanos = (micros % _MICROS_PER_SECOND) * _NANOS_PER_MICROSECOND + + def FromMilliseconds(self, millis): + """Converts milliseconds since epoch to Timestamp.""" + self.seconds = millis // _MILLIS_PER_SECOND + self.nanos = (millis % _MILLIS_PER_SECOND) * _NANOS_PER_MILLISECOND + + def FromSeconds(self, seconds): + """Converts seconds since epoch to Timestamp.""" + self.seconds = seconds + self.nanos = 0 + + def ToDatetime(self, tzinfo=None): + """Converts Timestamp to a datetime. + + Args: + tzinfo: A datetime.tzinfo subclass; defaults to None. + + Returns: + If tzinfo is None, returns a timezone-naive UTC datetime (with no timezone + information, i.e. not aware that it's UTC). + + Otherwise, returns a timezone-aware datetime in the input timezone. + """ + delta = datetime.timedelta( + seconds=self.seconds, + microseconds=_RoundTowardZero(self.nanos, _NANOS_PER_MICROSECOND)) + if tzinfo is None: + return _EPOCH_DATETIME_NAIVE + delta + else: + return _EPOCH_DATETIME_AWARE.astimezone(tzinfo) + delta + + def FromDatetime(self, dt): + """Converts datetime to Timestamp. + + Args: + dt: A datetime. If it's timezone-naive, it's assumed to be in UTC. + """ + # Using this guide: http://wiki.python.org/moin/WorkingWithTime + # And this conversion guide: http://docs.python.org/library/time.html + + # Turn the date parameter into a tuple (struct_time) that can then be + # manipulated into a long value of seconds. During the conversion from + # struct_time to long, the source date in UTC, and so it follows that the + # correct transformation is calendar.timegm() + self.seconds = calendar.timegm(dt.utctimetuple()) + self.nanos = dt.microsecond * _NANOS_PER_MICROSECOND + + +class Duration(object): + """Class for Duration message type.""" + + __slots__ = () + + def ToJsonString(self): + """Converts Duration to string format. + + Returns: + A string converted from self. 
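As a side note, these Timestamp helpers end up mixed into the generated google.protobuf.timestamp_pb2.Timestamp class; a minimal usage sketch (assuming the vendored package and the generated timestamp_pb2 module are importable) could look like this:

    import datetime
    from google.protobuf.timestamp_pb2 import Timestamp

    ts = Timestamp()
    # A naive datetime is treated as UTC by FromDatetime().
    ts.FromDatetime(datetime.datetime(1972, 1, 1, 10, 0, 20, 21000))
    print(ts.ToJsonString())    # '1972-01-01T10:00:20.021Z'
    # Offsets are accepted on input and folded into the UTC seconds/nanos.
    ts.FromJsonString('1972-01-01T10:00:20.021-05:00')
    print(ts.ToMilliseconds())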
The string format will contains + 3, 6, or 9 fractional digits depending on the precision required to + represent the exact Duration value. For example: "1s", "1.010s", + "1.000000100s", "-3.100s" + """ + _CheckDurationValid(self.seconds, self.nanos) + if self.seconds < 0 or self.nanos < 0: + result = '-' + seconds = - self.seconds + int((0 - self.nanos) // 1e9) + nanos = (0 - self.nanos) % 1e9 + else: + result = '' + seconds = self.seconds + int(self.nanos // 1e9) + nanos = self.nanos % 1e9 + result += '%d' % seconds + if (nanos % 1e9) == 0: + # If there are 0 fractional digits, the fractional + # point '.' should be omitted when serializing. + return result + 's' + if (nanos % 1e6) == 0: + # Serialize 3 fractional digits. + return result + '.%03ds' % (nanos / 1e6) + if (nanos % 1e3) == 0: + # Serialize 6 fractional digits. + return result + '.%06ds' % (nanos / 1e3) + # Serialize 9 fractional digits. + return result + '.%09ds' % nanos + + def FromJsonString(self, value): + """Converts a string to Duration. + + Args: + value: A string to be converted. The string must end with 's'. Any + fractional digits (or none) are accepted as long as they fit into + precision. For example: "1s", "1.01s", "1.0000001s", "-3.100s + + Raises: + ValueError: On parsing problems. + """ + if not isinstance(value, str): + raise ValueError('Duration JSON value not a string: {!r}'.format(value)) + if len(value) < 1 or value[-1] != 's': + raise ValueError( + 'Duration must end with letter "s": {0}.'.format(value)) + try: + pos = value.find('.') + if pos == -1: + seconds = int(value[:-1]) + nanos = 0 + else: + seconds = int(value[:pos]) + if value[0] == '-': + nanos = int(round(float('-0{0}'.format(value[pos: -1])) *1e9)) + else: + nanos = int(round(float('0{0}'.format(value[pos: -1])) *1e9)) + _CheckDurationValid(seconds, nanos) + self.seconds = seconds + self.nanos = nanos + except ValueError as e: + raise ValueError( + 'Couldn\'t parse duration: {0} : {1}.'.format(value, e)) + + def ToNanoseconds(self): + """Converts a Duration to nanoseconds.""" + return self.seconds * _NANOS_PER_SECOND + self.nanos + + def ToMicroseconds(self): + """Converts a Duration to microseconds.""" + micros = _RoundTowardZero(self.nanos, _NANOS_PER_MICROSECOND) + return self.seconds * _MICROS_PER_SECOND + micros + + def ToMilliseconds(self): + """Converts a Duration to milliseconds.""" + millis = _RoundTowardZero(self.nanos, _NANOS_PER_MILLISECOND) + return self.seconds * _MILLIS_PER_SECOND + millis + + def ToSeconds(self): + """Converts a Duration to seconds.""" + return self.seconds + + def FromNanoseconds(self, nanos): + """Converts nanoseconds to Duration.""" + self._NormalizeDuration(nanos // _NANOS_PER_SECOND, + nanos % _NANOS_PER_SECOND) + + def FromMicroseconds(self, micros): + """Converts microseconds to Duration.""" + self._NormalizeDuration( + micros // _MICROS_PER_SECOND, + (micros % _MICROS_PER_SECOND) * _NANOS_PER_MICROSECOND) + + def FromMilliseconds(self, millis): + """Converts milliseconds to Duration.""" + self._NormalizeDuration( + millis // _MILLIS_PER_SECOND, + (millis % _MILLIS_PER_SECOND) * _NANOS_PER_MILLISECOND) + + def FromSeconds(self, seconds): + """Converts seconds to Duration.""" + self.seconds = seconds + self.nanos = 0 + + def ToTimedelta(self): + """Converts Duration to timedelta.""" + return datetime.timedelta( + seconds=self.seconds, microseconds=_RoundTowardZero( + self.nanos, _NANOS_PER_MICROSECOND)) + + def FromTimedelta(self, td): + """Converts timedelta to Duration.""" + 
self._NormalizeDuration(td.seconds + td.days * _SECONDS_PER_DAY, + td.microseconds * _NANOS_PER_MICROSECOND) + + def _NormalizeDuration(self, seconds, nanos): + """Set Duration by seconds and nanos.""" + # Force nanos to be negative if the duration is negative. + if seconds < 0 and nanos > 0: + seconds += 1 + nanos -= _NANOS_PER_SECOND + self.seconds = seconds + self.nanos = nanos + + +def _CheckDurationValid(seconds, nanos): + if seconds < -_DURATION_SECONDS_MAX or seconds > _DURATION_SECONDS_MAX: + raise ValueError( + 'Duration is not valid: Seconds {0} must be in range ' + '[-315576000000, 315576000000].'.format(seconds)) + if nanos <= -_NANOS_PER_SECOND or nanos >= _NANOS_PER_SECOND: + raise ValueError( + 'Duration is not valid: Nanos {0} must be in range ' + '[-999999999, 999999999].'.format(nanos)) + if (nanos < 0 and seconds > 0) or (nanos > 0 and seconds < 0): + raise ValueError( + 'Duration is not valid: Sign mismatch.') + + +def _RoundTowardZero(value, divider): + """Truncates the remainder part after division.""" + # For some languages, the sign of the remainder is implementation + # dependent if any of the operands is negative. Here we enforce + # "rounded toward zero" semantics. For example, for (-5) / 2 an + # implementation may give -3 as the result with the remainder being + # 1. This function ensures we always return -2 (closer to zero). + result = value // divider + remainder = value % divider + if result < 0 and remainder > 0: + return result + 1 + else: + return result + + +class FieldMask(object): + """Class for FieldMask message type.""" + + __slots__ = () + + def ToJsonString(self): + """Converts FieldMask to string according to proto3 JSON spec.""" + camelcase_paths = [] + for path in self.paths: + camelcase_paths.append(_SnakeCaseToCamelCase(path)) + return ','.join(camelcase_paths) + + def FromJsonString(self, value): + """Converts string to FieldMask according to proto3 JSON spec.""" + if not isinstance(value, str): + raise ValueError('FieldMask JSON value not a string: {!r}'.format(value)) + self.Clear() + if value: + for path in value.split(','): + self.paths.append(_CamelCaseToSnakeCase(path)) + + def IsValidForDescriptor(self, message_descriptor): + """Checks whether the FieldMask is valid for Message Descriptor.""" + for path in self.paths: + if not _IsValidPath(message_descriptor, path): + return False + return True + + def AllFieldsFromDescriptor(self, message_descriptor): + """Gets all direct fields of Message Descriptor to FieldMask.""" + self.Clear() + for field in message_descriptor.fields: + self.paths.append(field.name) + + def CanonicalFormFromMask(self, mask): + """Converts a FieldMask to the canonical form. + + Removes paths that are covered by another path. For example, + "foo.bar" is covered by "foo" and will be removed if "foo" + is also in the FieldMask. Then sorts all paths in alphabetical order. + + Args: + mask: The original FieldMask to be converted. 
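The Duration helpers above follow the same pattern; a small sketch, assuming the generated duration_pb2 module is available:

    import datetime
    from google.protobuf.duration_pb2 import Duration

    d = Duration()
    d.FromTimedelta(datetime.timedelta(seconds=1, milliseconds=10))
    print(d.ToJsonString())     # '1.010s' -- three fractional digits are enough here
    d.FromJsonString('-3.100s')
    print(d.seconds, d.nanos)   # -3 -100000000, both components carry the sign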
+ """ + tree = _FieldMaskTree(mask) + tree.ToFieldMask(self) + + def Union(self, mask1, mask2): + """Merges mask1 and mask2 into this FieldMask.""" + _CheckFieldMaskMessage(mask1) + _CheckFieldMaskMessage(mask2) + tree = _FieldMaskTree(mask1) + tree.MergeFromFieldMask(mask2) + tree.ToFieldMask(self) + + def Intersect(self, mask1, mask2): + """Intersects mask1 and mask2 into this FieldMask.""" + _CheckFieldMaskMessage(mask1) + _CheckFieldMaskMessage(mask2) + tree = _FieldMaskTree(mask1) + intersection = _FieldMaskTree() + for path in mask2.paths: + tree.IntersectPath(path, intersection) + intersection.ToFieldMask(self) + + def MergeMessage( + self, source, destination, + replace_message_field=False, replace_repeated_field=False): + """Merges fields specified in FieldMask from source to destination. + + Args: + source: Source message. + destination: The destination message to be merged into. + replace_message_field: Replace message field if True. Merge message + field if False. + replace_repeated_field: Replace repeated field if True. Append + elements of repeated field if False. + """ + tree = _FieldMaskTree(self) + tree.MergeMessage( + source, destination, replace_message_field, replace_repeated_field) + + +def _IsValidPath(message_descriptor, path): + """Checks whether the path is valid for Message Descriptor.""" + parts = path.split('.') + last = parts.pop() + for name in parts: + field = message_descriptor.fields_by_name.get(name) + if (field is None or + field.label == FieldDescriptor.LABEL_REPEATED or + field.type != FieldDescriptor.TYPE_MESSAGE): + return False + message_descriptor = field.message_type + return last in message_descriptor.fields_by_name + + +def _CheckFieldMaskMessage(message): + """Raises ValueError if message is not a FieldMask.""" + message_descriptor = message.DESCRIPTOR + if (message_descriptor.name != 'FieldMask' or + message_descriptor.file.name != 'google/protobuf/field_mask.proto'): + raise ValueError('Message {0} is not a FieldMask.'.format( + message_descriptor.full_name)) + + +def _SnakeCaseToCamelCase(path_name): + """Converts a path name from snake_case to camelCase.""" + result = [] + after_underscore = False + for c in path_name: + if c.isupper(): + raise ValueError( + 'Fail to print FieldMask to Json string: Path name ' + '{0} must not contain uppercase letters.'.format(path_name)) + if after_underscore: + if c.islower(): + result.append(c.upper()) + after_underscore = False + else: + raise ValueError( + 'Fail to print FieldMask to Json string: The ' + 'character after a "_" must be a lowercase letter ' + 'in path name {0}.'.format(path_name)) + elif c == '_': + after_underscore = True + else: + result += c + + if after_underscore: + raise ValueError('Fail to print FieldMask to Json string: Trailing "_" ' + 'in path name {0}.'.format(path_name)) + return ''.join(result) + + +def _CamelCaseToSnakeCase(path_name): + """Converts a field name from camelCase to snake_case.""" + result = [] + for c in path_name: + if c == '_': + raise ValueError('Fail to parse FieldMask: Path name ' + '{0} must not contain "_"s.'.format(path_name)) + if c.isupper(): + result += '_' + result += c.lower() + else: + result += c + return ''.join(result) + + +class _FieldMaskTree(object): + """Represents a FieldMask in a tree structure. + + For example, given a FieldMask "foo.bar,foo.baz,bar.baz", + the FieldMaskTree will be: + [_root] -+- foo -+- bar + | | + | +- baz + | + +- bar --- baz + In the tree, each leaf node represents a field path. 
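As an illustration of the FieldMask helpers, a rough sketch (field_mask_pb2 is the generated module these mixins are applied to; the results follow from the snake_case/camelCase conversion and the tree-merging logic in this module):

    from google.protobuf.field_mask_pb2 import FieldMask

    mask = FieldMask(paths=['user.display_name', 'photo'])
    print(mask.ToJsonString())   # 'user.displayName,photo' -- snake_case becomes camelCase
    union = FieldMask()
    union.Union(mask, FieldMask(paths=['user']))
    print(list(union.paths))     # ['photo', 'user'] -- 'user.display_name' is covered by 'user'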
+ """ + + __slots__ = ('_root',) + + def __init__(self, field_mask=None): + """Initializes the tree by FieldMask.""" + self._root = {} + if field_mask: + self.MergeFromFieldMask(field_mask) + + def MergeFromFieldMask(self, field_mask): + """Merges a FieldMask to the tree.""" + for path in field_mask.paths: + self.AddPath(path) + + def AddPath(self, path): + """Adds a field path into the tree. + + If the field path to add is a sub-path of an existing field path + in the tree (i.e., a leaf node), it means the tree already matches + the given path so nothing will be added to the tree. If the path + matches an existing non-leaf node in the tree, that non-leaf node + will be turned into a leaf node with all its children removed because + the path matches all the node's children. Otherwise, a new path will + be added. + + Args: + path: The field path to add. + """ + node = self._root + for name in path.split('.'): + if name not in node: + node[name] = {} + elif not node[name]: + # Pre-existing empty node implies we already have this entire tree. + return + node = node[name] + # Remove any sub-trees we might have had. + node.clear() + + def ToFieldMask(self, field_mask): + """Converts the tree to a FieldMask.""" + field_mask.Clear() + _AddFieldPaths(self._root, '', field_mask) + + def IntersectPath(self, path, intersection): + """Calculates the intersection part of a field path with this tree. + + Args: + path: The field path to calculates. + intersection: The out tree to record the intersection part. + """ + node = self._root + for name in path.split('.'): + if name not in node: + return + elif not node[name]: + intersection.AddPath(path) + return + node = node[name] + intersection.AddLeafNodes(path, node) + + def AddLeafNodes(self, prefix, node): + """Adds leaf nodes begin with prefix to this tree.""" + if not node: + self.AddPath(prefix) + for name in node: + child_path = prefix + '.' + name + self.AddLeafNodes(child_path, node[name]) + + def MergeMessage( + self, source, destination, + replace_message, replace_repeated): + """Merge all fields specified by this tree from source to destination.""" + _MergeMessage( + self._root, source, destination, replace_message, replace_repeated) + + +def _StrConvert(value): + """Converts value to str if it is not.""" + # This file is imported by c extension and some methods like ClearField + # requires string for the field name. py2/py3 has different text + # type and may use unicode. + if not isinstance(value, str): + return value.encode('utf-8') + return value + + +def _MergeMessage( + node, source, destination, replace_message, replace_repeated): + """Merge all fields specified by a sub-tree from source to destination.""" + source_descriptor = source.DESCRIPTOR + for name in node: + child = node[name] + field = source_descriptor.fields_by_name[name] + if field is None: + raise ValueError('Error: Can\'t find field {0} in message {1}.'.format( + name, source_descriptor.full_name)) + if child: + # Sub-paths are only allowed for singular message fields. 
+ if (field.label == FieldDescriptor.LABEL_REPEATED or + field.cpp_type != FieldDescriptor.CPPTYPE_MESSAGE): + raise ValueError('Error: Field {0} in message {1} is not a singular ' + 'message field and cannot have sub-fields.'.format( + name, source_descriptor.full_name)) + if source.HasField(name): + _MergeMessage( + child, getattr(source, name), getattr(destination, name), + replace_message, replace_repeated) + continue + if field.label == FieldDescriptor.LABEL_REPEATED: + if replace_repeated: + destination.ClearField(_StrConvert(name)) + repeated_source = getattr(source, name) + repeated_destination = getattr(destination, name) + repeated_destination.MergeFrom(repeated_source) + else: + if field.cpp_type == FieldDescriptor.CPPTYPE_MESSAGE: + if replace_message: + destination.ClearField(_StrConvert(name)) + if source.HasField(name): + getattr(destination, name).MergeFrom(getattr(source, name)) + else: + setattr(destination, name, getattr(source, name)) + + +def _AddFieldPaths(node, prefix, field_mask): + """Adds the field paths descended from node to field_mask.""" + if not node and prefix: + field_mask.paths.append(prefix) + return + for name in sorted(node): + if prefix: + child_path = prefix + '.' + name + else: + child_path = name + _AddFieldPaths(node[name], child_path, field_mask) + + +def _SetStructValue(struct_value, value): + if value is None: + struct_value.null_value = 0 + elif isinstance(value, bool): + # Note: this check must come before the number check because in Python + # True and False are also considered numbers. + struct_value.bool_value = value + elif isinstance(value, str): + struct_value.string_value = value + elif isinstance(value, (int, float)): + struct_value.number_value = value + elif isinstance(value, (dict, Struct)): + struct_value.struct_value.Clear() + struct_value.struct_value.update(value) + elif isinstance(value, (list, ListValue)): + struct_value.list_value.Clear() + struct_value.list_value.extend(value) + else: + raise ValueError('Unexpected type') + + +def _GetStructValue(struct_value): + which = struct_value.WhichOneof('kind') + if which == 'struct_value': + return struct_value.struct_value + elif which == 'null_value': + return None + elif which == 'number_value': + return struct_value.number_value + elif which == 'string_value': + return struct_value.string_value + elif which == 'bool_value': + return struct_value.bool_value + elif which == 'list_value': + return struct_value.list_value + elif which is None: + raise ValueError('Value not set') + + +class Struct(object): + """Class for Struct message type.""" + + __slots__ = () + + def __getitem__(self, key): + return _GetStructValue(self.fields[key]) + + def __contains__(self, item): + return item in self.fields + + def __setitem__(self, key, value): + _SetStructValue(self.fields[key], value) + + def __delitem__(self, key): + del self.fields[key] + + def __len__(self): + return len(self.fields) + + def __iter__(self): + return iter(self.fields) + + def keys(self): # pylint: disable=invalid-name + return self.fields.keys() + + def values(self): # pylint: disable=invalid-name + return [self[key] for key in self] + + def items(self): # pylint: disable=invalid-name + return [(key, self[key]) for key in self] + + def get_or_create_list(self, key): + """Returns a list for this key, creating if it didn't exist already.""" + if not self.fields[key].HasField('list_value'): + # Clear will mark list_value modified which will indeed create a list. 
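As an aside, once these mixins are applied to the generated struct_pb2 classes, Struct behaves like a dict and ListValue like a list; a rough usage sketch:

    from google.protobuf.struct_pb2 import Struct

    s = Struct()
    s['name'] = 'shot010'
    s['frame'] = 1001
    s.update({'enabled': True, 'tags': ['comp', 'review']})
    print(s['frame'])      # 1001.0 -- all numbers are stored as number_value (double)
    print(s['tags'][0])    # 'comp' -- the nested ListValue supports indexing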
+ self.fields[key].list_value.Clear() + return self.fields[key].list_value + + def get_or_create_struct(self, key): + """Returns a struct for this key, creating if it didn't exist already.""" + if not self.fields[key].HasField('struct_value'): + # Clear will mark struct_value modified which will indeed create a struct. + self.fields[key].struct_value.Clear() + return self.fields[key].struct_value + + def update(self, dictionary): # pylint: disable=invalid-name + for key, value in dictionary.items(): + _SetStructValue(self.fields[key], value) + +collections.abc.MutableMapping.register(Struct) + + +class ListValue(object): + """Class for ListValue message type.""" + + __slots__ = () + + def __len__(self): + return len(self.values) + + def append(self, value): + _SetStructValue(self.values.add(), value) + + def extend(self, elem_seq): + for value in elem_seq: + self.append(value) + + def __getitem__(self, index): + """Retrieves item by the specified index.""" + return _GetStructValue(self.values.__getitem__(index)) + + def __setitem__(self, index, value): + _SetStructValue(self.values.__getitem__(index), value) + + def __delitem__(self, key): + del self.values[key] + + def items(self): + for i in range(len(self)): + yield self[i] + + def add_struct(self): + """Appends and returns a struct value as the next value in the list.""" + struct_value = self.values.add().struct_value + # Clear will mark struct_value modified which will indeed create a struct. + struct_value.Clear() + return struct_value + + def add_list(self): + """Appends and returns a list value as the next value in the list.""" + list_value = self.values.add().list_value + # Clear will mark list_value modified which will indeed create a list. + list_value.Clear() + return list_value + +collections.abc.MutableSequence.register(ListValue) + + +WKTBASES = { + 'google.protobuf.Any': Any, + 'google.protobuf.Duration': Duration, + 'google.protobuf.FieldMask': FieldMask, + 'google.protobuf.ListValue': ListValue, + 'google.protobuf.Struct': Struct, + 'google.protobuf.Timestamp': Timestamp, +} diff --git a/openpype/hosts/hiero/vendor/google/protobuf/internal/wire_format.py b/openpype/hosts/hiero/vendor/google/protobuf/internal/wire_format.py new file mode 100644 index 0000000000..883f525585 --- /dev/null +++ b/openpype/hosts/hiero/vendor/google/protobuf/internal/wire_format.py @@ -0,0 +1,268 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. +# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +"""Constants and static functions to support protocol buffer wire format.""" + +__author__ = 'robinson@google.com (Will Robinson)' + +import struct +from google.protobuf import descriptor +from google.protobuf import message + + +TAG_TYPE_BITS = 3 # Number of bits used to hold type info in a proto tag. +TAG_TYPE_MASK = (1 << TAG_TYPE_BITS) - 1 # 0x7 + +# These numbers identify the wire type of a protocol buffer value. +# We use the least-significant TAG_TYPE_BITS bits of the varint-encoded +# tag-and-type to store one of these WIRETYPE_* constants. +# These values must match WireType enum in google/protobuf/wire_format.h. +WIRETYPE_VARINT = 0 +WIRETYPE_FIXED64 = 1 +WIRETYPE_LENGTH_DELIMITED = 2 +WIRETYPE_START_GROUP = 3 +WIRETYPE_END_GROUP = 4 +WIRETYPE_FIXED32 = 5 +_WIRETYPE_MAX = 5 + + +# Bounds for various integer types. +INT32_MAX = int((1 << 31) - 1) +INT32_MIN = int(-(1 << 31)) +UINT32_MAX = (1 << 32) - 1 + +INT64_MAX = (1 << 63) - 1 +INT64_MIN = -(1 << 63) +UINT64_MAX = (1 << 64) - 1 + +# "struct" format strings that will encode/decode the specified formats. +FORMAT_UINT32_LITTLE_ENDIAN = '<I' +FORMAT_UINT64_LITTLE_ENDIAN = '<Q' +FORMAT_FLOAT_LITTLE_ENDIAN = '<f' +FORMAT_DOUBLE_LITTLE_ENDIAN = '<d' + + +# We'll have to provide alternate implementations of AppendLittleEndian*() on +# any architectures where these checks fail. +if struct.calcsize(FORMAT_UINT32_LITTLE_ENDIAN) != 4: + raise AssertionError('Format "I" is not a 32-bit number.') +if struct.calcsize(FORMAT_UINT64_LITTLE_ENDIAN) != 8: + raise AssertionError('Format "Q" is not a 64-bit number.') + + +def PackTag(field_number, wire_type): + """Returns an unsigned 32-bit integer that encodes the field number and + wire type information in standard protocol message wire format. + + Args: + field_number: Expected to be an integer in the range [1, 1 << 29) + wire_type: One of the WIRETYPE_* constants. + """ + if not 0 <= wire_type <= _WIRETYPE_MAX: + raise message.EncodeError('Unknown wire type: %d' % wire_type) + return (field_number << TAG_TYPE_BITS) | wire_type + + +def UnpackTag(tag): + """The inverse of PackTag(). Given an unsigned 32-bit number, + returns a (field_number, wire_type) tuple. + """ + return (tag >> TAG_TYPE_BITS), (tag & TAG_TYPE_MASK) + + +def ZigZagEncode(value): + """ZigZag Transform: Encodes signed integers so that they can be + effectively used with varint encoding. See wire_format.h for + more details. + """ + if value >= 0: + return value << 1 + return (value << 1) ^ (~0) + + +def ZigZagDecode(value): + """Inverse of ZigZagEncode().""" + if not value & 0x1: + return value >> 1 + return (value >> 1) ^ (~0) + + + +# The *ByteSize() functions below return the number of bytes required to +# serialize "field number + type" information and then serialize the value. + + +def Int32ByteSize(field_number, int32): + return Int64ByteSize(field_number, int32) + + +def Int32ByteSizeNoTag(int32): + return _VarUInt64ByteSizeNoTag(0xffffffffffffffff & int32) + + +def Int64ByteSize(field_number, int64): + # Have to convert to uint before calling UInt64ByteSize().
+ return UInt64ByteSize(field_number, 0xffffffffffffffff & int64) + + +def UInt32ByteSize(field_number, uint32): + return UInt64ByteSize(field_number, uint32) + + +def UInt64ByteSize(field_number, uint64): + return TagByteSize(field_number) + _VarUInt64ByteSizeNoTag(uint64) + + +def SInt32ByteSize(field_number, int32): + return UInt32ByteSize(field_number, ZigZagEncode(int32)) + + +def SInt64ByteSize(field_number, int64): + return UInt64ByteSize(field_number, ZigZagEncode(int64)) + + +def Fixed32ByteSize(field_number, fixed32): + return TagByteSize(field_number) + 4 + + +def Fixed64ByteSize(field_number, fixed64): + return TagByteSize(field_number) + 8 + + +def SFixed32ByteSize(field_number, sfixed32): + return TagByteSize(field_number) + 4 + + +def SFixed64ByteSize(field_number, sfixed64): + return TagByteSize(field_number) + 8 + + +def FloatByteSize(field_number, flt): + return TagByteSize(field_number) + 4 + + +def DoubleByteSize(field_number, double): + return TagByteSize(field_number) + 8 + + +def BoolByteSize(field_number, b): + return TagByteSize(field_number) + 1 + + +def EnumByteSize(field_number, enum): + return UInt32ByteSize(field_number, enum) + + +def StringByteSize(field_number, string): + return BytesByteSize(field_number, string.encode('utf-8')) + + +def BytesByteSize(field_number, b): + return (TagByteSize(field_number) + + _VarUInt64ByteSizeNoTag(len(b)) + + len(b)) + + +def GroupByteSize(field_number, message): + return (2 * TagByteSize(field_number) # START and END group. + + message.ByteSize()) + + +def MessageByteSize(field_number, message): + return (TagByteSize(field_number) + + _VarUInt64ByteSizeNoTag(message.ByteSize()) + + message.ByteSize()) + + +def MessageSetItemByteSize(field_number, msg): + # First compute the sizes of the tags. + # There are 2 tags for the beginning and ending of the repeated group, that + # is field number 1, one with field number 2 (type_id) and one with field + # number 3 (message). + total_size = (2 * TagByteSize(1) + TagByteSize(2) + TagByteSize(3)) + + # Add the number of bytes for type_id. + total_size += _VarUInt64ByteSizeNoTag(field_number) + + message_size = msg.ByteSize() + + # The number of bytes for encoding the length of the message. + total_size += _VarUInt64ByteSizeNoTag(message_size) + + # The size of the message. + total_size += message_size + return total_size + + +def TagByteSize(field_number): + """Returns the bytes required to serialize a tag with this field number.""" + # Just pass in type 0, since the type won't affect the tag+type size. + return _VarUInt64ByteSizeNoTag(PackTag(field_number, 0)) + + +# Private helper function for the *ByteSize() functions above. + +def _VarUInt64ByteSizeNoTag(uint64): + """Returns the number of bytes required to serialize a single varint + using boundary value comparisons. (unrolled loop optimization -WPierce) + uint64 must be unsigned. 
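For orientation, a quick sketch of how the tag and varint helpers in this module behave (assuming the vendored package imports as google.protobuf):

    from google.protobuf.internal import wire_format

    tag = wire_format.PackTag(field_number=1, wire_type=wire_format.WIRETYPE_VARINT)
    print(wire_format.UnpackTag(tag))     # (1, 0)
    print(wire_format.ZigZagEncode(-1))   # 1 -- small negatives stay small varints
    print(wire_format.ZigZagDecode(1))    # -1
    print(wire_format.TagByteSize(16))    # 2 -- field numbers >= 16 need a two-byte tag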
+ """ + if uint64 <= 0x7f: return 1 + if uint64 <= 0x3fff: return 2 + if uint64 <= 0x1fffff: return 3 + if uint64 <= 0xfffffff: return 4 + if uint64 <= 0x7ffffffff: return 5 + if uint64 <= 0x3ffffffffff: return 6 + if uint64 <= 0x1ffffffffffff: return 7 + if uint64 <= 0xffffffffffffff: return 8 + if uint64 <= 0x7fffffffffffffff: return 9 + if uint64 > UINT64_MAX: + raise message.EncodeError('Value out of range: %d' % uint64) + return 10 + + +NON_PACKABLE_TYPES = ( + descriptor.FieldDescriptor.TYPE_STRING, + descriptor.FieldDescriptor.TYPE_GROUP, + descriptor.FieldDescriptor.TYPE_MESSAGE, + descriptor.FieldDescriptor.TYPE_BYTES +) + + +def IsTypePackable(field_type): + """Return true iff packable = true is valid for fields of this type. + + Args: + field_type: a FieldDescriptor::Type value. + + Returns: + True iff fields of this type are packable. + """ + return field_type not in NON_PACKABLE_TYPES diff --git a/openpype/hosts/hiero/vendor/google/protobuf/json_format.py b/openpype/hosts/hiero/vendor/google/protobuf/json_format.py new file mode 100644 index 0000000000..5024ed89d7 --- /dev/null +++ b/openpype/hosts/hiero/vendor/google/protobuf/json_format.py @@ -0,0 +1,912 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. +# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +"""Contains routines for printing protocol messages in JSON format. + +Simple usage example: + + # Create a proto object and serialize it to a json format string. + message = my_proto_pb2.MyMessage(foo='bar') + json_string = json_format.MessageToJson(message) + + # Parse a json format string to proto object. 
+ message = json_format.Parse(json_string, my_proto_pb2.MyMessage()) +""" + +__author__ = 'jieluo@google.com (Jie Luo)' + + +import base64 +from collections import OrderedDict +import json +import math +from operator import methodcaller +import re +import sys + +from google.protobuf.internal import type_checkers +from google.protobuf import descriptor +from google.protobuf import symbol_database + + +_TIMESTAMPFOMAT = '%Y-%m-%dT%H:%M:%S' +_INT_TYPES = frozenset([descriptor.FieldDescriptor.CPPTYPE_INT32, + descriptor.FieldDescriptor.CPPTYPE_UINT32, + descriptor.FieldDescriptor.CPPTYPE_INT64, + descriptor.FieldDescriptor.CPPTYPE_UINT64]) +_INT64_TYPES = frozenset([descriptor.FieldDescriptor.CPPTYPE_INT64, + descriptor.FieldDescriptor.CPPTYPE_UINT64]) +_FLOAT_TYPES = frozenset([descriptor.FieldDescriptor.CPPTYPE_FLOAT, + descriptor.FieldDescriptor.CPPTYPE_DOUBLE]) +_INFINITY = 'Infinity' +_NEG_INFINITY = '-Infinity' +_NAN = 'NaN' + +_UNPAIRED_SURROGATE_PATTERN = re.compile( + u'[\ud800-\udbff](?![\udc00-\udfff])|(? self.max_recursion_depth: + raise ParseError('Message too deep. Max recursion depth is {0}'.format( + self.max_recursion_depth)) + message_descriptor = message.DESCRIPTOR + full_name = message_descriptor.full_name + if not path: + path = message_descriptor.name + if _IsWrapperMessage(message_descriptor): + self._ConvertWrapperMessage(value, message, path) + elif full_name in _WKTJSONMETHODS: + methodcaller(_WKTJSONMETHODS[full_name][1], value, message, path)(self) + else: + self._ConvertFieldValuePair(value, message, path) + self.recursion_depth -= 1 + + def _ConvertFieldValuePair(self, js, message, path): + """Convert field value pairs into regular message. + + Args: + js: A JSON object to convert the field value pairs. + message: A regular protocol message to record the data. + path: parent path to log parse error info. + + Raises: + ParseError: In case of problems converting. + """ + names = [] + message_descriptor = message.DESCRIPTOR + fields_by_json_name = dict((f.json_name, f) + for f in message_descriptor.fields) + for name in js: + try: + field = fields_by_json_name.get(name, None) + if not field: + field = message_descriptor.fields_by_name.get(name, None) + if not field and _VALID_EXTENSION_NAME.match(name): + if not message_descriptor.is_extendable: + raise ParseError( + 'Message type {0} does not have extensions at {1}'.format( + message_descriptor.full_name, path)) + identifier = name[1:-1] # strip [] brackets + # pylint: disable=protected-access + field = message.Extensions._FindExtensionByName(identifier) + # pylint: enable=protected-access + if not field: + # Try looking for extension by the message type name, dropping the + # field name following the final . separator in full_name. + identifier = '.'.join(identifier.split('.')[:-1]) + # pylint: disable=protected-access + field = message.Extensions._FindExtensionByName(identifier) + # pylint: enable=protected-access + if not field: + if self.ignore_unknown_fields: + continue + raise ParseError( + ('Message type "{0}" has no field named "{1}" at "{2}".\n' + ' Available Fields(except extensions): "{3}"').format( + message_descriptor.full_name, name, path, + [f.json_name for f in message_descriptor.fields])) + if name in names: + raise ParseError('Message type "{0}" should not have multiple ' + '"{1}" fields at "{2}".'.format( + message.DESCRIPTOR.full_name, name, path)) + names.append(name) + value = js[name] + # Check no other oneof field is parsed. 
+ if field.containing_oneof is not None and value is not None: + oneof_name = field.containing_oneof.name + if oneof_name in names: + raise ParseError('Message type "{0}" should not have multiple ' + '"{1}" oneof fields at "{2}".'.format( + message.DESCRIPTOR.full_name, oneof_name, + path)) + names.append(oneof_name) + + if value is None: + if (field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE + and field.message_type.full_name == 'google.protobuf.Value'): + sub_message = getattr(message, field.name) + sub_message.null_value = 0 + elif (field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_ENUM + and field.enum_type.full_name == 'google.protobuf.NullValue'): + setattr(message, field.name, 0) + else: + message.ClearField(field.name) + continue + + # Parse field value. + if _IsMapEntry(field): + message.ClearField(field.name) + self._ConvertMapFieldValue(value, message, field, + '{0}.{1}'.format(path, name)) + elif field.label == descriptor.FieldDescriptor.LABEL_REPEATED: + message.ClearField(field.name) + if not isinstance(value, list): + raise ParseError('repeated field {0} must be in [] which is ' + '{1} at {2}'.format(name, value, path)) + if field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE: + # Repeated message field. + for index, item in enumerate(value): + sub_message = getattr(message, field.name).add() + # None is a null_value in Value. + if (item is None and + sub_message.DESCRIPTOR.full_name != 'google.protobuf.Value'): + raise ParseError('null is not allowed to be used as an element' + ' in a repeated field at {0}.{1}[{2}]'.format( + path, name, index)) + self.ConvertMessage(item, sub_message, + '{0}.{1}[{2}]'.format(path, name, index)) + else: + # Repeated scalar field. + for index, item in enumerate(value): + if item is None: + raise ParseError('null is not allowed to be used as an element' + ' in a repeated field at {0}.{1}[{2}]'.format( + path, name, index)) + getattr(message, field.name).append( + _ConvertScalarFieldValue( + item, field, '{0}.{1}[{2}]'.format(path, name, index))) + elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE: + if field.is_extension: + sub_message = message.Extensions[field] + else: + sub_message = getattr(message, field.name) + sub_message.SetInParent() + self.ConvertMessage(value, sub_message, '{0}.{1}'.format(path, name)) + else: + if field.is_extension: + message.Extensions[field] = _ConvertScalarFieldValue( + value, field, '{0}.{1}'.format(path, name)) + else: + setattr( + message, field.name, + _ConvertScalarFieldValue(value, field, + '{0}.{1}'.format(path, name))) + except ParseError as e: + if field and field.containing_oneof is None: + raise ParseError('Failed to parse {0} field: {1}.'.format(name, e)) + else: + raise ParseError(str(e)) + except ValueError as e: + raise ParseError('Failed to parse {0} field: {1}.'.format(name, e)) + except TypeError as e: + raise ParseError('Failed to parse {0} field: {1}.'.format(name, e)) + + def _ConvertAnyMessage(self, value, message, path): + """Convert a JSON representation into Any message.""" + if isinstance(value, dict) and not value: + return + try: + type_url = value['@type'] + except KeyError: + raise ParseError( + '@type is missing when parsing any message at {0}'.format(path)) + + try: + sub_message = _CreateMessageFromTypeUrl(type_url, self.descriptor_pool) + except TypeError as e: + raise ParseError('{0} at {1}'.format(e, path)) + message_descriptor = sub_message.DESCRIPTOR + full_name = message_descriptor.full_name + if 
_IsWrapperMessage(message_descriptor): + self._ConvertWrapperMessage(value['value'], sub_message, + '{0}.value'.format(path)) + elif full_name in _WKTJSONMETHODS: + methodcaller(_WKTJSONMETHODS[full_name][1], value['value'], sub_message, + '{0}.value'.format(path))( + self) + else: + del value['@type'] + self._ConvertFieldValuePair(value, sub_message, path) + value['@type'] = type_url + # Sets Any message + message.value = sub_message.SerializeToString() + message.type_url = type_url + + def _ConvertGenericMessage(self, value, message, path): + """Convert a JSON representation into message with FromJsonString.""" + # Duration, Timestamp, FieldMask have a FromJsonString method to do the + # conversion. Users can also call the method directly. + try: + message.FromJsonString(value) + except ValueError as e: + raise ParseError('{0} at {1}'.format(e, path)) + + def _ConvertValueMessage(self, value, message, path): + """Convert a JSON representation into Value message.""" + if isinstance(value, dict): + self._ConvertStructMessage(value, message.struct_value, path) + elif isinstance(value, list): + self._ConvertListValueMessage(value, message.list_value, path) + elif value is None: + message.null_value = 0 + elif isinstance(value, bool): + message.bool_value = value + elif isinstance(value, str): + message.string_value = value + elif isinstance(value, _INT_OR_FLOAT): + message.number_value = value + else: + raise ParseError('Value {0} has unexpected type {1} at {2}'.format( + value, type(value), path)) + + def _ConvertListValueMessage(self, value, message, path): + """Convert a JSON representation into ListValue message.""" + if not isinstance(value, list): + raise ParseError('ListValue must be in [] which is {0} at {1}'.format( + value, path)) + message.ClearField('values') + for index, item in enumerate(value): + self._ConvertValueMessage(item, message.values.add(), + '{0}[{1}]'.format(path, index)) + + def _ConvertStructMessage(self, value, message, path): + """Convert a JSON representation into Struct message.""" + if not isinstance(value, dict): + raise ParseError('Struct must be in a dict which is {0} at {1}'.format( + value, path)) + # Clear will mark the struct as modified so it will be created even if + # there are no values. + message.Clear() + for key in value: + self._ConvertValueMessage(value[key], message.fields[key], + '{0}.{1}'.format(path, key)) + return + + def _ConvertWrapperMessage(self, value, message, path): + """Convert a JSON representation into Wrapper message.""" + field = message.DESCRIPTOR.fields_by_name['value'] + setattr( + message, 'value', + _ConvertScalarFieldValue(value, field, path='{0}.value'.format(path))) + + def _ConvertMapFieldValue(self, value, message, field, path): + """Convert map field value for a message map field. + + Args: + value: A JSON object to convert the map field value. + message: A protocol message to record the converted data. + field: The descriptor of the map field to be converted. + path: parent path to log parse error info. + + Raises: + ParseError: In case of convert problems. 
+ """ + if not isinstance(value, dict): + raise ParseError( + 'Map field {0} must be in a dict which is {1} at {2}'.format( + field.name, value, path)) + key_field = field.message_type.fields_by_name['key'] + value_field = field.message_type.fields_by_name['value'] + for key in value: + key_value = _ConvertScalarFieldValue(key, key_field, + '{0}.key'.format(path), True) + if value_field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE: + self.ConvertMessage(value[key], + getattr(message, field.name)[key_value], + '{0}[{1}]'.format(path, key_value)) + else: + getattr(message, field.name)[key_value] = _ConvertScalarFieldValue( + value[key], value_field, path='{0}[{1}]'.format(path, key_value)) + + +def _ConvertScalarFieldValue(value, field, path, require_str=False): + """Convert a single scalar field value. + + Args: + value: A scalar value to convert the scalar field value. + field: The descriptor of the field to convert. + path: parent path to log parse error info. + require_str: If True, the field value must be a str. + + Returns: + The converted scalar field value + + Raises: + ParseError: In case of convert problems. + """ + try: + if field.cpp_type in _INT_TYPES: + return _ConvertInteger(value) + elif field.cpp_type in _FLOAT_TYPES: + return _ConvertFloat(value, field) + elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_BOOL: + return _ConvertBool(value, require_str) + elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_STRING: + if field.type == descriptor.FieldDescriptor.TYPE_BYTES: + if isinstance(value, str): + encoded = value.encode('utf-8') + else: + encoded = value + # Add extra padding '=' + padded_value = encoded + b'=' * (4 - len(encoded) % 4) + return base64.urlsafe_b64decode(padded_value) + else: + # Checking for unpaired surrogates appears to be unreliable, + # depending on the specific Python version, so we check manually. + if _UNPAIRED_SURROGATE_PATTERN.search(value): + raise ParseError('Unpaired surrogate') + return value + elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_ENUM: + # Convert an enum value. + enum_value = field.enum_type.values_by_name.get(value, None) + if enum_value is None: + try: + number = int(value) + enum_value = field.enum_type.values_by_number.get(number, None) + except ValueError: + raise ParseError('Invalid enum value {0} for enum type {1}'.format( + value, field.enum_type.full_name)) + if enum_value is None: + if field.file.syntax == 'proto3': + # Proto3 accepts unknown enums. + return number + raise ParseError('Invalid enum value {0} for enum type {1}'.format( + value, field.enum_type.full_name)) + return enum_value.number + except ParseError as e: + raise ParseError('{0} at {1}'.format(e, path)) + + +def _ConvertInteger(value): + """Convert an integer. + + Args: + value: A scalar value to convert. + + Returns: + The integer value. + + Raises: + ParseError: If an integer couldn't be consumed. 
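The converters above back the public json_format entry points named in the module docstring; a hedged usage sketch, with my_proto_pb2.MyMessage standing in for any generated message type (not part of this diff):

    from google.protobuf import json_format
    from my_proto_pb2 import MyMessage   # hypothetical generated module

    msg = MyMessage(foo='bar')
    json_string = json_format.MessageToJson(msg)
    restored = json_format.Parse(json_string, MyMessage())
    assert restored.foo == 'bar'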
+ """ + if isinstance(value, float) and not value.is_integer(): + raise ParseError('Couldn\'t parse integer: {0}'.format(value)) + + if isinstance(value, str) and value.find(' ') != -1: + raise ParseError('Couldn\'t parse integer: "{0}"'.format(value)) + + if isinstance(value, bool): + raise ParseError('Bool value {0} is not acceptable for ' + 'integer field'.format(value)) + + return int(value) + + +def _ConvertFloat(value, field): + """Convert an floating point number.""" + if isinstance(value, float): + if math.isnan(value): + raise ParseError('Couldn\'t parse NaN, use quoted "NaN" instead') + if math.isinf(value): + if value > 0: + raise ParseError('Couldn\'t parse Infinity or value too large, ' + 'use quoted "Infinity" instead') + else: + raise ParseError('Couldn\'t parse -Infinity or value too small, ' + 'use quoted "-Infinity" instead') + if field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_FLOAT: + # pylint: disable=protected-access + if value > type_checkers._FLOAT_MAX: + raise ParseError('Float value too large') + # pylint: disable=protected-access + if value < type_checkers._FLOAT_MIN: + raise ParseError('Float value too small') + if value == 'nan': + raise ParseError('Couldn\'t parse float "nan", use "NaN" instead') + try: + # Assume Python compatible syntax. + return float(value) + except ValueError: + # Check alternative spellings. + if value == _NEG_INFINITY: + return float('-inf') + elif value == _INFINITY: + return float('inf') + elif value == _NAN: + return float('nan') + else: + raise ParseError('Couldn\'t parse float: {0}'.format(value)) + + +def _ConvertBool(value, require_str): + """Convert a boolean value. + + Args: + value: A scalar value to convert. + require_str: If True, value must be a str. + + Returns: + The bool parsed. + + Raises: + ParseError: If a boolean value couldn't be consumed. + """ + if require_str: + if value == 'true': + return True + elif value == 'false': + return False + else: + raise ParseError('Expected "true" or "false", not {0}'.format(value)) + + if not isinstance(value, bool): + raise ParseError('Expected true or false without quotes') + return value + +_WKTJSONMETHODS = { + 'google.protobuf.Any': ['_AnyMessageToJsonObject', + '_ConvertAnyMessage'], + 'google.protobuf.Duration': ['_GenericMessageToJsonObject', + '_ConvertGenericMessage'], + 'google.protobuf.FieldMask': ['_GenericMessageToJsonObject', + '_ConvertGenericMessage'], + 'google.protobuf.ListValue': ['_ListValueMessageToJsonObject', + '_ConvertListValueMessage'], + 'google.protobuf.Struct': ['_StructMessageToJsonObject', + '_ConvertStructMessage'], + 'google.protobuf.Timestamp': ['_GenericMessageToJsonObject', + '_ConvertGenericMessage'], + 'google.protobuf.Value': ['_ValueMessageToJsonObject', + '_ConvertValueMessage'] +} diff --git a/openpype/hosts/hiero/vendor/google/protobuf/message.py b/openpype/hosts/hiero/vendor/google/protobuf/message.py new file mode 100644 index 0000000000..76c6802f70 --- /dev/null +++ b/openpype/hosts/hiero/vendor/google/protobuf/message.py @@ -0,0 +1,424 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. +# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. 
+# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +# TODO(robinson): We should just make these methods all "pure-virtual" and move +# all implementation out, into reflection.py for now. + + +"""Contains an abstract base class for protocol messages.""" + +__author__ = 'robinson@google.com (Will Robinson)' + +class Error(Exception): + """Base error type for this module.""" + pass + + +class DecodeError(Error): + """Exception raised when deserializing messages.""" + pass + + +class EncodeError(Error): + """Exception raised when serializing messages.""" + pass + + +class Message(object): + + """Abstract base class for protocol messages. + + Protocol message classes are almost always generated by the protocol + compiler. These generated types subclass Message and implement the methods + shown below. + """ + + # TODO(robinson): Link to an HTML document here. + + # TODO(robinson): Document that instances of this class will also + # have an Extensions attribute with __getitem__ and __setitem__. + # Again, not sure how to best convey this. + + # TODO(robinson): Document that the class must also have a static + # RegisterExtension(extension_field) method. + # Not sure how to best express at this point. + + # TODO(robinson): Document these fields and methods. + + __slots__ = [] + + #: The :class:`google.protobuf.descriptor.Descriptor` for this message type. + DESCRIPTOR = None + + def __deepcopy__(self, memo=None): + clone = type(self)() + clone.MergeFrom(self) + return clone + + def __eq__(self, other_msg): + """Recursively compares two messages by value and structure.""" + raise NotImplementedError + + def __ne__(self, other_msg): + # Can't just say self != other_msg, since that would infinitely recurse. :) + return not self == other_msg + + def __hash__(self): + raise TypeError('unhashable object') + + def __str__(self): + """Outputs a human-readable representation of the message.""" + raise NotImplementedError + + def __unicode__(self): + """Outputs a human-readable representation of the message.""" + raise NotImplementedError + + def MergeFrom(self, other_msg): + """Merges the contents of the specified message into current message. + + This method merges the contents of the specified message into the current + message. Singular fields that are set in the specified message overwrite + the corresponding fields in the current message. Repeated fields are + appended. 
Singular sub-messages and groups are recursively merged. + + Args: + other_msg (Message): A message to merge into the current message. + """ + raise NotImplementedError + + def CopyFrom(self, other_msg): + """Copies the content of the specified message into the current message. + + The method clears the current message and then merges the specified + message using MergeFrom. + + Args: + other_msg (Message): A message to copy into the current one. + """ + if self is other_msg: + return + self.Clear() + self.MergeFrom(other_msg) + + def Clear(self): + """Clears all data that was set in the message.""" + raise NotImplementedError + + def SetInParent(self): + """Mark this as present in the parent. + + This normally happens automatically when you assign a field of a + sub-message, but sometimes you want to make the sub-message + present while keeping it empty. If you find yourself using this, + you may want to reconsider your design. + """ + raise NotImplementedError + + def IsInitialized(self): + """Checks if the message is initialized. + + Returns: + bool: The method returns True if the message is initialized (i.e. all of + its required fields are set). + """ + raise NotImplementedError + + # TODO(robinson): MergeFromString() should probably return None and be + # implemented in terms of a helper that returns the # of bytes read. Our + # deserialization routines would use the helper when recursively + # deserializing, but the end user would almost always just want the no-return + # MergeFromString(). + + def MergeFromString(self, serialized): + """Merges serialized protocol buffer data into this message. + + When we find a field in `serialized` that is already present + in this message: + + - If it's a "repeated" field, we append to the end of our list. + - Else, if it's a scalar, we overwrite our field. + - Else, (it's a nonrepeated composite), we recursively merge + into the existing composite. + + Args: + serialized (bytes): Any object that allows us to call + ``memoryview(serialized)`` to access a string of bytes using the + buffer interface. + + Returns: + int: The number of bytes read from `serialized`. + For non-group messages, this will always be `len(serialized)`, + but for messages which are actually groups, this will + generally be less than `len(serialized)`, since we must + stop when we reach an ``END_GROUP`` tag. Note that if + we *do* stop because of an ``END_GROUP`` tag, the number + of bytes returned does not include the bytes + for the ``END_GROUP`` tag information. + + Raises: + DecodeError: if the input cannot be parsed. + """ + # TODO(robinson): Document handling of unknown fields. + # TODO(robinson): When we switch to a helper, this will return None. + raise NotImplementedError + + def ParseFromString(self, serialized): + """Parse serialized protocol buffer data into this message. + + Like :func:`MergeFromString()`, except we clear the object first. + + Raises: + message.DecodeError if the input cannot be parsed. + """ + self.Clear() + return self.MergeFromString(serialized) + + def SerializeToString(self, **kwargs): + """Serializes the protocol message to a binary string. + + Keyword Args: + deterministic (bool): If true, requests deterministic serialization + of the protobuf, with predictable ordering of map keys. + + Returns: + A binary string representation of the message if all of the required + fields in the message are set (i.e. the message is initialized). + + Raises: + EncodeError: if the message isn't initialized (see :func:`IsInitialized`). 
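Generated message classes implement the abstract methods above, so a typical round trip looks roughly like this (my_proto_pb2 is again a hypothetical generated module):

    from my_proto_pb2 import MyMessage   # hypothetical generated module

    original = MyMessage(foo='bar')
    payload = original.SerializeToString(deterministic=True)

    restored = MyMessage()
    restored.ParseFromString(payload)   # Clear() followed by MergeFromString()
    assert restored == original

    copy = MyMessage()
    copy.CopyFrom(original)             # Clear() followed by MergeFrom()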
+ """ + raise NotImplementedError + + def SerializePartialToString(self, **kwargs): + """Serializes the protocol message to a binary string. + + This method is similar to SerializeToString but doesn't check if the + message is initialized. + + Keyword Args: + deterministic (bool): If true, requests deterministic serialization + of the protobuf, with predictable ordering of map keys. + + Returns: + bytes: A serialized representation of the partial message. + """ + raise NotImplementedError + + # TODO(robinson): Decide whether we like these better + # than auto-generated has_foo() and clear_foo() methods + # on the instances themselves. This way is less consistent + # with C++, but it makes reflection-type access easier and + # reduces the number of magically autogenerated things. + # + # TODO(robinson): Be sure to document (and test) exactly + # which field names are accepted here. Are we case-sensitive? + # What do we do with fields that share names with Python keywords + # like 'lambda' and 'yield'? + # + # nnorwitz says: + # """ + # Typically (in python), an underscore is appended to names that are + # keywords. So they would become lambda_ or yield_. + # """ + def ListFields(self): + """Returns a list of (FieldDescriptor, value) tuples for present fields. + + A message field is non-empty if HasField() would return true. A singular + primitive field is non-empty if HasField() would return true in proto2 or it + is non zero in proto3. A repeated field is non-empty if it contains at least + one element. The fields are ordered by field number. + + Returns: + list[tuple(FieldDescriptor, value)]: field descriptors and values + for all fields in the message which are not empty. The values vary by + field type. + """ + raise NotImplementedError + + def HasField(self, field_name): + """Checks if a certain field is set for the message. + + For a oneof group, checks if any field inside is set. Note that if the + field_name is not defined in the message descriptor, :exc:`ValueError` will + be raised. + + Args: + field_name (str): The name of the field to check for presence. + + Returns: + bool: Whether a value has been set for the named field. + + Raises: + ValueError: if the `field_name` is not a member of this message. + """ + raise NotImplementedError + + def ClearField(self, field_name): + """Clears the contents of a given field. + + Inside a oneof group, clears the field set. If the name neither refers to a + defined field or oneof group, :exc:`ValueError` is raised. + + Args: + field_name (str): The name of the field to check for presence. + + Raises: + ValueError: if the `field_name` is not a member of this message. + """ + raise NotImplementedError + + def WhichOneof(self, oneof_group): + """Returns the name of the field that is set inside a oneof group. + + If no field is set, returns None. + + Args: + oneof_group (str): the name of the oneof group to check. + + Returns: + str or None: The name of the group that is set, or None. + + Raises: + ValueError: no group with the given name exists + """ + raise NotImplementedError + + def HasExtension(self, extension_handle): + """Checks if a certain extension is present for this message. + + Extensions are retrieved using the :attr:`Extensions` mapping (if present). + + Args: + extension_handle: The handle for the extension to check. + + Returns: + bool: Whether the extension is present for this message. + + Raises: + KeyError: if the extension is repeated. 
Similar to repeated fields, + there is no separate notion of presence: a "not present" repeated + extension is an empty list. + """ + raise NotImplementedError + + def ClearExtension(self, extension_handle): + """Clears the contents of a given extension. + + Args: + extension_handle: The handle for the extension to clear. + """ + raise NotImplementedError + + def UnknownFields(self): + """Returns the UnknownFieldSet. + + Returns: + UnknownFieldSet: The unknown fields stored in this message. + """ + raise NotImplementedError + + def DiscardUnknownFields(self): + """Clears all fields in the :class:`UnknownFieldSet`. + + This operation is recursive for nested message. + """ + raise NotImplementedError + + def ByteSize(self): + """Returns the serialized size of this message. + + Recursively calls ByteSize() on all contained messages. + + Returns: + int: The number of bytes required to serialize this message. + """ + raise NotImplementedError + + @classmethod + def FromString(cls, s): + raise NotImplementedError + + @staticmethod + def RegisterExtension(extension_handle): + raise NotImplementedError + + def _SetListener(self, message_listener): + """Internal method used by the protocol message implementation. + Clients should not call this directly. + + Sets a listener that this message will call on certain state transitions. + + The purpose of this method is to register back-edges from children to + parents at runtime, for the purpose of setting "has" bits and + byte-size-dirty bits in the parent and ancestor objects whenever a child or + descendant object is modified. + + If the client wants to disconnect this Message from the object tree, she + explicitly sets callback to None. + + If message_listener is None, unregisters any existing listener. Otherwise, + message_listener must implement the MessageListener interface in + internal/message_listener.py, and we discard any listener registered + via a previous _SetListener() call. + """ + raise NotImplementedError + + def __getstate__(self): + """Support the pickle protocol.""" + return dict(serialized=self.SerializePartialToString()) + + def __setstate__(self, state): + """Support the pickle protocol.""" + self.__init__() + serialized = state['serialized'] + # On Python 3, using encoding='latin1' is required for unpickling + # protos pickled by Python 2. + if not isinstance(serialized, bytes): + serialized = serialized.encode('latin1') + self.ParseFromString(serialized) + + def __reduce__(self): + message_descriptor = self.DESCRIPTOR + if message_descriptor.containing_type is None: + return type(self), (), self.__getstate__() + # the message type must be nested. + # Python does not pickle nested classes; use the symbol_database on the + # receiving end. + container = message_descriptor + return (_InternalConstructMessage, (container.full_name,), + self.__getstate__()) + + +def _InternalConstructMessage(full_name): + """Constructs a nested message.""" + from google.protobuf import symbol_database # pylint:disable=g-import-not-at-top + + return symbol_database.Default().GetSymbol(full_name)() diff --git a/openpype/hosts/hiero/vendor/google/protobuf/message_factory.py b/openpype/hosts/hiero/vendor/google/protobuf/message_factory.py new file mode 100644 index 0000000000..3656fa6874 --- /dev/null +++ b/openpype/hosts/hiero/vendor/google/protobuf/message_factory.py @@ -0,0 +1,185 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. 
+# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +"""Provides a factory class for generating dynamic messages. + +The easiest way to use this class is if you have access to the FileDescriptor +protos containing the messages you want to create you can just do the following: + +message_classes = message_factory.GetMessages(iterable_of_file_descriptors) +my_proto_instance = message_classes['some.proto.package.MessageName']() +""" + +__author__ = 'matthewtoia@google.com (Matt Toia)' + +from google.protobuf.internal import api_implementation +from google.protobuf import descriptor_pool +from google.protobuf import message + +if api_implementation.Type() == 'cpp': + from google.protobuf.pyext import cpp_message as message_impl +else: + from google.protobuf.internal import python_message as message_impl + + +# The type of all Message classes. +_GENERATED_PROTOCOL_MESSAGE_TYPE = message_impl.GeneratedProtocolMessageType + + +class MessageFactory(object): + """Factory for creating Proto2 messages from descriptors in a pool.""" + + def __init__(self, pool=None): + """Initializes a new factory.""" + self.pool = pool or descriptor_pool.DescriptorPool() + + # local cache of all classes built from protobuf descriptors + self._classes = {} + + def GetPrototype(self, descriptor): + """Obtains a proto2 message class based on the passed in descriptor. + + Passing a descriptor with a fully qualified name matching a previous + invocation will cause the same class to be returned. + + Args: + descriptor: The descriptor to build from. + + Returns: + A class describing the passed in descriptor. + """ + if descriptor not in self._classes: + result_class = self.CreatePrototype(descriptor) + # The assignment to _classes is redundant for the base implementation, but + # might avoid confusion in cases where CreatePrototype gets overridden and + # does not call the base implementation. 
+      self._classes[descriptor] = result_class
+      return result_class
+    return self._classes[descriptor]
+
+  def CreatePrototype(self, descriptor):
+    """Builds a proto2 message class based on the passed in descriptor.
+
+    Don't call this function directly, it always creates a new class. Call
+    GetPrototype() instead. This method is meant to be overridden in subclasses
+    to perform additional operations on the newly constructed class.
+
+    Args:
+      descriptor: The descriptor to build from.
+
+    Returns:
+      A class describing the passed in descriptor.
+    """
+    descriptor_name = descriptor.name
+    result_class = _GENERATED_PROTOCOL_MESSAGE_TYPE(
+        descriptor_name,
+        (message.Message,),
+        {
+            'DESCRIPTOR': descriptor,
+            # If module not set, it wrongly points to message_factory module.
+            '__module__': None,
+        })
+    result_class._FACTORY = self  # pylint: disable=protected-access
+    # Assign in _classes before doing recursive calls to avoid infinite
+    # recursion.
+    self._classes[descriptor] = result_class
+    for field in descriptor.fields:
+      if field.message_type:
+        self.GetPrototype(field.message_type)
+    for extension in result_class.DESCRIPTOR.extensions:
+      if extension.containing_type not in self._classes:
+        self.GetPrototype(extension.containing_type)
+      extended_class = self._classes[extension.containing_type]
+      extended_class.RegisterExtension(extension)
+    return result_class
+
+  def GetMessages(self, files):
+    """Gets all the messages from a specified file.
+
+    This will find and resolve dependencies, failing if the descriptor
+    pool cannot satisfy them.
+
+    Args:
+      files: The file names to extract messages from.
+
+    Returns:
+      A dictionary mapping proto names to the message classes. This will include
+      any dependent messages as well as any messages defined in the same file as
+      a specified message.
+    """
+    result = {}
+    for file_name in files:
+      file_desc = self.pool.FindFileByName(file_name)
+      for desc in file_desc.message_types_by_name.values():
+        result[desc.full_name] = self.GetPrototype(desc)
+
+      # While the extension FieldDescriptors are created by the descriptor pool,
+      # the python classes created in the factory need them to be registered
+      # explicitly, which is done below.
+      #
+      # The call to RegisterExtension will specifically check if the
+      # extension was already registered on the object and either
+      # ignore the registration if the original was the same, or raise
+      # an error if they were different.
+
+      for extension in file_desc.extensions_by_name.values():
+        if extension.containing_type not in self._classes:
+          self.GetPrototype(extension.containing_type)
+        extended_class = self._classes[extension.containing_type]
+        extended_class.RegisterExtension(extension)
+    return result
+
+
+_FACTORY = MessageFactory()
+
+
+def GetMessages(file_protos):
+  """Builds a dictionary of all the messages available in a set of files.
+
+  Args:
+    file_protos: Iterable of FileDescriptorProto to build messages out of.
+
+  Returns:
+    A dictionary mapping proto names to the message classes. This will include
+    any dependent messages as well as any messages defined in the same file as
+    a specified message.
+  """
+  # The cpp implementation of the protocol buffer library requires adding the
+  # messages in topological order of the dependency graph.
+  file_by_name = {file_proto.name: file_proto for file_proto in file_protos}
+  def _AddFile(file_proto):
+    for dependency in file_proto.dependency:
+      if dependency in file_by_name:
+        # Remove from elements to be visited, in order to cut cycles.
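+        # For example, if a.proto imports b.proto, b.proto is popped and added
+        # to the pool here before a.proto, so dependencies always reach the
+        # pool before the files that depend on them.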
+ _AddFile(file_by_name.pop(dependency)) + _FACTORY.pool.Add(file_proto) + while file_by_name: + _AddFile(file_by_name.popitem()[1]) + return _FACTORY.GetMessages([file_proto.name for file_proto in file_protos]) diff --git a/openpype/hosts/hiero/vendor/google/protobuf/proto_builder.py b/openpype/hosts/hiero/vendor/google/protobuf/proto_builder.py new file mode 100644 index 0000000000..a4667ce63e --- /dev/null +++ b/openpype/hosts/hiero/vendor/google/protobuf/proto_builder.py @@ -0,0 +1,134 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. +# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +"""Dynamic Protobuf class creator.""" + +from collections import OrderedDict +import hashlib +import os + +from google.protobuf import descriptor_pb2 +from google.protobuf import descriptor +from google.protobuf import message_factory + + +def _GetMessageFromFactory(factory, full_name): + """Get a proto class from the MessageFactory by name. + + Args: + factory: a MessageFactory instance. + full_name: str, the fully qualified name of the proto type. + Returns: + A class, for the type identified by full_name. + Raises: + KeyError, if the proto is not found in the factory's descriptor pool. + """ + proto_descriptor = factory.pool.FindMessageTypeByName(full_name) + proto_cls = factory.GetPrototype(proto_descriptor) + return proto_cls + + +def MakeSimpleProtoClass(fields, full_name=None, pool=None): + """Create a Protobuf class whose fields are basic types. + + Note: this doesn't validate field names! + + Args: + fields: dict of {name: field_type} mappings for each field in the proto. If + this is an OrderedDict the order will be maintained, otherwise the + fields will be sorted by name. + full_name: optional str, the fully-qualified name of the proto type. + pool: optional DescriptorPool instance. + Returns: + a class, the new protobuf class with a FileDescriptor. 
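+
+  Example (an illustrative sketch; the names used below are hypothetical)::
+
+    from google.protobuf import descriptor_pb2
+    fields = {
+        'name': descriptor_pb2.FieldDescriptorProto.TYPE_STRING,
+        'value': descriptor_pb2.FieldDescriptorProto.TYPE_INT64,
+    }
+    PairProto = MakeSimpleProtoClass(fields, full_name='my.pkg.Pair')
+    msg = PairProto(name='answer', value=42)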
+ """ + factory = message_factory.MessageFactory(pool=pool) + + if full_name is not None: + try: + proto_cls = _GetMessageFromFactory(factory, full_name) + return proto_cls + except KeyError: + # The factory's DescriptorPool doesn't know about this class yet. + pass + + # Get a list of (name, field_type) tuples from the fields dict. If fields was + # an OrderedDict we keep the order, but otherwise we sort the field to ensure + # consistent ordering. + field_items = fields.items() + if not isinstance(fields, OrderedDict): + field_items = sorted(field_items) + + # Use a consistent file name that is unlikely to conflict with any imported + # proto files. + fields_hash = hashlib.sha1() + for f_name, f_type in field_items: + fields_hash.update(f_name.encode('utf-8')) + fields_hash.update(str(f_type).encode('utf-8')) + proto_file_name = fields_hash.hexdigest() + '.proto' + + # If the proto is anonymous, use the same hash to name it. + if full_name is None: + full_name = ('net.proto2.python.public.proto_builder.AnonymousProto_' + + fields_hash.hexdigest()) + try: + proto_cls = _GetMessageFromFactory(factory, full_name) + return proto_cls + except KeyError: + # The factory's DescriptorPool doesn't know about this class yet. + pass + + # This is the first time we see this proto: add a new descriptor to the pool. + factory.pool.Add( + _MakeFileDescriptorProto(proto_file_name, full_name, field_items)) + return _GetMessageFromFactory(factory, full_name) + + +def _MakeFileDescriptorProto(proto_file_name, full_name, field_items): + """Populate FileDescriptorProto for MessageFactory's DescriptorPool.""" + package, name = full_name.rsplit('.', 1) + file_proto = descriptor_pb2.FileDescriptorProto() + file_proto.name = os.path.join(package.replace('.', '/'), proto_file_name) + file_proto.package = package + desc_proto = file_proto.message_type.add() + desc_proto.name = name + for f_number, (f_name, f_type) in enumerate(field_items, 1): + field_proto = desc_proto.field.add() + field_proto.name = f_name + # # If the number falls in the reserved range, reassign it to the correct + # # number after the range. + if f_number >= descriptor.FieldDescriptor.FIRST_RESERVED_FIELD_NUMBER: + f_number += ( + descriptor.FieldDescriptor.LAST_RESERVED_FIELD_NUMBER - + descriptor.FieldDescriptor.FIRST_RESERVED_FIELD_NUMBER + 1) + field_proto.number = f_number + field_proto.label = descriptor_pb2.FieldDescriptorProto.LABEL_OPTIONAL + field_proto.type = f_type + return file_proto diff --git a/openpype/hosts/hiero/vendor/google/protobuf/pyext/__init__.py b/openpype/hosts/hiero/vendor/google/protobuf/pyext/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/openpype/hosts/hiero/vendor/google/protobuf/pyext/cpp_message.py b/openpype/hosts/hiero/vendor/google/protobuf/pyext/cpp_message.py new file mode 100644 index 0000000000..fc8eb32d79 --- /dev/null +++ b/openpype/hosts/hiero/vendor/google/protobuf/pyext/cpp_message.py @@ -0,0 +1,65 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. +# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. 
+# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +"""Protocol message implementation hooks for C++ implementation. + +Contains helper functions used to create protocol message classes from +Descriptor objects at runtime backed by the protocol buffer C++ API. +""" + +__author__ = 'tibell@google.com (Johan Tibell)' + +from google.protobuf.pyext import _message + + +class GeneratedProtocolMessageType(_message.MessageMeta): + + """Metaclass for protocol message classes created at runtime from Descriptors. + + The protocol compiler currently uses this metaclass to create protocol + message classes at runtime. Clients can also manually create their own + classes at runtime, as in this example: + + mydescriptor = Descriptor(.....) + factory = symbol_database.Default() + factory.pool.AddDescriptor(mydescriptor) + MyProtoClass = factory.GetPrototype(mydescriptor) + myproto_instance = MyProtoClass() + myproto.foo_field = 23 + ... + + The above example will not work for nested types. If you wish to include them, + use reflection.MakeClass() instead of manually instantiating the class in + order to create the appropriate class structure. + """ + + # Must be consistent with the protocol-compiler code in + # proto2/compiler/internal/generator.*. + _DESCRIPTOR_KEY = 'DESCRIPTOR' diff --git a/openpype/hosts/hiero/vendor/google/protobuf/pyext/python_pb2.py b/openpype/hosts/hiero/vendor/google/protobuf/pyext/python_pb2.py new file mode 100644 index 0000000000..2c6ecf4c98 --- /dev/null +++ b/openpype/hosts/hiero/vendor/google/protobuf/pyext/python_pb2.py @@ -0,0 +1,34 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: google/protobuf/pyext/python.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\"google/protobuf/pyext/python.proto\x12\x1fgoogle.protobuf.python.internal\"\xbc\x02\n\x0cTestAllTypes\x12\\\n\x17repeated_nested_message\x18\x01 \x03(\x0b\x32;.google.protobuf.python.internal.TestAllTypes.NestedMessage\x12\\\n\x17optional_nested_message\x18\x02 \x01(\x0b\x32;.google.protobuf.python.internal.TestAllTypes.NestedMessage\x12\x16\n\x0eoptional_int32\x18\x03 \x01(\x05\x1aX\n\rNestedMessage\x12\n\n\x02\x62\x62\x18\x01 \x01(\x05\x12;\n\x02\x63\x63\x18\x02 \x01(\x0b\x32/.google.protobuf.python.internal.ForeignMessage\"&\n\x0e\x46oreignMessage\x12\t\n\x01\x63\x18\x01 \x01(\x05\x12\t\n\x01\x64\x18\x02 \x03(\x05\"\x1d\n\x11TestAllExtensions*\x08\x08\x01\x10\x80\x80\x80\x80\x02:\x9a\x01\n!optional_nested_message_extension\x12\x32.google.protobuf.python.internal.TestAllExtensions\x18\x01 \x01(\x0b\x32;.google.protobuf.python.internal.TestAllTypes.NestedMessage:\x9a\x01\n!repeated_nested_message_extension\x12\x32.google.protobuf.python.internal.TestAllExtensions\x18\x02 \x03(\x0b\x32;.google.protobuf.python.internal.TestAllTypes.NestedMessageB\x02H\x01') + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.pyext.python_pb2', globals()) +if _descriptor._USE_C_DESCRIPTORS == False: + TestAllExtensions.RegisterExtension(optional_nested_message_extension) + TestAllExtensions.RegisterExtension(repeated_nested_message_extension) + + DESCRIPTOR._options = None + DESCRIPTOR._serialized_options = b'H\001' + _TESTALLTYPES._serialized_start=72 + _TESTALLTYPES._serialized_end=388 + _TESTALLTYPES_NESTEDMESSAGE._serialized_start=300 + _TESTALLTYPES_NESTEDMESSAGE._serialized_end=388 + _FOREIGNMESSAGE._serialized_start=390 + _FOREIGNMESSAGE._serialized_end=428 + _TESTALLEXTENSIONS._serialized_start=430 + _TESTALLEXTENSIONS._serialized_end=459 +# @@protoc_insertion_point(module_scope) diff --git a/openpype/hosts/hiero/vendor/google/protobuf/reflection.py b/openpype/hosts/hiero/vendor/google/protobuf/reflection.py new file mode 100644 index 0000000000..81e18859a8 --- /dev/null +++ b/openpype/hosts/hiero/vendor/google/protobuf/reflection.py @@ -0,0 +1,95 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. +# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. 
+# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +# This code is meant to work on Python 2.4 and above only. + +"""Contains a metaclass and helper functions used to create +protocol message classes from Descriptor objects at runtime. + +Recall that a metaclass is the "type" of a class. +(A class is to a metaclass what an instance is to a class.) + +In this case, we use the GeneratedProtocolMessageType metaclass +to inject all the useful functionality into the classes +output by the protocol compiler at compile-time. + +The upshot of all this is that the real implementation +details for ALL pure-Python protocol buffers are *here in +this file*. +""" + +__author__ = 'robinson@google.com (Will Robinson)' + + +from google.protobuf import message_factory +from google.protobuf import symbol_database + +# The type of all Message classes. +# Part of the public interface, but normally only used by message factories. +GeneratedProtocolMessageType = message_factory._GENERATED_PROTOCOL_MESSAGE_TYPE + +MESSAGE_CLASS_CACHE = {} + + +# Deprecated. Please NEVER use reflection.ParseMessage(). +def ParseMessage(descriptor, byte_str): + """Generate a new Message instance from this Descriptor and a byte string. + + DEPRECATED: ParseMessage is deprecated because it is using MakeClass(). + Please use MessageFactory.GetPrototype() instead. + + Args: + descriptor: Protobuf Descriptor object + byte_str: Serialized protocol buffer byte string + + Returns: + Newly created protobuf Message object. + """ + result_class = MakeClass(descriptor) + new_msg = result_class() + new_msg.ParseFromString(byte_str) + return new_msg + + +# Deprecated. Please NEVER use reflection.MakeClass(). +def MakeClass(descriptor): + """Construct a class object for a protobuf described by descriptor. + + DEPRECATED: use MessageFactory.GetPrototype() instead. + + Args: + descriptor: A descriptor.Descriptor object describing the protobuf. + Returns: + The Message class object described by the descriptor. + """ + # Original implementation leads to duplicate message classes, which won't play + # well with extensions. Message factory info is also missing. + # Redirect to message_factory. + return symbol_database.Default().GetPrototype(descriptor) diff --git a/openpype/hosts/hiero/vendor/google/protobuf/service.py b/openpype/hosts/hiero/vendor/google/protobuf/service.py new file mode 100644 index 0000000000..5625246324 --- /dev/null +++ b/openpype/hosts/hiero/vendor/google/protobuf/service.py @@ -0,0 +1,228 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. 
+# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +"""DEPRECATED: Declares the RPC service interfaces. + +This module declares the abstract interfaces underlying proto2 RPC +services. These are intended to be independent of any particular RPC +implementation, so that proto2 services can be used on top of a variety +of implementations. Starting with version 2.3.0, RPC implementations should +not try to build on these, but should instead provide code generator plugins +which generate code specific to the particular RPC implementation. This way +the generated code can be more appropriate for the implementation in use +and can avoid unnecessary layers of indirection. +""" + +__author__ = 'petar@google.com (Petar Petrov)' + + +class RpcException(Exception): + """Exception raised on failed blocking RPC method call.""" + pass + + +class Service(object): + + """Abstract base interface for protocol-buffer-based RPC services. + + Services themselves are abstract classes (implemented either by servers or as + stubs), but they subclass this base interface. The methods of this + interface can be used to call the methods of the service without knowing + its exact type at compile time (analogous to the Message interface). + """ + + def GetDescriptor(): + """Retrieves this service's descriptor.""" + raise NotImplementedError + + def CallMethod(self, method_descriptor, rpc_controller, + request, done): + """Calls a method of the service specified by method_descriptor. + + If "done" is None then the call is blocking and the response + message will be returned directly. Otherwise the call is asynchronous + and "done" will later be called with the response value. + + In the blocking case, RpcException will be raised on error. + + Preconditions: + + * method_descriptor.service == GetDescriptor + * request is of the exact same classes as returned by + GetRequestClass(method). + * After the call has started, the request must not be modified. + * "rpc_controller" is of the correct type for the RPC implementation being + used by this Service. 
For stubs, the "correct type" depends on the + RpcChannel which the stub is using. + + Postconditions: + + * "done" will be called when the method is complete. This may be + before CallMethod() returns or it may be at some point in the future. + * If the RPC failed, the response value passed to "done" will be None. + Further details about the failure can be found by querying the + RpcController. + """ + raise NotImplementedError + + def GetRequestClass(self, method_descriptor): + """Returns the class of the request message for the specified method. + + CallMethod() requires that the request is of a particular subclass of + Message. GetRequestClass() gets the default instance of this required + type. + + Example: + method = service.GetDescriptor().FindMethodByName("Foo") + request = stub.GetRequestClass(method)() + request.ParseFromString(input) + service.CallMethod(method, request, callback) + """ + raise NotImplementedError + + def GetResponseClass(self, method_descriptor): + """Returns the class of the response message for the specified method. + + This method isn't really needed, as the RpcChannel's CallMethod constructs + the response protocol message. It's provided anyway in case it is useful + for the caller to know the response type in advance. + """ + raise NotImplementedError + + +class RpcController(object): + + """An RpcController mediates a single method call. + + The primary purpose of the controller is to provide a way to manipulate + settings specific to the RPC implementation and to find out about RPC-level + errors. The methods provided by the RpcController interface are intended + to be a "least common denominator" set of features which we expect all + implementations to support. Specific implementations may provide more + advanced features (e.g. deadline propagation). + """ + + # Client-side methods below + + def Reset(self): + """Resets the RpcController to its initial state. + + After the RpcController has been reset, it may be reused in + a new call. Must not be called while an RPC is in progress. + """ + raise NotImplementedError + + def Failed(self): + """Returns true if the call failed. + + After a call has finished, returns true if the call failed. The possible + reasons for failure depend on the RPC implementation. Failed() must not + be called before a call has finished. If Failed() returns true, the + contents of the response message are undefined. + """ + raise NotImplementedError + + def ErrorText(self): + """If Failed is true, returns a human-readable description of the error.""" + raise NotImplementedError + + def StartCancel(self): + """Initiate cancellation. + + Advises the RPC system that the caller desires that the RPC call be + canceled. The RPC system may cancel it immediately, may wait awhile and + then cancel it, or may not even cancel the call at all. If the call is + canceled, the "done" callback will still be called and the RpcController + will indicate that the call failed at that time. + """ + raise NotImplementedError + + # Server-side methods below + + def SetFailed(self, reason): + """Sets a failure reason. + + Causes Failed() to return true on the client side. "reason" will be + incorporated into the message returned by ErrorText(). If you find + you need to return machine-readable information about failures, you + should incorporate it into your response protocol buffer and should + NOT call SetFailed(). + """ + raise NotImplementedError + + def IsCanceled(self): + """Checks if the client cancelled the RPC. 
+ + If true, indicates that the client canceled the RPC, so the server may + as well give up on replying to it. The server should still call the + final "done" callback. + """ + raise NotImplementedError + + def NotifyOnCancel(self, callback): + """Sets a callback to invoke on cancel. + + Asks that the given callback be called when the RPC is canceled. The + callback will always be called exactly once. If the RPC completes without + being canceled, the callback will be called after completion. If the RPC + has already been canceled when NotifyOnCancel() is called, the callback + will be called immediately. + + NotifyOnCancel() must be called no more than once per request. + """ + raise NotImplementedError + + +class RpcChannel(object): + + """Abstract interface for an RPC channel. + + An RpcChannel represents a communication line to a service which can be used + to call that service's methods. The service may be running on another + machine. Normally, you should not use an RpcChannel directly, but instead + construct a stub {@link Service} wrapping it. Example: + + Example: + RpcChannel channel = rpcImpl.Channel("remotehost.example.com:1234") + RpcController controller = rpcImpl.Controller() + MyService service = MyService_Stub(channel) + service.MyMethod(controller, request, callback) + """ + + def CallMethod(self, method_descriptor, rpc_controller, + request, response_class, done): + """Calls the method identified by the descriptor. + + Call the given method of the remote service. The signature of this + procedure looks the same as Service.CallMethod(), but the requirements + are less strict in one important way: the request object doesn't have to + be of any specific class as long as its descriptor is method.input_type. + """ + raise NotImplementedError diff --git a/openpype/hosts/hiero/vendor/google/protobuf/service_reflection.py b/openpype/hosts/hiero/vendor/google/protobuf/service_reflection.py new file mode 100644 index 0000000000..f82ab7145a --- /dev/null +++ b/openpype/hosts/hiero/vendor/google/protobuf/service_reflection.py @@ -0,0 +1,295 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. +# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +"""Contains metaclasses used to create protocol service and service stub +classes from ServiceDescriptor objects at runtime. + +The GeneratedServiceType and GeneratedServiceStubType metaclasses are used to +inject all useful functionality into the classes output by the protocol +compiler at compile-time. +""" + +__author__ = 'petar@google.com (Petar Petrov)' + + +class GeneratedServiceType(type): + + """Metaclass for service classes created at runtime from ServiceDescriptors. + + Implementations for all methods described in the Service class are added here + by this class. We also create properties to allow getting/setting all fields + in the protocol message. + + The protocol compiler currently uses this metaclass to create protocol service + classes at runtime. Clients can also manually create their own classes at + runtime, as in this example:: + + mydescriptor = ServiceDescriptor(.....) + class MyProtoService(service.Service): + __metaclass__ = GeneratedServiceType + DESCRIPTOR = mydescriptor + myservice_instance = MyProtoService() + # ... + """ + + _DESCRIPTOR_KEY = 'DESCRIPTOR' + + def __init__(cls, name, bases, dictionary): + """Creates a message service class. + + Args: + name: Name of the class (ignored, but required by the metaclass + protocol). + bases: Base classes of the class being constructed. + dictionary: The class dictionary of the class being constructed. + dictionary[_DESCRIPTOR_KEY] must contain a ServiceDescriptor object + describing this protocol service type. + """ + # Don't do anything if this class doesn't have a descriptor. This happens + # when a service class is subclassed. + if GeneratedServiceType._DESCRIPTOR_KEY not in dictionary: + return + + descriptor = dictionary[GeneratedServiceType._DESCRIPTOR_KEY] + service_builder = _ServiceBuilder(descriptor) + service_builder.BuildService(cls) + cls.DESCRIPTOR = descriptor + + +class GeneratedServiceStubType(GeneratedServiceType): + + """Metaclass for service stubs created at runtime from ServiceDescriptors. + + This class has similar responsibilities as GeneratedServiceType, except that + it creates the service stub classes. + """ + + _DESCRIPTOR_KEY = 'DESCRIPTOR' + + def __init__(cls, name, bases, dictionary): + """Creates a message service stub class. + + Args: + name: Name of the class (ignored, here). + bases: Base classes of the class being constructed. + dictionary: The class dictionary of the class being constructed. + dictionary[_DESCRIPTOR_KEY] must contain a ServiceDescriptor object + describing this protocol service type. + """ + super(GeneratedServiceStubType, cls).__init__(name, bases, dictionary) + # Don't do anything if this class doesn't have a descriptor. This happens + # when a service stub is subclassed. 
+ if GeneratedServiceStubType._DESCRIPTOR_KEY not in dictionary: + return + + descriptor = dictionary[GeneratedServiceStubType._DESCRIPTOR_KEY] + service_stub_builder = _ServiceStubBuilder(descriptor) + service_stub_builder.BuildServiceStub(cls) + + +class _ServiceBuilder(object): + + """This class constructs a protocol service class using a service descriptor. + + Given a service descriptor, this class constructs a class that represents + the specified service descriptor. One service builder instance constructs + exactly one service class. That means all instances of that class share the + same builder. + """ + + def __init__(self, service_descriptor): + """Initializes an instance of the service class builder. + + Args: + service_descriptor: ServiceDescriptor to use when constructing the + service class. + """ + self.descriptor = service_descriptor + + def BuildService(builder, cls): + """Constructs the service class. + + Args: + cls: The class that will be constructed. + """ + + # CallMethod needs to operate with an instance of the Service class. This + # internal wrapper function exists only to be able to pass the service + # instance to the method that does the real CallMethod work. + # Making sure to use exact argument names from the abstract interface in + # service.py to match the type signature + def _WrapCallMethod(self, method_descriptor, rpc_controller, request, done): + return builder._CallMethod(self, method_descriptor, rpc_controller, + request, done) + + def _WrapGetRequestClass(self, method_descriptor): + return builder._GetRequestClass(method_descriptor) + + def _WrapGetResponseClass(self, method_descriptor): + return builder._GetResponseClass(method_descriptor) + + builder.cls = cls + cls.CallMethod = _WrapCallMethod + cls.GetDescriptor = staticmethod(lambda: builder.descriptor) + cls.GetDescriptor.__doc__ = 'Returns the service descriptor.' + cls.GetRequestClass = _WrapGetRequestClass + cls.GetResponseClass = _WrapGetResponseClass + for method in builder.descriptor.methods: + setattr(cls, method.name, builder._GenerateNonImplementedMethod(method)) + + def _CallMethod(self, srvc, method_descriptor, + rpc_controller, request, callback): + """Calls the method described by a given method descriptor. + + Args: + srvc: Instance of the service for which this method is called. + method_descriptor: Descriptor that represent the method to call. + rpc_controller: RPC controller to use for this method's execution. + request: Request protocol message. + callback: A callback to invoke after the method has completed. + """ + if method_descriptor.containing_service != self.descriptor: + raise RuntimeError( + 'CallMethod() given method descriptor for wrong service type.') + method = getattr(srvc, method_descriptor.name) + return method(rpc_controller, request, callback) + + def _GetRequestClass(self, method_descriptor): + """Returns the class of the request protocol message. + + Args: + method_descriptor: Descriptor of the method for which to return the + request protocol message class. + + Returns: + A class that represents the input protocol message of the specified + method. + """ + if method_descriptor.containing_service != self.descriptor: + raise RuntimeError( + 'GetRequestClass() given method descriptor for wrong service type.') + return method_descriptor.input_type._concrete_class + + def _GetResponseClass(self, method_descriptor): + """Returns the class of the response protocol message. 
+ + Args: + method_descriptor: Descriptor of the method for which to return the + response protocol message class. + + Returns: + A class that represents the output protocol message of the specified + method. + """ + if method_descriptor.containing_service != self.descriptor: + raise RuntimeError( + 'GetResponseClass() given method descriptor for wrong service type.') + return method_descriptor.output_type._concrete_class + + def _GenerateNonImplementedMethod(self, method): + """Generates and returns a method that can be set for a service methods. + + Args: + method: Descriptor of the service method for which a method is to be + generated. + + Returns: + A method that can be added to the service class. + """ + return lambda inst, rpc_controller, request, callback: ( + self._NonImplementedMethod(method.name, rpc_controller, callback)) + + def _NonImplementedMethod(self, method_name, rpc_controller, callback): + """The body of all methods in the generated service class. + + Args: + method_name: Name of the method being executed. + rpc_controller: RPC controller used to execute this method. + callback: A callback which will be invoked when the method finishes. + """ + rpc_controller.SetFailed('Method %s not implemented.' % method_name) + callback(None) + + +class _ServiceStubBuilder(object): + + """Constructs a protocol service stub class using a service descriptor. + + Given a service descriptor, this class constructs a suitable stub class. + A stub is just a type-safe wrapper around an RpcChannel which emulates a + local implementation of the service. + + One service stub builder instance constructs exactly one class. It means all + instances of that class share the same service stub builder. + """ + + def __init__(self, service_descriptor): + """Initializes an instance of the service stub class builder. + + Args: + service_descriptor: ServiceDescriptor to use when constructing the + stub class. + """ + self.descriptor = service_descriptor + + def BuildServiceStub(self, cls): + """Constructs the stub class. + + Args: + cls: The class that will be constructed. + """ + + def _ServiceStubInit(stub, rpc_channel): + stub.rpc_channel = rpc_channel + self.cls = cls + cls.__init__ = _ServiceStubInit + for method in self.descriptor.methods: + setattr(cls, method.name, self._GenerateStubMethod(method)) + + def _GenerateStubMethod(self, method): + return (lambda inst, rpc_controller, request, callback=None: + self._StubMethod(inst, method, rpc_controller, request, callback)) + + def _StubMethod(self, stub, method_descriptor, + rpc_controller, request, callback): + """The body of all service methods in the generated stub class. + + Args: + stub: Stub instance. + method_descriptor: Descriptor of the invoked method. + rpc_controller: Rpc controller to execute the method. + request: Request protocol message. + callback: A callback to execute when the method finishes. + Returns: + Response message (in case of blocking call). + """ + return stub.rpc_channel.CallMethod( + method_descriptor, rpc_controller, request, + method_descriptor.output_type._concrete_class, callback) diff --git a/openpype/hosts/hiero/vendor/google/protobuf/source_context_pb2.py b/openpype/hosts/hiero/vendor/google/protobuf/source_context_pb2.py new file mode 100644 index 0000000000..30cca2e06e --- /dev/null +++ b/openpype/hosts/hiero/vendor/google/protobuf/source_context_pb2.py @@ -0,0 +1,26 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: google/protobuf/source_context.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n$google/protobuf/source_context.proto\x12\x0fgoogle.protobuf\"\"\n\rSourceContext\x12\x11\n\tfile_name\x18\x01 \x01(\tB\x8a\x01\n\x13\x63om.google.protobufB\x12SourceContextProtoP\x01Z6google.golang.org/protobuf/types/known/sourcecontextpb\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3') + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.source_context_pb2', globals()) +if _descriptor._USE_C_DESCRIPTORS == False: + + DESCRIPTOR._options = None + DESCRIPTOR._serialized_options = b'\n\023com.google.protobufB\022SourceContextProtoP\001Z6google.golang.org/protobuf/types/known/sourcecontextpb\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes' + _SOURCECONTEXT._serialized_start=57 + _SOURCECONTEXT._serialized_end=91 +# @@protoc_insertion_point(module_scope) diff --git a/openpype/hosts/hiero/vendor/google/protobuf/struct_pb2.py b/openpype/hosts/hiero/vendor/google/protobuf/struct_pb2.py new file mode 100644 index 0000000000..149728ca08 --- /dev/null +++ b/openpype/hosts/hiero/vendor/google/protobuf/struct_pb2.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: google/protobuf/struct.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1cgoogle/protobuf/struct.proto\x12\x0fgoogle.protobuf\"\x84\x01\n\x06Struct\x12\x33\n\x06\x66ields\x18\x01 \x03(\x0b\x32#.google.protobuf.Struct.FieldsEntry\x1a\x45\n\x0b\x46ieldsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12%\n\x05value\x18\x02 \x01(\x0b\x32\x16.google.protobuf.Value:\x02\x38\x01\"\xea\x01\n\x05Value\x12\x30\n\nnull_value\x18\x01 \x01(\x0e\x32\x1a.google.protobuf.NullValueH\x00\x12\x16\n\x0cnumber_value\x18\x02 \x01(\x01H\x00\x12\x16\n\x0cstring_value\x18\x03 \x01(\tH\x00\x12\x14\n\nbool_value\x18\x04 \x01(\x08H\x00\x12/\n\x0cstruct_value\x18\x05 \x01(\x0b\x32\x17.google.protobuf.StructH\x00\x12\x30\n\nlist_value\x18\x06 \x01(\x0b\x32\x1a.google.protobuf.ListValueH\x00\x42\x06\n\x04kind\"3\n\tListValue\x12&\n\x06values\x18\x01 \x03(\x0b\x32\x16.google.protobuf.Value*\x1b\n\tNullValue\x12\x0e\n\nNULL_VALUE\x10\x00\x42\x7f\n\x13\x63om.google.protobufB\x0bStructProtoP\x01Z/google.golang.org/protobuf/types/known/structpb\xf8\x01\x01\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3') + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.struct_pb2', globals()) +if _descriptor._USE_C_DESCRIPTORS == False: + + DESCRIPTOR._options = None + DESCRIPTOR._serialized_options = 
b'\n\023com.google.protobufB\013StructProtoP\001Z/google.golang.org/protobuf/types/known/structpb\370\001\001\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes' + _STRUCT_FIELDSENTRY._options = None + _STRUCT_FIELDSENTRY._serialized_options = b'8\001' + _NULLVALUE._serialized_start=474 + _NULLVALUE._serialized_end=501 + _STRUCT._serialized_start=50 + _STRUCT._serialized_end=182 + _STRUCT_FIELDSENTRY._serialized_start=113 + _STRUCT_FIELDSENTRY._serialized_end=182 + _VALUE._serialized_start=185 + _VALUE._serialized_end=419 + _LISTVALUE._serialized_start=421 + _LISTVALUE._serialized_end=472 +# @@protoc_insertion_point(module_scope) diff --git a/openpype/hosts/hiero/vendor/google/protobuf/symbol_database.py b/openpype/hosts/hiero/vendor/google/protobuf/symbol_database.py new file mode 100644 index 0000000000..fdcf8cf06c --- /dev/null +++ b/openpype/hosts/hiero/vendor/google/protobuf/symbol_database.py @@ -0,0 +1,194 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. +# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +"""A database of Python protocol buffer generated symbols. + +SymbolDatabase is the MessageFactory for messages generated at compile time, +and makes it easy to create new instances of a registered type, given only the +type's protocol buffer symbol name. + +Example usage:: + + db = symbol_database.SymbolDatabase() + + # Register symbols of interest, from one or multiple files. 
+ db.RegisterFileDescriptor(my_proto_pb2.DESCRIPTOR) + db.RegisterMessage(my_proto_pb2.MyMessage) + db.RegisterEnumDescriptor(my_proto_pb2.MyEnum.DESCRIPTOR) + + # The database can be used as a MessageFactory, to generate types based on + # their name: + types = db.GetMessages(['my_proto.proto']) + my_message_instance = types['MyMessage']() + + # The database's underlying descriptor pool can be queried, so it's not + # necessary to know a type's filename to be able to generate it: + filename = db.pool.FindFileContainingSymbol('MyMessage') + my_message_instance = db.GetMessages([filename])['MyMessage']() + + # This functionality is also provided directly via a convenience method: + my_message_instance = db.GetSymbol('MyMessage')() +""" + + +from google.protobuf.internal import api_implementation +from google.protobuf import descriptor_pool +from google.protobuf import message_factory + + +class SymbolDatabase(message_factory.MessageFactory): + """A database of Python generated symbols.""" + + def RegisterMessage(self, message): + """Registers the given message type in the local database. + + Calls to GetSymbol() and GetMessages() will return messages registered here. + + Args: + message: A :class:`google.protobuf.message.Message` subclass (or + instance); its descriptor will be registered. + + Returns: + The provided message. + """ + + desc = message.DESCRIPTOR + self._classes[desc] = message + self.RegisterMessageDescriptor(desc) + return message + + def RegisterMessageDescriptor(self, message_descriptor): + """Registers the given message descriptor in the local database. + + Args: + message_descriptor (Descriptor): the message descriptor to add. + """ + if api_implementation.Type() == 'python': + # pylint: disable=protected-access + self.pool._AddDescriptor(message_descriptor) + + def RegisterEnumDescriptor(self, enum_descriptor): + """Registers the given enum descriptor in the local database. + + Args: + enum_descriptor (EnumDescriptor): The enum descriptor to register. + + Returns: + EnumDescriptor: The provided descriptor. + """ + if api_implementation.Type() == 'python': + # pylint: disable=protected-access + self.pool._AddEnumDescriptor(enum_descriptor) + return enum_descriptor + + def RegisterServiceDescriptor(self, service_descriptor): + """Registers the given service descriptor in the local database. + + Args: + service_descriptor (ServiceDescriptor): the service descriptor to + register. + """ + if api_implementation.Type() == 'python': + # pylint: disable=protected-access + self.pool._AddServiceDescriptor(service_descriptor) + + def RegisterFileDescriptor(self, file_descriptor): + """Registers the given file descriptor in the local database. + + Args: + file_descriptor (FileDescriptor): The file descriptor to register. + """ + if api_implementation.Type() == 'python': + # pylint: disable=protected-access + self.pool._InternalAddFileDescriptor(file_descriptor) + + def GetSymbol(self, symbol): + """Tries to find a symbol in the local database. + + Currently, this method only returns message.Message instances, however, if + may be extended in future to support other symbol types. + + Args: + symbol (str): a protocol buffer symbol. + + Returns: + A Python class corresponding to the symbol. + + Raises: + KeyError: if the symbol could not be found. + """ + + return self._classes[self.pool.FindMessageTypeByName(symbol)] + + def GetMessages(self, files): + # TODO(amauryfa): Fix the differences with MessageFactory. + """Gets all registered messages from a specified file. 
+ + Only messages already created and registered will be returned; (this is the + case for imported _pb2 modules) + But unlike MessageFactory, this version also returns already defined nested + messages, but does not register any message extensions. + + Args: + files (list[str]): The file names to extract messages from. + + Returns: + A dictionary mapping proto names to the message classes. + + Raises: + KeyError: if a file could not be found. + """ + + def _GetAllMessages(desc): + """Walk a message Descriptor and recursively yields all message names.""" + yield desc + for msg_desc in desc.nested_types: + for nested_desc in _GetAllMessages(msg_desc): + yield nested_desc + + result = {} + for file_name in files: + file_desc = self.pool.FindFileByName(file_name) + for msg_desc in file_desc.message_types_by_name.values(): + for desc in _GetAllMessages(msg_desc): + try: + result[desc.full_name] = self._classes[desc] + except KeyError: + # This descriptor has no registered class, skip it. + pass + return result + + +_DEFAULT = SymbolDatabase(pool=descriptor_pool.Default()) + + +def Default(): + """Returns the default SymbolDatabase.""" + return _DEFAULT diff --git a/openpype/hosts/hiero/vendor/google/protobuf/text_encoding.py b/openpype/hosts/hiero/vendor/google/protobuf/text_encoding.py new file mode 100644 index 0000000000..759cf11f62 --- /dev/null +++ b/openpype/hosts/hiero/vendor/google/protobuf/text_encoding.py @@ -0,0 +1,110 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. +# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ +"""Encoding related utilities.""" +import re + +_cescape_chr_to_symbol_map = {} +_cescape_chr_to_symbol_map[9] = r'\t' # optional escape +_cescape_chr_to_symbol_map[10] = r'\n' # optional escape +_cescape_chr_to_symbol_map[13] = r'\r' # optional escape +_cescape_chr_to_symbol_map[34] = r'\"' # necessary escape +_cescape_chr_to_symbol_map[39] = r"\'" # optional escape +_cescape_chr_to_symbol_map[92] = r'\\' # necessary escape + +# Lookup table for unicode +_cescape_unicode_to_str = [chr(i) for i in range(0, 256)] +for byte, string in _cescape_chr_to_symbol_map.items(): + _cescape_unicode_to_str[byte] = string + +# Lookup table for non-utf8, with necessary escapes at (o >= 127 or o < 32) +_cescape_byte_to_str = ([r'\%03o' % i for i in range(0, 32)] + + [chr(i) for i in range(32, 127)] + + [r'\%03o' % i for i in range(127, 256)]) +for byte, string in _cescape_chr_to_symbol_map.items(): + _cescape_byte_to_str[byte] = string +del byte, string + + +def CEscape(text, as_utf8): + # type: (...) -> str + """Escape a bytes string for use in an text protocol buffer. + + Args: + text: A byte string to be escaped. + as_utf8: Specifies if result may contain non-ASCII characters. + In Python 3 this allows unescaped non-ASCII Unicode characters. + In Python 2 the return value will be valid UTF-8 rather than only ASCII. + Returns: + Escaped string (str). + """ + # Python's text.encode() 'string_escape' or 'unicode_escape' codecs do not + # satisfy our needs; they encodes unprintable characters using two-digit hex + # escapes whereas our C++ unescaping function allows hex escapes to be any + # length. So, "\0011".encode('string_escape') ends up being "\\x011", which + # will be decoded in C++ as a single-character string with char code 0x11. + text_is_unicode = isinstance(text, str) + if as_utf8 and text_is_unicode: + # We're already unicode, no processing beyond control char escapes. + return text.translate(_cescape_chr_to_symbol_map) + ord_ = ord if text_is_unicode else lambda x: x # bytes iterate as ints. + if as_utf8: + return ''.join(_cescape_unicode_to_str[ord_(c)] for c in text) + return ''.join(_cescape_byte_to_str[ord_(c)] for c in text) + + +_CUNESCAPE_HEX = re.compile(r'(\\+)x([0-9a-fA-F])(?![0-9a-fA-F])') + + +def CUnescape(text): + # type: (str) -> bytes + """Unescape a text string with C-style escape sequences to UTF-8 bytes. + + Args: + text: The data to parse in a str. + Returns: + A byte string. + """ + + def ReplaceHex(m): + # Only replace the match if the number of leading back slashes is odd. i.e. + # the slash itself is not escaped. + if len(m.group(1)) & 1: + return m.group(1) + 'x0' + m.group(2) + return m.group(0) + + # This is required because the 'string_escape' encoding doesn't + # allow single-digit hex escapes (like '\xf'). + result = _CUNESCAPE_HEX.sub(ReplaceHex, text) + + return (result.encode('utf-8') # Make it bytes to allow decode. + .decode('unicode_escape') + # Make it bytes again to return the proper type. + .encode('raw_unicode_escape')) diff --git a/openpype/hosts/hiero/vendor/google/protobuf/text_format.py b/openpype/hosts/hiero/vendor/google/protobuf/text_format.py new file mode 100644 index 0000000000..412385c26f --- /dev/null +++ b/openpype/hosts/hiero/vendor/google/protobuf/text_format.py @@ -0,0 +1,1795 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. 
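
# A minimal usage sketch for the CEscape/CUnescape helpers defined above.
# Assumptions: the vendored google.protobuf package is importable, and the
# sample bytes below are made up purely for illustration.
from google.protobuf import text_encoding

raw = b'caf\xc3\xa9\n'  # UTF-8 bytes plus a control character
escaped = text_encoding.CEscape(raw, as_utf8=False)
# escaped == r'caf\303\251\n'  (non-ASCII bytes become octal escapes)
assert text_encoding.CUnescape(escaped) == raw
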
+# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +"""Contains routines for printing protocol messages in text format. + +Simple usage example:: + + # Create a proto object and serialize it to a text proto string. + message = my_proto_pb2.MyMessage(foo='bar') + text_proto = text_format.MessageToString(message) + + # Parse a text proto string. + message = text_format.Parse(text_proto, my_proto_pb2.MyMessage()) +""" + +__author__ = 'kenton@google.com (Kenton Varda)' + +# TODO(b/129989314) Import thread contention leads to test failures. 
+import encodings.raw_unicode_escape # pylint: disable=unused-import +import encodings.unicode_escape # pylint: disable=unused-import +import io +import math +import re + +from google.protobuf.internal import decoder +from google.protobuf.internal import type_checkers +from google.protobuf import descriptor +from google.protobuf import text_encoding + +# pylint: disable=g-import-not-at-top +__all__ = ['MessageToString', 'Parse', 'PrintMessage', 'PrintField', + 'PrintFieldValue', 'Merge', 'MessageToBytes'] + +_INTEGER_CHECKERS = (type_checkers.Uint32ValueChecker(), + type_checkers.Int32ValueChecker(), + type_checkers.Uint64ValueChecker(), + type_checkers.Int64ValueChecker()) +_FLOAT_INFINITY = re.compile('-?inf(?:inity)?f?$', re.IGNORECASE) +_FLOAT_NAN = re.compile('nanf?$', re.IGNORECASE) +_QUOTES = frozenset(("'", '"')) +_ANY_FULL_TYPE_NAME = 'google.protobuf.Any' + + +class Error(Exception): + """Top-level module error for text_format.""" + + +class ParseError(Error): + """Thrown in case of text parsing or tokenizing error.""" + + def __init__(self, message=None, line=None, column=None): + if message is not None and line is not None: + loc = str(line) + if column is not None: + loc += ':{0}'.format(column) + message = '{0} : {1}'.format(loc, message) + if message is not None: + super(ParseError, self).__init__(message) + else: + super(ParseError, self).__init__() + self._line = line + self._column = column + + def GetLine(self): + return self._line + + def GetColumn(self): + return self._column + + +class TextWriter(object): + + def __init__(self, as_utf8): + self._writer = io.StringIO() + + def write(self, val): + return self._writer.write(val) + + def close(self): + return self._writer.close() + + def getvalue(self): + return self._writer.getvalue() + + +def MessageToString( + message, + as_utf8=False, + as_one_line=False, + use_short_repeated_primitives=False, + pointy_brackets=False, + use_index_order=False, + float_format=None, + double_format=None, + use_field_number=False, + descriptor_pool=None, + indent=0, + message_formatter=None, + print_unknown_fields=False, + force_colon=False): + # type: (...) -> str + """Convert protobuf message to text format. + + Double values can be formatted compactly with 15 digits of + precision (which is the most that IEEE 754 "double" can guarantee) + using double_format='.15g'. To ensure that converting to text and back to a + proto will result in an identical value, double_format='.17g' should be used. + + Args: + message: The protocol buffers message. + as_utf8: Return unescaped Unicode for non-ASCII characters. + In Python 3 actual Unicode characters may appear as is in strings. + In Python 2 the return value will be valid UTF-8 rather than only ASCII. + as_one_line: Don't introduce newlines between fields. + use_short_repeated_primitives: Use short repeated format for primitives. + pointy_brackets: If True, use angle brackets instead of curly braces for + nesting. + use_index_order: If True, fields of a proto message will be printed using + the order defined in source code instead of the field number, extensions + will be printed at the end of the message and their relative order is + determined by the extension number. By default, use the field number + order. + float_format (str): If set, use this to specify float field formatting + (per the "Format Specification Mini-Language"); otherwise, shortest float + that has same value in wire will be printed. Also affect double field + if double_format is not set but float_format is set. 
+ double_format (str): If set, use this to specify double field formatting + (per the "Format Specification Mini-Language"); if it is not set but + float_format is set, use float_format. Otherwise, use ``str()`` + use_field_number: If True, print field numbers instead of names. + descriptor_pool (DescriptorPool): Descriptor pool used to resolve Any types. + indent (int): The initial indent level, in terms of spaces, for pretty + print. + message_formatter (function(message, indent, as_one_line) -> unicode|None): + Custom formatter for selected sub-messages (usually based on message + type). Use to pretty print parts of the protobuf for easier diffing. + print_unknown_fields: If True, unknown fields will be printed. + force_colon: If set, a colon will be added after the field name even if the + field is a proto message. + + Returns: + str: A string of the text formatted protocol buffer message. + """ + out = TextWriter(as_utf8) + printer = _Printer( + out, + indent, + as_utf8, + as_one_line, + use_short_repeated_primitives, + pointy_brackets, + use_index_order, + float_format, + double_format, + use_field_number, + descriptor_pool, + message_formatter, + print_unknown_fields=print_unknown_fields, + force_colon=force_colon) + printer.PrintMessage(message) + result = out.getvalue() + out.close() + if as_one_line: + return result.rstrip() + return result + + +def MessageToBytes(message, **kwargs): + # type: (...) -> bytes + """Convert protobuf message to encoded text format. See MessageToString.""" + text = MessageToString(message, **kwargs) + if isinstance(text, bytes): + return text + codec = 'utf-8' if kwargs.get('as_utf8') else 'ascii' + return text.encode(codec) + + +def _IsMapEntry(field): + return (field.type == descriptor.FieldDescriptor.TYPE_MESSAGE and + field.message_type.has_options and + field.message_type.GetOptions().map_entry) + + +def PrintMessage(message, + out, + indent=0, + as_utf8=False, + as_one_line=False, + use_short_repeated_primitives=False, + pointy_brackets=False, + use_index_order=False, + float_format=None, + double_format=None, + use_field_number=False, + descriptor_pool=None, + message_formatter=None, + print_unknown_fields=False, + force_colon=False): + printer = _Printer( + out=out, indent=indent, as_utf8=as_utf8, + as_one_line=as_one_line, + use_short_repeated_primitives=use_short_repeated_primitives, + pointy_brackets=pointy_brackets, + use_index_order=use_index_order, + float_format=float_format, + double_format=double_format, + use_field_number=use_field_number, + descriptor_pool=descriptor_pool, + message_formatter=message_formatter, + print_unknown_fields=print_unknown_fields, + force_colon=force_colon) + printer.PrintMessage(message) + + +def PrintField(field, + value, + out, + indent=0, + as_utf8=False, + as_one_line=False, + use_short_repeated_primitives=False, + pointy_brackets=False, + use_index_order=False, + float_format=None, + double_format=None, + message_formatter=None, + print_unknown_fields=False, + force_colon=False): + """Print a single field name/value pair.""" + printer = _Printer(out, indent, as_utf8, as_one_line, + use_short_repeated_primitives, pointy_brackets, + use_index_order, float_format, double_format, + message_formatter=message_formatter, + print_unknown_fields=print_unknown_fields, + force_colon=force_colon) + printer.PrintField(field, value) + + +def PrintFieldValue(field, + value, + out, + indent=0, + as_utf8=False, + as_one_line=False, + use_short_repeated_primitives=False, + pointy_brackets=False, + use_index_order=False, 
+ float_format=None, + double_format=None, + message_formatter=None, + print_unknown_fields=False, + force_colon=False): + """Print a single field value (not including name).""" + printer = _Printer(out, indent, as_utf8, as_one_line, + use_short_repeated_primitives, pointy_brackets, + use_index_order, float_format, double_format, + message_formatter=message_formatter, + print_unknown_fields=print_unknown_fields, + force_colon=force_colon) + printer.PrintFieldValue(field, value) + + +def _BuildMessageFromTypeName(type_name, descriptor_pool): + """Returns a protobuf message instance. + + Args: + type_name: Fully-qualified protobuf message type name string. + descriptor_pool: DescriptorPool instance. + + Returns: + A Message instance of type matching type_name, or None if the a Descriptor + wasn't found matching type_name. + """ + # pylint: disable=g-import-not-at-top + if descriptor_pool is None: + from google.protobuf import descriptor_pool as pool_mod + descriptor_pool = pool_mod.Default() + from google.protobuf import symbol_database + database = symbol_database.Default() + try: + message_descriptor = descriptor_pool.FindMessageTypeByName(type_name) + except KeyError: + return None + message_type = database.GetPrototype(message_descriptor) + return message_type() + + +# These values must match WireType enum in google/protobuf/wire_format.h. +WIRETYPE_LENGTH_DELIMITED = 2 +WIRETYPE_START_GROUP = 3 + + +class _Printer(object): + """Text format printer for protocol message.""" + + def __init__( + self, + out, + indent=0, + as_utf8=False, + as_one_line=False, + use_short_repeated_primitives=False, + pointy_brackets=False, + use_index_order=False, + float_format=None, + double_format=None, + use_field_number=False, + descriptor_pool=None, + message_formatter=None, + print_unknown_fields=False, + force_colon=False): + """Initialize the Printer. + + Double values can be formatted compactly with 15 digits of precision + (which is the most that IEEE 754 "double" can guarantee) using + double_format='.15g'. To ensure that converting to text and back to a proto + will result in an identical value, double_format='.17g' should be used. + + Args: + out: To record the text format result. + indent: The initial indent level for pretty print. + as_utf8: Return unescaped Unicode for non-ASCII characters. + In Python 3 actual Unicode characters may appear as is in strings. + In Python 2 the return value will be valid UTF-8 rather than ASCII. + as_one_line: Don't introduce newlines between fields. + use_short_repeated_primitives: Use short repeated format for primitives. + pointy_brackets: If True, use angle brackets instead of curly braces for + nesting. + use_index_order: If True, print fields of a proto message using the order + defined in source code instead of the field number. By default, use the + field number order. + float_format: If set, use this to specify float field formatting + (per the "Format Specification Mini-Language"); otherwise, shortest + float that has same value in wire will be printed. Also affect double + field if double_format is not set but float_format is set. + double_format: If set, use this to specify double field formatting + (per the "Format Specification Mini-Language"); if it is not set but + float_format is set, use float_format. Otherwise, str() is used. + use_field_number: If True, print field numbers instead of names. + descriptor_pool: A DescriptorPool used to resolve Any types. 
+ message_formatter: A function(message, indent, as_one_line): unicode|None + to custom format selected sub-messages (usually based on message type). + Use to pretty print parts of the protobuf for easier diffing. + print_unknown_fields: If True, unknown fields will be printed. + force_colon: If set, a colon will be added after the field name even if + the field is a proto message. + """ + self.out = out + self.indent = indent + self.as_utf8 = as_utf8 + self.as_one_line = as_one_line + self.use_short_repeated_primitives = use_short_repeated_primitives + self.pointy_brackets = pointy_brackets + self.use_index_order = use_index_order + self.float_format = float_format + if double_format is not None: + self.double_format = double_format + else: + self.double_format = float_format + self.use_field_number = use_field_number + self.descriptor_pool = descriptor_pool + self.message_formatter = message_formatter + self.print_unknown_fields = print_unknown_fields + self.force_colon = force_colon + + def _TryPrintAsAnyMessage(self, message): + """Serializes if message is a google.protobuf.Any field.""" + if '/' not in message.type_url: + return False + packed_message = _BuildMessageFromTypeName(message.TypeName(), + self.descriptor_pool) + if packed_message: + packed_message.MergeFromString(message.value) + colon = ':' if self.force_colon else '' + self.out.write('%s[%s]%s ' % (self.indent * ' ', message.type_url, colon)) + self._PrintMessageFieldValue(packed_message) + self.out.write(' ' if self.as_one_line else '\n') + return True + else: + return False + + def _TryCustomFormatMessage(self, message): + formatted = self.message_formatter(message, self.indent, self.as_one_line) + if formatted is None: + return False + + out = self.out + out.write(' ' * self.indent) + out.write(formatted) + out.write(' ' if self.as_one_line else '\n') + return True + + def PrintMessage(self, message): + """Convert protobuf message to text format. + + Args: + message: The protocol buffers message. + """ + if self.message_formatter and self._TryCustomFormatMessage(message): + return + if (message.DESCRIPTOR.full_name == _ANY_FULL_TYPE_NAME and + self._TryPrintAsAnyMessage(message)): + return + fields = message.ListFields() + if self.use_index_order: + fields.sort( + key=lambda x: x[0].number if x[0].is_extension else x[0].index) + for field, value in fields: + if _IsMapEntry(field): + for key in sorted(value): + # This is slow for maps with submessage entries because it copies the + # entire tree. Unfortunately this would take significant refactoring + # of this file to work around. + # + # TODO(haberman): refactor and optimize if this becomes an issue. 
+ entry_submsg = value.GetEntryClass()(key=key, value=value[key]) + self.PrintField(field, entry_submsg) + elif field.label == descriptor.FieldDescriptor.LABEL_REPEATED: + if (self.use_short_repeated_primitives + and field.cpp_type != descriptor.FieldDescriptor.CPPTYPE_MESSAGE + and field.cpp_type != descriptor.FieldDescriptor.CPPTYPE_STRING): + self._PrintShortRepeatedPrimitivesValue(field, value) + else: + for element in value: + self.PrintField(field, element) + else: + self.PrintField(field, value) + + if self.print_unknown_fields: + self._PrintUnknownFields(message.UnknownFields()) + + def _PrintUnknownFields(self, unknown_fields): + """Print unknown fields.""" + out = self.out + for field in unknown_fields: + out.write(' ' * self.indent) + out.write(str(field.field_number)) + if field.wire_type == WIRETYPE_START_GROUP: + if self.as_one_line: + out.write(' { ') + else: + out.write(' {\n') + self.indent += 2 + + self._PrintUnknownFields(field.data) + + if self.as_one_line: + out.write('} ') + else: + self.indent -= 2 + out.write(' ' * self.indent + '}\n') + elif field.wire_type == WIRETYPE_LENGTH_DELIMITED: + try: + # If this field is parseable as a Message, it is probably + # an embedded message. + # pylint: disable=protected-access + (embedded_unknown_message, pos) = decoder._DecodeUnknownFieldSet( + memoryview(field.data), 0, len(field.data)) + except Exception: # pylint: disable=broad-except + pos = 0 + + if pos == len(field.data): + if self.as_one_line: + out.write(' { ') + else: + out.write(' {\n') + self.indent += 2 + + self._PrintUnknownFields(embedded_unknown_message) + + if self.as_one_line: + out.write('} ') + else: + self.indent -= 2 + out.write(' ' * self.indent + '}\n') + else: + # A string or bytes field. self.as_utf8 may not work. + out.write(': \"') + out.write(text_encoding.CEscape(field.data, False)) + out.write('\" ' if self.as_one_line else '\"\n') + else: + # varint, fixed32, fixed64 + out.write(': ') + out.write(str(field.data)) + out.write(' ' if self.as_one_line else '\n') + + def _PrintFieldName(self, field): + """Print field name.""" + out = self.out + out.write(' ' * self.indent) + if self.use_field_number: + out.write(str(field.number)) + else: + if field.is_extension: + out.write('[') + if (field.containing_type.GetOptions().message_set_wire_format and + field.type == descriptor.FieldDescriptor.TYPE_MESSAGE and + field.label == descriptor.FieldDescriptor.LABEL_OPTIONAL): + out.write(field.message_type.full_name) + else: + out.write(field.full_name) + out.write(']') + elif field.type == descriptor.FieldDescriptor.TYPE_GROUP: + # For groups, use the capitalized name. + out.write(field.message_type.name) + else: + out.write(field.name) + + if (self.force_colon or + field.cpp_type != descriptor.FieldDescriptor.CPPTYPE_MESSAGE): + # The colon is optional in this case, but our cross-language golden files + # don't include it. Here, the colon is only included if force_colon is + # set to True + out.write(':') + + def PrintField(self, field, value): + """Print a single field name/value pair.""" + self._PrintFieldName(field) + self.out.write(' ') + self.PrintFieldValue(field, value) + self.out.write(' ' if self.as_one_line else '\n') + + def _PrintShortRepeatedPrimitivesValue(self, field, value): + """"Prints short repeated primitives value.""" + # Note: this is called only when value has at least one element. 
+ self._PrintFieldName(field) + self.out.write(' [') + for i in range(len(value) - 1): + self.PrintFieldValue(field, value[i]) + self.out.write(', ') + self.PrintFieldValue(field, value[-1]) + self.out.write(']') + self.out.write(' ' if self.as_one_line else '\n') + + def _PrintMessageFieldValue(self, value): + if self.pointy_brackets: + openb = '<' + closeb = '>' + else: + openb = '{' + closeb = '}' + + if self.as_one_line: + self.out.write('%s ' % openb) + self.PrintMessage(value) + self.out.write(closeb) + else: + self.out.write('%s\n' % openb) + self.indent += 2 + self.PrintMessage(value) + self.indent -= 2 + self.out.write(' ' * self.indent + closeb) + + def PrintFieldValue(self, field, value): + """Print a single field value (not including name). + + For repeated fields, the value should be a single element. + + Args: + field: The descriptor of the field to be printed. + value: The value of the field. + """ + out = self.out + if field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE: + self._PrintMessageFieldValue(value) + elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_ENUM: + enum_value = field.enum_type.values_by_number.get(value, None) + if enum_value is not None: + out.write(enum_value.name) + else: + out.write(str(value)) + elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_STRING: + out.write('\"') + if isinstance(value, str) and not self.as_utf8: + out_value = value.encode('utf-8') + else: + out_value = value + if field.type == descriptor.FieldDescriptor.TYPE_BYTES: + # We always need to escape all binary data in TYPE_BYTES fields. + out_as_utf8 = False + else: + out_as_utf8 = self.as_utf8 + out.write(text_encoding.CEscape(out_value, out_as_utf8)) + out.write('\"') + elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_BOOL: + if value: + out.write('true') + else: + out.write('false') + elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_FLOAT: + if self.float_format is not None: + out.write('{1:{0}}'.format(self.float_format, value)) + else: + if math.isnan(value): + out.write(str(value)) + else: + out.write(str(type_checkers.ToShortestFloat(value))) + elif (field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_DOUBLE and + self.double_format is not None): + out.write('{1:{0}}'.format(self.double_format, value)) + else: + out.write(str(value)) + + +def Parse(text, + message, + allow_unknown_extension=False, + allow_field_number=False, + descriptor_pool=None, + allow_unknown_field=False): + """Parses a text representation of a protocol message into a message. + + NOTE: for historical reasons this function does not clear the input + message. This is different from what the binary msg.ParseFrom(...) does. + If text contains a field already set in message, the value is appended if the + field is repeated. Otherwise, an error is raised. + + Example:: + + a = MyProto() + a.repeated_field.append('test') + b = MyProto() + + # Repeated fields are combined + text_format.Parse(repr(a), b) + text_format.Parse(repr(a), b) # repeated_field contains ["test", "test"] + + # Non-repeated fields cannot be overwritten + a.singular_field = 1 + b.singular_field = 2 + text_format.Parse(repr(a), b) # ParseError + + # Binary version: + b.ParseFromString(a.SerializeToString()) # repeated_field is now "test" + + Caller is responsible for clearing the message as needed. + + Args: + text (str): Message text representation. + message (Message): A protocol buffer message to merge into. 
+ allow_unknown_extension: if True, skip over missing extensions and keep + parsing + allow_field_number: if True, both field number and field name are allowed. + descriptor_pool (DescriptorPool): Descriptor pool used to resolve Any types. + allow_unknown_field: if True, skip over unknown field and keep + parsing. Avoid to use this option if possible. It may hide some + errors (e.g. spelling error on field name) + + Returns: + Message: The same message passed as argument. + + Raises: + ParseError: On text parsing problems. + """ + return ParseLines(text.split(b'\n' if isinstance(text, bytes) else u'\n'), + message, + allow_unknown_extension, + allow_field_number, + descriptor_pool=descriptor_pool, + allow_unknown_field=allow_unknown_field) + + +def Merge(text, + message, + allow_unknown_extension=False, + allow_field_number=False, + descriptor_pool=None, + allow_unknown_field=False): + """Parses a text representation of a protocol message into a message. + + Like Parse(), but allows repeated values for a non-repeated field, and uses + the last one. This means any non-repeated, top-level fields specified in text + replace those in the message. + + Args: + text (str): Message text representation. + message (Message): A protocol buffer message to merge into. + allow_unknown_extension: if True, skip over missing extensions and keep + parsing + allow_field_number: if True, both field number and field name are allowed. + descriptor_pool (DescriptorPool): Descriptor pool used to resolve Any types. + allow_unknown_field: if True, skip over unknown field and keep + parsing. Avoid to use this option if possible. It may hide some + errors (e.g. spelling error on field name) + + Returns: + Message: The same message passed as argument. + + Raises: + ParseError: On text parsing problems. + """ + return MergeLines( + text.split(b'\n' if isinstance(text, bytes) else u'\n'), + message, + allow_unknown_extension, + allow_field_number, + descriptor_pool=descriptor_pool, + allow_unknown_field=allow_unknown_field) + + +def ParseLines(lines, + message, + allow_unknown_extension=False, + allow_field_number=False, + descriptor_pool=None, + allow_unknown_field=False): + """Parses a text representation of a protocol message into a message. + + See Parse() for caveats. + + Args: + lines: An iterable of lines of a message's text representation. + message: A protocol buffer message to merge into. + allow_unknown_extension: if True, skip over missing extensions and keep + parsing + allow_field_number: if True, both field number and field name are allowed. + descriptor_pool: A DescriptorPool used to resolve Any types. + allow_unknown_field: if True, skip over unknown field and keep + parsing. Avoid to use this option if possible. It may hide some + errors (e.g. spelling error on field name) + + Returns: + The same message passed as argument. + + Raises: + ParseError: On text parsing problems. + """ + parser = _Parser(allow_unknown_extension, + allow_field_number, + descriptor_pool=descriptor_pool, + allow_unknown_field=allow_unknown_field) + return parser.ParseLines(lines, message) + + +def MergeLines(lines, + message, + allow_unknown_extension=False, + allow_field_number=False, + descriptor_pool=None, + allow_unknown_field=False): + """Parses a text representation of a protocol message into a message. + + See Merge() for more details. + + Args: + lines: An iterable of lines of a message's text representation. + message: A protocol buffer message to merge into. 
+ allow_unknown_extension: if True, skip over missing extensions and keep + parsing + allow_field_number: if True, both field number and field name are allowed. + descriptor_pool: A DescriptorPool used to resolve Any types. + allow_unknown_field: if True, skip over unknown field and keep + parsing. Avoid to use this option if possible. It may hide some + errors (e.g. spelling error on field name) + + Returns: + The same message passed as argument. + + Raises: + ParseError: On text parsing problems. + """ + parser = _Parser(allow_unknown_extension, + allow_field_number, + descriptor_pool=descriptor_pool, + allow_unknown_field=allow_unknown_field) + return parser.MergeLines(lines, message) + + +class _Parser(object): + """Text format parser for protocol message.""" + + def __init__(self, + allow_unknown_extension=False, + allow_field_number=False, + descriptor_pool=None, + allow_unknown_field=False): + self.allow_unknown_extension = allow_unknown_extension + self.allow_field_number = allow_field_number + self.descriptor_pool = descriptor_pool + self.allow_unknown_field = allow_unknown_field + + def ParseLines(self, lines, message): + """Parses a text representation of a protocol message into a message.""" + self._allow_multiple_scalars = False + self._ParseOrMerge(lines, message) + return message + + def MergeLines(self, lines, message): + """Merges a text representation of a protocol message into a message.""" + self._allow_multiple_scalars = True + self._ParseOrMerge(lines, message) + return message + + def _ParseOrMerge(self, lines, message): + """Converts a text representation of a protocol message into a message. + + Args: + lines: Lines of a message's text representation. + message: A protocol buffer message to merge into. + + Raises: + ParseError: On text parsing problems. + """ + # Tokenize expects native str lines. + str_lines = ( + line if isinstance(line, str) else line.decode('utf-8') + for line in lines) + tokenizer = Tokenizer(str_lines) + while not tokenizer.AtEnd(): + self._MergeField(tokenizer, message) + + def _MergeField(self, tokenizer, message): + """Merges a single protocol message field into a message. + + Args: + tokenizer: A tokenizer to parse the field name and values. + message: A protocol message to record the data. + + Raises: + ParseError: In case of text parsing problems. + """ + message_descriptor = message.DESCRIPTOR + if (message_descriptor.full_name == _ANY_FULL_TYPE_NAME and + tokenizer.TryConsume('[')): + type_url_prefix, packed_type_name = self._ConsumeAnyTypeUrl(tokenizer) + tokenizer.Consume(']') + tokenizer.TryConsume(':') + if tokenizer.TryConsume('<'): + expanded_any_end_token = '>' + else: + tokenizer.Consume('{') + expanded_any_end_token = '}' + expanded_any_sub_message = _BuildMessageFromTypeName(packed_type_name, + self.descriptor_pool) + if not expanded_any_sub_message: + raise ParseError('Type %s not found in descriptor pool' % + packed_type_name) + while not tokenizer.TryConsume(expanded_any_end_token): + if tokenizer.AtEnd(): + raise tokenizer.ParseErrorPreviousToken('Expected "%s".' 
% + (expanded_any_end_token,)) + self._MergeField(tokenizer, expanded_any_sub_message) + deterministic = False + + message.Pack(expanded_any_sub_message, + type_url_prefix=type_url_prefix, + deterministic=deterministic) + return + + if tokenizer.TryConsume('['): + name = [tokenizer.ConsumeIdentifier()] + while tokenizer.TryConsume('.'): + name.append(tokenizer.ConsumeIdentifier()) + name = '.'.join(name) + + if not message_descriptor.is_extendable: + raise tokenizer.ParseErrorPreviousToken( + 'Message type "%s" does not have extensions.' % + message_descriptor.full_name) + # pylint: disable=protected-access + field = message.Extensions._FindExtensionByName(name) + # pylint: enable=protected-access + + + if not field: + if self.allow_unknown_extension: + field = None + else: + raise tokenizer.ParseErrorPreviousToken( + 'Extension "%s" not registered. ' + 'Did you import the _pb2 module which defines it? ' + 'If you are trying to place the extension in the MessageSet ' + 'field of another message that is in an Any or MessageSet field, ' + 'that message\'s _pb2 module must be imported as well' % name) + elif message_descriptor != field.containing_type: + raise tokenizer.ParseErrorPreviousToken( + 'Extension "%s" does not extend message type "%s".' % + (name, message_descriptor.full_name)) + + tokenizer.Consume(']') + + else: + name = tokenizer.ConsumeIdentifierOrNumber() + if self.allow_field_number and name.isdigit(): + number = ParseInteger(name, True, True) + field = message_descriptor.fields_by_number.get(number, None) + if not field and message_descriptor.is_extendable: + field = message.Extensions._FindExtensionByNumber(number) + else: + field = message_descriptor.fields_by_name.get(name, None) + + # Group names are expected to be capitalized as they appear in the + # .proto file, which actually matches their type names, not their field + # names. + if not field: + field = message_descriptor.fields_by_name.get(name.lower(), None) + if field and field.type != descriptor.FieldDescriptor.TYPE_GROUP: + field = None + + if (field and field.type == descriptor.FieldDescriptor.TYPE_GROUP and + field.message_type.name != name): + field = None + + if not field and not self.allow_unknown_field: + raise tokenizer.ParseErrorPreviousToken( + 'Message type "%s" has no field named "%s".' % + (message_descriptor.full_name, name)) + + if field: + if not self._allow_multiple_scalars and field.containing_oneof: + # Check if there's a different field set in this oneof. + # Note that we ignore the case if the same field was set before, and we + # apply _allow_multiple_scalars to non-scalar fields as well. + which_oneof = message.WhichOneof(field.containing_oneof.name) + if which_oneof is not None and which_oneof != field.name: + raise tokenizer.ParseErrorPreviousToken( + 'Field "%s" is specified along with field "%s", another member ' + 'of oneof "%s" for message type "%s".' % + (field.name, which_oneof, field.containing_oneof.name, + message_descriptor.full_name)) + + if field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE: + tokenizer.TryConsume(':') + merger = self._MergeMessageField + else: + tokenizer.Consume(':') + merger = self._MergeScalarField + + if (field.label == descriptor.FieldDescriptor.LABEL_REPEATED and + tokenizer.TryConsume('[')): + # Short repeated format, e.g. 
"foo: [1, 2, 3]" + if not tokenizer.TryConsume(']'): + while True: + merger(tokenizer, message, field) + if tokenizer.TryConsume(']'): + break + tokenizer.Consume(',') + + else: + merger(tokenizer, message, field) + + else: # Proto field is unknown. + assert (self.allow_unknown_extension or self.allow_unknown_field) + _SkipFieldContents(tokenizer) + + # For historical reasons, fields may optionally be separated by commas or + # semicolons. + if not tokenizer.TryConsume(','): + tokenizer.TryConsume(';') + + + def _ConsumeAnyTypeUrl(self, tokenizer): + """Consumes a google.protobuf.Any type URL and returns the type name.""" + # Consume "type.googleapis.com/". + prefix = [tokenizer.ConsumeIdentifier()] + tokenizer.Consume('.') + prefix.append(tokenizer.ConsumeIdentifier()) + tokenizer.Consume('.') + prefix.append(tokenizer.ConsumeIdentifier()) + tokenizer.Consume('/') + # Consume the fully-qualified type name. + name = [tokenizer.ConsumeIdentifier()] + while tokenizer.TryConsume('.'): + name.append(tokenizer.ConsumeIdentifier()) + return '.'.join(prefix), '.'.join(name) + + def _MergeMessageField(self, tokenizer, message, field): + """Merges a single scalar field into a message. + + Args: + tokenizer: A tokenizer to parse the field value. + message: The message of which field is a member. + field: The descriptor of the field to be merged. + + Raises: + ParseError: In case of text parsing problems. + """ + is_map_entry = _IsMapEntry(field) + + if tokenizer.TryConsume('<'): + end_token = '>' + else: + tokenizer.Consume('{') + end_token = '}' + + if field.label == descriptor.FieldDescriptor.LABEL_REPEATED: + if field.is_extension: + sub_message = message.Extensions[field].add() + elif is_map_entry: + sub_message = getattr(message, field.name).GetEntryClass()() + else: + sub_message = getattr(message, field.name).add() + else: + if field.is_extension: + if (not self._allow_multiple_scalars and + message.HasExtension(field)): + raise tokenizer.ParseErrorPreviousToken( + 'Message type "%s" should not have multiple "%s" extensions.' % + (message.DESCRIPTOR.full_name, field.full_name)) + sub_message = message.Extensions[field] + else: + # Also apply _allow_multiple_scalars to message field. + # TODO(jieluo): Change to _allow_singular_overwrites. + if (not self._allow_multiple_scalars and + message.HasField(field.name)): + raise tokenizer.ParseErrorPreviousToken( + 'Message type "%s" should not have multiple "%s" fields.' % + (message.DESCRIPTOR.full_name, field.name)) + sub_message = getattr(message, field.name) + sub_message.SetInParent() + + while not tokenizer.TryConsume(end_token): + if tokenizer.AtEnd(): + raise tokenizer.ParseErrorPreviousToken('Expected "%s".' % (end_token,)) + self._MergeField(tokenizer, sub_message) + + if is_map_entry: + value_cpptype = field.message_type.fields_by_name['value'].cpp_type + if value_cpptype == descriptor.FieldDescriptor.CPPTYPE_MESSAGE: + value = getattr(message, field.name)[sub_message.key] + value.CopyFrom(sub_message.value) + else: + getattr(message, field.name)[sub_message.key] = sub_message.value + + @staticmethod + def _IsProto3Syntax(message): + message_descriptor = message.DESCRIPTOR + return (hasattr(message_descriptor, 'syntax') and + message_descriptor.syntax == 'proto3') + + def _MergeScalarField(self, tokenizer, message, field): + """Merges a single scalar field into a message. + + Args: + tokenizer: A tokenizer to parse the field value. + message: A protocol message to record the data. + field: The descriptor of the field to be merged. 
+ + Raises: + ParseError: In case of text parsing problems. + RuntimeError: On runtime errors. + """ + _ = self.allow_unknown_extension + value = None + + if field.type in (descriptor.FieldDescriptor.TYPE_INT32, + descriptor.FieldDescriptor.TYPE_SINT32, + descriptor.FieldDescriptor.TYPE_SFIXED32): + value = _ConsumeInt32(tokenizer) + elif field.type in (descriptor.FieldDescriptor.TYPE_INT64, + descriptor.FieldDescriptor.TYPE_SINT64, + descriptor.FieldDescriptor.TYPE_SFIXED64): + value = _ConsumeInt64(tokenizer) + elif field.type in (descriptor.FieldDescriptor.TYPE_UINT32, + descriptor.FieldDescriptor.TYPE_FIXED32): + value = _ConsumeUint32(tokenizer) + elif field.type in (descriptor.FieldDescriptor.TYPE_UINT64, + descriptor.FieldDescriptor.TYPE_FIXED64): + value = _ConsumeUint64(tokenizer) + elif field.type in (descriptor.FieldDescriptor.TYPE_FLOAT, + descriptor.FieldDescriptor.TYPE_DOUBLE): + value = tokenizer.ConsumeFloat() + elif field.type == descriptor.FieldDescriptor.TYPE_BOOL: + value = tokenizer.ConsumeBool() + elif field.type == descriptor.FieldDescriptor.TYPE_STRING: + value = tokenizer.ConsumeString() + elif field.type == descriptor.FieldDescriptor.TYPE_BYTES: + value = tokenizer.ConsumeByteString() + elif field.type == descriptor.FieldDescriptor.TYPE_ENUM: + value = tokenizer.ConsumeEnum(field) + else: + raise RuntimeError('Unknown field type %d' % field.type) + + if field.label == descriptor.FieldDescriptor.LABEL_REPEATED: + if field.is_extension: + message.Extensions[field].append(value) + else: + getattr(message, field.name).append(value) + else: + if field.is_extension: + if (not self._allow_multiple_scalars and + not self._IsProto3Syntax(message) and + message.HasExtension(field)): + raise tokenizer.ParseErrorPreviousToken( + 'Message type "%s" should not have multiple "%s" extensions.' % + (message.DESCRIPTOR.full_name, field.full_name)) + else: + message.Extensions[field] = value + else: + duplicate_error = False + if not self._allow_multiple_scalars: + if self._IsProto3Syntax(message): + # Proto3 doesn't represent presence so we try best effort to check + # multiple scalars by compare to default values. + duplicate_error = bool(getattr(message, field.name)) + else: + duplicate_error = message.HasField(field.name) + + if duplicate_error: + raise tokenizer.ParseErrorPreviousToken( + 'Message type "%s" should not have multiple "%s" fields.' % + (message.DESCRIPTOR.full_name, field.name)) + else: + setattr(message, field.name, value) + + +def _SkipFieldContents(tokenizer): + """Skips over contents (value or message) of a field. + + Args: + tokenizer: A tokenizer to parse the field name and values. + """ + # Try to guess the type of this field. + # If this field is not a message, there should be a ":" between the + # field name and the field value and also the field value should not + # start with "{" or "<" which indicates the beginning of a message body. + # If there is no ":" or there is a "{" or "<" after ":", this field has + # to be a message or the input is ill-formed. + if tokenizer.TryConsume(':') and not tokenizer.LookingAt( + '{') and not tokenizer.LookingAt('<'): + _SkipFieldValue(tokenizer) + else: + _SkipFieldMessage(tokenizer) + + +def _SkipField(tokenizer): + """Skips over a complete field (name and value/message). + + Args: + tokenizer: A tokenizer to parse the field name and values. + """ + if tokenizer.TryConsume('['): + # Consume extension name. 
+ tokenizer.ConsumeIdentifier() + while tokenizer.TryConsume('.'): + tokenizer.ConsumeIdentifier() + tokenizer.Consume(']') + else: + tokenizer.ConsumeIdentifierOrNumber() + + _SkipFieldContents(tokenizer) + + # For historical reasons, fields may optionally be separated by commas or + # semicolons. + if not tokenizer.TryConsume(','): + tokenizer.TryConsume(';') + + +def _SkipFieldMessage(tokenizer): + """Skips over a field message. + + Args: + tokenizer: A tokenizer to parse the field name and values. + """ + + if tokenizer.TryConsume('<'): + delimiter = '>' + else: + tokenizer.Consume('{') + delimiter = '}' + + while not tokenizer.LookingAt('>') and not tokenizer.LookingAt('}'): + _SkipField(tokenizer) + + tokenizer.Consume(delimiter) + + +def _SkipFieldValue(tokenizer): + """Skips over a field value. + + Args: + tokenizer: A tokenizer to parse the field name and values. + + Raises: + ParseError: In case an invalid field value is found. + """ + # String/bytes tokens can come in multiple adjacent string literals. + # If we can consume one, consume as many as we can. + if tokenizer.TryConsumeByteString(): + while tokenizer.TryConsumeByteString(): + pass + return + + if (not tokenizer.TryConsumeIdentifier() and + not _TryConsumeInt64(tokenizer) and not _TryConsumeUint64(tokenizer) and + not tokenizer.TryConsumeFloat()): + raise ParseError('Invalid field value: ' + tokenizer.token) + + +class Tokenizer(object): + """Protocol buffer text representation tokenizer. + + This class handles the lower level string parsing by splitting it into + meaningful tokens. + + It was directly ported from the Java protocol buffer API. + """ + + _WHITESPACE = re.compile(r'\s+') + _COMMENT = re.compile(r'(\s*#.*$)', re.MULTILINE) + _WHITESPACE_OR_COMMENT = re.compile(r'(\s|(#.*$))+', re.MULTILINE) + _TOKEN = re.compile('|'.join([ + r'[a-zA-Z_][0-9a-zA-Z_+-]*', # an identifier + r'([0-9+-]|(\.[0-9]))[0-9a-zA-Z_.+-]*', # a number + ] + [ # quoted str for each quote mark + # Avoid backtracking! https://stackoverflow.com/a/844267 + r'{qt}[^{qt}\n\\]*((\\.)+[^{qt}\n\\]*)*({qt}|\\?$)'.format(qt=mark) + for mark in _QUOTES + ])) + + _IDENTIFIER = re.compile(r'[^\d\W]\w*') + _IDENTIFIER_OR_NUMBER = re.compile(r'\w+') + + def __init__(self, lines, skip_comments=True): + self._position = 0 + self._line = -1 + self._column = 0 + self._token_start = None + self.token = '' + self._lines = iter(lines) + self._current_line = '' + self._previous_line = 0 + self._previous_column = 0 + self._more_lines = True + self._skip_comments = skip_comments + self._whitespace_pattern = (skip_comments and self._WHITESPACE_OR_COMMENT + or self._WHITESPACE) + self._SkipWhitespace() + self.NextToken() + + def LookingAt(self, token): + return self.token == token + + def AtEnd(self): + """Checks the end of the text was reached. + + Returns: + True iff the end was reached. + """ + return not self.token + + def _PopLine(self): + while len(self._current_line) <= self._column: + try: + self._current_line = next(self._lines) + except StopIteration: + self._current_line = '' + self._more_lines = False + return + else: + self._line += 1 + self._column = 0 + + def _SkipWhitespace(self): + while True: + self._PopLine() + match = self._whitespace_pattern.match(self._current_line, self._column) + if not match: + break + length = len(match.group(0)) + self._column += length + + def TryConsume(self, token): + """Tries to consume a given piece of text. + + Args: + token: Text to consume. + + Returns: + True iff the text was consumed. 
+ """ + if self.token == token: + self.NextToken() + return True + return False + + def Consume(self, token): + """Consumes a piece of text. + + Args: + token: Text to consume. + + Raises: + ParseError: If the text couldn't be consumed. + """ + if not self.TryConsume(token): + raise self.ParseError('Expected "%s".' % token) + + def ConsumeComment(self): + result = self.token + if not self._COMMENT.match(result): + raise self.ParseError('Expected comment.') + self.NextToken() + return result + + def ConsumeCommentOrTrailingComment(self): + """Consumes a comment, returns a 2-tuple (trailing bool, comment str).""" + + # Tokenizer initializes _previous_line and _previous_column to 0. As the + # tokenizer starts, it looks like there is a previous token on the line. + just_started = self._line == 0 and self._column == 0 + + before_parsing = self._previous_line + comment = self.ConsumeComment() + + # A trailing comment is a comment on the same line than the previous token. + trailing = (self._previous_line == before_parsing + and not just_started) + + return trailing, comment + + def TryConsumeIdentifier(self): + try: + self.ConsumeIdentifier() + return True + except ParseError: + return False + + def ConsumeIdentifier(self): + """Consumes protocol message field identifier. + + Returns: + Identifier string. + + Raises: + ParseError: If an identifier couldn't be consumed. + """ + result = self.token + if not self._IDENTIFIER.match(result): + raise self.ParseError('Expected identifier.') + self.NextToken() + return result + + def TryConsumeIdentifierOrNumber(self): + try: + self.ConsumeIdentifierOrNumber() + return True + except ParseError: + return False + + def ConsumeIdentifierOrNumber(self): + """Consumes protocol message field identifier. + + Returns: + Identifier string. + + Raises: + ParseError: If an identifier couldn't be consumed. + """ + result = self.token + if not self._IDENTIFIER_OR_NUMBER.match(result): + raise self.ParseError('Expected identifier or number, got %s.' % result) + self.NextToken() + return result + + def TryConsumeInteger(self): + try: + self.ConsumeInteger() + return True + except ParseError: + return False + + def ConsumeInteger(self): + """Consumes an integer number. + + Returns: + The integer parsed. + + Raises: + ParseError: If an integer couldn't be consumed. + """ + try: + result = _ParseAbstractInteger(self.token) + except ValueError as e: + raise self.ParseError(str(e)) + self.NextToken() + return result + + def TryConsumeFloat(self): + try: + self.ConsumeFloat() + return True + except ParseError: + return False + + def ConsumeFloat(self): + """Consumes an floating point number. + + Returns: + The number parsed. + + Raises: + ParseError: If a floating point number couldn't be consumed. + """ + try: + result = ParseFloat(self.token) + except ValueError as e: + raise self.ParseError(str(e)) + self.NextToken() + return result + + def ConsumeBool(self): + """Consumes a boolean value. + + Returns: + The bool parsed. + + Raises: + ParseError: If a boolean value couldn't be consumed. + """ + try: + result = ParseBool(self.token) + except ValueError as e: + raise self.ParseError(str(e)) + self.NextToken() + return result + + def TryConsumeByteString(self): + try: + self.ConsumeByteString() + return True + except ParseError: + return False + + def ConsumeString(self): + """Consumes a string value. + + Returns: + The string parsed. + + Raises: + ParseError: If a string value couldn't be consumed. 
+ """ + the_bytes = self.ConsumeByteString() + try: + return str(the_bytes, 'utf-8') + except UnicodeDecodeError as e: + raise self._StringParseError(e) + + def ConsumeByteString(self): + """Consumes a byte array value. + + Returns: + The array parsed (as a string). + + Raises: + ParseError: If a byte array value couldn't be consumed. + """ + the_list = [self._ConsumeSingleByteString()] + while self.token and self.token[0] in _QUOTES: + the_list.append(self._ConsumeSingleByteString()) + return b''.join(the_list) + + def _ConsumeSingleByteString(self): + """Consume one token of a string literal. + + String literals (whether bytes or text) can come in multiple adjacent + tokens which are automatically concatenated, like in C or Python. This + method only consumes one token. + + Returns: + The token parsed. + Raises: + ParseError: When the wrong format data is found. + """ + text = self.token + if len(text) < 1 or text[0] not in _QUOTES: + raise self.ParseError('Expected string but found: %r' % (text,)) + + if len(text) < 2 or text[-1] != text[0]: + raise self.ParseError('String missing ending quote: %r' % (text,)) + + try: + result = text_encoding.CUnescape(text[1:-1]) + except ValueError as e: + raise self.ParseError(str(e)) + self.NextToken() + return result + + def ConsumeEnum(self, field): + try: + result = ParseEnum(field, self.token) + except ValueError as e: + raise self.ParseError(str(e)) + self.NextToken() + return result + + def ParseErrorPreviousToken(self, message): + """Creates and *returns* a ParseError for the previously read token. + + Args: + message: A message to set for the exception. + + Returns: + A ParseError instance. + """ + return ParseError(message, self._previous_line + 1, + self._previous_column + 1) + + def ParseError(self, message): + """Creates and *returns* a ParseError for the current token.""" + return ParseError('\'' + self._current_line + '\': ' + message, + self._line + 1, self._column + 1) + + def _StringParseError(self, e): + return self.ParseError('Couldn\'t parse string: ' + str(e)) + + def NextToken(self): + """Reads the next meaningful token.""" + self._previous_line = self._line + self._previous_column = self._column + + self._column += len(self.token) + self._SkipWhitespace() + + if not self._more_lines: + self.token = '' + return + + match = self._TOKEN.match(self._current_line, self._column) + if not match and not self._skip_comments: + match = self._COMMENT.match(self._current_line, self._column) + if match: + token = match.group(0) + self.token = token + else: + self.token = self._current_line[self._column] + +# Aliased so it can still be accessed by current visibility violators. +# TODO(dbarnett): Migrate violators to textformat_tokenizer. +_Tokenizer = Tokenizer # pylint: disable=invalid-name + + +def _ConsumeInt32(tokenizer): + """Consumes a signed 32bit integer number from tokenizer. + + Args: + tokenizer: A tokenizer used to parse the number. + + Returns: + The integer parsed. + + Raises: + ParseError: If a signed 32bit integer couldn't be consumed. + """ + return _ConsumeInteger(tokenizer, is_signed=True, is_long=False) + + +def _ConsumeUint32(tokenizer): + """Consumes an unsigned 32bit integer number from tokenizer. + + Args: + tokenizer: A tokenizer used to parse the number. + + Returns: + The integer parsed. + + Raises: + ParseError: If an unsigned 32bit integer couldn't be consumed. 
+ """ + return _ConsumeInteger(tokenizer, is_signed=False, is_long=False) + + +def _TryConsumeInt64(tokenizer): + try: + _ConsumeInt64(tokenizer) + return True + except ParseError: + return False + + +def _ConsumeInt64(tokenizer): + """Consumes a signed 32bit integer number from tokenizer. + + Args: + tokenizer: A tokenizer used to parse the number. + + Returns: + The integer parsed. + + Raises: + ParseError: If a signed 32bit integer couldn't be consumed. + """ + return _ConsumeInteger(tokenizer, is_signed=True, is_long=True) + + +def _TryConsumeUint64(tokenizer): + try: + _ConsumeUint64(tokenizer) + return True + except ParseError: + return False + + +def _ConsumeUint64(tokenizer): + """Consumes an unsigned 64bit integer number from tokenizer. + + Args: + tokenizer: A tokenizer used to parse the number. + + Returns: + The integer parsed. + + Raises: + ParseError: If an unsigned 64bit integer couldn't be consumed. + """ + return _ConsumeInteger(tokenizer, is_signed=False, is_long=True) + + +def _ConsumeInteger(tokenizer, is_signed=False, is_long=False): + """Consumes an integer number from tokenizer. + + Args: + tokenizer: A tokenizer used to parse the number. + is_signed: True if a signed integer must be parsed. + is_long: True if a long integer must be parsed. + + Returns: + The integer parsed. + + Raises: + ParseError: If an integer with given characteristics couldn't be consumed. + """ + try: + result = ParseInteger(tokenizer.token, is_signed=is_signed, is_long=is_long) + except ValueError as e: + raise tokenizer.ParseError(str(e)) + tokenizer.NextToken() + return result + + +def ParseInteger(text, is_signed=False, is_long=False): + """Parses an integer. + + Args: + text: The text to parse. + is_signed: True if a signed integer must be parsed. + is_long: True if a long integer must be parsed. + + Returns: + The integer value. + + Raises: + ValueError: Thrown Iff the text is not a valid integer. + """ + # Do the actual parsing. Exception handling is propagated to caller. + result = _ParseAbstractInteger(text) + + # Check if the integer is sane. Exceptions handled by callers. + checker = _INTEGER_CHECKERS[2 * int(is_long) + int(is_signed)] + checker.CheckValue(result) + return result + + +def _ParseAbstractInteger(text): + """Parses an integer without checking size/signedness. + + Args: + text: The text to parse. + + Returns: + The integer value. + + Raises: + ValueError: Thrown Iff the text is not a valid integer. + """ + # Do the actual parsing. Exception handling is propagated to caller. + orig_text = text + c_octal_match = re.match(r'(-?)0(\d+)$', text) + if c_octal_match: + # Python 3 no longer supports 0755 octal syntax without the 'o', so + # we always use the '0o' prefix for multi-digit numbers starting with 0. + text = c_octal_match.group(1) + '0o' + c_octal_match.group(2) + try: + return int(text, 0) + except ValueError: + raise ValueError('Couldn\'t parse integer: %s' % orig_text) + + +def ParseFloat(text): + """Parse a floating point number. + + Args: + text: Text to parse. + + Returns: + The number parsed. + + Raises: + ValueError: If a floating point number couldn't be parsed. + """ + try: + # Assume Python compatible syntax. + return float(text) + except ValueError: + # Check alternative spellings. 
+    if _FLOAT_INFINITY.match(text):
+      if text[0] == '-':
+        return float('-inf')
+      else:
+        return float('inf')
+    elif _FLOAT_NAN.match(text):
+      return float('nan')
+    else:
+      # assume '1.0f' format
+      try:
+        return float(text.rstrip('f'))
+      except ValueError:
+        raise ValueError('Couldn\'t parse float: %s' % text)
+
+
+def ParseBool(text):
+  """Parse a boolean value.
+
+  Args:
+    text: Text to parse.
+
+  Returns:
+    The boolean value parsed.
+
+  Raises:
+    ValueError: If text is not a valid boolean.
+  """
+  if text in ('true', 't', '1', 'True'):
+    return True
+  elif text in ('false', 'f', '0', 'False'):
+    return False
+  else:
+    raise ValueError('Expected "true" or "false".')
+
+
+def ParseEnum(field, value):
+  """Parse an enum value.
+
+  The value can be specified by a number (the enum value), or by
+  a string literal (the enum name).
+
+  Args:
+    field: Enum field descriptor.
+    value: String value.
+
+  Returns:
+    Enum value number.
+
+  Raises:
+    ValueError: If the enum value could not be parsed.
+  """
+  enum_descriptor = field.enum_type
+  try:
+    number = int(value, 0)
+  except ValueError:
+    # Identifier.
+    enum_value = enum_descriptor.values_by_name.get(value, None)
+    if enum_value is None:
+      raise ValueError('Enum type "%s" has no value named %s.' %
+                       (enum_descriptor.full_name, value))
+  else:
+    # Numeric value.
+    if hasattr(field.file, 'syntax'):
+      # Attribute is checked for compatibility.
+      if field.file.syntax == 'proto3':
+        # Proto3 accepts numeric unknown enums.
+        return number
+    enum_value = enum_descriptor.values_by_number.get(number, None)
+    if enum_value is None:
+      raise ValueError('Enum type "%s" has no value with number %d.' %
+                       (enum_descriptor.full_name, number))
+  return enum_value.number
diff --git a/openpype/hosts/hiero/vendor/google/protobuf/timestamp_pb2.py b/openpype/hosts/hiero/vendor/google/protobuf/timestamp_pb2.py
new file mode 100644
index 0000000000..558d496941
--- /dev/null
+++ b/openpype/hosts/hiero/vendor/google/protobuf/timestamp_pb2.py
@@ -0,0 +1,26 @@
+# -*- coding: utf-8 -*-
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: google/protobuf/timestamp.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1fgoogle/protobuf/timestamp.proto\x12\x0fgoogle.protobuf\"+\n\tTimestamp\x12\x0f\n\x07seconds\x18\x01 \x01(\x03\x12\r\n\x05nanos\x18\x02 \x01(\x05\x42\x85\x01\n\x13\x63om.google.protobufB\x0eTimestampProtoP\x01Z2google.golang.org/protobuf/types/known/timestamppb\xf8\x01\x01\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3') + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.timestamp_pb2', globals()) +if _descriptor._USE_C_DESCRIPTORS == False: + + DESCRIPTOR._options = None + DESCRIPTOR._serialized_options = b'\n\023com.google.protobufB\016TimestampProtoP\001Z2google.golang.org/protobuf/types/known/timestamppb\370\001\001\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes' + _TIMESTAMP._serialized_start=52 + _TIMESTAMP._serialized_end=95 +# @@protoc_insertion_point(module_scope) diff --git a/openpype/hosts/hiero/vendor/google/protobuf/type_pb2.py b/openpype/hosts/hiero/vendor/google/protobuf/type_pb2.py new file mode 100644 index 0000000000..19903fb6b4 --- /dev/null +++ b/openpype/hosts/hiero/vendor/google/protobuf/type_pb2.py @@ -0,0 +1,42 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: google/protobuf/type.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.protobuf import any_pb2 as google_dot_protobuf_dot_any__pb2 +from google.protobuf import source_context_pb2 as google_dot_protobuf_dot_source__context__pb2 + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1agoogle/protobuf/type.proto\x12\x0fgoogle.protobuf\x1a\x19google/protobuf/any.proto\x1a$google/protobuf/source_context.proto\"\xd7\x01\n\x04Type\x12\x0c\n\x04name\x18\x01 \x01(\t\x12&\n\x06\x66ields\x18\x02 \x03(\x0b\x32\x16.google.protobuf.Field\x12\x0e\n\x06oneofs\x18\x03 \x03(\t\x12(\n\x07options\x18\x04 \x03(\x0b\x32\x17.google.protobuf.Option\x12\x36\n\x0esource_context\x18\x05 \x01(\x0b\x32\x1e.google.protobuf.SourceContext\x12\'\n\x06syntax\x18\x06 \x01(\x0e\x32\x17.google.protobuf.Syntax\"\xd5\x05\n\x05\x46ield\x12)\n\x04kind\x18\x01 \x01(\x0e\x32\x1b.google.protobuf.Field.Kind\x12\x37\n\x0b\x63\x61rdinality\x18\x02 \x01(\x0e\x32\".google.protobuf.Field.Cardinality\x12\x0e\n\x06number\x18\x03 \x01(\x05\x12\x0c\n\x04name\x18\x04 \x01(\t\x12\x10\n\x08type_url\x18\x06 \x01(\t\x12\x13\n\x0boneof_index\x18\x07 \x01(\x05\x12\x0e\n\x06packed\x18\x08 \x01(\x08\x12(\n\x07options\x18\t \x03(\x0b\x32\x17.google.protobuf.Option\x12\x11\n\tjson_name\x18\n \x01(\t\x12\x15\n\rdefault_value\x18\x0b 
\x01(\t\"\xc8\x02\n\x04Kind\x12\x10\n\x0cTYPE_UNKNOWN\x10\x00\x12\x0f\n\x0bTYPE_DOUBLE\x10\x01\x12\x0e\n\nTYPE_FLOAT\x10\x02\x12\x0e\n\nTYPE_INT64\x10\x03\x12\x0f\n\x0bTYPE_UINT64\x10\x04\x12\x0e\n\nTYPE_INT32\x10\x05\x12\x10\n\x0cTYPE_FIXED64\x10\x06\x12\x10\n\x0cTYPE_FIXED32\x10\x07\x12\r\n\tTYPE_BOOL\x10\x08\x12\x0f\n\x0bTYPE_STRING\x10\t\x12\x0e\n\nTYPE_GROUP\x10\n\x12\x10\n\x0cTYPE_MESSAGE\x10\x0b\x12\x0e\n\nTYPE_BYTES\x10\x0c\x12\x0f\n\x0bTYPE_UINT32\x10\r\x12\r\n\tTYPE_ENUM\x10\x0e\x12\x11\n\rTYPE_SFIXED32\x10\x0f\x12\x11\n\rTYPE_SFIXED64\x10\x10\x12\x0f\n\x0bTYPE_SINT32\x10\x11\x12\x0f\n\x0bTYPE_SINT64\x10\x12\"t\n\x0b\x43\x61rdinality\x12\x17\n\x13\x43\x41RDINALITY_UNKNOWN\x10\x00\x12\x18\n\x14\x43\x41RDINALITY_OPTIONAL\x10\x01\x12\x18\n\x14\x43\x41RDINALITY_REQUIRED\x10\x02\x12\x18\n\x14\x43\x41RDINALITY_REPEATED\x10\x03\"\xce\x01\n\x04\x45num\x12\x0c\n\x04name\x18\x01 \x01(\t\x12-\n\tenumvalue\x18\x02 \x03(\x0b\x32\x1a.google.protobuf.EnumValue\x12(\n\x07options\x18\x03 \x03(\x0b\x32\x17.google.protobuf.Option\x12\x36\n\x0esource_context\x18\x04 \x01(\x0b\x32\x1e.google.protobuf.SourceContext\x12\'\n\x06syntax\x18\x05 \x01(\x0e\x32\x17.google.protobuf.Syntax\"S\n\tEnumValue\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0e\n\x06number\x18\x02 \x01(\x05\x12(\n\x07options\x18\x03 \x03(\x0b\x32\x17.google.protobuf.Option\";\n\x06Option\x12\x0c\n\x04name\x18\x01 \x01(\t\x12#\n\x05value\x18\x02 \x01(\x0b\x32\x14.google.protobuf.Any*.\n\x06Syntax\x12\x11\n\rSYNTAX_PROTO2\x10\x00\x12\x11\n\rSYNTAX_PROTO3\x10\x01\x42{\n\x13\x63om.google.protobufB\tTypeProtoP\x01Z-google.golang.org/protobuf/types/known/typepb\xf8\x01\x01\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3') + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.type_pb2', globals()) +if _descriptor._USE_C_DESCRIPTORS == False: + + DESCRIPTOR._options = None + DESCRIPTOR._serialized_options = b'\n\023com.google.protobufB\tTypeProtoP\001Z-google.golang.org/protobuf/types/known/typepb\370\001\001\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes' + _SYNTAX._serialized_start=1413 + _SYNTAX._serialized_end=1459 + _TYPE._serialized_start=113 + _TYPE._serialized_end=328 + _FIELD._serialized_start=331 + _FIELD._serialized_end=1056 + _FIELD_KIND._serialized_start=610 + _FIELD_KIND._serialized_end=938 + _FIELD_CARDINALITY._serialized_start=940 + _FIELD_CARDINALITY._serialized_end=1056 + _ENUM._serialized_start=1059 + _ENUM._serialized_end=1265 + _ENUMVALUE._serialized_start=1267 + _ENUMVALUE._serialized_end=1350 + _OPTION._serialized_start=1352 + _OPTION._serialized_end=1411 +# @@protoc_insertion_point(module_scope) diff --git a/openpype/hosts/hiero/vendor/google/protobuf/util/__init__.py b/openpype/hosts/hiero/vendor/google/protobuf/util/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/openpype/hosts/hiero/vendor/google/protobuf/util/json_format_pb2.py b/openpype/hosts/hiero/vendor/google/protobuf/util/json_format_pb2.py new file mode 100644 index 0000000000..66a5836c82 --- /dev/null +++ b/openpype/hosts/hiero/vendor/google/protobuf/util/json_format_pb2.py @@ -0,0 +1,72 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: google/protobuf/util/json_format.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n&google/protobuf/util/json_format.proto\x12\x11protobuf_unittest\"\x89\x01\n\x13TestFlagsAndStrings\x12\t\n\x01\x41\x18\x01 \x02(\x05\x12K\n\rrepeatedgroup\x18\x02 \x03(\n24.protobuf_unittest.TestFlagsAndStrings.RepeatedGroup\x1a\x1a\n\rRepeatedGroup\x12\t\n\x01\x66\x18\x03 \x02(\t\"!\n\x14TestBase64ByteArrays\x12\t\n\x01\x61\x18\x01 \x02(\x0c\"G\n\x12TestJavaScriptJSON\x12\t\n\x01\x61\x18\x01 \x01(\x05\x12\r\n\x05\x66inal\x18\x02 \x01(\x02\x12\n\n\x02in\x18\x03 \x01(\t\x12\x0b\n\x03Var\x18\x04 \x01(\t\"Q\n\x18TestJavaScriptOrderJSON1\x12\t\n\x01\x64\x18\x01 \x01(\x05\x12\t\n\x01\x63\x18\x02 \x01(\x05\x12\t\n\x01x\x18\x03 \x01(\x08\x12\t\n\x01\x62\x18\x04 \x01(\x05\x12\t\n\x01\x61\x18\x05 \x01(\x05\"\x89\x01\n\x18TestJavaScriptOrderJSON2\x12\t\n\x01\x64\x18\x01 \x01(\x05\x12\t\n\x01\x63\x18\x02 \x01(\x05\x12\t\n\x01x\x18\x03 \x01(\x08\x12\t\n\x01\x62\x18\x04 \x01(\x05\x12\t\n\x01\x61\x18\x05 \x01(\x05\x12\x36\n\x01z\x18\x06 \x03(\x0b\x32+.protobuf_unittest.TestJavaScriptOrderJSON1\"$\n\x0cTestLargeInt\x12\t\n\x01\x61\x18\x01 \x02(\x03\x12\t\n\x01\x62\x18\x02 \x02(\x04\"\xa0\x01\n\x0bTestNumbers\x12\x30\n\x01\x61\x18\x01 \x01(\x0e\x32%.protobuf_unittest.TestNumbers.MyType\x12\t\n\x01\x62\x18\x02 \x01(\x05\x12\t\n\x01\x63\x18\x03 \x01(\x02\x12\t\n\x01\x64\x18\x04 \x01(\x08\x12\t\n\x01\x65\x18\x05 \x01(\x01\x12\t\n\x01\x66\x18\x06 \x01(\r\"(\n\x06MyType\x12\x06\n\x02OK\x10\x00\x12\x0b\n\x07WARNING\x10\x01\x12\t\n\x05\x45RROR\x10\x02\"T\n\rTestCamelCase\x12\x14\n\x0cnormal_field\x18\x01 \x01(\t\x12\x15\n\rCAPITAL_FIELD\x18\x02 \x01(\x05\x12\x16\n\x0e\x43\x61melCaseField\x18\x03 \x01(\x05\"|\n\x0bTestBoolMap\x12=\n\x08\x62ool_map\x18\x01 \x03(\x0b\x32+.protobuf_unittest.TestBoolMap.BoolMapEntry\x1a.\n\x0c\x42oolMapEntry\x12\x0b\n\x03key\x18\x01 \x01(\x08\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\"O\n\rTestRecursion\x12\r\n\x05value\x18\x01 \x01(\x05\x12/\n\x05\x63hild\x18\x02 \x01(\x0b\x32 .protobuf_unittest.TestRecursion\"\x86\x01\n\rTestStringMap\x12\x43\n\nstring_map\x18\x01 \x03(\x0b\x32/.protobuf_unittest.TestStringMap.StringMapEntry\x1a\x30\n\x0eStringMapEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\xc4\x01\n\x14TestStringSerializer\x12\x15\n\rscalar_string\x18\x01 \x01(\t\x12\x17\n\x0frepeated_string\x18\x02 \x03(\t\x12J\n\nstring_map\x18\x03 \x03(\x0b\x32\x36.protobuf_unittest.TestStringSerializer.StringMapEntry\x1a\x30\n\x0eStringMapEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"$\n\x18TestMessageWithExtension*\x08\x08\x64\x10\x80\x80\x80\x80\x02\"z\n\rTestExtension\x12\r\n\x05value\x18\x01 \x01(\t2Z\n\x03\x65xt\x12+.protobuf_unittest.TestMessageWithExtension\x18\x64 \x01(\x0b\x32 .protobuf_unittest.TestExtension\"Q\n\x14TestDefaultEnumValue\x12\x39\n\nenum_value\x18\x01 \x01(\x0e\x32\x1c.protobuf_unittest.EnumValue:\x07\x44\x45\x46\x41ULT*2\n\tEnumValue\x12\x0c\n\x08PROTOCOL\x10\x00\x12\n\n\x06\x42UFFER\x10\x01\x12\x0b\n\x07\x44\x45\x46\x41ULT\x10\x02') + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) 
+_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.util.json_format_pb2', globals()) +if _descriptor._USE_C_DESCRIPTORS == False: + TestMessageWithExtension.RegisterExtension(_TESTEXTENSION.extensions_by_name['ext']) + + DESCRIPTOR._options = None + _TESTBOOLMAP_BOOLMAPENTRY._options = None + _TESTBOOLMAP_BOOLMAPENTRY._serialized_options = b'8\001' + _TESTSTRINGMAP_STRINGMAPENTRY._options = None + _TESTSTRINGMAP_STRINGMAPENTRY._serialized_options = b'8\001' + _TESTSTRINGSERIALIZER_STRINGMAPENTRY._options = None + _TESTSTRINGSERIALIZER_STRINGMAPENTRY._serialized_options = b'8\001' + _ENUMVALUE._serialized_start=1607 + _ENUMVALUE._serialized_end=1657 + _TESTFLAGSANDSTRINGS._serialized_start=62 + _TESTFLAGSANDSTRINGS._serialized_end=199 + _TESTFLAGSANDSTRINGS_REPEATEDGROUP._serialized_start=173 + _TESTFLAGSANDSTRINGS_REPEATEDGROUP._serialized_end=199 + _TESTBASE64BYTEARRAYS._serialized_start=201 + _TESTBASE64BYTEARRAYS._serialized_end=234 + _TESTJAVASCRIPTJSON._serialized_start=236 + _TESTJAVASCRIPTJSON._serialized_end=307 + _TESTJAVASCRIPTORDERJSON1._serialized_start=309 + _TESTJAVASCRIPTORDERJSON1._serialized_end=390 + _TESTJAVASCRIPTORDERJSON2._serialized_start=393 + _TESTJAVASCRIPTORDERJSON2._serialized_end=530 + _TESTLARGEINT._serialized_start=532 + _TESTLARGEINT._serialized_end=568 + _TESTNUMBERS._serialized_start=571 + _TESTNUMBERS._serialized_end=731 + _TESTNUMBERS_MYTYPE._serialized_start=691 + _TESTNUMBERS_MYTYPE._serialized_end=731 + _TESTCAMELCASE._serialized_start=733 + _TESTCAMELCASE._serialized_end=817 + _TESTBOOLMAP._serialized_start=819 + _TESTBOOLMAP._serialized_end=943 + _TESTBOOLMAP_BOOLMAPENTRY._serialized_start=897 + _TESTBOOLMAP_BOOLMAPENTRY._serialized_end=943 + _TESTRECURSION._serialized_start=945 + _TESTRECURSION._serialized_end=1024 + _TESTSTRINGMAP._serialized_start=1027 + _TESTSTRINGMAP._serialized_end=1161 + _TESTSTRINGMAP_STRINGMAPENTRY._serialized_start=1113 + _TESTSTRINGMAP_STRINGMAPENTRY._serialized_end=1161 + _TESTSTRINGSERIALIZER._serialized_start=1164 + _TESTSTRINGSERIALIZER._serialized_end=1360 + _TESTSTRINGSERIALIZER_STRINGMAPENTRY._serialized_start=1113 + _TESTSTRINGSERIALIZER_STRINGMAPENTRY._serialized_end=1161 + _TESTMESSAGEWITHEXTENSION._serialized_start=1362 + _TESTMESSAGEWITHEXTENSION._serialized_end=1398 + _TESTEXTENSION._serialized_start=1400 + _TESTEXTENSION._serialized_end=1522 + _TESTDEFAULTENUMVALUE._serialized_start=1524 + _TESTDEFAULTENUMVALUE._serialized_end=1605 +# @@protoc_insertion_point(module_scope) diff --git a/openpype/hosts/hiero/vendor/google/protobuf/util/json_format_proto3_pb2.py b/openpype/hosts/hiero/vendor/google/protobuf/util/json_format_proto3_pb2.py new file mode 100644 index 0000000000..5498deafa9 --- /dev/null +++ b/openpype/hosts/hiero/vendor/google/protobuf/util/json_format_proto3_pb2.py @@ -0,0 +1,129 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: google/protobuf/util/json_format_proto3.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.protobuf import any_pb2 as google_dot_protobuf_dot_any__pb2 +from google.protobuf import duration_pb2 as google_dot_protobuf_dot_duration__pb2 +from google.protobuf import field_mask_pb2 as google_dot_protobuf_dot_field__mask__pb2 +from google.protobuf import struct_pb2 as google_dot_protobuf_dot_struct__pb2 +from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 +from google.protobuf import wrappers_pb2 as google_dot_protobuf_dot_wrappers__pb2 +from google.protobuf import unittest_pb2 as google_dot_protobuf_dot_unittest__pb2 + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n-google/protobuf/util/json_format_proto3.proto\x12\x06proto3\x1a\x19google/protobuf/any.proto\x1a\x1egoogle/protobuf/duration.proto\x1a google/protobuf/field_mask.proto\x1a\x1cgoogle/protobuf/struct.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1egoogle/protobuf/wrappers.proto\x1a\x1egoogle/protobuf/unittest.proto\"\x1c\n\x0bMessageType\x12\r\n\x05value\x18\x01 \x01(\x05\"\x94\x05\n\x0bTestMessage\x12\x12\n\nbool_value\x18\x01 \x01(\x08\x12\x13\n\x0bint32_value\x18\x02 \x01(\x05\x12\x13\n\x0bint64_value\x18\x03 \x01(\x03\x12\x14\n\x0cuint32_value\x18\x04 \x01(\r\x12\x14\n\x0cuint64_value\x18\x05 \x01(\x04\x12\x13\n\x0b\x66loat_value\x18\x06 \x01(\x02\x12\x14\n\x0c\x64ouble_value\x18\x07 \x01(\x01\x12\x14\n\x0cstring_value\x18\x08 \x01(\t\x12\x13\n\x0b\x62ytes_value\x18\t \x01(\x0c\x12$\n\nenum_value\x18\n \x01(\x0e\x32\x10.proto3.EnumType\x12*\n\rmessage_value\x18\x0b \x01(\x0b\x32\x13.proto3.MessageType\x12\x1b\n\x13repeated_bool_value\x18\x15 \x03(\x08\x12\x1c\n\x14repeated_int32_value\x18\x16 \x03(\x05\x12\x1c\n\x14repeated_int64_value\x18\x17 \x03(\x03\x12\x1d\n\x15repeated_uint32_value\x18\x18 \x03(\r\x12\x1d\n\x15repeated_uint64_value\x18\x19 \x03(\x04\x12\x1c\n\x14repeated_float_value\x18\x1a \x03(\x02\x12\x1d\n\x15repeated_double_value\x18\x1b \x03(\x01\x12\x1d\n\x15repeated_string_value\x18\x1c \x03(\t\x12\x1c\n\x14repeated_bytes_value\x18\x1d \x03(\x0c\x12-\n\x13repeated_enum_value\x18\x1e \x03(\x0e\x32\x10.proto3.EnumType\x12\x33\n\x16repeated_message_value\x18\x1f \x03(\x0b\x32\x13.proto3.MessageType\"\x8c\x02\n\tTestOneof\x12\x1b\n\x11oneof_int32_value\x18\x01 \x01(\x05H\x00\x12\x1c\n\x12oneof_string_value\x18\x02 \x01(\tH\x00\x12\x1b\n\x11oneof_bytes_value\x18\x03 \x01(\x0cH\x00\x12,\n\x10oneof_enum_value\x18\x04 \x01(\x0e\x32\x10.proto3.EnumTypeH\x00\x12\x32\n\x13oneof_message_value\x18\x05 \x01(\x0b\x32\x13.proto3.MessageTypeH\x00\x12\x36\n\x10oneof_null_value\x18\x06 \x01(\x0e\x32\x1a.google.protobuf.NullValueH\x00\x42\r\n\x0boneof_value\"\xe1\x04\n\x07TestMap\x12.\n\x08\x62ool_map\x18\x01 \x03(\x0b\x32\x1c.proto3.TestMap.BoolMapEntry\x12\x30\n\tint32_map\x18\x02 \x03(\x0b\x32\x1d.proto3.TestMap.Int32MapEntry\x12\x30\n\tint64_map\x18\x03 \x03(\x0b\x32\x1d.proto3.TestMap.Int64MapEntry\x12\x32\n\nuint32_map\x18\x04 \x03(\x0b\x32\x1e.proto3.TestMap.Uint32MapEntry\x12\x32\n\nuint64_map\x18\x05 \x03(\x0b\x32\x1e.proto3.TestMap.Uint64MapEntry\x12\x32\n\nstring_map\x18\x06 
\x03(\x0b\x32\x1e.proto3.TestMap.StringMapEntry\x1a.\n\x0c\x42oolMapEntry\x12\x0b\n\x03key\x18\x01 \x01(\x08\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\x1a/\n\rInt32MapEntry\x12\x0b\n\x03key\x18\x01 \x01(\x05\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\x1a/\n\rInt64MapEntry\x12\x0b\n\x03key\x18\x01 \x01(\x03\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\x1a\x30\n\x0eUint32MapEntry\x12\x0b\n\x03key\x18\x01 \x01(\r\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\x1a\x30\n\x0eUint64MapEntry\x12\x0b\n\x03key\x18\x01 \x01(\x04\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\x1a\x30\n\x0eStringMapEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\"\x85\x06\n\rTestNestedMap\x12\x34\n\x08\x62ool_map\x18\x01 \x03(\x0b\x32\".proto3.TestNestedMap.BoolMapEntry\x12\x36\n\tint32_map\x18\x02 \x03(\x0b\x32#.proto3.TestNestedMap.Int32MapEntry\x12\x36\n\tint64_map\x18\x03 \x03(\x0b\x32#.proto3.TestNestedMap.Int64MapEntry\x12\x38\n\nuint32_map\x18\x04 \x03(\x0b\x32$.proto3.TestNestedMap.Uint32MapEntry\x12\x38\n\nuint64_map\x18\x05 \x03(\x0b\x32$.proto3.TestNestedMap.Uint64MapEntry\x12\x38\n\nstring_map\x18\x06 \x03(\x0b\x32$.proto3.TestNestedMap.StringMapEntry\x12\x32\n\x07map_map\x18\x07 \x03(\x0b\x32!.proto3.TestNestedMap.MapMapEntry\x1a.\n\x0c\x42oolMapEntry\x12\x0b\n\x03key\x18\x01 \x01(\x08\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\x1a/\n\rInt32MapEntry\x12\x0b\n\x03key\x18\x01 \x01(\x05\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\x1a/\n\rInt64MapEntry\x12\x0b\n\x03key\x18\x01 \x01(\x03\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\x1a\x30\n\x0eUint32MapEntry\x12\x0b\n\x03key\x18\x01 \x01(\r\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\x1a\x30\n\x0eUint64MapEntry\x12\x0b\n\x03key\x18\x01 \x01(\x04\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\x1a\x30\n\x0eStringMapEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\x1a\x44\n\x0bMapMapEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12$\n\x05value\x18\x02 \x01(\x0b\x32\x15.proto3.TestNestedMap:\x02\x38\x01\"{\n\rTestStringMap\x12\x38\n\nstring_map\x18\x01 \x03(\x0b\x32$.proto3.TestStringMap.StringMapEntry\x1a\x30\n\x0eStringMapEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\xee\x07\n\x0bTestWrapper\x12.\n\nbool_value\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.BoolValue\x12\x30\n\x0bint32_value\x18\x02 \x01(\x0b\x32\x1b.google.protobuf.Int32Value\x12\x30\n\x0bint64_value\x18\x03 \x01(\x0b\x32\x1b.google.protobuf.Int64Value\x12\x32\n\x0cuint32_value\x18\x04 \x01(\x0b\x32\x1c.google.protobuf.UInt32Value\x12\x32\n\x0cuint64_value\x18\x05 \x01(\x0b\x32\x1c.google.protobuf.UInt64Value\x12\x30\n\x0b\x66loat_value\x18\x06 \x01(\x0b\x32\x1b.google.protobuf.FloatValue\x12\x32\n\x0c\x64ouble_value\x18\x07 \x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x12\x32\n\x0cstring_value\x18\x08 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12\x30\n\x0b\x62ytes_value\x18\t \x01(\x0b\x32\x1b.google.protobuf.BytesValue\x12\x37\n\x13repeated_bool_value\x18\x0b \x03(\x0b\x32\x1a.google.protobuf.BoolValue\x12\x39\n\x14repeated_int32_value\x18\x0c \x03(\x0b\x32\x1b.google.protobuf.Int32Value\x12\x39\n\x14repeated_int64_value\x18\r \x03(\x0b\x32\x1b.google.protobuf.Int64Value\x12;\n\x15repeated_uint32_value\x18\x0e \x03(\x0b\x32\x1c.google.protobuf.UInt32Value\x12;\n\x15repeated_uint64_value\x18\x0f \x03(\x0b\x32\x1c.google.protobuf.UInt64Value\x12\x39\n\x14repeated_float_value\x18\x10 
\x03(\x0b\x32\x1b.google.protobuf.FloatValue\x12;\n\x15repeated_double_value\x18\x11 \x03(\x0b\x32\x1c.google.protobuf.DoubleValue\x12;\n\x15repeated_string_value\x18\x12 \x03(\x0b\x32\x1c.google.protobuf.StringValue\x12\x39\n\x14repeated_bytes_value\x18\x13 \x03(\x0b\x32\x1b.google.protobuf.BytesValue\"n\n\rTestTimestamp\x12)\n\x05value\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x32\n\x0erepeated_value\x18\x02 \x03(\x0b\x32\x1a.google.protobuf.Timestamp\"k\n\x0cTestDuration\x12(\n\x05value\x18\x01 \x01(\x0b\x32\x19.google.protobuf.Duration\x12\x31\n\x0erepeated_value\x18\x02 \x03(\x0b\x32\x19.google.protobuf.Duration\":\n\rTestFieldMask\x12)\n\x05value\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.FieldMask\"e\n\nTestStruct\x12&\n\x05value\x18\x01 \x01(\x0b\x32\x17.google.protobuf.Struct\x12/\n\x0erepeated_value\x18\x02 \x03(\x0b\x32\x17.google.protobuf.Struct\"\\\n\x07TestAny\x12#\n\x05value\x18\x01 \x01(\x0b\x32\x14.google.protobuf.Any\x12,\n\x0erepeated_value\x18\x02 \x03(\x0b\x32\x14.google.protobuf.Any\"b\n\tTestValue\x12%\n\x05value\x18\x01 \x01(\x0b\x32\x16.google.protobuf.Value\x12.\n\x0erepeated_value\x18\x02 \x03(\x0b\x32\x16.google.protobuf.Value\"n\n\rTestListValue\x12)\n\x05value\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.ListValue\x12\x32\n\x0erepeated_value\x18\x02 \x03(\x0b\x32\x1a.google.protobuf.ListValue\"\x89\x01\n\rTestBoolValue\x12\x12\n\nbool_value\x18\x01 \x01(\x08\x12\x34\n\x08\x62ool_map\x18\x02 \x03(\x0b\x32\".proto3.TestBoolValue.BoolMapEntry\x1a.\n\x0c\x42oolMapEntry\x12\x0b\n\x03key\x18\x01 \x01(\x08\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\"+\n\x12TestCustomJsonName\x12\x15\n\x05value\x18\x01 \x01(\x05R\x06@value\"J\n\x0eTestExtensions\x12\x38\n\nextensions\x18\x01 \x01(\x0b\x32$.protobuf_unittest.TestAllExtensions\"\x84\x01\n\rTestEnumValue\x12%\n\x0b\x65num_value1\x18\x01 \x01(\x0e\x32\x10.proto3.EnumType\x12%\n\x0b\x65num_value2\x18\x02 \x01(\x0e\x32\x10.proto3.EnumType\x12%\n\x0b\x65num_value3\x18\x03 \x01(\x0e\x32\x10.proto3.EnumType*\x1c\n\x08\x45numType\x12\x07\n\x03\x46OO\x10\x00\x12\x07\n\x03\x42\x41R\x10\x01\x42,\n\x18\x63om.google.protobuf.utilB\x10JsonFormatProto3b\x06proto3') + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.util.json_format_proto3_pb2', globals()) +if _descriptor._USE_C_DESCRIPTORS == False: + + DESCRIPTOR._options = None + DESCRIPTOR._serialized_options = b'\n\030com.google.protobuf.utilB\020JsonFormatProto3' + _TESTMAP_BOOLMAPENTRY._options = None + _TESTMAP_BOOLMAPENTRY._serialized_options = b'8\001' + _TESTMAP_INT32MAPENTRY._options = None + _TESTMAP_INT32MAPENTRY._serialized_options = b'8\001' + _TESTMAP_INT64MAPENTRY._options = None + _TESTMAP_INT64MAPENTRY._serialized_options = b'8\001' + _TESTMAP_UINT32MAPENTRY._options = None + _TESTMAP_UINT32MAPENTRY._serialized_options = b'8\001' + _TESTMAP_UINT64MAPENTRY._options = None + _TESTMAP_UINT64MAPENTRY._serialized_options = b'8\001' + _TESTMAP_STRINGMAPENTRY._options = None + _TESTMAP_STRINGMAPENTRY._serialized_options = b'8\001' + _TESTNESTEDMAP_BOOLMAPENTRY._options = None + _TESTNESTEDMAP_BOOLMAPENTRY._serialized_options = b'8\001' + _TESTNESTEDMAP_INT32MAPENTRY._options = None + _TESTNESTEDMAP_INT32MAPENTRY._serialized_options = b'8\001' + _TESTNESTEDMAP_INT64MAPENTRY._options = None + _TESTNESTEDMAP_INT64MAPENTRY._serialized_options = b'8\001' + _TESTNESTEDMAP_UINT32MAPENTRY._options = None + _TESTNESTEDMAP_UINT32MAPENTRY._serialized_options = b'8\001' + 
_TESTNESTEDMAP_UINT64MAPENTRY._options = None + _TESTNESTEDMAP_UINT64MAPENTRY._serialized_options = b'8\001' + _TESTNESTEDMAP_STRINGMAPENTRY._options = None + _TESTNESTEDMAP_STRINGMAPENTRY._serialized_options = b'8\001' + _TESTNESTEDMAP_MAPMAPENTRY._options = None + _TESTNESTEDMAP_MAPMAPENTRY._serialized_options = b'8\001' + _TESTSTRINGMAP_STRINGMAPENTRY._options = None + _TESTSTRINGMAP_STRINGMAPENTRY._serialized_options = b'8\001' + _TESTBOOLVALUE_BOOLMAPENTRY._options = None + _TESTBOOLVALUE_BOOLMAPENTRY._serialized_options = b'8\001' + _ENUMTYPE._serialized_start=4849 + _ENUMTYPE._serialized_end=4877 + _MESSAGETYPE._serialized_start=277 + _MESSAGETYPE._serialized_end=305 + _TESTMESSAGE._serialized_start=308 + _TESTMESSAGE._serialized_end=968 + _TESTONEOF._serialized_start=971 + _TESTONEOF._serialized_end=1239 + _TESTMAP._serialized_start=1242 + _TESTMAP._serialized_end=1851 + _TESTMAP_BOOLMAPENTRY._serialized_start=1557 + _TESTMAP_BOOLMAPENTRY._serialized_end=1603 + _TESTMAP_INT32MAPENTRY._serialized_start=1605 + _TESTMAP_INT32MAPENTRY._serialized_end=1652 + _TESTMAP_INT64MAPENTRY._serialized_start=1654 + _TESTMAP_INT64MAPENTRY._serialized_end=1701 + _TESTMAP_UINT32MAPENTRY._serialized_start=1703 + _TESTMAP_UINT32MAPENTRY._serialized_end=1751 + _TESTMAP_UINT64MAPENTRY._serialized_start=1753 + _TESTMAP_UINT64MAPENTRY._serialized_end=1801 + _TESTMAP_STRINGMAPENTRY._serialized_start=1803 + _TESTMAP_STRINGMAPENTRY._serialized_end=1851 + _TESTNESTEDMAP._serialized_start=1854 + _TESTNESTEDMAP._serialized_end=2627 + _TESTNESTEDMAP_BOOLMAPENTRY._serialized_start=1557 + _TESTNESTEDMAP_BOOLMAPENTRY._serialized_end=1603 + _TESTNESTEDMAP_INT32MAPENTRY._serialized_start=1605 + _TESTNESTEDMAP_INT32MAPENTRY._serialized_end=1652 + _TESTNESTEDMAP_INT64MAPENTRY._serialized_start=1654 + _TESTNESTEDMAP_INT64MAPENTRY._serialized_end=1701 + _TESTNESTEDMAP_UINT32MAPENTRY._serialized_start=1703 + _TESTNESTEDMAP_UINT32MAPENTRY._serialized_end=1751 + _TESTNESTEDMAP_UINT64MAPENTRY._serialized_start=1753 + _TESTNESTEDMAP_UINT64MAPENTRY._serialized_end=1801 + _TESTNESTEDMAP_STRINGMAPENTRY._serialized_start=1803 + _TESTNESTEDMAP_STRINGMAPENTRY._serialized_end=1851 + _TESTNESTEDMAP_MAPMAPENTRY._serialized_start=2559 + _TESTNESTEDMAP_MAPMAPENTRY._serialized_end=2627 + _TESTSTRINGMAP._serialized_start=2629 + _TESTSTRINGMAP._serialized_end=2752 + _TESTSTRINGMAP_STRINGMAPENTRY._serialized_start=2704 + _TESTSTRINGMAP_STRINGMAPENTRY._serialized_end=2752 + _TESTWRAPPER._serialized_start=2755 + _TESTWRAPPER._serialized_end=3761 + _TESTTIMESTAMP._serialized_start=3763 + _TESTTIMESTAMP._serialized_end=3873 + _TESTDURATION._serialized_start=3875 + _TESTDURATION._serialized_end=3982 + _TESTFIELDMASK._serialized_start=3984 + _TESTFIELDMASK._serialized_end=4042 + _TESTSTRUCT._serialized_start=4044 + _TESTSTRUCT._serialized_end=4145 + _TESTANY._serialized_start=4147 + _TESTANY._serialized_end=4239 + _TESTVALUE._serialized_start=4241 + _TESTVALUE._serialized_end=4339 + _TESTLISTVALUE._serialized_start=4341 + _TESTLISTVALUE._serialized_end=4451 + _TESTBOOLVALUE._serialized_start=4454 + _TESTBOOLVALUE._serialized_end=4591 + _TESTBOOLVALUE_BOOLMAPENTRY._serialized_start=1557 + _TESTBOOLVALUE_BOOLMAPENTRY._serialized_end=1603 + _TESTCUSTOMJSONNAME._serialized_start=4593 + _TESTCUSTOMJSONNAME._serialized_end=4636 + _TESTEXTENSIONS._serialized_start=4638 + _TESTEXTENSIONS._serialized_end=4712 + _TESTENUMVALUE._serialized_start=4715 + _TESTENUMVALUE._serialized_end=4847 +# @@protoc_insertion_point(module_scope) diff --git 
a/openpype/hosts/hiero/vendor/google/protobuf/wrappers_pb2.py b/openpype/hosts/hiero/vendor/google/protobuf/wrappers_pb2.py new file mode 100644 index 0000000000..e49eb4c15d --- /dev/null +++ b/openpype/hosts/hiero/vendor/google/protobuf/wrappers_pb2.py @@ -0,0 +1,42 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: google/protobuf/wrappers.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1egoogle/protobuf/wrappers.proto\x12\x0fgoogle.protobuf\"\x1c\n\x0b\x44oubleValue\x12\r\n\x05value\x18\x01 \x01(\x01\"\x1b\n\nFloatValue\x12\r\n\x05value\x18\x01 \x01(\x02\"\x1b\n\nInt64Value\x12\r\n\x05value\x18\x01 \x01(\x03\"\x1c\n\x0bUInt64Value\x12\r\n\x05value\x18\x01 \x01(\x04\"\x1b\n\nInt32Value\x12\r\n\x05value\x18\x01 \x01(\x05\"\x1c\n\x0bUInt32Value\x12\r\n\x05value\x18\x01 \x01(\r\"\x1a\n\tBoolValue\x12\r\n\x05value\x18\x01 \x01(\x08\"\x1c\n\x0bStringValue\x12\r\n\x05value\x18\x01 \x01(\t\"\x1b\n\nBytesValue\x12\r\n\x05value\x18\x01 \x01(\x0c\x42\x83\x01\n\x13\x63om.google.protobufB\rWrappersProtoP\x01Z1google.golang.org/protobuf/types/known/wrapperspb\xf8\x01\x01\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3') + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.wrappers_pb2', globals()) +if _descriptor._USE_C_DESCRIPTORS == False: + + DESCRIPTOR._options = None + DESCRIPTOR._serialized_options = b'\n\023com.google.protobufB\rWrappersProtoP\001Z1google.golang.org/protobuf/types/known/wrapperspb\370\001\001\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes' + _DOUBLEVALUE._serialized_start=51 + _DOUBLEVALUE._serialized_end=79 + _FLOATVALUE._serialized_start=81 + _FLOATVALUE._serialized_end=108 + _INT64VALUE._serialized_start=110 + _INT64VALUE._serialized_end=137 + _UINT64VALUE._serialized_start=139 + _UINT64VALUE._serialized_end=167 + _INT32VALUE._serialized_start=169 + _INT32VALUE._serialized_end=196 + _UINT32VALUE._serialized_start=198 + _UINT32VALUE._serialized_end=226 + _BOOLVALUE._serialized_start=228 + _BOOLVALUE._serialized_end=254 + _STRINGVALUE._serialized_start=256 + _STRINGVALUE._serialized_end=284 + _BYTESVALUE._serialized_start=286 + _BYTESVALUE._serialized_end=313 +# @@protoc_insertion_point(module_scope) diff --git a/openpype/hosts/houdini/api/lib.py b/openpype/hosts/houdini/api/lib.py index dd8a5ba473..c8a7f92bb9 100644 --- a/openpype/hosts/houdini/api/lib.py +++ b/openpype/hosts/houdini/api/lib.py @@ -5,8 +5,8 @@ from contextlib import contextmanager import six from openpype.client import get_asset_by_name -from openpype.api import get_asset from openpype.pipeline import legacy_io +from openpype.pipeline.context_tools import get_current_project_asset import hou @@ -16,7 +16,7 @@ log = logging.getLogger(__name__) def get_asset_fps(): """Return current asset fps.""" - return get_asset()["data"].get("fps") + return get_current_project_asset()["data"].get("fps") def set_id(node, unique_id, overwrite=False): diff --git a/openpype/hosts/houdini/api/pipeline.py b/openpype/hosts/houdini/api/pipeline.py index 7048accceb..b5f5459392 100644 --- 
a/openpype/hosts/houdini/api/pipeline.py +++ b/openpype/hosts/houdini/api/pipeline.py @@ -12,13 +12,13 @@ from openpype.pipeline import ( register_loader_plugin_path, AVALON_CONTAINER_ID, ) +from openpype.pipeline.load import any_outdated_containers import openpype.hosts.houdini from openpype.hosts.houdini.api import lib from openpype.lib import ( register_event_callback, emit_event, - any_outdated, ) from .lib import get_asset_fps @@ -245,7 +245,7 @@ def on_open(): # ensure it is using correct FPS for the asset lib.validate_fps() - if any_outdated(): + if any_outdated_containers(): from openpype.widgets import popup log.warning("Scene has outdated content.") diff --git a/openpype/hosts/maya/__init__.py b/openpype/hosts/maya/__init__.py index c1c82c62e5..860db766f3 100644 --- a/openpype/hosts/maya/__init__.py +++ b/openpype/hosts/maya/__init__.py @@ -1,27 +1,6 @@ -import os +from .addon import MayaAddon -def add_implementation_envs(env, _app): - # Add requirements to PYTHONPATH - pype_root = os.environ["OPENPYPE_REPOS_ROOT"] - new_python_paths = [ - os.path.join(pype_root, "openpype", "hosts", "maya", "startup") - ] - old_python_path = env.get("PYTHONPATH") or "" - for path in old_python_path.split(os.pathsep): - if not path: - continue - - norm_path = os.path.normpath(path) - if norm_path not in new_python_paths: - new_python_paths.append(norm_path) - - env["PYTHONPATH"] = os.pathsep.join(new_python_paths) - - # Set default values if are not already set via settings - defaults = { - "OPENPYPE_LOG_NO_COLORS": "Yes" - } - for key, value in defaults.items(): - if not env.get(key): - env[key] = value +__all__ = ( + "MayaAddon", +) diff --git a/openpype/hosts/maya/addon.py b/openpype/hosts/maya/addon.py new file mode 100644 index 0000000000..7b1f7bf754 --- /dev/null +++ b/openpype/hosts/maya/addon.py @@ -0,0 +1,47 @@ +import os +from openpype.modules import OpenPypeModule +from openpype.modules.interfaces import IHostAddon + +MAYA_ROOT_DIR = os.path.dirname(os.path.abspath(__file__)) + + +class MayaAddon(OpenPypeModule, IHostAddon): + name = "maya" + host_name = "maya" + + def initialize(self, module_settings): + self.enabled = True + + def add_implementation_envs(self, env, _app): + # Add requirements to PYTHONPATH + new_python_paths = [ + os.path.join(MAYA_ROOT_DIR, "startup") + ] + old_python_path = env.get("PYTHONPATH") or "" + for path in old_python_path.split(os.pathsep): + if not path: + continue + + norm_path = os.path.normpath(path) + if norm_path not in new_python_paths: + new_python_paths.append(norm_path) + + env["PYTHONPATH"] = os.pathsep.join(new_python_paths) + + # Set default values if are not already set via settings + defaults = { + "OPENPYPE_LOG_NO_COLORS": "Yes" + } + for key, value in defaults.items(): + if not env.get(key): + env[key] = value + + def get_launch_hook_paths(self, app): + if app.host_name != self.host_name: + return [] + return [ + os.path.join(MAYA_ROOT_DIR, "hooks") + ] + + def get_workfile_extensions(self): + return [".ma", ".mb"] diff --git a/openpype/hosts/maya/api/lib.py b/openpype/hosts/maya/api/lib.py index e4221978c0..58e160cb2f 100644 --- a/openpype/hosts/maya/api/lib.py +++ b/openpype/hosts/maya/api/lib.py @@ -23,7 +23,6 @@ from openpype.client import ( get_last_versions, get_representation_by_name ) -from openpype import lib from openpype.api import get_anatomy_settings from openpype.pipeline import ( legacy_io, @@ -33,6 +32,7 @@ from openpype.pipeline import ( load_container, registered_host, ) +from openpype.pipeline.context_tools import 
get_current_project_asset from .commands import reset_frame_range @@ -2174,7 +2174,7 @@ def reset_scene_resolution(): project_name = legacy_io.active_project() project_doc = get_project(project_name) project_data = project_doc["data"] - asset_data = lib.get_asset()["data"] + asset_data = get_current_project_asset()["data"] # Set project resolution width_key = "resolutionWidth" @@ -2208,7 +2208,8 @@ def set_context_settings(): project_name = legacy_io.active_project() project_doc = get_project(project_name) project_data = project_doc["data"] - asset_data = lib.get_asset()["data"] + asset_doc = get_current_project_asset(fields=["data.fps"]) + asset_data = asset_doc.get("data", {}) # Set project fps fps = asset_data.get("fps", project_data.get("fps", 25)) @@ -2233,7 +2234,7 @@ def validate_fps(): """ - fps = lib.get_asset()["data"]["fps"] + fps = get_current_project_asset(fields=["data.fps"])["data"]["fps"] # TODO(antirotor): This is hack as for framerates having multiple # decimal places. FTrack is ceiling decimal values on # fps to two decimal places but Maya 2019+ is reporting those fps @@ -3051,8 +3052,9 @@ def update_content_on_context_change(): This will update scene content to match new asset on context change """ scene_sets = cmds.listSets(allSets=True) - new_asset = legacy_io.Session["AVALON_ASSET"] - new_data = lib.get_asset()["data"] + asset_doc = get_current_project_asset() + new_asset = asset_doc["name"] + new_data = asset_doc["data"] for s in scene_sets: try: if cmds.getAttr("{}.id".format(s)) == "pyblish.avalon.instance": diff --git a/openpype/hosts/maya/api/lib_renderproducts.py b/openpype/hosts/maya/api/lib_renderproducts.py index 123b934428..1e883ea43f 100644 --- a/openpype/hosts/maya/api/lib_renderproducts.py +++ b/openpype/hosts/maya/api/lib_renderproducts.py @@ -82,6 +82,14 @@ IMAGE_PREFIXES = { RENDERMAN_IMAGE_DIR = "maya//" + +def has_tokens(string, tokens): + """Return whether any of tokens is in input string (case-insensitive)""" + pattern = "({})".format("|".join(re.escape(token) for token in tokens)) + match = re.search(pattern, string, re.IGNORECASE) + return bool(match) + + @attr.s class LayerMetadata(object): """Data class for Render Layer metadata.""" @@ -99,6 +107,12 @@ class LayerMetadata(object): # Render Products products = attr.ib(init=False, default=attr.Factory(list)) + # The AOV separator token. Note that not all renderers define an explicit + # render separator but allow to put the AOV/RenderPass token anywhere in + # the file path prefix. For those renderers we'll fall back to whatever + # is between the last occurrences of and tokens. + aov_separator = attr.ib(default="_") + @attr.s class RenderProduct(object): @@ -183,7 +197,6 @@ class ARenderProducts: self.layer = layer self.render_instance = render_instance self.multipart = False - self.aov_separator = render_instance.data.get("aovSeparator", "_") # Initialize self.layer_data = self._get_layer_data() @@ -296,6 +309,42 @@ class ARenderProducts: return lib.get_attr_in_layer(plug, layer=self.layer) + @staticmethod + def extract_separator(file_prefix): + """Extract AOV separator character from the prefix. + + Default behavior extracts the part between + last occurrences of and + + Todo: + This code also triggers for V-Ray which overrides it explicitly + so this code will invalidly debug log it couldn't extract the + AOV separator even though it does set it in RenderProductsVray. + + Args: + file_prefix (str): File prefix with tokens. + + Returns: + str or None: prefix character if it can be extracted. 
+ """ + layer_tokens = ["", ""] + aov_tokens = ["", ""] + + def match_last(tokens, text): + """regex match the last occurence from a list of tokens""" + pattern = "(?:.*)({})".format("|".join(tokens)) + return re.search(pattern, text, re.IGNORECASE) + + layer_match = match_last(layer_tokens, file_prefix) + aov_match = match_last(aov_tokens, file_prefix) + separator = None + if layer_match and aov_match: + matches = sorted((layer_match, aov_match), + key=lambda match: match.end(1)) + separator = file_prefix[matches[0].end(1):matches[1].start(1)] + return separator + + def _get_layer_data(self): # type: () -> LayerMetadata # ______________________________________________ @@ -304,7 +353,7 @@ class ARenderProducts: # ____________________/ _, scene_basename = os.path.split(cmds.file(q=True, loc=True)) scene_name, _ = os.path.splitext(scene_basename) - + kwargs = {} file_prefix = self.get_renderer_prefix() # If the Render Layer belongs to a Render Setup layer then the @@ -319,6 +368,13 @@ class ARenderProducts: # defaultRenderLayer renders as masterLayer layer_name = "masterLayer" + separator = self.extract_separator(file_prefix) + if separator: + kwargs["aov_separator"] = separator + else: + log.debug("Couldn't extract aov separator from " + "file prefix: {}".format(file_prefix)) + # todo: Support Custom Frames sequences 0,5-10,100-120 # Deadline allows submitting renders with a custom frame list # to support those cases we might want to allow 'custom frames' @@ -335,7 +391,8 @@ class ARenderProducts: layerName=layer_name, renderer=self.renderer, defaultExt=self._get_attr("defaultRenderGlobals.imfPluginKey"), - filePrefix=file_prefix + filePrefix=file_prefix, + **kwargs ) def _generate_file_sequence( @@ -680,9 +737,17 @@ class RenderProductsVray(ARenderProducts): """ prefix = super(RenderProductsVray, self).get_renderer_prefix() - prefix = "{}{}".format(prefix, self.aov_separator) + aov_separator = self._get_aov_separator() + prefix = "{}{}".format(prefix, aov_separator) return prefix + def _get_aov_separator(self): + # type: () -> str + """Return the V-Ray AOV/Render Elements separator""" + return self._get_attr( + "vraySettings.fileNameRenderElementSeparator" + ) + def _get_layer_data(self): # type: () -> LayerMetadata """Override to get vray specific extension.""" @@ -694,6 +759,8 @@ class RenderProductsVray(ARenderProducts): layer_data.defaultExt = default_ext layer_data.padding = self._get_attr("vraySettings.fileNamePadding") + layer_data.aov_separator = self._get_aov_separator() + return layer_data def get_render_products(self): @@ -913,8 +980,9 @@ class RenderProductsRedshift(ARenderProducts): :func:`ARenderProducts.get_renderer_prefix()` """ - prefix = super(RenderProductsRedshift, self).get_renderer_prefix() - prefix = "{}{}".format(prefix, self.aov_separator) + file_prefix = super(RenderProductsRedshift, self).get_renderer_prefix() + separator = self.extract_separator(file_prefix) + prefix = "{}{}".format(file_prefix, separator or "_") return prefix def get_render_products(self): diff --git a/openpype/hosts/maya/api/lib_rendersettings.py b/openpype/hosts/maya/api/lib_rendersettings.py new file mode 100644 index 0000000000..7cd2193086 --- /dev/null +++ b/openpype/hosts/maya/api/lib_rendersettings.py @@ -0,0 +1,241 @@ +# -*- coding: utf-8 -*- +"""Class for handling Render Settings.""" +from maya import cmds # noqa +import maya.mel as mel +import six +import sys + +from openpype.api import ( + get_project_settings, + get_current_project_settings +) + +from openpype.pipeline import 
legacy_io +from openpype.pipeline import CreatorError +from openpype.pipeline.context_tools import get_current_project_asset +from openpype.hosts.maya.api.commands import reset_frame_range + + +class RenderSettings(object): + + _image_prefix_nodes = { + 'vray': 'vraySettings.fileNamePrefix', + 'arnold': 'defaultRenderGlobals.imageFilePrefix', + 'renderman': 'defaultRenderGlobals.imageFilePrefix', + 'redshift': 'defaultRenderGlobals.imageFilePrefix' + } + + _image_prefixes = { + 'vray': get_current_project_settings()["maya"]["RenderSettings"]["vray_renderer"]["image_prefix"], # noqa + 'arnold': get_current_project_settings()["maya"]["RenderSettings"]["arnold_renderer"]["image_prefix"], # noqa + 'renderman': 'maya///{aov_separator}', + 'redshift': get_current_project_settings()["maya"]["RenderSettings"]["redshift_renderer"]["image_prefix"] # noqa + } + + _aov_chars = { + "dot": ".", + "dash": "-", + "underscore": "_" + } + + @classmethod + def get_image_prefix_attr(cls, renderer): + return cls._image_prefix_nodes[renderer] + + def __init__(self, project_settings=None): + self._project_settings = project_settings + if not self._project_settings: + self._project_settings = get_project_settings( + legacy_io.Session["AVALON_PROJECT"] + ) + + def set_default_renderer_settings(self, renderer=None): + """Set basic settings based on renderer.""" + if not renderer: + renderer = cmds.getAttr( + 'defaultRenderGlobals.currentRenderer').lower() + + asset_doc = get_current_project_asset() + # project_settings/maya/create/CreateRender/aov_separator + try: + aov_separator = self._aov_chars[( + self._project_settings["maya"] + ["RenderSettings"] + ["aov_separator"] + )] + except KeyError: + aov_separator = "_" + reset_frame = self._project_settings["maya"]["RenderSettings"]["reset_current_frame"] # noqa + + if reset_frame: + start_frame = cmds.getAttr("defaultRenderGlobals.startFrame") + cmds.currentTime(start_frame, edit=True) + + if renderer in self._image_prefix_nodes: + prefix = self._image_prefixes[renderer] + prefix = prefix.replace("{aov_separator}", aov_separator) + cmds.setAttr(self._image_prefix_nodes[renderer], + prefix, type="string") # noqa + else: + print("{0} isn't a supported renderer to autoset settings.".format(renderer)) # noqa + + # TODO: handle not having res values in the doc + width = asset_doc["data"].get("resolutionWidth") + height = asset_doc["data"].get("resolutionHeight") + + if renderer == "arnold": + # set renderer settings for Arnold from project settings + self._set_arnold_settings(width, height) + + if renderer == "vray": + self._set_vray_settings(aov_separator, width, height) + + if renderer == "redshift": + self._set_redshift_settings(width, height) + + def _set_arnold_settings(self, width, height): + """Sets settings for Arnold.""" + from mtoa.core import createOptions # noqa + from mtoa.aovs import AOVInterface # noqa + createOptions() + arnold_render_presets = self._project_settings["maya"]["RenderSettings"]["arnold_renderer"] # noqa + # Force resetting settings and AOV list to avoid having to deal with + # AOV checking logic, for now. + # This is a work around because the standard + # function to revert render settings does not reset AOVs list in MtoA + # Fetch current aovs in case there's any. 
+ current_aovs = AOVInterface().getAOVs() + # Remove fetched AOVs + AOVInterface().removeAOVs(current_aovs) + mel.eval("unifiedRenderGlobalsRevertToDefault") + img_ext = arnold_render_presets["image_format"] + img_prefix = arnold_render_presets["image_prefix"] + aovs = arnold_render_presets["aov_list"] + img_tiled = arnold_render_presets["tiled"] + multi_exr = arnold_render_presets["multilayer_exr"] + additional_options = arnold_render_presets["additional_options"] + for aov in aovs: + AOVInterface('defaultArnoldRenderOptions').addAOV(aov) + + cmds.setAttr("defaultResolution.width", width) + cmds.setAttr("defaultResolution.height", height) + + self._set_global_output_settings() + + cmds.setAttr( + "defaultRenderGlobals.imageFilePrefix", img_prefix, type="string") + + cmds.setAttr( + "defaultArnoldDriver.ai_translator", img_ext, type="string") + + cmds.setAttr( + "defaultArnoldDriver.exrTiled", img_tiled) + + cmds.setAttr( + "defaultArnoldDriver.mergeAOVs", multi_exr) + # Passes additional options in from the schema as a list + # but converts it to a dictionary because ftrack doesn't + # allow fullstops in custom attributes. Then checks for + # type of MtoA attribute passed to adjust the `setAttr` + # command accordingly. + self._additional_attribs_setter(additional_options) + for item in additional_options: + attribute, value = item + if (cmds.getAttr(str(attribute), type=True)) == "long": + cmds.setAttr(str(attribute), int(value)) + elif (cmds.getAttr(str(attribute), type=True)) == "bool": + cmds.setAttr(str(attribute), int(value), type = "Boolean") # noqa + elif (cmds.getAttr(str(attribute), type=True)) == "string": + cmds.setAttr(str(attribute), str(value), type = "string") # noqa + reset_frame_range() + + def _set_redshift_settings(self, width, height): + """Sets settings for Redshift.""" + redshift_render_presets = ( + self._project_settings + ["maya"] + ["RenderSettings"] + ["redshift_renderer"] + ) + additional_options = redshift_render_presets["additional_options"] + ext = redshift_render_presets["image_format"] + img_exts = ["iff", "exr", "tif", "png", "tga", "jpg"] + img_ext = img_exts.index(ext) + + self._set_global_output_settings() + cmds.setAttr("redshiftOptions.imageFormat", img_ext) + cmds.setAttr("defaultResolution.width", width) + cmds.setAttr("defaultResolution.height", height) + self._additional_attribs_setter(additional_options) + + def _set_vray_settings(self, aov_separator, width, height): + # type: (str, int, int) -> None + """Sets important settings for Vray.""" + settings = cmds.ls(type="VRaySettingsNode") + node = settings[0] if settings else cmds.createNode("VRaySettingsNode") + vray_render_presets = ( + self._project_settings + ["maya"] + ["RenderSettings"] + ["vray_renderer"] + ) + # Set aov separator + # First we need to explicitly set the UI items in Render Settings + # because that is also what V-Ray updates to when that Render Settings + # UI did initialize before and refreshes again. + MENU = "vrayRenderElementSeparator" + if cmds.optionMenuGrp(MENU, query=True, exists=True): + items = cmds.optionMenuGrp(MENU, query=True, ill=True) + separators = [cmds.menuItem(i, query=True, label=True) for i in items] # noqa: E501 + try: + sep_idx = separators.index(aov_separator) + except ValueError as e: + six.reraise( + CreatorError, + CreatorError( + "AOV character {} not in {}".format( + aov_separator, separators)), + sys.exc_info()[2]) + + cmds.optionMenuGrp(MENU, edit=True, select=sep_idx + 1) + + # Set the render element attribute as string. 
This is also what V-Ray + # sets whenever the `vrayRenderElementSeparator` menu items switch + cmds.setAttr( + "{}.fileNameRenderElementSeparator".format(node), + aov_separator, + type="string" + ) + + # Set render file format to exr + cmds.setAttr("{}.imageFormatStr".format(node), "exr", type="string") + + # animType + cmds.setAttr("{}.animType".format(node), 1) + + # resolution + cmds.setAttr("{}.width".format(node), width) + cmds.setAttr("{}.height".format(node), height) + + additional_options = vray_render_presets["additional_options"] + + self._additional_attribs_setter(additional_options) + + @staticmethod + def _set_global_output_settings(): + # enable animation + cmds.setAttr("defaultRenderGlobals.outFormatControl", 0) + cmds.setAttr("defaultRenderGlobals.animation", 1) + cmds.setAttr("defaultRenderGlobals.putFrameBeforeExt", 1) + cmds.setAttr("defaultRenderGlobals.extensionPadding", 4) + + def _additional_attribs_setter(self, additional_attribs): + print(additional_attribs) + for item in additional_attribs: + attribute, value = item + if (cmds.getAttr(str(attribute), type=True)) == "long": + cmds.setAttr(str(attribute), int(value)) + elif (cmds.getAttr(str(attribute), type=True)) == "bool": + cmds.setAttr(str(attribute), int(value)) # noqa + elif (cmds.getAttr(str(attribute), type=True)) == "string": + cmds.setAttr(str(attribute), str(value), type = "string") # noqa diff --git a/openpype/hosts/maya/api/lib_template_builder.py b/openpype/hosts/maya/api/lib_template_builder.py new file mode 100644 index 0000000000..34a8450a26 --- /dev/null +++ b/openpype/hosts/maya/api/lib_template_builder.py @@ -0,0 +1,253 @@ +import json +from collections import OrderedDict +import maya.cmds as cmds + +import qargparse +from openpype.tools.utils.widgets import OptionDialog +from .lib import get_main_window, imprint + +# To change as enum +build_types = ["context_asset", "linked_asset", "all_assets"] + + +def get_placeholder_attributes(node): + return { + attr: cmds.getAttr("{}.{}".format(node, attr)) + for attr in cmds.listAttr(node, userDefined=True)} + + +def delete_placeholder_attributes(node): + ''' + function to delete all extra placeholder attributes + ''' + extra_attributes = get_placeholder_attributes(node) + for attribute in extra_attributes: + cmds.deleteAttr(node + '.' 
+ attribute) + + +def create_placeholder(): + args = placeholder_window() + + if not args: + return # operation canceled, no locator created + + # custom arg parse to force empty data query + # and still imprint them on placeholder + # and getting items when arg is of type Enumerator + options = create_options(args) + + # create placeholder name dynamically from args and options + placeholder_name = create_placeholder_name(args, options) + + selection = cmds.ls(selection=True) + if not selection: + raise ValueError("Nothing is selected") + + placeholder = cmds.spaceLocator(name=placeholder_name)[0] + + # get the long name of the placeholder (with the groups) + placeholder_full_name = cmds.ls(selection[0], long=True)[ + 0] + '|' + placeholder.replace('|', '') + + if selection: + cmds.parent(placeholder, selection[0]) + + imprint(placeholder_full_name, options) + + # Some tweaks because imprint force enums to to default value so we get + # back arg read and force them to attributes + imprint_enum(placeholder_full_name, args) + + # Add helper attributes to keep placeholder info + cmds.addAttr( + placeholder_full_name, + longName="parent", + hidden=True, + dataType="string" + ) + cmds.addAttr( + placeholder_full_name, + longName="index", + hidden=True, + attributeType="short", + defaultValue=-1 + ) + + cmds.setAttr(placeholder_full_name + '.parent', "", type="string") + + +def create_placeholder_name(args, options): + placeholder_builder_type = [ + arg.read() for arg in args if 'builder_type' in str(arg) + ][0] + placeholder_family = options['family'] + placeholder_name = placeholder_builder_type.split('_') + + # add famlily in any + if placeholder_family: + placeholder_name.insert(1, placeholder_family) + + # add loader arguments if any + if options['loader_args']: + pos = 2 + loader_args = options['loader_args'].replace('\'', '\"') + loader_args = json.loads(loader_args) + values = [v for v in loader_args.values()] + for i in range(len(values)): + placeholder_name.insert(i + pos, values[i]) + + placeholder_name = '_'.join(placeholder_name) + + return placeholder_name.capitalize() + + +def update_placeholder(): + placeholder = cmds.ls(selection=True) + if len(placeholder) == 0: + raise ValueError("No node selected") + if len(placeholder) > 1: + raise ValueError("Too many selected nodes") + placeholder = placeholder[0] + + args = placeholder_window(get_placeholder_attributes(placeholder)) + + if not args: + return # operation canceled + + # delete placeholder attributes + delete_placeholder_attributes(placeholder) + + options = create_options(args) + + imprint(placeholder, options) + imprint_enum(placeholder, args) + + cmds.addAttr( + placeholder, + longName="parent", + hidden=True, + dataType="string" + ) + cmds.addAttr( + placeholder, + longName="index", + hidden=True, + attributeType="short", + defaultValue=-1 + ) + + cmds.setAttr(placeholder + '.parent', '', type="string") + + +def create_options(args): + options = OrderedDict() + for arg in args: + if not type(arg) == qargparse.Separator: + options[str(arg)] = arg._data.get("items") or arg.read() + return options + + +def imprint_enum(placeholder, args): + """ + Imprint method doesn't act properly with enums. + Replacing the functionnality with this for now + """ + enum_values = {str(arg): arg.read() + for arg in args if arg._data.get("items")} + string_to_value_enum_table = { + build: i for i, build + in enumerate(build_types)} + for key, value in enum_values.items(): + cmds.setAttr( + placeholder + "." 
+ key, + string_to_value_enum_table[value]) + + +def placeholder_window(options=None): + options = options or dict() + dialog = OptionDialog(parent=get_main_window()) + dialog.setWindowTitle("Create Placeholder") + + args = [ + qargparse.Separator("Main attributes"), + qargparse.Enum( + "builder_type", + label="Asset Builder Type", + default=options.get("builder_type", 0), + items=build_types, + help="""Asset Builder Type +Builder type describe what template loader will look for. +context_asset : Template loader will look for subsets of +current context asset (Asset bob will find asset) +linked_asset : Template loader will look for assets linked +to current context asset. +Linked asset are looked in avalon database under field "inputLinks" +""" + ), + qargparse.String( + "family", + default=options.get("family", ""), + label="OpenPype Family", + placeholder="ex: model, look ..."), + qargparse.String( + "representation", + default=options.get("representation", ""), + label="OpenPype Representation", + placeholder="ex: ma, abc ..."), + qargparse.String( + "loader", + default=options.get("loader", ""), + label="Loader", + placeholder="ex: ReferenceLoader, LightLoader ...", + help="""Loader +Defines what openpype loader will be used to load assets. +Useable loader depends on current host's loader list. +Field is case sensitive. +"""), + qargparse.String( + "loader_args", + default=options.get("loader_args", ""), + label="Loader Arguments", + placeholder='ex: {"camera":"persp", "lights":True}', + help="""Loader +Defines a dictionnary of arguments used to load assets. +Useable arguments depend on current placeholder Loader. +Field should be a valid python dict. Anything else will be ignored. +"""), + qargparse.Integer( + "order", + default=options.get("order", 0), + min=0, + max=999, + label="Order", + placeholder="ex: 0, 100 ... 
(smallest order loaded first)", + help="""Order +Order defines asset loading priority (0 to 999) +Priority rule is : "lowest is first to load"."""), + qargparse.Separator( + "Optional attributes"), + qargparse.String( + "asset", + default=options.get("asset", ""), + label="Asset filter", + placeholder="regex filtering by asset name", + help="Filtering assets by matching field regex to asset's name"), + qargparse.String( + "subset", + default=options.get("subset", ""), + label="Subset filter", + placeholder="regex filtering by subset name", + help="Filtering assets by matching field regex to subset's name"), + qargparse.String( + "hierarchy", + default=options.get("hierarchy", ""), + label="Hierarchy filter", + placeholder="regex filtering by asset's hierarchy", + help="Filtering assets by matching field asset's hierarchy") + ] + dialog.create(args) + + if not dialog.exec_(): + return None + + return args diff --git a/openpype/hosts/maya/api/menu.py b/openpype/hosts/maya/api/menu.py index 97f06c43af..ebba706a6c 100644 --- a/openpype/hosts/maya/api/menu.py +++ b/openpype/hosts/maya/api/menu.py @@ -6,13 +6,18 @@ from Qt import QtWidgets, QtGui import maya.utils import maya.cmds as cmds -from openpype.api import BuildWorkfile from openpype.settings import get_project_settings from openpype.pipeline import legacy_io +from openpype.pipeline.workfile import BuildWorkfile +from openpype.pipeline.workfile.build_template import ( + build_workfile_template, + update_workfile_template +) from openpype.tools.utils import host_tools -from openpype.hosts.maya.api import lib +from openpype.hosts.maya.api import lib, lib_rendersettings from .lib import get_main_window, IS_HEADLESS from .commands import reset_frame_range +from .lib_template_builder import create_placeholder, update_placeholder log = logging.getLogger(__name__) @@ -44,6 +49,7 @@ def install(): parent="MayaWindow" ) + renderer = cmds.getAttr('defaultRenderGlobals.currentRenderer').lower() # Create context menu context_label = "{}, {}".format( legacy_io.Session["AVALON_ASSET"], @@ -98,6 +104,13 @@ def install(): cmds.menuItem(divider=True) + cmds.menuItem( + "Set Render Settings", + command=lambda *args: lib_rendersettings.RenderSettings().set_default_renderer_settings() # noqa + ) + + cmds.menuItem(divider=True) + cmds.menuItem( "Work Files...", command=lambda *args: host_tools.show_workfiles( @@ -139,6 +152,34 @@ def install(): parent_widget ) ) + + builder_menu = cmds.menuItem( + "Template Builder", + subMenu=True, + tearOff=True, + parent=MENU_NAME + ) + cmds.menuItem( + "Create Placeholder", + parent=builder_menu, + command=lambda *args: create_placeholder() + ) + cmds.menuItem( + "Update Placeholder", + parent=builder_menu, + command=lambda *args: update_placeholder() + ) + cmds.menuItem( + "Build Workfile from template", + parent=builder_menu, + command=build_workfile_template + ) + cmds.menuItem( + "Update Workfile from template", + parent=builder_menu, + command=update_workfile_template + ) + cmds.setParent(MENU_NAME, menu=True) def add_scripts_menu(): diff --git a/openpype/hosts/maya/api/pipeline.py b/openpype/hosts/maya/api/pipeline.py index d08e8d1926..f565f6a308 100644 --- a/openpype/hosts/maya/api/pipeline.py +++ b/openpype/hosts/maya/api/pipeline.py @@ -13,7 +13,6 @@ from openpype.host import HostBase, IWorkfileHost, ILoadHost import openpype.hosts.maya from openpype.tools.utils import host_tools from openpype.lib import ( - any_outdated, register_event_callback, emit_event ) @@ -28,6 +27,7 @@ from openpype.pipeline import ( 
deregister_creator_plugin_path, AVALON_CONTAINER_ID, ) +from openpype.pipeline.load import any_outdated_containers from openpype.hosts.maya.lib import copy_workspace_mel from . import menu, lib from .workio import ( @@ -470,7 +470,7 @@ def on_open(): lib.validate_fps() lib.fix_incompatible_containers() - if any_outdated(): + if any_outdated_containers(): log.warning("Scene has outdated content.") # Find maya main window diff --git a/openpype/hosts/maya/api/plugin.py b/openpype/hosts/maya/api/plugin.py index 9280805945..39d821f620 100644 --- a/openpype/hosts/maya/api/plugin.py +++ b/openpype/hosts/maya/api/plugin.py @@ -4,6 +4,7 @@ from maya import cmds import qargparse +from openpype.lib import Logger from openpype.pipeline import ( LegacyCreator, LoaderPlugin, @@ -50,9 +51,7 @@ def get_reference_node(members, log=None): # Warn the user when we're taking the highest reference node if len(references) > 1: if not log: - from openpype.lib import PypeLogger - - log = PypeLogger().get_logger(__name__) + log = Logger.get_logger(__name__) log.warning("More than one reference node found in " "container, using highest reference node: " @@ -208,7 +207,8 @@ class ReferenceLoader(Loader): file_type = { "ma": "mayaAscii", "mb": "mayaBinary", - "abc": "Alembic" + "abc": "Alembic", + "fbx": "FBX" }.get(representation["name"]) assert file_type, "Unsupported representation: %s" % representation @@ -234,7 +234,7 @@ class ReferenceLoader(Loader): path = self.prepare_root_value(path, representation["context"] ["project"] - ["code"]) + ["name"]) content = cmds.file(path, loadReference=reference_node, type=file_type, diff --git a/openpype/hosts/maya/api/shader_definition_editor.py b/openpype/hosts/maya/api/shader_definition_editor.py index 911db48ac2..6ea5e1a127 100644 --- a/openpype/hosts/maya/api/shader_definition_editor.py +++ b/openpype/hosts/maya/api/shader_definition_editor.py @@ -6,7 +6,7 @@ Shader names are stored as simple text file over GridFS in mongodb. """ import os from Qt import QtWidgets, QtCore, QtGui -from openpype.lib.mongo import OpenPypeMongoConnection +from openpype.client.mongo import OpenPypeMongoConnection from openpype import resources import gridfs diff --git a/openpype/hosts/maya/api/template_loader.py b/openpype/hosts/maya/api/template_loader.py new file mode 100644 index 0000000000..ecffafc93d --- /dev/null +++ b/openpype/hosts/maya/api/template_loader.py @@ -0,0 +1,252 @@ +import re +from maya import cmds + +from openpype.client import get_representations +from openpype.pipeline import legacy_io +from openpype.pipeline.workfile.abstract_template_loader import ( + AbstractPlaceholder, + AbstractTemplateLoader +) +from openpype.pipeline.workfile.build_template_exceptions import ( + TemplateAlreadyImported +) + +PLACEHOLDER_SET = 'PLACEHOLDERS_SET' + + +class MayaTemplateLoader(AbstractTemplateLoader): + """Concrete implementation of AbstractTemplateLoader for maya + """ + + def import_template(self, path): + """Import template into current scene. + Block if a template is already loaded. 
+ Args: + path (str): A path to current template (usually given by + get_template_path implementation) + Returns: + bool: Wether the template was succesfully imported or not + """ + if cmds.objExists(PLACEHOLDER_SET): + raise TemplateAlreadyImported( + "Build template already loaded\n" + "Clean scene if needed (File > New Scene)") + + cmds.sets(name=PLACEHOLDER_SET, empty=True) + self.new_nodes = cmds.file(path, i=True, returnNewNodes=True) + cmds.setAttr(PLACEHOLDER_SET + '.hiddenInOutliner', True) + + for set in cmds.listSets(allSets=True): + if (cmds.objExists(set) and + cmds.attributeQuery('id', node=set, exists=True) and + cmds.getAttr(set + '.id') == 'pyblish.avalon.instance'): + if cmds.attributeQuery('asset', node=set, exists=True): + cmds.setAttr( + set + '.asset', + legacy_io.Session['AVALON_ASSET'], type='string' + ) + + return True + + def template_already_imported(self, err_msg): + clearButton = "Clear scene and build" + updateButton = "Update template" + abortButton = "Abort" + + title = "Scene already builded" + message = ( + "It's seems a template was already build for this scene.\n" + "Error message reveived :\n\n\"{}\"".format(err_msg)) + buttons = [clearButton, updateButton, abortButton] + defaultButton = clearButton + cancelButton = abortButton + dismissString = abortButton + answer = cmds.confirmDialog( + t=title, + m=message, + b=buttons, + db=defaultButton, + cb=cancelButton, + ds=dismissString) + + if answer == clearButton: + cmds.file(newFile=True, force=True) + self.import_template(self.template_path) + self.populate_template() + elif answer == updateButton: + self.update_missing_containers() + elif answer == abortButton: + return + + @staticmethod + def get_template_nodes(): + attributes = cmds.ls('*.builder_type', long=True) + return [attribute.rpartition('.')[0] for attribute in attributes] + + def get_loaded_containers_by_id(self): + try: + containers = cmds.sets("AVALON_CONTAINERS", q=True) + except ValueError: + return None + + return [ + cmds.getAttr(container + '.representation') + for container in containers] + + +class MayaPlaceholder(AbstractPlaceholder): + """Concrete implementation of AbstractPlaceholder for maya + """ + + optional_keys = {'asset', 'subset', 'hierarchy'} + + def get_data(self, node): + user_data = dict() + for attr in self.required_keys.union(self.optional_keys): + attribute_name = '{}.{}'.format(node, attr) + if not cmds.attributeQuery(attr, node=node, exists=True): + print("{} not found".format(attribute_name)) + continue + user_data[attr] = cmds.getAttr( + attribute_name, + asString=True) + user_data['parent'] = ( + cmds.getAttr(node + '.parent', asString=True) + or node.rpartition('|')[0] + or "" + ) + user_data['node'] = node + if user_data['parent']: + siblings = cmds.listRelatives(user_data['parent'], children=True) + else: + siblings = cmds.ls(assemblies=True) + node_shortname = user_data['node'].rpartition('|')[2] + current_index = cmds.getAttr(node + '.index', asString=True) + user_data['index'] = ( + current_index if current_index >= 0 + else siblings.index(node_shortname)) + + self.data = user_data + + def parent_in_hierarchy(self, containers): + """Parent loaded container to placeholder's parent + ie : Set loaded content as placeholder's sibling + Args: + containers (String): Placeholder loaded containers + """ + if not containers: + return + + roots = cmds.sets(containers, q=True) + nodes_to_parent = [] + for root in roots: + if root.endswith("_RN"): + refRoot = cmds.referenceQuery(root, n=True)[0] + refRoot = 
cmds.listRelatives(refRoot, parent=True) or [refRoot] + nodes_to_parent.extend(refRoot) + elif root in cmds.listSets(allSets=True): + if not cmds.sets(root, q=True): + return + else: + continue + else: + nodes_to_parent.append(root) + + if self.data['parent']: + cmds.parent(nodes_to_parent, self.data['parent']) + # Move loaded nodes to correct index in outliner hierarchy + placeholder_node = self.data['node'] + placeholder_form = cmds.xform( + placeholder_node, + q=True, + matrix=True, + worldSpace=True + ) + for node in set(nodes_to_parent): + cmds.reorder(node, front=True) + cmds.reorder(node, relative=self.data['index']) + cmds.xform(node, matrix=placeholder_form, ws=True) + + holding_sets = cmds.listSets(object=placeholder_node) + if not holding_sets: + return + for holding_set in holding_sets: + cmds.sets(roots, forceElement=holding_set) + + def clean(self): + """Hide placeholder, parent them to root + add them to placeholder set and register placeholder's parent + to keep placeholder info available for future use + """ + node = self.data['node'] + if self.data['parent']: + cmds.setAttr(node + '.parent', self.data['parent'], type='string') + if cmds.getAttr(node + '.index') < 0: + cmds.setAttr(node + '.index', self.data['index']) + + holding_sets = cmds.listSets(object=node) + if holding_sets: + for set in holding_sets: + cmds.sets(node, remove=set) + + if cmds.listRelatives(node, p=True): + node = cmds.parent(node, world=True)[0] + cmds.sets(node, addElement=PLACEHOLDER_SET) + cmds.hide(node) + cmds.setAttr(node + '.hiddenInOutliner', True) + + def get_representations(self, current_asset_doc, linked_asset_docs): + project_name = legacy_io.active_project() + + builder_type = self.data["builder_type"] + if builder_type == "context_asset": + context_filters = { + "asset": [current_asset_doc["name"]], + "subset": [re.compile(self.data["subset"])], + "hierarchy": [re.compile(self.data["hierarchy"])], + "representations": [self.data["representation"]], + "family": [self.data["family"]] + } + + elif builder_type != "linked_asset": + context_filters = { + "asset": [re.compile(self.data["asset"])], + "subset": [re.compile(self.data["subset"])], + "hierarchy": [re.compile(self.data["hierarchy"])], + "representation": [self.data["representation"]], + "family": [self.data["family"]] + } + + else: + asset_regex = re.compile(self.data["asset"]) + linked_asset_names = [] + for asset_doc in linked_asset_docs: + asset_name = asset_doc["name"] + if asset_regex.match(asset_name): + linked_asset_names.append(asset_name) + + context_filters = { + "asset": linked_asset_names, + "subset": [re.compile(self.data["subset"])], + "hierarchy": [re.compile(self.data["hierarchy"])], + "representation": [self.data["representation"]], + "family": [self.data["family"]], + } + + return list(get_representations( + project_name, + context_filters=context_filters + )) + + def err_message(self): + return ( + "Error while trying to load a representation.\n" + "Either the subset wasn't published or the template is malformed." 
+ "\n\n" + "Builder was looking for :\n{attributes}".format( + attributes="\n".join([ + "{}: {}".format(key.title(), value) + for key, value in self.data.items()] + ) + ) + ) diff --git a/openpype/hosts/maya/api/workio.py b/openpype/hosts/maya/api/workio.py index fd4961c4bf..8c31974c73 100644 --- a/openpype/hosts/maya/api/workio.py +++ b/openpype/hosts/maya/api/workio.py @@ -2,11 +2,9 @@ import os from maya import cmds -from openpype.pipeline import HOST_WORKFILE_EXTENSIONS - def file_extensions(): - return HOST_WORKFILE_EXTENSIONS["maya"] + return [".ma", ".mb"] def has_unsaved_changes(): diff --git a/openpype/hosts/maya/plugins/create/create_animation.py b/openpype/hosts/maya/plugins/create/create_animation.py index 5cd1f7090a..e47d4e5b5a 100644 --- a/openpype/hosts/maya/plugins/create/create_animation.py +++ b/openpype/hosts/maya/plugins/create/create_animation.py @@ -11,6 +11,7 @@ class CreateAnimation(plugin.Creator): label = "Animation" family = "animation" icon = "male" + write_color_sets = False def __init__(self, *args, **kwargs): super(CreateAnimation, self).__init__(*args, **kwargs) @@ -22,7 +23,7 @@ class CreateAnimation(plugin.Creator): self.data[key] = value # Write vertex colors with the geometry. - self.data["writeColorSets"] = False + self.data["writeColorSets"] = self.write_color_sets self.data["writeFaceSets"] = False # Include only renderable visible shapes. diff --git a/openpype/hosts/maya/plugins/create/create_pointcache.py b/openpype/hosts/maya/plugins/create/create_pointcache.py index e876015adb..5516445de8 100644 --- a/openpype/hosts/maya/plugins/create/create_pointcache.py +++ b/openpype/hosts/maya/plugins/create/create_pointcache.py @@ -11,6 +11,7 @@ class CreatePointCache(plugin.Creator): label = "Point Cache" family = "pointcache" icon = "gears" + write_color_sets = False def __init__(self, *args, **kwargs): super(CreatePointCache, self).__init__(*args, **kwargs) @@ -18,7 +19,8 @@ class CreatePointCache(plugin.Creator): # Add animation data self.data.update(lib.collect_animation_data()) - self.data["writeColorSets"] = False # Vertex colors with the geometry. + # Vertex colors with the geometry. + self.data["writeColorSets"] = self.write_color_sets self.data["writeFaceSets"] = False # Vertex colors with the geometry. 
self.data["renderableOnly"] = False # Only renderable visible shapes self.data["visibleOnly"] = False # only nodes that are visible diff --git a/openpype/hosts/maya/plugins/create/create_render.py b/openpype/hosts/maya/plugins/create/create_render.py index 93ee6679e5..5418ec1f2f 100644 --- a/openpype/hosts/maya/plugins/create/create_render.py +++ b/openpype/hosts/maya/plugins/create/create_render.py @@ -1,27 +1,34 @@ # -*- coding: utf-8 -*- """Create ``Render`` instance in Maya.""" -import os import json +import os + import appdirs import requests from maya import cmds -import maya.app.renderSetup.model.renderSetup as renderSetup +from maya.app.renderSetup.model import renderSetup +from openpype.api import ( + get_system_settings, + get_project_settings, +) from openpype.hosts.maya.api import ( lib, + lib_rendersettings, plugin ) from openpype.lib import requests_get from openpype.api import ( get_system_settings, - get_project_settings, - get_asset) + get_project_settings) from openpype.modules import ModulesManager +from openpype.pipeline import legacy_io from openpype.pipeline import ( CreatorError, legacy_io, ) +from openpype.pipeline.context_tools import get_current_project_asset class CreateRender(plugin.Creator): @@ -64,40 +71,10 @@ class CreateRender(plugin.Creator): label = "Render" family = "rendering" icon = "eye" - _token = None _user = None _password = None - # renderSetup instance - _rs = None - - _image_prefix_nodes = { - 'mentalray': 'defaultRenderGlobals.imageFilePrefix', - 'vray': 'vraySettings.fileNamePrefix', - 'arnold': 'defaultRenderGlobals.imageFilePrefix', - 'renderman': 'rmanGlobals.imageFileFormat', - 'redshift': 'defaultRenderGlobals.imageFilePrefix', - 'mayahardware2': 'defaultRenderGlobals.imageFilePrefix', - } - - _image_prefixes = { - 'mentalray': 'maya///{aov_separator}', # noqa - 'vray': 'maya///', - 'arnold': 'maya///{aov_separator}', # noqa - # this needs `imageOutputDir` - # (/renders/maya/) set separately - 'renderman': '_..', - 'redshift': 'maya///', # noqa - 'mayahardware2': 'maya///', # noqa - } - - _aov_chars = { - "dot": ".", - "dash": "-", - "underscore": "_" - } - _project_settings = None def __init__(self, *args, **kwargs): @@ -109,18 +86,8 @@ class CreateRender(plugin.Creator): return self._project_settings = get_project_settings( legacy_io.Session["AVALON_PROJECT"]) - - # project_settings/maya/create/CreateRender/aov_separator - try: - self.aov_separator = self._aov_chars[( - self._project_settings["maya"] - ["create"] - ["CreateRender"] - ["aov_separator"] - )] - except KeyError: - self.aov_separator = "_" - + if self._project_settings["maya"]["RenderSettings"]["apply_render_settings"]: # noqa + lib_rendersettings.RenderSettings().set_default_renderer_settings() manager = ModulesManager() self.deadline_module = manager.modules_by_name["deadline"] try: @@ -177,13 +144,13 @@ class CreateRender(plugin.Creator): ]) cmds.setAttr("{}.machineList".format(self.instance), lock=True) - self._rs = renderSetup.instance() - layers = self._rs.getRenderLayers() + rs = renderSetup.instance() + layers = rs.getRenderLayers() if use_selection: - print(">>> processing existing layers") + self.log.info("Processing existing layers") sets = [] for layer in layers: - print(" - creating set for {}:{}".format( + self.log.info(" - creating set for {}:{}".format( namespace, layer.name())) render_set = cmds.sets( n="{}:{}".format(namespace, layer.name())) @@ -193,17 +160,10 @@ class CreateRender(plugin.Creator): # if no render layers are present, create default one with 
# asterisk selector if not layers: - render_layer = self._rs.createRenderLayer('Main') + render_layer = rs.createRenderLayer('Main') collection = render_layer.createCollection("defaultCollection") collection.getSelector().setPattern('*') - renderer = cmds.getAttr( - 'defaultRenderGlobals.currentRenderer').lower() - # handle various renderman names - if renderer.startswith('renderman'): - renderer = 'renderman' - - self._set_default_renderer_settings(renderer) return self.instance def _deadline_webservice_changed(self): @@ -237,7 +197,7 @@ class CreateRender(plugin.Creator): def _create_render_settings(self): """Create instance settings.""" - # get pools + # get pools (slave machines of the render farm) pool_names = [] default_priority = 50 @@ -259,6 +219,12 @@ class CreateRender(plugin.Creator): self.data["tilesY"] = 2 self.data["convertToScanline"] = False self.data["useReferencedAovs"] = False + self.data["renderSetupIncludeLights"] = ( + self._project_settings.get( + "maya", {}).get( + "RenderSettings", {}).get( + "enable_all_lights", False) + ) # Disable for now as this feature is not working yet # self.data["assScene"] = False @@ -281,7 +247,8 @@ class CreateRender(plugin.Creator): # if 'default' server is not between selected, # use first one for initial list of pools. deadline_url = next(iter(self.deadline_servers.values())) - + # Uses function to get pool machines from the assigned deadline + # url in settings pool_names = self.deadline_module.get_deadline_pools(deadline_url, self.log) maya_submit_dl = self._project_settings.get( @@ -400,102 +367,36 @@ class CreateRender(plugin.Creator): self.log.error("Cannot show login form to Muster") raise Exception("Cannot show login form to Muster") - def _set_default_renderer_settings(self, renderer): - """Set basic settings based on renderer. + def _requests_post(self, *args, **kwargs): + """Wrap request post method. - Args: - renderer (str): Renderer name. + Disabling SSL certificate validation if ``DONT_VERIFY_SSL`` environment + variable is found. This is useful when Deadline or Muster server are + running with self-signed certificates and their certificate is not + added to trusted certificates on client machines. + + Warning: + Disabling SSL certificate validation is defeating one line + of defense SSL is providing and it is not recommended. """ - prefix = self._image_prefixes[renderer] - prefix = prefix.replace("{aov_separator}", self.aov_separator) - cmds.setAttr(self._image_prefix_nodes[renderer], - prefix, - type="string") + if "verify" not in kwargs: + kwargs["verify"] = not os.getenv("OPENPYPE_DONT_VERIFY_SSL", True) + return requests.post(*args, **kwargs) - asset = get_asset() + def _requests_get(self, *args, **kwargs): + """Wrap request get method. - if renderer == "arnold": - # set format to exr + Disabling SSL certificate validation if ``DONT_VERIFY_SSL`` environment + variable is found. This is useful when Deadline or Muster server are + running with self-signed certificates and their certificate is not + added to trusted certificates on client machines. - cmds.setAttr( - "defaultArnoldDriver.ai_translator", "exr", type="string") - self._set_global_output_settings() - # resolution - cmds.setAttr( - "defaultResolution.width", - asset["data"].get("resolutionWidth")) - cmds.setAttr( - "defaultResolution.height", - asset["data"].get("resolutionHeight")) + Warning: + Disabling SSL certificate validation is defeating one line + of defense SSL is providing and it is not recommended. 
- if renderer == "vray": - self._set_vray_settings(asset) - if renderer == "redshift": - cmds.setAttr("redshiftOptions.imageFormat", 1) - - # resolution - cmds.setAttr( - "defaultResolution.width", - asset["data"].get("resolutionWidth")) - cmds.setAttr( - "defaultResolution.height", - asset["data"].get("resolutionHeight")) - - self._set_global_output_settings() - - if renderer == "renderman": - cmds.setAttr("rmanGlobals.imageOutputDir", - "maya//", type="string") - - def _set_vray_settings(self, asset): - # type: (dict) -> None - """Sets important settings for Vray.""" - settings = cmds.ls(type="VRaySettingsNode") - node = settings[0] if settings else cmds.createNode("VRaySettingsNode") - - # set separator - # set it in vray menu - if cmds.optionMenuGrp("vrayRenderElementSeparator", exists=True, - q=True): - items = cmds.optionMenuGrp( - "vrayRenderElementSeparator", ill=True, query=True) - - separators = [cmds.menuItem(i, label=True, query=True) for i in items] # noqa: E501 - try: - sep_idx = separators.index(self.aov_separator) - except ValueError: - raise CreatorError( - "AOV character {} not in {}".format( - self.aov_separator, separators)) - - cmds.optionMenuGrp( - "vrayRenderElementSeparator", sl=sep_idx + 1, edit=True) - cmds.setAttr( - "{}.fileNameRenderElementSeparator".format(node), - self.aov_separator, - type="string" - ) - # set format to exr - cmds.setAttr( - "{}.imageFormatStr".format(node), "exr", type="string") - - # animType - cmds.setAttr( - "{}.animType".format(node), 1) - - # resolution - cmds.setAttr( - "{}.width".format(node), - asset["data"].get("resolutionWidth")) - cmds.setAttr( - "{}.height".format(node), - asset["data"].get("resolutionHeight")) - - @staticmethod - def _set_global_output_settings(): - # enable animation - cmds.setAttr("defaultRenderGlobals.outFormatControl", 0) - cmds.setAttr("defaultRenderGlobals.animation", 1) - cmds.setAttr("defaultRenderGlobals.putFrameBeforeExt", 1) - cmds.setAttr("defaultRenderGlobals.extensionPadding", 4) + """ + if "verify" not in kwargs: + kwargs["verify"] = not os.getenv("OPENPYPE_DONT_VERIFY_SSL", True) + return requests.get(*args, **kwargs) diff --git a/openpype/hosts/maya/plugins/load/_load_animation.py b/openpype/hosts/maya/plugins/load/_load_animation.py index 0010efb829..b419a730b5 100644 --- a/openpype/hosts/maya/plugins/load/_load_animation.py +++ b/openpype/hosts/maya/plugins/load/_load_animation.py @@ -36,7 +36,7 @@ class AbcLoader(openpype.hosts.maya.api.plugin.ReferenceLoader): # hero_001 (abc) # asset_counter{optional} file_url = self.prepare_root_value(self.fname, - context["project"]["code"]) + context["project"]["name"]) nodes = cmds.file(file_url, namespace=namespace, sharedReferenceFile=False, diff --git a/openpype/hosts/maya/plugins/load/load_ass.py b/openpype/hosts/maya/plugins/load/load_ass.py index 1f0eb88995..d1b12ceaba 100644 --- a/openpype/hosts/maya/plugins/load/load_ass.py +++ b/openpype/hosts/maya/plugins/load/load_ass.py @@ -65,8 +65,9 @@ class AssProxyLoader(openpype.hosts.maya.api.plugin.ReferenceLoader): proxyPath = proxyPath_base + ".ma" + project_name = context["project"]["name"] file_url = self.prepare_root_value(proxyPath, - context["project"]["code"]) + project_name) nodes = cmds.file(file_url, namespace=namespace, @@ -85,7 +86,7 @@ class AssProxyLoader(openpype.hosts.maya.api.plugin.ReferenceLoader): proxyShape.dso.set(path) proxyShape.aiOverrideShaders.set(0) - settings = get_project_settings(os.environ['AVALON_PROJECT']) + settings = get_project_settings(project_name) colors = 
settings['maya']['load']['colors'] c = colors.get(family) @@ -128,7 +129,7 @@ class AssProxyLoader(openpype.hosts.maya.api.plugin.ReferenceLoader): file_url = self.prepare_root_value(proxyPath, representation["context"] ["project"] - ["code"]) + ["name"]) content = cmds.file(file_url, loadReference=reference_node, type="mayaAscii", diff --git a/openpype/hosts/maya/plugins/load/load_look.py b/openpype/hosts/maya/plugins/load/load_look.py index 7392adc4dd..3ef19ad96f 100644 --- a/openpype/hosts/maya/plugins/load/load_look.py +++ b/openpype/hosts/maya/plugins/load/load_look.py @@ -33,7 +33,7 @@ class LookLoader(openpype.hosts.maya.api.plugin.ReferenceLoader): with lib.maintained_selection(): file_url = self.prepare_root_value(self.fname, - context["project"]["code"]) + context["project"]["name"]) nodes = cmds.file(file_url, namespace=namespace, reference=True, diff --git a/openpype/hosts/maya/plugins/load/load_reference.py b/openpype/hosts/maya/plugins/load/load_reference.py index e4355ed3d4..5a06661df9 100644 --- a/openpype/hosts/maya/plugins/load/load_reference.py +++ b/openpype/hosts/maya/plugins/load/load_reference.py @@ -2,10 +2,10 @@ import os from maya import cmds from openpype.api import get_project_settings -from openpype.lib import get_creator_by_name -from openpype.pipeline import ( - legacy_io, +from openpype.pipeline import legacy_io +from openpype.pipeline.create import ( legacy_create, + get_legacy_creator_by_name, ) import openpype.hosts.maya.api.plugin from openpype.hosts.maya.api.lib import maintained_selection @@ -52,7 +52,7 @@ class ReferenceLoader(openpype.hosts.maya.api.plugin.ReferenceLoader): with maintained_selection(): cmds.loadPlugin("AbcImport.mll", quiet=True) file_url = self.prepare_root_value(self.fname, - context["project"]["code"]) + context["project"]["name"]) nodes = cmds.file(file_url, namespace=namespace, sharedReferenceFile=False, @@ -153,7 +153,9 @@ class ReferenceLoader(openpype.hosts.maya.api.plugin.ReferenceLoader): self.log.info("Creating subset: {}".format(namespace)) # Create the animation instance - creator_plugin = get_creator_by_name(self.animation_creator_name) + creator_plugin = get_legacy_creator_by_name( + self.animation_creator_name + ) with maintained_selection(): cmds.select([output, controls] + roots, noExpand=True) legacy_create( diff --git a/openpype/hosts/maya/plugins/load/load_yeti_rig.py b/openpype/hosts/maya/plugins/load/load_yeti_rig.py index 241c28467a..4b730ad2c1 100644 --- a/openpype/hosts/maya/plugins/load/load_yeti_rig.py +++ b/openpype/hosts/maya/plugins/load/load_yeti_rig.py @@ -54,7 +54,7 @@ class YetiRigLoader(openpype.hosts.maya.api.plugin.ReferenceLoader): # load rig with lib.maintained_selection(): file_url = self.prepare_root_value(self.fname, - context["project"]["code"]) + context["project"]["name"]) nodes = cmds.file(file_url, namespace=namespace, reference=True, diff --git a/openpype/hosts/maya/plugins/publish/collect_look.py b/openpype/hosts/maya/plugins/publish/collect_look.py index ec583bcce7..157be5717b 100644 --- a/openpype/hosts/maya/plugins/publish/collect_look.py +++ b/openpype/hosts/maya/plugins/publish/collect_look.py @@ -551,7 +551,9 @@ class CollectLook(pyblish.api.InstancePlugin): if cmds.getAttr(attribute, type=True) == "message": continue node_attributes[attr] = cmds.getAttr(attribute) - + # Only include if there are any properties we care about + if not node_attributes: + continue attributes.append({"name": node, "uuid": lib.get_id(node), "attributes": node_attributes}) diff --git 
a/openpype/hosts/maya/plugins/publish/collect_render.py b/openpype/hosts/maya/plugins/publish/collect_render.py index 8b911a867d..ebda5e190d 100644 --- a/openpype/hosts/maya/plugins/publish/collect_render.py +++ b/openpype/hosts/maya/plugins/publish/collect_render.py @@ -72,7 +72,6 @@ class CollectMayaRender(pyblish.api.ContextPlugin): def process(self, context): """Entry point to collector.""" render_instance = None - deadline_url = None for instance in context: if "rendering" in instance.data["families"]: @@ -96,23 +95,12 @@ class CollectMayaRender(pyblish.api.ContextPlugin): asset = legacy_io.Session["AVALON_ASSET"] workspace = context.data["workspaceDir"] - deadline_settings = ( - context.data - ["system_settings"] - ["modules"] - ["deadline"] - ) - - if deadline_settings["enabled"]: - deadline_url = render_instance.data.get("deadlineUrl") - self._rs = renderSetup.instance() - current_layer = self._rs.getVisibleRenderLayer() + # Retrieve render setup layers + rs = renderSetup.instance() maya_render_layers = { - layer.name(): layer for layer in self._rs.getRenderLayers() + layer.name(): layer for layer in rs.getRenderLayers() } - self.maya_layers = maya_render_layers - for layer in collected_render_layers: try: if layer.startswith("LAYER_"): @@ -147,49 +135,28 @@ class CollectMayaRender(pyblish.api.ContextPlugin): self.log.warning(msg) continue - # test if there are sets (subsets) to attach render to + # detect if there are sets (subsets) to attach render to sets = cmds.sets(layer, query=True) or [] attach_to = [] - if sets: - for s in sets: - if "family" not in cmds.listAttr(s): - continue + for s in sets: + if not cmds.attributeQuery("family", node=s, exists=True): + continue - attach_to.append( - { - "version": None, # we need integrator for that - "subset": s, - "family": cmds.getAttr("{}.family".format(s)), - } - ) - self.log.info(" -> attach render to: {}".format(s)) + attach_to.append( + { + "version": None, # we need integrator for that + "subset": s, + "family": cmds.getAttr("{}.family".format(s)), + } + ) + self.log.info(" -> attach render to: {}".format(s)) layer_name = "rs_{}".format(expected_layer_name) # collect all frames we are expecting to be rendered - renderer = cmds.getAttr( - "defaultRenderGlobals.currentRenderer" - ).lower() - # handle various renderman names - if renderer.startswith("renderman"): - renderer = "renderman" - - try: - aov_separator = self._aov_chars[( - context.data["project_settings"] - ["create"] - ["CreateRender"] - ["aov_separator"] - )] - except KeyError: - aov_separator = "_" - - render_instance.data["aovSeparator"] = aov_separator - # return all expected files for all cameras and aovs in given # frame range - layer_render_products = get_layer_render_products( - layer_name, render_instance) + layer_render_products = get_layer_render_products(layer_name) render_products = layer_render_products.layer_data.products assert render_products, "no render products generated" exp_files = [] @@ -226,13 +193,11 @@ class CollectMayaRender(pyblish.api.ContextPlugin): ) # append full path - full_exp_files = [] aov_dict = {} default_render_file = context.data.get('project_settings')\ .get('maya')\ - .get('create')\ - .get('CreateRender')\ - .get('default_render_image_folder') + .get('RenderSettings')\ + .get('default_render_image_folder') or "" # replace relative paths with absolute. Render products are # returned as list of dictionaries. 
publish_meta_path = None @@ -246,6 +211,7 @@ class CollectMayaRender(pyblish.api.ContextPlugin): full_paths.append(full_path) publish_meta_path = os.path.dirname(full_path) aov_dict[aov_first_key] = full_paths + full_exp_files = [aov_dict] frame_start_render = int(self.get_render_attribute( "startFrame", layer=layer_name)) @@ -269,8 +235,6 @@ class CollectMayaRender(pyblish.api.ContextPlugin): frame_start_handle = frame_start_render frame_end_handle = frame_end_render - full_exp_files.append(aov_dict) - # find common path to store metadata # so if image prefix is branching to many directories # metadata file will be located in top-most common @@ -299,16 +263,6 @@ class CollectMayaRender(pyblish.api.ContextPlugin): self.log.info("collecting layer: {}".format(layer_name)) # Get layer specific settings, might be overrides - try: - aov_separator = self._aov_chars[( - context.data["project_settings"] - ["create"] - ["CreateRender"] - ["aov_separator"] - )] - except KeyError: - aov_separator = "_" - data = { "subset": expected_layer_name, "attachTo": attach_to, @@ -357,11 +311,18 @@ class CollectMayaRender(pyblish.api.ContextPlugin): "useReferencedAovs": render_instance.data.get( "useReferencedAovs") or render_instance.data.get( "vrayUseReferencedAovs") or False, - "aovSeparator": aov_separator + "aovSeparator": layer_render_products.layer_data.aov_separator, # noqa: E501 + "renderSetupIncludeLights": render_instance.data.get( + "renderSetupIncludeLights" + ) } - if deadline_url: - data["deadlineUrl"] = deadline_url + # Collect Deadline url if Deadline module is enabled + deadline_settings = ( + context.data["system_settings"]["modules"]["deadline"] + ) + if deadline_settings["enabled"]: + data["deadlineUrl"] = render_instance.data.get("deadlineUrl") if self.sync_workfile_version: data["version"] = context.data["version"] @@ -370,19 +331,6 @@ class CollectMayaRender(pyblish.api.ContextPlugin): if instance.data['family'] == "workfile": instance.data["version"] = context.data["version"] - # Apply each user defined attribute as data - for attr in cmds.listAttr(layer, userDefined=True) or list(): - try: - value = cmds.getAttr("{}.{}".format(layer, attr)) - except Exception: - # Some attributes cannot be read directly, - # such as mesh and color attributes. These - # are considered non-essential to this - # particular publishing pipeline. - value = None - - data[attr] = value - # handle standalone renderers if render_instance.data.get("vrayScene") is True: data["families"].append("vrayscene_render") @@ -409,6 +357,7 @@ class CollectMayaRender(pyblish.api.ContextPlugin): instance = context.create_instance(expected_layer_name) instance.data["label"] = label + instance.data["farm"] = True instance.data.update(data) self.log.debug("data: {}".format(json.dumps(data, indent=4))) @@ -490,10 +439,6 @@ class CollectMayaRender(pyblish.api.ContextPlugin): return pool_a, pool_b - def _get_overrides(self, layer): - rset = self.maya_layers[layer].renderSettingsCollectionInstance() - return rset.getOverrides() - @staticmethod def get_render_attribute(attr, layer): """Get attribute from render options. 
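For context on the settings lookup in the collector above: the default render image folder is now read from the relocated "RenderSettings" section via chained .get() calls with an empty-string fallback. A minimal, hypothetical helper showing the same pattern in isolation (the helper name and the key path are illustrative only and are not part of this changeset):

def get_nested_setting(settings, keys, default=""):
    """Walk nested settings dicts, returning `default` on any missing key."""
    value = settings or {}
    for key in keys:
        if not isinstance(value, dict) or key not in value:
            return default
        value = value[key]
    return value if value is not None else default

# Roughly equivalent to the lookup used by CollectMayaRender above:
# get_nested_setting(
#     context.data.get("project_settings"),
#     ["maya", "RenderSettings", "default_render_image_folder"])
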
diff --git a/openpype/hosts/maya/plugins/publish/extract_layout.py b/openpype/hosts/maya/plugins/publish/extract_layout.py new file mode 100644 index 0000000000..991217684a --- /dev/null +++ b/openpype/hosts/maya/plugins/publish/extract_layout.py @@ -0,0 +1,146 @@ +import math +import os +import json + +from maya import cmds +from maya.api import OpenMaya as om + +from bson.objectid import ObjectId + +from openpype.pipeline import legacy_io +import openpype.api + + +class ExtractLayout(openpype.api.Extractor): + """Extract a layout.""" + + label = "Extract Layout" + hosts = ["maya"] + families = ["layout"] + optional = True + + def process(self, instance): + # Define extract output file path + stagingdir = self.staging_dir(instance) + + # Perform extraction + self.log.info("Performing extraction..") + + if "representations" not in instance.data: + instance.data["representations"] = [] + + json_data = [] + + for asset in cmds.sets(str(instance), query=True): + # Find the container + grp_name = asset.split(':')[0] + containers = cmds.ls(f"{grp_name}*_CON") + + assert len(containers) == 1, \ + f"More than one container found for {asset}" + + container = containers[0] + + representation_id = cmds.getAttr(f"{container}.representation") + + representation = legacy_io.find_one( + { + "type": "representation", + "_id": ObjectId(representation_id) + }, projection={"parent": True, "context.family": True}) + + self.log.info(representation) + + version_id = representation.get("parent") + family = representation.get("context").get("family") + + json_element = { + "family": family, + "instance_name": cmds.getAttr(f"{container}.name"), + "representation": str(representation_id), + "version": str(version_id) + } + + loc = cmds.xform(asset, query=True, translation=True) + rot = cmds.xform(asset, query=True, rotation=True, euler=True) + scl = cmds.xform(asset, query=True, relative=True, scale=True) + + json_element["transform"] = { + "translation": { + "x": loc[0], + "y": loc[1], + "z": loc[2] + }, + "rotation": { + "x": math.radians(rot[0]), + "y": math.radians(rot[1]), + "z": math.radians(rot[2]) + }, + "scale": { + "x": scl[0], + "y": scl[1], + "z": scl[2] + } + } + + row_length = 4 + t_matrix_list = cmds.xform(asset, query=True, matrix=True) + + transform_mm = om.MMatrix(t_matrix_list) + transform = om.MTransformationMatrix(transform_mm) + + t = transform.translation(om.MSpace.kWorld) + t = om.MVector(t.x, t.z, -t.y) + transform.setTranslation(t, om.MSpace.kWorld) + transform.rotateBy( + om.MEulerRotation(math.radians(-90), 0, 0), om.MSpace.kWorld) + transform.scaleBy([1.0, 1.0, -1.0], om.MSpace.kObject) + + t_matrix_list = list(transform.asMatrix()) + + t_matrix = [] + for i in range(0, len(t_matrix_list), row_length): + t_matrix.append(t_matrix_list[i:i + row_length]) + + json_element["transform_matrix"] = [] + for row in t_matrix: + json_element["transform_matrix"].append(list(row)) + + basis_list = [ + 1, 0, 0, 0, + 0, 1, 0, 0, + 0, 0, -1, 0, + 0, 0, 0, 1 + ] + + basis_mm = om.MMatrix(basis_list) + basis = om.MTransformationMatrix(basis_mm) + + b_matrix_list = list(basis.asMatrix()) + b_matrix = [] + + for i in range(0, len(b_matrix_list), row_length): + b_matrix.append(b_matrix_list[i:i + row_length]) + + json_element["basis"] = [] + for row in b_matrix: + json_element["basis"].append(list(row)) + + json_data.append(json_element) + + json_filename = "{}.json".format(instance.name) + json_path = os.path.join(stagingdir, json_filename) + + with open(json_path, "w+") as file: + json.dump(json_data, 
fp=file, indent=2) + + json_representation = { + 'name': 'json', + 'ext': 'json', + 'files': json_filename, + "stagingDir": stagingdir, + } + instance.data["representations"].append(json_representation) + + self.log.info("Extracted instance '%s' to: %s", + instance.name, json_representation) diff --git a/openpype/hosts/maya/plugins/publish/extract_look.py b/openpype/hosts/maya/plugins/publish/extract_look.py index d35b529c76..ce3b265566 100644 --- a/openpype/hosts/maya/plugins/publish/extract_look.py +++ b/openpype/hosts/maya/plugins/publish/extract_look.py @@ -27,6 +27,31 @@ def escape_space(path): return '"{}"'.format(path) if " " in path else path +def get_ocio_config_path(profile_folder): + """Path to OpenPype vendorized OCIO. + + Vendorized OCIO config file path is grabbed from the specific path + hierarchy specified below. + + "{OPENPYPE_ROOT}/vendor/OpenColorIO-Configs/{profile_folder}/config.ocio" + Args: + profile_folder (str): Name of folder to grab config file from. + + Returns: + str: Path to vendorized config file. + """ + + return os.path.join( + os.environ["OPENPYPE_ROOT"], + "vendor", + "bin", + "ocioconfig", + "OpenColorIOConfigs", + profile_folder, + "config.ocio" + ) + + def find_paths_by_hash(texture_hash): """Find the texture hash key in the dictionary. @@ -79,10 +104,11 @@ def maketx(source, destination, *args): # use oiio-optimized settings for tile-size, planarconfig, metadata "--oiio", "--filter lanczos3", + escape_space(source) ] cmd.extend(args) - cmd.extend(["-o", escape_space(destination), escape_space(source)]) + cmd.extend(["-o", escape_space(destination)]) cmd = " ".join(cmd) @@ -405,7 +431,19 @@ class ExtractLook(openpype.api.Extractor): # node doesn't have color space attribute color_space = "Raw" else: - if files_metadata[source]["color_space"] == "Raw": + # get the resolved files + metadata = files_metadata.get(source) + # if the files are unresolved from `source` + # assume color space from the first file of + # the resource + if not metadata: + first_file = next(iter(resource.get( + "files", [])), None) + if not first_file: + continue + first_filepath = os.path.normpath(first_file) + metadata = files_metadata[first_filepath] + if metadata["color_space"] == "Raw": # set color space to raw if we linearized it color_space = "Raw" # Remap file node filename to destination @@ -493,6 +531,8 @@ class ExtractLook(openpype.api.Extractor): else: colorconvert = "" + config_path = get_ocio_config_path("nuke-default") + color_config = "--colorconfig {0}".format(config_path) # Ensure folder exists if not os.path.exists(os.path.dirname(converted)): os.makedirs(os.path.dirname(converted)) @@ -502,10 +542,11 @@ class ExtractLook(openpype.api.Extractor): filepath, converted, # Include `source-hash` as string metadata - "-sattrib", + "--sattrib", "sourceHash", escape_space(texture_hash), colorconvert, + color_config ) return converted, COPY, texture_hash diff --git a/openpype/hosts/maya/plugins/publish/extract_playblast.py b/openpype/hosts/maya/plugins/publish/extract_playblast.py index 233a0b60c2..54ef09e060 100644 --- a/openpype/hosts/maya/plugins/publish/extract_playblast.py +++ b/openpype/hosts/maya/plugins/publish/extract_playblast.py @@ -128,8 +128,10 @@ class ExtractPlayblast(openpype.api.Extractor): # Update preset with current panel setting # if override_viewport_options is turned off if not override_viewport_options: + panel = cmds.getPanel(with_focus=True) panel_preset = capture.parse_active_view() preset.update(panel_preset) + cmds.setFocus(panel) path = 
capture.capture(**preset) diff --git a/openpype/hosts/maya/plugins/publish/extract_thumbnail.py b/openpype/hosts/maya/plugins/publish/extract_thumbnail.py index 4f28aa167c..01980578cf 100644 --- a/openpype/hosts/maya/plugins/publish/extract_thumbnail.py +++ b/openpype/hosts/maya/plugins/publish/extract_thumbnail.py @@ -100,6 +100,13 @@ class ExtractThumbnail(openpype.api.Extractor): # camera. if preset.pop("isolate_view", False) and instance.data.get("isolate"): preset["isolate"] = instance.data["setMembers"] + + # Show or Hide Image Plane + image_plane = instance.data.get("imagePlane", True) + if "viewport_options" in preset: + preset["viewport_options"]["imagePlane"] = image_plane + else: + preset["viewport_options"] = {"imagePlane": image_plane} with lib.maintained_time(): # Force viewer to False in call to capture because we have our own @@ -110,14 +117,17 @@ class ExtractThumbnail(openpype.api.Extractor): # Update preset with current panel setting # if override_viewport_options is turned off if not override_viewport_options: + panel = cmds.getPanel(with_focus=True) panel_preset = capture.parse_active_view() preset.update(panel_preset) + cmds.setFocus(panel) path = capture.capture(**preset) playblast = self._fix_playblast_output_path(path) _, thumbnail = os.path.split(playblast) + self.log.info("file list {}".format(thumbnail)) if "representations" not in instance.data: diff --git a/openpype/hosts/maya/plugins/publish/validate_look_contents.py b/openpype/hosts/maya/plugins/publish/validate_look_contents.py index 443a0ad719..b1e1d5416b 100644 --- a/openpype/hosts/maya/plugins/publish/validate_look_contents.py +++ b/openpype/hosts/maya/plugins/publish/validate_look_contents.py @@ -78,14 +78,13 @@ class ValidateLookContents(pyblish.api.InstancePlugin): # Check if attributes are on a node with an ID, crucial for rebuild! for attr_changes in lookdata["attributes"]: - if not attr_changes["uuid"]: + if not attr_changes["uuid"] and not attr_changes["attributes"]: cls.log.error("Node '%s' has no cbId, please set the " "attributes to its children if it has any" % attr_changes["name"]) invalid.add(instance.name) return list(invalid) - @classmethod def validate_looks(cls, instance): diff --git a/openpype/hosts/maya/plugins/publish/validate_maya_units.py b/openpype/hosts/maya/plugins/publish/validate_maya_units.py index d5a8c350d5..5f67adec76 100644 --- a/openpype/hosts/maya/plugins/publish/validate_maya_units.py +++ b/openpype/hosts/maya/plugins/publish/validate_maya_units.py @@ -2,8 +2,8 @@ import maya.cmds as cmds import pyblish.api import openpype.api -from openpype import lib import openpype.hosts.maya.api.lib as mayalib +from openpype.pipeline.context_tools import get_current_project_asset from math import ceil @@ -41,7 +41,9 @@ class ValidateMayaUnits(pyblish.api.ContextPlugin): # now flooring the value? 
fps = float_round(context.data.get('fps'), 2, ceil) - asset_fps = lib.get_asset()["data"]["fps"] + # TODO repace query with using 'context.data["assetEntity"]' + asset_doc = get_current_project_asset() + asset_fps = asset_doc["data"]["fps"] self.log.info('Units (linear): {0}'.format(linearunits)) self.log.info('Units (angular): {0}'.format(angularunits)) @@ -91,5 +93,7 @@ class ValidateMayaUnits(pyblish.api.ContextPlugin): cls.log.debug(current_linear) cls.log.info("Setting time unit to match project") - asset_fps = lib.get_asset()["data"]["fps"] + # TODO repace query with using 'context.data["assetEntity"]' + asset_doc = get_current_project_asset() + asset_fps = asset_doc["data"]["fps"] mayalib.set_scene_fps(asset_fps) diff --git a/openpype/hosts/maya/plugins/publish/validate_model_name.py b/openpype/hosts/maya/plugins/publish/validate_model_name.py index 50acf2b8b7..02107d5732 100644 --- a/openpype/hosts/maya/plugins/publish/validate_model_name.py +++ b/openpype/hosts/maya/plugins/publish/validate_model_name.py @@ -10,7 +10,7 @@ from openpype.pipeline import legacy_io import openpype.hosts.maya.api.action from openpype.hosts.maya.api.shader_definition_editor import ( DEFINITION_FILENAME) -from openpype.lib.mongo import OpenPypeMongoConnection +from openpype.client.mongo import OpenPypeMongoConnection import gridfs diff --git a/openpype/hosts/maya/plugins/publish/validate_render_image_rule.py b/openpype/hosts/maya/plugins/publish/validate_render_image_rule.py index 642ca9e25d..4d3796e429 100644 --- a/openpype/hosts/maya/plugins/publish/validate_render_image_rule.py +++ b/openpype/hosts/maya/plugins/publish/validate_render_image_rule.py @@ -1,17 +1,15 @@ -import maya.mel as mel -import pymel.core as pm +from maya import cmds import pyblish.api import openpype.api -def get_file_rule(rule): - """Workaround for a bug in python with cmds.workspace""" - return mel.eval('workspace -query -fileRuleEntry "{}"'.format(rule)) - - class ValidateRenderImageRule(pyblish.api.InstancePlugin): - """Validates "images" file rule is set to "renders/" + """Validates Maya Workpace "images" file rule matches project settings. + + This validates against the configured default render image folder: + Studio Settings > Project > Maya > + Render Settings > Default render image folder. """ @@ -23,24 +21,29 @@ class ValidateRenderImageRule(pyblish.api.InstancePlugin): def process(self, instance): - default_render_file = self.get_default_render_image_folder(instance) + required_images_rule = self.get_default_render_image_folder(instance) + current_images_rule = cmds.workspace(fileRuleEntry="images") - assert get_file_rule("images") == default_render_file, ( - "Workspace's `images` file rule must be set to: {}".format( - default_render_file + assert current_images_rule == required_images_rule, ( + "Invalid workspace `images` file rule value: '{}'. 
" + "Must be set to: '{}'".format( + current_images_rule, required_images_rule ) ) @classmethod def repair(cls, instance): - default = cls.get_default_render_image_folder(instance) - pm.workspace.fileRules["images"] = default - pm.system.Workspace.save() + + required_images_rule = cls.get_default_render_image_folder(instance) + current_images_rule = cmds.workspace(fileRuleEntry="images") + + if current_images_rule != required_images_rule: + cmds.workspace(fileRule=("images", required_images_rule)) + cmds.workspace(saveWorkspace=True) @staticmethod def get_default_render_image_folder(instance): return instance.context.data.get('project_settings')\ .get('maya') \ - .get('create') \ - .get('CreateRender') \ + .get('RenderSettings') \ .get('default_render_image_folder') diff --git a/openpype/hosts/maya/plugins/publish/validate_render_single_camera.py b/openpype/hosts/maya/plugins/publish/validate_render_single_camera.py index e6c6ef6c9e..35b87fd0ab 100644 --- a/openpype/hosts/maya/plugins/publish/validate_render_single_camera.py +++ b/openpype/hosts/maya/plugins/publish/validate_render_single_camera.py @@ -1,20 +1,11 @@ import re import pyblish.api -import openpype.api -import openpype.hosts.maya.api.action - from maya import cmds - -ImagePrefixes = { - 'mentalray': 'defaultRenderGlobals.imageFilePrefix', - 'vray': 'vraySettings.fileNamePrefix', - 'arnold': 'defaultRenderGlobals.imageFilePrefix', - 'renderman': 'defaultRenderGlobals.imageFilePrefix', - 'redshift': 'defaultRenderGlobals.imageFilePrefix', - 'mayahardware2': 'defaultRenderGlobals.imageFilePrefix', -} +import openpype.api +import openpype.hosts.maya.api.action +from openpype.hosts.maya.api.render_settings import RenderSettings class ValidateRenderSingleCamera(pyblish.api.InstancePlugin): @@ -47,7 +38,11 @@ class ValidateRenderSingleCamera(pyblish.api.InstancePlugin): # handle various renderman names if renderer.startswith('renderman'): renderer = 'renderman' - file_prefix = cmds.getAttr(ImagePrefixes[renderer]) + + file_prefix = cmds.getAttr( + RenderSettings.get_image_prefix_attr(renderer) + ) + if len(cameras) > 1: if re.search(cls.R_CAMERA_TOKEN, file_prefix): diff --git a/openpype/hosts/maya/plugins/publish/validate_rendersettings.py b/openpype/hosts/maya/plugins/publish/validate_rendersettings.py index 1dab3274a0..f19c0bff36 100644 --- a/openpype/hosts/maya/plugins/publish/validate_rendersettings.py +++ b/openpype/hosts/maya/plugins/publish/validate_rendersettings.py @@ -242,6 +242,14 @@ class ValidateRenderSettings(pyblish.api.InstancePlugin): instance.context.data["project_settings"]["maya"]["publish"]["ValidateRenderSettings"].get( # noqa: E501 "{}_render_attributes".format(renderer)) or [] ) + settings_lights_flag = instance.context.data["project_settings"].get( + "maya", {}).get( + "RenderSettings", {}).get( + "enable_all_lights", False) + + instance_lights_flag = instance.data.get("renderSetupIncludeLights") + if settings_lights_flag != instance_lights_flag: + cls.log.warning('Instance flag for "Render Setup Include Lights" is set to {0} and Settings flag is set to {1}'.format(instance_lights_flag, settings_lights_flag)) # noqa # go through definitions and test if such node.attribute exists. # if so, compare its value from the one required. 
diff --git a/openpype/hosts/maya/plugins/publish/validate_review_subset_uniqueness.py b/openpype/hosts/maya/plugins/publish/validate_review_subset_uniqueness.py index d70096ee45..04cc9ab5fb 100644 --- a/openpype/hosts/maya/plugins/publish/validate_review_subset_uniqueness.py +++ b/openpype/hosts/maya/plugins/publish/validate_review_subset_uniqueness.py @@ -6,7 +6,7 @@ from openpype.pipeline import PublishXmlValidationError class ValidateReviewSubsetUniqueness(pyblish.api.ContextPlugin): - """Validates that nodes has common root.""" + """Validates that review subset has unique name.""" order = openpype.api.ValidateContentsOrder hosts = ["maya"] @@ -17,7 +17,7 @@ class ValidateReviewSubsetUniqueness(pyblish.api.ContextPlugin): subset_names = [] for instance in context: - self.log.info("instance:: {}".format(instance.data)) + self.log.debug("Instance: {}".format(instance.data)) if instance.data.get('publish'): subset_names.append(instance.data.get('subset')) diff --git a/openpype/hosts/maya/plugins/publish/validate_setdress_root.py b/openpype/hosts/maya/plugins/publish/validate_setdress_root.py index 0b4842d208..8e23a7c04f 100644 --- a/openpype/hosts/maya/plugins/publish/validate_setdress_root.py +++ b/openpype/hosts/maya/plugins/publish/validate_setdress_root.py @@ -4,8 +4,7 @@ import openpype.api class ValidateSetdressRoot(pyblish.api.InstancePlugin): - """ - """ + """Validate if set dress top root node is published.""" order = openpype.api.ValidateContentsOrder label = "SetDress Root" diff --git a/openpype/hosts/nuke/__init__.py b/openpype/hosts/nuke/__init__.py index 134a6621c4..8ab565939b 100644 --- a/openpype/hosts/nuke/__init__.py +++ b/openpype/hosts/nuke/__init__.py @@ -1,41 +1,10 @@ -import os -import platform +from .addon import ( + NUKE_ROOT_DIR, + NukeAddon, +) -def add_implementation_envs(env, _app): - # Add requirements to NUKE_PATH - pype_root = os.environ["OPENPYPE_REPOS_ROOT"] - new_nuke_paths = [ - os.path.join(pype_root, "openpype", "hosts", "nuke", "startup") - ] - old_nuke_path = env.get("NUKE_PATH") or "" - for path in old_nuke_path.split(os.pathsep): - if not path: - continue - - norm_path = os.path.normpath(path) - if norm_path not in new_nuke_paths: - new_nuke_paths.append(norm_path) - - env["NUKE_PATH"] = os.pathsep.join(new_nuke_paths) - env.pop("QT_AUTO_SCREEN_SCALE_FACTOR", None) - - # Try to add QuickTime to PATH - quick_time_path = "C:/Program Files (x86)/QuickTime/QTSystem" - if platform.system() == "windows" and os.path.exists(quick_time_path): - path_value = env.get("PATH") or "" - path_paths = [ - path - for path in path_value.split(os.pathsep) - if path - ] - path_paths.append(quick_time_path) - env["PATH"] = os.pathsep.join(path_paths) - - # Set default values if are not already set via settings - defaults = { - "LOGLEVEL": "DEBUG" - } - for key, value in defaults.items(): - if not env.get(key): - env[key] = value +__all__ = ( + "NUKE_ROOT_DIR", + "NukeAddon", +) diff --git a/openpype/hosts/nuke/addon.py b/openpype/hosts/nuke/addon.py new file mode 100644 index 0000000000..54e4da5195 --- /dev/null +++ b/openpype/hosts/nuke/addon.py @@ -0,0 +1,63 @@ +import os +import platform +from openpype.modules import OpenPypeModule +from openpype.modules.interfaces import IHostAddon + +NUKE_ROOT_DIR = os.path.dirname(os.path.abspath(__file__)) + + +class NukeAddon(OpenPypeModule, IHostAddon): + name = "nuke" + host_name = "nuke" + + def initialize(self, module_settings): + self.enabled = True + + def add_implementation_envs(self, env, _app): + # Add requirements to 
NUKE_PATH + new_nuke_paths = [ + os.path.join(NUKE_ROOT_DIR, "startup") + ] + old_nuke_path = env.get("NUKE_PATH") or "" + for path in old_nuke_path.split(os.pathsep): + if not path: + continue + + norm_path = os.path.normpath(path) + if norm_path not in new_nuke_paths: + new_nuke_paths.append(norm_path) + + env["NUKE_PATH"] = os.pathsep.join(new_nuke_paths) + env.pop("QT_AUTO_SCREEN_SCALE_FACTOR", None) + + # Add vendor to PYTHONPATH + python_path = env["PYTHONPATH"] + python_path_parts = [] + if python_path: + python_path_parts = python_path.split(os.pathsep) + vendor_path = os.path.join(NUKE_ROOT_DIR, "vendor") + python_path_parts.insert(0, vendor_path) + env["PYTHONPATH"] = os.pathsep.join(python_path_parts) + + # Set default values if are not already set via settings + defaults = { + "LOGLEVEL": "DEBUG" + } + for key, value in defaults.items(): + if not env.get(key): + env[key] = value + + # Try to add QuickTime to PATH + quick_time_path = "C:/Program Files (x86)/QuickTime/QTSystem" + if platform.system() == "windows" and os.path.exists(quick_time_path): + path_value = env.get("PATH") or "" + path_paths = [ + path + for path in path_value.split(os.pathsep) + if path + ] + path_paths.append(quick_time_path) + env["PATH"] = os.pathsep.join(path_paths) + + def get_workfile_extensions(self): + return [".nk"] diff --git a/openpype/hosts/nuke/api/__init__.py b/openpype/hosts/nuke/api/__init__.py index b126de4ceb..962f31c177 100644 --- a/openpype/hosts/nuke/api/__init__.py +++ b/openpype/hosts/nuke/api/__init__.py @@ -26,8 +26,8 @@ from .lib import ( maintained_selection, reset_selection, get_view_process_node, - duplicate_node - + duplicate_node, + convert_knob_value_to_correct_type ) from .utils import ( @@ -59,6 +59,7 @@ __all__ = ( "reset_selection", "get_view_process_node", "duplicate_node", + "convert_knob_value_to_correct_type", "colorspace_exists_on_node", "get_colorspace_list" diff --git a/openpype/hosts/nuke/api/lib.py b/openpype/hosts/nuke/api/lib.py index 9b24c9fb38..b14f1a1eb1 100644 --- a/openpype/hosts/nuke/api/lib.py +++ b/openpype/hosts/nuke/api/lib.py @@ -21,10 +21,7 @@ from openpype.client import ( ) from openpype.api import ( Logger, - BuildWorkfile, get_version_from_path, - get_workdir_data, - get_asset, get_current_project_settings, ) from openpype.tools.utils import host_tools @@ -35,11 +32,17 @@ from openpype.settings import ( get_anatomy_settings, ) from openpype.modules import ModulesManager +from openpype.pipeline.template_data import get_template_data_with_names from openpype.pipeline import ( discover_legacy_creator_plugins, legacy_io, Anatomy, ) +from openpype.pipeline.context_tools import ( + get_current_project_asset, + get_custom_workfile_template_from_session +) +from openpype.pipeline.workfile import BuildWorkfile from . 
import gizmo_menu @@ -910,19 +913,17 @@ def get_render_path(node): ''' Generate Render path from presets regarding avalon knob data ''' avalon_knob_data = read_avalon_data(node) - data = {'avalon': avalon_knob_data} nuke_imageio_writes = get_imageio_node_setting( - node_class=avalon_knob_data["family"], + node_class=avalon_knob_data["families"], plugin_name=avalon_knob_data["creator"], subset=avalon_knob_data["subset"] ) - host_name = os.environ.get("AVALON_APP") - data.update({ - "app": host_name, + data = { + "avalon": avalon_knob_data, "nuke_imageio_writes": nuke_imageio_writes - }) + } anatomy_filled = format_anatomy(data) return anatomy_filled["render"]["path"].replace("\\", "/") @@ -965,12 +966,11 @@ def format_anatomy(data): data["version"] = get_version_from_path(file) project_name = anatomy.project_name - project_doc = get_project(project_name) - asset_doc = get_asset_by_name(project_name, data["avalon"]["asset"]) + asset_name = data["avalon"]["asset"] task_name = os.environ["AVALON_TASK"] host_name = os.environ["AVALON_APP"] - context_data = get_workdir_data( - project_doc, asset_doc, task_name, host_name + context_data = get_template_data_with_names( + project_name, asset_name, task_name, host_name ) data.update(context_data) data.update({ @@ -1128,10 +1128,8 @@ def create_write_node( if knob["name"] == "file_type": representation = knob["value"] - host_name = os.environ.get("AVALON_APP") try: data.update({ - "app": host_name, "imageio_writes": imageio_writes, "representation": representation, }) @@ -1595,28 +1593,35 @@ def set_node_knobs_from_settings(node, knob_settings, **kwargs): if not knob_value: continue - # first convert string types to string - # just to ditch unicode - if isinstance(knob_value, six.text_type): - knob_value = str(knob_value) - - # set correctly knob types - if knob_type == "bool": - knob_value = bool(knob_value) - elif knob_type == "decimal_number": - knob_value = float(knob_value) - elif knob_type == "number": - knob_value = int(knob_value) - elif knob_type == "text": - knob_value = knob_value - elif knob_type == "color_gui": - knob_value = color_gui_to_int(knob_value) - elif knob_type in ["2d_vector", "3d_vector", "color"]: - knob_value = [float(v) for v in knob_value] + knob_value = convert_knob_value_to_correct_type( + knob_type, knob_value) node[knob_name].setValue(knob_value) +def convert_knob_value_to_correct_type(knob_type, knob_value): + # first convert string types to string + # just to ditch unicode + if isinstance(knob_value, six.text_type): + knob_value = str(knob_value) + + # set correctly knob types + if knob_type == "bool": + knob_value = bool(knob_value) + elif knob_type == "decimal_number": + knob_value = float(knob_value) + elif knob_type == "number": + knob_value = int(knob_value) + elif knob_type == "text": + knob_value = knob_value + elif knob_type == "color_gui": + knob_value = color_gui_to_int(knob_value) + elif knob_type in ["2d_vector", "3d_vector", "color"]: + knob_value = [float(v) for v in knob_value] + + return knob_value + + def color_gui_to_int(color_gui): hex_value = ( "0x{0:0>2x}{1:0>2x}{2:0>2x}{3:0>2x}").format(*color_gui) @@ -1766,7 +1771,7 @@ class WorkfileSettings(object): kwargs.get("asset_name") or legacy_io.Session["AVALON_ASSET"] ) - self._asset_entity = get_asset(self._asset) + self._asset_entity = get_current_project_asset(self._asset) self._root_node = root_node or nuke.root() self._nodes = self.get_nodes(nodes=nodes) @@ -1925,7 +1930,7 @@ class WorkfileSettings(object): 
families.append(avalon_knob_data.get("families")) nuke_imageio_writes = get_imageio_node_setting( - node_class=avalon_knob_data["family"], + node_class=avalon_knob_data["families"], plugin_name=avalon_knob_data["creator"], subset=avalon_knob_data["subset"] ) @@ -1947,15 +1952,25 @@ class WorkfileSettings(object): if not write_node: return - # write all knobs to node - for knob in nuke_imageio_writes["knobs"]: - value = knob["value"] - if isinstance(value, six.text_type): - value = str(value) - if str(value).startswith("0x"): - value = int(value, 16) + try: + # write all knobs to node + for knob in nuke_imageio_writes["knobs"]: + value = knob["value"] + if isinstance(value, six.text_type): + value = str(value) + if str(value).startswith("0x"): + value = int(value, 16) - write_node[knob["name"]].setValue(value) + log.debug("knob: {}| value: {}".format( + knob["name"], value + )) + write_node[knob["name"]].setValue(value) + except TypeError: + log.warning( + "Legacy workflow didnt work, switching to current") + + set_node_knobs_from_settings( + write_node, nuke_imageio_writes["knobs"]) def set_reads_colorspace(self, read_clrs_inputs): """ Setting colorspace to Read nodes @@ -2012,12 +2027,14 @@ class WorkfileSettings(object): # get imageio nuke_colorspace = get_nuke_imageio_settings() + log.info("Setting colorspace to workfile...") try: self.set_root_colorspace(nuke_colorspace["workfile"]) except AttributeError: msg = "set_colorspace(): missing `workfile` settings in template" nuke.message(msg) + log.info("Setting colorspace to viewers...") try: self.set_viewers_colorspace(nuke_colorspace["viewer"]) except AttributeError: @@ -2025,24 +2042,18 @@ class WorkfileSettings(object): nuke.message(msg) log.error(msg) + log.info("Setting colorspace to write nodes...") try: self.set_writes_colorspace() except AttributeError as _error: nuke.message(_error) log.error(_error) + log.info("Setting colorspace to read nodes...") read_clrs_inputs = nuke_colorspace["regexInputs"].get("inputs", []) if read_clrs_inputs: self.set_reads_colorspace(read_clrs_inputs) - try: - for key in nuke_colorspace: - log.debug("Preset's colorspace key: {}".format(key)) - except TypeError: - msg = "Nuke is not in templates! Contact your supervisor!" - nuke.message(msg) - log.error(msg) - def reset_frame_range_handles(self): """Set frame range to current asset""" @@ -2224,15 +2235,14 @@ def get_write_node_template_attr(node): avalon_knob_data = read_avalon_data(node) # get template data nuke_imageio_writes = get_imageio_node_setting( - node_class=avalon_knob_data["family"], + node_class=avalon_knob_data["families"], plugin_name=avalon_knob_data["creator"], subset=avalon_knob_data["subset"] ) + # collecting correct data - correct_data = OrderedDict({ - "file": get_render_path(node) - }) + correct_data = OrderedDict() # adding imageio knob presets for k, v in nuke_imageio_writes.items(): @@ -2449,15 +2459,12 @@ def _launch_workfile_app(): def process_workfile_builder(): - from openpype.lib import ( - env_value_to_bool, - get_custom_workfile_template - ) # to avoid looping of the callback, remove it! 
nuke.removeOnCreate(process_workfile_builder, nodeClass="Root") # get state from settings - workfile_builder = get_current_project_settings()["nuke"].get( + project_settings = get_current_project_settings() + workfile_builder = project_settings["nuke"].get( "workfile_builder", {}) # get all imortant settings @@ -2467,7 +2474,6 @@ def process_workfile_builder(): # get settings createfv_on = workfile_builder.get("create_first_version") or None - custom_templates = workfile_builder.get("custom_templates") or None builder_on = workfile_builder.get("builder_on_start") or None last_workfile_path = os.environ.get("AVALON_LAST_WORKFILE") @@ -2475,8 +2481,8 @@ def process_workfile_builder(): # generate first version in file not existing and feature is enabled if createfv_on and not os.path.exists(last_workfile_path): # get custom template path if any - custom_template_path = get_custom_workfile_template( - custom_templates + custom_template_path = get_custom_workfile_template_from_session( + project_settings=project_settings ) # if custom template is defined diff --git a/openpype/hosts/nuke/api/pipeline.py b/openpype/hosts/nuke/api/pipeline.py index 0afc56d2f7..c1cd8f771a 100644 --- a/openpype/hosts/nuke/api/pipeline.py +++ b/openpype/hosts/nuke/api/pipeline.py @@ -9,7 +9,6 @@ import pyblish.api import openpype from openpype.api import ( Logger, - BuildWorkfile, get_current_project_settings ) from openpype.lib import register_event_callback @@ -22,6 +21,7 @@ from openpype.pipeline import ( deregister_inventory_action_path, AVALON_CONTAINER_ID, ) +from openpype.pipeline.workfile import BuildWorkfile from openpype.tools.utils import host_tools from .command import viewer_update_and_undo_stop diff --git a/openpype/hosts/nuke/api/plugin.py b/openpype/hosts/nuke/api/plugin.py index 925cab0bef..37ce03dc55 100644 --- a/openpype/hosts/nuke/api/plugin.py +++ b/openpype/hosts/nuke/api/plugin.py @@ -181,8 +181,6 @@ class ExporterReview(object): # get first and last frame self.first_frame = min(self.collection.indexes) self.last_frame = max(self.collection.indexes) - if "slate" in self.instance.data["families"]: - self.first_frame += 1 else: self.fname = os.path.basename(self.path_in) self.fhead = os.path.splitext(self.fname)[0] + "." 
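The lib.py changes above factor the knob type handling out of `set_node_knobs_from_settings` into the new `convert_knob_value_to_correct_type` helper, and `set_writes_colorspace` now falls back to that same path when its legacy knob loop raises a `TypeError`. Below is a Nuke-free sketch of the conversion rules with invented sample values; the production helper additionally normalizes unicode through `six.text_type` and converts `color_gui` values with `color_gui_to_int`, both left out here to keep the sketch dependency free:

```python
def convert_knob_value(knob_type, knob_value):
    """Mirror the plain-type rules of convert_knob_value_to_correct_type."""
    if knob_type == "bool":
        return bool(knob_value)
    if knob_type == "decimal_number":
        return float(knob_value)
    if knob_type == "number":
        return int(knob_value)
    if knob_type in ("2d_vector", "3d_vector", "color"):
        return [float(v) for v in knob_value]
    # "text" and any unknown type pass through unchanged.
    return knob_value


# Values as they arrive from settings vs. what gets set on the knob:
print(convert_knob_value("number", "25"))           # 25
print(convert_knob_value("decimal_number", "1.5"))  # 1.5
print(convert_knob_value("2d_vector", ["2", "4"]))  # [2.0, 4.0]
```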
diff --git a/openpype/hosts/nuke/api/workio.py b/openpype/hosts/nuke/api/workio.py index 68fcb0927f..65b86bf01b 100644 --- a/openpype/hosts/nuke/api/workio.py +++ b/openpype/hosts/nuke/api/workio.py @@ -2,11 +2,9 @@ import os import nuke -from openpype.pipeline import HOST_WORKFILE_EXTENSIONS - def file_extensions(): - return HOST_WORKFILE_EXTENSIONS["nuke"] + return [".nk"] def has_unsaved_changes(): diff --git a/openpype/hosts/nuke/plugins/load/actions.py b/openpype/hosts/nuke/plugins/load/actions.py index d364a4f3a1..69f56c7305 100644 --- a/openpype/hosts/nuke/plugins/load/actions.py +++ b/openpype/hosts/nuke/plugins/load/actions.py @@ -2,10 +2,10 @@ """ -from openpype.api import Logger +from openpype.lib import Logger from openpype.pipeline import load -log = Logger().get_logger(__name__) +log = Logger.get_logger(__name__) class SetFrameRangeLoader(load.LoaderPlugin): diff --git a/openpype/hosts/nuke/plugins/load/load_clip.py b/openpype/hosts/nuke/plugins/load/load_clip.py index b2dc4a52d7..346773b5af 100644 --- a/openpype/hosts/nuke/plugins/load/load_clip.py +++ b/openpype/hosts/nuke/plugins/load/load_clip.py @@ -162,7 +162,15 @@ class LoadClip(plugin.NukeLoader): data_imprint = {} for k in add_keys: if k == 'version': - data_imprint[k] = context["version"]['name'] + version_doc = context["version"] + if version_doc["type"] == "hero_version": + version = "hero" + else: + version = version_doc.get("name") + + if version: + data_imprint[k] = version + elif k == 'colorspace': colorspace = repre["data"].get(k) colorspace = colorspace or version_data.get(k) diff --git a/openpype/hosts/nuke/plugins/publish/collect_slate_node.py b/openpype/hosts/nuke/plugins/publish/collect_slate_node.py index 4257ed3131..bfe32d8fd1 100644 --- a/openpype/hosts/nuke/plugins/publish/collect_slate_node.py +++ b/openpype/hosts/nuke/plugins/publish/collect_slate_node.py @@ -33,6 +33,7 @@ class CollectSlate(pyblish.api.InstancePlugin): if slate_node: instance.data["slateNode"] = slate_node + instance.data["slate"] = True instance.data["families"].append("slate") instance.data["versionData"]["families"].append("slate") self.log.info( diff --git a/openpype/hosts/nuke/plugins/publish/extract_render_local.py b/openpype/hosts/nuke/plugins/publish/extract_render_local.py index 1595fe03fb..8879f0c999 100644 --- a/openpype/hosts/nuke/plugins/publish/extract_render_local.py +++ b/openpype/hosts/nuke/plugins/publish/extract_render_local.py @@ -31,10 +31,6 @@ class NukeRenderLocal(openpype.api.Extractor): first_frame = instance.data.get("frameStartHandle", None) - # exception for slate workflow - if "slate" in families: - first_frame -= 1 - last_frame = instance.data.get("frameEndHandle", None) node_subset_name = instance.data.get("name", None) @@ -68,10 +64,6 @@ class NukeRenderLocal(openpype.api.Extractor): int(last_frame) ) - # exception for slate workflow - if "slate" in families: - first_frame += 1 - ext = node["file_type"].value() if "representations" not in instance.data: @@ -88,8 +80,11 @@ class NukeRenderLocal(openpype.api.Extractor): repre = { 'name': ext, 'ext': ext, - 'frameStart': "%0{}d".format( - len(str(last_frame))) % first_frame, + 'frameStart': ( + "{{:0>{}}}" + .format(len(str(last_frame))) + .format(first_frame) + ), 'files': filenames, "stagingDir": out_dir } @@ -105,13 +100,16 @@ class NukeRenderLocal(openpype.api.Extractor): instance.data['family'] = 'render' families.remove('render.local') families.insert(0, "render2d") + instance.data["anatomyData"]["family"] = "render" elif "prerender.local" in 
families: instance.data['family'] = 'prerender' families.remove('prerender.local') families.insert(0, "prerender") + instance.data["anatomyData"]["family"] = "prerender" elif "still.local" in families: instance.data['family'] = 'image' families.remove('still.local') + instance.data["anatomyData"]["family"] = "image" instance.data["families"] = families collections, remainder = clique.assemble(filenames) @@ -123,4 +121,4 @@ class NukeRenderLocal(openpype.api.Extractor): self.log.info('Finished render') - self.log.debug("instance extracted: {}".format(instance.data)) + self.log.debug("_ instance.data: {}".format(instance.data)) diff --git a/openpype/hosts/nuke/plugins/publish/extract_slate_frame.py b/openpype/hosts/nuke/plugins/publish/extract_slate_frame.py index 99ade4cf9b..b5cad143db 100644 --- a/openpype/hosts/nuke/plugins/publish/extract_slate_frame.py +++ b/openpype/hosts/nuke/plugins/publish/extract_slate_frame.py @@ -13,6 +13,7 @@ from openpype.hosts.nuke.api import ( get_view_process_node ) + class ExtractSlateFrame(openpype.api.Extractor): """Extracts movie and thumbnail with baked in luts @@ -236,6 +237,7 @@ class ExtractSlateFrame(openpype.api.Extractor): def _render_slate_to_sequence(self, instance): # set slate frame first_frame = instance.data["frameStartHandle"] + last_frame = instance.data["frameEndHandle"] slate_first_frame = first_frame - 1 # render slate as sequence frame @@ -284,6 +286,13 @@ class ExtractSlateFrame(openpype.api.Extractor): matching_repre["files"] = [first_filename, slate_filename] elif slate_filename not in matching_repre["files"]: matching_repre["files"].insert(0, slate_filename) + matching_repre["frameStart"] = ( + "{{:0>{}}}" + .format(len(str(last_frame))) + .format(slate_first_frame) + ) + self.log.debug( + "__ matching_repre: {}".format(pformat(matching_repre))) self.log.warning("Added slate frame to representation files") diff --git a/openpype/hosts/nuke/plugins/publish/help/validate_asset_name.xml b/openpype/hosts/nuke/plugins/publish/help/validate_asset_name.xml new file mode 100644 index 0000000000..1097909a5f --- /dev/null +++ b/openpype/hosts/nuke/plugins/publish/help/validate_asset_name.xml @@ -0,0 +1,18 @@ + + + + Shot/Asset mame + +## Invalid Shot/Asset name in subset + +Following Node with name `{node_name}`: +Is in context of `{correct_name}` but Node _asset_ knob is set as `{wrong_name}`. + +### How to repair? + +1. Either use Repair or Select button. +2. If you chose Select then rename asset knob to correct name. +3. Hit Reload button on the publisher. + + + \ No newline at end of file diff --git a/openpype/hosts/nuke/plugins/publish/help/validate_backdrop.xml b/openpype/hosts/nuke/plugins/publish/help/validate_backdrop.xml new file mode 100644 index 0000000000..ab1b650773 --- /dev/null +++ b/openpype/hosts/nuke/plugins/publish/help/validate_backdrop.xml @@ -0,0 +1,36 @@ + + + + Found multiple outputs + +## Invalid output amount + +Backdrop is having more than one outgoing connections. + +### How to repair? + +1. Use button `Center node in node graph` and navigate to the backdrop. +2. Reorganize nodes the way only one outgoing connection is present. +3. Hit reload button on the publisher. + + +### How could this happen? + +More than one node, which are found above the backdrop, are linked downstream or more output connections from a node also linked downstream. + + + + Empty backdrop + +## Invalid empty backdrop + +Backdrop is empty and no nodes are found above it. + +### How to repair? + +1. 
Use button `Center node in node graph` and navigate to the backdrop. +2. Add any node above it or delete it. +3. Hit reload button on the publisher. + + + \ No newline at end of file diff --git a/openpype/hosts/nuke/plugins/publish/help/validate_gizmo.xml b/openpype/hosts/nuke/plugins/publish/help/validate_gizmo.xml new file mode 100644 index 0000000000..f39a41a4f9 --- /dev/null +++ b/openpype/hosts/nuke/plugins/publish/help/validate_gizmo.xml @@ -0,0 +1,36 @@ + + + + Found multiple outputs + +## Invalid amount of Output nodes + +Group node `{node_name}` is having more than one Output node. + +### How to repair? + +1. Use button `Open Group`. +2. Remove redundant Output node. +3. Hit reload button on the publisher. + + +### How could this happen? + +Perhaps you had created exciently more than one Output node. + + + + Missing Input nodes + +## Missing Input nodes + +Make sure there is at least one connected Input node inside the group node with name `{node_name}` + +### How to repair? + +1. Use button `Open Group`. +2. Add at least one Input node and connect to other nodes. +3. Hit reload button on the publisher. + + + \ No newline at end of file diff --git a/openpype/hosts/nuke/plugins/publish/help/validate_knobs.xml b/openpype/hosts/nuke/plugins/publish/help/validate_knobs.xml new file mode 100644 index 0000000000..76c184f653 --- /dev/null +++ b/openpype/hosts/nuke/plugins/publish/help/validate_knobs.xml @@ -0,0 +1,18 @@ + + + + Knobs value + +## Invalid node's knobs values + +Following node knobs needs to be repaired: + +{invalid_items} + +### How to repair? + +1. Use Repair button. +2. Hit Reload button on the publisher. + + + \ No newline at end of file diff --git a/openpype/hosts/nuke/plugins/publish/help/validate_output_resolution.xml b/openpype/hosts/nuke/plugins/publish/help/validate_output_resolution.xml new file mode 100644 index 0000000000..08a88a993e --- /dev/null +++ b/openpype/hosts/nuke/plugins/publish/help/validate_output_resolution.xml @@ -0,0 +1,16 @@ + + + + Output format + +## Invalid format setting + +Either the Reformat node inside of the render group is missing or the Reformat node output format knob is not set to `root.format`. + +### How to repair? + +1. Use Repair button. +2. Hit Reload button on the publisher. + + + \ No newline at end of file diff --git a/openpype/hosts/nuke/plugins/publish/help/validate_proxy_mode.xml b/openpype/hosts/nuke/plugins/publish/help/validate_proxy_mode.xml new file mode 100644 index 0000000000..6fe5d5d43e --- /dev/null +++ b/openpype/hosts/nuke/plugins/publish/help/validate_proxy_mode.xml @@ -0,0 +1,16 @@ + + + + Proxy mode + +## Invalid proxy mode value + +Nuke is set to use Proxy. This is not supported by publisher. + +### How to repair? + +1. Use Repair button. +2. Hit Reload button on the publisher. + + + \ No newline at end of file diff --git a/openpype/hosts/nuke/plugins/publish/help/validate_rendered_frames.xml b/openpype/hosts/nuke/plugins/publish/help/validate_rendered_frames.xml new file mode 100644 index 0000000000..434081c269 --- /dev/null +++ b/openpype/hosts/nuke/plugins/publish/help/validate_rendered_frames.xml @@ -0,0 +1,17 @@ + + + + Rendered Frames + +## Missing Rendered Frames + +Render node "{node_name}" is set to "Use existing frames", but frames are missing. + +### How to repair? + +1. Use Repair button. +2. Set different target. +2. Hit Reload button on the publisher. 
+ + + \ No newline at end of file diff --git a/openpype/hosts/nuke/plugins/publish/help/validate_script_attributes.xml b/openpype/hosts/nuke/plugins/publish/help/validate_script_attributes.xml new file mode 100644 index 0000000000..871fc629ce --- /dev/null +++ b/openpype/hosts/nuke/plugins/publish/help/validate_script_attributes.xml @@ -0,0 +1,18 @@ + + + + Script attributes + +## Invalid Script attributes + +Following script root attributes need to be fixed: + +{failed_attributes} + +### How to repair? + +1. Use Repair. +2. Hit Reload button on the publisher. + + + \ No newline at end of file diff --git a/openpype/hosts/nuke/plugins/publish/help/validate_write_nodes.xml b/openpype/hosts/nuke/plugins/publish/help/validate_write_nodes.xml new file mode 100644 index 0000000000..cdf85102bc --- /dev/null +++ b/openpype/hosts/nuke/plugins/publish/help/validate_write_nodes.xml @@ -0,0 +1,18 @@ + + + + Knobs values + +## Invalid node's knobs values + +Following write node knobs needs to be repaired: + +{xml_msg} + +### How to repair? + +1. Use Repair button. +2. Hit Reload button on the publisher. + + + \ No newline at end of file diff --git a/openpype/hosts/nuke/plugins/publish/precollect_instances.py b/openpype/hosts/nuke/plugins/publish/precollect_instances.py index 4b3b70fa12..b396056eb9 100644 --- a/openpype/hosts/nuke/plugins/publish/precollect_instances.py +++ b/openpype/hosts/nuke/plugins/publish/precollect_instances.py @@ -50,7 +50,7 @@ class PreCollectNukeInstances(pyblish.api.ContextPlugin): # establish families family = avalon_knob_data["family"] families_ak = avalon_knob_data.get("families", []) - families = list() + families = [] # except disabled nodes but exclude backdrops in test if ("nukenodes" not in family) and (node["disable"].value()): @@ -94,6 +94,7 @@ class PreCollectNukeInstances(pyblish.api.ContextPlugin): # Farm rendering self.log.info("flagged for farm render") instance.data["transfer"] = False + instance.data["farm"] = True families.append("{}.farm".format(family)) family = families_ak.lower() @@ -110,10 +111,10 @@ class PreCollectNukeInstances(pyblish.api.ContextPlugin): self.log.debug("__ families: `{}`".format(families)) # Get format - format = root['format'].value() - resolution_width = format.width() - resolution_height = format.height() - pixel_aspect = format.pixelAspect() + format_ = root['format'].value() + resolution_width = format_.width() + resolution_height = format_.height() + pixel_aspect = format_.pixelAspect() # get publish knob value if "publish" not in node.knobs(): @@ -124,8 +125,11 @@ class PreCollectNukeInstances(pyblish.api.ContextPlugin): self.log.debug("__ _families_test: `{}`".format(_families_test)) for family_test in _families_test: if family_test in self.sync_workfile_version_on_families: - self.log.debug("Syncing version with workfile for '{}'" - .format(family_test)) + self.log.debug( + "Syncing version with workfile for '{}'".format( + family_test + ) + ) # get version to instance for integration instance.data['version'] = instance.context.data['version'] diff --git a/openpype/hosts/nuke/plugins/publish/precollect_workfile.py b/openpype/hosts/nuke/plugins/publish/precollect_workfile.py index 7349a8f424..822f405a6f 100644 --- a/openpype/hosts/nuke/plugins/publish/precollect_workfile.py +++ b/openpype/hosts/nuke/plugins/publish/precollect_workfile.py @@ -8,6 +8,7 @@ from openpype.hosts.nuke.api.lib import ( add_publish_knob, get_avalon_knob_data ) +from openpype.pipeline import KnownPublishError class 
CollectWorkfile(pyblish.api.ContextPlugin): @@ -22,6 +23,12 @@ class CollectWorkfile(pyblish.api.ContextPlugin): current_file = os.path.normpath(nuke.root().name()) + if current_file.lower() == "root": + raise KnownPublishError( + "Workfile is not correct file name. \n" + "Use workfile tool to manage the name correctly." + ) + knob_data = get_avalon_knob_data(root) add_publish_knob(root) diff --git a/openpype/hosts/nuke/plugins/publish/precollect_writes.py b/openpype/hosts/nuke/plugins/publish/precollect_writes.py index a97f34b370..e37cc8a80a 100644 --- a/openpype/hosts/nuke/plugins/publish/precollect_writes.py +++ b/openpype/hosts/nuke/plugins/publish/precollect_writes.py @@ -144,8 +144,10 @@ class CollectNukeWrites(pyblish.api.InstancePlugin): self.log.debug("colorspace: `{}`".format(colorspace)) version_data = { - "families": [f.replace(".local", "").replace(".farm", "") - for f in _families_test if "write" not in f], + "families": [ + _f.replace(".local", "").replace(".farm", "") + for _f in _families_test if "write" != _f + ], "colorspace": colorspace } diff --git a/openpype/hosts/nuke/plugins/publish/validate_instance_in_context.py b/openpype/hosts/nuke/plugins/publish/validate_asset_name.py similarity index 74% rename from openpype/hosts/nuke/plugins/publish/validate_instance_in_context.py rename to openpype/hosts/nuke/plugins/publish/validate_asset_name.py index 842f74b6f6..7647471f8a 100644 --- a/openpype/hosts/nuke/plugins/publish/validate_instance_in_context.py +++ b/openpype/hosts/nuke/plugins/publish/validate_asset_name.py @@ -3,20 +3,17 @@ from __future__ import absolute_import import nuke - import pyblish.api import openpype.api -from openpype.hosts.nuke.api.lib import ( - recreate_instance, - reset_selection, - select_nodes -) +import openpype.hosts.nuke.api.lib as nlib +import openpype.hosts.nuke.api as nuke_api +from openpype.pipeline import PublishXmlValidationError class SelectInvalidInstances(pyblish.api.Action): """Select invalid instances in Outliner.""" - label = "Select Instances" + label = "Select" icon = "briefcase" on = "failed" @@ -39,6 +36,7 @@ class SelectInvalidInstances(pyblish.api.Action): instances = pyblish.api.instances_by_plugin(failed, plugin) if instances: + self.deselect() self.log.info( "Selecting invalid nodes: %s" % ", ".join( [str(x) for x in instances] @@ -50,12 +48,12 @@ class SelectInvalidInstances(pyblish.api.Action): self.deselect() def select(self, instances): - select_nodes( + nlib.select_nodes( [nuke.toNode(str(x)) for x in instances] ) def deselect(self): - reset_selection() + nlib.reset_selection() class RepairSelectInvalidInstances(pyblish.api.Action): @@ -85,12 +83,12 @@ class RepairSelectInvalidInstances(pyblish.api.Action): context_asset = context.data["assetEntity"]["name"] for instance in instances: origin_node = instance[0] - recreate_instance( + nuke_api.lib.recreate_instance( origin_node, avalon_data={"asset": context_asset} ) -class ValidateInstanceInContext(pyblish.api.InstancePlugin): +class ValidateCorrectAssetName(pyblish.api.InstancePlugin): """Validator to check if instance asset match context asset. When working in per-shot style you always publish data in context of @@ -99,15 +97,31 @@ class ValidateInstanceInContext(pyblish.api.InstancePlugin): Action on this validator will select invalid instances in Outliner. 
""" - order = openpype.api.ValidateContentsOrder - label = "Instance in same Context" + label = "Validate correct asset name" hosts = ["nuke"] - actions = [SelectInvalidInstances, RepairSelectInvalidInstances] + actions = [ + SelectInvalidInstances, + RepairSelectInvalidInstances + ] optional = True def process(self, instance): asset = instance.data.get("asset") context_asset = instance.context.data["assetEntity"]["name"] - msg = "{} has asset {}".format(instance.name, asset) - assert asset == context_asset, msg + + msg = ( + "Instance `{}` has wrong shot/asset name:\n" + "Correct: `{}` | Wrong: `{}`").format( + instance.name, asset, context_asset) + + self.log.debug(msg) + + if asset != context_asset: + raise PublishXmlValidationError( + self, msg, formatting_data={ + "node_name": instance[0]["name"].value(), + "wrong_name": asset, + "correct_name": context_asset + } + ) diff --git a/openpype/hosts/nuke/plugins/publish/validate_backdrop.py b/openpype/hosts/nuke/plugins/publish/validate_backdrop.py index e2843d146e..17dc79dc56 100644 --- a/openpype/hosts/nuke/plugins/publish/validate_backdrop.py +++ b/openpype/hosts/nuke/plugins/publish/validate_backdrop.py @@ -1,6 +1,7 @@ import nuke import pyblish from openpype.hosts.nuke.api.lib import maintained_selection +from openpype.pipeline import PublishXmlValidationError class SelectCenterInNodeGraph(pyblish.api.Action): @@ -47,8 +48,9 @@ class SelectCenterInNodeGraph(pyblish.api.Action): @pyblish.api.log class ValidateBackdrop(pyblish.api.InstancePlugin): - """Validate amount of nodes on backdrop node in case user - forgotten to add nodes above the publishing backdrop node""" + """ Validate amount of nodes on backdrop node in case user + forgoten to add nodes above the publishing backdrop node. + """ order = pyblish.api.ValidatorOrder optional = True @@ -63,8 +65,25 @@ class ValidateBackdrop(pyblish.api.InstancePlugin): msg_multiple_outputs = ( "Only one outcoming connection from " "\"{}\" is allowed").format(instance.data["name"]) - assert len(connections_out.keys()) <= 1, msg_multiple_outputs - msg_no_content = "No content on backdrop node: \"{}\"".format( + if len(connections_out.keys()) > 1: + raise PublishXmlValidationError( + self, + msg_multiple_outputs, + "multiple_outputs" + ) + + msg_no_nodes = "No content on backdrop node: \"{}\"".format( instance.data["name"]) - assert len(instance) > 1, msg_no_content + + self.log.debug( + "Amount of nodes on instance: {}".format( + len(instance)) + ) + + if len(instance) == 1: + raise PublishXmlValidationError( + self, + msg_no_nodes, + "no_nodes" + ) diff --git a/openpype/hosts/nuke/plugins/publish/validate_gizmo.py b/openpype/hosts/nuke/plugins/publish/validate_gizmo.py index d0d930f50c..2321bd1fd4 100644 --- a/openpype/hosts/nuke/plugins/publish/validate_gizmo.py +++ b/openpype/hosts/nuke/plugins/publish/validate_gizmo.py @@ -1,6 +1,7 @@ -import nuke import pyblish -from openpype.hosts.nuke.api.lib import maintained_selection +from openpype.pipeline import PublishXmlValidationError +from openpype.hosts.nuke.api import maintained_selection +import nuke class OpenFailedGroupNode(pyblish.api.Action): @@ -8,7 +9,7 @@ class OpenFailedGroupNode(pyblish.api.Action): Centering failed instance node in node grap """ - label = "Open Gizmo in Node Graph" + label = "Open Group" icon = "wrench" on = "failed" @@ -48,11 +49,23 @@ class ValidateGizmo(pyblish.api.InstancePlugin): with grpn: connections_out = nuke.allNodes('Output') - msg_multiple_outputs = "Only one outcoming connection from " - "\"{}\" is 
allowed".format(instance.data["name"]) - assert len(connections_out) <= 1, msg_multiple_outputs + msg_multiple_outputs = ( + "Only one outcoming connection from " + "\"{}\" is allowed").format(instance.data["name"]) + + if len(connections_out) > 1: + raise PublishXmlValidationError( + self, msg_multiple_outputs, "multiple_outputs", + {"node_name": grpn["name"].value()} + ) connections_in = nuke.allNodes('Input') - msg_missing_inputs = "At least one Input node has to be used in: " - "\"{}\"".format(instance.data["name"]) - assert len(connections_in) >= 1, msg_missing_inputs + msg_missing_inputs = ( + "At least one Input node has to be inside Group: " + "\"{}\"").format(instance.data["name"]) + + if len(connections_in) == 0: + raise PublishXmlValidationError( + self, msg_missing_inputs, "no_inputs", + {"node_name": grpn["name"].value()} + ) diff --git a/openpype/hosts/nuke/plugins/publish/validate_knobs.py b/openpype/hosts/nuke/plugins/publish/validate_knobs.py index d290ff4541..e2b11892e5 100644 --- a/openpype/hosts/nuke/plugins/publish/validate_knobs.py +++ b/openpype/hosts/nuke/plugins/publish/validate_knobs.py @@ -1,7 +1,8 @@ import nuke - +import six import pyblish.api import openpype.api +from openpype.pipeline import PublishXmlValidationError class ValidateKnobs(pyblish.api.ContextPlugin): @@ -27,11 +28,21 @@ class ValidateKnobs(pyblish.api.ContextPlugin): optional = True def process(self, context): - invalid = self.get_invalid(context, compute=True) if invalid: - raise RuntimeError( - "Found knobs with invalid values:\n{}".format(invalid) + invalid_items = [ + ( + "Node __{node_name}__ with knob _{label}_ " + "expecting _{expected}_, " + "but is set to _{current}_" + ).format(**i) + for i in invalid + ] + raise PublishXmlValidationError( + self, + "Found knobs with invalid values:\n{}".format(invalid), + formatting_data={ + "invalid_items": "\n".join(invalid_items)} ) @classmethod @@ -54,15 +65,24 @@ class ValidateKnobs(pyblish.api.ContextPlugin): # Filter families. families = [instance.data["family"]] families += instance.data.get("families", []) - families = list(set(families) & set(cls.knobs.keys())) + if not families: continue # Get all knobs to validate. knobs = {} for family in families: + # check if dot in family + if "." in family: + family = family.split(".")[0] + + # avoid families not in settings + if family not in cls.knobs: + continue + + # get presets of knobs for preset in cls.knobs[family]: - knobs.update({preset: cls.knobs[family][preset]}) + knobs[preset] = cls.knobs[family][preset] # Get invalid knobs. 
nodes = [] @@ -71,8 +91,7 @@ class ValidateKnobs(pyblish.api.ContextPlugin): nodes.append(node) if node.Class() == "Group": node.begin() - for i in nuke.allNodes(): - nodes.append(i) + nodes.extend(iter(nuke.allNodes())) node.end() for node in nodes: @@ -84,6 +103,7 @@ class ValidateKnobs(pyblish.api.ContextPlugin): if node[knob].value() != expected: invalid_knobs.append( { + "node_name": node.name(), "knob": node[knob], "name": node[knob].name(), "label": node[knob].label(), @@ -99,7 +119,9 @@ class ValidateKnobs(pyblish.api.ContextPlugin): def repair(cls, instance): invalid = cls.get_invalid(instance) for data in invalid: - if isinstance(data["expected"], unicode): + # TODO: will need to improve type definitions + # with the new settings for knob types + if isinstance(data["expected"], six.text_type): data["knob"].setValue(str(data["expected"])) continue diff --git a/openpype/hosts/nuke/plugins/publish/validate_output_resolution.py b/openpype/hosts/nuke/plugins/publish/validate_output_resolution.py index 27094b8d74..fc07e9b83b 100644 --- a/openpype/hosts/nuke/plugins/publish/validate_output_resolution.py +++ b/openpype/hosts/nuke/plugins/publish/validate_output_resolution.py @@ -1,43 +1,9 @@ -import nuke import pyblish.api - - -class RepairWriteResolutionDifference(pyblish.api.Action): - - label = "Repair" - icon = "wrench" - on = "failed" - - def process(self, context, plugin): - - # Get the errored instances - failed = [] - for result in context.data["results"]: - if (result["error"] is not None and result["instance"] is not None - and result["instance"] not in failed): - failed.append(result["instance"]) - - # Apply pyblish.logic to get the instances for the plug-in - instances = pyblish.api.instances_by_plugin(failed, plugin) - - for instance in instances: - reformat = instance[0].dependencies()[0] - if reformat.Class() != "Reformat": - reformat = nuke.nodes.Reformat(inputs=[instance[0].input(0)]) - - xpos = instance[0].xpos() - ypos = instance[0].ypos() - 26 - - dependent_ypos = instance[0].dependencies()[0].ypos() - if (instance[0].ypos() - dependent_ypos) <= 51: - xpos += 110 - - reformat.setXYpos(xpos, ypos) - - instance[0].setInput(0, reformat) - - reformat["resize"].setValue("none") +import openpype.api +from openpype.hosts.nuke.api import maintained_selection +from openpype.pipeline import PublishXmlValidationError +import nuke class ValidateOutputResolution(pyblish.api.InstancePlugin): @@ -52,27 +18,75 @@ class ValidateOutputResolution(pyblish.api.InstancePlugin): families = ["render", "render.local", "render.farm"] label = "Write Resolution" hosts = ["nuke"] - actions = [RepairWriteResolutionDifference] + actions = [openpype.api.RepairAction] + + missing_msg = "Missing Reformat node in render group node" + resolution_msg = "Reformat is set to wrong format" def process(self, instance): + invalid = self.get_invalid(instance) + if invalid: + raise PublishXmlValidationError(self, invalid) - # Skip bounding box check if a reformat node exists. - if instance[0].dependencies()[0].Class() == "Reformat": - return + @classmethod + def get_reformat(cls, instance): + reformat = None + for inode in instance: + if inode.Class() != "Reformat": + continue + reformat = inode - msg = "Bounding box is outside the format." 
- assert self.check_resolution(instance), msg + return reformat - def check_resolution(self, instance): - node = instance[0] + @classmethod + def get_invalid(cls, instance): + def _check_resolution(instance, reformat): + root_width = instance.data["resolutionWidth"] + root_height = instance.data["resolutionHeight"] - root_width = instance.data["resolutionWidth"] - root_height = instance.data["resolutionHeight"] + write_width = reformat.format().width() + write_height = reformat.format().height() - write_width = node.format().width() - write_height = node.format().height() + if (root_width != write_width) or (root_height != write_height): + return None + else: + return True - if (root_width != write_width) or (root_height != write_height): - return None - else: - return True + # check if reformat is in render node + reformat = cls.get_reformat(instance) + if not reformat: + return cls.missing_msg + + # check if reformat is set to correct root format + correct_format = _check_resolution(instance, reformat) + if not correct_format: + return cls.resolution_msg + + @classmethod + def repair(cls, instance): + invalid = cls.get_invalid(instance) + grp_node = instance[0] + + if cls.missing_msg == invalid: + # make sure we are inside of the group node + with grp_node: + # find input node and select it + _input = None + for inode in instance: + if inode.Class() != "Input": + continue + _input = inode + + # add reformat node under it + with maintained_selection(): + _input['selected'].setValue(True) + _rfn = nuke.createNode("Reformat", "name Reformat01") + _rfn["resize"].setValue(0) + _rfn["black_outside"].setValue(1) + + cls.log.info("I am adding reformat node") + + if cls.resolution_msg == invalid: + reformat = cls.get_reformat(instance) + reformat["format"].setValue(nuke.root()["format"].value()) + cls.log.info("I am fixing reformat to root.format") diff --git a/openpype/hosts/nuke/plugins/publish/validate_proxy_mode.py b/openpype/hosts/nuke/plugins/publish/validate_proxy_mode.py index 9c6ca03ffd..dac240ad19 100644 --- a/openpype/hosts/nuke/plugins/publish/validate_proxy_mode.py +++ b/openpype/hosts/nuke/plugins/publish/validate_proxy_mode.py @@ -1,5 +1,6 @@ import pyblish import nuke +from openpype.pipeline import PublishXmlValidationError class FixProxyMode(pyblish.api.Action): @@ -7,7 +8,7 @@ class FixProxyMode(pyblish.api.Action): Togger off proxy switch OFF """ - label = "Proxy toggle to OFF" + label = "Repair" icon = "wrench" on = "failed" @@ -30,4 +31,7 @@ class ValidateProxyMode(pyblish.api.ContextPlugin): rootNode = nuke.root() isProxy = rootNode["proxy"].value() - assert not isProxy, "Proxy mode should be toggled OFF" + if isProxy: + raise PublishXmlValidationError( + self, "Proxy mode should be toggled OFF" + ) diff --git a/openpype/hosts/nuke/plugins/publish/validate_rendered_frames.py b/openpype/hosts/nuke/plugins/publish/validate_rendered_frames.py index af5e8e9d27..237ff423e5 100644 --- a/openpype/hosts/nuke/plugins/publish/validate_rendered_frames.py +++ b/openpype/hosts/nuke/plugins/publish/validate_rendered_frames.py @@ -1,7 +1,7 @@ import os import pyblish.api -from openpype.api import ValidationException import clique +from openpype.pipeline import PublishXmlValidationError @pyblish.api.log @@ -36,7 +36,7 @@ class RepairActionBase(pyblish.api.Action): class RepairCollectionActionToLocal(RepairActionBase): - label = "Repair > rerender with `Local` machine" + label = "Repair - rerender with \"Local\"" def process(self, context, plugin): instances = self.get_instance(context, 
plugin) @@ -44,7 +44,7 @@ class RepairCollectionActionToLocal(RepairActionBase): class RepairCollectionActionToFarm(RepairActionBase): - label = "Repair > rerender `On farm` with remote machines" + label = "Repair - rerender with \"On farm\"" def process(self, context, plugin): instances = self.get_instance(context, plugin) @@ -63,6 +63,10 @@ class ValidateRenderedFrames(pyblish.api.InstancePlugin): def process(self, instance): + f_data = { + "node_name": instance[0]["name"].value() + } + for repre in instance.data["representations"]: if not repre.get("files"): @@ -71,7 +75,8 @@ class ValidateRenderedFrames(pyblish.api.InstancePlugin): "Check properties of write node (group) and" "select 'Local' option in 'Publish' dropdown.") self.log.error(msg) - raise ValidationException(msg) + raise PublishXmlValidationError( + self, msg, formatting_data=f_data) if isinstance(repre["files"], str): return @@ -82,30 +87,33 @@ class ValidateRenderedFrames(pyblish.api.InstancePlugin): collection = collections[0] - fstartH = instance.data["frameStartHandle"] - fendH = instance.data["frameEndHandle"] + f_start_h = instance.data["frameStartHandle"] + f_end_h = instance.data["frameEndHandle"] - frame_length = int(fendH - fstartH + 1) + frame_length = int(f_end_h - f_start_h + 1) if frame_length != 1: if len(collections) != 1: msg = "There are multiple collections in the folder" self.log.error(msg) - raise ValidationException(msg) + raise PublishXmlValidationError( + self, msg, formatting_data=f_data) if not collection.is_contiguous(): msg = "Some frames appear to be missing" self.log.error(msg) - raise ValidationException(msg) + raise PublishXmlValidationError( + self, msg, formatting_data=f_data) - collected_frames_len = int(len(collection.indexes)) + collected_frames_len = len(collection.indexes) coll_start = min(collection.indexes) coll_end = max(collection.indexes) self.log.info("frame_length: {}".format(frame_length)) self.log.info("collected_frames_len: {}".format( collected_frames_len)) - self.log.info("fstartH-fendH: {}-{}".format(fstartH, fendH)) + self.log.info("f_start_h-f_end_h: {}-{}".format( + f_start_h, f_end_h)) self.log.info( "coll_start-coll_end: {}-{}".format(coll_start, coll_end)) @@ -116,13 +124,19 @@ class ValidateRenderedFrames(pyblish.api.InstancePlugin): if ("slate" in instance.data["families"]) \ and (frame_length != collected_frames_len): collected_frames_len -= 1 - fstartH += 1 + f_start_h += 1 - assert ((collected_frames_len >= frame_length) - and (coll_start <= fstartH) - and (coll_end >= fendH)), ( - "{} missing frames. Use repair to render all frames" - ).format(__name__) + if ( + collected_frames_len != frame_length + and coll_start <= f_start_h + and coll_end >= f_end_h + ): + raise PublishXmlValidationError( + self, ( + "{} missing frames. Use repair to " + "render all frames" + ).format(__name__), formatting_data=f_data + ) instance.data["collection"] = collection diff --git a/openpype/hosts/nuke/plugins/publish/validate_script.py b/openpype/hosts/nuke/plugins/publish/validate_script.py deleted file mode 100644 index 9bda0da85e..0000000000 --- a/openpype/hosts/nuke/plugins/publish/validate_script.py +++ /dev/null @@ -1,156 +0,0 @@ -import pyblish.api - -from openpype.client import get_project, get_asset_by_id -from openpype import lib -from openpype.pipeline import legacy_io - - -@pyblish.api.log -class ValidateScript(pyblish.api.InstancePlugin): - """ Validates file output. 
""" - - order = pyblish.api.ValidatorOrder + 0.1 - families = ["workfile"] - label = "Check script settings" - hosts = ["nuke"] - optional = True - - def process(self, instance): - ctx_data = instance.context.data - asset_name = ctx_data["asset"] - asset = lib.get_asset(asset_name) - asset_data = asset["data"] - project_name = legacy_io.active_project() - - # These attributes will be checked - attributes = [ - "fps", - "frameStart", - "frameEnd", - "resolutionWidth", - "resolutionHeight", - "handleStart", - "handleEnd" - ] - - # Value of these attributes can be found on parents - hierarchical_attributes = [ - "fps", - "resolutionWidth", - "resolutionHeight", - "pixelAspect", - "handleStart", - "handleEnd" - ] - - missing_attributes = [] - asset_attributes = {} - for attr in attributes: - if attr in asset_data: - asset_attributes[attr] = asset_data[attr] - - elif attr in hierarchical_attributes: - # TODO this should be probably removed - # Hierarchical attributes is not a thing since Pype 2? - - # Try to find attribute on parent - parent_id = asset['parent'] - parent_type = "project" - if asset_data['visualParent'] is not None: - parent_type = "asset" - parent_id = asset_data['visualParent'] - - value = self.check_parent_hierarchical( - project_name, parent_type, parent_id, attr - ) - if value is None: - missing_attributes.append(attr) - else: - asset_attributes[attr] = value - else: - missing_attributes.append(attr) - - # Raise error if attributes weren't found on asset in database - if len(missing_attributes) > 0: - atr = ", ".join(missing_attributes) - msg = 'Missing attributes "{}" in asset "{}"' - message = msg.format(atr, asset_name) - raise ValueError(message) - - # Get handles from database, Default is 0 (if not found) - handle_start = 0 - handle_end = 0 - if "handleStart" in asset_attributes: - handle_start = asset_attributes["handleStart"] - if "handleEnd" in asset_attributes: - handle_end = asset_attributes["handleEnd"] - - asset_attributes["fps"] = float("{0:.4f}".format( - asset_attributes["fps"])) - - # Get values from nukescript - script_attributes = { - "handleStart": ctx_data["handleStart"], - "handleEnd": ctx_data["handleEnd"], - "fps": float("{0:.4f}".format(ctx_data["fps"])), - "frameStart": ctx_data["frameStart"], - "frameEnd": ctx_data["frameEnd"], - "resolutionWidth": ctx_data["resolutionWidth"], - "resolutionHeight": ctx_data["resolutionHeight"], - "pixelAspect": ctx_data["pixelAspect"] - } - - # Compare asset's values Nukescript X Database - not_matching = [] - for attr in attributes: - self.log.debug("asset vs script attribute \"{}\": {}, {}".format( - attr, asset_attributes[attr], script_attributes[attr]) - ) - if asset_attributes[attr] != script_attributes[attr]: - not_matching.append(attr) - - # Raise error if not matching - if len(not_matching) > 0: - msg = "Attributes '{}' are not set correctly" - # Alert user that handles are set if Frame start/end not match - if ( - (("frameStart" in not_matching) or ("frameEnd" in not_matching)) and - ((handle_start > 0) or (handle_end > 0)) - ): - msg += " (`handle_start` are set to {})".format(handle_start) - msg += " (`handle_end` are set to {})".format(handle_end) - message = msg.format(", ".join(not_matching)) - raise ValueError(message) - - def check_parent_hierarchical( - self, project_name, parent_type, parent_id, attr - ): - if parent_id is None: - return None - - doc = None - if parent_type == "project": - doc = get_project(project_name) - elif parent_type == "asset": - doc = get_asset_by_id(project_name, 
parent_id) - - if not doc: - return None - - doc_data = doc["data"] - if attr in doc_data: - self.log.info(attr) - return doc_data[attr] - - if parent_type == "project": - return None - - parent_id = doc_data.get("visualParent") - new_parent_type = "asset" - if parent_id is None: - parent_id = doc["parent"] - new_parent_type = "project" - - return self.check_parent_hierarchical( - project_name, new_parent_type, parent_id, attr - ) diff --git a/openpype/hosts/nuke/plugins/publish/validate_script_attributes.py b/openpype/hosts/nuke/plugins/publish/validate_script_attributes.py new file mode 100644 index 0000000000..106d7a2524 --- /dev/null +++ b/openpype/hosts/nuke/plugins/publish/validate_script_attributes.py @@ -0,0 +1,127 @@ +from pprint import pformat +import pyblish.api + +import openpype.api +from openpype.pipeline import PublishXmlValidationError +from openpype.hosts.nuke.api.lib import ( + get_avalon_knob_data, + WorkfileSettings +) +import nuke + + +@pyblish.api.log +class ValidateScriptAttributes(pyblish.api.InstancePlugin): + """ Validates file output. """ + + order = pyblish.api.ValidatorOrder + 0.1 + families = ["workfile"] + label = "Validatte script attributes" + hosts = ["nuke"] + optional = True + actions = [openpype.api.RepairAction] + + def process(self, instance): + root = nuke.root() + knob_data = get_avalon_knob_data(root) + asset = instance.data["assetEntity"] + # get asset data frame values + frame_start = asset["data"]["frameStart"] + frame_end = asset["data"]["frameEnd"] + handle_start = asset["data"]["handleStart"] + handle_end = asset["data"]["handleEnd"] + + # These attributes will be checked + attributes = [ + "fps", + "frameStart", + "frameEnd", + "resolutionWidth", + "resolutionHeight", + "handleStart", + "handleEnd" + ] + + # get only defined attributes from asset data + asset_attributes = { + attr: asset["data"][attr] + for attr in attributes + if attr in asset["data"] + } + # fix float to max 4 digints (only for evaluating) + fps_data = float("{0:.4f}".format( + asset_attributes["fps"])) + # fix frame values to include handles + asset_attributes.update({ + "frameStart": frame_start - handle_start, + "frameEnd": frame_end + handle_end, + "fps": fps_data + }) + + self.log.debug(pformat( + asset_attributes + )) + + # Get format + _format = root["format"].value() + + # Get values from nukescript + script_attributes = { + "handleStart": int(knob_data["handleStart"]), + "handleEnd": int(knob_data["handleEnd"]), + "fps": float("{0:.4f}".format(root['fps'].value())), + "frameStart": int(root["first_frame"].getValue()), + "frameEnd": int(root["last_frame"].getValue()), + "resolutionWidth": _format.width(), + "resolutionHeight": _format.height(), + "pixelAspect": _format.pixelAspect() + } + self.log.debug(pformat( + script_attributes + )) + + # Compare asset's values Nukescript X Database + not_matching = [] + for attr in attributes: + self.log.debug( + "Asset vs Script attribute \"{}\": {}, {}".format( + attr, + asset_attributes[attr], + script_attributes[attr] + ) + ) + if asset_attributes[attr] != script_attributes[attr]: + not_matching.append({ + "name": attr, + "expected": asset_attributes[attr], + "actual": script_attributes[attr] + }) + + # Raise error if not matching + if not_matching: + msg = "Following attributes are not set correctly: \n{}" + attrs_wrong_str = "\n".join([ + ( + "`{0}` is set to `{1}`, " + "but should be set to `{2}`" + ).format(at["name"], at["actual"], at["expected"]) + for at in not_matching + ]) + attrs_wrong_html = "
".join([ + ( + "-- __{0}__ is set to __{1}__, " + "but should be set to __{2}__" + ).format(at["name"], at["actual"], at["expected"]) + for at in not_matching + ]) + raise PublishXmlValidationError( + self, msg.format(attrs_wrong_str), + formatting_data={ + "failed_attributes": attrs_wrong_html + } + ) + + @classmethod + def repair(cls, instance): + cls.log.debug("__ repairing instance: {}".format(instance)) + WorkfileSettings().set_context_settings() diff --git a/openpype/hosts/nuke/plugins/publish/validate_write_nodes.py b/openpype/hosts/nuke/plugins/publish/validate_write_nodes.py index c0d5c8f402..362ff31174 100644 --- a/openpype/hosts/nuke/plugins/publish/validate_write_nodes.py +++ b/openpype/hosts/nuke/plugins/publish/validate_write_nodes.py @@ -1,10 +1,10 @@ -import os import pyblish.api -import openpype.utils +from openpype.api import get_errored_instances_from_context from openpype.hosts.nuke.api.lib import ( get_write_node_template_attr, - get_node_path + set_node_knobs_from_settings ) +from openpype.pipeline import PublishXmlValidationError @pyblish.api.log @@ -14,18 +14,29 @@ class RepairNukeWriteNodeAction(pyblish.api.Action): icon = "wrench" def process(self, context, plugin): - instances = openpype.utils.filter_instances(context, plugin) + instances = get_errored_instances_from_context(context) for instance in instances: - node = instance[1] - correct_data = get_write_node_template_attr(node) - for k, v in correct_data.items(): - node[k].setValue(v) + write_group_node = instance[0] + # get write node from inside of group + write_node = None + for x in instance: + if x.Class() == "Write": + write_node = x + + correct_data = get_write_node_template_attr(write_group_node) + + set_node_knobs_from_settings(write_node, correct_data["knobs"]) + self.log.info("Node attributes were fixed") class ValidateNukeWriteNode(pyblish.api.InstancePlugin): - """ Validates file output. """ + """ Validate Write node's knobs. + + Compare knobs on write node inside the render group + with settings. At the moment supporting only `file` knob. 
+ """ order = pyblish.api.ValidatorOrder optional = True @@ -35,38 +46,69 @@ class ValidateNukeWriteNode(pyblish.api.InstancePlugin): hosts = ["nuke"] def process(self, instance): + write_group_node = instance[0] - node = instance[1] - correct_data = get_write_node_template_attr(node) + # get write node from inside of group + write_node = None + for x in instance: + if x.Class() == "Write": + write_node = x + + if write_node is None: + return + + correct_data = get_write_node_template_attr(write_group_node) + + if correct_data: + check_knobs = correct_data["knobs"] + else: + return check = [] - for k, v in correct_data.items(): - if k is 'file': - padding = len(v.split('#')) - ref_path = get_node_path(v, padding) - n_path = get_node_path(node[k].value(), padding) - isnt = False - for i, p in enumerate(ref_path): - if str(n_path[i]) not in str(p): - if not isnt: - isnt = True - else: - continue - if isnt: - check.append([k, v, node[k].value()]) + self.log.debug("__ write_node: {}".format( + write_node + )) + + for knob_data in check_knobs: + key = knob_data["name"] + value = knob_data["value"] + node_value = write_node[key].value() + + # fix type differences + if type(node_value) in (int, float): + value = float(value) + node_value = float(node_value) else: - if str(node[k].value()) not in str(v): - check.append([k, v, node[k].value()]) + value = str(value) + node_value = str(node_value) + + self.log.debug("__ key: {} | value: {}".format( + key, value + )) + if ( + node_value != value + and key != "file" + and key != "tile_color" + ): + check.append([key, value, write_node[key].value()]) self.log.info(check) - msg = "Node's attribute `{0}` is not correct!\n" \ - "\nCorrect: `{1}` \n\nWrong: `{2}` \n\n" - if check: - print_msg = "" - for item in check: - print_msg += msg.format(item[0], item[1], item[2]) - print_msg += "`RMB` click to the validator and `A` to fix!" + self._make_error(check) - assert not check, print_msg + def _make_error(self, check): + # sourcery skip: merge-assign-and-aug-assign, move-assign-in-block + dbg_msg = "Write node's knobs values are not correct!\n" + msg_add = "Knob '{0}' > Correct: `{1}` > Wrong: `{2}`" + + details = [ + msg_add.format(item[0], item[1], item[2]) + for item in check + ] + xml_msg = "
".join(details) + dbg_msg += "\n\t".join(details) + + raise PublishXmlValidationError( + self, dbg_msg, formatting_data={"xml_msg": xml_msg} + ) diff --git a/openpype/hosts/nuke/startup/clear_rendered.py b/openpype/hosts/nuke/startup/clear_rendered.py index cf1d8ce170..744af71034 100644 --- a/openpype/hosts/nuke/startup/clear_rendered.py +++ b/openpype/hosts/nuke/startup/clear_rendered.py @@ -1,10 +1,11 @@ import os -from openpype.api import Logger -log = Logger().get_logger(__name__) +from openpype.lib import Logger def clear_rendered(dir_path): + log = Logger.get_logger(__name__) + for _f in os.listdir(dir_path): _f_path = os.path.join(dir_path, _f) log.info("Removing: `{}`".format(_f_path)) diff --git a/openpype/hosts/nuke/startup/write_to_read.py b/openpype/hosts/nuke/startup/write_to_read.py index f5cf66b357..b7add40f47 100644 --- a/openpype/hosts/nuke/startup/write_to_read.py +++ b/openpype/hosts/nuke/startup/write_to_read.py @@ -2,8 +2,8 @@ import re import os import glob import nuke -from openpype.api import Logger -log = Logger().get_logger(__name__) +from openpype.lib import Logger +log = Logger.get_logger(__name__) SINGLE_FILE_FORMATS = ['avi', 'mp4', 'mxf', 'mov', 'mpg', 'mpeg', 'wmv', 'm4v', 'm2v'] diff --git a/openpype/hosts/nuke/vendor/google/protobuf/__init__.py b/openpype/hosts/nuke/vendor/google/protobuf/__init__.py new file mode 100644 index 0000000000..03f3b29ee7 --- /dev/null +++ b/openpype/hosts/nuke/vendor/google/protobuf/__init__.py @@ -0,0 +1,33 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. +# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +# Copyright 2007 Google Inc. All Rights Reserved. 
+ +__version__ = '3.20.1' diff --git a/openpype/hosts/nuke/vendor/google/protobuf/any_pb2.py b/openpype/hosts/nuke/vendor/google/protobuf/any_pb2.py new file mode 100644 index 0000000000..9121193d11 --- /dev/null +++ b/openpype/hosts/nuke/vendor/google/protobuf/any_pb2.py @@ -0,0 +1,26 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: google/protobuf/any.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x19google/protobuf/any.proto\x12\x0fgoogle.protobuf\"&\n\x03\x41ny\x12\x10\n\x08type_url\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\x0c\x42v\n\x13\x63om.google.protobufB\x08\x41nyProtoP\x01Z,google.golang.org/protobuf/types/known/anypb\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3') + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.any_pb2', globals()) +if _descriptor._USE_C_DESCRIPTORS == False: + + DESCRIPTOR._options = None + DESCRIPTOR._serialized_options = b'\n\023com.google.protobufB\010AnyProtoP\001Z,google.golang.org/protobuf/types/known/anypb\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes' + _ANY._serialized_start=46 + _ANY._serialized_end=84 +# @@protoc_insertion_point(module_scope) diff --git a/openpype/hosts/nuke/vendor/google/protobuf/api_pb2.py b/openpype/hosts/nuke/vendor/google/protobuf/api_pb2.py new file mode 100644 index 0000000000..1721b10a75 --- /dev/null +++ b/openpype/hosts/nuke/vendor/google/protobuf/api_pb2.py @@ -0,0 +1,32 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
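The files below, together with the __init__.py above that pins __version__ to 3.20.1, vendor the pure-Python protobuf runtime under openpype/hosts/nuke/vendor. A vendored copy like this is usually made importable by prepending the vendor directory to sys.path before google.protobuf is imported; the snippet is only an illustration of that pattern, and the path handling is hypothetical rather than code from this PR:

    import os
    import sys

    # Hypothetical setup code: prefer the vendored protobuf over any system install.
    vendor_dir = os.path.join(os.path.dirname(__file__), "vendor")
    if vendor_dir not in sys.path:
        sys.path.insert(0, vendor_dir)

    import google.protobuf
    print(google.protobuf.__version__)  # "3.20.1" for the files added here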
+# source: google/protobuf/api.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.protobuf import source_context_pb2 as google_dot_protobuf_dot_source__context__pb2 +from google.protobuf import type_pb2 as google_dot_protobuf_dot_type__pb2 + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x19google/protobuf/api.proto\x12\x0fgoogle.protobuf\x1a$google/protobuf/source_context.proto\x1a\x1agoogle/protobuf/type.proto\"\x81\x02\n\x03\x41pi\x12\x0c\n\x04name\x18\x01 \x01(\t\x12(\n\x07methods\x18\x02 \x03(\x0b\x32\x17.google.protobuf.Method\x12(\n\x07options\x18\x03 \x03(\x0b\x32\x17.google.protobuf.Option\x12\x0f\n\x07version\x18\x04 \x01(\t\x12\x36\n\x0esource_context\x18\x05 \x01(\x0b\x32\x1e.google.protobuf.SourceContext\x12&\n\x06mixins\x18\x06 \x03(\x0b\x32\x16.google.protobuf.Mixin\x12\'\n\x06syntax\x18\x07 \x01(\x0e\x32\x17.google.protobuf.Syntax\"\xd5\x01\n\x06Method\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x18\n\x10request_type_url\x18\x02 \x01(\t\x12\x19\n\x11request_streaming\x18\x03 \x01(\x08\x12\x19\n\x11response_type_url\x18\x04 \x01(\t\x12\x1a\n\x12response_streaming\x18\x05 \x01(\x08\x12(\n\x07options\x18\x06 \x03(\x0b\x32\x17.google.protobuf.Option\x12\'\n\x06syntax\x18\x07 \x01(\x0e\x32\x17.google.protobuf.Syntax\"#\n\x05Mixin\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0c\n\x04root\x18\x02 \x01(\tBv\n\x13\x63om.google.protobufB\x08\x41piProtoP\x01Z,google.golang.org/protobuf/types/known/apipb\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3') + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.api_pb2', globals()) +if _descriptor._USE_C_DESCRIPTORS == False: + + DESCRIPTOR._options = None + DESCRIPTOR._serialized_options = b'\n\023com.google.protobufB\010ApiProtoP\001Z,google.golang.org/protobuf/types/known/apipb\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes' + _API._serialized_start=113 + _API._serialized_end=370 + _METHOD._serialized_start=373 + _METHOD._serialized_end=586 + _MIXIN._serialized_start=588 + _MIXIN._serialized_end=623 +# @@protoc_insertion_point(module_scope) diff --git a/openpype/hosts/nuke/vendor/google/protobuf/compiler/__init__.py b/openpype/hosts/nuke/vendor/google/protobuf/compiler/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/openpype/hosts/nuke/vendor/google/protobuf/compiler/plugin_pb2.py b/openpype/hosts/nuke/vendor/google/protobuf/compiler/plugin_pb2.py new file mode 100644 index 0000000000..715a891370 --- /dev/null +++ b/openpype/hosts/nuke/vendor/google/protobuf/compiler/plugin_pb2.py @@ -0,0 +1,35 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
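Each of these generated *_pb2 modules follows the same pattern: it feeds its serialized FileDescriptorProto to the default descriptor pool and lets _builder create the message classes in the module namespace. Once the vendored package is importable, those classes behave like any other protobuf messages; a small usage sketch with made-up values:

    from google.protobuf import api_pb2

    api = api_pb2.Api(name="example.PublishService", version="v1")
    method = api.methods.add(name="Publish")
    method.request_streaming = False

    data = api.SerializeToString()           # bytes on the wire
    restored = api_pb2.Api.FromString(data)
    print(restored.methods[0].name)          # "Publish"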
+# source: google/protobuf/compiler/plugin.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.protobuf import descriptor_pb2 as google_dot_protobuf_dot_descriptor__pb2 + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n%google/protobuf/compiler/plugin.proto\x12\x18google.protobuf.compiler\x1a google/protobuf/descriptor.proto\"F\n\x07Version\x12\r\n\x05major\x18\x01 \x01(\x05\x12\r\n\x05minor\x18\x02 \x01(\x05\x12\r\n\x05patch\x18\x03 \x01(\x05\x12\x0e\n\x06suffix\x18\x04 \x01(\t\"\xba\x01\n\x14\x43odeGeneratorRequest\x12\x18\n\x10\x66ile_to_generate\x18\x01 \x03(\t\x12\x11\n\tparameter\x18\x02 \x01(\t\x12\x38\n\nproto_file\x18\x0f \x03(\x0b\x32$.google.protobuf.FileDescriptorProto\x12;\n\x10\x63ompiler_version\x18\x03 \x01(\x0b\x32!.google.protobuf.compiler.Version\"\xc1\x02\n\x15\x43odeGeneratorResponse\x12\r\n\x05\x65rror\x18\x01 \x01(\t\x12\x1a\n\x12supported_features\x18\x02 \x01(\x04\x12\x42\n\x04\x66ile\x18\x0f \x03(\x0b\x32\x34.google.protobuf.compiler.CodeGeneratorResponse.File\x1a\x7f\n\x04\x46ile\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x17\n\x0finsertion_point\x18\x02 \x01(\t\x12\x0f\n\x07\x63ontent\x18\x0f \x01(\t\x12?\n\x13generated_code_info\x18\x10 \x01(\x0b\x32\".google.protobuf.GeneratedCodeInfo\"8\n\x07\x46\x65\x61ture\x12\x10\n\x0c\x46\x45\x41TURE_NONE\x10\x00\x12\x1b\n\x17\x46\x45\x41TURE_PROTO3_OPTIONAL\x10\x01\x42W\n\x1c\x63om.google.protobuf.compilerB\x0cPluginProtosZ)google.golang.org/protobuf/types/pluginpb') + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.compiler.plugin_pb2', globals()) +if _descriptor._USE_C_DESCRIPTORS == False: + + DESCRIPTOR._options = None + DESCRIPTOR._serialized_options = b'\n\034com.google.protobuf.compilerB\014PluginProtosZ)google.golang.org/protobuf/types/pluginpb' + _VERSION._serialized_start=101 + _VERSION._serialized_end=171 + _CODEGENERATORREQUEST._serialized_start=174 + _CODEGENERATORREQUEST._serialized_end=360 + _CODEGENERATORRESPONSE._serialized_start=363 + _CODEGENERATORRESPONSE._serialized_end=684 + _CODEGENERATORRESPONSE_FILE._serialized_start=499 + _CODEGENERATORRESPONSE_FILE._serialized_end=626 + _CODEGENERATORRESPONSE_FEATURE._serialized_start=628 + _CODEGENERATORRESPONSE_FEATURE._serialized_end=684 +# @@protoc_insertion_point(module_scope) diff --git a/openpype/hosts/nuke/vendor/google/protobuf/descriptor.py b/openpype/hosts/nuke/vendor/google/protobuf/descriptor.py new file mode 100644 index 0000000000..ad70be9a11 --- /dev/null +++ b/openpype/hosts/nuke/vendor/google/protobuf/descriptor.py @@ -0,0 +1,1224 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. +# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. 
+# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +"""Descriptors essentially contain exactly the information found in a .proto +file, in types that make this information accessible in Python. +""" + +__author__ = 'robinson@google.com (Will Robinson)' + +import threading +import warnings + +from google.protobuf.internal import api_implementation + +_USE_C_DESCRIPTORS = False +if api_implementation.Type() == 'cpp': + # Used by MakeDescriptor in cpp mode + import binascii + import os + from google.protobuf.pyext import _message + _USE_C_DESCRIPTORS = True + + +class Error(Exception): + """Base error for this module.""" + + +class TypeTransformationError(Error): + """Error transforming between python proto type and corresponding C++ type.""" + + +if _USE_C_DESCRIPTORS: + # This metaclass allows to override the behavior of code like + # isinstance(my_descriptor, FieldDescriptor) + # and make it return True when the descriptor is an instance of the extension + # type written in C++. + class DescriptorMetaclass(type): + def __instancecheck__(cls, obj): + if super(DescriptorMetaclass, cls).__instancecheck__(obj): + return True + if isinstance(obj, cls._C_DESCRIPTOR_CLASS): + return True + return False +else: + # The standard metaclass; nothing changes. + DescriptorMetaclass = type + + +class _Lock(object): + """Wrapper class of threading.Lock(), which is allowed by 'with'.""" + + def __new__(cls): + self = object.__new__(cls) + self._lock = threading.Lock() # pylint: disable=protected-access + return self + + def __enter__(self): + self._lock.acquire() + + def __exit__(self, exc_type, exc_value, exc_tb): + self._lock.release() + + +_lock = threading.Lock() + + +def _Deprecated(name): + if _Deprecated.count > 0: + _Deprecated.count -= 1 + warnings.warn( + 'Call to deprecated create function %s(). Note: Create unlinked ' + 'descriptors is going to go away. Please use get/find descriptors from ' + 'generated code or query the descriptor_pool.' + % name, + category=DeprecationWarning, stacklevel=3) + + +# Deprecated warnings will print 100 times at most which should be enough for +# users to notice and do not cause timeout. +_Deprecated.count = 100 + + +_internal_create_key = object() + + +class DescriptorBase(metaclass=DescriptorMetaclass): + + """Descriptors base class. + + This class is the base of all descriptor classes. 
It provides common options + related functionality. + + Attributes: + has_options: True if the descriptor has non-default options. Usually it + is not necessary to read this -- just call GetOptions() which will + happily return the default instance. However, it's sometimes useful + for efficiency, and also useful inside the protobuf implementation to + avoid some bootstrapping issues. + """ + + if _USE_C_DESCRIPTORS: + # The class, or tuple of classes, that are considered as "virtual + # subclasses" of this descriptor class. + _C_DESCRIPTOR_CLASS = () + + def __init__(self, options, serialized_options, options_class_name): + """Initialize the descriptor given its options message and the name of the + class of the options message. The name of the class is required in case + the options message is None and has to be created. + """ + self._options = options + self._options_class_name = options_class_name + self._serialized_options = serialized_options + + # Does this descriptor have non-default options? + self.has_options = (options is not None) or (serialized_options is not None) + + def _SetOptions(self, options, options_class_name): + """Sets the descriptor's options + + This function is used in generated proto2 files to update descriptor + options. It must not be used outside proto2. + """ + self._options = options + self._options_class_name = options_class_name + + # Does this descriptor have non-default options? + self.has_options = options is not None + + def GetOptions(self): + """Retrieves descriptor options. + + This method returns the options set or creates the default options for the + descriptor. + """ + if self._options: + return self._options + + from google.protobuf import descriptor_pb2 + try: + options_class = getattr(descriptor_pb2, + self._options_class_name) + except AttributeError: + raise RuntimeError('Unknown options class name %s!' % + (self._options_class_name)) + + with _lock: + if self._serialized_options is None: + self._options = options_class() + else: + self._options = _ParseOptions(options_class(), + self._serialized_options) + + return self._options + + +class _NestedDescriptorBase(DescriptorBase): + """Common class for descriptors that can be nested.""" + + def __init__(self, options, options_class_name, name, full_name, + file, containing_type, serialized_start=None, + serialized_end=None, serialized_options=None): + """Constructor. + + Args: + options: Protocol message options or None + to use default message options. + options_class_name (str): The class name of the above options. + name (str): Name of this protocol message type. + full_name (str): Fully-qualified name of this protocol message type, + which will include protocol "package" name and the name of any + enclosing types. + file (FileDescriptor): Reference to file info. + containing_type: if provided, this is a nested descriptor, with this + descriptor as parent, otherwise None. + serialized_start: The start index (inclusive) in block in the + file.serialized_pb that describes this descriptor. + serialized_end: The end index (exclusive) in block in the + file.serialized_pb that describes this descriptor. + serialized_options: Protocol message serialized options or None. + """ + super(_NestedDescriptorBase, self).__init__( + options, serialized_options, options_class_name) + + self.name = name + # TODO(falk): Add function to calculate full_name instead of having it in + # memory? 
+ self.full_name = full_name + self.file = file + self.containing_type = containing_type + + self._serialized_start = serialized_start + self._serialized_end = serialized_end + + def CopyToProto(self, proto): + """Copies this to the matching proto in descriptor_pb2. + + Args: + proto: An empty proto instance from descriptor_pb2. + + Raises: + Error: If self couldn't be serialized, due to to few constructor + arguments. + """ + if (self.file is not None and + self._serialized_start is not None and + self._serialized_end is not None): + proto.ParseFromString(self.file.serialized_pb[ + self._serialized_start:self._serialized_end]) + else: + raise Error('Descriptor does not contain serialization.') + + +class Descriptor(_NestedDescriptorBase): + + """Descriptor for a protocol message type. + + Attributes: + name (str): Name of this protocol message type. + full_name (str): Fully-qualified name of this protocol message type, + which will include protocol "package" name and the name of any + enclosing types. + containing_type (Descriptor): Reference to the descriptor of the type + containing us, or None if this is top-level. + fields (list[FieldDescriptor]): Field descriptors for all fields in + this type. + fields_by_number (dict(int, FieldDescriptor)): Same + :class:`FieldDescriptor` objects as in :attr:`fields`, but indexed + by "number" attribute in each FieldDescriptor. + fields_by_name (dict(str, FieldDescriptor)): Same + :class:`FieldDescriptor` objects as in :attr:`fields`, but indexed by + "name" attribute in each :class:`FieldDescriptor`. + nested_types (list[Descriptor]): Descriptor references + for all protocol message types nested within this one. + nested_types_by_name (dict(str, Descriptor)): Same Descriptor + objects as in :attr:`nested_types`, but indexed by "name" attribute + in each Descriptor. + enum_types (list[EnumDescriptor]): :class:`EnumDescriptor` references + for all enums contained within this type. + enum_types_by_name (dict(str, EnumDescriptor)): Same + :class:`EnumDescriptor` objects as in :attr:`enum_types`, but + indexed by "name" attribute in each EnumDescriptor. + enum_values_by_name (dict(str, EnumValueDescriptor)): Dict mapping + from enum value name to :class:`EnumValueDescriptor` for that value. + extensions (list[FieldDescriptor]): All extensions defined directly + within this message type (NOT within a nested type). + extensions_by_name (dict(str, FieldDescriptor)): Same FieldDescriptor + objects as :attr:`extensions`, but indexed by "name" attribute of each + FieldDescriptor. + is_extendable (bool): Does this type define any extension ranges? + oneofs (list[OneofDescriptor]): The list of descriptors for oneof fields + in this message. + oneofs_by_name (dict(str, OneofDescriptor)): Same objects as in + :attr:`oneofs`, but indexed by "name" attribute. + file (FileDescriptor): Reference to file descriptor. 
+ + """ + + if _USE_C_DESCRIPTORS: + _C_DESCRIPTOR_CLASS = _message.Descriptor + + def __new__( + cls, + name=None, + full_name=None, + filename=None, + containing_type=None, + fields=None, + nested_types=None, + enum_types=None, + extensions=None, + options=None, + serialized_options=None, + is_extendable=True, + extension_ranges=None, + oneofs=None, + file=None, # pylint: disable=redefined-builtin + serialized_start=None, + serialized_end=None, + syntax=None, + create_key=None): + _message.Message._CheckCalledFromGeneratedFile() + return _message.default_pool.FindMessageTypeByName(full_name) + + # NOTE(tmarek): The file argument redefining a builtin is nothing we can + # fix right now since we don't know how many clients already rely on the + # name of the argument. + def __init__(self, name, full_name, filename, containing_type, fields, + nested_types, enum_types, extensions, options=None, + serialized_options=None, + is_extendable=True, extension_ranges=None, oneofs=None, + file=None, serialized_start=None, serialized_end=None, # pylint: disable=redefined-builtin + syntax=None, create_key=None): + """Arguments to __init__() are as described in the description + of Descriptor fields above. + + Note that filename is an obsolete argument, that is not used anymore. + Please use file.name to access this as an attribute. + """ + if create_key is not _internal_create_key: + _Deprecated('Descriptor') + + super(Descriptor, self).__init__( + options, 'MessageOptions', name, full_name, file, + containing_type, serialized_start=serialized_start, + serialized_end=serialized_end, serialized_options=serialized_options) + + # We have fields in addition to fields_by_name and fields_by_number, + # so that: + # 1. Clients can index fields by "order in which they're listed." + # 2. Clients can easily iterate over all fields with the terse + # syntax: for f in descriptor.fields: ... + self.fields = fields + for field in self.fields: + field.containing_type = self + self.fields_by_number = dict((f.number, f) for f in fields) + self.fields_by_name = dict((f.name, f) for f in fields) + self._fields_by_camelcase_name = None + + self.nested_types = nested_types + for nested_type in nested_types: + nested_type.containing_type = self + self.nested_types_by_name = dict((t.name, t) for t in nested_types) + + self.enum_types = enum_types + for enum_type in self.enum_types: + enum_type.containing_type = self + self.enum_types_by_name = dict((t.name, t) for t in enum_types) + self.enum_values_by_name = dict( + (v.name, v) for t in enum_types for v in t.values) + + self.extensions = extensions + for extension in self.extensions: + extension.extension_scope = self + self.extensions_by_name = dict((f.name, f) for f in extensions) + self.is_extendable = is_extendable + self.extension_ranges = extension_ranges + self.oneofs = oneofs if oneofs is not None else [] + self.oneofs_by_name = dict((o.name, o) for o in self.oneofs) + for oneof in self.oneofs: + oneof.containing_type = self + self.syntax = syntax or "proto2" + + @property + def fields_by_camelcase_name(self): + """Same FieldDescriptor objects as in :attr:`fields`, but indexed by + :attr:`FieldDescriptor.camelcase_name`. + """ + if self._fields_by_camelcase_name is None: + self._fields_by_camelcase_name = dict( + (f.camelcase_name, f) for f in self.fields) + return self._fields_by_camelcase_name + + def EnumValueName(self, enum, value): + """Returns the string name of an enum value. + + This is just a small helper method to simplify a common operation. 
+ + Args: + enum: string name of the Enum. + value: int, value of the enum. + + Returns: + string name of the enum value. + + Raises: + KeyError if either the Enum doesn't exist or the value is not a valid + value for the enum. + """ + return self.enum_types_by_name[enum].values_by_number[value].name + + def CopyToProto(self, proto): + """Copies this to a descriptor_pb2.DescriptorProto. + + Args: + proto: An empty descriptor_pb2.DescriptorProto. + """ + # This function is overridden to give a better doc comment. + super(Descriptor, self).CopyToProto(proto) + + +# TODO(robinson): We should have aggressive checking here, +# for example: +# * If you specify a repeated field, you should not be allowed +# to specify a default value. +# * [Other examples here as needed]. +# +# TODO(robinson): for this and other *Descriptor classes, we +# might also want to lock things down aggressively (e.g., +# prevent clients from setting the attributes). Having +# stronger invariants here in general will reduce the number +# of runtime checks we must do in reflection.py... +class FieldDescriptor(DescriptorBase): + + """Descriptor for a single field in a .proto file. + + Attributes: + name (str): Name of this field, exactly as it appears in .proto. + full_name (str): Name of this field, including containing scope. This is + particularly relevant for extensions. + index (int): Dense, 0-indexed index giving the order that this + field textually appears within its message in the .proto file. + number (int): Tag number declared for this field in the .proto file. + + type (int): (One of the TYPE_* constants below) Declared type. + cpp_type (int): (One of the CPPTYPE_* constants below) C++ type used to + represent this field. + + label (int): (One of the LABEL_* constants below) Tells whether this + field is optional, required, or repeated. + has_default_value (bool): True if this field has a default value defined, + otherwise false. + default_value (Varies): Default value of this field. Only + meaningful for non-repeated scalar fields. Repeated fields + should always set this to [], and non-repeated composite + fields should always set this to None. + + containing_type (Descriptor): Descriptor of the protocol message + type that contains this field. Set by the Descriptor constructor + if we're passed into one. + Somewhat confusingly, for extension fields, this is the + descriptor of the EXTENDED message, not the descriptor + of the message containing this field. (See is_extension and + extension_scope below). + message_type (Descriptor): If a composite field, a descriptor + of the message type contained in this field. Otherwise, this is None. + enum_type (EnumDescriptor): If this field contains an enum, a + descriptor of that enum. Otherwise, this is None. + + is_extension: True iff this describes an extension field. + extension_scope (Descriptor): Only meaningful if is_extension is True. + Gives the message that immediately contains this extension field. + Will be None iff we're a top-level (file-level) extension field. + + options (descriptor_pb2.FieldOptions): Protocol message field options or + None to use default field options. + + containing_oneof (OneofDescriptor): If the field is a member of a oneof + union, contains its descriptor. Otherwise, None. + + file (FileDescriptor): Reference to file descriptor. + """ + + # Must be consistent with C++ FieldDescriptor::Type enum in + # descriptor.h. + # + # TODO(robinson): Find a way to eliminate this repetition. 
+ TYPE_DOUBLE = 1 + TYPE_FLOAT = 2 + TYPE_INT64 = 3 + TYPE_UINT64 = 4 + TYPE_INT32 = 5 + TYPE_FIXED64 = 6 + TYPE_FIXED32 = 7 + TYPE_BOOL = 8 + TYPE_STRING = 9 + TYPE_GROUP = 10 + TYPE_MESSAGE = 11 + TYPE_BYTES = 12 + TYPE_UINT32 = 13 + TYPE_ENUM = 14 + TYPE_SFIXED32 = 15 + TYPE_SFIXED64 = 16 + TYPE_SINT32 = 17 + TYPE_SINT64 = 18 + MAX_TYPE = 18 + + # Must be consistent with C++ FieldDescriptor::CppType enum in + # descriptor.h. + # + # TODO(robinson): Find a way to eliminate this repetition. + CPPTYPE_INT32 = 1 + CPPTYPE_INT64 = 2 + CPPTYPE_UINT32 = 3 + CPPTYPE_UINT64 = 4 + CPPTYPE_DOUBLE = 5 + CPPTYPE_FLOAT = 6 + CPPTYPE_BOOL = 7 + CPPTYPE_ENUM = 8 + CPPTYPE_STRING = 9 + CPPTYPE_MESSAGE = 10 + MAX_CPPTYPE = 10 + + _PYTHON_TO_CPP_PROTO_TYPE_MAP = { + TYPE_DOUBLE: CPPTYPE_DOUBLE, + TYPE_FLOAT: CPPTYPE_FLOAT, + TYPE_ENUM: CPPTYPE_ENUM, + TYPE_INT64: CPPTYPE_INT64, + TYPE_SINT64: CPPTYPE_INT64, + TYPE_SFIXED64: CPPTYPE_INT64, + TYPE_UINT64: CPPTYPE_UINT64, + TYPE_FIXED64: CPPTYPE_UINT64, + TYPE_INT32: CPPTYPE_INT32, + TYPE_SFIXED32: CPPTYPE_INT32, + TYPE_SINT32: CPPTYPE_INT32, + TYPE_UINT32: CPPTYPE_UINT32, + TYPE_FIXED32: CPPTYPE_UINT32, + TYPE_BYTES: CPPTYPE_STRING, + TYPE_STRING: CPPTYPE_STRING, + TYPE_BOOL: CPPTYPE_BOOL, + TYPE_MESSAGE: CPPTYPE_MESSAGE, + TYPE_GROUP: CPPTYPE_MESSAGE + } + + # Must be consistent with C++ FieldDescriptor::Label enum in + # descriptor.h. + # + # TODO(robinson): Find a way to eliminate this repetition. + LABEL_OPTIONAL = 1 + LABEL_REQUIRED = 2 + LABEL_REPEATED = 3 + MAX_LABEL = 3 + + # Must be consistent with C++ constants kMaxNumber, kFirstReservedNumber, + # and kLastReservedNumber in descriptor.h + MAX_FIELD_NUMBER = (1 << 29) - 1 + FIRST_RESERVED_FIELD_NUMBER = 19000 + LAST_RESERVED_FIELD_NUMBER = 19999 + + if _USE_C_DESCRIPTORS: + _C_DESCRIPTOR_CLASS = _message.FieldDescriptor + + def __new__(cls, name, full_name, index, number, type, cpp_type, label, + default_value, message_type, enum_type, containing_type, + is_extension, extension_scope, options=None, + serialized_options=None, + has_default_value=True, containing_oneof=None, json_name=None, + file=None, create_key=None): # pylint: disable=redefined-builtin + _message.Message._CheckCalledFromGeneratedFile() + if is_extension: + return _message.default_pool.FindExtensionByName(full_name) + else: + return _message.default_pool.FindFieldByName(full_name) + + def __init__(self, name, full_name, index, number, type, cpp_type, label, + default_value, message_type, enum_type, containing_type, + is_extension, extension_scope, options=None, + serialized_options=None, + has_default_value=True, containing_oneof=None, json_name=None, + file=None, create_key=None): # pylint: disable=redefined-builtin + """The arguments are as described in the description of FieldDescriptor + attributes above. + + Note that containing_type may be None, and may be set later if necessary + (to deal with circular references between message types, for example). + Likewise for extension_scope. 
+ """ + if create_key is not _internal_create_key: + _Deprecated('FieldDescriptor') + + super(FieldDescriptor, self).__init__( + options, serialized_options, 'FieldOptions') + self.name = name + self.full_name = full_name + self.file = file + self._camelcase_name = None + if json_name is None: + self.json_name = _ToJsonName(name) + else: + self.json_name = json_name + self.index = index + self.number = number + self.type = type + self.cpp_type = cpp_type + self.label = label + self.has_default_value = has_default_value + self.default_value = default_value + self.containing_type = containing_type + self.message_type = message_type + self.enum_type = enum_type + self.is_extension = is_extension + self.extension_scope = extension_scope + self.containing_oneof = containing_oneof + if api_implementation.Type() == 'cpp': + if is_extension: + self._cdescriptor = _message.default_pool.FindExtensionByName(full_name) + else: + self._cdescriptor = _message.default_pool.FindFieldByName(full_name) + else: + self._cdescriptor = None + + @property + def camelcase_name(self): + """Camelcase name of this field. + + Returns: + str: the name in CamelCase. + """ + if self._camelcase_name is None: + self._camelcase_name = _ToCamelCase(self.name) + return self._camelcase_name + + @property + def has_presence(self): + """Whether the field distinguishes between unpopulated and default values. + + Raises: + RuntimeError: singular field that is not linked with message nor file. + """ + if self.label == FieldDescriptor.LABEL_REPEATED: + return False + if (self.cpp_type == FieldDescriptor.CPPTYPE_MESSAGE or + self.containing_oneof): + return True + if hasattr(self.file, 'syntax'): + return self.file.syntax == 'proto2' + if hasattr(self.message_type, 'syntax'): + return self.message_type.syntax == 'proto2' + raise RuntimeError( + 'has_presence is not ready to use because field %s is not' + ' linked with message type nor file' % self.full_name) + + @staticmethod + def ProtoTypeToCppProtoType(proto_type): + """Converts from a Python proto type to a C++ Proto Type. + + The Python ProtocolBuffer classes specify both the 'Python' datatype and the + 'C++' datatype - and they're not the same. This helper method should + translate from one to another. + + Args: + proto_type: the Python proto type (descriptor.FieldDescriptor.TYPE_*) + Returns: + int: descriptor.FieldDescriptor.CPPTYPE_*, the C++ type. + Raises: + TypeTransformationError: when the Python proto type isn't known. + """ + try: + return FieldDescriptor._PYTHON_TO_CPP_PROTO_TYPE_MAP[proto_type] + except KeyError: + raise TypeTransformationError('Unknown proto_type: %s' % proto_type) + + +class EnumDescriptor(_NestedDescriptorBase): + + """Descriptor for an enum defined in a .proto file. + + Attributes: + name (str): Name of the enum type. + full_name (str): Full name of the type, including package name + and any enclosing type(s). + + values (list[EnumValueDescriptor]): List of the values + in this enum. + values_by_name (dict(str, EnumValueDescriptor)): Same as :attr:`values`, + but indexed by the "name" field of each EnumValueDescriptor. + values_by_number (dict(int, EnumValueDescriptor)): Same as :attr:`values`, + but indexed by the "number" field of each EnumValueDescriptor. + containing_type (Descriptor): Descriptor of the immediate containing + type of this enum, or None if this is an enum defined at the + top level in a .proto file. Set by Descriptor's constructor + if we're passed into one. + file (FileDescriptor): Reference to file descriptor. 
+ options (descriptor_pb2.EnumOptions): Enum options message or + None to use default enum options. + """ + + if _USE_C_DESCRIPTORS: + _C_DESCRIPTOR_CLASS = _message.EnumDescriptor + + def __new__(cls, name, full_name, filename, values, + containing_type=None, options=None, + serialized_options=None, file=None, # pylint: disable=redefined-builtin + serialized_start=None, serialized_end=None, create_key=None): + _message.Message._CheckCalledFromGeneratedFile() + return _message.default_pool.FindEnumTypeByName(full_name) + + def __init__(self, name, full_name, filename, values, + containing_type=None, options=None, + serialized_options=None, file=None, # pylint: disable=redefined-builtin + serialized_start=None, serialized_end=None, create_key=None): + """Arguments are as described in the attribute description above. + + Note that filename is an obsolete argument, that is not used anymore. + Please use file.name to access this as an attribute. + """ + if create_key is not _internal_create_key: + _Deprecated('EnumDescriptor') + + super(EnumDescriptor, self).__init__( + options, 'EnumOptions', name, full_name, file, + containing_type, serialized_start=serialized_start, + serialized_end=serialized_end, serialized_options=serialized_options) + + self.values = values + for value in self.values: + value.type = self + self.values_by_name = dict((v.name, v) for v in values) + # Values are reversed to ensure that the first alias is retained. + self.values_by_number = dict((v.number, v) for v in reversed(values)) + + def CopyToProto(self, proto): + """Copies this to a descriptor_pb2.EnumDescriptorProto. + + Args: + proto (descriptor_pb2.EnumDescriptorProto): An empty descriptor proto. + """ + # This function is overridden to give a better doc comment. + super(EnumDescriptor, self).CopyToProto(proto) + + +class EnumValueDescriptor(DescriptorBase): + + """Descriptor for a single value within an enum. + + Attributes: + name (str): Name of this value. + index (int): Dense, 0-indexed index giving the order that this + value appears textually within its enum in the .proto file. + number (int): Actual number assigned to this enum value. + type (EnumDescriptor): :class:`EnumDescriptor` to which this value + belongs. Set by :class:`EnumDescriptor`'s constructor if we're + passed into one. + options (descriptor_pb2.EnumValueOptions): Enum value options message or + None to use default enum value options options. + """ + + if _USE_C_DESCRIPTORS: + _C_DESCRIPTOR_CLASS = _message.EnumValueDescriptor + + def __new__(cls, name, index, number, + type=None, # pylint: disable=redefined-builtin + options=None, serialized_options=None, create_key=None): + _message.Message._CheckCalledFromGeneratedFile() + # There is no way we can build a complete EnumValueDescriptor with the + # given parameters (the name of the Enum is not known, for example). + # Fortunately generated files just pass it to the EnumDescriptor() + # constructor, which will ignore it, so returning None is good enough. 
+ return None + + def __init__(self, name, index, number, + type=None, # pylint: disable=redefined-builtin + options=None, serialized_options=None, create_key=None): + """Arguments are as described in the attribute description above.""" + if create_key is not _internal_create_key: + _Deprecated('EnumValueDescriptor') + + super(EnumValueDescriptor, self).__init__( + options, serialized_options, 'EnumValueOptions') + self.name = name + self.index = index + self.number = number + self.type = type + + +class OneofDescriptor(DescriptorBase): + """Descriptor for a oneof field. + + Attributes: + name (str): Name of the oneof field. + full_name (str): Full name of the oneof field, including package name. + index (int): 0-based index giving the order of the oneof field inside + its containing type. + containing_type (Descriptor): :class:`Descriptor` of the protocol message + type that contains this field. Set by the :class:`Descriptor` constructor + if we're passed into one. + fields (list[FieldDescriptor]): The list of field descriptors this + oneof can contain. + """ + + if _USE_C_DESCRIPTORS: + _C_DESCRIPTOR_CLASS = _message.OneofDescriptor + + def __new__( + cls, name, full_name, index, containing_type, fields, options=None, + serialized_options=None, create_key=None): + _message.Message._CheckCalledFromGeneratedFile() + return _message.default_pool.FindOneofByName(full_name) + + def __init__( + self, name, full_name, index, containing_type, fields, options=None, + serialized_options=None, create_key=None): + """Arguments are as described in the attribute description above.""" + if create_key is not _internal_create_key: + _Deprecated('OneofDescriptor') + + super(OneofDescriptor, self).__init__( + options, serialized_options, 'OneofOptions') + self.name = name + self.full_name = full_name + self.index = index + self.containing_type = containing_type + self.fields = fields + + +class ServiceDescriptor(_NestedDescriptorBase): + + """Descriptor for a service. + + Attributes: + name (str): Name of the service. + full_name (str): Full name of the service, including package name. + index (int): 0-indexed index giving the order that this services + definition appears within the .proto file. + methods (list[MethodDescriptor]): List of methods provided by this + service. + methods_by_name (dict(str, MethodDescriptor)): Same + :class:`MethodDescriptor` objects as in :attr:`methods_by_name`, but + indexed by "name" attribute in each :class:`MethodDescriptor`. + options (descriptor_pb2.ServiceOptions): Service options message or + None to use default service options. + file (FileDescriptor): Reference to file info. 
+ """ + + if _USE_C_DESCRIPTORS: + _C_DESCRIPTOR_CLASS = _message.ServiceDescriptor + + def __new__( + cls, + name=None, + full_name=None, + index=None, + methods=None, + options=None, + serialized_options=None, + file=None, # pylint: disable=redefined-builtin + serialized_start=None, + serialized_end=None, + create_key=None): + _message.Message._CheckCalledFromGeneratedFile() # pylint: disable=protected-access + return _message.default_pool.FindServiceByName(full_name) + + def __init__(self, name, full_name, index, methods, options=None, + serialized_options=None, file=None, # pylint: disable=redefined-builtin + serialized_start=None, serialized_end=None, create_key=None): + if create_key is not _internal_create_key: + _Deprecated('ServiceDescriptor') + + super(ServiceDescriptor, self).__init__( + options, 'ServiceOptions', name, full_name, file, + None, serialized_start=serialized_start, + serialized_end=serialized_end, serialized_options=serialized_options) + self.index = index + self.methods = methods + self.methods_by_name = dict((m.name, m) for m in methods) + # Set the containing service for each method in this service. + for method in self.methods: + method.containing_service = self + + def FindMethodByName(self, name): + """Searches for the specified method, and returns its descriptor. + + Args: + name (str): Name of the method. + Returns: + MethodDescriptor or None: the descriptor for the requested method, if + found. + """ + return self.methods_by_name.get(name, None) + + def CopyToProto(self, proto): + """Copies this to a descriptor_pb2.ServiceDescriptorProto. + + Args: + proto (descriptor_pb2.ServiceDescriptorProto): An empty descriptor proto. + """ + # This function is overridden to give a better doc comment. + super(ServiceDescriptor, self).CopyToProto(proto) + + +class MethodDescriptor(DescriptorBase): + + """Descriptor for a method in a service. + + Attributes: + name (str): Name of the method within the service. + full_name (str): Full name of method. + index (int): 0-indexed index of the method inside the service. + containing_service (ServiceDescriptor): The service that contains this + method. + input_type (Descriptor): The descriptor of the message that this method + accepts. + output_type (Descriptor): The descriptor of the message that this method + returns. + client_streaming (bool): Whether this method uses client streaming. + server_streaming (bool): Whether this method uses server streaming. + options (descriptor_pb2.MethodOptions or None): Method options message, or + None to use default method options. + """ + + if _USE_C_DESCRIPTORS: + _C_DESCRIPTOR_CLASS = _message.MethodDescriptor + + def __new__(cls, + name, + full_name, + index, + containing_service, + input_type, + output_type, + client_streaming=False, + server_streaming=False, + options=None, + serialized_options=None, + create_key=None): + _message.Message._CheckCalledFromGeneratedFile() # pylint: disable=protected-access + return _message.default_pool.FindMethodByName(full_name) + + def __init__(self, + name, + full_name, + index, + containing_service, + input_type, + output_type, + client_streaming=False, + server_streaming=False, + options=None, + serialized_options=None, + create_key=None): + """The arguments are as described in the description of MethodDescriptor + attributes above. + + Note that containing_service may be None, and may be set later if necessary. 
+ """ + if create_key is not _internal_create_key: + _Deprecated('MethodDescriptor') + + super(MethodDescriptor, self).__init__( + options, serialized_options, 'MethodOptions') + self.name = name + self.full_name = full_name + self.index = index + self.containing_service = containing_service + self.input_type = input_type + self.output_type = output_type + self.client_streaming = client_streaming + self.server_streaming = server_streaming + + def CopyToProto(self, proto): + """Copies this to a descriptor_pb2.MethodDescriptorProto. + + Args: + proto (descriptor_pb2.MethodDescriptorProto): An empty descriptor proto. + + Raises: + Error: If self couldn't be serialized, due to too few constructor + arguments. + """ + if self.containing_service is not None: + from google.protobuf import descriptor_pb2 + service_proto = descriptor_pb2.ServiceDescriptorProto() + self.containing_service.CopyToProto(service_proto) + proto.CopyFrom(service_proto.method[self.index]) + else: + raise Error('Descriptor does not contain a service.') + + +class FileDescriptor(DescriptorBase): + """Descriptor for a file. Mimics the descriptor_pb2.FileDescriptorProto. + + Note that :attr:`enum_types_by_name`, :attr:`extensions_by_name`, and + :attr:`dependencies` fields are only set by the + :py:mod:`google.protobuf.message_factory` module, and not by the generated + proto code. + + Attributes: + name (str): Name of file, relative to root of source tree. + package (str): Name of the package + syntax (str): string indicating syntax of the file (can be "proto2" or + "proto3") + serialized_pb (bytes): Byte string of serialized + :class:`descriptor_pb2.FileDescriptorProto`. + dependencies (list[FileDescriptor]): List of other :class:`FileDescriptor` + objects this :class:`FileDescriptor` depends on. + public_dependencies (list[FileDescriptor]): A subset of + :attr:`dependencies`, which were declared as "public". + message_types_by_name (dict(str, Descriptor)): Mapping from message names + to their :class:`Descriptor`. + enum_types_by_name (dict(str, EnumDescriptor)): Mapping from enum names to + their :class:`EnumDescriptor`. + extensions_by_name (dict(str, FieldDescriptor)): Mapping from extension + names declared at file scope to their :class:`FieldDescriptor`. + services_by_name (dict(str, ServiceDescriptor)): Mapping from services' + names to their :class:`ServiceDescriptor`. + pool (DescriptorPool): The pool this descriptor belongs to. When not + passed to the constructor, the global default pool is used. + """ + + if _USE_C_DESCRIPTORS: + _C_DESCRIPTOR_CLASS = _message.FileDescriptor + + def __new__(cls, name, package, options=None, + serialized_options=None, serialized_pb=None, + dependencies=None, public_dependencies=None, + syntax=None, pool=None, create_key=None): + # FileDescriptor() is called from various places, not only from generated + # files, to register dynamic proto files and messages. 
+ # pylint: disable=g-explicit-bool-comparison + if serialized_pb == b'': + # Cpp generated code must be linked in if serialized_pb is '' + try: + return _message.default_pool.FindFileByName(name) + except KeyError: + raise RuntimeError('Please link in cpp generated lib for %s' % (name)) + elif serialized_pb: + return _message.default_pool.AddSerializedFile(serialized_pb) + else: + return super(FileDescriptor, cls).__new__(cls) + + def __init__(self, name, package, options=None, + serialized_options=None, serialized_pb=None, + dependencies=None, public_dependencies=None, + syntax=None, pool=None, create_key=None): + """Constructor.""" + if create_key is not _internal_create_key: + _Deprecated('FileDescriptor') + + super(FileDescriptor, self).__init__( + options, serialized_options, 'FileOptions') + + if pool is None: + from google.protobuf import descriptor_pool + pool = descriptor_pool.Default() + self.pool = pool + self.message_types_by_name = {} + self.name = name + self.package = package + self.syntax = syntax or "proto2" + self.serialized_pb = serialized_pb + + self.enum_types_by_name = {} + self.extensions_by_name = {} + self.services_by_name = {} + self.dependencies = (dependencies or []) + self.public_dependencies = (public_dependencies or []) + + def CopyToProto(self, proto): + """Copies this to a descriptor_pb2.FileDescriptorProto. + + Args: + proto: An empty descriptor_pb2.FileDescriptorProto. + """ + proto.ParseFromString(self.serialized_pb) + + +def _ParseOptions(message, string): + """Parses serialized options. + + This helper function is used to parse serialized options in generated + proto2 files. It must not be used outside proto2. + """ + message.ParseFromString(string) + return message + + +def _ToCamelCase(name): + """Converts name to camel-case and returns it.""" + capitalize_next = False + result = [] + + for c in name: + if c == '_': + if result: + capitalize_next = True + elif capitalize_next: + result.append(c.upper()) + capitalize_next = False + else: + result += c + + # Lower-case the first letter. + if result and result[0].isupper(): + result[0] = result[0].lower() + return ''.join(result) + + +def _OptionsOrNone(descriptor_proto): + """Returns the value of the field `options`, or None if it is not set.""" + if descriptor_proto.HasField('options'): + return descriptor_proto.options + else: + return None + + +def _ToJsonName(name): + """Converts name to Json name and returns it.""" + capitalize_next = False + result = [] + + for c in name: + if c == '_': + capitalize_next = True + elif capitalize_next: + result.append(c.upper()) + capitalize_next = False + else: + result += c + + return ''.join(result) + + +def MakeDescriptor(desc_proto, package='', build_file_if_cpp=True, + syntax=None): + """Make a protobuf Descriptor given a DescriptorProto protobuf. + + Handles nested descriptors. Note that this is limited to the scope of defining + a message inside of another message. Composite fields can currently only be + resolved if the message is defined in the same scope as the field. + + Args: + desc_proto: The descriptor_pb2.DescriptorProto protobuf message. + package: Optional package name for the new message Descriptor (string). + build_file_if_cpp: Update the C++ descriptor pool if api matches. + Set to False on recursion, so no duplicates are created. + syntax: The syntax/semantics that should be used. Set to "proto3" to get + proto3 field presence semantics. + Returns: + A Descriptor for protobuf messages. 
+ """ + if api_implementation.Type() == 'cpp' and build_file_if_cpp: + # The C++ implementation requires all descriptors to be backed by the same + # definition in the C++ descriptor pool. To do this, we build a + # FileDescriptorProto with the same definition as this descriptor and build + # it into the pool. + from google.protobuf import descriptor_pb2 + file_descriptor_proto = descriptor_pb2.FileDescriptorProto() + file_descriptor_proto.message_type.add().MergeFrom(desc_proto) + + # Generate a random name for this proto file to prevent conflicts with any + # imported ones. We need to specify a file name so the descriptor pool + # accepts our FileDescriptorProto, but it is not important what that file + # name is actually set to. + proto_name = binascii.hexlify(os.urandom(16)).decode('ascii') + + if package: + file_descriptor_proto.name = os.path.join(package.replace('.', '/'), + proto_name + '.proto') + file_descriptor_proto.package = package + else: + file_descriptor_proto.name = proto_name + '.proto' + + _message.default_pool.Add(file_descriptor_proto) + result = _message.default_pool.FindFileByName(file_descriptor_proto.name) + + if _USE_C_DESCRIPTORS: + return result.message_types_by_name[desc_proto.name] + + full_message_name = [desc_proto.name] + if package: full_message_name.insert(0, package) + + # Create Descriptors for enum types + enum_types = {} + for enum_proto in desc_proto.enum_type: + full_name = '.'.join(full_message_name + [enum_proto.name]) + enum_desc = EnumDescriptor( + enum_proto.name, full_name, None, [ + EnumValueDescriptor(enum_val.name, ii, enum_val.number, + create_key=_internal_create_key) + for ii, enum_val in enumerate(enum_proto.value)], + create_key=_internal_create_key) + enum_types[full_name] = enum_desc + + # Create Descriptors for nested types + nested_types = {} + for nested_proto in desc_proto.nested_type: + full_name = '.'.join(full_message_name + [nested_proto.name]) + # Nested types are just those defined inside of the message, not all types + # used by fields in the message, so no loops are possible here. 
+ nested_desc = MakeDescriptor(nested_proto, + package='.'.join(full_message_name), + build_file_if_cpp=False, + syntax=syntax) + nested_types[full_name] = nested_desc + + fields = [] + for field_proto in desc_proto.field: + full_name = '.'.join(full_message_name + [field_proto.name]) + enum_desc = None + nested_desc = None + if field_proto.json_name: + json_name = field_proto.json_name + else: + json_name = None + if field_proto.HasField('type_name'): + type_name = field_proto.type_name + full_type_name = '.'.join(full_message_name + + [type_name[type_name.rfind('.')+1:]]) + if full_type_name in nested_types: + nested_desc = nested_types[full_type_name] + elif full_type_name in enum_types: + enum_desc = enum_types[full_type_name] + # Else type_name references a non-local type, which isn't implemented + field = FieldDescriptor( + field_proto.name, full_name, field_proto.number - 1, + field_proto.number, field_proto.type, + FieldDescriptor.ProtoTypeToCppProtoType(field_proto.type), + field_proto.label, None, nested_desc, enum_desc, None, False, None, + options=_OptionsOrNone(field_proto), has_default_value=False, + json_name=json_name, create_key=_internal_create_key) + fields.append(field) + + desc_name = '.'.join(full_message_name) + return Descriptor(desc_proto.name, desc_name, None, None, fields, + list(nested_types.values()), list(enum_types.values()), [], + options=_OptionsOrNone(desc_proto), + create_key=_internal_create_key) diff --git a/openpype/hosts/nuke/vendor/google/protobuf/descriptor_database.py b/openpype/hosts/nuke/vendor/google/protobuf/descriptor_database.py new file mode 100644 index 0000000000..073eddc711 --- /dev/null +++ b/openpype/hosts/nuke/vendor/google/protobuf/descriptor_database.py @@ -0,0 +1,177 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. +# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
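descriptor.py above is the pure-Python side of the descriptor API: every generated message class exposes a Descriptor through its DESCRIPTOR attribute, its fields are FieldDescriptor objects carrying the TYPE_*, CPPTYPE_* and LABEL_* constants defined earlier, and MakeDescriptor builds a Descriptor directly from a DescriptorProto. A short introspection example against the Any message vendored above:

    from google.protobuf import any_pb2
    from google.protobuf.descriptor import FieldDescriptor

    desc = any_pb2.Any.DESCRIPTOR
    print(desc.full_name)  # google.protobuf.Any
    for field in desc.fields:
        print(field.name, field.number,
              field.type == FieldDescriptor.TYPE_STRING,
              FieldDescriptor.ProtoTypeToCppProtoType(field.type))
    # type_url 1 True 9   (TYPE_STRING maps to CPPTYPE_STRING)
    # value 2 False 9     (TYPE_BYTES also maps to CPPTYPE_STRING)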
+ +"""Provides a container for DescriptorProtos.""" + +__author__ = 'matthewtoia@google.com (Matt Toia)' + +import warnings + + +class Error(Exception): + pass + + +class DescriptorDatabaseConflictingDefinitionError(Error): + """Raised when a proto is added with the same name & different descriptor.""" + + +class DescriptorDatabase(object): + """A container accepting FileDescriptorProtos and maps DescriptorProtos.""" + + def __init__(self): + self._file_desc_protos_by_file = {} + self._file_desc_protos_by_symbol = {} + + def Add(self, file_desc_proto): + """Adds the FileDescriptorProto and its types to this database. + + Args: + file_desc_proto: The FileDescriptorProto to add. + Raises: + DescriptorDatabaseConflictingDefinitionError: if an attempt is made to + add a proto with the same name but different definition than an + existing proto in the database. + """ + proto_name = file_desc_proto.name + if proto_name not in self._file_desc_protos_by_file: + self._file_desc_protos_by_file[proto_name] = file_desc_proto + elif self._file_desc_protos_by_file[proto_name] != file_desc_proto: + raise DescriptorDatabaseConflictingDefinitionError( + '%s already added, but with different descriptor.' % proto_name) + else: + return + + # Add all the top-level descriptors to the index. + package = file_desc_proto.package + for message in file_desc_proto.message_type: + for name in _ExtractSymbols(message, package): + self._AddSymbol(name, file_desc_proto) + for enum in file_desc_proto.enum_type: + self._AddSymbol(('.'.join((package, enum.name))), file_desc_proto) + for enum_value in enum.value: + self._file_desc_protos_by_symbol[ + '.'.join((package, enum_value.name))] = file_desc_proto + for extension in file_desc_proto.extension: + self._AddSymbol(('.'.join((package, extension.name))), file_desc_proto) + for service in file_desc_proto.service: + self._AddSymbol(('.'.join((package, service.name))), file_desc_proto) + + def FindFileByName(self, name): + """Finds the file descriptor proto by file name. + + Typically the file name is a relative path ending to a .proto file. The + proto with the given name will have to have been added to this database + using the Add method or else an error will be raised. + + Args: + name: The file name to find. + + Returns: + The file descriptor proto matching the name. + + Raises: + KeyError if no file by the given name was added. + """ + + return self._file_desc_protos_by_file[name] + + def FindFileContainingSymbol(self, symbol): + """Finds the file descriptor proto containing the specified symbol. + + The symbol should be a fully qualified name including the file descriptor's + package and any containing messages. Some examples: + + 'some.package.name.Message' + 'some.package.name.Message.NestedEnum' + 'some.package.name.Message.some_field' + + The file descriptor proto containing the specified symbol must be added to + this database using the Add method or else an error will be raised. + + Args: + symbol: The fully qualified symbol name. + + Returns: + The file descriptor proto containing the symbol. + + Raises: + KeyError if no file contains the specified symbol. + """ + try: + return self._file_desc_protos_by_symbol[symbol] + except KeyError: + # Fields, enum values, and nested extensions are not in + # _file_desc_protos_by_symbol. Try to find the top level + # descriptor. Non-existent nested symbol under a valid top level + # descriptor can also be found. The behavior is the same with + # protobuf C++. 
+ top_level, _, _ = symbol.rpartition('.') + try: + return self._file_desc_protos_by_symbol[top_level] + except KeyError: + # Raise the original symbol as a KeyError for better diagnostics. + raise KeyError(symbol) + + def FindFileContainingExtension(self, extendee_name, extension_number): + # TODO(jieluo): implement this API. + return None + + def FindAllExtensionNumbers(self, extendee_name): + # TODO(jieluo): implement this API. + return [] + + def _AddSymbol(self, name, file_desc_proto): + if name in self._file_desc_protos_by_symbol: + warn_msg = ('Conflict register for file "' + file_desc_proto.name + + '": ' + name + + ' is already defined in file "' + + self._file_desc_protos_by_symbol[name].name + '"') + warnings.warn(warn_msg, RuntimeWarning) + self._file_desc_protos_by_symbol[name] = file_desc_proto + + +def _ExtractSymbols(desc_proto, package): + """Pulls out all the symbols from a descriptor proto. + + Args: + desc_proto: The proto to extract symbols from. + package: The package containing the descriptor type. + + Yields: + The fully qualified name found in the descriptor. + """ + message_name = package + '.' + desc_proto.name if package else desc_proto.name + yield message_name + for nested_type in desc_proto.nested_type: + for symbol in _ExtractSymbols(nested_type, message_name): + yield symbol + for enum_type in desc_proto.enum_type: + yield '.'.join((message_name, enum_type.name)) diff --git a/openpype/hosts/nuke/vendor/google/protobuf/descriptor_pb2.py b/openpype/hosts/nuke/vendor/google/protobuf/descriptor_pb2.py new file mode 100644 index 0000000000..f570386432 --- /dev/null +++ b/openpype/hosts/nuke/vendor/google/protobuf/descriptor_pb2.py @@ -0,0 +1,1925 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
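descriptor_database.py above is a plain in-memory container: Add() indexes a FileDescriptorProto both by file name and by every top-level symbol it defines, and the Find* methods look those entries back up; a DescriptorPool is typically built on top of it to serve dynamic protos. A minimal usage sketch with a made-up proto definition:

    from google.protobuf import descriptor_pb2
    from google.protobuf import descriptor_database

    file_proto = descriptor_pb2.FileDescriptorProto()
    file_proto.name = "example/render.proto"
    file_proto.package = "example"
    file_proto.message_type.add().name = "RenderJob"

    db = descriptor_database.DescriptorDatabase()
    db.Add(file_proto)

    print(db.FindFileByName("example/render.proto").package)      # example
    print(db.FindFileContainingSymbol("example.RenderJob").name)  # example/render.proto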
+# source: google/protobuf/descriptor.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +if _descriptor._USE_C_DESCRIPTORS == False: + DESCRIPTOR = _descriptor.FileDescriptor( + name='google/protobuf/descriptor.proto', + package='google.protobuf', + syntax='proto2', + serialized_options=None, + create_key=_descriptor._internal_create_key, + serialized_pb=b'\n google/protobuf/descriptor.proto\x12\x0fgoogle.protobuf\"G\n\x11\x46ileDescriptorSet\x12\x32\n\x04\x66ile\x18\x01 \x03(\x0b\x32$.google.protobuf.FileDescriptorProto\"\xdb\x03\n\x13\x46ileDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0f\n\x07package\x18\x02 \x01(\t\x12\x12\n\ndependency\x18\x03 \x03(\t\x12\x19\n\x11public_dependency\x18\n \x03(\x05\x12\x17\n\x0fweak_dependency\x18\x0b \x03(\x05\x12\x36\n\x0cmessage_type\x18\x04 \x03(\x0b\x32 .google.protobuf.DescriptorProto\x12\x37\n\tenum_type\x18\x05 \x03(\x0b\x32$.google.protobuf.EnumDescriptorProto\x12\x38\n\x07service\x18\x06 \x03(\x0b\x32\'.google.protobuf.ServiceDescriptorProto\x12\x38\n\textension\x18\x07 \x03(\x0b\x32%.google.protobuf.FieldDescriptorProto\x12-\n\x07options\x18\x08 \x01(\x0b\x32\x1c.google.protobuf.FileOptions\x12\x39\n\x10source_code_info\x18\t \x01(\x0b\x32\x1f.google.protobuf.SourceCodeInfo\x12\x0e\n\x06syntax\x18\x0c \x01(\t\"\xa9\x05\n\x0f\x44\x65scriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x34\n\x05\x66ield\x18\x02 \x03(\x0b\x32%.google.protobuf.FieldDescriptorProto\x12\x38\n\textension\x18\x06 \x03(\x0b\x32%.google.protobuf.FieldDescriptorProto\x12\x35\n\x0bnested_type\x18\x03 \x03(\x0b\x32 .google.protobuf.DescriptorProto\x12\x37\n\tenum_type\x18\x04 \x03(\x0b\x32$.google.protobuf.EnumDescriptorProto\x12H\n\x0f\x65xtension_range\x18\x05 \x03(\x0b\x32/.google.protobuf.DescriptorProto.ExtensionRange\x12\x39\n\noneof_decl\x18\x08 \x03(\x0b\x32%.google.protobuf.OneofDescriptorProto\x12\x30\n\x07options\x18\x07 \x01(\x0b\x32\x1f.google.protobuf.MessageOptions\x12\x46\n\x0ereserved_range\x18\t \x03(\x0b\x32..google.protobuf.DescriptorProto.ReservedRange\x12\x15\n\rreserved_name\x18\n \x03(\t\x1a\x65\n\x0e\x45xtensionRange\x12\r\n\x05start\x18\x01 \x01(\x05\x12\x0b\n\x03\x65nd\x18\x02 \x01(\x05\x12\x37\n\x07options\x18\x03 \x01(\x0b\x32&.google.protobuf.ExtensionRangeOptions\x1a+\n\rReservedRange\x12\r\n\x05start\x18\x01 \x01(\x05\x12\x0b\n\x03\x65nd\x18\x02 \x01(\x05\"g\n\x15\x45xtensionRangeOptions\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\xd5\x05\n\x14\x46ieldDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0e\n\x06number\x18\x03 \x01(\x05\x12:\n\x05label\x18\x04 \x01(\x0e\x32+.google.protobuf.FieldDescriptorProto.Label\x12\x38\n\x04type\x18\x05 \x01(\x0e\x32*.google.protobuf.FieldDescriptorProto.Type\x12\x11\n\ttype_name\x18\x06 \x01(\t\x12\x10\n\x08\x65xtendee\x18\x02 \x01(\t\x12\x15\n\rdefault_value\x18\x07 \x01(\t\x12\x13\n\x0boneof_index\x18\t \x01(\x05\x12\x11\n\tjson_name\x18\n \x01(\t\x12.\n\x07options\x18\x08 \x01(\x0b\x32\x1d.google.protobuf.FieldOptions\x12\x17\n\x0fproto3_optional\x18\x11 
\x01(\x08\"\xb6\x02\n\x04Type\x12\x0f\n\x0bTYPE_DOUBLE\x10\x01\x12\x0e\n\nTYPE_FLOAT\x10\x02\x12\x0e\n\nTYPE_INT64\x10\x03\x12\x0f\n\x0bTYPE_UINT64\x10\x04\x12\x0e\n\nTYPE_INT32\x10\x05\x12\x10\n\x0cTYPE_FIXED64\x10\x06\x12\x10\n\x0cTYPE_FIXED32\x10\x07\x12\r\n\tTYPE_BOOL\x10\x08\x12\x0f\n\x0bTYPE_STRING\x10\t\x12\x0e\n\nTYPE_GROUP\x10\n\x12\x10\n\x0cTYPE_MESSAGE\x10\x0b\x12\x0e\n\nTYPE_BYTES\x10\x0c\x12\x0f\n\x0bTYPE_UINT32\x10\r\x12\r\n\tTYPE_ENUM\x10\x0e\x12\x11\n\rTYPE_SFIXED32\x10\x0f\x12\x11\n\rTYPE_SFIXED64\x10\x10\x12\x0f\n\x0bTYPE_SINT32\x10\x11\x12\x0f\n\x0bTYPE_SINT64\x10\x12\"C\n\x05Label\x12\x12\n\x0eLABEL_OPTIONAL\x10\x01\x12\x12\n\x0eLABEL_REQUIRED\x10\x02\x12\x12\n\x0eLABEL_REPEATED\x10\x03\"T\n\x14OneofDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12.\n\x07options\x18\x02 \x01(\x0b\x32\x1d.google.protobuf.OneofOptions\"\xa4\x02\n\x13\x45numDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x38\n\x05value\x18\x02 \x03(\x0b\x32).google.protobuf.EnumValueDescriptorProto\x12-\n\x07options\x18\x03 \x01(\x0b\x32\x1c.google.protobuf.EnumOptions\x12N\n\x0ereserved_range\x18\x04 \x03(\x0b\x32\x36.google.protobuf.EnumDescriptorProto.EnumReservedRange\x12\x15\n\rreserved_name\x18\x05 \x03(\t\x1a/\n\x11\x45numReservedRange\x12\r\n\x05start\x18\x01 \x01(\x05\x12\x0b\n\x03\x65nd\x18\x02 \x01(\x05\"l\n\x18\x45numValueDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0e\n\x06number\x18\x02 \x01(\x05\x12\x32\n\x07options\x18\x03 \x01(\x0b\x32!.google.protobuf.EnumValueOptions\"\x90\x01\n\x16ServiceDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x36\n\x06method\x18\x02 \x03(\x0b\x32&.google.protobuf.MethodDescriptorProto\x12\x30\n\x07options\x18\x03 \x01(\x0b\x32\x1f.google.protobuf.ServiceOptions\"\xc1\x01\n\x15MethodDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x12\n\ninput_type\x18\x02 \x01(\t\x12\x13\n\x0boutput_type\x18\x03 \x01(\t\x12/\n\x07options\x18\x04 \x01(\x0b\x32\x1e.google.protobuf.MethodOptions\x12\x1f\n\x10\x63lient_streaming\x18\x05 \x01(\x08:\x05\x66\x61lse\x12\x1f\n\x10server_streaming\x18\x06 \x01(\x08:\x05\x66\x61lse\"\xa5\x06\n\x0b\x46ileOptions\x12\x14\n\x0cjava_package\x18\x01 \x01(\t\x12\x1c\n\x14java_outer_classname\x18\x08 \x01(\t\x12\"\n\x13java_multiple_files\x18\n \x01(\x08:\x05\x66\x61lse\x12)\n\x1djava_generate_equals_and_hash\x18\x14 \x01(\x08\x42\x02\x18\x01\x12%\n\x16java_string_check_utf8\x18\x1b \x01(\x08:\x05\x66\x61lse\x12\x46\n\x0coptimize_for\x18\t \x01(\x0e\x32).google.protobuf.FileOptions.OptimizeMode:\x05SPEED\x12\x12\n\ngo_package\x18\x0b \x01(\t\x12\"\n\x13\x63\x63_generic_services\x18\x10 \x01(\x08:\x05\x66\x61lse\x12$\n\x15java_generic_services\x18\x11 \x01(\x08:\x05\x66\x61lse\x12\"\n\x13py_generic_services\x18\x12 \x01(\x08:\x05\x66\x61lse\x12#\n\x14php_generic_services\x18* \x01(\x08:\x05\x66\x61lse\x12\x19\n\ndeprecated\x18\x17 \x01(\x08:\x05\x66\x61lse\x12\x1e\n\x10\x63\x63_enable_arenas\x18\x1f \x01(\x08:\x04true\x12\x19\n\x11objc_class_prefix\x18$ \x01(\t\x12\x18\n\x10\x63sharp_namespace\x18% \x01(\t\x12\x14\n\x0cswift_prefix\x18\' \x01(\t\x12\x18\n\x10php_class_prefix\x18( \x01(\t\x12\x15\n\rphp_namespace\x18) \x01(\t\x12\x1e\n\x16php_metadata_namespace\x18, \x01(\t\x12\x14\n\x0cruby_package\x18- \x01(\t\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 
\x03(\x0b\x32$.google.protobuf.UninterpretedOption\":\n\x0cOptimizeMode\x12\t\n\x05SPEED\x10\x01\x12\r\n\tCODE_SIZE\x10\x02\x12\x10\n\x0cLITE_RUNTIME\x10\x03*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02J\x04\x08&\x10\'\"\x84\x02\n\x0eMessageOptions\x12&\n\x17message_set_wire_format\x18\x01 \x01(\x08:\x05\x66\x61lse\x12.\n\x1fno_standard_descriptor_accessor\x18\x02 \x01(\x08:\x05\x66\x61lse\x12\x19\n\ndeprecated\x18\x03 \x01(\x08:\x05\x66\x61lse\x12\x11\n\tmap_entry\x18\x07 \x01(\x08\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02J\x04\x08\x04\x10\x05J\x04\x08\x05\x10\x06J\x04\x08\x06\x10\x07J\x04\x08\x08\x10\tJ\x04\x08\t\x10\n\"\xbe\x03\n\x0c\x46ieldOptions\x12:\n\x05\x63type\x18\x01 \x01(\x0e\x32#.google.protobuf.FieldOptions.CType:\x06STRING\x12\x0e\n\x06packed\x18\x02 \x01(\x08\x12?\n\x06jstype\x18\x06 \x01(\x0e\x32$.google.protobuf.FieldOptions.JSType:\tJS_NORMAL\x12\x13\n\x04lazy\x18\x05 \x01(\x08:\x05\x66\x61lse\x12\x1e\n\x0funverified_lazy\x18\x0f \x01(\x08:\x05\x66\x61lse\x12\x19\n\ndeprecated\x18\x03 \x01(\x08:\x05\x66\x61lse\x12\x13\n\x04weak\x18\n \x01(\x08:\x05\x66\x61lse\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption\"/\n\x05\x43Type\x12\n\n\x06STRING\x10\x00\x12\x08\n\x04\x43ORD\x10\x01\x12\x10\n\x0cSTRING_PIECE\x10\x02\"5\n\x06JSType\x12\r\n\tJS_NORMAL\x10\x00\x12\r\n\tJS_STRING\x10\x01\x12\r\n\tJS_NUMBER\x10\x02*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02J\x04\x08\x04\x10\x05\"^\n\x0cOneofOptions\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\x93\x01\n\x0b\x45numOptions\x12\x13\n\x0b\x61llow_alias\x18\x02 \x01(\x08\x12\x19\n\ndeprecated\x18\x03 \x01(\x08:\x05\x66\x61lse\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02J\x04\x08\x05\x10\x06\"}\n\x10\x45numValueOptions\x12\x19\n\ndeprecated\x18\x01 \x01(\x08:\x05\x66\x61lse\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"{\n\x0eServiceOptions\x12\x19\n\ndeprecated\x18! \x01(\x08:\x05\x66\x61lse\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\xad\x02\n\rMethodOptions\x12\x19\n\ndeprecated\x18! 
\x01(\x08:\x05\x66\x61lse\x12_\n\x11idempotency_level\x18\" \x01(\x0e\x32/.google.protobuf.MethodOptions.IdempotencyLevel:\x13IDEMPOTENCY_UNKNOWN\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption\"P\n\x10IdempotencyLevel\x12\x17\n\x13IDEMPOTENCY_UNKNOWN\x10\x00\x12\x13\n\x0fNO_SIDE_EFFECTS\x10\x01\x12\x0e\n\nIDEMPOTENT\x10\x02*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\x9e\x02\n\x13UninterpretedOption\x12;\n\x04name\x18\x02 \x03(\x0b\x32-.google.protobuf.UninterpretedOption.NamePart\x12\x18\n\x10identifier_value\x18\x03 \x01(\t\x12\x1a\n\x12positive_int_value\x18\x04 \x01(\x04\x12\x1a\n\x12negative_int_value\x18\x05 \x01(\x03\x12\x14\n\x0c\x64ouble_value\x18\x06 \x01(\x01\x12\x14\n\x0cstring_value\x18\x07 \x01(\x0c\x12\x17\n\x0f\x61ggregate_value\x18\x08 \x01(\t\x1a\x33\n\x08NamePart\x12\x11\n\tname_part\x18\x01 \x02(\t\x12\x14\n\x0cis_extension\x18\x02 \x02(\x08\"\xd5\x01\n\x0eSourceCodeInfo\x12:\n\x08location\x18\x01 \x03(\x0b\x32(.google.protobuf.SourceCodeInfo.Location\x1a\x86\x01\n\x08Location\x12\x10\n\x04path\x18\x01 \x03(\x05\x42\x02\x10\x01\x12\x10\n\x04span\x18\x02 \x03(\x05\x42\x02\x10\x01\x12\x18\n\x10leading_comments\x18\x03 \x01(\t\x12\x19\n\x11trailing_comments\x18\x04 \x01(\t\x12!\n\x19leading_detached_comments\x18\x06 \x03(\t\"\xa7\x01\n\x11GeneratedCodeInfo\x12\x41\n\nannotation\x18\x01 \x03(\x0b\x32-.google.protobuf.GeneratedCodeInfo.Annotation\x1aO\n\nAnnotation\x12\x10\n\x04path\x18\x01 \x03(\x05\x42\x02\x10\x01\x12\x13\n\x0bsource_file\x18\x02 \x01(\t\x12\r\n\x05\x62\x65gin\x18\x03 \x01(\x05\x12\x0b\n\x03\x65nd\x18\x04 \x01(\x05\x42~\n\x13\x63om.google.protobufB\x10\x44\x65scriptorProtosH\x01Z-google.golang.org/protobuf/types/descriptorpb\xf8\x01\x01\xa2\x02\x03GPB\xaa\x02\x1aGoogle.Protobuf.Reflection' + ) +else: + DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n google/protobuf/descriptor.proto\x12\x0fgoogle.protobuf\"G\n\x11\x46ileDescriptorSet\x12\x32\n\x04\x66ile\x18\x01 \x03(\x0b\x32$.google.protobuf.FileDescriptorProto\"\xdb\x03\n\x13\x46ileDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0f\n\x07package\x18\x02 \x01(\t\x12\x12\n\ndependency\x18\x03 \x03(\t\x12\x19\n\x11public_dependency\x18\n \x03(\x05\x12\x17\n\x0fweak_dependency\x18\x0b \x03(\x05\x12\x36\n\x0cmessage_type\x18\x04 \x03(\x0b\x32 .google.protobuf.DescriptorProto\x12\x37\n\tenum_type\x18\x05 \x03(\x0b\x32$.google.protobuf.EnumDescriptorProto\x12\x38\n\x07service\x18\x06 \x03(\x0b\x32\'.google.protobuf.ServiceDescriptorProto\x12\x38\n\textension\x18\x07 \x03(\x0b\x32%.google.protobuf.FieldDescriptorProto\x12-\n\x07options\x18\x08 \x01(\x0b\x32\x1c.google.protobuf.FileOptions\x12\x39\n\x10source_code_info\x18\t \x01(\x0b\x32\x1f.google.protobuf.SourceCodeInfo\x12\x0e\n\x06syntax\x18\x0c \x01(\t\"\xa9\x05\n\x0f\x44\x65scriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x34\n\x05\x66ield\x18\x02 \x03(\x0b\x32%.google.protobuf.FieldDescriptorProto\x12\x38\n\textension\x18\x06 \x03(\x0b\x32%.google.protobuf.FieldDescriptorProto\x12\x35\n\x0bnested_type\x18\x03 \x03(\x0b\x32 .google.protobuf.DescriptorProto\x12\x37\n\tenum_type\x18\x04 \x03(\x0b\x32$.google.protobuf.EnumDescriptorProto\x12H\n\x0f\x65xtension_range\x18\x05 \x03(\x0b\x32/.google.protobuf.DescriptorProto.ExtensionRange\x12\x39\n\noneof_decl\x18\x08 \x03(\x0b\x32%.google.protobuf.OneofDescriptorProto\x12\x30\n\x07options\x18\x07 \x01(\x0b\x32\x1f.google.protobuf.MessageOptions\x12\x46\n\x0ereserved_range\x18\t 
\x03(\x0b\x32..google.protobuf.DescriptorProto.ReservedRange\x12\x15\n\rreserved_name\x18\n \x03(\t\x1a\x65\n\x0e\x45xtensionRange\x12\r\n\x05start\x18\x01 \x01(\x05\x12\x0b\n\x03\x65nd\x18\x02 \x01(\x05\x12\x37\n\x07options\x18\x03 \x01(\x0b\x32&.google.protobuf.ExtensionRangeOptions\x1a+\n\rReservedRange\x12\r\n\x05start\x18\x01 \x01(\x05\x12\x0b\n\x03\x65nd\x18\x02 \x01(\x05\"g\n\x15\x45xtensionRangeOptions\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\xd5\x05\n\x14\x46ieldDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0e\n\x06number\x18\x03 \x01(\x05\x12:\n\x05label\x18\x04 \x01(\x0e\x32+.google.protobuf.FieldDescriptorProto.Label\x12\x38\n\x04type\x18\x05 \x01(\x0e\x32*.google.protobuf.FieldDescriptorProto.Type\x12\x11\n\ttype_name\x18\x06 \x01(\t\x12\x10\n\x08\x65xtendee\x18\x02 \x01(\t\x12\x15\n\rdefault_value\x18\x07 \x01(\t\x12\x13\n\x0boneof_index\x18\t \x01(\x05\x12\x11\n\tjson_name\x18\n \x01(\t\x12.\n\x07options\x18\x08 \x01(\x0b\x32\x1d.google.protobuf.FieldOptions\x12\x17\n\x0fproto3_optional\x18\x11 \x01(\x08\"\xb6\x02\n\x04Type\x12\x0f\n\x0bTYPE_DOUBLE\x10\x01\x12\x0e\n\nTYPE_FLOAT\x10\x02\x12\x0e\n\nTYPE_INT64\x10\x03\x12\x0f\n\x0bTYPE_UINT64\x10\x04\x12\x0e\n\nTYPE_INT32\x10\x05\x12\x10\n\x0cTYPE_FIXED64\x10\x06\x12\x10\n\x0cTYPE_FIXED32\x10\x07\x12\r\n\tTYPE_BOOL\x10\x08\x12\x0f\n\x0bTYPE_STRING\x10\t\x12\x0e\n\nTYPE_GROUP\x10\n\x12\x10\n\x0cTYPE_MESSAGE\x10\x0b\x12\x0e\n\nTYPE_BYTES\x10\x0c\x12\x0f\n\x0bTYPE_UINT32\x10\r\x12\r\n\tTYPE_ENUM\x10\x0e\x12\x11\n\rTYPE_SFIXED32\x10\x0f\x12\x11\n\rTYPE_SFIXED64\x10\x10\x12\x0f\n\x0bTYPE_SINT32\x10\x11\x12\x0f\n\x0bTYPE_SINT64\x10\x12\"C\n\x05Label\x12\x12\n\x0eLABEL_OPTIONAL\x10\x01\x12\x12\n\x0eLABEL_REQUIRED\x10\x02\x12\x12\n\x0eLABEL_REPEATED\x10\x03\"T\n\x14OneofDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12.\n\x07options\x18\x02 \x01(\x0b\x32\x1d.google.protobuf.OneofOptions\"\xa4\x02\n\x13\x45numDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x38\n\x05value\x18\x02 \x03(\x0b\x32).google.protobuf.EnumValueDescriptorProto\x12-\n\x07options\x18\x03 \x01(\x0b\x32\x1c.google.protobuf.EnumOptions\x12N\n\x0ereserved_range\x18\x04 \x03(\x0b\x32\x36.google.protobuf.EnumDescriptorProto.EnumReservedRange\x12\x15\n\rreserved_name\x18\x05 \x03(\t\x1a/\n\x11\x45numReservedRange\x12\r\n\x05start\x18\x01 \x01(\x05\x12\x0b\n\x03\x65nd\x18\x02 \x01(\x05\"l\n\x18\x45numValueDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0e\n\x06number\x18\x02 \x01(\x05\x12\x32\n\x07options\x18\x03 \x01(\x0b\x32!.google.protobuf.EnumValueOptions\"\x90\x01\n\x16ServiceDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x36\n\x06method\x18\x02 \x03(\x0b\x32&.google.protobuf.MethodDescriptorProto\x12\x30\n\x07options\x18\x03 \x01(\x0b\x32\x1f.google.protobuf.ServiceOptions\"\xc1\x01\n\x15MethodDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x12\n\ninput_type\x18\x02 \x01(\t\x12\x13\n\x0boutput_type\x18\x03 \x01(\t\x12/\n\x07options\x18\x04 \x01(\x0b\x32\x1e.google.protobuf.MethodOptions\x12\x1f\n\x10\x63lient_streaming\x18\x05 \x01(\x08:\x05\x66\x61lse\x12\x1f\n\x10server_streaming\x18\x06 \x01(\x08:\x05\x66\x61lse\"\xa5\x06\n\x0b\x46ileOptions\x12\x14\n\x0cjava_package\x18\x01 \x01(\t\x12\x1c\n\x14java_outer_classname\x18\x08 \x01(\t\x12\"\n\x13java_multiple_files\x18\n \x01(\x08:\x05\x66\x61lse\x12)\n\x1djava_generate_equals_and_hash\x18\x14 \x01(\x08\x42\x02\x18\x01\x12%\n\x16java_string_check_utf8\x18\x1b 
\x01(\x08:\x05\x66\x61lse\x12\x46\n\x0coptimize_for\x18\t \x01(\x0e\x32).google.protobuf.FileOptions.OptimizeMode:\x05SPEED\x12\x12\n\ngo_package\x18\x0b \x01(\t\x12\"\n\x13\x63\x63_generic_services\x18\x10 \x01(\x08:\x05\x66\x61lse\x12$\n\x15java_generic_services\x18\x11 \x01(\x08:\x05\x66\x61lse\x12\"\n\x13py_generic_services\x18\x12 \x01(\x08:\x05\x66\x61lse\x12#\n\x14php_generic_services\x18* \x01(\x08:\x05\x66\x61lse\x12\x19\n\ndeprecated\x18\x17 \x01(\x08:\x05\x66\x61lse\x12\x1e\n\x10\x63\x63_enable_arenas\x18\x1f \x01(\x08:\x04true\x12\x19\n\x11objc_class_prefix\x18$ \x01(\t\x12\x18\n\x10\x63sharp_namespace\x18% \x01(\t\x12\x14\n\x0cswift_prefix\x18\' \x01(\t\x12\x18\n\x10php_class_prefix\x18( \x01(\t\x12\x15\n\rphp_namespace\x18) \x01(\t\x12\x1e\n\x16php_metadata_namespace\x18, \x01(\t\x12\x14\n\x0cruby_package\x18- \x01(\t\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption\":\n\x0cOptimizeMode\x12\t\n\x05SPEED\x10\x01\x12\r\n\tCODE_SIZE\x10\x02\x12\x10\n\x0cLITE_RUNTIME\x10\x03*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02J\x04\x08&\x10\'\"\x84\x02\n\x0eMessageOptions\x12&\n\x17message_set_wire_format\x18\x01 \x01(\x08:\x05\x66\x61lse\x12.\n\x1fno_standard_descriptor_accessor\x18\x02 \x01(\x08:\x05\x66\x61lse\x12\x19\n\ndeprecated\x18\x03 \x01(\x08:\x05\x66\x61lse\x12\x11\n\tmap_entry\x18\x07 \x01(\x08\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02J\x04\x08\x04\x10\x05J\x04\x08\x05\x10\x06J\x04\x08\x06\x10\x07J\x04\x08\x08\x10\tJ\x04\x08\t\x10\n\"\xbe\x03\n\x0c\x46ieldOptions\x12:\n\x05\x63type\x18\x01 \x01(\x0e\x32#.google.protobuf.FieldOptions.CType:\x06STRING\x12\x0e\n\x06packed\x18\x02 \x01(\x08\x12?\n\x06jstype\x18\x06 \x01(\x0e\x32$.google.protobuf.FieldOptions.JSType:\tJS_NORMAL\x12\x13\n\x04lazy\x18\x05 \x01(\x08:\x05\x66\x61lse\x12\x1e\n\x0funverified_lazy\x18\x0f \x01(\x08:\x05\x66\x61lse\x12\x19\n\ndeprecated\x18\x03 \x01(\x08:\x05\x66\x61lse\x12\x13\n\x04weak\x18\n \x01(\x08:\x05\x66\x61lse\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption\"/\n\x05\x43Type\x12\n\n\x06STRING\x10\x00\x12\x08\n\x04\x43ORD\x10\x01\x12\x10\n\x0cSTRING_PIECE\x10\x02\"5\n\x06JSType\x12\r\n\tJS_NORMAL\x10\x00\x12\r\n\tJS_STRING\x10\x01\x12\r\n\tJS_NUMBER\x10\x02*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02J\x04\x08\x04\x10\x05\"^\n\x0cOneofOptions\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\x93\x01\n\x0b\x45numOptions\x12\x13\n\x0b\x61llow_alias\x18\x02 \x01(\x08\x12\x19\n\ndeprecated\x18\x03 \x01(\x08:\x05\x66\x61lse\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02J\x04\x08\x05\x10\x06\"}\n\x10\x45numValueOptions\x12\x19\n\ndeprecated\x18\x01 \x01(\x08:\x05\x66\x61lse\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"{\n\x0eServiceOptions\x12\x19\n\ndeprecated\x18! \x01(\x08:\x05\x66\x61lse\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\xad\x02\n\rMethodOptions\x12\x19\n\ndeprecated\x18! 
\x01(\x08:\x05\x66\x61lse\x12_\n\x11idempotency_level\x18\" \x01(\x0e\x32/.google.protobuf.MethodOptions.IdempotencyLevel:\x13IDEMPOTENCY_UNKNOWN\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption\"P\n\x10IdempotencyLevel\x12\x17\n\x13IDEMPOTENCY_UNKNOWN\x10\x00\x12\x13\n\x0fNO_SIDE_EFFECTS\x10\x01\x12\x0e\n\nIDEMPOTENT\x10\x02*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\x9e\x02\n\x13UninterpretedOption\x12;\n\x04name\x18\x02 \x03(\x0b\x32-.google.protobuf.UninterpretedOption.NamePart\x12\x18\n\x10identifier_value\x18\x03 \x01(\t\x12\x1a\n\x12positive_int_value\x18\x04 \x01(\x04\x12\x1a\n\x12negative_int_value\x18\x05 \x01(\x03\x12\x14\n\x0c\x64ouble_value\x18\x06 \x01(\x01\x12\x14\n\x0cstring_value\x18\x07 \x01(\x0c\x12\x17\n\x0f\x61ggregate_value\x18\x08 \x01(\t\x1a\x33\n\x08NamePart\x12\x11\n\tname_part\x18\x01 \x02(\t\x12\x14\n\x0cis_extension\x18\x02 \x02(\x08\"\xd5\x01\n\x0eSourceCodeInfo\x12:\n\x08location\x18\x01 \x03(\x0b\x32(.google.protobuf.SourceCodeInfo.Location\x1a\x86\x01\n\x08Location\x12\x10\n\x04path\x18\x01 \x03(\x05\x42\x02\x10\x01\x12\x10\n\x04span\x18\x02 \x03(\x05\x42\x02\x10\x01\x12\x18\n\x10leading_comments\x18\x03 \x01(\t\x12\x19\n\x11trailing_comments\x18\x04 \x01(\t\x12!\n\x19leading_detached_comments\x18\x06 \x03(\t\"\xa7\x01\n\x11GeneratedCodeInfo\x12\x41\n\nannotation\x18\x01 \x03(\x0b\x32-.google.protobuf.GeneratedCodeInfo.Annotation\x1aO\n\nAnnotation\x12\x10\n\x04path\x18\x01 \x03(\x05\x42\x02\x10\x01\x12\x13\n\x0bsource_file\x18\x02 \x01(\t\x12\r\n\x05\x62\x65gin\x18\x03 \x01(\x05\x12\x0b\n\x03\x65nd\x18\x04 \x01(\x05\x42~\n\x13\x63om.google.protobufB\x10\x44\x65scriptorProtosH\x01Z-google.golang.org/protobuf/types/descriptorpb\xf8\x01\x01\xa2\x02\x03GPB\xaa\x02\x1aGoogle.Protobuf.Reflection') + +if _descriptor._USE_C_DESCRIPTORS == False: + _FIELDDESCRIPTORPROTO_TYPE = _descriptor.EnumDescriptor( + name='Type', + full_name='google.protobuf.FieldDescriptorProto.Type', + filename=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + values=[ + _descriptor.EnumValueDescriptor( + name='TYPE_DOUBLE', index=0, number=1, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='TYPE_FLOAT', index=1, number=2, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='TYPE_INT64', index=2, number=3, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='TYPE_UINT64', index=3, number=4, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='TYPE_INT32', index=4, number=5, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='TYPE_FIXED64', index=5, number=6, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='TYPE_FIXED32', index=6, number=7, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='TYPE_BOOL', index=7, number=8, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='TYPE_STRING', index=8, number=9, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + 
_descriptor.EnumValueDescriptor( + name='TYPE_GROUP', index=9, number=10, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='TYPE_MESSAGE', index=10, number=11, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='TYPE_BYTES', index=11, number=12, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='TYPE_UINT32', index=12, number=13, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='TYPE_ENUM', index=13, number=14, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='TYPE_SFIXED32', index=14, number=15, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='TYPE_SFIXED64', index=15, number=16, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='TYPE_SINT32', index=16, number=17, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='TYPE_SINT64', index=17, number=18, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + ], + containing_type=None, + serialized_options=None, + ) + _sym_db.RegisterEnumDescriptor(_FIELDDESCRIPTORPROTO_TYPE) + + _FIELDDESCRIPTORPROTO_LABEL = _descriptor.EnumDescriptor( + name='Label', + full_name='google.protobuf.FieldDescriptorProto.Label', + filename=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + values=[ + _descriptor.EnumValueDescriptor( + name='LABEL_OPTIONAL', index=0, number=1, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='LABEL_REQUIRED', index=1, number=2, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='LABEL_REPEATED', index=2, number=3, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + ], + containing_type=None, + serialized_options=None, + ) + _sym_db.RegisterEnumDescriptor(_FIELDDESCRIPTORPROTO_LABEL) + + _FILEOPTIONS_OPTIMIZEMODE = _descriptor.EnumDescriptor( + name='OptimizeMode', + full_name='google.protobuf.FileOptions.OptimizeMode', + filename=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + values=[ + _descriptor.EnumValueDescriptor( + name='SPEED', index=0, number=1, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='CODE_SIZE', index=1, number=2, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='LITE_RUNTIME', index=2, number=3, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + ], + containing_type=None, + serialized_options=None, + ) + _sym_db.RegisterEnumDescriptor(_FILEOPTIONS_OPTIMIZEMODE) + + _FIELDOPTIONS_CTYPE = _descriptor.EnumDescriptor( + name='CType', + full_name='google.protobuf.FieldOptions.CType', + filename=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + values=[ + _descriptor.EnumValueDescriptor( + 
name='STRING', index=0, number=0, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='CORD', index=1, number=1, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='STRING_PIECE', index=2, number=2, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + ], + containing_type=None, + serialized_options=None, + ) + _sym_db.RegisterEnumDescriptor(_FIELDOPTIONS_CTYPE) + + _FIELDOPTIONS_JSTYPE = _descriptor.EnumDescriptor( + name='JSType', + full_name='google.protobuf.FieldOptions.JSType', + filename=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + values=[ + _descriptor.EnumValueDescriptor( + name='JS_NORMAL', index=0, number=0, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='JS_STRING', index=1, number=1, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='JS_NUMBER', index=2, number=2, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + ], + containing_type=None, + serialized_options=None, + ) + _sym_db.RegisterEnumDescriptor(_FIELDOPTIONS_JSTYPE) + + _METHODOPTIONS_IDEMPOTENCYLEVEL = _descriptor.EnumDescriptor( + name='IdempotencyLevel', + full_name='google.protobuf.MethodOptions.IdempotencyLevel', + filename=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + values=[ + _descriptor.EnumValueDescriptor( + name='IDEMPOTENCY_UNKNOWN', index=0, number=0, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='NO_SIDE_EFFECTS', index=1, number=1, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='IDEMPOTENT', index=2, number=2, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + ], + containing_type=None, + serialized_options=None, + ) + _sym_db.RegisterEnumDescriptor(_METHODOPTIONS_IDEMPOTENCYLEVEL) + + + _FILEDESCRIPTORSET = _descriptor.Descriptor( + name='FileDescriptorSet', + full_name='google.protobuf.FileDescriptorSet', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='file', full_name='google.protobuf.FileDescriptorSet.file', index=0, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + ) + + + _FILEDESCRIPTORPROTO = _descriptor.Descriptor( + name='FileDescriptorProto', + full_name='google.protobuf.FileDescriptorProto', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='google.protobuf.FileDescriptorProto.name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, 
enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='package', full_name='google.protobuf.FileDescriptorProto.package', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='dependency', full_name='google.protobuf.FileDescriptorProto.dependency', index=2, + number=3, type=9, cpp_type=9, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='public_dependency', full_name='google.protobuf.FileDescriptorProto.public_dependency', index=3, + number=10, type=5, cpp_type=1, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='weak_dependency', full_name='google.protobuf.FileDescriptorProto.weak_dependency', index=4, + number=11, type=5, cpp_type=1, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='message_type', full_name='google.protobuf.FileDescriptorProto.message_type', index=5, + number=4, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='enum_type', full_name='google.protobuf.FileDescriptorProto.enum_type', index=6, + number=5, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='service', full_name='google.protobuf.FileDescriptorProto.service', index=7, + number=6, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='extension', full_name='google.protobuf.FileDescriptorProto.extension', index=8, + number=7, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='options', full_name='google.protobuf.FileDescriptorProto.options', index=9, + number=8, type=11, 
cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='source_code_info', full_name='google.protobuf.FileDescriptorProto.source_code_info', index=10, + number=9, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='syntax', full_name='google.protobuf.FileDescriptorProto.syntax', index=11, + number=12, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + ) + + + _DESCRIPTORPROTO_EXTENSIONRANGE = _descriptor.Descriptor( + name='ExtensionRange', + full_name='google.protobuf.DescriptorProto.ExtensionRange', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='start', full_name='google.protobuf.DescriptorProto.ExtensionRange.start', index=0, + number=1, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='end', full_name='google.protobuf.DescriptorProto.ExtensionRange.end', index=1, + number=2, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='options', full_name='google.protobuf.DescriptorProto.ExtensionRange.options', index=2, + number=3, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + ) + + _DESCRIPTORPROTO_RESERVEDRANGE = _descriptor.Descriptor( + name='ReservedRange', + full_name='google.protobuf.DescriptorProto.ReservedRange', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='start', full_name='google.protobuf.DescriptorProto.ReservedRange.start', index=0, + number=1, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, 
create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='end', full_name='google.protobuf.DescriptorProto.ReservedRange.end', index=1, + number=2, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + ) + + _DESCRIPTORPROTO = _descriptor.Descriptor( + name='DescriptorProto', + full_name='google.protobuf.DescriptorProto', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='google.protobuf.DescriptorProto.name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='field', full_name='google.protobuf.DescriptorProto.field', index=1, + number=2, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='extension', full_name='google.protobuf.DescriptorProto.extension', index=2, + number=6, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='nested_type', full_name='google.protobuf.DescriptorProto.nested_type', index=3, + number=3, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='enum_type', full_name='google.protobuf.DescriptorProto.enum_type', index=4, + number=4, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='extension_range', full_name='google.protobuf.DescriptorProto.extension_range', index=5, + number=5, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='oneof_decl', full_name='google.protobuf.DescriptorProto.oneof_decl', index=6, + number=8, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + 
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='options', full_name='google.protobuf.DescriptorProto.options', index=7, + number=7, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='reserved_range', full_name='google.protobuf.DescriptorProto.reserved_range', index=8, + number=9, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='reserved_name', full_name='google.protobuf.DescriptorProto.reserved_name', index=9, + number=10, type=9, cpp_type=9, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[_DESCRIPTORPROTO_EXTENSIONRANGE, _DESCRIPTORPROTO_RESERVEDRANGE, ], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + ) + + + _EXTENSIONRANGEOPTIONS = _descriptor.Descriptor( + name='ExtensionRangeOptions', + full_name='google.protobuf.ExtensionRangeOptions', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='uninterpreted_option', full_name='google.protobuf.ExtensionRangeOptions.uninterpreted_option', index=0, + number=999, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=True, + syntax='proto2', + extension_ranges=[(1000, 536870912), ], + oneofs=[ + ], + ) + + + _FIELDDESCRIPTORPROTO = _descriptor.Descriptor( + name='FieldDescriptorProto', + full_name='google.protobuf.FieldDescriptorProto', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='google.protobuf.FieldDescriptorProto.name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='number', full_name='google.protobuf.FieldDescriptorProto.number', index=1, + number=3, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='label', 
full_name='google.protobuf.FieldDescriptorProto.label', index=2, + number=4, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=1, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='type', full_name='google.protobuf.FieldDescriptorProto.type', index=3, + number=5, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=1, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='type_name', full_name='google.protobuf.FieldDescriptorProto.type_name', index=4, + number=6, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='extendee', full_name='google.protobuf.FieldDescriptorProto.extendee', index=5, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='default_value', full_name='google.protobuf.FieldDescriptorProto.default_value', index=6, + number=7, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='oneof_index', full_name='google.protobuf.FieldDescriptorProto.oneof_index', index=7, + number=9, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='json_name', full_name='google.protobuf.FieldDescriptorProto.json_name', index=8, + number=10, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='options', full_name='google.protobuf.FieldDescriptorProto.options', index=9, + number=8, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='proto3_optional', full_name='google.protobuf.FieldDescriptorProto.proto3_optional', index=10, + number=17, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + 
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + _FIELDDESCRIPTORPROTO_TYPE, + _FIELDDESCRIPTORPROTO_LABEL, + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + ) + + + _ONEOFDESCRIPTORPROTO = _descriptor.Descriptor( + name='OneofDescriptorProto', + full_name='google.protobuf.OneofDescriptorProto', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='google.protobuf.OneofDescriptorProto.name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='options', full_name='google.protobuf.OneofDescriptorProto.options', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + ) + + + _ENUMDESCRIPTORPROTO_ENUMRESERVEDRANGE = _descriptor.Descriptor( + name='EnumReservedRange', + full_name='google.protobuf.EnumDescriptorProto.EnumReservedRange', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='start', full_name='google.protobuf.EnumDescriptorProto.EnumReservedRange.start', index=0, + number=1, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='end', full_name='google.protobuf.EnumDescriptorProto.EnumReservedRange.end', index=1, + number=2, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + ) + + _ENUMDESCRIPTORPROTO = _descriptor.Descriptor( + name='EnumDescriptorProto', + full_name='google.protobuf.EnumDescriptorProto', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='google.protobuf.EnumDescriptorProto.name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='value', 
full_name='google.protobuf.EnumDescriptorProto.value', index=1, + number=2, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='options', full_name='google.protobuf.EnumDescriptorProto.options', index=2, + number=3, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='reserved_range', full_name='google.protobuf.EnumDescriptorProto.reserved_range', index=3, + number=4, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='reserved_name', full_name='google.protobuf.EnumDescriptorProto.reserved_name', index=4, + number=5, type=9, cpp_type=9, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[_ENUMDESCRIPTORPROTO_ENUMRESERVEDRANGE, ], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + ) + + + _ENUMVALUEDESCRIPTORPROTO = _descriptor.Descriptor( + name='EnumValueDescriptorProto', + full_name='google.protobuf.EnumValueDescriptorProto', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='google.protobuf.EnumValueDescriptorProto.name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='number', full_name='google.protobuf.EnumValueDescriptorProto.number', index=1, + number=2, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='options', full_name='google.protobuf.EnumValueDescriptorProto.options', index=2, + number=3, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + ) + + + _SERVICEDESCRIPTORPROTO = _descriptor.Descriptor( + name='ServiceDescriptorProto', + 
full_name='google.protobuf.ServiceDescriptorProto', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='google.protobuf.ServiceDescriptorProto.name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='method', full_name='google.protobuf.ServiceDescriptorProto.method', index=1, + number=2, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='options', full_name='google.protobuf.ServiceDescriptorProto.options', index=2, + number=3, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + ) + + + _METHODDESCRIPTORPROTO = _descriptor.Descriptor( + name='MethodDescriptorProto', + full_name='google.protobuf.MethodDescriptorProto', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='google.protobuf.MethodDescriptorProto.name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='input_type', full_name='google.protobuf.MethodDescriptorProto.input_type', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='output_type', full_name='google.protobuf.MethodDescriptorProto.output_type', index=2, + number=3, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='options', full_name='google.protobuf.MethodDescriptorProto.options', index=3, + number=4, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='client_streaming', 
full_name='google.protobuf.MethodDescriptorProto.client_streaming', index=4, + number=5, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='server_streaming', full_name='google.protobuf.MethodDescriptorProto.server_streaming', index=5, + number=6, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + ) + + + _FILEOPTIONS = _descriptor.Descriptor( + name='FileOptions', + full_name='google.protobuf.FileOptions', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='java_package', full_name='google.protobuf.FileOptions.java_package', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='java_outer_classname', full_name='google.protobuf.FileOptions.java_outer_classname', index=1, + number=8, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='java_multiple_files', full_name='google.protobuf.FileOptions.java_multiple_files', index=2, + number=10, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='java_generate_equals_and_hash', full_name='google.protobuf.FileOptions.java_generate_equals_and_hash', index=3, + number=20, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='java_string_check_utf8', full_name='google.protobuf.FileOptions.java_string_check_utf8', index=4, + number=27, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='optimize_for', full_name='google.protobuf.FileOptions.optimize_for', index=5, + number=9, type=14, cpp_type=8, label=1, + has_default_value=True, default_value=1, + message_type=None, enum_type=None, containing_type=None, + 
is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='go_package', full_name='google.protobuf.FileOptions.go_package', index=6, + number=11, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='cc_generic_services', full_name='google.protobuf.FileOptions.cc_generic_services', index=7, + number=16, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='java_generic_services', full_name='google.protobuf.FileOptions.java_generic_services', index=8, + number=17, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='py_generic_services', full_name='google.protobuf.FileOptions.py_generic_services', index=9, + number=18, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='php_generic_services', full_name='google.protobuf.FileOptions.php_generic_services', index=10, + number=42, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='deprecated', full_name='google.protobuf.FileOptions.deprecated', index=11, + number=23, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='cc_enable_arenas', full_name='google.protobuf.FileOptions.cc_enable_arenas', index=12, + number=31, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=True, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='objc_class_prefix', full_name='google.protobuf.FileOptions.objc_class_prefix', index=13, + number=36, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='csharp_namespace', full_name='google.protobuf.FileOptions.csharp_namespace', index=14, 
+ number=37, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='swift_prefix', full_name='google.protobuf.FileOptions.swift_prefix', index=15, + number=39, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='php_class_prefix', full_name='google.protobuf.FileOptions.php_class_prefix', index=16, + number=40, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='php_namespace', full_name='google.protobuf.FileOptions.php_namespace', index=17, + number=41, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='php_metadata_namespace', full_name='google.protobuf.FileOptions.php_metadata_namespace', index=18, + number=44, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='ruby_package', full_name='google.protobuf.FileOptions.ruby_package', index=19, + number=45, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='uninterpreted_option', full_name='google.protobuf.FileOptions.uninterpreted_option', index=20, + number=999, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + _FILEOPTIONS_OPTIMIZEMODE, + ], + serialized_options=None, + is_extendable=True, + syntax='proto2', + extension_ranges=[(1000, 536870912), ], + oneofs=[ + ], + ) + + + _MESSAGEOPTIONS = _descriptor.Descriptor( + name='MessageOptions', + full_name='google.protobuf.MessageOptions', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='message_set_wire_format', full_name='google.protobuf.MessageOptions.message_set_wire_format', index=0, + number=1, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, 
containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='no_standard_descriptor_accessor', full_name='google.protobuf.MessageOptions.no_standard_descriptor_accessor', index=1, + number=2, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='deprecated', full_name='google.protobuf.MessageOptions.deprecated', index=2, + number=3, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='map_entry', full_name='google.protobuf.MessageOptions.map_entry', index=3, + number=7, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='uninterpreted_option', full_name='google.protobuf.MessageOptions.uninterpreted_option', index=4, + number=999, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=True, + syntax='proto2', + extension_ranges=[(1000, 536870912), ], + oneofs=[ + ], + ) + + + _FIELDOPTIONS = _descriptor.Descriptor( + name='FieldOptions', + full_name='google.protobuf.FieldOptions', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='ctype', full_name='google.protobuf.FieldOptions.ctype', index=0, + number=1, type=14, cpp_type=8, label=1, + has_default_value=True, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='packed', full_name='google.protobuf.FieldOptions.packed', index=1, + number=2, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='jstype', full_name='google.protobuf.FieldOptions.jstype', index=2, + number=6, type=14, cpp_type=8, label=1, + has_default_value=True, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='lazy', full_name='google.protobuf.FieldOptions.lazy', index=3, + number=5, type=8, cpp_type=7, label=1, + has_default_value=True, 
default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='unverified_lazy', full_name='google.protobuf.FieldOptions.unverified_lazy', index=4, + number=15, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='deprecated', full_name='google.protobuf.FieldOptions.deprecated', index=5, + number=3, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='weak', full_name='google.protobuf.FieldOptions.weak', index=6, + number=10, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='uninterpreted_option', full_name='google.protobuf.FieldOptions.uninterpreted_option', index=7, + number=999, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + _FIELDOPTIONS_CTYPE, + _FIELDOPTIONS_JSTYPE, + ], + serialized_options=None, + is_extendable=True, + syntax='proto2', + extension_ranges=[(1000, 536870912), ], + oneofs=[ + ], + ) + + + _ONEOFOPTIONS = _descriptor.Descriptor( + name='OneofOptions', + full_name='google.protobuf.OneofOptions', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='uninterpreted_option', full_name='google.protobuf.OneofOptions.uninterpreted_option', index=0, + number=999, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=True, + syntax='proto2', + extension_ranges=[(1000, 536870912), ], + oneofs=[ + ], + ) + + + _ENUMOPTIONS = _descriptor.Descriptor( + name='EnumOptions', + full_name='google.protobuf.EnumOptions', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='allow_alias', full_name='google.protobuf.EnumOptions.allow_alias', index=0, + number=2, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( 
+ name='deprecated', full_name='google.protobuf.EnumOptions.deprecated', index=1, + number=3, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='uninterpreted_option', full_name='google.protobuf.EnumOptions.uninterpreted_option', index=2, + number=999, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=True, + syntax='proto2', + extension_ranges=[(1000, 536870912), ], + oneofs=[ + ], + ) + + + _ENUMVALUEOPTIONS = _descriptor.Descriptor( + name='EnumValueOptions', + full_name='google.protobuf.EnumValueOptions', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='deprecated', full_name='google.protobuf.EnumValueOptions.deprecated', index=0, + number=1, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='uninterpreted_option', full_name='google.protobuf.EnumValueOptions.uninterpreted_option', index=1, + number=999, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=True, + syntax='proto2', + extension_ranges=[(1000, 536870912), ], + oneofs=[ + ], + ) + + + _SERVICEOPTIONS = _descriptor.Descriptor( + name='ServiceOptions', + full_name='google.protobuf.ServiceOptions', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='deprecated', full_name='google.protobuf.ServiceOptions.deprecated', index=0, + number=33, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='uninterpreted_option', full_name='google.protobuf.ServiceOptions.uninterpreted_option', index=1, + number=999, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=True, + syntax='proto2', + extension_ranges=[(1000, 536870912), ], + oneofs=[ + ], + ) + + + _METHODOPTIONS = _descriptor.Descriptor( + name='MethodOptions', + 
full_name='google.protobuf.MethodOptions', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='deprecated', full_name='google.protobuf.MethodOptions.deprecated', index=0, + number=33, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='idempotency_level', full_name='google.protobuf.MethodOptions.idempotency_level', index=1, + number=34, type=14, cpp_type=8, label=1, + has_default_value=True, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='uninterpreted_option', full_name='google.protobuf.MethodOptions.uninterpreted_option', index=2, + number=999, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + _METHODOPTIONS_IDEMPOTENCYLEVEL, + ], + serialized_options=None, + is_extendable=True, + syntax='proto2', + extension_ranges=[(1000, 536870912), ], + oneofs=[ + ], + ) + + + _UNINTERPRETEDOPTION_NAMEPART = _descriptor.Descriptor( + name='NamePart', + full_name='google.protobuf.UninterpretedOption.NamePart', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='name_part', full_name='google.protobuf.UninterpretedOption.NamePart.name_part', index=0, + number=1, type=9, cpp_type=9, label=2, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='is_extension', full_name='google.protobuf.UninterpretedOption.NamePart.is_extension', index=1, + number=2, type=8, cpp_type=7, label=2, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + ) + + _UNINTERPRETEDOPTION = _descriptor.Descriptor( + name='UninterpretedOption', + full_name='google.protobuf.UninterpretedOption', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='google.protobuf.UninterpretedOption.name', index=0, + number=2, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + 
_descriptor.FieldDescriptor( + name='identifier_value', full_name='google.protobuf.UninterpretedOption.identifier_value', index=1, + number=3, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='positive_int_value', full_name='google.protobuf.UninterpretedOption.positive_int_value', index=2, + number=4, type=4, cpp_type=4, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='negative_int_value', full_name='google.protobuf.UninterpretedOption.negative_int_value', index=3, + number=5, type=3, cpp_type=2, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='double_value', full_name='google.protobuf.UninterpretedOption.double_value', index=4, + number=6, type=1, cpp_type=5, label=1, + has_default_value=False, default_value=float(0), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='string_value', full_name='google.protobuf.UninterpretedOption.string_value', index=5, + number=7, type=12, cpp_type=9, label=1, + has_default_value=False, default_value=b"", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='aggregate_value', full_name='google.protobuf.UninterpretedOption.aggregate_value', index=6, + number=8, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[_UNINTERPRETEDOPTION_NAMEPART, ], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + ) + + + _SOURCECODEINFO_LOCATION = _descriptor.Descriptor( + name='Location', + full_name='google.protobuf.SourceCodeInfo.Location', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='path', full_name='google.protobuf.SourceCodeInfo.Location.path', index=0, + number=1, type=5, cpp_type=1, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='span', full_name='google.protobuf.SourceCodeInfo.Location.span', index=1, + number=2, type=5, cpp_type=1, label=3, + has_default_value=False, default_value=[], + 
message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='leading_comments', full_name='google.protobuf.SourceCodeInfo.Location.leading_comments', index=2, + number=3, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='trailing_comments', full_name='google.protobuf.SourceCodeInfo.Location.trailing_comments', index=3, + number=4, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='leading_detached_comments', full_name='google.protobuf.SourceCodeInfo.Location.leading_detached_comments', index=4, + number=6, type=9, cpp_type=9, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + ) + + _SOURCECODEINFO = _descriptor.Descriptor( + name='SourceCodeInfo', + full_name='google.protobuf.SourceCodeInfo', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='location', full_name='google.protobuf.SourceCodeInfo.location', index=0, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[_SOURCECODEINFO_LOCATION, ], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + ) + + + _GENERATEDCODEINFO_ANNOTATION = _descriptor.Descriptor( + name='Annotation', + full_name='google.protobuf.GeneratedCodeInfo.Annotation', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='path', full_name='google.protobuf.GeneratedCodeInfo.Annotation.path', index=0, + number=1, type=5, cpp_type=1, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='source_file', full_name='google.protobuf.GeneratedCodeInfo.Annotation.source_file', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, 
create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='begin', full_name='google.protobuf.GeneratedCodeInfo.Annotation.begin', index=2, + number=3, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='end', full_name='google.protobuf.GeneratedCodeInfo.Annotation.end', index=3, + number=4, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + ) + + _GENERATEDCODEINFO = _descriptor.Descriptor( + name='GeneratedCodeInfo', + full_name='google.protobuf.GeneratedCodeInfo', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='annotation', full_name='google.protobuf.GeneratedCodeInfo.annotation', index=0, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[_GENERATEDCODEINFO_ANNOTATION, ], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + ) + + _FILEDESCRIPTORSET.fields_by_name['file'].message_type = _FILEDESCRIPTORPROTO + _FILEDESCRIPTORPROTO.fields_by_name['message_type'].message_type = _DESCRIPTORPROTO + _FILEDESCRIPTORPROTO.fields_by_name['enum_type'].message_type = _ENUMDESCRIPTORPROTO + _FILEDESCRIPTORPROTO.fields_by_name['service'].message_type = _SERVICEDESCRIPTORPROTO + _FILEDESCRIPTORPROTO.fields_by_name['extension'].message_type = _FIELDDESCRIPTORPROTO + _FILEDESCRIPTORPROTO.fields_by_name['options'].message_type = _FILEOPTIONS + _FILEDESCRIPTORPROTO.fields_by_name['source_code_info'].message_type = _SOURCECODEINFO + _DESCRIPTORPROTO_EXTENSIONRANGE.fields_by_name['options'].message_type = _EXTENSIONRANGEOPTIONS + _DESCRIPTORPROTO_EXTENSIONRANGE.containing_type = _DESCRIPTORPROTO + _DESCRIPTORPROTO_RESERVEDRANGE.containing_type = _DESCRIPTORPROTO + _DESCRIPTORPROTO.fields_by_name['field'].message_type = _FIELDDESCRIPTORPROTO + _DESCRIPTORPROTO.fields_by_name['extension'].message_type = _FIELDDESCRIPTORPROTO + _DESCRIPTORPROTO.fields_by_name['nested_type'].message_type = _DESCRIPTORPROTO + _DESCRIPTORPROTO.fields_by_name['enum_type'].message_type = _ENUMDESCRIPTORPROTO + _DESCRIPTORPROTO.fields_by_name['extension_range'].message_type = _DESCRIPTORPROTO_EXTENSIONRANGE + _DESCRIPTORPROTO.fields_by_name['oneof_decl'].message_type = _ONEOFDESCRIPTORPROTO + _DESCRIPTORPROTO.fields_by_name['options'].message_type = _MESSAGEOPTIONS + _DESCRIPTORPROTO.fields_by_name['reserved_range'].message_type = _DESCRIPTORPROTO_RESERVEDRANGE + _EXTENSIONRANGEOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION + _FIELDDESCRIPTORPROTO.fields_by_name['label'].enum_type = 
_FIELDDESCRIPTORPROTO_LABEL + _FIELDDESCRIPTORPROTO.fields_by_name['type'].enum_type = _FIELDDESCRIPTORPROTO_TYPE + _FIELDDESCRIPTORPROTO.fields_by_name['options'].message_type = _FIELDOPTIONS + _FIELDDESCRIPTORPROTO_TYPE.containing_type = _FIELDDESCRIPTORPROTO + _FIELDDESCRIPTORPROTO_LABEL.containing_type = _FIELDDESCRIPTORPROTO + _ONEOFDESCRIPTORPROTO.fields_by_name['options'].message_type = _ONEOFOPTIONS + _ENUMDESCRIPTORPROTO_ENUMRESERVEDRANGE.containing_type = _ENUMDESCRIPTORPROTO + _ENUMDESCRIPTORPROTO.fields_by_name['value'].message_type = _ENUMVALUEDESCRIPTORPROTO + _ENUMDESCRIPTORPROTO.fields_by_name['options'].message_type = _ENUMOPTIONS + _ENUMDESCRIPTORPROTO.fields_by_name['reserved_range'].message_type = _ENUMDESCRIPTORPROTO_ENUMRESERVEDRANGE + _ENUMVALUEDESCRIPTORPROTO.fields_by_name['options'].message_type = _ENUMVALUEOPTIONS + _SERVICEDESCRIPTORPROTO.fields_by_name['method'].message_type = _METHODDESCRIPTORPROTO + _SERVICEDESCRIPTORPROTO.fields_by_name['options'].message_type = _SERVICEOPTIONS + _METHODDESCRIPTORPROTO.fields_by_name['options'].message_type = _METHODOPTIONS + _FILEOPTIONS.fields_by_name['optimize_for'].enum_type = _FILEOPTIONS_OPTIMIZEMODE + _FILEOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION + _FILEOPTIONS_OPTIMIZEMODE.containing_type = _FILEOPTIONS + _MESSAGEOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION + _FIELDOPTIONS.fields_by_name['ctype'].enum_type = _FIELDOPTIONS_CTYPE + _FIELDOPTIONS.fields_by_name['jstype'].enum_type = _FIELDOPTIONS_JSTYPE + _FIELDOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION + _FIELDOPTIONS_CTYPE.containing_type = _FIELDOPTIONS + _FIELDOPTIONS_JSTYPE.containing_type = _FIELDOPTIONS + _ONEOFOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION + _ENUMOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION + _ENUMVALUEOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION + _SERVICEOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION + _METHODOPTIONS.fields_by_name['idempotency_level'].enum_type = _METHODOPTIONS_IDEMPOTENCYLEVEL + _METHODOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION + _METHODOPTIONS_IDEMPOTENCYLEVEL.containing_type = _METHODOPTIONS + _UNINTERPRETEDOPTION_NAMEPART.containing_type = _UNINTERPRETEDOPTION + _UNINTERPRETEDOPTION.fields_by_name['name'].message_type = _UNINTERPRETEDOPTION_NAMEPART + _SOURCECODEINFO_LOCATION.containing_type = _SOURCECODEINFO + _SOURCECODEINFO.fields_by_name['location'].message_type = _SOURCECODEINFO_LOCATION + _GENERATEDCODEINFO_ANNOTATION.containing_type = _GENERATEDCODEINFO + _GENERATEDCODEINFO.fields_by_name['annotation'].message_type = _GENERATEDCODEINFO_ANNOTATION + DESCRIPTOR.message_types_by_name['FileDescriptorSet'] = _FILEDESCRIPTORSET + DESCRIPTOR.message_types_by_name['FileDescriptorProto'] = _FILEDESCRIPTORPROTO + DESCRIPTOR.message_types_by_name['DescriptorProto'] = _DESCRIPTORPROTO + DESCRIPTOR.message_types_by_name['ExtensionRangeOptions'] = _EXTENSIONRANGEOPTIONS + DESCRIPTOR.message_types_by_name['FieldDescriptorProto'] = _FIELDDESCRIPTORPROTO + DESCRIPTOR.message_types_by_name['OneofDescriptorProto'] = _ONEOFDESCRIPTORPROTO + DESCRIPTOR.message_types_by_name['EnumDescriptorProto'] = _ENUMDESCRIPTORPROTO + DESCRIPTOR.message_types_by_name['EnumValueDescriptorProto'] = _ENUMVALUEDESCRIPTORPROTO + 
DESCRIPTOR.message_types_by_name['ServiceDescriptorProto'] = _SERVICEDESCRIPTORPROTO + DESCRIPTOR.message_types_by_name['MethodDescriptorProto'] = _METHODDESCRIPTORPROTO + DESCRIPTOR.message_types_by_name['FileOptions'] = _FILEOPTIONS + DESCRIPTOR.message_types_by_name['MessageOptions'] = _MESSAGEOPTIONS + DESCRIPTOR.message_types_by_name['FieldOptions'] = _FIELDOPTIONS + DESCRIPTOR.message_types_by_name['OneofOptions'] = _ONEOFOPTIONS + DESCRIPTOR.message_types_by_name['EnumOptions'] = _ENUMOPTIONS + DESCRIPTOR.message_types_by_name['EnumValueOptions'] = _ENUMVALUEOPTIONS + DESCRIPTOR.message_types_by_name['ServiceOptions'] = _SERVICEOPTIONS + DESCRIPTOR.message_types_by_name['MethodOptions'] = _METHODOPTIONS + DESCRIPTOR.message_types_by_name['UninterpretedOption'] = _UNINTERPRETEDOPTION + DESCRIPTOR.message_types_by_name['SourceCodeInfo'] = _SOURCECODEINFO + DESCRIPTOR.message_types_by_name['GeneratedCodeInfo'] = _GENERATEDCODEINFO + _sym_db.RegisterFileDescriptor(DESCRIPTOR) + +else: + _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.descriptor_pb2', globals()) +if _descriptor._USE_C_DESCRIPTORS == False: + + DESCRIPTOR._options = None + _FILEDESCRIPTORSET._serialized_start=53 + _FILEDESCRIPTORSET._serialized_end=124 + _FILEDESCRIPTORPROTO._serialized_start=127 + _FILEDESCRIPTORPROTO._serialized_end=602 + _DESCRIPTORPROTO._serialized_start=605 + _DESCRIPTORPROTO._serialized_end=1286 + _DESCRIPTORPROTO_EXTENSIONRANGE._serialized_start=1140 + _DESCRIPTORPROTO_EXTENSIONRANGE._serialized_end=1241 + _DESCRIPTORPROTO_RESERVEDRANGE._serialized_start=1243 + _DESCRIPTORPROTO_RESERVEDRANGE._serialized_end=1286 + _EXTENSIONRANGEOPTIONS._serialized_start=1288 + _EXTENSIONRANGEOPTIONS._serialized_end=1391 + _FIELDDESCRIPTORPROTO._serialized_start=1394 + _FIELDDESCRIPTORPROTO._serialized_end=2119 + _FIELDDESCRIPTORPROTO_TYPE._serialized_start=1740 + _FIELDDESCRIPTORPROTO_TYPE._serialized_end=2050 + _FIELDDESCRIPTORPROTO_LABEL._serialized_start=2052 + _FIELDDESCRIPTORPROTO_LABEL._serialized_end=2119 + _ONEOFDESCRIPTORPROTO._serialized_start=2121 + _ONEOFDESCRIPTORPROTO._serialized_end=2205 + _ENUMDESCRIPTORPROTO._serialized_start=2208 + _ENUMDESCRIPTORPROTO._serialized_end=2500 + _ENUMDESCRIPTORPROTO_ENUMRESERVEDRANGE._serialized_start=2453 + _ENUMDESCRIPTORPROTO_ENUMRESERVEDRANGE._serialized_end=2500 + _ENUMVALUEDESCRIPTORPROTO._serialized_start=2502 + _ENUMVALUEDESCRIPTORPROTO._serialized_end=2610 + _SERVICEDESCRIPTORPROTO._serialized_start=2613 + _SERVICEDESCRIPTORPROTO._serialized_end=2757 + _METHODDESCRIPTORPROTO._serialized_start=2760 + _METHODDESCRIPTORPROTO._serialized_end=2953 + _FILEOPTIONS._serialized_start=2956 + _FILEOPTIONS._serialized_end=3761 + _FILEOPTIONS_OPTIMIZEMODE._serialized_start=3686 + _FILEOPTIONS_OPTIMIZEMODE._serialized_end=3744 + _MESSAGEOPTIONS._serialized_start=3764 + _MESSAGEOPTIONS._serialized_end=4024 + _FIELDOPTIONS._serialized_start=4027 + _FIELDOPTIONS._serialized_end=4473 + _FIELDOPTIONS_CTYPE._serialized_start=4354 + _FIELDOPTIONS_CTYPE._serialized_end=4401 + _FIELDOPTIONS_JSTYPE._serialized_start=4403 + _FIELDOPTIONS_JSTYPE._serialized_end=4456 + _ONEOFOPTIONS._serialized_start=4475 + _ONEOFOPTIONS._serialized_end=4569 + _ENUMOPTIONS._serialized_start=4572 + _ENUMOPTIONS._serialized_end=4719 + _ENUMVALUEOPTIONS._serialized_start=4721 + _ENUMVALUEOPTIONS._serialized_end=4846 + _SERVICEOPTIONS._serialized_start=4848 + _SERVICEOPTIONS._serialized_end=4971 + 
_METHODOPTIONS._serialized_start=4974 + _METHODOPTIONS._serialized_end=5275 + _METHODOPTIONS_IDEMPOTENCYLEVEL._serialized_start=5184 + _METHODOPTIONS_IDEMPOTENCYLEVEL._serialized_end=5264 + _UNINTERPRETEDOPTION._serialized_start=5278 + _UNINTERPRETEDOPTION._serialized_end=5564 + _UNINTERPRETEDOPTION_NAMEPART._serialized_start=5513 + _UNINTERPRETEDOPTION_NAMEPART._serialized_end=5564 + _SOURCECODEINFO._serialized_start=5567 + _SOURCECODEINFO._serialized_end=5780 + _SOURCECODEINFO_LOCATION._serialized_start=5646 + _SOURCECODEINFO_LOCATION._serialized_end=5780 + _GENERATEDCODEINFO._serialized_start=5783 + _GENERATEDCODEINFO._serialized_end=5950 + _GENERATEDCODEINFO_ANNOTATION._serialized_start=5871 + _GENERATEDCODEINFO_ANNOTATION._serialized_end=5950 +# @@protoc_insertion_point(module_scope) diff --git a/openpype/hosts/nuke/vendor/google/protobuf/descriptor_pool.py b/openpype/hosts/nuke/vendor/google/protobuf/descriptor_pool.py new file mode 100644 index 0000000000..911372a8b0 --- /dev/null +++ b/openpype/hosts/nuke/vendor/google/protobuf/descriptor_pool.py @@ -0,0 +1,1295 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. +# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +"""Provides DescriptorPool to use as a container for proto2 descriptors. + +The DescriptorPool is used in conjection with a DescriptorDatabase to maintain +a collection of protocol buffer descriptors for use when dynamically creating +message types at runtime. + +For most applications protocol buffers should be used via modules generated by +the protocol buffer compiler tool. This should only be used when the type of +protocol buffers used in an application or library cannot be predetermined. + +Below is a straightforward example on how to use this class:: + + pool = DescriptorPool() + file_descriptor_protos = [ ... 
] + for file_descriptor_proto in file_descriptor_protos: + pool.Add(file_descriptor_proto) + my_message_descriptor = pool.FindMessageTypeByName('some.package.MessageType') + +The message descriptor can be used in conjunction with the message_factory +module in order to create a protocol buffer class that can be encoded and +decoded. + +If you want to get a Python class for the specified proto, use the +helper functions inside google.protobuf.message_factory +directly instead of this class. +""" + +__author__ = 'matthewtoia@google.com (Matt Toia)' + +import collections +import warnings + +from google.protobuf import descriptor +from google.protobuf import descriptor_database +from google.protobuf import text_encoding + + +_USE_C_DESCRIPTORS = descriptor._USE_C_DESCRIPTORS # pylint: disable=protected-access + + +def _Deprecated(func): + """Mark functions as deprecated.""" + + def NewFunc(*args, **kwargs): + warnings.warn( + 'Call to deprecated function %s(). Note: Do add unlinked descriptors ' + 'to descriptor_pool is wrong. Use Add() or AddSerializedFile() ' + 'instead.' % func.__name__, + category=DeprecationWarning) + return func(*args, **kwargs) + NewFunc.__name__ = func.__name__ + NewFunc.__doc__ = func.__doc__ + NewFunc.__dict__.update(func.__dict__) + return NewFunc + + +def _NormalizeFullyQualifiedName(name): + """Remove leading period from fully-qualified type name. + + Due to b/13860351 in descriptor_database.py, types in the root namespace are + generated with a leading period. This function removes that prefix. + + Args: + name (str): The fully-qualified symbol name. + + Returns: + str: The normalized fully-qualified symbol name. + """ + return name.lstrip('.') + + +def _OptionsOrNone(descriptor_proto): + """Returns the value of the field `options`, or None if it is not set.""" + if descriptor_proto.HasField('options'): + return descriptor_proto.options + else: + return None + + +def _IsMessageSetExtension(field): + return (field.is_extension and + field.containing_type.has_options and + field.containing_type.GetOptions().message_set_wire_format and + field.type == descriptor.FieldDescriptor.TYPE_MESSAGE and + field.label == descriptor.FieldDescriptor.LABEL_OPTIONAL) + + +class DescriptorPool(object): + """A collection of protobufs dynamically constructed by descriptor protos.""" + + if _USE_C_DESCRIPTORS: + + def __new__(cls, descriptor_db=None): + # pylint: disable=protected-access + return descriptor._message.DescriptorPool(descriptor_db) + + def __init__(self, descriptor_db=None): + """Initializes a Pool of proto buffs. + + The descriptor_db argument to the constructor is provided to allow + specialized file descriptor proto lookup code to be triggered on demand. An + example would be an implementation which will read and compile a file + specified in a call to FindFileByName() and not require the call to Add() + at all. Results from this database will be cached internally here as well. + + Args: + descriptor_db: A secondary source of file descriptors. + """ + + self._internal_db = descriptor_database.DescriptorDatabase() + self._descriptor_db = descriptor_db + self._descriptors = {} + self._enum_descriptors = {} + self._service_descriptors = {} + self._file_descriptors = {} + self._toplevel_extensions = {} + # TODO(jieluo): Remove _file_desc_by_toplevel_extension after + # maybe year 2020 for compatibility issue (with 3.4.1 only). 
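The docstring above describes the intended workflow: feed FileDescriptorProto messages to a pool, resolve a descriptor by its full name, then hand that descriptor to google.protobuf.message_factory to obtain a concrete message class. A minimal sketch of that workflow follows; the example/person.proto file and the Person message are hypothetical, and MessageFactory.GetPrototype is assumed to be available as in the protobuf release this file was vendored from (newer releases expose message_factory.GetMessageClass instead).

from google.protobuf import descriptor_pb2
from google.protobuf import descriptor_pool
from google.protobuf import message_factory

pool = descriptor_pool.DescriptorPool()

# Describe a tiny proto2 file at runtime instead of running protoc.
file_proto = descriptor_pb2.FileDescriptorProto()
file_proto.name = 'example/person.proto'   # hypothetical file name
file_proto.package = 'example'
message_proto = file_proto.message_type.add()
message_proto.name = 'Person'
field_proto = message_proto.field.add()
field_proto.name = 'name'
field_proto.number = 1
field_proto.type = descriptor_pb2.FieldDescriptorProto.TYPE_STRING
field_proto.label = descriptor_pb2.FieldDescriptorProto.LABEL_OPTIONAL

# Register the file, then resolve the message descriptor by full name.
pool.Add(file_proto)   # AddSerializedFile(file_proto.SerializeToString()) also works
person_descriptor = pool.FindMessageTypeByName('example.Person')

# Turn the descriptor into a usable message class.
Person = message_factory.MessageFactory(pool).GetPrototype(person_descriptor)
print(Person(name='Ada').SerializeToString())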
+ self._file_desc_by_toplevel_extension = {} + self._top_enum_values = {} + # We store extensions in two two-level mappings: The first key is the + # descriptor of the message being extended, the second key is the extension + # full name or its tag number. + self._extensions_by_name = collections.defaultdict(dict) + self._extensions_by_number = collections.defaultdict(dict) + + def _CheckConflictRegister(self, desc, desc_name, file_name): + """Check if the descriptor name conflicts with another of the same name. + + Args: + desc: Descriptor of a message, enum, service, extension or enum value. + desc_name (str): the full name of desc. + file_name (str): The file name of descriptor. + """ + for register, descriptor_type in [ + (self._descriptors, descriptor.Descriptor), + (self._enum_descriptors, descriptor.EnumDescriptor), + (self._service_descriptors, descriptor.ServiceDescriptor), + (self._toplevel_extensions, descriptor.FieldDescriptor), + (self._top_enum_values, descriptor.EnumValueDescriptor)]: + if desc_name in register: + old_desc = register[desc_name] + if isinstance(old_desc, descriptor.EnumValueDescriptor): + old_file = old_desc.type.file.name + else: + old_file = old_desc.file.name + + if not isinstance(desc, descriptor_type) or ( + old_file != file_name): + error_msg = ('Conflict register for file "' + file_name + + '": ' + desc_name + + ' is already defined in file "' + + old_file + '". Please fix the conflict by adding ' + 'package name on the proto file, or use different ' + 'name for the duplication.') + if isinstance(desc, descriptor.EnumValueDescriptor): + error_msg += ('\nNote: enum values appear as ' + 'siblings of the enum type instead of ' + 'children of it.') + + raise TypeError(error_msg) + + return + + def Add(self, file_desc_proto): + """Adds the FileDescriptorProto and its types to this pool. + + Args: + file_desc_proto (FileDescriptorProto): The file descriptor to add. + """ + + self._internal_db.Add(file_desc_proto) + + def AddSerializedFile(self, serialized_file_desc_proto): + """Adds the FileDescriptorProto and its types to this pool. + + Args: + serialized_file_desc_proto (bytes): A bytes string, serialization of the + :class:`FileDescriptorProto` to add. + + Returns: + FileDescriptor: Descriptor for the added file. + """ + + # pylint: disable=g-import-not-at-top + from google.protobuf import descriptor_pb2 + file_desc_proto = descriptor_pb2.FileDescriptorProto.FromString( + serialized_file_desc_proto) + file_desc = self._ConvertFileProtoToFileDescriptor(file_desc_proto) + file_desc.serialized_pb = serialized_file_desc_proto + return file_desc + + # Add Descriptor to descriptor pool is dreprecated. Please use Add() + # or AddSerializedFile() to add a FileDescriptorProto instead. + @_Deprecated + def AddDescriptor(self, desc): + self._AddDescriptor(desc) + + # Never call this method. It is for internal usage only. + def _AddDescriptor(self, desc): + """Adds a Descriptor to the pool, non-recursively. + + If the Descriptor contains nested messages or enums, the caller must + explicitly register them. This method also registers the FileDescriptor + associated with the message. + + Args: + desc: A Descriptor. + """ + if not isinstance(desc, descriptor.Descriptor): + raise TypeError('Expected instance of descriptor.Descriptor.') + + self._CheckConflictRegister(desc, desc.full_name, desc.file.name) + + self._descriptors[desc.full_name] = desc + self._AddFileDescriptor(desc.file) + + # Add EnumDescriptor to descriptor pool is dreprecated. 
Please use Add() + # or AddSerializedFile() to add a FileDescriptorProto instead. + @_Deprecated + def AddEnumDescriptor(self, enum_desc): + self._AddEnumDescriptor(enum_desc) + + # Never call this method. It is for internal usage only. + def _AddEnumDescriptor(self, enum_desc): + """Adds an EnumDescriptor to the pool. + + This method also registers the FileDescriptor associated with the enum. + + Args: + enum_desc: An EnumDescriptor. + """ + + if not isinstance(enum_desc, descriptor.EnumDescriptor): + raise TypeError('Expected instance of descriptor.EnumDescriptor.') + + file_name = enum_desc.file.name + self._CheckConflictRegister(enum_desc, enum_desc.full_name, file_name) + self._enum_descriptors[enum_desc.full_name] = enum_desc + + # Top enum values need to be indexed. + # Count the number of dots to see whether the enum is toplevel or nested + # in a message. We cannot use enum_desc.containing_type at this stage. + if enum_desc.file.package: + top_level = (enum_desc.full_name.count('.') + - enum_desc.file.package.count('.') == 1) + else: + top_level = enum_desc.full_name.count('.') == 0 + if top_level: + file_name = enum_desc.file.name + package = enum_desc.file.package + for enum_value in enum_desc.values: + full_name = _NormalizeFullyQualifiedName( + '.'.join((package, enum_value.name))) + self._CheckConflictRegister(enum_value, full_name, file_name) + self._top_enum_values[full_name] = enum_value + self._AddFileDescriptor(enum_desc.file) + + # Add ServiceDescriptor to descriptor pool is dreprecated. Please use Add() + # or AddSerializedFile() to add a FileDescriptorProto instead. + @_Deprecated + def AddServiceDescriptor(self, service_desc): + self._AddServiceDescriptor(service_desc) + + # Never call this method. It is for internal usage only. + def _AddServiceDescriptor(self, service_desc): + """Adds a ServiceDescriptor to the pool. + + Args: + service_desc: A ServiceDescriptor. + """ + + if not isinstance(service_desc, descriptor.ServiceDescriptor): + raise TypeError('Expected instance of descriptor.ServiceDescriptor.') + + self._CheckConflictRegister(service_desc, service_desc.full_name, + service_desc.file.name) + self._service_descriptors[service_desc.full_name] = service_desc + + # Add ExtensionDescriptor to descriptor pool is dreprecated. Please use Add() + # or AddSerializedFile() to add a FileDescriptorProto instead. + @_Deprecated + def AddExtensionDescriptor(self, extension): + self._AddExtensionDescriptor(extension) + + # Never call this method. It is for internal usage only. + def _AddExtensionDescriptor(self, extension): + """Adds a FieldDescriptor describing an extension to the pool. + + Args: + extension: A FieldDescriptor. + + Raises: + AssertionError: when another extension with the same number extends the + same message. + TypeError: when the specified extension is not a + descriptor.FieldDescriptor. + """ + if not (isinstance(extension, descriptor.FieldDescriptor) and + extension.is_extension): + raise TypeError('Expected an extension descriptor.') + + if extension.extension_scope is None: + self._toplevel_extensions[extension.full_name] = extension + + try: + existing_desc = self._extensions_by_number[ + extension.containing_type][extension.number] + except KeyError: + pass + else: + if extension is not existing_desc: + raise AssertionError( + 'Extensions "%s" and "%s" both try to extend message type "%s" ' + 'with field number %d.' 
% + (extension.full_name, existing_desc.full_name, + extension.containing_type.full_name, extension.number)) + + self._extensions_by_number[extension.containing_type][ + extension.number] = extension + self._extensions_by_name[extension.containing_type][ + extension.full_name] = extension + + # Also register MessageSet extensions with the type name. + if _IsMessageSetExtension(extension): + self._extensions_by_name[extension.containing_type][ + extension.message_type.full_name] = extension + + @_Deprecated + def AddFileDescriptor(self, file_desc): + self._InternalAddFileDescriptor(file_desc) + + # Never call this method. It is for internal usage only. + def _InternalAddFileDescriptor(self, file_desc): + """Adds a FileDescriptor to the pool, non-recursively. + + If the FileDescriptor contains messages or enums, the caller must explicitly + register them. + + Args: + file_desc: A FileDescriptor. + """ + + self._AddFileDescriptor(file_desc) + # TODO(jieluo): This is a temporary solution for FieldDescriptor.file. + # FieldDescriptor.file is added in code gen. Remove this solution after + # maybe 2020 for compatibility reason (with 3.4.1 only). + for extension in file_desc.extensions_by_name.values(): + self._file_desc_by_toplevel_extension[ + extension.full_name] = file_desc + + def _AddFileDescriptor(self, file_desc): + """Adds a FileDescriptor to the pool, non-recursively. + + If the FileDescriptor contains messages or enums, the caller must explicitly + register them. + + Args: + file_desc: A FileDescriptor. + """ + + if not isinstance(file_desc, descriptor.FileDescriptor): + raise TypeError('Expected instance of descriptor.FileDescriptor.') + self._file_descriptors[file_desc.name] = file_desc + + def FindFileByName(self, file_name): + """Gets a FileDescriptor by file name. + + Args: + file_name (str): The path to the file to get a descriptor for. + + Returns: + FileDescriptor: The descriptor for the named file. + + Raises: + KeyError: if the file cannot be found in the pool. + """ + + try: + return self._file_descriptors[file_name] + except KeyError: + pass + + try: + file_proto = self._internal_db.FindFileByName(file_name) + except KeyError as error: + if self._descriptor_db: + file_proto = self._descriptor_db.FindFileByName(file_name) + else: + raise error + if not file_proto: + raise KeyError('Cannot find a file named %s' % file_name) + return self._ConvertFileProtoToFileDescriptor(file_proto) + + def FindFileContainingSymbol(self, symbol): + """Gets the FileDescriptor for the file containing the specified symbol. + + Args: + symbol (str): The name of the symbol to search for. + + Returns: + FileDescriptor: Descriptor for the file that contains the specified + symbol. + + Raises: + KeyError: if the file cannot be found in the pool. + """ + + symbol = _NormalizeFullyQualifiedName(symbol) + try: + return self._InternalFindFileContainingSymbol(symbol) + except KeyError: + pass + + try: + # Try fallback database. Build and find again if possible. + self._FindFileContainingSymbolInDb(symbol) + return self._InternalFindFileContainingSymbol(symbol) + except KeyError: + raise KeyError('Cannot find a file containing %s' % symbol) + + def _InternalFindFileContainingSymbol(self, symbol): + """Gets the already built FileDescriptor containing the specified symbol. + + Args: + symbol (str): The name of the symbol to search for. + + Returns: + FileDescriptor: Descriptor for the file that contains the specified + symbol. + + Raises: + KeyError: if the file cannot be found in the pool. 
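FindFileByName and FindFileContainingSymbol consult the pool's own caches and internal database before falling back to any secondary descriptor_db, and both raise KeyError when nothing matches. A small sketch of both lookups, assuming the module-level descriptor_pool.Default() helper from upstream protobuf and that importing descriptor_pb2 has registered descriptor.proto in the default pool:

from google.protobuf import descriptor_pb2   # import registers descriptor.proto
from google.protobuf import descriptor_pool

pool = descriptor_pool.Default()

# Look a file up by its path...
file_desc = pool.FindFileByName('google/protobuf/descriptor.proto')
print(file_desc.package)   # 'google.protobuf'

# ...or by any symbol it defines; both resolve to the same file.
same_file = pool.FindFileContainingSymbol('google.protobuf.FileOptions')
assert same_file.name == file_desc.name

# Unknown names raise KeyError rather than returning None.
try:
    pool.FindFileByName('does/not/exist.proto')
except KeyError:
    pass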
+ """ + try: + return self._descriptors[symbol].file + except KeyError: + pass + + try: + return self._enum_descriptors[symbol].file + except KeyError: + pass + + try: + return self._service_descriptors[symbol].file + except KeyError: + pass + + try: + return self._top_enum_values[symbol].type.file + except KeyError: + pass + + try: + return self._file_desc_by_toplevel_extension[symbol] + except KeyError: + pass + + # Try fields, enum values and nested extensions inside a message. + top_name, _, sub_name = symbol.rpartition('.') + try: + message = self.FindMessageTypeByName(top_name) + assert (sub_name in message.extensions_by_name or + sub_name in message.fields_by_name or + sub_name in message.enum_values_by_name) + return message.file + except (KeyError, AssertionError): + raise KeyError('Cannot find a file containing %s' % symbol) + + def FindMessageTypeByName(self, full_name): + """Loads the named descriptor from the pool. + + Args: + full_name (str): The full name of the descriptor to load. + + Returns: + Descriptor: The descriptor for the named type. + + Raises: + KeyError: if the message cannot be found in the pool. + """ + + full_name = _NormalizeFullyQualifiedName(full_name) + if full_name not in self._descriptors: + self._FindFileContainingSymbolInDb(full_name) + return self._descriptors[full_name] + + def FindEnumTypeByName(self, full_name): + """Loads the named enum descriptor from the pool. + + Args: + full_name (str): The full name of the enum descriptor to load. + + Returns: + EnumDescriptor: The enum descriptor for the named type. + + Raises: + KeyError: if the enum cannot be found in the pool. + """ + + full_name = _NormalizeFullyQualifiedName(full_name) + if full_name not in self._enum_descriptors: + self._FindFileContainingSymbolInDb(full_name) + return self._enum_descriptors[full_name] + + def FindFieldByName(self, full_name): + """Loads the named field descriptor from the pool. + + Args: + full_name (str): The full name of the field descriptor to load. + + Returns: + FieldDescriptor: The field descriptor for the named field. + + Raises: + KeyError: if the field cannot be found in the pool. + """ + full_name = _NormalizeFullyQualifiedName(full_name) + message_name, _, field_name = full_name.rpartition('.') + message_descriptor = self.FindMessageTypeByName(message_name) + return message_descriptor.fields_by_name[field_name] + + def FindOneofByName(self, full_name): + """Loads the named oneof descriptor from the pool. + + Args: + full_name (str): The full name of the oneof descriptor to load. + + Returns: + OneofDescriptor: The oneof descriptor for the named oneof. + + Raises: + KeyError: if the oneof cannot be found in the pool. + """ + full_name = _NormalizeFullyQualifiedName(full_name) + message_name, _, oneof_name = full_name.rpartition('.') + message_descriptor = self.FindMessageTypeByName(message_name) + return message_descriptor.oneofs_by_name[oneof_name] + + def FindExtensionByName(self, full_name): + """Loads the named extension descriptor from the pool. + + Args: + full_name (str): The full name of the extension descriptor to load. + + Returns: + FieldDescriptor: The field descriptor for the named extension. + + Raises: + KeyError: if the extension cannot be found in the pool. + """ + full_name = _NormalizeFullyQualifiedName(full_name) + try: + # The proto compiler does not give any link between the FileDescriptor + # and top-level extensions unless the FileDescriptorProto is added to + # the DescriptorDatabase, but this can impact memory usage. 
+ # So we registered these extensions by name explicitly. + return self._toplevel_extensions[full_name] + except KeyError: + pass + message_name, _, extension_name = full_name.rpartition('.') + try: + # Most extensions are nested inside a message. + scope = self.FindMessageTypeByName(message_name) + except KeyError: + # Some extensions are defined at file scope. + scope = self._FindFileContainingSymbolInDb(full_name) + return scope.extensions_by_name[extension_name] + + def FindExtensionByNumber(self, message_descriptor, number): + """Gets the extension of the specified message with the specified number. + + Extensions have to be registered to this pool by calling :func:`Add` or + :func:`AddExtensionDescriptor`. + + Args: + message_descriptor (Descriptor): descriptor of the extended message. + number (int): Number of the extension field. + + Returns: + FieldDescriptor: The descriptor for the extension. + + Raises: + KeyError: when no extension with the given number is known for the + specified message. + """ + try: + return self._extensions_by_number[message_descriptor][number] + except KeyError: + self._TryLoadExtensionFromDB(message_descriptor, number) + return self._extensions_by_number[message_descriptor][number] + + def FindAllExtensions(self, message_descriptor): + """Gets all the known extensions of a given message. + + Extensions have to be registered to this pool by build related + :func:`Add` or :func:`AddExtensionDescriptor`. + + Args: + message_descriptor (Descriptor): Descriptor of the extended message. + + Returns: + list[FieldDescriptor]: Field descriptors describing the extensions. + """ + # Fallback to descriptor db if FindAllExtensionNumbers is provided. + if self._descriptor_db and hasattr( + self._descriptor_db, 'FindAllExtensionNumbers'): + full_name = message_descriptor.full_name + all_numbers = self._descriptor_db.FindAllExtensionNumbers(full_name) + for number in all_numbers: + if number in self._extensions_by_number[message_descriptor]: + continue + self._TryLoadExtensionFromDB(message_descriptor, number) + + return list(self._extensions_by_number[message_descriptor].values()) + + def _TryLoadExtensionFromDB(self, message_descriptor, number): + """Try to Load extensions from descriptor db. + + Args: + message_descriptor: descriptor of the extended message. + number: the extension number that needs to be loaded. + """ + if not self._descriptor_db: + return + # Only supported when FindFileContainingExtension is provided. + if not hasattr( + self._descriptor_db, 'FindFileContainingExtension'): + return + + full_name = message_descriptor.full_name + file_proto = self._descriptor_db.FindFileContainingExtension( + full_name, number) + + if file_proto is None: + return + + try: + self._ConvertFileProtoToFileDescriptor(file_proto) + except: + warn_msg = ('Unable to load proto file %s for extension number %d.' % + (file_proto.name, number)) + warnings.warn(warn_msg, RuntimeWarning) + + def FindServiceByName(self, full_name): + """Loads the named service descriptor from the pool. + + Args: + full_name (str): The full name of the service descriptor to load. + + Returns: + ServiceDescriptor: The service descriptor for the named service. + + Raises: + KeyError: if the service cannot be found in the pool. 
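+
+    Example (illustrative only; assumes a file declaring package "foo" with
+    a service "Bar" has already been added to this pool):
+
+      from google.protobuf import descriptor_pool
+      pool = descriptor_pool.Default()
+      bar_service = pool.FindServiceByName('foo.Bar')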
+ """ + full_name = _NormalizeFullyQualifiedName(full_name) + if full_name not in self._service_descriptors: + self._FindFileContainingSymbolInDb(full_name) + return self._service_descriptors[full_name] + + def FindMethodByName(self, full_name): + """Loads the named service method descriptor from the pool. + + Args: + full_name (str): The full name of the method descriptor to load. + + Returns: + MethodDescriptor: The method descriptor for the service method. + + Raises: + KeyError: if the method cannot be found in the pool. + """ + full_name = _NormalizeFullyQualifiedName(full_name) + service_name, _, method_name = full_name.rpartition('.') + service_descriptor = self.FindServiceByName(service_name) + return service_descriptor.methods_by_name[method_name] + + def _FindFileContainingSymbolInDb(self, symbol): + """Finds the file in descriptor DB containing the specified symbol. + + Args: + symbol (str): The name of the symbol to search for. + + Returns: + FileDescriptor: The file that contains the specified symbol. + + Raises: + KeyError: if the file cannot be found in the descriptor database. + """ + try: + file_proto = self._internal_db.FindFileContainingSymbol(symbol) + except KeyError as error: + if self._descriptor_db: + file_proto = self._descriptor_db.FindFileContainingSymbol(symbol) + else: + raise error + if not file_proto: + raise KeyError('Cannot find a file containing %s' % symbol) + return self._ConvertFileProtoToFileDescriptor(file_proto) + + def _ConvertFileProtoToFileDescriptor(self, file_proto): + """Creates a FileDescriptor from a proto or returns a cached copy. + + This method also has the side effect of loading all the symbols found in + the file into the appropriate dictionaries in the pool. + + Args: + file_proto: The proto to convert. + + Returns: + A FileDescriptor matching the passed in proto. + """ + if file_proto.name not in self._file_descriptors: + built_deps = list(self._GetDeps(file_proto.dependency)) + direct_deps = [self.FindFileByName(n) for n in file_proto.dependency] + public_deps = [direct_deps[i] for i in file_proto.public_dependency] + + file_descriptor = descriptor.FileDescriptor( + pool=self, + name=file_proto.name, + package=file_proto.package, + syntax=file_proto.syntax, + options=_OptionsOrNone(file_proto), + serialized_pb=file_proto.SerializeToString(), + dependencies=direct_deps, + public_dependencies=public_deps, + # pylint: disable=protected-access + create_key=descriptor._internal_create_key) + scope = {} + + # This loop extracts all the message and enum types from all the + # dependencies of the file_proto. This is necessary to create the + # scope of available message types when defining the passed in + # file proto. 
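+      # The resulting `scope` dict maps dot-prefixed full names to their
+      # descriptors, e.g. {'.foo.Outer': Descriptor, '.foo.Color': EnumDescriptor}
+      # (the names here are purely illustrative).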
+ for dependency in built_deps: + scope.update(self._ExtractSymbols( + dependency.message_types_by_name.values())) + scope.update((_PrefixWithDot(enum.full_name), enum) + for enum in dependency.enum_types_by_name.values()) + + for message_type in file_proto.message_type: + message_desc = self._ConvertMessageDescriptor( + message_type, file_proto.package, file_descriptor, scope, + file_proto.syntax) + file_descriptor.message_types_by_name[message_desc.name] = ( + message_desc) + + for enum_type in file_proto.enum_type: + file_descriptor.enum_types_by_name[enum_type.name] = ( + self._ConvertEnumDescriptor(enum_type, file_proto.package, + file_descriptor, None, scope, True)) + + for index, extension_proto in enumerate(file_proto.extension): + extension_desc = self._MakeFieldDescriptor( + extension_proto, file_proto.package, index, file_descriptor, + is_extension=True) + extension_desc.containing_type = self._GetTypeFromScope( + file_descriptor.package, extension_proto.extendee, scope) + self._SetFieldType(extension_proto, extension_desc, + file_descriptor.package, scope) + file_descriptor.extensions_by_name[extension_desc.name] = ( + extension_desc) + self._file_desc_by_toplevel_extension[extension_desc.full_name] = ( + file_descriptor) + + for desc_proto in file_proto.message_type: + self._SetAllFieldTypes(file_proto.package, desc_proto, scope) + + if file_proto.package: + desc_proto_prefix = _PrefixWithDot(file_proto.package) + else: + desc_proto_prefix = '' + + for desc_proto in file_proto.message_type: + desc = self._GetTypeFromScope( + desc_proto_prefix, desc_proto.name, scope) + file_descriptor.message_types_by_name[desc_proto.name] = desc + + for index, service_proto in enumerate(file_proto.service): + file_descriptor.services_by_name[service_proto.name] = ( + self._MakeServiceDescriptor(service_proto, index, scope, + file_proto.package, file_descriptor)) + + self._file_descriptors[file_proto.name] = file_descriptor + + # Add extensions to the pool + file_desc = self._file_descriptors[file_proto.name] + for extension in file_desc.extensions_by_name.values(): + self._AddExtensionDescriptor(extension) + for message_type in file_desc.message_types_by_name.values(): + for extension in message_type.extensions: + self._AddExtensionDescriptor(extension) + + return file_desc + + def _ConvertMessageDescriptor(self, desc_proto, package=None, file_desc=None, + scope=None, syntax=None): + """Adds the proto to the pool in the specified package. + + Args: + desc_proto: The descriptor_pb2.DescriptorProto protobuf message. + package: The package the proto should be located in. + file_desc: The file containing this message. + scope: Dict mapping short and full symbols to message and enum types. + syntax: string indicating syntax of the file ("proto2" or "proto3") + + Returns: + The added descriptor. 
+ """ + + if package: + desc_name = '.'.join((package, desc_proto.name)) + else: + desc_name = desc_proto.name + + if file_desc is None: + file_name = None + else: + file_name = file_desc.name + + if scope is None: + scope = {} + + nested = [ + self._ConvertMessageDescriptor( + nested, desc_name, file_desc, scope, syntax) + for nested in desc_proto.nested_type] + enums = [ + self._ConvertEnumDescriptor(enum, desc_name, file_desc, None, + scope, False) + for enum in desc_proto.enum_type] + fields = [self._MakeFieldDescriptor(field, desc_name, index, file_desc) + for index, field in enumerate(desc_proto.field)] + extensions = [ + self._MakeFieldDescriptor(extension, desc_name, index, file_desc, + is_extension=True) + for index, extension in enumerate(desc_proto.extension)] + oneofs = [ + # pylint: disable=g-complex-comprehension + descriptor.OneofDescriptor( + desc.name, + '.'.join((desc_name, desc.name)), + index, + None, + [], + _OptionsOrNone(desc), + # pylint: disable=protected-access + create_key=descriptor._internal_create_key) + for index, desc in enumerate(desc_proto.oneof_decl) + ] + extension_ranges = [(r.start, r.end) for r in desc_proto.extension_range] + if extension_ranges: + is_extendable = True + else: + is_extendable = False + desc = descriptor.Descriptor( + name=desc_proto.name, + full_name=desc_name, + filename=file_name, + containing_type=None, + fields=fields, + oneofs=oneofs, + nested_types=nested, + enum_types=enums, + extensions=extensions, + options=_OptionsOrNone(desc_proto), + is_extendable=is_extendable, + extension_ranges=extension_ranges, + file=file_desc, + serialized_start=None, + serialized_end=None, + syntax=syntax, + # pylint: disable=protected-access + create_key=descriptor._internal_create_key) + for nested in desc.nested_types: + nested.containing_type = desc + for enum in desc.enum_types: + enum.containing_type = desc + for field_index, field_desc in enumerate(desc_proto.field): + if field_desc.HasField('oneof_index'): + oneof_index = field_desc.oneof_index + oneofs[oneof_index].fields.append(fields[field_index]) + fields[field_index].containing_oneof = oneofs[oneof_index] + + scope[_PrefixWithDot(desc_name)] = desc + self._CheckConflictRegister(desc, desc.full_name, desc.file.name) + self._descriptors[desc_name] = desc + return desc + + def _ConvertEnumDescriptor(self, enum_proto, package=None, file_desc=None, + containing_type=None, scope=None, top_level=False): + """Make a protobuf EnumDescriptor given an EnumDescriptorProto protobuf. + + Args: + enum_proto: The descriptor_pb2.EnumDescriptorProto protobuf message. + package: Optional package name for the new message EnumDescriptor. + file_desc: The file containing the enum descriptor. + containing_type: The type containing this enum. + scope: Scope containing available types. + top_level: If True, the enum is a top level symbol. If False, the enum + is defined inside a message. 
+ + Returns: + The added descriptor + """ + + if package: + enum_name = '.'.join((package, enum_proto.name)) + else: + enum_name = enum_proto.name + + if file_desc is None: + file_name = None + else: + file_name = file_desc.name + + values = [self._MakeEnumValueDescriptor(value, index) + for index, value in enumerate(enum_proto.value)] + desc = descriptor.EnumDescriptor(name=enum_proto.name, + full_name=enum_name, + filename=file_name, + file=file_desc, + values=values, + containing_type=containing_type, + options=_OptionsOrNone(enum_proto), + # pylint: disable=protected-access + create_key=descriptor._internal_create_key) + scope['.%s' % enum_name] = desc + self._CheckConflictRegister(desc, desc.full_name, desc.file.name) + self._enum_descriptors[enum_name] = desc + + # Add top level enum values. + if top_level: + for value in values: + full_name = _NormalizeFullyQualifiedName( + '.'.join((package, value.name))) + self._CheckConflictRegister(value, full_name, file_name) + self._top_enum_values[full_name] = value + + return desc + + def _MakeFieldDescriptor(self, field_proto, message_name, index, + file_desc, is_extension=False): + """Creates a field descriptor from a FieldDescriptorProto. + + For message and enum type fields, this method will do a look up + in the pool for the appropriate descriptor for that type. If it + is unavailable, it will fall back to the _source function to + create it. If this type is still unavailable, construction will + fail. + + Args: + field_proto: The proto describing the field. + message_name: The name of the containing message. + index: Index of the field + file_desc: The file containing the field descriptor. + is_extension: Indication that this field is for an extension. + + Returns: + An initialized FieldDescriptor object + """ + + if message_name: + full_name = '.'.join((message_name, field_proto.name)) + else: + full_name = field_proto.name + + if field_proto.json_name: + json_name = field_proto.json_name + else: + json_name = None + + return descriptor.FieldDescriptor( + name=field_proto.name, + full_name=full_name, + index=index, + number=field_proto.number, + type=field_proto.type, + cpp_type=None, + message_type=None, + enum_type=None, + containing_type=None, + label=field_proto.label, + has_default_value=False, + default_value=None, + is_extension=is_extension, + extension_scope=None, + options=_OptionsOrNone(field_proto), + json_name=json_name, + file=file_desc, + # pylint: disable=protected-access + create_key=descriptor._internal_create_key) + + def _SetAllFieldTypes(self, package, desc_proto, scope): + """Sets all the descriptor's fields's types. + + This method also sets the containing types on any extensions. + + Args: + package: The current package of desc_proto. + desc_proto: The message descriptor to update. + scope: Enclosing scope of available types. 
+ """ + + package = _PrefixWithDot(package) + + main_desc = self._GetTypeFromScope(package, desc_proto.name, scope) + + if package == '.': + nested_package = _PrefixWithDot(desc_proto.name) + else: + nested_package = '.'.join([package, desc_proto.name]) + + for field_proto, field_desc in zip(desc_proto.field, main_desc.fields): + self._SetFieldType(field_proto, field_desc, nested_package, scope) + + for extension_proto, extension_desc in ( + zip(desc_proto.extension, main_desc.extensions)): + extension_desc.containing_type = self._GetTypeFromScope( + nested_package, extension_proto.extendee, scope) + self._SetFieldType(extension_proto, extension_desc, nested_package, scope) + + for nested_type in desc_proto.nested_type: + self._SetAllFieldTypes(nested_package, nested_type, scope) + + def _SetFieldType(self, field_proto, field_desc, package, scope): + """Sets the field's type, cpp_type, message_type and enum_type. + + Args: + field_proto: Data about the field in proto format. + field_desc: The descriptor to modify. + package: The package the field's container is in. + scope: Enclosing scope of available types. + """ + if field_proto.type_name: + desc = self._GetTypeFromScope(package, field_proto.type_name, scope) + else: + desc = None + + if not field_proto.HasField('type'): + if isinstance(desc, descriptor.Descriptor): + field_proto.type = descriptor.FieldDescriptor.TYPE_MESSAGE + else: + field_proto.type = descriptor.FieldDescriptor.TYPE_ENUM + + field_desc.cpp_type = descriptor.FieldDescriptor.ProtoTypeToCppProtoType( + field_proto.type) + + if (field_proto.type == descriptor.FieldDescriptor.TYPE_MESSAGE + or field_proto.type == descriptor.FieldDescriptor.TYPE_GROUP): + field_desc.message_type = desc + + if field_proto.type == descriptor.FieldDescriptor.TYPE_ENUM: + field_desc.enum_type = desc + + if field_proto.label == descriptor.FieldDescriptor.LABEL_REPEATED: + field_desc.has_default_value = False + field_desc.default_value = [] + elif field_proto.HasField('default_value'): + field_desc.has_default_value = True + if (field_proto.type == descriptor.FieldDescriptor.TYPE_DOUBLE or + field_proto.type == descriptor.FieldDescriptor.TYPE_FLOAT): + field_desc.default_value = float(field_proto.default_value) + elif field_proto.type == descriptor.FieldDescriptor.TYPE_STRING: + field_desc.default_value = field_proto.default_value + elif field_proto.type == descriptor.FieldDescriptor.TYPE_BOOL: + field_desc.default_value = field_proto.default_value.lower() == 'true' + elif field_proto.type == descriptor.FieldDescriptor.TYPE_ENUM: + field_desc.default_value = field_desc.enum_type.values_by_name[ + field_proto.default_value].number + elif field_proto.type == descriptor.FieldDescriptor.TYPE_BYTES: + field_desc.default_value = text_encoding.CUnescape( + field_proto.default_value) + elif field_proto.type == descriptor.FieldDescriptor.TYPE_MESSAGE: + field_desc.default_value = None + else: + # All other types are of the "int" type. 
+ field_desc.default_value = int(field_proto.default_value) + else: + field_desc.has_default_value = False + if (field_proto.type == descriptor.FieldDescriptor.TYPE_DOUBLE or + field_proto.type == descriptor.FieldDescriptor.TYPE_FLOAT): + field_desc.default_value = 0.0 + elif field_proto.type == descriptor.FieldDescriptor.TYPE_STRING: + field_desc.default_value = u'' + elif field_proto.type == descriptor.FieldDescriptor.TYPE_BOOL: + field_desc.default_value = False + elif field_proto.type == descriptor.FieldDescriptor.TYPE_ENUM: + field_desc.default_value = field_desc.enum_type.values[0].number + elif field_proto.type == descriptor.FieldDescriptor.TYPE_BYTES: + field_desc.default_value = b'' + elif field_proto.type == descriptor.FieldDescriptor.TYPE_MESSAGE: + field_desc.default_value = None + elif field_proto.type == descriptor.FieldDescriptor.TYPE_GROUP: + field_desc.default_value = None + else: + # All other types are of the "int" type. + field_desc.default_value = 0 + + field_desc.type = field_proto.type + + def _MakeEnumValueDescriptor(self, value_proto, index): + """Creates a enum value descriptor object from a enum value proto. + + Args: + value_proto: The proto describing the enum value. + index: The index of the enum value. + + Returns: + An initialized EnumValueDescriptor object. + """ + + return descriptor.EnumValueDescriptor( + name=value_proto.name, + index=index, + number=value_proto.number, + options=_OptionsOrNone(value_proto), + type=None, + # pylint: disable=protected-access + create_key=descriptor._internal_create_key) + + def _MakeServiceDescriptor(self, service_proto, service_index, scope, + package, file_desc): + """Make a protobuf ServiceDescriptor given a ServiceDescriptorProto. + + Args: + service_proto: The descriptor_pb2.ServiceDescriptorProto protobuf message. + service_index: The index of the service in the File. + scope: Dict mapping short and full symbols to message and enum types. + package: Optional package name for the new message EnumDescriptor. + file_desc: The file containing the service descriptor. + + Returns: + The added descriptor. + """ + + if package: + service_name = '.'.join((package, service_proto.name)) + else: + service_name = service_proto.name + + methods = [self._MakeMethodDescriptor(method_proto, service_name, package, + scope, index) + for index, method_proto in enumerate(service_proto.method)] + desc = descriptor.ServiceDescriptor( + name=service_proto.name, + full_name=service_name, + index=service_index, + methods=methods, + options=_OptionsOrNone(service_proto), + file=file_desc, + # pylint: disable=protected-access + create_key=descriptor._internal_create_key) + self._CheckConflictRegister(desc, desc.full_name, desc.file.name) + self._service_descriptors[service_name] = desc + return desc + + def _MakeMethodDescriptor(self, method_proto, service_name, package, scope, + index): + """Creates a method descriptor from a MethodDescriptorProto. + + Args: + method_proto: The proto describing the method. + service_name: The name of the containing service. + package: Optional package name to look up for types. + scope: Scope containing available types. + index: Index of the method in the service. + + Returns: + An initialized MethodDescriptor object. 
+ """ + full_name = '.'.join((service_name, method_proto.name)) + input_type = self._GetTypeFromScope( + package, method_proto.input_type, scope) + output_type = self._GetTypeFromScope( + package, method_proto.output_type, scope) + return descriptor.MethodDescriptor( + name=method_proto.name, + full_name=full_name, + index=index, + containing_service=None, + input_type=input_type, + output_type=output_type, + client_streaming=method_proto.client_streaming, + server_streaming=method_proto.server_streaming, + options=_OptionsOrNone(method_proto), + # pylint: disable=protected-access + create_key=descriptor._internal_create_key) + + def _ExtractSymbols(self, descriptors): + """Pulls out all the symbols from descriptor protos. + + Args: + descriptors: The messages to extract descriptors from. + Yields: + A two element tuple of the type name and descriptor object. + """ + + for desc in descriptors: + yield (_PrefixWithDot(desc.full_name), desc) + for symbol in self._ExtractSymbols(desc.nested_types): + yield symbol + for enum in desc.enum_types: + yield (_PrefixWithDot(enum.full_name), enum) + + def _GetDeps(self, dependencies, visited=None): + """Recursively finds dependencies for file protos. + + Args: + dependencies: The names of the files being depended on. + visited: The names of files already found. + + Yields: + Each direct and indirect dependency. + """ + + visited = visited or set() + for dependency in dependencies: + if dependency not in visited: + visited.add(dependency) + dep_desc = self.FindFileByName(dependency) + yield dep_desc + public_files = [d.name for d in dep_desc.public_dependencies] + yield from self._GetDeps(public_files, visited) + + def _GetTypeFromScope(self, package, type_name, scope): + """Finds a given type name in the current scope. + + Args: + package: The package the proto should be located in. + type_name: The name of the type to be found in the scope. + scope: Dict mapping short and full symbols to message and enum types. + + Returns: + The descriptor for the requested type. + """ + if type_name not in scope: + components = _PrefixWithDot(package).split('.') + while components: + possible_match = '.'.join(components + [type_name]) + if possible_match in scope: + type_name = possible_match + break + else: + components.pop(-1) + return scope[type_name] + + +def _PrefixWithDot(name): + return name if name.startswith('.') else '.%s' % name + + +if _USE_C_DESCRIPTORS: + # TODO(amauryfa): This pool could be constructed from Python code, when we + # support a flag like 'use_cpp_generated_pool=True'. + # pylint: disable=protected-access + _DEFAULT = descriptor._message.default_pool +else: + _DEFAULT = DescriptorPool() + + +def Default(): + return _DEFAULT diff --git a/openpype/hosts/nuke/vendor/google/protobuf/duration_pb2.py b/openpype/hosts/nuke/vendor/google/protobuf/duration_pb2.py new file mode 100644 index 0000000000..a8ecc07bdf --- /dev/null +++ b/openpype/hosts/nuke/vendor/google/protobuf/duration_pb2.py @@ -0,0 +1,26 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: google/protobuf/duration.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1egoogle/protobuf/duration.proto\x12\x0fgoogle.protobuf\"*\n\x08\x44uration\x12\x0f\n\x07seconds\x18\x01 \x01(\x03\x12\r\n\x05nanos\x18\x02 \x01(\x05\x42\x83\x01\n\x13\x63om.google.protobufB\rDurationProtoP\x01Z1google.golang.org/protobuf/types/known/durationpb\xf8\x01\x01\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3') + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.duration_pb2', globals()) +if _descriptor._USE_C_DESCRIPTORS == False: + + DESCRIPTOR._options = None + DESCRIPTOR._serialized_options = b'\n\023com.google.protobufB\rDurationProtoP\001Z1google.golang.org/protobuf/types/known/durationpb\370\001\001\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes' + _DURATION._serialized_start=51 + _DURATION._serialized_end=93 +# @@protoc_insertion_point(module_scope) diff --git a/openpype/hosts/nuke/vendor/google/protobuf/empty_pb2.py b/openpype/hosts/nuke/vendor/google/protobuf/empty_pb2.py new file mode 100644 index 0000000000..0b4d554db3 --- /dev/null +++ b/openpype/hosts/nuke/vendor/google/protobuf/empty_pb2.py @@ -0,0 +1,26 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: google/protobuf/empty.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1bgoogle/protobuf/empty.proto\x12\x0fgoogle.protobuf\"\x07\n\x05\x45mptyB}\n\x13\x63om.google.protobufB\nEmptyProtoP\x01Z.google.golang.org/protobuf/types/known/emptypb\xf8\x01\x01\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3') + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.empty_pb2', globals()) +if _descriptor._USE_C_DESCRIPTORS == False: + + DESCRIPTOR._options = None + DESCRIPTOR._serialized_options = b'\n\023com.google.protobufB\nEmptyProtoP\001Z.google.golang.org/protobuf/types/known/emptypb\370\001\001\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes' + _EMPTY._serialized_start=48 + _EMPTY._serialized_end=55 +# @@protoc_insertion_point(module_scope) diff --git a/openpype/hosts/nuke/vendor/google/protobuf/field_mask_pb2.py b/openpype/hosts/nuke/vendor/google/protobuf/field_mask_pb2.py new file mode 100644 index 0000000000..80a4e96e59 --- /dev/null +++ b/openpype/hosts/nuke/vendor/google/protobuf/field_mask_pb2.py @@ -0,0 +1,26 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: google/protobuf/field_mask.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n google/protobuf/field_mask.proto\x12\x0fgoogle.protobuf\"\x1a\n\tFieldMask\x12\r\n\x05paths\x18\x01 \x03(\tB\x85\x01\n\x13\x63om.google.protobufB\x0e\x46ieldMaskProtoP\x01Z2google.golang.org/protobuf/types/known/fieldmaskpb\xf8\x01\x01\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3') + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.field_mask_pb2', globals()) +if _descriptor._USE_C_DESCRIPTORS == False: + + DESCRIPTOR._options = None + DESCRIPTOR._serialized_options = b'\n\023com.google.protobufB\016FieldMaskProtoP\001Z2google.golang.org/protobuf/types/known/fieldmaskpb\370\001\001\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes' + _FIELDMASK._serialized_start=53 + _FIELDMASK._serialized_end=79 +# @@protoc_insertion_point(module_scope) diff --git a/openpype/hosts/nuke/vendor/google/protobuf/internal/__init__.py b/openpype/hosts/nuke/vendor/google/protobuf/internal/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/openpype/hosts/nuke/vendor/google/protobuf/internal/_parameterized.py b/openpype/hosts/nuke/vendor/google/protobuf/internal/_parameterized.py new file mode 100644 index 0000000000..afdbb78c36 --- /dev/null +++ b/openpype/hosts/nuke/vendor/google/protobuf/internal/_parameterized.py @@ -0,0 +1,443 @@ +#! /usr/bin/env python +# +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. +# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ +"""Adds support for parameterized tests to Python's unittest TestCase class. + +A parameterized test is a method in a test case that is invoked with different +argument tuples. + +A simple example: + + class AdditionExample(parameterized.TestCase): + @parameterized.parameters( + (1, 2, 3), + (4, 5, 9), + (1, 1, 3)) + def testAddition(self, op1, op2, result): + self.assertEqual(result, op1 + op2) + + +Each invocation is a separate test case and properly isolated just +like a normal test method, with its own setUp/tearDown cycle. In the +example above, there are three separate testcases, one of which will +fail due to an assertion error (1 + 1 != 3). + +Parameters for individual test cases can be tuples (with positional parameters) +or dictionaries (with named parameters): + + class AdditionExample(parameterized.TestCase): + @parameterized.parameters( + {'op1': 1, 'op2': 2, 'result': 3}, + {'op1': 4, 'op2': 5, 'result': 9}, + ) + def testAddition(self, op1, op2, result): + self.assertEqual(result, op1 + op2) + +If a parameterized test fails, the error message will show the +original test name (which is modified internally) and the arguments +for the specific invocation, which are part of the string returned by +the shortDescription() method on test cases. + +The id method of the test, used internally by the unittest framework, +is also modified to show the arguments. To make sure that test names +stay the same across several invocations, object representations like + + >>> class Foo(object): + ... pass + >>> repr(Foo()) + '<__main__.Foo object at 0x23d8610>' + +are turned into '<__main__.Foo>'. For even more descriptive names, +especially in test logs, you can use the named_parameters decorator. In +this case, only tuples are supported, and the first parameters has to +be a string (or an object that returns an apt name when converted via +str()): + + class NamedExample(parameterized.TestCase): + @parameterized.named_parameters( + ('Normal', 'aa', 'aaa', True), + ('EmptyPrefix', '', 'abc', True), + ('BothEmpty', '', '', True)) + def testStartsWith(self, prefix, string, result): + self.assertEqual(result, strings.startswith(prefix)) + +Named tests also have the benefit that they can be run individually +from the command line: + + $ testmodule.py NamedExample.testStartsWithNormal + . + -------------------------------------------------------------------- + Ran 1 test in 0.000s + + OK + +Parameterized Classes +===================== +If invocation arguments are shared across test methods in a single +TestCase class, instead of decorating all test methods +individually, the class itself can be decorated: + + @parameterized.parameters( + (1, 2, 3) + (4, 5, 9)) + class ArithmeticTest(parameterized.TestCase): + def testAdd(self, arg1, arg2, result): + self.assertEqual(arg1 + arg2, result) + + def testSubtract(self, arg2, arg2, result): + self.assertEqual(result - arg1, arg2) + +Inputs from Iterables +===================== +If parameters should be shared across several test cases, or are dynamically +created from other sources, a single non-tuple iterable can be passed into +the decorator. 
This iterable will be used to obtain the test cases: + + class AdditionExample(parameterized.TestCase): + @parameterized.parameters( + c.op1, c.op2, c.result for c in testcases + ) + def testAddition(self, op1, op2, result): + self.assertEqual(result, op1 + op2) + + +Single-Argument Test Methods +============================ +If a test method takes only one argument, the single argument does not need to +be wrapped into a tuple: + + class NegativeNumberExample(parameterized.TestCase): + @parameterized.parameters( + -1, -3, -4, -5 + ) + def testIsNegative(self, arg): + self.assertTrue(IsNegative(arg)) +""" + +__author__ = 'tmarek@google.com (Torsten Marek)' + +import functools +import re +import types +import unittest +import uuid + +try: + # Since python 3 + import collections.abc as collections_abc +except ImportError: + # Won't work after python 3.8 + import collections as collections_abc + +ADDR_RE = re.compile(r'\<([a-zA-Z0-9_\-\.]+) object at 0x[a-fA-F0-9]+\>') +_SEPARATOR = uuid.uuid1().hex +_FIRST_ARG = object() +_ARGUMENT_REPR = object() + + +def _CleanRepr(obj): + return ADDR_RE.sub(r'<\1>', repr(obj)) + + +# Helper function formerly from the unittest module, removed from it in +# Python 2.7. +def _StrClass(cls): + return '%s.%s' % (cls.__module__, cls.__name__) + + +def _NonStringIterable(obj): + return (isinstance(obj, collections_abc.Iterable) and + not isinstance(obj, str)) + + +def _FormatParameterList(testcase_params): + if isinstance(testcase_params, collections_abc.Mapping): + return ', '.join('%s=%s' % (argname, _CleanRepr(value)) + for argname, value in testcase_params.items()) + elif _NonStringIterable(testcase_params): + return ', '.join(map(_CleanRepr, testcase_params)) + else: + return _FormatParameterList((testcase_params,)) + + +class _ParameterizedTestIter(object): + """Callable and iterable class for producing new test cases.""" + + def __init__(self, test_method, testcases, naming_type): + """Returns concrete test functions for a test and a list of parameters. + + The naming_type is used to determine the name of the concrete + functions as reported by the unittest framework. If naming_type is + _FIRST_ARG, the testcases must be tuples, and the first element must + have a string representation that is a valid Python identifier. + + Args: + test_method: The decorated test method. + testcases: (list of tuple/dict) A list of parameter + tuples/dicts for individual test invocations. + naming_type: The test naming type, either _NAMED or _ARGUMENT_REPR. + """ + self._test_method = test_method + self.testcases = testcases + self._naming_type = naming_type + + def __call__(self, *args, **kwargs): + raise RuntimeError('You appear to be running a parameterized test case ' + 'without having inherited from parameterized.' + 'TestCase. This is bad because none of ' + 'your test cases are actually being run.') + + def __iter__(self): + test_method = self._test_method + naming_type = self._naming_type + + def MakeBoundParamTest(testcase_params): + @functools.wraps(test_method) + def BoundParamTest(self): + if isinstance(testcase_params, collections_abc.Mapping): + test_method(self, **testcase_params) + elif _NonStringIterable(testcase_params): + test_method(self, *testcase_params) + else: + test_method(self, testcase_params) + + if naming_type is _FIRST_ARG: + # Signal the metaclass that the name of the test function is unique + # and descriptive. 
+ BoundParamTest.__x_use_name__ = True + BoundParamTest.__name__ += str(testcase_params[0]) + testcase_params = testcase_params[1:] + elif naming_type is _ARGUMENT_REPR: + # __x_extra_id__ is used to pass naming information to the __new__ + # method of TestGeneratorMetaclass. + # The metaclass will make sure to create a unique, but nondescriptive + # name for this test. + BoundParamTest.__x_extra_id__ = '(%s)' % ( + _FormatParameterList(testcase_params),) + else: + raise RuntimeError('%s is not a valid naming type.' % (naming_type,)) + + BoundParamTest.__doc__ = '%s(%s)' % ( + BoundParamTest.__name__, _FormatParameterList(testcase_params)) + if test_method.__doc__: + BoundParamTest.__doc__ += '\n%s' % (test_method.__doc__,) + return BoundParamTest + return (MakeBoundParamTest(c) for c in self.testcases) + + +def _IsSingletonList(testcases): + """True iff testcases contains only a single non-tuple element.""" + return len(testcases) == 1 and not isinstance(testcases[0], tuple) + + +def _ModifyClass(class_object, testcases, naming_type): + assert not getattr(class_object, '_id_suffix', None), ( + 'Cannot add parameters to %s,' + ' which already has parameterized methods.' % (class_object,)) + class_object._id_suffix = id_suffix = {} + # We change the size of __dict__ while we iterate over it, + # which Python 3.x will complain about, so use copy(). + for name, obj in class_object.__dict__.copy().items(): + if (name.startswith(unittest.TestLoader.testMethodPrefix) + and isinstance(obj, types.FunctionType)): + delattr(class_object, name) + methods = {} + _UpdateClassDictForParamTestCase( + methods, id_suffix, name, + _ParameterizedTestIter(obj, testcases, naming_type)) + for name, meth in methods.items(): + setattr(class_object, name, meth) + + +def _ParameterDecorator(naming_type, testcases): + """Implementation of the parameterization decorators. + + Args: + naming_type: The naming type. + testcases: Testcase parameters. + + Returns: + A function for modifying the decorated object. + """ + def _Apply(obj): + if isinstance(obj, type): + _ModifyClass( + obj, + list(testcases) if not isinstance(testcases, collections_abc.Sequence) + else testcases, + naming_type) + return obj + else: + return _ParameterizedTestIter(obj, testcases, naming_type) + + if _IsSingletonList(testcases): + assert _NonStringIterable(testcases[0]), ( + 'Single parameter argument must be a non-string iterable') + testcases = testcases[0] + + return _Apply + + +def parameters(*testcases): # pylint: disable=invalid-name + """A decorator for creating parameterized tests. + + See the module docstring for a usage example. + Args: + *testcases: Parameters for the decorated method, either a single + iterable, or a list of tuples/dicts/objects (for tests + with only one argument). + + Returns: + A test generator to be handled by TestGeneratorMetaclass. + """ + return _ParameterDecorator(_ARGUMENT_REPR, testcases) + + +def named_parameters(*testcases): # pylint: disable=invalid-name + """A decorator for creating parameterized tests. + + See the module docstring for a usage example. The first element of + each parameter tuple should be a string and will be appended to the + name of the test method. + + Args: + *testcases: Parameters for the decorated method, either a single + iterable, or a list of tuples. + + Returns: + A test generator to be handled by TestGeneratorMetaclass. + """ + return _ParameterDecorator(_FIRST_ARG, testcases) + + +class TestGeneratorMetaclass(type): + """Metaclass for test cases with test generators. 
+ + A test generator is an iterable in a testcase that produces callables. These + callables must be single-argument methods. These methods are injected into + the class namespace and the original iterable is removed. If the name of the + iterable conforms to the test pattern, the injected methods will be picked + up as tests by the unittest framework. + + In general, it is supposed to be used in conjunction with the + parameters decorator. + """ + + def __new__(mcs, class_name, bases, dct): + dct['_id_suffix'] = id_suffix = {} + for name, obj in dct.copy().items(): + if (name.startswith(unittest.TestLoader.testMethodPrefix) and + _NonStringIterable(obj)): + iterator = iter(obj) + dct.pop(name) + _UpdateClassDictForParamTestCase(dct, id_suffix, name, iterator) + + return type.__new__(mcs, class_name, bases, dct) + + +def _UpdateClassDictForParamTestCase(dct, id_suffix, name, iterator): + """Adds individual test cases to a dictionary. + + Args: + dct: The target dictionary. + id_suffix: The dictionary for mapping names to test IDs. + name: The original name of the test case. + iterator: The iterator generating the individual test cases. + """ + for idx, func in enumerate(iterator): + assert callable(func), 'Test generators must yield callables, got %r' % ( + func,) + if getattr(func, '__x_use_name__', False): + new_name = func.__name__ + else: + new_name = '%s%s%d' % (name, _SEPARATOR, idx) + assert new_name not in dct, ( + 'Name of parameterized test case "%s" not unique' % (new_name,)) + dct[new_name] = func + id_suffix[new_name] = getattr(func, '__x_extra_id__', '') + + +class TestCase(unittest.TestCase, metaclass=TestGeneratorMetaclass): + """Base class for test cases using the parameters decorator.""" + + def _OriginalName(self): + return self._testMethodName.split(_SEPARATOR)[0] + + def __str__(self): + return '%s (%s)' % (self._OriginalName(), _StrClass(self.__class__)) + + def id(self): # pylint: disable=invalid-name + """Returns the descriptive ID of the test. + + This is used internally by the unittesting framework to get a name + for the test to be used in reports. + + Returns: + The test id. + """ + return '%s.%s%s' % (_StrClass(self.__class__), + self._OriginalName(), + self._id_suffix.get(self._testMethodName, '')) + + +def CoopTestCase(other_base_class): + """Returns a new base class with a cooperative metaclass base. + + This enables the TestCase to be used in combination + with other base classes that have custom metaclasses, such as + mox.MoxTestBase. + + Only works with metaclasses that do not override type.__new__. + + Example: + + import google3 + import mox + + from google3.testing.pybase import parameterized + + class ExampleTest(parameterized.CoopTestCase(mox.MoxTestBase)): + ... + + Args: + other_base_class: (class) A test case base class. + + Returns: + A new class object. + """ + metaclass = type( + 'CoopMetaclass', + (other_base_class.__metaclass__, + TestGeneratorMetaclass), {}) + return metaclass( + 'CoopTestCase', + (other_base_class, TestCase), {}) diff --git a/openpype/hosts/nuke/vendor/google/protobuf/internal/api_implementation.py b/openpype/hosts/nuke/vendor/google/protobuf/internal/api_implementation.py new file mode 100644 index 0000000000..7fef237670 --- /dev/null +++ b/openpype/hosts/nuke/vendor/google/protobuf/internal/api_implementation.py @@ -0,0 +1,112 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. 
+# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +"""Determine which implementation of the protobuf API is used in this process. +""" + +import os +import sys +import warnings + +try: + # pylint: disable=g-import-not-at-top + from google.protobuf.internal import _api_implementation + # The compile-time constants in the _api_implementation module can be used to + # switch to a certain implementation of the Python API at build time. + _api_version = _api_implementation.api_version +except ImportError: + _api_version = -1 # Unspecified by compiler flags. + +if _api_version == 1: + raise ValueError('api_version=1 is no longer supported.') + + +_default_implementation_type = ('cpp' if _api_version > 0 else 'python') + + +# This environment variable can be used to switch to a certain implementation +# of the Python API, overriding the compile-time constants in the +# _api_implementation module. Right now only 'python' and 'cpp' are valid +# values. Any other value will be ignored. +_implementation_type = os.getenv('PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION', + _default_implementation_type) + +if _implementation_type != 'python': + _implementation_type = 'cpp' + +if 'PyPy' in sys.version and _implementation_type == 'cpp': + warnings.warn('PyPy does not work yet with cpp protocol buffers. ' + 'Falling back to the python implementation.') + _implementation_type = 'python' + + +# Detect if serialization should be deterministic by default +try: + # The presence of this module in a build allows the proto implementation to + # be upgraded merely via build deps. + # + # NOTE: Merely importing this automatically enables deterministic proto + # serialization for C++ code, but we still need to export it as a boolean so + # that we can do the same for `_implementation_type == 'python'`. + # + # NOTE2: It is possible for C++ code to enable deterministic serialization by + # default _without_ affecting Python code, if the C++ implementation is not in + # use by this module. 
That is intended behavior, so we don't actually expose + # this boolean outside of this module. + # + # pylint: disable=g-import-not-at-top,unused-import + from google.protobuf import enable_deterministic_proto_serialization + _python_deterministic_proto_serialization = True +except ImportError: + _python_deterministic_proto_serialization = False + + +# Usage of this function is discouraged. Clients shouldn't care which +# implementation of the API is in use. Note that there is no guarantee +# that differences between APIs will be maintained. +# Please don't use this function if possible. +def Type(): + return _implementation_type + + +def _SetType(implementation_type): + """Never use! Only for protobuf benchmark.""" + global _implementation_type + _implementation_type = implementation_type + + +# See comment on 'Type' above. +def Version(): + return 2 + + +# For internal use only +def IsPythonDefaultSerializationDeterministic(): + return _python_deterministic_proto_serialization diff --git a/openpype/hosts/nuke/vendor/google/protobuf/internal/builder.py b/openpype/hosts/nuke/vendor/google/protobuf/internal/builder.py new file mode 100644 index 0000000000..64353ee4af --- /dev/null +++ b/openpype/hosts/nuke/vendor/google/protobuf/internal/builder.py @@ -0,0 +1,130 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. +# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +"""Builds descriptors, message classes and services for generated _pb2.py. + +This file is only called in python generated _pb2.py files. It builds +descriptors, message classes and services that users can directly use +in generated code. 
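+
+A generated _pb2 module typically drives this module as follows (sketch;
+"serialized_pb" stands in for the serialized FileDescriptorProto bytes
+emitted by protoc):
+
+  DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(serialized_pb)
+  _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals())
+  _builder.BuildTopDescriptorsAndMessages(
+      DESCRIPTOR, 'google.protobuf.duration_pb2', globals())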
+""" + +__author__ = 'jieluo@google.com (Jie Luo)' + +from google.protobuf.internal import enum_type_wrapper +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database + +_sym_db = _symbol_database.Default() + + +def BuildMessageAndEnumDescriptors(file_des, module): + """Builds message and enum descriptors. + + Args: + file_des: FileDescriptor of the .proto file + module: Generated _pb2 module + """ + + def BuildNestedDescriptors(msg_des, prefix): + for (name, nested_msg) in msg_des.nested_types_by_name.items(): + module_name = prefix + name.upper() + module[module_name] = nested_msg + BuildNestedDescriptors(nested_msg, module_name + '_') + for enum_des in msg_des.enum_types: + module[prefix + enum_des.name.upper()] = enum_des + + for (name, msg_des) in file_des.message_types_by_name.items(): + module_name = '_' + name.upper() + module[module_name] = msg_des + BuildNestedDescriptors(msg_des, module_name + '_') + + +def BuildTopDescriptorsAndMessages(file_des, module_name, module): + """Builds top level descriptors and message classes. + + Args: + file_des: FileDescriptor of the .proto file + module_name: str, the name of generated _pb2 module + module: Generated _pb2 module + """ + + def BuildMessage(msg_des): + create_dict = {} + for (name, nested_msg) in msg_des.nested_types_by_name.items(): + create_dict[name] = BuildMessage(nested_msg) + create_dict['DESCRIPTOR'] = msg_des + create_dict['__module__'] = module_name + message_class = _reflection.GeneratedProtocolMessageType( + msg_des.name, (_message.Message,), create_dict) + _sym_db.RegisterMessage(message_class) + return message_class + + # top level enums + for (name, enum_des) in file_des.enum_types_by_name.items(): + module['_' + name.upper()] = enum_des + module[name] = enum_type_wrapper.EnumTypeWrapper(enum_des) + for enum_value in enum_des.values: + module[enum_value.name] = enum_value.number + + # top level extensions + for (name, extension_des) in file_des.extensions_by_name.items(): + module[name.upper() + '_FIELD_NUMBER'] = extension_des.number + module[name] = extension_des + + # services + for (name, service) in file_des.services_by_name.items(): + module['_' + name.upper()] = service + + # Build messages. + for (name, msg_des) in file_des.message_types_by_name.items(): + module[name] = BuildMessage(msg_des) + + +def BuildServices(file_des, module_name, module): + """Builds services classes and services stub class. 
+ + Args: + file_des: FileDescriptor of the .proto file + module_name: str, the name of generated _pb2 module + module: Generated _pb2 module + """ + # pylint: disable=g-import-not-at-top + from google.protobuf import service as _service + from google.protobuf import service_reflection + # pylint: enable=g-import-not-at-top + for (name, service) in file_des.services_by_name.items(): + module[name] = service_reflection.GeneratedServiceType( + name, (_service.Service,), + dict(DESCRIPTOR=service, __module__=module_name)) + stub_name = name + '_Stub' + module[stub_name] = service_reflection.GeneratedServiceStubType( + stub_name, (module[name],), + dict(DESCRIPTOR=service, __module__=module_name)) diff --git a/openpype/hosts/nuke/vendor/google/protobuf/internal/containers.py b/openpype/hosts/nuke/vendor/google/protobuf/internal/containers.py new file mode 100644 index 0000000000..29fbb53d2f --- /dev/null +++ b/openpype/hosts/nuke/vendor/google/protobuf/internal/containers.py @@ -0,0 +1,710 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. +# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +"""Contains container classes to represent different protocol buffer types. + +This file defines container classes which represent categories of protocol +buffer field types which need extra maintenance. Currently these categories +are: + +- Repeated scalar fields - These are all repeated fields which aren't + composite (e.g. they are of simple types like int32, string, etc). +- Repeated composite fields - Repeated fields which are composite. This + includes groups and nested messages. 
+""" + +import collections.abc +import copy +import pickle +from typing import ( + Any, + Iterable, + Iterator, + List, + MutableMapping, + MutableSequence, + NoReturn, + Optional, + Sequence, + TypeVar, + Union, + overload, +) + + +_T = TypeVar('_T') +_K = TypeVar('_K') +_V = TypeVar('_V') + + +class BaseContainer(Sequence[_T]): + """Base container class.""" + + # Minimizes memory usage and disallows assignment to other attributes. + __slots__ = ['_message_listener', '_values'] + + def __init__(self, message_listener: Any) -> None: + """ + Args: + message_listener: A MessageListener implementation. + The RepeatedScalarFieldContainer will call this object's + Modified() method when it is modified. + """ + self._message_listener = message_listener + self._values = [] + + @overload + def __getitem__(self, key: int) -> _T: + ... + + @overload + def __getitem__(self, key: slice) -> List[_T]: + ... + + def __getitem__(self, key): + """Retrieves item by the specified key.""" + return self._values[key] + + def __len__(self) -> int: + """Returns the number of elements in the container.""" + return len(self._values) + + def __ne__(self, other: Any) -> bool: + """Checks if another instance isn't equal to this one.""" + # The concrete classes should define __eq__. + return not self == other + + __hash__ = None + + def __repr__(self) -> str: + return repr(self._values) + + def sort(self, *args, **kwargs) -> None: + # Continue to support the old sort_function keyword argument. + # This is expected to be a rare occurrence, so use LBYL to avoid + # the overhead of actually catching KeyError. + if 'sort_function' in kwargs: + kwargs['cmp'] = kwargs.pop('sort_function') + self._values.sort(*args, **kwargs) + + def reverse(self) -> None: + self._values.reverse() + + +# TODO(slebedev): Remove this. BaseContainer does *not* conform to +# MutableSequence, only its subclasses do. +collections.abc.MutableSequence.register(BaseContainer) + + +class RepeatedScalarFieldContainer(BaseContainer[_T], MutableSequence[_T]): + """Simple, type-checked, list-like container for holding repeated scalars.""" + + # Disallows assignment to other attributes. + __slots__ = ['_type_checker'] + + def __init__( + self, + message_listener: Any, + type_checker: Any, + ) -> None: + """Args: + + message_listener: A MessageListener implementation. The + RepeatedScalarFieldContainer will call this object's Modified() method + when it is modified. + type_checker: A type_checkers.ValueChecker instance to run on elements + inserted into this container. + """ + super().__init__(message_listener) + self._type_checker = type_checker + + def append(self, value: _T) -> None: + """Appends an item to the list. Similar to list.append().""" + self._values.append(self._type_checker.CheckValue(value)) + if not self._message_listener.dirty: + self._message_listener.Modified() + + def insert(self, key: int, value: _T) -> None: + """Inserts the item at the specified position. Similar to list.insert().""" + self._values.insert(key, self._type_checker.CheckValue(value)) + if not self._message_listener.dirty: + self._message_listener.Modified() + + def extend(self, elem_seq: Iterable[_T]) -> None: + """Extends by appending the given iterable. Similar to list.extend().""" + if elem_seq is None: + return + try: + elem_seq_iter = iter(elem_seq) + except TypeError: + if not elem_seq: + # silently ignore falsy inputs :-/. + # TODO(ptucker): Deprecate this behavior. 
b/18413862 + return + raise + + new_values = [self._type_checker.CheckValue(elem) for elem in elem_seq_iter] + if new_values: + self._values.extend(new_values) + self._message_listener.Modified() + + def MergeFrom( + self, + other: Union['RepeatedScalarFieldContainer[_T]', Iterable[_T]], + ) -> None: + """Appends the contents of another repeated field of the same type to this + one. We do not check the types of the individual fields. + """ + self._values.extend(other) + self._message_listener.Modified() + + def remove(self, elem: _T): + """Removes an item from the list. Similar to list.remove().""" + self._values.remove(elem) + self._message_listener.Modified() + + def pop(self, key: Optional[int] = -1) -> _T: + """Removes and returns an item at a given index. Similar to list.pop().""" + value = self._values[key] + self.__delitem__(key) + return value + + @overload + def __setitem__(self, key: int, value: _T) -> None: + ... + + @overload + def __setitem__(self, key: slice, value: Iterable[_T]) -> None: + ... + + def __setitem__(self, key, value) -> None: + """Sets the item on the specified position.""" + if isinstance(key, slice): + if key.step is not None: + raise ValueError('Extended slices not supported') + self._values[key] = map(self._type_checker.CheckValue, value) + self._message_listener.Modified() + else: + self._values[key] = self._type_checker.CheckValue(value) + self._message_listener.Modified() + + def __delitem__(self, key: Union[int, slice]) -> None: + """Deletes the item at the specified position.""" + del self._values[key] + self._message_listener.Modified() + + def __eq__(self, other: Any) -> bool: + """Compares the current instance with another one.""" + if self is other: + return True + # Special case for the same type which should be common and fast. + if isinstance(other, self.__class__): + return other._values == self._values + # We are presumably comparing against some other sequence type. + return other == self._values + + def __deepcopy__( + self, + unused_memo: Any = None, + ) -> 'RepeatedScalarFieldContainer[_T]': + clone = RepeatedScalarFieldContainer( + copy.deepcopy(self._message_listener), self._type_checker) + clone.MergeFrom(self) + return clone + + def __reduce__(self, **kwargs) -> NoReturn: + raise pickle.PickleError( + "Can't pickle repeated scalar fields, convert to list first") + + +# TODO(slebedev): Constrain T to be a subtype of Message. +class RepeatedCompositeFieldContainer(BaseContainer[_T], MutableSequence[_T]): + """Simple, list-like container for holding repeated composite fields.""" + + # Disallows assignment to other attributes. + __slots__ = ['_message_descriptor'] + + def __init__(self, message_listener: Any, message_descriptor: Any) -> None: + """ + Note that we pass in a descriptor instead of the generated directly, + since at the time we construct a _RepeatedCompositeFieldContainer we + haven't yet necessarily initialized the type that will be contained in the + container. + + Args: + message_listener: A MessageListener implementation. + The RepeatedCompositeFieldContainer will call this object's + Modified() method when it is modified. + message_descriptor: A Descriptor instance describing the protocol type + that should be present in this container. We'll use the + _concrete_class field of this descriptor when the client calls add(). + """ + super().__init__(message_listener) + self._message_descriptor = message_descriptor + + def add(self, **kwargs: Any) -> _T: + """Adds a new element at the end of the list and returns it. 
Keyword + arguments may be used to initialize the element. + """ + new_element = self._message_descriptor._concrete_class(**kwargs) + new_element._SetListener(self._message_listener) + self._values.append(new_element) + if not self._message_listener.dirty: + self._message_listener.Modified() + return new_element + + def append(self, value: _T) -> None: + """Appends one element by copying the message.""" + new_element = self._message_descriptor._concrete_class() + new_element._SetListener(self._message_listener) + new_element.CopyFrom(value) + self._values.append(new_element) + if not self._message_listener.dirty: + self._message_listener.Modified() + + def insert(self, key: int, value: _T) -> None: + """Inserts the item at the specified position by copying.""" + new_element = self._message_descriptor._concrete_class() + new_element._SetListener(self._message_listener) + new_element.CopyFrom(value) + self._values.insert(key, new_element) + if not self._message_listener.dirty: + self._message_listener.Modified() + + def extend(self, elem_seq: Iterable[_T]) -> None: + """Extends by appending the given sequence of elements of the same type + + as this one, copying each individual message. + """ + message_class = self._message_descriptor._concrete_class + listener = self._message_listener + values = self._values + for message in elem_seq: + new_element = message_class() + new_element._SetListener(listener) + new_element.MergeFrom(message) + values.append(new_element) + listener.Modified() + + def MergeFrom( + self, + other: Union['RepeatedCompositeFieldContainer[_T]', Iterable[_T]], + ) -> None: + """Appends the contents of another repeated field of the same type to this + one, copying each individual message. + """ + self.extend(other) + + def remove(self, elem: _T) -> None: + """Removes an item from the list. Similar to list.remove().""" + self._values.remove(elem) + self._message_listener.Modified() + + def pop(self, key: Optional[int] = -1) -> _T: + """Removes and returns an item at a given index. Similar to list.pop().""" + value = self._values[key] + self.__delitem__(key) + return value + + @overload + def __setitem__(self, key: int, value: _T) -> None: + ... + + @overload + def __setitem__(self, key: slice, value: Iterable[_T]) -> None: + ... + + def __setitem__(self, key, value): + # This method is implemented to make RepeatedCompositeFieldContainer + # structurally compatible with typing.MutableSequence. It is + # otherwise unsupported and will always raise an error. + raise TypeError( + f'{self.__class__.__name__} object does not support item assignment') + + def __delitem__(self, key: Union[int, slice]) -> None: + """Deletes the item at the specified position.""" + del self._values[key] + self._message_listener.Modified() + + def __eq__(self, other: Any) -> bool: + """Compares the current instance with another one.""" + if self is other: + return True + if not isinstance(other, self.__class__): + raise TypeError('Can only compare repeated composite fields against ' + 'other repeated composite fields.') + return self._values == other._values + + +class ScalarMap(MutableMapping[_K, _V]): + """Simple, type-checked, dict-like container for holding repeated scalars.""" + + # Disallows assignment to other attributes. 
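As a rough illustration of the container contract described above, a `RepeatedScalarFieldContainer` only needs a listener object exposing `Modified()` and a `dirty` flag, plus a value checker. The listener stub below is an assumption made purely for demonstration; `Int32ValueChecker` is the checker the companion `type_checkers` module normally supplies.

```python
# Standalone sketch of the repeated-scalar container contract. The listener
# stub is invented for this example; Int32ValueChecker is the checker the
# real field machinery would normally pass in.
from google.protobuf.internal import containers, type_checkers

class _Listener:
    dirty = False
    def Modified(self):
        self.dirty = True

values = containers.RepeatedScalarFieldContainer(
    _Listener(), type_checkers.Int32ValueChecker())
values.extend([1, 2, 3])
values.append(4)
# values.append('oops')  # would raise TypeError from the type checker
print(list(values))       # [1, 2, 3, 4]
```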
+ __slots__ = ['_key_checker', '_value_checker', '_values', '_message_listener', + '_entry_descriptor'] + + def __init__( + self, + message_listener: Any, + key_checker: Any, + value_checker: Any, + entry_descriptor: Any, + ) -> None: + """ + Args: + message_listener: A MessageListener implementation. + The ScalarMap will call this object's Modified() method when it + is modified. + key_checker: A type_checkers.ValueChecker instance to run on keys + inserted into this container. + value_checker: A type_checkers.ValueChecker instance to run on values + inserted into this container. + entry_descriptor: The MessageDescriptor of a map entry: key and value. + """ + self._message_listener = message_listener + self._key_checker = key_checker + self._value_checker = value_checker + self._entry_descriptor = entry_descriptor + self._values = {} + + def __getitem__(self, key: _K) -> _V: + try: + return self._values[key] + except KeyError: + key = self._key_checker.CheckValue(key) + val = self._value_checker.DefaultValue() + self._values[key] = val + return val + + def __contains__(self, item: _K) -> bool: + # We check the key's type to match the strong-typing flavor of the API. + # Also this makes it easier to match the behavior of the C++ implementation. + self._key_checker.CheckValue(item) + return item in self._values + + @overload + def get(self, key: _K) -> Optional[_V]: + ... + + @overload + def get(self, key: _K, default: _T) -> Union[_V, _T]: + ... + + # We need to override this explicitly, because our defaultdict-like behavior + # will make the default implementation (from our base class) always insert + # the key. + def get(self, key, default=None): + if key in self: + return self[key] + else: + return default + + def __setitem__(self, key: _K, value: _V) -> _T: + checked_key = self._key_checker.CheckValue(key) + checked_value = self._value_checker.CheckValue(value) + self._values[checked_key] = checked_value + self._message_listener.Modified() + + def __delitem__(self, key: _K) -> None: + del self._values[key] + self._message_listener.Modified() + + def __len__(self) -> int: + return len(self._values) + + def __iter__(self) -> Iterator[_K]: + return iter(self._values) + + def __repr__(self) -> str: + return repr(self._values) + + def MergeFrom(self, other: 'ScalarMap[_K, _V]') -> None: + self._values.update(other._values) + self._message_listener.Modified() + + def InvalidateIterators(self) -> None: + # It appears that the only way to reliably invalidate iterators to + # self._values is to ensure that its size changes. + original = self._values + self._values = original.copy() + original[None] = None + + # This is defined in the abstract base, but we can do it much more cheaply. + def clear(self) -> None: + self._values.clear() + self._message_listener.Modified() + + def GetEntryClass(self) -> Any: + return self._entry_descriptor._concrete_class + + +class MessageMap(MutableMapping[_K, _V]): + """Simple, type-checked, dict-like container for with submessage values.""" + + # Disallows assignment to other attributes. + __slots__ = ['_key_checker', '_values', '_message_listener', + '_message_descriptor', '_entry_descriptor'] + + def __init__( + self, + message_listener: Any, + message_descriptor: Any, + key_checker: Any, + entry_descriptor: Any, + ) -> None: + """ + Args: + message_listener: A MessageListener implementation. + The ScalarMap will call this object's Modified() method when it + is modified. 
+ key_checker: A type_checkers.ValueChecker instance to run on keys + inserted into this container. + value_checker: A type_checkers.ValueChecker instance to run on values + inserted into this container. + entry_descriptor: The MessageDescriptor of a map entry: key and value. + """ + self._message_listener = message_listener + self._message_descriptor = message_descriptor + self._key_checker = key_checker + self._entry_descriptor = entry_descriptor + self._values = {} + + def __getitem__(self, key: _K) -> _V: + key = self._key_checker.CheckValue(key) + try: + return self._values[key] + except KeyError: + new_element = self._message_descriptor._concrete_class() + new_element._SetListener(self._message_listener) + self._values[key] = new_element + self._message_listener.Modified() + return new_element + + def get_or_create(self, key: _K) -> _V: + """get_or_create() is an alias for getitem (ie. map[key]). + + Args: + key: The key to get or create in the map. + + This is useful in cases where you want to be explicit that the call is + mutating the map. This can avoid lint errors for statements like this + that otherwise would appear to be pointless statements: + + msg.my_map[key] + """ + return self[key] + + @overload + def get(self, key: _K) -> Optional[_V]: + ... + + @overload + def get(self, key: _K, default: _T) -> Union[_V, _T]: + ... + + # We need to override this explicitly, because our defaultdict-like behavior + # will make the default implementation (from our base class) always insert + # the key. + def get(self, key, default=None): + if key in self: + return self[key] + else: + return default + + def __contains__(self, item: _K) -> bool: + item = self._key_checker.CheckValue(item) + return item in self._values + + def __setitem__(self, key: _K, value: _V) -> NoReturn: + raise ValueError('May not set values directly, call my_map[key].foo = 5') + + def __delitem__(self, key: _K) -> None: + key = self._key_checker.CheckValue(key) + del self._values[key] + self._message_listener.Modified() + + def __len__(self) -> int: + return len(self._values) + + def __iter__(self) -> Iterator[_K]: + return iter(self._values) + + def __repr__(self) -> str: + return repr(self._values) + + def MergeFrom(self, other: 'MessageMap[_K, _V]') -> None: + # pylint: disable=protected-access + for key in other._values: + # According to documentation: "When parsing from the wire or when merging, + # if there are duplicate map keys the last key seen is used". + if key in self: + del self[key] + self[key].CopyFrom(other[key]) + # self._message_listener.Modified() not required here, because + # mutations to submessages already propagate. + + def InvalidateIterators(self) -> None: + # It appears that the only way to reliably invalidate iterators to + # self._values is to ensure that its size changes. + original = self._values + self._values = original.copy() + original[None] = None + + # This is defined in the abstract base, but we can do it much more cheaply. + def clear(self) -> None: + self._values.clear() + self._message_listener.Modified() + + def GetEntryClass(self) -> Any: + return self._entry_descriptor._concrete_class + + +class _UnknownField: + """A parsed unknown field.""" + + # Disallows assignment to other attributes. 
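The map containers above are normally reached through generated message fields rather than constructed directly. A hypothetical usage sketch, assuming a generated `example_pb2.Library` message with a `map<string, Book> books = 1;` field:

```python
# Hypothetical usage of MessageMap through a generated message; the
# ``example_pb2`` module and its Library/Book types are assumptions.
from example_pb2 import Library

lib = Library()
lib.books['dune'].title = 'Dune'                # value created on first access
entry = lib.books.get_or_create('neuromancer')  # explicit mutating access
entry.title = 'Neuromancer'
# lib.books['x'] = Book()  # ValueError: values may not be assigned directly
```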
+ __slots__ = ['_field_number', '_wire_type', '_data'] + + def __init__(self, field_number, wire_type, data): + self._field_number = field_number + self._wire_type = wire_type + self._data = data + return + + def __lt__(self, other): + # pylint: disable=protected-access + return self._field_number < other._field_number + + def __eq__(self, other): + if self is other: + return True + # pylint: disable=protected-access + return (self._field_number == other._field_number and + self._wire_type == other._wire_type and + self._data == other._data) + + +class UnknownFieldRef: # pylint: disable=missing-class-docstring + + def __init__(self, parent, index): + self._parent = parent + self._index = index + + def _check_valid(self): + if not self._parent: + raise ValueError('UnknownField does not exist. ' + 'The parent message might be cleared.') + if self._index >= len(self._parent): + raise ValueError('UnknownField does not exist. ' + 'The parent message might be cleared.') + + @property + def field_number(self): + self._check_valid() + # pylint: disable=protected-access + return self._parent._internal_get(self._index)._field_number + + @property + def wire_type(self): + self._check_valid() + # pylint: disable=protected-access + return self._parent._internal_get(self._index)._wire_type + + @property + def data(self): + self._check_valid() + # pylint: disable=protected-access + return self._parent._internal_get(self._index)._data + + +class UnknownFieldSet: + """UnknownField container""" + + # Disallows assignment to other attributes. + __slots__ = ['_values'] + + def __init__(self): + self._values = [] + + def __getitem__(self, index): + if self._values is None: + raise ValueError('UnknownFields does not exist. ' + 'The parent message might be cleared.') + size = len(self._values) + if index < 0: + index += size + if index < 0 or index >= size: + raise IndexError('index %d out of range'.index) + + return UnknownFieldRef(self, index) + + def _internal_get(self, index): + return self._values[index] + + def __len__(self): + if self._values is None: + raise ValueError('UnknownFields does not exist. ' + 'The parent message might be cleared.') + return len(self._values) + + def _add(self, field_number, wire_type, data): + unknown_field = _UnknownField(field_number, wire_type, data) + self._values.append(unknown_field) + return unknown_field + + def __iter__(self): + for i in range(len(self)): + yield UnknownFieldRef(self, i) + + def _extend(self, other): + if other is None: + return + # pylint: disable=protected-access + self._values.extend(other._values) + + def __eq__(self, other): + if self is other: + return True + # Sort unknown fields because their order shouldn't + # affect equality test. + values = list(self._values) + if other is None: + return not values + values.sort() + # pylint: disable=protected-access + other_values = sorted(other._values) + return values == other_values + + def _clear(self): + for value in self._values: + # pylint: disable=protected-access + if isinstance(value._data, UnknownFieldSet): + value._data._clear() # pylint: disable=protected-access + self._values = None diff --git a/openpype/hosts/nuke/vendor/google/protobuf/internal/decoder.py b/openpype/hosts/nuke/vendor/google/protobuf/internal/decoder.py new file mode 100644 index 0000000000..bc1b7b785c --- /dev/null +++ b/openpype/hosts/nuke/vendor/google/protobuf/internal/decoder.py @@ -0,0 +1,1029 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. 
+# https://developers.google.com/protocol-buffers/
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+#     * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+#     * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+#     * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+"""Code for decoding protocol buffer primitives.
+
+This code is very similar to encoder.py -- read the docs for that module first.
+
+A "decoder" is a function with the signature:
+  Decode(buffer, pos, end, message, field_dict)
+The arguments are:
+  buffer: The string containing the encoded message.
+  pos: The current position in the string.
+  end: The position in the string where the current message ends. May be
+    less than len(buffer) if we're reading a sub-message.
+  message: The message object into which we're parsing.
+  field_dict: message._fields (avoids a hashtable lookup).
+The decoder reads the field and stores it into field_dict, returning the new
+buffer position. A decoder for a repeated field may proactively decode all of
+the elements of that field, if they appear consecutively.
+
+Note that decoders may throw any of the following:
+  IndexError: Indicates a truncated message.
+  struct.error: Unpacking of a fixed-width field failed.
+  message.DecodeError: Other errors.
+
+Decoders are expected to raise an exception if they are called with pos > end.
+This allows callers to be lax about bounds checking: it's fine to read past
+"end" as long as you are sure that someone else will notice and throw an
+exception later on.
+
+Something up the call stack is expected to catch IndexError and struct.error
+and convert them to message.DecodeError.
+
+Decoders are constructed using decoder constructors with the signature:
+  MakeDecoder(field_number, is_repeated, is_packed, key, new_default)
+The arguments are:
+  field_number: The field number of the field we want to decode.
+  is_repeated: Is the field a repeated field? (bool)
+  is_packed: Is the field a packed field? (bool)
+  key: The key to use when looking up the field within field_dict.
+    (This is actually the FieldDescriptor but nothing in this
+    file should depend on that.)
+  new_default: A function which takes a message object as a parameter and
+    returns a new instance of the default value for this field.
+ (This is called for repeated fields and sub-messages, when an + instance does not already exist.) + +As with encoders, we define a decoder constructor for every type of field. +Then, for every field of every message class we construct an actual decoder. +That decoder goes into a dict indexed by tag, so when we decode a message +we repeatedly read a tag, look up the corresponding decoder, and invoke it. +""" + +__author__ = 'kenton@google.com (Kenton Varda)' + +import math +import struct + +from google.protobuf.internal import containers +from google.protobuf.internal import encoder +from google.protobuf.internal import wire_format +from google.protobuf import message + + +# This is not for optimization, but rather to avoid conflicts with local +# variables named "message". +_DecodeError = message.DecodeError + + +def _VarintDecoder(mask, result_type): + """Return an encoder for a basic varint value (does not include tag). + + Decoded values will be bitwise-anded with the given mask before being + returned, e.g. to limit them to 32 bits. The returned decoder does not + take the usual "end" parameter -- the caller is expected to do bounds checking + after the fact (often the caller can defer such checking until later). The + decoder returns a (value, new_pos) pair. + """ + + def DecodeVarint(buffer, pos): + result = 0 + shift = 0 + while 1: + b = buffer[pos] + result |= ((b & 0x7f) << shift) + pos += 1 + if not (b & 0x80): + result &= mask + result = result_type(result) + return (result, pos) + shift += 7 + if shift >= 64: + raise _DecodeError('Too many bytes when decoding varint.') + return DecodeVarint + + +def _SignedVarintDecoder(bits, result_type): + """Like _VarintDecoder() but decodes signed values.""" + + signbit = 1 << (bits - 1) + mask = (1 << bits) - 1 + + def DecodeVarint(buffer, pos): + result = 0 + shift = 0 + while 1: + b = buffer[pos] + result |= ((b & 0x7f) << shift) + pos += 1 + if not (b & 0x80): + result &= mask + result = (result ^ signbit) - signbit + result = result_type(result) + return (result, pos) + shift += 7 + if shift >= 64: + raise _DecodeError('Too many bytes when decoding varint.') + return DecodeVarint + +# All 32-bit and 64-bit values are represented as int. +_DecodeVarint = _VarintDecoder((1 << 64) - 1, int) +_DecodeSignedVarint = _SignedVarintDecoder(64, int) + +# Use these versions for values which must be limited to 32 bits. +_DecodeVarint32 = _VarintDecoder((1 << 32) - 1, int) +_DecodeSignedVarint32 = _SignedVarintDecoder(32, int) + + +def ReadTag(buffer, pos): + """Read a tag from the memoryview, and return a (tag_bytes, new_pos) tuple. + + We return the raw bytes of the tag rather than decoding them. The raw + bytes can then be used to look up the proper decoder. This effectively allows + us to trade some work that would be done in pure-python (decoding a varint) + for work that is done in C (searching for a byte string in a hash table). + In a low-level language it would be much cheaper to decode the varint and + use that, but not in Python. + + Args: + buffer: memoryview object of the encoded bytes + pos: int of the current position to start from + + Returns: + Tuple[bytes, int] of the tag data and new position. + """ + start = pos + while buffer[pos] & 0x80: + pos += 1 + pos += 1 + + tag_bytes = buffer[start:pos].tobytes() + return tag_bytes, pos + + +# -------------------------------------------------------------------- + + +def _SimpleDecoder(wire_type, decode_value): + """Return a constructor for a decoder for fields of a particular type. 
+ + Args: + wire_type: The field's wire type. + decode_value: A function which decodes an individual value, e.g. + _DecodeVarint() + """ + + def SpecificDecoder(field_number, is_repeated, is_packed, key, new_default, + clear_if_default=False): + if is_packed: + local_DecodeVarint = _DecodeVarint + def DecodePackedField(buffer, pos, end, message, field_dict): + value = field_dict.get(key) + if value is None: + value = field_dict.setdefault(key, new_default(message)) + (endpoint, pos) = local_DecodeVarint(buffer, pos) + endpoint += pos + if endpoint > end: + raise _DecodeError('Truncated message.') + while pos < endpoint: + (element, pos) = decode_value(buffer, pos) + value.append(element) + if pos > endpoint: + del value[-1] # Discard corrupt value. + raise _DecodeError('Packed element was truncated.') + return pos + return DecodePackedField + elif is_repeated: + tag_bytes = encoder.TagBytes(field_number, wire_type) + tag_len = len(tag_bytes) + def DecodeRepeatedField(buffer, pos, end, message, field_dict): + value = field_dict.get(key) + if value is None: + value = field_dict.setdefault(key, new_default(message)) + while 1: + (element, new_pos) = decode_value(buffer, pos) + value.append(element) + # Predict that the next tag is another copy of the same repeated + # field. + pos = new_pos + tag_len + if buffer[new_pos:pos] != tag_bytes or new_pos >= end: + # Prediction failed. Return. + if new_pos > end: + raise _DecodeError('Truncated message.') + return new_pos + return DecodeRepeatedField + else: + def DecodeField(buffer, pos, end, message, field_dict): + (new_value, pos) = decode_value(buffer, pos) + if pos > end: + raise _DecodeError('Truncated message.') + if clear_if_default and not new_value: + field_dict.pop(key, None) + else: + field_dict[key] = new_value + return pos + return DecodeField + + return SpecificDecoder + + +def _ModifiedDecoder(wire_type, decode_value, modify_value): + """Like SimpleDecoder but additionally invokes modify_value on every value + before storing it. Usually modify_value is ZigZagDecode. + """ + + # Reusing _SimpleDecoder is slightly slower than copying a bunch of code, but + # not enough to make a significant difference. + + def InnerDecode(buffer, pos): + (result, new_pos) = decode_value(buffer, pos) + return (modify_value(result), new_pos) + return _SimpleDecoder(wire_type, InnerDecode) + + +def _StructPackDecoder(wire_type, format): + """Return a constructor for a decoder for a fixed-width field. + + Args: + wire_type: The field's wire type. + format: The format string to pass to struct.unpack(). + """ + + value_size = struct.calcsize(format) + local_unpack = struct.unpack + + # Reusing _SimpleDecoder is slightly slower than copying a bunch of code, but + # not enough to make a significant difference. + + # Note that we expect someone up-stack to catch struct.error and convert + # it to _DecodeError -- this way we don't have to set up exception- + # handling blocks every time we parse one value. + + def InnerDecode(buffer, pos): + new_pos = pos + value_size + result = local_unpack(format, buffer[pos:new_pos])[0] + return (result, new_pos) + return _SimpleDecoder(wire_type, InnerDecode) + + +def _FloatDecoder(): + """Returns a decoder for a float field. + + This code works around a bug in struct.unpack for non-finite 32-bit + floating-point values. + """ + + local_unpack = struct.unpack + + def InnerDecode(buffer, pos): + """Decode serialized float to a float and new position. 
+ + Args: + buffer: memoryview of the serialized bytes + pos: int, position in the memory view to start at. + + Returns: + Tuple[float, int] of the deserialized float value and new position + in the serialized data. + """ + # We expect a 32-bit value in little-endian byte order. Bit 1 is the sign + # bit, bits 2-9 represent the exponent, and bits 10-32 are the significand. + new_pos = pos + 4 + float_bytes = buffer[pos:new_pos].tobytes() + + # If this value has all its exponent bits set, then it's non-finite. + # In Python 2.4, struct.unpack will convert it to a finite 64-bit value. + # To avoid that, we parse it specially. + if (float_bytes[3:4] in b'\x7F\xFF' and float_bytes[2:3] >= b'\x80'): + # If at least one significand bit is set... + if float_bytes[0:3] != b'\x00\x00\x80': + return (math.nan, new_pos) + # If sign bit is set... + if float_bytes[3:4] == b'\xFF': + return (-math.inf, new_pos) + return (math.inf, new_pos) + + # Note that we expect someone up-stack to catch struct.error and convert + # it to _DecodeError -- this way we don't have to set up exception- + # handling blocks every time we parse one value. + result = local_unpack('= b'\xF0') + and (double_bytes[0:7] != b'\x00\x00\x00\x00\x00\x00\xF0')): + return (math.nan, new_pos) + + # Note that we expect someone up-stack to catch struct.error and convert + # it to _DecodeError -- this way we don't have to set up exception- + # handling blocks every time we parse one value. + result = local_unpack(' end: + raise _DecodeError('Truncated message.') + while pos < endpoint: + value_start_pos = pos + (element, pos) = _DecodeSignedVarint32(buffer, pos) + # pylint: disable=protected-access + if element in enum_type.values_by_number: + value.append(element) + else: + if not message._unknown_fields: + message._unknown_fields = [] + tag_bytes = encoder.TagBytes(field_number, + wire_format.WIRETYPE_VARINT) + + message._unknown_fields.append( + (tag_bytes, buffer[value_start_pos:pos].tobytes())) + if message._unknown_field_set is None: + message._unknown_field_set = containers.UnknownFieldSet() + message._unknown_field_set._add( + field_number, wire_format.WIRETYPE_VARINT, element) + # pylint: enable=protected-access + if pos > endpoint: + if element in enum_type.values_by_number: + del value[-1] # Discard corrupt value. + else: + del message._unknown_fields[-1] + # pylint: disable=protected-access + del message._unknown_field_set._values[-1] + # pylint: enable=protected-access + raise _DecodeError('Packed element was truncated.') + return pos + return DecodePackedField + elif is_repeated: + tag_bytes = encoder.TagBytes(field_number, wire_format.WIRETYPE_VARINT) + tag_len = len(tag_bytes) + def DecodeRepeatedField(buffer, pos, end, message, field_dict): + """Decode serialized repeated enum to its value and a new position. + + Args: + buffer: memoryview of the serialized bytes. + pos: int, position in the memory view to start at. + end: int, end position of serialized data + message: Message object to store unknown fields in + field_dict: Map[Descriptor, Any] to store decoded values in. + + Returns: + int, new position in serialized data. 
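All of the integer and enum decoders above ultimately read base-128 varints: seven payload bits per byte, with the high bit set on every byte except the last. A standalone round-trip sketch for unsigned values (not the vendored implementation, just the same idea in minimal form):

```python
# Standalone illustration of the base-128 varint wire format used by the
# decoders above; unsigned values only.
def encode_varint(value: int) -> bytes:
    out = bytearray()
    while True:
        bits = value & 0x7F
        value >>= 7
        if value:
            out.append(0x80 | bits)   # continuation bit set
        else:
            out.append(bits)
            return bytes(out)

def decode_varint(buf: bytes, pos: int = 0):
    result = shift = 0
    while True:
        b = buf[pos]
        result |= (b & 0x7F) << shift
        pos += 1
        if not b & 0x80:
            return result, pos
        shift += 7

assert encode_varint(300) == b'\xac\x02'
assert decode_varint(b'\xac\x02') == (300, 2)
```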
+ """ + value = field_dict.get(key) + if value is None: + value = field_dict.setdefault(key, new_default(message)) + while 1: + (element, new_pos) = _DecodeSignedVarint32(buffer, pos) + # pylint: disable=protected-access + if element in enum_type.values_by_number: + value.append(element) + else: + if not message._unknown_fields: + message._unknown_fields = [] + message._unknown_fields.append( + (tag_bytes, buffer[pos:new_pos].tobytes())) + if message._unknown_field_set is None: + message._unknown_field_set = containers.UnknownFieldSet() + message._unknown_field_set._add( + field_number, wire_format.WIRETYPE_VARINT, element) + # pylint: enable=protected-access + # Predict that the next tag is another copy of the same repeated + # field. + pos = new_pos + tag_len + if buffer[new_pos:pos] != tag_bytes or new_pos >= end: + # Prediction failed. Return. + if new_pos > end: + raise _DecodeError('Truncated message.') + return new_pos + return DecodeRepeatedField + else: + def DecodeField(buffer, pos, end, message, field_dict): + """Decode serialized repeated enum to its value and a new position. + + Args: + buffer: memoryview of the serialized bytes. + pos: int, position in the memory view to start at. + end: int, end position of serialized data + message: Message object to store unknown fields in + field_dict: Map[Descriptor, Any] to store decoded values in. + + Returns: + int, new position in serialized data. + """ + value_start_pos = pos + (enum_value, pos) = _DecodeSignedVarint32(buffer, pos) + if pos > end: + raise _DecodeError('Truncated message.') + if clear_if_default and not enum_value: + field_dict.pop(key, None) + return pos + # pylint: disable=protected-access + if enum_value in enum_type.values_by_number: + field_dict[key] = enum_value + else: + if not message._unknown_fields: + message._unknown_fields = [] + tag_bytes = encoder.TagBytes(field_number, + wire_format.WIRETYPE_VARINT) + message._unknown_fields.append( + (tag_bytes, buffer[value_start_pos:pos].tobytes())) + if message._unknown_field_set is None: + message._unknown_field_set = containers.UnknownFieldSet() + message._unknown_field_set._add( + field_number, wire_format.WIRETYPE_VARINT, enum_value) + # pylint: enable=protected-access + return pos + return DecodeField + + +# -------------------------------------------------------------------- + + +Int32Decoder = _SimpleDecoder( + wire_format.WIRETYPE_VARINT, _DecodeSignedVarint32) + +Int64Decoder = _SimpleDecoder( + wire_format.WIRETYPE_VARINT, _DecodeSignedVarint) + +UInt32Decoder = _SimpleDecoder(wire_format.WIRETYPE_VARINT, _DecodeVarint32) +UInt64Decoder = _SimpleDecoder(wire_format.WIRETYPE_VARINT, _DecodeVarint) + +SInt32Decoder = _ModifiedDecoder( + wire_format.WIRETYPE_VARINT, _DecodeVarint32, wire_format.ZigZagDecode) +SInt64Decoder = _ModifiedDecoder( + wire_format.WIRETYPE_VARINT, _DecodeVarint, wire_format.ZigZagDecode) + +# Note that Python conveniently guarantees that when using the '<' prefix on +# formats, they will also have the same size across all platforms (as opposed +# to without the prefix, where their sizes depend on the C compiler's basic +# type sizes). +Fixed32Decoder = _StructPackDecoder(wire_format.WIRETYPE_FIXED32, ' end: + raise _DecodeError('Truncated string.') + value.append(_ConvertToUnicode(buffer[pos:new_pos])) + # Predict that the next tag is another copy of the same repeated field. + pos = new_pos + tag_len + if buffer[new_pos:pos] != tag_bytes or new_pos == end: + # Prediction failed. Return. 
+ return new_pos + return DecodeRepeatedField + else: + def DecodeField(buffer, pos, end, message, field_dict): + (size, pos) = local_DecodeVarint(buffer, pos) + new_pos = pos + size + if new_pos > end: + raise _DecodeError('Truncated string.') + if clear_if_default and not size: + field_dict.pop(key, None) + else: + field_dict[key] = _ConvertToUnicode(buffer[pos:new_pos]) + return new_pos + return DecodeField + + +def BytesDecoder(field_number, is_repeated, is_packed, key, new_default, + clear_if_default=False): + """Returns a decoder for a bytes field.""" + + local_DecodeVarint = _DecodeVarint + + assert not is_packed + if is_repeated: + tag_bytes = encoder.TagBytes(field_number, + wire_format.WIRETYPE_LENGTH_DELIMITED) + tag_len = len(tag_bytes) + def DecodeRepeatedField(buffer, pos, end, message, field_dict): + value = field_dict.get(key) + if value is None: + value = field_dict.setdefault(key, new_default(message)) + while 1: + (size, pos) = local_DecodeVarint(buffer, pos) + new_pos = pos + size + if new_pos > end: + raise _DecodeError('Truncated string.') + value.append(buffer[pos:new_pos].tobytes()) + # Predict that the next tag is another copy of the same repeated field. + pos = new_pos + tag_len + if buffer[new_pos:pos] != tag_bytes or new_pos == end: + # Prediction failed. Return. + return new_pos + return DecodeRepeatedField + else: + def DecodeField(buffer, pos, end, message, field_dict): + (size, pos) = local_DecodeVarint(buffer, pos) + new_pos = pos + size + if new_pos > end: + raise _DecodeError('Truncated string.') + if clear_if_default and not size: + field_dict.pop(key, None) + else: + field_dict[key] = buffer[pos:new_pos].tobytes() + return new_pos + return DecodeField + + +def GroupDecoder(field_number, is_repeated, is_packed, key, new_default): + """Returns a decoder for a group field.""" + + end_tag_bytes = encoder.TagBytes(field_number, + wire_format.WIRETYPE_END_GROUP) + end_tag_len = len(end_tag_bytes) + + assert not is_packed + if is_repeated: + tag_bytes = encoder.TagBytes(field_number, + wire_format.WIRETYPE_START_GROUP) + tag_len = len(tag_bytes) + def DecodeRepeatedField(buffer, pos, end, message, field_dict): + value = field_dict.get(key) + if value is None: + value = field_dict.setdefault(key, new_default(message)) + while 1: + value = field_dict.get(key) + if value is None: + value = field_dict.setdefault(key, new_default(message)) + # Read sub-message. + pos = value.add()._InternalParse(buffer, pos, end) + # Read end tag. + new_pos = pos+end_tag_len + if buffer[pos:new_pos] != end_tag_bytes or new_pos > end: + raise _DecodeError('Missing group end tag.') + # Predict that the next tag is another copy of the same repeated field. + pos = new_pos + tag_len + if buffer[new_pos:pos] != tag_bytes or new_pos == end: + # Prediction failed. Return. + return new_pos + return DecodeRepeatedField + else: + def DecodeField(buffer, pos, end, message, field_dict): + value = field_dict.get(key) + if value is None: + value = field_dict.setdefault(key, new_default(message)) + # Read sub-message. + pos = value._InternalParse(buffer, pos, end) + # Read end tag. 
+ new_pos = pos+end_tag_len + if buffer[pos:new_pos] != end_tag_bytes or new_pos > end: + raise _DecodeError('Missing group end tag.') + return new_pos + return DecodeField + + +def MessageDecoder(field_number, is_repeated, is_packed, key, new_default): + """Returns a decoder for a message field.""" + + local_DecodeVarint = _DecodeVarint + + assert not is_packed + if is_repeated: + tag_bytes = encoder.TagBytes(field_number, + wire_format.WIRETYPE_LENGTH_DELIMITED) + tag_len = len(tag_bytes) + def DecodeRepeatedField(buffer, pos, end, message, field_dict): + value = field_dict.get(key) + if value is None: + value = field_dict.setdefault(key, new_default(message)) + while 1: + # Read length. + (size, pos) = local_DecodeVarint(buffer, pos) + new_pos = pos + size + if new_pos > end: + raise _DecodeError('Truncated message.') + # Read sub-message. + if value.add()._InternalParse(buffer, pos, new_pos) != new_pos: + # The only reason _InternalParse would return early is if it + # encountered an end-group tag. + raise _DecodeError('Unexpected end-group tag.') + # Predict that the next tag is another copy of the same repeated field. + pos = new_pos + tag_len + if buffer[new_pos:pos] != tag_bytes or new_pos == end: + # Prediction failed. Return. + return new_pos + return DecodeRepeatedField + else: + def DecodeField(buffer, pos, end, message, field_dict): + value = field_dict.get(key) + if value is None: + value = field_dict.setdefault(key, new_default(message)) + # Read length. + (size, pos) = local_DecodeVarint(buffer, pos) + new_pos = pos + size + if new_pos > end: + raise _DecodeError('Truncated message.') + # Read sub-message. + if value._InternalParse(buffer, pos, new_pos) != new_pos: + # The only reason _InternalParse would return early is if it encountered + # an end-group tag. + raise _DecodeError('Unexpected end-group tag.') + return new_pos + return DecodeField + + +# -------------------------------------------------------------------- + +MESSAGE_SET_ITEM_TAG = encoder.TagBytes(1, wire_format.WIRETYPE_START_GROUP) + +def MessageSetItemDecoder(descriptor): + """Returns a decoder for a MessageSet item. + + The parameter is the message Descriptor. + + The message set message looks like this: + message MessageSet { + repeated group Item = 1 { + required int32 type_id = 2; + required string message = 3; + } + } + """ + + type_id_tag_bytes = encoder.TagBytes(2, wire_format.WIRETYPE_VARINT) + message_tag_bytes = encoder.TagBytes(3, wire_format.WIRETYPE_LENGTH_DELIMITED) + item_end_tag_bytes = encoder.TagBytes(1, wire_format.WIRETYPE_END_GROUP) + + local_ReadTag = ReadTag + local_DecodeVarint = _DecodeVarint + local_SkipField = SkipField + + def DecodeItem(buffer, pos, end, message, field_dict): + """Decode serialized message set to its value and new position. + + Args: + buffer: memoryview of the serialized bytes. + pos: int, position in the memory view to start at. + end: int, end position of serialized data + message: Message object to store unknown fields in + field_dict: Map[Descriptor, Any] to store decoded values in. + + Returns: + int, new position in serialized data. + """ + message_set_item_start = pos + type_id = -1 + message_start = -1 + message_end = -1 + + # Technically, type_id and message can appear in any order, so we need + # a little loop here. 
+ while 1: + (tag_bytes, pos) = local_ReadTag(buffer, pos) + if tag_bytes == type_id_tag_bytes: + (type_id, pos) = local_DecodeVarint(buffer, pos) + elif tag_bytes == message_tag_bytes: + (size, message_start) = local_DecodeVarint(buffer, pos) + pos = message_end = message_start + size + elif tag_bytes == item_end_tag_bytes: + break + else: + pos = SkipField(buffer, pos, end, tag_bytes) + if pos == -1: + raise _DecodeError('Missing group end tag.') + + if pos > end: + raise _DecodeError('Truncated message.') + + if type_id == -1: + raise _DecodeError('MessageSet item missing type_id.') + if message_start == -1: + raise _DecodeError('MessageSet item missing message.') + + extension = message.Extensions._FindExtensionByNumber(type_id) + # pylint: disable=protected-access + if extension is not None: + value = field_dict.get(extension) + if value is None: + message_type = extension.message_type + if not hasattr(message_type, '_concrete_class'): + # pylint: disable=protected-access + message._FACTORY.GetPrototype(message_type) + value = field_dict.setdefault( + extension, message_type._concrete_class()) + if value._InternalParse(buffer, message_start,message_end) != message_end: + # The only reason _InternalParse would return early is if it encountered + # an end-group tag. + raise _DecodeError('Unexpected end-group tag.') + else: + if not message._unknown_fields: + message._unknown_fields = [] + message._unknown_fields.append( + (MESSAGE_SET_ITEM_TAG, buffer[message_set_item_start:pos].tobytes())) + if message._unknown_field_set is None: + message._unknown_field_set = containers.UnknownFieldSet() + message._unknown_field_set._add( + type_id, + wire_format.WIRETYPE_LENGTH_DELIMITED, + buffer[message_start:message_end].tobytes()) + # pylint: enable=protected-access + + return pos + + return DecodeItem + +# -------------------------------------------------------------------- + +def MapDecoder(field_descriptor, new_default, is_message_map): + """Returns a decoder for a map field.""" + + key = field_descriptor + tag_bytes = encoder.TagBytes(field_descriptor.number, + wire_format.WIRETYPE_LENGTH_DELIMITED) + tag_len = len(tag_bytes) + local_DecodeVarint = _DecodeVarint + # Can't read _concrete_class yet; might not be initialized. + message_type = field_descriptor.message_type + + def DecodeMap(buffer, pos, end, message, field_dict): + submsg = message_type._concrete_class() + value = field_dict.get(key) + if value is None: + value = field_dict.setdefault(key, new_default(message)) + while 1: + # Read length. + (size, pos) = local_DecodeVarint(buffer, pos) + new_pos = pos + size + if new_pos > end: + raise _DecodeError('Truncated message.') + # Read sub-message. + submsg.Clear() + if submsg._InternalParse(buffer, pos, new_pos) != new_pos: + # The only reason _InternalParse would return early is if it + # encountered an end-group tag. + raise _DecodeError('Unexpected end-group tag.') + + if is_message_map: + value[submsg.key].CopyFrom(submsg.value) + else: + value[submsg.key] = submsg.value + + # Predict that the next tag is another copy of the same repeated field. + pos = new_pos + tag_len + if buffer[new_pos:pos] != tag_bytes or new_pos == end: + # Prediction failed. Return. + return new_pos + + return DecodeMap + +# -------------------------------------------------------------------- +# Optimization is not as heavy here because calls to SkipField() are rare, +# except for handling end-group tags. + +def _SkipVarint(buffer, pos, end): + """Skip a varint value. 
Returns the new position.""" + # Previously ord(buffer[pos]) raised IndexError when pos is out of range. + # With this code, ord(b'') raises TypeError. Both are handled in + # python_message.py to generate a 'Truncated message' error. + while ord(buffer[pos:pos+1].tobytes()) & 0x80: + pos += 1 + pos += 1 + if pos > end: + raise _DecodeError('Truncated message.') + return pos + +def _SkipFixed64(buffer, pos, end): + """Skip a fixed64 value. Returns the new position.""" + + pos += 8 + if pos > end: + raise _DecodeError('Truncated message.') + return pos + + +def _DecodeFixed64(buffer, pos): + """Decode a fixed64.""" + new_pos = pos + 8 + return (struct.unpack(' end: + raise _DecodeError('Truncated message.') + return pos + + +def _SkipGroup(buffer, pos, end): + """Skip sub-group. Returns the new position.""" + + while 1: + (tag_bytes, pos) = ReadTag(buffer, pos) + new_pos = SkipField(buffer, pos, end, tag_bytes) + if new_pos == -1: + return pos + pos = new_pos + + +def _DecodeUnknownFieldSet(buffer, pos, end_pos=None): + """Decode UnknownFieldSet. Returns the UnknownFieldSet and new position.""" + + unknown_field_set = containers.UnknownFieldSet() + while end_pos is None or pos < end_pos: + (tag_bytes, pos) = ReadTag(buffer, pos) + (tag, _) = _DecodeVarint(tag_bytes, 0) + field_number, wire_type = wire_format.UnpackTag(tag) + if wire_type == wire_format.WIRETYPE_END_GROUP: + break + (data, pos) = _DecodeUnknownField(buffer, pos, wire_type) + # pylint: disable=protected-access + unknown_field_set._add(field_number, wire_type, data) + + return (unknown_field_set, pos) + + +def _DecodeUnknownField(buffer, pos, wire_type): + """Decode a unknown field. Returns the UnknownField and new position.""" + + if wire_type == wire_format.WIRETYPE_VARINT: + (data, pos) = _DecodeVarint(buffer, pos) + elif wire_type == wire_format.WIRETYPE_FIXED64: + (data, pos) = _DecodeFixed64(buffer, pos) + elif wire_type == wire_format.WIRETYPE_FIXED32: + (data, pos) = _DecodeFixed32(buffer, pos) + elif wire_type == wire_format.WIRETYPE_LENGTH_DELIMITED: + (size, pos) = _DecodeVarint(buffer, pos) + data = buffer[pos:pos+size].tobytes() + pos += size + elif wire_type == wire_format.WIRETYPE_START_GROUP: + (data, pos) = _DecodeUnknownFieldSet(buffer, pos) + elif wire_type == wire_format.WIRETYPE_END_GROUP: + return (0, -1) + else: + raise _DecodeError('Wrong wire type in tag.') + + return (data, pos) + + +def _EndGroup(buffer, pos, end): + """Skipping an END_GROUP tag returns -1 to tell the parent loop to break.""" + + return -1 + + +def _SkipFixed32(buffer, pos, end): + """Skip a fixed32 value. 
Returns the new position.""" + + pos += 4 + if pos > end: + raise _DecodeError('Truncated message.') + return pos + + +def _DecodeFixed32(buffer, pos): + """Decode a fixed32.""" + + new_pos = pos + 4 + return (struct.unpack('B').pack + + def EncodeVarint(write, value, unused_deterministic=None): + bits = value & 0x7f + value >>= 7 + while value: + write(local_int2byte(0x80|bits)) + bits = value & 0x7f + value >>= 7 + return write(local_int2byte(bits)) + + return EncodeVarint + + +def _SignedVarintEncoder(): + """Return an encoder for a basic signed varint value (does not include + tag).""" + + local_int2byte = struct.Struct('>B').pack + + def EncodeSignedVarint(write, value, unused_deterministic=None): + if value < 0: + value += (1 << 64) + bits = value & 0x7f + value >>= 7 + while value: + write(local_int2byte(0x80|bits)) + bits = value & 0x7f + value >>= 7 + return write(local_int2byte(bits)) + + return EncodeSignedVarint + + +_EncodeVarint = _VarintEncoder() +_EncodeSignedVarint = _SignedVarintEncoder() + + +def _VarintBytes(value): + """Encode the given integer as a varint and return the bytes. This is only + called at startup time so it doesn't need to be fast.""" + + pieces = [] + _EncodeVarint(pieces.append, value, True) + return b"".join(pieces) + + +def TagBytes(field_number, wire_type): + """Encode the given tag and return the bytes. Only called at startup.""" + + return bytes(_VarintBytes(wire_format.PackTag(field_number, wire_type))) + +# -------------------------------------------------------------------- +# As with sizers (see above), we have a number of common encoder +# implementations. + + +def _SimpleEncoder(wire_type, encode_value, compute_value_size): + """Return a constructor for an encoder for fields of a particular type. + + Args: + wire_type: The field's wire type, for encoding tags. + encode_value: A function which encodes an individual value, e.g. + _EncodeVarint(). + compute_value_size: A function which computes the size of an individual + value, e.g. _VarintSize(). + """ + + def SpecificEncoder(field_number, is_repeated, is_packed): + if is_packed: + tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED) + local_EncodeVarint = _EncodeVarint + def EncodePackedField(write, value, deterministic): + write(tag_bytes) + size = 0 + for element in value: + size += compute_value_size(element) + local_EncodeVarint(write, size, deterministic) + for element in value: + encode_value(write, element, deterministic) + return EncodePackedField + elif is_repeated: + tag_bytes = TagBytes(field_number, wire_type) + def EncodeRepeatedField(write, value, deterministic): + for element in value: + write(tag_bytes) + encode_value(write, element, deterministic) + return EncodeRepeatedField + else: + tag_bytes = TagBytes(field_number, wire_type) + def EncodeField(write, value, deterministic): + write(tag_bytes) + return encode_value(write, value, deterministic) + return EncodeField + + return SpecificEncoder + + +def _ModifiedEncoder(wire_type, encode_value, compute_value_size, modify_value): + """Like SimpleEncoder but additionally invokes modify_value on every value + before passing it to encode_value. 
Usually modify_value is ZigZagEncode.""" + + def SpecificEncoder(field_number, is_repeated, is_packed): + if is_packed: + tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED) + local_EncodeVarint = _EncodeVarint + def EncodePackedField(write, value, deterministic): + write(tag_bytes) + size = 0 + for element in value: + size += compute_value_size(modify_value(element)) + local_EncodeVarint(write, size, deterministic) + for element in value: + encode_value(write, modify_value(element), deterministic) + return EncodePackedField + elif is_repeated: + tag_bytes = TagBytes(field_number, wire_type) + def EncodeRepeatedField(write, value, deterministic): + for element in value: + write(tag_bytes) + encode_value(write, modify_value(element), deterministic) + return EncodeRepeatedField + else: + tag_bytes = TagBytes(field_number, wire_type) + def EncodeField(write, value, deterministic): + write(tag_bytes) + return encode_value(write, modify_value(value), deterministic) + return EncodeField + + return SpecificEncoder + + +def _StructPackEncoder(wire_type, format): + """Return a constructor for an encoder for a fixed-width field. + + Args: + wire_type: The field's wire type, for encoding tags. + format: The format string to pass to struct.pack(). + """ + + value_size = struct.calcsize(format) + + def SpecificEncoder(field_number, is_repeated, is_packed): + local_struct_pack = struct.pack + if is_packed: + tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED) + local_EncodeVarint = _EncodeVarint + def EncodePackedField(write, value, deterministic): + write(tag_bytes) + local_EncodeVarint(write, len(value) * value_size, deterministic) + for element in value: + write(local_struct_pack(format, element)) + return EncodePackedField + elif is_repeated: + tag_bytes = TagBytes(field_number, wire_type) + def EncodeRepeatedField(write, value, unused_deterministic=None): + for element in value: + write(tag_bytes) + write(local_struct_pack(format, element)) + return EncodeRepeatedField + else: + tag_bytes = TagBytes(field_number, wire_type) + def EncodeField(write, value, unused_deterministic=None): + write(tag_bytes) + return write(local_struct_pack(format, value)) + return EncodeField + + return SpecificEncoder + + +def _FloatingPointEncoder(wire_type, format): + """Return a constructor for an encoder for float fields. + + This is like StructPackEncoder, but catches errors that may be due to + passing non-finite floating-point values to struct.pack, and makes a + second attempt to encode those values. + + Args: + wire_type: The field's wire type, for encoding tags. + format: The format string to pass to struct.pack(). + """ + + value_size = struct.calcsize(format) + if value_size == 4: + def EncodeNonFiniteOrRaise(write, value): + # Remember that the serialized form uses little-endian byte order. 
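`_ModifiedEncoder` above is how sint32/sint64 fields get their ZigZag treatment before the varint is written. The mapping itself is tiny; here is a standalone sketch of the 64-bit variant (the vendored code delegates this to `wire_format.ZigZagEncode`/`ZigZagDecode`):

```python
# Standalone sketch of the ZigZag mapping used for sint32/sint64 fields;
# not the vendored wire_format implementation.
def zigzag_encode(n: int) -> int:
    return (n << 1) ^ (n >> 63)   # 64-bit variant; Python ints are unbounded

def zigzag_decode(n: int) -> int:
    return (n >> 1) ^ -(n & 1)

assert [zigzag_encode(v) for v in (0, -1, 1, -2, 2)] == [0, 1, 2, 3, 4]
assert all(zigzag_decode(zigzag_encode(v)) == v for v in (-5, 0, 7, -(2 ** 31)))
```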
+ if value == _POS_INF: + write(b'\x00\x00\x80\x7F') + elif value == _NEG_INF: + write(b'\x00\x00\x80\xFF') + elif value != value: # NaN + write(b'\x00\x00\xC0\x7F') + else: + raise + elif value_size == 8: + def EncodeNonFiniteOrRaise(write, value): + if value == _POS_INF: + write(b'\x00\x00\x00\x00\x00\x00\xF0\x7F') + elif value == _NEG_INF: + write(b'\x00\x00\x00\x00\x00\x00\xF0\xFF') + elif value != value: # NaN + write(b'\x00\x00\x00\x00\x00\x00\xF8\x7F') + else: + raise + else: + raise ValueError('Can\'t encode floating-point values that are ' + '%d bytes long (only 4 or 8)' % value_size) + + def SpecificEncoder(field_number, is_repeated, is_packed): + local_struct_pack = struct.pack + if is_packed: + tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED) + local_EncodeVarint = _EncodeVarint + def EncodePackedField(write, value, deterministic): + write(tag_bytes) + local_EncodeVarint(write, len(value) * value_size, deterministic) + for element in value: + # This try/except block is going to be faster than any code that + # we could write to check whether element is finite. + try: + write(local_struct_pack(format, element)) + except SystemError: + EncodeNonFiniteOrRaise(write, element) + return EncodePackedField + elif is_repeated: + tag_bytes = TagBytes(field_number, wire_type) + def EncodeRepeatedField(write, value, unused_deterministic=None): + for element in value: + write(tag_bytes) + try: + write(local_struct_pack(format, element)) + except SystemError: + EncodeNonFiniteOrRaise(write, element) + return EncodeRepeatedField + else: + tag_bytes = TagBytes(field_number, wire_type) + def EncodeField(write, value, unused_deterministic=None): + write(tag_bytes) + try: + write(local_struct_pack(format, value)) + except SystemError: + EncodeNonFiniteOrRaise(write, value) + return EncodeField + + return SpecificEncoder + + +# ==================================================================== +# Here we declare an encoder constructor for each field type. These work +# very similarly to sizer constructors, described earlier. + + +Int32Encoder = Int64Encoder = EnumEncoder = _SimpleEncoder( + wire_format.WIRETYPE_VARINT, _EncodeSignedVarint, _SignedVarintSize) + +UInt32Encoder = UInt64Encoder = _SimpleEncoder( + wire_format.WIRETYPE_VARINT, _EncodeVarint, _VarintSize) + +SInt32Encoder = SInt64Encoder = _ModifiedEncoder( + wire_format.WIRETYPE_VARINT, _EncodeVarint, _VarintSize, + wire_format.ZigZagEncode) + +# Note that Python conveniently guarantees that when using the '<' prefix on +# formats, they will also have the same size across all platforms (as opposed +# to without the prefix, where their sizes depend on the C compiler's basic +# type sizes). +Fixed32Encoder = _StructPackEncoder(wire_format.WIRETYPE_FIXED32, ' str + ValueType = int + + def __init__(self, enum_type): + """Inits EnumTypeWrapper with an EnumDescriptor.""" + self._enum_type = enum_type + self.DESCRIPTOR = enum_type # pylint: disable=invalid-name + + def Name(self, number): # pylint: disable=invalid-name + """Returns a string containing the name of an enum value.""" + try: + return self._enum_type.values_by_number[number].name + except KeyError: + pass # fall out to break exception chaining + + if not isinstance(number, int): + raise TypeError( + 'Enum value for {} must be an int, but got {} {!r}.'.format( + self._enum_type.name, type(number), number)) + else: + # repr here to handle the odd case when you pass in a boolean. 
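# EnumTypeWrapper itself only provides name/number lookups over an
# EnumDescriptor. A minimal stand-in (not the real class, and not backed
# by a real descriptor) that mimics the Name()/Value() lookups and the
# attribute-style access this wrapper provides; the "Color" values are
# made up purely for illustration:
class _TinyEnumWrapper(object):
    def __init__(self, pairs):
        self._by_name = dict(pairs)
        self._by_number = {number: name for name, number in pairs}

    def Name(self, number):
        return self._by_number[number]

    def Value(self, name):
        return self._by_name[name]

    def __getattr__(self, name):
        try:
            return self._by_name[name]
        except KeyError:
            raise AttributeError(name)

Color = _TinyEnumWrapper([('UNKNOWN', 0), ('RED', 1), ('BLUE', 2)])
assert Color.Name(1) == 'RED'
assert Color.Value('BLUE') == 2
assert Color.RED == 1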
+ raise ValueError('Enum {} has no name defined for value {!r}'.format( + self._enum_type.name, number)) + + def Value(self, name): # pylint: disable=invalid-name + """Returns the value corresponding to the given enum name.""" + try: + return self._enum_type.values_by_name[name].number + except KeyError: + pass # fall out to break exception chaining + raise ValueError('Enum {} has no value defined for name {!r}'.format( + self._enum_type.name, name)) + + def keys(self): + """Return a list of the string names in the enum. + + Returns: + A list of strs, in the order they were defined in the .proto file. + """ + + return [value_descriptor.name + for value_descriptor in self._enum_type.values] + + def values(self): + """Return a list of the integer values in the enum. + + Returns: + A list of ints, in the order they were defined in the .proto file. + """ + + return [value_descriptor.number + for value_descriptor in self._enum_type.values] + + def items(self): + """Return a list of the (name, value) pairs of the enum. + + Returns: + A list of (str, int) pairs, in the order they were defined + in the .proto file. + """ + return [(value_descriptor.name, value_descriptor.number) + for value_descriptor in self._enum_type.values] + + def __getattr__(self, name): + """Returns the value corresponding to the given enum name.""" + try: + return super( + EnumTypeWrapper, + self).__getattribute__('_enum_type').values_by_name[name].number + except KeyError: + pass # fall out to break exception chaining + raise AttributeError('Enum {} has no value defined for name {!r}'.format( + self._enum_type.name, name)) diff --git a/openpype/hosts/nuke/vendor/google/protobuf/internal/extension_dict.py b/openpype/hosts/nuke/vendor/google/protobuf/internal/extension_dict.py new file mode 100644 index 0000000000..b346cf283e --- /dev/null +++ b/openpype/hosts/nuke/vendor/google/protobuf/internal/extension_dict.py @@ -0,0 +1,213 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. +# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +"""Contains _ExtensionDict class to represent extensions. +""" + +from google.protobuf.internal import type_checkers +from google.protobuf.descriptor import FieldDescriptor + + +def _VerifyExtensionHandle(message, extension_handle): + """Verify that the given extension handle is valid.""" + + if not isinstance(extension_handle, FieldDescriptor): + raise KeyError('HasExtension() expects an extension handle, got: %s' % + extension_handle) + + if not extension_handle.is_extension: + raise KeyError('"%s" is not an extension.' % extension_handle.full_name) + + if not extension_handle.containing_type: + raise KeyError('"%s" is missing a containing_type.' + % extension_handle.full_name) + + if extension_handle.containing_type is not message.DESCRIPTOR: + raise KeyError('Extension "%s" extends message type "%s", but this ' + 'message is of type "%s".' % + (extension_handle.full_name, + extension_handle.containing_type.full_name, + message.DESCRIPTOR.full_name)) + + +# TODO(robinson): Unify error handling of "unknown extension" crap. +# TODO(robinson): Support iteritems()-style iteration over all +# extensions with the "has" bits turned on? +class _ExtensionDict(object): + + """Dict-like container for Extension fields on proto instances. + + Note that in all cases we expect extension handles to be + FieldDescriptors. + """ + + def __init__(self, extended_message): + """ + Args: + extended_message: Message instance for which we are the Extensions dict. + """ + self._extended_message = extended_message + + def __getitem__(self, extension_handle): + """Returns the current value of the given extension handle.""" + + _VerifyExtensionHandle(self._extended_message, extension_handle) + + result = self._extended_message._fields.get(extension_handle) + if result is not None: + return result + + if extension_handle.label == FieldDescriptor.LABEL_REPEATED: + result = extension_handle._default_constructor(self._extended_message) + elif extension_handle.cpp_type == FieldDescriptor.CPPTYPE_MESSAGE: + message_type = extension_handle.message_type + if not hasattr(message_type, '_concrete_class'): + # pylint: disable=protected-access + self._extended_message._FACTORY.GetPrototype(message_type) + assert getattr(extension_handle.message_type, '_concrete_class', None), ( + 'Uninitialized concrete class found for field %r (message type %r)' + % (extension_handle.full_name, + extension_handle.message_type.full_name)) + result = extension_handle.message_type._concrete_class() + try: + result._SetListener(self._extended_message._listener_for_children) + except ReferenceError: + pass + else: + # Singular scalar -- just return the default without inserting into the + # dict. + return extension_handle.default_value + + # Atomically check if another thread has preempted us and, if not, swap + # in the new object we just created. If someone has preempted us, we + # take that object and discard ours. + # WARNING: We are relying on setdefault() being atomic. 
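# Reduced to its essence, the idiom used here is "build a candidate,
# then publish it with dict.setdefault()", so that if another thread
# created the value first, every caller ends up holding the object that
# won the race. A standalone sketch of that idiom (names are
# illustrative only, not part of the vendored module):
_lazy_defaults = {}

def _get_or_create(key, factory):
    value = _lazy_defaults.get(key)
    if value is not None:
        return value
    candidate = factory()
    # setdefault() stores candidate only if key is still absent and
    # returns whatever is stored afterwards, so concurrent callers all
    # share a single instance.
    return _lazy_defaults.setdefault(key, candidate)

assert _get_or_create('field', list) is _get_or_create('field', list)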
This is true + # in CPython but we haven't investigated others. This warning appears + # in several other locations in this file. + result = self._extended_message._fields.setdefault( + extension_handle, result) + + return result + + def __eq__(self, other): + if not isinstance(other, self.__class__): + return False + + my_fields = self._extended_message.ListFields() + other_fields = other._extended_message.ListFields() + + # Get rid of non-extension fields. + my_fields = [field for field in my_fields if field.is_extension] + other_fields = [field for field in other_fields if field.is_extension] + + return my_fields == other_fields + + def __ne__(self, other): + return not self == other + + def __len__(self): + fields = self._extended_message.ListFields() + # Get rid of non-extension fields. + extension_fields = [field for field in fields if field[0].is_extension] + return len(extension_fields) + + def __hash__(self): + raise TypeError('unhashable object') + + # Note that this is only meaningful for non-repeated, scalar extension + # fields. Note also that we may have to call _Modified() when we do + # successfully set a field this way, to set any necessary "has" bits in the + # ancestors of the extended message. + def __setitem__(self, extension_handle, value): + """If extension_handle specifies a non-repeated, scalar extension + field, sets the value of that field. + """ + + _VerifyExtensionHandle(self._extended_message, extension_handle) + + if (extension_handle.label == FieldDescriptor.LABEL_REPEATED or + extension_handle.cpp_type == FieldDescriptor.CPPTYPE_MESSAGE): + raise TypeError( + 'Cannot assign to extension "%s" because it is a repeated or ' + 'composite type.' % extension_handle.full_name) + + # It's slightly wasteful to lookup the type checker each time, + # but we expect this to be a vanishingly uncommon case anyway. + type_checker = type_checkers.GetTypeChecker(extension_handle) + # pylint: disable=protected-access + self._extended_message._fields[extension_handle] = ( + type_checker.CheckValue(value)) + self._extended_message._Modified() + + def __delitem__(self, extension_handle): + self._extended_message.ClearExtension(extension_handle) + + def _FindExtensionByName(self, name): + """Tries to find a known extension with the specified name. + + Args: + name: Extension full name. + + Returns: + Extension field descriptor. + """ + return self._extended_message._extensions_by_name.get(name, None) + + def _FindExtensionByNumber(self, number): + """Tries to find a known extension with the field number. + + Args: + number: Extension field number. + + Returns: + Extension field descriptor. 
+ """ + return self._extended_message._extensions_by_number.get(number, None) + + def __iter__(self): + # Return a generator over the populated extension fields + return (f[0] for f in self._extended_message.ListFields() + if f[0].is_extension) + + def __contains__(self, extension_handle): + _VerifyExtensionHandle(self._extended_message, extension_handle) + + if extension_handle not in self._extended_message._fields: + return False + + if extension_handle.label == FieldDescriptor.LABEL_REPEATED: + return bool(self._extended_message._fields.get(extension_handle)) + + if extension_handle.cpp_type == FieldDescriptor.CPPTYPE_MESSAGE: + value = self._extended_message._fields.get(extension_handle) + # pylint: disable=protected-access + return value is not None and value._is_present_in_parent + + return True diff --git a/openpype/hosts/nuke/vendor/google/protobuf/internal/message_listener.py b/openpype/hosts/nuke/vendor/google/protobuf/internal/message_listener.py new file mode 100644 index 0000000000..0fc255a774 --- /dev/null +++ b/openpype/hosts/nuke/vendor/google/protobuf/internal/message_listener.py @@ -0,0 +1,78 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. +# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +"""Defines a listener interface for observing certain +state transitions on Message objects. + +Also defines a null implementation of this interface. +""" + +__author__ = 'robinson@google.com (Will Robinson)' + + +class MessageListener(object): + + """Listens for modifications made to a message. Meant to be registered via + Message._SetListener(). + + Attributes: + dirty: If True, then calling Modified() would be a no-op. This can be + used to avoid these calls entirely in the common case. + """ + + def Modified(self): + """Called every time the message is modified in such a way that the parent + message may need to be updated. 
This currently means either: + (a) The message was modified for the first time, so the parent message + should henceforth mark the message as present. + (b) The message's cached byte size became dirty -- i.e. the message was + modified for the first time after a previous call to ByteSize(). + Therefore the parent should also mark its byte size as dirty. + Note that (a) implies (b), since new objects start out with a client cached + size (zero). However, we document (a) explicitly because it is important. + + Modified() will *only* be called in response to one of these two events -- + not every time the sub-message is modified. + + Note that if the listener's |dirty| attribute is true, then calling + Modified at the moment would be a no-op, so it can be skipped. Performance- + sensitive callers should check this attribute directly before calling since + it will be true most of the time. + """ + + raise NotImplementedError + + +class NullMessageListener(object): + + """No-op MessageListener implementation.""" + + def Modified(self): + pass diff --git a/openpype/hosts/nuke/vendor/google/protobuf/internal/message_set_extensions_pb2.py b/openpype/hosts/nuke/vendor/google/protobuf/internal/message_set_extensions_pb2.py new file mode 100644 index 0000000000..63651a3f19 --- /dev/null +++ b/openpype/hosts/nuke/vendor/google/protobuf/internal/message_set_extensions_pb2.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: google/protobuf/internal/message_set_extensions.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n5google/protobuf/internal/message_set_extensions.proto\x12\x18google.protobuf.internal\"\x1e\n\x0eTestMessageSet*\x08\x08\x04\x10\xff\xff\xff\xff\x07:\x02\x08\x01\"\xa5\x01\n\x18TestMessageSetExtension1\x12\t\n\x01i\x18\x0f \x01(\x05\x32~\n\x15message_set_extension\x12(.google.protobuf.internal.TestMessageSet\x18\xab\xff\xf6. \x01(\x0b\x32\x32.google.protobuf.internal.TestMessageSetExtension1\"\xa7\x01\n\x18TestMessageSetExtension2\x12\x0b\n\x03str\x18\x19 \x01(\t2~\n\x15message_set_extension\x12(.google.protobuf.internal.TestMessageSet\x18\xca\xff\xf6. \x01(\x0b\x32\x32.google.protobuf.internal.TestMessageSetExtension2\"(\n\x18TestMessageSetExtension3\x12\x0c\n\x04text\x18# \x01(\t:\x7f\n\x16message_set_extension3\x12(.google.protobuf.internal.TestMessageSet\x18\xdf\xff\xf6. 
\x01(\x0b\x32\x32.google.protobuf.internal.TestMessageSetExtension3') + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.internal.message_set_extensions_pb2', globals()) +if _descriptor._USE_C_DESCRIPTORS == False: + TestMessageSet.RegisterExtension(message_set_extension3) + TestMessageSet.RegisterExtension(_TESTMESSAGESETEXTENSION1.extensions_by_name['message_set_extension']) + TestMessageSet.RegisterExtension(_TESTMESSAGESETEXTENSION2.extensions_by_name['message_set_extension']) + + DESCRIPTOR._options = None + _TESTMESSAGESET._options = None + _TESTMESSAGESET._serialized_options = b'\010\001' + _TESTMESSAGESET._serialized_start=83 + _TESTMESSAGESET._serialized_end=113 + _TESTMESSAGESETEXTENSION1._serialized_start=116 + _TESTMESSAGESETEXTENSION1._serialized_end=281 + _TESTMESSAGESETEXTENSION2._serialized_start=284 + _TESTMESSAGESETEXTENSION2._serialized_end=451 + _TESTMESSAGESETEXTENSION3._serialized_start=453 + _TESTMESSAGESETEXTENSION3._serialized_end=493 +# @@protoc_insertion_point(module_scope) diff --git a/openpype/hosts/nuke/vendor/google/protobuf/internal/missing_enum_values_pb2.py b/openpype/hosts/nuke/vendor/google/protobuf/internal/missing_enum_values_pb2.py new file mode 100644 index 0000000000..5497083197 --- /dev/null +++ b/openpype/hosts/nuke/vendor/google/protobuf/internal/missing_enum_values_pb2.py @@ -0,0 +1,37 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: google/protobuf/internal/missing_enum_values.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n2google/protobuf/internal/missing_enum_values.proto\x12\x1fgoogle.protobuf.python.internal\"\xc1\x02\n\x0eTestEnumValues\x12X\n\x14optional_nested_enum\x18\x01 \x01(\x0e\x32:.google.protobuf.python.internal.TestEnumValues.NestedEnum\x12X\n\x14repeated_nested_enum\x18\x02 \x03(\x0e\x32:.google.protobuf.python.internal.TestEnumValues.NestedEnum\x12Z\n\x12packed_nested_enum\x18\x03 \x03(\x0e\x32:.google.protobuf.python.internal.TestEnumValues.NestedEnumB\x02\x10\x01\"\x1f\n\nNestedEnum\x12\x08\n\x04ZERO\x10\x00\x12\x07\n\x03ONE\x10\x01\"\xd3\x02\n\x15TestMissingEnumValues\x12_\n\x14optional_nested_enum\x18\x01 \x01(\x0e\x32\x41.google.protobuf.python.internal.TestMissingEnumValues.NestedEnum\x12_\n\x14repeated_nested_enum\x18\x02 \x03(\x0e\x32\x41.google.protobuf.python.internal.TestMissingEnumValues.NestedEnum\x12\x61\n\x12packed_nested_enum\x18\x03 \x03(\x0e\x32\x41.google.protobuf.python.internal.TestMissingEnumValues.NestedEnumB\x02\x10\x01\"\x15\n\nNestedEnum\x12\x07\n\x03TWO\x10\x02\"\x1b\n\nJustString\x12\r\n\x05\x64ummy\x18\x01 \x02(\t') + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.internal.missing_enum_values_pb2', globals()) +if _descriptor._USE_C_DESCRIPTORS == False: + + DESCRIPTOR._options = None + _TESTENUMVALUES.fields_by_name['packed_nested_enum']._options = None + _TESTENUMVALUES.fields_by_name['packed_nested_enum']._serialized_options = b'\020\001' + 
_TESTMISSINGENUMVALUES.fields_by_name['packed_nested_enum']._options = None + _TESTMISSINGENUMVALUES.fields_by_name['packed_nested_enum']._serialized_options = b'\020\001' + _TESTENUMVALUES._serialized_start=88 + _TESTENUMVALUES._serialized_end=409 + _TESTENUMVALUES_NESTEDENUM._serialized_start=378 + _TESTENUMVALUES_NESTEDENUM._serialized_end=409 + _TESTMISSINGENUMVALUES._serialized_start=412 + _TESTMISSINGENUMVALUES._serialized_end=751 + _TESTMISSINGENUMVALUES_NESTEDENUM._serialized_start=730 + _TESTMISSINGENUMVALUES_NESTEDENUM._serialized_end=751 + _JUSTSTRING._serialized_start=753 + _JUSTSTRING._serialized_end=780 +# @@protoc_insertion_point(module_scope) diff --git a/openpype/hosts/nuke/vendor/google/protobuf/internal/more_extensions_dynamic_pb2.py b/openpype/hosts/nuke/vendor/google/protobuf/internal/more_extensions_dynamic_pb2.py new file mode 100644 index 0000000000..0953706bac --- /dev/null +++ b/openpype/hosts/nuke/vendor/google/protobuf/internal/more_extensions_dynamic_pb2.py @@ -0,0 +1,29 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: google/protobuf/internal/more_extensions_dynamic.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.protobuf.internal import more_extensions_pb2 as google_dot_protobuf_dot_internal_dot_more__extensions__pb2 + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n6google/protobuf/internal/more_extensions_dynamic.proto\x12\x18google.protobuf.internal\x1a.google/protobuf/internal/more_extensions.proto\"\x1f\n\x12\x44ynamicMessageType\x12\t\n\x01\x61\x18\x01 \x01(\x05:J\n\x17\x64ynamic_int32_extension\x12).google.protobuf.internal.ExtendedMessage\x18\x64 \x01(\x05:z\n\x19\x64ynamic_message_extension\x12).google.protobuf.internal.ExtendedMessage\x18\x65 \x01(\x0b\x32,.google.protobuf.internal.DynamicMessageType:\x83\x01\n\"repeated_dynamic_message_extension\x12).google.protobuf.internal.ExtendedMessage\x18\x66 \x03(\x0b\x32,.google.protobuf.internal.DynamicMessageType') + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.internal.more_extensions_dynamic_pb2', globals()) +if _descriptor._USE_C_DESCRIPTORS == False: + google_dot_protobuf_dot_internal_dot_more__extensions__pb2.ExtendedMessage.RegisterExtension(dynamic_int32_extension) + google_dot_protobuf_dot_internal_dot_more__extensions__pb2.ExtendedMessage.RegisterExtension(dynamic_message_extension) + google_dot_protobuf_dot_internal_dot_more__extensions__pb2.ExtendedMessage.RegisterExtension(repeated_dynamic_message_extension) + + DESCRIPTOR._options = None + _DYNAMICMESSAGETYPE._serialized_start=132 + _DYNAMICMESSAGETYPE._serialized_end=163 +# @@protoc_insertion_point(module_scope) diff --git a/openpype/hosts/nuke/vendor/google/protobuf/internal/more_extensions_pb2.py b/openpype/hosts/nuke/vendor/google/protobuf/internal/more_extensions_pb2.py new file mode 100644 index 0000000000..1cfa1b7c8b --- /dev/null +++ b/openpype/hosts/nuke/vendor/google/protobuf/internal/more_extensions_pb2.py @@ -0,0 +1,41 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: google/protobuf/internal/more_extensions.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n.google/protobuf/internal/more_extensions.proto\x12\x18google.protobuf.internal\"\x99\x01\n\x0fTopLevelMessage\x12\x41\n\nsubmessage\x18\x01 \x01(\x0b\x32).google.protobuf.internal.ExtendedMessageB\x02(\x01\x12\x43\n\x0enested_message\x18\x02 \x01(\x0b\x32\'.google.protobuf.internal.NestedMessageB\x02(\x01\"R\n\rNestedMessage\x12\x41\n\nsubmessage\x18\x01 \x01(\x0b\x32).google.protobuf.internal.ExtendedMessageB\x02(\x01\"K\n\x0f\x45xtendedMessage\x12\x17\n\x0eoptional_int32\x18\xe9\x07 \x01(\x05\x12\x18\n\x0frepeated_string\x18\xea\x07 \x03(\t*\x05\x08\x01\x10\xe8\x07\"-\n\x0e\x46oreignMessage\x12\x1b\n\x13\x66oreign_message_int\x18\x01 \x01(\x05:I\n\x16optional_int_extension\x12).google.protobuf.internal.ExtendedMessage\x18\x01 \x01(\x05:w\n\x1aoptional_message_extension\x12).google.protobuf.internal.ExtendedMessage\x18\x02 \x01(\x0b\x32(.google.protobuf.internal.ForeignMessage:I\n\x16repeated_int_extension\x12).google.protobuf.internal.ExtendedMessage\x18\x03 \x03(\x05:w\n\x1arepeated_message_extension\x12).google.protobuf.internal.ExtendedMessage\x18\x04 \x03(\x0b\x32(.google.protobuf.internal.ForeignMessage') + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.internal.more_extensions_pb2', globals()) +if _descriptor._USE_C_DESCRIPTORS == False: + ExtendedMessage.RegisterExtension(optional_int_extension) + ExtendedMessage.RegisterExtension(optional_message_extension) + ExtendedMessage.RegisterExtension(repeated_int_extension) + ExtendedMessage.RegisterExtension(repeated_message_extension) + + DESCRIPTOR._options = None + _TOPLEVELMESSAGE.fields_by_name['submessage']._options = None + _TOPLEVELMESSAGE.fields_by_name['submessage']._serialized_options = b'(\001' + _TOPLEVELMESSAGE.fields_by_name['nested_message']._options = None + _TOPLEVELMESSAGE.fields_by_name['nested_message']._serialized_options = b'(\001' + _NESTEDMESSAGE.fields_by_name['submessage']._options = None + _NESTEDMESSAGE.fields_by_name['submessage']._serialized_options = b'(\001' + _TOPLEVELMESSAGE._serialized_start=77 + _TOPLEVELMESSAGE._serialized_end=230 + _NESTEDMESSAGE._serialized_start=232 + _NESTEDMESSAGE._serialized_end=314 + _EXTENDEDMESSAGE._serialized_start=316 + _EXTENDEDMESSAGE._serialized_end=391 + _FOREIGNMESSAGE._serialized_start=393 + _FOREIGNMESSAGE._serialized_end=438 +# @@protoc_insertion_point(module_scope) diff --git a/openpype/hosts/nuke/vendor/google/protobuf/internal/more_messages_pb2.py b/openpype/hosts/nuke/vendor/google/protobuf/internal/more_messages_pb2.py new file mode 100644 index 0000000000..d7f7115609 --- /dev/null +++ b/openpype/hosts/nuke/vendor/google/protobuf/internal/more_messages_pb2.py @@ -0,0 +1,556 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: google/protobuf/internal/more_messages.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n,google/protobuf/internal/more_messages.proto\x12\x18google.protobuf.internal\"h\n\x10OutOfOrderFields\x12\x17\n\x0foptional_sint32\x18\x05 \x01(\x11\x12\x17\n\x0foptional_uint32\x18\x03 \x01(\r\x12\x16\n\x0eoptional_int32\x18\x01 \x01(\x05*\x04\x08\x04\x10\x05*\x04\x08\x02\x10\x03\"\xcd\x02\n\x05\x63lass\x12\x1b\n\tint_field\x18\x01 \x01(\x05R\x08json_int\x12\n\n\x02if\x18\x02 \x01(\x05\x12(\n\x02\x61s\x18\x03 \x01(\x0e\x32\x1c.google.protobuf.internal.is\x12\x30\n\nenum_field\x18\x04 \x01(\x0e\x32\x1c.google.protobuf.internal.is\x12>\n\x11nested_enum_field\x18\x05 \x01(\x0e\x32#.google.protobuf.internal.class.for\x12;\n\x0enested_message\x18\x06 \x01(\x0b\x32#.google.protobuf.internal.class.try\x1a\x1c\n\x03try\x12\r\n\x05\x66ield\x18\x01 \x01(\x05*\x06\x08\xe7\x07\x10\x90N\"\x1c\n\x03\x66or\x12\x0b\n\x07\x64\x65\x66\x61ult\x10\x00\x12\x08\n\x04True\x10\x01*\x06\x08\xe7\x07\x10\x90N\"?\n\x0b\x45xtendClass20\n\x06return\x12\x1f.google.protobuf.internal.class\x18\xea\x07 \x01(\x05\"~\n\x0fTestFullKeyword\x12:\n\x06\x66ield1\x18\x01 \x01(\x0b\x32*.google.protobuf.internal.OutOfOrderFields\x12/\n\x06\x66ield2\x18\x02 \x01(\x0b\x32\x1f.google.protobuf.internal.class\"\xa5\x0f\n\x11LotsNestedMessage\x1a\x04\n\x02\x42\x30\x1a\x04\n\x02\x42\x31\x1a\x04\n\x02\x42\x32\x1a\x04\n\x02\x42\x33\x1a\x04\n\x02\x42\x34\x1a\x04\n\x02\x42\x35\x1a\x04\n\x02\x42\x36\x1a\x04\n\x02\x42\x37\x1a\x04\n\x02\x42\x38\x1a\x04\n\x02\x42\x39\x1a\x05\n\x03\x42\x31\x30\x1a\x05\n\x03\x42\x31\x31\x1a\x05\n\x03\x42\x31\x32\x1a\x05\n\x03\x42\x31\x33\x1a\x05\n\x03\x42\x31\x34\x1a\x05\n\x03\x42\x31\x35\x1a\x05\n\x03\x42\x31\x36\x1a\x05\n\x03\x42\x31\x37\x1a\x05\n\x03\x42\x31\x38\x1a\x05\n\x03\x42\x31\x39\x1a\x05\n\x03\x42\x32\x30\x1a\x05\n\x03\x42\x32\x31\x1a\x05\n\x03\x42\x32\x32\x1a\x05\n\x03\x42\x32\x33\x1a\x05\n\x03\x42\x32\x34\x1a\x05\n\x03\x42\x32\x35\x1a\x05\n\x03\x42\x32\x36\x1a\x05\n\x03\x42\x32\x37\x1a\x05\n\x03\x42\x32\x38\x1a\x05\n\x03\x42\x32\x39\x1a\x05\n\x03\x42\x33\x30\x1a\x05\n\x03\x42\x33\x31\x1a\x05\n\x03\x42\x33\x32\x1a\x05\n\x03\x42\x33\x33\x1a\x05\n\x03\x42\x33\x34\x1a\x05\n\x03\x42\x33\x35\x1a\x05\n\x03\x42\x33\x36\x1a\x05\n\x03\x42\x33\x37\x1a\x05\n\x03\x42\x33\x38\x1a\x05\n\x03\x42\x33\x39\x1a\x05\n\x03\x42\x34\x30\x1a\x05\n\x03\x42\x34\x31\x1a\x05\n\x03\x42\x34\x32\x1a\x05\n\x03\x42\x34\x33\x1a\x05\n\x03\x42\x34\x34\x1a\x05\n\x03\x42\x34\x35\x1a\x05\n\x03\x42\x34\x36\x1a\x05\n\x03\x42\x34\x37\x1a\x05\n\x03\x42\x34\x38\x1a\x05\n\x03\x42\x34\x39\x1a\x05\n\x03\x42\x35\x30\x1a\x05\n\x03\x42\x35\x31\x1a\x05\n\x03\x42\x35\x32\x1a\x05\n\x03\x42\x35\x33\x1a\x05\n\x03\x42\x35\x34\x1a\x05\n\x03\x42\x35\x35\x1a\x05\n\x03\x42\x35\x36\x1a\x05\n\x03\x42\x35\x37\x1a\x05\n\x03\x42\x35\x38\x1a\x05\n\x03\x42\x35\x39\x1a\x05\n\x03\x42\x36\x30\x1a\x05\n\x03\x42\x36\x31\x1a\x05\n\x03\x42\x36\x32\x1a\x05\n\x03\x42\x36\x33\x1a\x05\n\x03\x42\x36\x34\x1a\x05\n\x03\x42\x36\x35\x1a\x05\n\x03\x42\x36\x36\x1a\x05\n\x03\x42\x36\x37\x1a\x05\n\x03\x42\x36\x38\x1a\x05\n\x03\x42\x36\x39\x1a\x05\n\x03\x42\x37\x30\x1a\x05\n\x03\x42\x37\x31\x1a\x05\n\x03\x42\x37\
x32\x1a\x05\n\x03\x42\x37\x33\x1a\x05\n\x03\x42\x37\x34\x1a\x05\n\x03\x42\x37\x35\x1a\x05\n\x03\x42\x37\x36\x1a\x05\n\x03\x42\x37\x37\x1a\x05\n\x03\x42\x37\x38\x1a\x05\n\x03\x42\x37\x39\x1a\x05\n\x03\x42\x38\x30\x1a\x05\n\x03\x42\x38\x31\x1a\x05\n\x03\x42\x38\x32\x1a\x05\n\x03\x42\x38\x33\x1a\x05\n\x03\x42\x38\x34\x1a\x05\n\x03\x42\x38\x35\x1a\x05\n\x03\x42\x38\x36\x1a\x05\n\x03\x42\x38\x37\x1a\x05\n\x03\x42\x38\x38\x1a\x05\n\x03\x42\x38\x39\x1a\x05\n\x03\x42\x39\x30\x1a\x05\n\x03\x42\x39\x31\x1a\x05\n\x03\x42\x39\x32\x1a\x05\n\x03\x42\x39\x33\x1a\x05\n\x03\x42\x39\x34\x1a\x05\n\x03\x42\x39\x35\x1a\x05\n\x03\x42\x39\x36\x1a\x05\n\x03\x42\x39\x37\x1a\x05\n\x03\x42\x39\x38\x1a\x05\n\x03\x42\x39\x39\x1a\x06\n\x04\x42\x31\x30\x30\x1a\x06\n\x04\x42\x31\x30\x31\x1a\x06\n\x04\x42\x31\x30\x32\x1a\x06\n\x04\x42\x31\x30\x33\x1a\x06\n\x04\x42\x31\x30\x34\x1a\x06\n\x04\x42\x31\x30\x35\x1a\x06\n\x04\x42\x31\x30\x36\x1a\x06\n\x04\x42\x31\x30\x37\x1a\x06\n\x04\x42\x31\x30\x38\x1a\x06\n\x04\x42\x31\x30\x39\x1a\x06\n\x04\x42\x31\x31\x30\x1a\x06\n\x04\x42\x31\x31\x31\x1a\x06\n\x04\x42\x31\x31\x32\x1a\x06\n\x04\x42\x31\x31\x33\x1a\x06\n\x04\x42\x31\x31\x34\x1a\x06\n\x04\x42\x31\x31\x35\x1a\x06\n\x04\x42\x31\x31\x36\x1a\x06\n\x04\x42\x31\x31\x37\x1a\x06\n\x04\x42\x31\x31\x38\x1a\x06\n\x04\x42\x31\x31\x39\x1a\x06\n\x04\x42\x31\x32\x30\x1a\x06\n\x04\x42\x31\x32\x31\x1a\x06\n\x04\x42\x31\x32\x32\x1a\x06\n\x04\x42\x31\x32\x33\x1a\x06\n\x04\x42\x31\x32\x34\x1a\x06\n\x04\x42\x31\x32\x35\x1a\x06\n\x04\x42\x31\x32\x36\x1a\x06\n\x04\x42\x31\x32\x37\x1a\x06\n\x04\x42\x31\x32\x38\x1a\x06\n\x04\x42\x31\x32\x39\x1a\x06\n\x04\x42\x31\x33\x30\x1a\x06\n\x04\x42\x31\x33\x31\x1a\x06\n\x04\x42\x31\x33\x32\x1a\x06\n\x04\x42\x31\x33\x33\x1a\x06\n\x04\x42\x31\x33\x34\x1a\x06\n\x04\x42\x31\x33\x35\x1a\x06\n\x04\x42\x31\x33\x36\x1a\x06\n\x04\x42\x31\x33\x37\x1a\x06\n\x04\x42\x31\x33\x38\x1a\x06\n\x04\x42\x31\x33\x39\x1a\x06\n\x04\x42\x31\x34\x30\x1a\x06\n\x04\x42\x31\x34\x31\x1a\x06\n\x04\x42\x31\x34\x32\x1a\x06\n\x04\x42\x31\x34\x33\x1a\x06\n\x04\x42\x31\x34\x34\x1a\x06\n\x04\x42\x31\x34\x35\x1a\x06\n\x04\x42\x31\x34\x36\x1a\x06\n\x04\x42\x31\x34\x37\x1a\x06\n\x04\x42\x31\x34\x38\x1a\x06\n\x04\x42\x31\x34\x39\x1a\x06\n\x04\x42\x31\x35\x30\x1a\x06\n\x04\x42\x31\x35\x31\x1a\x06\n\x04\x42\x31\x35\x32\x1a\x06\n\x04\x42\x31\x35\x33\x1a\x06\n\x04\x42\x31\x35\x34\x1a\x06\n\x04\x42\x31\x35\x35\x1a\x06\n\x04\x42\x31\x35\x36\x1a\x06\n\x04\x42\x31\x35\x37\x1a\x06\n\x04\x42\x31\x35\x38\x1a\x06\n\x04\x42\x31\x35\x39\x1a\x06\n\x04\x42\x31\x36\x30\x1a\x06\n\x04\x42\x31\x36\x31\x1a\x06\n\x04\x42\x31\x36\x32\x1a\x06\n\x04\x42\x31\x36\x33\x1a\x06\n\x04\x42\x31\x36\x34\x1a\x06\n\x04\x42\x31\x36\x35\x1a\x06\n\x04\x42\x31\x36\x36\x1a\x06\n\x04\x42\x31\x36\x37\x1a\x06\n\x04\x42\x31\x36\x38\x1a\x06\n\x04\x42\x31\x36\x39\x1a\x06\n\x04\x42\x31\x37\x30\x1a\x06\n\x04\x42\x31\x37\x31\x1a\x06\n\x04\x42\x31\x37\x32\x1a\x06\n\x04\x42\x31\x37\x33\x1a\x06\n\x04\x42\x31\x37\x34\x1a\x06\n\x04\x42\x31\x37\x35\x1a\x06\n\x04\x42\x31\x37\x36\x1a\x06\n\x04\x42\x31\x37\x37\x1a\x06\n\x04\x42\x31\x37\x38\x1a\x06\n\x04\x42\x31\x37\x39\x1a\x06\n\x04\x42\x31\x38\x30\x1a\x06\n\x04\x42\x31\x38\x31\x1a\x06\n\x04\x42\x31\x38\x32\x1a\x06\n\x04\x42\x31\x38\x33\x1a\x06\n\x04\x42\x31\x38\x34\x1a\x06\n\x04\x42\x31\x38\x35\x1a\x06\n\x04\x42\x31\x38\x36\x1a\x06\n\x04\x42\x31\x38\x37\x1a\x06\n\x04\x42\x31\x38\x38\x1a\x06\n\x04\x42\x31\x38\x39\x1a\x06\n\x04\x42\x31\x39\x30\x1a\x06\n\x04\x42\x31\x39\x31\x1a\x06\n\x04\x42\x31\x39\x32\x1a\x06\n\x04\x42\x31\x39\x33\x1a\x06\n\x04\x42\x31\x39\x34
\x1a\x06\n\x04\x42\x31\x39\x35\x1a\x06\n\x04\x42\x31\x39\x36\x1a\x06\n\x04\x42\x31\x39\x37\x1a\x06\n\x04\x42\x31\x39\x38\x1a\x06\n\x04\x42\x31\x39\x39\x1a\x06\n\x04\x42\x32\x30\x30\x1a\x06\n\x04\x42\x32\x30\x31\x1a\x06\n\x04\x42\x32\x30\x32\x1a\x06\n\x04\x42\x32\x30\x33\x1a\x06\n\x04\x42\x32\x30\x34\x1a\x06\n\x04\x42\x32\x30\x35\x1a\x06\n\x04\x42\x32\x30\x36\x1a\x06\n\x04\x42\x32\x30\x37\x1a\x06\n\x04\x42\x32\x30\x38\x1a\x06\n\x04\x42\x32\x30\x39\x1a\x06\n\x04\x42\x32\x31\x30\x1a\x06\n\x04\x42\x32\x31\x31\x1a\x06\n\x04\x42\x32\x31\x32\x1a\x06\n\x04\x42\x32\x31\x33\x1a\x06\n\x04\x42\x32\x31\x34\x1a\x06\n\x04\x42\x32\x31\x35\x1a\x06\n\x04\x42\x32\x31\x36\x1a\x06\n\x04\x42\x32\x31\x37\x1a\x06\n\x04\x42\x32\x31\x38\x1a\x06\n\x04\x42\x32\x31\x39\x1a\x06\n\x04\x42\x32\x32\x30\x1a\x06\n\x04\x42\x32\x32\x31\x1a\x06\n\x04\x42\x32\x32\x32\x1a\x06\n\x04\x42\x32\x32\x33\x1a\x06\n\x04\x42\x32\x32\x34\x1a\x06\n\x04\x42\x32\x32\x35\x1a\x06\n\x04\x42\x32\x32\x36\x1a\x06\n\x04\x42\x32\x32\x37\x1a\x06\n\x04\x42\x32\x32\x38\x1a\x06\n\x04\x42\x32\x32\x39\x1a\x06\n\x04\x42\x32\x33\x30\x1a\x06\n\x04\x42\x32\x33\x31\x1a\x06\n\x04\x42\x32\x33\x32\x1a\x06\n\x04\x42\x32\x33\x33\x1a\x06\n\x04\x42\x32\x33\x34\x1a\x06\n\x04\x42\x32\x33\x35\x1a\x06\n\x04\x42\x32\x33\x36\x1a\x06\n\x04\x42\x32\x33\x37\x1a\x06\n\x04\x42\x32\x33\x38\x1a\x06\n\x04\x42\x32\x33\x39\x1a\x06\n\x04\x42\x32\x34\x30\x1a\x06\n\x04\x42\x32\x34\x31\x1a\x06\n\x04\x42\x32\x34\x32\x1a\x06\n\x04\x42\x32\x34\x33\x1a\x06\n\x04\x42\x32\x34\x34\x1a\x06\n\x04\x42\x32\x34\x35\x1a\x06\n\x04\x42\x32\x34\x36\x1a\x06\n\x04\x42\x32\x34\x37\x1a\x06\n\x04\x42\x32\x34\x38\x1a\x06\n\x04\x42\x32\x34\x39\x1a\x06\n\x04\x42\x32\x35\x30\x1a\x06\n\x04\x42\x32\x35\x31\x1a\x06\n\x04\x42\x32\x35\x32\x1a\x06\n\x04\x42\x32\x35\x33\x1a\x06\n\x04\x42\x32\x35\x34\x1a\x06\n\x04\x42\x32\x35\x35*\x1b\n\x02is\x12\x0b\n\x07\x64\x65\x66\x61ult\x10\x00\x12\x08\n\x04\x65lse\x10\x01:C\n\x0foptional_uint64\x12*.google.protobuf.internal.OutOfOrderFields\x18\x04 \x01(\x04:B\n\x0eoptional_int64\x12*.google.protobuf.internal.OutOfOrderFields\x18\x02 \x01(\x03:2\n\x08\x63ontinue\x12\x1f.google.protobuf.internal.class\x18\xe9\x07 \x01(\x05:2\n\x04with\x12#.google.protobuf.internal.class.try\x18\xe9\x07 \x01(\x05') + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.internal.more_messages_pb2', globals()) +if _descriptor._USE_C_DESCRIPTORS == False: + OutOfOrderFields.RegisterExtension(optional_uint64) + OutOfOrderFields.RegisterExtension(optional_int64) + globals()['class'].RegisterExtension(globals()['continue']) + getattr(globals()['class'], 'try').RegisterExtension(globals()['with']) + globals()['class'].RegisterExtension(_EXTENDCLASS.extensions_by_name['return']) + + DESCRIPTOR._options = None + _IS._serialized_start=2669 + _IS._serialized_end=2696 + _OUTOFORDERFIELDS._serialized_start=74 + _OUTOFORDERFIELDS._serialized_end=178 + _CLASS._serialized_start=181 + _CLASS._serialized_end=514 + _CLASS_TRY._serialized_start=448 + _CLASS_TRY._serialized_end=476 + _CLASS_FOR._serialized_start=478 + _CLASS_FOR._serialized_end=506 + _EXTENDCLASS._serialized_start=516 + _EXTENDCLASS._serialized_end=579 + _TESTFULLKEYWORD._serialized_start=581 + _TESTFULLKEYWORD._serialized_end=707 + _LOTSNESTEDMESSAGE._serialized_start=710 + _LOTSNESTEDMESSAGE._serialized_end=2667 + _LOTSNESTEDMESSAGE_B0._serialized_start=731 + _LOTSNESTEDMESSAGE_B0._serialized_end=735 + _LOTSNESTEDMESSAGE_B1._serialized_start=737 + 
_LOTSNESTEDMESSAGE_B1._serialized_end=741 + _LOTSNESTEDMESSAGE_B2._serialized_start=743 + _LOTSNESTEDMESSAGE_B2._serialized_end=747 + _LOTSNESTEDMESSAGE_B3._serialized_start=749 + _LOTSNESTEDMESSAGE_B3._serialized_end=753 + _LOTSNESTEDMESSAGE_B4._serialized_start=755 + _LOTSNESTEDMESSAGE_B4._serialized_end=759 + _LOTSNESTEDMESSAGE_B5._serialized_start=761 + _LOTSNESTEDMESSAGE_B5._serialized_end=765 + _LOTSNESTEDMESSAGE_B6._serialized_start=767 + _LOTSNESTEDMESSAGE_B6._serialized_end=771 + _LOTSNESTEDMESSAGE_B7._serialized_start=773 + _LOTSNESTEDMESSAGE_B7._serialized_end=777 + _LOTSNESTEDMESSAGE_B8._serialized_start=779 + _LOTSNESTEDMESSAGE_B8._serialized_end=783 + _LOTSNESTEDMESSAGE_B9._serialized_start=785 + _LOTSNESTEDMESSAGE_B9._serialized_end=789 + _LOTSNESTEDMESSAGE_B10._serialized_start=791 + _LOTSNESTEDMESSAGE_B10._serialized_end=796 + _LOTSNESTEDMESSAGE_B11._serialized_start=798 + _LOTSNESTEDMESSAGE_B11._serialized_end=803 + _LOTSNESTEDMESSAGE_B12._serialized_start=805 + _LOTSNESTEDMESSAGE_B12._serialized_end=810 + _LOTSNESTEDMESSAGE_B13._serialized_start=812 + _LOTSNESTEDMESSAGE_B13._serialized_end=817 + _LOTSNESTEDMESSAGE_B14._serialized_start=819 + _LOTSNESTEDMESSAGE_B14._serialized_end=824 + _LOTSNESTEDMESSAGE_B15._serialized_start=826 + _LOTSNESTEDMESSAGE_B15._serialized_end=831 + _LOTSNESTEDMESSAGE_B16._serialized_start=833 + _LOTSNESTEDMESSAGE_B16._serialized_end=838 + _LOTSNESTEDMESSAGE_B17._serialized_start=840 + _LOTSNESTEDMESSAGE_B17._serialized_end=845 + _LOTSNESTEDMESSAGE_B18._serialized_start=847 + _LOTSNESTEDMESSAGE_B18._serialized_end=852 + _LOTSNESTEDMESSAGE_B19._serialized_start=854 + _LOTSNESTEDMESSAGE_B19._serialized_end=859 + _LOTSNESTEDMESSAGE_B20._serialized_start=861 + _LOTSNESTEDMESSAGE_B20._serialized_end=866 + _LOTSNESTEDMESSAGE_B21._serialized_start=868 + _LOTSNESTEDMESSAGE_B21._serialized_end=873 + _LOTSNESTEDMESSAGE_B22._serialized_start=875 + _LOTSNESTEDMESSAGE_B22._serialized_end=880 + _LOTSNESTEDMESSAGE_B23._serialized_start=882 + _LOTSNESTEDMESSAGE_B23._serialized_end=887 + _LOTSNESTEDMESSAGE_B24._serialized_start=889 + _LOTSNESTEDMESSAGE_B24._serialized_end=894 + _LOTSNESTEDMESSAGE_B25._serialized_start=896 + _LOTSNESTEDMESSAGE_B25._serialized_end=901 + _LOTSNESTEDMESSAGE_B26._serialized_start=903 + _LOTSNESTEDMESSAGE_B26._serialized_end=908 + _LOTSNESTEDMESSAGE_B27._serialized_start=910 + _LOTSNESTEDMESSAGE_B27._serialized_end=915 + _LOTSNESTEDMESSAGE_B28._serialized_start=917 + _LOTSNESTEDMESSAGE_B28._serialized_end=922 + _LOTSNESTEDMESSAGE_B29._serialized_start=924 + _LOTSNESTEDMESSAGE_B29._serialized_end=929 + _LOTSNESTEDMESSAGE_B30._serialized_start=931 + _LOTSNESTEDMESSAGE_B30._serialized_end=936 + _LOTSNESTEDMESSAGE_B31._serialized_start=938 + _LOTSNESTEDMESSAGE_B31._serialized_end=943 + _LOTSNESTEDMESSAGE_B32._serialized_start=945 + _LOTSNESTEDMESSAGE_B32._serialized_end=950 + _LOTSNESTEDMESSAGE_B33._serialized_start=952 + _LOTSNESTEDMESSAGE_B33._serialized_end=957 + _LOTSNESTEDMESSAGE_B34._serialized_start=959 + _LOTSNESTEDMESSAGE_B34._serialized_end=964 + _LOTSNESTEDMESSAGE_B35._serialized_start=966 + _LOTSNESTEDMESSAGE_B35._serialized_end=971 + _LOTSNESTEDMESSAGE_B36._serialized_start=973 + _LOTSNESTEDMESSAGE_B36._serialized_end=978 + _LOTSNESTEDMESSAGE_B37._serialized_start=980 + _LOTSNESTEDMESSAGE_B37._serialized_end=985 + _LOTSNESTEDMESSAGE_B38._serialized_start=987 + _LOTSNESTEDMESSAGE_B38._serialized_end=992 + _LOTSNESTEDMESSAGE_B39._serialized_start=994 + _LOTSNESTEDMESSAGE_B39._serialized_end=999 + 
_LOTSNESTEDMESSAGE_B40._serialized_start=1001 + _LOTSNESTEDMESSAGE_B40._serialized_end=1006 + _LOTSNESTEDMESSAGE_B41._serialized_start=1008 + _LOTSNESTEDMESSAGE_B41._serialized_end=1013 + _LOTSNESTEDMESSAGE_B42._serialized_start=1015 + _LOTSNESTEDMESSAGE_B42._serialized_end=1020 + _LOTSNESTEDMESSAGE_B43._serialized_start=1022 + _LOTSNESTEDMESSAGE_B43._serialized_end=1027 + _LOTSNESTEDMESSAGE_B44._serialized_start=1029 + _LOTSNESTEDMESSAGE_B44._serialized_end=1034 + _LOTSNESTEDMESSAGE_B45._serialized_start=1036 + _LOTSNESTEDMESSAGE_B45._serialized_end=1041 + _LOTSNESTEDMESSAGE_B46._serialized_start=1043 + _LOTSNESTEDMESSAGE_B46._serialized_end=1048 + _LOTSNESTEDMESSAGE_B47._serialized_start=1050 + _LOTSNESTEDMESSAGE_B47._serialized_end=1055 + _LOTSNESTEDMESSAGE_B48._serialized_start=1057 + _LOTSNESTEDMESSAGE_B48._serialized_end=1062 + _LOTSNESTEDMESSAGE_B49._serialized_start=1064 + _LOTSNESTEDMESSAGE_B49._serialized_end=1069 + _LOTSNESTEDMESSAGE_B50._serialized_start=1071 + _LOTSNESTEDMESSAGE_B50._serialized_end=1076 + _LOTSNESTEDMESSAGE_B51._serialized_start=1078 + _LOTSNESTEDMESSAGE_B51._serialized_end=1083 + _LOTSNESTEDMESSAGE_B52._serialized_start=1085 + _LOTSNESTEDMESSAGE_B52._serialized_end=1090 + _LOTSNESTEDMESSAGE_B53._serialized_start=1092 + _LOTSNESTEDMESSAGE_B53._serialized_end=1097 + _LOTSNESTEDMESSAGE_B54._serialized_start=1099 + _LOTSNESTEDMESSAGE_B54._serialized_end=1104 + _LOTSNESTEDMESSAGE_B55._serialized_start=1106 + _LOTSNESTEDMESSAGE_B55._serialized_end=1111 + _LOTSNESTEDMESSAGE_B56._serialized_start=1113 + _LOTSNESTEDMESSAGE_B56._serialized_end=1118 + _LOTSNESTEDMESSAGE_B57._serialized_start=1120 + _LOTSNESTEDMESSAGE_B57._serialized_end=1125 + _LOTSNESTEDMESSAGE_B58._serialized_start=1127 + _LOTSNESTEDMESSAGE_B58._serialized_end=1132 + _LOTSNESTEDMESSAGE_B59._serialized_start=1134 + _LOTSNESTEDMESSAGE_B59._serialized_end=1139 + _LOTSNESTEDMESSAGE_B60._serialized_start=1141 + _LOTSNESTEDMESSAGE_B60._serialized_end=1146 + _LOTSNESTEDMESSAGE_B61._serialized_start=1148 + _LOTSNESTEDMESSAGE_B61._serialized_end=1153 + _LOTSNESTEDMESSAGE_B62._serialized_start=1155 + _LOTSNESTEDMESSAGE_B62._serialized_end=1160 + _LOTSNESTEDMESSAGE_B63._serialized_start=1162 + _LOTSNESTEDMESSAGE_B63._serialized_end=1167 + _LOTSNESTEDMESSAGE_B64._serialized_start=1169 + _LOTSNESTEDMESSAGE_B64._serialized_end=1174 + _LOTSNESTEDMESSAGE_B65._serialized_start=1176 + _LOTSNESTEDMESSAGE_B65._serialized_end=1181 + _LOTSNESTEDMESSAGE_B66._serialized_start=1183 + _LOTSNESTEDMESSAGE_B66._serialized_end=1188 + _LOTSNESTEDMESSAGE_B67._serialized_start=1190 + _LOTSNESTEDMESSAGE_B67._serialized_end=1195 + _LOTSNESTEDMESSAGE_B68._serialized_start=1197 + _LOTSNESTEDMESSAGE_B68._serialized_end=1202 + _LOTSNESTEDMESSAGE_B69._serialized_start=1204 + _LOTSNESTEDMESSAGE_B69._serialized_end=1209 + _LOTSNESTEDMESSAGE_B70._serialized_start=1211 + _LOTSNESTEDMESSAGE_B70._serialized_end=1216 + _LOTSNESTEDMESSAGE_B71._serialized_start=1218 + _LOTSNESTEDMESSAGE_B71._serialized_end=1223 + _LOTSNESTEDMESSAGE_B72._serialized_start=1225 + _LOTSNESTEDMESSAGE_B72._serialized_end=1230 + _LOTSNESTEDMESSAGE_B73._serialized_start=1232 + _LOTSNESTEDMESSAGE_B73._serialized_end=1237 + _LOTSNESTEDMESSAGE_B74._serialized_start=1239 + _LOTSNESTEDMESSAGE_B74._serialized_end=1244 + _LOTSNESTEDMESSAGE_B75._serialized_start=1246 + _LOTSNESTEDMESSAGE_B75._serialized_end=1251 + _LOTSNESTEDMESSAGE_B76._serialized_start=1253 + _LOTSNESTEDMESSAGE_B76._serialized_end=1258 + _LOTSNESTEDMESSAGE_B77._serialized_start=1260 + 
_LOTSNESTEDMESSAGE_B77._serialized_end=1265 + _LOTSNESTEDMESSAGE_B78._serialized_start=1267 + _LOTSNESTEDMESSAGE_B78._serialized_end=1272 + _LOTSNESTEDMESSAGE_B79._serialized_start=1274 + _LOTSNESTEDMESSAGE_B79._serialized_end=1279 + _LOTSNESTEDMESSAGE_B80._serialized_start=1281 + _LOTSNESTEDMESSAGE_B80._serialized_end=1286 + _LOTSNESTEDMESSAGE_B81._serialized_start=1288 + _LOTSNESTEDMESSAGE_B81._serialized_end=1293 + _LOTSNESTEDMESSAGE_B82._serialized_start=1295 + _LOTSNESTEDMESSAGE_B82._serialized_end=1300 + _LOTSNESTEDMESSAGE_B83._serialized_start=1302 + _LOTSNESTEDMESSAGE_B83._serialized_end=1307 + _LOTSNESTEDMESSAGE_B84._serialized_start=1309 + _LOTSNESTEDMESSAGE_B84._serialized_end=1314 + _LOTSNESTEDMESSAGE_B85._serialized_start=1316 + _LOTSNESTEDMESSAGE_B85._serialized_end=1321 + _LOTSNESTEDMESSAGE_B86._serialized_start=1323 + _LOTSNESTEDMESSAGE_B86._serialized_end=1328 + _LOTSNESTEDMESSAGE_B87._serialized_start=1330 + _LOTSNESTEDMESSAGE_B87._serialized_end=1335 + _LOTSNESTEDMESSAGE_B88._serialized_start=1337 + _LOTSNESTEDMESSAGE_B88._serialized_end=1342 + _LOTSNESTEDMESSAGE_B89._serialized_start=1344 + _LOTSNESTEDMESSAGE_B89._serialized_end=1349 + _LOTSNESTEDMESSAGE_B90._serialized_start=1351 + _LOTSNESTEDMESSAGE_B90._serialized_end=1356 + _LOTSNESTEDMESSAGE_B91._serialized_start=1358 + _LOTSNESTEDMESSAGE_B91._serialized_end=1363 + _LOTSNESTEDMESSAGE_B92._serialized_start=1365 + _LOTSNESTEDMESSAGE_B92._serialized_end=1370 + _LOTSNESTEDMESSAGE_B93._serialized_start=1372 + _LOTSNESTEDMESSAGE_B93._serialized_end=1377 + _LOTSNESTEDMESSAGE_B94._serialized_start=1379 + _LOTSNESTEDMESSAGE_B94._serialized_end=1384 + _LOTSNESTEDMESSAGE_B95._serialized_start=1386 + _LOTSNESTEDMESSAGE_B95._serialized_end=1391 + _LOTSNESTEDMESSAGE_B96._serialized_start=1393 + _LOTSNESTEDMESSAGE_B96._serialized_end=1398 + _LOTSNESTEDMESSAGE_B97._serialized_start=1400 + _LOTSNESTEDMESSAGE_B97._serialized_end=1405 + _LOTSNESTEDMESSAGE_B98._serialized_start=1407 + _LOTSNESTEDMESSAGE_B98._serialized_end=1412 + _LOTSNESTEDMESSAGE_B99._serialized_start=1414 + _LOTSNESTEDMESSAGE_B99._serialized_end=1419 + _LOTSNESTEDMESSAGE_B100._serialized_start=1421 + _LOTSNESTEDMESSAGE_B100._serialized_end=1427 + _LOTSNESTEDMESSAGE_B101._serialized_start=1429 + _LOTSNESTEDMESSAGE_B101._serialized_end=1435 + _LOTSNESTEDMESSAGE_B102._serialized_start=1437 + _LOTSNESTEDMESSAGE_B102._serialized_end=1443 + _LOTSNESTEDMESSAGE_B103._serialized_start=1445 + _LOTSNESTEDMESSAGE_B103._serialized_end=1451 + _LOTSNESTEDMESSAGE_B104._serialized_start=1453 + _LOTSNESTEDMESSAGE_B104._serialized_end=1459 + _LOTSNESTEDMESSAGE_B105._serialized_start=1461 + _LOTSNESTEDMESSAGE_B105._serialized_end=1467 + _LOTSNESTEDMESSAGE_B106._serialized_start=1469 + _LOTSNESTEDMESSAGE_B106._serialized_end=1475 + _LOTSNESTEDMESSAGE_B107._serialized_start=1477 + _LOTSNESTEDMESSAGE_B107._serialized_end=1483 + _LOTSNESTEDMESSAGE_B108._serialized_start=1485 + _LOTSNESTEDMESSAGE_B108._serialized_end=1491 + _LOTSNESTEDMESSAGE_B109._serialized_start=1493 + _LOTSNESTEDMESSAGE_B109._serialized_end=1499 + _LOTSNESTEDMESSAGE_B110._serialized_start=1501 + _LOTSNESTEDMESSAGE_B110._serialized_end=1507 + _LOTSNESTEDMESSAGE_B111._serialized_start=1509 + _LOTSNESTEDMESSAGE_B111._serialized_end=1515 + _LOTSNESTEDMESSAGE_B112._serialized_start=1517 + _LOTSNESTEDMESSAGE_B112._serialized_end=1523 + _LOTSNESTEDMESSAGE_B113._serialized_start=1525 + _LOTSNESTEDMESSAGE_B113._serialized_end=1531 + _LOTSNESTEDMESSAGE_B114._serialized_start=1533 + _LOTSNESTEDMESSAGE_B114._serialized_end=1539 + 
_LOTSNESTEDMESSAGE_B115._serialized_start=1541 + _LOTSNESTEDMESSAGE_B115._serialized_end=1547 + _LOTSNESTEDMESSAGE_B116._serialized_start=1549 + _LOTSNESTEDMESSAGE_B116._serialized_end=1555 + _LOTSNESTEDMESSAGE_B117._serialized_start=1557 + _LOTSNESTEDMESSAGE_B117._serialized_end=1563 + _LOTSNESTEDMESSAGE_B118._serialized_start=1565 + _LOTSNESTEDMESSAGE_B118._serialized_end=1571 + _LOTSNESTEDMESSAGE_B119._serialized_start=1573 + _LOTSNESTEDMESSAGE_B119._serialized_end=1579 + _LOTSNESTEDMESSAGE_B120._serialized_start=1581 + _LOTSNESTEDMESSAGE_B120._serialized_end=1587 + _LOTSNESTEDMESSAGE_B121._serialized_start=1589 + _LOTSNESTEDMESSAGE_B121._serialized_end=1595 + _LOTSNESTEDMESSAGE_B122._serialized_start=1597 + _LOTSNESTEDMESSAGE_B122._serialized_end=1603 + _LOTSNESTEDMESSAGE_B123._serialized_start=1605 + _LOTSNESTEDMESSAGE_B123._serialized_end=1611 + _LOTSNESTEDMESSAGE_B124._serialized_start=1613 + _LOTSNESTEDMESSAGE_B124._serialized_end=1619 + _LOTSNESTEDMESSAGE_B125._serialized_start=1621 + _LOTSNESTEDMESSAGE_B125._serialized_end=1627 + _LOTSNESTEDMESSAGE_B126._serialized_start=1629 + _LOTSNESTEDMESSAGE_B126._serialized_end=1635 + _LOTSNESTEDMESSAGE_B127._serialized_start=1637 + _LOTSNESTEDMESSAGE_B127._serialized_end=1643 + _LOTSNESTEDMESSAGE_B128._serialized_start=1645 + _LOTSNESTEDMESSAGE_B128._serialized_end=1651 + _LOTSNESTEDMESSAGE_B129._serialized_start=1653 + _LOTSNESTEDMESSAGE_B129._serialized_end=1659 + _LOTSNESTEDMESSAGE_B130._serialized_start=1661 + _LOTSNESTEDMESSAGE_B130._serialized_end=1667 + _LOTSNESTEDMESSAGE_B131._serialized_start=1669 + _LOTSNESTEDMESSAGE_B131._serialized_end=1675 + _LOTSNESTEDMESSAGE_B132._serialized_start=1677 + _LOTSNESTEDMESSAGE_B132._serialized_end=1683 + _LOTSNESTEDMESSAGE_B133._serialized_start=1685 + _LOTSNESTEDMESSAGE_B133._serialized_end=1691 + _LOTSNESTEDMESSAGE_B134._serialized_start=1693 + _LOTSNESTEDMESSAGE_B134._serialized_end=1699 + _LOTSNESTEDMESSAGE_B135._serialized_start=1701 + _LOTSNESTEDMESSAGE_B135._serialized_end=1707 + _LOTSNESTEDMESSAGE_B136._serialized_start=1709 + _LOTSNESTEDMESSAGE_B136._serialized_end=1715 + _LOTSNESTEDMESSAGE_B137._serialized_start=1717 + _LOTSNESTEDMESSAGE_B137._serialized_end=1723 + _LOTSNESTEDMESSAGE_B138._serialized_start=1725 + _LOTSNESTEDMESSAGE_B138._serialized_end=1731 + _LOTSNESTEDMESSAGE_B139._serialized_start=1733 + _LOTSNESTEDMESSAGE_B139._serialized_end=1739 + _LOTSNESTEDMESSAGE_B140._serialized_start=1741 + _LOTSNESTEDMESSAGE_B140._serialized_end=1747 + _LOTSNESTEDMESSAGE_B141._serialized_start=1749 + _LOTSNESTEDMESSAGE_B141._serialized_end=1755 + _LOTSNESTEDMESSAGE_B142._serialized_start=1757 + _LOTSNESTEDMESSAGE_B142._serialized_end=1763 + _LOTSNESTEDMESSAGE_B143._serialized_start=1765 + _LOTSNESTEDMESSAGE_B143._serialized_end=1771 + _LOTSNESTEDMESSAGE_B144._serialized_start=1773 + _LOTSNESTEDMESSAGE_B144._serialized_end=1779 + _LOTSNESTEDMESSAGE_B145._serialized_start=1781 + _LOTSNESTEDMESSAGE_B145._serialized_end=1787 + _LOTSNESTEDMESSAGE_B146._serialized_start=1789 + _LOTSNESTEDMESSAGE_B146._serialized_end=1795 + _LOTSNESTEDMESSAGE_B147._serialized_start=1797 + _LOTSNESTEDMESSAGE_B147._serialized_end=1803 + _LOTSNESTEDMESSAGE_B148._serialized_start=1805 + _LOTSNESTEDMESSAGE_B148._serialized_end=1811 + _LOTSNESTEDMESSAGE_B149._serialized_start=1813 + _LOTSNESTEDMESSAGE_B149._serialized_end=1819 + _LOTSNESTEDMESSAGE_B150._serialized_start=1821 + _LOTSNESTEDMESSAGE_B150._serialized_end=1827 + _LOTSNESTEDMESSAGE_B151._serialized_start=1829 + _LOTSNESTEDMESSAGE_B151._serialized_end=1835 + 
_LOTSNESTEDMESSAGE_B152._serialized_start=1837 + _LOTSNESTEDMESSAGE_B152._serialized_end=1843 + _LOTSNESTEDMESSAGE_B153._serialized_start=1845 + _LOTSNESTEDMESSAGE_B153._serialized_end=1851 + _LOTSNESTEDMESSAGE_B154._serialized_start=1853 + _LOTSNESTEDMESSAGE_B154._serialized_end=1859 + _LOTSNESTEDMESSAGE_B155._serialized_start=1861 + _LOTSNESTEDMESSAGE_B155._serialized_end=1867 + _LOTSNESTEDMESSAGE_B156._serialized_start=1869 + _LOTSNESTEDMESSAGE_B156._serialized_end=1875 + _LOTSNESTEDMESSAGE_B157._serialized_start=1877 + _LOTSNESTEDMESSAGE_B157._serialized_end=1883 + _LOTSNESTEDMESSAGE_B158._serialized_start=1885 + _LOTSNESTEDMESSAGE_B158._serialized_end=1891 + _LOTSNESTEDMESSAGE_B159._serialized_start=1893 + _LOTSNESTEDMESSAGE_B159._serialized_end=1899 + _LOTSNESTEDMESSAGE_B160._serialized_start=1901 + _LOTSNESTEDMESSAGE_B160._serialized_end=1907 + _LOTSNESTEDMESSAGE_B161._serialized_start=1909 + _LOTSNESTEDMESSAGE_B161._serialized_end=1915 + _LOTSNESTEDMESSAGE_B162._serialized_start=1917 + _LOTSNESTEDMESSAGE_B162._serialized_end=1923 + _LOTSNESTEDMESSAGE_B163._serialized_start=1925 + _LOTSNESTEDMESSAGE_B163._serialized_end=1931 + _LOTSNESTEDMESSAGE_B164._serialized_start=1933 + _LOTSNESTEDMESSAGE_B164._serialized_end=1939 + _LOTSNESTEDMESSAGE_B165._serialized_start=1941 + _LOTSNESTEDMESSAGE_B165._serialized_end=1947 + _LOTSNESTEDMESSAGE_B166._serialized_start=1949 + _LOTSNESTEDMESSAGE_B166._serialized_end=1955 + _LOTSNESTEDMESSAGE_B167._serialized_start=1957 + _LOTSNESTEDMESSAGE_B167._serialized_end=1963 + _LOTSNESTEDMESSAGE_B168._serialized_start=1965 + _LOTSNESTEDMESSAGE_B168._serialized_end=1971 + _LOTSNESTEDMESSAGE_B169._serialized_start=1973 + _LOTSNESTEDMESSAGE_B169._serialized_end=1979 + _LOTSNESTEDMESSAGE_B170._serialized_start=1981 + _LOTSNESTEDMESSAGE_B170._serialized_end=1987 + _LOTSNESTEDMESSAGE_B171._serialized_start=1989 + _LOTSNESTEDMESSAGE_B171._serialized_end=1995 + _LOTSNESTEDMESSAGE_B172._serialized_start=1997 + _LOTSNESTEDMESSAGE_B172._serialized_end=2003 + _LOTSNESTEDMESSAGE_B173._serialized_start=2005 + _LOTSNESTEDMESSAGE_B173._serialized_end=2011 + _LOTSNESTEDMESSAGE_B174._serialized_start=2013 + _LOTSNESTEDMESSAGE_B174._serialized_end=2019 + _LOTSNESTEDMESSAGE_B175._serialized_start=2021 + _LOTSNESTEDMESSAGE_B175._serialized_end=2027 + _LOTSNESTEDMESSAGE_B176._serialized_start=2029 + _LOTSNESTEDMESSAGE_B176._serialized_end=2035 + _LOTSNESTEDMESSAGE_B177._serialized_start=2037 + _LOTSNESTEDMESSAGE_B177._serialized_end=2043 + _LOTSNESTEDMESSAGE_B178._serialized_start=2045 + _LOTSNESTEDMESSAGE_B178._serialized_end=2051 + _LOTSNESTEDMESSAGE_B179._serialized_start=2053 + _LOTSNESTEDMESSAGE_B179._serialized_end=2059 + _LOTSNESTEDMESSAGE_B180._serialized_start=2061 + _LOTSNESTEDMESSAGE_B180._serialized_end=2067 + _LOTSNESTEDMESSAGE_B181._serialized_start=2069 + _LOTSNESTEDMESSAGE_B181._serialized_end=2075 + _LOTSNESTEDMESSAGE_B182._serialized_start=2077 + _LOTSNESTEDMESSAGE_B182._serialized_end=2083 + _LOTSNESTEDMESSAGE_B183._serialized_start=2085 + _LOTSNESTEDMESSAGE_B183._serialized_end=2091 + _LOTSNESTEDMESSAGE_B184._serialized_start=2093 + _LOTSNESTEDMESSAGE_B184._serialized_end=2099 + _LOTSNESTEDMESSAGE_B185._serialized_start=2101 + _LOTSNESTEDMESSAGE_B185._serialized_end=2107 + _LOTSNESTEDMESSAGE_B186._serialized_start=2109 + _LOTSNESTEDMESSAGE_B186._serialized_end=2115 + _LOTSNESTEDMESSAGE_B187._serialized_start=2117 + _LOTSNESTEDMESSAGE_B187._serialized_end=2123 + _LOTSNESTEDMESSAGE_B188._serialized_start=2125 + _LOTSNESTEDMESSAGE_B188._serialized_end=2131 + 
_LOTSNESTEDMESSAGE_B189._serialized_start=2133 + _LOTSNESTEDMESSAGE_B189._serialized_end=2139 + _LOTSNESTEDMESSAGE_B190._serialized_start=2141 + _LOTSNESTEDMESSAGE_B190._serialized_end=2147 + _LOTSNESTEDMESSAGE_B191._serialized_start=2149 + _LOTSNESTEDMESSAGE_B191._serialized_end=2155 + _LOTSNESTEDMESSAGE_B192._serialized_start=2157 + _LOTSNESTEDMESSAGE_B192._serialized_end=2163 + _LOTSNESTEDMESSAGE_B193._serialized_start=2165 + _LOTSNESTEDMESSAGE_B193._serialized_end=2171 + _LOTSNESTEDMESSAGE_B194._serialized_start=2173 + _LOTSNESTEDMESSAGE_B194._serialized_end=2179 + _LOTSNESTEDMESSAGE_B195._serialized_start=2181 + _LOTSNESTEDMESSAGE_B195._serialized_end=2187 + _LOTSNESTEDMESSAGE_B196._serialized_start=2189 + _LOTSNESTEDMESSAGE_B196._serialized_end=2195 + _LOTSNESTEDMESSAGE_B197._serialized_start=2197 + _LOTSNESTEDMESSAGE_B197._serialized_end=2203 + _LOTSNESTEDMESSAGE_B198._serialized_start=2205 + _LOTSNESTEDMESSAGE_B198._serialized_end=2211 + _LOTSNESTEDMESSAGE_B199._serialized_start=2213 + _LOTSNESTEDMESSAGE_B199._serialized_end=2219 + _LOTSNESTEDMESSAGE_B200._serialized_start=2221 + _LOTSNESTEDMESSAGE_B200._serialized_end=2227 + _LOTSNESTEDMESSAGE_B201._serialized_start=2229 + _LOTSNESTEDMESSAGE_B201._serialized_end=2235 + _LOTSNESTEDMESSAGE_B202._serialized_start=2237 + _LOTSNESTEDMESSAGE_B202._serialized_end=2243 + _LOTSNESTEDMESSAGE_B203._serialized_start=2245 + _LOTSNESTEDMESSAGE_B203._serialized_end=2251 + _LOTSNESTEDMESSAGE_B204._serialized_start=2253 + _LOTSNESTEDMESSAGE_B204._serialized_end=2259 + _LOTSNESTEDMESSAGE_B205._serialized_start=2261 + _LOTSNESTEDMESSAGE_B205._serialized_end=2267 + _LOTSNESTEDMESSAGE_B206._serialized_start=2269 + _LOTSNESTEDMESSAGE_B206._serialized_end=2275 + _LOTSNESTEDMESSAGE_B207._serialized_start=2277 + _LOTSNESTEDMESSAGE_B207._serialized_end=2283 + _LOTSNESTEDMESSAGE_B208._serialized_start=2285 + _LOTSNESTEDMESSAGE_B208._serialized_end=2291 + _LOTSNESTEDMESSAGE_B209._serialized_start=2293 + _LOTSNESTEDMESSAGE_B209._serialized_end=2299 + _LOTSNESTEDMESSAGE_B210._serialized_start=2301 + _LOTSNESTEDMESSAGE_B210._serialized_end=2307 + _LOTSNESTEDMESSAGE_B211._serialized_start=2309 + _LOTSNESTEDMESSAGE_B211._serialized_end=2315 + _LOTSNESTEDMESSAGE_B212._serialized_start=2317 + _LOTSNESTEDMESSAGE_B212._serialized_end=2323 + _LOTSNESTEDMESSAGE_B213._serialized_start=2325 + _LOTSNESTEDMESSAGE_B213._serialized_end=2331 + _LOTSNESTEDMESSAGE_B214._serialized_start=2333 + _LOTSNESTEDMESSAGE_B214._serialized_end=2339 + _LOTSNESTEDMESSAGE_B215._serialized_start=2341 + _LOTSNESTEDMESSAGE_B215._serialized_end=2347 + _LOTSNESTEDMESSAGE_B216._serialized_start=2349 + _LOTSNESTEDMESSAGE_B216._serialized_end=2355 + _LOTSNESTEDMESSAGE_B217._serialized_start=2357 + _LOTSNESTEDMESSAGE_B217._serialized_end=2363 + _LOTSNESTEDMESSAGE_B218._serialized_start=2365 + _LOTSNESTEDMESSAGE_B218._serialized_end=2371 + _LOTSNESTEDMESSAGE_B219._serialized_start=2373 + _LOTSNESTEDMESSAGE_B219._serialized_end=2379 + _LOTSNESTEDMESSAGE_B220._serialized_start=2381 + _LOTSNESTEDMESSAGE_B220._serialized_end=2387 + _LOTSNESTEDMESSAGE_B221._serialized_start=2389 + _LOTSNESTEDMESSAGE_B221._serialized_end=2395 + _LOTSNESTEDMESSAGE_B222._serialized_start=2397 + _LOTSNESTEDMESSAGE_B222._serialized_end=2403 + _LOTSNESTEDMESSAGE_B223._serialized_start=2405 + _LOTSNESTEDMESSAGE_B223._serialized_end=2411 + _LOTSNESTEDMESSAGE_B224._serialized_start=2413 + _LOTSNESTEDMESSAGE_B224._serialized_end=2419 + _LOTSNESTEDMESSAGE_B225._serialized_start=2421 + _LOTSNESTEDMESSAGE_B225._serialized_end=2427 + 
_LOTSNESTEDMESSAGE_B226._serialized_start=2429 + _LOTSNESTEDMESSAGE_B226._serialized_end=2435 + _LOTSNESTEDMESSAGE_B227._serialized_start=2437 + _LOTSNESTEDMESSAGE_B227._serialized_end=2443 + _LOTSNESTEDMESSAGE_B228._serialized_start=2445 + _LOTSNESTEDMESSAGE_B228._serialized_end=2451 + _LOTSNESTEDMESSAGE_B229._serialized_start=2453 + _LOTSNESTEDMESSAGE_B229._serialized_end=2459 + _LOTSNESTEDMESSAGE_B230._serialized_start=2461 + _LOTSNESTEDMESSAGE_B230._serialized_end=2467 + _LOTSNESTEDMESSAGE_B231._serialized_start=2469 + _LOTSNESTEDMESSAGE_B231._serialized_end=2475 + _LOTSNESTEDMESSAGE_B232._serialized_start=2477 + _LOTSNESTEDMESSAGE_B232._serialized_end=2483 + _LOTSNESTEDMESSAGE_B233._serialized_start=2485 + _LOTSNESTEDMESSAGE_B233._serialized_end=2491 + _LOTSNESTEDMESSAGE_B234._serialized_start=2493 + _LOTSNESTEDMESSAGE_B234._serialized_end=2499 + _LOTSNESTEDMESSAGE_B235._serialized_start=2501 + _LOTSNESTEDMESSAGE_B235._serialized_end=2507 + _LOTSNESTEDMESSAGE_B236._serialized_start=2509 + _LOTSNESTEDMESSAGE_B236._serialized_end=2515 + _LOTSNESTEDMESSAGE_B237._serialized_start=2517 + _LOTSNESTEDMESSAGE_B237._serialized_end=2523 + _LOTSNESTEDMESSAGE_B238._serialized_start=2525 + _LOTSNESTEDMESSAGE_B238._serialized_end=2531 + _LOTSNESTEDMESSAGE_B239._serialized_start=2533 + _LOTSNESTEDMESSAGE_B239._serialized_end=2539 + _LOTSNESTEDMESSAGE_B240._serialized_start=2541 + _LOTSNESTEDMESSAGE_B240._serialized_end=2547 + _LOTSNESTEDMESSAGE_B241._serialized_start=2549 + _LOTSNESTEDMESSAGE_B241._serialized_end=2555 + _LOTSNESTEDMESSAGE_B242._serialized_start=2557 + _LOTSNESTEDMESSAGE_B242._serialized_end=2563 + _LOTSNESTEDMESSAGE_B243._serialized_start=2565 + _LOTSNESTEDMESSAGE_B243._serialized_end=2571 + _LOTSNESTEDMESSAGE_B244._serialized_start=2573 + _LOTSNESTEDMESSAGE_B244._serialized_end=2579 + _LOTSNESTEDMESSAGE_B245._serialized_start=2581 + _LOTSNESTEDMESSAGE_B245._serialized_end=2587 + _LOTSNESTEDMESSAGE_B246._serialized_start=2589 + _LOTSNESTEDMESSAGE_B246._serialized_end=2595 + _LOTSNESTEDMESSAGE_B247._serialized_start=2597 + _LOTSNESTEDMESSAGE_B247._serialized_end=2603 + _LOTSNESTEDMESSAGE_B248._serialized_start=2605 + _LOTSNESTEDMESSAGE_B248._serialized_end=2611 + _LOTSNESTEDMESSAGE_B249._serialized_start=2613 + _LOTSNESTEDMESSAGE_B249._serialized_end=2619 + _LOTSNESTEDMESSAGE_B250._serialized_start=2621 + _LOTSNESTEDMESSAGE_B250._serialized_end=2627 + _LOTSNESTEDMESSAGE_B251._serialized_start=2629 + _LOTSNESTEDMESSAGE_B251._serialized_end=2635 + _LOTSNESTEDMESSAGE_B252._serialized_start=2637 + _LOTSNESTEDMESSAGE_B252._serialized_end=2643 + _LOTSNESTEDMESSAGE_B253._serialized_start=2645 + _LOTSNESTEDMESSAGE_B253._serialized_end=2651 + _LOTSNESTEDMESSAGE_B254._serialized_start=2653 + _LOTSNESTEDMESSAGE_B254._serialized_end=2659 + _LOTSNESTEDMESSAGE_B255._serialized_start=2661 + _LOTSNESTEDMESSAGE_B255._serialized_end=2667 +# @@protoc_insertion_point(module_scope) diff --git a/openpype/hosts/nuke/vendor/google/protobuf/internal/no_package_pb2.py b/openpype/hosts/nuke/vendor/google/protobuf/internal/no_package_pb2.py new file mode 100644 index 0000000000..d46dee080a --- /dev/null +++ b/openpype/hosts/nuke/vendor/google/protobuf/internal/no_package_pb2.py @@ -0,0 +1,27 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: google/protobuf/internal/no_package.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n)google/protobuf/internal/no_package.proto\";\n\x10NoPackageMessage\x12\'\n\x0fno_package_enum\x18\x01 \x01(\x0e\x32\x0e.NoPackageEnum*?\n\rNoPackageEnum\x12\x16\n\x12NO_PACKAGE_VALUE_0\x10\x00\x12\x16\n\x12NO_PACKAGE_VALUE_1\x10\x01') + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.internal.no_package_pb2', globals()) +if _descriptor._USE_C_DESCRIPTORS == False: + + DESCRIPTOR._options = None + _NOPACKAGEENUM._serialized_start=106 + _NOPACKAGEENUM._serialized_end=169 + _NOPACKAGEMESSAGE._serialized_start=45 + _NOPACKAGEMESSAGE._serialized_end=104 +# @@protoc_insertion_point(module_scope) diff --git a/openpype/hosts/nuke/vendor/google/protobuf/internal/python_message.py b/openpype/hosts/nuke/vendor/google/protobuf/internal/python_message.py new file mode 100644 index 0000000000..2921d5cb6e --- /dev/null +++ b/openpype/hosts/nuke/vendor/google/protobuf/internal/python_message.py @@ -0,0 +1,1539 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. +# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +# This code is meant to work on Python 2.4 and above only. +# +# TODO(robinson): Helpers for verbose, common checks like seeing if a +# descriptor's cpp_type is CPPTYPE_MESSAGE. + +"""Contains a metaclass and helper functions used to create +protocol message classes from Descriptor objects at runtime. + +Recall that a metaclass is the "type" of a class. +(A class is to a metaclass what an instance is to a class.) 
+ +In this case, we use the GeneratedProtocolMessageType metaclass +to inject all the useful functionality into the classes +output by the protocol compiler at compile-time. + +The upshot of all this is that the real implementation +details for ALL pure-Python protocol buffers are *here in +this file*. +""" + +__author__ = 'robinson@google.com (Will Robinson)' + +from io import BytesIO +import struct +import sys +import weakref + +# We use "as" to avoid name collisions with variables. +from google.protobuf.internal import api_implementation +from google.protobuf.internal import containers +from google.protobuf.internal import decoder +from google.protobuf.internal import encoder +from google.protobuf.internal import enum_type_wrapper +from google.protobuf.internal import extension_dict +from google.protobuf.internal import message_listener as message_listener_mod +from google.protobuf.internal import type_checkers +from google.protobuf.internal import well_known_types +from google.protobuf.internal import wire_format +from google.protobuf import descriptor as descriptor_mod +from google.protobuf import message as message_mod +from google.protobuf import text_format + +_FieldDescriptor = descriptor_mod.FieldDescriptor +_AnyFullTypeName = 'google.protobuf.Any' +_ExtensionDict = extension_dict._ExtensionDict + +class GeneratedProtocolMessageType(type): + + """Metaclass for protocol message classes created at runtime from Descriptors. + + We add implementations for all methods described in the Message class. We + also create properties to allow getting/setting all fields in the protocol + message. Finally, we create slots to prevent users from accidentally + "setting" nonexistent fields in the protocol message, which then wouldn't get + serialized / deserialized properly. + + The protocol compiler currently uses this metaclass to create protocol + message classes at runtime. Clients can also manually create their own + classes at runtime, as in this example: + + mydescriptor = Descriptor(.....) + factory = symbol_database.Default() + factory.pool.AddDescriptor(mydescriptor) + MyProtoClass = factory.GetPrototype(mydescriptor) + myproto_instance = MyProtoClass() + myproto.foo_field = 23 + ... + """ + + # Must be consistent with the protocol-compiler code in + # proto2/compiler/internal/generator.*. + _DESCRIPTOR_KEY = 'DESCRIPTOR' + + def __new__(cls, name, bases, dictionary): + """Custom allocation for runtime-generated class types. + + We override __new__ because this is apparently the only place + where we can meaningfully set __slots__ on the class we're creating(?). + (The interplay between metaclasses and slots is not very well-documented). + + Args: + name: Name of the class (ignored, but required by the + metaclass protocol). + bases: Base classes of the class we're constructing. + (Should be message.Message). We ignore this field, but + it's required by the metaclass protocol + dictionary: The class dictionary of the class we're + constructing. dictionary[_DESCRIPTOR_KEY] must contain + a Descriptor object describing this protocol message + type. + + Returns: + Newly-allocated class. + + Raises: + RuntimeError: Generated code only work with python cpp extension. + """ + descriptor = dictionary[GeneratedProtocolMessageType._DESCRIPTOR_KEY] + + if isinstance(descriptor, str): + raise RuntimeError('The generated code only work with python cpp ' + 'extension, but it is using pure python runtime.') + + # If a concrete class already exists for this descriptor, don't try to + # create another. 
Doing so will break any messages that already exist with + # the existing class. + # + # The C++ implementation appears to have its own internal `PyMessageFactory` + # to achieve similar results. + # + # This most commonly happens in `text_format.py` when using descriptors from + # a custom pool; it calls symbol_database.Global().getPrototype() on a + # descriptor which already has an existing concrete class. + new_class = getattr(descriptor, '_concrete_class', None) + if new_class: + return new_class + + if descriptor.full_name in well_known_types.WKTBASES: + bases += (well_known_types.WKTBASES[descriptor.full_name],) + _AddClassAttributesForNestedExtensions(descriptor, dictionary) + _AddSlots(descriptor, dictionary) + + superclass = super(GeneratedProtocolMessageType, cls) + new_class = superclass.__new__(cls, name, bases, dictionary) + return new_class + + def __init__(cls, name, bases, dictionary): + """Here we perform the majority of our work on the class. + We add enum getters, an __init__ method, implementations + of all Message methods, and properties for all fields + in the protocol type. + + Args: + name: Name of the class (ignored, but required by the + metaclass protocol). + bases: Base classes of the class we're constructing. + (Should be message.Message). We ignore this field, but + it's required by the metaclass protocol + dictionary: The class dictionary of the class we're + constructing. dictionary[_DESCRIPTOR_KEY] must contain + a Descriptor object describing this protocol message + type. + """ + descriptor = dictionary[GeneratedProtocolMessageType._DESCRIPTOR_KEY] + + # If this is an _existing_ class looked up via `_concrete_class` in the + # __new__ method above, then we don't need to re-initialize anything. + existing_class = getattr(descriptor, '_concrete_class', None) + if existing_class: + assert existing_class is cls, ( + 'Duplicate `GeneratedProtocolMessageType` created for descriptor %r' + % (descriptor.full_name)) + return + + cls._decoders_by_tag = {} + if (descriptor.has_options and + descriptor.GetOptions().message_set_wire_format): + cls._decoders_by_tag[decoder.MESSAGE_SET_ITEM_TAG] = ( + decoder.MessageSetItemDecoder(descriptor), None) + + # Attach stuff to each FieldDescriptor for quick lookup later on. + for field in descriptor.fields: + _AttachFieldHelpers(cls, field) + + descriptor._concrete_class = cls # pylint: disable=protected-access + _AddEnumValues(descriptor, cls) + _AddInitMethod(descriptor, cls) + _AddPropertiesForFields(descriptor, cls) + _AddPropertiesForExtensions(descriptor, cls) + _AddStaticMethods(cls) + _AddMessageMethods(descriptor, cls) + _AddPrivateHelperMethods(descriptor, cls) + + superclass = super(GeneratedProtocolMessageType, cls) + superclass.__init__(name, bases, dictionary) + + +# Stateless helpers for GeneratedProtocolMessageType below. +# Outside clients should not access these directly. +# +# I opted not to make any of these methods on the metaclass, to make it more +# clear that I'm not really using any state there and to keep clients from +# thinking that they have direct access to these construction helpers. + + +def _PropertyName(proto_field_name): + """Returns the name of the public property attribute which + clients can use to get and (in some cases) set the value + of a protocol message field. + + Args: + proto_field_name: The protocol message field name, exactly + as it appears (or would appear) in a .proto file. + """ + # TODO(robinson): Escape Python keywords (e.g., yield), and test this support. 
+ # nnorwitz makes my day by writing: + # """ + # FYI. See the keyword module in the stdlib. This could be as simple as: + # + # if keyword.iskeyword(proto_field_name): + # return proto_field_name + "_" + # return proto_field_name + # """ + # Kenton says: The above is a BAD IDEA. People rely on being able to use + # getattr() and setattr() to reflectively manipulate field values. If we + # rename the properties, then every such user has to also make sure to apply + # the same transformation. Note that currently if you name a field "yield", + # you can still access it just fine using getattr/setattr -- it's not even + # that cumbersome to do so. + # TODO(kenton): Remove this method entirely if/when everyone agrees with my + # position. + return proto_field_name + + +def _AddSlots(message_descriptor, dictionary): + """Adds a __slots__ entry to dictionary, containing the names of all valid + attributes for this message type. + + Args: + message_descriptor: A Descriptor instance describing this message type. + dictionary: Class dictionary to which we'll add a '__slots__' entry. + """ + dictionary['__slots__'] = ['_cached_byte_size', + '_cached_byte_size_dirty', + '_fields', + '_unknown_fields', + '_unknown_field_set', + '_is_present_in_parent', + '_listener', + '_listener_for_children', + '__weakref__', + '_oneofs'] + + +def _IsMessageSetExtension(field): + return (field.is_extension and + field.containing_type.has_options and + field.containing_type.GetOptions().message_set_wire_format and + field.type == _FieldDescriptor.TYPE_MESSAGE and + field.label == _FieldDescriptor.LABEL_OPTIONAL) + + +def _IsMapField(field): + return (field.type == _FieldDescriptor.TYPE_MESSAGE and + field.message_type.has_options and + field.message_type.GetOptions().map_entry) + + +def _IsMessageMapField(field): + value_type = field.message_type.fields_by_name['value'] + return value_type.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE + + +def _AttachFieldHelpers(cls, field_descriptor): + is_repeated = (field_descriptor.label == _FieldDescriptor.LABEL_REPEATED) + is_packable = (is_repeated and + wire_format.IsTypePackable(field_descriptor.type)) + is_proto3 = field_descriptor.containing_type.syntax == 'proto3' + if not is_packable: + is_packed = False + elif field_descriptor.containing_type.syntax == 'proto2': + is_packed = (field_descriptor.has_options and + field_descriptor.GetOptions().packed) + else: + has_packed_false = (field_descriptor.has_options and + field_descriptor.GetOptions().HasField('packed') and + field_descriptor.GetOptions().packed == False) + is_packed = not has_packed_false + is_map_entry = _IsMapField(field_descriptor) + + if is_map_entry: + field_encoder = encoder.MapEncoder(field_descriptor) + sizer = encoder.MapSizer(field_descriptor, + _IsMessageMapField(field_descriptor)) + elif _IsMessageSetExtension(field_descriptor): + field_encoder = encoder.MessageSetItemEncoder(field_descriptor.number) + sizer = encoder.MessageSetItemSizer(field_descriptor.number) + else: + field_encoder = type_checkers.TYPE_TO_ENCODER[field_descriptor.type]( + field_descriptor.number, is_repeated, is_packed) + sizer = type_checkers.TYPE_TO_SIZER[field_descriptor.type]( + field_descriptor.number, is_repeated, is_packed) + + field_descriptor._encoder = field_encoder + field_descriptor._sizer = sizer + field_descriptor._default_constructor = _DefaultValueConstructorForField( + field_descriptor) + + def AddDecoder(wiretype, is_packed): + tag_bytes = encoder.TagBytes(field_descriptor.number, wiretype) + decode_type = 
field_descriptor.type + if (decode_type == _FieldDescriptor.TYPE_ENUM and + type_checkers.SupportsOpenEnums(field_descriptor)): + decode_type = _FieldDescriptor.TYPE_INT32 + + oneof_descriptor = None + clear_if_default = False + if field_descriptor.containing_oneof is not None: + oneof_descriptor = field_descriptor + elif (is_proto3 and not is_repeated and + field_descriptor.cpp_type != _FieldDescriptor.CPPTYPE_MESSAGE): + clear_if_default = True + + if is_map_entry: + is_message_map = _IsMessageMapField(field_descriptor) + + field_decoder = decoder.MapDecoder( + field_descriptor, _GetInitializeDefaultForMap(field_descriptor), + is_message_map) + elif decode_type == _FieldDescriptor.TYPE_STRING: + field_decoder = decoder.StringDecoder( + field_descriptor.number, is_repeated, is_packed, + field_descriptor, field_descriptor._default_constructor, + clear_if_default) + elif field_descriptor.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE: + field_decoder = type_checkers.TYPE_TO_DECODER[decode_type]( + field_descriptor.number, is_repeated, is_packed, + field_descriptor, field_descriptor._default_constructor) + else: + field_decoder = type_checkers.TYPE_TO_DECODER[decode_type]( + field_descriptor.number, is_repeated, is_packed, + # pylint: disable=protected-access + field_descriptor, field_descriptor._default_constructor, + clear_if_default) + + cls._decoders_by_tag[tag_bytes] = (field_decoder, oneof_descriptor) + + AddDecoder(type_checkers.FIELD_TYPE_TO_WIRE_TYPE[field_descriptor.type], + False) + + if is_repeated and wire_format.IsTypePackable(field_descriptor.type): + # To support wire compatibility of adding packed = true, add a decoder for + # packed values regardless of the field's options. + AddDecoder(wire_format.WIRETYPE_LENGTH_DELIMITED, True) + + +def _AddClassAttributesForNestedExtensions(descriptor, dictionary): + extensions = descriptor.extensions_by_name + for extension_name, extension_field in extensions.items(): + assert extension_name not in dictionary + dictionary[extension_name] = extension_field + + +def _AddEnumValues(descriptor, cls): + """Sets class-level attributes for all enum fields defined in this message. + + Also exporting a class-level object that can name enum values. + + Args: + descriptor: Descriptor object for this message type. + cls: Class we're constructing for this message type. + """ + for enum_type in descriptor.enum_types: + setattr(cls, enum_type.name, enum_type_wrapper.EnumTypeWrapper(enum_type)) + for enum_value in enum_type.values: + setattr(cls, enum_value.name, enum_value.number) + + +def _GetInitializeDefaultForMap(field): + if field.label != _FieldDescriptor.LABEL_REPEATED: + raise ValueError('map_entry set on non-repeated field %s' % ( + field.name)) + fields_by_name = field.message_type.fields_by_name + key_checker = type_checkers.GetTypeChecker(fields_by_name['key']) + + value_field = fields_by_name['value'] + if _IsMessageMapField(field): + def MakeMessageMapDefault(message): + return containers.MessageMap( + message._listener_for_children, value_field.message_type, key_checker, + field.message_type) + return MakeMessageMapDefault + else: + value_checker = type_checkers.GetTypeChecker(value_field) + def MakePrimitiveMapDefault(message): + return containers.ScalarMap( + message._listener_for_children, key_checker, value_checker, + field.message_type) + return MakePrimitiveMapDefault + +def _DefaultValueConstructorForField(field): + """Returns a function which returns a default value for a field. 
+ + Args: + field: FieldDescriptor object for this field. + + The returned function has one argument: + message: Message instance containing this field, or a weakref proxy + of same. + + That function in turn returns a default value for this field. The default + value may refer back to |message| via a weak reference. + """ + + if _IsMapField(field): + return _GetInitializeDefaultForMap(field) + + if field.label == _FieldDescriptor.LABEL_REPEATED: + if field.has_default_value and field.default_value != []: + raise ValueError('Repeated field default value not empty list: %s' % ( + field.default_value)) + if field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE: + # We can't look at _concrete_class yet since it might not have + # been set. (Depends on order in which we initialize the classes). + message_type = field.message_type + def MakeRepeatedMessageDefault(message): + return containers.RepeatedCompositeFieldContainer( + message._listener_for_children, field.message_type) + return MakeRepeatedMessageDefault + else: + type_checker = type_checkers.GetTypeChecker(field) + def MakeRepeatedScalarDefault(message): + return containers.RepeatedScalarFieldContainer( + message._listener_for_children, type_checker) + return MakeRepeatedScalarDefault + + if field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE: + # _concrete_class may not yet be initialized. + message_type = field.message_type + def MakeSubMessageDefault(message): + assert getattr(message_type, '_concrete_class', None), ( + 'Uninitialized concrete class found for field %r (message type %r)' + % (field.full_name, message_type.full_name)) + result = message_type._concrete_class() + result._SetListener( + _OneofListener(message, field) + if field.containing_oneof is not None + else message._listener_for_children) + return result + return MakeSubMessageDefault + + def MakeScalarDefault(message): + # TODO(protobuf-team): This may be broken since there may not be + # default_value. Combine with has_default_value somehow. + return field.default_value + return MakeScalarDefault + + +def _ReraiseTypeErrorWithFieldName(message_name, field_name): + """Re-raise the currently-handled TypeError with the field name added.""" + exc = sys.exc_info()[1] + if len(exc.args) == 1 and type(exc) is TypeError: + # simple TypeError; add field name to exception message + exc = TypeError('%s for field %s.%s' % (str(exc), message_name, field_name)) + + # re-raise possibly-amended exception with original traceback: + raise exc.with_traceback(sys.exc_info()[2]) + + +def _AddInitMethod(message_descriptor, cls): + """Adds an __init__ method to cls.""" + + def _GetIntegerEnumValue(enum_type, value): + """Convert a string or integer enum value to an integer. + + If the value is a string, it is converted to the enum value in + enum_type with the same name. If the value is not a string, it's + returned as-is. (No conversion or bounds-checking is done.) + """ + if isinstance(value, str): + try: + return enum_type.values_by_name[value].number + except KeyError: + raise ValueError('Enum type %s: unknown label "%s"' % ( + enum_type.full_name, value)) + return value + + def init(self, **kwargs): + self._cached_byte_size = 0 + self._cached_byte_size_dirty = len(kwargs) > 0 + self._fields = {} + # Contains a mapping from oneof field descriptors to the descriptor + # of the currently set field in that oneof field. + self._oneofs = {} + + # _unknown_fields is () when empty for efficiency, and will be turned into + # a list if fields are added. 
+ self._unknown_fields = () + # _unknown_field_set is None when empty for efficiency, and will be + # turned into UnknownFieldSet struct if fields are added. + self._unknown_field_set = None # pylint: disable=protected-access + self._is_present_in_parent = False + self._listener = message_listener_mod.NullMessageListener() + self._listener_for_children = _Listener(self) + for field_name, field_value in kwargs.items(): + field = _GetFieldByName(message_descriptor, field_name) + if field is None: + raise TypeError('%s() got an unexpected keyword argument "%s"' % + (message_descriptor.name, field_name)) + if field_value is None: + # field=None is the same as no field at all. + continue + if field.label == _FieldDescriptor.LABEL_REPEATED: + copy = field._default_constructor(self) + if field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE: # Composite + if _IsMapField(field): + if _IsMessageMapField(field): + for key in field_value: + copy[key].MergeFrom(field_value[key]) + else: + copy.update(field_value) + else: + for val in field_value: + if isinstance(val, dict): + copy.add(**val) + else: + copy.add().MergeFrom(val) + else: # Scalar + if field.cpp_type == _FieldDescriptor.CPPTYPE_ENUM: + field_value = [_GetIntegerEnumValue(field.enum_type, val) + for val in field_value] + copy.extend(field_value) + self._fields[field] = copy + elif field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE: + copy = field._default_constructor(self) + new_val = field_value + if isinstance(field_value, dict): + new_val = field.message_type._concrete_class(**field_value) + try: + copy.MergeFrom(new_val) + except TypeError: + _ReraiseTypeErrorWithFieldName(message_descriptor.name, field_name) + self._fields[field] = copy + else: + if field.cpp_type == _FieldDescriptor.CPPTYPE_ENUM: + field_value = _GetIntegerEnumValue(field.enum_type, field_value) + try: + setattr(self, field_name, field_value) + except TypeError: + _ReraiseTypeErrorWithFieldName(message_descriptor.name, field_name) + + init.__module__ = None + init.__doc__ = None + cls.__init__ = init + + +def _GetFieldByName(message_descriptor, field_name): + """Returns a field descriptor by field name. + + Args: + message_descriptor: A Descriptor describing all fields in message. + field_name: The name of the field to retrieve. + Returns: + The field descriptor associated with the field name. + """ + try: + return message_descriptor.fields_by_name[field_name] + except KeyError: + raise ValueError('Protocol message %s has no "%s" field.' % + (message_descriptor.name, field_name)) + + +def _AddPropertiesForFields(descriptor, cls): + """Adds properties for all fields in this protocol message type.""" + for field in descriptor.fields: + _AddPropertiesForField(field, cls) + + if descriptor.is_extendable: + # _ExtensionDict is just an adaptor with no state so we allocate a new one + # every time it is accessed. + cls.Extensions = property(lambda self: _ExtensionDict(self)) + + +def _AddPropertiesForField(field, cls): + """Adds a public property for a protocol message field. + Clients can use this property to get and (in the case + of non-repeated scalar fields) directly set the value + of a protocol message field. + + Args: + field: A FieldDescriptor for this field. + cls: The class we're constructing. + """ + # Catch it if we add other types that we should + # handle specially here. 
+ assert _FieldDescriptor.MAX_CPPTYPE == 10 + + constant_name = field.name.upper() + '_FIELD_NUMBER' + setattr(cls, constant_name, field.number) + + if field.label == _FieldDescriptor.LABEL_REPEATED: + _AddPropertiesForRepeatedField(field, cls) + elif field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE: + _AddPropertiesForNonRepeatedCompositeField(field, cls) + else: + _AddPropertiesForNonRepeatedScalarField(field, cls) + + +class _FieldProperty(property): + __slots__ = ('DESCRIPTOR',) + + def __init__(self, descriptor, getter, setter, doc): + property.__init__(self, getter, setter, doc=doc) + self.DESCRIPTOR = descriptor + + +def _AddPropertiesForRepeatedField(field, cls): + """Adds a public property for a "repeated" protocol message field. Clients + can use this property to get the value of the field, which will be either a + RepeatedScalarFieldContainer or RepeatedCompositeFieldContainer (see + below). + + Note that when clients add values to these containers, we perform + type-checking in the case of repeated scalar fields, and we also set any + necessary "has" bits as a side-effect. + + Args: + field: A FieldDescriptor for this field. + cls: The class we're constructing. + """ + proto_field_name = field.name + property_name = _PropertyName(proto_field_name) + + def getter(self): + field_value = self._fields.get(field) + if field_value is None: + # Construct a new object to represent this field. + field_value = field._default_constructor(self) + + # Atomically check if another thread has preempted us and, if not, swap + # in the new object we just created. If someone has preempted us, we + # take that object and discard ours. + # WARNING: We are relying on setdefault() being atomic. This is true + # in CPython but we haven't investigated others. This warning appears + # in several other locations in this file. + field_value = self._fields.setdefault(field, field_value) + return field_value + getter.__module__ = None + getter.__doc__ = 'Getter for %s.' % proto_field_name + + # We define a setter just so we can throw an exception with a more + # helpful error message. + def setter(self, new_value): + raise AttributeError('Assignment not allowed to repeated field ' + '"%s" in protocol message object.' % proto_field_name) + + doc = 'Magic attribute generated for "%s" proto field.' % proto_field_name + setattr(cls, property_name, _FieldProperty(field, getter, setter, doc=doc)) + + +def _AddPropertiesForNonRepeatedScalarField(field, cls): + """Adds a public property for a nonrepeated, scalar protocol message field. + Clients can use this property to get and directly set the value of the field. + Note that when the client sets the value of a field by using this property, + all necessary "has" bits are set as a side-effect, and we also perform + type-checking. + + Args: + field: A FieldDescriptor for this field. + cls: The class we're constructing. + """ + proto_field_name = field.name + property_name = _PropertyName(proto_field_name) + type_checker = type_checkers.GetTypeChecker(field) + default_value = field.default_value + is_proto3 = field.containing_type.syntax == 'proto3' + + def getter(self): + # TODO(protobuf-team): This may be broken since there may not be + # default_value. Combine with has_default_value somehow. + return self._fields.get(field, default_value) + getter.__module__ = None + getter.__doc__ = 'Getter for %s.' 
% proto_field_name + + clear_when_set_to_default = is_proto3 and not field.containing_oneof + + def field_setter(self, new_value): + # pylint: disable=protected-access + # Testing the value for truthiness captures all of the proto3 defaults + # (0, 0.0, enum 0, and False). + try: + new_value = type_checker.CheckValue(new_value) + except TypeError as e: + raise TypeError( + 'Cannot set %s to %.1024r: %s' % (field.full_name, new_value, e)) + if clear_when_set_to_default and not new_value: + self._fields.pop(field, None) + else: + self._fields[field] = new_value + # Check _cached_byte_size_dirty inline to improve performance, since scalar + # setters are called frequently. + if not self._cached_byte_size_dirty: + self._Modified() + + if field.containing_oneof: + def setter(self, new_value): + field_setter(self, new_value) + self._UpdateOneofState(field) + else: + setter = field_setter + + setter.__module__ = None + setter.__doc__ = 'Setter for %s.' % proto_field_name + + # Add a property to encapsulate the getter/setter. + doc = 'Magic attribute generated for "%s" proto field.' % proto_field_name + setattr(cls, property_name, _FieldProperty(field, getter, setter, doc=doc)) + + +def _AddPropertiesForNonRepeatedCompositeField(field, cls): + """Adds a public property for a nonrepeated, composite protocol message field. + A composite field is a "group" or "message" field. + + Clients can use this property to get the value of the field, but cannot + assign to the property directly. + + Args: + field: A FieldDescriptor for this field. + cls: The class we're constructing. + """ + # TODO(robinson): Remove duplication with similar method + # for non-repeated scalars. + proto_field_name = field.name + property_name = _PropertyName(proto_field_name) + + def getter(self): + field_value = self._fields.get(field) + if field_value is None: + # Construct a new object to represent this field. + field_value = field._default_constructor(self) + + # Atomically check if another thread has preempted us and, if not, swap + # in the new object we just created. If someone has preempted us, we + # take that object and discard ours. + # WARNING: We are relying on setdefault() being atomic. This is true + # in CPython but we haven't investigated others. This warning appears + # in several other locations in this file. + field_value = self._fields.setdefault(field, field_value) + return field_value + getter.__module__ = None + getter.__doc__ = 'Getter for %s.' % proto_field_name + + # We define a setter just so we can throw an exception with a more + # helpful error message. + def setter(self, new_value): + raise AttributeError('Assignment not allowed to composite field ' + '"%s" in protocol message object.' % proto_field_name) + + # Add a property to encapsulate the getter. + doc = 'Magic attribute generated for "%s" proto field.' % proto_field_name + setattr(cls, property_name, _FieldProperty(field, getter, setter, doc=doc)) + + +def _AddPropertiesForExtensions(descriptor, cls): + """Adds properties for all fields in this protocol message type.""" + extensions = descriptor.extensions_by_name + for extension_name, extension_field in extensions.items(): + constant_name = extension_name.upper() + '_FIELD_NUMBER' + setattr(cls, constant_name, extension_field.number) + + # TODO(amauryfa): Migrate all users of these attributes to functions like + # pool.FindExtensionByNumber(descriptor). + if descriptor.file is not None: + # TODO(amauryfa): Use cls.MESSAGE_FACTORY.pool when available. 
+ pool = descriptor.file.pool + cls._extensions_by_number = pool._extensions_by_number[descriptor] + cls._extensions_by_name = pool._extensions_by_name[descriptor] + +def _AddStaticMethods(cls): + # TODO(robinson): This probably needs to be thread-safe(?) + def RegisterExtension(extension_handle): + extension_handle.containing_type = cls.DESCRIPTOR + # TODO(amauryfa): Use cls.MESSAGE_FACTORY.pool when available. + # pylint: disable=protected-access + cls.DESCRIPTOR.file.pool._AddExtensionDescriptor(extension_handle) + _AttachFieldHelpers(cls, extension_handle) + cls.RegisterExtension = staticmethod(RegisterExtension) + + def FromString(s): + message = cls() + message.MergeFromString(s) + return message + cls.FromString = staticmethod(FromString) + + +def _IsPresent(item): + """Given a (FieldDescriptor, value) tuple from _fields, return true if the + value should be included in the list returned by ListFields().""" + + if item[0].label == _FieldDescriptor.LABEL_REPEATED: + return bool(item[1]) + elif item[0].cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE: + return item[1]._is_present_in_parent + else: + return True + + +def _AddListFieldsMethod(message_descriptor, cls): + """Helper for _AddMessageMethods().""" + + def ListFields(self): + all_fields = [item for item in self._fields.items() if _IsPresent(item)] + all_fields.sort(key = lambda item: item[0].number) + return all_fields + + cls.ListFields = ListFields + +_PROTO3_ERROR_TEMPLATE = \ + ('Protocol message %s has no non-repeated submessage field "%s" ' + 'nor marked as optional') +_PROTO2_ERROR_TEMPLATE = 'Protocol message %s has no non-repeated field "%s"' + +def _AddHasFieldMethod(message_descriptor, cls): + """Helper for _AddMessageMethods().""" + + is_proto3 = (message_descriptor.syntax == "proto3") + error_msg = _PROTO3_ERROR_TEMPLATE if is_proto3 else _PROTO2_ERROR_TEMPLATE + + hassable_fields = {} + for field in message_descriptor.fields: + if field.label == _FieldDescriptor.LABEL_REPEATED: + continue + # For proto3, only submessages and fields inside a oneof have presence. + if (is_proto3 and field.cpp_type != _FieldDescriptor.CPPTYPE_MESSAGE and + not field.containing_oneof): + continue + hassable_fields[field.name] = field + + # Has methods are supported for oneof descriptors. + for oneof in message_descriptor.oneofs: + hassable_fields[oneof.name] = oneof + + def HasField(self, field_name): + try: + field = hassable_fields[field_name] + except KeyError: + raise ValueError(error_msg % (message_descriptor.full_name, field_name)) + + if isinstance(field, descriptor_mod.OneofDescriptor): + try: + return HasField(self, self._oneofs[field].name) + except KeyError: + return False + else: + if field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE: + value = self._fields.get(field) + return value is not None and value._is_present_in_parent + else: + return field in self._fields + + cls.HasField = HasField + + +def _AddClearFieldMethod(message_descriptor, cls): + """Helper for _AddMessageMethods().""" + def ClearField(self, field_name): + try: + field = message_descriptor.fields_by_name[field_name] + except KeyError: + try: + field = message_descriptor.oneofs_by_name[field_name] + if field in self._oneofs: + field = self._oneofs[field] + else: + return + except KeyError: + raise ValueError('Protocol message %s has no "%s" field.' % + (message_descriptor.name, field_name)) + + if field in self._fields: + # To match the C++ implementation, we need to invalidate iterators + # for map fields when ClearField() happens. 
+ if hasattr(self._fields[field], 'InvalidateIterators'): + self._fields[field].InvalidateIterators() + + # Note: If the field is a sub-message, its listener will still point + # at us. That's fine, because the worst than can happen is that it + # will call _Modified() and invalidate our byte size. Big deal. + del self._fields[field] + + if self._oneofs.get(field.containing_oneof, None) is field: + del self._oneofs[field.containing_oneof] + + # Always call _Modified() -- even if nothing was changed, this is + # a mutating method, and thus calling it should cause the field to become + # present in the parent message. + self._Modified() + + cls.ClearField = ClearField + + +def _AddClearExtensionMethod(cls): + """Helper for _AddMessageMethods().""" + def ClearExtension(self, extension_handle): + extension_dict._VerifyExtensionHandle(self, extension_handle) + + # Similar to ClearField(), above. + if extension_handle in self._fields: + del self._fields[extension_handle] + self._Modified() + cls.ClearExtension = ClearExtension + + +def _AddHasExtensionMethod(cls): + """Helper for _AddMessageMethods().""" + def HasExtension(self, extension_handle): + extension_dict._VerifyExtensionHandle(self, extension_handle) + if extension_handle.label == _FieldDescriptor.LABEL_REPEATED: + raise KeyError('"%s" is repeated.' % extension_handle.full_name) + + if extension_handle.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE: + value = self._fields.get(extension_handle) + return value is not None and value._is_present_in_parent + else: + return extension_handle in self._fields + cls.HasExtension = HasExtension + +def _InternalUnpackAny(msg): + """Unpacks Any message and returns the unpacked message. + + This internal method is different from public Any Unpack method which takes + the target message as argument. _InternalUnpackAny method does not have + target message type and need to find the message type in descriptor pool. + + Args: + msg: An Any message to be unpacked. + + Returns: + The unpacked message. + """ + # TODO(amauryfa): Don't use the factory of generated messages. + # To make Any work with custom factories, use the message factory of the + # parent message. + # pylint: disable=g-import-not-at-top + from google.protobuf import symbol_database + factory = symbol_database.Default() + + type_url = msg.type_url + + if not type_url: + return None + + # TODO(haberman): For now we just strip the hostname. Better logic will be + # required. + type_name = type_url.split('/')[-1] + descriptor = factory.pool.FindMessageTypeByName(type_name) + + if descriptor is None: + return None + + message_class = factory.GetPrototype(descriptor) + message = message_class() + + message.ParseFromString(msg.value) + return message + + +def _AddEqualsMethod(message_descriptor, cls): + """Helper for _AddMessageMethods().""" + def __eq__(self, other): + if (not isinstance(other, message_mod.Message) or + other.DESCRIPTOR != self.DESCRIPTOR): + return False + + if self is other: + return True + + if self.DESCRIPTOR.full_name == _AnyFullTypeName: + any_a = _InternalUnpackAny(self) + any_b = _InternalUnpackAny(other) + if any_a and any_b: + return any_a == any_b + + if not self.ListFields() == other.ListFields(): + return False + + # TODO(jieluo): Fix UnknownFieldSet to consider MessageSet extensions, + # then use it for the comparison. 
+ unknown_fields = list(self._unknown_fields) + unknown_fields.sort() + other_unknown_fields = list(other._unknown_fields) + other_unknown_fields.sort() + return unknown_fields == other_unknown_fields + + cls.__eq__ = __eq__ + + +def _AddStrMethod(message_descriptor, cls): + """Helper for _AddMessageMethods().""" + def __str__(self): + return text_format.MessageToString(self) + cls.__str__ = __str__ + + +def _AddReprMethod(message_descriptor, cls): + """Helper for _AddMessageMethods().""" + def __repr__(self): + return text_format.MessageToString(self) + cls.__repr__ = __repr__ + + +def _AddUnicodeMethod(unused_message_descriptor, cls): + """Helper for _AddMessageMethods().""" + + def __unicode__(self): + return text_format.MessageToString(self, as_utf8=True).decode('utf-8') + cls.__unicode__ = __unicode__ + + +def _BytesForNonRepeatedElement(value, field_number, field_type): + """Returns the number of bytes needed to serialize a non-repeated element. + The returned byte count includes space for tag information and any + other additional space associated with serializing value. + + Args: + value: Value we're serializing. + field_number: Field number of this value. (Since the field number + is stored as part of a varint-encoded tag, this has an impact + on the total bytes required to serialize the value). + field_type: The type of the field. One of the TYPE_* constants + within FieldDescriptor. + """ + try: + fn = type_checkers.TYPE_TO_BYTE_SIZE_FN[field_type] + return fn(field_number, value) + except KeyError: + raise message_mod.EncodeError('Unrecognized field type: %d' % field_type) + + +def _AddByteSizeMethod(message_descriptor, cls): + """Helper for _AddMessageMethods().""" + + def ByteSize(self): + if not self._cached_byte_size_dirty: + return self._cached_byte_size + + size = 0 + descriptor = self.DESCRIPTOR + if descriptor.GetOptions().map_entry: + # Fields of map entry should always be serialized. + size = descriptor.fields_by_name['key']._sizer(self.key) + size += descriptor.fields_by_name['value']._sizer(self.value) + else: + for field_descriptor, field_value in self.ListFields(): + size += field_descriptor._sizer(field_value) + for tag_bytes, value_bytes in self._unknown_fields: + size += len(tag_bytes) + len(value_bytes) + + self._cached_byte_size = size + self._cached_byte_size_dirty = False + self._listener_for_children.dirty = False + return size + + cls.ByteSize = ByteSize + + +def _AddSerializeToStringMethod(message_descriptor, cls): + """Helper for _AddMessageMethods().""" + + def SerializeToString(self, **kwargs): + # Check if the message has all of its required fields set. 
+ if not self.IsInitialized(): + raise message_mod.EncodeError( + 'Message %s is missing required fields: %s' % ( + self.DESCRIPTOR.full_name, ','.join(self.FindInitializationErrors()))) + return self.SerializePartialToString(**kwargs) + cls.SerializeToString = SerializeToString + + +def _AddSerializePartialToStringMethod(message_descriptor, cls): + """Helper for _AddMessageMethods().""" + + def SerializePartialToString(self, **kwargs): + out = BytesIO() + self._InternalSerialize(out.write, **kwargs) + return out.getvalue() + cls.SerializePartialToString = SerializePartialToString + + def InternalSerialize(self, write_bytes, deterministic=None): + if deterministic is None: + deterministic = ( + api_implementation.IsPythonDefaultSerializationDeterministic()) + else: + deterministic = bool(deterministic) + + descriptor = self.DESCRIPTOR + if descriptor.GetOptions().map_entry: + # Fields of map entry should always be serialized. + descriptor.fields_by_name['key']._encoder( + write_bytes, self.key, deterministic) + descriptor.fields_by_name['value']._encoder( + write_bytes, self.value, deterministic) + else: + for field_descriptor, field_value in self.ListFields(): + field_descriptor._encoder(write_bytes, field_value, deterministic) + for tag_bytes, value_bytes in self._unknown_fields: + write_bytes(tag_bytes) + write_bytes(value_bytes) + cls._InternalSerialize = InternalSerialize + + +def _AddMergeFromStringMethod(message_descriptor, cls): + """Helper for _AddMessageMethods().""" + def MergeFromString(self, serialized): + serialized = memoryview(serialized) + length = len(serialized) + try: + if self._InternalParse(serialized, 0, length) != length: + # The only reason _InternalParse would return early is if it + # encountered an end-group tag. + raise message_mod.DecodeError('Unexpected end-group tag.') + except (IndexError, TypeError): + # Now ord(buf[p:p+1]) == ord('') gets TypeError. + raise message_mod.DecodeError('Truncated message.') + except struct.error as e: + raise message_mod.DecodeError(e) + return length # Return this for legacy reasons. + cls.MergeFromString = MergeFromString + + local_ReadTag = decoder.ReadTag + local_SkipField = decoder.SkipField + decoders_by_tag = cls._decoders_by_tag + + def InternalParse(self, buffer, pos, end): + """Create a message from serialized bytes. + + Args: + self: Message, instance of the proto message object. + buffer: memoryview of the serialized data. + pos: int, position to start in the serialized data. + end: int, end position of the serialized data. + + Returns: + Message object. + """ + # Guard against internal misuse, since this function is called internally + # quite extensively, and its easy to accidentally pass bytes. 
+ assert isinstance(buffer, memoryview) + self._Modified() + field_dict = self._fields + # pylint: disable=protected-access + unknown_field_set = self._unknown_field_set + while pos != end: + (tag_bytes, new_pos) = local_ReadTag(buffer, pos) + field_decoder, field_desc = decoders_by_tag.get(tag_bytes, (None, None)) + if field_decoder is None: + if not self._unknown_fields: # pylint: disable=protected-access + self._unknown_fields = [] # pylint: disable=protected-access + if unknown_field_set is None: + # pylint: disable=protected-access + self._unknown_field_set = containers.UnknownFieldSet() + # pylint: disable=protected-access + unknown_field_set = self._unknown_field_set + # pylint: disable=protected-access + (tag, _) = decoder._DecodeVarint(tag_bytes, 0) + field_number, wire_type = wire_format.UnpackTag(tag) + if field_number == 0: + raise message_mod.DecodeError('Field number 0 is illegal.') + # TODO(jieluo): remove old_pos. + old_pos = new_pos + (data, new_pos) = decoder._DecodeUnknownField( + buffer, new_pos, wire_type) # pylint: disable=protected-access + if new_pos == -1: + return pos + # pylint: disable=protected-access + unknown_field_set._add(field_number, wire_type, data) + # TODO(jieluo): remove _unknown_fields. + new_pos = local_SkipField(buffer, old_pos, end, tag_bytes) + if new_pos == -1: + return pos + self._unknown_fields.append( + (tag_bytes, buffer[old_pos:new_pos].tobytes())) + pos = new_pos + else: + pos = field_decoder(buffer, new_pos, end, self, field_dict) + if field_desc: + self._UpdateOneofState(field_desc) + return pos + cls._InternalParse = InternalParse + + +def _AddIsInitializedMethod(message_descriptor, cls): + """Adds the IsInitialized and FindInitializationError methods to the + protocol message class.""" + + required_fields = [field for field in message_descriptor.fields + if field.label == _FieldDescriptor.LABEL_REQUIRED] + + def IsInitialized(self, errors=None): + """Checks if all required fields of a message are set. + + Args: + errors: A list which, if provided, will be populated with the field + paths of all missing required fields. + + Returns: + True iff the specified message has all required fields set. + """ + + # Performance is critical so we avoid HasField() and ListFields(). + + for field in required_fields: + if (field not in self._fields or + (field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE and + not self._fields[field]._is_present_in_parent)): + if errors is not None: + errors.extend(self.FindInitializationErrors()) + return False + + for field, value in list(self._fields.items()): # dict can change size! + if field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE: + if field.label == _FieldDescriptor.LABEL_REPEATED: + if (field.message_type.has_options and + field.message_type.GetOptions().map_entry): + continue + for element in value: + if not element.IsInitialized(): + if errors is not None: + errors.extend(self.FindInitializationErrors()) + return False + elif value._is_present_in_parent and not value.IsInitialized(): + if errors is not None: + errors.extend(self.FindInitializationErrors()) + return False + + return True + + cls.IsInitialized = IsInitialized + + def FindInitializationErrors(self): + """Finds required fields which are not initialized. + + Returns: + A list of strings. Each string is a path to an uninitialized field from + the top-level message, e.g. "foo.bar[5].baz". 
+ """ + + errors = [] # simplify things + + for field in required_fields: + if not self.HasField(field.name): + errors.append(field.name) + + for field, value in self.ListFields(): + if field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE: + if field.is_extension: + name = '(%s)' % field.full_name + else: + name = field.name + + if _IsMapField(field): + if _IsMessageMapField(field): + for key in value: + element = value[key] + prefix = '%s[%s].' % (name, key) + sub_errors = element.FindInitializationErrors() + errors += [prefix + error for error in sub_errors] + else: + # ScalarMaps can't have any initialization errors. + pass + elif field.label == _FieldDescriptor.LABEL_REPEATED: + for i in range(len(value)): + element = value[i] + prefix = '%s[%d].' % (name, i) + sub_errors = element.FindInitializationErrors() + errors += [prefix + error for error in sub_errors] + else: + prefix = name + '.' + sub_errors = value.FindInitializationErrors() + errors += [prefix + error for error in sub_errors] + + return errors + + cls.FindInitializationErrors = FindInitializationErrors + + +def _FullyQualifiedClassName(klass): + module = klass.__module__ + name = getattr(klass, '__qualname__', klass.__name__) + if module in (None, 'builtins', '__builtin__'): + return name + return module + '.' + name + + +def _AddMergeFromMethod(cls): + LABEL_REPEATED = _FieldDescriptor.LABEL_REPEATED + CPPTYPE_MESSAGE = _FieldDescriptor.CPPTYPE_MESSAGE + + def MergeFrom(self, msg): + if not isinstance(msg, cls): + raise TypeError( + 'Parameter to MergeFrom() must be instance of same class: ' + 'expected %s got %s.' % (_FullyQualifiedClassName(cls), + _FullyQualifiedClassName(msg.__class__))) + + assert msg is not self + self._Modified() + + fields = self._fields + + for field, value in msg._fields.items(): + if field.label == LABEL_REPEATED: + field_value = fields.get(field) + if field_value is None: + # Construct a new object to represent this field. + field_value = field._default_constructor(self) + fields[field] = field_value + field_value.MergeFrom(value) + elif field.cpp_type == CPPTYPE_MESSAGE: + if value._is_present_in_parent: + field_value = fields.get(field) + if field_value is None: + # Construct a new object to represent this field. + field_value = field._default_constructor(self) + fields[field] = field_value + field_value.MergeFrom(value) + else: + self._fields[field] = value + if field.containing_oneof: + self._UpdateOneofState(field) + + if msg._unknown_fields: + if not self._unknown_fields: + self._unknown_fields = [] + self._unknown_fields.extend(msg._unknown_fields) + # pylint: disable=protected-access + if self._unknown_field_set is None: + self._unknown_field_set = containers.UnknownFieldSet() + self._unknown_field_set._extend(msg._unknown_field_set) + + cls.MergeFrom = MergeFrom + + +def _AddWhichOneofMethod(message_descriptor, cls): + def WhichOneof(self, oneof_name): + """Returns the name of the currently set field inside a oneof, or None.""" + try: + field = message_descriptor.oneofs_by_name[oneof_name] + except KeyError: + raise ValueError( + 'Protocol message has no oneof "%s" field.' % oneof_name) + + nested_field = self._oneofs.get(field, None) + if nested_field is not None and self.HasField(nested_field.name): + return nested_field.name + else: + return None + + cls.WhichOneof = WhichOneof + + +def _Clear(self): + # Clear fields. 
+ self._fields = {} + self._unknown_fields = () + # pylint: disable=protected-access + if self._unknown_field_set is not None: + self._unknown_field_set._clear() + self._unknown_field_set = None + + self._oneofs = {} + self._Modified() + + +def _UnknownFields(self): + if self._unknown_field_set is None: # pylint: disable=protected-access + # pylint: disable=protected-access + self._unknown_field_set = containers.UnknownFieldSet() + return self._unknown_field_set # pylint: disable=protected-access + + +def _DiscardUnknownFields(self): + self._unknown_fields = [] + self._unknown_field_set = None # pylint: disable=protected-access + for field, value in self.ListFields(): + if field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE: + if _IsMapField(field): + if _IsMessageMapField(field): + for key in value: + value[key].DiscardUnknownFields() + elif field.label == _FieldDescriptor.LABEL_REPEATED: + for sub_message in value: + sub_message.DiscardUnknownFields() + else: + value.DiscardUnknownFields() + + +def _SetListener(self, listener): + if listener is None: + self._listener = message_listener_mod.NullMessageListener() + else: + self._listener = listener + + +def _AddMessageMethods(message_descriptor, cls): + """Adds implementations of all Message methods to cls.""" + _AddListFieldsMethod(message_descriptor, cls) + _AddHasFieldMethod(message_descriptor, cls) + _AddClearFieldMethod(message_descriptor, cls) + if message_descriptor.is_extendable: + _AddClearExtensionMethod(cls) + _AddHasExtensionMethod(cls) + _AddEqualsMethod(message_descriptor, cls) + _AddStrMethod(message_descriptor, cls) + _AddReprMethod(message_descriptor, cls) + _AddUnicodeMethod(message_descriptor, cls) + _AddByteSizeMethod(message_descriptor, cls) + _AddSerializeToStringMethod(message_descriptor, cls) + _AddSerializePartialToStringMethod(message_descriptor, cls) + _AddMergeFromStringMethod(message_descriptor, cls) + _AddIsInitializedMethod(message_descriptor, cls) + _AddMergeFromMethod(cls) + _AddWhichOneofMethod(message_descriptor, cls) + # Adds methods which do not depend on cls. + cls.Clear = _Clear + cls.UnknownFields = _UnknownFields + cls.DiscardUnknownFields = _DiscardUnknownFields + cls._SetListener = _SetListener + + +def _AddPrivateHelperMethods(message_descriptor, cls): + """Adds implementation of private helper methods to cls.""" + + def Modified(self): + """Sets the _cached_byte_size_dirty bit to true, + and propagates this to our listener iff this was a state change. + """ + + # Note: Some callers check _cached_byte_size_dirty before calling + # _Modified() as an extra optimization. So, if this method is ever + # changed such that it does stuff even when _cached_byte_size_dirty is + # already true, the callers need to be updated. + if not self._cached_byte_size_dirty: + self._cached_byte_size_dirty = True + self._listener_for_children.dirty = True + self._is_present_in_parent = True + self._listener.Modified() + + def _UpdateOneofState(self, field): + """Sets field as the active field in its containing oneof. + + Will also delete currently active field in the oneof, if it is different + from the argument. Does not mark the message as modified. 
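+ + For example, when a different field of the same oneof was previously set, + that field is removed from the message's internal field dict here.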
+ """ + other_field = self._oneofs.setdefault(field.containing_oneof, field) + if other_field is not field: + del self._fields[other_field] + self._oneofs[field.containing_oneof] = field + + cls._Modified = Modified + cls.SetInParent = Modified + cls._UpdateOneofState = _UpdateOneofState + + +class _Listener(object): + + """MessageListener implementation that a parent message registers with its + child message. + + In order to support semantics like: + + foo.bar.baz.qux = 23 + assert foo.HasField('bar') + + ...child objects must have back references to their parents. + This helper class is at the heart of this support. + """ + + def __init__(self, parent_message): + """Args: + parent_message: The message whose _Modified() method we should call when + we receive Modified() messages. + """ + # This listener establishes a back reference from a child (contained) object + # to its parent (containing) object. We make this a weak reference to avoid + # creating cyclic garbage when the client finishes with the 'parent' object + # in the tree. + if isinstance(parent_message, weakref.ProxyType): + self._parent_message_weakref = parent_message + else: + self._parent_message_weakref = weakref.proxy(parent_message) + + # As an optimization, we also indicate directly on the listener whether + # or not the parent message is dirty. This way we can avoid traversing + # up the tree in the common case. + self.dirty = False + + def Modified(self): + if self.dirty: + return + try: + # Propagate the signal to our parents iff this is the first field set. + self._parent_message_weakref._Modified() + except ReferenceError: + # We can get here if a client has kept a reference to a child object, + # and is now setting a field on it, but the child's parent has been + # garbage-collected. This is not an error. + pass + + +class _OneofListener(_Listener): + """Special listener implementation for setting composite oneof fields.""" + + def __init__(self, parent_message, field): + """Args: + parent_message: The message whose _Modified() method we should call when + we receive Modified() messages. + field: The descriptor of the field being set in the parent message. + """ + super(_OneofListener, self).__init__(parent_message) + self._field = field + + def Modified(self): + """Also updates the state of the containing oneof in the parent message.""" + try: + self._parent_message_weakref._UpdateOneofState(self._field) + super(_OneofListener, self).Modified() + except ReferenceError: + pass diff --git a/openpype/hosts/nuke/vendor/google/protobuf/internal/type_checkers.py b/openpype/hosts/nuke/vendor/google/protobuf/internal/type_checkers.py new file mode 100644 index 0000000000..a53e71fe8e --- /dev/null +++ b/openpype/hosts/nuke/vendor/google/protobuf/internal/type_checkers.py @@ -0,0 +1,435 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. +# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. 
nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +"""Provides type checking routines. + +This module defines type checking utilities in the forms of dictionaries: + +VALUE_CHECKERS: A dictionary of field types and a value validation object. +TYPE_TO_BYTE_SIZE_FN: A dictionary with field types and a size computing + function. +TYPE_TO_SERIALIZE_METHOD: A dictionary with field types and serialization + function. +FIELD_TYPE_TO_WIRE_TYPE: A dictionary with field typed and their + corresponding wire types. +TYPE_TO_DESERIALIZE_METHOD: A dictionary with field types and deserialization + function. +""" + +__author__ = 'robinson@google.com (Will Robinson)' + +import ctypes +import numbers + +from google.protobuf.internal import decoder +from google.protobuf.internal import encoder +from google.protobuf.internal import wire_format +from google.protobuf import descriptor + +_FieldDescriptor = descriptor.FieldDescriptor + + +def TruncateToFourByteFloat(original): + return ctypes.c_float(original).value + + +def ToShortestFloat(original): + """Returns the shortest float that has same value in wire.""" + # All 4 byte floats have between 6 and 9 significant digits, so we + # start with 6 as the lower bound. + # It has to be iterative because use '.9g' directly can not get rid + # of the noises for most values. For example if set a float_field=0.9 + # use '.9g' will print 0.899999976. + precision = 6 + rounded = float('{0:.{1}g}'.format(original, precision)) + while TruncateToFourByteFloat(rounded) != original: + precision += 1 + rounded = float('{0:.{1}g}'.format(original, precision)) + return rounded + + +def SupportsOpenEnums(field_descriptor): + return field_descriptor.containing_type.syntax == 'proto3' + + +def GetTypeChecker(field): + """Returns a type checker for a message field of the specified types. + + Args: + field: FieldDescriptor object for this field. + + Returns: + An instance of TypeChecker which can be used to verify the types + of values assigned to a field of the specified type. + """ + if (field.cpp_type == _FieldDescriptor.CPPTYPE_STRING and + field.type == _FieldDescriptor.TYPE_STRING): + return UnicodeValueChecker() + if field.cpp_type == _FieldDescriptor.CPPTYPE_ENUM: + if SupportsOpenEnums(field): + # When open enums are supported, any int32 can be assigned. + return _VALUE_CHECKERS[_FieldDescriptor.CPPTYPE_INT32] + else: + return EnumValueChecker(field.enum_type) + return _VALUE_CHECKERS[field.cpp_type] + + +# None of the typecheckers below make any attempt to guard against people +# subclassing builtin types and doing weird things. 
We're not trying to +# protect against malicious clients here, just people accidentally shooting +# themselves in the foot in obvious ways. +class TypeChecker(object): + + """Type checker used to catch type errors as early as possible + when the client is setting scalar fields in protocol messages. + """ + + def __init__(self, *acceptable_types): + self._acceptable_types = acceptable_types + + def CheckValue(self, proposed_value): + """Type check the provided value and return it. + + The returned value might have been normalized to another type. + """ + if not isinstance(proposed_value, self._acceptable_types): + message = ('%.1024r has type %s, but expected one of: %s' % + (proposed_value, type(proposed_value), self._acceptable_types)) + raise TypeError(message) + return proposed_value + + +class TypeCheckerWithDefault(TypeChecker): + + def __init__(self, default_value, *acceptable_types): + TypeChecker.__init__(self, *acceptable_types) + self._default_value = default_value + + def DefaultValue(self): + return self._default_value + + +class BoolValueChecker(object): + """Type checker used for bool fields.""" + + def CheckValue(self, proposed_value): + if not hasattr(proposed_value, '__index__') or ( + type(proposed_value).__module__ == 'numpy' and + type(proposed_value).__name__ == 'ndarray'): + message = ('%.1024r has type %s, but expected one of: %s' % + (proposed_value, type(proposed_value), (bool, int))) + raise TypeError(message) + return bool(proposed_value) + + def DefaultValue(self): + return False + + +# IntValueChecker and its subclasses perform integer type-checks +# and bounds-checks. +class IntValueChecker(object): + + """Checker used for integer fields. Performs type-check and range check.""" + + def CheckValue(self, proposed_value): + if not hasattr(proposed_value, '__index__') or ( + type(proposed_value).__module__ == 'numpy' and + type(proposed_value).__name__ == 'ndarray'): + message = ('%.1024r has type %s, but expected one of: %s' % + (proposed_value, type(proposed_value), (int,))) + raise TypeError(message) + + if not self._MIN <= int(proposed_value) <= self._MAX: + raise ValueError('Value out of range: %d' % proposed_value) + # We force all values to int to make alternate implementations where the + # distinction is more significant (e.g. the C++ implementation) simpler. + proposed_value = int(proposed_value) + return proposed_value + + def DefaultValue(self): + return 0 + + +class EnumValueChecker(object): + + """Checker used for enum fields. Performs type-check and range check.""" + + def __init__(self, enum_type): + self._enum_type = enum_type + + def CheckValue(self, proposed_value): + if not isinstance(proposed_value, numbers.Integral): + message = ('%.1024r has type %s, but expected one of: %s' % + (proposed_value, type(proposed_value), (int,))) + raise TypeError(message) + if int(proposed_value) not in self._enum_type.values_by_number: + raise ValueError('Unknown enum value: %d' % proposed_value) + return proposed_value + + def DefaultValue(self): + return self._enum_type.values[0].number + + +class UnicodeValueChecker(object): + + """Checker used for string fields. + + Always returns a unicode value, even if the input is of type str. + """ + + def CheckValue(self, proposed_value): + if not isinstance(proposed_value, (bytes, str)): + message = ('%.1024r has type %s, but expected one of: %s' % + (proposed_value, type(proposed_value), (bytes, str))) + raise TypeError(message) + + # If the value is of type 'bytes' make sure that it is valid UTF-8 data. 
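+ # For example, b'\xc3\xa9' decodes cleanly, while b'\xff' is not valid + # UTF-8 and raises ValueError below.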
+ if isinstance(proposed_value, bytes): + try: + proposed_value = proposed_value.decode('utf-8') + except UnicodeDecodeError: + raise ValueError('%.1024r has type bytes, but isn\'t valid UTF-8 ' + 'encoding. Non-UTF-8 strings must be converted to ' + 'unicode objects before being added.' % + (proposed_value)) + else: + try: + proposed_value.encode('utf8') + except UnicodeEncodeError: + raise ValueError('%.1024r isn\'t a valid unicode string and ' + 'can\'t be encoded in UTF-8.'% + (proposed_value)) + + return proposed_value + + def DefaultValue(self): + return u"" + + +class Int32ValueChecker(IntValueChecker): + # We're sure to use ints instead of longs here since comparison may be more + # efficient. + _MIN = -2147483648 + _MAX = 2147483647 + + +class Uint32ValueChecker(IntValueChecker): + _MIN = 0 + _MAX = (1 << 32) - 1 + + +class Int64ValueChecker(IntValueChecker): + _MIN = -(1 << 63) + _MAX = (1 << 63) - 1 + + +class Uint64ValueChecker(IntValueChecker): + _MIN = 0 + _MAX = (1 << 64) - 1 + + +# The max 4 bytes float is about 3.4028234663852886e+38 +_FLOAT_MAX = float.fromhex('0x1.fffffep+127') +_FLOAT_MIN = -_FLOAT_MAX +_INF = float('inf') +_NEG_INF = float('-inf') + + +class DoubleValueChecker(object): + """Checker used for double fields. + + Performs type-check and range check. + """ + + def CheckValue(self, proposed_value): + """Check and convert proposed_value to float.""" + if (not hasattr(proposed_value, '__float__') and + not hasattr(proposed_value, '__index__')) or ( + type(proposed_value).__module__ == 'numpy' and + type(proposed_value).__name__ == 'ndarray'): + message = ('%.1024r has type %s, but expected one of: int, float' % + (proposed_value, type(proposed_value))) + raise TypeError(message) + return float(proposed_value) + + def DefaultValue(self): + return 0.0 + + +class FloatValueChecker(DoubleValueChecker): + """Checker used for float fields. + + Performs type-check and range check. + + Values exceeding a 32-bit float will be converted to inf/-inf. + """ + + def CheckValue(self, proposed_value): + """Check and convert proposed_value to float.""" + converted_value = super().CheckValue(proposed_value) + # This inf rounding matches the C++ proto SafeDoubleToFloat logic. + if converted_value > _FLOAT_MAX: + return _INF + if converted_value < _FLOAT_MIN: + return _NEG_INF + + return TruncateToFourByteFloat(converted_value) + +# Type-checkers for all scalar CPPTYPEs. +_VALUE_CHECKERS = { + _FieldDescriptor.CPPTYPE_INT32: Int32ValueChecker(), + _FieldDescriptor.CPPTYPE_INT64: Int64ValueChecker(), + _FieldDescriptor.CPPTYPE_UINT32: Uint32ValueChecker(), + _FieldDescriptor.CPPTYPE_UINT64: Uint64ValueChecker(), + _FieldDescriptor.CPPTYPE_DOUBLE: DoubleValueChecker(), + _FieldDescriptor.CPPTYPE_FLOAT: FloatValueChecker(), + _FieldDescriptor.CPPTYPE_BOOL: BoolValueChecker(), + _FieldDescriptor.CPPTYPE_STRING: TypeCheckerWithDefault(b'', bytes), +} + + +# Map from field type to a function F, such that F(field_num, value) +# gives the total byte size for a value of the given type. This +# byte size includes tag information and any other additional space +# associated with serializing "value". 
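+# For example, TYPE_TO_BYTE_SIZE_FN[_FieldDescriptor.TYPE_INT32](1, 150) is 3: +# one byte for the tag of field 1 plus two varint bytes for the value 150.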
+TYPE_TO_BYTE_SIZE_FN = { + _FieldDescriptor.TYPE_DOUBLE: wire_format.DoubleByteSize, + _FieldDescriptor.TYPE_FLOAT: wire_format.FloatByteSize, + _FieldDescriptor.TYPE_INT64: wire_format.Int64ByteSize, + _FieldDescriptor.TYPE_UINT64: wire_format.UInt64ByteSize, + _FieldDescriptor.TYPE_INT32: wire_format.Int32ByteSize, + _FieldDescriptor.TYPE_FIXED64: wire_format.Fixed64ByteSize, + _FieldDescriptor.TYPE_FIXED32: wire_format.Fixed32ByteSize, + _FieldDescriptor.TYPE_BOOL: wire_format.BoolByteSize, + _FieldDescriptor.TYPE_STRING: wire_format.StringByteSize, + _FieldDescriptor.TYPE_GROUP: wire_format.GroupByteSize, + _FieldDescriptor.TYPE_MESSAGE: wire_format.MessageByteSize, + _FieldDescriptor.TYPE_BYTES: wire_format.BytesByteSize, + _FieldDescriptor.TYPE_UINT32: wire_format.UInt32ByteSize, + _FieldDescriptor.TYPE_ENUM: wire_format.EnumByteSize, + _FieldDescriptor.TYPE_SFIXED32: wire_format.SFixed32ByteSize, + _FieldDescriptor.TYPE_SFIXED64: wire_format.SFixed64ByteSize, + _FieldDescriptor.TYPE_SINT32: wire_format.SInt32ByteSize, + _FieldDescriptor.TYPE_SINT64: wire_format.SInt64ByteSize + } + + +# Maps from field types to encoder constructors. +TYPE_TO_ENCODER = { + _FieldDescriptor.TYPE_DOUBLE: encoder.DoubleEncoder, + _FieldDescriptor.TYPE_FLOAT: encoder.FloatEncoder, + _FieldDescriptor.TYPE_INT64: encoder.Int64Encoder, + _FieldDescriptor.TYPE_UINT64: encoder.UInt64Encoder, + _FieldDescriptor.TYPE_INT32: encoder.Int32Encoder, + _FieldDescriptor.TYPE_FIXED64: encoder.Fixed64Encoder, + _FieldDescriptor.TYPE_FIXED32: encoder.Fixed32Encoder, + _FieldDescriptor.TYPE_BOOL: encoder.BoolEncoder, + _FieldDescriptor.TYPE_STRING: encoder.StringEncoder, + _FieldDescriptor.TYPE_GROUP: encoder.GroupEncoder, + _FieldDescriptor.TYPE_MESSAGE: encoder.MessageEncoder, + _FieldDescriptor.TYPE_BYTES: encoder.BytesEncoder, + _FieldDescriptor.TYPE_UINT32: encoder.UInt32Encoder, + _FieldDescriptor.TYPE_ENUM: encoder.EnumEncoder, + _FieldDescriptor.TYPE_SFIXED32: encoder.SFixed32Encoder, + _FieldDescriptor.TYPE_SFIXED64: encoder.SFixed64Encoder, + _FieldDescriptor.TYPE_SINT32: encoder.SInt32Encoder, + _FieldDescriptor.TYPE_SINT64: encoder.SInt64Encoder, + } + + +# Maps from field types to sizer constructors. +TYPE_TO_SIZER = { + _FieldDescriptor.TYPE_DOUBLE: encoder.DoubleSizer, + _FieldDescriptor.TYPE_FLOAT: encoder.FloatSizer, + _FieldDescriptor.TYPE_INT64: encoder.Int64Sizer, + _FieldDescriptor.TYPE_UINT64: encoder.UInt64Sizer, + _FieldDescriptor.TYPE_INT32: encoder.Int32Sizer, + _FieldDescriptor.TYPE_FIXED64: encoder.Fixed64Sizer, + _FieldDescriptor.TYPE_FIXED32: encoder.Fixed32Sizer, + _FieldDescriptor.TYPE_BOOL: encoder.BoolSizer, + _FieldDescriptor.TYPE_STRING: encoder.StringSizer, + _FieldDescriptor.TYPE_GROUP: encoder.GroupSizer, + _FieldDescriptor.TYPE_MESSAGE: encoder.MessageSizer, + _FieldDescriptor.TYPE_BYTES: encoder.BytesSizer, + _FieldDescriptor.TYPE_UINT32: encoder.UInt32Sizer, + _FieldDescriptor.TYPE_ENUM: encoder.EnumSizer, + _FieldDescriptor.TYPE_SFIXED32: encoder.SFixed32Sizer, + _FieldDescriptor.TYPE_SFIXED64: encoder.SFixed64Sizer, + _FieldDescriptor.TYPE_SINT32: encoder.SInt32Sizer, + _FieldDescriptor.TYPE_SINT64: encoder.SInt64Sizer, + } + + +# Maps from field type to a decoder constructor. 
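+# The keys mirror TYPE_TO_ENCODER and TYPE_TO_SIZER above; each value is a +# constructor from the decoder module.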
+TYPE_TO_DECODER = { + _FieldDescriptor.TYPE_DOUBLE: decoder.DoubleDecoder, + _FieldDescriptor.TYPE_FLOAT: decoder.FloatDecoder, + _FieldDescriptor.TYPE_INT64: decoder.Int64Decoder, + _FieldDescriptor.TYPE_UINT64: decoder.UInt64Decoder, + _FieldDescriptor.TYPE_INT32: decoder.Int32Decoder, + _FieldDescriptor.TYPE_FIXED64: decoder.Fixed64Decoder, + _FieldDescriptor.TYPE_FIXED32: decoder.Fixed32Decoder, + _FieldDescriptor.TYPE_BOOL: decoder.BoolDecoder, + _FieldDescriptor.TYPE_STRING: decoder.StringDecoder, + _FieldDescriptor.TYPE_GROUP: decoder.GroupDecoder, + _FieldDescriptor.TYPE_MESSAGE: decoder.MessageDecoder, + _FieldDescriptor.TYPE_BYTES: decoder.BytesDecoder, + _FieldDescriptor.TYPE_UINT32: decoder.UInt32Decoder, + _FieldDescriptor.TYPE_ENUM: decoder.EnumDecoder, + _FieldDescriptor.TYPE_SFIXED32: decoder.SFixed32Decoder, + _FieldDescriptor.TYPE_SFIXED64: decoder.SFixed64Decoder, + _FieldDescriptor.TYPE_SINT32: decoder.SInt32Decoder, + _FieldDescriptor.TYPE_SINT64: decoder.SInt64Decoder, + } + +# Maps from field type to expected wiretype. +FIELD_TYPE_TO_WIRE_TYPE = { + _FieldDescriptor.TYPE_DOUBLE: wire_format.WIRETYPE_FIXED64, + _FieldDescriptor.TYPE_FLOAT: wire_format.WIRETYPE_FIXED32, + _FieldDescriptor.TYPE_INT64: wire_format.WIRETYPE_VARINT, + _FieldDescriptor.TYPE_UINT64: wire_format.WIRETYPE_VARINT, + _FieldDescriptor.TYPE_INT32: wire_format.WIRETYPE_VARINT, + _FieldDescriptor.TYPE_FIXED64: wire_format.WIRETYPE_FIXED64, + _FieldDescriptor.TYPE_FIXED32: wire_format.WIRETYPE_FIXED32, + _FieldDescriptor.TYPE_BOOL: wire_format.WIRETYPE_VARINT, + _FieldDescriptor.TYPE_STRING: + wire_format.WIRETYPE_LENGTH_DELIMITED, + _FieldDescriptor.TYPE_GROUP: wire_format.WIRETYPE_START_GROUP, + _FieldDescriptor.TYPE_MESSAGE: + wire_format.WIRETYPE_LENGTH_DELIMITED, + _FieldDescriptor.TYPE_BYTES: + wire_format.WIRETYPE_LENGTH_DELIMITED, + _FieldDescriptor.TYPE_UINT32: wire_format.WIRETYPE_VARINT, + _FieldDescriptor.TYPE_ENUM: wire_format.WIRETYPE_VARINT, + _FieldDescriptor.TYPE_SFIXED32: wire_format.WIRETYPE_FIXED32, + _FieldDescriptor.TYPE_SFIXED64: wire_format.WIRETYPE_FIXED64, + _FieldDescriptor.TYPE_SINT32: wire_format.WIRETYPE_VARINT, + _FieldDescriptor.TYPE_SINT64: wire_format.WIRETYPE_VARINT, + } diff --git a/openpype/hosts/nuke/vendor/google/protobuf/internal/well_known_types.py b/openpype/hosts/nuke/vendor/google/protobuf/internal/well_known_types.py new file mode 100644 index 0000000000..b581ab750a --- /dev/null +++ b/openpype/hosts/nuke/vendor/google/protobuf/internal/well_known_types.py @@ -0,0 +1,878 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. +# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. 
+# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +"""Contains well known classes. + +This files defines well known classes which need extra maintenance including: + - Any + - Duration + - FieldMask + - Struct + - Timestamp +""" + +__author__ = 'jieluo@google.com (Jie Luo)' + +import calendar +import collections.abc +import datetime + +from google.protobuf.descriptor import FieldDescriptor + +_TIMESTAMPFOMAT = '%Y-%m-%dT%H:%M:%S' +_NANOS_PER_SECOND = 1000000000 +_NANOS_PER_MILLISECOND = 1000000 +_NANOS_PER_MICROSECOND = 1000 +_MILLIS_PER_SECOND = 1000 +_MICROS_PER_SECOND = 1000000 +_SECONDS_PER_DAY = 24 * 3600 +_DURATION_SECONDS_MAX = 315576000000 + + +class Any(object): + """Class for Any Message type.""" + + __slots__ = () + + def Pack(self, msg, type_url_prefix='type.googleapis.com/', + deterministic=None): + """Packs the specified message into current Any message.""" + if len(type_url_prefix) < 1 or type_url_prefix[-1] != '/': + self.type_url = '%s/%s' % (type_url_prefix, msg.DESCRIPTOR.full_name) + else: + self.type_url = '%s%s' % (type_url_prefix, msg.DESCRIPTOR.full_name) + self.value = msg.SerializeToString(deterministic=deterministic) + + def Unpack(self, msg): + """Unpacks the current Any message into specified message.""" + descriptor = msg.DESCRIPTOR + if not self.Is(descriptor): + return False + msg.ParseFromString(self.value) + return True + + def TypeName(self): + """Returns the protobuf type name of the inner message.""" + # Only last part is to be used: b/25630112 + return self.type_url.split('/')[-1] + + def Is(self, descriptor): + """Checks if this Any represents the given protobuf type.""" + return '/' in self.type_url and self.TypeName() == descriptor.full_name + + +_EPOCH_DATETIME_NAIVE = datetime.datetime.utcfromtimestamp(0) +_EPOCH_DATETIME_AWARE = datetime.datetime.fromtimestamp( + 0, tz=datetime.timezone.utc) + + +class Timestamp(object): + """Class for Timestamp message type.""" + + __slots__ = () + + def ToJsonString(self): + """Converts Timestamp to RFC 3339 date string format. + + Returns: + A string converted from timestamp. The string is always Z-normalized + and uses 3, 6 or 9 fractional digits as required to represent the + exact time. Example of the return format: '1972-01-01T10:00:20.021Z' + """ + nanos = self.nanos % _NANOS_PER_SECOND + total_sec = self.seconds + (self.nanos - nanos) // _NANOS_PER_SECOND + seconds = total_sec % _SECONDS_PER_DAY + days = (total_sec - seconds) // _SECONDS_PER_DAY + dt = datetime.datetime(1970, 1, 1) + datetime.timedelta(days, seconds) + + result = dt.isoformat() + if (nanos % 1e9) == 0: + # If there are 0 fractional digits, the fractional + # point '.' should be omitted when serializing. + return result + 'Z' + if (nanos % 1e6) == 0: + # Serialize 3 fractional digits. 
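+ # (e.g. nanos=21000000 is serialized as '.021Z')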
+ return result + '.%03dZ' % (nanos / 1e6) + if (nanos % 1e3) == 0: + # Serialize 6 fractional digits. + return result + '.%06dZ' % (nanos / 1e3) + # Serialize 9 fractional digits. + return result + '.%09dZ' % nanos + + def FromJsonString(self, value): + """Parse a RFC 3339 date string format to Timestamp. + + Args: + value: A date string. Any fractional digits (or none) and any offset are + accepted as long as they fit into nano-seconds precision. + Example of accepted format: '1972-01-01T10:00:20.021-05:00' + + Raises: + ValueError: On parsing problems. + """ + if not isinstance(value, str): + raise ValueError('Timestamp JSON value not a string: {!r}'.format(value)) + timezone_offset = value.find('Z') + if timezone_offset == -1: + timezone_offset = value.find('+') + if timezone_offset == -1: + timezone_offset = value.rfind('-') + if timezone_offset == -1: + raise ValueError( + 'Failed to parse timestamp: missing valid timezone offset.') + time_value = value[0:timezone_offset] + # Parse datetime and nanos. + point_position = time_value.find('.') + if point_position == -1: + second_value = time_value + nano_value = '' + else: + second_value = time_value[:point_position] + nano_value = time_value[point_position + 1:] + if 't' in second_value: + raise ValueError( + 'time data \'{0}\' does not match format \'%Y-%m-%dT%H:%M:%S\', ' + 'lowercase \'t\' is not accepted'.format(second_value)) + date_object = datetime.datetime.strptime(second_value, _TIMESTAMPFOMAT) + td = date_object - datetime.datetime(1970, 1, 1) + seconds = td.seconds + td.days * _SECONDS_PER_DAY + if len(nano_value) > 9: + raise ValueError( + 'Failed to parse Timestamp: nanos {0} more than ' + '9 fractional digits.'.format(nano_value)) + if nano_value: + nanos = round(float('0.' + nano_value) * 1e9) + else: + nanos = 0 + # Parse timezone offsets. 
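+ # An offset such as '-05:00' adds 5 * 3600 to the parsed seconds so the + # stored value is always relative to UTC.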
+ if value[timezone_offset] == 'Z': + if len(value) != timezone_offset + 1: + raise ValueError('Failed to parse timestamp: invalid trailing' + ' data {0}.'.format(value)) + else: + timezone = value[timezone_offset:] + pos = timezone.find(':') + if pos == -1: + raise ValueError( + 'Invalid timezone offset value: {0}.'.format(timezone)) + if timezone[0] == '+': + seconds -= (int(timezone[1:pos])*60+int(timezone[pos+1:]))*60 + else: + seconds += (int(timezone[1:pos])*60+int(timezone[pos+1:]))*60 + # Set seconds and nanos + self.seconds = int(seconds) + self.nanos = int(nanos) + + def GetCurrentTime(self): + """Get the current UTC into Timestamp.""" + self.FromDatetime(datetime.datetime.utcnow()) + + def ToNanoseconds(self): + """Converts Timestamp to nanoseconds since epoch.""" + return self.seconds * _NANOS_PER_SECOND + self.nanos + + def ToMicroseconds(self): + """Converts Timestamp to microseconds since epoch.""" + return (self.seconds * _MICROS_PER_SECOND + + self.nanos // _NANOS_PER_MICROSECOND) + + def ToMilliseconds(self): + """Converts Timestamp to milliseconds since epoch.""" + return (self.seconds * _MILLIS_PER_SECOND + + self.nanos // _NANOS_PER_MILLISECOND) + + def ToSeconds(self): + """Converts Timestamp to seconds since epoch.""" + return self.seconds + + def FromNanoseconds(self, nanos): + """Converts nanoseconds since epoch to Timestamp.""" + self.seconds = nanos // _NANOS_PER_SECOND + self.nanos = nanos % _NANOS_PER_SECOND + + def FromMicroseconds(self, micros): + """Converts microseconds since epoch to Timestamp.""" + self.seconds = micros // _MICROS_PER_SECOND + self.nanos = (micros % _MICROS_PER_SECOND) * _NANOS_PER_MICROSECOND + + def FromMilliseconds(self, millis): + """Converts milliseconds since epoch to Timestamp.""" + self.seconds = millis // _MILLIS_PER_SECOND + self.nanos = (millis % _MILLIS_PER_SECOND) * _NANOS_PER_MILLISECOND + + def FromSeconds(self, seconds): + """Converts seconds since epoch to Timestamp.""" + self.seconds = seconds + self.nanos = 0 + + def ToDatetime(self, tzinfo=None): + """Converts Timestamp to a datetime. + + Args: + tzinfo: A datetime.tzinfo subclass; defaults to None. + + Returns: + If tzinfo is None, returns a timezone-naive UTC datetime (with no timezone + information, i.e. not aware that it's UTC). + + Otherwise, returns a timezone-aware datetime in the input timezone. + """ + delta = datetime.timedelta( + seconds=self.seconds, + microseconds=_RoundTowardZero(self.nanos, _NANOS_PER_MICROSECOND)) + if tzinfo is None: + return _EPOCH_DATETIME_NAIVE + delta + else: + return _EPOCH_DATETIME_AWARE.astimezone(tzinfo) + delta + + def FromDatetime(self, dt): + """Converts datetime to Timestamp. + + Args: + dt: A datetime. If it's timezone-naive, it's assumed to be in UTC. + """ + # Using this guide: http://wiki.python.org/moin/WorkingWithTime + # And this conversion guide: http://docs.python.org/library/time.html + + # Turn the date parameter into a tuple (struct_time) that can then be + # manipulated into a long value of seconds. During the conversion from + # struct_time to long, the source date in UTC, and so it follows that the + # correct transformation is calendar.timegm() + self.seconds = calendar.timegm(dt.utctimetuple()) + self.nanos = dt.microsecond * _NANOS_PER_MICROSECOND + + +class Duration(object): + """Class for Duration message type.""" + + __slots__ = () + + def ToJsonString(self): + """Converts Duration to string format. + + Returns: + A string converted from self. 
The string format will contains + 3, 6, or 9 fractional digits depending on the precision required to + represent the exact Duration value. For example: "1s", "1.010s", + "1.000000100s", "-3.100s" + """ + _CheckDurationValid(self.seconds, self.nanos) + if self.seconds < 0 or self.nanos < 0: + result = '-' + seconds = - self.seconds + int((0 - self.nanos) // 1e9) + nanos = (0 - self.nanos) % 1e9 + else: + result = '' + seconds = self.seconds + int(self.nanos // 1e9) + nanos = self.nanos % 1e9 + result += '%d' % seconds + if (nanos % 1e9) == 0: + # If there are 0 fractional digits, the fractional + # point '.' should be omitted when serializing. + return result + 's' + if (nanos % 1e6) == 0: + # Serialize 3 fractional digits. + return result + '.%03ds' % (nanos / 1e6) + if (nanos % 1e3) == 0: + # Serialize 6 fractional digits. + return result + '.%06ds' % (nanos / 1e3) + # Serialize 9 fractional digits. + return result + '.%09ds' % nanos + + def FromJsonString(self, value): + """Converts a string to Duration. + + Args: + value: A string to be converted. The string must end with 's'. Any + fractional digits (or none) are accepted as long as they fit into + precision. For example: "1s", "1.01s", "1.0000001s", "-3.100s + + Raises: + ValueError: On parsing problems. + """ + if not isinstance(value, str): + raise ValueError('Duration JSON value not a string: {!r}'.format(value)) + if len(value) < 1 or value[-1] != 's': + raise ValueError( + 'Duration must end with letter "s": {0}.'.format(value)) + try: + pos = value.find('.') + if pos == -1: + seconds = int(value[:-1]) + nanos = 0 + else: + seconds = int(value[:pos]) + if value[0] == '-': + nanos = int(round(float('-0{0}'.format(value[pos: -1])) *1e9)) + else: + nanos = int(round(float('0{0}'.format(value[pos: -1])) *1e9)) + _CheckDurationValid(seconds, nanos) + self.seconds = seconds + self.nanos = nanos + except ValueError as e: + raise ValueError( + 'Couldn\'t parse duration: {0} : {1}.'.format(value, e)) + + def ToNanoseconds(self): + """Converts a Duration to nanoseconds.""" + return self.seconds * _NANOS_PER_SECOND + self.nanos + + def ToMicroseconds(self): + """Converts a Duration to microseconds.""" + micros = _RoundTowardZero(self.nanos, _NANOS_PER_MICROSECOND) + return self.seconds * _MICROS_PER_SECOND + micros + + def ToMilliseconds(self): + """Converts a Duration to milliseconds.""" + millis = _RoundTowardZero(self.nanos, _NANOS_PER_MILLISECOND) + return self.seconds * _MILLIS_PER_SECOND + millis + + def ToSeconds(self): + """Converts a Duration to seconds.""" + return self.seconds + + def FromNanoseconds(self, nanos): + """Converts nanoseconds to Duration.""" + self._NormalizeDuration(nanos // _NANOS_PER_SECOND, + nanos % _NANOS_PER_SECOND) + + def FromMicroseconds(self, micros): + """Converts microseconds to Duration.""" + self._NormalizeDuration( + micros // _MICROS_PER_SECOND, + (micros % _MICROS_PER_SECOND) * _NANOS_PER_MICROSECOND) + + def FromMilliseconds(self, millis): + """Converts milliseconds to Duration.""" + self._NormalizeDuration( + millis // _MILLIS_PER_SECOND, + (millis % _MILLIS_PER_SECOND) * _NANOS_PER_MILLISECOND) + + def FromSeconds(self, seconds): + """Converts seconds to Duration.""" + self.seconds = seconds + self.nanos = 0 + + def ToTimedelta(self): + """Converts Duration to timedelta.""" + return datetime.timedelta( + seconds=self.seconds, microseconds=_RoundTowardZero( + self.nanos, _NANOS_PER_MICROSECOND)) + + def FromTimedelta(self, td): + """Converts timedelta to Duration.""" + 
self._NormalizeDuration(td.seconds + td.days * _SECONDS_PER_DAY, + td.microseconds * _NANOS_PER_MICROSECOND) + + def _NormalizeDuration(self, seconds, nanos): + """Set Duration by seconds and nanos.""" + # Force nanos to be negative if the duration is negative. + if seconds < 0 and nanos > 0: + seconds += 1 + nanos -= _NANOS_PER_SECOND + self.seconds = seconds + self.nanos = nanos + + +def _CheckDurationValid(seconds, nanos): + if seconds < -_DURATION_SECONDS_MAX or seconds > _DURATION_SECONDS_MAX: + raise ValueError( + 'Duration is not valid: Seconds {0} must be in range ' + '[-315576000000, 315576000000].'.format(seconds)) + if nanos <= -_NANOS_PER_SECOND or nanos >= _NANOS_PER_SECOND: + raise ValueError( + 'Duration is not valid: Nanos {0} must be in range ' + '[-999999999, 999999999].'.format(nanos)) + if (nanos < 0 and seconds > 0) or (nanos > 0 and seconds < 0): + raise ValueError( + 'Duration is not valid: Sign mismatch.') + + +def _RoundTowardZero(value, divider): + """Truncates the remainder part after division.""" + # For some languages, the sign of the remainder is implementation + # dependent if any of the operands is negative. Here we enforce + # "rounded toward zero" semantics. For example, for (-5) / 2 an + # implementation may give -3 as the result with the remainder being + # 1. This function ensures we always return -2 (closer to zero). + result = value // divider + remainder = value % divider + if result < 0 and remainder > 0: + return result + 1 + else: + return result + + +class FieldMask(object): + """Class for FieldMask message type.""" + + __slots__ = () + + def ToJsonString(self): + """Converts FieldMask to string according to proto3 JSON spec.""" + camelcase_paths = [] + for path in self.paths: + camelcase_paths.append(_SnakeCaseToCamelCase(path)) + return ','.join(camelcase_paths) + + def FromJsonString(self, value): + """Converts string to FieldMask according to proto3 JSON spec.""" + if not isinstance(value, str): + raise ValueError('FieldMask JSON value not a string: {!r}'.format(value)) + self.Clear() + if value: + for path in value.split(','): + self.paths.append(_CamelCaseToSnakeCase(path)) + + def IsValidForDescriptor(self, message_descriptor): + """Checks whether the FieldMask is valid for Message Descriptor.""" + for path in self.paths: + if not _IsValidPath(message_descriptor, path): + return False + return True + + def AllFieldsFromDescriptor(self, message_descriptor): + """Gets all direct fields of Message Descriptor to FieldMask.""" + self.Clear() + for field in message_descriptor.fields: + self.paths.append(field.name) + + def CanonicalFormFromMask(self, mask): + """Converts a FieldMask to the canonical form. + + Removes paths that are covered by another path. For example, + "foo.bar" is covered by "foo" and will be removed if "foo" + is also in the FieldMask. Then sorts all paths in alphabetical order. + + Args: + mask: The original FieldMask to be converted. 
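+ + For example, a mask with paths ["foo", "foo.bar"] canonicalizes to ["foo"].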
+ """ + tree = _FieldMaskTree(mask) + tree.ToFieldMask(self) + + def Union(self, mask1, mask2): + """Merges mask1 and mask2 into this FieldMask.""" + _CheckFieldMaskMessage(mask1) + _CheckFieldMaskMessage(mask2) + tree = _FieldMaskTree(mask1) + tree.MergeFromFieldMask(mask2) + tree.ToFieldMask(self) + + def Intersect(self, mask1, mask2): + """Intersects mask1 and mask2 into this FieldMask.""" + _CheckFieldMaskMessage(mask1) + _CheckFieldMaskMessage(mask2) + tree = _FieldMaskTree(mask1) + intersection = _FieldMaskTree() + for path in mask2.paths: + tree.IntersectPath(path, intersection) + intersection.ToFieldMask(self) + + def MergeMessage( + self, source, destination, + replace_message_field=False, replace_repeated_field=False): + """Merges fields specified in FieldMask from source to destination. + + Args: + source: Source message. + destination: The destination message to be merged into. + replace_message_field: Replace message field if True. Merge message + field if False. + replace_repeated_field: Replace repeated field if True. Append + elements of repeated field if False. + """ + tree = _FieldMaskTree(self) + tree.MergeMessage( + source, destination, replace_message_field, replace_repeated_field) + + +def _IsValidPath(message_descriptor, path): + """Checks whether the path is valid for Message Descriptor.""" + parts = path.split('.') + last = parts.pop() + for name in parts: + field = message_descriptor.fields_by_name.get(name) + if (field is None or + field.label == FieldDescriptor.LABEL_REPEATED or + field.type != FieldDescriptor.TYPE_MESSAGE): + return False + message_descriptor = field.message_type + return last in message_descriptor.fields_by_name + + +def _CheckFieldMaskMessage(message): + """Raises ValueError if message is not a FieldMask.""" + message_descriptor = message.DESCRIPTOR + if (message_descriptor.name != 'FieldMask' or + message_descriptor.file.name != 'google/protobuf/field_mask.proto'): + raise ValueError('Message {0} is not a FieldMask.'.format( + message_descriptor.full_name)) + + +def _SnakeCaseToCamelCase(path_name): + """Converts a path name from snake_case to camelCase.""" + result = [] + after_underscore = False + for c in path_name: + if c.isupper(): + raise ValueError( + 'Fail to print FieldMask to Json string: Path name ' + '{0} must not contain uppercase letters.'.format(path_name)) + if after_underscore: + if c.islower(): + result.append(c.upper()) + after_underscore = False + else: + raise ValueError( + 'Fail to print FieldMask to Json string: The ' + 'character after a "_" must be a lowercase letter ' + 'in path name {0}.'.format(path_name)) + elif c == '_': + after_underscore = True + else: + result += c + + if after_underscore: + raise ValueError('Fail to print FieldMask to Json string: Trailing "_" ' + 'in path name {0}.'.format(path_name)) + return ''.join(result) + + +def _CamelCaseToSnakeCase(path_name): + """Converts a field name from camelCase to snake_case.""" + result = [] + for c in path_name: + if c == '_': + raise ValueError('Fail to parse FieldMask: Path name ' + '{0} must not contain "_"s.'.format(path_name)) + if c.isupper(): + result += '_' + result += c.lower() + else: + result += c + return ''.join(result) + + +class _FieldMaskTree(object): + """Represents a FieldMask in a tree structure. + + For example, given a FieldMask "foo.bar,foo.baz,bar.baz", + the FieldMaskTree will be: + [_root] -+- foo -+- bar + | | + | +- baz + | + +- bar --- baz + In the tree, each leaf node represents a field path. 
+ """ + + __slots__ = ('_root',) + + def __init__(self, field_mask=None): + """Initializes the tree by FieldMask.""" + self._root = {} + if field_mask: + self.MergeFromFieldMask(field_mask) + + def MergeFromFieldMask(self, field_mask): + """Merges a FieldMask to the tree.""" + for path in field_mask.paths: + self.AddPath(path) + + def AddPath(self, path): + """Adds a field path into the tree. + + If the field path to add is a sub-path of an existing field path + in the tree (i.e., a leaf node), it means the tree already matches + the given path so nothing will be added to the tree. If the path + matches an existing non-leaf node in the tree, that non-leaf node + will be turned into a leaf node with all its children removed because + the path matches all the node's children. Otherwise, a new path will + be added. + + Args: + path: The field path to add. + """ + node = self._root + for name in path.split('.'): + if name not in node: + node[name] = {} + elif not node[name]: + # Pre-existing empty node implies we already have this entire tree. + return + node = node[name] + # Remove any sub-trees we might have had. + node.clear() + + def ToFieldMask(self, field_mask): + """Converts the tree to a FieldMask.""" + field_mask.Clear() + _AddFieldPaths(self._root, '', field_mask) + + def IntersectPath(self, path, intersection): + """Calculates the intersection part of a field path with this tree. + + Args: + path: The field path to calculates. + intersection: The out tree to record the intersection part. + """ + node = self._root + for name in path.split('.'): + if name not in node: + return + elif not node[name]: + intersection.AddPath(path) + return + node = node[name] + intersection.AddLeafNodes(path, node) + + def AddLeafNodes(self, prefix, node): + """Adds leaf nodes begin with prefix to this tree.""" + if not node: + self.AddPath(prefix) + for name in node: + child_path = prefix + '.' + name + self.AddLeafNodes(child_path, node[name]) + + def MergeMessage( + self, source, destination, + replace_message, replace_repeated): + """Merge all fields specified by this tree from source to destination.""" + _MergeMessage( + self._root, source, destination, replace_message, replace_repeated) + + +def _StrConvert(value): + """Converts value to str if it is not.""" + # This file is imported by c extension and some methods like ClearField + # requires string for the field name. py2/py3 has different text + # type and may use unicode. + if not isinstance(value, str): + return value.encode('utf-8') + return value + + +def _MergeMessage( + node, source, destination, replace_message, replace_repeated): + """Merge all fields specified by a sub-tree from source to destination.""" + source_descriptor = source.DESCRIPTOR + for name in node: + child = node[name] + field = source_descriptor.fields_by_name[name] + if field is None: + raise ValueError('Error: Can\'t find field {0} in message {1}.'.format( + name, source_descriptor.full_name)) + if child: + # Sub-paths are only allowed for singular message fields. 
+ if (field.label == FieldDescriptor.LABEL_REPEATED or + field.cpp_type != FieldDescriptor.CPPTYPE_MESSAGE): + raise ValueError('Error: Field {0} in message {1} is not a singular ' + 'message field and cannot have sub-fields.'.format( + name, source_descriptor.full_name)) + if source.HasField(name): + _MergeMessage( + child, getattr(source, name), getattr(destination, name), + replace_message, replace_repeated) + continue + if field.label == FieldDescriptor.LABEL_REPEATED: + if replace_repeated: + destination.ClearField(_StrConvert(name)) + repeated_source = getattr(source, name) + repeated_destination = getattr(destination, name) + repeated_destination.MergeFrom(repeated_source) + else: + if field.cpp_type == FieldDescriptor.CPPTYPE_MESSAGE: + if replace_message: + destination.ClearField(_StrConvert(name)) + if source.HasField(name): + getattr(destination, name).MergeFrom(getattr(source, name)) + else: + setattr(destination, name, getattr(source, name)) + + +def _AddFieldPaths(node, prefix, field_mask): + """Adds the field paths descended from node to field_mask.""" + if not node and prefix: + field_mask.paths.append(prefix) + return + for name in sorted(node): + if prefix: + child_path = prefix + '.' + name + else: + child_path = name + _AddFieldPaths(node[name], child_path, field_mask) + + +def _SetStructValue(struct_value, value): + if value is None: + struct_value.null_value = 0 + elif isinstance(value, bool): + # Note: this check must come before the number check because in Python + # True and False are also considered numbers. + struct_value.bool_value = value + elif isinstance(value, str): + struct_value.string_value = value + elif isinstance(value, (int, float)): + struct_value.number_value = value + elif isinstance(value, (dict, Struct)): + struct_value.struct_value.Clear() + struct_value.struct_value.update(value) + elif isinstance(value, (list, ListValue)): + struct_value.list_value.Clear() + struct_value.list_value.extend(value) + else: + raise ValueError('Unexpected type') + + +def _GetStructValue(struct_value): + which = struct_value.WhichOneof('kind') + if which == 'struct_value': + return struct_value.struct_value + elif which == 'null_value': + return None + elif which == 'number_value': + return struct_value.number_value + elif which == 'string_value': + return struct_value.string_value + elif which == 'bool_value': + return struct_value.bool_value + elif which == 'list_value': + return struct_value.list_value + elif which is None: + raise ValueError('Value not set') + + +class Struct(object): + """Class for Struct message type.""" + + __slots__ = () + + def __getitem__(self, key): + return _GetStructValue(self.fields[key]) + + def __contains__(self, item): + return item in self.fields + + def __setitem__(self, key, value): + _SetStructValue(self.fields[key], value) + + def __delitem__(self, key): + del self.fields[key] + + def __len__(self): + return len(self.fields) + + def __iter__(self): + return iter(self.fields) + + def keys(self): # pylint: disable=invalid-name + return self.fields.keys() + + def values(self): # pylint: disable=invalid-name + return [self[key] for key in self] + + def items(self): # pylint: disable=invalid-name + return [(key, self[key]) for key in self] + + def get_or_create_list(self, key): + """Returns a list for this key, creating if it didn't exist already.""" + if not self.fields[key].HasField('list_value'): + # Clear will mark list_value modified which will indeed create a list. 
+ self.fields[key].list_value.Clear() + return self.fields[key].list_value + + def get_or_create_struct(self, key): + """Returns a struct for this key, creating if it didn't exist already.""" + if not self.fields[key].HasField('struct_value'): + # Clear will mark struct_value modified which will indeed create a struct. + self.fields[key].struct_value.Clear() + return self.fields[key].struct_value + + def update(self, dictionary): # pylint: disable=invalid-name + for key, value in dictionary.items(): + _SetStructValue(self.fields[key], value) + +collections.abc.MutableMapping.register(Struct) + + +class ListValue(object): + """Class for ListValue message type.""" + + __slots__ = () + + def __len__(self): + return len(self.values) + + def append(self, value): + _SetStructValue(self.values.add(), value) + + def extend(self, elem_seq): + for value in elem_seq: + self.append(value) + + def __getitem__(self, index): + """Retrieves item by the specified index.""" + return _GetStructValue(self.values.__getitem__(index)) + + def __setitem__(self, index, value): + _SetStructValue(self.values.__getitem__(index), value) + + def __delitem__(self, key): + del self.values[key] + + def items(self): + for i in range(len(self)): + yield self[i] + + def add_struct(self): + """Appends and returns a struct value as the next value in the list.""" + struct_value = self.values.add().struct_value + # Clear will mark struct_value modified which will indeed create a struct. + struct_value.Clear() + return struct_value + + def add_list(self): + """Appends and returns a list value as the next value in the list.""" + list_value = self.values.add().list_value + # Clear will mark list_value modified which will indeed create a list. + list_value.Clear() + return list_value + +collections.abc.MutableSequence.register(ListValue) + + +WKTBASES = { + 'google.protobuf.Any': Any, + 'google.protobuf.Duration': Duration, + 'google.protobuf.FieldMask': FieldMask, + 'google.protobuf.ListValue': ListValue, + 'google.protobuf.Struct': Struct, + 'google.protobuf.Timestamp': Timestamp, +} diff --git a/openpype/hosts/nuke/vendor/google/protobuf/internal/wire_format.py b/openpype/hosts/nuke/vendor/google/protobuf/internal/wire_format.py new file mode 100644 index 0000000000..883f525585 --- /dev/null +++ b/openpype/hosts/nuke/vendor/google/protobuf/internal/wire_format.py @@ -0,0 +1,268 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. +# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +"""Constants and static functions to support protocol buffer wire format.""" + +__author__ = 'robinson@google.com (Will Robinson)' + +import struct +from google.protobuf import descriptor +from google.protobuf import message + + +TAG_TYPE_BITS = 3 # Number of bits used to hold type info in a proto tag. +TAG_TYPE_MASK = (1 << TAG_TYPE_BITS) - 1 # 0x7 + +# These numbers identify the wire type of a protocol buffer value. +# We use the least-significant TAG_TYPE_BITS bits of the varint-encoded +# tag-and-type to store one of these WIRETYPE_* constants. +# These values must match WireType enum in google/protobuf/wire_format.h. +WIRETYPE_VARINT = 0 +WIRETYPE_FIXED64 = 1 +WIRETYPE_LENGTH_DELIMITED = 2 +WIRETYPE_START_GROUP = 3 +WIRETYPE_END_GROUP = 4 +WIRETYPE_FIXED32 = 5 +_WIRETYPE_MAX = 5 + + +# Bounds for various integer types. +INT32_MAX = int((1 << 31) - 1) +INT32_MIN = int(-(1 << 31)) +UINT32_MAX = (1 << 32) - 1 + +INT64_MAX = (1 << 63) - 1 +INT64_MIN = -(1 << 63) +UINT64_MAX = (1 << 64) - 1 + +# "struct" format strings that will encode/decode the specified formats. +FORMAT_UINT32_LITTLE_ENDIAN = '<I' +FORMAT_UINT64_LITTLE_ENDIAN = '<Q' +FORMAT_FLOAT_LITTLE_ENDIAN = '<f' +FORMAT_DOUBLE_LITTLE_ENDIAN = '<d' + + +# We'll have to provide alternate implementations of AppendLittleEndian*() on +# any architectures where these checks fail. +if struct.calcsize(FORMAT_UINT32_LITTLE_ENDIAN) != 4: + raise AssertionError('Format "I" is not a 32-bit number.') +if struct.calcsize(FORMAT_UINT64_LITTLE_ENDIAN) != 8: + raise AssertionError('Format "Q" is not a 64-bit number.') + + +def PackTag(field_number, wire_type): + """Returns an unsigned 32-bit integer that encodes the field number and + wire type information in standard protocol message wire format. + + Args: + field_number: Expected to be an integer in the range [1, 1 << 29) + wire_type: One of the WIRETYPE_* constants. + """ + if not 0 <= wire_type <= _WIRETYPE_MAX: + raise message.EncodeError('Unknown wire type: %d' % wire_type) + return (field_number << TAG_TYPE_BITS) | wire_type + + +def UnpackTag(tag): + """The inverse of PackTag(). Given an unsigned 32-bit number, + returns a (field_number, wire_type) tuple. + """ + return (tag >> TAG_TYPE_BITS), (tag & TAG_TYPE_MASK) + + +def ZigZagEncode(value): + """ZigZag Transform: Encodes signed integers so that they can be + effectively used with varint encoding. See wire_format.h for + more details. + """ + if value >= 0: + return value << 1 + return (value << 1) ^ (~0) + + +def ZigZagDecode(value): + """Inverse of ZigZagEncode().""" + if not value & 0x1: + return value >> 1 + return (value >> 1) ^ (~0) + + + +# The *ByteSize() functions below return the number of bytes required to +# serialize "field number + type" information and then serialize the value. + + +def Int32ByteSize(field_number, int32): + return Int64ByteSize(field_number, int32) + + +def Int32ByteSizeNoTag(int32): + return _VarUInt64ByteSizeNoTag(0xffffffffffffffff & int32) + + +def Int64ByteSize(field_number, int64): + # Have to convert to uint before calling UInt64ByteSize().
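+ # Masking with 0xffffffffffffffff reinterprets a negative int64 as its + # unsigned two's-complement value, e.g. -1 becomes 2**64 - 1 (ten varint bytes).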
+ return UInt64ByteSize(field_number, 0xffffffffffffffff & int64) + + +def UInt32ByteSize(field_number, uint32): + return UInt64ByteSize(field_number, uint32) + + +def UInt64ByteSize(field_number, uint64): + return TagByteSize(field_number) + _VarUInt64ByteSizeNoTag(uint64) + + +def SInt32ByteSize(field_number, int32): + return UInt32ByteSize(field_number, ZigZagEncode(int32)) + + +def SInt64ByteSize(field_number, int64): + return UInt64ByteSize(field_number, ZigZagEncode(int64)) + + +def Fixed32ByteSize(field_number, fixed32): + return TagByteSize(field_number) + 4 + + +def Fixed64ByteSize(field_number, fixed64): + return TagByteSize(field_number) + 8 + + +def SFixed32ByteSize(field_number, sfixed32): + return TagByteSize(field_number) + 4 + + +def SFixed64ByteSize(field_number, sfixed64): + return TagByteSize(field_number) + 8 + + +def FloatByteSize(field_number, flt): + return TagByteSize(field_number) + 4 + + +def DoubleByteSize(field_number, double): + return TagByteSize(field_number) + 8 + + +def BoolByteSize(field_number, b): + return TagByteSize(field_number) + 1 + + +def EnumByteSize(field_number, enum): + return UInt32ByteSize(field_number, enum) + + +def StringByteSize(field_number, string): + return BytesByteSize(field_number, string.encode('utf-8')) + + +def BytesByteSize(field_number, b): + return (TagByteSize(field_number) + + _VarUInt64ByteSizeNoTag(len(b)) + + len(b)) + + +def GroupByteSize(field_number, message): + return (2 * TagByteSize(field_number) # START and END group. + + message.ByteSize()) + + +def MessageByteSize(field_number, message): + return (TagByteSize(field_number) + + _VarUInt64ByteSizeNoTag(message.ByteSize()) + + message.ByteSize()) + + +def MessageSetItemByteSize(field_number, msg): + # First compute the sizes of the tags. + # There are 2 tags for the beginning and ending of the repeated group, that + # is field number 1, one with field number 2 (type_id) and one with field + # number 3 (message). + total_size = (2 * TagByteSize(1) + TagByteSize(2) + TagByteSize(3)) + + # Add the number of bytes for type_id. + total_size += _VarUInt64ByteSizeNoTag(field_number) + + message_size = msg.ByteSize() + + # The number of bytes for encoding the length of the message. + total_size += _VarUInt64ByteSizeNoTag(message_size) + + # The size of the message. + total_size += message_size + return total_size + + +def TagByteSize(field_number): + """Returns the bytes required to serialize a tag with this field number.""" + # Just pass in type 0, since the type won't affect the tag+type size. + return _VarUInt64ByteSizeNoTag(PackTag(field_number, 0)) + + +# Private helper function for the *ByteSize() functions above. + +def _VarUInt64ByteSizeNoTag(uint64): + """Returns the number of bytes required to serialize a single varint + using boundary value comparisons. (unrolled loop optimization -WPierce) + uint64 must be unsigned. 
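+ + For example, 127 fits in one byte, 300 needs two and 2**32 needs five.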
+ """ + if uint64 <= 0x7f: return 1 + if uint64 <= 0x3fff: return 2 + if uint64 <= 0x1fffff: return 3 + if uint64 <= 0xfffffff: return 4 + if uint64 <= 0x7ffffffff: return 5 + if uint64 <= 0x3ffffffffff: return 6 + if uint64 <= 0x1ffffffffffff: return 7 + if uint64 <= 0xffffffffffffff: return 8 + if uint64 <= 0x7fffffffffffffff: return 9 + if uint64 > UINT64_MAX: + raise message.EncodeError('Value out of range: %d' % uint64) + return 10 + + +NON_PACKABLE_TYPES = ( + descriptor.FieldDescriptor.TYPE_STRING, + descriptor.FieldDescriptor.TYPE_GROUP, + descriptor.FieldDescriptor.TYPE_MESSAGE, + descriptor.FieldDescriptor.TYPE_BYTES +) + + +def IsTypePackable(field_type): + """Return true iff packable = true is valid for fields of this type. + + Args: + field_type: a FieldDescriptor::Type value. + + Returns: + True iff fields of this type are packable. + """ + return field_type not in NON_PACKABLE_TYPES diff --git a/openpype/hosts/nuke/vendor/google/protobuf/json_format.py b/openpype/hosts/nuke/vendor/google/protobuf/json_format.py new file mode 100644 index 0000000000..5024ed89d7 --- /dev/null +++ b/openpype/hosts/nuke/vendor/google/protobuf/json_format.py @@ -0,0 +1,912 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. +# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +"""Contains routines for printing protocol messages in JSON format. + +Simple usage example: + + # Create a proto object and serialize it to a json format string. + message = my_proto_pb2.MyMessage(foo='bar') + json_string = json_format.MessageToJson(message) + + # Parse a json format string to proto object. 
+ message = json_format.Parse(json_string, my_proto_pb2.MyMessage()) +""" + +__author__ = 'jieluo@google.com (Jie Luo)' + + +import base64 +from collections import OrderedDict +import json +import math +from operator import methodcaller +import re +import sys + +from google.protobuf.internal import type_checkers +from google.protobuf import descriptor +from google.protobuf import symbol_database + + +_TIMESTAMPFOMAT = '%Y-%m-%dT%H:%M:%S' +_INT_TYPES = frozenset([descriptor.FieldDescriptor.CPPTYPE_INT32, + descriptor.FieldDescriptor.CPPTYPE_UINT32, + descriptor.FieldDescriptor.CPPTYPE_INT64, + descriptor.FieldDescriptor.CPPTYPE_UINT64]) +_INT64_TYPES = frozenset([descriptor.FieldDescriptor.CPPTYPE_INT64, + descriptor.FieldDescriptor.CPPTYPE_UINT64]) +_FLOAT_TYPES = frozenset([descriptor.FieldDescriptor.CPPTYPE_FLOAT, + descriptor.FieldDescriptor.CPPTYPE_DOUBLE]) +_INFINITY = 'Infinity' +_NEG_INFINITY = '-Infinity' +_NAN = 'NaN' + +_UNPAIRED_SURROGATE_PATTERN = re.compile( + u'[\ud800-\udbff](?![\udc00-\udfff])|(? self.max_recursion_depth: + raise ParseError('Message too deep. Max recursion depth is {0}'.format( + self.max_recursion_depth)) + message_descriptor = message.DESCRIPTOR + full_name = message_descriptor.full_name + if not path: + path = message_descriptor.name + if _IsWrapperMessage(message_descriptor): + self._ConvertWrapperMessage(value, message, path) + elif full_name in _WKTJSONMETHODS: + methodcaller(_WKTJSONMETHODS[full_name][1], value, message, path)(self) + else: + self._ConvertFieldValuePair(value, message, path) + self.recursion_depth -= 1 + + def _ConvertFieldValuePair(self, js, message, path): + """Convert field value pairs into regular message. + + Args: + js: A JSON object to convert the field value pairs. + message: A regular protocol message to record the data. + path: parent path to log parse error info. + + Raises: + ParseError: In case of problems converting. + """ + names = [] + message_descriptor = message.DESCRIPTOR + fields_by_json_name = dict((f.json_name, f) + for f in message_descriptor.fields) + for name in js: + try: + field = fields_by_json_name.get(name, None) + if not field: + field = message_descriptor.fields_by_name.get(name, None) + if not field and _VALID_EXTENSION_NAME.match(name): + if not message_descriptor.is_extendable: + raise ParseError( + 'Message type {0} does not have extensions at {1}'.format( + message_descriptor.full_name, path)) + identifier = name[1:-1] # strip [] brackets + # pylint: disable=protected-access + field = message.Extensions._FindExtensionByName(identifier) + # pylint: enable=protected-access + if not field: + # Try looking for extension by the message type name, dropping the + # field name following the final . separator in full_name. + identifier = '.'.join(identifier.split('.')[:-1]) + # pylint: disable=protected-access + field = message.Extensions._FindExtensionByName(identifier) + # pylint: enable=protected-access + if not field: + if self.ignore_unknown_fields: + continue + raise ParseError( + ('Message type "{0}" has no field named "{1}" at "{2}".\n' + ' Available Fields(except extensions): "{3}"').format( + message_descriptor.full_name, name, path, + [f.json_name for f in message_descriptor.fields])) + if name in names: + raise ParseError('Message type "{0}" should not have multiple ' + '"{1}" fields at "{2}".'.format( + message.DESCRIPTOR.full_name, name, path)) + names.append(name) + value = js[name] + # Check no other oneof field is parsed. 
+ if field.containing_oneof is not None and value is not None: + oneof_name = field.containing_oneof.name + if oneof_name in names: + raise ParseError('Message type "{0}" should not have multiple ' + '"{1}" oneof fields at "{2}".'.format( + message.DESCRIPTOR.full_name, oneof_name, + path)) + names.append(oneof_name) + + if value is None: + if (field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE + and field.message_type.full_name == 'google.protobuf.Value'): + sub_message = getattr(message, field.name) + sub_message.null_value = 0 + elif (field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_ENUM + and field.enum_type.full_name == 'google.protobuf.NullValue'): + setattr(message, field.name, 0) + else: + message.ClearField(field.name) + continue + + # Parse field value. + if _IsMapEntry(field): + message.ClearField(field.name) + self._ConvertMapFieldValue(value, message, field, + '{0}.{1}'.format(path, name)) + elif field.label == descriptor.FieldDescriptor.LABEL_REPEATED: + message.ClearField(field.name) + if not isinstance(value, list): + raise ParseError('repeated field {0} must be in [] which is ' + '{1} at {2}'.format(name, value, path)) + if field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE: + # Repeated message field. + for index, item in enumerate(value): + sub_message = getattr(message, field.name).add() + # None is a null_value in Value. + if (item is None and + sub_message.DESCRIPTOR.full_name != 'google.protobuf.Value'): + raise ParseError('null is not allowed to be used as an element' + ' in a repeated field at {0}.{1}[{2}]'.format( + path, name, index)) + self.ConvertMessage(item, sub_message, + '{0}.{1}[{2}]'.format(path, name, index)) + else: + # Repeated scalar field. + for index, item in enumerate(value): + if item is None: + raise ParseError('null is not allowed to be used as an element' + ' in a repeated field at {0}.{1}[{2}]'.format( + path, name, index)) + getattr(message, field.name).append( + _ConvertScalarFieldValue( + item, field, '{0}.{1}[{2}]'.format(path, name, index))) + elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE: + if field.is_extension: + sub_message = message.Extensions[field] + else: + sub_message = getattr(message, field.name) + sub_message.SetInParent() + self.ConvertMessage(value, sub_message, '{0}.{1}'.format(path, name)) + else: + if field.is_extension: + message.Extensions[field] = _ConvertScalarFieldValue( + value, field, '{0}.{1}'.format(path, name)) + else: + setattr( + message, field.name, + _ConvertScalarFieldValue(value, field, + '{0}.{1}'.format(path, name))) + except ParseError as e: + if field and field.containing_oneof is None: + raise ParseError('Failed to parse {0} field: {1}.'.format(name, e)) + else: + raise ParseError(str(e)) + except ValueError as e: + raise ParseError('Failed to parse {0} field: {1}.'.format(name, e)) + except TypeError as e: + raise ParseError('Failed to parse {0} field: {1}.'.format(name, e)) + + def _ConvertAnyMessage(self, value, message, path): + """Convert a JSON representation into Any message.""" + if isinstance(value, dict) and not value: + return + try: + type_url = value['@type'] + except KeyError: + raise ParseError( + '@type is missing when parsing any message at {0}'.format(path)) + + try: + sub_message = _CreateMessageFromTypeUrl(type_url, self.descriptor_pool) + except TypeError as e: + raise ParseError('{0} at {1}'.format(e, path)) + message_descriptor = sub_message.DESCRIPTOR + full_name = message_descriptor.full_name + if 
_IsWrapperMessage(message_descriptor): + self._ConvertWrapperMessage(value['value'], sub_message, + '{0}.value'.format(path)) + elif full_name in _WKTJSONMETHODS: + methodcaller(_WKTJSONMETHODS[full_name][1], value['value'], sub_message, + '{0}.value'.format(path))( + self) + else: + del value['@type'] + self._ConvertFieldValuePair(value, sub_message, path) + value['@type'] = type_url + # Sets Any message + message.value = sub_message.SerializeToString() + message.type_url = type_url + + def _ConvertGenericMessage(self, value, message, path): + """Convert a JSON representation into message with FromJsonString.""" + # Duration, Timestamp, FieldMask have a FromJsonString method to do the + # conversion. Users can also call the method directly. + try: + message.FromJsonString(value) + except ValueError as e: + raise ParseError('{0} at {1}'.format(e, path)) + + def _ConvertValueMessage(self, value, message, path): + """Convert a JSON representation into Value message.""" + if isinstance(value, dict): + self._ConvertStructMessage(value, message.struct_value, path) + elif isinstance(value, list): + self._ConvertListValueMessage(value, message.list_value, path) + elif value is None: + message.null_value = 0 + elif isinstance(value, bool): + message.bool_value = value + elif isinstance(value, str): + message.string_value = value + elif isinstance(value, _INT_OR_FLOAT): + message.number_value = value + else: + raise ParseError('Value {0} has unexpected type {1} at {2}'.format( + value, type(value), path)) + + def _ConvertListValueMessage(self, value, message, path): + """Convert a JSON representation into ListValue message.""" + if not isinstance(value, list): + raise ParseError('ListValue must be in [] which is {0} at {1}'.format( + value, path)) + message.ClearField('values') + for index, item in enumerate(value): + self._ConvertValueMessage(item, message.values.add(), + '{0}[{1}]'.format(path, index)) + + def _ConvertStructMessage(self, value, message, path): + """Convert a JSON representation into Struct message.""" + if not isinstance(value, dict): + raise ParseError('Struct must be in a dict which is {0} at {1}'.format( + value, path)) + # Clear will mark the struct as modified so it will be created even if + # there are no values. + message.Clear() + for key in value: + self._ConvertValueMessage(value[key], message.fields[key], + '{0}.{1}'.format(path, key)) + return + + def _ConvertWrapperMessage(self, value, message, path): + """Convert a JSON representation into Wrapper message.""" + field = message.DESCRIPTOR.fields_by_name['value'] + setattr( + message, 'value', + _ConvertScalarFieldValue(value, field, path='{0}.value'.format(path))) + + def _ConvertMapFieldValue(self, value, message, field, path): + """Convert map field value for a message map field. + + Args: + value: A JSON object to convert the map field value. + message: A protocol message to record the converted data. + field: The descriptor of the map field to be converted. + path: parent path to log parse error info. + + Raises: + ParseError: In case of convert problems. 
+ """ + if not isinstance(value, dict): + raise ParseError( + 'Map field {0} must be in a dict which is {1} at {2}'.format( + field.name, value, path)) + key_field = field.message_type.fields_by_name['key'] + value_field = field.message_type.fields_by_name['value'] + for key in value: + key_value = _ConvertScalarFieldValue(key, key_field, + '{0}.key'.format(path), True) + if value_field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE: + self.ConvertMessage(value[key], + getattr(message, field.name)[key_value], + '{0}[{1}]'.format(path, key_value)) + else: + getattr(message, field.name)[key_value] = _ConvertScalarFieldValue( + value[key], value_field, path='{0}[{1}]'.format(path, key_value)) + + +def _ConvertScalarFieldValue(value, field, path, require_str=False): + """Convert a single scalar field value. + + Args: + value: A scalar value to convert the scalar field value. + field: The descriptor of the field to convert. + path: parent path to log parse error info. + require_str: If True, the field value must be a str. + + Returns: + The converted scalar field value + + Raises: + ParseError: In case of convert problems. + """ + try: + if field.cpp_type in _INT_TYPES: + return _ConvertInteger(value) + elif field.cpp_type in _FLOAT_TYPES: + return _ConvertFloat(value, field) + elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_BOOL: + return _ConvertBool(value, require_str) + elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_STRING: + if field.type == descriptor.FieldDescriptor.TYPE_BYTES: + if isinstance(value, str): + encoded = value.encode('utf-8') + else: + encoded = value + # Add extra padding '=' + padded_value = encoded + b'=' * (4 - len(encoded) % 4) + return base64.urlsafe_b64decode(padded_value) + else: + # Checking for unpaired surrogates appears to be unreliable, + # depending on the specific Python version, so we check manually. + if _UNPAIRED_SURROGATE_PATTERN.search(value): + raise ParseError('Unpaired surrogate') + return value + elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_ENUM: + # Convert an enum value. + enum_value = field.enum_type.values_by_name.get(value, None) + if enum_value is None: + try: + number = int(value) + enum_value = field.enum_type.values_by_number.get(number, None) + except ValueError: + raise ParseError('Invalid enum value {0} for enum type {1}'.format( + value, field.enum_type.full_name)) + if enum_value is None: + if field.file.syntax == 'proto3': + # Proto3 accepts unknown enums. + return number + raise ParseError('Invalid enum value {0} for enum type {1}'.format( + value, field.enum_type.full_name)) + return enum_value.number + except ParseError as e: + raise ParseError('{0} at {1}'.format(e, path)) + + +def _ConvertInteger(value): + """Convert an integer. + + Args: + value: A scalar value to convert. + + Returns: + The integer value. + + Raises: + ParseError: If an integer couldn't be consumed. 
+ """ + if isinstance(value, float) and not value.is_integer(): + raise ParseError('Couldn\'t parse integer: {0}'.format(value)) + + if isinstance(value, str) and value.find(' ') != -1: + raise ParseError('Couldn\'t parse integer: "{0}"'.format(value)) + + if isinstance(value, bool): + raise ParseError('Bool value {0} is not acceptable for ' + 'integer field'.format(value)) + + return int(value) + + +def _ConvertFloat(value, field): + """Convert an floating point number.""" + if isinstance(value, float): + if math.isnan(value): + raise ParseError('Couldn\'t parse NaN, use quoted "NaN" instead') + if math.isinf(value): + if value > 0: + raise ParseError('Couldn\'t parse Infinity or value too large, ' + 'use quoted "Infinity" instead') + else: + raise ParseError('Couldn\'t parse -Infinity or value too small, ' + 'use quoted "-Infinity" instead') + if field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_FLOAT: + # pylint: disable=protected-access + if value > type_checkers._FLOAT_MAX: + raise ParseError('Float value too large') + # pylint: disable=protected-access + if value < type_checkers._FLOAT_MIN: + raise ParseError('Float value too small') + if value == 'nan': + raise ParseError('Couldn\'t parse float "nan", use "NaN" instead') + try: + # Assume Python compatible syntax. + return float(value) + except ValueError: + # Check alternative spellings. + if value == _NEG_INFINITY: + return float('-inf') + elif value == _INFINITY: + return float('inf') + elif value == _NAN: + return float('nan') + else: + raise ParseError('Couldn\'t parse float: {0}'.format(value)) + + +def _ConvertBool(value, require_str): + """Convert a boolean value. + + Args: + value: A scalar value to convert. + require_str: If True, value must be a str. + + Returns: + The bool parsed. + + Raises: + ParseError: If a boolean value couldn't be consumed. + """ + if require_str: + if value == 'true': + return True + elif value == 'false': + return False + else: + raise ParseError('Expected "true" or "false", not {0}'.format(value)) + + if not isinstance(value, bool): + raise ParseError('Expected true or false without quotes') + return value + +_WKTJSONMETHODS = { + 'google.protobuf.Any': ['_AnyMessageToJsonObject', + '_ConvertAnyMessage'], + 'google.protobuf.Duration': ['_GenericMessageToJsonObject', + '_ConvertGenericMessage'], + 'google.protobuf.FieldMask': ['_GenericMessageToJsonObject', + '_ConvertGenericMessage'], + 'google.protobuf.ListValue': ['_ListValueMessageToJsonObject', + '_ConvertListValueMessage'], + 'google.protobuf.Struct': ['_StructMessageToJsonObject', + '_ConvertStructMessage'], + 'google.protobuf.Timestamp': ['_GenericMessageToJsonObject', + '_ConvertGenericMessage'], + 'google.protobuf.Value': ['_ValueMessageToJsonObject', + '_ConvertValueMessage'] +} diff --git a/openpype/hosts/nuke/vendor/google/protobuf/message.py b/openpype/hosts/nuke/vendor/google/protobuf/message.py new file mode 100644 index 0000000000..76c6802f70 --- /dev/null +++ b/openpype/hosts/nuke/vendor/google/protobuf/message.py @@ -0,0 +1,424 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. +# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. 
+# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +# TODO(robinson): We should just make these methods all "pure-virtual" and move +# all implementation out, into reflection.py for now. + + +"""Contains an abstract base class for protocol messages.""" + +__author__ = 'robinson@google.com (Will Robinson)' + +class Error(Exception): + """Base error type for this module.""" + pass + + +class DecodeError(Error): + """Exception raised when deserializing messages.""" + pass + + +class EncodeError(Error): + """Exception raised when serializing messages.""" + pass + + +class Message(object): + + """Abstract base class for protocol messages. + + Protocol message classes are almost always generated by the protocol + compiler. These generated types subclass Message and implement the methods + shown below. + """ + + # TODO(robinson): Link to an HTML document here. + + # TODO(robinson): Document that instances of this class will also + # have an Extensions attribute with __getitem__ and __setitem__. + # Again, not sure how to best convey this. + + # TODO(robinson): Document that the class must also have a static + # RegisterExtension(extension_field) method. + # Not sure how to best express at this point. + + # TODO(robinson): Document these fields and methods. + + __slots__ = [] + + #: The :class:`google.protobuf.descriptor.Descriptor` for this message type. + DESCRIPTOR = None + + def __deepcopy__(self, memo=None): + clone = type(self)() + clone.MergeFrom(self) + return clone + + def __eq__(self, other_msg): + """Recursively compares two messages by value and structure.""" + raise NotImplementedError + + def __ne__(self, other_msg): + # Can't just say self != other_msg, since that would infinitely recurse. :) + return not self == other_msg + + def __hash__(self): + raise TypeError('unhashable object') + + def __str__(self): + """Outputs a human-readable representation of the message.""" + raise NotImplementedError + + def __unicode__(self): + """Outputs a human-readable representation of the message.""" + raise NotImplementedError + + def MergeFrom(self, other_msg): + """Merges the contents of the specified message into current message. + + This method merges the contents of the specified message into the current + message. Singular fields that are set in the specified message overwrite + the corresponding fields in the current message. Repeated fields are + appended. 
Singular sub-messages and groups are recursively merged. + + Args: + other_msg (Message): A message to merge into the current message. + """ + raise NotImplementedError + + def CopyFrom(self, other_msg): + """Copies the content of the specified message into the current message. + + The method clears the current message and then merges the specified + message using MergeFrom. + + Args: + other_msg (Message): A message to copy into the current one. + """ + if self is other_msg: + return + self.Clear() + self.MergeFrom(other_msg) + + def Clear(self): + """Clears all data that was set in the message.""" + raise NotImplementedError + + def SetInParent(self): + """Mark this as present in the parent. + + This normally happens automatically when you assign a field of a + sub-message, but sometimes you want to make the sub-message + present while keeping it empty. If you find yourself using this, + you may want to reconsider your design. + """ + raise NotImplementedError + + def IsInitialized(self): + """Checks if the message is initialized. + + Returns: + bool: The method returns True if the message is initialized (i.e. all of + its required fields are set). + """ + raise NotImplementedError + + # TODO(robinson): MergeFromString() should probably return None and be + # implemented in terms of a helper that returns the # of bytes read. Our + # deserialization routines would use the helper when recursively + # deserializing, but the end user would almost always just want the no-return + # MergeFromString(). + + def MergeFromString(self, serialized): + """Merges serialized protocol buffer data into this message. + + When we find a field in `serialized` that is already present + in this message: + + - If it's a "repeated" field, we append to the end of our list. + - Else, if it's a scalar, we overwrite our field. + - Else, (it's a nonrepeated composite), we recursively merge + into the existing composite. + + Args: + serialized (bytes): Any object that allows us to call + ``memoryview(serialized)`` to access a string of bytes using the + buffer interface. + + Returns: + int: The number of bytes read from `serialized`. + For non-group messages, this will always be `len(serialized)`, + but for messages which are actually groups, this will + generally be less than `len(serialized)`, since we must + stop when we reach an ``END_GROUP`` tag. Note that if + we *do* stop because of an ``END_GROUP`` tag, the number + of bytes returned does not include the bytes + for the ``END_GROUP`` tag information. + + Raises: + DecodeError: if the input cannot be parsed. + """ + # TODO(robinson): Document handling of unknown fields. + # TODO(robinson): When we switch to a helper, this will return None. + raise NotImplementedError + + def ParseFromString(self, serialized): + """Parse serialized protocol buffer data into this message. + + Like :func:`MergeFromString()`, except we clear the object first. + + Raises: + message.DecodeError if the input cannot be parsed. + """ + self.Clear() + return self.MergeFromString(serialized) + + def SerializeToString(self, **kwargs): + """Serializes the protocol message to a binary string. + + Keyword Args: + deterministic (bool): If true, requests deterministic serialization + of the protobuf, with predictable ordering of map keys. + + Returns: + A binary string representation of the message if all of the required + fields in the message are set (i.e. the message is initialized). + + Raises: + EncodeError: if the message isn't initialized (see :func:`IsInitialized`). 
+ """ + raise NotImplementedError + + def SerializePartialToString(self, **kwargs): + """Serializes the protocol message to a binary string. + + This method is similar to SerializeToString but doesn't check if the + message is initialized. + + Keyword Args: + deterministic (bool): If true, requests deterministic serialization + of the protobuf, with predictable ordering of map keys. + + Returns: + bytes: A serialized representation of the partial message. + """ + raise NotImplementedError + + # TODO(robinson): Decide whether we like these better + # than auto-generated has_foo() and clear_foo() methods + # on the instances themselves. This way is less consistent + # with C++, but it makes reflection-type access easier and + # reduces the number of magically autogenerated things. + # + # TODO(robinson): Be sure to document (and test) exactly + # which field names are accepted here. Are we case-sensitive? + # What do we do with fields that share names with Python keywords + # like 'lambda' and 'yield'? + # + # nnorwitz says: + # """ + # Typically (in python), an underscore is appended to names that are + # keywords. So they would become lambda_ or yield_. + # """ + def ListFields(self): + """Returns a list of (FieldDescriptor, value) tuples for present fields. + + A message field is non-empty if HasField() would return true. A singular + primitive field is non-empty if HasField() would return true in proto2 or it + is non zero in proto3. A repeated field is non-empty if it contains at least + one element. The fields are ordered by field number. + + Returns: + list[tuple(FieldDescriptor, value)]: field descriptors and values + for all fields in the message which are not empty. The values vary by + field type. + """ + raise NotImplementedError + + def HasField(self, field_name): + """Checks if a certain field is set for the message. + + For a oneof group, checks if any field inside is set. Note that if the + field_name is not defined in the message descriptor, :exc:`ValueError` will + be raised. + + Args: + field_name (str): The name of the field to check for presence. + + Returns: + bool: Whether a value has been set for the named field. + + Raises: + ValueError: if the `field_name` is not a member of this message. + """ + raise NotImplementedError + + def ClearField(self, field_name): + """Clears the contents of a given field. + + Inside a oneof group, clears the field set. If the name neither refers to a + defined field or oneof group, :exc:`ValueError` is raised. + + Args: + field_name (str): The name of the field to check for presence. + + Raises: + ValueError: if the `field_name` is not a member of this message. + """ + raise NotImplementedError + + def WhichOneof(self, oneof_group): + """Returns the name of the field that is set inside a oneof group. + + If no field is set, returns None. + + Args: + oneof_group (str): the name of the oneof group to check. + + Returns: + str or None: The name of the group that is set, or None. + + Raises: + ValueError: no group with the given name exists + """ + raise NotImplementedError + + def HasExtension(self, extension_handle): + """Checks if a certain extension is present for this message. + + Extensions are retrieved using the :attr:`Extensions` mapping (if present). + + Args: + extension_handle: The handle for the extension to check. + + Returns: + bool: Whether the extension is present for this message. + + Raises: + KeyError: if the extension is repeated. 
Similar to repeated fields, + there is no separate notion of presence: a "not present" repeated + extension is an empty list. + """ + raise NotImplementedError + + def ClearExtension(self, extension_handle): + """Clears the contents of a given extension. + + Args: + extension_handle: The handle for the extension to clear. + """ + raise NotImplementedError + + def UnknownFields(self): + """Returns the UnknownFieldSet. + + Returns: + UnknownFieldSet: The unknown fields stored in this message. + """ + raise NotImplementedError + + def DiscardUnknownFields(self): + """Clears all fields in the :class:`UnknownFieldSet`. + + This operation is recursive for nested message. + """ + raise NotImplementedError + + def ByteSize(self): + """Returns the serialized size of this message. + + Recursively calls ByteSize() on all contained messages. + + Returns: + int: The number of bytes required to serialize this message. + """ + raise NotImplementedError + + @classmethod + def FromString(cls, s): + raise NotImplementedError + + @staticmethod + def RegisterExtension(extension_handle): + raise NotImplementedError + + def _SetListener(self, message_listener): + """Internal method used by the protocol message implementation. + Clients should not call this directly. + + Sets a listener that this message will call on certain state transitions. + + The purpose of this method is to register back-edges from children to + parents at runtime, for the purpose of setting "has" bits and + byte-size-dirty bits in the parent and ancestor objects whenever a child or + descendant object is modified. + + If the client wants to disconnect this Message from the object tree, she + explicitly sets callback to None. + + If message_listener is None, unregisters any existing listener. Otherwise, + message_listener must implement the MessageListener interface in + internal/message_listener.py, and we discard any listener registered + via a previous _SetListener() call. + """ + raise NotImplementedError + + def __getstate__(self): + """Support the pickle protocol.""" + return dict(serialized=self.SerializePartialToString()) + + def __setstate__(self, state): + """Support the pickle protocol.""" + self.__init__() + serialized = state['serialized'] + # On Python 3, using encoding='latin1' is required for unpickling + # protos pickled by Python 2. + if not isinstance(serialized, bytes): + serialized = serialized.encode('latin1') + self.ParseFromString(serialized) + + def __reduce__(self): + message_descriptor = self.DESCRIPTOR + if message_descriptor.containing_type is None: + return type(self), (), self.__getstate__() + # the message type must be nested. + # Python does not pickle nested classes; use the symbol_database on the + # receiving end. + container = message_descriptor + return (_InternalConstructMessage, (container.full_name,), + self.__getstate__()) + + +def _InternalConstructMessage(full_name): + """Constructs a nested message.""" + from google.protobuf import symbol_database # pylint:disable=g-import-not-at-top + + return symbol_database.Default().GetSymbol(full_name)() diff --git a/openpype/hosts/nuke/vendor/google/protobuf/message_factory.py b/openpype/hosts/nuke/vendor/google/protobuf/message_factory.py new file mode 100644 index 0000000000..3656fa6874 --- /dev/null +++ b/openpype/hosts/nuke/vendor/google/protobuf/message_factory.py @@ -0,0 +1,185 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. 
+# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +"""Provides a factory class for generating dynamic messages. + +The easiest way to use this class is if you have access to the FileDescriptor +protos containing the messages you want to create you can just do the following: + +message_classes = message_factory.GetMessages(iterable_of_file_descriptors) +my_proto_instance = message_classes['some.proto.package.MessageName']() +""" + +__author__ = 'matthewtoia@google.com (Matt Toia)' + +from google.protobuf.internal import api_implementation +from google.protobuf import descriptor_pool +from google.protobuf import message + +if api_implementation.Type() == 'cpp': + from google.protobuf.pyext import cpp_message as message_impl +else: + from google.protobuf.internal import python_message as message_impl + + +# The type of all Message classes. +_GENERATED_PROTOCOL_MESSAGE_TYPE = message_impl.GeneratedProtocolMessageType + + +class MessageFactory(object): + """Factory for creating Proto2 messages from descriptors in a pool.""" + + def __init__(self, pool=None): + """Initializes a new factory.""" + self.pool = pool or descriptor_pool.DescriptorPool() + + # local cache of all classes built from protobuf descriptors + self._classes = {} + + def GetPrototype(self, descriptor): + """Obtains a proto2 message class based on the passed in descriptor. + + Passing a descriptor with a fully qualified name matching a previous + invocation will cause the same class to be returned. + + Args: + descriptor: The descriptor to build from. + + Returns: + A class describing the passed in descriptor. + """ + if descriptor not in self._classes: + result_class = self.CreatePrototype(descriptor) + # The assignment to _classes is redundant for the base implementation, but + # might avoid confusion in cases where CreatePrototype gets overridden and + # does not call the base implementation. 
+ self._classes[descriptor] = result_class + return result_class + return self._classes[descriptor] + + def CreatePrototype(self, descriptor): + """Builds a proto2 message class based on the passed in descriptor. + + Don't call this function directly, it always creates a new class. Call + GetPrototype() instead. This method is meant to be overridden in subblasses + to perform additional operations on the newly constructed class. + + Args: + descriptor: The descriptor to build from. + + Returns: + A class describing the passed in descriptor. + """ + descriptor_name = descriptor.name + result_class = _GENERATED_PROTOCOL_MESSAGE_TYPE( + descriptor_name, + (message.Message,), + { + 'DESCRIPTOR': descriptor, + # If module not set, it wrongly points to message_factory module. + '__module__': None, + }) + result_class._FACTORY = self # pylint: disable=protected-access + # Assign in _classes before doing recursive calls to avoid infinite + # recursion. + self._classes[descriptor] = result_class + for field in descriptor.fields: + if field.message_type: + self.GetPrototype(field.message_type) + for extension in result_class.DESCRIPTOR.extensions: + if extension.containing_type not in self._classes: + self.GetPrototype(extension.containing_type) + extended_class = self._classes[extension.containing_type] + extended_class.RegisterExtension(extension) + return result_class + + def GetMessages(self, files): + """Gets all the messages from a specified file. + + This will find and resolve dependencies, failing if the descriptor + pool cannot satisfy them. + + Args: + files: The file names to extract messages from. + + Returns: + A dictionary mapping proto names to the message classes. This will include + any dependent messages as well as any messages defined in the same file as + a specified message. + """ + result = {} + for file_name in files: + file_desc = self.pool.FindFileByName(file_name) + for desc in file_desc.message_types_by_name.values(): + result[desc.full_name] = self.GetPrototype(desc) + + # While the extension FieldDescriptors are created by the descriptor pool, + # the python classes created in the factory need them to be registered + # explicitly, which is done below. + # + # The call to RegisterExtension will specifically check if the + # extension was already registered on the object and either + # ignore the registration if the original was the same, or raise + # an error if they were different. + + for extension in file_desc.extensions_by_name.values(): + if extension.containing_type not in self._classes: + self.GetPrototype(extension.containing_type) + extended_class = self._classes[extension.containing_type] + extended_class.RegisterExtension(extension) + return result + + +_FACTORY = MessageFactory() + + +def GetMessages(file_protos): + """Builds a dictionary of all the messages available in a set of files. + + Args: + file_protos: Iterable of FileDescriptorProto to build messages out of. + + Returns: + A dictionary mapping proto names to the message classes. This will include + any dependent messages as well as any messages defined in the same file as + a specified message. + """ + # The cpp implementation of the protocol buffer library requires to add the + # message in topological order of the dependency graph. + file_by_name = {file_proto.name: file_proto for file_proto in file_protos} + def _AddFile(file_proto): + for dependency in file_proto.dependency: + if dependency in file_by_name: + # Remove from elements to be visited, in order to cut cycles. 
+ _AddFile(file_by_name.pop(dependency)) + _FACTORY.pool.Add(file_proto) + while file_by_name: + _AddFile(file_by_name.popitem()[1]) + return _FACTORY.GetMessages([file_proto.name for file_proto in file_protos]) diff --git a/openpype/hosts/nuke/vendor/google/protobuf/proto_builder.py b/openpype/hosts/nuke/vendor/google/protobuf/proto_builder.py new file mode 100644 index 0000000000..a4667ce63e --- /dev/null +++ b/openpype/hosts/nuke/vendor/google/protobuf/proto_builder.py @@ -0,0 +1,134 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. +# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +"""Dynamic Protobuf class creator.""" + +from collections import OrderedDict +import hashlib +import os + +from google.protobuf import descriptor_pb2 +from google.protobuf import descriptor +from google.protobuf import message_factory + + +def _GetMessageFromFactory(factory, full_name): + """Get a proto class from the MessageFactory by name. + + Args: + factory: a MessageFactory instance. + full_name: str, the fully qualified name of the proto type. + Returns: + A class, for the type identified by full_name. + Raises: + KeyError, if the proto is not found in the factory's descriptor pool. + """ + proto_descriptor = factory.pool.FindMessageTypeByName(full_name) + proto_cls = factory.GetPrototype(proto_descriptor) + return proto_cls + + +def MakeSimpleProtoClass(fields, full_name=None, pool=None): + """Create a Protobuf class whose fields are basic types. + + Note: this doesn't validate field names! + + Args: + fields: dict of {name: field_type} mappings for each field in the proto. If + this is an OrderedDict the order will be maintained, otherwise the + fields will be sorted by name. + full_name: optional str, the fully-qualified name of the proto type. + pool: optional DescriptorPool instance. + Returns: + a class, the new protobuf class with a FileDescriptor. 
+ """ + factory = message_factory.MessageFactory(pool=pool) + + if full_name is not None: + try: + proto_cls = _GetMessageFromFactory(factory, full_name) + return proto_cls + except KeyError: + # The factory's DescriptorPool doesn't know about this class yet. + pass + + # Get a list of (name, field_type) tuples from the fields dict. If fields was + # an OrderedDict we keep the order, but otherwise we sort the field to ensure + # consistent ordering. + field_items = fields.items() + if not isinstance(fields, OrderedDict): + field_items = sorted(field_items) + + # Use a consistent file name that is unlikely to conflict with any imported + # proto files. + fields_hash = hashlib.sha1() + for f_name, f_type in field_items: + fields_hash.update(f_name.encode('utf-8')) + fields_hash.update(str(f_type).encode('utf-8')) + proto_file_name = fields_hash.hexdigest() + '.proto' + + # If the proto is anonymous, use the same hash to name it. + if full_name is None: + full_name = ('net.proto2.python.public.proto_builder.AnonymousProto_' + + fields_hash.hexdigest()) + try: + proto_cls = _GetMessageFromFactory(factory, full_name) + return proto_cls + except KeyError: + # The factory's DescriptorPool doesn't know about this class yet. + pass + + # This is the first time we see this proto: add a new descriptor to the pool. + factory.pool.Add( + _MakeFileDescriptorProto(proto_file_name, full_name, field_items)) + return _GetMessageFromFactory(factory, full_name) + + +def _MakeFileDescriptorProto(proto_file_name, full_name, field_items): + """Populate FileDescriptorProto for MessageFactory's DescriptorPool.""" + package, name = full_name.rsplit('.', 1) + file_proto = descriptor_pb2.FileDescriptorProto() + file_proto.name = os.path.join(package.replace('.', '/'), proto_file_name) + file_proto.package = package + desc_proto = file_proto.message_type.add() + desc_proto.name = name + for f_number, (f_name, f_type) in enumerate(field_items, 1): + field_proto = desc_proto.field.add() + field_proto.name = f_name + # # If the number falls in the reserved range, reassign it to the correct + # # number after the range. + if f_number >= descriptor.FieldDescriptor.FIRST_RESERVED_FIELD_NUMBER: + f_number += ( + descriptor.FieldDescriptor.LAST_RESERVED_FIELD_NUMBER - + descriptor.FieldDescriptor.FIRST_RESERVED_FIELD_NUMBER + 1) + field_proto.number = f_number + field_proto.label = descriptor_pb2.FieldDescriptorProto.LABEL_OPTIONAL + field_proto.type = f_type + return file_proto diff --git a/openpype/hosts/nuke/vendor/google/protobuf/pyext/__init__.py b/openpype/hosts/nuke/vendor/google/protobuf/pyext/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/openpype/hosts/nuke/vendor/google/protobuf/pyext/cpp_message.py b/openpype/hosts/nuke/vendor/google/protobuf/pyext/cpp_message.py new file mode 100644 index 0000000000..fc8eb32d79 --- /dev/null +++ b/openpype/hosts/nuke/vendor/google/protobuf/pyext/cpp_message.py @@ -0,0 +1,65 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. +# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. 
+# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +"""Protocol message implementation hooks for C++ implementation. + +Contains helper functions used to create protocol message classes from +Descriptor objects at runtime backed by the protocol buffer C++ API. +""" + +__author__ = 'tibell@google.com (Johan Tibell)' + +from google.protobuf.pyext import _message + + +class GeneratedProtocolMessageType(_message.MessageMeta): + + """Metaclass for protocol message classes created at runtime from Descriptors. + + The protocol compiler currently uses this metaclass to create protocol + message classes at runtime. Clients can also manually create their own + classes at runtime, as in this example: + + mydescriptor = Descriptor(.....) + factory = symbol_database.Default() + factory.pool.AddDescriptor(mydescriptor) + MyProtoClass = factory.GetPrototype(mydescriptor) + myproto_instance = MyProtoClass() + myproto.foo_field = 23 + ... + + The above example will not work for nested types. If you wish to include them, + use reflection.MakeClass() instead of manually instantiating the class in + order to create the appropriate class structure. + """ + + # Must be consistent with the protocol-compiler code in + # proto2/compiler/internal/generator.*. + _DESCRIPTOR_KEY = 'DESCRIPTOR' diff --git a/openpype/hosts/nuke/vendor/google/protobuf/pyext/python_pb2.py b/openpype/hosts/nuke/vendor/google/protobuf/pyext/python_pb2.py new file mode 100644 index 0000000000..2c6ecf4c98 --- /dev/null +++ b/openpype/hosts/nuke/vendor/google/protobuf/pyext/python_pb2.py @@ -0,0 +1,34 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: google/protobuf/pyext/python.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\"google/protobuf/pyext/python.proto\x12\x1fgoogle.protobuf.python.internal\"\xbc\x02\n\x0cTestAllTypes\x12\\\n\x17repeated_nested_message\x18\x01 \x03(\x0b\x32;.google.protobuf.python.internal.TestAllTypes.NestedMessage\x12\\\n\x17optional_nested_message\x18\x02 \x01(\x0b\x32;.google.protobuf.python.internal.TestAllTypes.NestedMessage\x12\x16\n\x0eoptional_int32\x18\x03 \x01(\x05\x1aX\n\rNestedMessage\x12\n\n\x02\x62\x62\x18\x01 \x01(\x05\x12;\n\x02\x63\x63\x18\x02 \x01(\x0b\x32/.google.protobuf.python.internal.ForeignMessage\"&\n\x0e\x46oreignMessage\x12\t\n\x01\x63\x18\x01 \x01(\x05\x12\t\n\x01\x64\x18\x02 \x03(\x05\"\x1d\n\x11TestAllExtensions*\x08\x08\x01\x10\x80\x80\x80\x80\x02:\x9a\x01\n!optional_nested_message_extension\x12\x32.google.protobuf.python.internal.TestAllExtensions\x18\x01 \x01(\x0b\x32;.google.protobuf.python.internal.TestAllTypes.NestedMessage:\x9a\x01\n!repeated_nested_message_extension\x12\x32.google.protobuf.python.internal.TestAllExtensions\x18\x02 \x03(\x0b\x32;.google.protobuf.python.internal.TestAllTypes.NestedMessageB\x02H\x01') + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.pyext.python_pb2', globals()) +if _descriptor._USE_C_DESCRIPTORS == False: + TestAllExtensions.RegisterExtension(optional_nested_message_extension) + TestAllExtensions.RegisterExtension(repeated_nested_message_extension) + + DESCRIPTOR._options = None + DESCRIPTOR._serialized_options = b'H\001' + _TESTALLTYPES._serialized_start=72 + _TESTALLTYPES._serialized_end=388 + _TESTALLTYPES_NESTEDMESSAGE._serialized_start=300 + _TESTALLTYPES_NESTEDMESSAGE._serialized_end=388 + _FOREIGNMESSAGE._serialized_start=390 + _FOREIGNMESSAGE._serialized_end=428 + _TESTALLEXTENSIONS._serialized_start=430 + _TESTALLEXTENSIONS._serialized_end=459 +# @@protoc_insertion_point(module_scope) diff --git a/openpype/hosts/nuke/vendor/google/protobuf/reflection.py b/openpype/hosts/nuke/vendor/google/protobuf/reflection.py new file mode 100644 index 0000000000..81e18859a8 --- /dev/null +++ b/openpype/hosts/nuke/vendor/google/protobuf/reflection.py @@ -0,0 +1,95 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. +# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. 
+# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +# This code is meant to work on Python 2.4 and above only. + +"""Contains a metaclass and helper functions used to create +protocol message classes from Descriptor objects at runtime. + +Recall that a metaclass is the "type" of a class. +(A class is to a metaclass what an instance is to a class.) + +In this case, we use the GeneratedProtocolMessageType metaclass +to inject all the useful functionality into the classes +output by the protocol compiler at compile-time. + +The upshot of all this is that the real implementation +details for ALL pure-Python protocol buffers are *here in +this file*. +""" + +__author__ = 'robinson@google.com (Will Robinson)' + + +from google.protobuf import message_factory +from google.protobuf import symbol_database + +# The type of all Message classes. +# Part of the public interface, but normally only used by message factories. +GeneratedProtocolMessageType = message_factory._GENERATED_PROTOCOL_MESSAGE_TYPE + +MESSAGE_CLASS_CACHE = {} + + +# Deprecated. Please NEVER use reflection.ParseMessage(). +def ParseMessage(descriptor, byte_str): + """Generate a new Message instance from this Descriptor and a byte string. + + DEPRECATED: ParseMessage is deprecated because it is using MakeClass(). + Please use MessageFactory.GetPrototype() instead. + + Args: + descriptor: Protobuf Descriptor object + byte_str: Serialized protocol buffer byte string + + Returns: + Newly created protobuf Message object. + """ + result_class = MakeClass(descriptor) + new_msg = result_class() + new_msg.ParseFromString(byte_str) + return new_msg + + +# Deprecated. Please NEVER use reflection.MakeClass(). +def MakeClass(descriptor): + """Construct a class object for a protobuf described by descriptor. + + DEPRECATED: use MessageFactory.GetPrototype() instead. + + Args: + descriptor: A descriptor.Descriptor object describing the protobuf. + Returns: + The Message class object described by the descriptor. + """ + # Original implementation leads to duplicate message classes, which won't play + # well with extensions. Message factory info is also missing. + # Redirect to message_factory. + return symbol_database.Default().GetPrototype(descriptor) diff --git a/openpype/hosts/nuke/vendor/google/protobuf/service.py b/openpype/hosts/nuke/vendor/google/protobuf/service.py new file mode 100644 index 0000000000..5625246324 --- /dev/null +++ b/openpype/hosts/nuke/vendor/google/protobuf/service.py @@ -0,0 +1,228 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. 
+# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +"""DEPRECATED: Declares the RPC service interfaces. + +This module declares the abstract interfaces underlying proto2 RPC +services. These are intended to be independent of any particular RPC +implementation, so that proto2 services can be used on top of a variety +of implementations. Starting with version 2.3.0, RPC implementations should +not try to build on these, but should instead provide code generator plugins +which generate code specific to the particular RPC implementation. This way +the generated code can be more appropriate for the implementation in use +and can avoid unnecessary layers of indirection. +""" + +__author__ = 'petar@google.com (Petar Petrov)' + + +class RpcException(Exception): + """Exception raised on failed blocking RPC method call.""" + pass + + +class Service(object): + + """Abstract base interface for protocol-buffer-based RPC services. + + Services themselves are abstract classes (implemented either by servers or as + stubs), but they subclass this base interface. The methods of this + interface can be used to call the methods of the service without knowing + its exact type at compile time (analogous to the Message interface). + """ + + def GetDescriptor(): + """Retrieves this service's descriptor.""" + raise NotImplementedError + + def CallMethod(self, method_descriptor, rpc_controller, + request, done): + """Calls a method of the service specified by method_descriptor. + + If "done" is None then the call is blocking and the response + message will be returned directly. Otherwise the call is asynchronous + and "done" will later be called with the response value. + + In the blocking case, RpcException will be raised on error. + + Preconditions: + + * method_descriptor.service == GetDescriptor + * request is of the exact same classes as returned by + GetRequestClass(method). + * After the call has started, the request must not be modified. + * "rpc_controller" is of the correct type for the RPC implementation being + used by this Service. 
For stubs, the "correct type" depends on the + RpcChannel which the stub is using. + + Postconditions: + + * "done" will be called when the method is complete. This may be + before CallMethod() returns or it may be at some point in the future. + * If the RPC failed, the response value passed to "done" will be None. + Further details about the failure can be found by querying the + RpcController. + """ + raise NotImplementedError + + def GetRequestClass(self, method_descriptor): + """Returns the class of the request message for the specified method. + + CallMethod() requires that the request is of a particular subclass of + Message. GetRequestClass() gets the default instance of this required + type. + + Example: + method = service.GetDescriptor().FindMethodByName("Foo") + request = stub.GetRequestClass(method)() + request.ParseFromString(input) + service.CallMethod(method, request, callback) + """ + raise NotImplementedError + + def GetResponseClass(self, method_descriptor): + """Returns the class of the response message for the specified method. + + This method isn't really needed, as the RpcChannel's CallMethod constructs + the response protocol message. It's provided anyway in case it is useful + for the caller to know the response type in advance. + """ + raise NotImplementedError + + +class RpcController(object): + + """An RpcController mediates a single method call. + + The primary purpose of the controller is to provide a way to manipulate + settings specific to the RPC implementation and to find out about RPC-level + errors. The methods provided by the RpcController interface are intended + to be a "least common denominator" set of features which we expect all + implementations to support. Specific implementations may provide more + advanced features (e.g. deadline propagation). + """ + + # Client-side methods below + + def Reset(self): + """Resets the RpcController to its initial state. + + After the RpcController has been reset, it may be reused in + a new call. Must not be called while an RPC is in progress. + """ + raise NotImplementedError + + def Failed(self): + """Returns true if the call failed. + + After a call has finished, returns true if the call failed. The possible + reasons for failure depend on the RPC implementation. Failed() must not + be called before a call has finished. If Failed() returns true, the + contents of the response message are undefined. + """ + raise NotImplementedError + + def ErrorText(self): + """If Failed is true, returns a human-readable description of the error.""" + raise NotImplementedError + + def StartCancel(self): + """Initiate cancellation. + + Advises the RPC system that the caller desires that the RPC call be + canceled. The RPC system may cancel it immediately, may wait awhile and + then cancel it, or may not even cancel the call at all. If the call is + canceled, the "done" callback will still be called and the RpcController + will indicate that the call failed at that time. + """ + raise NotImplementedError + + # Server-side methods below + + def SetFailed(self, reason): + """Sets a failure reason. + + Causes Failed() to return true on the client side. "reason" will be + incorporated into the message returned by ErrorText(). If you find + you need to return machine-readable information about failures, you + should incorporate it into your response protocol buffer and should + NOT call SetFailed(). + """ + raise NotImplementedError + + def IsCanceled(self): + """Checks if the client cancelled the RPC. 
+ + If true, indicates that the client canceled the RPC, so the server may + as well give up on replying to it. The server should still call the + final "done" callback. + """ + raise NotImplementedError + + def NotifyOnCancel(self, callback): + """Sets a callback to invoke on cancel. + + Asks that the given callback be called when the RPC is canceled. The + callback will always be called exactly once. If the RPC completes without + being canceled, the callback will be called after completion. If the RPC + has already been canceled when NotifyOnCancel() is called, the callback + will be called immediately. + + NotifyOnCancel() must be called no more than once per request. + """ + raise NotImplementedError + + +class RpcChannel(object): + + """Abstract interface for an RPC channel. + + An RpcChannel represents a communication line to a service which can be used + to call that service's methods. The service may be running on another + machine. Normally, you should not use an RpcChannel directly, but instead + construct a stub {@link Service} wrapping it. Example: + + Example: + RpcChannel channel = rpcImpl.Channel("remotehost.example.com:1234") + RpcController controller = rpcImpl.Controller() + MyService service = MyService_Stub(channel) + service.MyMethod(controller, request, callback) + """ + + def CallMethod(self, method_descriptor, rpc_controller, + request, response_class, done): + """Calls the method identified by the descriptor. + + Call the given method of the remote service. The signature of this + procedure looks the same as Service.CallMethod(), but the requirements + are less strict in one important way: the request object doesn't have to + be of any specific class as long as its descriptor is method.input_type. + """ + raise NotImplementedError diff --git a/openpype/hosts/nuke/vendor/google/protobuf/service_reflection.py b/openpype/hosts/nuke/vendor/google/protobuf/service_reflection.py new file mode 100644 index 0000000000..f82ab7145a --- /dev/null +++ b/openpype/hosts/nuke/vendor/google/protobuf/service_reflection.py @@ -0,0 +1,295 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. +# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. 
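The RpcChannel docstring above sketches stub usage in C++-style pseudocode; rendered as Python it might look like the sketch below. "rpc_impl", "my_service_pb2", "MyService_Stub", "MyRequest" and "MyMethod" are hypothetical names; protobuf only defines the abstract Service/RpcChannel/RpcController interfaces, and a concrete RPC implementation has to be supplied separately.

    # Hedged sketch under the assumptions named above, not a runnable recipe.
    channel = rpc_impl.Channel("remotehost.example.com:1234")
    controller = rpc_impl.Controller()
    stub = my_service_pb2.MyService_Stub(channel)
    request = my_service_pb2.MyRequest()

    def done(response):
        # Receives the response message, or None if the call failed
        # (query the controller for details in that case).
        print(response)

    stub.MyMethod(controller, request, done)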
IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +"""Contains metaclasses used to create protocol service and service stub +classes from ServiceDescriptor objects at runtime. + +The GeneratedServiceType and GeneratedServiceStubType metaclasses are used to +inject all useful functionality into the classes output by the protocol +compiler at compile-time. +""" + +__author__ = 'petar@google.com (Petar Petrov)' + + +class GeneratedServiceType(type): + + """Metaclass for service classes created at runtime from ServiceDescriptors. + + Implementations for all methods described in the Service class are added here + by this class. We also create properties to allow getting/setting all fields + in the protocol message. + + The protocol compiler currently uses this metaclass to create protocol service + classes at runtime. Clients can also manually create their own classes at + runtime, as in this example:: + + mydescriptor = ServiceDescriptor(.....) + class MyProtoService(service.Service): + __metaclass__ = GeneratedServiceType + DESCRIPTOR = mydescriptor + myservice_instance = MyProtoService() + # ... + """ + + _DESCRIPTOR_KEY = 'DESCRIPTOR' + + def __init__(cls, name, bases, dictionary): + """Creates a message service class. + + Args: + name: Name of the class (ignored, but required by the metaclass + protocol). + bases: Base classes of the class being constructed. + dictionary: The class dictionary of the class being constructed. + dictionary[_DESCRIPTOR_KEY] must contain a ServiceDescriptor object + describing this protocol service type. + """ + # Don't do anything if this class doesn't have a descriptor. This happens + # when a service class is subclassed. + if GeneratedServiceType._DESCRIPTOR_KEY not in dictionary: + return + + descriptor = dictionary[GeneratedServiceType._DESCRIPTOR_KEY] + service_builder = _ServiceBuilder(descriptor) + service_builder.BuildService(cls) + cls.DESCRIPTOR = descriptor + + +class GeneratedServiceStubType(GeneratedServiceType): + + """Metaclass for service stubs created at runtime from ServiceDescriptors. + + This class has similar responsibilities as GeneratedServiceType, except that + it creates the service stub classes. + """ + + _DESCRIPTOR_KEY = 'DESCRIPTOR' + + def __init__(cls, name, bases, dictionary): + """Creates a message service stub class. + + Args: + name: Name of the class (ignored, here). + bases: Base classes of the class being constructed. + dictionary: The class dictionary of the class being constructed. + dictionary[_DESCRIPTOR_KEY] must contain a ServiceDescriptor object + describing this protocol service type. + """ + super(GeneratedServiceStubType, cls).__init__(name, bases, dictionary) + # Don't do anything if this class doesn't have a descriptor. This happens + # when a service stub is subclassed. 
+ if GeneratedServiceStubType._DESCRIPTOR_KEY not in dictionary: + return + + descriptor = dictionary[GeneratedServiceStubType._DESCRIPTOR_KEY] + service_stub_builder = _ServiceStubBuilder(descriptor) + service_stub_builder.BuildServiceStub(cls) + + +class _ServiceBuilder(object): + + """This class constructs a protocol service class using a service descriptor. + + Given a service descriptor, this class constructs a class that represents + the specified service descriptor. One service builder instance constructs + exactly one service class. That means all instances of that class share the + same builder. + """ + + def __init__(self, service_descriptor): + """Initializes an instance of the service class builder. + + Args: + service_descriptor: ServiceDescriptor to use when constructing the + service class. + """ + self.descriptor = service_descriptor + + def BuildService(builder, cls): + """Constructs the service class. + + Args: + cls: The class that will be constructed. + """ + + # CallMethod needs to operate with an instance of the Service class. This + # internal wrapper function exists only to be able to pass the service + # instance to the method that does the real CallMethod work. + # Making sure to use exact argument names from the abstract interface in + # service.py to match the type signature + def _WrapCallMethod(self, method_descriptor, rpc_controller, request, done): + return builder._CallMethod(self, method_descriptor, rpc_controller, + request, done) + + def _WrapGetRequestClass(self, method_descriptor): + return builder._GetRequestClass(method_descriptor) + + def _WrapGetResponseClass(self, method_descriptor): + return builder._GetResponseClass(method_descriptor) + + builder.cls = cls + cls.CallMethod = _WrapCallMethod + cls.GetDescriptor = staticmethod(lambda: builder.descriptor) + cls.GetDescriptor.__doc__ = 'Returns the service descriptor.' + cls.GetRequestClass = _WrapGetRequestClass + cls.GetResponseClass = _WrapGetResponseClass + for method in builder.descriptor.methods: + setattr(cls, method.name, builder._GenerateNonImplementedMethod(method)) + + def _CallMethod(self, srvc, method_descriptor, + rpc_controller, request, callback): + """Calls the method described by a given method descriptor. + + Args: + srvc: Instance of the service for which this method is called. + method_descriptor: Descriptor that represent the method to call. + rpc_controller: RPC controller to use for this method's execution. + request: Request protocol message. + callback: A callback to invoke after the method has completed. + """ + if method_descriptor.containing_service != self.descriptor: + raise RuntimeError( + 'CallMethod() given method descriptor for wrong service type.') + method = getattr(srvc, method_descriptor.name) + return method(rpc_controller, request, callback) + + def _GetRequestClass(self, method_descriptor): + """Returns the class of the request protocol message. + + Args: + method_descriptor: Descriptor of the method for which to return the + request protocol message class. + + Returns: + A class that represents the input protocol message of the specified + method. + """ + if method_descriptor.containing_service != self.descriptor: + raise RuntimeError( + 'GetRequestClass() given method descriptor for wrong service type.') + return method_descriptor.input_type._concrete_class + + def _GetResponseClass(self, method_descriptor): + """Returns the class of the response protocol message. 
+ + Args: + method_descriptor: Descriptor of the method for which to return the + response protocol message class. + + Returns: + A class that represents the output protocol message of the specified + method. + """ + if method_descriptor.containing_service != self.descriptor: + raise RuntimeError( + 'GetResponseClass() given method descriptor for wrong service type.') + return method_descriptor.output_type._concrete_class + + def _GenerateNonImplementedMethod(self, method): + """Generates and returns a method that can be set for a service methods. + + Args: + method: Descriptor of the service method for which a method is to be + generated. + + Returns: + A method that can be added to the service class. + """ + return lambda inst, rpc_controller, request, callback: ( + self._NonImplementedMethod(method.name, rpc_controller, callback)) + + def _NonImplementedMethod(self, method_name, rpc_controller, callback): + """The body of all methods in the generated service class. + + Args: + method_name: Name of the method being executed. + rpc_controller: RPC controller used to execute this method. + callback: A callback which will be invoked when the method finishes. + """ + rpc_controller.SetFailed('Method %s not implemented.' % method_name) + callback(None) + + +class _ServiceStubBuilder(object): + + """Constructs a protocol service stub class using a service descriptor. + + Given a service descriptor, this class constructs a suitable stub class. + A stub is just a type-safe wrapper around an RpcChannel which emulates a + local implementation of the service. + + One service stub builder instance constructs exactly one class. It means all + instances of that class share the same service stub builder. + """ + + def __init__(self, service_descriptor): + """Initializes an instance of the service stub class builder. + + Args: + service_descriptor: ServiceDescriptor to use when constructing the + stub class. + """ + self.descriptor = service_descriptor + + def BuildServiceStub(self, cls): + """Constructs the stub class. + + Args: + cls: The class that will be constructed. + """ + + def _ServiceStubInit(stub, rpc_channel): + stub.rpc_channel = rpc_channel + self.cls = cls + cls.__init__ = _ServiceStubInit + for method in self.descriptor.methods: + setattr(cls, method.name, self._GenerateStubMethod(method)) + + def _GenerateStubMethod(self, method): + return (lambda inst, rpc_controller, request, callback=None: + self._StubMethod(inst, method, rpc_controller, request, callback)) + + def _StubMethod(self, stub, method_descriptor, + rpc_controller, request, callback): + """The body of all service methods in the generated stub class. + + Args: + stub: Stub instance. + method_descriptor: Descriptor of the invoked method. + rpc_controller: Rpc controller to execute the method. + request: Request protocol message. + callback: A callback to execute when the method finishes. + Returns: + Response message (in case of blocking call). + """ + return stub.rpc_channel.CallMethod( + method_descriptor, rpc_controller, request, + method_descriptor.output_type._concrete_class, callback) diff --git a/openpype/hosts/nuke/vendor/google/protobuf/source_context_pb2.py b/openpype/hosts/nuke/vendor/google/protobuf/source_context_pb2.py new file mode 100644 index 0000000000..30cca2e06e --- /dev/null +++ b/openpype/hosts/nuke/vendor/google/protobuf/source_context_pb2.py @@ -0,0 +1,26 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: google/protobuf/source_context.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n$google/protobuf/source_context.proto\x12\x0fgoogle.protobuf\"\"\n\rSourceContext\x12\x11\n\tfile_name\x18\x01 \x01(\tB\x8a\x01\n\x13\x63om.google.protobufB\x12SourceContextProtoP\x01Z6google.golang.org/protobuf/types/known/sourcecontextpb\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3') + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.source_context_pb2', globals()) +if _descriptor._USE_C_DESCRIPTORS == False: + + DESCRIPTOR._options = None + DESCRIPTOR._serialized_options = b'\n\023com.google.protobufB\022SourceContextProtoP\001Z6google.golang.org/protobuf/types/known/sourcecontextpb\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes' + _SOURCECONTEXT._serialized_start=57 + _SOURCECONTEXT._serialized_end=91 +# @@protoc_insertion_point(module_scope) diff --git a/openpype/hosts/nuke/vendor/google/protobuf/struct_pb2.py b/openpype/hosts/nuke/vendor/google/protobuf/struct_pb2.py new file mode 100644 index 0000000000..149728ca08 --- /dev/null +++ b/openpype/hosts/nuke/vendor/google/protobuf/struct_pb2.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: google/protobuf/struct.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1cgoogle/protobuf/struct.proto\x12\x0fgoogle.protobuf\"\x84\x01\n\x06Struct\x12\x33\n\x06\x66ields\x18\x01 \x03(\x0b\x32#.google.protobuf.Struct.FieldsEntry\x1a\x45\n\x0b\x46ieldsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12%\n\x05value\x18\x02 \x01(\x0b\x32\x16.google.protobuf.Value:\x02\x38\x01\"\xea\x01\n\x05Value\x12\x30\n\nnull_value\x18\x01 \x01(\x0e\x32\x1a.google.protobuf.NullValueH\x00\x12\x16\n\x0cnumber_value\x18\x02 \x01(\x01H\x00\x12\x16\n\x0cstring_value\x18\x03 \x01(\tH\x00\x12\x14\n\nbool_value\x18\x04 \x01(\x08H\x00\x12/\n\x0cstruct_value\x18\x05 \x01(\x0b\x32\x17.google.protobuf.StructH\x00\x12\x30\n\nlist_value\x18\x06 \x01(\x0b\x32\x1a.google.protobuf.ListValueH\x00\x42\x06\n\x04kind\"3\n\tListValue\x12&\n\x06values\x18\x01 \x03(\x0b\x32\x16.google.protobuf.Value*\x1b\n\tNullValue\x12\x0e\n\nNULL_VALUE\x10\x00\x42\x7f\n\x13\x63om.google.protobufB\x0bStructProtoP\x01Z/google.golang.org/protobuf/types/known/structpb\xf8\x01\x01\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3') + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.struct_pb2', globals()) +if _descriptor._USE_C_DESCRIPTORS == False: + + DESCRIPTOR._options = None + DESCRIPTOR._serialized_options = 
b'\n\023com.google.protobufB\013StructProtoP\001Z/google.golang.org/protobuf/types/known/structpb\370\001\001\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes' + _STRUCT_FIELDSENTRY._options = None + _STRUCT_FIELDSENTRY._serialized_options = b'8\001' + _NULLVALUE._serialized_start=474 + _NULLVALUE._serialized_end=501 + _STRUCT._serialized_start=50 + _STRUCT._serialized_end=182 + _STRUCT_FIELDSENTRY._serialized_start=113 + _STRUCT_FIELDSENTRY._serialized_end=182 + _VALUE._serialized_start=185 + _VALUE._serialized_end=419 + _LISTVALUE._serialized_start=421 + _LISTVALUE._serialized_end=472 +# @@protoc_insertion_point(module_scope) diff --git a/openpype/hosts/nuke/vendor/google/protobuf/symbol_database.py b/openpype/hosts/nuke/vendor/google/protobuf/symbol_database.py new file mode 100644 index 0000000000..fdcf8cf06c --- /dev/null +++ b/openpype/hosts/nuke/vendor/google/protobuf/symbol_database.py @@ -0,0 +1,194 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. +# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +"""A database of Python protocol buffer generated symbols. + +SymbolDatabase is the MessageFactory for messages generated at compile time, +and makes it easy to create new instances of a registered type, given only the +type's protocol buffer symbol name. + +Example usage:: + + db = symbol_database.SymbolDatabase() + + # Register symbols of interest, from one or multiple files. 
+ db.RegisterFileDescriptor(my_proto_pb2.DESCRIPTOR) + db.RegisterMessage(my_proto_pb2.MyMessage) + db.RegisterEnumDescriptor(my_proto_pb2.MyEnum.DESCRIPTOR) + + # The database can be used as a MessageFactory, to generate types based on + # their name: + types = db.GetMessages(['my_proto.proto']) + my_message_instance = types['MyMessage']() + + # The database's underlying descriptor pool can be queried, so it's not + # necessary to know a type's filename to be able to generate it: + filename = db.pool.FindFileContainingSymbol('MyMessage') + my_message_instance = db.GetMessages([filename])['MyMessage']() + + # This functionality is also provided directly via a convenience method: + my_message_instance = db.GetSymbol('MyMessage')() +""" + + +from google.protobuf.internal import api_implementation +from google.protobuf import descriptor_pool +from google.protobuf import message_factory + + +class SymbolDatabase(message_factory.MessageFactory): + """A database of Python generated symbols.""" + + def RegisterMessage(self, message): + """Registers the given message type in the local database. + + Calls to GetSymbol() and GetMessages() will return messages registered here. + + Args: + message: A :class:`google.protobuf.message.Message` subclass (or + instance); its descriptor will be registered. + + Returns: + The provided message. + """ + + desc = message.DESCRIPTOR + self._classes[desc] = message + self.RegisterMessageDescriptor(desc) + return message + + def RegisterMessageDescriptor(self, message_descriptor): + """Registers the given message descriptor in the local database. + + Args: + message_descriptor (Descriptor): the message descriptor to add. + """ + if api_implementation.Type() == 'python': + # pylint: disable=protected-access + self.pool._AddDescriptor(message_descriptor) + + def RegisterEnumDescriptor(self, enum_descriptor): + """Registers the given enum descriptor in the local database. + + Args: + enum_descriptor (EnumDescriptor): The enum descriptor to register. + + Returns: + EnumDescriptor: The provided descriptor. + """ + if api_implementation.Type() == 'python': + # pylint: disable=protected-access + self.pool._AddEnumDescriptor(enum_descriptor) + return enum_descriptor + + def RegisterServiceDescriptor(self, service_descriptor): + """Registers the given service descriptor in the local database. + + Args: + service_descriptor (ServiceDescriptor): the service descriptor to + register. + """ + if api_implementation.Type() == 'python': + # pylint: disable=protected-access + self.pool._AddServiceDescriptor(service_descriptor) + + def RegisterFileDescriptor(self, file_descriptor): + """Registers the given file descriptor in the local database. + + Args: + file_descriptor (FileDescriptor): The file descriptor to register. + """ + if api_implementation.Type() == 'python': + # pylint: disable=protected-access + self.pool._InternalAddFileDescriptor(file_descriptor) + + def GetSymbol(self, symbol): + """Tries to find a symbol in the local database. + + Currently, this method only returns message.Message instances, however, if + may be extended in future to support other symbol types. + + Args: + symbol (str): a protocol buffer symbol. + + Returns: + A Python class corresponding to the symbol. + + Raises: + KeyError: if the symbol could not be found. + """ + + return self._classes[self.pool.FindMessageTypeByName(symbol)] + + def GetMessages(self, files): + # TODO(amauryfa): Fix the differences with MessageFactory. + """Gets all registered messages from a specified file. 
+ + Only messages already created and registered will be returned; (this is the + case for imported _pb2 modules) + But unlike MessageFactory, this version also returns already defined nested + messages, but does not register any message extensions. + + Args: + files (list[str]): The file names to extract messages from. + + Returns: + A dictionary mapping proto names to the message classes. + + Raises: + KeyError: if a file could not be found. + """ + + def _GetAllMessages(desc): + """Walk a message Descriptor and recursively yields all message names.""" + yield desc + for msg_desc in desc.nested_types: + for nested_desc in _GetAllMessages(msg_desc): + yield nested_desc + + result = {} + for file_name in files: + file_desc = self.pool.FindFileByName(file_name) + for msg_desc in file_desc.message_types_by_name.values(): + for desc in _GetAllMessages(msg_desc): + try: + result[desc.full_name] = self._classes[desc] + except KeyError: + # This descriptor has no registered class, skip it. + pass + return result + + +_DEFAULT = SymbolDatabase(pool=descriptor_pool.Default()) + + +def Default(): + """Returns the default SymbolDatabase.""" + return _DEFAULT diff --git a/openpype/hosts/nuke/vendor/google/protobuf/text_encoding.py b/openpype/hosts/nuke/vendor/google/protobuf/text_encoding.py new file mode 100644 index 0000000000..759cf11f62 --- /dev/null +++ b/openpype/hosts/nuke/vendor/google/protobuf/text_encoding.py @@ -0,0 +1,110 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. +# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ +"""Encoding related utilities.""" +import re + +_cescape_chr_to_symbol_map = {} +_cescape_chr_to_symbol_map[9] = r'\t' # optional escape +_cescape_chr_to_symbol_map[10] = r'\n' # optional escape +_cescape_chr_to_symbol_map[13] = r'\r' # optional escape +_cescape_chr_to_symbol_map[34] = r'\"' # necessary escape +_cescape_chr_to_symbol_map[39] = r"\'" # optional escape +_cescape_chr_to_symbol_map[92] = r'\\' # necessary escape + +# Lookup table for unicode +_cescape_unicode_to_str = [chr(i) for i in range(0, 256)] +for byte, string in _cescape_chr_to_symbol_map.items(): + _cescape_unicode_to_str[byte] = string + +# Lookup table for non-utf8, with necessary escapes at (o >= 127 or o < 32) +_cescape_byte_to_str = ([r'\%03o' % i for i in range(0, 32)] + + [chr(i) for i in range(32, 127)] + + [r'\%03o' % i for i in range(127, 256)]) +for byte, string in _cescape_chr_to_symbol_map.items(): + _cescape_byte_to_str[byte] = string +del byte, string + + +def CEscape(text, as_utf8): + # type: (...) -> str + """Escape a bytes string for use in an text protocol buffer. + + Args: + text: A byte string to be escaped. + as_utf8: Specifies if result may contain non-ASCII characters. + In Python 3 this allows unescaped non-ASCII Unicode characters. + In Python 2 the return value will be valid UTF-8 rather than only ASCII. + Returns: + Escaped string (str). + """ + # Python's text.encode() 'string_escape' or 'unicode_escape' codecs do not + # satisfy our needs; they encodes unprintable characters using two-digit hex + # escapes whereas our C++ unescaping function allows hex escapes to be any + # length. So, "\0011".encode('string_escape') ends up being "\\x011", which + # will be decoded in C++ as a single-character string with char code 0x11. + text_is_unicode = isinstance(text, str) + if as_utf8 and text_is_unicode: + # We're already unicode, no processing beyond control char escapes. + return text.translate(_cescape_chr_to_symbol_map) + ord_ = ord if text_is_unicode else lambda x: x # bytes iterate as ints. + if as_utf8: + return ''.join(_cescape_unicode_to_str[ord_(c)] for c in text) + return ''.join(_cescape_byte_to_str[ord_(c)] for c in text) + + +_CUNESCAPE_HEX = re.compile(r'(\\+)x([0-9a-fA-F])(?![0-9a-fA-F])') + + +def CUnescape(text): + # type: (str) -> bytes + """Unescape a text string with C-style escape sequences to UTF-8 bytes. + + Args: + text: The data to parse in a str. + Returns: + A byte string. + """ + + def ReplaceHex(m): + # Only replace the match if the number of leading back slashes is odd. i.e. + # the slash itself is not escaped. + if len(m.group(1)) & 1: + return m.group(1) + 'x0' + m.group(2) + return m.group(0) + + # This is required because the 'string_escape' encoding doesn't + # allow single-digit hex escapes (like '\xf'). + result = _CUNESCAPE_HEX.sub(ReplaceHex, text) + + return (result.encode('utf-8') # Make it bytes to allow decode. + .decode('unicode_escape') + # Make it bytes again to return the proper type. + .encode('raw_unicode_escape')) diff --git a/openpype/hosts/nuke/vendor/google/protobuf/text_format.py b/openpype/hosts/nuke/vendor/google/protobuf/text_format.py new file mode 100644 index 0000000000..412385c26f --- /dev/null +++ b/openpype/hosts/nuke/vendor/google/protobuf/text_format.py @@ -0,0 +1,1795 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. 
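CEscape() and CUnescape() above are intended to round-trip arbitrary bytes through the text-format escape syntax. A small sketch, assuming only that this vendored google.protobuf package is importable:

    from google.protobuf import text_encoding

    raw = b'\x00\x07"quoted"\n\xff'
    escaped = text_encoding.CEscape(raw, as_utf8=False)
    assert isinstance(escaped, str)          # e.g. '\000\007\"quoted\"\n\377'
    assert text_encoding.CUnescape(escaped) == raw   # escapes decode back to the original bytes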
+# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +"""Contains routines for printing protocol messages in text format. + +Simple usage example:: + + # Create a proto object and serialize it to a text proto string. + message = my_proto_pb2.MyMessage(foo='bar') + text_proto = text_format.MessageToString(message) + + # Parse a text proto string. + message = text_format.Parse(text_proto, my_proto_pb2.MyMessage()) +""" + +__author__ = 'kenton@google.com (Kenton Varda)' + +# TODO(b/129989314) Import thread contention leads to test failures. 
+import encodings.raw_unicode_escape # pylint: disable=unused-import +import encodings.unicode_escape # pylint: disable=unused-import +import io +import math +import re + +from google.protobuf.internal import decoder +from google.protobuf.internal import type_checkers +from google.protobuf import descriptor +from google.protobuf import text_encoding + +# pylint: disable=g-import-not-at-top +__all__ = ['MessageToString', 'Parse', 'PrintMessage', 'PrintField', + 'PrintFieldValue', 'Merge', 'MessageToBytes'] + +_INTEGER_CHECKERS = (type_checkers.Uint32ValueChecker(), + type_checkers.Int32ValueChecker(), + type_checkers.Uint64ValueChecker(), + type_checkers.Int64ValueChecker()) +_FLOAT_INFINITY = re.compile('-?inf(?:inity)?f?$', re.IGNORECASE) +_FLOAT_NAN = re.compile('nanf?$', re.IGNORECASE) +_QUOTES = frozenset(("'", '"')) +_ANY_FULL_TYPE_NAME = 'google.protobuf.Any' + + +class Error(Exception): + """Top-level module error for text_format.""" + + +class ParseError(Error): + """Thrown in case of text parsing or tokenizing error.""" + + def __init__(self, message=None, line=None, column=None): + if message is not None and line is not None: + loc = str(line) + if column is not None: + loc += ':{0}'.format(column) + message = '{0} : {1}'.format(loc, message) + if message is not None: + super(ParseError, self).__init__(message) + else: + super(ParseError, self).__init__() + self._line = line + self._column = column + + def GetLine(self): + return self._line + + def GetColumn(self): + return self._column + + +class TextWriter(object): + + def __init__(self, as_utf8): + self._writer = io.StringIO() + + def write(self, val): + return self._writer.write(val) + + def close(self): + return self._writer.close() + + def getvalue(self): + return self._writer.getvalue() + + +def MessageToString( + message, + as_utf8=False, + as_one_line=False, + use_short_repeated_primitives=False, + pointy_brackets=False, + use_index_order=False, + float_format=None, + double_format=None, + use_field_number=False, + descriptor_pool=None, + indent=0, + message_formatter=None, + print_unknown_fields=False, + force_colon=False): + # type: (...) -> str + """Convert protobuf message to text format. + + Double values can be formatted compactly with 15 digits of + precision (which is the most that IEEE 754 "double" can guarantee) + using double_format='.15g'. To ensure that converting to text and back to a + proto will result in an identical value, double_format='.17g' should be used. + + Args: + message: The protocol buffers message. + as_utf8: Return unescaped Unicode for non-ASCII characters. + In Python 3 actual Unicode characters may appear as is in strings. + In Python 2 the return value will be valid UTF-8 rather than only ASCII. + as_one_line: Don't introduce newlines between fields. + use_short_repeated_primitives: Use short repeated format for primitives. + pointy_brackets: If True, use angle brackets instead of curly braces for + nesting. + use_index_order: If True, fields of a proto message will be printed using + the order defined in source code instead of the field number, extensions + will be printed at the end of the message and their relative order is + determined by the extension number. By default, use the field number + order. + float_format (str): If set, use this to specify float field formatting + (per the "Format Specification Mini-Language"); otherwise, shortest float + that has same value in wire will be printed. Also affect double field + if double_format is not set but float_format is set. 
+ double_format (str): If set, use this to specify double field formatting + (per the "Format Specification Mini-Language"); if it is not set but + float_format is set, use float_format. Otherwise, use ``str()`` + use_field_number: If True, print field numbers instead of names. + descriptor_pool (DescriptorPool): Descriptor pool used to resolve Any types. + indent (int): The initial indent level, in terms of spaces, for pretty + print. + message_formatter (function(message, indent, as_one_line) -> unicode|None): + Custom formatter for selected sub-messages (usually based on message + type). Use to pretty print parts of the protobuf for easier diffing. + print_unknown_fields: If True, unknown fields will be printed. + force_colon: If set, a colon will be added after the field name even if the + field is a proto message. + + Returns: + str: A string of the text formatted protocol buffer message. + """ + out = TextWriter(as_utf8) + printer = _Printer( + out, + indent, + as_utf8, + as_one_line, + use_short_repeated_primitives, + pointy_brackets, + use_index_order, + float_format, + double_format, + use_field_number, + descriptor_pool, + message_formatter, + print_unknown_fields=print_unknown_fields, + force_colon=force_colon) + printer.PrintMessage(message) + result = out.getvalue() + out.close() + if as_one_line: + return result.rstrip() + return result + + +def MessageToBytes(message, **kwargs): + # type: (...) -> bytes + """Convert protobuf message to encoded text format. See MessageToString.""" + text = MessageToString(message, **kwargs) + if isinstance(text, bytes): + return text + codec = 'utf-8' if kwargs.get('as_utf8') else 'ascii' + return text.encode(codec) + + +def _IsMapEntry(field): + return (field.type == descriptor.FieldDescriptor.TYPE_MESSAGE and + field.message_type.has_options and + field.message_type.GetOptions().map_entry) + + +def PrintMessage(message, + out, + indent=0, + as_utf8=False, + as_one_line=False, + use_short_repeated_primitives=False, + pointy_brackets=False, + use_index_order=False, + float_format=None, + double_format=None, + use_field_number=False, + descriptor_pool=None, + message_formatter=None, + print_unknown_fields=False, + force_colon=False): + printer = _Printer( + out=out, indent=indent, as_utf8=as_utf8, + as_one_line=as_one_line, + use_short_repeated_primitives=use_short_repeated_primitives, + pointy_brackets=pointy_brackets, + use_index_order=use_index_order, + float_format=float_format, + double_format=double_format, + use_field_number=use_field_number, + descriptor_pool=descriptor_pool, + message_formatter=message_formatter, + print_unknown_fields=print_unknown_fields, + force_colon=force_colon) + printer.PrintMessage(message) + + +def PrintField(field, + value, + out, + indent=0, + as_utf8=False, + as_one_line=False, + use_short_repeated_primitives=False, + pointy_brackets=False, + use_index_order=False, + float_format=None, + double_format=None, + message_formatter=None, + print_unknown_fields=False, + force_colon=False): + """Print a single field name/value pair.""" + printer = _Printer(out, indent, as_utf8, as_one_line, + use_short_repeated_primitives, pointy_brackets, + use_index_order, float_format, double_format, + message_formatter=message_formatter, + print_unknown_fields=print_unknown_fields, + force_colon=force_colon) + printer.PrintField(field, value) + + +def PrintFieldValue(field, + value, + out, + indent=0, + as_utf8=False, + as_one_line=False, + use_short_repeated_primitives=False, + pointy_brackets=False, + use_index_order=False, 
+ float_format=None, + double_format=None, + message_formatter=None, + print_unknown_fields=False, + force_colon=False): + """Print a single field value (not including name).""" + printer = _Printer(out, indent, as_utf8, as_one_line, + use_short_repeated_primitives, pointy_brackets, + use_index_order, float_format, double_format, + message_formatter=message_formatter, + print_unknown_fields=print_unknown_fields, + force_colon=force_colon) + printer.PrintFieldValue(field, value) + + +def _BuildMessageFromTypeName(type_name, descriptor_pool): + """Returns a protobuf message instance. + + Args: + type_name: Fully-qualified protobuf message type name string. + descriptor_pool: DescriptorPool instance. + + Returns: + A Message instance of type matching type_name, or None if the a Descriptor + wasn't found matching type_name. + """ + # pylint: disable=g-import-not-at-top + if descriptor_pool is None: + from google.protobuf import descriptor_pool as pool_mod + descriptor_pool = pool_mod.Default() + from google.protobuf import symbol_database + database = symbol_database.Default() + try: + message_descriptor = descriptor_pool.FindMessageTypeByName(type_name) + except KeyError: + return None + message_type = database.GetPrototype(message_descriptor) + return message_type() + + +# These values must match WireType enum in google/protobuf/wire_format.h. +WIRETYPE_LENGTH_DELIMITED = 2 +WIRETYPE_START_GROUP = 3 + + +class _Printer(object): + """Text format printer for protocol message.""" + + def __init__( + self, + out, + indent=0, + as_utf8=False, + as_one_line=False, + use_short_repeated_primitives=False, + pointy_brackets=False, + use_index_order=False, + float_format=None, + double_format=None, + use_field_number=False, + descriptor_pool=None, + message_formatter=None, + print_unknown_fields=False, + force_colon=False): + """Initialize the Printer. + + Double values can be formatted compactly with 15 digits of precision + (which is the most that IEEE 754 "double" can guarantee) using + double_format='.15g'. To ensure that converting to text and back to a proto + will result in an identical value, double_format='.17g' should be used. + + Args: + out: To record the text format result. + indent: The initial indent level for pretty print. + as_utf8: Return unescaped Unicode for non-ASCII characters. + In Python 3 actual Unicode characters may appear as is in strings. + In Python 2 the return value will be valid UTF-8 rather than ASCII. + as_one_line: Don't introduce newlines between fields. + use_short_repeated_primitives: Use short repeated format for primitives. + pointy_brackets: If True, use angle brackets instead of curly braces for + nesting. + use_index_order: If True, print fields of a proto message using the order + defined in source code instead of the field number. By default, use the + field number order. + float_format: If set, use this to specify float field formatting + (per the "Format Specification Mini-Language"); otherwise, shortest + float that has same value in wire will be printed. Also affect double + field if double_format is not set but float_format is set. + double_format: If set, use this to specify double field formatting + (per the "Format Specification Mini-Language"); if it is not set but + float_format is set, use float_format. Otherwise, str() is used. + use_field_number: If True, print field numbers instead of names. + descriptor_pool: A DescriptorPool used to resolve Any types. 
+ message_formatter: A function(message, indent, as_one_line): unicode|None + to custom format selected sub-messages (usually based on message type). + Use to pretty print parts of the protobuf for easier diffing. + print_unknown_fields: If True, unknown fields will be printed. + force_colon: If set, a colon will be added after the field name even if + the field is a proto message. + """ + self.out = out + self.indent = indent + self.as_utf8 = as_utf8 + self.as_one_line = as_one_line + self.use_short_repeated_primitives = use_short_repeated_primitives + self.pointy_brackets = pointy_brackets + self.use_index_order = use_index_order + self.float_format = float_format + if double_format is not None: + self.double_format = double_format + else: + self.double_format = float_format + self.use_field_number = use_field_number + self.descriptor_pool = descriptor_pool + self.message_formatter = message_formatter + self.print_unknown_fields = print_unknown_fields + self.force_colon = force_colon + + def _TryPrintAsAnyMessage(self, message): + """Serializes if message is a google.protobuf.Any field.""" + if '/' not in message.type_url: + return False + packed_message = _BuildMessageFromTypeName(message.TypeName(), + self.descriptor_pool) + if packed_message: + packed_message.MergeFromString(message.value) + colon = ':' if self.force_colon else '' + self.out.write('%s[%s]%s ' % (self.indent * ' ', message.type_url, colon)) + self._PrintMessageFieldValue(packed_message) + self.out.write(' ' if self.as_one_line else '\n') + return True + else: + return False + + def _TryCustomFormatMessage(self, message): + formatted = self.message_formatter(message, self.indent, self.as_one_line) + if formatted is None: + return False + + out = self.out + out.write(' ' * self.indent) + out.write(formatted) + out.write(' ' if self.as_one_line else '\n') + return True + + def PrintMessage(self, message): + """Convert protobuf message to text format. + + Args: + message: The protocol buffers message. + """ + if self.message_formatter and self._TryCustomFormatMessage(message): + return + if (message.DESCRIPTOR.full_name == _ANY_FULL_TYPE_NAME and + self._TryPrintAsAnyMessage(message)): + return + fields = message.ListFields() + if self.use_index_order: + fields.sort( + key=lambda x: x[0].number if x[0].is_extension else x[0].index) + for field, value in fields: + if _IsMapEntry(field): + for key in sorted(value): + # This is slow for maps with submessage entries because it copies the + # entire tree. Unfortunately this would take significant refactoring + # of this file to work around. + # + # TODO(haberman): refactor and optimize if this becomes an issue. 
+ entry_submsg = value.GetEntryClass()(key=key, value=value[key]) + self.PrintField(field, entry_submsg) + elif field.label == descriptor.FieldDescriptor.LABEL_REPEATED: + if (self.use_short_repeated_primitives + and field.cpp_type != descriptor.FieldDescriptor.CPPTYPE_MESSAGE + and field.cpp_type != descriptor.FieldDescriptor.CPPTYPE_STRING): + self._PrintShortRepeatedPrimitivesValue(field, value) + else: + for element in value: + self.PrintField(field, element) + else: + self.PrintField(field, value) + + if self.print_unknown_fields: + self._PrintUnknownFields(message.UnknownFields()) + + def _PrintUnknownFields(self, unknown_fields): + """Print unknown fields.""" + out = self.out + for field in unknown_fields: + out.write(' ' * self.indent) + out.write(str(field.field_number)) + if field.wire_type == WIRETYPE_START_GROUP: + if self.as_one_line: + out.write(' { ') + else: + out.write(' {\n') + self.indent += 2 + + self._PrintUnknownFields(field.data) + + if self.as_one_line: + out.write('} ') + else: + self.indent -= 2 + out.write(' ' * self.indent + '}\n') + elif field.wire_type == WIRETYPE_LENGTH_DELIMITED: + try: + # If this field is parseable as a Message, it is probably + # an embedded message. + # pylint: disable=protected-access + (embedded_unknown_message, pos) = decoder._DecodeUnknownFieldSet( + memoryview(field.data), 0, len(field.data)) + except Exception: # pylint: disable=broad-except + pos = 0 + + if pos == len(field.data): + if self.as_one_line: + out.write(' { ') + else: + out.write(' {\n') + self.indent += 2 + + self._PrintUnknownFields(embedded_unknown_message) + + if self.as_one_line: + out.write('} ') + else: + self.indent -= 2 + out.write(' ' * self.indent + '}\n') + else: + # A string or bytes field. self.as_utf8 may not work. + out.write(': \"') + out.write(text_encoding.CEscape(field.data, False)) + out.write('\" ' if self.as_one_line else '\"\n') + else: + # varint, fixed32, fixed64 + out.write(': ') + out.write(str(field.data)) + out.write(' ' if self.as_one_line else '\n') + + def _PrintFieldName(self, field): + """Print field name.""" + out = self.out + out.write(' ' * self.indent) + if self.use_field_number: + out.write(str(field.number)) + else: + if field.is_extension: + out.write('[') + if (field.containing_type.GetOptions().message_set_wire_format and + field.type == descriptor.FieldDescriptor.TYPE_MESSAGE and + field.label == descriptor.FieldDescriptor.LABEL_OPTIONAL): + out.write(field.message_type.full_name) + else: + out.write(field.full_name) + out.write(']') + elif field.type == descriptor.FieldDescriptor.TYPE_GROUP: + # For groups, use the capitalized name. + out.write(field.message_type.name) + else: + out.write(field.name) + + if (self.force_colon or + field.cpp_type != descriptor.FieldDescriptor.CPPTYPE_MESSAGE): + # The colon is optional in this case, but our cross-language golden files + # don't include it. Here, the colon is only included if force_colon is + # set to True + out.write(':') + + def PrintField(self, field, value): + """Print a single field name/value pair.""" + self._PrintFieldName(field) + self.out.write(' ') + self.PrintFieldValue(field, value) + self.out.write(' ' if self.as_one_line else '\n') + + def _PrintShortRepeatedPrimitivesValue(self, field, value): + """"Prints short repeated primitives value.""" + # Note: this is called only when value has at least one element. 
+ self._PrintFieldName(field) + self.out.write(' [') + for i in range(len(value) - 1): + self.PrintFieldValue(field, value[i]) + self.out.write(', ') + self.PrintFieldValue(field, value[-1]) + self.out.write(']') + self.out.write(' ' if self.as_one_line else '\n') + + def _PrintMessageFieldValue(self, value): + if self.pointy_brackets: + openb = '<' + closeb = '>' + else: + openb = '{' + closeb = '}' + + if self.as_one_line: + self.out.write('%s ' % openb) + self.PrintMessage(value) + self.out.write(closeb) + else: + self.out.write('%s\n' % openb) + self.indent += 2 + self.PrintMessage(value) + self.indent -= 2 + self.out.write(' ' * self.indent + closeb) + + def PrintFieldValue(self, field, value): + """Print a single field value (not including name). + + For repeated fields, the value should be a single element. + + Args: + field: The descriptor of the field to be printed. + value: The value of the field. + """ + out = self.out + if field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE: + self._PrintMessageFieldValue(value) + elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_ENUM: + enum_value = field.enum_type.values_by_number.get(value, None) + if enum_value is not None: + out.write(enum_value.name) + else: + out.write(str(value)) + elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_STRING: + out.write('\"') + if isinstance(value, str) and not self.as_utf8: + out_value = value.encode('utf-8') + else: + out_value = value + if field.type == descriptor.FieldDescriptor.TYPE_BYTES: + # We always need to escape all binary data in TYPE_BYTES fields. + out_as_utf8 = False + else: + out_as_utf8 = self.as_utf8 + out.write(text_encoding.CEscape(out_value, out_as_utf8)) + out.write('\"') + elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_BOOL: + if value: + out.write('true') + else: + out.write('false') + elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_FLOAT: + if self.float_format is not None: + out.write('{1:{0}}'.format(self.float_format, value)) + else: + if math.isnan(value): + out.write(str(value)) + else: + out.write(str(type_checkers.ToShortestFloat(value))) + elif (field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_DOUBLE and + self.double_format is not None): + out.write('{1:{0}}'.format(self.double_format, value)) + else: + out.write(str(value)) + + +def Parse(text, + message, + allow_unknown_extension=False, + allow_field_number=False, + descriptor_pool=None, + allow_unknown_field=False): + """Parses a text representation of a protocol message into a message. + + NOTE: for historical reasons this function does not clear the input + message. This is different from what the binary msg.ParseFrom(...) does. + If text contains a field already set in message, the value is appended if the + field is repeated. Otherwise, an error is raised. + + Example:: + + a = MyProto() + a.repeated_field.append('test') + b = MyProto() + + # Repeated fields are combined + text_format.Parse(repr(a), b) + text_format.Parse(repr(a), b) # repeated_field contains ["test", "test"] + + # Non-repeated fields cannot be overwritten + a.singular_field = 1 + b.singular_field = 2 + text_format.Parse(repr(a), b) # ParseError + + # Binary version: + b.ParseFromString(a.SerializeToString()) # repeated_field is now "test" + + Caller is responsible for clearing the message as needed. + + Args: + text (str): Message text representation. + message (Message): A protocol buffer message to merge into. 
+ allow_unknown_extension: if True, skip over missing extensions and keep + parsing + allow_field_number: if True, both field number and field name are allowed. + descriptor_pool (DescriptorPool): Descriptor pool used to resolve Any types. + allow_unknown_field: if True, skip over unknown field and keep + parsing. Avoid to use this option if possible. It may hide some + errors (e.g. spelling error on field name) + + Returns: + Message: The same message passed as argument. + + Raises: + ParseError: On text parsing problems. + """ + return ParseLines(text.split(b'\n' if isinstance(text, bytes) else u'\n'), + message, + allow_unknown_extension, + allow_field_number, + descriptor_pool=descriptor_pool, + allow_unknown_field=allow_unknown_field) + + +def Merge(text, + message, + allow_unknown_extension=False, + allow_field_number=False, + descriptor_pool=None, + allow_unknown_field=False): + """Parses a text representation of a protocol message into a message. + + Like Parse(), but allows repeated values for a non-repeated field, and uses + the last one. This means any non-repeated, top-level fields specified in text + replace those in the message. + + Args: + text (str): Message text representation. + message (Message): A protocol buffer message to merge into. + allow_unknown_extension: if True, skip over missing extensions and keep + parsing + allow_field_number: if True, both field number and field name are allowed. + descriptor_pool (DescriptorPool): Descriptor pool used to resolve Any types. + allow_unknown_field: if True, skip over unknown field and keep + parsing. Avoid to use this option if possible. It may hide some + errors (e.g. spelling error on field name) + + Returns: + Message: The same message passed as argument. + + Raises: + ParseError: On text parsing problems. + """ + return MergeLines( + text.split(b'\n' if isinstance(text, bytes) else u'\n'), + message, + allow_unknown_extension, + allow_field_number, + descriptor_pool=descriptor_pool, + allow_unknown_field=allow_unknown_field) + + +def ParseLines(lines, + message, + allow_unknown_extension=False, + allow_field_number=False, + descriptor_pool=None, + allow_unknown_field=False): + """Parses a text representation of a protocol message into a message. + + See Parse() for caveats. + + Args: + lines: An iterable of lines of a message's text representation. + message: A protocol buffer message to merge into. + allow_unknown_extension: if True, skip over missing extensions and keep + parsing + allow_field_number: if True, both field number and field name are allowed. + descriptor_pool: A DescriptorPool used to resolve Any types. + allow_unknown_field: if True, skip over unknown field and keep + parsing. Avoid to use this option if possible. It may hide some + errors (e.g. spelling error on field name) + + Returns: + The same message passed as argument. + + Raises: + ParseError: On text parsing problems. + """ + parser = _Parser(allow_unknown_extension, + allow_field_number, + descriptor_pool=descriptor_pool, + allow_unknown_field=allow_unknown_field) + return parser.ParseLines(lines, message) + + +def MergeLines(lines, + message, + allow_unknown_extension=False, + allow_field_number=False, + descriptor_pool=None, + allow_unknown_field=False): + """Parses a text representation of a protocol message into a message. + + See Merge() for more details. + + Args: + lines: An iterable of lines of a message's text representation. + message: A protocol buffer message to merge into. 
+ allow_unknown_extension: if True, skip over missing extensions and keep + parsing + allow_field_number: if True, both field number and field name are allowed. + descriptor_pool: A DescriptorPool used to resolve Any types. + allow_unknown_field: if True, skip over unknown field and keep + parsing. Avoid to use this option if possible. It may hide some + errors (e.g. spelling error on field name) + + Returns: + The same message passed as argument. + + Raises: + ParseError: On text parsing problems. + """ + parser = _Parser(allow_unknown_extension, + allow_field_number, + descriptor_pool=descriptor_pool, + allow_unknown_field=allow_unknown_field) + return parser.MergeLines(lines, message) + + +class _Parser(object): + """Text format parser for protocol message.""" + + def __init__(self, + allow_unknown_extension=False, + allow_field_number=False, + descriptor_pool=None, + allow_unknown_field=False): + self.allow_unknown_extension = allow_unknown_extension + self.allow_field_number = allow_field_number + self.descriptor_pool = descriptor_pool + self.allow_unknown_field = allow_unknown_field + + def ParseLines(self, lines, message): + """Parses a text representation of a protocol message into a message.""" + self._allow_multiple_scalars = False + self._ParseOrMerge(lines, message) + return message + + def MergeLines(self, lines, message): + """Merges a text representation of a protocol message into a message.""" + self._allow_multiple_scalars = True + self._ParseOrMerge(lines, message) + return message + + def _ParseOrMerge(self, lines, message): + """Converts a text representation of a protocol message into a message. + + Args: + lines: Lines of a message's text representation. + message: A protocol buffer message to merge into. + + Raises: + ParseError: On text parsing problems. + """ + # Tokenize expects native str lines. + str_lines = ( + line if isinstance(line, str) else line.decode('utf-8') + for line in lines) + tokenizer = Tokenizer(str_lines) + while not tokenizer.AtEnd(): + self._MergeField(tokenizer, message) + + def _MergeField(self, tokenizer, message): + """Merges a single protocol message field into a message. + + Args: + tokenizer: A tokenizer to parse the field name and values. + message: A protocol message to record the data. + + Raises: + ParseError: In case of text parsing problems. + """ + message_descriptor = message.DESCRIPTOR + if (message_descriptor.full_name == _ANY_FULL_TYPE_NAME and + tokenizer.TryConsume('[')): + type_url_prefix, packed_type_name = self._ConsumeAnyTypeUrl(tokenizer) + tokenizer.Consume(']') + tokenizer.TryConsume(':') + if tokenizer.TryConsume('<'): + expanded_any_end_token = '>' + else: + tokenizer.Consume('{') + expanded_any_end_token = '}' + expanded_any_sub_message = _BuildMessageFromTypeName(packed_type_name, + self.descriptor_pool) + if not expanded_any_sub_message: + raise ParseError('Type %s not found in descriptor pool' % + packed_type_name) + while not tokenizer.TryConsume(expanded_any_end_token): + if tokenizer.AtEnd(): + raise tokenizer.ParseErrorPreviousToken('Expected "%s".' 
% + (expanded_any_end_token,)) + self._MergeField(tokenizer, expanded_any_sub_message) + deterministic = False + + message.Pack(expanded_any_sub_message, + type_url_prefix=type_url_prefix, + deterministic=deterministic) + return + + if tokenizer.TryConsume('['): + name = [tokenizer.ConsumeIdentifier()] + while tokenizer.TryConsume('.'): + name.append(tokenizer.ConsumeIdentifier()) + name = '.'.join(name) + + if not message_descriptor.is_extendable: + raise tokenizer.ParseErrorPreviousToken( + 'Message type "%s" does not have extensions.' % + message_descriptor.full_name) + # pylint: disable=protected-access + field = message.Extensions._FindExtensionByName(name) + # pylint: enable=protected-access + + + if not field: + if self.allow_unknown_extension: + field = None + else: + raise tokenizer.ParseErrorPreviousToken( + 'Extension "%s" not registered. ' + 'Did you import the _pb2 module which defines it? ' + 'If you are trying to place the extension in the MessageSet ' + 'field of another message that is in an Any or MessageSet field, ' + 'that message\'s _pb2 module must be imported as well' % name) + elif message_descriptor != field.containing_type: + raise tokenizer.ParseErrorPreviousToken( + 'Extension "%s" does not extend message type "%s".' % + (name, message_descriptor.full_name)) + + tokenizer.Consume(']') + + else: + name = tokenizer.ConsumeIdentifierOrNumber() + if self.allow_field_number and name.isdigit(): + number = ParseInteger(name, True, True) + field = message_descriptor.fields_by_number.get(number, None) + if not field and message_descriptor.is_extendable: + field = message.Extensions._FindExtensionByNumber(number) + else: + field = message_descriptor.fields_by_name.get(name, None) + + # Group names are expected to be capitalized as they appear in the + # .proto file, which actually matches their type names, not their field + # names. + if not field: + field = message_descriptor.fields_by_name.get(name.lower(), None) + if field and field.type != descriptor.FieldDescriptor.TYPE_GROUP: + field = None + + if (field and field.type == descriptor.FieldDescriptor.TYPE_GROUP and + field.message_type.name != name): + field = None + + if not field and not self.allow_unknown_field: + raise tokenizer.ParseErrorPreviousToken( + 'Message type "%s" has no field named "%s".' % + (message_descriptor.full_name, name)) + + if field: + if not self._allow_multiple_scalars and field.containing_oneof: + # Check if there's a different field set in this oneof. + # Note that we ignore the case if the same field was set before, and we + # apply _allow_multiple_scalars to non-scalar fields as well. + which_oneof = message.WhichOneof(field.containing_oneof.name) + if which_oneof is not None and which_oneof != field.name: + raise tokenizer.ParseErrorPreviousToken( + 'Field "%s" is specified along with field "%s", another member ' + 'of oneof "%s" for message type "%s".' % + (field.name, which_oneof, field.containing_oneof.name, + message_descriptor.full_name)) + + if field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE: + tokenizer.TryConsume(':') + merger = self._MergeMessageField + else: + tokenizer.Consume(':') + merger = self._MergeScalarField + + if (field.label == descriptor.FieldDescriptor.LABEL_REPEATED and + tokenizer.TryConsume('[')): + # Short repeated format, e.g. 
"foo: [1, 2, 3]" + if not tokenizer.TryConsume(']'): + while True: + merger(tokenizer, message, field) + if tokenizer.TryConsume(']'): + break + tokenizer.Consume(',') + + else: + merger(tokenizer, message, field) + + else: # Proto field is unknown. + assert (self.allow_unknown_extension or self.allow_unknown_field) + _SkipFieldContents(tokenizer) + + # For historical reasons, fields may optionally be separated by commas or + # semicolons. + if not tokenizer.TryConsume(','): + tokenizer.TryConsume(';') + + + def _ConsumeAnyTypeUrl(self, tokenizer): + """Consumes a google.protobuf.Any type URL and returns the type name.""" + # Consume "type.googleapis.com/". + prefix = [tokenizer.ConsumeIdentifier()] + tokenizer.Consume('.') + prefix.append(tokenizer.ConsumeIdentifier()) + tokenizer.Consume('.') + prefix.append(tokenizer.ConsumeIdentifier()) + tokenizer.Consume('/') + # Consume the fully-qualified type name. + name = [tokenizer.ConsumeIdentifier()] + while tokenizer.TryConsume('.'): + name.append(tokenizer.ConsumeIdentifier()) + return '.'.join(prefix), '.'.join(name) + + def _MergeMessageField(self, tokenizer, message, field): + """Merges a single scalar field into a message. + + Args: + tokenizer: A tokenizer to parse the field value. + message: The message of which field is a member. + field: The descriptor of the field to be merged. + + Raises: + ParseError: In case of text parsing problems. + """ + is_map_entry = _IsMapEntry(field) + + if tokenizer.TryConsume('<'): + end_token = '>' + else: + tokenizer.Consume('{') + end_token = '}' + + if field.label == descriptor.FieldDescriptor.LABEL_REPEATED: + if field.is_extension: + sub_message = message.Extensions[field].add() + elif is_map_entry: + sub_message = getattr(message, field.name).GetEntryClass()() + else: + sub_message = getattr(message, field.name).add() + else: + if field.is_extension: + if (not self._allow_multiple_scalars and + message.HasExtension(field)): + raise tokenizer.ParseErrorPreviousToken( + 'Message type "%s" should not have multiple "%s" extensions.' % + (message.DESCRIPTOR.full_name, field.full_name)) + sub_message = message.Extensions[field] + else: + # Also apply _allow_multiple_scalars to message field. + # TODO(jieluo): Change to _allow_singular_overwrites. + if (not self._allow_multiple_scalars and + message.HasField(field.name)): + raise tokenizer.ParseErrorPreviousToken( + 'Message type "%s" should not have multiple "%s" fields.' % + (message.DESCRIPTOR.full_name, field.name)) + sub_message = getattr(message, field.name) + sub_message.SetInParent() + + while not tokenizer.TryConsume(end_token): + if tokenizer.AtEnd(): + raise tokenizer.ParseErrorPreviousToken('Expected "%s".' % (end_token,)) + self._MergeField(tokenizer, sub_message) + + if is_map_entry: + value_cpptype = field.message_type.fields_by_name['value'].cpp_type + if value_cpptype == descriptor.FieldDescriptor.CPPTYPE_MESSAGE: + value = getattr(message, field.name)[sub_message.key] + value.CopyFrom(sub_message.value) + else: + getattr(message, field.name)[sub_message.key] = sub_message.value + + @staticmethod + def _IsProto3Syntax(message): + message_descriptor = message.DESCRIPTOR + return (hasattr(message_descriptor, 'syntax') and + message_descriptor.syntax == 'proto3') + + def _MergeScalarField(self, tokenizer, message, field): + """Merges a single scalar field into a message. + + Args: + tokenizer: A tokenizer to parse the field value. + message: A protocol message to record the data. + field: The descriptor of the field to be merged. 
+ + Raises: + ParseError: In case of text parsing problems. + RuntimeError: On runtime errors. + """ + _ = self.allow_unknown_extension + value = None + + if field.type in (descriptor.FieldDescriptor.TYPE_INT32, + descriptor.FieldDescriptor.TYPE_SINT32, + descriptor.FieldDescriptor.TYPE_SFIXED32): + value = _ConsumeInt32(tokenizer) + elif field.type in (descriptor.FieldDescriptor.TYPE_INT64, + descriptor.FieldDescriptor.TYPE_SINT64, + descriptor.FieldDescriptor.TYPE_SFIXED64): + value = _ConsumeInt64(tokenizer) + elif field.type in (descriptor.FieldDescriptor.TYPE_UINT32, + descriptor.FieldDescriptor.TYPE_FIXED32): + value = _ConsumeUint32(tokenizer) + elif field.type in (descriptor.FieldDescriptor.TYPE_UINT64, + descriptor.FieldDescriptor.TYPE_FIXED64): + value = _ConsumeUint64(tokenizer) + elif field.type in (descriptor.FieldDescriptor.TYPE_FLOAT, + descriptor.FieldDescriptor.TYPE_DOUBLE): + value = tokenizer.ConsumeFloat() + elif field.type == descriptor.FieldDescriptor.TYPE_BOOL: + value = tokenizer.ConsumeBool() + elif field.type == descriptor.FieldDescriptor.TYPE_STRING: + value = tokenizer.ConsumeString() + elif field.type == descriptor.FieldDescriptor.TYPE_BYTES: + value = tokenizer.ConsumeByteString() + elif field.type == descriptor.FieldDescriptor.TYPE_ENUM: + value = tokenizer.ConsumeEnum(field) + else: + raise RuntimeError('Unknown field type %d' % field.type) + + if field.label == descriptor.FieldDescriptor.LABEL_REPEATED: + if field.is_extension: + message.Extensions[field].append(value) + else: + getattr(message, field.name).append(value) + else: + if field.is_extension: + if (not self._allow_multiple_scalars and + not self._IsProto3Syntax(message) and + message.HasExtension(field)): + raise tokenizer.ParseErrorPreviousToken( + 'Message type "%s" should not have multiple "%s" extensions.' % + (message.DESCRIPTOR.full_name, field.full_name)) + else: + message.Extensions[field] = value + else: + duplicate_error = False + if not self._allow_multiple_scalars: + if self._IsProto3Syntax(message): + # Proto3 doesn't represent presence so we try best effort to check + # multiple scalars by compare to default values. + duplicate_error = bool(getattr(message, field.name)) + else: + duplicate_error = message.HasField(field.name) + + if duplicate_error: + raise tokenizer.ParseErrorPreviousToken( + 'Message type "%s" should not have multiple "%s" fields.' % + (message.DESCRIPTOR.full_name, field.name)) + else: + setattr(message, field.name, value) + + +def _SkipFieldContents(tokenizer): + """Skips over contents (value or message) of a field. + + Args: + tokenizer: A tokenizer to parse the field name and values. + """ + # Try to guess the type of this field. + # If this field is not a message, there should be a ":" between the + # field name and the field value and also the field value should not + # start with "{" or "<" which indicates the beginning of a message body. + # If there is no ":" or there is a "{" or "<" after ":", this field has + # to be a message or the input is ill-formed. + if tokenizer.TryConsume(':') and not tokenizer.LookingAt( + '{') and not tokenizer.LookingAt('<'): + _SkipFieldValue(tokenizer) + else: + _SkipFieldMessage(tokenizer) + + +def _SkipField(tokenizer): + """Skips over a complete field (name and value/message). + + Args: + tokenizer: A tokenizer to parse the field name and values. + """ + if tokenizer.TryConsume('['): + # Consume extension name. 
+ tokenizer.ConsumeIdentifier() + while tokenizer.TryConsume('.'): + tokenizer.ConsumeIdentifier() + tokenizer.Consume(']') + else: + tokenizer.ConsumeIdentifierOrNumber() + + _SkipFieldContents(tokenizer) + + # For historical reasons, fields may optionally be separated by commas or + # semicolons. + if not tokenizer.TryConsume(','): + tokenizer.TryConsume(';') + + +def _SkipFieldMessage(tokenizer): + """Skips over a field message. + + Args: + tokenizer: A tokenizer to parse the field name and values. + """ + + if tokenizer.TryConsume('<'): + delimiter = '>' + else: + tokenizer.Consume('{') + delimiter = '}' + + while not tokenizer.LookingAt('>') and not tokenizer.LookingAt('}'): + _SkipField(tokenizer) + + tokenizer.Consume(delimiter) + + +def _SkipFieldValue(tokenizer): + """Skips over a field value. + + Args: + tokenizer: A tokenizer to parse the field name and values. + + Raises: + ParseError: In case an invalid field value is found. + """ + # String/bytes tokens can come in multiple adjacent string literals. + # If we can consume one, consume as many as we can. + if tokenizer.TryConsumeByteString(): + while tokenizer.TryConsumeByteString(): + pass + return + + if (not tokenizer.TryConsumeIdentifier() and + not _TryConsumeInt64(tokenizer) and not _TryConsumeUint64(tokenizer) and + not tokenizer.TryConsumeFloat()): + raise ParseError('Invalid field value: ' + tokenizer.token) + + +class Tokenizer(object): + """Protocol buffer text representation tokenizer. + + This class handles the lower level string parsing by splitting it into + meaningful tokens. + + It was directly ported from the Java protocol buffer API. + """ + + _WHITESPACE = re.compile(r'\s+') + _COMMENT = re.compile(r'(\s*#.*$)', re.MULTILINE) + _WHITESPACE_OR_COMMENT = re.compile(r'(\s|(#.*$))+', re.MULTILINE) + _TOKEN = re.compile('|'.join([ + r'[a-zA-Z_][0-9a-zA-Z_+-]*', # an identifier + r'([0-9+-]|(\.[0-9]))[0-9a-zA-Z_.+-]*', # a number + ] + [ # quoted str for each quote mark + # Avoid backtracking! https://stackoverflow.com/a/844267 + r'{qt}[^{qt}\n\\]*((\\.)+[^{qt}\n\\]*)*({qt}|\\?$)'.format(qt=mark) + for mark in _QUOTES + ])) + + _IDENTIFIER = re.compile(r'[^\d\W]\w*') + _IDENTIFIER_OR_NUMBER = re.compile(r'\w+') + + def __init__(self, lines, skip_comments=True): + self._position = 0 + self._line = -1 + self._column = 0 + self._token_start = None + self.token = '' + self._lines = iter(lines) + self._current_line = '' + self._previous_line = 0 + self._previous_column = 0 + self._more_lines = True + self._skip_comments = skip_comments + self._whitespace_pattern = (skip_comments and self._WHITESPACE_OR_COMMENT + or self._WHITESPACE) + self._SkipWhitespace() + self.NextToken() + + def LookingAt(self, token): + return self.token == token + + def AtEnd(self): + """Checks the end of the text was reached. + + Returns: + True iff the end was reached. + """ + return not self.token + + def _PopLine(self): + while len(self._current_line) <= self._column: + try: + self._current_line = next(self._lines) + except StopIteration: + self._current_line = '' + self._more_lines = False + return + else: + self._line += 1 + self._column = 0 + + def _SkipWhitespace(self): + while True: + self._PopLine() + match = self._whitespace_pattern.match(self._current_line, self._column) + if not match: + break + length = len(match.group(0)) + self._column += length + + def TryConsume(self, token): + """Tries to consume a given piece of text. + + Args: + token: Text to consume. + + Returns: + True iff the text was consumed. 
+ """ + if self.token == token: + self.NextToken() + return True + return False + + def Consume(self, token): + """Consumes a piece of text. + + Args: + token: Text to consume. + + Raises: + ParseError: If the text couldn't be consumed. + """ + if not self.TryConsume(token): + raise self.ParseError('Expected "%s".' % token) + + def ConsumeComment(self): + result = self.token + if not self._COMMENT.match(result): + raise self.ParseError('Expected comment.') + self.NextToken() + return result + + def ConsumeCommentOrTrailingComment(self): + """Consumes a comment, returns a 2-tuple (trailing bool, comment str).""" + + # Tokenizer initializes _previous_line and _previous_column to 0. As the + # tokenizer starts, it looks like there is a previous token on the line. + just_started = self._line == 0 and self._column == 0 + + before_parsing = self._previous_line + comment = self.ConsumeComment() + + # A trailing comment is a comment on the same line than the previous token. + trailing = (self._previous_line == before_parsing + and not just_started) + + return trailing, comment + + def TryConsumeIdentifier(self): + try: + self.ConsumeIdentifier() + return True + except ParseError: + return False + + def ConsumeIdentifier(self): + """Consumes protocol message field identifier. + + Returns: + Identifier string. + + Raises: + ParseError: If an identifier couldn't be consumed. + """ + result = self.token + if not self._IDENTIFIER.match(result): + raise self.ParseError('Expected identifier.') + self.NextToken() + return result + + def TryConsumeIdentifierOrNumber(self): + try: + self.ConsumeIdentifierOrNumber() + return True + except ParseError: + return False + + def ConsumeIdentifierOrNumber(self): + """Consumes protocol message field identifier. + + Returns: + Identifier string. + + Raises: + ParseError: If an identifier couldn't be consumed. + """ + result = self.token + if not self._IDENTIFIER_OR_NUMBER.match(result): + raise self.ParseError('Expected identifier or number, got %s.' % result) + self.NextToken() + return result + + def TryConsumeInteger(self): + try: + self.ConsumeInteger() + return True + except ParseError: + return False + + def ConsumeInteger(self): + """Consumes an integer number. + + Returns: + The integer parsed. + + Raises: + ParseError: If an integer couldn't be consumed. + """ + try: + result = _ParseAbstractInteger(self.token) + except ValueError as e: + raise self.ParseError(str(e)) + self.NextToken() + return result + + def TryConsumeFloat(self): + try: + self.ConsumeFloat() + return True + except ParseError: + return False + + def ConsumeFloat(self): + """Consumes an floating point number. + + Returns: + The number parsed. + + Raises: + ParseError: If a floating point number couldn't be consumed. + """ + try: + result = ParseFloat(self.token) + except ValueError as e: + raise self.ParseError(str(e)) + self.NextToken() + return result + + def ConsumeBool(self): + """Consumes a boolean value. + + Returns: + The bool parsed. + + Raises: + ParseError: If a boolean value couldn't be consumed. + """ + try: + result = ParseBool(self.token) + except ValueError as e: + raise self.ParseError(str(e)) + self.NextToken() + return result + + def TryConsumeByteString(self): + try: + self.ConsumeByteString() + return True + except ParseError: + return False + + def ConsumeString(self): + """Consumes a string value. + + Returns: + The string parsed. + + Raises: + ParseError: If a string value couldn't be consumed. 
+ """ + the_bytes = self.ConsumeByteString() + try: + return str(the_bytes, 'utf-8') + except UnicodeDecodeError as e: + raise self._StringParseError(e) + + def ConsumeByteString(self): + """Consumes a byte array value. + + Returns: + The array parsed (as a string). + + Raises: + ParseError: If a byte array value couldn't be consumed. + """ + the_list = [self._ConsumeSingleByteString()] + while self.token and self.token[0] in _QUOTES: + the_list.append(self._ConsumeSingleByteString()) + return b''.join(the_list) + + def _ConsumeSingleByteString(self): + """Consume one token of a string literal. + + String literals (whether bytes or text) can come in multiple adjacent + tokens which are automatically concatenated, like in C or Python. This + method only consumes one token. + + Returns: + The token parsed. + Raises: + ParseError: When the wrong format data is found. + """ + text = self.token + if len(text) < 1 or text[0] not in _QUOTES: + raise self.ParseError('Expected string but found: %r' % (text,)) + + if len(text) < 2 or text[-1] != text[0]: + raise self.ParseError('String missing ending quote: %r' % (text,)) + + try: + result = text_encoding.CUnescape(text[1:-1]) + except ValueError as e: + raise self.ParseError(str(e)) + self.NextToken() + return result + + def ConsumeEnum(self, field): + try: + result = ParseEnum(field, self.token) + except ValueError as e: + raise self.ParseError(str(e)) + self.NextToken() + return result + + def ParseErrorPreviousToken(self, message): + """Creates and *returns* a ParseError for the previously read token. + + Args: + message: A message to set for the exception. + + Returns: + A ParseError instance. + """ + return ParseError(message, self._previous_line + 1, + self._previous_column + 1) + + def ParseError(self, message): + """Creates and *returns* a ParseError for the current token.""" + return ParseError('\'' + self._current_line + '\': ' + message, + self._line + 1, self._column + 1) + + def _StringParseError(self, e): + return self.ParseError('Couldn\'t parse string: ' + str(e)) + + def NextToken(self): + """Reads the next meaningful token.""" + self._previous_line = self._line + self._previous_column = self._column + + self._column += len(self.token) + self._SkipWhitespace() + + if not self._more_lines: + self.token = '' + return + + match = self._TOKEN.match(self._current_line, self._column) + if not match and not self._skip_comments: + match = self._COMMENT.match(self._current_line, self._column) + if match: + token = match.group(0) + self.token = token + else: + self.token = self._current_line[self._column] + +# Aliased so it can still be accessed by current visibility violators. +# TODO(dbarnett): Migrate violators to textformat_tokenizer. +_Tokenizer = Tokenizer # pylint: disable=invalid-name + + +def _ConsumeInt32(tokenizer): + """Consumes a signed 32bit integer number from tokenizer. + + Args: + tokenizer: A tokenizer used to parse the number. + + Returns: + The integer parsed. + + Raises: + ParseError: If a signed 32bit integer couldn't be consumed. + """ + return _ConsumeInteger(tokenizer, is_signed=True, is_long=False) + + +def _ConsumeUint32(tokenizer): + """Consumes an unsigned 32bit integer number from tokenizer. + + Args: + tokenizer: A tokenizer used to parse the number. + + Returns: + The integer parsed. + + Raises: + ParseError: If an unsigned 32bit integer couldn't be consumed. 
+ """ + return _ConsumeInteger(tokenizer, is_signed=False, is_long=False) + + +def _TryConsumeInt64(tokenizer): + try: + _ConsumeInt64(tokenizer) + return True + except ParseError: + return False + + +def _ConsumeInt64(tokenizer): + """Consumes a signed 32bit integer number from tokenizer. + + Args: + tokenizer: A tokenizer used to parse the number. + + Returns: + The integer parsed. + + Raises: + ParseError: If a signed 32bit integer couldn't be consumed. + """ + return _ConsumeInteger(tokenizer, is_signed=True, is_long=True) + + +def _TryConsumeUint64(tokenizer): + try: + _ConsumeUint64(tokenizer) + return True + except ParseError: + return False + + +def _ConsumeUint64(tokenizer): + """Consumes an unsigned 64bit integer number from tokenizer. + + Args: + tokenizer: A tokenizer used to parse the number. + + Returns: + The integer parsed. + + Raises: + ParseError: If an unsigned 64bit integer couldn't be consumed. + """ + return _ConsumeInteger(tokenizer, is_signed=False, is_long=True) + + +def _ConsumeInteger(tokenizer, is_signed=False, is_long=False): + """Consumes an integer number from tokenizer. + + Args: + tokenizer: A tokenizer used to parse the number. + is_signed: True if a signed integer must be parsed. + is_long: True if a long integer must be parsed. + + Returns: + The integer parsed. + + Raises: + ParseError: If an integer with given characteristics couldn't be consumed. + """ + try: + result = ParseInteger(tokenizer.token, is_signed=is_signed, is_long=is_long) + except ValueError as e: + raise tokenizer.ParseError(str(e)) + tokenizer.NextToken() + return result + + +def ParseInteger(text, is_signed=False, is_long=False): + """Parses an integer. + + Args: + text: The text to parse. + is_signed: True if a signed integer must be parsed. + is_long: True if a long integer must be parsed. + + Returns: + The integer value. + + Raises: + ValueError: Thrown Iff the text is not a valid integer. + """ + # Do the actual parsing. Exception handling is propagated to caller. + result = _ParseAbstractInteger(text) + + # Check if the integer is sane. Exceptions handled by callers. + checker = _INTEGER_CHECKERS[2 * int(is_long) + int(is_signed)] + checker.CheckValue(result) + return result + + +def _ParseAbstractInteger(text): + """Parses an integer without checking size/signedness. + + Args: + text: The text to parse. + + Returns: + The integer value. + + Raises: + ValueError: Thrown Iff the text is not a valid integer. + """ + # Do the actual parsing. Exception handling is propagated to caller. + orig_text = text + c_octal_match = re.match(r'(-?)0(\d+)$', text) + if c_octal_match: + # Python 3 no longer supports 0755 octal syntax without the 'o', so + # we always use the '0o' prefix for multi-digit numbers starting with 0. + text = c_octal_match.group(1) + '0o' + c_octal_match.group(2) + try: + return int(text, 0) + except ValueError: + raise ValueError('Couldn\'t parse integer: %s' % orig_text) + + +def ParseFloat(text): + """Parse a floating point number. + + Args: + text: Text to parse. + + Returns: + The number parsed. + + Raises: + ValueError: If a floating point number couldn't be parsed. + """ + try: + # Assume Python compatible syntax. + return float(text) + except ValueError: + # Check alternative spellings. 
+ if _FLOAT_INFINITY.match(text): + if text[0] == '-': + return float('-inf') + else: + return float('inf') + elif _FLOAT_NAN.match(text): + return float('nan') + else: + # assume '1.0f' format + try: + return float(text.rstrip('f')) + except ValueError: + raise ValueError('Couldn\'t parse float: %s' % text) + + +def ParseBool(text): + """Parse a boolean value. + + Args: + text: Text to parse. + + Returns: + Boolean values parsed + + Raises: + ValueError: If text is not a valid boolean. + """ + if text in ('true', 't', '1', 'True'): + return True + elif text in ('false', 'f', '0', 'False'): + return False + else: + raise ValueError('Expected "true" or "false".') + + +def ParseEnum(field, value): + """Parse an enum value. + + The value can be specified by a number (the enum value), or by + a string literal (the enum name). + + Args: + field: Enum field descriptor. + value: String value. + + Returns: + Enum value number. + + Raises: + ValueError: If the enum value could not be parsed. + """ + enum_descriptor = field.enum_type + try: + number = int(value, 0) + except ValueError: + # Identifier. + enum_value = enum_descriptor.values_by_name.get(value, None) + if enum_value is None: + raise ValueError('Enum type "%s" has no value named %s.' % + (enum_descriptor.full_name, value)) + else: + # Numeric value. + if hasattr(field.file, 'syntax'): + # Attribute is checked for compatibility. + if field.file.syntax == 'proto3': + # Proto3 accept numeric unknown enums. + return number + enum_value = enum_descriptor.values_by_number.get(number, None) + if enum_value is None: + raise ValueError('Enum type "%s" has no value with number %d.' % + (enum_descriptor.full_name, number)) + return enum_value.number diff --git a/openpype/hosts/nuke/vendor/google/protobuf/timestamp_pb2.py b/openpype/hosts/nuke/vendor/google/protobuf/timestamp_pb2.py new file mode 100644 index 0000000000..558d496941 --- /dev/null +++ b/openpype/hosts/nuke/vendor/google/protobuf/timestamp_pb2.py @@ -0,0 +1,26 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
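# --- Illustrative sketch (editorial addition, not part of the vendored text_format.py above) ---
# A minimal example of the Parse()/Merge() semantics documented in the module's
# docstrings, using the standard `google.protobuf` import path and the
# well-known Type message (singular `name`, repeated `oneofs`). Parse() never
# clears the target: repeated values accumulate, while re-specifying an
# already-set singular field raises ParseError; Merge() keeps the last value.
from google.protobuf import text_format, type_pb2

msg = type_pb2.Type()
text_format.Parse('name: "T" oneofs: "a" oneofs: "b"', msg)
text_format.Parse('oneofs: "c"', msg)          # repeated field: values append
assert list(msg.oneofs) == ["a", "b", "c"]

try:
    text_format.Parse('name: "U"', msg)        # singular field already set
except text_format.ParseError:
    pass                                       # Parse() refuses to overwrite

text_format.Merge('name: "U"', msg)            # Merge(): last value wins
assert msg.name == "U"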
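# --- Illustrative sketch (editorial addition) ---
# The expanded google.protobuf.Any syntax handled by _MergeField above,
# "[type.googleapis.com/<full type name>] { ... }". Assumes the standard
# google.protobuf package; importing timestamp_pb2 registers Timestamp with
# the default descriptor pool, which is what resolves the packed type name.
from google.protobuf import any_pb2, timestamp_pb2, text_format

any_msg = any_pb2.Any()
text_format.Merge(
    '[type.googleapis.com/google.protobuf.Timestamp] { seconds: 1600000000 }',
    any_msg)
assert any_msg.type_url.endswith('google.protobuf.Timestamp')

unpacked = timestamp_pb2.Timestamp()
assert any_msg.Unpack(unpacked) and unpacked.seconds == 1600000000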
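# --- Illustrative sketch (editorial addition) ---
# The standalone literal parsers defined above: ParseInteger accepts C-style
# octal, ParseFloat tolerates a trailing "f" and the inf/nan spellings, and
# ParseBool accepts true/t/1 and false/f/0.
from google.protobuf import text_format

assert text_format.ParseInteger("010") == 8             # C-style octal
assert text_format.ParseFloat("1.5f") == 1.5            # trailing 'f' stripped
assert text_format.ParseFloat("-inf") == float("-inf")
assert text_format.ParseBool("t") is True
assert text_format.ParseBool("0") is False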
+# source: google/protobuf/timestamp.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1fgoogle/protobuf/timestamp.proto\x12\x0fgoogle.protobuf\"+\n\tTimestamp\x12\x0f\n\x07seconds\x18\x01 \x01(\x03\x12\r\n\x05nanos\x18\x02 \x01(\x05\x42\x85\x01\n\x13\x63om.google.protobufB\x0eTimestampProtoP\x01Z2google.golang.org/protobuf/types/known/timestamppb\xf8\x01\x01\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3') + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.timestamp_pb2', globals()) +if _descriptor._USE_C_DESCRIPTORS == False: + + DESCRIPTOR._options = None + DESCRIPTOR._serialized_options = b'\n\023com.google.protobufB\016TimestampProtoP\001Z2google.golang.org/protobuf/types/known/timestamppb\370\001\001\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes' + _TIMESTAMP._serialized_start=52 + _TIMESTAMP._serialized_end=95 +# @@protoc_insertion_point(module_scope) diff --git a/openpype/hosts/nuke/vendor/google/protobuf/type_pb2.py b/openpype/hosts/nuke/vendor/google/protobuf/type_pb2.py new file mode 100644 index 0000000000..19903fb6b4 --- /dev/null +++ b/openpype/hosts/nuke/vendor/google/protobuf/type_pb2.py @@ -0,0 +1,42 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: google/protobuf/type.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.protobuf import any_pb2 as google_dot_protobuf_dot_any__pb2 +from google.protobuf import source_context_pb2 as google_dot_protobuf_dot_source__context__pb2 + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1agoogle/protobuf/type.proto\x12\x0fgoogle.protobuf\x1a\x19google/protobuf/any.proto\x1a$google/protobuf/source_context.proto\"\xd7\x01\n\x04Type\x12\x0c\n\x04name\x18\x01 \x01(\t\x12&\n\x06\x66ields\x18\x02 \x03(\x0b\x32\x16.google.protobuf.Field\x12\x0e\n\x06oneofs\x18\x03 \x03(\t\x12(\n\x07options\x18\x04 \x03(\x0b\x32\x17.google.protobuf.Option\x12\x36\n\x0esource_context\x18\x05 \x01(\x0b\x32\x1e.google.protobuf.SourceContext\x12\'\n\x06syntax\x18\x06 \x01(\x0e\x32\x17.google.protobuf.Syntax\"\xd5\x05\n\x05\x46ield\x12)\n\x04kind\x18\x01 \x01(\x0e\x32\x1b.google.protobuf.Field.Kind\x12\x37\n\x0b\x63\x61rdinality\x18\x02 \x01(\x0e\x32\".google.protobuf.Field.Cardinality\x12\x0e\n\x06number\x18\x03 \x01(\x05\x12\x0c\n\x04name\x18\x04 \x01(\t\x12\x10\n\x08type_url\x18\x06 \x01(\t\x12\x13\n\x0boneof_index\x18\x07 \x01(\x05\x12\x0e\n\x06packed\x18\x08 \x01(\x08\x12(\n\x07options\x18\t \x03(\x0b\x32\x17.google.protobuf.Option\x12\x11\n\tjson_name\x18\n \x01(\t\x12\x15\n\rdefault_value\x18\x0b 
\x01(\t\"\xc8\x02\n\x04Kind\x12\x10\n\x0cTYPE_UNKNOWN\x10\x00\x12\x0f\n\x0bTYPE_DOUBLE\x10\x01\x12\x0e\n\nTYPE_FLOAT\x10\x02\x12\x0e\n\nTYPE_INT64\x10\x03\x12\x0f\n\x0bTYPE_UINT64\x10\x04\x12\x0e\n\nTYPE_INT32\x10\x05\x12\x10\n\x0cTYPE_FIXED64\x10\x06\x12\x10\n\x0cTYPE_FIXED32\x10\x07\x12\r\n\tTYPE_BOOL\x10\x08\x12\x0f\n\x0bTYPE_STRING\x10\t\x12\x0e\n\nTYPE_GROUP\x10\n\x12\x10\n\x0cTYPE_MESSAGE\x10\x0b\x12\x0e\n\nTYPE_BYTES\x10\x0c\x12\x0f\n\x0bTYPE_UINT32\x10\r\x12\r\n\tTYPE_ENUM\x10\x0e\x12\x11\n\rTYPE_SFIXED32\x10\x0f\x12\x11\n\rTYPE_SFIXED64\x10\x10\x12\x0f\n\x0bTYPE_SINT32\x10\x11\x12\x0f\n\x0bTYPE_SINT64\x10\x12\"t\n\x0b\x43\x61rdinality\x12\x17\n\x13\x43\x41RDINALITY_UNKNOWN\x10\x00\x12\x18\n\x14\x43\x41RDINALITY_OPTIONAL\x10\x01\x12\x18\n\x14\x43\x41RDINALITY_REQUIRED\x10\x02\x12\x18\n\x14\x43\x41RDINALITY_REPEATED\x10\x03\"\xce\x01\n\x04\x45num\x12\x0c\n\x04name\x18\x01 \x01(\t\x12-\n\tenumvalue\x18\x02 \x03(\x0b\x32\x1a.google.protobuf.EnumValue\x12(\n\x07options\x18\x03 \x03(\x0b\x32\x17.google.protobuf.Option\x12\x36\n\x0esource_context\x18\x04 \x01(\x0b\x32\x1e.google.protobuf.SourceContext\x12\'\n\x06syntax\x18\x05 \x01(\x0e\x32\x17.google.protobuf.Syntax\"S\n\tEnumValue\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0e\n\x06number\x18\x02 \x01(\x05\x12(\n\x07options\x18\x03 \x03(\x0b\x32\x17.google.protobuf.Option\";\n\x06Option\x12\x0c\n\x04name\x18\x01 \x01(\t\x12#\n\x05value\x18\x02 \x01(\x0b\x32\x14.google.protobuf.Any*.\n\x06Syntax\x12\x11\n\rSYNTAX_PROTO2\x10\x00\x12\x11\n\rSYNTAX_PROTO3\x10\x01\x42{\n\x13\x63om.google.protobufB\tTypeProtoP\x01Z-google.golang.org/protobuf/types/known/typepb\xf8\x01\x01\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3') + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.type_pb2', globals()) +if _descriptor._USE_C_DESCRIPTORS == False: + + DESCRIPTOR._options = None + DESCRIPTOR._serialized_options = b'\n\023com.google.protobufB\tTypeProtoP\001Z-google.golang.org/protobuf/types/known/typepb\370\001\001\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes' + _SYNTAX._serialized_start=1413 + _SYNTAX._serialized_end=1459 + _TYPE._serialized_start=113 + _TYPE._serialized_end=328 + _FIELD._serialized_start=331 + _FIELD._serialized_end=1056 + _FIELD_KIND._serialized_start=610 + _FIELD_KIND._serialized_end=938 + _FIELD_CARDINALITY._serialized_start=940 + _FIELD_CARDINALITY._serialized_end=1056 + _ENUM._serialized_start=1059 + _ENUM._serialized_end=1265 + _ENUMVALUE._serialized_start=1267 + _ENUMVALUE._serialized_end=1350 + _OPTION._serialized_start=1352 + _OPTION._serialized_end=1411 +# @@protoc_insertion_point(module_scope) diff --git a/openpype/hosts/nuke/vendor/google/protobuf/util/__init__.py b/openpype/hosts/nuke/vendor/google/protobuf/util/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/openpype/hosts/nuke/vendor/google/protobuf/util/json_format_pb2.py b/openpype/hosts/nuke/vendor/google/protobuf/util/json_format_pb2.py new file mode 100644 index 0000000000..66a5836c82 --- /dev/null +++ b/openpype/hosts/nuke/vendor/google/protobuf/util/json_format_pb2.py @@ -0,0 +1,72 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: google/protobuf/util/json_format.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n&google/protobuf/util/json_format.proto\x12\x11protobuf_unittest\"\x89\x01\n\x13TestFlagsAndStrings\x12\t\n\x01\x41\x18\x01 \x02(\x05\x12K\n\rrepeatedgroup\x18\x02 \x03(\n24.protobuf_unittest.TestFlagsAndStrings.RepeatedGroup\x1a\x1a\n\rRepeatedGroup\x12\t\n\x01\x66\x18\x03 \x02(\t\"!\n\x14TestBase64ByteArrays\x12\t\n\x01\x61\x18\x01 \x02(\x0c\"G\n\x12TestJavaScriptJSON\x12\t\n\x01\x61\x18\x01 \x01(\x05\x12\r\n\x05\x66inal\x18\x02 \x01(\x02\x12\n\n\x02in\x18\x03 \x01(\t\x12\x0b\n\x03Var\x18\x04 \x01(\t\"Q\n\x18TestJavaScriptOrderJSON1\x12\t\n\x01\x64\x18\x01 \x01(\x05\x12\t\n\x01\x63\x18\x02 \x01(\x05\x12\t\n\x01x\x18\x03 \x01(\x08\x12\t\n\x01\x62\x18\x04 \x01(\x05\x12\t\n\x01\x61\x18\x05 \x01(\x05\"\x89\x01\n\x18TestJavaScriptOrderJSON2\x12\t\n\x01\x64\x18\x01 \x01(\x05\x12\t\n\x01\x63\x18\x02 \x01(\x05\x12\t\n\x01x\x18\x03 \x01(\x08\x12\t\n\x01\x62\x18\x04 \x01(\x05\x12\t\n\x01\x61\x18\x05 \x01(\x05\x12\x36\n\x01z\x18\x06 \x03(\x0b\x32+.protobuf_unittest.TestJavaScriptOrderJSON1\"$\n\x0cTestLargeInt\x12\t\n\x01\x61\x18\x01 \x02(\x03\x12\t\n\x01\x62\x18\x02 \x02(\x04\"\xa0\x01\n\x0bTestNumbers\x12\x30\n\x01\x61\x18\x01 \x01(\x0e\x32%.protobuf_unittest.TestNumbers.MyType\x12\t\n\x01\x62\x18\x02 \x01(\x05\x12\t\n\x01\x63\x18\x03 \x01(\x02\x12\t\n\x01\x64\x18\x04 \x01(\x08\x12\t\n\x01\x65\x18\x05 \x01(\x01\x12\t\n\x01\x66\x18\x06 \x01(\r\"(\n\x06MyType\x12\x06\n\x02OK\x10\x00\x12\x0b\n\x07WARNING\x10\x01\x12\t\n\x05\x45RROR\x10\x02\"T\n\rTestCamelCase\x12\x14\n\x0cnormal_field\x18\x01 \x01(\t\x12\x15\n\rCAPITAL_FIELD\x18\x02 \x01(\x05\x12\x16\n\x0e\x43\x61melCaseField\x18\x03 \x01(\x05\"|\n\x0bTestBoolMap\x12=\n\x08\x62ool_map\x18\x01 \x03(\x0b\x32+.protobuf_unittest.TestBoolMap.BoolMapEntry\x1a.\n\x0c\x42oolMapEntry\x12\x0b\n\x03key\x18\x01 \x01(\x08\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\"O\n\rTestRecursion\x12\r\n\x05value\x18\x01 \x01(\x05\x12/\n\x05\x63hild\x18\x02 \x01(\x0b\x32 .protobuf_unittest.TestRecursion\"\x86\x01\n\rTestStringMap\x12\x43\n\nstring_map\x18\x01 \x03(\x0b\x32/.protobuf_unittest.TestStringMap.StringMapEntry\x1a\x30\n\x0eStringMapEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\xc4\x01\n\x14TestStringSerializer\x12\x15\n\rscalar_string\x18\x01 \x01(\t\x12\x17\n\x0frepeated_string\x18\x02 \x03(\t\x12J\n\nstring_map\x18\x03 \x03(\x0b\x32\x36.protobuf_unittest.TestStringSerializer.StringMapEntry\x1a\x30\n\x0eStringMapEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"$\n\x18TestMessageWithExtension*\x08\x08\x64\x10\x80\x80\x80\x80\x02\"z\n\rTestExtension\x12\r\n\x05value\x18\x01 \x01(\t2Z\n\x03\x65xt\x12+.protobuf_unittest.TestMessageWithExtension\x18\x64 \x01(\x0b\x32 .protobuf_unittest.TestExtension\"Q\n\x14TestDefaultEnumValue\x12\x39\n\nenum_value\x18\x01 \x01(\x0e\x32\x1c.protobuf_unittest.EnumValue:\x07\x44\x45\x46\x41ULT*2\n\tEnumValue\x12\x0c\n\x08PROTOCOL\x10\x00\x12\n\n\x06\x42UFFER\x10\x01\x12\x0b\n\x07\x44\x45\x46\x41ULT\x10\x02') + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) 
+_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.util.json_format_pb2', globals()) +if _descriptor._USE_C_DESCRIPTORS == False: + TestMessageWithExtension.RegisterExtension(_TESTEXTENSION.extensions_by_name['ext']) + + DESCRIPTOR._options = None + _TESTBOOLMAP_BOOLMAPENTRY._options = None + _TESTBOOLMAP_BOOLMAPENTRY._serialized_options = b'8\001' + _TESTSTRINGMAP_STRINGMAPENTRY._options = None + _TESTSTRINGMAP_STRINGMAPENTRY._serialized_options = b'8\001' + _TESTSTRINGSERIALIZER_STRINGMAPENTRY._options = None + _TESTSTRINGSERIALIZER_STRINGMAPENTRY._serialized_options = b'8\001' + _ENUMVALUE._serialized_start=1607 + _ENUMVALUE._serialized_end=1657 + _TESTFLAGSANDSTRINGS._serialized_start=62 + _TESTFLAGSANDSTRINGS._serialized_end=199 + _TESTFLAGSANDSTRINGS_REPEATEDGROUP._serialized_start=173 + _TESTFLAGSANDSTRINGS_REPEATEDGROUP._serialized_end=199 + _TESTBASE64BYTEARRAYS._serialized_start=201 + _TESTBASE64BYTEARRAYS._serialized_end=234 + _TESTJAVASCRIPTJSON._serialized_start=236 + _TESTJAVASCRIPTJSON._serialized_end=307 + _TESTJAVASCRIPTORDERJSON1._serialized_start=309 + _TESTJAVASCRIPTORDERJSON1._serialized_end=390 + _TESTJAVASCRIPTORDERJSON2._serialized_start=393 + _TESTJAVASCRIPTORDERJSON2._serialized_end=530 + _TESTLARGEINT._serialized_start=532 + _TESTLARGEINT._serialized_end=568 + _TESTNUMBERS._serialized_start=571 + _TESTNUMBERS._serialized_end=731 + _TESTNUMBERS_MYTYPE._serialized_start=691 + _TESTNUMBERS_MYTYPE._serialized_end=731 + _TESTCAMELCASE._serialized_start=733 + _TESTCAMELCASE._serialized_end=817 + _TESTBOOLMAP._serialized_start=819 + _TESTBOOLMAP._serialized_end=943 + _TESTBOOLMAP_BOOLMAPENTRY._serialized_start=897 + _TESTBOOLMAP_BOOLMAPENTRY._serialized_end=943 + _TESTRECURSION._serialized_start=945 + _TESTRECURSION._serialized_end=1024 + _TESTSTRINGMAP._serialized_start=1027 + _TESTSTRINGMAP._serialized_end=1161 + _TESTSTRINGMAP_STRINGMAPENTRY._serialized_start=1113 + _TESTSTRINGMAP_STRINGMAPENTRY._serialized_end=1161 + _TESTSTRINGSERIALIZER._serialized_start=1164 + _TESTSTRINGSERIALIZER._serialized_end=1360 + _TESTSTRINGSERIALIZER_STRINGMAPENTRY._serialized_start=1113 + _TESTSTRINGSERIALIZER_STRINGMAPENTRY._serialized_end=1161 + _TESTMESSAGEWITHEXTENSION._serialized_start=1362 + _TESTMESSAGEWITHEXTENSION._serialized_end=1398 + _TESTEXTENSION._serialized_start=1400 + _TESTEXTENSION._serialized_end=1522 + _TESTDEFAULTENUMVALUE._serialized_start=1524 + _TESTDEFAULTENUMVALUE._serialized_end=1605 +# @@protoc_insertion_point(module_scope) diff --git a/openpype/hosts/nuke/vendor/google/protobuf/util/json_format_proto3_pb2.py b/openpype/hosts/nuke/vendor/google/protobuf/util/json_format_proto3_pb2.py new file mode 100644 index 0000000000..5498deafa9 --- /dev/null +++ b/openpype/hosts/nuke/vendor/google/protobuf/util/json_format_proto3_pb2.py @@ -0,0 +1,129 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: google/protobuf/util/json_format_proto3.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.protobuf import any_pb2 as google_dot_protobuf_dot_any__pb2 +from google.protobuf import duration_pb2 as google_dot_protobuf_dot_duration__pb2 +from google.protobuf import field_mask_pb2 as google_dot_protobuf_dot_field__mask__pb2 +from google.protobuf import struct_pb2 as google_dot_protobuf_dot_struct__pb2 +from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 +from google.protobuf import wrappers_pb2 as google_dot_protobuf_dot_wrappers__pb2 +from google.protobuf import unittest_pb2 as google_dot_protobuf_dot_unittest__pb2 + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n-google/protobuf/util/json_format_proto3.proto\x12\x06proto3\x1a\x19google/protobuf/any.proto\x1a\x1egoogle/protobuf/duration.proto\x1a google/protobuf/field_mask.proto\x1a\x1cgoogle/protobuf/struct.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1egoogle/protobuf/wrappers.proto\x1a\x1egoogle/protobuf/unittest.proto\"\x1c\n\x0bMessageType\x12\r\n\x05value\x18\x01 \x01(\x05\"\x94\x05\n\x0bTestMessage\x12\x12\n\nbool_value\x18\x01 \x01(\x08\x12\x13\n\x0bint32_value\x18\x02 \x01(\x05\x12\x13\n\x0bint64_value\x18\x03 \x01(\x03\x12\x14\n\x0cuint32_value\x18\x04 \x01(\r\x12\x14\n\x0cuint64_value\x18\x05 \x01(\x04\x12\x13\n\x0b\x66loat_value\x18\x06 \x01(\x02\x12\x14\n\x0c\x64ouble_value\x18\x07 \x01(\x01\x12\x14\n\x0cstring_value\x18\x08 \x01(\t\x12\x13\n\x0b\x62ytes_value\x18\t \x01(\x0c\x12$\n\nenum_value\x18\n \x01(\x0e\x32\x10.proto3.EnumType\x12*\n\rmessage_value\x18\x0b \x01(\x0b\x32\x13.proto3.MessageType\x12\x1b\n\x13repeated_bool_value\x18\x15 \x03(\x08\x12\x1c\n\x14repeated_int32_value\x18\x16 \x03(\x05\x12\x1c\n\x14repeated_int64_value\x18\x17 \x03(\x03\x12\x1d\n\x15repeated_uint32_value\x18\x18 \x03(\r\x12\x1d\n\x15repeated_uint64_value\x18\x19 \x03(\x04\x12\x1c\n\x14repeated_float_value\x18\x1a \x03(\x02\x12\x1d\n\x15repeated_double_value\x18\x1b \x03(\x01\x12\x1d\n\x15repeated_string_value\x18\x1c \x03(\t\x12\x1c\n\x14repeated_bytes_value\x18\x1d \x03(\x0c\x12-\n\x13repeated_enum_value\x18\x1e \x03(\x0e\x32\x10.proto3.EnumType\x12\x33\n\x16repeated_message_value\x18\x1f \x03(\x0b\x32\x13.proto3.MessageType\"\x8c\x02\n\tTestOneof\x12\x1b\n\x11oneof_int32_value\x18\x01 \x01(\x05H\x00\x12\x1c\n\x12oneof_string_value\x18\x02 \x01(\tH\x00\x12\x1b\n\x11oneof_bytes_value\x18\x03 \x01(\x0cH\x00\x12,\n\x10oneof_enum_value\x18\x04 \x01(\x0e\x32\x10.proto3.EnumTypeH\x00\x12\x32\n\x13oneof_message_value\x18\x05 \x01(\x0b\x32\x13.proto3.MessageTypeH\x00\x12\x36\n\x10oneof_null_value\x18\x06 \x01(\x0e\x32\x1a.google.protobuf.NullValueH\x00\x42\r\n\x0boneof_value\"\xe1\x04\n\x07TestMap\x12.\n\x08\x62ool_map\x18\x01 \x03(\x0b\x32\x1c.proto3.TestMap.BoolMapEntry\x12\x30\n\tint32_map\x18\x02 \x03(\x0b\x32\x1d.proto3.TestMap.Int32MapEntry\x12\x30\n\tint64_map\x18\x03 \x03(\x0b\x32\x1d.proto3.TestMap.Int64MapEntry\x12\x32\n\nuint32_map\x18\x04 \x03(\x0b\x32\x1e.proto3.TestMap.Uint32MapEntry\x12\x32\n\nuint64_map\x18\x05 \x03(\x0b\x32\x1e.proto3.TestMap.Uint64MapEntry\x12\x32\n\nstring_map\x18\x06 
\x03(\x0b\x32\x1e.proto3.TestMap.StringMapEntry\x1a.\n\x0c\x42oolMapEntry\x12\x0b\n\x03key\x18\x01 \x01(\x08\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\x1a/\n\rInt32MapEntry\x12\x0b\n\x03key\x18\x01 \x01(\x05\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\x1a/\n\rInt64MapEntry\x12\x0b\n\x03key\x18\x01 \x01(\x03\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\x1a\x30\n\x0eUint32MapEntry\x12\x0b\n\x03key\x18\x01 \x01(\r\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\x1a\x30\n\x0eUint64MapEntry\x12\x0b\n\x03key\x18\x01 \x01(\x04\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\x1a\x30\n\x0eStringMapEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\"\x85\x06\n\rTestNestedMap\x12\x34\n\x08\x62ool_map\x18\x01 \x03(\x0b\x32\".proto3.TestNestedMap.BoolMapEntry\x12\x36\n\tint32_map\x18\x02 \x03(\x0b\x32#.proto3.TestNestedMap.Int32MapEntry\x12\x36\n\tint64_map\x18\x03 \x03(\x0b\x32#.proto3.TestNestedMap.Int64MapEntry\x12\x38\n\nuint32_map\x18\x04 \x03(\x0b\x32$.proto3.TestNestedMap.Uint32MapEntry\x12\x38\n\nuint64_map\x18\x05 \x03(\x0b\x32$.proto3.TestNestedMap.Uint64MapEntry\x12\x38\n\nstring_map\x18\x06 \x03(\x0b\x32$.proto3.TestNestedMap.StringMapEntry\x12\x32\n\x07map_map\x18\x07 \x03(\x0b\x32!.proto3.TestNestedMap.MapMapEntry\x1a.\n\x0c\x42oolMapEntry\x12\x0b\n\x03key\x18\x01 \x01(\x08\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\x1a/\n\rInt32MapEntry\x12\x0b\n\x03key\x18\x01 \x01(\x05\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\x1a/\n\rInt64MapEntry\x12\x0b\n\x03key\x18\x01 \x01(\x03\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\x1a\x30\n\x0eUint32MapEntry\x12\x0b\n\x03key\x18\x01 \x01(\r\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\x1a\x30\n\x0eUint64MapEntry\x12\x0b\n\x03key\x18\x01 \x01(\x04\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\x1a\x30\n\x0eStringMapEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\x1a\x44\n\x0bMapMapEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12$\n\x05value\x18\x02 \x01(\x0b\x32\x15.proto3.TestNestedMap:\x02\x38\x01\"{\n\rTestStringMap\x12\x38\n\nstring_map\x18\x01 \x03(\x0b\x32$.proto3.TestStringMap.StringMapEntry\x1a\x30\n\x0eStringMapEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\xee\x07\n\x0bTestWrapper\x12.\n\nbool_value\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.BoolValue\x12\x30\n\x0bint32_value\x18\x02 \x01(\x0b\x32\x1b.google.protobuf.Int32Value\x12\x30\n\x0bint64_value\x18\x03 \x01(\x0b\x32\x1b.google.protobuf.Int64Value\x12\x32\n\x0cuint32_value\x18\x04 \x01(\x0b\x32\x1c.google.protobuf.UInt32Value\x12\x32\n\x0cuint64_value\x18\x05 \x01(\x0b\x32\x1c.google.protobuf.UInt64Value\x12\x30\n\x0b\x66loat_value\x18\x06 \x01(\x0b\x32\x1b.google.protobuf.FloatValue\x12\x32\n\x0c\x64ouble_value\x18\x07 \x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x12\x32\n\x0cstring_value\x18\x08 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12\x30\n\x0b\x62ytes_value\x18\t \x01(\x0b\x32\x1b.google.protobuf.BytesValue\x12\x37\n\x13repeated_bool_value\x18\x0b \x03(\x0b\x32\x1a.google.protobuf.BoolValue\x12\x39\n\x14repeated_int32_value\x18\x0c \x03(\x0b\x32\x1b.google.protobuf.Int32Value\x12\x39\n\x14repeated_int64_value\x18\r \x03(\x0b\x32\x1b.google.protobuf.Int64Value\x12;\n\x15repeated_uint32_value\x18\x0e \x03(\x0b\x32\x1c.google.protobuf.UInt32Value\x12;\n\x15repeated_uint64_value\x18\x0f \x03(\x0b\x32\x1c.google.protobuf.UInt64Value\x12\x39\n\x14repeated_float_value\x18\x10 
\x03(\x0b\x32\x1b.google.protobuf.FloatValue\x12;\n\x15repeated_double_value\x18\x11 \x03(\x0b\x32\x1c.google.protobuf.DoubleValue\x12;\n\x15repeated_string_value\x18\x12 \x03(\x0b\x32\x1c.google.protobuf.StringValue\x12\x39\n\x14repeated_bytes_value\x18\x13 \x03(\x0b\x32\x1b.google.protobuf.BytesValue\"n\n\rTestTimestamp\x12)\n\x05value\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x32\n\x0erepeated_value\x18\x02 \x03(\x0b\x32\x1a.google.protobuf.Timestamp\"k\n\x0cTestDuration\x12(\n\x05value\x18\x01 \x01(\x0b\x32\x19.google.protobuf.Duration\x12\x31\n\x0erepeated_value\x18\x02 \x03(\x0b\x32\x19.google.protobuf.Duration\":\n\rTestFieldMask\x12)\n\x05value\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.FieldMask\"e\n\nTestStruct\x12&\n\x05value\x18\x01 \x01(\x0b\x32\x17.google.protobuf.Struct\x12/\n\x0erepeated_value\x18\x02 \x03(\x0b\x32\x17.google.protobuf.Struct\"\\\n\x07TestAny\x12#\n\x05value\x18\x01 \x01(\x0b\x32\x14.google.protobuf.Any\x12,\n\x0erepeated_value\x18\x02 \x03(\x0b\x32\x14.google.protobuf.Any\"b\n\tTestValue\x12%\n\x05value\x18\x01 \x01(\x0b\x32\x16.google.protobuf.Value\x12.\n\x0erepeated_value\x18\x02 \x03(\x0b\x32\x16.google.protobuf.Value\"n\n\rTestListValue\x12)\n\x05value\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.ListValue\x12\x32\n\x0erepeated_value\x18\x02 \x03(\x0b\x32\x1a.google.protobuf.ListValue\"\x89\x01\n\rTestBoolValue\x12\x12\n\nbool_value\x18\x01 \x01(\x08\x12\x34\n\x08\x62ool_map\x18\x02 \x03(\x0b\x32\".proto3.TestBoolValue.BoolMapEntry\x1a.\n\x0c\x42oolMapEntry\x12\x0b\n\x03key\x18\x01 \x01(\x08\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\"+\n\x12TestCustomJsonName\x12\x15\n\x05value\x18\x01 \x01(\x05R\x06@value\"J\n\x0eTestExtensions\x12\x38\n\nextensions\x18\x01 \x01(\x0b\x32$.protobuf_unittest.TestAllExtensions\"\x84\x01\n\rTestEnumValue\x12%\n\x0b\x65num_value1\x18\x01 \x01(\x0e\x32\x10.proto3.EnumType\x12%\n\x0b\x65num_value2\x18\x02 \x01(\x0e\x32\x10.proto3.EnumType\x12%\n\x0b\x65num_value3\x18\x03 \x01(\x0e\x32\x10.proto3.EnumType*\x1c\n\x08\x45numType\x12\x07\n\x03\x46OO\x10\x00\x12\x07\n\x03\x42\x41R\x10\x01\x42,\n\x18\x63om.google.protobuf.utilB\x10JsonFormatProto3b\x06proto3') + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.util.json_format_proto3_pb2', globals()) +if _descriptor._USE_C_DESCRIPTORS == False: + + DESCRIPTOR._options = None + DESCRIPTOR._serialized_options = b'\n\030com.google.protobuf.utilB\020JsonFormatProto3' + _TESTMAP_BOOLMAPENTRY._options = None + _TESTMAP_BOOLMAPENTRY._serialized_options = b'8\001' + _TESTMAP_INT32MAPENTRY._options = None + _TESTMAP_INT32MAPENTRY._serialized_options = b'8\001' + _TESTMAP_INT64MAPENTRY._options = None + _TESTMAP_INT64MAPENTRY._serialized_options = b'8\001' + _TESTMAP_UINT32MAPENTRY._options = None + _TESTMAP_UINT32MAPENTRY._serialized_options = b'8\001' + _TESTMAP_UINT64MAPENTRY._options = None + _TESTMAP_UINT64MAPENTRY._serialized_options = b'8\001' + _TESTMAP_STRINGMAPENTRY._options = None + _TESTMAP_STRINGMAPENTRY._serialized_options = b'8\001' + _TESTNESTEDMAP_BOOLMAPENTRY._options = None + _TESTNESTEDMAP_BOOLMAPENTRY._serialized_options = b'8\001' + _TESTNESTEDMAP_INT32MAPENTRY._options = None + _TESTNESTEDMAP_INT32MAPENTRY._serialized_options = b'8\001' + _TESTNESTEDMAP_INT64MAPENTRY._options = None + _TESTNESTEDMAP_INT64MAPENTRY._serialized_options = b'8\001' + _TESTNESTEDMAP_UINT32MAPENTRY._options = None + _TESTNESTEDMAP_UINT32MAPENTRY._serialized_options = b'8\001' + 
_TESTNESTEDMAP_UINT64MAPENTRY._options = None + _TESTNESTEDMAP_UINT64MAPENTRY._serialized_options = b'8\001' + _TESTNESTEDMAP_STRINGMAPENTRY._options = None + _TESTNESTEDMAP_STRINGMAPENTRY._serialized_options = b'8\001' + _TESTNESTEDMAP_MAPMAPENTRY._options = None + _TESTNESTEDMAP_MAPMAPENTRY._serialized_options = b'8\001' + _TESTSTRINGMAP_STRINGMAPENTRY._options = None + _TESTSTRINGMAP_STRINGMAPENTRY._serialized_options = b'8\001' + _TESTBOOLVALUE_BOOLMAPENTRY._options = None + _TESTBOOLVALUE_BOOLMAPENTRY._serialized_options = b'8\001' + _ENUMTYPE._serialized_start=4849 + _ENUMTYPE._serialized_end=4877 + _MESSAGETYPE._serialized_start=277 + _MESSAGETYPE._serialized_end=305 + _TESTMESSAGE._serialized_start=308 + _TESTMESSAGE._serialized_end=968 + _TESTONEOF._serialized_start=971 + _TESTONEOF._serialized_end=1239 + _TESTMAP._serialized_start=1242 + _TESTMAP._serialized_end=1851 + _TESTMAP_BOOLMAPENTRY._serialized_start=1557 + _TESTMAP_BOOLMAPENTRY._serialized_end=1603 + _TESTMAP_INT32MAPENTRY._serialized_start=1605 + _TESTMAP_INT32MAPENTRY._serialized_end=1652 + _TESTMAP_INT64MAPENTRY._serialized_start=1654 + _TESTMAP_INT64MAPENTRY._serialized_end=1701 + _TESTMAP_UINT32MAPENTRY._serialized_start=1703 + _TESTMAP_UINT32MAPENTRY._serialized_end=1751 + _TESTMAP_UINT64MAPENTRY._serialized_start=1753 + _TESTMAP_UINT64MAPENTRY._serialized_end=1801 + _TESTMAP_STRINGMAPENTRY._serialized_start=1803 + _TESTMAP_STRINGMAPENTRY._serialized_end=1851 + _TESTNESTEDMAP._serialized_start=1854 + _TESTNESTEDMAP._serialized_end=2627 + _TESTNESTEDMAP_BOOLMAPENTRY._serialized_start=1557 + _TESTNESTEDMAP_BOOLMAPENTRY._serialized_end=1603 + _TESTNESTEDMAP_INT32MAPENTRY._serialized_start=1605 + _TESTNESTEDMAP_INT32MAPENTRY._serialized_end=1652 + _TESTNESTEDMAP_INT64MAPENTRY._serialized_start=1654 + _TESTNESTEDMAP_INT64MAPENTRY._serialized_end=1701 + _TESTNESTEDMAP_UINT32MAPENTRY._serialized_start=1703 + _TESTNESTEDMAP_UINT32MAPENTRY._serialized_end=1751 + _TESTNESTEDMAP_UINT64MAPENTRY._serialized_start=1753 + _TESTNESTEDMAP_UINT64MAPENTRY._serialized_end=1801 + _TESTNESTEDMAP_STRINGMAPENTRY._serialized_start=1803 + _TESTNESTEDMAP_STRINGMAPENTRY._serialized_end=1851 + _TESTNESTEDMAP_MAPMAPENTRY._serialized_start=2559 + _TESTNESTEDMAP_MAPMAPENTRY._serialized_end=2627 + _TESTSTRINGMAP._serialized_start=2629 + _TESTSTRINGMAP._serialized_end=2752 + _TESTSTRINGMAP_STRINGMAPENTRY._serialized_start=2704 + _TESTSTRINGMAP_STRINGMAPENTRY._serialized_end=2752 + _TESTWRAPPER._serialized_start=2755 + _TESTWRAPPER._serialized_end=3761 + _TESTTIMESTAMP._serialized_start=3763 + _TESTTIMESTAMP._serialized_end=3873 + _TESTDURATION._serialized_start=3875 + _TESTDURATION._serialized_end=3982 + _TESTFIELDMASK._serialized_start=3984 + _TESTFIELDMASK._serialized_end=4042 + _TESTSTRUCT._serialized_start=4044 + _TESTSTRUCT._serialized_end=4145 + _TESTANY._serialized_start=4147 + _TESTANY._serialized_end=4239 + _TESTVALUE._serialized_start=4241 + _TESTVALUE._serialized_end=4339 + _TESTLISTVALUE._serialized_start=4341 + _TESTLISTVALUE._serialized_end=4451 + _TESTBOOLVALUE._serialized_start=4454 + _TESTBOOLVALUE._serialized_end=4591 + _TESTBOOLVALUE_BOOLMAPENTRY._serialized_start=1557 + _TESTBOOLVALUE_BOOLMAPENTRY._serialized_end=1603 + _TESTCUSTOMJSONNAME._serialized_start=4593 + _TESTCUSTOMJSONNAME._serialized_end=4636 + _TESTEXTENSIONS._serialized_start=4638 + _TESTEXTENSIONS._serialized_end=4712 + _TESTENUMVALUE._serialized_start=4715 + _TESTENUMVALUE._serialized_end=4847 +# @@protoc_insertion_point(module_scope) diff --git 
a/openpype/hosts/nuke/vendor/google/protobuf/wrappers_pb2.py b/openpype/hosts/nuke/vendor/google/protobuf/wrappers_pb2.py new file mode 100644 index 0000000000..e49eb4c15d --- /dev/null +++ b/openpype/hosts/nuke/vendor/google/protobuf/wrappers_pb2.py @@ -0,0 +1,42 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: google/protobuf/wrappers.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1egoogle/protobuf/wrappers.proto\x12\x0fgoogle.protobuf\"\x1c\n\x0b\x44oubleValue\x12\r\n\x05value\x18\x01 \x01(\x01\"\x1b\n\nFloatValue\x12\r\n\x05value\x18\x01 \x01(\x02\"\x1b\n\nInt64Value\x12\r\n\x05value\x18\x01 \x01(\x03\"\x1c\n\x0bUInt64Value\x12\r\n\x05value\x18\x01 \x01(\x04\"\x1b\n\nInt32Value\x12\r\n\x05value\x18\x01 \x01(\x05\"\x1c\n\x0bUInt32Value\x12\r\n\x05value\x18\x01 \x01(\r\"\x1a\n\tBoolValue\x12\r\n\x05value\x18\x01 \x01(\x08\"\x1c\n\x0bStringValue\x12\r\n\x05value\x18\x01 \x01(\t\"\x1b\n\nBytesValue\x12\r\n\x05value\x18\x01 \x01(\x0c\x42\x83\x01\n\x13\x63om.google.protobufB\rWrappersProtoP\x01Z1google.golang.org/protobuf/types/known/wrapperspb\xf8\x01\x01\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3') + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.wrappers_pb2', globals()) +if _descriptor._USE_C_DESCRIPTORS == False: + + DESCRIPTOR._options = None + DESCRIPTOR._serialized_options = b'\n\023com.google.protobufB\rWrappersProtoP\001Z1google.golang.org/protobuf/types/known/wrapperspb\370\001\001\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes' + _DOUBLEVALUE._serialized_start=51 + _DOUBLEVALUE._serialized_end=79 + _FLOATVALUE._serialized_start=81 + _FLOATVALUE._serialized_end=108 + _INT64VALUE._serialized_start=110 + _INT64VALUE._serialized_end=137 + _UINT64VALUE._serialized_start=139 + _UINT64VALUE._serialized_end=167 + _INT32VALUE._serialized_start=169 + _INT32VALUE._serialized_end=196 + _UINT32VALUE._serialized_start=198 + _UINT32VALUE._serialized_end=226 + _BOOLVALUE._serialized_start=228 + _BOOLVALUE._serialized_end=254 + _STRINGVALUE._serialized_start=256 + _STRINGVALUE._serialized_end=284 + _BYTESVALUE._serialized_start=286 + _BYTESVALUE._serialized_end=313 +# @@protoc_insertion_point(module_scope) diff --git a/openpype/hosts/photoshop/__init__.py b/openpype/hosts/photoshop/__init__.py index a91e0a65ff..773f73d624 100644 --- a/openpype/hosts/photoshop/__init__.py +++ b/openpype/hosts/photoshop/__init__.py @@ -1,9 +1,10 @@ -def add_implementation_envs(env, _app): - """Modify environments to contain all required for implementation.""" - defaults = { - "OPENPYPE_LOG_NO_COLORS": "True", - "WEBSOCKET_URL": "ws://localhost:8099/ws/" - } - for key, value in defaults.items(): - if not env.get(key): - env[key] = value +from .addon import ( + PhotoshopAddon, + PHOTOSHOP_HOST_DIR, +) + + +__all__ = ( + "PhotoshopAddon", + "PHOTOSHOP_HOST_DIR", +) diff --git a/openpype/hosts/photoshop/addon.py b/openpype/hosts/photoshop/addon.py new file mode 100644 index 0000000000..a41d91554b --- /dev/null +++ b/openpype/hosts/photoshop/addon.py @@ -0,0 +1,26 @@ 
+import os +from openpype.modules import OpenPypeModule +from openpype.modules.interfaces import IHostAddon + +PHOTOSHOP_HOST_DIR = os.path.dirname(os.path.abspath(__file__)) + + +class PhotoshopAddon(OpenPypeModule, IHostAddon): + name = "photoshop" + host_name = "photoshop" + + def initialize(self, module_settings): + self.enabled = True + + def add_implementation_envs(self, env, _app): + """Modify environments to contain all required for implementation.""" + defaults = { + "OPENPYPE_LOG_NO_COLORS": "True", + "WEBSOCKET_URL": "ws://localhost:8099/ws/" + } + for key, value in defaults.items(): + if not env.get(key): + env[key] = value + + def get_workfile_extensions(self): + return [".psd", ".psb"] diff --git a/openpype/hosts/photoshop/api/lib.py b/openpype/hosts/photoshop/api/lib.py index 2f57d64464..73a546604f 100644 --- a/openpype/hosts/photoshop/api/lib.py +++ b/openpype/hosts/photoshop/api/lib.py @@ -5,11 +5,10 @@ import traceback from Qt import QtWidgets -from openpype.api import Logger +from openpype.lib import env_value_to_bool, Logger +from openpype.modules import ModulesManager from openpype.pipeline import install_host from openpype.tools.utils import host_tools -from openpype.lib.remote_publish import headless_publish -from openpype.lib import env_value_to_bool from .launch_logic import ProcessLauncher, stub @@ -35,8 +34,10 @@ def main(*subprocess_args): launcher.start() if env_value_to_bool("HEADLESS_PUBLISH"): + manager = ModulesManager() + webpublisher_addon = manager["webpublisher"] launcher.execute_in_main_thread( - headless_publish, + webpublisher_addon.headless_publish, log, "ClosePS", os.environ.get("IS_TEST") diff --git a/openpype/hosts/photoshop/api/pipeline.py b/openpype/hosts/photoshop/api/pipeline.py index 20a6e3169f..f660096630 100644 --- a/openpype/hosts/photoshop/api/pipeline.py +++ b/openpype/hosts/photoshop/api/pipeline.py @@ -1,6 +1,5 @@ import os from Qt import QtWidgets -from bson.objectid import ObjectId import pyblish.api @@ -13,16 +12,15 @@ from openpype.pipeline import ( deregister_loader_plugin_path, deregister_creator_plugin_path, AVALON_CONTAINER_ID, - registered_host, ) -import openpype.hosts.photoshop +from openpype.pipeline.load import any_outdated_containers +from openpype.hosts.photoshop import PHOTOSHOP_HOST_DIR from . import lib log = Logger.get_logger(__name__) -HOST_DIR = os.path.dirname(os.path.abspath(openpype.hosts.photoshop.__file__)) -PLUGINS_DIR = os.path.join(HOST_DIR, "plugins") +PLUGINS_DIR = os.path.join(PHOTOSHOP_HOST_DIR, "plugins") PUBLISH_PATH = os.path.join(PLUGINS_DIR, "publish") LOAD_PATH = os.path.join(PLUGINS_DIR, "load") CREATE_PATH = os.path.join(PLUGINS_DIR, "create") @@ -30,7 +28,7 @@ INVENTORY_PATH = os.path.join(PLUGINS_DIR, "inventory") def check_inventory(): - if not lib.any_outdated(): + if not any_outdated_containers(): return # Warn about outdated containers. diff --git a/openpype/hosts/photoshop/api/workio.py b/openpype/hosts/photoshop/api/workio.py index 951c5dbfff..35b44d6070 100644 --- a/openpype/hosts/photoshop/api/workio.py +++ b/openpype/hosts/photoshop/api/workio.py @@ -1,7 +1,6 @@ """Host API required Work Files tool""" import os -from openpype.pipeline import HOST_WORKFILE_EXTENSIONS from . 
import lib @@ -14,7 +13,7 @@ def _active_document(): def file_extensions(): - return HOST_WORKFILE_EXTENSIONS["photoshop"] + return [".psd", ".psb"] def has_unsaved_changes(): diff --git a/openpype/hosts/photoshop/plugins/create/create_image.py b/openpype/hosts/photoshop/plugins/create/create_image.py index f15068b031..2cfbfa8778 100644 --- a/openpype/hosts/photoshop/plugins/create/create_image.py +++ b/openpype/hosts/photoshop/plugins/create/create_image.py @@ -1,3 +1,5 @@ +import re + from openpype.hosts.photoshop import api from openpype.lib import BoolDef from openpype.pipeline import ( @@ -5,6 +7,8 @@ from openpype.pipeline import ( CreatedInstance, legacy_io ) +from openpype.lib import prepare_template_data +from openpype.pipeline.create import SUBSET_NAME_ALLOWED_SYMBOLS class ImageCreator(Creator): @@ -38,17 +42,24 @@ class ImageCreator(Creator): top_level_selected_items = stub.get_selected_layers() if pre_create_data.get("use_selection"): only_single_item_selected = len(top_level_selected_items) == 1 - for selected_item in top_level_selected_items: - if ( - only_single_item_selected or - pre_create_data.get("create_multiple")): + if ( + only_single_item_selected or + pre_create_data.get("create_multiple")): + for selected_item in top_level_selected_items: if selected_item.group: groups_to_create.append(selected_item) else: top_layers_to_wrap.append(selected_item) - else: - group = stub.group_selected_layers(subset_name_from_ui) - groups_to_create.append(group) + else: + group = stub.group_selected_layers(subset_name_from_ui) + groups_to_create.append(group) + else: + stub.select_layers(stub.get_layers()) + try: + group = stub.group_selected_layers(subset_name_from_ui) + except Exception: + raise ValueError("Cannot group locked Background layer!") + groups_to_create.append(group) if not groups_to_create and not top_layers_to_wrap: group = stub.create_group(subset_name_from_ui) @@ -60,6 +71,7 @@ class ImageCreator(Creator): group = stub.group_selected_layers(layer.name) groups_to_create.append(group) + layer_name = '' creating_multiple_groups = len(groups_to_create) > 1 for group in groups_to_create: subset_name = subset_name_from_ui # reset to name from creator UI @@ -67,8 +79,16 @@ class ImageCreator(Creator): created_group_name = self._clean_highlights(stub, group.name) if creating_multiple_groups: - # concatenate with layer name to differentiate subsets - subset_name += group.name.title().replace(" ", "") + layer_name = re.sub( + "[^{}]+".format(SUBSET_NAME_ALLOWED_SYMBOLS), + "", + group.name + ) + if "{layer}" not in subset_name.lower(): + subset_name += "{Layer}" + + layer_fill = prepare_template_data({"layer": layer_name}) + subset_name = subset_name.format(**layer_fill) if group.long_name: for directory in group.long_name[::-1]: @@ -143,3 +163,6 @@ class ImageCreator(Creator): def _clean_highlights(self, stub, item): return item.replace(stub.PUBLISH_ICON, '').replace(stub.LOADED_ICON, '') + @classmethod + def get_dynamic_data(cls, *args, **kwargs): + return {"layer": "{layer}"} diff --git a/openpype/hosts/photoshop/plugins/create/create_legacy_image.py b/openpype/hosts/photoshop/plugins/create/create_legacy_image.py index 9736471a26..2792a775e0 100644 --- a/openpype/hosts/photoshop/plugins/create/create_legacy_image.py +++ b/openpype/hosts/photoshop/plugins/create/create_legacy_image.py @@ -1,7 +1,12 @@ +import re + from Qt import QtWidgets from openpype.pipeline import create from openpype.hosts.photoshop import api as photoshop +from openpype.lib import prepare_template_data +from 
openpype.pipeline.create import SUBSET_NAME_ALLOWED_SYMBOLS + class CreateImage(create.LegacyCreator): """Image folder for publish.""" @@ -75,6 +80,7 @@ class CreateImage(create.LegacyCreator): groups.append(group) creator_subset_name = self.data["subset"] + layer_name = '' for group in groups: long_names = [] group.name = group.name.replace(stub.PUBLISH_ICON, ''). \ @@ -82,7 +88,16 @@ class CreateImage(create.LegacyCreator): subset_name = creator_subset_name if len(groups) > 1: - subset_name += group.name.title().replace(" ", "") + layer_name = re.sub( + "[^{}]+".format(SUBSET_NAME_ALLOWED_SYMBOLS), + "", + group.name + ) + if "{layer}" not in subset_name.lower(): + subset_name += "{Layer}" + + layer_fill = prepare_template_data({"layer": layer_name}) + subset_name = subset_name.format(**layer_fill) if group.long_name: for directory in group.long_name[::-1]: @@ -98,3 +113,7 @@ class CreateImage(create.LegacyCreator): # reusing existing group, need to rename afterwards if not create_group: stub.rename_layer(group.id, stub.PUBLISH_ICON + group.name) + + @classmethod + def get_dynamic_data(cls, *args, **kwargs): + return {"layer": "{layer}"} diff --git a/openpype/hosts/photoshop/plugins/create/workfile_creator.py b/openpype/hosts/photoshop/plugins/create/workfile_creator.py index 43302329f1..e79d16d154 100644 --- a/openpype/hosts/photoshop/plugins/create/workfile_creator.py +++ b/openpype/hosts/photoshop/plugins/create/workfile_creator.py @@ -11,6 +11,8 @@ class PSWorkfileCreator(AutoCreator): identifier = "workfile" family = "workfile" + default_variant = "Main" + def get_instance_attr_defs(self): return [] @@ -35,7 +37,6 @@ class PSWorkfileCreator(AutoCreator): existing_instance = instance break - variant = '' project_name = legacy_io.Session["AVALON_PROJECT"] asset_name = legacy_io.Session["AVALON_ASSET"] task_name = legacy_io.Session["AVALON_TASK"] @@ -43,15 +44,17 @@ class PSWorkfileCreator(AutoCreator): if existing_instance is None: asset_doc = get_asset_by_name(project_name, asset_name) subset_name = self.get_subset_name( - variant, task_name, asset_doc, project_name, host_name + self.default_variant, task_name, asset_doc, + project_name, host_name ) data = { "asset": asset_name, "task": task_name, - "variant": variant + "variant": self.default_variant } data.update(self.get_dynamic_data( - variant, task_name, asset_doc, project_name, host_name + self.default_variant, task_name, asset_doc, + project_name, host_name )) new_instance = CreatedInstance( @@ -67,7 +70,9 @@ class PSWorkfileCreator(AutoCreator): ): asset_doc = get_asset_by_name(project_name, asset_name) subset_name = self.get_subset_name( - variant, task_name, asset_doc, project_name, host_name + self.default_variant, task_name, asset_doc, + project_name, host_name ) existing_instance["asset"] = asset_name existing_instance["task"] = task_name + existing_instance["subset"] = subset_name diff --git a/openpype/hosts/photoshop/plugins/publish/collect_batch_data.py b/openpype/hosts/photoshop/plugins/publish/collect_batch_data.py index 2881ef0ea6..5d50a78914 100644 --- a/openpype/hosts/photoshop/plugins/publish/collect_batch_data.py +++ b/openpype/hosts/photoshop/plugins/publish/collect_batch_data.py @@ -17,11 +17,11 @@ import os import pyblish.api -from openpype.lib.plugin_tools import ( - parse_json, - get_batch_asset_task_info -) from openpype.pipeline import legacy_io +from openpype_modules.webpublisher.lib import ( + get_batch_asset_task_info, + parse_json +) class CollectBatchData(pyblish.api.ContextPlugin): diff --git 
a/openpype/hosts/photoshop/plugins/publish/collect_color_coded_instances.py b/openpype/hosts/photoshop/plugins/publish/collect_color_coded_instances.py index 71bd2cd854..c157c932fd 100644 --- a/openpype/hosts/photoshop/plugins/publish/collect_color_coded_instances.py +++ b/openpype/hosts/photoshop/plugins/publish/collect_color_coded_instances.py @@ -9,14 +9,22 @@ from openpype.settings import get_project_settings class CollectColorCodedInstances(pyblish.api.ContextPlugin): - """Creates instances for configured color code of a layer. + """Creates instances for layers marked by configurable color. Used in remote publishing when artists marks publishable layers by color- - coding. + coding. Top level layers (group) must be marked by specific color to be + published as an instance of 'image' family. Can add group for all publishable layers to allow creation of flattened image. (Cannot contain special background layer as it cannot be grouped!) + Based on value `create_flatten_image` from Settings: + - "yes": create flattened 'image' subset of all publishable layers + create + 'image' subset per publishable layer + - "only": create ONLY flattened 'image' subset of all publishable layers + - "no": do not create flattened 'image' subset at all, + only separate subsets per marked layer. + Identifier: id (str): "pyblish.avalon.instance" """ @@ -32,8 +40,7 @@ class CollectColorCodedInstances(pyblish.api.ContextPlugin): # TODO check if could be set globally, probably doesn't make sense when # flattened template cannot subset_template_name = "" - create_flatten_image = False - # probably not possible to configure this globally + create_flatten_image = "no" flatten_subset_template = "" def process(self, context): @@ -62,6 +69,7 @@ class CollectColorCodedInstances(pyblish.api.ContextPlugin): publishable_layers = [] created_instances = [] + family_from_settings = None for layer in layers: self.log.debug("Layer:: {}".format(layer)) if layer.parents: @@ -80,6 +88,9 @@ class CollectColorCodedInstances(pyblish.api.ContextPlugin): self.log.debug("!!! 
Not found family or template, skip") continue + if not family_from_settings: + family_from_settings = resolved_family + fill_pairs = { "variant": variant, "family": resolved_family, @@ -98,13 +109,16 @@ class CollectColorCodedInstances(pyblish.api.ContextPlugin): "Subset {} already created, skipping.".format(subset)) continue - instance = self._create_instance(context, layer, resolved_family, - asset_name, subset, task_name) + if self.create_flatten_image != "flatten_only": + instance = self._create_instance(context, layer, + resolved_family, + asset_name, subset, task_name) + created_instances.append(instance) + existing_subset_names.append(subset) publishable_layers.append(layer) - created_instances.append(instance) - if self.create_flatten_image and publishable_layers: + if self.create_flatten_image != "no" and publishable_layers: self.log.debug("create_flatten_image") if not self.flatten_subset_template: self.log.warning("No template for flatten image") @@ -116,7 +130,7 @@ class CollectColorCodedInstances(pyblish.api.ContextPlugin): first_layer = publishable_layers[0] # dummy layer first_layer.name = subset - family = created_instances[0].data["family"] # inherit family + family = family_from_settings # inherit family instance = self._create_instance(context, first_layer, family, asset_name, subset, task_name) diff --git a/openpype/hosts/photoshop/plugins/publish/collect_review.py b/openpype/hosts/photoshop/plugins/publish/collect_review.py index 2ea5503f3f..7f395b46d7 100644 --- a/openpype/hosts/photoshop/plugins/publish/collect_review.py +++ b/openpype/hosts/photoshop/plugins/publish/collect_review.py @@ -10,7 +10,7 @@ import os import pyblish.api -from openpype.lib import get_subset_name_with_asset_doc +from openpype.pipeline.create import get_subset_name class CollectReview(pyblish.api.ContextPlugin): @@ -27,13 +27,14 @@ class CollectReview(pyblish.api.ContextPlugin): def process(self, context): family = "review" - subset = get_subset_name_with_asset_doc( + subset = get_subset_name( family, context.data.get("variant", ''), context.data["anatomyData"]["task"]["name"], context.data["assetEntity"], context.data["anatomyData"]["project"]["name"], - host_name=context.data["hostName"] + host_name=context.data["hostName"], + project_settings=context.data["project_settings"] ) instance = context.create_instance(subset) diff --git a/openpype/hosts/photoshop/plugins/publish/collect_workfile.py b/openpype/hosts/photoshop/plugins/publish/collect_workfile.py index e4f0a07b34..9a5aad5569 100644 --- a/openpype/hosts/photoshop/plugins/publish/collect_workfile.py +++ b/openpype/hosts/photoshop/plugins/publish/collect_workfile.py @@ -1,7 +1,7 @@ import os import pyblish.api -from openpype.lib import get_subset_name_with_asset_doc +from openpype.pipeline.create import get_subset_name class CollectWorkfile(pyblish.api.ContextPlugin): @@ -11,6 +11,8 @@ class CollectWorkfile(pyblish.api.ContextPlugin): label = "Collect Workfile" hosts = ["photoshop"] + default_variant = "Main" + def process(self, context): existing_instance = None for instance in context: @@ -20,13 +22,16 @@ class CollectWorkfile(pyblish.api.ContextPlugin): break family = "workfile" - subset = get_subset_name_with_asset_doc( + # context.data["variant"] might come only from collect_batch_data + variant = context.data.get("variant") or self.default_variant + subset = get_subset_name( family, - "", + variant, context.data["anatomyData"]["task"]["name"], context.data["assetEntity"], context.data["anatomyData"]["project"]["name"], - 
host_name=context.data["hostName"] + host_name=context.data["hostName"], + project_settings=context.data["project_settings"] ) file_path = context.data["currentFile"] diff --git a/openpype/hosts/photoshop/plugins/publish/extract_review.py b/openpype/hosts/photoshop/plugins/publish/extract_review.py index d076610ead..5d37c86ed8 100644 --- a/openpype/hosts/photoshop/plugins/publish/extract_review.py +++ b/openpype/hosts/photoshop/plugins/publish/extract_review.py @@ -1,5 +1,6 @@ import os import shutil +from PIL import Image import openpype.api import openpype.lib @@ -8,10 +9,17 @@ from openpype.hosts.photoshop import api as photoshop class ExtractReview(openpype.api.Extractor): """ - Produce a flattened or sequence image file from all 'image' instances. + Produce a flattened or sequence image files from all 'image' instances. If no 'image' instance is created, it produces flattened image from all visible layers. + + It creates review, thumbnail and mov representations. + + 'review' family could be used in other steps as a reference, as it + contains flattened image by default. (Eg. artist could load this + review as a single item and see full image. In most cases 'image' + family is separated by layers to better usage in animation or comp.) """ label = "Extract Review" @@ -22,6 +30,7 @@ class ExtractReview(openpype.api.Extractor): jpg_options = None mov_options = None make_image_sequence = None + max_downscale_size = 8192 def process(self, instance): staging_dir = self.staging_dir(instance) @@ -49,7 +58,7 @@ class ExtractReview(openpype.api.Extractor): "stagingDir": staging_dir, "tags": self.jpg_options['tags'], }) - + processed_img_names = img_list else: self.log.info("Extract layers to flatten image.") img_list = self._saves_flattened_layers(staging_dir, layers) @@ -57,26 +66,33 @@ class ExtractReview(openpype.api.Extractor): instance.data["representations"].append({ "name": "jpg", "ext": "jpg", - "files": img_list, + "files": img_list, # cannot be [] for single frame "stagingDir": staging_dir, "tags": self.jpg_options['tags'] }) + processed_img_names = [img_list] ffmpeg_path = openpype.lib.get_ffmpeg_tool_path("ffmpeg") instance.data["stagingDir"] = staging_dir - # Generate thumbnail. + source_files_pattern = os.path.join(staging_dir, + self.output_seq_filename) + source_files_pattern = self._check_and_resize(processed_img_names, + source_files_pattern, + staging_dir) + # Generate thumbnail thumbnail_path = os.path.join(staging_dir, "thumbnail.jpg") self.log.info(f"Generate thumbnail {thumbnail_path}") args = [ ffmpeg_path, "-y", - "-i", os.path.join(staging_dir, self.output_seq_filename), + "-i", source_files_pattern, "-vf", "scale=300:-1", "-vframes", "1", thumbnail_path ] + self.log.debug("thumbnail args:: {}".format(args)) output = openpype.lib.run_subprocess(args) instance.data["representations"].append({ @@ -94,11 +110,12 @@ class ExtractReview(openpype.api.Extractor): args = [ ffmpeg_path, "-y", - "-i", os.path.join(staging_dir, self.output_seq_filename), + "-i", source_files_pattern, "-vf", "pad=ceil(iw/2)*2:ceil(ih/2)*2", "-vframes", str(img_number), mov_path ] + self.log.debug("mov args:: {}".format(args)) output = openpype.lib.run_subprocess(args) self.log.debug(output) instance.data["representations"].append({ @@ -120,6 +137,34 @@ class ExtractReview(openpype.api.Extractor): self.log.info(f"Extracted {instance} to {staging_dir}") + def _check_and_resize(self, processed_img_names, source_files_pattern, + staging_dir): + """Check if saved image could be used in ffmpeg. 
+ + Ffmpeg has max size 16384x16384. Saved image(s) must be resized to be + used as a source for thumbnail or review mov. + """ + Image.MAX_IMAGE_PIXELS = None + first_url = os.path.join(staging_dir, processed_img_names[0]) + with Image.open(first_url) as im: + width, height = im.size + + if width > self.max_downscale_size or height > self.max_downscale_size: + resized_dir = os.path.join(staging_dir, "resized") + os.mkdir(resized_dir) + source_files_pattern = os.path.join(resized_dir, + self.output_seq_filename) + for file_name in processed_img_names: + source_url = os.path.join(staging_dir, file_name) + with Image.open(source_url) as res_img: + # 'thumbnail' automatically keeps aspect ratio + res_img.thumbnail((self.max_downscale_size, + self.max_downscale_size), + Image.ANTIALIAS) + res_img.save(os.path.join(resized_dir, file_name)) + + return source_files_pattern + def _get_image_path_from_instances(self, instance): img_list = [] diff --git a/openpype/hosts/photoshop/plugins/publish/validate_naming.py b/openpype/hosts/photoshop/plugins/publish/validate_naming.py index b53f4e8198..8106d6ff16 100644 --- a/openpype/hosts/photoshop/plugins/publish/validate_naming.py +++ b/openpype/hosts/photoshop/plugins/publish/validate_naming.py @@ -4,6 +4,7 @@ import pyblish.api import openpype.api from openpype.pipeline import PublishXmlValidationError from openpype.hosts.photoshop import api as photoshop +from openpype.pipeline.create import SUBSET_NAME_ALLOWED_SYMBOLS class ValidateNamingRepair(pyblish.api.Action): @@ -50,6 +51,13 @@ class ValidateNamingRepair(pyblish.api.Action): subset_name = re.sub(invalid_chars, replace_char, instance.data["subset"]) + # format from Tool Creator + subset_name = re.sub( + "[^{}]+".format(SUBSET_NAME_ALLOWED_SYMBOLS), + "", + subset_name + ) + layer_meta["subset"] = subset_name stub.imprint(instance_id, layer_meta) diff --git a/openpype/hosts/resolve/__init__.py b/openpype/hosts/resolve/__init__.py index 3e49ce3b9b..e69de29bb2 100644 --- a/openpype/hosts/resolve/__init__.py +++ b/openpype/hosts/resolve/__init__.py @@ -1,129 +0,0 @@ -from .api.utils import ( - setup, - get_resolve_module -) - -from .api.pipeline import ( - install, - uninstall, - ls, - containerise, - update_container, - publish, - launch_workfiles_app, - maintained_selection, - remove_instance, - list_instances -) - -from .api.lib import ( - maintain_current_timeline, - publish_clip_color, - get_project_manager, - get_current_project, - get_current_timeline, - create_bin, - get_media_pool_item, - create_media_pool_item, - create_timeline_item, - get_timeline_item, - get_video_track_names, - get_current_timeline_items, - get_pype_timeline_item_by_name, - get_timeline_item_pype_tag, - set_timeline_item_pype_tag, - imprint, - set_publish_attribute, - get_publish_attribute, - create_compound_clip, - swap_clips, - get_pype_clip_metadata, - set_project_manager_to_folder_name, - get_otio_clip_instance_data, - get_reformated_path -) - -from .api.menu import launch_pype_menu - -from .api.plugin import ( - ClipLoader, - TimelineItemLoader, - Creator, - PublishClip -) - -from .api.workio import ( - open_file, - save_file, - current_file, - has_unsaved_changes, - file_extensions, - work_root -) - -from .api.testing_utils import TestGUI - - -__all__ = [ - # pipeline - "install", - "uninstall", - "ls", - "containerise", - "update_container", - "reload_pipeline", - "publish", - "launch_workfiles_app", - "maintained_selection", - "remove_instance", - "list_instances", - - # utils - "setup", - "get_resolve_module", 
- - # lib - "maintain_current_timeline", - "publish_clip_color", - "get_project_manager", - "get_current_project", - "get_current_timeline", - "create_bin", - "get_media_pool_item", - "create_media_pool_item", - "create_timeline_item", - "get_timeline_item", - "get_video_track_names", - "get_current_timeline_items", - "get_pype_timeline_item_by_name", - "get_timeline_item_pype_tag", - "set_timeline_item_pype_tag", - "imprint", - "set_publish_attribute", - "get_publish_attribute", - "create_compound_clip", - "swap_clips", - "get_pype_clip_metadata", - "set_project_manager_to_folder_name", - "get_otio_clip_instance_data", - "get_reformated_path", - - # menu - "launch_pype_menu", - - # plugin - "ClipLoader", - "TimelineItemLoader", - "Creator", - "PublishClip", - - # workio - "open_file", - "save_file", - "current_file", - "has_unsaved_changes", - "file_extensions", - "work_root", - - "TestGUI" -] diff --git a/openpype/hosts/resolve/api/__init__.py b/openpype/hosts/resolve/api/__init__.py index 48bd938e57..cf1edb4c35 100644 --- a/openpype/hosts/resolve/api/__init__.py +++ b/openpype/hosts/resolve/api/__init__.py @@ -1,11 +1,137 @@ """ resolve api """ -import os bmdvr = None bmdvf = None -API_DIR = os.path.dirname(os.path.abspath(__file__)) -HOST_DIR = os.path.dirname(API_DIR) -PLUGINS_DIR = os.path.join(HOST_DIR, "plugins") +from .utils import ( + get_resolve_module +) + +from .pipeline import ( + install, + uninstall, + ls, + containerise, + update_container, + publish, + launch_workfiles_app, + maintained_selection, + remove_instance, + list_instances +) + +from .lib import ( + maintain_current_timeline, + publish_clip_color, + get_project_manager, + get_current_project, + get_current_timeline, + create_bin, + get_media_pool_item, + create_media_pool_item, + create_timeline_item, + get_timeline_item, + get_video_track_names, + get_current_timeline_items, + get_pype_timeline_item_by_name, + get_timeline_item_pype_tag, + set_timeline_item_pype_tag, + imprint, + set_publish_attribute, + get_publish_attribute, + create_compound_clip, + swap_clips, + get_pype_clip_metadata, + set_project_manager_to_folder_name, + get_otio_clip_instance_data, + get_reformated_path +) + +from .menu import launch_pype_menu + +from .plugin import ( + ClipLoader, + TimelineItemLoader, + Creator, + PublishClip +) + +from .workio import ( + open_file, + save_file, + current_file, + has_unsaved_changes, + file_extensions, + work_root +) + +from .testing_utils import TestGUI + + +__all__ = [ + "bmdvr", + "bmdvf", + + # pipeline + "install", + "uninstall", + "ls", + "containerise", + "update_container", + "reload_pipeline", + "publish", + "launch_workfiles_app", + "maintained_selection", + "remove_instance", + "list_instances", + + # utils + "get_resolve_module", + + # lib + "maintain_current_timeline", + "publish_clip_color", + "get_project_manager", + "get_current_project", + "get_current_timeline", + "create_bin", + "get_media_pool_item", + "create_media_pool_item", + "create_timeline_item", + "get_timeline_item", + "get_video_track_names", + "get_current_timeline_items", + "get_pype_timeline_item_by_name", + "get_timeline_item_pype_tag", + "set_timeline_item_pype_tag", + "imprint", + "set_publish_attribute", + "get_publish_attribute", + "create_compound_clip", + "swap_clips", + "get_pype_clip_metadata", + "set_project_manager_to_folder_name", + "get_otio_clip_instance_data", + "get_reformated_path", + + # menu + "launch_pype_menu", + + # plugin + "ClipLoader", + "TimelineItemLoader", + "Creator", + "PublishClip", + + 
# workio + "open_file", + "save_file", + "current_file", + "has_unsaved_changes", + "file_extensions", + "work_root", + + "TestGUI" +] diff --git a/openpype/hosts/resolve/api/action.py b/openpype/hosts/resolve/api/action.py index f8f338a850..d55a24a39a 100644 --- a/openpype/hosts/resolve/api/action.py +++ b/openpype/hosts/resolve/api/action.py @@ -4,7 +4,7 @@ from __future__ import absolute_import import pyblish.api -from ...action import get_errored_instances_from_context +from openpype.action import get_errored_instances_from_context class SelectInvalidAction(pyblish.api.Action): diff --git a/openpype/hosts/resolve/api/lib.py b/openpype/hosts/resolve/api/lib.py index 93ccdaf812..f41eb36caf 100644 --- a/openpype/hosts/resolve/api/lib.py +++ b/openpype/hosts/resolve/api/lib.py @@ -4,13 +4,13 @@ import re import os import contextlib from opentimelineio import opentime + +from openpype.lib import Logger from openpype.pipeline.editorial import is_overlapping_otio_ranges from ..otio import davinci_export as otio_export -from openpype.api import Logger - -log = Logger().get_logger(__name__) +log = Logger.get_logger(__name__) self = sys.modules[__name__] self.project_manager = None diff --git a/openpype/hosts/resolve/api/menu.py b/openpype/hosts/resolve/api/menu.py index 9e0dd12376..2c7678ee5b 100644 --- a/openpype/hosts/resolve/api/menu.py +++ b/openpype/hosts/resolve/api/menu.py @@ -3,13 +3,13 @@ import sys from Qt import QtWidgets, QtCore +from openpype.tools.utils import host_tools + from .pipeline import ( publish, launch_workfiles_app ) -from openpype.tools.utils import host_tools - def load_stylesheet(): path = os.path.join(os.path.dirname(__file__), "menu_style.qss") diff --git a/openpype/hosts/resolve/api/pipeline.py b/openpype/hosts/resolve/api/pipeline.py index 4a7d1c5bea..1c8d9dc01c 100644 --- a/openpype/hosts/resolve/api/pipeline.py +++ b/openpype/hosts/resolve/api/pipeline.py @@ -7,7 +7,7 @@ from collections import OrderedDict from pyblish import api as pyblish -from openpype.api import Logger +from openpype.lib import Logger from openpype.pipeline import ( schema, register_loader_plugin_path, @@ -16,11 +16,15 @@ from openpype.pipeline import ( deregister_creator_plugin_path, AVALON_CONTAINER_ID, ) -from . import lib -from . import PLUGINS_DIR from openpype.tools.utils import host_tools -log = Logger().get_logger(__name__) +from . import lib +from .utils import get_resolve_module + +log = Logger.get_logger(__name__) + +HOST_DIR = os.path.dirname(os.path.abspath(os.path.dirname(__file__))) +PLUGINS_DIR = os.path.join(HOST_DIR, "plugins") PUBLISH_PATH = os.path.join(PLUGINS_DIR, "publish") LOAD_PATH = os.path.join(PLUGINS_DIR, "load") CREATE_PATH = os.path.join(PLUGINS_DIR, "create") @@ -39,7 +43,6 @@ def install(): See the Maya equivalent for inspiration on how to implement this. """ - from .. import get_resolve_module log.info("openpype.hosts.resolve installed") diff --git a/openpype/hosts/resolve/api/plugin.py b/openpype/hosts/resolve/api/plugin.py index 49b478fb3b..b03125d502 100644 --- a/openpype/hosts/resolve/api/plugin.py +++ b/openpype/hosts/resolve/api/plugin.py @@ -4,11 +4,11 @@ import uuid import qargparse from Qt import QtWidgets, QtCore -import openpype.api as pype from openpype.pipeline import ( LegacyCreator, LoaderPlugin, ) +from openpype.pipeline.context_tools import get_current_project_asset from openpype.hosts import resolve from . 
import lib @@ -375,7 +375,7 @@ class ClipLoader: """ asset_name = self.context["representation"]["context"]["asset"] - self.data["assetData"] = pype.get_asset(asset_name)["data"] + self.data["assetData"] = get_current_project_asset(asset_name)["data"] def load(self): # create project bin for the media to be imported into diff --git a/openpype/hosts/resolve/api/preload_console.py b/openpype/hosts/resolve/api/preload_console.py index 1e3a56b4dd..a822ea2460 100644 --- a/openpype/hosts/resolve/api/preload_console.py +++ b/openpype/hosts/resolve/api/preload_console.py @@ -1,9 +1,9 @@ #!/usr/bin/env python import time from openpype.hosts.resolve.utils import get_resolve_module -from openpype.api import Logger +from openpype.lib import Logger -log = Logger().get_logger(__name__) +log = Logger.get_logger(__name__) wait_delay = 2.5 wait = 0.00 diff --git a/openpype/hosts/resolve/api/utils.py b/openpype/hosts/resolve/api/utils.py index 9b3762f328..871b3af38d 100644 --- a/openpype/hosts/resolve/api/utils.py +++ b/openpype/hosts/resolve/api/utils.py @@ -4,21 +4,21 @@ Resolve's tools for setting environment """ -import sys import os -import shutil -from . import HOST_DIR -from openpype.api import Logger -log = Logger().get_logger(__name__) +import sys + +from openpype.lib import Logger + +log = Logger.get_logger(__name__) def get_resolve_module(): - from openpype.hosts import resolve + from openpype.hosts.resolve import api # dont run if already loaded - if resolve.api.bmdvr: + if api.bmdvr: log.info(("resolve module is assigned to " - f"`pype.hosts.resolve.api.bmdvr`: {resolve.api.bmdvr}")) - return resolve.api.bmdvr + f"`pype.hosts.resolve.api.bmdvr`: {api.bmdvr}")) + return api.bmdvr try: """ The PYTHONPATH needs to be set correctly for this import @@ -71,79 +71,9 @@ def get_resolve_module(): # assign global var and return bmdvr = bmd.scriptapp("Resolve") bmdvf = bmd.scriptapp("Fusion") - resolve.api.bmdvr = bmdvr - resolve.api.bmdvf = bmdvf + api.bmdvr = bmdvr + api.bmdvf = bmdvf log.info(("Assigning resolve module to " - f"`pype.hosts.resolve.api.bmdvr`: {resolve.api.bmdvr}")) + f"`pype.hosts.resolve.api.bmdvr`: {api.bmdvr}")) log.info(("Assigning resolve module to " - f"`pype.hosts.resolve.api.bmdvf`: {resolve.api.bmdvf}")) - - -def _sync_utility_scripts(env=None): - """ Synchronizing basic utlility scripts for resolve. - - To be able to run scripts from inside `Resolve/Workspace/Scripts` menu - all scripts has to be accessible from defined folder. 
- """ - if not env: - env = os.environ - - # initiate inputs - scripts = {} - us_env = env.get("RESOLVE_UTILITY_SCRIPTS_SOURCE_DIR") - us_dir = env.get("RESOLVE_UTILITY_SCRIPTS_DIR", "") - us_paths = [os.path.join( - HOST_DIR, - "utility_scripts" - )] - - # collect script dirs - if us_env: - log.info(f"Utility Scripts Env: `{us_env}`") - us_paths = us_env.split( - os.pathsep) + us_paths - - # collect scripts from dirs - for path in us_paths: - scripts.update({path: os.listdir(path)}) - - log.info(f"Utility Scripts Dir: `{us_paths}`") - log.info(f"Utility Scripts: `{scripts}`") - - # make sure no script file is in folder - if next((s for s in os.listdir(us_dir)), None): - for s in os.listdir(us_dir): - path = os.path.join(us_dir, s) - log.info(f"Removing `{path}`...") - if os.path.isdir(path): - shutil.rmtree(path, onerror=None) - else: - os.remove(path) - - # copy scripts into Resolve's utility scripts dir - for d, sl in scripts.items(): - # directory and scripts list - for s in sl: - # script in script list - src = os.path.join(d, s) - dst = os.path.join(us_dir, s) - log.info(f"Copying `{src}` to `{dst}`...") - if os.path.isdir(src): - shutil.copytree( - src, dst, symlinks=False, - ignore=None, ignore_dangling_symlinks=False - ) - else: - shutil.copy2(src, dst) - - -def setup(env=None): - """ Wrapper installer started from pype.hooks.resolve.ResolvePrelaunch() - """ - if not env: - env = os.environ - - # synchronize resolve utility scripts - _sync_utility_scripts(env) - - log.info("Resolve OpenPype wrapper has been installed") + f"`pype.hosts.resolve.api.bmdvf`: {api.bmdvf}")) diff --git a/openpype/hosts/resolve/api/workio.py b/openpype/hosts/resolve/api/workio.py index f175769387..5a742ecf7e 100644 --- a/openpype/hosts/resolve/api/workio.py +++ b/openpype/hosts/resolve/api/workio.py @@ -2,14 +2,14 @@ import os from openpype.api import Logger -from .. import ( +from .lib import ( get_project_manager, get_current_project, set_project_manager_to_folder_name ) -log = Logger().get_logger(__name__) +log = Logger.get_logger(__name__) exported_projet_ext = ".drp" @@ -60,7 +60,7 @@ def open_file(filepath): # load project from input path project = pm.LoadProject(fname) log.info(f"Project {project.GetName()} opened...") - return True + except AttributeError: log.warning((f"Project with name `{fname}` does not exist! 
It will " f"be imported from {filepath} and then loaded...")) @@ -69,9 +69,8 @@ def open_file(filepath): project = pm.LoadProject(fname) log.info(f"Project imported/loaded {project.GetName()}...") return True - else: - return False - + return False + return True def current_file(): pm = get_project_manager() @@ -80,13 +79,9 @@ def current_file(): name = project.GetName() fname = name + exported_projet_ext current_file = os.path.join(current_dir, fname) - normalised = os.path.normpath(current_file) - - # Unsaved current file - if normalised == "": + if not current_file: return None - - return normalised + return os.path.normpath(current_file) def work_root(session): diff --git a/openpype/hosts/resolve/hooks/pre_resolve_setup.py b/openpype/hosts/resolve/hooks/pre_resolve_setup.py index 978e3760fd..1d977e2d8e 100644 --- a/openpype/hosts/resolve/hooks/pre_resolve_setup.py +++ b/openpype/hosts/resolve/hooks/pre_resolve_setup.py @@ -1,7 +1,7 @@ import os -import importlib + from openpype.lib import PreLaunchHook -from openpype.hosts.resolve.api import utils +from openpype.hosts.resolve.utils import setup class ResolvePrelaunch(PreLaunchHook): @@ -43,18 +43,6 @@ class ResolvePrelaunch(PreLaunchHook): self.launch_context.env.get("PRE_PYTHON_SCRIPT", "")) self.launch_context.env["PRE_PYTHON_SCRIPT"] = pre_py_sc self.log.debug(f"-- pre_py_sc: `{pre_py_sc}`...") - try: - __import__("openpype.hosts.resolve") - __import__("pyblish") - except ImportError: - self.log.warning( - "pyblish: Could not load Resolve integration.", - exc_info=True - ) - - else: - # Resolve Setup integration - importlib.reload(utils) - self.log.debug(f"-- utils.__file__: `{utils.__file__}`") - utils.setup(self.launch_context.env) + # Resolve Setup integration + setup(self.launch_context.env) diff --git a/openpype/hosts/resolve/plugins/create/create_shot_clip.py b/openpype/hosts/resolve/plugins/create/create_shot_clip.py index dbf10c5163..4b14f2493f 100644 --- a/openpype/hosts/resolve/plugins/create/create_shot_clip.py +++ b/openpype/hosts/resolve/plugins/create/create_shot_clip.py @@ -1,9 +1,12 @@ # from pprint import pformat -from openpype.hosts import resolve -from openpype.hosts.resolve.api import lib +from openpype.hosts.resolve.api import plugin, lib +from openpype.hosts.resolve.api.lib import ( + get_video_track_names, + create_bin, +) -class CreateShotClip(resolve.Creator): +class CreateShotClip(plugin.Creator): """Publishable clip""" label = "Create Publishable Clip" @@ -11,7 +14,7 @@ class CreateShotClip(resolve.Creator): icon = "film" defaults = ["Main"] - gui_tracks = resolve.get_video_track_names() + gui_tracks = get_video_track_names() gui_name = "OpenPype publish attributes creator" gui_info = "Define sequential rename and fill hierarchy data." 
gui_inputs = { @@ -250,7 +253,7 @@ class CreateShotClip(resolve.Creator): sq_markers = self.timeline.GetMarkers() # create media bin for compound clips (trackItems) - mp_folder = resolve.create_bin(self.timeline.GetName()) + mp_folder = create_bin(self.timeline.GetName()) kwargs = { "ui_inputs": widget.result, @@ -264,6 +267,6 @@ class CreateShotClip(resolve.Creator): self.rename_index = i self.log.info(track_item_data) # convert track item to timeline media pool item - track_item = resolve.PublishClip( + track_item = plugin.PublishClip( self, track_item_data, **kwargs).convert() track_item.SetClipColor(lib.publish_clip_color) diff --git a/openpype/hosts/resolve/plugins/load/load_clip.py b/openpype/hosts/resolve/plugins/load/load_clip.py index 190a5a7206..a0c78c182f 100644 --- a/openpype/hosts/resolve/plugins/load/load_clip.py +++ b/openpype/hosts/resolve/plugins/load/load_clip.py @@ -1,21 +1,22 @@ from copy import deepcopy -from importlib import reload from openpype.client import ( get_version_by_id, get_last_version_by_subset_id, ) -from openpype.hosts import resolve +# from openpype.hosts import resolve from openpype.pipeline import ( get_representation_path, legacy_io, ) from openpype.hosts.resolve.api import lib, plugin -reload(plugin) -reload(lib) +from openpype.hosts.resolve.api.pipeline import ( + containerise, + update_container, +) -class LoadClip(resolve.TimelineItemLoader): +class LoadClip(plugin.TimelineItemLoader): """Load a subset to timeline as clip Place clip to timeline on its asset origin timings collected @@ -46,7 +47,7 @@ class LoadClip(resolve.TimelineItemLoader): }) # load clip to timeline and get main variables - timeline_item = resolve.ClipLoader( + timeline_item = plugin.ClipLoader( self, context, **options).load() namespace = namespace or timeline_item.GetName() version = context['version'] @@ -80,7 +81,7 @@ class LoadClip(resolve.TimelineItemLoader): self.log.info("Loader done: `{}`".format(name)) - return resolve.containerise( + return containerise( timeline_item, name, namespace, context, self.__class__.__name__, @@ -98,7 +99,7 @@ class LoadClip(resolve.TimelineItemLoader): context.update({"representation": representation}) name = container['name'] namespace = container['namespace'] - timeline_item_data = resolve.get_pype_timeline_item_by_name(namespace) + timeline_item_data = lib.get_pype_timeline_item_by_name(namespace) timeline_item = timeline_item_data["clip"]["item"] project_name = legacy_io.active_project() version = get_version_by_id(project_name, representation["parent"]) @@ -109,7 +110,7 @@ class LoadClip(resolve.TimelineItemLoader): self.fname = get_representation_path(representation) context["version"] = {"data": version_data} - loader = resolve.ClipLoader(self, context) + loader = plugin.ClipLoader(self, context) timeline_item = loader.update(timeline_item) # add additional metadata from the version to imprint Avalon knob @@ -136,7 +137,7 @@ class LoadClip(resolve.TimelineItemLoader): # update color of clip regarding the version order self.set_item_color(timeline_item, version) - return resolve.update_container(timeline_item, data_imprint) + return update_container(timeline_item, data_imprint) @classmethod def set_item_color(cls, timeline_item, version): diff --git a/openpype/hosts/resolve/plugins/publish/extract_workfile.py b/openpype/hosts/resolve/plugins/publish/extract_workfile.py index e3d60465a2..ea8f19cd8c 100644 --- a/openpype/hosts/resolve/plugins/publish/extract_workfile.py +++ b/openpype/hosts/resolve/plugins/publish/extract_workfile.py 
@@ -1,7 +1,7 @@ import os import pyblish.api import openpype.api -from openpype.hosts import resolve +from openpype.hosts.resolve.api.lib import get_project_manager class ExtractWorkfile(openpype.api.Extractor): @@ -29,7 +29,7 @@ class ExtractWorkfile(openpype.api.Extractor): os.path.join(staging_dir, drp_file_name)) # write out the drp workfile - resolve.get_project_manager().ExportProject( + get_project_manager().ExportProject( project.GetName(), drp_file_path) # create drp workfile representation diff --git a/openpype/hosts/resolve/plugins/publish/precollect_instances.py b/openpype/hosts/resolve/plugins/publish/precollect_instances.py index 8f1a13a4e5..8ec169ad65 100644 --- a/openpype/hosts/resolve/plugins/publish/precollect_instances.py +++ b/openpype/hosts/resolve/plugins/publish/precollect_instances.py @@ -1,9 +1,15 @@ -import pyblish -from openpype.hosts import resolve - -# # developer reload modules from pprint import pformat +import pyblish + +from openpype.hosts.resolve.api.lib import ( + get_current_timeline_items, + get_timeline_item_pype_tag, + publish_clip_color, + get_publish_attribute, + get_otio_clip_instance_data, +) + class PrecollectInstances(pyblish.api.ContextPlugin): """Collect all Track items selection.""" @@ -14,8 +20,8 @@ class PrecollectInstances(pyblish.api.ContextPlugin): def process(self, context): otio_timeline = context.data["otioTimeline"] - selected_timeline_items = resolve.get_current_timeline_items( - filter=True, selecting_color=resolve.publish_clip_color) + selected_timeline_items = get_current_timeline_items( + filter=True, selecting_color=publish_clip_color) self.log.info( "Processing enabled track items: {}".format( @@ -27,7 +33,7 @@ class PrecollectInstances(pyblish.api.ContextPlugin): timeline_item = timeline_item_data["clip"]["item"] # get pype tag data - tag_data = resolve.get_timeline_item_pype_tag(timeline_item) + tag_data = get_timeline_item_pype_tag(timeline_item) self.log.debug(f"__ tag_data: {pformat(tag_data)}") if not tag_data: @@ -67,14 +73,15 @@ class PrecollectInstances(pyblish.api.ContextPlugin): "asset": asset, "item": timeline_item, "families": families, - "publish": resolve.get_publish_attribute(timeline_item), + "publish": get_publish_attribute(timeline_item), "fps": context.data["fps"], "handleStart": handle_start, - "handleEnd": handle_end + "handleEnd": handle_end, + "newAssetPublishing": True }) # otio clip data - otio_data = resolve.get_otio_clip_instance_data( + otio_data = get_otio_clip_instance_data( otio_timeline, timeline_item_data) or {} data.update(otio_data) @@ -133,7 +140,7 @@ class PrecollectInstances(pyblish.api.ContextPlugin): "asset": asset, "family": family, "families": [], - "publish": resolve.get_publish_attribute(timeline_item) + "publish": get_publish_attribute(timeline_item) }) context.create_instance(**data) diff --git a/openpype/hosts/resolve/plugins/publish/precollect_workfile.py b/openpype/hosts/resolve/plugins/publish/precollect_workfile.py index 53e67aee0e..0f94216556 100644 --- a/openpype/hosts/resolve/plugins/publish/precollect_workfile.py +++ b/openpype/hosts/resolve/plugins/publish/precollect_workfile.py @@ -1,11 +1,9 @@ import pyblish.api from pprint import pformat -from importlib import reload -from openpype.hosts import resolve +from openpype.hosts.resolve import api as rapi from openpype.pipeline import legacy_io from openpype.hosts.resolve.otio import davinci_export -reload(davinci_export) class PrecollectWorkfile(pyblish.api.ContextPlugin): @@ -18,9 +16,9 @@ class 
PrecollectWorkfile(pyblish.api.ContextPlugin): asset = legacy_io.Session["AVALON_ASSET"] subset = "workfile" - project = resolve.get_current_project() + project = rapi.get_current_project() fps = project.GetSetting("timelineFrameRate") - video_tracks = resolve.get_video_track_names() + video_tracks = rapi.get_video_track_names() # adding otio timeline to context otio_timeline = davinci_export.create_otio_timeline(project) diff --git a/openpype/hosts/resolve/utility_scripts/OpenPype_sync_util_scripts.py b/openpype/hosts/resolve/utility_scripts/OpenPype_sync_util_scripts.py index 3a16b9c966..8f3917bece 100644 --- a/openpype/hosts/resolve/utility_scripts/OpenPype_sync_util_scripts.py +++ b/openpype/hosts/resolve/utility_scripts/OpenPype_sync_util_scripts.py @@ -6,10 +6,11 @@ from openpype.pipeline import install_host def main(env): - import openpype.hosts.resolve as bmdvr + from openpype.hosts.resolve.utils import setup + import openpype.hosts.resolve.api as bmdvr # Registers openpype's Global pyblish plugins install_host(bmdvr) - bmdvr.setup(env) + setup(env) if __name__ == "__main__": diff --git a/openpype/hosts/resolve/utility_scripts/__OpenPype__Menu__.py b/openpype/hosts/resolve/utility_scripts/__OpenPype__Menu__.py index 89ade9238b..1087a7b7a0 100644 --- a/openpype/hosts/resolve/utility_scripts/__OpenPype__Menu__.py +++ b/openpype/hosts/resolve/utility_scripts/__OpenPype__Menu__.py @@ -2,13 +2,13 @@ import os import sys from openpype.pipeline import install_host -from openpype.api import Logger +from openpype.lib import Logger -log = Logger().get_logger(__name__) +log = Logger.get_logger(__name__) def main(env): - import openpype.hosts.resolve as bmdvr + import openpype.hosts.resolve.api as bmdvr # activate resolve from openpype install_host(bmdvr) diff --git a/openpype/hosts/resolve/utility_scripts/tests/test_otio_as_edl.py b/openpype/hosts/resolve/utility_scripts/tests/test_otio_as_edl.py index 8433bd9172..92f2e43a72 100644 --- a/openpype/hosts/resolve/utility_scripts/tests/test_otio_as_edl.py +++ b/openpype/hosts/resolve/utility_scripts/tests/test_otio_as_edl.py @@ -6,8 +6,8 @@ import opentimelineio as otio from openpype.pipeline import install_host -from openpype.hosts.resolve import TestGUI -import openpype.hosts.resolve as bmdvr +import openpype.hosts.resolve.api as bmdvr +from openpype.hosts.resolve.api.testing_utils import TestGUI from openpype.hosts.resolve.otio import davinci_export as otio_export diff --git a/openpype/hosts/resolve/utility_scripts/tests/testing_create_timeline_item_from_path.py b/openpype/hosts/resolve/utility_scripts/tests/testing_create_timeline_item_from_path.py index 477955d527..91a361ec08 100644 --- a/openpype/hosts/resolve/utility_scripts/tests/testing_create_timeline_item_from_path.py +++ b/openpype/hosts/resolve/utility_scripts/tests/testing_create_timeline_item_from_path.py @@ -2,11 +2,16 @@ import os import sys -from openpype.pipeline import install_host -from openpype.hosts.resolve import TestGUI -import openpype.hosts.resolve as bmdvr import clique +from openpype.pipeline import install_host +from openpype.hosts.resolve.api.testing_utils import TestGUI +import openpype.hosts.resolve.api as bmdvr +from openpype.hosts.resolve.api.lib import ( + create_media_pool_item, + create_timeline_item, +) + class ThisTestGUI(TestGUI): extensions = [".exr", ".jpg", ".mov", ".png", ".mp4", ".ari", ".arx"] @@ -55,10 +60,10 @@ class ThisTestGUI(TestGUI): # skip if unwanted extension if ext not in self.extensions: return - media_pool_item = 
bmdvr.create_media_pool_item(fpath) + media_pool_item = create_media_pool_item(fpath) print(media_pool_item) - track_item = bmdvr.create_timeline_item(media_pool_item) + track_item = create_timeline_item(media_pool_item) print(track_item) diff --git a/openpype/hosts/resolve/utility_scripts/tests/testing_load_media_pool_item.py b/openpype/hosts/resolve/utility_scripts/tests/testing_load_media_pool_item.py index 872d620162..2e83188bde 100644 --- a/openpype/hosts/resolve/utility_scripts/tests/testing_load_media_pool_item.py +++ b/openpype/hosts/resolve/utility_scripts/tests/testing_load_media_pool_item.py @@ -1,13 +1,17 @@ #! python3 from openpype.pipeline import install_host -import openpype.hosts.resolve as bmdvr +from openpype.hosts.resolve import api as bmdvr +from openpype.hosts.resolve.api.lib import ( + create_media_pool_item, + create_timeline_item, +) def file_processing(fpath): - media_pool_item = bmdvr.create_media_pool_item(fpath) + media_pool_item = create_media_pool_item(fpath) print(media_pool_item) - track_item = bmdvr.create_timeline_item(media_pool_item) + track_item = create_timeline_item(media_pool_item) print(track_item) diff --git a/openpype/hosts/resolve/utils.py b/openpype/hosts/resolve/utils.py new file mode 100644 index 0000000000..382a7cf344 --- /dev/null +++ b/openpype/hosts/resolve/utils.py @@ -0,0 +1,54 @@ +import os +import shutil +from openpype.lib import Logger + +RESOLVE_ROOT_DIR = os.path.dirname(os.path.abspath(__file__)) + + +def setup(env): + log = Logger.get_logger("ResolveSetup") + scripts = {} + us_env = env.get("RESOLVE_UTILITY_SCRIPTS_SOURCE_DIR") + us_dir = env.get("RESOLVE_UTILITY_SCRIPTS_DIR", "") + us_paths = [os.path.join( + RESOLVE_ROOT_DIR, + "utility_scripts" + )] + + # collect script dirs + if us_env: + log.info(f"Utility Scripts Env: `{us_env}`") + us_paths = us_env.split( + os.pathsep) + us_paths + + # collect scripts from dirs + for path in us_paths: + scripts.update({path: os.listdir(path)}) + + log.info(f"Utility Scripts Dir: `{us_paths}`") + log.info(f"Utility Scripts: `{scripts}`") + + # make sure no script file is in folder + for s in os.listdir(us_dir): + path = os.path.join(us_dir, s) + log.info(f"Removing `{path}`...") + if os.path.isdir(path): + shutil.rmtree(path, onerror=None) + else: + os.remove(path) + + # copy scripts into Resolve's utility scripts dir + for d, sl in scripts.items(): + # directory and scripts list + for s in sl: + # script in script list + src = os.path.join(d, s) + dst = os.path.join(us_dir, s) + log.info(f"Copying `{src}` to `{dst}`...") + if os.path.isdir(src): + shutil.copytree( + src, dst, symlinks=False, + ignore=None, ignore_dangling_symlinks=False + ) + else: + shutil.copy2(src, dst) diff --git a/openpype/hosts/standalonepublisher/__init__.py b/openpype/hosts/standalonepublisher/__init__.py index e69de29bb2..f47fa6b573 100644 --- a/openpype/hosts/standalonepublisher/__init__.py +++ b/openpype/hosts/standalonepublisher/__init__.py @@ -0,0 +1,6 @@ +from .addon import StandAlonePublishAddon + + +__all__ = ( + "StandAlonePublishAddon", +) diff --git a/openpype/hosts/standalonepublisher/addon.py b/openpype/hosts/standalonepublisher/addon.py new file mode 100644 index 0000000000..98ec44d4e2 --- /dev/null +++ b/openpype/hosts/standalonepublisher/addon.py @@ -0,0 +1,57 @@ +import os + +import click + +from openpype.lib import get_openpype_execute_args +from openpype.lib.execute import run_detached_process +from openpype.modules import OpenPypeModule +from openpype.modules.interfaces import ITrayAction, 
IHostAddon + +STANDALONEPUBLISH_ROOT_DIR = os.path.dirname(os.path.abspath(__file__)) + + +class StandAlonePublishAddon(OpenPypeModule, ITrayAction, IHostAddon): + label = "Publish" + name = "standalonepublisher" + host_name = "standalonepublisher" + + def initialize(self, modules_settings): + self.enabled = modules_settings["standalonepublish_tool"]["enabled"] + self.publish_paths = [ + os.path.join(STANDALONEPUBLISH_ROOT_DIR, "plugins", "publish") + ] + + def tray_init(self): + return + + def on_action_trigger(self): + self.run_standalone_publisher() + + def connect_with_modules(self, enabled_modules): + """Collect publish paths from other modules.""" + + publish_paths = self.manager.collect_plugin_paths()["publish"] + self.publish_paths.extend(publish_paths) + + def run_standalone_publisher(self): + args = get_openpype_execute_args("module", self.name, "launch") + run_detached_process(args) + + def cli(self, click_group): + click_group.add_command(cli_main) + + +@click.group( + StandAlonePublishAddon.name, + help="StandalonePublisher related commands.") +def cli_main(): + pass + + +@cli_main.command() +def launch(): + """Launch StandalonePublisher tool UI.""" + + from openpype.tools import standalonepublish + + standalonepublish.main() diff --git a/openpype/hosts/standalonepublisher/plugins/publish/collect_bulk_mov_instances.py b/openpype/hosts/standalonepublisher/plugins/publish/collect_bulk_mov_instances.py index 052a97af7d..7925b0ecf3 100644 --- a/openpype/hosts/standalonepublisher/plugins/publish/collect_bulk_mov_instances.py +++ b/openpype/hosts/standalonepublisher/plugins/publish/collect_bulk_mov_instances.py @@ -2,8 +2,8 @@ import copy import json import pyblish.api -from openpype.lib import get_subset_name_with_asset_doc from openpype.client import get_asset_by_name +from openpype.pipeline.create import get_subset_name class CollectBulkMovInstances(pyblish.api.InstancePlugin): @@ -44,12 +44,14 @@ class CollectBulkMovInstances(pyblish.api.InstancePlugin): task_name = available_task_names[_task_name_low] break - subset_name = get_subset_name_with_asset_doc( + subset_name = get_subset_name( self.new_instance_family, self.subset_name_variant, task_name, asset_doc, - project_name + project_name, + host_name=context.data["hostName"], + project_settings=context.data["project_settings"] ) instance_name = f"{asset_name}_{subset_name}" diff --git a/openpype/hosts/standalonepublisher/plugins/publish/collect_editorial.py b/openpype/hosts/standalonepublisher/plugins/publish/collect_editorial.py index 0a1d29ccdc..8633d4bf9d 100644 --- a/openpype/hosts/standalonepublisher/plugins/publish/collect_editorial.py +++ b/openpype/hosts/standalonepublisher/plugins/publish/collect_editorial.py @@ -19,6 +19,7 @@ import os import opentimelineio as otio import pyblish.api from openpype import lib as plib +from openpype.pipeline.context_tools import get_current_project_asset class OTIO_View(pyblish.api.Action): @@ -116,7 +117,7 @@ class CollectEditorial(pyblish.api.InstancePlugin): if extension == ".edl": # EDL has no frame rate embedded so needs explicit # frame rate else 24 is asssumed. 
- kwargs["rate"] = plib.get_asset()["data"]["fps"] + kwargs["rate"] = get_current_project_asset()["data"]["fps"] instance.data["otio_timeline"] = otio.adapters.read_from_file( file_path, **kwargs) diff --git a/openpype/hosts/standalonepublisher/plugins/publish/collect_editorial_instances.py b/openpype/hosts/standalonepublisher/plugins/publish/collect_editorial_instances.py index d0d36bb717..75c260bad7 100644 --- a/openpype/hosts/standalonepublisher/plugins/publish/collect_editorial_instances.py +++ b/openpype/hosts/standalonepublisher/plugins/publish/collect_editorial_instances.py @@ -1,8 +1,12 @@ import os +from copy import deepcopy + import opentimelineio as otio import pyblish.api + from openpype import lib as plib -from copy import deepcopy +from openpype.pipeline.context_tools import get_current_project_asset + class CollectInstances(pyblish.api.InstancePlugin): """Collect instances from editorial's OTIO sequence""" @@ -48,7 +52,7 @@ class CollectInstances(pyblish.api.InstancePlugin): # get timeline otio data timeline = instance.data["otio_timeline"] - fps = plib.get_asset()["data"]["fps"] + fps = get_current_project_asset()["data"]["fps"] tracks = timeline.each_child( descended_from_type=otio.schema.Track @@ -166,7 +170,8 @@ class CollectInstances(pyblish.api.InstancePlugin): "frameStart": frame_start, "frameEnd": frame_end, "frameStartH": frame_start - handle_start, - "frameEndH": frame_end + handle_end + "frameEndH": frame_end + handle_end, + "newAssetPublishing": True } for data_key in instance_data_filter: diff --git a/openpype/hosts/standalonepublisher/plugins/publish/validate_frame_ranges.py b/openpype/hosts/standalonepublisher/plugins/publish/validate_frame_ranges.py index 005157af62..ff7f60354e 100644 --- a/openpype/hosts/standalonepublisher/plugins/publish/validate_frame_ranges.py +++ b/openpype/hosts/standalonepublisher/plugins/publish/validate_frame_ranges.py @@ -3,8 +3,8 @@ import re import pyblish.api import openpype.api -from openpype import lib from openpype.pipeline import PublishXmlValidationError +from openpype.pipeline.context_tools import get_current_project_asset class ValidateFrameRange(pyblish.api.InstancePlugin): @@ -27,7 +27,8 @@ class ValidateFrameRange(pyblish.api.InstancePlugin): for pattern in self.skip_timelines_check): self.log.info("Skipping for {} task".format(instance.data["task"])) - asset_data = lib.get_asset(instance.data["asset"])["data"] + # TODO repace query with using 'instance.data["assetEntity"]' + asset_data = get_current_project_asset(instance.data["asset"])["data"] frame_start = asset_data["frameStart"] frame_end = asset_data["frameEnd"] handle_start = asset_data["handleStart"] diff --git a/openpype/hosts/testhost/api/pipeline.py b/openpype/hosts/testhost/api/pipeline.py index 285fe8f8d6..1e05f336fb 100644 --- a/openpype/hosts/testhost/api/pipeline.py +++ b/openpype/hosts/testhost/api/pipeline.py @@ -1,6 +1,6 @@ import os import json -from openpype.pipeline import legacy_io +from openpype.client import get_asset_by_name class HostContext: @@ -17,10 +17,10 @@ class HostContext: if not asset_name: return project_name - asset_doc = legacy_io.find_one( - {"type": "asset", "name": asset_name}, - {"data.parents": 1} + asset_doc = get_asset_by_name( + project_name, asset_name, fields=["data.parents"] ) + parents = asset_doc.get("data", {}).get("parents") or [] hierarchy = [project_name] diff --git a/openpype/hosts/testhost/plugins/create/auto_creator.py b/openpype/hosts/testhost/plugins/create/auto_creator.py index 06b95375b1..8d59fc3242 100644 
--- a/openpype/hosts/testhost/plugins/create/auto_creator.py +++ b/openpype/hosts/testhost/plugins/create/auto_creator.py @@ -1,10 +1,11 @@ from openpype.lib import NumberDef -from openpype.hosts.testhost.api import pipeline +from openpype.client import get_asset_by_name from openpype.pipeline import ( legacy_io, AutoCreator, CreatedInstance, ) +from openpype.hosts.testhost.api import pipeline class MyAutoCreator(AutoCreator): @@ -44,10 +45,7 @@ class MyAutoCreator(AutoCreator): host_name = legacy_io.Session["AVALON_APP"] if existing_instance is None: - asset_doc = legacy_io.find_one({ - "type": "asset", - "name": asset_name - }) + asset_doc = get_asset_by_name(project_name, asset_name) subset_name = self.get_subset_name( variant, task_name, asset_doc, project_name, host_name ) @@ -69,10 +67,7 @@ class MyAutoCreator(AutoCreator): existing_instance["asset"] != asset_name or existing_instance["task"] != task_name ): - asset_doc = legacy_io.find_one({ - "type": "asset", - "name": asset_name - }) + asset_doc = get_asset_by_name(project_name, asset_name) subset_name = self.get_subset_name( variant, task_name, asset_doc, project_name, host_name ) diff --git a/openpype/hosts/traypublisher/__init__.py b/openpype/hosts/traypublisher/__init__.py new file mode 100644 index 0000000000..77ba908ddd --- /dev/null +++ b/openpype/hosts/traypublisher/__init__.py @@ -0,0 +1,6 @@ +from .addon import TrayPublishAddon + + +__all__ = ( + "TrayPublishAddon", +) diff --git a/openpype/modules/traypublish_action.py b/openpype/hosts/traypublisher/addon.py similarity index 57% rename from openpype/modules/traypublish_action.py rename to openpype/hosts/traypublisher/addon.py index 39163b8eb8..c86c835ed9 100644 --- a/openpype/modules/traypublish_action.py +++ b/openpype/hosts/traypublisher/addon.py @@ -1,25 +1,24 @@ import os + +import click + from openpype.lib import get_openpype_execute_args from openpype.lib.execute import run_detached_process from openpype.modules import OpenPypeModule -from openpype_interfaces import ITrayAction +from openpype.modules.interfaces import ITrayAction, IHostAddon + +TRAYPUBLISH_ROOT_DIR = os.path.dirname(os.path.abspath(__file__)) -class TrayPublishAction(OpenPypeModule, ITrayAction): +class TrayPublishAddon(OpenPypeModule, IHostAddon, ITrayAction): label = "New Publish (beta)" - name = "traypublish_tool" + name = "traypublisher" + host_name = "traypublisher" def initialize(self, modules_settings): - import openpype self.enabled = True self.publish_paths = [ - os.path.join( - openpype.PACKAGE_DIR, - "hosts", - "traypublisher", - "plugins", - "publish" - ) + os.path.join(TRAYPUBLISH_ROOT_DIR, "plugins", "publish") ] self._experimental_tools = None @@ -29,7 +28,7 @@ class TrayPublishAction(OpenPypeModule, ITrayAction): self._experimental_tools = ExperimentalTools() def tray_menu(self, *args, **kwargs): - super(TrayPublishAction, self).tray_menu(*args, **kwargs) + super(TrayPublishAddon, self).tray_menu(*args, **kwargs) traypublisher = self._experimental_tools.get("traypublisher") visible = False if traypublisher and traypublisher.enabled: @@ -45,5 +44,24 @@ class TrayPublishAction(OpenPypeModule, ITrayAction): self.publish_paths.extend(publish_paths) def run_traypublisher(self): - args = get_openpype_execute_args("traypublisher") + args = get_openpype_execute_args( + "module", self.name, "launch" + ) run_detached_process(args) + + def cli(self, click_group): + click_group.add_command(cli_main) + + +@click.group(TrayPublishAddon.name, help="TrayPublisher related commands.") +def 
cli_main(): + pass + + +@cli_main.command() +def launch(): + """Launch TrayPublish tool UI.""" + + from openpype.tools import traypublisher + + traypublisher.main() diff --git a/openpype/hosts/traypublisher/api/editorial.py b/openpype/hosts/traypublisher/api/editorial.py new file mode 100644 index 0000000000..7c392ef508 --- /dev/null +++ b/openpype/hosts/traypublisher/api/editorial.py @@ -0,0 +1,331 @@ +import re +from copy import deepcopy + +from openpype.client import get_asset_by_id +from openpype.pipeline.create import CreatorError + + +class ShotMetadataSolver: + """ Solving hierarchical metadata + + Used during editorial publishing. Works with imput + clip name and settings defining python formatable + template. Settings also define searching patterns + and its token keys used for formating in templates. + """ + + NO_DECOR_PATERN = re.compile(r"\{([a-z]*?)\}") + + # presets + clip_name_tokenizer = None + shot_rename = True + shot_hierarchy = None + shot_add_tasks = None + + def __init__( + self, + clip_name_tokenizer, + shot_rename, + shot_hierarchy, + shot_add_tasks, + logger + ): + self.clip_name_tokenizer = clip_name_tokenizer + self.shot_rename = shot_rename + self.shot_hierarchy = shot_hierarchy + self.shot_add_tasks = shot_add_tasks + self.log = logger + + def _rename_template(self, data): + """Shot renaming function + + Args: + data (dict): formating data + + Raises: + CreatorError: If missing keys + + Returns: + str: formated new name + """ + shot_rename_template = self.shot_rename[ + "shot_rename_template"] + try: + # format to new shot name + return shot_rename_template.format(**data) + except KeyError as _E: + raise CreatorError(( + "Make sure all keys in settings are correct:: \n\n" + f"From template string {shot_rename_template} > " + f"`{_E}` has no equivalent in \n" + f"{list(data.keys())} input formating keys!" + )) + + def _generate_tokens(self, clip_name, source_data): + """Token generator + + Settings defines token pairs key and regex expression. + + Args: + clip_name (str): name of clip in editorial + source_data (dict): data for formating + + Raises: + CreatorError: if missing key + + Returns: + dict: updated source_data + """ + output_data = deepcopy(source_data["anatomy_data"]) + output_data["clip_name"] = clip_name + + if not self.clip_name_tokenizer: + return output_data + + parent_name = source_data["selected_asset_doc"]["name"] + + search_text = parent_name + clip_name + + for token_key, pattern in self.clip_name_tokenizer.items(): + p = re.compile(pattern) + match = p.findall(search_text) + if not match: + raise CreatorError(( + "Make sure regex expression works with your data: \n\n" + f"'{token_key}' with regex '{pattern}' in your settings\n" + "can't find any match in your clip name " + f"'{search_text}'!\n\nLook to: " + "'project_settings/traypublisher/editorial_creators" + "/editorial_simple/clip_name_tokenizer'\n" + "at your project settings..." + )) + + # QUESTION:how to refactory `match[-1]` to some better way? + output_data[token_key] = match[-1] + + return output_data + + def _create_parents_from_settings(self, parents, data): + """Formating parent components. 
+ + Args: + parents (list): list of dict parent components + data (dict): formating data + + Raises: + CreatorError: missing formating key + CreatorError: missing token key + KeyError: missing parent token + + Returns: + list: list of dict of parent components + """ + # fill the parents parts from presets + shot_hierarchy = deepcopy(self.shot_hierarchy) + hierarchy_parents = shot_hierarchy["parents"] + + # fill parent keys data template from anatomy data + try: + _parent_tokens_formating_data = { + parent_token["name"]: parent_token["value"].format(**data) + for parent_token in hierarchy_parents + } + except KeyError as _E: + raise CreatorError(( + "Make sure all keys in settings are correct : \n" + f"`{_E}` has no equivalent in \n{list(data.keys())}" + )) + + _parent_tokens_type = { + parent_token["name"]: parent_token["type"] + for parent_token in hierarchy_parents + } + for _index, _parent in enumerate( + shot_hierarchy["parents_path"].split("/") + ): + # format parent token with value which is formated + try: + parent_name = _parent.format( + **_parent_tokens_formating_data) + except KeyError as _E: + raise CreatorError(( + "Make sure all keys in settings are correct : \n\n" + f"`{_E}` from template string " + f"{shot_hierarchy['parents_path']}, " + f" has no equivalent in \n" + f"{list(_parent_tokens_formating_data.keys())} parents" + )) + + parent_token_name = ( + self.NO_DECOR_PATERN.findall(_parent).pop()) + + if not parent_token_name: + raise KeyError( + f"Parent token is not found in: `{_parent}`") + + # find parent type + parent_token_type = _parent_tokens_type[parent_token_name] + + # in case selected context is set to the same asset + if ( + _index == 0 + and parents[-1]["entity_name"] == parent_name + ): + self.log.debug(f" skipping : {parent_name}") + continue + + # in case first parent is project then start parents from start + if ( + _index == 0 + and parent_token_type == "Project" + ): + self.log.debug("rebuilding parents from scratch") + project_parent = parents[0] + parents = [project_parent] + continue + + parents.append({ + "entity_type": parent_token_type, + "entity_name": parent_name + }) + + self.log.debug(f"__ parents: {parents}") + + return parents + + def _create_hierarchy_path(self, parents): + """Converting hierarchy path from parents + + Args: + parents (list): list of dict parent components + + Returns: + str: hierarchy path + """ + return "/".join( + [ + p["entity_name"] for p in parents + if p["entity_type"] != "Project" + ] + ) if parents else "" + + def _get_parents_from_selected_asset( + self, + asset_doc, + project_doc + ): + """Returning parents from context on selected asset. + + Context defined in Traypublisher project tree. 
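A hypothetical, simplified illustration of how the clip_name_tokenizer and shot_rename_template settings consumed by _generate_tokens and _rename_template above interact; every value below is invented, and the real solver also prefixes the parent asset name to the searched text:

    import re

    clip_name_tokenizer = {"_sequence_": r"(sc\d{3})", "_shot_": r"(sh\d{3})"}
    shot_rename_template = "{_sequence_}_{_shot_}"

    data = {"clip_name": "sc010_sh020_plate_v001"}
    for token_key, pattern in clip_name_tokenizer.items():
        matches = re.compile(pattern).findall(data["clip_name"])
        data[token_key] = matches[-1]  # last match wins, as in _generate_tokens

    print(shot_rename_template.format(**data))  # -> sc010_sh020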
+ + Args: + asset_doc (db obj): selected asset doc + project_doc (db obj): actual project doc + + Returns: + list: list of dict parent components + """ + project_name = project_doc["name"] + visual_hierarchy = [asset_doc] + current_doc = asset_doc + + # looping trought all available visual parents + # if they are not available anymore than it breaks + while True: + visual_parent_id = current_doc["data"]["visualParent"] + visual_parent = None + if visual_parent_id: + visual_parent = get_asset_by_id(project_name, visual_parent_id) + + if not visual_parent: + visual_hierarchy.append(project_doc) + break + visual_hierarchy.append(visual_parent) + current_doc = visual_parent + + # add current selection context hierarchy + return [ + { + "entity_type": entity["data"]["entityType"], + "entity_name": entity["name"] + } + for entity in reversed(visual_hierarchy) + ] + + def _generate_tasks_from_settings(self, project_doc): + """Convert settings inputs to task data. + + Args: + project_doc (db obj): actual project doc + + Raises: + KeyError: Missing task type in project doc + + Returns: + dict: tasks data + """ + tasks_to_add = {} + + project_tasks = project_doc["config"]["tasks"] + for task_name, task_data in self.shot_add_tasks.items(): + _task_data = deepcopy(task_data) + + # check if task type in project task types + if _task_data["type"] in project_tasks.keys(): + tasks_to_add[task_name] = _task_data + else: + raise KeyError( + "Missing task type `{}` for `{}` is not" + " existing in `{}``".format( + _task_data["type"], + task_name, + list(project_tasks.keys()) + ) + ) + + return tasks_to_add + + def generate_data(self, clip_name, source_data): + """Metadata generator. + + Converts input data to hierarchy mentadata. + + Args: + clip_name (str): clip name + source_data (dict): formating data + + Returns: + (str, dict): shot name and hierarchy data + """ + self.log.info(f"_ source_data: {source_data}") + + tasks = {} + asset_doc = source_data["selected_asset_doc"] + project_doc = source_data["project_doc"] + + # match clip to shot name at start + shot_name = clip_name + + # parse all tokens and generate formating data + formating_data = self._generate_tokens(shot_name, source_data) + + # generate parents from selected asset + parents = self._get_parents_from_selected_asset(asset_doc, project_doc) + + if self.shot_rename["enabled"]: + shot_name = self._rename_template(formating_data) + self.log.info(f"Renamed shot name: {shot_name}") + + if self.shot_hierarchy["enabled"]: + parents = self._create_parents_from_settings( + parents, formating_data) + + if self.shot_add_tasks: + tasks = self._generate_tasks_from_settings( + project_doc) + + return shot_name, { + "hierarchy": self._create_hierarchy_path(parents), + "parents": parents, + "tasks": tasks + } diff --git a/openpype/hosts/traypublisher/api/plugin.py b/openpype/hosts/traypublisher/api/plugin.py index 9b9425855e..a3eead51c8 100644 --- a/openpype/hosts/traypublisher/api/plugin.py +++ b/openpype/hosts/traypublisher/api/plugin.py @@ -1,6 +1,7 @@ from openpype.lib.attribute_definitions import FileDef -from openpype.pipeline import ( +from openpype.pipeline.create import ( Creator, + HiddenCreator, CreatedInstance ) @@ -11,7 +12,6 @@ from .pipeline import ( HostContext, ) - IMAGE_EXTENSIONS = [ ".ani", ".anim", ".apng", ".art", ".bmp", ".bpg", ".bsave", ".cal", ".cin", ".cpc", ".cpt", ".dds", ".dpx", ".ecw", ".exr", ".fits", @@ -35,6 +35,42 @@ VIDEO_EXTENSIONS = [ REVIEW_EXTENSIONS = IMAGE_EXTENSIONS + VIDEO_EXTENSIONS +class 
HiddenTrayPublishCreator(HiddenCreator): + host_name = "traypublisher" + + def collect_instances(self): + for instance_data in list_instances(): + creator_id = instance_data.get("creator_identifier") + if creator_id == self.identifier: + instance = CreatedInstance.from_existing( + instance_data, self + ) + self._add_instance_to_context(instance) + + def update_instances(self, update_list): + update_instances(update_list) + + def remove_instances(self, instances): + remove_instances(instances) + for instance in instances: + self._remove_instance_from_context(instance) + + def _store_new_instance(self, new_instance): + """Tray publisher specific method to store instance. + + Instance is stored into "workfile" of traypublisher and also add it + to CreateContext. + + Args: + new_instance (CreatedInstance): Instance that should be stored. + """ + + # Host implementation of storing metadata about instance + HostContext.add_instance(new_instance.data_to_store()) + # Add instance to current context + self._add_instance_to_context(new_instance) + + class TrayPublishCreator(Creator): create_allow_context_change = True host_name = "traypublisher" @@ -56,10 +92,6 @@ class TrayPublishCreator(Creator): for instance in instances: self._remove_instance_from_context(instance) - def get_pre_create_attr_defs(self): - # Use same attributes as for instance attrobites - return self.get_instance_attr_defs() - def _store_new_instance(self, new_instance): """Tray publisher specific method to store instance. @@ -81,15 +113,6 @@ class SettingsCreator(TrayPublishCreator): extensions = [] - def collect_instances(self): - for instance_data in list_instances(): - creator_id = instance_data.get("creator_identifier") - if creator_id == self.identifier: - instance = CreatedInstance.from_existing( - instance_data, self - ) - self._add_instance_to_context(instance) - def create(self, subset_name, data, pre_create_data): # Pass precreate data to creator attributes data["creator_attributes"] = pre_create_data @@ -120,6 +143,10 @@ class SettingsCreator(TrayPublishCreator): ) ] + def get_pre_create_attr_defs(self): + # Use same attributes as for instance attrobites + return self.get_instance_attr_defs() + @classmethod def from_settings(cls, item_data): identifier = item_data["identifier"] diff --git a/openpype/hosts/traypublisher/plugins/create/create_editorial.py b/openpype/hosts/traypublisher/plugins/create/create_editorial.py new file mode 100644 index 0000000000..28a115629e --- /dev/null +++ b/openpype/hosts/traypublisher/plugins/create/create_editorial.py @@ -0,0 +1,869 @@ +import os +from copy import deepcopy +from pprint import pformat +import opentimelineio as otio +from openpype.client import ( + get_asset_by_name, + get_project +) +from openpype.hosts.traypublisher.api.plugin import ( + TrayPublishCreator, + HiddenTrayPublishCreator +) +from openpype.hosts.traypublisher.api.editorial import ( + ShotMetadataSolver +) + +from openpype.pipeline import CreatedInstance + +from openpype.lib import ( + get_ffprobe_data, + convert_ffprobe_fps_value, + + FileDef, + TextDef, + NumberDef, + EnumDef, + BoolDef, + UISeparatorDef, + UILabelDef +) + + +CLIP_ATTR_DEFS = [ + EnumDef( + "fps", + items={ + "from_selection": "From selection", + 23.997: "23.976", + 24: "24", + 25: "25", + 29.97: "29.97", + 30: "30" + }, + label="FPS" + ), + NumberDef( + "workfile_start_frame", + default=1001, + label="Workfile start frame" + ), + NumberDef( + "handle_start", + default=0, + label="Handle start" + ), + NumberDef( + "handle_end", + default=0, 
+ label="Handle end" + ) +] + + +class EditorialClipInstanceCreatorBase(HiddenTrayPublishCreator): + """ Wrapper class for clip family creators + + Args: + HiddenTrayPublishCreator (BaseCreator): hidden supporting class + """ + host_name = "traypublisher" + + def create(self, instance_data, source_data=None): + self.log.info(f"instance_data: {instance_data}") + subset_name = instance_data["subset"] + + # Create new instance + new_instance = CreatedInstance( + self.family, subset_name, instance_data, self + ) + self.log.info(f"instance_data: {pformat(new_instance.data)}") + + self._store_new_instance(new_instance) + + return new_instance + + def get_instance_attr_defs(self): + return [ + BoolDef( + "add_review_family", + default=True, + label="Review" + ) + ] + + +class EditorialShotInstanceCreator(EditorialClipInstanceCreatorBase): + """ Shot family class + + The shot metadata instance carrier. + + Args: + EditorialClipInstanceCreatorBase (BaseCreator): hidden supporting class + """ + identifier = "editorial_shot" + family = "shot" + label = "Editorial Shot" + + def get_instance_attr_defs(self): + attr_defs = [ + TextDef( + "asset_name", + label="Asset name", + ) + ] + attr_defs.extend(CLIP_ATTR_DEFS) + return attr_defs + + +class EditorialPlateInstanceCreator(EditorialClipInstanceCreatorBase): + """ Plate family class + + Plate representation instance. + + Args: + EditorialClipInstanceCreatorBase (BaseCreator): hidden supporting class + """ + identifier = "editorial_plate" + family = "plate" + label = "Editorial Plate" + + +class EditorialAudioInstanceCreator(EditorialClipInstanceCreatorBase): + """ Audio family class + + Audio representation instance. + + Args: + EditorialClipInstanceCreatorBase (BaseCreator): hidden supporting class + """ + identifier = "editorial_audio" + family = "audio" + label = "Editorial Audio" + + +class EditorialReviewInstanceCreator(EditorialClipInstanceCreatorBase): + """ Review family class + + Review representation instance. + + Args: + EditorialClipInstanceCreatorBase (BaseCreator): hidden supporting class + """ + identifier = "editorial_review" + family = "review" + label = "Editorial Review" + + +class EditorialSimpleCreator(TrayPublishCreator): + """ Editorial creator class + + Simple workflow creator. This creator only disecting input + video file into clip chunks and then converts each to + defined format defined Settings for each subset preset. + + Args: + TrayPublishCreator (Creator): Tray publisher plugin class + """ + + label = "Editorial Simple" + family = "editorial" + identifier = "editorial_simple" + default_variants = [ + "main" + ] + description = "Editorial files to generate shots." + detailed_description = """ +Supporting publishing new shots to project +or updating already created. Publishing will create OTIO file. 
+""" + icon = "fa.file" + + def __init__( + self, project_settings, *args, **kwargs + ): + super(EditorialSimpleCreator, self).__init__( + project_settings, *args, **kwargs + ) + editorial_creators = deepcopy( + project_settings["traypublisher"]["editorial_creators"] + ) + # get this creator settings by identifier + self._creator_settings = editorial_creators.get(self.identifier) + + clip_name_tokenizer = self._creator_settings["clip_name_tokenizer"] + shot_rename = self._creator_settings["shot_rename"] + shot_hierarchy = self._creator_settings["shot_hierarchy"] + shot_add_tasks = self._creator_settings["shot_add_tasks"] + + self._shot_metadata_solver = ShotMetadataSolver( + clip_name_tokenizer, + shot_rename, + shot_hierarchy, + shot_add_tasks, + self.log + ) + + # try to set main attributes from settings + if self._creator_settings.get("default_variants"): + self.default_variants = self._creator_settings["default_variants"] + + def create(self, subset_name, instance_data, pre_create_data): + allowed_family_presets = self._get_allowed_family_presets( + pre_create_data) + + clip_instance_properties = { + k: v for k, v in pre_create_data.items() + if k != "sequence_filepath_data" + if k not in [ + i["family"] for i in self._creator_settings["family_presets"] + ] + } + # Create otio editorial instance + asset_name = instance_data["asset"] + asset_doc = get_asset_by_name(self.project_name, asset_name) + + self.log.info(pre_create_data["fps"]) + + if pre_create_data["fps"] == "from_selection": + # get asset doc data attributes + fps = asset_doc["data"]["fps"] + else: + fps = float(pre_create_data["fps"]) + + instance_data.update({ + "fps": fps + }) + + # get path of sequence + sequence_path_data = pre_create_data["sequence_filepath_data"] + media_path_data = pre_create_data["media_filepaths_data"] + + sequence_path = self._get_path_from_file_data(sequence_path_data) + media_path = self._get_path_from_file_data(media_path_data) + + # get otio timeline + otio_timeline = self._create_otio_timeline( + sequence_path, fps) + + # Create all clip instances + clip_instance_properties.update({ + "fps": fps, + "parent_asset_name": asset_name, + "variant": instance_data["variant"] + }) + + # create clip instances + self._get_clip_instances( + otio_timeline, + media_path, + clip_instance_properties, + family_presets=allowed_family_presets + + ) + + # create otio editorial instance + self._create_otio_instance( + subset_name, instance_data, + sequence_path, media_path, + otio_timeline + ) + + def _create_otio_instance( + self, + subset_name, + data, + sequence_path, + media_path, + otio_timeline + ): + """Otio instance creating function + + Args: + subset_name (str): name of subset + data (dict): instnance data + sequence_path (str): path to sequence file + media_path (str): path to media file + otio_timeline (otio.Timeline): otio timeline object + """ + # Pass precreate data to creator attributes + data.update({ + "sequenceFilePath": sequence_path, + "editorialSourcePath": media_path, + "otioTimeline": otio.adapters.write_to_string(otio_timeline) + }) + new_instance = CreatedInstance( + self.family, subset_name, data, self + ) + self._store_new_instance(new_instance) + + def _create_otio_timeline(self, sequence_path, fps): + """Creating otio timeline from sequence path + + Args: + sequence_path (str): path to sequence file + fps (float): frame per second + + Returns: + otio.Timeline: otio timeline object + """ + # get editorial sequence file into otio timeline object + extension = 
os.path.splitext(sequence_path)[1] + + kwargs = {} + if extension == ".edl": + # EDL has no frame rate embedded so needs explicit + # frame rate else 24 is assumed. + kwargs["rate"] = fps + kwargs["ignore_timecode_mismatch"] = True + + self.log.info(f"kwargs: {kwargs}") + return otio.adapters.read_from_file(sequence_path, **kwargs) + + def _get_path_from_file_data(self, file_path_data): + """Converting creator path data to a single path string + + Args: + file_path_data (FileDefItem): creator path data inputs + + Raises: + FileExistsError: in case nothing has been set + + Returns: + str: path string + """ + # TODO: just temporarily solving only one media file + if isinstance(file_path_data, list): + file_path_data = file_path_data.pop() + + if len(file_path_data["filenames"]) == 0: + raise FileExistsError( + f"File path was not added: {file_path_data}") + + return os.path.join( + file_path_data["directory"], file_path_data["filenames"][0]) + + def _get_clip_instances( + self, + otio_timeline, + media_path, + instance_data, + family_presets + ): + """Helper function for creating clip instances + + Args: + otio_timeline (otio.Timeline): otio timeline object + media_path (str): media file path string + instance_data (dict): clip instance data + family_presets (list): list of subset preset dicts from settings + """ + self.asset_name_check = [] + + tracks = otio_timeline.each_child( + descended_from_type=otio.schema.Track + ) + + # media data for audio stream and reference solving + media_data = self._get_media_source_metadata(media_path) + + for track in tracks: + self.log.debug(f"track.name: {track.name}") + try: + track_start_frame = ( + abs(track.source_range.start_time.value) + ) + self.log.debug(f"track_start_frame: {track_start_frame}") + track_start_frame -= self.timeline_frame_start + except AttributeError: + track_start_frame = 0 + + self.log.debug(f"track_start_frame: {track_start_frame}") + + for clip in track.each_child(): + if not self._validate_clip_for_processing(clip): + continue + + # get available frames info to clip data + self._create_otio_reference(clip, media_path, media_data) + + # convert timeline range to source range + self._restore_otio_source_range(clip) + + base_instance_data = self._get_base_instance_data( + clip, + instance_data, + track_start_frame + ) + + parenting_data = { + "instance_label": None, + "instance_id": None + } + self.log.info(( + "Creating subsets from presets: \n" + f"{pformat(family_presets)}" + )) + + for _fpreset in family_presets: + # exclude audio family if no audio stream + if ( + _fpreset["family"] == "audio" + and not media_data.get("audio") + ): + continue + + instance = self._make_subset_instance( + clip, + _fpreset, + deepcopy(base_instance_data), + parenting_data + ) + self.log.debug(f"{pformat(dict(instance.data))}") + + def _restore_otio_source_range(self, otio_clip): + """Restoring source range. + + Otio clip is missing a proper source clip range so + here we add it from the parent timeline frame range. + + Args: + otio_clip (otio.Clip): otio clip object + """ + otio_clip.source_range = otio_clip.range_in_parent() + + def _create_otio_reference( + self, + otio_clip, + media_path, + media_data + ): + """Creating otio reference at otio clip. 
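For orientation, a hypothetical example of the value _get_path_from_file_data above receives from a FileDef attribute; the directory and file names are made up:

    file_path_data = [{
        "directory": "/projects/ep101/editorial",
        "filenames": ["ep101_cut_v001.edl"],
    }]
    # joined result: "/projects/ep101/editorial/ep101_cut_v001.edl"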
+ + Args: + otio_clip (otio.Clip): otio clip object + media_path (str): media file path string + media_data (dict): media metadata + """ + start_frame = media_data["start_frame"] + frame_duration = media_data["duration"] + fps = media_data["fps"] + + available_range = otio.opentime.TimeRange( + start_time=otio.opentime.RationalTime( + start_frame, fps), + duration=otio.opentime.RationalTime( + frame_duration, fps) + ) + # in case old OTIO or video file create `ExternalReference` + media_reference = otio.schema.ExternalReference( + target_url=media_path, + available_range=available_range + ) + + otio_clip.media_reference = media_reference + + def _get_media_source_metadata(self, path): + """Get all available metadata from file + + Args: + path (str): media file path string + + Raises: + AssertionError: ffprobe couldn't read metadata + + Returns: + dict: media file metadata + """ + return_data = {} + + try: + media_data = get_ffprobe_data( + path, self.log + ) + self.log.debug(f"__ media_data: {pformat(media_data)}") + + # get video stream data + video_stream = media_data["streams"][0] + return_data = { + "video": True, + "start_frame": 0, + "duration": int(video_stream["nb_frames"]), + "fps": float( + convert_ffprobe_fps_value( + video_stream["r_frame_rate"] + ) + ) + } + + # get audio streams data + audio_stream = [ + stream for stream in media_data["streams"] + if stream["codec_type"] == "audio" + ] + + if audio_stream: + return_data["audio"] = True + + except Exception as exc: + raise AssertionError(( + "FFprobe couldn't read information about input file: " + f"\"{path}\". Error message: {exc}" + )) + + return return_data + + def _make_subset_instance( + self, + otio_clip, + preset, + instance_data, + parenting_data + ): + """Making subset instance from input preset + + Args: + otio_clip (otio.Clip): otio clip object + preset (dict): sigle family preset + instance_data (dict): instance data + parenting_data (dict): shot instance parent data + + Returns: + CreatedInstance: creator instance object + """ + family = preset["family"] + label = self._make_subset_naming( + preset, + instance_data + ) + instance_data["label"] = label + + # add file extension filter only if it is not shot family + if family == "shot": + instance_data["otioClip"] = ( + otio.adapters.write_to_string(otio_clip)) + c_instance = self.create_context.creators[ + "editorial_shot"].create( + instance_data) + parenting_data.update({ + "instance_label": label, + "instance_id": c_instance.data["instance_id"] + }) + else: + # add review family if defined + instance_data.update({ + "outputFileType": preset["output_file_type"], + "parent_instance_id": parenting_data["instance_id"], + "creator_attributes": { + "parent_instance": parenting_data["instance_label"], + "add_review_family": preset.get("review") + } + }) + + creator_identifier = f"editorial_{family}" + editorial_clip_creator = self.create_context.creators[ + creator_identifier] + c_instance = editorial_clip_creator.create( + instance_data) + + return c_instance + + def _make_subset_naming( + self, + preset, + instance_data + ): + """ Subset name maker + + Args: + preset (dict): single preset item + instance_data (dict): instance data + + Returns: + str: label string + """ + shot_name = instance_data["shotName"] + variant_name = instance_data["variant"] + family = preset["family"] + + # get variant name from preset or from inharitance + _variant_name = preset.get("variant") or variant_name + + self.log.debug(f"__ family: {family}") + self.log.debug(f"__ preset: 
{preset}") + + # subset name + subset_name = "{}{}".format( + family, _variant_name.capitalize() + ) + label = "{}_{}".format( + shot_name, + subset_name + ) + + instance_data.update({ + "family": family, + "label": label, + "variant": _variant_name, + "subset": subset_name, + }) + + return label + + def _get_base_instance_data( + self, + otio_clip, + instance_data, + track_start_frame, + ): + """ Factoring basic set of instance data. + + Args: + otio_clip (otio.Clip): otio clip object + instance_data (dict): precreate instance data + track_start_frame (int): track start frame + + Returns: + dict: instance data + """ + # get clip instance properties + parent_asset_name = instance_data["parent_asset_name"] + handle_start = instance_data["handle_start"] + handle_end = instance_data["handle_end"] + timeline_offset = instance_data["timeline_offset"] + workfile_start_frame = instance_data["workfile_start_frame"] + fps = instance_data["fps"] + variant_name = instance_data["variant"] + + # basic unique asset name + clip_name = os.path.splitext(otio_clip.name)[0].lower() + project_doc = get_project(self.project_name) + + shot_name, shot_metadata = self._shot_metadata_solver.generate_data( + clip_name, + { + "anatomy_data": { + "project": { + "name": self.project_name, + "code": project_doc["data"]["code"] + }, + "parent": parent_asset_name, + "app": self.host_name + }, + "selected_asset_doc": get_asset_by_name( + self.project_name, parent_asset_name), + "project_doc": project_doc + } + ) + + self._validate_name_uniqueness(shot_name) + + timing_data = self._get_timing_data( + otio_clip, + timeline_offset, + track_start_frame, + workfile_start_frame + ) + + # create creator attributes + creator_attributes = { + "asset_name": shot_name, + "Parent hierarchy path": shot_metadata["hierarchy"], + "workfile_start_frame": workfile_start_frame, + "fps": fps, + "handle_start": int(handle_start), + "handle_end": int(handle_end) + } + creator_attributes.update(timing_data) + + # create shared new instance data + base_instance_data = { + "shotName": shot_name, + "variant": variant_name, + + # HACK: just for temporal bug workaround + # TODO: should loockup shot name for update + "asset": parent_asset_name, + "task": "", + + "newAssetPublishing": True, + + # parent time properties + "trackStartFrame": track_start_frame, + "timelineOffset": timeline_offset, + # creator_attributes + "creator_attributes": creator_attributes + } + # add hierarchy shot metadata + base_instance_data.update(shot_metadata) + + return base_instance_data + + def _get_timing_data( + self, + otio_clip, + timeline_offset, + track_start_frame, + workfile_start_frame + ): + """Returning available timing data + + Args: + otio_clip (otio.Clip): otio clip object + timeline_offset (int): offset value + track_start_frame (int): starting frame input + workfile_start_frame (int): start frame for shot's workfiles + + Returns: + dict: timing metadata + """ + # frame ranges data + clip_in = otio_clip.range_in_parent().start_time.value + clip_in += track_start_frame + clip_out = otio_clip.range_in_parent().end_time_inclusive().value + clip_out += track_start_frame + self.log.info(f"clip_in: {clip_in} | clip_out: {clip_out}") + + # add offset in case there is any + self.log.debug(f"__ timeline_offset: {timeline_offset}") + if timeline_offset: + clip_in += timeline_offset + clip_out += timeline_offset + + clip_duration = otio_clip.duration().value + self.log.info(f"clip duration: {clip_duration}") + + source_in = otio_clip.trimmed_range().start_time.value + 
source_out = source_in + clip_duration + + # define starting frame for future shot + frame_start = ( + clip_in if workfile_start_frame is None + else workfile_start_frame + ) + frame_end = frame_start + (clip_duration - 1) + + return { + "frameStart": int(frame_start), + "frameEnd": int(frame_end), + "clipIn": int(clip_in), + "clipOut": int(clip_out), + "clipDuration": int(otio_clip.duration().value), + "sourceIn": int(source_in), + "sourceOut": int(source_out) + } + + def _get_allowed_family_presets(self, pre_create_data): + """ Filter out allowed family presets. + + Args: + pre_create_data (dict): precreate attributes inputs + + Returns: + list: lit of dict with preset items + """ + self.log.debug(f"__ pre_create_data: {pre_create_data}") + return [ + {"family": "shot"}, + *[ + preset for preset in self._creator_settings["family_presets"] + if pre_create_data[preset["family"]] + ] + ] + + def _validate_clip_for_processing(self, otio_clip): + """Validate otio clip attribues + + Args: + otio_clip (otio.Clip): otio clip object + + Returns: + bool: True if all passing conditions + """ + if otio_clip.name is None: + return False + + if isinstance(otio_clip, otio.schema.Gap): + return False + + # skip all generators like black empty + if isinstance( + otio_clip.media_reference, + otio.schema.GeneratorReference): + return False + + # Transitions are ignored, because Clips have the full frame + # range. + if isinstance(otio_clip, otio.schema.Transition): + return False + + return True + + def _validate_name_uniqueness(self, name): + """ Validating name uniqueness. + + In context of other clip names in sequence file. + + Args: + name (str): shot name string + """ + if name not in self.asset_name_check: + self.asset_name_check.append(name) + else: + self.log.warning( + f"Duplicate shot name: {name}! " + "Please check names in the input sequence files." + ) + + def get_pre_create_attr_defs(self): + """ Creating pre-create attributes at creator plugin. 
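To make the frame math in _get_timing_data above concrete, a small worked example with invented numbers: a 50 frame clip placed at frames 100-149 of its track, workfile start frame 1001, no timeline offset.

    clip_in, clip_out = 100, 149
    clip_duration = 50
    frame_start = 1001                             # workfile_start_frame wins when set
    frame_end = frame_start + (clip_duration - 1)  # 1050
    source_in = 0                                  # trimmed_range start
    source_out = source_in + clip_duration         # 50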
+ + Returns: + list: list of attribute object instances + """ + # Use same attributes as for instance attrobites + attr_defs = [ + FileDef( + "sequence_filepath_data", + folders=False, + extensions=[ + ".edl", + ".xml", + ".aaf", + ".fcpxml" + ], + allow_sequences=False, + single_item=True, + label="Sequence file", + ), + FileDef( + "media_filepaths_data", + folders=False, + extensions=[ + ".mov", + ".mp4", + ".wav" + ], + allow_sequences=False, + single_item=False, + label="Media files", + ), + # TODO: perhpas better would be timecode and fps input + NumberDef( + "timeline_offset", + default=0, + label="Timeline offset" + ), + UISeparatorDef(), + UILabelDef("Clip instance attributes"), + UISeparatorDef() + ] + # add variants swithers + attr_defs.extend( + BoolDef(_var["family"], label=_var["family"]) + for _var in self._creator_settings["family_presets"] + ) + attr_defs.append(UISeparatorDef()) + + attr_defs.extend(CLIP_ATTR_DEFS) + return attr_defs diff --git a/openpype/hosts/traypublisher/plugins/create/create_from_settings.py b/openpype/hosts/traypublisher/plugins/create/create_from_settings.py index baca274ea6..41c1c29bb0 100644 --- a/openpype/hosts/traypublisher/plugins/create/create_from_settings.py +++ b/openpype/hosts/traypublisher/plugins/create/create_from_settings.py @@ -1,6 +1,7 @@ import os +from openpype.api import get_project_settings, Logger -from openpype.api import get_project_settings +log = Logger.get_logger(__name__) def initialize(): @@ -13,6 +14,7 @@ def initialize(): global_variables = globals() for item in simple_creators: + dynamic_plugin = SettingsCreator.from_settings(item) global_variables[dynamic_plugin.__name__] = dynamic_plugin diff --git a/openpype/hosts/traypublisher/plugins/create/create_movie_batch.py b/openpype/hosts/traypublisher/plugins/create/create_movie_batch.py index c5f0d6b75e..abe29d7473 100644 --- a/openpype/hosts/traypublisher/plugins/create/create_movie_batch.py +++ b/openpype/hosts/traypublisher/plugins/create/create_movie_batch.py @@ -6,13 +6,15 @@ from openpype.client import get_assets, get_asset_by_name from openpype.lib import ( FileDef, BoolDef, - get_subset_name_with_asset_doc, - TaskNotSetError, ) from openpype.pipeline import ( CreatedInstance, CreatorError ) +from openpype.pipeline.create import ( + get_subset_name, + TaskNotSetError, +) from openpype.hosts.traypublisher.api.plugin import TrayPublishCreator @@ -130,7 +132,7 @@ class BatchMovieCreator(TrayPublishCreator): task_name = self._get_task_name(asset_doc) try: - subset_name = get_subset_name_with_asset_doc( + subset_name = get_subset_name( self.family, variant, task_name, @@ -143,7 +145,7 @@ class BatchMovieCreator(TrayPublishCreator): # but user have ability to change it # NOTE: This expect that there is not task 'Undefined' on asset task_name = "Undefined" - subset_name = get_subset_name_with_asset_doc( + subset_name = get_subset_name( self.family, variant, task_name, diff --git a/openpype/hosts/traypublisher/plugins/publish/collect_clip_instances.py b/openpype/hosts/traypublisher/plugins/publish/collect_clip_instances.py new file mode 100644 index 0000000000..bdf7c05f3d --- /dev/null +++ b/openpype/hosts/traypublisher/plugins/publish/collect_clip_instances.py @@ -0,0 +1,36 @@ +from pprint import pformat +import pyblish.api + + +class CollectClipInstance(pyblish.api.InstancePlugin): + """Collect clip instances and resolve its parent""" + + label = "Collect Clip Instances" + order = pyblish.api.CollectorOrder - 0.081 + + hosts = ["traypublisher"] + families = ["plate", 
"review", "audio"] + + def process(self, instance): + creator_identifier = instance.data["creator_identifier"] + if creator_identifier not in [ + "editorial_plate", + "editorial_audio", + "editorial_review" + ]: + return + + instance.data["families"].append("clip") + + parent_instance_id = instance.data["parent_instance_id"] + edit_shared_data = instance.context.data["editorialSharedData"] + instance.data.update( + edit_shared_data[parent_instance_id] + ) + + if "editorialSourcePath" in instance.context.data.keys(): + instance.data["editorialSourcePath"] = ( + instance.context.data["editorialSourcePath"]) + instance.data["families"].append("trimming") + + self.log.debug(pformat(instance.data)) diff --git a/openpype/hosts/traypublisher/plugins/publish/collect_editorial_instances.py b/openpype/hosts/traypublisher/plugins/publish/collect_editorial_instances.py new file mode 100644 index 0000000000..e181d0abe5 --- /dev/null +++ b/openpype/hosts/traypublisher/plugins/publish/collect_editorial_instances.py @@ -0,0 +1,48 @@ +import os +from pprint import pformat +import pyblish.api +import opentimelineio as otio + + +class CollectEditorialInstance(pyblish.api.InstancePlugin): + """Collect data for instances created by settings creators.""" + + label = "Collect Editorial Instances" + order = pyblish.api.CollectorOrder - 0.1 + + hosts = ["traypublisher"] + families = ["editorial"] + + def process(self, instance): + + if "families" not in instance.data: + instance.data["families"] = [] + + if "representations" not in instance.data: + instance.data["representations"] = [] + + fpath = instance.data["sequenceFilePath"] + otio_timeline_string = instance.data.pop("otioTimeline") + otio_timeline = otio.adapters.read_from_string( + otio_timeline_string) + + instance.context.data["otioTimeline"] = otio_timeline + instance.context.data["editorialSourcePath"] = ( + instance.data["editorialSourcePath"]) + + self.log.info(fpath) + + instance.data["stagingDir"] = os.path.dirname(fpath) + + _, ext = os.path.splitext(fpath) + + instance.data["representations"].append({ + "ext": ext[1:], + "name": ext[1:], + "stagingDir": instance.data["stagingDir"], + "files": os.path.basename(fpath) + }) + + self.log.debug("Created Editorial Instance {}".format( + pformat(instance.data) + )) diff --git a/openpype/hosts/traypublisher/plugins/publish/collect_editorial_reviewable.py b/openpype/hosts/traypublisher/plugins/publish/collect_editorial_reviewable.py new file mode 100644 index 0000000000..4af4fb94e9 --- /dev/null +++ b/openpype/hosts/traypublisher/plugins/publish/collect_editorial_reviewable.py @@ -0,0 +1,30 @@ +import pyblish.api + + +class CollectEditorialReviewable(pyblish.api.InstancePlugin): + """ Collect review input from user. + + Adds the input to instance data. 
+ """ + + label = "Collect Editorial Reviewable" + order = pyblish.api.CollectorOrder + + families = ["plate", "review", "audio"] + hosts = ["traypublisher"] + + def process(self, instance): + creator_identifier = instance.data["creator_identifier"] + if creator_identifier not in [ + "editorial_plate", + "editorial_audio", + "editorial_review" + ]: + return + + creator_attributes = instance.data["creator_attributes"] + + if creator_attributes["add_review_family"]: + instance.data["families"].append("review") + + self.log.debug("instance.data {}".format(instance.data)) diff --git a/openpype/hosts/traypublisher/plugins/publish/collect_shot_instances.py b/openpype/hosts/traypublisher/plugins/publish/collect_shot_instances.py new file mode 100644 index 0000000000..716f73022e --- /dev/null +++ b/openpype/hosts/traypublisher/plugins/publish/collect_shot_instances.py @@ -0,0 +1,213 @@ +from pprint import pformat +import pyblish.api +import opentimelineio as otio + + +class CollectShotInstance(pyblish.api.InstancePlugin): + """ Collect shot instances + + Resolving its user inputs from creator attributes + to instance data. + """ + + label = "Collect Shot Instances" + order = pyblish.api.CollectorOrder - 0.09 + + hosts = ["traypublisher"] + families = ["shot"] + + SHARED_KEYS = [ + "asset", + "fps", + "handleStart", + "handleEnd", + "frameStart", + "frameEnd", + "clipIn", + "clipOut", + "clipDuration", + "sourceIn", + "sourceOut", + "otioClip", + "workfileFrameStart" + ] + + def process(self, instance): + self.log.debug(pformat(instance.data)) + + creator_identifier = instance.data["creator_identifier"] + if "editorial" not in creator_identifier: + return + + # get otio clip object + otio_clip = self._get_otio_clip(instance) + instance.data["otioClip"] = otio_clip + + # first solve the inputs from creator attr + data = self._solve_inputs_to_data(instance) + instance.data.update(data) + + # distribute all shared keys to clips instances + self._distribute_shared_data(instance) + self._solve_hierarchy_context(instance) + + self.log.debug(pformat(instance.data)) + + def _get_otio_clip(self, instance): + """ Converts otio string data. + + Convert them to proper otio object + and finds its equivalent at otio timeline. + This process is a hack to support also + resolving parent range. + + Args: + instance (obj): publishing instance + + Returns: + otio.Clip: otio clip object + """ + context = instance.context + # convert otio clip from string to object + otio_clip_string = instance.data.pop("otioClip") + otio_clip = otio.adapters.read_from_string( + otio_clip_string) + + otio_timeline = context.data["otioTimeline"] + + clips = [ + clip for clip in otio_timeline.each_child( + descended_from_type=otio.schema.Clip) + if clip.name == otio_clip.name + ] + + otio_clip = clips.pop() + self.log.debug(f"__ otioclip.parent: {otio_clip.parent}") + + return otio_clip + + def _distribute_shared_data(self, instance): + """ Distribute all defined keys. + + All data are shared between all related + instances in context. + + Args: + instance (obj): publishing instance + """ + context = instance.context + + instance_id = instance.data["instance_id"] + + if not context.data.get("editorialSharedData"): + context.data["editorialSharedData"] = {} + + context.data["editorialSharedData"][instance_id] = { + _k: _v for _k, _v in instance.data.items() + if _k in self.SHARED_KEYS + } + + def _solve_inputs_to_data(self, instance): + """ Resolve all user inputs into instance data. 
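For reference, a hypothetical snapshot of the mapping _distribute_shared_data above stores in context data; the id and values are invented. CollectClipInstance earlier in this patch reads it back with instance.data.update(editorial_shared_data[parent_instance_id]):

    editorial_shared_data = {
        "shot-instance-id-1": {
            "asset": "sc010_sh020",
            "fps": 25.0,
            "frameStart": 1001,
            "frameEnd": 1050,
            "handleStart": 0,
            "handleEnd": 0,
            "clipIn": 100,
            "clipOut": 149,
        },
    }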
+ + Args: + instance (obj): publishing instance + + Returns: + dict: instance data updating data + """ + _cr_attrs = instance.data["creator_attributes"] + workfile_start_frame = _cr_attrs["workfile_start_frame"] + frame_start = _cr_attrs["frameStart"] + frame_end = _cr_attrs["frameEnd"] + frame_dur = frame_end - frame_start + + return { + "asset": _cr_attrs["asset_name"], + "fps": float(_cr_attrs["fps"]), + "handleStart": _cr_attrs["handle_start"], + "handleEnd": _cr_attrs["handle_end"], + "frameStart": workfile_start_frame, + "frameEnd": workfile_start_frame + frame_dur, + "clipIn": _cr_attrs["clipIn"], + "clipOut": _cr_attrs["clipOut"], + "clipDuration": _cr_attrs["clipDuration"], + "sourceIn": _cr_attrs["sourceIn"], + "sourceOut": _cr_attrs["sourceOut"], + "workfileFrameStart": workfile_start_frame + } + + def _solve_hierarchy_context(self, instance): + """ Adding hierarchy data to context shared data. + + Args: + instance (obj): publishing instance + """ + context = instance.context + + final_context = ( + context.data["hierarchyContext"] + if context.data.get("hierarchyContext") + else {} + ) + + name = instance.data["asset"] + + # get handles + handle_start = int(instance.data["handleStart"]) + handle_end = int(instance.data["handleEnd"]) + + in_info = { + "entity_type": "Shot", + "custom_attributes": { + "handleStart": handle_start, + "handleEnd": handle_end, + "frameStart": instance.data["frameStart"], + "frameEnd": instance.data["frameEnd"], + "clipIn": instance.data["clipIn"], + "clipOut": instance.data["clipOut"], + "fps": instance.data["fps"] + }, + "tasks": instance.data["tasks"] + } + + parents = instance.data.get('parents', []) + self.log.debug(f"parents: {pformat(parents)}") + + actual = {name: in_info} + + for parent in reversed(parents): + parent_name = parent["entity_name"] + next_dict = { + parent_name: { + "entity_type": parent["entity_type"], + "childs": actual + } + } + actual = next_dict + + final_context = self._update_dict(final_context, actual) + + # adding hierarchy context to instance + context.data["hierarchyContext"] = final_context + self.log.debug(pformat(final_context)) + + def _update_dict(self, ex_dict, new_dict): + """ Recursion function + + Updating nested data with another nested data. 
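For orientation, a hypothetical hierarchyContext fragment as assembled by _solve_hierarchy_context above for a single shot; entity and task names are invented:

    hierarchy_context = {
        "ep101": {
            "entity_type": "Episode",
            "childs": {
                "sc010_sh020": {
                    "entity_type": "Shot",
                    "custom_attributes": {
                        "handleStart": 0,
                        "handleEnd": 0,
                        "frameStart": 1001,
                        "frameEnd": 1050,
                        "clipIn": 100,
                        "clipOut": 149,
                        "fps": 25.0,
                    },
                    "tasks": {"compositing": {"type": "Compositing"}},
                },
            },
        },
    }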
+ + Args: + ex_dict (dict): nested data + new_dict (dict): nested data + + Returns: + dict: updated nested data + """ + for key in ex_dict: + if key in new_dict and isinstance(ex_dict[key], dict): + new_dict[key] = self._update_dict(ex_dict[key], new_dict[key]) + elif not ex_dict.get(key) or not new_dict.get(key): + new_dict[key] = ex_dict[key] + + return new_dict diff --git a/openpype/hosts/tvpaint/__init__.py b/openpype/hosts/tvpaint/__init__.py index 09b7c52cd1..b98680f204 100644 --- a/openpype/hosts/tvpaint/__init__.py +++ b/openpype/hosts/tvpaint/__init__.py @@ -1,20 +1,12 @@ -import os +from .addon import ( + get_launch_script_path, + TVPaintAddon, + TVPAINT_ROOT_DIR, +) -def add_implementation_envs(env, _app): - """Modify environments to contain all required for implementation.""" - defaults = { - "OPENPYPE_LOG_NO_COLORS": "True" - } - for key, value in defaults.items(): - if not env.get(key): - env[key] = value - - -def get_launch_script_path(): - current_dir = os.path.dirname(os.path.abspath(__file__)) - return os.path.join( - current_dir, - "api", - "launch_script.py" - ) +__all__ = ( + "get_launch_script_path", + "TVPaintAddon", + "TVPAINT_ROOT_DIR", +) diff --git a/openpype/hosts/tvpaint/addon.py b/openpype/hosts/tvpaint/addon.py new file mode 100644 index 0000000000..d710e63f93 --- /dev/null +++ b/openpype/hosts/tvpaint/addon.py @@ -0,0 +1,41 @@ +import os +from openpype.modules import OpenPypeModule +from openpype.modules.interfaces import IHostAddon + +TVPAINT_ROOT_DIR = os.path.dirname(os.path.abspath(__file__)) + + +def get_launch_script_path(): + return os.path.join( + TVPAINT_ROOT_DIR, + "api", + "launch_script.py" + ) + + +class TVPaintAddon(OpenPypeModule, IHostAddon): + name = "tvpaint" + host_name = "tvpaint" + + def initialize(self, module_settings): + self.enabled = True + + def add_implementation_envs(self, env, _app): + """Modify environments to contain all required for implementation.""" + + defaults = { + "OPENPYPE_LOG_NO_COLORS": "True" + } + for key, value in defaults.items(): + if not env.get(key): + env[key] = value + + def get_launch_hook_paths(self, app): + if app.host_name != self.host_name: + return [] + return [ + os.path.join(TVPAINT_ROOT_DIR, "hooks") + ] + + def get_workfile_extensions(self): + return [".tvpp"] diff --git a/openpype/hosts/tvpaint/api/__init__.py b/openpype/hosts/tvpaint/api/__init__.py index c461b33f4b..43d411d8f9 100644 --- a/openpype/hosts/tvpaint/api/__init__.py +++ b/openpype/hosts/tvpaint/api/__init__.py @@ -6,7 +6,6 @@ from . import pipeline from . import plugin from .pipeline import ( install, - uninstall, maintained_selection, remove_instance, list_instances, @@ -33,7 +32,6 @@ __all__ = ( "plugin", "install", - "uninstall", "maintained_selection", "remove_instance", "list_instances", diff --git a/openpype/hosts/tvpaint/api/pipeline.py b/openpype/hosts/tvpaint/api/pipeline.py index 0118c0104b..427c927264 100644 --- a/openpype/hosts/tvpaint/api/pipeline.py +++ b/openpype/hosts/tvpaint/api/pipeline.py @@ -16,8 +16,6 @@ from openpype.pipeline import ( legacy_io, register_loader_plugin_path, register_creator_plugin_path, - deregister_loader_plugin_path, - deregister_creator_plugin_path, AVALON_CONTAINER_ID, ) @@ -91,19 +89,6 @@ def install(): register_event_callback("application.exit", application_exit) -def uninstall(): - """Uninstall TVPaint-specific functionality. - - This function is called automatically on calling `uninstall_host()`. 
- """ - - log.info("OpenPype - Uninstalling TVPaint integration") - pyblish.api.deregister_host("tvpaint") - pyblish.api.deregister_plugin_path(PUBLISH_PATH) - deregister_loader_plugin_path(LOAD_PATH) - deregister_creator_plugin_path(CREATE_PATH) - - def containerise( name, namespace, members, context, loader, current_containers=None ): diff --git a/openpype/hosts/tvpaint/plugins/load/load_workfile.py b/openpype/hosts/tvpaint/plugins/load/load_workfile.py index c6dc765a27..a99b300730 100644 --- a/openpype/hosts/tvpaint/plugins/load/load_workfile.py +++ b/openpype/hosts/tvpaint/plugins/load/load_workfile.py @@ -1,17 +1,16 @@ import os -from openpype.client import get_project, get_asset_by_name -from openpype.lib import ( - StringTemplate, - get_workfile_template_key_from_context, - get_workdir_data, - get_last_workfile_with_version, -) +from openpype.lib import StringTemplate from openpype.pipeline import ( registered_host, legacy_io, Anatomy, ) +from openpype.pipeline.workfile import ( + get_workfile_template_key_from_context, + get_last_workfile_with_version, +) +from openpype.pipeline.template_data import get_template_data_with_names from openpype.hosts.tvpaint.api import lib, pipeline, plugin @@ -54,19 +53,17 @@ class LoadWorkfile(plugin.Loader): asset_name = legacy_io.Session["AVALON_ASSET"] task_name = legacy_io.Session["AVALON_TASK"] - project_doc = get_project(project_name) - asset_doc = get_asset_by_name(project_name, asset_name) - template_key = get_workfile_template_key_from_context( asset_name, task_name, host_name, - project_name=project_name, - dbcon=legacy_io + project_name=project_name ) anatomy = Anatomy(project_name) - data = get_workdir_data(project_doc, asset_doc, task_name, host_name) + data = get_template_data_with_names( + project_name, asset_name, task_name, host_name + ) data["root"] = anatomy.roots file_template = anatomy.templates[template_key]["file"] diff --git a/openpype/hosts/tvpaint/plugins/publish/collect_instances.py b/openpype/hosts/tvpaint/plugins/publish/collect_instances.py index 9b6d5c4879..ae1326a5bd 100644 --- a/openpype/hosts/tvpaint/plugins/publish/collect_instances.py +++ b/openpype/hosts/tvpaint/plugins/publish/collect_instances.py @@ -3,8 +3,8 @@ import copy import pyblish.api from openpype.client import get_asset_by_name -from openpype.lib import get_subset_name_with_asset_doc from openpype.pipeline import legacy_io +from openpype.pipeline.create import get_subset_name class CollectInstances(pyblish.api.ContextPlugin): @@ -107,13 +107,14 @@ class CollectInstances(pyblish.api.ContextPlugin): # Use empty variant value variant = "" task_name = legacy_io.Session["AVALON_TASK"] - new_subset_name = get_subset_name_with_asset_doc( + new_subset_name = get_subset_name( family, variant, task_name, asset_doc, project_name, - host_name + host_name, + project_settings=context.data["project_settings"] ) instance_data["subset"] = new_subset_name diff --git a/openpype/hosts/tvpaint/plugins/publish/collect_scene_render.py b/openpype/hosts/tvpaint/plugins/publish/collect_scene_render.py index 20c5bb586a..92a2815ba0 100644 --- a/openpype/hosts/tvpaint/plugins/publish/collect_scene_render.py +++ b/openpype/hosts/tvpaint/plugins/publish/collect_scene_render.py @@ -3,7 +3,7 @@ import copy import pyblish.api from openpype.client import get_asset_by_name -from openpype.lib import get_subset_name_with_asset_doc +from openpype.pipeline.create import get_subset_name class CollectRenderScene(pyblish.api.ContextPlugin): @@ -75,14 +75,15 @@ class 
CollectRenderScene(pyblish.api.ContextPlugin): dynamic_data["render_pass"] = dynamic_data["renderpass"] task_name = workfile_context["task"] - subset_name = get_subset_name_with_asset_doc( + subset_name = get_subset_name( "render", variant, task_name, asset_doc, project_name, host_name, - dynamic_data=dynamic_data + dynamic_data=dynamic_data, + project_settings=context.data["project_settings"] ) instance_data = { diff --git a/openpype/hosts/tvpaint/plugins/publish/collect_workfile.py b/openpype/hosts/tvpaint/plugins/publish/collect_workfile.py index 88c5f4dbc7..8c7c8c3899 100644 --- a/openpype/hosts/tvpaint/plugins/publish/collect_workfile.py +++ b/openpype/hosts/tvpaint/plugins/publish/collect_workfile.py @@ -3,8 +3,8 @@ import json import pyblish.api from openpype.client import get_asset_by_name -from openpype.lib import get_subset_name_with_asset_doc from openpype.pipeline import legacy_io +from openpype.pipeline.create import get_subset_name class CollectWorkfile(pyblish.api.ContextPlugin): @@ -39,13 +39,14 @@ class CollectWorkfile(pyblish.api.ContextPlugin): # Use empty variant value variant = "" task_name = legacy_io.Session["AVALON_TASK"] - subset_name = get_subset_name_with_asset_doc( + subset_name = get_subset_name( family, variant, task_name, asset_doc, project_name, - host_name + host_name, + project_settings=context.data["project_settings"] ) # Create Workfile instance diff --git a/openpype/hosts/tvpaint/worker/worker_job.py b/openpype/hosts/tvpaint/worker/worker_job.py index 1c785ab2ee..95c0a678bc 100644 --- a/openpype/hosts/tvpaint/worker/worker_job.py +++ b/openpype/hosts/tvpaint/worker/worker_job.py @@ -9,7 +9,7 @@ from abc import ABCMeta, abstractmethod, abstractproperty import six -from openpype.api import PypeLogger +from openpype.lib import Logger from openpype.modules import ModulesManager @@ -328,7 +328,7 @@ class TVPaintCommands: def log(self): """Access to logger object.""" if self._log is None: - self._log = PypeLogger.get_logger(self.__class__.__name__) + self._log = Logger.get_logger(self.__class__.__name__) return self._log @property diff --git a/openpype/hosts/unreal/__init__.py b/openpype/hosts/unreal/__init__.py index 10e9c5100e..42dd8f0ac4 100644 --- a/openpype/hosts/unreal/__init__.py +++ b/openpype/hosts/unreal/__init__.py @@ -1,24 +1,6 @@ -import os -import openpype.hosts -from openpype.lib.applications import Application +from .addon import UnrealAddon -def add_implementation_envs(env: dict, _app: Application) -> None: - """Modify environments to contain all required for implementation.""" - # Set OPENPYPE_UNREAL_PLUGIN required for Unreal implementation - - ue_plugin = "UE_5.0" if _app.name[:1] == "5" else "UE_4.7" - unreal_plugin_path = os.path.join( - os.path.dirname(os.path.abspath(openpype.hosts.__file__)), - "unreal", "integration", ue_plugin - ) - if not env.get("OPENPYPE_UNREAL_PLUGIN"): - env["OPENPYPE_UNREAL_PLUGIN"] = unreal_plugin_path - - # Set default environments if are not set via settings - defaults = { - "OPENPYPE_LOG_NO_COLORS": "True" - } - for key, value in defaults.items(): - if not env.get(key): - env[key] = value +__all__ = ( + "UnrealAddon", +) diff --git a/openpype/hosts/unreal/addon.py b/openpype/hosts/unreal/addon.py new file mode 100644 index 0000000000..16736214c5 --- /dev/null +++ b/openpype/hosts/unreal/addon.py @@ -0,0 +1,42 @@ +import os +from openpype.modules import OpenPypeModule +from openpype.modules.interfaces import IHostAddon + +UNREAL_ROOT_DIR = os.path.dirname(os.path.abspath(__file__)) + + +class 
UnrealAddon(OpenPypeModule, IHostAddon): + name = "unreal" + host_name = "unreal" + + def initialize(self, module_settings): + self.enabled = True + + def add_implementation_envs(self, env, app): + """Modify environments to contain all required for implementation.""" + # Set OPENPYPE_UNREAL_PLUGIN required for Unreal implementation + + ue_plugin = "UE_5.0" if app.name[:1] == "5" else "UE_4.7" + unreal_plugin_path = os.path.join( + UNREAL_ROOT_DIR, "integration", ue_plugin + ) + if not env.get("OPENPYPE_UNREAL_PLUGIN"): + env["OPENPYPE_UNREAL_PLUGIN"] = unreal_plugin_path + + # Set default environments if are not set via settings + defaults = { + "OPENPYPE_LOG_NO_COLORS": "True" + } + for key, value in defaults.items(): + if not env.get(key): + env[key] = value + + def get_launch_hook_paths(self, app): + if app.host_name != self.host_name: + return [] + return [ + os.path.join(UNREAL_ROOT_DIR, "hooks") + ] + + def get_workfile_extensions(self): + return [".uproject"] diff --git a/openpype/hosts/unreal/api/__init__.py b/openpype/hosts/unreal/api/__init__.py index ede71aa218..870982f5f9 100644 --- a/openpype/hosts/unreal/api/__init__.py +++ b/openpype/hosts/unreal/api/__init__.py @@ -19,6 +19,7 @@ from .pipeline import ( show_tools_dialog, show_tools_popup, instantiate, + UnrealHost, ) __all__ = [ @@ -36,5 +37,6 @@ __all__ = [ "show_experimental_tools", "show_tools_dialog", "show_tools_popup", - "instantiate" + "instantiate", + "UnrealHost", ] diff --git a/openpype/hosts/unreal/api/pipeline.py b/openpype/hosts/unreal/api/pipeline.py index bbca7916d3..d396b64072 100644 --- a/openpype/hosts/unreal/api/pipeline.py +++ b/openpype/hosts/unreal/api/pipeline.py @@ -14,6 +14,7 @@ from openpype.pipeline import ( ) from openpype.tools.utils import host_tools import openpype.hosts.unreal +from openpype.host import HostBase, ILoadHost import unreal # noqa @@ -29,6 +30,32 @@ CREATE_PATH = os.path.join(PLUGINS_DIR, "create") INVENTORY_PATH = os.path.join(PLUGINS_DIR, "inventory") +class UnrealHost(HostBase, ILoadHost): + """Unreal host implementation. + + For some time this class will re-use functions from module based + implementation for backwards compatibility of older unreal projects. 
+ """ + + name = "unreal" + + def install(self): + install() + + def get_containers(self): + return ls() + + def show_tools_popup(self): + """Show tools popup with actions leading to show other tools.""" + + show_tools_popup() + + def show_tools_dialog(self): + """Show tools dialog with actions leading to show other tools.""" + + show_tools_dialog() + + def install(): """Install Unreal configuration for OpenPype.""" print("-=" * 40) diff --git a/openpype/hosts/unreal/hooks/pre_workfile_preparation.py b/openpype/hosts/unreal/hooks/pre_workfile_preparation.py index 5be04fc841..50b34bd573 100644 --- a/openpype/hosts/unreal/hooks/pre_workfile_preparation.py +++ b/openpype/hosts/unreal/hooks/pre_workfile_preparation.py @@ -1,13 +1,13 @@ # -*- coding: utf-8 -*- """Hook to launch Unreal and prepare projects.""" import os +import copy from pathlib import Path from openpype.lib import ( PreLaunchHook, ApplicationLaunchFailed, ApplicationNotFound, - get_workdir_data, get_workfile_template_key ) import openpype.hosts.unreal.lib as unreal_lib @@ -35,18 +35,13 @@ class UnrealPrelaunchHook(PreLaunchHook): return last_workfile.name # Prepare data for fill data and for getting workfile template key - task_name = self.data["task_name"] anatomy = self.data["anatomy"] - asset_doc = self.data["asset_doc"] project_doc = self.data["project_doc"] - asset_tasks = asset_doc.get("data", {}).get("tasks") or {} - task_info = asset_tasks.get(task_name) or {} - task_type = task_info.get("type") + # Use already prepared workdir data + workdir_data = copy.deepcopy(self.data["workdir_data"]) + task_type = workdir_data.get("task", {}).get("type") - workdir_data = get_workdir_data( - project_doc, asset_doc, task_name, self.host_name - ) # QUESTION raise exception if version is part of filename template? 
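For context on the hook change above: the Unreal prelaunch hook no longer queries asset and task documents itself and instead reuses the fill data already prepared in `self.data["workdir_data"]`. A hedged sketch of the shape that access pattern implies; all values below are made up, only the nested "task" entry is actually relied on by the hook:

```python
import copy

# Made-up fill data; the hook only depends on the nested "task" dict.
prepared_data = {
    "project": {"name": "demo", "code": "dm"},
    "asset": "sh010",
    "task": {"name": "layout", "type": "Layout"},
}

workdir_data = copy.deepcopy(prepared_data)
task_type = workdir_data.get("task", {}).get("type")  # -> "Layout"

# The hook then injects the extra keys the workfile template expects,
# as the lines right below do for "version" and "ext".
workdir_data["version"] = 1
workdir_data["ext"] = "uproject"
```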
workdir_data["version"] = 1 workdir_data["ext"] = "uproject" diff --git a/openpype/hosts/unreal/integration/UE_4.7/Content/Python/init_unreal.py b/openpype/hosts/unreal/integration/UE_4.7/Content/Python/init_unreal.py index 4bb03b07ed..b85f970699 100644 --- a/openpype/hosts/unreal/integration/UE_4.7/Content/Python/init_unreal.py +++ b/openpype/hosts/unreal/integration/UE_4.7/Content/Python/init_unreal.py @@ -3,7 +3,9 @@ import unreal openpype_detected = True try: from openpype.pipeline import install_host - from openpype.hosts.unreal import api as openpype_host + from openpype.hosts.unreal.api import UnrealHost + + openpype_host = UnrealHost() except ImportError as exc: openpype_host = None openpype_detected = False diff --git a/openpype/hosts/unreal/integration/UE_5.0/Content/Python/init_unreal.py b/openpype/hosts/unreal/integration/UE_5.0/Content/Python/init_unreal.py index 4bb03b07ed..b85f970699 100644 --- a/openpype/hosts/unreal/integration/UE_5.0/Content/Python/init_unreal.py +++ b/openpype/hosts/unreal/integration/UE_5.0/Content/Python/init_unreal.py @@ -3,7 +3,9 @@ import unreal openpype_detected = True try: from openpype.pipeline import install_host - from openpype.hosts.unreal import api as openpype_host + from openpype.hosts.unreal.api import UnrealHost + + openpype_host = UnrealHost() except ImportError as exc: openpype_host = None openpype_detected = False diff --git a/openpype/hosts/unreal/lib.py b/openpype/hosts/unreal/lib.py index 8c453b38b9..d02c6de357 100644 --- a/openpype/hosts/unreal/lib.py +++ b/openpype/hosts/unreal/lib.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- """Unreal launching and project tools.""" -import sys + import os import platform import json @@ -9,7 +9,7 @@ import subprocess import re from pathlib import Path from collections import OrderedDict -from openpype.api import get_project_settings +from openpype.settings import get_project_settings def get_engine_versions(env=None): diff --git a/openpype/hosts/unreal/plugins/load/load_alembic_skeletalmesh.py b/openpype/hosts/unreal/plugins/load/load_alembic_skeletalmesh.py index b2c3889f68..9fe5f3ab4b 100644 --- a/openpype/hosts/unreal/plugins/load/load_alembic_skeletalmesh.py +++ b/openpype/hosts/unreal/plugins/load/load_alembic_skeletalmesh.py @@ -20,6 +20,34 @@ class SkeletalMeshAlembicLoader(plugin.Loader): icon = "cube" color = "orange" + def get_task(self, filename, asset_dir, asset_name, replace): + task = unreal.AssetImportTask() + options = unreal.AbcImportSettings() + sm_settings = unreal.AbcStaticMeshSettings() + conversion_settings = unreal.AbcConversionSettings( + preset=unreal.AbcConversionPreset.CUSTOM, + flip_u=False, flip_v=False, + rotation=[0.0, 0.0, 0.0], + scale=[1.0, 1.0, 1.0]) + + task.set_editor_property('filename', filename) + task.set_editor_property('destination_path', asset_dir) + task.set_editor_property('destination_name', asset_name) + task.set_editor_property('replace_existing', replace) + task.set_editor_property('automated', True) + task.set_editor_property('save', True) + + # set import options here + # Unreal 4.24 ignores the settings. It works with Unreal 4.26 + options.set_editor_property( + 'import_type', unreal.AlembicImportType.SKELETAL) + + options.static_mesh_settings = sm_settings + options.conversion_settings = conversion_settings + task.options = options + + return task + def load(self, context, name, namespace, data): """Load and containerise representation into Content Browser. 
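The new `get_task` helper above centralises the Alembic import setup so that `load` and `update` can share one configured `AssetImportTask` instead of duplicating the editor properties. A hedged usage sketch; `loader` is a hypothetical `SkeletalMeshAlembicLoader` instance and the paths and names are placeholders:

```python
import unreal

# Hypothetical values; in the loader these come from the load context.
task = loader.get_task(
    "/path/to/model.abc",       # source Alembic file
    "/Game/OpenPype/sh010",     # destination content path
    "sh010_modelMain",          # asset name in the Content Browser
    False,                      # replace_existing
)

# Run the import exactly as the loader does after building the task.
unreal.AssetToolsHelpers.get_asset_tools().import_asset_tasks([task])
```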
@@ -50,36 +78,24 @@ class SkeletalMeshAlembicLoader(plugin.Loader): asset_name = "{}_{}".format(asset, name) else: asset_name = "{}".format(name) + version = context.get('version').get('name') tools = unreal.AssetToolsHelpers().get_asset_tools() asset_dir, container_name = tools.create_unique_asset_name( - "{}/{}/{}".format(root, asset, name), suffix="") + f"{root}/{asset}/{name}_v{version:03d}", suffix="") container_name += suffix - unreal.EditorAssetLibrary.make_directory(asset_dir) + if not unreal.EditorAssetLibrary.does_directory_exist(asset_dir): + unreal.EditorAssetLibrary.make_directory(asset_dir) - task = unreal.AssetImportTask() + task = self.get_task(self.fname, asset_dir, asset_name, False) - task.set_editor_property('filename', self.fname) - task.set_editor_property('destination_path', asset_dir) - task.set_editor_property('destination_name', asset_name) - task.set_editor_property('replace_existing', False) - task.set_editor_property('automated', True) - task.set_editor_property('save', True) + unreal.AssetToolsHelpers.get_asset_tools().import_asset_tasks([task]) # noqa: E501 - # set import options here - # Unreal 4.24 ignores the settings. It works with Unreal 4.26 - options = unreal.AbcImportSettings() - options.set_editor_property( - 'import_type', unreal.AlembicImportType.SKELETAL) - - task.options = options - unreal.AssetToolsHelpers.get_asset_tools().import_asset_tasks([task]) # noqa: E501 - - # Create Asset Container - unreal_pipeline.create_container( - container=container_name, path=asset_dir) + # Create Asset Container + unreal_pipeline.create_container( + container=container_name, path=asset_dir) data = { "schema": "openpype:container-2.0", @@ -110,23 +126,8 @@ class SkeletalMeshAlembicLoader(plugin.Loader): source_path = get_representation_path(representation) destination_path = container["namespace"] - task = unreal.AssetImportTask() + task = self.get_task(source_path, destination_path, name, True) - task.set_editor_property('filename', source_path) - task.set_editor_property('destination_path', destination_path) - # strip suffix - task.set_editor_property('destination_name', name) - task.set_editor_property('replace_existing', True) - task.set_editor_property('automated', True) - task.set_editor_property('save', True) - - # set import options here - # Unreal 4.24 ignores the settings. 
It works with Unreal 4.26 - options = unreal.AbcImportSettings() - options.set_editor_property( - 'import_type', unreal.AlembicImportType.SKELETAL) - - task.options = options # do import fbx and replace existing data unreal.AssetToolsHelpers.get_asset_tools().import_asset_tasks([task]) container_path = "{}/{}".format(container["namespace"], diff --git a/openpype/hosts/unreal/plugins/load/load_alembic_staticmesh.py b/openpype/hosts/unreal/plugins/load/load_alembic_staticmesh.py index 5a73c72c64..50e498dbb0 100644 --- a/openpype/hosts/unreal/plugins/load/load_alembic_staticmesh.py +++ b/openpype/hosts/unreal/plugins/load/load_alembic_staticmesh.py @@ -24,7 +24,11 @@ class StaticMeshAlembicLoader(plugin.Loader): task = unreal.AssetImportTask() options = unreal.AbcImportSettings() sm_settings = unreal.AbcStaticMeshSettings() - conversion_settings = unreal.AbcConversionSettings() + conversion_settings = unreal.AbcConversionSettings( + preset=unreal.AbcConversionPreset.CUSTOM, + flip_u=False, flip_v=False, + rotation=[0.0, 0.0, 0.0], + scale=[1.0, 1.0, 1.0]) task.set_editor_property('filename', filename) task.set_editor_property('destination_path', asset_dir) @@ -40,13 +44,6 @@ class StaticMeshAlembicLoader(plugin.Loader): sm_settings.set_editor_property('merge_meshes', True) - conversion_settings.set_editor_property('flip_u', False) - conversion_settings.set_editor_property('flip_v', True) - conversion_settings.set_editor_property( - 'scale', unreal.Vector(x=100.0, y=100.0, z=100.0)) - conversion_settings.set_editor_property( - 'rotation', unreal.Vector(x=-90.0, y=0.0, z=180.0)) - options.static_mesh_settings = sm_settings options.conversion_settings = conversion_settings task.options = options @@ -83,22 +80,24 @@ class StaticMeshAlembicLoader(plugin.Loader): asset_name = "{}_{}".format(asset, name) else: asset_name = "{}".format(name) + version = context.get('version').get('name') tools = unreal.AssetToolsHelpers().get_asset_tools() asset_dir, container_name = tools.create_unique_asset_name( - "{}/{}/{}".format(root, asset, name), suffix="") + f"{root}/{asset}/{name}_v{version:03d}", suffix="") container_name += suffix - unreal.EditorAssetLibrary.make_directory(asset_dir) + if not unreal.EditorAssetLibrary.does_directory_exist(asset_dir): + unreal.EditorAssetLibrary.make_directory(asset_dir) - task = self.get_task(self.fname, asset_dir, asset_name, False) + task = self.get_task(self.fname, asset_dir, asset_name, False) - unreal.AssetToolsHelpers.get_asset_tools().import_asset_tasks([task]) # noqa: E501 + unreal.AssetToolsHelpers.get_asset_tools().import_asset_tasks([task]) # noqa: E501 - # Create Asset Container - unreal_pipeline.create_container( - container=container_name, path=asset_dir) + # Create Asset Container + unreal_pipeline.create_container( + container=container_name, path=asset_dir) data = { "schema": "openpype:container-2.0", diff --git a/openpype/hosts/unreal/plugins/load/load_animation.py b/openpype/hosts/unreal/plugins/load/load_animation.py index da2830bc52..1fe0bef462 100644 --- a/openpype/hosts/unreal/plugins/load/load_animation.py +++ b/openpype/hosts/unreal/plugins/load/load_animation.py @@ -8,13 +8,13 @@ from unreal import EditorAssetLibrary from unreal import MovieSceneSkeletalAnimationTrack from unreal import MovieSceneSkeletalAnimationSection +from openpype.pipeline.context_tools import get_current_project_asset from openpype.pipeline import ( get_representation_path, AVALON_CONTAINER_ID ) from openpype.hosts.unreal.api import plugin from openpype.hosts.unreal.api 
import pipeline as unreal_pipeline -from openpype.api import get_asset class AnimationFBXLoader(plugin.Loader): @@ -53,6 +53,8 @@ class AnimationFBXLoader(plugin.Loader): if not actor: return None + asset_doc = get_current_project_asset(fields=["data.fps"]) + task.set_editor_property('filename', self.fname) task.set_editor_property('destination_path', asset_dir) task.set_editor_property('destination_name', asset_name) @@ -80,7 +82,7 @@ class AnimationFBXLoader(plugin.Loader): task.options.anim_sequence_import_data.set_editor_property( 'use_default_sample_rate', False) task.options.anim_sequence_import_data.set_editor_property( - 'custom_sample_rate', get_asset()["data"].get("fps")) + 'custom_sample_rate', asset_doc.get("data", {}).get("fps")) task.options.anim_sequence_import_data.set_editor_property( 'import_custom_attribute', True) task.options.anim_sequence_import_data.set_editor_property( @@ -246,6 +248,7 @@ class AnimationFBXLoader(plugin.Loader): def update(self, container, representation): name = container["asset_name"] source_path = get_representation_path(representation) + asset_doc = get_current_project_asset(fields=["data.fps"]) destination_path = container["namespace"] task = unreal.AssetImportTask() @@ -279,7 +282,7 @@ class AnimationFBXLoader(plugin.Loader): task.options.anim_sequence_import_data.set_editor_property( 'use_default_sample_rate', False) task.options.anim_sequence_import_data.set_editor_property( - 'custom_sample_rate', get_asset()["data"].get("fps")) + 'custom_sample_rate', asset_doc.get("data", {}).get("fps")) task.options.anim_sequence_import_data.set_editor_property( 'import_custom_attribute', True) task.options.anim_sequence_import_data.set_editor_property( diff --git a/openpype/hosts/unreal/plugins/load/load_layout.py b/openpype/hosts/unreal/plugins/load/load_layout.py index 3f16a68ead..926c932a85 100644 --- a/openpype/hosts/unreal/plugins/load/load_layout.py +++ b/openpype/hosts/unreal/plugins/load/load_layout.py @@ -9,7 +9,10 @@ from unreal import EditorLevelLibrary from unreal import EditorLevelUtils from unreal import AssetToolsHelpers from unreal import FBXImportType -from unreal import MathLibrary as umath +from unreal import MovieSceneLevelVisibilityTrack +from unreal import MovieSceneSubTrack + +from bson.objectid import ObjectId from openpype.client import get_asset_by_name, get_assets from openpype.pipeline import ( @@ -20,7 +23,8 @@ from openpype.pipeline import ( AVALON_CONTAINER_ID, legacy_io, ) -from openpype.api import get_asset +from openpype.pipeline.context_tools import get_current_project_asset +from openpype.api import get_current_project_settings from openpype.hosts.unreal.api import plugin from openpype.hosts.unreal.api import pipeline as unreal_pipeline @@ -159,9 +163,29 @@ class LayoutLoader(plugin.Loader): hid_section.set_row_index(index) hid_section.set_level_names(maps) - @staticmethod + def _transform_from_basis(self, transform, basis): + """Transform a transform from a basis to a new basis.""" + # Get the basis matrix + basis_matrix = unreal.Matrix( + basis[0], + basis[1], + basis[2], + basis[3] + ) + transform_matrix = unreal.Matrix( + transform[0], + transform[1], + transform[2], + transform[3] + ) + + new_transform = ( + basis_matrix.get_inverse() * transform_matrix * basis_matrix) + + return new_transform.transform() + def _process_family( - assets, class_name, transform, sequence, inst_name=None + self, assets, class_name, transform, basis, sequence, inst_name=None ): ar = unreal.AssetRegistryHelpers.get_asset_registry() @@ 
-171,30 +195,12 @@ class LayoutLoader(plugin.Loader): for asset in assets: obj = ar.get_asset_by_object_path(asset).get_asset() if obj.get_class().get_name() == class_name: + t = self._transform_from_basis(transform, basis) actor = EditorLevelLibrary.spawn_actor_from_object( - obj, - transform.get('translation') + obj, t.translation ) - if inst_name: - try: - # Rename method leads to crash - # actor.rename(name=inst_name) - - # The label works, although it make it slightly more - # complicated to check for the names, as we need to - # loop through all the actors in the level - actor.set_actor_label(inst_name) - except Exception as e: - print(e) - actor.set_actor_rotation(unreal.Rotator( - umath.radians_to_degrees( - transform.get('rotation').get('x')), - -umath.radians_to_degrees( - transform.get('rotation').get('y')), - umath.radians_to_degrees( - transform.get('rotation').get('z')), - ), False) - actor.set_actor_scale3d(transform.get('scale')) + actor.set_actor_rotation(t.rotation.rotator(), False) + actor.set_actor_scale3d(t.scale3d) if class_name == 'SkeletalMesh': skm_comp = actor.get_editor_property( @@ -203,16 +209,17 @@ class LayoutLoader(plugin.Loader): actors.append(actor) - binding = None - for p in sequence.get_possessables(): - if p.get_name() == actor.get_name(): - binding = p - break + if sequence: + binding = None + for p in sequence.get_possessables(): + if p.get_name() == actor.get_name(): + binding = p + break - if not binding: - binding = sequence.add_possessable(actor) + if not binding: + binding = sequence.add_possessable(actor) - bindings.append(binding) + bindings.append(binding) return actors, bindings @@ -225,6 +232,7 @@ class LayoutLoader(plugin.Loader): anim_path = f"{asset_dir}/animations/{anim_file_name}" + asset_doc = get_current_project_asset() # Import animation task = unreal.AssetImportTask() task.options = unreal.FbxImportUI() @@ -259,7 +267,7 @@ class LayoutLoader(plugin.Loader): task.options.anim_sequence_import_data.set_editor_property( 'use_default_sample_rate', False) task.options.anim_sequence_import_data.set_editor_property( - 'custom_sample_rate', get_asset()["data"].get("fps")) + 'custom_sample_rate', asset_doc.get("data", {}).get("fps")) task.options.anim_sequence_import_data.set_editor_property( 'import_custom_attribute', True) task.options.anim_sequence_import_data.set_editor_property( @@ -300,52 +308,53 @@ class LayoutLoader(plugin.Loader): actor.skeletal_mesh_component.animation_data.set_editor_property( 'anim_to_play', animation) - # Add animation to the sequencer - bindings = bindings_dict.get(instance_name) + if sequence: + # Add animation to the sequencer + bindings = bindings_dict.get(instance_name) - ar = unreal.AssetRegistryHelpers.get_asset_registry() + ar = unreal.AssetRegistryHelpers.get_asset_registry() - for binding in bindings: - tracks = binding.get_tracks() - track = None - track = tracks[0] if tracks else binding.add_track( - unreal.MovieSceneSkeletalAnimationTrack) + for binding in bindings: + tracks = binding.get_tracks() + track = None + track = tracks[0] if tracks else binding.add_track( + unreal.MovieSceneSkeletalAnimationTrack) - sections = track.get_sections() - section = None - if not sections: - section = track.add_section() - else: - section = sections[0] + sections = track.get_sections() + section = None + if not sections: + section = track.add_section() + else: + section = sections[0] + sec_params = section.get_editor_property('params') + curr_anim = sec_params.get_editor_property('animation') + + if curr_anim: + 
# Checks if the animation path has a container. + # If it does, it means that the animation is + # already in the sequencer. + anim_path = str(Path( + curr_anim.get_path_name()).parent + ).replace('\\', '/') + + _filter = unreal.ARFilter( + class_names=["AssetContainer"], + package_paths=[anim_path], + recursive_paths=False) + containers = ar.get_assets(_filter) + + if len(containers) > 0: + return + + section.set_range( + sequence.get_playback_start(), + sequence.get_playback_end()) sec_params = section.get_editor_property('params') - curr_anim = sec_params.get_editor_property('animation') - - if curr_anim: - # Checks if the animation path has a container. - # If it does, it means that the animation is already - # in the sequencer. - anim_path = str(Path( - curr_anim.get_path_name()).parent - ).replace('\\', '/') - - _filter = unreal.ARFilter( - class_names=["AssetContainer"], - package_paths=[anim_path], - recursive_paths=False) - containers = ar.get_assets(_filter) - - if len(containers) > 0: - return - - section.set_range( - sequence.get_playback_start(), - sequence.get_playback_end()) - sec_params = section.get_editor_property('params') - sec_params.set_editor_property('animation', animation) + sec_params.set_editor_property('animation', animation) @staticmethod - def _generate_sequence(self, h, h_dir): + def _generate_sequence(h, h_dir): tools = unreal.AssetToolsHelpers().get_asset_tools() sequence = tools.create_asset( @@ -401,7 +410,7 @@ class LayoutLoader(plugin.Loader): return sequence, (min_frame, max_frame) - def _process(self, lib_path, asset_dir, sequence, loaded=None): + def _process(self, lib_path, asset_dir, sequence, repr_loaded=None): ar = unreal.AssetRegistryHelpers.get_asset_registry() with open(lib_path, "r") as fp: @@ -409,8 +418,8 @@ class LayoutLoader(plugin.Loader): all_loaders = discover_loader_plugins() - if not loaded: - loaded = [] + if not repr_loaded: + repr_loaded = [] path = Path(lib_path) @@ -421,36 +430,65 @@ class LayoutLoader(plugin.Loader): loaded_assets = [] for element in data: - reference = None - if element.get('reference_fbx'): - reference = element.get('reference_fbx') + representation = None + repr_format = None + if element.get('representation'): + # representation = element.get('representation') + + self.log.info(element.get("version")) + + valid_formats = ['fbx', 'abc'] + + repr_data = legacy_io.find_one({ + "type": "representation", + "parent": ObjectId(element.get("version")), + "name": {"$in": valid_formats} + }) + repr_format = repr_data.get('name') + + if not repr_data: + self.log.error( + f"No valid representation found for version " + f"{element.get('version')}") + continue + + representation = str(repr_data.get('_id')) + print(representation) + # This is to keep compatibility with old versions of the + # json format. 
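The new-format branch above resolves the element's published representation from its version id, preferring fbx and falling back to abc; the `elif` branches that continue right below keep reading the legacy `reference_fbx`/`reference_abc` keys for older layout JSON. In isolation the lookup looks roughly like this (the version id is a placeholder, and the result is checked before it is read):

```python
from bson.objectid import ObjectId
from openpype.pipeline import legacy_io

version_id = "62a1f0c0c0ffee0123456789"  # placeholder version ObjectId string

repr_data = legacy_io.find_one({
    "type": "representation",
    "parent": ObjectId(version_id),
    "name": {"$in": ["fbx", "abc"]},
})
if repr_data:
    representation = str(repr_data["_id"])
    repr_format = repr_data["name"]
```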
+ elif element.get('reference_fbx'): + representation = element.get('reference_fbx') + repr_format = 'fbx' elif element.get('reference_abc'): - reference = element.get('reference_abc') + representation = element.get('reference_abc') + repr_format = 'abc' # If reference is None, this element is skipped, as it cannot be # imported in Unreal - if not reference: + if not representation: continue instance_name = element.get('instance_name') skeleton = None - if reference not in loaded: - loaded.append(reference) + if representation not in repr_loaded: + repr_loaded.append(representation) family = element.get('family') loaders = loaders_from_representation( - all_loaders, reference) + all_loaders, representation) loader = None - if reference == element.get('reference_fbx'): + if repr_format == 'fbx': loader = self._get_fbx_loader(loaders, family) - elif reference == element.get('reference_abc'): + elif repr_format == 'abc': loader = self._get_abc_loader(loaders, family) if not loader: + self.log.error( + f"No valid loader found for {representation}") continue options = { @@ -459,7 +497,7 @@ class LayoutLoader(plugin.Loader): assets = load_container( loader, - reference, + representation, namespace=instance_name, options=options ) @@ -477,28 +515,36 @@ class LayoutLoader(plugin.Loader): instances = [ item for item in data - if (item.get('reference_fbx') == reference or - item.get('reference_abc') == reference)] + if ((item.get('version') and + item.get('version') == element.get('version')) or + item.get('reference_fbx') == representation or + item.get('reference_abc') == representation)] for instance in instances: - transform = instance.get('transform') + # transform = instance.get('transform') + transform = instance.get('transform_matrix') + basis = instance.get('basis') inst = instance.get('instance_name') actors = [] if family == 'model': actors, _ = self._process_family( - assets, 'StaticMesh', transform, sequence, inst) + assets, 'StaticMesh', transform, basis, + sequence, inst + ) elif family == 'rig': actors, bindings = self._process_family( - assets, 'SkeletalMesh', transform, sequence, inst) + assets, 'SkeletalMesh', transform, basis, + sequence, inst + ) actors_dict[inst] = actors bindings_dict[inst] = bindings if skeleton: - skeleton_dict[reference] = skeleton + skeleton_dict[representation] = skeleton else: - skeleton = skeleton_dict.get(reference) + skeleton = skeleton_dict.get(representation) animation_file = element.get('animation') @@ -572,6 +618,9 @@ class LayoutLoader(plugin.Loader): Returns: list(str): list of container content """ + data = get_current_project_settings() + create_sequences = data["unreal"]["level_sequences_for_layouts"] + # Create directory for asset and avalon container hierarchy = context.get('asset').get('data').get('parents') root = self.ASSET_ROOT @@ -592,81 +641,88 @@ class LayoutLoader(plugin.Loader): EditorAssetLibrary.make_directory(asset_dir) - # Create map for the shot, and create hierarchy of map. If the maps - # already exist, we will use them. 
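Sequence and master-map creation in `load` is now gated by the `level_sequences_for_layouts` project setting read a few lines above, so layouts can be loaded without generating any LevelSequence assets. The gate reduced to a minimal sketch, using the same setting path as the code above:

```python
from openpype.api import get_current_project_settings

data = get_current_project_settings()
create_sequences = data["unreal"]["level_sequences_for_layouts"]

master_level = None
shot = None
sequences = []
if create_sequences:
    # Build the map hierarchy and LevelSequence assets here;
    # otherwise only the layout level itself is created.
    pass
```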
- h_dir = hierarchy_dir_list[0] - h_asset = hierarchy[0] - master_level = f"{h_dir}/{h_asset}_map.{h_asset}_map" - if not EditorAssetLibrary.does_asset_exist(master_level): - EditorLevelLibrary.new_level(f"{h_dir}/{h_asset}_map") + master_level = None + shot = None + sequences = [] level = f"{asset_dir}/{asset}_map.{asset}_map" EditorLevelLibrary.new_level(f"{asset_dir}/{asset}_map") - EditorLevelLibrary.load_level(master_level) - EditorLevelUtils.add_level_to_world( - EditorLevelLibrary.get_editor_world(), - level, - unreal.LevelStreamingDynamic - ) - EditorLevelLibrary.save_all_dirty_levels() - EditorLevelLibrary.load_level(level) + if create_sequences: + # Create map for the shot, and create hierarchy of map. If the + # maps already exist, we will use them. + if hierarchy: + h_dir = hierarchy_dir_list[0] + h_asset = hierarchy[0] + master_level = f"{h_dir}/{h_asset}_map.{h_asset}_map" + if not EditorAssetLibrary.does_asset_exist(master_level): + EditorLevelLibrary.new_level(f"{h_dir}/{h_asset}_map") - # Get all the sequences in the hierarchy. It will create them, if - # they don't exist. - sequences = [] - frame_ranges = [] - for (h_dir, h) in zip(hierarchy_dir_list, hierarchy): - root_content = EditorAssetLibrary.list_assets( - h_dir, recursive=False, include_folder=False) + if master_level: + EditorLevelLibrary.load_level(master_level) + EditorLevelUtils.add_level_to_world( + EditorLevelLibrary.get_editor_world(), + level, + unreal.LevelStreamingDynamic + ) + EditorLevelLibrary.save_all_dirty_levels() + EditorLevelLibrary.load_level(level) - existing_sequences = [ - EditorAssetLibrary.find_asset_data(asset) - for asset in root_content - if EditorAssetLibrary.find_asset_data( - asset).get_class().get_name() == 'LevelSequence' - ] + # Get all the sequences in the hierarchy. It will create them, if + # they don't exist. 
+ frame_ranges = [] + for (h_dir, h) in zip(hierarchy_dir_list, hierarchy): + root_content = EditorAssetLibrary.list_assets( + h_dir, recursive=False, include_folder=False) - if not existing_sequences: - sequence, frame_range = self._generate_sequence(h, h_dir) + existing_sequences = [ + EditorAssetLibrary.find_asset_data(asset) + for asset in root_content + if EditorAssetLibrary.find_asset_data( + asset).get_class().get_name() == 'LevelSequence' + ] - sequences.append(sequence) - frame_ranges.append(frame_range) - else: - for e in existing_sequences: - sequences.append(e.get_asset()) - frame_ranges.append(( - e.get_asset().get_playback_start(), - e.get_asset().get_playback_end())) + if not existing_sequences: + sequence, frame_range = self._generate_sequence(h, h_dir) - shot = tools.create_asset( - asset_name=asset, - package_path=asset_dir, - asset_class=unreal.LevelSequence, - factory=unreal.LevelSequenceFactoryNew() - ) + sequences.append(sequence) + frame_ranges.append(frame_range) + else: + for e in existing_sequences: + sequences.append(e.get_asset()) + frame_ranges.append(( + e.get_asset().get_playback_start(), + e.get_asset().get_playback_end())) - # sequences and frame_ranges have the same length - for i in range(0, len(sequences) - 1): - self._set_sequence_hierarchy( - sequences[i], sequences[i + 1], - frame_ranges[i][1], - frame_ranges[i + 1][0], frame_ranges[i + 1][1], - [level]) + shot = tools.create_asset( + asset_name=asset, + package_path=asset_dir, + asset_class=unreal.LevelSequence, + factory=unreal.LevelSequenceFactoryNew() + ) - project_name = legacy_io.active_project() - data = get_asset_by_name(project_name, asset)["data"] - shot.set_display_rate( - unreal.FrameRate(data.get("fps"), 1.0)) - shot.set_playback_start(0) - shot.set_playback_end(data.get('clipOut') - data.get('clipIn') + 1) - self._set_sequence_hierarchy( - sequences[-1], shot, - frame_ranges[-1][1], - data.get('clipIn'), data.get('clipOut'), - [level]) + # sequences and frame_ranges have the same length + for i in range(0, len(sequences) - 1): + self._set_sequence_hierarchy( + sequences[i], sequences[i + 1], + frame_ranges[i][1], + frame_ranges[i + 1][0], frame_ranges[i + 1][1], + [level]) - EditorLevelLibrary.load_level(level) + project_name = legacy_io.active_project() + data = get_asset_by_name(project_name, asset)["data"] + shot.set_display_rate( + unreal.FrameRate(data.get("fps"), 1.0)) + shot.set_playback_start(0) + shot.set_playback_end(data.get('clipOut') - data.get('clipIn') + 1) + if sequences: + self._set_sequence_hierarchy( + sequences[-1], shot, + frame_ranges[-1][1], + data.get('clipIn'), data.get('clipOut'), + [level]) + + EditorLevelLibrary.load_level(level) loaded_assets = self._process(self.fname, asset_dir, shot) @@ -701,32 +757,47 @@ class LayoutLoader(plugin.Loader): for a in asset_content: EditorAssetLibrary.save_asset(a) - EditorLevelLibrary.load_level(master_level) + if master_level: + EditorLevelLibrary.load_level(master_level) return asset_content def update(self, container, representation): + data = get_current_project_settings() + create_sequences = data["unreal"]["level_sequences_for_layouts"] + ar = unreal.AssetRegistryHelpers.get_asset_registry() root = "/Game/OpenPype" asset_dir = container.get('namespace') - context = representation.get("context") - hierarchy = context.get('hierarchy').split("/") - h_dir = f"{root}/{hierarchy[0]}" - h_asset = hierarchy[0] - master_level = f"{h_dir}/{h_asset}_map.{h_asset}_map" + sequence = None + master_level = None - # # Create a 
temporary level to delete the layout level. - # EditorLevelLibrary.save_all_dirty_levels() - # EditorAssetLibrary.make_directory(f"{root}/tmp") - # tmp_level = f"{root}/tmp/temp_map" - # if not EditorAssetLibrary.does_asset_exist(f"{tmp_level}.temp_map"): - # EditorLevelLibrary.new_level(tmp_level) - # else: - # EditorLevelLibrary.load_level(tmp_level) + if create_sequences: + hierarchy = context.get('hierarchy').split("/") + h_dir = f"{root}/{hierarchy[0]}" + h_asset = hierarchy[0] + master_level = f"{h_dir}/{h_asset}_map.{h_asset}_map" + + filter = unreal.ARFilter( + class_names=["LevelSequence"], + package_paths=[asset_dir], + recursive_paths=False) + sequences = ar.get_assets(filter) + sequence = sequences[0].get_asset() + + prev_level = None + + if not master_level: + curr_level = unreal.LevelEditorSubsystem().get_current_level() + curr_level_path = curr_level.get_outer().get_path_name() + # If the level path does not start with "/Game/", the current + # level is a temporary, unsaved level. + if curr_level_path.startswith("/Game/"): + prev_level = curr_level_path # Get layout level filter = unreal.ARFilter( @@ -734,11 +805,6 @@ class LayoutLoader(plugin.Loader): package_paths=[asset_dir], recursive_paths=False) levels = ar.get_assets(filter) - filter = unreal.ARFilter( - class_names=["LevelSequence"], - package_paths=[asset_dir], - recursive_paths=False) - sequences = ar.get_assets(filter) layout_level = levels[0].get_editor_property('object_path') @@ -750,14 +816,14 @@ class LayoutLoader(plugin.Loader): for actor in actors: unreal.EditorLevelLibrary.destroy_actor(actor) - EditorLevelLibrary.save_current_level() + if create_sequences: + EditorLevelLibrary.save_current_level() EditorAssetLibrary.delete_directory(f"{asset_dir}/animations/") source_path = get_representation_path(representation) - loaded_assets = self._process( - source_path, asset_dir, sequences[0].get_asset()) + loaded_assets = self._process(source_path, asset_dir, sequence) data = { "representation": str(representation["_id"]), @@ -775,13 +841,20 @@ class LayoutLoader(plugin.Loader): for a in asset_content: EditorAssetLibrary.save_asset(a) - EditorLevelLibrary.load_level(master_level) + if master_level: + EditorLevelLibrary.load_level(master_level) + elif prev_level: + EditorLevelLibrary.load_level(prev_level) def remove(self, container): """ Delete the layout. First, check if the assets loaded with the layout are used by other layouts. If not, delete the assets. """ + data = get_current_project_settings() + create_sequences = data["unreal"]["level_sequences_for_layouts"] + + root = "/Game/OpenPype" path = Path(container.get("namespace")) containers = unreal_pipeline.ls() @@ -792,7 +865,7 @@ class LayoutLoader(plugin.Loader): # Check if the assets have been loaded by other layouts, and deletes # them if they haven't. - for asset in container.get('loaded_assets'): + for asset in eval(container.get('loaded_assets')): layouts = [ lc for lc in layout_containers if asset in lc.get('loaded_assets')] @@ -800,71 +873,87 @@ class LayoutLoader(plugin.Loader): if not layouts: EditorAssetLibrary.delete_directory(str(Path(asset).parent)) - # Remove the Level Sequence from the parent. - # We need to traverse the hierarchy from the master sequence to find - # the level sequence. 
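Both `update` and `remove` now discover the relevant LevelSequence and World assets through Unreal's asset registry instead of relying on remembered paths. The lookup pattern, reduced to a minimal sketch with a placeholder package path:

```python
import unreal

ar = unreal.AssetRegistryHelpers.get_asset_registry()
_filter = unreal.ARFilter(
    class_names=["LevelSequence"],
    package_paths=["/Game/OpenPype/ep01"],  # placeholder hierarchy root
    recursive_paths=False)
sequences = ar.get_assets(_filter)
master_sequence = sequences[0].get_asset() if sequences else None
```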
- root = "/Game/OpenPype" - namespace = container.get('namespace').replace(f"{root}/", "") - ms_asset = namespace.split('/')[0] - ar = unreal.AssetRegistryHelpers.get_asset_registry() - _filter = unreal.ARFilter( - class_names=["LevelSequence"], - package_paths=[f"{root}/{ms_asset}"], - recursive_paths=False) - sequences = ar.get_assets(_filter) - master_sequence = sequences[0].get_asset() - _filter = unreal.ARFilter( - class_names=["World"], - package_paths=[f"{root}/{ms_asset}"], - recursive_paths=False) - levels = ar.get_assets(_filter) - master_level = levels[0].get_editor_property('object_path') + # Delete the parent folder if there aren't any more + # layouts in it. + asset_content = EditorAssetLibrary.list_assets( + str(Path(asset).parent.parent), recursive=False, + include_folder=True + ) - sequences = [master_sequence] + if len(asset_content) == 0: + EditorAssetLibrary.delete_directory( + str(Path(asset).parent.parent)) - parent = None - for s in sequences: - tracks = s.get_master_tracks() - subscene_track = None - visibility_track = None - for t in tracks: - if t.get_class() == unreal.MovieSceneSubTrack.static_class(): - subscene_track = t - if (t.get_class() == - unreal.MovieSceneLevelVisibilityTrack.static_class()): - visibility_track = t - if subscene_track: - sections = subscene_track.get_sections() - for ss in sections: - if ss.get_sequence().get_name() == container.get('asset'): - parent = s - subscene_track.remove_section(ss) - break - sequences.append(ss.get_sequence()) - # Update subscenes indexes. - i = 0 - for ss in sections: - ss.set_row_index(i) - i += 1 + master_sequence = None + master_level = None + sequences = [] - if visibility_track: - sections = visibility_track.get_sections() - for ss in sections: - if (unreal.Name(f"{container.get('asset')}_map") - in ss.get_level_names()): - visibility_track.remove_section(ss) - # Update visibility sections indexes. - i = -1 - prev_name = [] - for ss in sections: - if prev_name != ss.get_level_names(): + if create_sequences: + # Remove the Level Sequence from the parent. + # We need to traverse the hierarchy from the master sequence to + # find the level sequence. + namespace = container.get('namespace').replace(f"{root}/", "") + ms_asset = namespace.split('/')[0] + ar = unreal.AssetRegistryHelpers.get_asset_registry() + _filter = unreal.ARFilter( + class_names=["LevelSequence"], + package_paths=[f"{root}/{ms_asset}"], + recursive_paths=False) + sequences = ar.get_assets(_filter) + master_sequence = sequences[0].get_asset() + _filter = unreal.ARFilter( + class_names=["World"], + package_paths=[f"{root}/{ms_asset}"], + recursive_paths=False) + levels = ar.get_assets(_filter) + master_level = levels[0].get_editor_property('object_path') + + sequences = [master_sequence] + + parent = None + for s in sequences: + tracks = s.get_master_tracks() + subscene_track = None + visibility_track = None + for t in tracks: + if t.get_class() == MovieSceneSubTrack.static_class(): + subscene_track = t + if (t.get_class() == + MovieSceneLevelVisibilityTrack.static_class()): + visibility_track = t + if subscene_track: + sections = subscene_track.get_sections() + for ss in sections: + if (ss.get_sequence().get_name() == + container.get('asset')): + parent = s + subscene_track.remove_section(ss) + break + sequences.append(ss.get_sequence()) + # Update subscenes indexes. 
+ i = 0 + for ss in sections: + ss.set_row_index(i) i += 1 - ss.set_row_index(i) - prev_name = ss.get_level_names() - if parent: - break - assert parent, "Could not find the parent sequence" + if visibility_track: + sections = visibility_track.get_sections() + for ss in sections: + if (unreal.Name(f"{container.get('asset')}_map") + in ss.get_level_names()): + visibility_track.remove_section(ss) + # Update visibility sections indexes. + i = -1 + prev_name = [] + for ss in sections: + if prev_name != ss.get_level_names(): + i += 1 + ss.set_row_index(i) + prev_name = ss.get_level_names() + if parent: + break + + assert parent, "Could not find the parent sequence" # Create a temporary level to delete the layout level. EditorLevelLibrary.save_all_dirty_levels() @@ -878,10 +967,9 @@ class LayoutLoader(plugin.Loader): # Delete the layout directory. EditorAssetLibrary.delete_directory(str(path)) - EditorLevelLibrary.load_level(master_level) - EditorAssetLibrary.delete_directory(f"{root}/tmp") - - EditorLevelLibrary.save_current_level() + if create_sequences: + EditorLevelLibrary.load_level(master_level) + EditorAssetLibrary.delete_directory(f"{root}/tmp") # Delete the parent folder if there aren't any more layouts in it. asset_content = EditorAssetLibrary.list_assets( diff --git a/openpype/hosts/webpublisher/__init__.py b/openpype/hosts/webpublisher/__init__.py index e69de29bb2..4e918c5d7d 100644 --- a/openpype/hosts/webpublisher/__init__.py +++ b/openpype/hosts/webpublisher/__init__.py @@ -0,0 +1,10 @@ +from .addon import ( + WebpublisherAddon, + WEBPUBLISHER_ROOT_DIR, +) + + +__all__ = ( + "WebpublisherAddon", + "WEBPUBLISHER_ROOT_DIR", +) diff --git a/openpype/hosts/webpublisher/addon.py b/openpype/hosts/webpublisher/addon.py new file mode 100644 index 0000000000..a64d74e62b --- /dev/null +++ b/openpype/hosts/webpublisher/addon.py @@ -0,0 +1,106 @@ +import os + +import click + +from openpype.modules import OpenPypeModule +from openpype.modules.interfaces import IHostAddon + +WEBPUBLISHER_ROOT_DIR = os.path.dirname(os.path.abspath(__file__)) + + +class WebpublisherAddon(OpenPypeModule, IHostAddon): + name = "webpublisher" + host_name = "webpublisher" + + def initialize(self, module_settings): + self.enabled = True + + def headless_publish(self, log, close_plugin_name=None, is_test=False): + """Runs publish in a opened host with a context. + + Close Python process at the end. + """ + + from openpype.pipeline.publish.lib import remote_publish + from .lib import get_webpublish_conn, publish_and_log + + if is_test: + remote_publish(log, close_plugin_name) + return + + dbcon = get_webpublish_conn() + _id = os.environ.get("BATCH_LOG_ID") + if not _id: + log.warning("Unable to store log records, " + "batch will be unfinished!") + return + + publish_and_log( + dbcon, _id, log, close_plugin_name=close_plugin_name + ) + + def cli(self, click_group): + click_group.add_command(cli_main) + + +@click.group( + WebpublisherAddon.name, + help="Webpublisher related commands.") +def cli_main(): + pass + + +@cli_main.command() +@click.argument("path") +@click.option("-u", "--user", help="User email address") +@click.option("-p", "--project", help="Project") +@click.option("-t", "--targets", help="Targets", default=None, + multiple=True) +def publish(project, path, user=None, targets=None): + """Start publishing (Inner command). + + Publish collects json from paths provided as an argument. + More than one path is allowed. 
+ """ + + from .publish_functions import cli_publish + + cli_publish(project, path, user, targets) + + +@cli_main.command() +@click.argument("path") +@click.option("-p", "--project", help="Project") +@click.option("-h", "--host", help="Host") +@click.option("-u", "--user", help="User email address") +@click.option("-t", "--targets", help="Targets", default=None, + multiple=True) +def publishfromapp(project, path, host, user=None, targets=None): + """Start publishing through application (Inner command). + + Publish collects json from paths provided as an argument. + More than one path is allowed. + """ + + from .publish_functions import cli_publish_from_app + + cli_publish_from_app(project, path, host, user, targets) + + +@cli_main.command() +@click.option("-e", "--executable", help="Executable") +@click.option("-u", "--upload_dir", help="Upload dir") +@click.option("-h", "--host", help="Host", default=None) +@click.option("-p", "--port", help="Port", default=None) +def webserver(executable, upload_dir, host=None, port=None): + """Start service for communication with Webpublish Front end. + + OP must be congigured on a machine, eg. OPENPYPE_MONGO filled AND + FTRACK_BOT_API_KEY provided with api key from Ftrack. + + Expect "pype.club" user created on Ftrack. + """ + + from .webserver_service import run_webserver + + run_webserver(executable, upload_dir, host, port) diff --git a/openpype/hosts/webpublisher/api/__init__.py b/openpype/hosts/webpublisher/api/__init__.py index 18e3a16cf5..afea838e2c 100644 --- a/openpype/hosts/webpublisher/api/__init__.py +++ b/openpype/hosts/webpublisher/api/__init__.py @@ -1,31 +1,23 @@ import os import logging -from pyblish import api as pyblish -import openpype.hosts.webpublisher -from openpype.pipeline import legacy_io +import pyblish.api + +from openpype.host import HostBase +from openpype.hosts.webpublisher import WEBPUBLISHER_ROOT_DIR log = logging.getLogger("openpype.hosts.webpublisher") -HOST_DIR = os.path.dirname(os.path.abspath( - openpype.hosts.webpublisher.__file__)) -PLUGINS_DIR = os.path.join(HOST_DIR, "plugins") -PUBLISH_PATH = os.path.join(PLUGINS_DIR, "publish") +class WebpublisherHost(HostBase): + name = "webpublisher" -def install(): - print("Installing Pype config...") + def install(self): + print("Installing Pype config...") + pyblish.api.register_host(self.name) - pyblish.register_plugin_path(PUBLISH_PATH) - log.info(PUBLISH_PATH) - - legacy_io.install() - - -def uninstall(): - pyblish.deregister_plugin_path(PUBLISH_PATH) - - -# to have required methods for interface -def ls(): - pass + publish_plugin_dir = os.path.join( + WEBPUBLISHER_ROOT_DIR, "plugins", "publish" + ) + pyblish.api.register_plugin_path(publish_plugin_dir) + self.log.info(publish_plugin_dir) diff --git a/openpype/lib/remote_publish.py b/openpype/hosts/webpublisher/lib.py similarity index 69% rename from openpype/lib/remote_publish.py rename to openpype/hosts/webpublisher/lib.py index d7884d0200..4bc3f1db80 100644 --- a/openpype/lib/remote_publish.py +++ b/openpype/hosts/webpublisher/lib.py @@ -1,14 +1,18 @@ import os from datetime import datetime import collections +import json from bson.objectid import ObjectId import pyblish.util import pyblish.api -from openpype.lib.mongo import OpenPypeMongoConnection -from openpype.lib.plugin_tools import parse_json +from openpype.client.mongo import OpenPypeMongoConnection +from openpype.settings import get_project_settings +from openpype.lib import Logger +from openpype.lib.profiles_filtering import filter_profiles +from 
openpype.pipeline.publish.lib import find_close_plugin ERROR_STATUS = "error" IN_PROGRESS_STATUS = "in_progress" @@ -17,21 +21,51 @@ SENT_REPROCESSING_STATUS = "sent_for_reprocessing" FINISHED_REPROCESS_STATUS = "republishing_finished" FINISHED_OK_STATUS = "finished_ok" +log = Logger.get_logger(__name__) -def headless_publish(log, close_plugin_name=None, is_test=False): - """Runs publish in a opened host with a context and closes Python process. + +def parse_json(path): + """Parses json file at 'path' location + + Returns: + (dict) or None if unparsable + Raises: + AsssertionError if 'path' doesn't exist """ - if not is_test: - dbcon = get_webpublish_conn() - _id = os.environ.get("BATCH_LOG_ID") - if not _id: - log.warning("Unable to store log records, " - "batch will be unfinished!") - return + path = path.strip('\"') + assert os.path.isfile(path), ( + "Path to json file doesn't exist. \"{}\"".format(path) + ) + data = None + with open(path, "r") as json_file: + try: + data = json.load(json_file) + except Exception as exc: + log.error( + "Error loading json: {} - Exception: {}".format(path, exc) + ) + return data - publish_and_log(dbcon, _id, log, close_plugin_name=close_plugin_name) + +def get_batch_asset_task_info(ctx): + """Parses context data from webpublisher's batch metadata + + Returns: + (tuple): asset, task_name (Optional), task_type + """ + task_type = "default_task_type" + task_name = None + asset = None + + if ctx["type"] == "task": + items = ctx["path"].split('/') + asset = items[-2] + task_name = ctx["name"] + task_type = ctx["attributes"]["type"] else: - publish(log, close_plugin_name) + asset = ctx["name"] + + return asset, task_name, task_type def get_webpublish_conn(): @@ -60,43 +94,13 @@ def start_webpublish_log(dbcon, batch_id, user): }).inserted_id -def publish(log, close_plugin_name=None, raise_error=False): - """Loops through all plugins, logs to console. Used for tests. - - Args: - log (OpenPypeLogger) - close_plugin_name (str): name of plugin with responsibility to - close host app - """ - # Error exit as soon as any error occurs. - error_format = "Failed {plugin.__name__}: {error} -- {error.traceback}" - - close_plugin = _get_close_plugin(close_plugin_name, log) - - for result in pyblish.util.publish_iter(): - for record in result["records"]: - log.info("{}: {}".format( - result["plugin"].label, record.msg)) - - if result["error"]: - error_message = error_format.format(**result) - log.error(error_message) - if close_plugin: # close host app explicitly after error - context = pyblish.api.Context() - close_plugin().process(context) - if raise_error: - # Fatal Error is because of Deadline - error_message = "Fatal Error: " + error_format.format(**result) - raise RuntimeError(error_message) - - def publish_and_log(dbcon, _id, log, close_plugin_name=None, batch_id=None): """Loops through all plugins, logs ok and fails into OP DB. 
Args: dbcon (OpenPypeMongoConnection) _id (str) - id of current job in DB - log (OpenPypeLogger) + log (openpype.lib.Logger) batch_id (str) - id sent from frontend close_plugin_name (str): name of plugin with responsibility to close host app @@ -105,7 +109,7 @@ def publish_and_log(dbcon, _id, log, close_plugin_name=None, batch_id=None): error_format = "Failed {plugin.__name__}: {error} -- {error.traceback}\n" error_format += "-" * 80 + "\n" - close_plugin = _get_close_plugin(close_plugin_name, log) + close_plugin = find_close_plugin(close_plugin_name, log) if isinstance(_id, str): _id = ObjectId(_id) @@ -175,14 +179,12 @@ def publish_and_log(dbcon, _id, log, close_plugin_name=None, batch_id=None): ) -def fail_batch(_id, batches_in_progress, dbcon): - """Set current batch as failed as there are some stuck batches.""" - running_batches = [str(batch["_id"]) - for batch in batches_in_progress - if batch["_id"] != _id] - msg = "There are still running batches {}\n". \ - format("\n".join(running_batches)) - msg += "Ask admin to check them and reprocess current batch" +def fail_batch(_id, dbcon, msg): + """Set current batch as failed as there is some problem. + + Raises: + ValueError + """ dbcon.update_one( {"_id": _id}, {"$set": @@ -226,16 +228,6 @@ def find_variant_key(application_manager, host): return found_variant_key -def _get_close_plugin(close_plugin_name, log): - if close_plugin_name: - plugins = pyblish.api.discover() - for plugin in plugins: - if plugin.__name__ == close_plugin_name: - return plugin - - log.debug("Close plugin not found, app might not close.") - - def get_task_data(batch_dir): """Return parsed data from first task manifest.json @@ -259,3 +251,19 @@ def get_task_data(batch_dir): "Cannot parse batch meta in {} folder".format(task_data)) return task_data + + +def get_timeout(project_name, host_name, task_type): + """Returns timeout(seconds) from Setting profile.""" + filter_data = { + "task_types": task_type, + "hosts": host_name + } + timeout_profiles = (get_project_settings(project_name)["webpublisher"] + ["timeout_profiles"]) + matching_item = filter_profiles(timeout_profiles, filter_data) + timeout = 3600 + if matching_item: + timeout = matching_item["timeout"] + + return timeout diff --git a/openpype/hosts/webpublisher/plugins/publish/collect_batch_data.py b/openpype/hosts/webpublisher/plugins/publish/collect_batch_data.py index 9ff779636a..eb2737b276 100644 --- a/openpype/hosts/webpublisher/plugins/publish/collect_batch_data.py +++ b/openpype/hosts/webpublisher/plugins/publish/collect_batch_data.py @@ -13,12 +13,13 @@ import os import pyblish.api -from openpype.lib.plugin_tools import ( - parse_json, - get_batch_asset_task_info -) -from openpype.lib.remote_publish import get_webpublish_conn, IN_PROGRESS_STATUS from openpype.pipeline import legacy_io +from openpype_modules.webpublisher.lib import ( + parse_json, + get_batch_asset_task_info, + get_webpublish_conn, + IN_PROGRESS_STATUS +) class CollectBatchData(pyblish.api.ContextPlugin): diff --git a/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py b/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py index 20e277d794..278a102f9d 100644 --- a/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py +++ b/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py @@ -23,10 +23,8 @@ from openpype.lib import ( get_ffprobe_streams, convert_ffprobe_fps_value, ) -from openpype.lib.plugin_tools import ( - parse_json, - get_subset_name_with_asset_doc -) +from 
openpype.pipeline.create import get_subset_name +from openpype_modules.webpublisher.lib import parse_json class CollectPublishedFiles(pyblish.api.ContextPlugin): @@ -80,9 +78,14 @@ class CollectPublishedFiles(pyblish.api.ContextPlugin): is_sequence, extension.replace(".", '')) - subset_name = get_subset_name_with_asset_doc( - family, variant, task_name, asset_doc, - project_name=project_name, host_name="webpublisher" + subset_name = get_subset_name( + family, + variant, + task_name, + asset_doc, + project_name=project_name, + host_name="webpublisher", + project_settings=context.data["project_settings"] ) version = self._get_next_version( project_name, asset_doc, subset_name diff --git a/openpype/hosts/webpublisher/plugins/publish/collect_tvpaint_instances.py b/openpype/hosts/webpublisher/plugins/publish/collect_tvpaint_instances.py index 92f581be5f..948e86c23e 100644 --- a/openpype/hosts/webpublisher/plugins/publish/collect_tvpaint_instances.py +++ b/openpype/hosts/webpublisher/plugins/publish/collect_tvpaint_instances.py @@ -10,7 +10,7 @@ import re import copy import pyblish.api -from openpype.lib import get_subset_name_with_asset_doc +from openpype.pipeline.create import get_subset_name class CollectTVPaintInstances(pyblish.api.ContextPlugin): @@ -47,13 +47,14 @@ class CollectTVPaintInstances(pyblish.api.ContextPlugin): new_instances = [] # Workfile instance - workfile_subset_name = get_subset_name_with_asset_doc( + workfile_subset_name = get_subset_name( self.workfile_family, self.workfile_variant, task_name, asset_doc, project_name, - host_name + host_name, + project_settings=context.data["project_settings"] ) workfile_instance = self._create_workfile_instance( context, workfile_subset_name @@ -61,13 +62,14 @@ class CollectTVPaintInstances(pyblish.api.ContextPlugin): new_instances.append(workfile_instance) # Review instance - review_subset_name = get_subset_name_with_asset_doc( + review_subset_name = get_subset_name( self.review_family, self.review_variant, task_name, asset_doc, project_name, - host_name + host_name, + project_settings=context.data["project_settings"] ) review_instance = self._create_review_instance( context, review_subset_name @@ -114,14 +116,15 @@ class CollectTVPaintInstances(pyblish.api.ContextPlugin): "family": "render" } - subset_name = get_subset_name_with_asset_doc( + subset_name = get_subset_name( self.render_pass_family, render_pass, task_name, asset_doc, project_name, host_name, - dynamic_data=dynamic_data + dynamic_data=dynamic_data, + project_settings=context.data["project_settings"] ) instance = self._create_render_pass_instance( @@ -137,14 +140,15 @@ class CollectTVPaintInstances(pyblish.api.ContextPlugin): # Override family for subset name "family": "render" } - subset_name = get_subset_name_with_asset_doc( + subset_name = get_subset_name( self.render_layer_family, variant, task_name, asset_doc, project_name, host_name, - dynamic_data=dynamic_data + dynamic_data=dynamic_data, + project_settings=context.data["project_settings"] ) instance = self._create_render_layer_instance( context, layers, subset_name diff --git a/openpype/hosts/webpublisher/plugins/publish/collect_tvpaint_workfile_data.py b/openpype/hosts/webpublisher/plugins/publish/collect_tvpaint_workfile_data.py index f0f29260a2..b5f8ed9c8f 100644 --- a/openpype/hosts/webpublisher/plugins/publish/collect_tvpaint_workfile_data.py +++ b/openpype/hosts/webpublisher/plugins/publish/collect_tvpaint_workfile_data.py @@ -16,11 +16,11 @@ import uuid import json import shutil import pyblish.api -from 
openpype.lib.plugin_tools import parse_json from openpype.hosts.tvpaint.worker import ( SenderTVPaintCommands, CollectSceneData ) +from openpype_modules.webpublisher.lib import parse_json class CollectTVPaintWorkfileData(pyblish.api.ContextPlugin): diff --git a/openpype/hosts/webpublisher/publish_functions.py b/openpype/hosts/webpublisher/publish_functions.py new file mode 100644 index 0000000000..83f53ced68 --- /dev/null +++ b/openpype/hosts/webpublisher/publish_functions.py @@ -0,0 +1,205 @@ +import os +import time +import pyblish.api +import pyblish.util + +from openpype.lib import Logger +from openpype.lib.applications import ( + ApplicationManager, + get_app_environments_for_context, +) +from openpype.pipeline import install_host +from openpype.hosts.webpublisher.api import WebpublisherHost + +from .lib import ( + get_batch_asset_task_info, + get_webpublish_conn, + start_webpublish_log, + publish_and_log, + fail_batch, + find_variant_key, + get_task_data, + get_timeout, + IN_PROGRESS_STATUS +) + + +def cli_publish(project_name, batch_path, user_email, targets): + """Start headless publishing. + + Used to publish rendered assets, workfiles etc via Webpublisher. + Eventually should be yanked out to Webpublisher cli. + + Publish use json from passed paths argument. + + Args: + project_name (str): project to publish (only single context is + expected per call of remotepublish + batch_path (str): Path batch folder. Contains subfolders with + resources (workfile, another subfolder 'renders' etc.) + user_email (string): email address for webpublisher - used to + find Ftrack user with same email + targets (list): Pyblish targets + (to choose validator for example) + + Raises: + RuntimeError: When there is no path to process. + """ + + if not batch_path: + raise RuntimeError("No publish paths specified") + + log = Logger.get_logger("remotepublish") + log.info("remotepublish command") + + # Register target and host + webpublisher_host = WebpublisherHost() + + os.environ["OPENPYPE_PUBLISH_DATA"] = batch_path + os.environ["AVALON_PROJECT"] = project_name + os.environ["AVALON_APP"] = webpublisher_host.name + os.environ["USER_EMAIL"] = user_email + os.environ["HEADLESS_PUBLISH"] = 'true' # to use in app lib + + if targets: + if isinstance(targets, str): + targets = [targets] + for target in targets: + pyblish.api.register_target(target) + + install_host(webpublisher_host) + + log.info("Running publish ...") + + _, batch_id = os.path.split(batch_path) + dbcon = get_webpublish_conn() + _id = start_webpublish_log(dbcon, batch_id, user_email) + + task_data = get_task_data(batch_path) + if not task_data["context"]: + msg = "Batch manifest must contain context data" + msg += "Create new batch and set context properly." + fail_batch(_id, dbcon, msg) + + publish_and_log(dbcon, _id, log, batch_id=batch_id) + + log.info("Publish finished.") + + +def cli_publish_from_app( + project_name, batch_path, host_name, user_email, targets +): + """Opens installed variant of 'host' and run remote publish there. + + Eventually should be yanked out to Webpublisher cli. + + Currently implemented and tested for Photoshop where customer + wants to process uploaded .psd file and publish collected layers + from there. Triggered by Webpublisher. + + Checks if no other batches are running (status =='in_progress). If + so, it sleeps for SLEEP (this is separate process), + waits for WAIT_FOR seconds altogether. + + Requires installed host application on the machine. + + Runs publish process as user would, in automatic fashion. 
+ + Args: + project_name (str): project to publish (only single context is + expected per call of remotepublish + batch_path (str): Path batch folder. Contains subfolders with + resources (workfile, another subfolder 'renders' etc.) + host_name (str): 'photoshop' + user_email (string): email address for webpublisher - used to + find Ftrack user with same email + targets (list): Pyblish targets + (to choose validator for example) + """ + + log = Logger.get_logger("RemotePublishFromApp") + + log.info("remotepublishphotoshop command") + + task_data = get_task_data(batch_path) + + workfile_path = os.path.join(batch_path, + task_data["task"], + task_data["files"][0]) + + print("workfile_path {}".format(workfile_path)) + + batch_id = task_data["batch"] + dbcon = get_webpublish_conn() + # safer to start logging here, launch might be broken altogether + _id = start_webpublish_log(dbcon, batch_id, user_email) + + batches_in_progress = list(dbcon.find({"status": IN_PROGRESS_STATUS})) + if len(batches_in_progress) > 1: + running_batches = [str(batch["_id"]) + for batch in batches_in_progress + if batch["_id"] != _id] + msg = "There are still running batches {}\n". \ + format("\n".join(running_batches)) + msg += "Ask admin to check them and reprocess current batch" + fail_batch(_id, dbcon, msg) + + if not task_data["context"]: + msg = "Batch manifest must contain context data" + msg += "Create new batch and set context properly." + fail_batch(_id, dbcon, msg) + + asset_name, task_name, task_type = get_batch_asset_task_info( + task_data["context"]) + + application_manager = ApplicationManager() + found_variant_key = find_variant_key(application_manager, host_name) + app_name = "{}/{}".format(host_name, found_variant_key) + + # must have for proper launch of app + env = get_app_environments_for_context( + project_name, + asset_name, + task_name, + app_name + ) + print("env:: {}".format(env)) + os.environ.update(env) + + os.environ["OPENPYPE_PUBLISH_DATA"] = batch_path + # must pass identifier to update log lines for a batch + os.environ["BATCH_LOG_ID"] = str(_id) + os.environ["HEADLESS_PUBLISH"] = 'true' # to use in app lib + os.environ["USER_EMAIL"] = user_email + + pyblish.api.register_host(host_name) + if targets: + if isinstance(targets, str): + targets = [targets] + current_targets = os.environ.get("PYBLISH_TARGETS", "").split( + os.pathsep) + for target in targets: + current_targets.append(target) + + os.environ["PYBLISH_TARGETS"] = os.pathsep.join( + set(current_targets)) + + data = { + "last_workfile_path": workfile_path, + "start_last_workfile": True, + "project_name": project_name, + "asset_name": asset_name, + "task_name": task_name + } + + launched_app = application_manager.launch(app_name, **data) + + timeout = get_timeout(project_name, host_name, task_type) + + time_start = time.time() + while launched_app.poll() is None: + time.sleep(0.5) + if time.time() - time_start > timeout: + launched_app.terminate() + msg = "Timeout reached" + fail_batch(_id, dbcon, msg) diff --git a/openpype/hosts/webpublisher/webserver_service/__init__.py b/openpype/hosts/webpublisher/webserver_service/__init__.py new file mode 100644 index 0000000000..73111d286e --- /dev/null +++ b/openpype/hosts/webpublisher/webserver_service/__init__.py @@ -0,0 +1,6 @@ +from .webserver import run_webserver + + +__all__ = ( + "run_webserver", +) diff --git a/openpype/hosts/webpublisher/webserver_service/webpublish_routes.py b/openpype/hosts/webpublisher/webserver_service/webpublish_routes.py index 4cb3cee8e1..4039d2c8ec 100644 
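For reference, a minimal sketch of how the new get_timeout() helper introduced above might be called; the project, host and task-type values are placeholders, and the lookup assumes the "webpublisher" project settings contain a "timeout_profiles" list whose items carry "task_types", "hosts" and "timeout" keys, as the helper's implementation implies:

from openpype_modules.webpublisher.lib import get_timeout

# Resolve how long cli_publish_from_app() should wait for the launched
# host before terminating it and failing the batch.
timeout = get_timeout(
    "demo_project",   # placeholder project name
    "photoshop",      # host name used by the publish-from-app flow
    "Compositing"     # task type resolved from the batch context
)
# Without a matching profile the helper falls back to 3600 seconds.
print("Waiting up to {} seconds for the host to finish".format(timeout))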
--- a/openpype/hosts/webpublisher/webserver_service/webpublish_routes.py +++ b/openpype/hosts/webpublisher/webserver_service/webpublish_routes.py @@ -11,19 +11,18 @@ from openpype.client import ( get_projects, get_assets, ) -from openpype.lib import ( - OpenPypeMongoConnection, - PypeLogger, -) -from openpype.lib.remote_publish import ( +from openpype.lib import Logger +from openpype.settings import get_project_settings +from openpype_modules.webserver.base_routes import RestApiEndpoint +from openpype_modules.webpublisher import WebpublisherAddon +from openpype_modules.webpublisher.lib import ( + get_webpublish_conn, get_task_data, ERROR_STATUS, REPROCESS_STATUS ) -from openpype.settings import get_project_settings -from openpype_modules.webserver.base_routes import RestApiEndpoint -log = PypeLogger.get_logger("WebpublishRoutes") +log = Logger.get_logger("WebpublishRoutes") class ResourceRestApiEndpoint(RestApiEndpoint): @@ -79,9 +78,7 @@ class WebpublishRestApiResource(JsonApiResource): """Resource carrying OP DB connection for storing batch info into DB.""" def __init__(self): - mongo_client = OpenPypeMongoConnection.get_mongo_client() - database_name = os.environ["OPENPYPE_DATABASE_NAME"] - self.dbcon = mongo_client[database_name]["webpublishes"] + self.dbcon = get_webpublish_conn() class ProjectsEndpoint(ResourceRestApiEndpoint): @@ -217,7 +214,7 @@ class BatchPublishEndpoint(WebpublishApiEndpoint): # TVPaint filter { "extensions": [".tvpp"], - "command": "remotepublish", + "command": "publish", "arguments": { "targets": ["tvpaint_worker"] }, @@ -226,13 +223,13 @@ class BatchPublishEndpoint(WebpublishApiEndpoint): # Photoshop filter { "extensions": [".psd", ".psb"], - "command": "remotepublishfromapp", + "command": "publishfromapp", "arguments": { - # Command 'remotepublishfromapp' requires --host argument + # Command 'publishfromapp' requires --host argument "host": "photoshop", # Make sure targets are set to None for cases that default # would change - # - targets argument is not used in 'remotepublishfromapp' + # - targets argument is not used in 'publishfromapp' "targets": ["remotepublish"] }, # does publish need to be handled by a queue, eg. 
only @@ -244,7 +241,7 @@ class BatchPublishEndpoint(WebpublishApiEndpoint): batch_dir = os.path.join(self.resource.upload_dir, content["batch"]) # Default command and arguments - command = "remotepublish" + command = "publish" add_args = { # All commands need 'project' and 'user' "project": content["project_name"], @@ -275,6 +272,8 @@ class BatchPublishEndpoint(WebpublishApiEndpoint): args = [ openpype_app, + "module", + WebpublisherAddon.name, command, batch_dir ] diff --git a/openpype/hosts/webpublisher/webserver_service/webserver_cli.py b/openpype/hosts/webpublisher/webserver_service/webserver.py similarity index 90% rename from openpype/hosts/webpublisher/webserver_service/webserver_cli.py rename to openpype/hosts/webpublisher/webserver_service/webserver.py index 1ed8f22b2c..093b53d9d3 100644 --- a/openpype/hosts/webpublisher/webserver_service/webserver_cli.py +++ b/openpype/hosts/webpublisher/webserver_service/webserver.py @@ -6,7 +6,15 @@ import requests import json import subprocess -from openpype.lib import PypeLogger +from openpype.client import OpenPypeMongoConnection +from openpype.modules import ModulesManager +from openpype.lib import Logger + +from openpype_modules.webpublisher.lib import ( + ERROR_STATUS, + REPROCESS_STATUS, + SENT_REPROCESSING_STATUS +) from .webpublish_routes import ( RestApiResource, @@ -20,32 +28,29 @@ from .webpublish_routes import ( TaskPublishEndpoint, UserReportEndpoint ) -from openpype.lib.remote_publish import ( - ERROR_STATUS, - REPROCESS_STATUS, - SENT_REPROCESSING_STATUS -) + +log = Logger.get_logger("webserver_gui") -log = PypeLogger.get_logger("webserver_gui") - - -def run_webserver(*args, **kwargs): +def run_webserver(executable, upload_dir, host=None, port=None): """Runs webserver in command line, adds routes.""" - from openpype.modules import ModulesManager + + if not host: + host = "localhost" + if not port: + port = 8079 manager = ModulesManager() webserver_module = manager.modules_by_name["webserver"] - host = kwargs.get("host") or "localhost" - port = kwargs.get("port") or 8079 + server_manager = webserver_module.create_new_server_manager(port, host) webserver_url = server_manager.url # queue for remotepublishfromapp tasks studio_task_queue = collections.deque() resource = RestApiResource(server_manager, - upload_dir=kwargs["upload_dir"], - executable=kwargs["executable"], + upload_dir=upload_dir, + executable=executable, studio_task_queue=studio_task_queue) projects_endpoint = ProjectsEndpoint(resource) server_manager.add_route( @@ -110,7 +115,7 @@ def run_webserver(*args, **kwargs): last_reprocessed = time.time() while True: if time.time() - last_reprocessed > 20: - reprocess_failed(kwargs["upload_dir"], webserver_url) + reprocess_failed(upload_dir, webserver_url) last_reprocessed = time.time() if studio_task_queue: args = studio_task_queue.popleft() @@ -121,8 +126,6 @@ def run_webserver(*args, **kwargs): def reprocess_failed(upload_dir, webserver_url): # log.info("check_reprocesable_records") - from openpype.lib import OpenPypeMongoConnection - mongo_client = OpenPypeMongoConnection.get_mongo_client() database_name = os.environ["OPENPYPE_DATABASE_NAME"] dbcon = mongo_client[database_name]["webpublishes"] diff --git a/openpype/lib/__init__.py b/openpype/lib/__init__.py index fb52a9aca7..adb857a056 100644 --- a/openpype/lib/__init__.py +++ b/openpype/lib/__init__.py @@ -63,7 +63,10 @@ from .execute import ( path_to_subprocess_arg, CREATE_NO_WINDOW ) -from .log import PypeLogger, timeit +from .log import ( + Logger, + PypeLogger, +) 
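The PypeLogger to Logger rename above recurs across the whole patch; new code is expected to use openpype.lib.Logger, while PypeLogger stays exported, apparently only for backwards compatibility. A one-line usage sketch:

from openpype.lib import Logger

# Same pattern as the Logger.get_logger(...) calls introduced in this patch.
log = Logger.get_logger(__name__)
log.info("webpublisher webserver routes registered")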
from .path_templates import ( merge_dict, @@ -83,8 +86,9 @@ from .anatomy import ( Anatomy ) -from .config import ( +from .dateutils import ( get_datetime_data, + get_timestamp, get_formatted_current_time ) @@ -111,6 +115,7 @@ from .transcoding import ( get_ffmpeg_codec_args, get_ffmpeg_format_args, convert_ffprobe_fps_value, + convert_ffprobe_fps_to_float, ) from .avalon_context import ( CURRENT_DOC_SCHEMAS, @@ -184,8 +189,6 @@ from .plugin_tools import ( filter_pyblish_plugins, set_plugin_attributes_from_settings, source_hash, - get_unique_layer_name, - get_background_layers, ) from .path_tools import ( @@ -283,6 +286,7 @@ __all__ = [ "get_ffmpeg_codec_args", "get_ffmpeg_format_args", "convert_ffprobe_fps_value", + "convert_ffprobe_fps_to_float", "CURRENT_DOC_SCHEMAS", "PROJECT_NAME_ALLOWED_SYMBOLS", @@ -348,8 +352,6 @@ __all__ = [ "filter_pyblish_plugins", "set_plugin_attributes_from_settings", "source_hash", - "get_unique_layer_name", - "get_background_layers", "create_hard_link", "version_up", @@ -370,13 +372,13 @@ __all__ = [ "get_datetime_data", "get_formatted_current_time", + "Logger", "PypeLogger", + "get_default_components", "validate_mongo_connection", "OpenPypeMongoConnection", - "timeit", - "is_overlapping_otio_ranges", "otio_range_with_handles", "convert_to_padded_path", diff --git a/openpype/lib/applications.py b/openpype/lib/applications.py index f46197e15f..e249ae4f1c 100644 --- a/openpype/lib/applications.py +++ b/openpype/lib/applications.py @@ -24,15 +24,9 @@ from openpype.settings.constants import ( METADATA_KEYS, M_DYNAMIC_KEY_LABEL ) -from . import PypeLogger +from .log import Logger from .profiles_filtering import filter_profiles from .local_settings import get_openpype_username -from .avalon_context import ( - get_workdir_data, - get_workdir_with_workdir_data, - get_workfile_template_key, - get_last_workfile -) from .python_module_tools import ( modules_from_path, @@ -144,7 +138,7 @@ def get_logger(): """Global lib.applications logger getter.""" global _logger if _logger is None: - _logger = PypeLogger.get_logger(__name__) + _logger = Logger.get_logger(__name__) return _logger @@ -379,7 +373,7 @@ class ApplicationManager: """ def __init__(self, system_settings=None): - self.log = PypeLogger.get_logger(self.__class__.__name__) + self.log = Logger.get_logger(self.__class__.__name__) self.app_groups = {} self.applications = {} @@ -475,6 +469,19 @@ class ApplicationManager: for tool in group: self.tools[tool.full_name] = tool + def find_latest_available_variant_for_group(self, group_name): + group = self.app_groups.get(group_name) + if group is None or not group.enabled: + return None + + output = None + for _, variant in reversed(sorted(group.variants.items())): + executable = variant.find_executable() + if executable: + output = variant + break + return output + def launch(self, app_name, **data): """Launch procedure. @@ -741,7 +748,7 @@ class LaunchHook: Always should be called """ - self.log = PypeLogger().get_logger(self.__class__.__name__) + self.log = Logger.get_logger(self.__class__.__name__) self.launch_context = launch_context @@ -883,7 +890,7 @@ class ApplicationLaunchContext: # Logger logger_name = "{}-{}".format(self.__class__.__name__, self.app_name) - self.log = PypeLogger.get_logger(logger_name) + self.log = Logger.get_logger(logger_name) self.executable = executable @@ -956,6 +963,63 @@ class ApplicationLaunchContext: ) self.kwargs["env"] = value + def _collect_addons_launch_hook_paths(self): + """Helper to collect application launch hooks from addons. 
+ + Module have to have implemented 'get_launch_hook_paths' method which + can expect appliction as argument or nothing. + + Returns: + List[str]: Paths to launch hook directories. + """ + + expected_types = (list, tuple, set) + + output = [] + for module in self.modules_manager.get_enabled_modules(): + # Skip module if does not have implemented 'get_launch_hook_paths' + func = getattr(module, "get_launch_hook_paths", None) + if func is None: + continue + + func = module.get_launch_hook_paths + if hasattr(inspect, "signature"): + sig = inspect.signature(func) + expect_args = len(sig.parameters) > 0 + else: + expect_args = len(inspect.getargspec(func)[0]) > 0 + + # Pass application argument if method expect it. + try: + if expect_args: + hook_paths = func(self.application) + else: + hook_paths = func() + except Exception: + self.log.warning( + "Failed to call 'get_launch_hook_paths'", + exc_info=True + ) + continue + + if not hook_paths: + continue + + # Convert string to list + if isinstance(hook_paths, six.string_types): + hook_paths = [hook_paths] + + # Skip invalid types + if not isinstance(hook_paths, expected_types): + self.log.warning(( + "Result of `get_launch_hook_paths`" + " has invalid type {}. Expected {}" + ).format(type(hook_paths), expected_types)) + continue + + output.extend(hook_paths) + return output + def paths_to_launch_hooks(self): """Directory paths where to look for launch hooks.""" # This method has potential to be part of application manager (maybe). @@ -963,32 +1027,24 @@ class ApplicationLaunchContext: # TODO load additional studio paths from settings import openpype - pype_dir = os.path.dirname(os.path.abspath(openpype.__file__)) + openpype_dir = os.path.dirname(os.path.abspath(openpype.__file__)) - # --- START: Backwards compatibility --- - hooks_dir = os.path.join(pype_dir, "hooks") + global_hooks_dir = os.path.join(openpype_dir, "hooks") - subfolder_names = ["global"] - if self.host_name: - subfolder_names.append(self.host_name) - for subfolder_name in subfolder_names: - path = os.path.join(hooks_dir, subfolder_name) - if ( - os.path.exists(path) - and os.path.isdir(path) - and path not in paths - ): - paths.append(path) - # --- END: Backwards compatibility --- - - subfolders_list = [ - ["hooks"] + hooks_dirs = [ + global_hooks_dir ] if self.host_name: - subfolders_list.append(["hosts", self.host_name, "hooks"]) + # If host requires launch hooks and is module then launch hooks + # should be collected using 'collect_launch_hook_paths' + # - module have to implement 'get_launch_hook_paths' + host_module = self.modules_manager.get_host_module(self.host_name) + if not host_module: + hooks_dirs.append(os.path.join( + openpype_dir, "hosts", self.host_name, "hooks" + )) - for subfolders in subfolders_list: - path = os.path.join(pype_dir, *subfolders) + for path in hooks_dirs: if ( os.path.exists(path) and os.path.isdir(path) @@ -997,7 +1053,7 @@ class ApplicationLaunchContext: paths.append(path) # Load modules paths - paths.extend(self.modules_manager.collect_launch_hook_paths()) + paths.extend(self._collect_addons_launch_hook_paths()) return paths @@ -1310,6 +1366,7 @@ def get_app_environments_for_context( dict: Environments for passed context and application. 
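The _collect_addons_launch_hook_paths() helper above lets any enabled module contribute launch-hook directories by implementing get_launch_hook_paths, either with no arguments or with the application as a single argument, and returning a string or a list/tuple/set of paths. A hypothetical addon-side implementation (class name and directory layout are invented for illustration):

import os


class MyStudioAddon(object):
    """Hypothetical addon; only the launch-hook getter matters here."""

    def get_launch_hook_paths(self, application):
        # Called with the Application object because the method accepts
        # one argument besides 'self'.
        hooks_root = os.path.join(os.path.dirname(__file__), "launch_hooks")
        if application.host_name == "photoshop":
            return [os.path.join(hooks_root, "photoshop")]
        # A plain string is also accepted by the collector.
        return os.path.join(hooks_root, "global")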
""" + from openpype.modules import ModulesManager from openpype.pipeline import AvalonMongoDB, Anatomy # Avalon database connection @@ -1322,8 +1379,6 @@ def get_app_environments_for_context( asset_doc = get_asset_by_name(project_name, asset_name) if modules_manager is None: - from openpype.modules import ModulesManager - modules_manager = ModulesManager() # Prepare app object which can be obtained only from ApplciationManager @@ -1350,7 +1405,7 @@ def get_app_environments_for_context( }) prepare_app_environments(data, env_group, modules_manager) - prepare_context_environments(data, env_group) + prepare_context_environments(data, env_group, modules_manager) # Discard avalon connection dbcon.uninstall() @@ -1509,8 +1564,10 @@ def prepare_app_environments( final_env = None # Add host specific environments if app.host_name and implementation_envs: - module = __import__("openpype.hosts", fromlist=[app.host_name]) - host_module = getattr(module, app.host_name, None) + host_module = modules_manager.get_host_module(app.host_name) + if not host_module: + module = __import__("openpype.hosts", fromlist=[app.host_name]) + host_module = getattr(module, app.host_name, None) add_implementation_envs = None if host_module: add_implementation_envs = getattr( @@ -1569,13 +1626,16 @@ def apply_project_environments_value( return env -def prepare_context_environments(data, env_group=None): +def prepare_context_environments(data, env_group=None, modules_manager=None): """Modify launch environments with context data for launched host. Args: data (EnvironmentPrepData): Dictionary where result and intermediate result will be stored. """ + + from openpype.pipeline.template_data import get_template_data + # Context environments log = data["log"] @@ -1596,7 +1656,9 @@ def prepare_context_environments(data, env_group=None): # Load project specific environments project_name = project_doc["name"] project_settings = get_project_settings(project_name) + system_settings = get_system_settings() data["project_settings"] = project_settings + data["system_settings"] = system_settings # Apply project specific environments on current env value apply_project_environments_value( project_name, data["env"], project_settings, env_group @@ -1619,8 +1681,8 @@ def prepare_context_environments(data, env_group=None): if not app.is_host: return - workdir_data = get_workdir_data( - project_doc, asset_doc, task_name, app.host_name + workdir_data = get_template_data( + project_doc, asset_doc, task_name, app.host_name, system_settings ) data["workdir_data"] = workdir_data @@ -1631,7 +1693,14 @@ def prepare_context_environments(data, env_group=None): data["task_type"] = task_type try: - workdir = get_workdir_with_workdir_data(workdir_data, anatomy) + from openpype.pipeline.workfile import get_workdir_with_workdir_data + + workdir = get_workdir_with_workdir_data( + workdir_data, + anatomy.project_name, + anatomy, + project_settings=project_settings + ) except Exception as exc: raise ApplicationLaunchFailed( @@ -1652,10 +1721,10 @@ def prepare_context_environments(data, env_group=None): data["env"]["AVALON_APP"] = app.host_name data["env"]["AVALON_WORKDIR"] = workdir - _prepare_last_workfile(data, workdir) + _prepare_last_workfile(data, workdir, modules_manager) -def _prepare_last_workfile(data, workdir): +def _prepare_last_workfile(data, workdir, modules_manager): """last workfile workflow preparation. 
Function check if should care about last workfile workflow and tries @@ -1670,8 +1739,13 @@ def _prepare_last_workfile(data, workdir): result will be stored. workdir (str): Path to folder where workfiles should be stored. """ + + from openpype.modules import ModulesManager from openpype.pipeline import HOST_WORKFILE_EXTENSIONS + if not modules_manager: + modules_manager = ModulesManager() + log = data["log"] _workdir_data = data.get("workdir_data") @@ -1719,13 +1793,26 @@ def _prepare_last_workfile(data, workdir): # Last workfile path last_workfile_path = data.get("last_workfile_path") or "" if not last_workfile_path: - extensions = HOST_WORKFILE_EXTENSIONS.get(app.host_name) + host_module = modules_manager.get_host_module(app.host_name) + if host_module: + extensions = host_module.get_workfile_extensions() + else: + extensions = HOST_WORKFILE_EXTENSIONS.get(app.host_name) + if extensions: + from openpype.pipeline.workfile import ( + get_workfile_template_key, + get_last_workfile + ) + anatomy = data["anatomy"] project_settings = data["project_settings"] task_type = workdir_data["task"]["type"] template_key = get_workfile_template_key( - task_type, app.host_name, project_settings=project_settings + task_type, + app.host_name, + project_name, + project_settings=project_settings ) # Find last workfile file_template = str(anatomy.templates[template_key]["file"]) diff --git a/openpype/lib/avalon_context.py b/openpype/lib/avalon_context.py index 2944b2506e..7d56d039d4 100644 --- a/openpype/lib/avalon_context.py +++ b/openpype/lib/avalon_context.py @@ -1,50 +1,42 @@ """Should be used only inside of hosts.""" import os -import json -import re import copy import platform import logging -import collections import functools import warnings +import six + from openpype.client import ( get_project, get_assets, get_asset_by_name, - get_subset_by_name, - get_subsets, - get_version_by_id, - get_last_versions, - get_last_version_by_subset_id, - get_representations, - get_representation_by_id, + get_last_version_by_subset_name, get_workfile_info, ) -from openpype.settings import ( - get_project_settings, - get_system_settings +from openpype.client.operations import ( + CURRENT_ASSET_DOC_SCHEMA, + CURRENT_PROJECT_SCHEMA, + CURRENT_PROJECT_CONFIG_SCHEMA, + PROJECT_NAME_ALLOWED_SYMBOLS, + PROJECT_NAME_REGEX, ) from .profiles_filtering import filter_profiles -from .events import emit_event from .path_templates import StringTemplate -from .local_settings import get_openpype_username legacy_io = None log = logging.getLogger("AvalonContext") +# Backwards compatibility - should not be used anymore +# - Will be removed in OP 3.16.* CURRENT_DOC_SCHEMAS = { - "project": "openpype:project-3.0", - "asset": "openpype:asset-3.0", - "config": "openpype:config-2.0" + "project": CURRENT_PROJECT_SCHEMA, + "asset": CURRENT_ASSET_DOC_SCHEMA, + "config": CURRENT_PROJECT_CONFIG_SCHEMA } -PROJECT_NAME_ALLOWED_SYMBOLS = "a-zA-Z0-9_" -PROJECT_NAME_REGEX = re.compile( - "^[{}]+$".format(PROJECT_NAME_ALLOWED_SYMBOLS) -) class AvalonContextDeprecatedWarning(DeprecationWarning): @@ -180,7 +172,7 @@ def with_pipeline_io(func): return wrapped -@with_pipeline_io +@deprecated("openpype.pipeline.context_tools.is_representation_from_latest") def is_latest(representation): """Return whether the representation is from latest version @@ -189,54 +181,30 @@ def is_latest(representation): Returns: bool: Whether the representation is of latest version. 
+ + Deprecated: + Function will be removed after release version 3.15.* """ - project_name = legacy_io.active_project() - version = get_version_by_id( - project_name, - representation["parent"], - fields=["_id", "type", "parent"] - ) - if version["type"] == "hero_version": - return True + from openpype.pipeline.context_tools import is_representation_from_latest - # Get highest version under the parent - last_version = get_last_version_by_subset_id( - project_name, version["parent"], fields=["_id"] - ) - - return version["_id"] == last_version["_id"] + return is_representation_from_latest(representation) -@with_pipeline_io +@deprecated("openpype.pipeline.load.any_outdated_containers") def any_outdated(): - """Return whether the current scene has any outdated content""" - from openpype.pipeline import registered_host + """Return whether the current scene has any outdated content. - project_name = legacy_io.active_project() - checked = set() - host = registered_host() - for container in host.ls(): - representation = container['representation'] - if representation in checked: - continue + Deprecated: + Function will be removed after release version 3.15.* + """ - representation_doc = get_representation_by_id( - project_name, representation, fields=["parent"] - ) - if representation_doc and not is_latest(representation_doc): - return True - elif not representation_doc: - log.debug("Container '{objectName}' has an invalid " - "representation, it is missing in the " - "database".format(**container)) + from openpype.pipeline.load import any_outdated_containers - checked.add(representation) - - return False + return any_outdated_containers() -@with_pipeline_io +@deprecated("openpype.pipeline.context_tools.get_current_project_asset") def get_asset(asset_name=None): """ Returning asset document from database by its name. @@ -247,30 +215,25 @@ def get_asset(asset_name=None): Returns: (MongoDB document) + + Deprecated: + Function will be removed after release version 3.15.* """ - project_name = legacy_io.active_project() - if not asset_name: - asset_name = legacy_io.Session["AVALON_ASSET"] + from openpype.pipeline.context_tools import get_current_project_asset - asset_document = get_asset_by_name(project_name, asset_name) - if not asset_document: - raise TypeError("Entity \"{}\" was not found in DB".format(asset_name)) - - return asset_document + return get_current_project_asset(asset_name=asset_name) +@deprecated("openpype.pipeline.template_data.get_general_template_data") def get_system_general_anatomy_data(system_settings=None): - if not system_settings: - system_settings = get_system_settings() - studio_name = system_settings["general"]["studio_name"] - studio_code = system_settings["general"]["studio_code"] - return { - "studio": { - "name": studio_name, - "code": studio_code - } - } + """ + Deprecated: + Function will be removed after release version 3.15.* + """ + from openpype.pipeline.template_data import get_general_template_data + + return get_general_template_data(system_settings) def get_linked_asset_ids(asset_doc): @@ -319,7 +282,7 @@ def get_linked_assets(asset_doc): return list(get_assets(project_name, link_ids)) -@with_pipeline_io +@deprecated("openpype.client.get_last_version_by_subset_name") def get_latest_version(asset_name, subset_name, dbcon=None, project_name=None): """Retrieve latest version from `asset_name`, and `subset_name`. 
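The avalon_context helpers above are now thin deprecation shims; callers should import the pipeline functions the shims forward to. A small migration sketch, assuming it runs inside an installed host session as the originals did (the representation document is whatever a loaded container references):

from openpype.pipeline.context_tools import (
    is_representation_from_latest,
    get_current_project_asset,
)
from openpype.pipeline.load import any_outdated_containers

# Replacement for openpype.lib.avalon_context.get_asset()
asset_doc = get_current_project_asset()

# Replacement for openpype.lib.avalon_context.any_outdated()
scene_is_stale = any_outdated_containers()

# Replacement for openpype.lib.avalon_context.is_latest();
# 'representation_doc' would come from a loaded container in the scene.
# up_to_date = is_representation_from_latest(representation_doc)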
@@ -335,11 +298,16 @@ def get_latest_version(asset_name, subset_name, dbcon=None, project_name=None): Returns: None: If asset, subset or version were not found. - dict: Last version document for entered . + dict: Last version document for entered. + + Deprecated: + Function will be removed after release version 3.15.* """ if not project_name: if not dbcon: + from openpype.pipeline import legacy_io + log.debug("Using `legacy_io` for query.") dbcon = legacy_io # Make sure is installed @@ -347,39 +315,13 @@ def get_latest_version(asset_name, subset_name, dbcon=None, project_name=None): project_name = dbcon.active_project() - log.debug(( - "Getting latest version for Project: \"{}\" Asset: \"{}\"" - " and Subset: \"{}\"" - ).format(project_name, asset_name, subset_name)) - - # Query asset document id by asset name - asset_doc = get_asset_by_name(project_name, asset_name, fields=["_id"]) - if not asset_doc: - log.info( - "Asset \"{}\" was not found in Database.".format(asset_name) - ) - return None - - subset_doc = get_subset_by_name( - project_name, subset_name, asset_doc["_id"] + return get_last_version_by_subset_name( + project_name, subset_name, asset_name=asset_name ) - if not subset_doc: - log.info( - "Subset \"{}\" was not found in Database.".format(subset_name) - ) - return None - - version_doc = get_last_version_by_subset_id( - project_name, subset_doc["_id"] - ) - if not version_doc: - log.info( - "Subset \"{}\" does not have any version yet.".format(subset_name) - ) - return None - return version_doc +@deprecated( + "openpype.pipeline.workfile.get_workfile_template_key_from_context") def get_workfile_template_key_from_context( asset_name, task_name, host_name, project_name=None, dbcon=None, project_settings=None @@ -407,28 +349,30 @@ def get_workfile_template_key_from_context( Raises: ValueError: When both 'dbcon' and 'project_name' were not passed. + + Deprecated: + Function will be removed after release version 3.16.* """ + + from openpype.pipeline.workfile import ( + get_workfile_template_key_from_context + ) + if not project_name: if not dbcon: raise ValueError(( "`get_workfile_template_key_from_context` requires to pass" " one of 'dbcon' or 'project_name' arguments." )) - project_name = dbcon.active_project() - asset_doc = get_asset_by_name( - project_name, asset_name, fields=["data.tasks"] - ) - asset_tasks = asset_doc.get("data", {}).get("tasks") or {} - task_info = asset_tasks.get(task_name) or {} - task_type = task_info.get("type") - - return get_workfile_template_key( - task_type, host_name, project_name, project_settings + return get_workfile_template_key_from_context( + asset_name, task_name, host_name, project_name, project_settings ) +@deprecated( + "openpype.pipeline.workfile.get_workfile_template_key") def get_workfile_template_key( task_type, host_name, project_name=None, project_settings=None ): @@ -451,44 +395,19 @@ def get_workfile_template_key( Raises: ValueError: When both 'project_name' and 'project_settings' were not passed. + + Deprecated: + Function will be removed after release version 3.16.* """ - default = "work" - if not task_type or not host_name: - return default - if not project_settings: - if not project_name: - raise ValueError(( - "`get_workfile_template_key` requires to pass" - " one of 'project_name' or 'project_settings' arguments." 
- )) - project_settings = get_project_settings(project_name) + from openpype.pipeline.workfile import get_workfile_template_key - try: - profiles = ( - project_settings - ["global"] - ["tools"] - ["Workfiles"] - ["workfile_template_profiles"] - ) - except Exception: - profiles = [] - - if not profiles: - return default - - profile_filter = { - "task_types": task_type, - "hosts": host_name - } - profile = filter_profiles(profiles, profile_filter) - if profile: - return profile["workfile_template"] or default - return default + return get_workfile_template_key( + task_type, host_name, project_name, project_settings + ) -# TODO rename function as is not just "work" specific +@deprecated("openpype.pipeline.template_data.get_template_data") def get_workdir_data(project_doc, asset_doc, task_name, host_name): """Prepare data for workdir template filling from entered information. @@ -501,42 +420,19 @@ def get_workdir_data(project_doc, asset_doc, task_name, host_name): Returns: dict: Data prepared for filling workdir template. + + Deprecated: + Function will be removed after release version 3.15.* """ - task_type = asset_doc['data']['tasks'].get(task_name, {}).get('type') - project_task_types = project_doc["config"]["tasks"] - task_code = project_task_types.get(task_type, {}).get("short_name") + from openpype.pipeline.template_data import get_template_data - asset_parents = asset_doc["data"]["parents"] - hierarchy = "/".join(asset_parents) - - parent_name = project_doc["name"] - if asset_parents: - parent_name = asset_parents[-1] - - data = { - "project": { - "name": project_doc["name"], - "code": project_doc["data"].get("code") - }, - "task": { - "name": task_name, - "type": task_type, - "short": task_code, - }, - "asset": asset_doc["name"], - "parent": parent_name, - "app": host_name, - "user": get_openpype_username(), - "hierarchy": hierarchy, - } - - system_general_data = get_system_general_anatomy_data() - data.update(system_general_data) - - return data + return get_template_data( + project_doc, asset_doc, task_name, host_name + ) +@deprecated("openpype.pipeline.workfile.get_workdir_with_workdir_data") def get_workdir_with_workdir_data( workdir_data, anatomy=None, project_name=None, template_key=None ): @@ -562,32 +458,28 @@ def get_workdir_with_workdir_data( Raises: ValueError: When both `anatomy` and `project_name` are set to None. + + Deprecated: + Function will be removed after release version 3.15.* """ + if not anatomy and not project_name: raise ValueError(( "Missing required arguments one of `project_name` or `anatomy`" " must be entered." )) - if not anatomy: - from openpype.pipeline import Anatomy - anatomy = Anatomy(project_name) + if not project_name: + project_name = anatomy.project_name - if not template_key: - template_key = get_workfile_template_key( - workdir_data["task"]["type"], - workdir_data["app"], - project_name=workdir_data["project"]["name"] - ) + from openpype.pipeline.workfile import get_workdir_with_workdir_data - anatomy_filled = anatomy.format(workdir_data) - # Output is TemplateResult object which contain useful data - output = anatomy_filled[template_key]["folder"] - if output: - return output.normalized() - return output + return get_workdir_with_workdir_data( + workdir_data, project_name, anatomy, template_key + ) +@deprecated("openpype.pipeline.workfile.get_workdir_with_workdir_data") def get_workdir( project_doc, asset_doc, @@ -614,45 +506,44 @@ def get_workdir( Returns: TemplateResult: Workdir path. 
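Similarly, get_workdir_data() and get_workdir_with_workdir_data() above now only forward to the relocated implementations; note that the workdir helper takes the project name explicitly. A replacement sketch with placeholder context values:

from openpype.client import get_project, get_asset_by_name
from openpype.pipeline import Anatomy
from openpype.pipeline.template_data import get_template_data
from openpype.pipeline.workfile import get_workdir_with_workdir_data

project_name = "demo_project"   # placeholder context
asset_name, task_name, host_name = "sh010", "compositing", "nuke"

project_doc = get_project(project_name)
asset_doc = get_asset_by_name(project_name, asset_name)

# Replacement for the deprecated get_workdir_data()
workdir_data = get_template_data(project_doc, asset_doc, task_name, host_name)

# Replacement for the deprecated get_workdir_with_workdir_data();
# the project name is now passed next to the Anatomy object.
anatomy = Anatomy(project_name)
workdir = get_workdir_with_workdir_data(workdir_data, project_name, anatomy)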
+ + Deprecated: + Function will be removed after release version 3.15.* """ - if not anatomy: - from openpype.pipeline import Anatomy - anatomy = Anatomy(project_doc["name"]) - - workdir_data = get_workdir_data( - project_doc, asset_doc, task_name, host_name - ) + from openpype.pipeline.workfile import get_workdir # Output is TemplateResult object which contain useful data - return get_workdir_with_workdir_data( - workdir_data, anatomy, template_key=template_key + return get_workdir( + project_doc, + asset_doc, + task_name, + host_name, + anatomy, + template_key ) -@with_pipeline_io +@deprecated("openpype.pipeline.context_tools.get_template_data_from_session") def template_data_from_session(session=None): """ Return dictionary with template from session keys. Args: session (dict, Optional): The Session to use. If not provided use the currently active global Session. + Returns: dict: All available data from session. + + Deprecated: + Function will be removed after release version 3.15.* """ - if session is None: - session = legacy_io.Session + from openpype.pipeline.context_tools import get_template_data_from_session - project_name = session["AVALON_PROJECT"] - asset_name = session["AVALON_ASSET"] - task_name = session["AVALON_TASK"] - host_name = session["AVALON_APP"] - project_doc = get_project(project_name) - asset_doc = get_asset_by_name(project_name, asset_name) - return get_workdir_data(project_doc, asset_doc, task_name, host_name) + return get_template_data_from_session(session) -@with_pipeline_io +@deprecated("openpype.pipeline.context_tools.compute_session_changes") def compute_session_changes( session, task=None, asset=None, app=None, template_key=None ): @@ -673,80 +564,49 @@ def compute_session_changes( Returns: dict: The required changes in the Session dictionary. + + Deprecated: + Function will be removed after release version 3.16.* """ - changes = dict() + from openpype.pipeline import legacy_io + from openpype.pipeline.context_tools import compute_session_changes - # If no changes, return directly - if not any([task, asset, app]): - return changes + if isinstance(asset, six.string_types): + project_name = legacy_io.active_project() + asset = get_asset_by_name(project_name, asset) - # Get asset document and asset - asset_document = None - asset_tasks = None - if isinstance(asset, dict): - # Assume asset database document - asset_document = asset - asset_tasks = asset_document.get("data", {}).get("tasks") - asset = asset["name"] - - if not asset_document or not asset_tasks: - # Assume asset name - project_name = session["AVALON_PROJECT"] - asset_document = get_asset_by_name( - project_name, asset, fields=["data.tasks"] - ) - assert asset_document, "Asset must exist" - - # Detect any changes compared session - mapping = { - "AVALON_ASSET": asset, - "AVALON_TASK": task, - "AVALON_APP": app, - } - changes = { - key: value - for key, value in mapping.items() - if value and value != session.get(key) - } - if not changes: - return changes - - # Compute work directory (with the temporary changed session so far) - _session = session.copy() - _session.update(changes) - - changes["AVALON_WORKDIR"] = get_workdir_from_session(_session) - - return changes + return compute_session_changes( + session, + asset, + task, + template_key + ) -@with_pipeline_io +@deprecated("openpype.pipeline.context_tools.get_workdir_from_session") def get_workdir_from_session(session=None, template_key=None): - from openpype.pipeline import Anatomy + """Calculate workdir path based on session data. 
- if session is None: - session = legacy_io.Session - project_name = session["AVALON_PROJECT"] - host_name = session["AVALON_APP"] - anatomy = Anatomy(project_name) - template_data = template_data_from_session(session) - anatomy_filled = anatomy.format(template_data) + Args: + session (Union[None, Dict[str, str]]): Session to use. If not passed + current context session is used (from legacy_io). + template_key (Union[str, None]): Precalculate template key to define + workfile template name in Anatomy. - if not template_key: - task_type = template_data["task"]["type"] - template_key = get_workfile_template_key( - task_type, - host_name, - project_name=project_name - ) - path = anatomy_filled[template_key]["folder"] - if path: - path = os.path.normpath(path) - return path + Returns: + str: Workdir path. + + Deprecated: + Function will be removed after release version 3.16.* + """ + + from openpype.pipeline.context_tools import get_workdir_from_session + + return get_workdir_from_session(session, template_key) -@with_pipeline_io +@deprecated("openpype.pipeline.context_tools.change_current_context") def update_current_task(task=None, asset=None, app=None, template_key=None): """Update active Session to a new task work area. @@ -760,37 +620,20 @@ def update_current_task(task=None, asset=None, app=None, template_key=None): Returns: dict: The changed key, values in the current Session. + Deprecated: + Function will be removed after release version 3.16.* """ - changes = compute_session_changes( - legacy_io.Session, - task=task, - asset=asset, - app=app, - template_key=template_key - ) - # Update the Session and environments. Pop from environments all keys with - # value set to None. - for key, value in changes.items(): - legacy_io.Session[key] = value - if value is None: - os.environ.pop(key, None) - else: - os.environ[key] = value + from openpype.pipeline import legacy_io + from openpype.pipeline.context_tools import change_current_context - data = changes.copy() - # Convert env keys to human readable keys - data["project_name"] = legacy_io.Session["AVALON_PROJECT"] - data["asset_name"] = legacy_io.Session["AVALON_ASSET"] - data["task_name"] = legacy_io.Session["AVALON_TASK"] + project_name = legacy_io.active_project() + if isinstance(asset, six.string_types): + asset = get_asset_by_name(project_name, asset) - # Emit session change - emit_event("taskChanged", data) - - return changes + return change_current_context(asset, task, template_key) -@with_pipeline_io @deprecated("openpype.client.get_workfile_info") def get_workfile_doc(asset_id, task_name, filename, dbcon=None): """Return workfile document for entered context. @@ -807,17 +650,21 @@ def get_workfile_doc(asset_id, task_name, filename, dbcon=None): Returns: dict: Workfile document or None. + + Deprecated: + Function will be removed after release version 3.15.* """ # Use legacy_io if dbcon is not entered if not dbcon: + from openpype.pipeline import legacy_io dbcon = legacy_io project_name = dbcon.active_project() return get_workfile_info(project_name, asset_id, task_name, filename) -@with_pipeline_io +@deprecated def create_workfile_doc(asset_doc, task_name, filename, workdir, dbcon=None): """Creates or replace workfile document in mongo. @@ -832,10 +679,13 @@ def create_workfile_doc(asset_doc, task_name, filename, workdir, dbcon=None): dbcon (AvalonMongoDB): Optionally enter avalon AvalonMongoDB object and `legacy_io` is used if not entered. 
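update_current_task() above now resolves the asset document and defers to change_current_context(); new code can call the pipeline function directly. A sketch assuming an active legacy_io session, mirroring the argument order used by the deprecated wrapper:

from openpype.client import get_asset_by_name
from openpype.pipeline import legacy_io
from openpype.pipeline.context_tools import change_current_context

project_name = legacy_io.active_project()
asset_doc = get_asset_by_name(project_name, "sh020")   # placeholder asset

# Replacement for update_current_task(task="animation", asset="sh020");
# the template key is left as None, exactly as the wrapper passes it.
change_current_context(asset_doc, "animation", None)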
""" + from openpype.pipeline import Anatomy + from openpype.pipeline.template_data import get_template_data # Use legacy_io if dbcon is not entered if not dbcon: + from openpype.pipeline import legacy_io dbcon = legacy_io # Filter of workfile document @@ -851,7 +701,7 @@ def create_workfile_doc(asset_doc, task_name, filename, workdir, dbcon=None): # Prepare project for workdir data project_name = dbcon.active_project() project_doc = get_project(project_name) - workdir_data = get_workdir_data( + workdir_data = get_template_data( project_doc, asset_doc, task_name, dbcon.Session["AVALON_APP"] ) # Prepare anatomy @@ -882,7 +732,7 @@ def create_workfile_doc(asset_doc, task_name, filename, workdir, dbcon=None): ) -@with_pipeline_io +@deprecated def save_workfile_data_to_doc(workfile_doc, data, dbcon=None): if not workfile_doc: # TODO add log message @@ -893,6 +743,7 @@ def save_workfile_data_to_doc(workfile_doc, data, dbcon=None): # Use legacy_io if dbcon is not entered if not dbcon: + from openpype.pipeline import legacy_io dbcon = legacy_io # Convert data to mongo modification keys/values @@ -910,664 +761,19 @@ def save_workfile_data_to_doc(workfile_doc, data, dbcon=None): ) -class BuildWorkfile: - """Wrapper for build workfile process. +@deprecated("openpype.pipeline.workfile.BuildWorkfile") +def BuildWorkfile(): + """Build workfile class was moved to workfile pipeline. - Load representations for current context by build presets. Build presets - are host related, since each host has it's loaders. + Deprecated: + Function will be removed after release version 3.16.* """ + from openpype.pipeline.workfile import BuildWorkfile - log = logging.getLogger("BuildWorkfile") + return BuildWorkfile() - @staticmethod - def map_subsets_by_family(subsets): - subsets_by_family = collections.defaultdict(list) - for subset in subsets: - family = subset["data"].get("family") - if not family: - families = subset["data"].get("families") - if not families: - continue - family = families[0] - subsets_by_family[family].append(subset) - return subsets_by_family - - def process(self): - """Main method of this wrapper. - - Building of workfile is triggered and is possible to implement - post processing of loaded containers if necessary. - """ - containers = self.build_workfile() - - return containers - - @with_pipeline_io - def build_workfile(self): - """Prepares and load containers into workfile. - - Loads latest versions of current and linked assets to workfile by logic - stored in Workfile profiles from presets. Profiles are set by host, - filtered by current task name and used by families. - - Each family can specify representation names and loaders for - representations and first available and successful loaded - representation is returned as container. - - At the end you'll get list of loaded containers per each asset. - - loaded_containers [{ - "asset_entity": , - "containers": [, , ...] - }, { - "asset_entity": , - "containers": [, ...] - }, { - ... 
- }] - """ - from openpype.pipeline import discover_loader_plugins - - # Get current asset name and entity - project_name = legacy_io.active_project() - current_asset_name = legacy_io.Session["AVALON_ASSET"] - current_asset_entity = get_asset_by_name( - project_name, current_asset_name - ) - # Skip if asset was not found - if not current_asset_entity: - print("Asset entity with name `{}` was not found".format( - current_asset_name - )) - return - - # Prepare available loaders - loaders_by_name = {} - for loader in discover_loader_plugins(): - loader_name = loader.__name__ - if loader_name in loaders_by_name: - raise KeyError( - "Duplicated loader name {0}!".format(loader_name) - ) - loaders_by_name[loader_name] = loader - - # Skip if there are any loaders - if not loaders_by_name: - self.log.warning("There are no registered loaders.") - return - - # Get current task name - current_task_name = legacy_io.Session["AVALON_TASK"] - - # Load workfile presets for task - self.build_presets = self.get_build_presets( - current_task_name, current_asset_entity - ) - - # Skip if there are any presets for task - if not self.build_presets: - self.log.warning( - "Current task `{}` does not have any loading preset.".format( - current_task_name - ) - ) - return - - # Get presets for loading current asset - current_context_profiles = self.build_presets.get("current_context") - # Get presets for loading linked assets - link_context_profiles = self.build_presets.get("linked_assets") - # Skip if both are missing - if not current_context_profiles and not link_context_profiles: - self.log.warning( - "Current task `{}` has empty loading preset.".format( - current_task_name - ) - ) - return - - elif not current_context_profiles: - self.log.warning(( - "Current task `{}` doesn't have any loading" - " preset for it's context." - ).format(current_task_name)) - - elif not link_context_profiles: - self.log.warning(( - "Current task `{}` doesn't have any" - "loading preset for it's linked assets." - ).format(current_task_name)) - - # Prepare assets to process by workfile presets - assets = [] - current_asset_id = None - if current_context_profiles: - # Add current asset entity if preset has current context set - assets.append(current_asset_entity) - current_asset_id = current_asset_entity["_id"] - - if link_context_profiles: - # Find and append linked assets if preset has set linked mapping - link_assets = get_linked_assets(current_asset_entity) - if link_assets: - assets.extend(link_assets) - - # Skip if there are no assets. This can happen if only linked mapping - # is set and there are no links for his asset. - if not assets: - self.log.warning( - "Asset does not have linked assets. Nothing to process." 
- ) - return - - # Prepare entities from database for assets - prepared_entities = self._collect_last_version_repres(assets) - - # Load containers by prepared entities and presets - loaded_containers = [] - # - Current asset containers - if current_asset_id and current_asset_id in prepared_entities: - current_context_data = prepared_entities.pop(current_asset_id) - loaded_data = self.load_containers_by_asset_data( - current_context_data, current_context_profiles, loaders_by_name - ) - if loaded_data: - loaded_containers.append(loaded_data) - - # - Linked assets container - for linked_asset_data in prepared_entities.values(): - loaded_data = self.load_containers_by_asset_data( - linked_asset_data, link_context_profiles, loaders_by_name - ) - if loaded_data: - loaded_containers.append(loaded_data) - - # Return list of loaded containers - return loaded_containers - - @with_pipeline_io - def get_build_presets(self, task_name, asset_doc): - """ Returns presets to build workfile for task name. - - Presets are loaded for current project set in - io.Session["AVALON_PROJECT"], filtered by registered host - and entered task name. - - Args: - task_name (str): Task name used for filtering build presets. - - Returns: - (dict): preset per entered task name - """ - host_name = os.environ["AVALON_APP"] - project_settings = get_project_settings( - legacy_io.Session["AVALON_PROJECT"] - ) - - host_settings = project_settings.get(host_name) or {} - # Get presets for host - wb_settings = host_settings.get("workfile_builder") - if not wb_settings: - # backward compatibility - wb_settings = host_settings.get("workfile_build") or {} - - builder_profiles = wb_settings.get("profiles") - if not builder_profiles: - return None - - task_type = ( - asset_doc - .get("data", {}) - .get("tasks", {}) - .get(task_name, {}) - .get("type") - ) - filter_data = { - "task_types": task_type, - "tasks": task_name - } - return filter_profiles(builder_profiles, filter_data) - - def _filter_build_profiles(self, build_profiles, loaders_by_name): - """ Filter build profiles by loaders and prepare process data. - - Valid profile must have "loaders", "families" and "repre_names" keys - with valid values. - - "loaders" expects list of strings representing possible loaders. - - "families" expects list of strings for filtering - by main subset family. - - "repre_names" expects list of strings for filtering by - representation name. - - Lowered "families" and "repre_names" are prepared for each profile with - all required keys. - - Args: - build_profiles (dict): Profiles for building workfile. - loaders_by_name (dict): Available loaders per name. - - Returns: - (list): Filtered and prepared profiles. 
- """ - valid_profiles = [] - for profile in build_profiles: - # Check loaders - profile_loaders = profile.get("loaders") - if not profile_loaders: - self.log.warning(( - "Build profile has missing loaders configuration: {0}" - ).format(json.dumps(profile, indent=4))) - continue - - # Check if any loader is available - loaders_match = False - for loader_name in profile_loaders: - if loader_name in loaders_by_name: - loaders_match = True - break - - if not loaders_match: - self.log.warning(( - "All loaders from Build profile are not available: {0}" - ).format(json.dumps(profile, indent=4))) - continue - - # Check families - profile_families = profile.get("families") - if not profile_families: - self.log.warning(( - "Build profile is missing families configuration: {0}" - ).format(json.dumps(profile, indent=4))) - continue - - # Check representation names - profile_repre_names = profile.get("repre_names") - if not profile_repre_names: - self.log.warning(( - "Build profile is missing" - " representation names filtering: {0}" - ).format(json.dumps(profile, indent=4))) - continue - - # Prepare lowered families and representation names - profile["families_lowered"] = [ - fam.lower() for fam in profile_families - ] - profile["repre_names_lowered"] = [ - name.lower() for name in profile_repre_names - ] - - valid_profiles.append(profile) - - return valid_profiles - - def _prepare_profile_for_subsets(self, subsets, profiles): - """Select profile for each subset by it's data. - - Profiles are filtered for each subset individually. - Profile is filtered by subset's family, optionally by name regex and - representation names set in profile. - It is possible to not find matching profile for subset, in that case - subset is skipped and it is possible that none of subsets have - matching profile. - - Args: - subsets (list): Subset documents. - profiles (dict): Build profiles. - - Returns: - (dict) Profile by subset's id. - """ - # Prepare subsets - subsets_by_family = self.map_subsets_by_family(subsets) - - profiles_per_subset_id = {} - for family, subsets in subsets_by_family.items(): - family_low = family.lower() - for profile in profiles: - # Skip profile if does not contain family - if family_low not in profile["families_lowered"]: - continue - - # Precompile name filters as regexes - profile_regexes = profile.get("subset_name_filters") - if profile_regexes: - _profile_regexes = [] - for regex in profile_regexes: - _profile_regexes.append(re.compile(regex)) - profile_regexes = _profile_regexes - - # TODO prepare regex compilation - for subset in subsets: - # Verify regex filtering (optional) - if profile_regexes: - valid = False - for pattern in profile_regexes: - if re.match(pattern, subset["name"]): - valid = True - break - - if not valid: - continue - - profiles_per_subset_id[subset["_id"]] = profile - - # break profiles loop on finding the first matching profile - break - return profiles_per_subset_id - - def load_containers_by_asset_data( - self, asset_entity_data, build_profiles, loaders_by_name - ): - """Load containers for entered asset entity by Build profiles. - - Args: - asset_entity_data (dict): Prepared data with subsets, last version - and representations for specific asset. - build_profiles (dict): Build profiles. - loaders_by_name (dict): Available loaders per name. - - Returns: - (dict) Output contains asset document and loaded containers. 
- """ - - # Make sure all data are not empty - if not asset_entity_data or not build_profiles or not loaders_by_name: - return - - asset_entity = asset_entity_data["asset_entity"] - - valid_profiles = self._filter_build_profiles( - build_profiles, loaders_by_name - ) - if not valid_profiles: - self.log.warning( - "There are not valid Workfile profiles. Skipping process." - ) - return - - self.log.debug("Valid Workfile profiles: {}".format(valid_profiles)) - - subsets_by_id = {} - version_by_subset_id = {} - repres_by_version_id = {} - for subset_id, in_data in asset_entity_data["subsets"].items(): - subset_entity = in_data["subset_entity"] - subsets_by_id[subset_entity["_id"]] = subset_entity - - version_data = in_data["version"] - version_entity = version_data["version_entity"] - version_by_subset_id[subset_id] = version_entity - repres_by_version_id[version_entity["_id"]] = ( - version_data["repres"] - ) - - if not subsets_by_id: - self.log.warning("There are not subsets for asset {0}".format( - asset_entity["name"] - )) - return - - profiles_per_subset_id = self._prepare_profile_for_subsets( - subsets_by_id.values(), valid_profiles - ) - if not profiles_per_subset_id: - self.log.warning("There are not valid subsets.") - return - - valid_repres_by_subset_id = collections.defaultdict(list) - for subset_id, profile in profiles_per_subset_id.items(): - profile_repre_names = profile["repre_names_lowered"] - - version_entity = version_by_subset_id[subset_id] - version_id = version_entity["_id"] - repres = repres_by_version_id[version_id] - for repre in repres: - repre_name_low = repre["name"].lower() - if repre_name_low in profile_repre_names: - valid_repres_by_subset_id[subset_id].append(repre) - - # DEBUG message - msg = "Valid representations for Asset: `{}`".format( - asset_entity["name"] - ) - for subset_id, repres in valid_repres_by_subset_id.items(): - subset = subsets_by_id[subset_id] - msg += "\n# Subset Name/ID: `{}`/{}".format( - subset["name"], subset_id - ) - for repre in repres: - msg += "\n## Repre name: `{}`".format(repre["name"]) - - self.log.debug(msg) - - containers = self._load_containers( - valid_repres_by_subset_id, subsets_by_id, - profiles_per_subset_id, loaders_by_name - ) - - return { - "asset_entity": asset_entity, - "containers": containers - } - - @with_pipeline_io - def _load_containers( - self, repres_by_subset_id, subsets_by_id, - profiles_per_subset_id, loaders_by_name - ): - """Real load by collected data happens here. - - Loading of representations per subset happens here. Each subset can - loads one representation. Loading is tried in specific order. - Representations are tried to load by names defined in configuration. - If subset has representation matching representation name each loader - is tried to load it until any is successful. If none of them was - successful then next representation name is tried. - Subset process loop ends when any representation is loaded or - all matching representations were already tried. - - Args: - repres_by_subset_id (dict): Available representations mapped - by their parent (subset) id. - subsets_by_id (dict): Subset documents mapped by their id. - profiles_per_subset_id (dict): Build profiles mapped by subset id. - loaders_by_name (dict): Available loaders per name. - - Returns: - (list) Objects of loaded containers. - """ - from openpype.pipeline import ( - IncompatibleLoaderError, - load_container, - ) - - loaded_containers = [] - - # Get subset id order from build presets. 
- build_presets = self.build_presets.get("current_context", []) - build_presets += self.build_presets.get("linked_assets", []) - subset_ids_ordered = [] - for preset in build_presets: - for preset_family in preset["families"]: - for id, subset in subsets_by_id.items(): - if preset_family not in subset["data"].get("families", []): - continue - - subset_ids_ordered.append(id) - - # Order representations from subsets. - print("repres_by_subset_id", repres_by_subset_id) - representations_ordered = [] - representations = [] - for id in subset_ids_ordered: - for subset_id, repres in repres_by_subset_id.items(): - if repres in representations: - continue - - if id == subset_id: - representations_ordered.append((subset_id, repres)) - representations.append(repres) - - print("representations", representations) - - # Load ordered representations. - for subset_id, repres in representations_ordered: - subset_name = subsets_by_id[subset_id]["name"] - - profile = profiles_per_subset_id[subset_id] - loaders_last_idx = len(profile["loaders"]) - 1 - repre_names_last_idx = len(profile["repre_names_lowered"]) - 1 - - repre_by_low_name = { - repre["name"].lower(): repre for repre in repres - } - - is_loaded = False - for repre_name_idx, profile_repre_name in enumerate( - profile["repre_names_lowered"] - ): - # Break iteration if representation was already loaded - if is_loaded: - break - - repre = repre_by_low_name.get(profile_repre_name) - if not repre: - continue - - for loader_idx, loader_name in enumerate(profile["loaders"]): - if is_loaded: - break - - loader = loaders_by_name.get(loader_name) - if not loader: - continue - try: - container = load_container( - loader, - repre["_id"], - name=subset_name - ) - loaded_containers.append(container) - is_loaded = True - - except Exception as exc: - if exc == IncompatibleLoaderError: - self.log.info(( - "Loader `{}` is not compatible with" - " representation `{}`" - ).format(loader_name, repre["name"])) - - else: - self.log.error( - "Unexpected error happened during loading", - exc_info=True - ) - - msg = "Loading failed." - if loader_idx < loaders_last_idx: - msg += " Trying next loader." - elif repre_name_idx < repre_names_last_idx: - msg += ( - " Loading of subset `{}` was not successful." - ).format(subset_name) - else: - msg += " Trying next representation." - self.log.info(msg) - - return loaded_containers - - @with_pipeline_io - def _collect_last_version_repres(self, asset_docs): - """Collect subsets, versions and representations for asset_entities. - - Args: - asset_entities (list): Asset entities for which want to find data - - Returns: - (dict): collected entities - - Example output: - ``` - { - {Asset ID}: { - "asset_entity": , - "subsets": { - {Subset ID}: { - "subset_entity": , - "version": { - "version_entity": , - "repres": [ - , , ... - ] - } - }, - ... - } - }, - ... 
- } - output[asset_id]["subsets"][subset_id]["version"]["repres"] - ``` - """ - - output = {} - if not asset_docs: - return output - - asset_docs_by_ids = {asset["_id"]: asset for asset in asset_docs} - - project_name = legacy_io.active_project() - subsets = list(get_subsets( - project_name, asset_ids=asset_docs_by_ids.keys() - )) - subset_entity_by_ids = {subset["_id"]: subset for subset in subsets} - - last_version_by_subset_id = get_last_versions( - project_name, subset_entity_by_ids.keys() - ) - last_version_docs_by_id = { - version["_id"]: version - for version in last_version_by_subset_id.values() - } - repre_docs = get_representations( - project_name, version_ids=last_version_docs_by_id.keys() - ) - - for repre_doc in repre_docs: - version_id = repre_doc["parent"] - version_doc = last_version_docs_by_id[version_id] - - subset_id = version_doc["parent"] - subset_doc = subset_entity_by_ids[subset_id] - - asset_id = subset_doc["parent"] - asset_doc = asset_docs_by_ids[asset_id] - - if asset_id not in output: - output[asset_id] = { - "asset_entity": asset_doc, - "subsets": {} - } - - if subset_id not in output[asset_id]["subsets"]: - output[asset_id]["subsets"][subset_id] = { - "subset_entity": subset_doc, - "version": { - "version_entity": version_doc, - "repres": [] - } - } - - output[asset_id]["subsets"][subset_id]["version"]["repres"].append( - repre_doc - ) - - return output - - -@with_pipeline_io +@deprecated("openpype.pipeline.create.get_legacy_creator_by_name") def get_creator_by_name(creator_name, case_sensitive=False): """Find creator plugin by name. @@ -1578,32 +784,27 @@ def get_creator_by_name(creator_name, case_sensitive=False): Returns: Creator: Return first matching plugin or `None`. + + Deprecated: + Function will be removed after release version 3.16.* """ - from openpype.pipeline import discover_legacy_creator_plugins + from openpype.pipeline.create import get_legacy_creator_by_name - # Lower input creator name if is not case sensitive - if not case_sensitive: - creator_name = creator_name.lower() - - for creator_plugin in discover_legacy_creator_plugins(): - _creator_name = creator_plugin.__name__ - - # Lower creator plugin name if is not case sensitive - if not case_sensitive: - _creator_name = _creator_name.lower() - - if _creator_name == creator_name: - return creator_plugin - return None + return get_legacy_creator_by_name(creator_name, case_sensitive) -@with_pipeline_io +@deprecated def change_timer_to_current_context(): """Called after context change to change timers. - TODO: - - use TimersManager's static method instead of reimplementing it here + Deprecated: + This method is specific for TimersManager module so please use the + functionality from there. Function will be removed after release + version 3.15.* """ + + from openpype.pipeline import legacy_io + webserver_url = os.environ.get("OPENPYPE_WEBSERVER_URL") if not webserver_url: log.warning("Couldn't find webserver url") @@ -1691,6 +892,8 @@ def _get_task_context_data_for_anatomy( return data +@deprecated( + "openpype.pipeline.workfile.get_custom_workfile_template_by_context") def get_custom_workfile_template_by_context( template_profiles, project_doc, asset_doc, task_name, anatomy=None ): @@ -1712,6 +915,9 @@ def get_custom_workfile_template_by_context( Returns: str: Path to template or None if none of profiles match current context. (Existence of formatted path is not validated.) 
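Wrappers like `get_creator_by_name` above keep their original signature but only emit a deprecation warning and forward to the new location. That behaviour can be checked with the standard `warnings` module; this is a generic sketch, not OpenPype's actual decorator.

```python
import warnings


def new_helper(value):
    return value * 2


def old_helper(value):
    """Stand-in for a deprecated wrapper that forwards to a new function."""
    warnings.warn(
        "Use 'new_module.new_helper' instead.",
        category=DeprecationWarning,
        stacklevel=2,
    )
    return new_helper(value)


with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")
    result = old_helper(21)

print(result)                       # 42, behaviour is unchanged
print(caught[0].category.__name__)  # DeprecationWarning
```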
+ + Deprecated: + Function will be removed after release version 3.16.* """ if anatomy is None: @@ -1744,6 +950,9 @@ def get_custom_workfile_template_by_context( return None +@deprecated( + "openpype.pipeline.workfile.get_custom_workfile_template_by_string_context" +) def get_custom_workfile_template_by_string_context( template_profiles, project_name, asset_name, task_name, dbcon=None, anatomy=None @@ -1767,6 +976,9 @@ def get_custom_workfile_template_by_string_context( Returns: str: Path to template or None if none of profiles match current context. (Existence of formatted path is not validated.) + + Deprecated: + Function will be removed after release version 3.16.* """ project_name = None @@ -1788,7 +1000,7 @@ def get_custom_workfile_template_by_string_context( ) -@with_pipeline_io +@deprecated("openpype.pipeline.context_tools.get_custom_workfile_template") def get_custom_workfile_template(template_profiles): """Filter and fill workfile template profiles by current context. @@ -1801,8 +1013,13 @@ def get_custom_workfile_template(template_profiles): Returns: str: Path to template or None if none of profiles match current context. (Existence of formatted path is not validated.) + + Deprecated: + Function will be removed after release version 3.16.* """ + from openpype.pipeline import legacy_io + return get_custom_workfile_template_by_string_context( template_profiles, legacy_io.Session["AVALON_PROJECT"], @@ -1812,6 +1029,7 @@ def get_custom_workfile_template(template_profiles): ) +@deprecated("openpype.pipeline.workfile.get_last_workfile_with_version") def get_last_workfile_with_version( workdir, file_template, fill_data, extensions ): @@ -1826,79 +1044,19 @@ def get_last_workfile_with_version( Returns: tuple: Last workfile with version if there is any otherwise returns (None, None). + + Deprecated: + Function will be removed after release version 3.16.* """ - if not os.path.exists(workdir): - return None, None - # Fast match on extension - filenames = [ - filename - for filename in os.listdir(workdir) - if os.path.splitext(filename)[1] in extensions - ] + from openpype.pipeline.workfile import get_last_workfile_with_version - # Build template without optionals, version to digits only regex - # and comment to any definable value. - _ext = [] - for ext in extensions: - if not ext.startswith("."): - ext = "." + ext - # Escape dot for regex - ext = "\\" + ext - _ext.append(ext) - ext_expression = "(?:" + "|".join(_ext) + ")" - - # Replace `.{ext}` with `{ext}` so we are sure there is not dot at the end - file_template = re.sub(r"\.?{ext}", ext_expression, file_template) - # Replace optional keys with optional content regex - file_template = re.sub(r"<.*?>", r".*?", file_template) - # Replace `{version}` with group regex - file_template = re.sub(r"{version.*?}", r"([0-9]+)", file_template) - file_template = re.sub(r"{comment.*?}", r".+?", file_template) - file_template = StringTemplate.format_strict_template( - file_template, fill_data + return get_last_workfile_with_version( + workdir, file_template, fill_data, extensions ) - # Match with ignore case on Windows due to the Windows - # OS not being case-sensitive. This avoids later running - # into the error that the file did exist if it existed - # with a different upper/lower-case. 
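The removed `get_last_workfile_with_version` body turns the workfile template into a regular expression so the version can be parsed back from files on disk. A condensed sketch of that conversion, with an illustrative template and file name:

```python
import re


def build_workfile_regex(file_template, fill_data, extensions):
    """Build a regex from a simplified workfile template. The version ends
    up in capture group 1, mirroring the substitutions removed above."""
    ext_expr = "(?:" + "|".join(re.escape(ext) for ext in extensions) + ")"
    pattern = re.sub(r"\.?\{ext\}", lambda _: ext_expr, file_template)
    pattern = re.sub(r"<.*?>", r".*?", pattern)              # optional parts
    pattern = re.sub(r"\{version.*?\}", r"([0-9]+)", pattern)
    pattern = re.sub(r"\{comment.*?\}", r".+?", pattern)
    return pattern.format(**fill_data)                       # remaining keys


regex = build_workfile_regex(
    "{asset}_{task}_v{version:0>3}<_{comment}>.{ext}",
    {"asset": "sh010", "task": "anim"},
    [".ma", ".mb"],
)
match = re.match(regex, "sh010_anim_v012_retime.ma")
print(int(match.group(1)))  # 12
```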
- kwargs = {} - if platform.system().lower() == "windows": - kwargs["flags"] = re.IGNORECASE - - # Get highest version among existing matching files - version = None - output_filenames = [] - for filename in sorted(filenames): - match = re.match(file_template, filename, **kwargs) - if not match: - continue - - file_version = int(match.group(1)) - if version is None or file_version > version: - output_filenames[:] = [] - version = file_version - - if file_version == version: - output_filenames.append(filename) - - output_filename = None - if output_filenames: - if len(output_filenames) == 1: - output_filename = output_filenames[0] - else: - last_time = None - for _output_filename in output_filenames: - full_path = os.path.join(workdir, _output_filename) - mod_time = os.path.getmtime(full_path) - if last_time is None or last_time < mod_time: - output_filename = _output_filename - last_time = mod_time - - return output_filename, version - +@deprecated("openpype.pipeline.workfile.get_last_workfile") def get_last_workfile( workdir, file_template, fill_data, extensions, full_path=False ): @@ -1915,23 +1073,16 @@ def get_last_workfile( Returns: str: Last or first workfile as filename of full path to filename. + + Deprecated: + Function will be removed after release version 3.16.* """ - filename, version = get_last_workfile_with_version( - workdir, file_template, fill_data, extensions + + from openpype.pipeline.workfile import get_last_workfile + + return get_last_workfile( + workdir, file_template, fill_data, extensions, full_path ) - if filename is None: - data = copy.deepcopy(fill_data) - data["version"] = 1 - data.pop("comment", None) - if not data.get("ext"): - data["ext"] = extensions[0] - data["ext"] = data["ext"].replace('.', '') - filename = StringTemplate.format_strict_template(file_template, data) - - if full_path: - return os.path.normpath(os.path.join(workdir, filename)) - - return filename @with_pipeline_io diff --git a/openpype/lib/config.py b/openpype/lib/config.py index 57e8efa57d..26822649e4 100644 --- a/openpype/lib/config.py +++ b/openpype/lib/config.py @@ -1,82 +1,41 @@ -# -*- coding: utf-8 -*- -"""Get configuration data.""" -import datetime +import warnings +import functools -def get_datetime_data(datetime_obj=None): - """Returns current datetime data as dictionary. +class ConfigDeprecatedWarning(DeprecationWarning): + pass - Args: - datetime_obj (datetime): Specific datetime object - Returns: - dict: prepared date & time data +def deprecated(func): + """Mark functions as deprecated. - Available keys: - "d" - in shortest possible way. - "dd" - with 2 digits. - "ddd" - shortened week day. e.g.: `Mon`, ... - "dddd" - full name of week day. e.g.: `Monday`, ... - "m" - in shortest possible way. e.g.: `1` if January - "mm" - with 2 digits. - "mmm" - shortened month name. e.g.: `Jan`, ... - "mmmm" - full month name. e.g.: `January`, ... - "yy" - shortened year. e.g.: `19`, `20`, ... - "yyyy" - full year. e.g.: `2019`, `2020`, ... - "H" - shortened hours. - "HH" - with 2 digits. - "h" - shortened hours. - "hh" - with 2 digits. - "ht" - AM or PM. - "M" - shortened minutes. - "MM" - with 2 digits. - "S" - shortened seconds. - "SS" - with 2 digits. + It will result in a warning being emitted when the function is used. 
""" - if not datetime_obj: - datetime_obj = datetime.datetime.now() - - year = datetime_obj.strftime("%Y") - - month = datetime_obj.strftime("%m") - month_name_full = datetime_obj.strftime("%B") - month_name_short = datetime_obj.strftime("%b") - day = datetime_obj.strftime("%d") - - weekday_full = datetime_obj.strftime("%A") - weekday_short = datetime_obj.strftime("%a") - - hours = datetime_obj.strftime("%H") - hours_midday = datetime_obj.strftime("%I") - hour_midday_type = datetime_obj.strftime("%p") - minutes = datetime_obj.strftime("%M") - seconds = datetime_obj.strftime("%S") - - return { - "d": str(int(day)), - "dd": str(day), - "ddd": weekday_short, - "dddd": weekday_full, - "m": str(int(month)), - "mm": str(month), - "mmm": month_name_short, - "mmmm": month_name_full, - "yy": str(year[2:]), - "yyyy": str(year), - "H": str(int(hours)), - "HH": str(hours), - "h": str(int(hours_midday)), - "hh": str(hours_midday), - "ht": hour_midday_type, - "M": str(int(minutes)), - "MM": str(minutes), - "S": str(int(seconds)), - "SS": str(seconds), - } + @functools.wraps(func) + def new_func(*args, **kwargs): + warnings.simplefilter("always", ConfigDeprecatedWarning) + warnings.warn( + ( + "Deprecated import of function '{}'." + " Class was moved to 'openpype.lib.dateutils.{}'." + " Please change your imports." + ).format(func.__name__), + category=ConfigDeprecatedWarning + ) + return func(*args, **kwargs) + return new_func +@deprecated +def get_datetime_data(datetime_obj=None): + from .dateutils import get_datetime_data + + return get_datetime_data(datetime_obj) + + +@deprecated def get_formatted_current_time(): - return datetime.datetime.now().strftime( - "%Y%m%dT%H%M%SZ" - ) + from .dateutils import get_formatted_current_time + + return get_formatted_current_time() diff --git a/openpype/lib/dateutils.py b/openpype/lib/dateutils.py new file mode 100644 index 0000000000..68cd1d1c5b --- /dev/null +++ b/openpype/lib/dateutils.py @@ -0,0 +1,95 @@ +# -*- coding: utf-8 -*- +"""Get configuration data.""" +import datetime + + +def get_datetime_data(datetime_obj=None): + """Returns current datetime data as dictionary. + + Args: + datetime_obj (datetime): Specific datetime object + + Returns: + dict: prepared date & time data + + Available keys: + "d" - in shortest possible way. + "dd" - with 2 digits. + "ddd" - shortened week day. e.g.: `Mon`, ... + "dddd" - full name of week day. e.g.: `Monday`, ... + "m" - in shortest possible way. e.g.: `1` if January + "mm" - with 2 digits. + "mmm" - shortened month name. e.g.: `Jan`, ... + "mmmm" - full month name. e.g.: `January`, ... + "yy" - shortened year. e.g.: `19`, `20`, ... + "yyyy" - full year. e.g.: `2019`, `2020`, ... + "H" - shortened hours. + "HH" - with 2 digits. + "h" - shortened hours. + "hh" - with 2 digits. + "ht" - AM or PM. + "M" - shortened minutes. + "MM" - with 2 digits. + "S" - shortened seconds. + "SS" - with 2 digits. 
+ """ + + if not datetime_obj: + datetime_obj = datetime.datetime.now() + + year = datetime_obj.strftime("%Y") + + month = datetime_obj.strftime("%m") + month_name_full = datetime_obj.strftime("%B") + month_name_short = datetime_obj.strftime("%b") + day = datetime_obj.strftime("%d") + + weekday_full = datetime_obj.strftime("%A") + weekday_short = datetime_obj.strftime("%a") + + hours = datetime_obj.strftime("%H") + hours_midday = datetime_obj.strftime("%I") + hour_midday_type = datetime_obj.strftime("%p") + minutes = datetime_obj.strftime("%M") + seconds = datetime_obj.strftime("%S") + + return { + "d": str(int(day)), + "dd": str(day), + "ddd": weekday_short, + "dddd": weekday_full, + "m": str(int(month)), + "mm": str(month), + "mmm": month_name_short, + "mmmm": month_name_full, + "yy": str(year[2:]), + "yyyy": str(year), + "H": str(int(hours)), + "HH": str(hours), + "h": str(int(hours_midday)), + "hh": str(hours_midday), + "ht": hour_midday_type, + "M": str(int(minutes)), + "MM": str(minutes), + "S": str(int(seconds)), + "SS": str(seconds), + } + + +def get_timestamp(datetime_obj=None): + """Get standardized timestamp from datetime object. + + Args: + datetime_obj (datetime.datetime): Object of datetime. Current time + is used if not passed. + """ + + if datetime_obj is None: + datetime_obj = datetime.datetime.now() + return datetime_obj.strftime( + "%Y%m%dT%H%M%SZ" + ) + + +def get_formatted_current_time(): + return get_timestamp() diff --git a/openpype/lib/execute.py b/openpype/lib/execute.py index c3e35772f3..f1f2a4fa0a 100644 --- a/openpype/lib/execute.py +++ b/openpype/lib/execute.py @@ -5,7 +5,7 @@ import platform import json import tempfile -from .log import PypeLogger as Logger +from .log import Logger from .vendor_bin_utils import find_executable # MSDN process creation flag (Windows only) @@ -40,7 +40,7 @@ def execute(args, log_levels = ['DEBUG:', 'INFO:', 'ERROR:', 'WARNING:', 'CRITICAL:'] - log = Logger().get_logger('execute') + log = Logger.get_logger('execute') log.info("Executing ({})".format(" ".join(args))) popen = subprocess.Popen( args, diff --git a/openpype/lib/file_transaction.py b/openpype/lib/file_transaction.py new file mode 100644 index 0000000000..1626bec6b6 --- /dev/null +++ b/openpype/lib/file_transaction.py @@ -0,0 +1,171 @@ +import os +import logging +import sys +import errno +import six + +from openpype.lib import create_hard_link + +# this is needed until speedcopy for linux is fixed +if sys.platform == "win32": + from speedcopy import copyfile +else: + from shutil import copyfile + + +class FileTransaction(object): + """ + + The file transaction is a three step process. + + 1) Rename any existing files to a "temporary backup" during `process()` + 2) Copy the files to final destination during `process()` + 3) Remove any backed up files (*no rollback possible!) during `finalize()` + + Step 3 is done during `finalize()`. If not called the .bak files will + remain on disk. + + These steps try to ensure that we don't overwrite half of any existing + files e.g. if they are currently in use. + + Note: + A regular filesystem is *not* a transactional file system and even + though this implementation tries to produce a 'safe copy' with a + potential rollback do keep in mind that it's inherently unsafe due + to how filesystem works and a myriad of things could happen during + the transaction that break the logic. A file storage could go down, + permissions could be changed, other machines could be moving or writing + files. A lot can happen. 
+ + Warning: + Any folders created during the transfer will not be removed. + + """ + + MODE_COPY = 0 + MODE_HARDLINK = 1 + + def __init__(self, log=None): + + if log is None: + log = logging.getLogger("FileTransaction") + + self.log = log + + # The transfer queue + # todo: make this an actual FIFO queue? + self._transfers = {} + + # Destination file paths that a file was transferred to + self._transferred = [] + + # Backup file location mapping to original locations + self._backup_to_original = {} + + def add(self, src, dst, mode=MODE_COPY): + """Add a new file to transfer queue""" + opts = {"mode": mode} + + src = os.path.abspath(src) + dst = os.path.abspath(dst) + + if dst in self._transfers: + queued_src = self._transfers[dst][0] + if src == queued_src: + self.log.debug("File transfer was already " + "in queue: {} -> {}".format(src, dst)) + return + else: + self.log.warning("File transfer in queue replaced..") + self.log.debug("Removed from queue: " + "{} -> {}".format(queued_src, dst)) + self.log.debug("Added to queue: {} -> {}".format(src, dst)) + + self._transfers[dst] = (src, opts) + + def process(self): + + # Backup any existing files + for dst in self._transfers.keys(): + if os.path.exists(dst): + # Backup original file + # todo: add timestamp or uuid to ensure unique + backup = dst + ".bak" + self._backup_to_original[backup] = dst + self.log.debug("Backup existing file: " + "{} -> {}".format(dst, backup)) + os.rename(dst, backup) + + # Copy the files to transfer + for dst, (src, opts) in self._transfers.items(): + self._create_folder_for_file(dst) + + if opts["mode"] == self.MODE_COPY: + self.log.debug("Copying file ... {} -> {}".format(src, dst)) + copyfile(src, dst) + elif opts["mode"] == self.MODE_HARDLINK: + self.log.debug("Hardlinking file ... 
{} -> {}".format(src, + dst)) + create_hard_link(src, dst) + + self._transferred.append(dst) + + def finalize(self): + # Delete any backed up files + for backup in self._backup_to_original.keys(): + try: + os.remove(backup) + except OSError: + self.log.error("Failed to remove backup file: " + "{}".format(backup), + exc_info=True) + + def rollback(self): + + errors = 0 + + # Rollback any transferred files + for path in self._transferred: + try: + os.remove(path) + except OSError: + errors += 1 + self.log.error("Failed to rollback created file: " + "{}".format(path), + exc_info=True) + + # Rollback the backups + for backup, original in self._backup_to_original.items(): + try: + os.rename(backup, original) + except OSError: + errors += 1 + self.log.error("Failed to restore original file: " + "{} -> {}".format(backup, original), + exc_info=True) + + if errors: + self.log.error("{} errors occurred during " + "rollback.".format(errors), exc_info=True) + six.reraise(*sys.exc_info()) + + @property + def transferred(self): + """Return the processed transfers destination paths""" + return list(self._transferred) + + @property + def backups(self): + """Return the backup file paths""" + return list(self._backup_to_original.keys()) + + def _create_folder_for_file(self, path): + dirname = os.path.dirname(path) + try: + os.makedirs(dirname) + except OSError as e: + if e.errno == errno.EEXIST: + pass + else: + self.log.critical("An unexpected error occurred.") + six.reraise(*sys.exc_info()) diff --git a/openpype/lib/git_progress.py b/openpype/lib/git_progress.py deleted file mode 100644 index 331b7b6745..0000000000 --- a/openpype/lib/git_progress.py +++ /dev/null @@ -1,86 +0,0 @@ -import git -from tqdm import tqdm - - -class _GitProgress(git.remote.RemoteProgress): - """ Class handling displaying progress during git operations. - - This is using **tqdm** for showing progress bars. As **GitPython** - is parsing progress directly from git command, it is somehow unreliable - as in some operations it is difficult to get total count of iterations - to display meaningful progress bar. - - """ - _t = None - _code = 0 - _current_status = '' - _current_max = '' - - _description = { - 256: "Checking out files", - 4: "Counting objects", - 128: "Finding sources", - 32: "Receiving objects", - 64: "Resolving deltas", - 16: "Writing objects" - } - - def __init__(self): - super().__init__() - - def __del__(self): - if self._t is not None: - self._t.close() - - def _detroy_tqdm(self): - """ Used to close tqdm when operation ended. - - """ - if self._t is not None: - self._t.close() - self._t = None - - def _check_mask(self, opcode: int) -> bool: - """" Add meaningful description to **GitPython** opcodes. - - :param opcode: OP_MASK opcode - :type opcode: int - :return: String description of opcode - :rtype: str - - .. seealso:: For opcodes look at :class:`git.RemoteProgress` - - """ - if opcode & self.COUNTING: - return self._description.get(self.COUNTING) - elif opcode & self.CHECKING_OUT: - return self._description.get(self.CHECKING_OUT) - elif opcode & self.WRITING: - return self._description.get(self.WRITING) - elif opcode & self.RECEIVING: - return self._description.get(self.RECEIVING) - elif opcode & self.RESOLVING: - return self._description.get(self.RESOLVING) - elif opcode & self.FINDING_SOURCES: - return self._description.get(self.FINDING_SOURCES) - else: - return "Processing" - - def update(self, op_code, cur_count, max_count=None, message=''): - """ Called when git operation update progress. - - .. 
seealso:: For more details see - :func:`git.objects.submodule.base.Submodule.update` - `Documentation `_ - - """ - code = self._check_mask(op_code) - if self._current_status != code or self._current_max != max_count: - self._current_max = max_count - self._current_status = code - self._detroy_tqdm() - self._t = tqdm(total=max_count) - self._t.set_description(" . {}".format(code)) - - self._t.update(cur_count) diff --git a/openpype/lib/local_settings.py b/openpype/lib/local_settings.py index 97e99b4b5a..c6c9699240 100644 --- a/openpype/lib/local_settings.py +++ b/openpype/lib/local_settings.py @@ -34,7 +34,7 @@ from openpype.settings import ( get_system_settings ) -from .import validate_mongo_connection +from openpype.client.mongo import validate_mongo_connection _PLACEHOLDER = object() diff --git a/openpype/lib/log.py b/openpype/lib/log.py index 2cdb7ec8e4..26dcd86eec 100644 --- a/openpype/lib/log.py +++ b/openpype/lib/log.py @@ -24,12 +24,13 @@ import traceback import threading import copy -from . import Terminal -from .mongo import ( +from openpype.client.mongo import ( MongoEnvNotSet, get_default_components, - OpenPypeMongoConnection + OpenPypeMongoConnection, ) +from . import Terminal + try: import log4mongo from log4mongo.handlers import MongoHandler @@ -41,13 +42,13 @@ except ImportError: USE_UNICODE = hasattr(__builtins__, "unicode") -class PypeStreamHandler(logging.StreamHandler): +class LogStreamHandler(logging.StreamHandler): """ StreamHandler class designed to handle utf errors in python 2.x hosts. """ def __init__(self, stream=None): - super(PypeStreamHandler, self).__init__(stream) + super(LogStreamHandler, self).__init__(stream) self.enabled = True def enable(self): @@ -56,7 +57,6 @@ class PypeStreamHandler(logging.StreamHandler): Used to silence output """ self.enabled = True - pass def disable(self): """ Disable StreamHandler @@ -107,13 +107,13 @@ class PypeStreamHandler(logging.StreamHandler): self.handleError(record) -class PypeFormatter(logging.Formatter): +class LogFormatter(logging.Formatter): DFT = '%(levelname)s >>> { %(name)s }: [ %(message)s ]' default_formatter = logging.Formatter(DFT) def __init__(self, formats): - super(PypeFormatter, self).__init__() + super(LogFormatter, self).__init__() self.formatters = {} for loglevel in formats: self.formatters[loglevel] = logging.Formatter(formats[loglevel]) @@ -141,7 +141,7 @@ class PypeFormatter(logging.Formatter): return out -class PypeMongoFormatter(logging.Formatter): +class MongoFormatter(logging.Formatter): DEFAULT_PROPERTIES = logging.LogRecord( '', '', '', '', '', '', '', '').__dict__.keys() @@ -161,7 +161,7 @@ class PypeMongoFormatter(logging.Formatter): 'method': record.funcName, 'lineNumber': record.lineno } - document.update(PypeLogger.get_process_data()) + document.update(Logger.get_process_data()) # Standard document decorated with exception info if record.exc_info is not None: @@ -181,7 +181,7 @@ class PypeMongoFormatter(logging.Formatter): return document -class PypeLogger: +class Logger: DFT = '%(levelname)s >>> { %(name)s }: [ %(message)s ] ' DBG = " - { %(name)s }: [ %(message)s ] " INF = ">>> [ %(message)s ] " @@ -239,7 +239,7 @@ class PypeLogger: for handler in logger.handlers: if isinstance(handler, MongoHandler): add_mongo_handler = False - elif isinstance(handler, PypeStreamHandler): + elif isinstance(handler, LogStreamHandler): add_console_handler = False if add_console_handler: @@ -292,7 +292,7 @@ class PypeLogger: "username": components["username"], "password": components["password"], "capped": 
True, - "formatter": PypeMongoFormatter() + "formatter": MongoFormatter() } if components["port"] is not None: kwargs["port"] = int(components["port"]) @@ -303,10 +303,10 @@ class PypeLogger: @classmethod def _get_console_handler(cls): - formatter = PypeFormatter(cls.FORMAT_FILE) - console_handler = PypeStreamHandler() + formatter = LogFormatter(cls.FORMAT_FILE) + console_handler = LogStreamHandler() - console_handler.set_name("PypeStreamHandler") + console_handler.set_name("LogStreamHandler") console_handler.setFormatter(formatter) return console_handler @@ -417,9 +417,9 @@ class PypeLogger: def get_process_name(cls): """Process name that is like "label" of a process. - Pype's logging can be used from pype itseld of from hosts. Even in Pype - it's good to know if logs are from Pype tray or from pype's event - server. This should help to identify that information. + OpenPype's logging can be used from OpenPyppe itself of from hosts. + Even in OpenPype process it's good to know if logs are from tray or + from other cli commands. This should help to identify that information. """ if cls._process_name is not None: return cls._process_name @@ -485,23 +485,19 @@ class PypeLogger: return OpenPypeMongoConnection.get_mongo_client() -def timeit(method): - """Print time in function. - - For debugging. +class PypeLogger(Logger): + """Duplicate of 'Logger'. + Deprecated: + Class will be removed after release version 3.16.* """ - log = logging.getLogger() - def timed(*args, **kw): - ts = time.time() - result = method(*args, **kw) - te = time.time() - if 'log_time' in kw: - name = kw.get('log_name', method.__name__.upper()) - kw['log_time'][name] = int((te - ts) * 1000) - else: - log.debug('%r %2.2f ms' % (method.__name__, (te - ts) * 1000)) - print('%r %2.2f ms' % (method.__name__, (te - ts) * 1000)) - return result - return timed + @classmethod + def get_logger(cls, *args, **kwargs): + logger = Logger.get_logger(*args, **kwargs) + # TODO uncomment when replaced most of places + logger.warning(( + "'openpype.lib.PypeLogger' is deprecated class." + " Please use 'openpype.lib.Logger' instead." + )) + return logger diff --git a/openpype/lib/mongo.py b/openpype/lib/mongo.py index c08e76c75c..bb2ee6016a 100644 --- a/openpype/lib/mongo.py +++ b/openpype/lib/mongo.py @@ -1,206 +1,61 @@ -import os -import sys -import time -import logging -import pymongo -import certifi - -if sys.version_info[0] == 2: - from urlparse import urlparse, parse_qs -else: - from urllib.parse import urlparse, parse_qs +import warnings +import functools +from openpype.client.mongo import ( + MongoEnvNotSet, + OpenPypeMongoConnection, +) -class MongoEnvNotSet(Exception): +class MongoDeprecatedWarning(DeprecationWarning): pass -def _decompose_url(url): - """Decompose mongo url to basic components. +def mongo_deprecated(func): + """Mark functions as deprecated. - Used for creation of MongoHandler which expect mongo url components as - separated kwargs. Components are at the end not used as we're setting - connection directly this is just a dumb components for MongoHandler - validation pass. + It will result in a warning being emitted when the function is used. 
""" - # Use first url from passed url - # - this is because it is possible to pass multiple urls for multiple - # replica sets which would crash on urlparse otherwise - # - please don't use comma in username of password - url = url.split(",")[0] - components = { - "scheme": None, - "host": None, - "port": None, - "username": None, - "password": None, - "auth_db": None - } - result = urlparse(url) - if result.scheme is None: - _url = "mongodb://{}".format(url) - result = urlparse(_url) - - components["scheme"] = result.scheme - components["host"] = result.hostname - try: - components["port"] = result.port - except ValueError: - raise RuntimeError("invalid port specified") - components["username"] = result.username - components["password"] = result.password - - try: - components["auth_db"] = parse_qs(result.query)['authSource'][0] - except KeyError: - # no auth db provided, mongo will use the one we are connecting to - pass - - return components - - -def get_default_components(): - mongo_url = os.environ.get("OPENPYPE_MONGO") - if mongo_url is None: - raise MongoEnvNotSet( - "URL for Mongo logging connection is not set." + @functools.wraps(func) + def new_func(*args, **kwargs): + warnings.simplefilter("always", MongoDeprecatedWarning) + warnings.warn( + ( + "Call to deprecated function '{}'." + " Function was moved to 'openpype.client.mongo'." + ).format(func.__name__), + category=MongoDeprecatedWarning, + stacklevel=2 ) - return _decompose_url(mongo_url) + return func(*args, **kwargs) + return new_func +@mongo_deprecated +def get_default_components(): + from openpype.client.mongo import get_default_components + + return get_default_components() + + +@mongo_deprecated def should_add_certificate_path_to_mongo_url(mongo_url): - """Check if should add ca certificate to mongo url. + from openpype.client.mongo import should_add_certificate_path_to_mongo_url - Since 30.9.2021 cloud mongo requires newer certificates that are not - available on most of workstation. This adds path to certifi certificate - which is valid for it. To add the certificate path url must have scheme - 'mongodb+srv' or has 'ssl=true' or 'tls=true' in url query. - """ - parsed = urlparse(mongo_url) - query = parse_qs(parsed.query) - lowered_query_keys = set(key.lower() for key in query.keys()) - add_certificate = False - # Check if url 'ssl' or 'tls' are set to 'true' - for key in ("ssl", "tls"): - if key in query and "true" in query["ssl"]: - add_certificate = True - break - - # Check if url contains 'mongodb+srv' - if not add_certificate and parsed.scheme == "mongodb+srv": - add_certificate = True - - # Check if url does already contain certificate path - if add_certificate and "tlscafile" in lowered_query_keys: - add_certificate = False - - return add_certificate + return should_add_certificate_path_to_mongo_url(mongo_url) +@mongo_deprecated def validate_mongo_connection(mongo_uri): - """Check if provided mongodb URL is valid. + from openpype.client.mongo import validate_mongo_connection - Args: - mongo_uri (str): URL to validate. - - Raises: - ValueError: When port in mongo uri is not valid. - pymongo.errors.InvalidURI: If passed mongo is invalid. - pymongo.errors.ServerSelectionTimeoutError: If connection timeout - passed so probably couldn't connect to mongo server. - - """ - client = OpenPypeMongoConnection.create_connection( - mongo_uri, retry_attempts=1 - ) - client.close() + return validate_mongo_connection(mongo_uri) -class OpenPypeMongoConnection: - """Singleton MongoDB connection. 
- - Keeps MongoDB connections by url. - """ - mongo_clients = {} - log = logging.getLogger("OpenPypeMongoConnection") - - @staticmethod - def get_default_mongo_url(): - return os.environ["OPENPYPE_MONGO"] - - @classmethod - def get_mongo_client(cls, mongo_url=None): - if mongo_url is None: - mongo_url = cls.get_default_mongo_url() - - connection = cls.mongo_clients.get(mongo_url) - if connection: - # Naive validation of existing connection - try: - connection.server_info() - with connection.start_session(): - pass - except Exception: - connection = None - - if not connection: - cls.log.debug("Creating mongo connection to {}".format(mongo_url)) - connection = cls.create_connection(mongo_url) - cls.mongo_clients[mongo_url] = connection - - return connection - - @classmethod - def create_connection(cls, mongo_url, timeout=None, retry_attempts=None): - parsed = urlparse(mongo_url) - # Force validation of scheme - if parsed.scheme not in ["mongodb", "mongodb+srv"]: - raise pymongo.errors.InvalidURI(( - "Invalid URI scheme:" - " URI must begin with 'mongodb://' or 'mongodb+srv://'" - )) - - if timeout is None: - timeout = int(os.environ.get("AVALON_TIMEOUT") or 1000) - - kwargs = { - "serverSelectionTimeoutMS": timeout - } - if should_add_certificate_path_to_mongo_url(mongo_url): - kwargs["ssl_ca_certs"] = certifi.where() - - mongo_client = pymongo.MongoClient(mongo_url, **kwargs) - - if retry_attempts is None: - retry_attempts = 3 - - elif not retry_attempts: - retry_attempts = 1 - - last_exc = None - valid = False - t1 = time.time() - for attempt in range(1, retry_attempts + 1): - try: - mongo_client.server_info() - with mongo_client.start_session(): - pass - valid = True - break - - except Exception as exc: - last_exc = exc - if attempt < retry_attempts: - cls.log.warning( - "Attempt {} failed. Retrying... ".format(attempt) - ) - time.sleep(1) - - if not valid: - raise last_exc - - cls.log.info("Connected to {}, delay {:.3f}s".format( - mongo_url, time.time() - t1 - )) - return mongo_client +__all__ = ( + "MongoEnvNotSet", + "OpenPypeMongoConnection", + "get_default_components", + "should_add_certificate_path_to_mongo_url", + "validate_mongo_connection", +) diff --git a/openpype/lib/path_templates.py b/openpype/lib/path_templates.py index c1282016ef..b160054e38 100644 --- a/openpype/lib/path_templates.py +++ b/openpype/lib/path_templates.py @@ -6,11 +6,6 @@ import collections import six -from .log import PypeLogger - -log = PypeLogger.get_logger(__name__) - - KEY_PATTERN = re.compile(r"(\{.*?[^{0]*\})") KEY_PADDING_PATTERN = re.compile(r"([^:]+)\S+[><]\S+") SUB_DICT_PATTERN = re.compile(r"([^\[\]]+)") @@ -211,15 +206,28 @@ class StringTemplate(object): if counted_symb > -1: parts = tmp_parts.pop(counted_symb) counted_symb -= 1 + # If part contains only single string keep value + # unchanged if parts: # Remove optional start char parts.pop(0) - if counted_symb < 0: - out_parts = new_parts - else: - out_parts = tmp_parts[counted_symb] - # Store temp parts - out_parts.append(OptionalPart(parts)) + + if not parts: + value = "<>" + elif ( + len(parts) == 1 + and isinstance(parts[0], six.string_types) + ): + value = "<{}>".format(parts[0]) + else: + value = OptionalPart(parts) + + if counted_symb < 0: + out_parts = new_parts + else: + out_parts = tmp_parts[counted_symb] + # Store value + out_parts.append(value) continue if counted_symb < 0: @@ -793,6 +801,7 @@ class OptionalPart: parts(list): Parts of template. Can contain 'str', 'OptionalPart' or 'FormattingPart'. 
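`OptionalPart` represents the `<...>` segments of a template, which are meant to disappear when their keys cannot be filled. As a rough standalone illustration of that idea (not the exact behaviour of `StringTemplate`):

```python
import re


def fill_with_optionals(template, data):
    """Fill '{key}' placeholders; '<...>' segments are dropped entirely
    when any key inside them is missing (concept sketch only)."""
    def resolve_optional(match):
        try:
            return match.group(1).format(**data)
        except KeyError:
            return ""

    resolved = re.sub(r"<([^<>]*)>", resolve_optional, template)
    return resolved.format(**data)


fill = {"asset": "sh010", "task": "anim", "version": 5}
print(fill_with_optionals("{asset}_{task}<_{comment}>_v{version:0>3}", fill))
# sh010_anim_v005
print(fill_with_optionals(
    "{asset}_{task}<_{comment}>_v{version:0>3}", dict(fill, comment="retime")))
# sh010_anim_retime_v005
```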
""" + def __init__(self, parts): self._parts = parts diff --git a/openpype/lib/plugin_tools.py b/openpype/lib/plugin_tools.py index 1d3c1eec6b..81d268ea1c 100644 --- a/openpype/lib/plugin_tools.py +++ b/openpype/lib/plugin_tools.py @@ -1,29 +1,72 @@ # -*- coding: utf-8 -*- """Avalon/Pyblish plugin tools.""" import os -import inspect import logging import re import json +import warnings +import functools + from openpype.client import get_asset_by_id from openpype.settings import get_project_settings -from .profiles_filtering import filter_profiles - log = logging.getLogger(__name__) -# Subset name template used when plugin does not have defined any -DEFAULT_SUBSET_TEMPLATE = "{family}{Variant}" + +class PluginToolsDeprecatedWarning(DeprecationWarning): + pass -class TaskNotSetError(KeyError): - def __init__(self, msg=None): - if not msg: - msg = "Creator's subset name template requires task name." - super(TaskNotSetError, self).__init__(msg) +def deprecated(new_destination): + """Mark functions as deprecated. + + It will result in a warning being emitted when the function is used. + """ + + func = None + if callable(new_destination): + func = new_destination + new_destination = None + + def _decorator(decorated_func): + if new_destination is None: + warning_message = ( + " Please check content of deprecated function to figure out" + " possible replacement." + ) + else: + warning_message = " Please replace your usage with '{}'.".format( + new_destination + ) + + @functools.wraps(decorated_func) + def wrapper(*args, **kwargs): + warnings.simplefilter("always", PluginToolsDeprecatedWarning) + warnings.warn( + ( + "Call to deprecated function '{}'" + "\nFunction was moved or removed.{}" + ).format(decorated_func.__name__, warning_message), + category=PluginToolsDeprecatedWarning, + stacklevel=4 + ) + return decorated_func(*args, **kwargs) + return wrapper + + if func is None: + return _decorator + return _decorator(func) +@deprecated("openpype.pipeline.create.TaskNotSetError") +def TaskNotSetError(*args, **kwargs): + from openpype.pipeline.create import TaskNotSetError + + return TaskNotSetError(*args, **kwargs) + + +@deprecated("openpype.pipeline.create.get_subset_name") def get_subset_name_with_asset_doc( family, variant, @@ -62,61 +105,22 @@ def get_subset_name_with_asset_doc( dbcon (AvalonMongoDB): Mongo connection to be able query asset document if 'asset_doc' is not passed. 
""" - if not family: - return "" - if not host_name: - host_name = os.environ["AVALON_APP"] + from openpype.pipeline.create import get_subset_name - # Use only last part of class family value split by dot (`.`) - family = family.rsplit(".", 1)[-1] - - if project_name is None: - from openpype.pipeline import legacy_io - - project_name = legacy_io.Session["AVALON_PROJECT"] - - asset_tasks = asset_doc.get("data", {}).get("tasks") or {} - task_info = asset_tasks.get(task_name) or {} - task_type = task_info.get("type") - - # Get settings - tools_settings = get_project_settings(project_name)["global"]["tools"] - profiles = tools_settings["creator"]["subset_name_profiles"] - filtering_criteria = { - "families": family, - "hosts": host_name, - "tasks": task_name, - "task_types": task_type - } - - matching_profile = filter_profiles(profiles, filtering_criteria) - template = None - if matching_profile: - template = matching_profile["template"] - - # Make sure template is set (matching may have empty string) - if not template: - template = default_template or DEFAULT_SUBSET_TEMPLATE - - # Simple check of task name existence for template with {task} in - # - missing task should be possible only in Standalone publisher - if not task_name and "{task" in template.lower(): - raise TaskNotSetError() - - fill_pairs = { - "variant": variant, - "family": family, - "task": task_name - } - if dynamic_data: - # Dynamic data may override default values - for key, value in dynamic_data.items(): - fill_pairs[key] = value - - return template.format(**prepare_template_data(fill_pairs)) + return get_subset_name( + family, + variant, + task_name, + asset_doc, + project_name, + host_name, + default_template, + dynamic_data + ) +@deprecated def get_subset_name( family, variant, @@ -136,16 +140,18 @@ def get_subset_name( `get_subset_name_with_asset_doc` where asset document is expected. """ + from openpype.pipeline.create import get_subset_name + if project_name is None: project_name = dbcon.project_name asset_doc = get_asset_by_id(project_name, asset_id, fields=["data.tasks"]) - return get_subset_name_with_asset_doc( + return get_subset_name( family, variant, task_name, - asset_doc or {}, + asset_doc, project_name, host_name, default_template, @@ -197,6 +203,7 @@ def prepare_template_data(fill_pairs): return fill_data +@deprecated("openpype.pipeline.publish.lib.filter_pyblish_plugins") def filter_pyblish_plugins(plugins): """Filter pyblish plugins by presets. @@ -207,56 +214,16 @@ def filter_pyblish_plugins(plugins): plugins (dict): Dictionary of plugins produced by :mod:`pyblish-base` `discover()` method. 
+ Deprecated: + Function will be removed after release version 3.15.* """ - from pyblish import api - host = api.current_host() + from openpype.pipeline.publish.lib import filter_pyblish_plugins - presets = get_project_settings(os.environ['AVALON_PROJECT']) or {} - # skip if there are no presets to process - if not presets: - return - - # iterate over plugins - for plugin in plugins[:]: - - try: - config_data = presets[host]["publish"][plugin.__name__] - except KeyError: - # host determined from path - file = os.path.normpath(inspect.getsourcefile(plugin)) - file = os.path.normpath(file) - - split_path = file.split(os.path.sep) - if len(split_path) < 4: - log.warning( - 'plugin path too short to extract host {}'.format(file) - ) - continue - - host_from_file = split_path[-4] - plugin_kind = split_path[-2] - - # TODO: change after all plugins are moved one level up - if host_from_file == "openpype": - host_from_file = "global" - - try: - config_data = presets[host_from_file][plugin_kind][plugin.__name__] # noqa: E501 - except KeyError: - continue - - for option, value in config_data.items(): - if option == "enabled" and value is False: - log.info('removing plugin {}'.format(plugin.__name__)) - plugins.remove(plugin) - else: - log.info('setting {}:{} on plugin {}'.format( - option, value, plugin.__name__)) - - setattr(plugin, option, value) + filter_pyblish_plugins(plugins) +@deprecated def set_plugin_attributes_from_settings( plugins, superclass, host_name=None, project_name=None ): @@ -272,7 +239,12 @@ def set_plugin_attributes_from_settings( Value from environment `AVALON_APP` is used if not entered. project_name (str): Name of project for which settings will be loaded. Value from environment `AVALON_PROJECT` is used if not entered. + + Deprecated: + Function will be removed after release version 3.15.* """ + + # Function is not used anymore from openpype.pipeline import LegacyCreator, LoaderPlugin # determine host application to use for finding presets @@ -366,102 +338,3 @@ def source_hash(filepath, *args): time = str(os.path.getmtime(filepath)) size = str(os.path.getsize(filepath)) return "|".join([file_name, time, size] + list(args)).replace(".", ",") - - -def get_unique_layer_name(layers, name): - """ - Gets all layer names and if 'name' is present in them, increases - suffix by 1 (eg. creates unique layer name - for Loader) - Args: - layers (list): of strings, names only - name (string): checked value - - Returns: - (string): name_00X (without version) - """ - names = {} - for layer in layers: - layer_name = re.sub(r'_\d{3}$', '', layer) - if layer_name in names.keys(): - names[layer_name] = names[layer_name] + 1 - else: - names[layer_name] = 1 - occurrences = names.get(name, 0) - - return "{}_{:0>3d}".format(name, occurrences + 1) - - -def get_background_layers(file_url): - """ - Pulls file name from background json file, enrich with folder url for - AE to be able import files. - - Order is important, follows order in json. - - Args: - file_url (str): abs url of background json - - Returns: - (list): of abs paths to images - """ - with open(file_url) as json_file: - data = json.load(json_file) - - layers = list() - bg_folder = os.path.dirname(file_url) - for child in data['children']: - if child.get("filename"): - layers.append(os.path.join(bg_folder, child.get("filename")). - replace("\\", "/")) - else: - for layer in child['children']: - if layer.get("filename"): - layers.append(os.path.join(bg_folder, - layer.get("filename")). 
- replace("\\", "/")) - return layers - - -def parse_json(path): - """Parses json file at 'path' location - - Returns: - (dict) or None if unparsable - Raises: - AsssertionError if 'path' doesn't exist - """ - path = path.strip('\"') - assert os.path.isfile(path), ( - "Path to json file doesn't exist. \"{}\"".format(path) - ) - data = None - with open(path, "r") as json_file: - try: - data = json.load(json_file) - except Exception as exc: - log.error( - "Error loading json: " - "{} - Exception: {}".format(path, exc) - ) - return data - - -def get_batch_asset_task_info(ctx): - """Parses context data from webpublisher's batch metadata - - Returns: - (tuple): asset, task_name (Optional), task_type - """ - task_type = "default_task_type" - task_name = None - asset = None - - if ctx["type"] == "task": - items = ctx["path"].split('/') - asset = items[-2] - task_name = ctx["name"] - task_type = ctx["attributes"]["type"] - else: - asset = ctx["name"] - - return asset, task_name, task_type diff --git a/openpype/lib/transcoding.py b/openpype/lib/transcoding.py index ee9a0f08de..60d5d3ed4a 100644 --- a/openpype/lib/transcoding.py +++ b/openpype/lib/transcoding.py @@ -938,3 +938,40 @@ def convert_ffprobe_fps_value(str_value): fps = int(fps) return str(fps) + + +def convert_ffprobe_fps_to_float(value): + """Convert string value of frame rate to float. + + Copy of 'convert_ffprobe_fps_value' which raises exceptions on invalid + value, does not convert value to string and does not return "Unknown" + string. + + Args: + value (str): Value to be converted. + + Returns: + Float: Converted frame rate in float. If divisor in value is '0' then + '0.0' is returned. + + Raises: + ValueError: Passed value is invalid for conversion. + """ + + if not value: + raise ValueError("Got empty value.") + + items = value.split("/") + if len(items) == 1: + return float(items[0]) + + if len(items) > 2: + raise ValueError(( + "FPS expression contains multiple dividers \"{}\"." + ).format(value)) + + dividend = float(items.pop(0)) + divisor = float(items.pop(0)) + if divisor == 0.0: + return 0.0 + return dividend / divisor diff --git a/openpype/modules/__init__.py b/openpype/modules/__init__.py index 68b5f6c247..02e7dc13ab 100644 --- a/openpype/modules/__init__.py +++ b/openpype/modules/__init__.py @@ -2,7 +2,6 @@ from .base import ( OpenPypeModule, OpenPypeAddOn, - OpenPypeInterface, load_modules, @@ -20,7 +19,6 @@ from .base import ( __all__ = ( "OpenPypeModule", "OpenPypeAddOn", - "OpenPypeInterface", "load_modules", diff --git a/openpype/modules/base.py b/openpype/modules/base.py index b9ccec13cc..09aea50424 100644 --- a/openpype/modules/base.py +++ b/openpype/modules/base.py @@ -13,7 +13,6 @@ from uuid import uuid4 from abc import ABCMeta, abstractmethod import six -import openpype from openpype.settings import ( get_system_settings, SYSTEM_SETTINGS_KEY, @@ -26,7 +25,20 @@ from openpype.settings.lib import ( get_studio_system_settings_overrides, load_json_file ) -from openpype.lib import PypeLogger + +from openpype.lib import ( + Logger, + import_filepath, + import_module_from_dirpath +) + +from .interfaces import ( + OpenPypeInterface, + IPluginPaths, + IHostAddon, + ITrayModule, + ITrayService +) # Files that will be always ignored on modules import IGNORED_FILENAMES = ( @@ -49,6 +61,7 @@ class _ModuleClass(object): Object of this class can be stored to `sys.modules` and used for storing dynamically imported modules. 
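For the `convert_ffprobe_fps_to_float` helper added to `openpype/lib/transcoding.py` above, behaviour on typical ffprobe frame rate strings follows directly from its body:

```python
from openpype.lib.transcoding import convert_ffprobe_fps_to_float

print(convert_ffprobe_fps_to_float("25"))          # 25.0
print(convert_ffprobe_fps_to_float("24000/1001"))  # ~23.976
print(convert_ffprobe_fps_to_float("30/0"))        # 0.0 (zero divisor)

try:
    convert_ffprobe_fps_to_float("")
except ValueError as exc:
    print(exc)  # Got empty value.
```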
""" + def __init__(self, name): # Call setattr on super class super(_ModuleClass, self).__setattr__("name", name) @@ -92,7 +105,7 @@ class _ModuleClass(object): def log(self): if self._log is None: super(_ModuleClass, self).__setattr__( - "_log", PypeLogger.get_logger(self.name) + "_log", Logger.get_logger(self.name) ) return self._log @@ -116,12 +129,13 @@ class _InterfacesClass(_ModuleClass): - this is because interfaces must be available even if are missing implementation """ + def __getattr__(self, attr_name): if attr_name not in self.__attributes__: if attr_name in ("__path__", "__file__"): return None - raise ImportError(( + raise AttributeError(( "cannot import name '{}' from 'openpype_interfaces'" ).format(attr_name)) @@ -138,7 +152,7 @@ class _LoadCache: def get_default_modules_dir(): """Path to default OpenPype modules.""" - current_dir = os.path.abspath(os.path.dirname(__file__)) + current_dir = os.path.dirname(os.path.abspath(__file__)) output = [] for folder_name in ("default_modules", ): @@ -276,19 +290,13 @@ def load_modules(force=False): def _load_modules(): - # Import helper functions from lib - from openpype.lib import ( - import_filepath, - import_module_from_dirpath - ) - # Key under which will be modules imported in `sys.modules` modules_key = "openpype_modules" # Change `sys.modules` sys.modules[modules_key] = openpype_modules = _ModuleClass(modules_key) - log = PypeLogger.get_logger("ModulesLoader") + log = Logger.get_logger("ModulesLoader") # Look for OpenPype modules in paths defined with `get_module_dirs` # - dynamically imported OpenPype modules and addons @@ -296,6 +304,8 @@ def _load_modules(): # Add current directory at first place # - has small differences in import logic current_dir = os.path.abspath(os.path.dirname(__file__)) + hosts_dir = os.path.join(os.path.dirname(current_dir), "hosts") + module_dirs.insert(0, hosts_dir) module_dirs.insert(0, current_dir) processed_paths = set() @@ -312,6 +322,7 @@ def _load_modules(): continue is_in_current_dir = dirpath == current_dir + is_in_host_dir = dirpath == hosts_dir for filename in os.listdir(dirpath): # Ignore filenames if filename in IGNORED_FILENAMES: @@ -351,6 +362,24 @@ def _load_modules(): sys.modules[new_import_str] = default_module setattr(openpype_modules, basename, default_module) + elif is_in_host_dir: + import_str = "openpype.hosts.{}".format(basename) + new_import_str = "{}.{}".format(modules_key, basename) + # Until all hosts are converted to be able use them as + # modules is this error check needed + try: + default_module = __import__( + import_str, fromlist=("", ) + ) + sys.modules[new_import_str] = default_module + setattr(openpype_modules, basename, default_module) + + except Exception: + log.warning( + "Failed to import host folder {}".format(basename), + exc_info=True + ) + elif os.path.isdir(fullpath): import_module_from_dirpath(dirpath, filename, modules_key) @@ -368,31 +397,6 @@ def _load_modules(): log.error(msg, exc_info=True) -class _OpenPypeInterfaceMeta(ABCMeta): - """OpenPypeInterface meta class to print proper string.""" - - def __str__(self): - return "<'OpenPypeInterface.{}'>".format(self.__name__) - - def __repr__(self): - return str(self) - - -@six.add_metaclass(_OpenPypeInterfaceMeta) -class OpenPypeInterface: - """Base class of Interface that can be used as Mixin with abstract parts. - - This is way how OpenPype module or addon can tell that has implementation - for specific part or for other module/addon. 
- - Child classes of OpenPypeInterface may be used as mixin in different - OpenPype modules which means they have to have implemented methods defined - in the interface. By default interface does not have any abstract parts. - """ - - pass - - @six.add_metaclass(ABCMeta) class OpenPypeModule: """Base class of pype module. @@ -417,7 +421,7 @@ class OpenPypeModule: def __init__(self, manager, settings): self.manager = manager - self.log = PypeLogger.get_logger(self.name) + self.log = Logger.get_logger(self.name) self.initialize(settings) @@ -539,6 +543,40 @@ class ModulesManager: self.initialize_modules() self.connect_modules() + def __getitem__(self, module_name): + return self.modules_by_name[module_name] + + def get(self, module_name, default=None): + """Access module by name. + + Args: + module_name (str): Name of module which should be returned. + default (Any): Default output if module is not available. + + Returns: + Union[OpenPypeModule, None]: Module found by name or None. + """ + return self.modules_by_name.get(module_name, default) + + def get_enabled_module(self, module_name, default=None): + """Fast access to enabled module. + + If module is available but is not enabled default value is returned. + + Args: + module_name (str): Name of module which should be returned. + default (Any): Default output if module is not available or is + not enabled. + + Returns: + Union[OpenPypeModule, None]: Enabled module found by name or None. + """ + + module = self.get(module_name) + if module is not None and module.enabled: + return module + return default + def initialize_modules(self): """Import and initialize modules.""" # Make sure modules are loaded @@ -692,8 +730,6 @@ class ModulesManager: and "actions" each containing list of paths. """ # Output structure - from openpype_interfaces import IPluginPaths - output = { "publish": [], "create": [], @@ -750,8 +786,6 @@ class ModulesManager: list: List of creator plugin paths. """ # Output structure - from openpype_interfaces import IPluginPaths - output = [] for module in self.get_enabled_modules(): # Skip module that do not inherit from `IPluginPaths` @@ -766,41 +800,40 @@ class ModulesManager: output.extend(paths) return output - def collect_launch_hook_paths(self): - """Helper to collect hooks from modules inherited ILaunchHookPaths. + def get_host_module(self, host_name): + """Find host module by host name. + + Args: + host_name (str): Host name for which is found host module. Returns: - list: Paths to launch hook directories. + OpenPypeModule: Found host module by name. + None: There was not found module inheriting IHostAddon which has + host name set to passed 'host_name'. """ - from openpype_interfaces import ILaunchHookPaths - str_type = type("") - expected_types = (list, tuple, set) - - output = [] for module in self.get_enabled_modules(): - # Skip module that do not inherit from `ILaunchHookPaths` - if not isinstance(module, ILaunchHookPaths): - continue + if ( + isinstance(module, IHostAddon) + and module.host_name == host_name + ): + return module + return None - hook_paths = module.get_launch_hook_paths() - if not hook_paths: - continue + def get_host_names(self): + """List of available host names based on host modules. - # Convert string to list - if isinstance(hook_paths, str_type): - hook_paths = [hook_paths] + Returns: + Iterable[str]: All available host names based on enabled modules + inheriting 'IHostAddon'. 
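The new dictionary-style access and `get_enabled_module` getter on `ModulesManager` shorten module lookups. A small usage sketch; the module names are only examples:

```python
from openpype.modules.base import ModulesManager

manager = ModulesManager()

# Raises KeyError when no module of that name was discovered at all
deadline_module = manager["deadline"]

# Returns None (or the given default) when the module is missing
sync_server = manager.get("sync_server")

# Returns the module only when it exists and is enabled, otherwise None
ftrack_module = manager.get_enabled_module("ftrack")
if ftrack_module is None:
    print("Ftrack integration is not enabled for this setup.")
```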
+ """ - # Skip invalid types - if not isinstance(hook_paths, expected_types): - self.log.warning(( - "Result of `get_launch_hook_paths`" - " has invalid type {}. Expected {}" - ).format(type(hook_paths), expected_types)) - continue - - output.extend(hook_paths) - return output + host_names = { + module.host_name + for module in self.get_enabled_modules() + if isinstance(module, IHostAddon) + } + return host_names def print_report(self): """Print out report of time spent on modules initialization parts. @@ -937,7 +970,7 @@ class TrayModulesManager(ModulesManager): ) def __init__(self): - self.log = PypeLogger.get_logger(self.__class__.__name__) + self.log = Logger.get_logger(self.__class__.__name__) self.modules = [] self.modules_by_id = {} @@ -976,8 +1009,6 @@ class TrayModulesManager(ModulesManager): self.tray_menu(tray_menu) def get_enabled_tray_modules(self): - from openpype_interfaces import ITrayModule - output = [] for module in self.modules: if module.enabled and isinstance(module, ITrayModule): @@ -1053,8 +1084,6 @@ class TrayModulesManager(ModulesManager): self._report["Tray menu"] = report def start_modules(self): - from openpype_interfaces import ITrayService - report = {} time_start = time.time() prev_start_time = time_start @@ -1113,7 +1142,7 @@ def get_module_settings_defs(): settings_defs = [] - log = PypeLogger.get_logger("ModuleSettingsLoad") + log = Logger.get_logger("ModuleSettingsLoad") for raw_module in openpype_modules: for attr_name in dir(raw_module): diff --git a/openpype/modules/deadline/abstract_submit_deadline.py b/openpype/modules/deadline/abstract_submit_deadline.py index 3f54273a56..0bad981fdf 100644 --- a/openpype/modules/deadline/abstract_submit_deadline.py +++ b/openpype/modules/deadline/abstract_submit_deadline.py @@ -4,6 +4,7 @@ It provides Deadline JobInfo data class. """ +import json.decoder import os from abc import abstractmethod import platform @@ -15,7 +16,12 @@ import attr import requests import pyblish.api -from openpype.pipeline.publish import AbstractMetaInstancePlugin +from openpype.pipeline.publish import ( + AbstractMetaInstancePlugin, + KnownPublishError +) + +JSONDecodeError = getattr(json.decoder, "JSONDecodeError", ValueError) def requests_post(*args, **kwargs): @@ -615,7 +621,7 @@ class AbstractSubmitDeadline(pyblish.api.InstancePlugin): str: resulting Deadline job id. Throws: - RuntimeError: if submission fails. + KnownPublishError: if submission fails. """ url = "{}/api/jobs".format(self._deadline_url) @@ -625,9 +631,16 @@ class AbstractSubmitDeadline(pyblish.api.InstancePlugin): self.log.error(response.status_code) self.log.error(response.content) self.log.debug(payload) - raise RuntimeError(response.text) + raise KnownPublishError(response.text) + + try: + result = response.json() + except JSONDecodeError: + msg = "Broken response {}. ".format(response) + msg += "Try restarting the Deadline Webservice." 
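# The guard in this hunk exists because a failing Deadline Webservice often
# answers with an HTML error page instead of JSON, so response.json() raises
# a decode error. A minimal standalone sketch of the same pattern, assuming
# only `requests` and the standard library (the function name and URL handling
# are illustrative, not taken from the plugin):
import json.decoder

import requests

# Python 2's json module has no JSONDecodeError, so fall back to ValueError.
JSONDecodeError = getattr(json.decoder, "JSONDecodeError", ValueError)


def submit_deadline_job(webservice_url, payload):
    response = requests.post(
        "{}/api/jobs".format(webservice_url), json=payload, timeout=10
    )
    if not response.ok:
        raise RuntimeError(response.text)
    try:
        return response.json()
    except JSONDecodeError:
        # A non-JSON body usually means the Webservice needs a restart.
        raise RuntimeError("Broken response from Deadline Webservice")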
+ self.log.warning(msg, exc_info=True) + raise KnownPublishError("Broken response from DL") - result = response.json() # for submit publish job self._instance.data["deadlineSubmissionJob"] = result diff --git a/openpype/modules/deadline/deadline_module.py b/openpype/modules/deadline/deadline_module.py index c30db75188..bbd0f74e8a 100644 --- a/openpype/modules/deadline/deadline_module.py +++ b/openpype/modules/deadline/deadline_module.py @@ -3,7 +3,7 @@ import requests import six import sys -from openpype.lib import requests_get, PypeLogger +from openpype.lib import requests_get, Logger from openpype.modules import OpenPypeModule from openpype_interfaces import IPluginPaths @@ -58,7 +58,7 @@ class DeadlineModule(OpenPypeModule, IPluginPaths): """ if not log: - log = PypeLogger.get_logger(__name__) + log = Logger.get_logger(__name__) argument = "{}/api/pools?NamesOnly=true".format(webservice) try: diff --git a/openpype/modules/deadline/plugins/publish/submit_aftereffects_deadline.py b/openpype/modules/deadline/plugins/publish/submit_aftereffects_deadline.py index de8df3dd9e..c55f85c8da 100644 --- a/openpype/modules/deadline/plugins/publish/submit_aftereffects_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_aftereffects_deadline.py @@ -80,7 +80,8 @@ class AfterEffectsSubmitDeadline( "AVALON_TASK", "AVALON_APP_NAME", "OPENPYPE_DEV", - "OPENPYPE_LOG_NO_COLORS" + "OPENPYPE_LOG_NO_COLORS", + "OPENPYPE_VERSION" ] # Add mongo url if it's enabled if self._instance.context.data.get("deadlinePassMongoUrl"): diff --git a/openpype/modules/deadline/plugins/publish/submit_harmony_deadline.py b/openpype/modules/deadline/plugins/publish/submit_harmony_deadline.py index a1ee5e0957..3f9c09b592 100644 --- a/openpype/modules/deadline/plugins/publish/submit_harmony_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_harmony_deadline.py @@ -274,7 +274,8 @@ class HarmonySubmitDeadline( "AVALON_TASK", "AVALON_APP_NAME", "OPENPYPE_DEV", - "OPENPYPE_LOG_NO_COLORS" + "OPENPYPE_LOG_NO_COLORS", + "OPENPYPE_VERSION" ] # Add mongo url if it's enabled if self._instance.context.data.get("deadlinePassMongoUrl"): diff --git a/openpype/modules/deadline/plugins/publish/submit_houdini_remote_publish.py b/openpype/modules/deadline/plugins/publish/submit_houdini_remote_publish.py index fdf67b51bc..95856137e2 100644 --- a/openpype/modules/deadline/plugins/publish/submit_houdini_remote_publish.py +++ b/openpype/modules/deadline/plugins/publish/submit_houdini_remote_publish.py @@ -130,6 +130,7 @@ class HoudiniSubmitPublishDeadline(pyblish.api.ContextPlugin): # this application with so the Render Slave can build its own # similar environment using it, e.g. "houdini17.5;pluginx2.3" "AVALON_TOOLS", + "OPENPYPE_VERSION" ] # Add mongo url if it's enabled if context.data.get("deadlinePassMongoUrl"): diff --git a/openpype/modules/deadline/plugins/publish/submit_houdini_render_deadline.py b/openpype/modules/deadline/plugins/publish/submit_houdini_render_deadline.py index aca88c7440..beda753723 100644 --- a/openpype/modules/deadline/plugins/publish/submit_houdini_render_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_houdini_render_deadline.py @@ -101,6 +101,7 @@ class HoudiniSubmitRenderDeadline(pyblish.api.InstancePlugin): # this application with so the Render Slave can build its own # similar environment using it, e.g. 
"maya2018;vray4.x;yeti3.1.9" "AVALON_TOOLS", + "OPENPYPE_VERSION" ] # Add mongo url if it's enabled if context.data.get("deadlinePassMongoUrl"): diff --git a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py index 3707c5709f..7966861358 100644 --- a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py @@ -62,6 +62,7 @@ payload_skeleton_template = { "RenderLayer": None, # Render only this layer "Renderer": None, "ProjectPath": None, # Resolve relative references + "RenderSetupIncludeLights": None, # Include all lights flag. }, "AuxFiles": [] # Mandatory for Deadline, may be empty } @@ -413,8 +414,7 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin): # Gather needed data ------------------------------------------------ default_render_file = instance.context.data.get('project_settings')\ .get('maya')\ - .get('create')\ - .get('CreateRender')\ + .get('RenderSettings')\ .get('default_render_image_folder') filename = os.path.basename(filepath) comment = context.data.get("comment", "") @@ -505,6 +505,7 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin): self.payload_skeleton["JobInfo"]["Comment"] = comment self.payload_skeleton["PluginInfo"]["RenderLayer"] = renderlayer + self.payload_skeleton["PluginInfo"]["RenderSetupIncludeLights"] = instance.data.get("renderSetupIncludeLights") # noqa # Adding file dependencies. dependencies = instance.context.data["fileDependencies"] dependencies.append(filepath) @@ -519,12 +520,14 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin): "FTRACK_API_KEY", "FTRACK_API_USER", "FTRACK_SERVER", + "OPENPYPE_SG_USER", "AVALON_PROJECT", "AVALON_ASSET", "AVALON_TASK", "AVALON_APP_NAME", "OPENPYPE_DEV", - "OPENPYPE_LOG_NO_COLORS" + "OPENPYPE_LOG_NO_COLORS", + "OPENPYPE_VERSION" ] # Add mongo url if it's enabled if instance.context.data.get("deadlinePassMongoUrl"): diff --git a/openpype/modules/deadline/plugins/publish/submit_maya_remote_publish_deadline.py b/openpype/modules/deadline/plugins/publish/submit_maya_remote_publish_deadline.py index 57572fcb24..38ae5d2f7f 100644 --- a/openpype/modules/deadline/plugins/publish/submit_maya_remote_publish_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_maya_remote_publish_deadline.py @@ -5,7 +5,6 @@ from maya import cmds from openpype.pipeline import legacy_io, PublishXmlValidationError from openpype.settings import get_project_settings -import openpype.api import pyblish.api @@ -34,7 +33,9 @@ class MayaSubmitRemotePublishDeadline(pyblish.api.InstancePlugin): targets = ["local"] def process(self, instance): - settings = get_project_settings(os.getenv("AVALON_PROJECT")) + project_name = instance.context.data["projectName"] + # TODO settings can be received from 'context.data["project_settings"]' + settings = get_project_settings(project_name) # use setting for publish job on farm, no reason to have it separately deadline_publish_job_sett = (settings["deadline"] ["publish"] @@ -53,9 +54,6 @@ class MayaSubmitRemotePublishDeadline(pyblish.api.InstancePlugin): scene = instance.context.data["currentFile"] scenename = os.path.basename(scene) - # Get project code - project_name = legacy_io.Session["AVALON_PROJECT"] - job_name = "{scene} [PUBLISH]".format(scene=scenename) batch_name = "{code} - {scene}".format(code=project_name, scene=scenename) @@ -102,13 +100,14 @@ class MayaSubmitRemotePublishDeadline(pyblish.api.InstancePlugin): keys = 
[ "FTRACK_API_USER", "FTRACK_API_KEY", - "FTRACK_SERVER" + "FTRACK_SERVER", + "OPENPYPE_VERSION" ] environment = dict({key: os.environ[key] for key in keys if key in os.environ}, **legacy_io.Session) - # TODO replace legacy_io with context.data ? - environment["AVALON_PROJECT"] = legacy_io.Session["AVALON_PROJECT"] + # TODO replace legacy_io with context.data + environment["AVALON_PROJECT"] = project_name environment["AVALON_ASSET"] = legacy_io.Session["AVALON_ASSET"] environment["AVALON_TASK"] = legacy_io.Session["AVALON_TASK"] environment["AVALON_APP_NAME"] = os.environ.get("AVALON_APP_NAME") diff --git a/openpype/modules/deadline/plugins/publish/submit_nuke_deadline.py b/openpype/modules/deadline/plugins/publish/submit_nuke_deadline.py index 93fb511a34..336a56ec45 100644 --- a/openpype/modules/deadline/plugins/publish/submit_nuke_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_nuke_deadline.py @@ -80,10 +80,6 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin): "Using published scene for render {}".format(script_path) ) - # exception for slate workflow - if "slate" in instance.data["families"]: - submit_frame_start -= 1 - response = self.payload_submit( instance, script_path, @@ -99,10 +95,6 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin): instance.data["publishJobState"] = "Suspended" if instance.data.get("bakingNukeScripts"): - # exception for slate workflow - if "slate" in instance.data["families"]: - submit_frame_start += 1 - for baking_script in instance.data["bakingNukeScripts"]: render_path = baking_script["bakeRenderPath"] script_path = baking_script["bakeScriptPath"] @@ -261,7 +253,8 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin): "PYBLISHPLUGINPATH", "NUKE_PATH", "TOOL_ENV", - "FOUNDRY_LICENSE" + "FOUNDRY_LICENSE", + "OPENPYPE_VERSION" ] # Add mongo url if it's enabled if instance.context.data.get("deadlinePassMongoUrl"): @@ -365,7 +358,7 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin): if not instance.data.get("expectedFiles"): instance.data["expectedFiles"] = [] - dir = os.path.dirname(path) + dirname = os.path.dirname(path) file = os.path.basename(path) if "#" in file: @@ -377,9 +370,12 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin): instance.data["expectedFiles"].append(path) return + if instance.data.get("slate"): + start_frame -= 1 + for i in range(start_frame, (end_frame + 1)): instance.data["expectedFiles"].append( - os.path.join(dir, (file % i)).replace("\\", "/")) + os.path.join(dirname, (file % i)).replace("\\", "/")) def get_limit_groups(self): """Search for limit group nodes and return group name. 
diff --git a/openpype/modules/deadline/plugins/publish/submit_publish_job.py b/openpype/modules/deadline/plugins/publish/submit_publish_job.py index b098eaba8e..379953c9e4 100644 --- a/openpype/modules/deadline/plugins/publish/submit_publish_job.py +++ b/openpype/modules/deadline/plugins/publish/submit_publish_job.py @@ -10,8 +10,10 @@ import clique import pyblish.api -import openpype.api -from openpype.client import get_representations +from openpype.client import ( + get_last_version_by_subset_name, + get_representations, +) from openpype.pipeline import ( get_representation_path, legacy_io, @@ -139,7 +141,8 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin): "OPENPYPE_USERNAME", "OPENPYPE_RENDER_JOB", "OPENPYPE_PUBLISH_JOB", - "OPENPYPE_MONGO" + "OPENPYPE_MONGO", + "OPENPYPE_VERSION" ] # custom deadline attributes @@ -156,7 +159,7 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin): # mapping of instance properties to be transfered to new instance for every # specified family instance_transfer = { - "slate": ["slateFrames"], + "slate": ["slateFrames", "slate"], "review": ["lutPath"], "render2d": ["bakingNukeScripts", "version"], "renderlayer": ["convertToScanline"] @@ -343,8 +346,13 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin): # get latest version of subset # this will stop if subset wasn't published yet - version = openpype.api.get_latest_version(instance.data.get("asset"), - instance.data.get("subset")) + project_name = legacy_io.active_project() + version = get_last_version_by_subset_name( + project_name, + instance.data.get("subset"), + asset_name=instance.data.get("asset") + ) + # get its files based on extension subset_resources = get_resources( project_name, version, representation.get("ext") @@ -578,11 +586,15 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin): " This may cause issues on farm." 
).format(staging)) + frame_start = int(instance.get("frameStartHandle")) + if instance.get("slate"): + frame_start -= 1 + rep = { "name": ext, "ext": ext, "files": [os.path.basename(f) for f in list(collection)], - "frameStart": int(instance.get("frameStartHandle")), + "frameStart": frame_start, "frameEnd": int(instance.get("frameEndHandle")), # If expectedFile are absolute, we need only filenames "stagingDir": staging, @@ -1025,9 +1037,12 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin): prev_start = None prev_end = None - version = openpype.api.get_latest_version(asset_name=asset, - subset_name=subset - ) + project_name = legacy_io.active_project() + version = get_last_version_by_subset_name( + project_name, + subset, + asset_name=asset + ) # Set prev start / end frames for comparison if not prev_start and not prev_end: @@ -1072,7 +1087,12 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin): based on 'publish' template """ if not version: - version = openpype.api.get_latest_version(asset, subset) + project_name = legacy_io.active_project() + version = get_last_version_by_subset_name( + project_name, + subset, + asset_name=asset + ) if version: version = int(version["name"]) + 1 else: diff --git a/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py b/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py index bcd853f374..61b95cf06d 100644 --- a/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py +++ b/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py @@ -6,13 +6,52 @@ import subprocess import json import platform import uuid -from Deadline.Scripting import RepositoryUtils, FileUtils +import re +from Deadline.Scripting import RepositoryUtils, FileUtils, DirectoryUtils + + +def get_openpype_version_from_path(path, build=True): + """Get OpenPype version from provided path. + path (str): Path to scan. + build (bool, optional): Get only builds, not sources + + Returns: + str or None: version of OpenPype if found. + + """ + # fix path for application bundle on macos + if platform.system().lower() == "darwin": + path = os.path.join(path, "Contents", "MacOS", "lib", "Python") + + version_file = os.path.join(path, "openpype", "version.py") + if not os.path.isfile(version_file): + return None + + # skip if the version is not build + exe = os.path.join(path, "openpype_console.exe") + if platform.system().lower() in ["linux", "darwin"]: + exe = os.path.join(path, "openpype_console") + + # if only builds are requested + if build and not os.path.isfile(exe): # noqa: E501 + print(" ! 
path is not a build: {}".format(path)) + return None + + version = {} + with open(version_file, "r") as vf: + exec(vf.read(), version) + + version_match = re.search(r"(\d+\.\d+.\d+).*", version["__version__"]) + return version_match[1] def get_openpype_executable(): """Return OpenPype Executable from Event Plug-in Settings""" config = RepositoryUtils.GetPluginConfig("OpenPype") - return config.GetConfigEntryWithDefault("OpenPypeExecutable", "") + exe_list = config.GetConfigEntryWithDefault("OpenPypeExecutable", "") + dir_list = config.GetConfigEntryWithDefault( + "OpenPypeInstallationDirs", "") + return exe_list, dir_list def inject_openpype_environment(deadlinePlugin): @@ -25,16 +64,94 @@ def inject_openpype_environment(deadlinePlugin): print(">>> Injecting OpenPype environments ...") try: print(">>> Getting OpenPype executable ...") - exe_list = get_openpype_executable() - openpype_app = FileUtils.SearchFileList(exe_list) - if openpype_app == "": + exe_list, dir_list = get_openpype_executable() + openpype_versions = [] + # if the job requires specific OpenPype version, + # lets go over all available and find compatible build. + requested_version = job.GetJobEnvironmentKeyValue("OPENPYPE_VERSION") + if requested_version: + print(( + ">>> Scanning for compatible requested version {}" + ).format(requested_version)) + install_dir = DirectoryUtils.SearchDirectoryList(dir_list) + if install_dir: + print("--- Looking for OpenPype at: {}".format(install_dir)) + sub_dirs = [ + f.path for f in os.scandir(install_dir) + if f.is_dir() + ] + for subdir in sub_dirs: + version = get_openpype_version_from_path(subdir) + if not version: + continue + print(" - found: {} - {}".format(version, subdir)) + openpype_versions.append((version, subdir)) + + exe = FileUtils.SearchFileList(exe_list) + if openpype_versions: + # if looking for requested compatible version, + # add the implicitly specified to the list too. + print("Looking for OpenPype at: {}".format(os.path.dirname(exe))) + version = get_openpype_version_from_path( + os.path.dirname(exe)) + if version: + print(" - found: {} - {}".format( + version, os.path.dirname(exe) + )) + openpype_versions.append((version, os.path.dirname(exe))) + + if requested_version: + # sort detected versions + if openpype_versions: + # use natural sorting + openpype_versions.sort( + key=lambda ver: [ + int(t) if t.isdigit() else t.lower() + for t in re.split(r"(\d+)", ver[0]) + ]) + print(( + "*** Latest available version found is {}" + ).format(openpype_versions[-1][0])) + requested_major, requested_minor, _ = requested_version.split(".")[:3] # noqa: E501 + compatible_versions = [] + for version in openpype_versions: + v = version[0].split(".")[:3] + if v[0] == requested_major and v[1] == requested_minor: + compatible_versions.append(version) + if not compatible_versions: + raise RuntimeError( + ("Cannot find compatible version available " + "for version {} requested by the job. " + "Please add it through plugin configuration " + "in Deadline or install it to configured " + "directory.").format(requested_version)) + # sort compatible versions nad pick the last one + compatible_versions.sort( + key=lambda ver: [ + int(t) if t.isdigit() else t.lower() + for t in re.split(r"(\d+)", ver[0]) + ]) + print(( + "*** Latest compatible version found is {}" + ).format(compatible_versions[-1][0])) + # create list of executables for different platform and let + # Deadline decide. 
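# The selection logic in this hunk boils down to: natural-sort the discovered
# (version, path) pairs, keep only builds whose major.minor matches the job's
# requested OPENPYPE_VERSION, and take the newest of those. A condensed sketch
# of that logic, assuming plain "X.Y.Z" version strings (the helper name is
# illustrative):
import re


def pick_compatible_build(requested_version, found_versions):
    """Return the newest (version, path) pair compatible with the request.

    Args:
        requested_version (str): e.g. "3.14.1".
        found_versions (list): (version_str, install_path) tuples.
    """
    def natural_key(item):
        return [
            int(t) if t.isdigit() else t.lower()
            for t in re.split(r"(\d+)", item[0])
        ]

    requested_major, requested_minor = requested_version.split(".")[:2]
    compatible = [
        item for item in found_versions
        if item[0].split(".")[:2] == [requested_major, requested_minor]
    ]
    if not compatible:
        return None
    return sorted(compatible, key=natural_key)[-1]


# pick_compatible_build("3.14.1", [("3.13.0", "a"), ("3.14.2", "b")])
# -> ("3.14.2", "b")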
+ exe_list = [ + os.path.join( + compatible_versions[-1][1], "openpype_console.exe"), + os.path.join( + compatible_versions[-1][1], "openpype_console") + ] + exe = FileUtils.SearchFileList(";".join(exe_list)) + if exe == "": raise RuntimeError( "OpenPype executable was not found " + - "in the semicolon separated list \"" + exe_list + "\". " + + "in the semicolon separated list " + + "\"" + ";".join(exe_list) + "\". " + "The path to the render executable can be configured " + "from the Plugin Configuration in the Deadline Monitor.") - print("--- OpenPype executable: {}".format(openpype_app)) + print("--- OpenPype executable: {}".format(exe)) # tempfile.TemporaryFile cannot be used because of locking temp_file_name = "{}_{}.json".format( @@ -45,7 +162,7 @@ def inject_openpype_environment(deadlinePlugin): print(">>> Temporary path: {}".format(export_url)) args = [ - openpype_app, + exe, "--headless", 'extractenvironments', export_url @@ -75,9 +192,9 @@ def inject_openpype_environment(deadlinePlugin): env["OPENPYPE_HEADLESS_MODE"] = "1" env["AVALON_TIMEOUT"] = "5000" - print(">>> Executing: {}".format(args)) + print(">>> Executing: {}".format(" ".join(args))) std_output = subprocess.check_output(args, - cwd=os.path.dirname(openpype_app), + cwd=os.path.dirname(exe), env=env) print(">>> Process result {}".format(std_output)) @@ -122,78 +239,6 @@ def inject_render_job_id(deadlinePlugin): print(">>> Injection end.") -def pype_command_line(executable, arguments, workingDirectory): - """Remap paths in comand line argument string. - - Using Deadline rempper it will remap all path found in command-line. - - Args: - executable (str): path to executable - arguments (str): arguments passed to executable - workingDirectory (str): working directory path - - Returns: - Tuple(executable, arguments, workingDirectory) - - """ - print("-" * 40) - print("executable: {}".format(executable)) - print("arguments: {}".format(arguments)) - print("workingDirectory: {}".format(workingDirectory)) - print("-" * 40) - print("Remapping arguments ...") - arguments = RepositoryUtils.CheckPathMapping(arguments) - print("* {}".format(arguments)) - print("-" * 40) - return executable, arguments, workingDirectory - - -def pype(deadlinePlugin): - """Remaps `PYPE_METADATA_FILE` and `PYPE_PYTHON_EXE` environment vars. - - `PYPE_METADATA_FILE` is used on farm to point to rendered data. This path - originates on platform from which this job was published. To be able to - publish on different platform, this path needs to be remapped. - - `PYPE_PYTHON_EXE` can be used to specify custom location of python - interpreter to use for Pype. This is remappeda also if present even - though it probably doesn't make much sense. - - Arguments: - deadlinePlugin: Deadline job plugin passed by Deadline - - """ - print(">>> Getting job ...") - job = deadlinePlugin.GetJob() - # PYPE should be here, not OPENPYPE - backward compatibility!! - pype_metadata = job.GetJobEnvironmentKeyValue("PYPE_METADATA_FILE") - pype_python = job.GetJobEnvironmentKeyValue("PYPE_PYTHON_EXE") - print(">>> Having backward compatible env vars {}/{}".format(pype_metadata, - pype_python)) - # test if it is pype publish job. 
- if pype_metadata: - pype_metadata = RepositoryUtils.CheckPathMapping(pype_metadata) - if platform.system().lower() == "linux": - pype_metadata = pype_metadata.replace("\\", "/") - - print("- remapping PYPE_METADATA_FILE: {}".format(pype_metadata)) - job.SetJobEnvironmentKeyValue("PYPE_METADATA_FILE", pype_metadata) - deadlinePlugin.SetProcessEnvironmentVariable( - "PYPE_METADATA_FILE", pype_metadata) - - if pype_python: - pype_python = RepositoryUtils.CheckPathMapping(pype_python) - if platform.system().lower() == "linux": - pype_python = pype_python.replace("\\", "/") - - print("- remapping PYPE_PYTHON_EXE: {}".format(pype_python)) - job.SetJobEnvironmentKeyValue("PYPE_PYTHON_EXE", pype_python) - deadlinePlugin.SetProcessEnvironmentVariable( - "PYPE_PYTHON_EXE", pype_python) - - deadlinePlugin.ModifyCommandLineCallback += pype_command_line - - def __main__(deadlinePlugin): print("*** GlobalJobPreload start ...") print(">>> Getting job ...") @@ -217,5 +262,3 @@ def __main__(deadlinePlugin): inject_render_job_id(deadlinePlugin) elif openpype_render_job == '1' or openpype_remote_job == '1': inject_openpype_environment(deadlinePlugin) - else: - pype(deadlinePlugin) # backward compatibility with Pype2 diff --git a/openpype/modules/deadline/repository/custom/plugins/OpenPype/OpenPype.param b/openpype/modules/deadline/repository/custom/plugins/OpenPype/OpenPype.param index 8bd6dce12d..b3ac18e20c 100644 --- a/openpype/modules/deadline/repository/custom/plugins/OpenPype/OpenPype.param +++ b/openpype/modules/deadline/repository/custom/plugins/OpenPype/OpenPype.param @@ -7,11 +7,20 @@ Index=0 Default=OpenPype Plugin for Deadline Description=Not configurable +[OpenPypeInstallationDirs] +Type=multilinemultifolder +Label=Directories where OpenPype versions are installed +Category=OpenPype Installation Directories +CategoryOrder=0 +Index=0 +Default=C:\Program Files (x86)\OpenPype +Description=Path or paths to directories where multiple versions of OpenPype might be installed. Enter every such path on separate lines. + [OpenPypeExecutable] Type=multilinemultifilename Label=OpenPype Executable Category=OpenPype Executables -CategoryOrder=0 +CategoryOrder=1 Index=0 Default= Description=The path to the OpenPype executable. Enter alternative paths on separate lines. diff --git a/openpype/modules/deadline/repository/custom/plugins/OpenPype/OpenPype.py b/openpype/modules/deadline/repository/custom/plugins/OpenPype/OpenPype.py index 451d71fb63..6b0f69d98f 100644 --- a/openpype/modules/deadline/repository/custom/plugins/OpenPype/OpenPype.py +++ b/openpype/modules/deadline/repository/custom/plugins/OpenPype/OpenPype.py @@ -1,10 +1,19 @@ +#!/usr/bin/env python3 + from System.IO import Path from System.Text.RegularExpressions import Regex from Deadline.Plugins import PluginType, DeadlinePlugin -from Deadline.Scripting import StringUtils, FileUtils, RepositoryUtils +from Deadline.Scripting import ( + StringUtils, + FileUtils, + DirectoryUtils, + RepositoryUtils +) import re +import os +import platform ###################################################################### @@ -52,13 +61,115 @@ class OpenPypeDeadlinePlugin(DeadlinePlugin): self.AddStdoutHandlerCallback( ".*Progress: (\d+)%.*").HandleCallback += self.HandleProgress + @staticmethod + def get_openpype_version_from_path(path, build=True): + """Get OpenPype version from provided path. + path (str): Path to scan. + build (bool, optional): Get only builds, not sources + + Returns: + str or None: version of OpenPype if found. 
+ + """ + # fix path for application bundle on macos + if platform.system().lower() == "darwin": + path = os.path.join(path, "Contents", "MacOS", "lib", "Python") + + version_file = os.path.join(path, "openpype", "version.py") + if not os.path.isfile(version_file): + return None + + # skip if the version is not build + exe = os.path.join(path, "openpype_console.exe") + if platform.system().lower() in ["linux", "darwin"]: + exe = os.path.join(path, "openpype_console") + + # if only builds are requested + if build and not os.path.isfile(exe): # noqa: E501 + print(f" ! path is not a build: {path}") + return None + + version = {} + with open(version_file, "r") as vf: + exec(vf.read(), version) + + version_match = re.search(r"(\d+\.\d+.\d+).*", version["__version__"]) + return version_match[1] + def RenderExecutable(self): - exeList = self.GetConfigEntry("OpenPypeExecutable") - exe = FileUtils.SearchFileList(exeList) + job = self.GetJob() + openpype_versions = [] + # if the job requires specific OpenPype version, + # lets go over all available and find compatible build. + requested_version = job.GetJobEnvironmentKeyValue("OPENPYPE_VERSION") + if requested_version: + self.LogInfo(( + "Scanning for compatible requested " + f"version {requested_version}")) + dir_list = self.GetConfigEntry("OpenPypeInstallationDirs") + install_dir = DirectoryUtils.SearchDirectoryList(dir_list) + if dir: + sub_dirs = [ + f.path for f in os.scandir(install_dir) + if f.is_dir() + ] + for subdir in sub_dirs: + version = self.get_openpype_version_from_path(subdir) + if not version: + continue + openpype_versions.append((version, subdir)) + + exe_list = self.GetConfigEntry("OpenPypeExecutable") + exe = FileUtils.SearchFileList(exe_list) + if openpype_versions: + # if looking for requested compatible version, + # add the implicitly specified to the list too. + version = self.get_openpype_version_from_path( + os.path.dirname(exe)) + if version: + openpype_versions.append((version, os.path.dirname(exe))) + + if requested_version: + # sort detected versions + if openpype_versions: + openpype_versions.sort( + key=lambda ver: [ + int(t) if t.isdigit() else t.lower() + for t in re.split(r"(\d+)", ver[0]) + ]) + requested_major, requested_minor, _ = requested_version.split(".")[:3] # noqa: E501 + compatible_versions = [] + for version in openpype_versions: + v = version[0].split(".")[:3] + if v[0] == requested_major and v[1] == requested_minor: + compatible_versions.append(version) + if not compatible_versions: + self.FailRender(("Cannot find compatible version available " + "for version {} requested by the job. " + "Please add it through plugin configuration " + "in Deadline or install it to configured " + "directory.").format(requested_version)) + # sort compatible versions nad pick the last one + compatible_versions.sort( + key=lambda ver: [ + int(t) if t.isdigit() else t.lower() + for t in re.split(r"(\d+)", ver[0]) + ]) + # create list of executables for different platform and let + # Deadline decide. + exe_list = [ + os.path.join( + compatible_versions[-1][1], "openpype_console.exe"), + os.path.join( + compatible_versions[-1][1], "openpype_console") + ] + exe = FileUtils.SearchFileList(";".join(exe_list)) + if exe == "": self.FailRender( "OpenPype executable was not found " + - "in the semicolon separated list \"" + exeList + "\". " + + "in the semicolon separated list " + + "\"" + ";".join(exe_list) + "\". 
" + "The path to the render executable can be configured " + "from the Plugin Configuration in the Deadline Monitor.") return exe diff --git a/openpype/modules/ftrack/event_handlers_server/event_push_frame_values_to_task.py b/openpype/modules/ftrack/event_handlers_server/event_push_frame_values_to_task.py index 0914933de4..dc76920a57 100644 --- a/openpype/modules/ftrack/event_handlers_server/event_push_frame_values_to_task.py +++ b/openpype/modules/ftrack/event_handlers_server/event_push_frame_values_to_task.py @@ -1,10 +1,11 @@ import collections import datetime +import copy import ftrack_api from openpype_modules.ftrack.lib import ( BaseEvent, - query_custom_attributes + query_custom_attributes, ) @@ -124,10 +125,15 @@ class PushFrameValuesToTaskEvent(BaseEvent): # Separate value changes and task parent changes _entities_info = [] + added_entities = [] + added_entity_ids = set() task_parent_changes = [] for entity_info in entities_info: if entity_info["entity_type"].lower() == "task": task_parent_changes.append(entity_info) + elif entity_info.get("action") == "add": + added_entities.append(entity_info) + added_entity_ids.add(entity_info["entityId"]) else: _entities_info.append(entity_info) entities_info = _entities_info @@ -136,6 +142,13 @@ class PushFrameValuesToTaskEvent(BaseEvent): interesting_data, changed_keys_by_object_id = self.filter_changes( session, event, entities_info, interest_attributes ) + self.interesting_data_for_added( + session, + added_entities, + interest_attributes, + interesting_data, + changed_keys_by_object_id + ) if not interesting_data and not task_parent_changes: return @@ -151,9 +164,13 @@ class PushFrameValuesToTaskEvent(BaseEvent): # - it is a complex way how to find out if interesting_data: self.process_attribute_changes( - session, object_types_by_name, - interesting_data, changed_keys_by_object_id, - interest_entity_types, interest_attributes + session, + object_types_by_name, + interesting_data, + changed_keys_by_object_id, + interest_entity_types, + interest_attributes, + added_entity_ids ) if task_parent_changes: @@ -163,8 +180,12 @@ class PushFrameValuesToTaskEvent(BaseEvent): ) def process_task_parent_change( - self, session, object_types_by_name, task_parent_changes, - interest_entity_types, interest_attributes + self, + session, + object_types_by_name, + task_parent_changes, + interest_entity_types, + interest_attributes ): """Push custom attribute values if task parent has changed. @@ -176,6 +197,7 @@ class PushFrameValuesToTaskEvent(BaseEvent): real hierarchical value and non hierarchical custom attribute value should be set to hierarchical value. """ + # Store task ids which were created or moved under parent with entity # type defined in settings (interest_entity_types). 
task_ids = set() @@ -380,33 +402,49 @@ class PushFrameValuesToTaskEvent(BaseEvent): uncommited_changes = False for idx, item in enumerate(changes): new_value = item["new_value"] + old_value = item["old_value"] attr_id = item["attr_id"] entity_id = item["entity_id"] attr_key = item["attr_key"] - entity_key = collections.OrderedDict() - entity_key["configuration_id"] = attr_id - entity_key["entity_id"] = entity_id + entity_key = collections.OrderedDict(( + ("configuration_id", attr_id), + ("entity_id", entity_id) + )) self._cached_changes.append({ "attr_key": attr_key, "entity_id": entity_id, "value": new_value, "time": datetime.datetime.now() }) + old_value_is_set = ( + old_value is not ftrack_api.symbol.NOT_SET + and old_value is not None + ) if new_value is None: + if not old_value_is_set: + continue op = ftrack_api.operation.DeleteEntityOperation( "CustomAttributeValue", entity_key ) - else: + + elif old_value_is_set: op = ftrack_api.operation.UpdateEntityOperation( - "ContextCustomAttributeValue", + "CustomAttributeValue", entity_key, "value", - ftrack_api.symbol.NOT_SET, + old_value, new_value ) + else: + op = ftrack_api.operation.CreateEntityOperation( + "CustomAttributeValue", + entity_key, + {"value": new_value} + ) + session.recorded_operations.push(op) self.log.info(( "Changing Custom Attribute \"{}\" to value" @@ -432,9 +470,14 @@ class PushFrameValuesToTaskEvent(BaseEvent): self.log.warning("Changing of values failed.", exc_info=True) def process_attribute_changes( - self, session, object_types_by_name, - interesting_data, changed_keys_by_object_id, - interest_entity_types, interest_attributes + self, + session, + object_types_by_name, + interesting_data, + changed_keys_by_object_id, + interest_entity_types, + interest_attributes, + added_entity_ids ): # Prepare task object id task_object_id = object_types_by_name["task"]["id"] @@ -522,15 +565,26 @@ class PushFrameValuesToTaskEvent(BaseEvent): parent_id_by_task_id[task_id] = task_entity["parent_id"] self.finalize_attribute_changes( - session, interesting_data, - changed_keys, attrs_by_obj_id, hier_attrs, - task_entity_ids, parent_id_by_task_id + session, + interesting_data, + changed_keys, + attrs_by_obj_id, + hier_attrs, + task_entity_ids, + parent_id_by_task_id, + added_entity_ids ) def finalize_attribute_changes( - self, session, interesting_data, - changed_keys, attrs_by_obj_id, hier_attrs, - task_entity_ids, parent_id_by_task_id + self, + session, + interesting_data, + changed_keys, + attrs_by_obj_id, + hier_attrs, + task_entity_ids, + parent_id_by_task_id, + added_entity_ids ): attr_id_to_key = {} for attr_confs in attrs_by_obj_id.values(): @@ -550,7 +604,11 @@ class PushFrameValuesToTaskEvent(BaseEvent): attr_ids = set(attr_id_to_key.keys()) current_values_by_id = self.get_current_values( - session, attr_ids, entity_ids, task_entity_ids, hier_attrs + session, + attr_ids, + entity_ids, + task_entity_ids, + hier_attrs ) changes = [] @@ -560,14 +618,25 @@ class PushFrameValuesToTaskEvent(BaseEvent): parent_id = entity_id values = interesting_data[parent_id] + added_entity = entity_id in added_entity_ids for attr_id, old_value in current_values.items(): + if added_entity and attr_id in hier_attrs: + continue + attr_key = attr_id_to_key.get(attr_id) if not attr_key: continue # Convert new value from string new_value = values.get(attr_key) - if new_value is not None and old_value is not None: + new_value_is_valid = ( + old_value is not ftrack_api.symbol.NOT_SET + and new_value is not None + ) + if added_entity and not 
new_value_is_valid: + continue + + if new_value is not None and new_value_is_valid: try: new_value = type(old_value)(new_value) except Exception: @@ -581,6 +650,7 @@ class PushFrameValuesToTaskEvent(BaseEvent): changes.append({ "new_value": new_value, "attr_id": attr_id, + "old_value": old_value, "entity_id": entity_id, "attr_key": attr_key }) @@ -599,6 +669,7 @@ class PushFrameValuesToTaskEvent(BaseEvent): interesting_data = {} changed_keys_by_object_id = {} + for entity_info in entities_info: # Care only about changes if specific keys entity_changes = {} @@ -644,16 +715,123 @@ class PushFrameValuesToTaskEvent(BaseEvent): return interesting_data, changed_keys_by_object_id + def interesting_data_for_added( + self, + session, + added_entities, + interest_attributes, + interesting_data, + changed_keys_by_object_id + ): + if not added_entities or not interest_attributes: + return + + object_type_ids = set() + entity_ids = set() + all_entity_ids = set() + object_id_by_entity_id = {} + project_id = None + entity_ids_by_parent_id = collections.defaultdict(set) + for entity_info in added_entities: + object_id = entity_info["objectTypeId"] + entity_id = entity_info["entityId"] + object_type_ids.add(object_id) + entity_ids.add(entity_id) + object_id_by_entity_id[entity_id] = object_id + + for item in entity_info["parents"]: + entity_id = item["entityId"] + all_entity_ids.add(entity_id) + parent_id = item["parentId"] + if not parent_id: + project_id = entity_id + else: + entity_ids_by_parent_id[parent_id].add(entity_id) + + hier_attrs = self.get_hierarchical_configurations( + session, interest_attributes + ) + if not hier_attrs: + return + + hier_attrs_key_by_id = { + attr_conf["id"]: attr_conf["key"] + for attr_conf in hier_attrs + } + default_values_by_key = { + attr_conf["key"]: attr_conf["default"] + for attr_conf in hier_attrs + } + + values = query_custom_attributes( + session, list(hier_attrs_key_by_id.keys()), all_entity_ids, True + ) + values_per_entity_id = {} + for entity_id in all_entity_ids: + values_per_entity_id[entity_id] = {} + for attr_name in interest_attributes: + values_per_entity_id[entity_id][attr_name] = None + + for item in values: + entity_id = item["entity_id"] + key = hier_attrs_key_by_id[item["configuration_id"]] + values_per_entity_id[entity_id][key] = item["value"] + + fill_queue = collections.deque() + fill_queue.append((project_id, default_values_by_key)) + while fill_queue: + item = fill_queue.popleft() + entity_id, values_by_key = item + entity_values = values_per_entity_id[entity_id] + new_values_by_key = copy.deepcopy(values_by_key) + for key, value in values_by_key.items(): + current_value = entity_values[key] + if current_value is None: + entity_values[key] = value + else: + new_values_by_key[key] = current_value + + for child_id in entity_ids_by_parent_id[entity_id]: + fill_queue.append((child_id, new_values_by_key)) + + for entity_id in entity_ids: + entity_changes = {} + for key, value in values_per_entity_id[entity_id].items(): + if value is not None: + entity_changes[key] = value + + if not entity_changes: + continue + + interesting_data[entity_id] = entity_changes + object_id = object_id_by_entity_id[entity_id] + if object_id not in changed_keys_by_object_id: + changed_keys_by_object_id[object_id] = set() + changed_keys_by_object_id[object_id] |= set(entity_changes.keys()) + def get_current_values( - self, session, attr_ids, entity_ids, task_entity_ids, hier_attrs + self, + session, + attr_ids, + entity_ids, + task_entity_ids, + hier_attrs ): 
current_values_by_id = {} if not attr_ids or not entity_ids: return current_values_by_id + for entity_id in entity_ids: + current_values_by_id[entity_id] = {} + for attr_id in attr_ids: + current_values_by_id[entity_id][attr_id] = ( + ftrack_api.symbol.NOT_SET + ) + values = query_custom_attributes( session, attr_ids, entity_ids, True ) + for item in values: entity_id = item["entity_id"] attr_id = item["configuration_id"] @@ -699,6 +877,18 @@ class PushFrameValuesToTaskEvent(BaseEvent): output[obj_id][attr["key"]] = attr["id"] return output, hiearchical + def get_hierarchical_configurations(self, session, interest_attributes): + hier_attr_query = ( + "select id, key, object_type_id, is_hierarchical, default" + " from CustomAttributeConfiguration" + " where key in ({}) and is_hierarchical is true" + ) + if not interest_attributes: + return [] + return list(session.query(hier_attr_query.format( + self.join_query_keys(interest_attributes), + )).all()) + def register(session): PushFrameValuesToTaskEvent(session).register() diff --git a/openpype/modules/ftrack/event_handlers_server/event_sync_to_avalon.py b/openpype/modules/ftrack/event_handlers_server/event_sync_to_avalon.py index a4e791aaf0..e549de7ed0 100644 --- a/openpype/modules/ftrack/event_handlers_server/event_sync_to_avalon.py +++ b/openpype/modules/ftrack/event_handlers_server/event_sync_to_avalon.py @@ -18,6 +18,7 @@ from openpype.client import ( get_archived_assets, get_asset_ids_with_subsets ) +from openpype.client.operations import CURRENT_ASSET_DOC_SCHEMA from openpype.pipeline import AvalonMongoDB, schema from openpype_modules.ftrack.lib import ( @@ -35,7 +36,6 @@ from openpype_modules.ftrack.lib.avalon_sync import ( convert_to_fps, InvalidFpsValue ) -from openpype.lib import CURRENT_DOC_SCHEMAS class SyncToAvalonEvent(BaseEvent): @@ -697,13 +697,22 @@ class SyncToAvalonEvent(BaseEvent): continue auto_sync = changes[CUST_ATTR_AUTO_SYNC]["new"] - if auto_sync == "1": + turned_on = auto_sync == "1" + ft_project = self.cur_project + username = self._get_username(session, event) + message = ( + "Auto sync was turned {} for project \"{}\" by \"{}\"." + ).format( + "on" if turned_on else "off", + ft_project["full_name"], + username + ) + if turned_on: + message += " Triggering syncToAvalon action." + self.log.debug(message) + + if turned_on: # Trigger sync to avalon action if auto sync was turned on - ft_project = self.cur_project - self.log.debug(( - "Auto sync was turned on for project <{}>." - " Triggering syncToAvalon action." - ).format(ft_project["full_name"])) selection = [{ "entityId": ft_project["id"], "entityType": "show" @@ -851,6 +860,26 @@ class SyncToAvalonEvent(BaseEvent): self.report() return True + def _get_username(self, session, event): + username = "Unknown" + event_source = event.get("source") + if not event_source: + return username + user_info = event_source.get("user") + if not user_info: + return username + user_id = user_info.get("id") + if not user_id: + return username + + user_entity = session.query( + "User where id is {}".format(user_id) + ).first() + if user_entity: + username = user_entity["username"] or username + return username + + def process_removed(self): """ Handles removed entities (not removed tasks - handle separately). 
@@ -1207,7 +1236,7 @@ class SyncToAvalonEvent(BaseEvent): "_id": mongo_id, "name": name, "type": "asset", - "schema": CURRENT_DOC_SCHEMAS["asset"], + "schema": CURRENT_ASSET_DOC_SCHEMA, "parent": proj["_id"], "data": { "ftrackId": ftrack_ent["id"], diff --git a/openpype/modules/ftrack/event_handlers_user/action_delivery.py b/openpype/modules/ftrack/event_handlers_user/action_delivery.py index ad82af39a3..eec245070c 100644 --- a/openpype/modules/ftrack/event_handlers_user/action_delivery.py +++ b/openpype/modules/ftrack/event_handlers_user/action_delivery.py @@ -16,7 +16,7 @@ from openpype_modules.ftrack.lib.avalon_sync import CUST_ATTR_ID_KEY from openpype_modules.ftrack.lib.custom_attributes import ( query_custom_attributes ) -from openpype.lib import config +from openpype.lib.dateutils import get_datetime_data from openpype.lib.delivery import ( path_from_representation, get_format_dict, @@ -555,7 +555,7 @@ class Delivery(BaseAction): format_dict = get_format_dict(anatomy, location_path) - datetime_data = config.get_datetime_data() + datetime_data = get_datetime_data() for repre in repres_to_deliver: source_path = repre.get("data", {}).get("path") debug_msg = "Processing representation {}".format(repre["_id"]) diff --git a/openpype/modules/ftrack/event_handlers_user/action_fill_workfile_attr.py b/openpype/modules/ftrack/event_handlers_user/action_fill_workfile_attr.py index d91649d7ba..fb1cdf340e 100644 --- a/openpype/modules/ftrack/event_handlers_user/action_fill_workfile_attr.py +++ b/openpype/modules/ftrack/event_handlers_user/action_fill_workfile_attr.py @@ -11,13 +11,11 @@ from openpype.client import ( get_project, get_assets, ) -from openpype.settings import get_project_settings -from openpype.lib import ( - get_workfile_template_key, - get_workdir_data, - StringTemplate, -) +from openpype.settings import get_project_settings, get_system_settings +from openpype.lib import StringTemplate from openpype.pipeline import Anatomy +from openpype.pipeline.template_data import get_template_data +from openpype.pipeline.workfile import get_workfile_template_key from openpype_modules.ftrack.lib import BaseAction, statics_icon from openpype_modules.ftrack.lib.avalon_sync import create_chunks @@ -279,14 +277,19 @@ class FillWorkfileAttributeAction(BaseAction): extension = "{ext}" project_doc = get_project(project_name) project_settings = get_project_settings(project_name) + system_settings = get_system_settings() anatomy = Anatomy(project_name) templates_by_key = {} operations = [] for asset_doc, task_entities in asset_docs_with_task_entities: for task_entity in task_entities: - workfile_data = get_workdir_data( - project_doc, asset_doc, task_entity["name"], host_name + workfile_data = get_template_data( + project_doc, + asset_doc, + task_entity["name"], + host_name, + system_settings ) # Use version 1 for each workfile workfile_data["version"] = 1 @@ -294,7 +297,10 @@ class FillWorkfileAttributeAction(BaseAction): task_type = workfile_data["task"]["type"] template_key = get_workfile_template_key( - task_type, host_name, project_settings=project_settings + task_type, + host_name, + project_name, + project_settings=project_settings ) if template_key in templates_by_key: template = templates_by_key[template_key] diff --git a/openpype/modules/ftrack/ftrack_module.py b/openpype/modules/ftrack/ftrack_module.py index f99e189082..cb4f204523 100644 --- a/openpype/modules/ftrack/ftrack_module.py +++ b/openpype/modules/ftrack/ftrack_module.py @@ -9,7 +9,6 @@ from openpype.modules import OpenPypeModule 
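# The hunk below drops ILaunchHookPaths from FtrackModule's bases while
# keeping get_launch_hook_paths(), so launch hook directories appear to be
# discovered simply by a module exposing that method rather than by
# inheriting the interface. A minimal sketch of a module following that
# convention (class name and folder are illustrative, not from the repo):
import os

from openpype.modules import OpenPypeModule


class ExampleModule(OpenPypeModule):
    name = "example"

    def initialize(self, module_settings):
        self.enabled = True

    def get_launch_hook_paths(self):
        """Paths to launch hook directories; no interface mixin needed."""
        return [os.path.join(os.path.dirname(__file__), "launch_hooks")]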
from openpype_interfaces import ( ITrayModule, IPluginPaths, - ILaunchHookPaths, ISettingsChangeListener ) from openpype.settings import SaveWarningExc @@ -21,7 +20,6 @@ class FtrackModule( OpenPypeModule, ITrayModule, IPluginPaths, - ILaunchHookPaths, ISettingsChangeListener ): name = "ftrack" @@ -85,7 +83,8 @@ class FtrackModule( } def get_launch_hook_paths(self): - """Implementation of `ILaunchHookPaths`.""" + """Implementation for applications launch hooks.""" + return os.path.join(FTRACK_MODULE_DIR, "launch_hooks") def modify_application_launch_arguments(self, application, env): diff --git a/openpype/modules/ftrack/ftrack_server/event_server_cli.py b/openpype/modules/ftrack/ftrack_server/event_server_cli.py index 90ce757242..3ef7c8270a 100644 --- a/openpype/modules/ftrack/ftrack_server/event_server_cli.py +++ b/openpype/modules/ftrack/ftrack_server/event_server_cli.py @@ -1,11 +1,9 @@ import os -import sys import signal import datetime import subprocess import socket import json -import platform import getpass import atexit import time @@ -13,12 +11,14 @@ import uuid import ftrack_api import pymongo +from openpype.client.mongo import ( + OpenPypeMongoConnection, + validate_mongo_connection, +) from openpype.lib import ( get_openpype_execute_args, - OpenPypeMongoConnection, get_openpype_version, get_build_version, - validate_mongo_connection ) from openpype_modules.ftrack import FTRACK_MODULE_DIR from openpype_modules.ftrack.lib import credentials diff --git a/openpype/modules/ftrack/ftrack_server/ftrack_server.py b/openpype/modules/ftrack/ftrack_server/ftrack_server.py index 8944591b71..c75b8f7172 100644 --- a/openpype/modules/ftrack/ftrack_server/ftrack_server.py +++ b/openpype/modules/ftrack/ftrack_server/ftrack_server.py @@ -7,12 +7,10 @@ import traceback import ftrack_api from openpype.lib import ( - PypeLogger, + Logger, modules_from_path ) -log = PypeLogger.get_logger(__name__) - """ # Required - Needed for connection to Ftrack FTRACK_SERVER # Ftrack server e.g. "https://myFtrack.ftrackapp.com" @@ -43,10 +41,13 @@ class FtrackServer: server.run_server() .. """ + # set Ftrack logging to Warning only - OPTIONAL ftrack_log = logging.getLogger("ftrack_api") ftrack_log.setLevel(logging.WARNING) + self.log = Logger.get_logger(__name__) + self.stopped = True self.is_running = False @@ -72,7 +73,7 @@ class FtrackServer: # Get all modules with functions modules, crashed = modules_from_path(path) for filepath, exc_info in crashed: - log.warning("Filepath load crashed {}.\n{}".format( + self.log.warning("Filepath load crashed {}.\n{}".format( filepath, traceback.format_exception(*exc_info) )) @@ -87,7 +88,7 @@ class FtrackServer: break if not register_function: - log.warning( + self.log.warning( "\"{}\" - Missing register method".format(filepath) ) continue @@ -97,7 +98,7 @@ class FtrackServer: ) if not register_functions: - log.warning(( + self.log.warning(( "There are no events with `register` function" " in registered paths: \"{}\"" ).format("| ".join(paths))) @@ -106,7 +107,7 @@ class FtrackServer: try: register_func(self.session) except Exception: - log.warning( + self.log.warning( "\"{}\" - register was not successful".format(filepath), exc_info=True ) @@ -141,7 +142,7 @@ class FtrackServer: self.session = session if load_files: if not self.handler_paths: - log.warning(( + self.log.warning(( "Paths to event handlers are not set." " Ftrack server won't launch." 
)) @@ -151,8 +152,8 @@ class FtrackServer: self.set_files(self.handler_paths) msg = "Registration of event handlers has finished!" - log.info(len(msg) * "*") - log.info(msg) + self.log.info(len(msg) * "*") + self.log.info(msg) # keep event_hub on session running self.session.event_hub.wait() diff --git a/openpype/modules/ftrack/ftrack_server/lib.py b/openpype/modules/ftrack/ftrack_server/lib.py index 5c6d6352d2..947dacf917 100644 --- a/openpype/modules/ftrack/ftrack_server/lib.py +++ b/openpype/modules/ftrack/ftrack_server/lib.py @@ -7,6 +7,7 @@ import threading import datetime import time import queue +import collections import appdirs import pymongo @@ -24,7 +25,7 @@ except ImportError: from ftrack_api._weakref import WeakMethod from openpype_modules.ftrack.lib import get_ftrack_event_mongo_info -from openpype.lib import OpenPypeMongoConnection +from openpype.client import OpenPypeMongoConnection from openpype.api import Logger TOPIC_STATUS_SERVER = "openpype.event.server.status" @@ -309,7 +310,20 @@ class CustomEventHubSession(ftrack_api.session.Session): # Currently pending operations. self.recorded_operations = ftrack_api.operation.Operations() - self.record_operations = True + + # OpenPype change - In new API are operations properties + new_api = hasattr(self.__class__, "record_operations") + + if new_api: + self._record_operations = collections.defaultdict( + lambda: True + ) + self._auto_populate = collections.defaultdict( + lambda: auto_populate + ) + else: + self.record_operations = True + self.auto_populate = auto_populate self.cache_key_maker = cache_key_maker if self.cache_key_maker is None: @@ -328,6 +342,9 @@ class CustomEventHubSession(ftrack_api.session.Session): if cache is not None: self.cache.caches.append(cache) + if new_api: + self.merge_lock = threading.RLock() + self._managed_request = None self._request = requests.Session() self._request.auth = ftrack_api.session.SessionAuthentication( @@ -335,8 +352,6 @@ class CustomEventHubSession(ftrack_api.session.Session): ) self.request_timeout = timeout - self.auto_populate = auto_populate - # Fetch server information and in doing so also check credentials. 
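# The constructor above branches on hasattr(self.__class__, "record_operations")
# to cope with two generations of ftrack_api: newer releases expose
# record_operations/auto_populate as class-level properties backed by internal
# defaultdicts, older ones expect plain instance attributes. A generic sketch
# of that feature-detection pattern (Base stands in for
# ftrack_api.session.Session and is not a real class):
import collections


class Base(object):
    """Stand-in for the library base class; may or may not define the property."""


class CompatSession(Base):
    def __init__(self, auto_populate=True):
        # If the base class already defines the attribute as a property,
        # only fill the private storage that the property reads from.
        if hasattr(self.__class__, "record_operations"):
            self._record_operations = collections.defaultdict(lambda: True)
            self._auto_populate = collections.defaultdict(
                lambda: auto_populate
            )
        else:
            # Older API: plain attributes are expected on the instance.
            self.record_operations = True
            self.auto_populate = auto_populate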
self._server_information = self._fetch_server_information() diff --git a/openpype/modules/ftrack/ftrack_server/socket_thread.py b/openpype/modules/ftrack/ftrack_server/socket_thread.py index f49ca5557e..3ef55f8daa 100644 --- a/openpype/modules/ftrack/ftrack_server/socket_thread.py +++ b/openpype/modules/ftrack/ftrack_server/socket_thread.py @@ -5,8 +5,8 @@ import socket import threading import traceback import subprocess -from openpype.api import Logger -from openpype.lib import get_openpype_execute_args + +from openpype.lib import get_openpype_execute_args, Logger class SocketThread(threading.Thread): @@ -16,7 +16,7 @@ class SocketThread(threading.Thread): def __init__(self, name, port, filepath, additional_args=[]): super(SocketThread, self).__init__() - self.log = Logger().get_logger(self.__class__.__name__) + self.log = Logger.get_logger(self.__class__.__name__) self.setName(name) self.name = name self.port = port diff --git a/openpype/modules/ftrack/lib/avalon_sync.py b/openpype/modules/ftrack/lib/avalon_sync.py index f8883cefbd..72be6a8e9a 100644 --- a/openpype/modules/ftrack/lib/avalon_sync.py +++ b/openpype/modules/ftrack/lib/avalon_sync.py @@ -14,6 +14,11 @@ from openpype.client import ( get_versions, get_representations ) +from openpype.client.operations import ( + CURRENT_ASSET_DOC_SCHEMA, + CURRENT_PROJECT_SCHEMA, + CURRENT_PROJECT_CONFIG_SCHEMA, +) from openpype.api import ( Logger, get_anatomy_settings @@ -32,14 +37,6 @@ import ftrack_api log = Logger.get_logger(__name__) -# Current schemas for avalon types -CURRENT_DOC_SCHEMAS = { - "project": "openpype:project-3.0", - "asset": "openpype:asset-3.0", - "config": "openpype:config-2.0" -} - - class InvalidFpsValue(Exception): pass @@ -2063,7 +2060,7 @@ class SyncEntitiesFactory: item["_id"] = new_id item["parent"] = self.avalon_project_id - item["schema"] = CURRENT_DOC_SCHEMAS["asset"] + item["schema"] = CURRENT_ASSET_DOC_SCHEMA item["data"]["visualParent"] = avalon_parent new_id_str = str(new_id) @@ -2198,8 +2195,8 @@ class SyncEntitiesFactory: project_item["_id"] = new_id project_item["parent"] = None - project_item["schema"] = CURRENT_DOC_SCHEMAS["project"] - project_item["config"]["schema"] = CURRENT_DOC_SCHEMAS["config"] + project_item["schema"] = CURRENT_PROJECT_SCHEMA + project_item["config"]["schema"] = CURRENT_PROJECT_CONFIG_SCHEMA self.ftrack_avalon_mapper[self.ft_project_id] = new_id self.avalon_ftrack_mapper[new_id] = self.ft_project_id diff --git a/openpype/modules/ftrack/lib/ftrack_base_handler.py b/openpype/modules/ftrack/lib/ftrack_base_handler.py index c0fad6aadc..c0b03f8a41 100644 --- a/openpype/modules/ftrack/lib/ftrack_base_handler.py +++ b/openpype/modules/ftrack/lib/ftrack_base_handler.py @@ -6,7 +6,7 @@ import uuid import datetime import traceback import time -from openpype.api import Logger +from openpype.lib import Logger from openpype.settings import get_project_settings import ftrack_api @@ -52,7 +52,7 @@ class BaseHandler(object): def __init__(self, session): '''Expects a ftrack_api.Session instance''' - self.log = Logger().get_logger(self.__class__.__name__) + self.log = Logger.get_logger(self.__class__.__name__) if not( isinstance(session, ftrack_api.session.Session) or isinstance(session, ftrack_server.lib.SocketSession) diff --git a/openpype/modules/ftrack/plugins/publish/collect_ftrack_api.py b/openpype/modules/ftrack/plugins/publish/collect_ftrack_api.py index 14da188150..e13b7e65cd 100644 --- a/openpype/modules/ftrack/plugins/publish/collect_ftrack_api.py +++ 
b/openpype/modules/ftrack/plugins/publish/collect_ftrack_api.py @@ -105,11 +105,17 @@ class CollectFtrackApi(pyblish.api.ContextPlugin): context.data["ftrackEntity"] = asset_entity context.data["ftrackTask"] = task_entity - self.per_instance_process(context, asset_name, task_name) + self.per_instance_process(context, asset_entity, task_entity) def per_instance_process( - self, context, context_asset_name, context_task_name + self, context, context_asset_entity, context_task_entity ): + context_task_name = None + context_asset_name = None + if context_asset_entity: + context_asset_name = context_asset_entity["name"] + if context_task_entity: + context_task_name = context_task_entity["name"] instance_by_asset_and_task = {} for instance in context: self.log.debug( @@ -120,6 +126,8 @@ class CollectFtrackApi(pyblish.api.ContextPlugin): if not instance_asset_name and not instance_task_name: self.log.debug("Instance does not have set context keys.") + instance.data["ftrackEntity"] = context_asset_entity + instance.data["ftrackTask"] = context_task_entity continue elif instance_asset_name and instance_task_name: @@ -131,6 +139,8 @@ class CollectFtrackApi(pyblish.api.ContextPlugin): "Instance's context is same as in publish context." " Asset: {} | Task: {}" ).format(context_asset_name, context_task_name)) + instance.data["ftrackEntity"] = context_asset_entity + instance.data["ftrackTask"] = context_task_entity continue asset_name = instance_asset_name task_name = instance_task_name @@ -141,6 +151,8 @@ class CollectFtrackApi(pyblish.api.ContextPlugin): "Instance's context task is same as in publish" " context. Task: {}" ).format(context_task_name)) + instance.data["ftrackEntity"] = context_asset_entity + instance.data["ftrackTask"] = context_task_entity continue asset_name = context_asset_name @@ -152,6 +164,8 @@ class CollectFtrackApi(pyblish.api.ContextPlugin): "Instance's context asset is same as in publish" " context. Asset: {}" ).format(context_asset_name)) + instance.data["ftrackEntity"] = context_asset_entity + instance.data["ftrackTask"] = context_task_entity continue # Do not use context's task name diff --git a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_api.py b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_api.py index c4f7b1f05d..159e60024d 100644 --- a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_api.py +++ b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_api.py @@ -13,6 +13,7 @@ Provides: import os import sys import collections + import six import pyblish.api import clique @@ -21,13 +22,11 @@ import clique class IntegrateFtrackApi(pyblish.api.InstancePlugin): """ Commit components to server. 
""" - order = pyblish.api.IntegratorOrder+0.499 + order = pyblish.api.IntegratorOrder + 0.499 label = "Integrate Ftrack Api" families = ["ftrack"] def process(self, instance): - session = instance.context.data["ftrackSession"] - context = instance.context component_list = instance.data.get("ftrackComponentsList") if not component_list: self.log.info( @@ -36,8 +35,8 @@ class IntegrateFtrackApi(pyblish.api.InstancePlugin): ) return - session = instance.context.data["ftrackSession"] context = instance.context + session = context.data["ftrackSession"] parent_entity = None default_asset_name = None @@ -89,6 +88,7 @@ class IntegrateFtrackApi(pyblish.api.InstancePlugin): asset_versions_data_by_id = {} used_asset_versions = [] + # Iterate over components and publish for data in component_list: self.log.debug("data: {}".format(data)) @@ -118,9 +118,6 @@ class IntegrateFtrackApi(pyblish.api.InstancePlugin): asset_version_status_ids_by_name ) - # Component - self.create_component(session, asset_version_entity, data) - # Store asset version and components items that were version_id = asset_version_entity["id"] if version_id not in asset_versions_data_by_id: @@ -137,6 +134,8 @@ class IntegrateFtrackApi(pyblish.api.InstancePlugin): if asset_version_entity not in used_asset_versions: used_asset_versions.append(asset_version_entity) + self._create_components(session, asset_versions_data_by_id) + instance.data["ftrackIntegratedAssetVersionsData"] = ( asset_versions_data_by_id ) @@ -625,3 +624,40 @@ class IntegrateFtrackApi(pyblish.api.InstancePlugin): session.rollback() session._configure_locations() six.reraise(tp, value, tb) + + def _create_components(self, session, asset_versions_data_by_id): + for item in asset_versions_data_by_id.values(): + asset_version_entity = item["asset_version"] + component_items = item["component_items"] + + component_entities = session.query( + ( + "select id, name from Component where version_id is \"{}\"" + ).format(asset_version_entity["id"]) + ).all() + + existing_component_names = { + component["name"] + for component in component_entities + } + + contain_review = "ftrackreview-mp4" in existing_component_names + thumbnail_component_item = None + for component_item in component_items: + component_data = component_item.get("component_data") or {} + component_name = component_data.get("name") + if component_name == "ftrackreview-mp4": + contain_review = True + elif component_name == "ftrackreview-image": + thumbnail_component_item = component_item + + if contain_review and thumbnail_component_item: + thumbnail_component_item["component_data"]["name"] = ( + "thumbnail" + ) + + # Component + for component_item in component_items: + self.create_component( + session, asset_version_entity, component_item + ) diff --git a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_component_overwrite.py b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_component_overwrite.py index 047fd8462c..8cb2336391 100644 --- a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_component_overwrite.py +++ b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_component_overwrite.py @@ -13,7 +13,10 @@ class IntegrateFtrackComponentOverwrite(pyblish.api.InstancePlugin): active = False def process(self, instance): - component_list = instance.data['ftrackComponentsList'] + component_list = instance.data.get('ftrackComponentsList') + if not component_list: + self.log.info("No component to overwrite...") + return for cl in component_list: cl['component_overwrite'] = True diff --git 
a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_description.py b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_description.py index c6a3d47f66..e7c265988e 100644 --- a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_description.py +++ b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_description.py @@ -6,9 +6,11 @@ Requires: """ import sys +import json import six import pyblish.api +from openpype.lib import StringTemplate class IntegrateFtrackDescription(pyblish.api.InstancePlugin): @@ -25,6 +27,10 @@ class IntegrateFtrackDescription(pyblish.api.InstancePlugin): description_template = "{comment}" def process(self, instance): + if not self.description_template: + self.log.info("Skipping. Description template is not set.") + return + # Check if there are any integrated AssetVersion entities asset_versions_key = "ftrackIntegratedAssetVersionsData" asset_versions_data_by_id = instance.data.get(asset_versions_key) @@ -38,39 +44,62 @@ class IntegrateFtrackDescription(pyblish.api.InstancePlugin): else: self.log.debug("Comment is set to `{}`".format(comment)) - session = instance.context.data["ftrackSession"] - intent = instance.context.data.get("intent") - intent_label = None - if intent and isinstance(intent, dict): - intent_val = intent.get("value") - intent_label = intent.get("label") - else: - intent_val = intent + if intent and "{intent}" in self.description_template: + value = intent.get("value") + if value: + intent = intent.get("label") or value - if not intent_label: - intent_label = intent_val or "" + if not intent and not comment: + self.log.info("Skipping. Intent and comment are empty.") + return # if intent label is set then format comment # - it is possible that intent_label is equal to "" (empty string) - if intent_label: - self.log.debug( - "Intent label is set to `{}`.".format(intent_label) - ) - + if intent: + self.log.debug("Intent is set to `{}`.".format(intent)) else: self.log.debug("Intent is not set.") + # If we would like to use more "optional" possibilities we would have + # come up with some expressions in templates or speicifc templates + # for all 3 possible combinations when comment and intent are + # set or not (when both are not set then description does not + # make sense). + fill_data = {} + if comment: + fill_data["comment"] = comment + if intent: + fill_data["intent"] = intent + + description = StringTemplate.format_template( + self.description_template, fill_data + ) + if not description.solved: + self.log.warning(( + "Couldn't solve template \"{}\" with data {}" + ).format( + self.description_template, json.dumps(fill_data, indent=4) + )) + return + + if not description: + self.log.debug(( + "Skipping. Result of template is empty string." 
+ " Template \"{}\" Fill data: {}" + ).format( + self.description_template, json.dumps(fill_data, indent=4) + )) + return + + session = instance.context.data["ftrackSession"] for asset_version_data in asset_versions_data_by_id.values(): asset_version = asset_version_data["asset_version"] # Backwards compatibility for older settings using # attribute 'note_with_intent_template' - comment = self.description_template.format(**{ - "intent": intent_label, - "comment": comment - }) - asset_version["comment"] = comment + + asset_version["comment"] = description try: session.commit() diff --git a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_farm_status.py b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_farm_status.py new file mode 100644 index 0000000000..ab5738c33f --- /dev/null +++ b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_farm_status.py @@ -0,0 +1,150 @@ +import pyblish.api +from openpype.lib import filter_profiles + + +class IntegrateFtrackFarmStatus(pyblish.api.ContextPlugin): + """Change task status when should be published on farm. + + Instance which has set "farm" key in data to 'True' is considered as will + be rendered on farm thus it's status should be changed. + """ + + order = pyblish.api.IntegratorOrder + 0.48 + label = "Integrate Ftrack Farm Status" + + farm_status_profiles = [] + + def process(self, context): + # Quick end + if not self.farm_status_profiles: + project_name = context.data["projectName"] + self.log.info(( + "Status profiles are not filled for project \"{}\". Skipping" + ).format(project_name)) + return + + filtered_instances = self.filter_instances(context) + instances_with_status_names = self.get_instances_with_statuse_names( + context, filtered_instances + ) + if instances_with_status_names: + self.fill_statuses(context, instances_with_status_names) + + def filter_instances(self, context): + filtered_instances = [] + for instance in context: + # Skip disabled instances + if instance.data.get("publish") is False: + continue + subset_name = instance.data["subset"] + msg_start = "Skipping instance {}.".format(subset_name) + if not instance.data.get("farm"): + self.log.debug( + "{} Won't be rendered on farm.".format(msg_start) + ) + continue + + task_entity = instance.data.get("ftrackTask") + if not task_entity: + self.log.debug( + "{} Does not have filled task".format(msg_start) + ) + continue + + filtered_instances.append(instance) + return filtered_instances + + def get_instances_with_statuse_names(self, context, instances): + instances_with_status_names = [] + for instance in instances: + family = instance.data["family"] + subset_name = instance.data["subset"] + task_entity = instance.data["ftrackTask"] + host_name = context.data["hostName"] + task_name = task_entity["name"] + task_type = task_entity["type"]["name"] + status_profile = filter_profiles( + self.farm_status_profiles, + { + "hosts": host_name, + "task_types": task_type, + "task_names": task_name, + "families": family, + "subsets": subset_name, + }, + logger=self.log + ) + if not status_profile: + # There already is log in 'filter_profiles' + continue + + status_name = status_profile["status_name"] + if status_name: + instances_with_status_names.append((instance, status_name)) + return instances_with_status_names + + def fill_statuses(self, context, instances_with_status_names): + # Prepare available task statuses on the project + project_name = context.data["projectName"] + session = context.data["ftrackSession"] + project_entity = session.query(( + "select 
project_schema from Project where full_name is \"{}\"" + ).format(project_name)).one() + project_schema = project_entity["project_schema"] + + task_type_ids = set() + for item in instances_with_status_names: + instance, _ = item + task_entity = instance.data["ftrackTask"] + task_type_ids.add(task_entity["type"]["id"]) + + task_statuses_by_type_id = { + task_type_id: project_schema.get_statuses("Task", task_type_id) + for task_type_id in task_type_ids + } + + # Keep track if anything has changed + skipped_status_names = set() + status_changed = False + for item in instances_with_status_names: + instance, status_name = item + task_entity = instance.data["ftrackTask"] + task_statuses = task_statuses_by_type_id[task_entity["type"]["id"]] + status_name_low = status_name.lower() + + status_id = None + status_name = None + # Skip if status name was already tried to be found + for status in task_statuses: + if status["name"].lower() == status_name_low: + status_id = status["id"] + status_name = status["name"] + break + + if status_id is None: + if status_name_low not in skipped_status_names: + skipped_status_names.add(status_name_low) + joined_status_names = ", ".join({ + '"{}"'.format(status["name"]) + for status in task_statuses + }) + self.log.warning(( + "Status \"{}\" is not available on project \"{}\"." + " Available statuses are {}" + ).format(status_name, project_name, joined_status_names)) + continue + + # Change task status id + if status_id != task_entity["status_id"]: + task_entity["status_id"] = status_id + status_changed = True + path = "/".join([ + item["name"] + for item in task_entity["link"] + ]) + self.log.debug("Set status \"{}\" to \"{}\"".format( + status_name, path + )) + + if status_changed: + session.commit() diff --git a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_instances.py b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_instances.py index c8d9e4117d..6024781d87 100644 --- a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_instances.py +++ b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_instances.py @@ -3,7 +3,11 @@ import json import copy import pyblish.api -from openpype.lib import get_ffprobe_streams +from openpype.lib.openpype_version import get_openpype_version +from openpype.lib.transcoding import ( + get_ffprobe_streams, + convert_ffprobe_fps_to_float, +) from openpype.lib.profiles_filtering import filter_profiles @@ -17,6 +21,17 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin): label = "Integrate Ftrack Component" families = ["ftrack"] + metadata_keys_to_label = { + "openpype_version": "OpenPype version", + "frame_start": "Frame start", + "frame_end": "Frame end", + "duration": "Duration", + "width": "Resolution width", + "height": "Resolution height", + "fps": "FPS", + "codec": "Codec" + } + family_mapping = { "camera": "cam", "look": "look", @@ -39,7 +54,8 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin): "reference": "reference" } keep_first_subset_name_for_review = True - asset_versions_status_profiles = {} + asset_versions_status_profiles = [] + additional_metadata_keys = [] def process(self, instance): self.log.debug("instance {}".format(instance)) @@ -58,7 +74,7 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin): version_number = int(instance_version) family = instance.data["family"] - family_low = instance.data["family"].lower() + family_low = family.lower() asset_type = instance.data.get("ftrackFamily") if not asset_type and family_low in self.family_mapping: @@ -79,11 +95,6 
@@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin): ).format(family)) return - # Prepare FPS - instance_fps = instance.data.get("fps") - if instance_fps is None: - instance_fps = instance.context.data["fps"] - status_name = self._get_asset_version_status_name(instance) # Base of component item data @@ -107,7 +118,8 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin): "component_data": None, "component_path": None, "component_location": None, - "component_location_name": None + "component_location_name": None, + "additional_data": {} } # Filter types of representations @@ -140,28 +152,21 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin): first_thumbnail_component = None first_thumbnail_component_repre = None for repre in thumbnail_representations: - published_path = repre.get("published_path") - if not published_path: - comp_files = repre["files"] - if isinstance(comp_files, (tuple, list, set)): - filename = comp_files[0] - else: - filename = comp_files - - published_path = os.path.join( - repre["stagingDir"], filename + repre_path = self._get_repre_path(instance, repre, False) + if not repre_path: + self.log.warning( + "Published path is not set and source was removed." ) - if not os.path.exists(published_path): - continue - repre["published_path"] = published_path + continue # Create copy of base comp item and append it thumbnail_item = copy.deepcopy(base_component_item) - thumbnail_item["component_path"] = repre["published_path"] + thumbnail_item["component_path"] = repre_path thumbnail_item["component_data"] = { "name": "thumbnail" } thumbnail_item["thumbnail"] = True + # Create copy of item before setting location src_components_to_add.append(copy.deepcopy(thumbnail_item)) # Create copy of first thumbnail @@ -176,10 +181,7 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin): # Add item to component list component_list.append(thumbnail_item) - if ( - not review_representations - and first_thumbnail_component is not None - ): + if first_thumbnail_component is not None: width = first_thumbnail_component_repre.get("width") height = first_thumbnail_component_repre.get("height") if not width or not height: @@ -216,6 +218,13 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin): extended_asset_name = "" multiple_reviewable = len(review_representations) > 1 for repre in review_representations: + repre_path = self._get_repre_path(instance, repre, False) + if not repre_path: + self.log.warning( + "Published path is not set and source was removed." + ) + continue + # Create copy of base comp item and append it review_item = copy.deepcopy(base_component_item) @@ -254,34 +263,15 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin): first_thumbnail_component[ "asset_data"]["name"] = extended_asset_name - frame_start = repre.get("frameStartFtrack") - frame_end = repre.get("frameEndFtrack") - if frame_start is None or frame_end is None: - frame_start = instance.data["frameStart"] - frame_end = instance.data["frameEnd"] - - # Frame end of uploaded video file should be duration in frames - # - frame start is always 0 - # - frame end is duration in frames - duration = frame_end - frame_start + 1 - - fps = repre.get("fps") - if fps is None: - fps = instance_fps - # Change location - review_item["component_path"] = repre["published_path"] + review_item["component_path"] = repre_path # Change component data review_item["component_data"] = { # Default component name is "main". 
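A worked example of the `ftr_meta` payload that `_prepare_component_metadata` produces for a review component, with illustrative values: the uploaded movie is described with a zero-based frame range whose length equals the publish duration.

```python
import json

frame_start = 1001
frame_end = 1100
fps = 24.0

# Duration in frames; used as 'frameOut' when ffprobe reports no duration
duration = (frame_end - frame_start) + 1            # 100

component_data = {
    "name": "ftrackreview-mp4",
    "metadata": {
        "ftr_meta": json.dumps({
            "frameIn": 0,
            "frameOut": duration,
            "frameRate": float(fps),
        })
    },
}
print(component_data["metadata"]["ftr_meta"])
```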
"name": "ftrackreview-mp4", - "metadata": { - "ftr_meta": json.dumps({ - "frameIn": 0, - "frameOut": int(duration), - "frameRate": float(fps) - }) - } + "metadata": self._prepare_component_metadata( + instance, repre, repre_path, True + ) } if is_first_review_repre: @@ -323,11 +313,14 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin): component_data = copy_src_item["component_data"] component_name = component_data["name"] component_data["name"] = component_name + "_src" + component_data["metadata"] = self._prepare_component_metadata( + instance, repre, copy_src_item["component_path"], False + ) component_list.append(copy_src_item) # Add others representations as component for repre in other_representations: - published_path = repre.get("published_path") + published_path = self._get_repre_path(instance, repre, True) if not published_path: continue # Create copy of base comp item and append it @@ -340,9 +333,13 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin): ): other_item["asset_data"]["name"] = extended_asset_name - other_item["component_data"] = { - "name": repre["name"] + component_data = { + "name": repre["name"], + "metadata": self._prepare_component_metadata( + instance, repre, published_path, False + ) } + other_item["component_data"] = component_data other_item["component_location_name"] = unmanaged_location_name other_item["component_path"] = published_path component_list.append(other_item) @@ -360,6 +357,54 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin): )) instance.data["ftrackComponentsList"] = component_list + def _collect_additional_metadata(self, streams): + pass + + def _get_repre_path(self, instance, repre, only_published): + """Get representation path that can be used for integration. + + When 'only_published' is set to true the validation of path is not + relevant. In that case we just need what is set in 'published_path' + as "reference". The reference is not used to get or upload the file but + for reference where the file was published. + + Args: + instance (pyblish.Instance): Processed instance object. Used + for source of staging dir if representation does not have + filled it. + repre (dict): Representation on instance which could be and + could not be integrated with main integrator. + only_published (bool): Care only about published paths and + ignore if filepath is not existing anymore. + + Returns: + str: Path to representation file. + None: Path is not filled or does not exists. 
+ """ + + published_path = repre.get("published_path") + if published_path: + published_path = os.path.normpath(published_path) + if os.path.exists(published_path): + return published_path + + if only_published: + return published_path + + comp_files = repre["files"] + if isinstance(comp_files, (tuple, list, set)): + filename = comp_files[0] + else: + filename = comp_files + + staging_dir = repre.get("stagingDir") + if not staging_dir: + staging_dir = instance.data["stagingDir"] + src_path = os.path.normpath(os.path.join(staging_dir, filename)) + if os.path.exists(src_path): + return src_path + return None + def _get_asset_version_status_name(self, instance): if not self.asset_versions_status_profiles: return None @@ -380,3 +425,137 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin): return None return matching_profile["status"] or None + + def _prepare_component_metadata( + self, instance, repre, component_path, is_review + ): + metadata = {} + if "openpype_version" in self.additional_metadata_keys: + label = self.metadata_keys_to_label["openpype_version"] + metadata[label] = get_openpype_version() + + extension = os.path.splitext(component_path)[-1] + streams = [] + try: + streams = get_ffprobe_streams(component_path) + except Exception: + self.log.debug(( + "Failed to retrieve information about intput {}" + ).format(component_path)) + + # Find video streams + video_streams = [ + stream + for stream in streams + if stream["codec_type"] == "video" + ] + # Skip if there are not video streams + # - exr is special case which can have issues with reading through + # ffmpegh but we want to set fps for it + if not video_streams and extension not in [".exr"]: + return metadata + + stream_width = None + stream_height = None + stream_fps = None + frame_out = None + codec_label = None + for video_stream in video_streams: + codec_label = video_stream.get("codec_long_name") + if not codec_label: + codec_label = video_stream.get("codec") + + if codec_label: + pix_fmt = video_stream.get("pix_fmt") + if pix_fmt: + codec_label += " ({})".format(pix_fmt) + + tmp_width = video_stream.get("width") + tmp_height = video_stream.get("height") + if tmp_width and tmp_height: + stream_width = tmp_width + stream_height = tmp_height + + input_framerate = video_stream.get("r_frame_rate") + stream_duration = video_stream.get("duration") + if input_framerate is None or stream_duration is None: + continue + try: + stream_fps = convert_ffprobe_fps_to_float( + input_framerate + ) + except ValueError: + self.log.warning(( + "Could not convert ffprobe fps to float \"{}\"" + ).format(input_framerate)) + continue + + stream_width = tmp_width + stream_height = tmp_height + + self.log.debug("FPS from stream is {} and duration is {}".format( + input_framerate, stream_duration + )) + frame_out = float(stream_duration) * stream_fps + break + + # Prepare FPS + instance_fps = instance.data.get("fps") + if instance_fps is None: + instance_fps = instance.context.data["fps"] + + repre_fps = repre.get("fps") + if repre_fps is not None: + repre_fps = float(repre_fps) + + fps = stream_fps or repre_fps or instance_fps + + # Prepare frame ranges + frame_start = repre.get("frameStartFtrack") + frame_end = repre.get("frameEndFtrack") + if frame_start is None or frame_end is None: + frame_start = instance.data["frameStart"] + frame_end = instance.data["frameEnd"] + duration = (frame_end - frame_start) + 1 + + for key, value in [ + ("fps", fps), + ("frame_start", frame_start), + ("frame_end", frame_end), + ("duration", duration), + 
("width", stream_width), + ("height", stream_height), + ("fps", fps), + ("codec", codec_label) + ]: + if not value or key not in self.additional_metadata_keys: + continue + label = self.metadata_keys_to_label[key] + metadata[label] = value + + if not is_review: + ftr_meta = {} + if fps: + ftr_meta["frameRate"] = fps + + if stream_width and stream_height: + ftr_meta["width"] = int(stream_width) + ftr_meta["height"] = int(stream_height) + metadata["ftr_meta"] = json.dumps(ftr_meta) + return metadata + + # Frame end of uploaded video file should be duration in frames + # - frame start is always 0 + # - frame end is duration in frames + if not frame_out: + frame_out = duration + + # Ftrack documentation says that it is required to have + # 'width' and 'height' in review component. But with those values + # review video does not play. + metadata["ftr_meta"] = json.dumps({ + "frameIn": 0, + "frameOut": frame_out, + "frameRate": float(fps) + }) + return metadata diff --git a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_note.py b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_note.py index 77a7ebdfcf..ac3fa874e0 100644 --- a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_note.py +++ b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_note.py @@ -9,9 +9,11 @@ Requires: """ import sys +import copy import six import pyblish.api +from openpype.lib import StringTemplate class IntegrateFtrackNote(pyblish.api.InstancePlugin): @@ -53,14 +55,10 @@ class IntegrateFtrackNote(pyblish.api.InstancePlugin): intent = instance.context.data.get("intent") intent_label = None - if intent and isinstance(intent, dict): - intent_val = intent.get("value") - intent_label = intent.get("label") - else: - intent_val = intent - - if not intent_label: - intent_label = intent_val or "" + if intent: + value = intent["value"] + if value: + intent_label = intent["label"] or value # if intent label is set then format comment # - it is possible that intent_label is equal to "" (empty string) @@ -96,6 +94,14 @@ class IntegrateFtrackNote(pyblish.api.InstancePlugin): labels.append(label) + base_format_data = { + "host_name": host_name, + "app_name": app_name, + "app_label": app_label, + "source": instance.data.get("source", '') + } + if comment: + base_format_data["comment"] = comment for asset_version_data in asset_versions_data_by_id.values(): asset_version = asset_version_data["asset_version"] component_items = asset_version_data["component_items"] @@ -109,23 +115,31 @@ class IntegrateFtrackNote(pyblish.api.InstancePlugin): template = self.note_template if template is None: template = self.note_with_intent_template - format_data = { - "intent": intent_label, - "comment": comment, - "host_name": host_name, - "app_name": app_name, - "app_label": app_label, - "published_paths": "
".join(sorted(published_paths)), - "source": instance.data.get("source", '') - } - comment = template.format(**format_data) - if not comment: + format_data = copy.deepcopy(base_format_data) + format_data["published_paths"] = "
".join( + sorted(published_paths) + ) + if intent: + if "{intent}" in template: + format_data["intent"] = intent_label + else: + format_data["intent"] = intent + + note_text = StringTemplate.format_template(template, format_data) + if not note_text.solved: + self.log.warning(( + "Note template require more keys then can be provided." + "\nTemplate: {}\nData: {}" + ).format(template, format_data)) + continue + + if not note_text: self.log.info(( "Note for AssetVersion {} would be empty. Skipping." "\nTemplate: {}\nData: {}" ).format(asset_version["id"], template, format_data)) continue - asset_version.create_note(comment, author=user, labels=labels) + asset_version.create_note(note_text, author=user, labels=labels) try: session.commit() diff --git a/openpype/modules/ftrack/plugins/publish/integrate_hierarchy_ftrack.py b/openpype/modules/ftrack/plugins/publish/integrate_hierarchy_ftrack.py index 1a5d74bf26..fa7a89050c 100644 --- a/openpype/modules/ftrack/plugins/publish/integrate_hierarchy_ftrack.py +++ b/openpype/modules/ftrack/plugins/publish/integrate_hierarchy_ftrack.py @@ -1,9 +1,12 @@ import sys import collections import six -import pyblish.api from copy import deepcopy + +import pyblish.api + from openpype.client import get_asset_by_id +from openpype.lib import filter_profiles # Copy of constant `openpype_modules.ftrack.lib.avalon_sync.CUST_ATTR_AUTO_SYNC` @@ -65,8 +68,15 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin): order = pyblish.api.IntegratorOrder - 0.04 label = 'Integrate Hierarchy To Ftrack' families = ["shot"] - hosts = ["hiero", "resolve", "standalonepublisher", "flame"] + hosts = [ + "hiero", + "resolve", + "standalonepublisher", + "flame", + "traypublisher" + ] optional = False + create_task_status_profiles = [] def process(self, context): self.context = context @@ -76,14 +86,16 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin): hierarchy_context = self._get_active_assets(context) self.log.debug("__ hierarchy_context: {}".format(hierarchy_context)) - self.session = self.context.data["ftrackSession"] + session = self.context.data["ftrackSession"] project_name = self.context.data["projectEntity"]["name"] query = 'Project where full_name is "{}"'.format(project_name) - project = self.session.query(query).one() - auto_sync_state = project[ - "custom_attributes"][CUST_ATTR_AUTO_SYNC] + project = session.query(query).one() + auto_sync_state = project["custom_attributes"][CUST_ATTR_AUTO_SYNC] - self.ft_project = None + self.session = session + self.ft_project = project + self.task_types = self.get_all_task_types(project) + self.task_statuses = self.get_task_statuses(project) # disable termporarily ftrack project's autosyncing if auto_sync_state: @@ -115,10 +127,7 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin): self.log.debug(entity_type) if entity_type.lower() == 'project': - query = 'Project where full_name is "{}"'.format(entity_name) - entity = self.session.query(query).one() - self.ft_project = entity - self.task_types = self.get_all_task_types(entity) + entity = self.ft_project elif self.ft_project is None or parent is None: raise AssertionError( @@ -147,8 +156,14 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin): # CUSTOM ATTRIBUTES custom_attributes = entity_data.get('custom_attributes', []) instances = [ - i for i in self.context if i.data['asset'] in entity['name'] + instance + for instance in self.context + if instance.data.get("asset") == entity["name"] ] + + for instance in instances: + 
instance.data["ftrackEntity"] = entity + for key in custom_attributes: hier_attr = hier_attr_by_key.get(key) # Use simple method if key is not hierarchical @@ -178,9 +193,6 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin): ) ) - for instance in instances: - instance.data['ftrackEntity'] = entity - try: self.session.commit() except Exception: @@ -190,13 +202,22 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin): six.reraise(tp, value, tb) # TASKS + instances_by_task_name = collections.defaultdict(list) + for instance in instances: + task_name = instance.data.get("task") + if task_name: + instances_by_task_name[task_name].append(instance) + tasks = entity_data.get('tasks', []) existing_tasks = [] tasks_to_create = [] for child in entity['children']: - if child.entity_type.lower() == 'task': - existing_tasks.append(child['name'].lower()) - # existing_tasks.append(child['type']['name']) + if child.entity_type.lower() == "task": + task_name_low = child["name"].lower() + existing_tasks.append(task_name_low) + + for instance in instances_by_task_name[task_name_low]: + instance["ftrackTask"] = child for task_name in tasks: task_type = tasks[task_name]["type"] @@ -206,18 +227,14 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin): tasks_to_create.append((task_name, task_type)) for task_name, task_type in tasks_to_create: - self.create_task( + task_entity = self.create_task( name=task_name, task_type=task_type, parent=entity ) - try: - self.session.commit() - except Exception: - tp, value, tb = sys.exc_info() - self.session.rollback() - self.session._configure_locations() - six.reraise(tp, value, tb) + + for instance in instances_by_task_name[task_name.lower()]: + instance.data["ftrackTask"] = task_entity # Incoming links. 
self.create_links(project_name, entity_data, entity) @@ -297,7 +314,37 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin): return tasks + def get_task_statuses(self, project_entity): + project_schema = project_entity["project_schema"] + task_workflow_statuses = project_schema["_task_workflow"]["statuses"] + return { + status["id"]: status + for status in task_workflow_statuses + } + def create_task(self, name, task_type, parent): + filter_data = { + "task_names": name, + "task_types": task_type + } + profile = filter_profiles( + self.create_task_status_profiles, + filter_data + ) + status_id = None + if profile: + status_name = profile["status_name"] + status_name_low = status_name.lower() + for _status_id, status in self.task_statuses.items(): + if status["name"].lower() == status_name_low: + status_id = _status_id + break + + if status_id is None: + self.log.warning( + "Task status \"{}\" was not found".format(status_name) + ) + task = self.session.create('Task', { 'name': name, 'parent': parent @@ -306,6 +353,8 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin): self.log.info(task_type) self.log.info(self.task_types) task['type'] = self.task_types[task_type] + if status_id is not None: + task["status_id"] = status_id try: self.session.commit() diff --git a/openpype/modules/ftrack/scripts/sub_event_processor.py b/openpype/modules/ftrack/scripts/sub_event_processor.py index d1e2e3aaeb..a5ce0511b8 100644 --- a/openpype/modules/ftrack/scripts/sub_event_processor.py +++ b/openpype/modules/ftrack/scripts/sub_event_processor.py @@ -4,6 +4,8 @@ import signal import socket import datetime +import ftrack_api + from openpype_modules.ftrack.ftrack_server.ftrack_server import FtrackServer from openpype_modules.ftrack.ftrack_server.lib import ( SocketSession, @@ -12,17 +14,12 @@ from openpype_modules.ftrack.ftrack_server.lib import ( ) from openpype.modules import ModulesManager -from openpype.api import Logger from openpype.lib import ( + Logger, get_openpype_version, get_build_version ) - -import ftrack_api - -log = Logger().get_logger("Event processor") - subprocess_started = datetime.datetime.now() @@ -68,6 +65,8 @@ def register(session): def main(args): + log = Logger.get_logger("Event processor") + port = int(args[-1]) # Create a TCP/IP socket sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) diff --git a/openpype/modules/ftrack/scripts/sub_event_storer.py b/openpype/modules/ftrack/scripts/sub_event_storer.py index 946ecbff79..204cce89e8 100644 --- a/openpype/modules/ftrack/scripts/sub_event_storer.py +++ b/openpype/modules/ftrack/scripts/sub_event_storer.py @@ -6,6 +6,8 @@ import socket import pymongo import ftrack_api + +from openpype.client import OpenPypeMongoConnection from openpype_modules.ftrack.ftrack_server.ftrack_server import FtrackServer from openpype_modules.ftrack.ftrack_server.lib import ( SocketSession, @@ -15,7 +17,6 @@ from openpype_modules.ftrack.ftrack_server.lib import ( ) from openpype_modules.ftrack.lib import get_ftrack_event_mongo_info from openpype.lib import ( - OpenPypeMongoConnection, get_openpype_version, get_build_version ) diff --git a/openpype/modules/ftrack/scripts/sub_legacy_server.py b/openpype/modules/ftrack/scripts/sub_legacy_server.py index e3a623c376..1f0fc1b369 100644 --- a/openpype/modules/ftrack/scripts/sub_legacy_server.py +++ b/openpype/modules/ftrack/scripts/sub_legacy_server.py @@ -5,11 +5,11 @@ import signal import threading import ftrack_api -from openpype.api import Logger +from openpype.lib import Logger from 
openpype.modules import ModulesManager from openpype_modules.ftrack.ftrack_server.ftrack_server import FtrackServer -log = Logger().get_logger("Event Server Legacy") +log = Logger.get_logger("Event Server Legacy") class TimerChecker(threading.Thread): diff --git a/openpype/modules/ftrack/scripts/sub_user_server.py b/openpype/modules/ftrack/scripts/sub_user_server.py index a3701a0950..930a2d51e2 100644 --- a/openpype/modules/ftrack/scripts/sub_user_server.py +++ b/openpype/modules/ftrack/scripts/sub_user_server.py @@ -2,6 +2,7 @@ import sys import signal import socket +from openpype.lib import Logger from openpype_modules.ftrack.ftrack_server.ftrack_server import FtrackServer from openpype_modules.ftrack.ftrack_server.lib import ( SocketSession, @@ -9,9 +10,7 @@ from openpype_modules.ftrack.ftrack_server.lib import ( ) from openpype.modules import ModulesManager -from openpype.api import Logger - -log = Logger().get_logger("FtrackUserServer") +log = Logger.get_logger("FtrackUserServer") def main(args): diff --git a/openpype/modules/ftrack/tray/ftrack_tray.py b/openpype/modules/ftrack/tray/ftrack_tray.py index e4ccc59c7a..bf774c427f 100644 --- a/openpype/modules/ftrack/tray/ftrack_tray.py +++ b/openpype/modules/ftrack/tray/ftrack_tray.py @@ -12,10 +12,11 @@ from ..lib import credentials from ..ftrack_module import FTRACK_MODULE_DIR from . import login_dialog -from openpype.api import Logger, resources +from openpype import resources +from openpype.lib import Logger -log = Logger().get_logger("FtrackModule") +log = Logger.get_logger("FtrackModule") class FtrackTrayWrapper: diff --git a/openpype/modules/interfaces.py b/openpype/modules/interfaces.py index 334485cab2..f92ec6bf2d 100644 --- a/openpype/modules/interfaces.py +++ b/openpype/modules/interfaces.py @@ -1,8 +1,33 @@ -from abc import abstractmethod +from abc import ABCMeta, abstractmethod, abstractproperty + +import six from openpype import resources -from openpype.modules import OpenPypeInterface + +class _OpenPypeInterfaceMeta(ABCMeta): + """OpenPypeInterface meta class to print proper string.""" + + def __str__(self): + return "<'OpenPypeInterface.{}'>".format(self.__name__) + + def __repr__(self): + return str(self) + + +@six.add_metaclass(_OpenPypeInterfaceMeta) +class OpenPypeInterface: + """Base class of Interface that can be used as Mixin with abstract parts. + + This is way how OpenPype module or addon can tell OpenPype that contain + implementation for specific functionality. + + Child classes of OpenPypeInterface may be used as mixin in different + OpenPype modules which means they have to have implemented methods defined + in the interface. By default interface does not have any abstract parts. + """ + + pass class IPluginPaths(OpenPypeInterface): @@ -50,12 +75,39 @@ class IPluginPaths(OpenPypeInterface): class ILaunchHookPaths(OpenPypeInterface): """Module has launch hook paths to return. + Modules does not have to inherit from this interface (changed 8.11.2022). + Module just have to have implemented 'get_launch_hook_paths' to be able use + the advantage. + Expected result is list of paths. ["path/to/launch_hooks_dir"] + + Deprecated: + This interface is not needed since OpenPype 3.14.*. Addon just have to + implement 'get_launch_hook_paths' which can expect Application object + or nothing as argument. + + Interface class will be removed after 3.16.*. """ @abstractmethod - def get_launch_hook_paths(self): + def get_launch_hook_paths(self, app): + """Paths to directory with application launch hooks. 
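A hypothetical addon following the deprecation note above: no interface inheritance, just a `get_launch_hook_paths` method. Names and paths are illustrative, and the `Application` object exposing `host_name` is an assumption used only to show host-specific filtering.

```python
import os


class ExampleAddon(object):
    """Illustrative addon sketch, not part of the diff."""

    root_dir = os.path.dirname(os.path.abspath(__file__))

    def get_launch_hook_paths(self, app):
        # 'app' is the launched Application; 'host_name' access is assumed
        # here just to demonstrate returning host specific hooks.
        if app is not None and getattr(app, "host_name", None) == "maya":
            return [os.path.join(self.root_dir, "hooks", "maya")]
        return [os.path.join(self.root_dir, "hooks")]
```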
+ + Method can be also defined without arguments. + ```python + def get_launch_hook_paths(self): + return [] + ``` + + Args: + app (Application): Application object which can be used for + filtering of which launch hook paths are returned. + + Returns: + Iterable[str]: Path to directories where launch hooks can be found. + """ + pass @@ -66,6 +118,7 @@ class ITrayModule(OpenPypeInterface): The module still must be usable if is not used in tray even if would do nothing. """ + tray_initialized = False _tray_manager = None @@ -78,16 +131,19 @@ class ITrayModule(OpenPypeInterface): This is where GUIs should be loaded or tray specific parts should be prepared. """ + pass @abstractmethod def tray_menu(self, tray_menu): """Add module's action to tray menu.""" + pass @abstractmethod def tray_start(self): """Start procedure in Pype tray.""" + pass @abstractmethod @@ -96,6 +152,7 @@ class ITrayModule(OpenPypeInterface): This is place where all threads should be shut. """ + pass def execute_in_main_thread(self, callback): @@ -104,6 +161,7 @@ class ITrayModule(OpenPypeInterface): Some callbacks need to be processed on main thread (menu actions must be added on main thread or they won't get triggered etc.) """ + if not self.tray_initialized: # TODO Called without initialized tray, still main thread needed try: @@ -128,6 +186,7 @@ class ITrayModule(OpenPypeInterface): msecs (int): Duration of message visibility in miliseconds. Default is 10000 msecs, may differ by Qt version. """ + if self._tray_manager: self._tray_manager.show_tray_message(title, message, icon, msecs) @@ -280,16 +339,19 @@ class ITrayService(ITrayModule): def set_service_running_icon(self): """Change icon of an QAction to green circle.""" + if self.menu_action: self.menu_action.setIcon(self.get_icon_running()) def set_service_failed_icon(self): """Change icon of an QAction to red circle.""" + if self.menu_action: self.menu_action.setIcon(self.get_icon_failed()) def set_service_idle_icon(self): """Change icon of an QAction to orange circle.""" + if self.menu_action: self.menu_action.setIcon(self.get_icon_idle()) @@ -303,6 +365,7 @@ class ISettingsChangeListener(OpenPypeInterface): "publish": ["path/to/publish_plugins"] } """ + @abstractmethod def on_system_settings_save( self, old_value, new_value, changes, new_value_metadata @@ -320,3 +383,24 @@ class ISettingsChangeListener(OpenPypeInterface): self, old_value, new_value, changes, project_name, new_value_metadata ): pass + + +class IHostAddon(OpenPypeInterface): + """Addon which also contain a host implementation.""" + + @abstractproperty + def host_name(self): + """Name of host which module represents.""" + + pass + + def get_workfile_extensions(self): + """Define workfile extensions for host. + + Not all hosts support workfiles thus this is optional implementation. + + Returns: + List[str]: Extensions used for workfiles with dot. 
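A minimal sketch of an addon implementing the new `IHostAddon` interface; the class name, host name and workfile extension are illustrative.

```python
from openpype.modules import OpenPypeModule
from openpype.modules.interfaces import IHostAddon


class ExampleHostAddon(OpenPypeModule, IHostAddon):
    """Illustrative host addon, not part of the diff."""

    name = "example_host"

    def initialize(self, module_settings):
        self.enabled = True

    @property
    def host_name(self):
        return "examplehost"

    def get_workfile_extensions(self):
        # Extensions are returned with the leading dot
        return [".exw"]
```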
+ """ + + return [] diff --git a/openpype/modules/kitsu/plugins/publish/collect_kitsu_entities.py b/openpype/modules/kitsu/plugins/publish/collect_kitsu_entities.py index d28ded06c7..c9e78b59eb 100644 --- a/openpype/modules/kitsu/plugins/publish/collect_kitsu_entities.py +++ b/openpype/modules/kitsu/plugins/publish/collect_kitsu_entities.py @@ -39,10 +39,12 @@ class CollectKitsuEntities(pyblish.api.ContextPlugin): kitsu_entity = gazu.asset.get_asset(zou_asset_data["id"]) if not kitsu_entity: - raise AssertionError(f"{entity_type} not found in kitsu!") + raise AssertionError("{} not found in kitsu!".format(entity_type)) context.data["kitsu_entity"] = kitsu_entity - self.log.debug(f"Collect kitsu {entity_type}: {kitsu_entity}") + self.log.debug( + "Collect kitsu {}: {}".format(entity_type, kitsu_entity) + ) if zou_task_data: kitsu_task = gazu.task.get_task(zou_task_data["id"]) diff --git a/openpype/modules/kitsu/utils/sync_service.py b/openpype/modules/kitsu/utils/sync_service.py index 577050c5af..441b95a7ec 100644 --- a/openpype/modules/kitsu/utils/sync_service.py +++ b/openpype/modules/kitsu/utils/sync_service.py @@ -2,11 +2,17 @@ import os import gazu +from openpype.client import ( + get_project, + get_assets, + get_asset_by_name +) from openpype.pipeline import AvalonMongoDB from .credentials import validate_credentials from .update_op_with_zou import ( create_op_asset, set_op_project, + get_kitsu_project_name, write_project_to_op, update_op_assets, ) @@ -119,17 +125,16 @@ class Listener: # Write into DB if update_project: - self.dbcon = self.dbcon.database[project_name] + self.dbcon.Session["AVALON_PROJECT"] = project_name self.dbcon.bulk_write([update_project]) def _delete_project(self, data): """Delete project.""" - project_doc = self.dbcon.find_one( - {"type": "project", "data.zou_id": data["project_id"]} - ) + + project_name = get_kitsu_project_name(data["project_id"]) # Delete project collection - self.dbcon.database[project_doc["name"]].drop() + self.dbcon.database[project_name].drop() # == Asset == @@ -150,7 +155,8 @@ class Listener: def _update_asset(self, data): """Update asset into OP DB.""" set_op_project(self.dbcon, data["project_id"]) - project_doc = self.dbcon.find_one({"type": "project"}) + project_name = self.dbcon.active_project() + project_doc = get_project(project_name) # Get gazu entity asset = gazu.asset.get_asset(data["asset_id"]) @@ -159,7 +165,7 @@ class Listener: # Query all assets of the local project zou_ids_and_asset_docs = { asset_doc["data"]["zou"]["id"]: asset_doc - for asset_doc in self.dbcon.find({"type": "asset"}) + for asset_doc in get_assets(project_name) if asset_doc["data"].get("zou", {}).get("id") } zou_ids_and_asset_docs[asset["project_id"]] = project_doc @@ -199,7 +205,8 @@ class Listener: def _update_episode(self, data): """Update episode into OP DB.""" set_op_project(self.dbcon, data["project_id"]) - project_doc = self.dbcon.find_one({"type": "project"}) + project_name = self.dbcon.active_project() + project_doc = get_project(project_name) # Get gazu entity episode = gazu.shot.get_episode(data["episode_id"]) @@ -208,7 +215,7 @@ class Listener: # Query all assets of the local project zou_ids_and_asset_docs = { asset_doc["data"]["zou"]["id"]: asset_doc - for asset_doc in self.dbcon.find({"type": "asset"}) + for asset_doc in get_assets(project_name) if asset_doc["data"].get("zou", {}).get("id") } zou_ids_and_asset_docs[episode["project_id"]] = project_doc @@ -249,7 +256,8 @@ class Listener: def _update_sequence(self, data): """Update sequence into 
OP DB.""" set_op_project(self.dbcon, data["project_id"]) - project_doc = self.dbcon.find_one({"type": "project"}) + project_name = self.dbcon.active_project() + project_doc = get_project(project_name) # Get gazu entity sequence = gazu.shot.get_sequence(data["sequence_id"]) @@ -258,7 +266,7 @@ class Listener: # Query all assets of the local project zou_ids_and_asset_docs = { asset_doc["data"]["zou"]["id"]: asset_doc - for asset_doc in self.dbcon.find({"type": "asset"}) + for asset_doc in get_assets(project_name) if asset_doc["data"].get("zou", {}).get("id") } zou_ids_and_asset_docs[sequence["project_id"]] = project_doc @@ -299,7 +307,8 @@ class Listener: def _update_shot(self, data): """Update shot into OP DB.""" set_op_project(self.dbcon, data["project_id"]) - project_doc = self.dbcon.find_one({"type": "project"}) + project_name = self.dbcon.active_project() + project_doc = get_project(project_name) # Get gazu entity shot = gazu.shot.get_shot(data["shot_id"]) @@ -308,7 +317,7 @@ class Listener: # Query all assets of the local project zou_ids_and_asset_docs = { asset_doc["data"]["zou"]["id"]: asset_doc - for asset_doc in self.dbcon.find({"type": "asset"}) + for asset_doc in get_assets(project_name) if asset_doc["data"].get("zou", {}).get("id") } zou_ids_and_asset_docs[shot["project_id"]] = project_doc @@ -335,14 +344,15 @@ class Listener: """Create new task into OP DB.""" # Get project entity set_op_project(self.dbcon, data["project_id"]) + project_name = self.dbcon.active_project() # Get gazu entity task = gazu.task.get_task(data["task_id"]) # Find asset doc - asset_doc = self.dbcon.find_one( - {"type": "asset", "data.zou.id": task["entity"]["id"]} - ) + parent_name = task["entity"]["name"] + + asset_doc = get_asset_by_name(project_name, parent_name) # Update asset tasks with new one asset_tasks = asset_doc["data"].get("tasks") @@ -359,10 +369,11 @@ class Listener: def _delete_task(self, data): """Delete task of OP DB.""" - set_op_project(self.dbcon, data["project_id"]) + set_op_project(self.dbcon, data["project_id"]) + project_name = self.dbcon.active_project() # Find asset doc - asset_docs = [doc for doc in self.dbcon.find({"type": "asset"})] + asset_docs = list(get_assets(project_name)) for doc in asset_docs: # Match task for name, task in doc["data"]["tasks"].items(): diff --git a/openpype/modules/kitsu/utils/update_op_with_zou.py b/openpype/modules/kitsu/utils/update_op_with_zou.py index de74b0c677..e03cf2b30e 100644 --- a/openpype/modules/kitsu/utils/update_op_with_zou.py +++ b/openpype/modules/kitsu/utils/update_op_with_zou.py @@ -10,6 +10,12 @@ from gazu.task import ( all_tasks_for_shot, ) +from openpype.client import ( + get_project, + get_assets, + get_asset_by_id, + get_asset_by_name, +) from openpype.pipeline import AvalonMongoDB from openpype.api import get_project_settings from openpype.lib import create_project @@ -33,6 +39,20 @@ def create_op_asset(gazu_entity: dict) -> dict: } +def get_kitsu_project_name(project_id: str) -> str: + """Get project name based on project id in kitsu. + + Args: + project_id (str): UUID of project in Kitsu. + + Returns: + str: Name of Kitsu project. + """ + + project = gazu.project.get_project(project_id) + return project["name"] + + def set_op_project(dbcon: AvalonMongoDB, project_id: str): """Set project context. 
@@ -40,9 +60,8 @@ def set_op_project(dbcon: AvalonMongoDB, project_id: str): dbcon (AvalonMongoDB): Connection to DB project_id (str): Project zou ID """ - project = gazu.project.get_project(project_id) - project_name = project["name"] - dbcon.Session["AVALON_PROJECT"] = project_name + + dbcon.Session["AVALON_PROJECT"] = get_kitsu_project_name(project_id) def update_op_assets( @@ -72,9 +91,7 @@ def update_op_assets( if not item_doc: # Create asset op_asset = create_op_asset(item) insert_result = dbcon.insert_one(op_asset) - item_doc = dbcon.find_one( - {"type": "asset", "_id": insert_result.inserted_id} - ) + item_doc = get_asset_by_id(project_name, insert_result.inserted_id) # Update asset item_data = deepcopy(item_doc["data"]) @@ -137,17 +154,23 @@ def update_op_assets( parent_zou_id = substitute_parent_item["parent_id"] else: parent_zou_id = ( - item.get("parent_id") + # For Asset, put under asset type directory + item.get("entity_type_id") + if item_type == "Asset" + else None + # Else, fallback on usual hierarchy + or item.get("parent_id") or item.get("episode_id") or item.get("source_id") - ) # TODO check consistency + ) - # Substitute Episode and Sequence by Shot - substitute_item_type = ( - "shots" - if item_type in ["Episode", "Sequence"] - else f"{item_type.lower()}s" - ) + # Substitute item type for general classification (assets or shots) + if item_type in ["Asset", "AssetType"]: + substitute_item_type = "assets" + elif item_type in ["Episode", "Sequence"]: + substitute_item_type = "shots" + else: + substitute_item_type = f"{item_type.lower()}s" entity_parent_folders = [ f for f in project_module_settings["entities_root"] @@ -161,15 +184,33 @@ def update_op_assets( asset_doc_ids[parent_zou_id]["_id"] if parent_zou_id else None ) if visual_parent_doc_id is None: - # Find root folder doc - root_folder_doc = dbcon.find_one( - { - "type": "asset", - "name": entity_parent_folders[-1], - "data.root_of": substitute_item_type, - }, - ["_id"], + # Find root folder docs + root_folder_docs = get_assets( + project_name, + asset_names=[entity_parent_folders[-1]], + fields=["_id", "data.root_of"], ) + # NOTE: Not sure why it's checking for entity type? + # OP3 does not support multiple assets with same names so type + # filtering is irelevant. 
+ # This way mimics previous implementation: + # ``` + # root_folder_doc = dbcon.find_one( + # { + # "type": "asset", + # "name": entity_parent_folders[-1], + # "data.root_of": substitute_item_type, + # }, + # ["_id"], + # ) + # ``` + root_folder_doc = None + for folder_doc in root_folder_docs: + root_of = folder_doc.get("data", {}).get("root_of") + if root_of == substitute_item_type: + root_folder_doc = folder_doc + break + if root_folder_doc: visual_parent_doc_id = root_folder_doc["_id"] @@ -178,13 +219,25 @@ def update_op_assets( # Add parents for hierarchy item_data["parents"] = [] - while parent_zou_id is not None: - parent_doc = asset_doc_ids[parent_zou_id] + ancestor_id = parent_zou_id + while ancestor_id is not None: + parent_doc = asset_doc_ids[ancestor_id] item_data["parents"].insert(0, parent_doc["name"]) # Get parent entity parent_entity = parent_doc["data"]["zou"] - parent_zou_id = parent_entity["parent_id"] + ancestor_id = parent_entity.get("parent_id") + + # Build OpenPype compatible name + if item_type in ["Shot", "Sequence"] and parent_zou_id is not None: + # Name with parents hierarchy "({episode}_){sequence}_{shot}" + # to avoid duplicate name issue + item_name = f"{item_data['parents'][-1]}_{item['name']}" + + # Update doc name + asset_doc_ids[item["id"]]["name"] = item_name + else: + item_name = item["name"] # Set root folders parents item_data["parents"] = entity_parent_folders + item_data["parents"] @@ -199,9 +252,9 @@ def update_op_assets( item_doc["_id"], { "$set": { - "name": item["name"], + "name": item_name, "data": item_data, - "parent": asset_doc_ids[item["project_id"]]["_id"], + "parent": project_doc["_id"], } }, ) @@ -222,13 +275,13 @@ def write_project_to_op(project: dict, dbcon: AvalonMongoDB) -> UpdateOne: UpdateOne: Update instance for the project """ project_name = project["name"] - project_doc = dbcon.database[project_name].find_one({"type": "project"}) + project_doc = get_project(project_name) if not project_doc: print(f"Creating project '{project_name}'") project_doc = create_project(project_name, project_name, dbcon=dbcon) # Project data and tasks - project_data = project["data"] or {} + project_data = project_doc["data"] or {} # Build project code and update Kitsu project_code = project.get("code") @@ -257,6 +310,7 @@ def write_project_to_op(project: dict, dbcon: AvalonMongoDB) -> UpdateOne: "config.tasks": { t["name"]: {"short_name": t.get("short_name", t["name"])} for t in gazu.task.all_task_types_for_project(project) + or gazu.task.all_task_types() }, "data": project_data, } @@ -292,6 +346,11 @@ def sync_all_projects(login: str, password: str): def sync_project_from_kitsu(dbcon: AvalonMongoDB, project: dict): """Update OP project in DB with Zou data. + `root_of` is meant to sort entities by type for a better readability in + the data tree. It puts all shot like (Shot and Episode and Sequence) and + asset entities under two different root folders or hierarchy, defined in + settings. + Args: dbcon (AvalonMongoDB): MongoDB connection project (dict): Project dict got using gazu. 
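A worked example of the name building added above, with illustrative values: shots and sequences get their closest parent prepended so names stay unique across episodes.

```python
item_type = "Shot"
item = {"name": "sh010"}
item_data = {"parents": ["ep01", "sq01"]}
parent_zou_id = "fake-sequence-id"

if item_type in ["Shot", "Sequence"] and parent_zou_id is not None:
    # "({episode}_){sequence}_{shot}" style name built from the hierarchy
    item_name = "{}_{}".format(item_data["parents"][-1], item["name"])
else:
    item_name = item["name"]

print(item_name)  # sq010_sh010 -> "sq01_sh010"
```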
@@ -306,12 +365,17 @@ def sync_project_from_kitsu(dbcon: AvalonMongoDB, project: dict): # Get all assets from zou all_assets = gazu.asset.all_assets_for_project(project) + all_asset_types = gazu.asset.all_asset_types_for_project(project) all_episodes = gazu.shot.all_episodes_for_project(project) all_seqs = gazu.shot.all_sequences_for_project(project) all_shots = gazu.shot.all_shots_for_project(project) all_entities = [ item - for item in all_assets + all_episodes + all_seqs + all_shots + for item in all_assets + + all_asset_types + + all_episodes + + all_seqs + + all_shots if naming_pattern.match(item["name"]) ] @@ -319,26 +383,44 @@ def sync_project_from_kitsu(dbcon: AvalonMongoDB, project: dict): bulk_writes.append(write_project_to_op(project, dbcon)) # Try to find project document - dbcon.Session["AVALON_PROJECT"] = project["name"] - project_doc = dbcon.find_one({"type": "project"}) + project_name = project["name"] + dbcon.Session["AVALON_PROJECT"] = project_name + project_doc = get_project(project_name) # Query all assets of the local project zou_ids_and_asset_docs = { asset_doc["data"]["zou"]["id"]: asset_doc - for asset_doc in dbcon.find({"type": "asset"}) + for asset_doc in get_assets(project_name) if asset_doc["data"].get("zou", {}).get("id") } zou_ids_and_asset_docs[project["id"]] = project_doc # Create entities root folders - project_module_settings = get_project_settings(project["name"])["kitsu"] + project_module_settings = get_project_settings(project_name)["kitsu"] for entity_type, root in project_module_settings["entities_root"].items(): parent_folders = root.split("/") direct_parent_doc = None for i, folder in enumerate(parent_folders, 1): - parent_doc = dbcon.find_one( - {"type": "asset", "name": folder, "data.root_of": entity_type} + parent_doc = get_asset_by_name( + project_name, folder, fields=["_id", "data.root_of"] ) + # NOTE: Not sure why it's checking for entity type? + # OP3 does not support multiple assets with same names so type + # filtering is irelevant. + # Also all of the entities could find be queried at once using + # 'get_assets'. 
+ # This way mimics previous implementation: + # ``` + # parent_doc = dbcon.find_one( + # {"type": "asset", "name": folder, "data.root_of": entity_type} + # ) + # ``` + if ( + parent_doc + and parent_doc.get("data", {}).get("root_of") != entity_type + ): + parent_doc = None + if not parent_doc: direct_parent_doc = dbcon.insert_one( { @@ -348,21 +430,20 @@ def sync_project_from_kitsu(dbcon: AvalonMongoDB, project: dict): "data": { "root_of": entity_type, "parents": parent_folders[:i], - "visualParent": direct_parent_doc, + "visualParent": direct_parent_doc.inserted_id + if direct_parent_doc + else None, "tasks": {}, }, } ) # Create - to_insert = [] - to_insert.extend( - [ - create_op_asset(item) - for item in all_entities - if item["id"] not in zou_ids_and_asset_docs.keys() - ] - ) + to_insert = [ + create_op_asset(item) + for item in all_entities + if item["id"] not in zou_ids_and_asset_docs.keys() + ] if to_insert: # Insert doc in DB dbcon.insert_many(to_insert) @@ -371,7 +452,7 @@ def sync_project_from_kitsu(dbcon: AvalonMongoDB, project: dict): zou_ids_and_asset_docs.update( { asset_doc["data"]["zou"]["id"]: asset_doc - for asset_doc in dbcon.find({"type": "asset"}) + for asset_doc in get_assets(project_name) if asset_doc["data"].get("zou") } ) diff --git a/openpype/modules/kitsu/utils/update_zou_with_op.py b/openpype/modules/kitsu/utils/update_zou_with_op.py index 81d421206f..da924aa5ee 100644 --- a/openpype/modules/kitsu/utils/update_zou_with_op.py +++ b/openpype/modules/kitsu/utils/update_zou_with_op.py @@ -6,6 +6,11 @@ from typing import List import gazu from pymongo import UpdateOne +from openpype.client import ( + get_projects, + get_project, + get_assets, +) from openpype.pipeline import AvalonMongoDB from openpype.api import get_project_settings from openpype.modules.kitsu.utils.credentials import validate_credentials @@ -36,7 +41,7 @@ def sync_zou(login: str, password: str): dbcon = AvalonMongoDB() dbcon.install() - op_projects = [p for p in dbcon.projects()] + op_projects = list(get_projects()) for project_doc in op_projects: sync_zou_from_op_project(project_doc["name"], dbcon, project_doc) @@ -53,9 +58,7 @@ def sync_zou_from_op_project( """ # Get project doc if not provided if not project_doc: - project_doc = dbcon.database[project_name].find_one( - {"type": "project"} - ) + project_doc = get_project(project_name) # Get all entities from zou print(f"Synchronizing {project_name}...") @@ -96,7 +99,7 @@ def sync_zou_from_op_project( dbcon.Session["AVALON_PROJECT"] = project_name asset_docs = { asset_doc["_id"]: asset_doc - for asset_doc in dbcon.find({"type": "asset"}) + for asset_doc in get_assets(project_name) } # Create new assets diff --git a/openpype/modules/log_viewer/tray/models.py b/openpype/modules/log_viewer/tray/models.py index aea62c381b..d369ffeb64 100644 --- a/openpype/modules/log_viewer/tray/models.py +++ b/openpype/modules/log_viewer/tray/models.py @@ -1,6 +1,6 @@ import collections from Qt import QtCore, QtGui -from openpype.lib.log import PypeLogger +from openpype.lib import Logger class LogModel(QtGui.QStandardItemModel): @@ -41,14 +41,14 @@ class LogModel(QtGui.QStandardItemModel): self.dbcon = None # Crash if connection is not possible to skip this module - if not PypeLogger.initialized: - PypeLogger.initialize() + if not Logger.initialized: + Logger.initialize() - connection = PypeLogger.get_log_mongo_connection() + connection = Logger.get_log_mongo_connection() if connection: - PypeLogger.bootstrap_mongo_log() - database = 
connection[PypeLogger.log_database_name] - self.dbcon = database[PypeLogger.log_collection_name] + Logger.bootstrap_mongo_log() + database = connection[Logger.log_database_name] + self.dbcon = database[Logger.log_collection_name] def headerData(self, section, orientation, role): if ( diff --git a/openpype/modules/royalrender/api.py b/openpype/modules/royalrender/api.py index ed9e71f240..de1dba8724 100644 --- a/openpype/modules/royalrender/api.py +++ b/openpype/modules/royalrender/api.py @@ -5,13 +5,10 @@ import os from openpype.settings import get_project_settings from openpype.lib.local_settings import OpenPypeSettingsRegistry -from openpype.lib import PypeLogger, run_subprocess +from openpype.lib import Logger, run_subprocess from .rr_job import RRJob, SubmitFile, SubmitterParameter -log = PypeLogger.get_logger("RoyalRender") - - class Api: _settings = None @@ -19,6 +16,7 @@ class Api: RR_SUBMIT_API = 2 def __init__(self, settings, project=None): + self.log = Logger.get_logger("RoyalRender") self._settings = settings self._initialize_rr(project) @@ -137,7 +135,7 @@ class Api: rr_console += ".exe" args = [rr_console, file] - run_subprocess(" ".join(args), logger=log) + run_subprocess(" ".join(args), logger=self.log) def _submit_using_api(self, file): # type: (SubmitFile) -> None @@ -159,11 +157,11 @@ class Api: rr_server = tcp.getRRServer() if len(rr_server) == 0: - log.info("Got RR IP address {}".format(rr_server)) + self.log.info("Got RR IP address {}".format(rr_server)) # TODO: Port is hardcoded in RR? If not, move it to Settings if not tcp.setServer(rr_server, 7773): - log.error( + self.log.error( "Can not set RR server: {}".format(tcp.errorMessage())) raise RoyalRenderException(tcp.errorMessage()) diff --git a/openpype/modules/settings_action.py b/openpype/modules/settings_action.py index 2b4b51e3ad..1e7eca4dec 100644 --- a/openpype/modules/settings_action.py +++ b/openpype/modules/settings_action.py @@ -23,6 +23,11 @@ class SettingsAction(OpenPypeModule, ITrayAction): """Initialization in tray implementation of ITrayAction.""" self.create_settings_window() + def tray_exit(self): + # Close settings UI to remove settings lock + if self.settings_window: + self.settings_window.close() + def on_action_trigger(self): """Implementation for action trigger of ITrayAction.""" self.show_settings_window() diff --git a/openpype/modules/shotgrid/README.md b/openpype/modules/shotgrid/README.md new file mode 100644 index 0000000000..cbee0e9bf4 --- /dev/null +++ b/openpype/modules/shotgrid/README.md @@ -0,0 +1,19 @@ +## Shotgrid Module + +### Pre-requisites + +Install and launch a [shotgrid leecher](https://github.com/Ellipsanime/shotgrid-leecher) server + +### Quickstart + +The goal of this tutorial is to synchronize an already existing shotgrid project with OpenPype. 
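+
+The keys below are the ones this module reads in `lib/settings.py`, `shotgrid_module.py` and the publish collectors added in this PR; the values are purely illustrative and the exact schema is defined by the OpenPype settings UI, so treat this as a rough sketch of the system settings shape:
+
+```python
+# Hypothetical example values - only the key names come from the module code.
+shotgrid_module_settings = {
+    "enabled": True,
+    "leecher_backend_url": "http://my-leecher:8080/api",
+    "leecher_manager_url": "http://my-leecher:8080/manager",
+    "shotgrid_settings": {
+        "my_server": {
+            "shotgrid_url": "https://mystudio.shotgrid.autodesk.com",
+            "shotgrid_script_name": "openpype_publish",
+            "shotgrid_script_key": "<script api key>",
+        }
+    },
+}
+```
+
+Per project, the **project settings** additionally carry the `shotgrid_server` name and the `shotgrid_project_id` used by the publish collectors.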
+
+- Activate the shotgrid module in the **system settings** and enter the shotgrid leecher server API url
+
+- Create a new OpenPype project with the **project manager**
+
+- Enter the shotgrid authentication info (url, script name, api key) and the shotgrid project ID related to this OpenPype project in the **project settings**
+
+- Use the batch interface (Tray > shotgrid > Launch batch), select your project and click "batch"
+
+- You can now access your shotgrid entities within the **avalon launcher** and publish information to shotgrid with **pyblish**
diff --git a/openpype/modules/shotgrid/__init__.py b/openpype/modules/shotgrid/__init__.py
new file mode 100644
index 0000000000..f1337a9492
--- /dev/null
+++ b/openpype/modules/shotgrid/__init__.py
@@ -0,0 +1,5 @@
+from .shotgrid_module import (
+    ShotgridModule,
+)
+
+__all__ = ("ShotgridModule",)
diff --git a/openpype/modules/shotgrid/lib/__init__.py b/openpype/modules/shotgrid/lib/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/openpype/modules/shotgrid/lib/const.py b/openpype/modules/shotgrid/lib/const.py
new file mode 100644
index 0000000000..2a34800fac
--- /dev/null
+++ b/openpype/modules/shotgrid/lib/const.py
@@ -0,0 +1 @@
+MODULE_NAME = "shotgrid"
diff --git a/openpype/modules/shotgrid/lib/credentials.py b/openpype/modules/shotgrid/lib/credentials.py
new file mode 100644
index 0000000000..337c4f6ecb
--- /dev/null
+++ b/openpype/modules/shotgrid/lib/credentials.py
@@ -0,0 +1,125 @@
+
+from urllib.parse import urlparse
+
+import shotgun_api3
+from shotgun_api3.shotgun import AuthenticationFault
+
+from openpype.lib import OpenPypeSecureRegistry, OpenPypeSettingsRegistry
+from openpype.modules.shotgrid.lib.record import Credentials
+
+
+def _get_shotgrid_secure_key(hostname, key):
+    """Secure item key for entered hostname."""
+    return f"shotgrid/{hostname}/{key}"
+
+
+def _get_secure_value_and_registry(
+    hostname,
+    name,
+):
+    key = _get_shotgrid_secure_key(hostname, name)
+    registry = OpenPypeSecureRegistry(key)
+    return registry.get_item(name, None), registry
+
+
+def get_shotgrid_hostname(shotgrid_url):
+
+    if not shotgrid_url:
+        raise Exception("Shotgrid url cannot be a null")
+    valid_shotgrid_url = (
+        f"//{shotgrid_url}" if "//" not in shotgrid_url else shotgrid_url
+    )
+    return urlparse(valid_shotgrid_url).hostname
+
+
+# Credentials storing function (using keyring)
+
+
+def get_credentials(shotgrid_url):
+    hostname = get_shotgrid_hostname(shotgrid_url)
+    if not hostname:
+        return None
+    login_value, _ = _get_secure_value_and_registry(
+        hostname,
+        Credentials.login_key_prefix(),
+    )
+    password_value, _ = _get_secure_value_and_registry(
+        hostname,
+        Credentials.password_key_prefix(),
+    )
+    return Credentials(login_value, password_value)
+
+
+def save_credentials(login, password, shotgrid_url):
+    hostname = get_shotgrid_hostname(shotgrid_url)
+    _, login_registry = _get_secure_value_and_registry(
+        hostname,
+        Credentials.login_key_prefix(),
+    )
+    _, password_registry = _get_secure_value_and_registry(
+        hostname,
+        Credentials.password_key_prefix(),
+    )
+    clear_credentials(shotgrid_url)
+    login_registry.set_item(Credentials.login_key_prefix(), login)
+    password_registry.set_item(Credentials.password_key_prefix(), password)
+
+
+def clear_credentials(shotgrid_url):
+    hostname = get_shotgrid_hostname(shotgrid_url)
+    login_value, login_registry = _get_secure_value_and_registry(
+        hostname,
+        Credentials.login_key_prefix(),
+    )
+    password_value, password_registry = 
_get_secure_value_and_registry( + hostname, + Credentials.password_key_prefix(), + ) + + if login_value is not None: + login_registry.delete_item(Credentials.login_key_prefix()) + + if password_value is not None: + password_registry.delete_item(Credentials.password_key_prefix()) + + +# Login storing function (using json) + + +def get_local_login(): + reg = OpenPypeSettingsRegistry() + try: + return str(reg.get_item("shotgrid_login")) + except Exception: + return None + + +def save_local_login(login): + reg = OpenPypeSettingsRegistry() + reg.set_item("shotgrid_login", login) + + +def clear_local_login(): + reg = OpenPypeSettingsRegistry() + reg.delete_item("shotgrid_login") + + +def check_credentials( + login, + password, + shotgrid_url, +): + + if not shotgrid_url or not login or not password: + return False + try: + session = shotgun_api3.Shotgun( + shotgrid_url, + login=login, + password=password, + ) + session.preferences_read() + session.close() + except AuthenticationFault: + return False + return True diff --git a/openpype/modules/shotgrid/lib/record.py b/openpype/modules/shotgrid/lib/record.py new file mode 100644 index 0000000000..f62f4855d5 --- /dev/null +++ b/openpype/modules/shotgrid/lib/record.py @@ -0,0 +1,20 @@ + +class Credentials: + login = None + password = None + + def __init__(self, login, password) -> None: + super().__init__() + self.login = login + self.password = password + + def is_empty(self): + return not (self.login and self.password) + + @staticmethod + def login_key_prefix(): + return "login" + + @staticmethod + def password_key_prefix(): + return "password" diff --git a/openpype/modules/shotgrid/lib/settings.py b/openpype/modules/shotgrid/lib/settings.py new file mode 100644 index 0000000000..924099f04b --- /dev/null +++ b/openpype/modules/shotgrid/lib/settings.py @@ -0,0 +1,18 @@ +from openpype.api import get_system_settings, get_project_settings +from openpype.modules.shotgrid.lib.const import MODULE_NAME + + +def get_shotgrid_project_settings(project): + return get_project_settings(project).get(MODULE_NAME, {}) + + +def get_shotgrid_settings(): + return get_system_settings().get("modules", {}).get(MODULE_NAME, {}) + + +def get_shotgrid_servers(): + return get_shotgrid_settings().get("shotgrid_settings", {}) + + +def get_leecher_backend_url(): + return get_shotgrid_settings().get("leecher_backend_url") diff --git a/openpype/modules/shotgrid/plugins/publish/collect_shotgrid_entities.py b/openpype/modules/shotgrid/plugins/publish/collect_shotgrid_entities.py new file mode 100644 index 0000000000..0b03ac2e5d --- /dev/null +++ b/openpype/modules/shotgrid/plugins/publish/collect_shotgrid_entities.py @@ -0,0 +1,100 @@ +import os + +import pyblish.api +from openpype.lib.mongo import OpenPypeMongoConnection + + +class CollectShotgridEntities(pyblish.api.ContextPlugin): + """Collect shotgrid entities according to the current context""" + + order = pyblish.api.CollectorOrder + 0.499 + label = "Shotgrid entities" + + def process(self, context): + + avalon_project = context.data.get("projectEntity") + avalon_asset = context.data.get("assetEntity") + avalon_task_name = os.getenv("AVALON_TASK") + + self.log.info(avalon_project) + self.log.info(avalon_asset) + + sg_project = _get_shotgrid_project(context) + sg_task = _get_shotgrid_task( + avalon_project, + avalon_asset, + avalon_task_name + ) + sg_entity = _get_shotgrid_entity(avalon_project, avalon_asset) + + if sg_project: + context.data["shotgridProject"] = sg_project + self.log.info( + "Collected correspondig shotgrid 
project : {}".format( + sg_project + ) + ) + + if sg_task: + context.data["shotgridTask"] = sg_task + self.log.info( + "Collected correspondig shotgrid task : {}".format(sg_task) + ) + + if sg_entity: + context.data["shotgridEntity"] = sg_entity + self.log.info( + "Collected correspondig shotgrid entity : {}".format(sg_entity) + ) + + def _find_existing_version(self, code, context): + + filters = [ + ["project", "is", context.data.get("shotgridProject")], + ["sg_task", "is", context.data.get("shotgridTask")], + ["entity", "is", context.data.get("shotgridEntity")], + ["code", "is", code], + ] + + sg = context.data.get("shotgridSession") + return sg.find_one("Version", filters, []) + + +def _get_shotgrid_collection(project): + client = OpenPypeMongoConnection.get_mongo_client() + return client.get_database("shotgrid_openpype").get_collection(project) + + +def _get_shotgrid_project(context): + shotgrid_project_id = context.data["project_settings"].get( + "shotgrid_project_id") + if shotgrid_project_id: + return {"type": "Project", "id": shotgrid_project_id} + return {} + + +def _get_shotgrid_task(avalon_project, avalon_asset, avalon_task): + sg_col = _get_shotgrid_collection(avalon_project["name"]) + shotgrid_task_hierarchy_row = sg_col.find_one( + { + "type": "Task", + "_id": {"$regex": "^" + avalon_task + "_[0-9]*"}, + "parent": {"$regex": ".*," + avalon_asset["name"] + ","}, + } + ) + if shotgrid_task_hierarchy_row: + return {"type": "Task", "id": shotgrid_task_hierarchy_row["src_id"]} + return {} + + +def _get_shotgrid_entity(avalon_project, avalon_asset): + sg_col = _get_shotgrid_collection(avalon_project["name"]) + shotgrid_entity_hierarchy_row = sg_col.find_one( + {"_id": avalon_asset["name"]} + ) + if shotgrid_entity_hierarchy_row: + return { + "type": shotgrid_entity_hierarchy_row["type"], + "id": shotgrid_entity_hierarchy_row["src_id"], + } + return {} diff --git a/openpype/modules/shotgrid/plugins/publish/collect_shotgrid_session.py b/openpype/modules/shotgrid/plugins/publish/collect_shotgrid_session.py new file mode 100644 index 0000000000..9d5d2271bf --- /dev/null +++ b/openpype/modules/shotgrid/plugins/publish/collect_shotgrid_session.py @@ -0,0 +1,123 @@ +import os + +import pyblish.api +import shotgun_api3 +from shotgun_api3.shotgun import AuthenticationFault + +from openpype.lib import OpenPypeSettingsRegistry +from openpype.modules.shotgrid.lib.settings import ( + get_shotgrid_servers, + get_shotgrid_project_settings, +) + + +class CollectShotgridSession(pyblish.api.ContextPlugin): + """Collect shotgrid session using user credentials""" + + order = pyblish.api.CollectorOrder + label = "Shotgrid user session" + + def process(self, context): + + certificate_path = os.getenv("SHOTGUN_API_CACERTS") + if certificate_path is None or not os.path.exists(certificate_path): + self.log.info( + "SHOTGUN_API_CACERTS does not contains a valid \ + path: {}".format( + certificate_path + ) + ) + certificate_path = get_shotgrid_certificate() + self.log.info("Get Certificate from shotgrid_api") + + if not os.path.exists(certificate_path): + self.log.error( + "Could not find certificate in shotgun_api3: \ + {}".format( + certificate_path + ) + ) + return + + set_shotgrid_certificate(certificate_path) + self.log.info("Set Certificate: {}".format(certificate_path)) + + avalon_project = os.getenv("AVALON_PROJECT") + + shotgrid_settings = get_shotgrid_project_settings(avalon_project) + self.log.info("shotgrid settings: {}".format(shotgrid_settings)) + shotgrid_servers_settings = 
get_shotgrid_servers() + self.log.info( + "shotgrid_servers_settings: {}".format(shotgrid_servers_settings) + ) + + shotgrid_server = shotgrid_settings.get("shotgrid_server", "") + if not shotgrid_server: + self.log.error( + "No Shotgrid server found, please choose a credential" + "in script name and script key in OpenPype settings" + ) + + shotgrid_server_setting = shotgrid_servers_settings.get( + shotgrid_server, {} + ) + shotgrid_url = shotgrid_server_setting.get("shotgrid_url", "") + + shotgrid_script_name = shotgrid_server_setting.get( + "shotgrid_script_name", "" + ) + shotgrid_script_key = shotgrid_server_setting.get( + "shotgrid_script_key", "" + ) + if not shotgrid_script_name and not shotgrid_script_key: + self.log.error( + "No Shotgrid api credential found, please enter " + "script name and script key in OpenPype settings" + ) + + login = get_login() or os.getenv("OPENPYPE_SG_USER") + + if not login: + self.log.error( + "No Shotgrid login found, please " + "login to shotgrid withing openpype Tray" + ) + + session = shotgun_api3.Shotgun( + base_url=shotgrid_url, + script_name=shotgrid_script_name, + api_key=shotgrid_script_key, + sudo_as_login=login, + ) + + try: + session.preferences_read() + except AuthenticationFault: + raise ValueError( + "Could not connect to shotgrid {} with user {}".format( + shotgrid_url, login + ) + ) + + self.log.info( + "Logged to shotgrid {} with user {}".format(shotgrid_url, login) + ) + context.data["shotgridSession"] = session + context.data["shotgridUser"] = login + + +def get_shotgrid_certificate(): + shotgun_api_path = os.path.dirname(shotgun_api3.__file__) + return os.path.join(shotgun_api_path, "lib", "certifi", "cacert.pem") + + +def set_shotgrid_certificate(certificate): + os.environ["SHOTGUN_API_CACERTS"] = certificate + + +def get_login(): + reg = OpenPypeSettingsRegistry() + try: + return str(reg.get_item("shotgrid_login")) + except Exception: + return None diff --git a/openpype/modules/shotgrid/plugins/publish/integrate_shotgrid_publish.py b/openpype/modules/shotgrid/plugins/publish/integrate_shotgrid_publish.py new file mode 100644 index 0000000000..cfd2d10fd9 --- /dev/null +++ b/openpype/modules/shotgrid/plugins/publish/integrate_shotgrid_publish.py @@ -0,0 +1,77 @@ +import os +import pyblish.api + + +class IntegrateShotgridPublish(pyblish.api.InstancePlugin): + """ + Create published Files from representations and add it to version. If + representation is tagged add shotgrid review, it will add it in + path to movie for a movie file or path to frame for an image sequence. 
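+
+    Illustrative sketch of the PublishedFile data this plugin creates or
+    updates (see ``process`` below; the actual values come from the
+    collected context and instance)::
+
+        {
+            "project": context.data["shotgridProject"],
+            "code": "<file name of the published representation>",
+            "entity": context.data["shotgridEntity"],
+            "task": context.data["shotgridTask"],
+            "version": instance.data["shotgridVersion"],
+            "path": {"local_path": "<published_path of the representation>"},
+        }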
+ """ + + order = pyblish.api.IntegratorOrder + 0.499 + label = "Shotgrid Published Files" + + def process(self, instance): + + context = instance.context + + self.sg = context.data.get("shotgridSession") + + shotgrid_version = instance.data.get("shotgridVersion") + + for representation in instance.data.get("representations", []): + + local_path = representation.get("published_path") + code = os.path.basename(local_path) + + if representation.get("tags", []): + continue + + published_file = self._find_existing_publish( + code, context, shotgrid_version + ) + + published_file_data = { + "project": context.data.get("shotgridProject"), + "code": code, + "entity": context.data.get("shotgridEntity"), + "task": context.data.get("shotgridTask"), + "version": shotgrid_version, + "path": {"local_path": local_path}, + } + if not published_file: + published_file = self._create_published(published_file_data) + self.log.info( + "Create Shotgrid PublishedFile: {}".format(published_file) + ) + else: + self.sg.update( + published_file["type"], + published_file["id"], + published_file_data, + ) + self.log.info( + "Update Shotgrid PublishedFile: {}".format(published_file) + ) + + if instance.data["family"] == "image": + self.sg.upload_thumbnail( + published_file["type"], published_file["id"], local_path + ) + instance.data["shotgridPublishedFile"] = published_file + + def _find_existing_publish(self, code, context, shotgrid_version): + + filters = [ + ["project", "is", context.data.get("shotgridProject")], + ["task", "is", context.data.get("shotgridTask")], + ["entity", "is", context.data.get("shotgridEntity")], + ["version", "is", shotgrid_version], + ["code", "is", code], + ] + return self.sg.find_one("PublishedFile", filters, []) + + def _create_published(self, published_file_data): + + return self.sg.create("PublishedFile", published_file_data) diff --git a/openpype/modules/shotgrid/plugins/publish/integrate_shotgrid_version.py b/openpype/modules/shotgrid/plugins/publish/integrate_shotgrid_version.py new file mode 100644 index 0000000000..a1b7140e22 --- /dev/null +++ b/openpype/modules/shotgrid/plugins/publish/integrate_shotgrid_version.py @@ -0,0 +1,92 @@ +import os +import pyblish.api + + +class IntegrateShotgridVersion(pyblish.api.InstancePlugin): + """Integrate Shotgrid Version""" + + order = pyblish.api.IntegratorOrder + 0.497 + label = "Shotgrid Version" + + sg = None + + def process(self, instance): + + context = instance.context + self.sg = context.data.get("shotgridSession") + + # TODO: Use path template solver to build version code from settings + anatomy = instance.data.get("anatomyData", {}) + code = "_".join( + [ + anatomy["project"]["code"], + anatomy["parent"], + anatomy["asset"], + anatomy["task"]["name"], + "v{:03}".format(int(anatomy["version"])), + ] + ) + + version = self._find_existing_version(code, context) + + if not version: + version = self._create_version(code, context) + self.log.info("Create Shotgrid version: {}".format(version)) + else: + self.log.info("Use existing Shotgrid version: {}".format(version)) + + data_to_update = {} + status = context.data.get("intent", {}).get("value") + if status: + data_to_update["sg_status_list"] = status + + for representation in instance.data.get("representations", []): + local_path = representation.get("published_path") + code = os.path.basename(local_path) + + if "shotgridreview" in representation.get("tags", []): + + if representation["ext"] in ["mov", "avi"]: + self.log.info( + "Upload review: {} for version shotgrid {}".format( + 
local_path, version.get("id") + ) + ) + self.sg.upload( + "Version", + version.get("id"), + local_path, + field_name="sg_uploaded_movie", + ) + + data_to_update["sg_path_to_movie"] = local_path + + elif representation["ext"] in ["jpg", "png", "exr", "tga"]: + path_to_frame = local_path.replace("0000", "#") + data_to_update["sg_path_to_frames"] = path_to_frame + + self.log.info("Update Shotgrid version with {}".format(data_to_update)) + self.sg.update("Version", version["id"], data_to_update) + + instance.data["shotgridVersion"] = version + + def _find_existing_version(self, code, context): + + filters = [ + ["project", "is", context.data.get("shotgridProject")], + ["sg_task", "is", context.data.get("shotgridTask")], + ["entity", "is", context.data.get("shotgridEntity")], + ["code", "is", code], + ] + return self.sg.find_one("Version", filters, []) + + def _create_version(self, code, context): + + version_data = { + "project": context.data.get("shotgridProject"), + "sg_task": context.data.get("shotgridTask"), + "entity": context.data.get("shotgridEntity"), + "code": code, + } + + return self.sg.create("Version", version_data) diff --git a/openpype/modules/shotgrid/plugins/publish/validate_shotgrid_user.py b/openpype/modules/shotgrid/plugins/publish/validate_shotgrid_user.py new file mode 100644 index 0000000000..c14c980e2a --- /dev/null +++ b/openpype/modules/shotgrid/plugins/publish/validate_shotgrid_user.py @@ -0,0 +1,38 @@ +import pyblish.api +import openpype.api + + +class ValidateShotgridUser(pyblish.api.ContextPlugin): + """ + Check if user is valid and have access to the project. + """ + + label = "Validate Shotgrid User" + order = openpype.api.ValidateContentsOrder + + def process(self, context): + sg = context.data.get("shotgridSession") + + login = context.data.get("shotgridUser") + self.log.info("Login shotgrid set in OpenPype is {}".format(login)) + project = context.data.get("shotgridProject") + self.log.info("Current shotgun project is {}".format(project)) + + if not (login and sg and project): + raise KeyError() + + user = sg.find_one("HumanUser", [["login", "is", login]], ["projects"]) + + self.log.info(user) + self.log.info(login) + user_projects_id = [p["id"] for p in user.get("projects", [])] + if not project.get("id") in user_projects_id: + raise PermissionError( + "Login {} don't have access to the project {}".format( + login, project + ) + ) + + self.log.info( + "Login {} have access to the project {}".format(login, project) + ) diff --git a/openpype/modules/shotgrid/server/README.md b/openpype/modules/shotgrid/server/README.md new file mode 100644 index 0000000000..15e056ff3e --- /dev/null +++ b/openpype/modules/shotgrid/server/README.md @@ -0,0 +1,5 @@ + +### Shotgrid server + +Please refer to the external project that covers Openpype/Shotgrid communication: + - https://github.com/Ellipsanime/shotgrid-leecher diff --git a/openpype/modules/shotgrid/shotgrid_module.py b/openpype/modules/shotgrid/shotgrid_module.py new file mode 100644 index 0000000000..281c6fdcad --- /dev/null +++ b/openpype/modules/shotgrid/shotgrid_module.py @@ -0,0 +1,55 @@ +import os + +from openpype_interfaces import ( + ITrayModule, + IPluginPaths, +) + +from openpype.modules import OpenPypeModule + +SHOTGRID_MODULE_DIR = os.path.dirname(os.path.abspath(__file__)) + + +class ShotgridModule(OpenPypeModule, ITrayModule, IPluginPaths): + leecher_manager_url = None + name = "shotgrid" + enabled = False + project_id = None + tray_wrapper = None + + def initialize(self, modules_settings): + 
shotgrid_settings = modules_settings.get(self.name, dict()) + self.enabled = shotgrid_settings.get("enabled", False) + self.leecher_manager_url = shotgrid_settings.get( + "leecher_manager_url", "" + ) + + def connect_with_modules(self, enabled_modules): + pass + + def get_global_environments(self): + return {"PROJECT_ID": self.project_id} + + def get_plugin_paths(self): + return { + "publish": [ + os.path.join(SHOTGRID_MODULE_DIR, "plugins", "publish") + ] + } + + def get_launch_hook_paths(self): + return os.path.join(SHOTGRID_MODULE_DIR, "hooks") + + def tray_init(self): + from .tray.shotgrid_tray import ShotgridTrayWrapper + + self.tray_wrapper = ShotgridTrayWrapper(self) + + def tray_start(self): + return self.tray_wrapper.validate() + + def tray_exit(self, *args, **kwargs): + return self.tray_wrapper + + def tray_menu(self, tray_menu): + return self.tray_wrapper.tray_menu(tray_menu) diff --git a/openpype/modules/shotgrid/tests/shotgrid/lib/test_credentials.py b/openpype/modules/shotgrid/tests/shotgrid/lib/test_credentials.py new file mode 100644 index 0000000000..1f78cf77c9 --- /dev/null +++ b/openpype/modules/shotgrid/tests/shotgrid/lib/test_credentials.py @@ -0,0 +1,34 @@ +import pytest +from assertpy import assert_that + +import openpype.modules.shotgrid.lib.credentials as sut + + +def test_missing_shotgrid_url(): + with pytest.raises(Exception) as ex: + # arrange + url = "" + # act + sut.get_shotgrid_hostname(url) + # assert + assert_that(ex).is_equal_to("Shotgrid url cannot be a null") + + +def test_full_shotgrid_url(): + # arrange + url = "https://shotgrid.com/myinstance" + # act + actual = sut.get_shotgrid_hostname(url) + # assert + assert_that(actual).is_not_empty() + assert_that(actual).is_equal_to("shotgrid.com") + + +def test_incomplete_shotgrid_url(): + # arrange + url = "shotgrid.com/myinstance" + # act + actual = sut.get_shotgrid_hostname(url) + # assert + assert_that(actual).is_not_empty() + assert_that(actual).is_equal_to("shotgrid.com") diff --git a/openpype/modules/shotgrid/tray/credential_dialog.py b/openpype/modules/shotgrid/tray/credential_dialog.py new file mode 100644 index 0000000000..9d841d98be --- /dev/null +++ b/openpype/modules/shotgrid/tray/credential_dialog.py @@ -0,0 +1,201 @@ +import os +from Qt import QtCore, QtWidgets, QtGui + +from openpype import style +from openpype import resources +from openpype.modules.shotgrid.lib import settings, credentials + + +class CredentialsDialog(QtWidgets.QDialog): + SIZE_W = 450 + SIZE_H = 200 + + _module = None + _is_logged = False + url_label = None + login_label = None + password_label = None + url_input = None + login_input = None + password_input = None + input_layout = None + login_button = None + buttons_layout = None + main_widget = None + + login_changed = QtCore.Signal() + + def __init__(self, module, parent=None): + super(CredentialsDialog, self).__init__(parent) + + self._module = module + self._is_logged = False + + self.setWindowTitle("OpenPype - Shotgrid Login") + + icon = QtGui.QIcon(resources.get_openpype_icon_filepath()) + self.setWindowIcon(icon) + + self.setWindowFlags( + QtCore.Qt.WindowCloseButtonHint + | QtCore.Qt.WindowMinimizeButtonHint + ) + self.setMinimumSize(QtCore.QSize(self.SIZE_W, self.SIZE_H)) + self.setMaximumSize(QtCore.QSize(self.SIZE_W + 100, self.SIZE_H + 100)) + self.setStyleSheet(style.load_stylesheet()) + + self.ui_init() + + def ui_init(self): + self.url_label = QtWidgets.QLabel("Shotgrid server:") + self.login_label = QtWidgets.QLabel("Login:") + self.password_label = 
QtWidgets.QLabel("Password:") + + self.url_input = QtWidgets.QComboBox() + # self.url_input.setReadOnly(True) + + self.login_input = QtWidgets.QLineEdit() + self.login_input.setPlaceholderText("login") + + self.password_input = QtWidgets.QLineEdit() + self.password_input.setPlaceholderText("password") + self.password_input.setEchoMode(QtWidgets.QLineEdit.Password) + + self.error_label = QtWidgets.QLabel("") + self.error_label.setStyleSheet("color: red;") + self.error_label.setWordWrap(True) + self.error_label.hide() + + self.input_layout = QtWidgets.QFormLayout() + self.input_layout.setContentsMargins(10, 15, 10, 5) + + self.input_layout.addRow(self.url_label, self.url_input) + self.input_layout.addRow(self.login_label, self.login_input) + self.input_layout.addRow(self.password_label, self.password_input) + self.input_layout.addRow(self.error_label) + + self.login_button = QtWidgets.QPushButton("Login") + self.login_button.setToolTip("Log in shotgrid instance") + self.login_button.clicked.connect(self._on_shotgrid_login_clicked) + + self.logout_button = QtWidgets.QPushButton("Logout") + self.logout_button.setToolTip("Log out shotgrid instance") + self.logout_button.clicked.connect(self._on_shotgrid_logout_clicked) + + self.buttons_layout = QtWidgets.QHBoxLayout() + self.buttons_layout.addWidget(self.logout_button) + self.buttons_layout.addWidget(self.login_button) + + self.main_widget = QtWidgets.QVBoxLayout(self) + self.main_widget.addLayout(self.input_layout) + self.main_widget.addLayout(self.buttons_layout) + self.setLayout(self.main_widget) + + def show(self, *args, **kwargs): + super(CredentialsDialog, self).show(*args, **kwargs) + self._fill_shotgrid_url() + self._fill_shotgrid_login() + + def _fill_shotgrid_url(self): + servers = settings.get_shotgrid_servers() + + if servers: + for _, v in servers.items(): + self.url_input.addItem("{}".format(v.get('shotgrid_url'))) + self._valid_input(self.url_input) + self.login_button.show() + self.logout_button.show() + enabled = True + else: + self.set_error("Ask your admin to add shotgrid server in settings") + self._invalid_input(self.url_input) + self.login_button.hide() + self.logout_button.hide() + enabled = False + + self.login_input.setEnabled(enabled) + self.password_input.setEnabled(enabled) + + def _fill_shotgrid_login(self): + login = credentials.get_local_login() + + if login: + self.login_input.setText(login) + + def _clear_shotgrid_login(self): + self.login_input.setText("") + self.password_input.setText("") + + def _on_shotgrid_login_clicked(self): + login = self.login_input.text().strip() + password = self.password_input.text().strip() + missing = [] + + if login == "": + missing.append("login") + self._invalid_input(self.login_input) + + if password == "": + missing.append("password") + self._invalid_input(self.password_input) + + url = self.url_input.currentText() + if url == "": + missing.append("url") + self._invalid_input(self.url_input) + + if len(missing) > 0: + self.set_error("You didn't enter {}".format(" and ".join(missing))) + return + + # if credentials.check_credentials( + # login=login, + # password=password, + # shotgrid_url=url, + # ): + credentials.save_local_login( + login=login + ) + os.environ['OPENPYPE_SG_USER'] = login + self._on_login() + + self.set_error("CANT LOGIN") + + def _on_shotgrid_logout_clicked(self): + credentials.clear_local_login() + del os.environ['OPENPYPE_SG_USER'] + self._clear_shotgrid_login() + self._on_logout() + + def set_error(self, msg): + self.error_label.setText(msg) + 
self.error_label.show() + + def _on_login(self): + self._is_logged = True + self.login_changed.emit() + self._close_widget() + + def _on_logout(self): + self._is_logged = False + self.login_changed.emit() + + def _close_widget(self): + self.hide() + + def _valid_input(self, input_widget): + input_widget.setStyleSheet("") + + def _invalid_input(self, input_widget): + input_widget.setStyleSheet("border: 1px solid red;") + + def login_with_credentials( + self, url, login, password + ): + verification = credentials.check_credentials(url, login, password) + if verification: + credentials.save_credentials(login, password, False) + self._module.set_credentials_to_env(login, password) + self.set_credentials(login, password) + self.login_changed.emit() + return verification diff --git a/openpype/modules/shotgrid/tray/shotgrid_tray.py b/openpype/modules/shotgrid/tray/shotgrid_tray.py new file mode 100644 index 0000000000..4038d77b03 --- /dev/null +++ b/openpype/modules/shotgrid/tray/shotgrid_tray.py @@ -0,0 +1,75 @@ +import os +import webbrowser + +from Qt import QtWidgets + +from openpype.modules.shotgrid.lib import credentials +from openpype.modules.shotgrid.tray.credential_dialog import ( + CredentialsDialog, +) + + +class ShotgridTrayWrapper: + module = None + credentials_dialog = None + logged_user_label = None + + def __init__(self, module): + self.module = module + self.credentials_dialog = CredentialsDialog(module) + self.credentials_dialog.login_changed.connect(self.set_login_label) + self.logged_user_label = QtWidgets.QAction("") + self.logged_user_label.setDisabled(True) + self.set_login_label() + + def show_batch_dialog(self): + if self.module.leecher_manager_url: + webbrowser.open(self.module.leecher_manager_url) + + def show_connect_dialog(self): + self.show_credential_dialog() + + def show_credential_dialog(self): + self.credentials_dialog.show() + self.credentials_dialog.activateWindow() + self.credentials_dialog.raise_() + + def set_login_label(self): + login = credentials.get_local_login() + if login: + self.logged_user_label.setText("{}".format(login)) + else: + self.logged_user_label.setText( + "No User logged in {0}".format(login) + ) + + def tray_menu(self, tray_menu): + # Add login to user menu + menu = QtWidgets.QMenu("Shotgrid", tray_menu) + show_connect_action = QtWidgets.QAction("Connect to Shotgrid", menu) + show_connect_action.triggered.connect(self.show_connect_dialog) + menu.addAction(self.logged_user_label) + menu.addSeparator() + menu.addAction(show_connect_action) + tray_menu.addMenu(menu) + + # Add manager to Admin menu + for m in tray_menu.findChildren(QtWidgets.QMenu): + if m.title() == "Admin": + shotgrid_manager_action = QtWidgets.QAction( + "Shotgrid manager", menu + ) + shotgrid_manager_action.triggered.connect( + self.show_batch_dialog + ) + m.addAction(shotgrid_manager_action) + + def validate(self): + login = credentials.get_local_login() + + if not login: + self.show_credential_dialog() + else: + os.environ["OPENPYPE_SG_USER"] = login + + return True diff --git a/openpype/modules/slack/plugins/publish/integrate_slack_api.py b/openpype/modules/slack/plugins/publish/integrate_slack_api.py index 10bde7d4c0..c3b288f0cd 100644 --- a/openpype/modules/slack/plugins/publish/integrate_slack_api.py +++ b/openpype/modules/slack/plugins/publish/integrate_slack_api.py @@ -4,8 +4,8 @@ import pyblish.api import copy from datetime import datetime +from openpype.client import OpenPypeMongoConnection from openpype.lib.plugin_tools import prepare_template_data -from 
openpype.lib import OpenPypeMongoConnection class IntegrateSlackAPI(pyblish.api.InstancePlugin): diff --git a/openpype/modules/slack/slack_module.py b/openpype/modules/slack/slack_module.py index 9b2976d766..499c1c19ce 100644 --- a/openpype/modules/slack/slack_module.py +++ b/openpype/modules/slack/slack_module.py @@ -1,14 +1,11 @@ import os from openpype.modules import OpenPypeModule -from openpype_interfaces import ( - IPluginPaths, - ILaunchHookPaths -) +from openpype.modules.interfaces import IPluginPaths SLACK_MODULE_DIR = os.path.dirname(os.path.abspath(__file__)) -class SlackIntegrationModule(OpenPypeModule, IPluginPaths, ILaunchHookPaths): +class SlackIntegrationModule(OpenPypeModule, IPluginPaths): """Allows sending notification to Slack channels during publishing.""" name = "slack" @@ -18,7 +15,8 @@ class SlackIntegrationModule(OpenPypeModule, IPluginPaths, ILaunchHookPaths): self.enabled = slack_settings["enabled"] def get_launch_hook_paths(self): - """Implementation of `ILaunchHookPaths`.""" + """Implementation for applications launch hooks.""" + return os.path.join(SLACK_MODULE_DIR, "launch_hooks") def get_plugin_paths(self): diff --git a/openpype/modules/standalonepublish_action.py b/openpype/modules/standalonepublish_action.py deleted file mode 100644 index ba53ce9b9e..0000000000 --- a/openpype/modules/standalonepublish_action.py +++ /dev/null @@ -1,49 +0,0 @@ -import os -import platform -import subprocess -from openpype.lib import get_openpype_execute_args -from openpype.modules import OpenPypeModule -from openpype_interfaces import ITrayAction - - -class StandAlonePublishAction(OpenPypeModule, ITrayAction): - label = "Publish" - name = "standalonepublish_tool" - - def initialize(self, modules_settings): - import openpype - self.enabled = modules_settings[self.name]["enabled"] - self.publish_paths = [ - os.path.join( - openpype.PACKAGE_DIR, - "hosts", - "standalonepublisher", - "plugins", - "publish" - ) - ] - - def tray_init(self): - return - - def on_action_trigger(self): - self.run_standalone_publisher() - - def connect_with_modules(self, enabled_modules): - """Collect publish paths from other modules.""" - publish_paths = self.manager.collect_plugin_paths()["publish"] - self.publish_paths.extend(publish_paths) - - def run_standalone_publisher(self): - args = get_openpype_execute_args("standalonepublisher") - kwargs = {} - if platform.system().lower() == "darwin": - new_args = ["open", "-na", args.pop(0), "--args"] - new_args.extend(args) - args = new_args - - detached_process = getattr(subprocess, "DETACHED_PROCESS", None) - if detached_process is not None: - kwargs["creationflags"] = detached_process - - subprocess.Popen(args, **kwargs) diff --git a/openpype/modules/sync_server/providers/abstract_provider.py b/openpype/modules/sync_server/providers/abstract_provider.py index 688a17f14f..e11a8ba71e 100644 --- a/openpype/modules/sync_server/providers/abstract_provider.py +++ b/openpype/modules/sync_server/providers/abstract_provider.py @@ -1,8 +1,8 @@ import abc import six -from openpype.api import Logger +from openpype.lib import Logger -log = Logger().get_logger("SyncServer") +log = Logger.get_logger("SyncServer") @six.add_metaclass(abc.ABCMeta) @@ -10,6 +10,8 @@ class AbstractProvider: CODE = '' LABEL = '' + _log = None + def __init__(self, project_name, site_name, tree=None, presets=None): self.presets = None self.active = False @@ -19,6 +21,12 @@ class AbstractProvider: super(AbstractProvider, self).__init__() + @property + def log(self): + if self._log is None: + 
self._log = Logger.get_logger(self.__class__.__name__) + return self._log + @abc.abstractmethod def is_active(self): """ @@ -62,7 +70,7 @@ class AbstractProvider: @abc.abstractmethod def upload_file(self, source_path, path, - server, collection, file, representation, site, + server, project_name, file, representation, site, overwrite=False): """ Copy file from 'source_path' to 'target_path' on provider. @@ -75,7 +83,7 @@ class AbstractProvider: arguments for saving progress: server (SyncServer): server instance to call update_db on - collection (str): name of collection + project_name (str): name of project_name file (dict): info about uploaded file (matches structure from db) representation (dict): complete repre containing 'file' site (str): site name @@ -87,7 +95,7 @@ class AbstractProvider: @abc.abstractmethod def download_file(self, source_path, local_path, - server, collection, file, representation, site, + server, project_name, file, representation, site, overwrite=False): """ Download file from provider into local system @@ -99,7 +107,7 @@ class AbstractProvider: arguments for saving progress: server (SyncServer): server instance to call update_db on - collection (str): name of collection + project_name (str): file (dict): info about uploaded file (matches structure from db) representation (dict): complete repre containing 'file' site (str): site name @@ -199,11 +207,11 @@ class AbstractProvider: path = anatomy.fill_root(path) except KeyError: msg = "Error in resolving local root from anatomy" - log.error(msg) + self.log.error(msg) raise ValueError(msg) except IndexError: msg = "Path {} contains unfillable placeholder" - log.error(msg) + self.log.error(msg) raise ValueError(msg) return path diff --git a/openpype/modules/sync_server/providers/dropbox.py b/openpype/modules/sync_server/providers/dropbox.py index dfc42fed75..e026ae7ef6 100644 --- a/openpype/modules/sync_server/providers/dropbox.py +++ b/openpype/modules/sync_server/providers/dropbox.py @@ -2,12 +2,9 @@ import os import dropbox -from openpype.api import Logger from .abstract_provider import AbstractProvider from ..utils import EditableScopes -log = Logger().get_logger("SyncServer") - class DropboxHandler(AbstractProvider): CODE = 'dropbox' @@ -20,26 +17,26 @@ class DropboxHandler(AbstractProvider): self.dbx = None if not self.presets: - log.info( + self.log.info( "Sync Server: There are no presets for {}.".format(site_name) ) return if not self.presets["enabled"]: - log.debug("Sync Server: Site {} not enabled for {}.". + self.log.debug("Sync Server: Site {} not enabled for {}.". 
format(site_name, project_name)) return token = self.presets.get("token", "") if not token: msg = "Sync Server: No access token for dropbox provider" - log.info(msg) + self.log.info(msg) return team_folder_name = self.presets.get("team_folder_name", "") if not team_folder_name: msg = "Sync Server: No team folder name for dropbox provider" - log.info(msg) + self.log.info(msg) return acting_as_member = self.presets.get("acting_as_member", "") @@ -47,7 +44,7 @@ class DropboxHandler(AbstractProvider): msg = ( "Sync Server: No acting member for dropbox provider" ) - log.info(msg) + self.log.info(msg) return try: @@ -55,7 +52,7 @@ class DropboxHandler(AbstractProvider): token, acting_as_member, team_folder_name ) except Exception as e: - log.info("Could not establish dropbox object: {}".format(e)) + self.log.info("Could not establish dropbox object: {}".format(e)) return super(AbstractProvider, self).__init__() @@ -224,7 +221,7 @@ class DropboxHandler(AbstractProvider): return False def upload_file(self, source_path, path, - server, collection, file, representation, site, + server, project_name, file, representation, site, overwrite=False): """ Copy file from 'source_path' to 'target_path' on provider. @@ -237,7 +234,7 @@ class DropboxHandler(AbstractProvider): arguments for saving progress: server (SyncServer): server instance to call update_db on - collection (str): name of collection + project_name (str): file (dict): info about uploaded file (matches structure from db) representation (dict): complete repre containing 'file' site (str): site name @@ -290,7 +287,7 @@ class DropboxHandler(AbstractProvider): cursor.offset = f.tell() server.update_db( - collection=collection, + project_name=project_name, new_file_id=None, file=file, representation=representation, @@ -301,7 +298,7 @@ class DropboxHandler(AbstractProvider): return path def download_file(self, source_path, local_path, - server, collection, file, representation, site, + server, project_name, file, representation, site, overwrite=False): """ Download file from provider into local system @@ -313,7 +310,7 @@ class DropboxHandler(AbstractProvider): arguments for saving progress: server (SyncServer): server instance to call update_db on - collection (str): name of collection + project_name (str): file (dict): info about uploaded file (matches structure from db) representation (dict): complete repre containing 'file' site (str): site name @@ -337,7 +334,7 @@ class DropboxHandler(AbstractProvider): self.dbx.files_download_to_file(local_path, source_path) server.update_db( - collection=collection, + project_name=project_name, new_file_id=None, file=file, representation=representation, @@ -448,7 +445,7 @@ class DropboxHandler(AbstractProvider): path = anatomy.fill_root(path) except KeyError: msg = "Error in resolving local root from anatomy" - log.error(msg) + self.log.error(msg) raise ValueError(msg) return path diff --git a/openpype/modules/sync_server/providers/gdrive.py b/openpype/modules/sync_server/providers/gdrive.py index aa7329b104..9a3ce89cf5 100644 --- a/openpype/modules/sync_server/providers/gdrive.py +++ b/openpype/modules/sync_server/providers/gdrive.py @@ -5,12 +5,12 @@ import sys import six import platform -from openpype.api import Logger -from openpype.api import get_system_settings +from openpype.lib import Logger +from openpype.settings import get_system_settings from .abstract_provider import AbstractProvider from ..utils import time_function, ResumableError -log = Logger().get_logger("SyncServer") +log = 
Logger.get_logger("GDriveHandler") try: from googleapiclient.discovery import build @@ -69,13 +69,17 @@ class GDriveHandler(AbstractProvider): self.presets = presets if not self.presets: - log.info("Sync Server: There are no presets for {}.". - format(site_name)) + self.log.info( + "Sync Server: There are no presets for {}.".format(site_name) + ) return if not self.presets["enabled"]: - log.debug("Sync Server: Site {} not enabled for {}.". - format(site_name, project_name)) + self.log.debug( + "Sync Server: Site {} not enabled for {}.".format( + site_name, project_name + ) + ) return current_platform = platform.system().lower() @@ -85,20 +89,22 @@ class GDriveHandler(AbstractProvider): if not cred_path: msg = "Sync Server: Please, fill the credentials for gdrive "\ "provider for platform '{}' !".format(current_platform) - log.info(msg) + self.log.info(msg) return try: cred_path = cred_path.format(**os.environ) except KeyError as e: - log.info("Sync Server: The key(s) {} does not exist in the " - "environment variables".format(" ".join(e.args))) + self.log.info(( + "Sync Server: The key(s) {} does not exist in the " + "environment variables" + ).format(" ".join(e.args))) return if not os.path.exists(cred_path): msg = "Sync Server: No credentials for gdrive provider " + \ "for '{}' on path '{}'!".format(site_name, cred_path) - log.info(msg) + self.log.info(msg) return self.service = None @@ -251,7 +257,7 @@ class GDriveHandler(AbstractProvider): return folder_id def upload_file(self, source_path, path, - server, collection, file, representation, site, + server, project_name, file, representation, site, overwrite=False): """ Uploads single file from 'source_path' to destination 'path'. @@ -264,7 +270,7 @@ class GDriveHandler(AbstractProvider): arguments for saving progress: server (SyncServer): server instance to call update_db on - collection (str): name of collection + project_name (str): file (dict): info about uploaded file (matches structure from db) representation (dict): complete repre containing 'file' site (str): site name @@ -318,22 +324,22 @@ class GDriveHandler(AbstractProvider): fields='id') media.stream() - log.debug("Start Upload! {}".format(source_path)) + self.log.debug("Start Upload! {}".format(source_path)) last_tick = status = response = None status_val = 0 while response is None: if server.is_representation_paused(representation['_id'], check_parents=True, - project_name=collection): + project_name=project_name): raise ValueError("Paused during process, please redo.") if status: status_val = float(status.progress()) if not last_tick or \ time.time() - last_tick >= server.LOG_PROGRESS_SEC: last_tick = time.time() - log.debug("Uploaded %d%%." % + self.log.debug("Uploaded %d%%." % int(status_val * 100)) - server.update_db(collection=collection, + server.update_db(project_name=project_name, new_file_id=None, file=file, representation=representation, @@ -350,15 +356,16 @@ class GDriveHandler(AbstractProvider): if 'has not granted' in ex._get_reason().strip(): raise PermissionError(ex._get_reason().strip()) - log.warning("Forbidden received, hit quota. " - "Injecting 60s delay.") + self.log.warning( + "Forbidden received, hit quota. Injecting 60s delay." + ) time.sleep(60) return False raise return response['id'] def download_file(self, source_path, local_path, - server, collection, file, representation, site, + server, project_name, file, representation, site, overwrite=False): """ Downloads single file from 'source_path' (remote) to 'local_path'. 
@@ -372,7 +379,7 @@ class GDriveHandler(AbstractProvider): arguments for saving progress: server (SyncServer): server instance to call update_db on - collection (str): name of collection + project_name (str): file (dict): info about uploaded file (matches structure from db) representation (dict): complete repre containing 'file' site (str): site name @@ -410,16 +417,16 @@ class GDriveHandler(AbstractProvider): while response is None: if server.is_representation_paused(representation['_id'], check_parents=True, - project_name=collection): + project_name=project_name): raise ValueError("Paused during process, please redo.") if status: status_val = float(status.progress()) if not last_tick or \ time.time() - last_tick >= server.LOG_PROGRESS_SEC: last_tick = time.time() - log.debug("Downloaded %d%%." % + self.log.debug("Downloaded %d%%." % int(status_val * 100)) - server.update_db(collection=collection, + server.update_db(project_name=project_name, new_file_id=None, file=file, representation=representation, @@ -629,9 +636,9 @@ class GDriveHandler(AbstractProvider): ["gdrive"] ) except KeyError: - log.info(("Sync Server: There are no presets for Gdrive " + - "provider."). - format(str(provider_presets))) + log.info(( + "Sync Server: There are no presets for Gdrive provider." + ).format(str(provider_presets))) return return provider_presets @@ -704,7 +711,7 @@ class GDriveHandler(AbstractProvider): roots[self.MY_DRIVE_STR] = self.service.files() \ .get(fileId='root').execute() except errors.HttpError: - log.warning("HttpError in sync loop, " + self.log.warning("HttpError in sync loop, " "trying next loop", exc_info=True) raise ResumableError @@ -727,7 +734,7 @@ class GDriveHandler(AbstractProvider): Returns: (dictionary) path as a key, folder id as a value """ - log.debug("build_tree len {}".format(len(folders))) + self.log.debug("build_tree len {}".format(len(folders))) if not self.root: # build only when necessary, could be expensive self.root = self._prepare_root_info() @@ -779,9 +786,9 @@ class GDriveHandler(AbstractProvider): loop_cnt += 1 if len(no_parents_yet) > 0: - log.debug("Some folders path are not resolved {}". + self.log.debug("Some folders path are not resolved {}". 
format(no_parents_yet)) - log.debug("Remove deleted folders from trash.") + self.log.debug("Remove deleted folders from trash.") return tree diff --git a/openpype/modules/sync_server/providers/local_drive.py b/openpype/modules/sync_server/providers/local_drive.py index 172cb338cf..01bc891d08 100644 --- a/openpype/modules/sync_server/providers/local_drive.py +++ b/openpype/modules/sync_server/providers/local_drive.py @@ -82,7 +82,7 @@ class LocalDriveHandler(AbstractProvider): return editable def upload_file(self, source_path, target_path, - server, collection, file, representation, site, + server, project_name, file, representation, site, overwrite=False, direction="Upload"): """ Copies file from 'source_path' to 'target_path' @@ -95,7 +95,7 @@ class LocalDriveHandler(AbstractProvider): thread = threading.Thread(target=self._copy, args=(source_path, target_path)) thread.start() - self._mark_progress(collection, file, representation, server, + self._mark_progress(project_name, file, representation, server, site, source_path, target_path, direction) else: if os.path.exists(target_path): @@ -105,13 +105,14 @@ class LocalDriveHandler(AbstractProvider): return os.path.basename(target_path) def download_file(self, source_path, local_path, - server, collection, file, representation, site, + server, project_name, file, representation, site, overwrite=False): """ Download a file form 'source_path' to 'local_path' """ return self.upload_file(source_path, local_path, - server, collection, file, representation, site, + server, project_name, file, + representation, site, overwrite, direction="Download") def delete_file(self, path): @@ -188,7 +189,7 @@ class LocalDriveHandler(AbstractProvider): except shutil.SameFileError: print("same files, skipping") - def _mark_progress(self, collection, file, representation, server, site, + def _mark_progress(self, project_name, file, representation, server, site, source_path, target_path, direction): """ Updates progress field in DB by values 0-1. @@ -204,7 +205,7 @@ class LocalDriveHandler(AbstractProvider): status_val = target_file_size / source_file_size last_tick = time.time() log.debug(direction + "ed %d%%." % int(status_val * 100)) - server.update_db(collection=collection, + server.update_db(project_name=project_name, new_file_id=None, file=file, representation=representation, diff --git a/openpype/modules/sync_server/providers/sftp.py b/openpype/modules/sync_server/providers/sftp.py index 49b87b14ec..40f11cb9dd 100644 --- a/openpype/modules/sync_server/providers/sftp.py +++ b/openpype/modules/sync_server/providers/sftp.py @@ -4,10 +4,10 @@ import time import threading import platform -from openpype.api import Logger -from openpype.api import get_system_settings +from openpype.lib import Logger +from openpype.settings import get_system_settings from .abstract_provider import AbstractProvider -log = Logger().get_logger("SyncServer") +log = Logger.get_logger("SyncServer-SFTPHandler") pysftp = None try: @@ -43,8 +43,9 @@ class SFTPHandler(AbstractProvider): self.presets = presets if not self.presets: - log.warning("Sync Server: There are no presets for {}.". 
- format(site_name)) + self.log.warning( + "Sync Server: There are no presets for {}.".format(site_name) + ) return # store to instance for reconnect @@ -222,7 +223,7 @@ class SFTPHandler(AbstractProvider): return os.path.basename(path) def upload_file(self, source_path, target_path, - server, collection, file, representation, site, + server, project_name, file, representation, site, overwrite=False): """ Uploads single file from 'source_path' to destination 'path'. @@ -235,7 +236,7 @@ class SFTPHandler(AbstractProvider): arguments for saving progress: server (SyncServer): server instance to call update_db on - collection (str): name of collection + project_name (str): file (dict): info about uploaded file (matches structure from db) representation (dict): complete repre containing 'file' site (str): site name @@ -256,7 +257,7 @@ class SFTPHandler(AbstractProvider): thread = threading.Thread(target=self._upload, args=(source_path, target_path)) thread.start() - self._mark_progress(collection, file, representation, server, + self._mark_progress(project_name, file, representation, server, site, source_path, target_path, "upload") return os.path.basename(target_path) @@ -267,7 +268,7 @@ class SFTPHandler(AbstractProvider): conn.put(source_path, target_path) def download_file(self, source_path, target_path, - server, collection, file, representation, site, + server, project_name, file, representation, site, overwrite=False): """ Downloads single file from 'source_path' (remote) to 'target_path'. @@ -281,7 +282,7 @@ class SFTPHandler(AbstractProvider): arguments for saving progress: server (SyncServer): server instance to call update_db on - collection (str): name of collection + project_name (str): file (dict): info about uploaded file (matches structure from db) representation (dict): complete repre containing 'file' site (str): site name @@ -302,7 +303,7 @@ class SFTPHandler(AbstractProvider): thread = threading.Thread(target=self._download, args=(source_path, target_path)) thread.start() - self._mark_progress(collection, file, representation, server, + self._mark_progress(project_name, file, representation, server, site, source_path, target_path, "download") return os.path.basename(target_path) @@ -423,9 +424,9 @@ class SFTPHandler(AbstractProvider): return pysftp.Connection(**conn_params) except (paramiko.ssh_exception.SSHException, pysftp.exceptions.ConnectionException): - log.warning("Couldn't connect", exc_info=True) + self.log.warning("Couldn't connect", exc_info=True) - def _mark_progress(self, collection, file, representation, server, site, + def _mark_progress(self, project_name, file, representation, server, site, source_path, target_path, direction): """ Updates progress field in DB by values 0-1. @@ -445,8 +446,8 @@ class SFTPHandler(AbstractProvider): time.time() - last_tick >= server.LOG_PROGRESS_SEC: status_val = target_file_size / source_file_size last_tick = time.time() - log.debug(direction + "ed %d%%." % int(status_val * 100)) - server.update_db(collection=collection, + self.log.debug(direction + "ed %d%%." 
% int(status_val * 100)) + server.update_db(project_name=project_name, new_file_id=None, file=file, representation=representation, diff --git a/openpype/modules/sync_server/sync_server.py b/openpype/modules/sync_server/sync_server.py index 356a75f99d..8b11055e65 100644 --- a/openpype/modules/sync_server/sync_server.py +++ b/openpype/modules/sync_server/sync_server.py @@ -6,15 +6,12 @@ import concurrent.futures from concurrent.futures._base import CancelledError from .providers import lib -from openpype.lib import PypeLogger +from openpype.lib import Logger from .utils import SyncStatus, ResumableError -log = PypeLogger().get_logger("SyncServer") - - -async def upload(module, collection, file, representation, provider_name, +async def upload(module, project_name, file, representation, provider_name, remote_site_name, tree=None, preset=None): """ Upload single 'file' of a 'representation' to 'provider'. @@ -31,7 +28,7 @@ async def upload(module, collection, file, representation, provider_name, Args: module(SyncServerModule): object to run SyncServerModule API - collection (str): source collection + project_name (str): source db file (dictionary): of file from representation in Mongo representation (dictionary): of representation provider_name (string): gdrive, gdc etc. @@ -47,15 +44,16 @@ async def upload(module, collection, file, representation, provider_name, # thread can do that at a time, upload/download to prepared # structure should be run in parallel remote_handler = lib.factory.get_provider(provider_name, - collection, + project_name, remote_site_name, tree=tree, presets=preset) file_path = file.get("path", "") try: - local_file_path, remote_file_path = resolve_paths(module, - file_path, collection, remote_site_name, remote_handler + local_file_path, remote_file_path = resolve_paths( + module, file_path, project_name, + remote_site_name, remote_handler ) except Exception as exp: print(exp) @@ -74,27 +72,28 @@ async def upload(module, collection, file, representation, provider_name, local_file_path, remote_file_path, module, - collection, + project_name, file, representation, remote_site_name, True ) - module.handle_alternate_site(collection, representation, remote_site_name, + module.handle_alternate_site(project_name, representation, + remote_site_name, file["_id"], file_id) return file_id -async def download(module, collection, file, representation, provider_name, +async def download(module, project_name, file, representation, provider_name, remote_site_name, tree=None, preset=None): """ Downloads file to local folder denoted in representation.Context. 
Args: module(SyncServerModule): object to run SyncServerModule API - collection (str): source collection + project_name (str): source file (dictionary) : info about processed file representation (dictionary): repr that 'file' belongs to provider_name (string): 'gdrive' etc @@ -108,20 +107,20 @@ async def download(module, collection, file, representation, provider_name, """ with module.lock: remote_handler = lib.factory.get_provider(provider_name, - collection, + project_name, remote_site_name, tree=tree, presets=preset) file_path = file.get("path", "") local_file_path, remote_file_path = resolve_paths( - module, file_path, collection, remote_site_name, remote_handler + module, file_path, project_name, remote_site_name, remote_handler ) local_folder = os.path.dirname(local_file_path) os.makedirs(local_folder, exist_ok=True) - local_site = module.get_active_site(collection) + local_site = module.get_active_site(project_name) loop = asyncio.get_running_loop() file_id = await loop.run_in_executor(None, @@ -129,20 +128,20 @@ async def download(module, collection, file, representation, provider_name, remote_file_path, local_file_path, module, - collection, + project_name, file, representation, local_site, True ) - module.handle_alternate_site(collection, representation, local_site, + module.handle_alternate_site(project_name, representation, local_site, file["_id"], file_id) return file_id -def resolve_paths(module, file_path, collection, +def resolve_paths(module, file_path, project_name, remote_site_name=None, remote_handler=None): """ Returns tuple of local and remote file paths with {root} @@ -153,7 +152,7 @@ def resolve_paths(module, file_path, collection, Args: module(SyncServerModule): object to run SyncServerModule API file_path(string): path with {root} - collection(string): project name + project_name(string): project name remote_site_name(string): remote site remote_handler(AbstractProvider): implementation Returns: @@ -164,7 +163,7 @@ def resolve_paths(module, file_path, collection, remote_file_path = remote_handler.resolve_path(file_path) local_handler = lib.factory.get_provider( - 'local_drive', collection, module.get_active_site(collection)) + 'local_drive', project_name, module.get_active_site(project_name)) local_file_path = local_handler.resolve_path(file_path) return local_file_path, remote_file_path @@ -236,6 +235,7 @@ class SyncServerThread(threading.Thread): Stopped when tray is closed. 
""" def __init__(self, module): + self.log = Logger.get_logger(self.__class__.__name__) super(SyncServerThread, self).__init__() self.module = module self.loop = None @@ -247,17 +247,17 @@ class SyncServerThread(threading.Thread): self.is_running = True try: - log.info("Starting Sync Server") + self.log.info("Starting Sync Server") self.loop = asyncio.new_event_loop() # create new loop for thread asyncio.set_event_loop(self.loop) self.loop.set_default_executor(self.executor) asyncio.ensure_future(self.check_shutdown(), loop=self.loop) asyncio.ensure_future(self.sync_loop(), loop=self.loop) - log.info("Sync Server Started") + self.log.info("Sync Server Started") self.loop.run_forever() except Exception: - log.warning( + self.log.warning( "Sync Server service has failed", exc_info=True ) finally: @@ -269,8 +269,8 @@ class SyncServerThread(threading.Thread): - gets list of collections in DB - gets list of active remote providers (has configuration, credentials) - - for each collection it looks for representations that should - be synced + - for each project_name it looks for representations that + should be synced - synchronize found collections - update representations - fills error messages for exceptions - waits X seconds and repeat @@ -282,17 +282,17 @@ class SyncServerThread(threading.Thread): import time start_time = time.time() self.module.set_sync_project_settings() # clean cache - collection = None + project_name = None enabled_projects = self.module.get_enabled_projects() - for collection in enabled_projects: - preset = self.module.sync_project_settings[collection] + for project_name in enabled_projects: + preset = self.module.sync_project_settings[project_name] - local_site, remote_site = self._working_sites(collection) + local_site, remote_site = self._working_sites(project_name) if not all([local_site, remote_site]): continue sync_repres = self.module.get_sync_representations( - collection, + project_name, local_site, remote_site ) @@ -310,7 +310,7 @@ class SyncServerThread(threading.Thread): remote_provider = \ self.module.get_provider_for_site(site=remote_site) handler = lib.factory.get_provider(remote_provider, - collection, + project_name, remote_site, presets=site_preset) limit = lib.factory.get_provider_batch_limit( @@ -341,7 +341,7 @@ class SyncServerThread(threading.Thread): limit -= 1 task = asyncio.create_task( upload(self.module, - collection, + project_name, file, sync, remote_provider, @@ -353,7 +353,7 @@ class SyncServerThread(threading.Thread): files_processed_info.append((file, sync, remote_site, - collection + project_name )) processed_file_path.add(file_path) if status == SyncStatus.DO_DOWNLOAD: @@ -361,7 +361,7 @@ class SyncServerThread(threading.Thread): limit -= 1 task = asyncio.create_task( download(self.module, - collection, + project_name, file, sync, remote_provider, @@ -373,23 +373,24 @@ class SyncServerThread(threading.Thread): files_processed_info.append((file, sync, local_site, - collection + project_name )) processed_file_path.add(file_path) - log.debug("Sync tasks count {}". 
- format(len(task_files_to_process))) + self.log.debug("Sync tasks count {}".format( + len(task_files_to_process) + )) files_created = await asyncio.gather( *task_files_to_process, return_exceptions=True) for file_id, info in zip(files_created, files_processed_info): - file, representation, site, collection = info + file, representation, site, project_name = info error = None if isinstance(file_id, BaseException): error = str(file_id) file_id = None - self.module.update_db(collection, + self.module.update_db(project_name, file_id, file, representation, @@ -397,28 +398,31 @@ class SyncServerThread(threading.Thread): error) duration = time.time() - start_time - log.debug("One loop took {:.2f}s".format(duration)) + self.log.debug("One loop took {:.2f}s".format(duration)) - delay = self.module.get_loop_delay(collection) - log.debug("Waiting for {} seconds to new loop".format(delay)) + delay = self.module.get_loop_delay(project_name) + self.log.debug( + "Waiting for {} seconds to new loop".format(delay) + ) self.timer = asyncio.create_task(self.run_timer(delay)) await asyncio.gather(self.timer) except ConnectionResetError: - log.warning("ConnectionResetError in sync loop, " - "trying next loop", - exc_info=True) + self.log.warning( + "ConnectionResetError in sync loop, trying next loop", + exc_info=True) except CancelledError: # just stopping server pass except ResumableError: - log.warning("ResumableError in sync loop, " - "trying next loop", - exc_info=True) + self.log.warning( + "ResumableError in sync loop, trying next loop", + exc_info=True) except Exception: self.stop() - log.warning("Unhandled except. in sync loop, stopping server", - exc_info=True) + self.log.warning( + "Unhandled except. in sync loop, stopping server", + exc_info=True) def stop(self): """Sets is_running flag to false, 'check_shutdown' shuts server down""" @@ -431,16 +435,17 @@ class SyncServerThread(threading.Thread): while self.is_running: if self.module.long_running_tasks: task = self.module.long_running_tasks.pop() - log.info("starting long running") + self.log.info("starting long running") await self.loop.run_in_executor(None, task["func"]) - log.info("finished long running") + self.log.info("finished long running") self.module.projects_processed.remove(task["project_name"]) await asyncio.sleep(0.5) tasks = [task for task in asyncio.all_tasks() if task is not asyncio.current_task()] list(map(lambda task: task.cancel(), tasks)) # cancel all the tasks results = await asyncio.gather(*tasks, return_exceptions=True) - log.debug(f'Finished awaiting cancelled tasks, results: {results}...') + self.log.debug( + f'Finished awaiting cancelled tasks, results: {results}...') await self.loop.shutdown_asyncgens() # to really make sure everything else has time to stop self.executor.shutdown(wait=True) @@ -453,29 +458,32 @@ class SyncServerThread(threading.Thread): def reset_timer(self): """Called when waiting for next loop should be skipped""" - log.debug("Resetting timer") + self.log.debug("Resetting timer") if self.timer: self.timer.cancel() self.timer = None - def _working_sites(self, collection): - if self.module.is_project_paused(collection): - log.debug("Both sites same, skipping") + def _working_sites(self, project_name): + if self.module.is_project_paused(project_name): + self.log.debug("Both sites same, skipping") return None, None - local_site = self.module.get_active_site(collection) - remote_site = self.module.get_remote_site(collection) + local_site = self.module.get_active_site(project_name) + remote_site = 
self.module.get_remote_site(project_name) if local_site == remote_site: - log.debug("{}-{} sites same, skipping".format(local_site, - remote_site)) + self.log.debug("{}-{} sites same, skipping".format( + local_site, remote_site)) return None, None - configured_sites = _get_configured_sites(self.module, collection) + configured_sites = _get_configured_sites(self.module, project_name) if not all([local_site in configured_sites, remote_site in configured_sites]): - log.debug("Some of the sites {} - {} is not ".format(local_site, - remote_site) + - "working properly") + self.log.debug( + "Some of the sites {} - {} is not working properly".format( + local_site, remote_site + ) + ) + return None, None return local_site, remote_site diff --git a/openpype/modules/sync_server/sync_server_module.py b/openpype/modules/sync_server/sync_server_module.py index 4027561d22..634b68c55f 100644 --- a/openpype/modules/sync_server/sync_server_module.py +++ b/openpype/modules/sync_server/sync_server_module.py @@ -6,14 +6,14 @@ import platform import copy from collections import deque, defaultdict - +from openpype.client import get_projects from openpype.modules import OpenPypeModule from openpype_interfaces import ITrayModule from openpype.settings import ( get_project_settings, get_system_settings, ) -from openpype.lib import PypeLogger, get_local_site_id +from openpype.lib import Logger, get_local_site_id from openpype.pipeline import AvalonMongoDB, Anatomy from openpype.settings.lib import ( get_default_anatomy_settings, @@ -25,8 +25,10 @@ from .providers import lib from .utils import time_function, SyncStatus, SiteAlreadyPresentError +from openpype.client import get_representations, get_representation_by_id -log = PypeLogger.get_logger("SyncServer") + +log = Logger.get_logger("SyncServer") class SyncServerModule(OpenPypeModule, ITrayModule): @@ -128,12 +130,12 @@ class SyncServerModule(OpenPypeModule, ITrayModule): self.projects_processed = set() """ Start of Public API """ - def add_site(self, collection, representation_id, site_name=None, + def add_site(self, project_name, representation_id, site_name=None, force=False): """ Adds new site to representation to be synced. - 'collection' must have synchronization enabled (globally or + 'project_name' must have synchronization enabled (globally or project only) Used as a API endpoint from outside applications (Loader etc). @@ -141,7 +143,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule): Use 'force' to reset existing site. 
Args: - collection (string): project name (must match DB) + project_name (string): project name (must match DB) representation_id (string): MongoDB _id value site_name (string): name of configured and active site force (bool): reset site if exists @@ -151,25 +153,25 @@ class SyncServerModule(OpenPypeModule, ITrayModule): not 'force' ValueError - other errors (repre not found, misconfiguration) """ - if not self.get_sync_project_setting(collection): + if not self.get_sync_project_setting(project_name): raise ValueError("Project not configured") if not site_name: site_name = self.DEFAULT_SITE - self.reset_site_on_representation(collection, + self.reset_site_on_representation(project_name, representation_id, site_name=site_name, force=force) - def remove_site(self, collection, representation_id, site_name, + def remove_site(self, project_name, representation_id, site_name, remove_local_files=False): """ Removes 'site_name' for particular 'representation_id' on - 'collection' + 'project_name' Args: - collection (string): project name (must match DB) + project_name (string): project name (must match DB) representation_id (string): MongoDB _id value site_name (string): name of configured and active site remove_local_files (bool): remove only files for 'local_id' @@ -178,15 +180,15 @@ class SyncServerModule(OpenPypeModule, ITrayModule): Returns: throws ValueError if any issue """ - if not self.get_sync_project_setting(collection): + if not self.get_sync_project_setting(project_name): raise ValueError("Project not configured") - self.reset_site_on_representation(collection, + self.reset_site_on_representation(project_name, representation_id, site_name=site_name, remove=True) if remove_local_files: - self._remove_local_file(collection, representation_id, site_name) + self._remove_local_file(project_name, representation_id, site_name) def compute_resource_sync_sites(self, project_name): """Get available resource sync sites state for publish process. @@ -333,9 +335,9 @@ class SyncServerModule(OpenPypeModule, ITrayModule): return alt_site_pairs - def clear_project(self, collection, site_name): + def clear_project(self, project_name, site_name): """ - Clear 'collection' of 'site_name' and its local files + Clear 'project_name' of 'site_name' and its local files Works only on real local sites, not on 'studio' """ @@ -344,16 +346,17 @@ class SyncServerModule(OpenPypeModule, ITrayModule): "files.sites.name": site_name } + # TODO currently not possible to replace with get_representations representations = list( - self.connection.database[collection].find(query)) + self.connection.database[project_name].find(query)) if not representations: self.log.debug("No repre found") return for repre in representations: - self.remove_site(collection, repre.get("_id"), site_name, True) + self.remove_site(project_name, repre.get("_id"), site_name, True) - def create_validate_project_task(self, collection, site_name): + def create_validate_project_task(self, project_name, site_name): """Adds metadata about project files validation on a queue. 
This process will loop through all representation and check if @@ -370,33 +373,28 @@ class SyncServerModule(OpenPypeModule, ITrayModule): """ task = { "type": "validate", - "project_name": collection, - "func": lambda: self.validate_project(collection, site_name, + "project_name": project_name, + "func": lambda: self.validate_project(project_name, site_name, reset_missing=True) } - self.projects_processed.add(collection) + self.projects_processed.add(project_name) self.long_running_tasks.append(task) - def validate_project(self, collection, site_name, reset_missing=False): - """Validate 'collection' of 'site_name' and its local files + def validate_project(self, project_name, site_name, reset_missing=False): + """Validate 'project_name' of 'site_name' and its local files If file present and not marked with a 'site_name' in DB, DB is updated with site name and file modified date. Args: - collection (string): project name + project_name (string): project name site_name (string): active site name reset_missing (bool): if True reset site in DB if missing physically """ - self.log.debug("Validation of {} for {} started".format(collection, + self.log.debug("Validation of {} for {} started".format(project_name, site_name)) - query = { - "type": "representation" - } - - representations = list( - self.connection.database[collection].find(query)) + representations = list(get_representations(project_name)) if not representations: self.log.debug("No repre found") return @@ -416,7 +414,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule): continue file_path = repre_file.get("path", "") - local_file_path = self.get_local_file_path(collection, + local_file_path = self.get_local_file_path(project_name, site_name, file_path) @@ -428,14 +426,11 @@ class SyncServerModule(OpenPypeModule, ITrayModule): "Adding site {} for {}".format(site_name, repre_id)) - query = { - "_id": repre_id - } created_dt = datetime.fromtimestamp( os.path.getmtime(local_file_path)) elem = {"name": site_name, "created_dt": created_dt} - self._add_site(collection, query, repre, elem, + self._add_site(project_name, repre, elem, site_name=site_name, file_id=repre_file["_id"], force=True) @@ -445,51 +440,52 @@ class SyncServerModule(OpenPypeModule, ITrayModule): self.log.debug("Resetting site {} for {}". format(site_name, repre_id)) self.reset_site_on_representation( - collection, repre_id, site_name=site_name, + project_name, repre_id, site_name=site_name, file_id=repre_file["_id"]) sites_reset += 1 if sites_added % 100 == 0: self.log.debug("Sites added {}".format(sites_added)) - self.log.debug("Validation of {} for {} ended".format(collection, + self.log.debug("Validation of {} for {} ended".format(project_name, site_name)) self.log.info("Sites added {}, sites reset {}".format(sites_added, reset_missing)) - def pause_representation(self, collection, representation_id, site_name): + def pause_representation(self, project_name, representation_id, site_name): """ Sets 'representation_id' as paused, eg. no syncing should be happening on it. Args: - collection (string): project name + project_name (string): project name representation_id (string): MongoDB objectId value site_name (string): 'gdrive', 'studio' etc. 
""" - log.info("Pausing SyncServer for {}".format(representation_id)) + self.log.info("Pausing SyncServer for {}".format(representation_id)) self._paused_representations.add(representation_id) - self.reset_site_on_representation(collection, representation_id, + self.reset_site_on_representation(project_name, representation_id, site_name=site_name, pause=True) - def unpause_representation(self, collection, representation_id, site_name): + def unpause_representation(self, project_name, + representation_id, site_name): """ Sets 'representation_id' as unpaused. Does not fail or warn if repre wasn't paused. Args: - collection (string): project name + project_name (string): project name representation_id (string): MongoDB objectId value site_name (string): 'gdrive', 'studio' etc. """ - log.info("Unpausing SyncServer for {}".format(representation_id)) + self.log.info("Unpausing SyncServer for {}".format(representation_id)) try: self._paused_representations.remove(representation_id) except KeyError: pass # self.paused_representations is not persistent - self.reset_site_on_representation(collection, representation_id, + self.reset_site_on_representation(project_name, representation_id, site_name=site_name, pause=False) def is_representation_paused(self, representation_id, @@ -520,9 +516,9 @@ class SyncServerModule(OpenPypeModule, ITrayModule): happening on all representation inside. Args: - project_name (string): collection name + project_name (string): project_name name """ - log.info("Pausing SyncServer for {}".format(project_name)) + self.log.info("Pausing SyncServer for {}".format(project_name)) self._paused_projects.add(project_name) def unpause_project(self, project_name): @@ -532,9 +528,9 @@ class SyncServerModule(OpenPypeModule, ITrayModule): Does not fail or warn if project wasn't paused. Args: - project_name (string): collection name + project_name (string): """ - log.info("Unpausing SyncServer for {}".format(project_name)) + self.log.info("Unpausing SyncServer for {}".format(project_name)) try: self._paused_projects.remove(project_name) except KeyError: @@ -545,7 +541,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule): Returns if 'project_name' is paused or not. Args: - project_name (string): collection name + project_name (string): check_parents (bool): check if server itself is not paused Returns: @@ -562,14 +558,14 @@ class SyncServerModule(OpenPypeModule, ITrayModule): It won't check anything, not uploading/downloading... 
""" - log.info("Pausing SyncServer") + self.log.info("Pausing SyncServer") self._paused = True def unpause_server(self): """ Unpause server """ - log.info("Unpausing SyncServer") + self.log.info("Unpausing SyncServer") self._paused = False def is_paused(self): @@ -880,7 +876,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule): # val = val[platform.system().lower()] # except KeyError: # st = "{}'s field value {} should be".format(key, val) # noqa: E501 - # log.error(st + " multiplatform dict") + # self.log.error(st + " multiplatform dict") # # item["namespace"] = item["namespace"].replace('{site}', # site_name) @@ -917,7 +913,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule): enabled_projects = [] if self.enabled: - for project in self.connection.projects(projection={"name": 1}): + for project in get_projects(fields=["name"]): project_name = project["name"] if self.is_project_enabled(project_name): enabled_projects.append(project_name) @@ -944,8 +940,8 @@ class SyncServerModule(OpenPypeModule, ITrayModule): return True return False - def handle_alternate_site(self, collection, representation, processed_site, - file_id, synced_file_id): + def handle_alternate_site(self, project_name, representation, + processed_site, file_id, synced_file_id): """ For special use cases where one site vendors another. @@ -958,7 +954,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule): same location >> file is accesible on 'sftp' site right away. Args: - collection (str): name of project + project_name (str): name of project representation (dict) processed_site (str): real site_name of published/uploaded file file_id (ObjectId): DB id of file handled @@ -982,26 +978,112 @@ class SyncServerModule(OpenPypeModule, ITrayModule): alternate_sites = set(alternate_sites) for alt_site in alternate_sites: - query = { - "_id": representation["_id"] - } elem = {"name": alt_site, "created_dt": datetime.now(), "id": synced_file_id} self.log.debug("Adding alternate {} to {}".format( alt_site, representation["_id"])) - self._add_site(collection, query, + self._add_site(project_name, representation, elem, alt_site, file_id=file_id, force=True) + def get_repre_info_for_versions(self, project_name, version_ids, + active_site, remote_site): + """Returns representation documents for versions and sites combi + + Args: + project_name (str) + version_ids (list): of version[_id] + active_site (string): 'local', 'studio' etc + remote_site (string): dtto + Returns: + + """ + self.connection.Session["AVALON_PROJECT"] = project_name + query = [ + {"$match": {"parent": {"$in": version_ids}, + "type": "representation", + "files.sites.name": {"$exists": 1}}}, + {"$unwind": "$files"}, + {'$addFields': { + 'order_local': { + '$filter': { + 'input': '$files.sites', 'as': 'p', + 'cond': {'$eq': ['$$p.name', active_site]} + } + } + }}, + {'$addFields': { + 'order_remote': { + '$filter': { + 'input': '$files.sites', 'as': 'p', + 'cond': {'$eq': ['$$p.name', remote_site]} + } + } + }}, + {'$addFields': { + 'progress_local': {"$arrayElemAt": [{ + '$cond': [ + {'$size': "$order_local.progress"}, + "$order_local.progress", + # if exists created_dt count is as available + {'$cond': [ + {'$size': "$order_local.created_dt"}, + [1], + [0] + ]} + ]}, + 0 + ]} + }}, + {'$addFields': { + 'progress_remote': {"$arrayElemAt": [{ + '$cond': [ + {'$size': "$order_remote.progress"}, + "$order_remote.progress", + # if exists created_dt count is as available + {'$cond': [ + {'$size': "$order_remote.created_dt"}, + [1], + [0] + ]} + ]}, + 0 + ]} + }}, 
+ {'$group': { # first group by repre + '_id': '$_id', + 'parent': {'$first': '$parent'}, + 'avail_ratio_local': { + '$first': { + '$divide': [{'$sum': "$progress_local"}, {'$sum': 1}] + } + }, + 'avail_ratio_remote': { + '$first': { + '$divide': [{'$sum': "$progress_remote"}, {'$sum': 1}] + } + } + }}, + {'$group': { # second group by parent, eg version_id + '_id': '$parent', + 'repre_count': {'$sum': 1}, # total representations + # fully available representation for site + 'avail_repre_local': {'$sum': "$avail_ratio_local"}, + 'avail_repre_remote': {'$sum': "$avail_ratio_remote"}, + }}, + ] + # docs = list(self.connection.aggregate(query)) + return self.connection.aggregate(query) + """ End of Public API """ - def get_local_file_path(self, collection, site_name, file_path): + def get_local_file_path(self, project_name, site_name, file_path): """ Externalized for app """ - handler = LocalDriveHandler(collection, site_name) + handler = LocalDriveHandler(project_name, site_name) local_file_path = handler.resolve_path(file_path) return local_file_path @@ -1066,7 +1148,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule): if self.enabled: self.sync_server_thread.start() else: - log.info("No presets or active providers. " + + self.log.info("No presets or active providers. " + "Synchronization not possible.") def tray_exit(self): @@ -1084,12 +1166,12 @@ class SyncServerModule(OpenPypeModule, ITrayModule): if not self.is_running: return try: - log.info("Stopping sync server server") + self.log.info("Stopping sync server server") self.sync_server_thread.is_running = False self.sync_server_thread.stop() - log.info("Sync server stopped") + self.log.info("Sync server stopped") except Exception: - log.warning( + self.log.warning( "Error has happened during Killing sync server", exc_info=True ) @@ -1160,10 +1242,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule): def _prepare_sync_project_settings(self, exclude_locals): sync_project_settings = {} system_sites = self.get_all_site_configs() - project_docs = self.connection.projects( - projection={"name": 1}, - only_active=True - ) + project_docs = get_projects(fields=["name"]) for project_doc in project_docs: project_name = project_doc["name"] sites = copy.deepcopy(system_sites) # get all configured sites @@ -1177,7 +1256,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule): sync_project_settings[project_name] = proj_settings if not sync_project_settings: - log.info("No enabled and configured projects for sync.") + self.log.info("No enabled and configured projects for sync.") return sync_project_settings def get_sync_project_setting(self, project_name, exclude_locals=False, @@ -1288,7 +1367,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule): return sites.get(site, 'N/A') @time_function - def get_sync_representations(self, collection, active_site, remote_site): + def get_sync_representations(self, project_name, active_site, remote_site): """ Get representations that should be synced, these could be recognised by presence of document in 'files.sites', where key is @@ -1299,8 +1378,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule): better performance. Goal is to get as few representations as possible. 
Args: - collection (string): name of collection (in most cases matches - project name + project_name (string): active_site (string): identifier of current active site (could be 'local_0' when working from home, 'studio' when working in the studio (default) @@ -1309,10 +1387,10 @@ class SyncServerModule(OpenPypeModule, ITrayModule): Returns: (list) of dictionaries """ - log.debug("Check representations for : {}".format(collection)) - self.connection.Session["AVALON_PROJECT"] = collection + self.log.debug("Check representations for : {}".format(project_name)) + self.connection.Session["AVALON_PROJECT"] = project_name # retry_cnt - number of attempts to sync specific file before giving up - retries_arr = self._get_retries_arr(collection) + retries_arr = self._get_retries_arr(project_name) match = { "type": "representation", "$or": [ @@ -1388,9 +1466,10 @@ class SyncServerModule(OpenPypeModule, ITrayModule): }}, {"$sort": {'priority': -1, '_id': 1}}, ] - log.debug("active_site:{} - remote_site:{}".format(active_site, - remote_site)) - log.debug("query: {}".format(aggr)) + self.log.debug("active_site:{} - remote_site:{}".format( + active_site, remote_site + )) + self.log.debug("query: {}".format(aggr)) representations = self.connection.aggregate(aggr) return representations @@ -1425,7 +1504,9 @@ class SyncServerModule(OpenPypeModule, ITrayModule): if get_local_site_id() not in (local_site, remote_site): # don't do upload/download for studio sites - log.debug("No local site {} - {}".format(local_site, remote_site)) + self.log.debug( + "No local site {} - {}".format(local_site, remote_site) + ) return SyncStatus.DO_NOTHING _, remote_rec = self._get_site_rec(sites, remote_site) or {} @@ -1449,14 +1530,14 @@ class SyncServerModule(OpenPypeModule, ITrayModule): return SyncStatus.DO_NOTHING - def update_db(self, collection, new_file_id, file, representation, + def update_db(self, project_name, new_file_id, file, representation, site, error=None, progress=None, priority=None): """ Update 'provider' portion of records in DB with success (file_id) or error (exception) Args: - collection (string): name of project - force to db connection as + project_name (string): name of project - force to db connection as each file might come from different collection new_file_id (string): file (dictionary): info about processed file (pulled from DB) @@ -1499,7 +1580,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule): if file_id: arr_filter.append({'f._id': ObjectId(file_id)}) - self.connection.database[collection].update_one( + self.connection.database[project_name].update_one( query, update, upsert=True, @@ -1516,11 +1597,16 @@ class SyncServerModule(OpenPypeModule, ITrayModule): error_str = '' source_file = file.get("path", "") - log.debug("File for {} - {source_file} process {status} {error_str}". 
- format(representation_id, - status=status, - source_file=source_file, - error_str=error_str)) + self.log.debug( + ( + "File for {} - {source_file} process {status} {error_str}" + ).format( + representation_id, + status=status, + source_file=source_file, + error_str=error_str + ) + ) def _get_file_info(self, files, _id): """ @@ -1562,7 +1648,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule): return -1, None - def reset_site_on_representation(self, collection, representation_id, + def reset_site_on_representation(self, project_name, representation_id, side=None, file_id=None, site_name=None, remove=False, pause=None, force=False): """ @@ -1579,7 +1665,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule): Should be used when repre should be synced to new site. Args: - collection (string): name of project (eg. collection) in DB + project_name (string): name of project (eg. collection) in DB representation_id(string): _id of representation file_id (string): file _id in representation side (string): local or remote side @@ -1593,20 +1679,18 @@ class SyncServerModule(OpenPypeModule, ITrayModule): not 'force' ValueError - other errors (repre not found, misconfiguration) """ - query = { - "_id": ObjectId(representation_id) - } - - representation = self.connection.database[collection].find_one(query) + representation = get_representation_by_id(project_name, + representation_id) if not representation: raise ValueError("Representation {} not found in {}". - format(representation_id, collection)) + format(representation_id, project_name)) + if side and site_name: raise ValueError("Misconfiguration, only one of side and " + "site_name arguments should be passed.") - local_site = self.get_active_site(collection) - remote_site = self.get_remote_site(collection) + local_site = self.get_active_site(project_name) + remote_site = self.get_remote_site(project_name) if side: if side == 'local': @@ -1617,37 +1701,43 @@ class SyncServerModule(OpenPypeModule, ITrayModule): elem = {"name": site_name} if file_id: # reset site for particular file - self._reset_site_for_file(collection, query, + self._reset_site_for_file(project_name, representation_id, elem, file_id, site_name) elif side: # reset site for whole representation - self._reset_site(collection, query, elem, site_name) + self._reset_site(project_name, representation_id, elem, site_name) elif remove: # remove site for whole representation - self._remove_site(collection, query, representation, site_name) + self._remove_site(project_name, + representation, site_name) elif pause is not None: - self._pause_unpause_site(collection, query, + self._pause_unpause_site(project_name, representation, site_name, pause) else: # add new site to all files for representation - self._add_site(collection, query, representation, elem, site_name, + self._add_site(project_name, representation, elem, site_name, force=force) - def _update_site(self, collection, query, update, arr_filter): + def _update_site(self, project_name, representation_id, + update, arr_filter): """ Auxiliary method to call update_one function on DB Used for refactoring ugly reset_provider_for_file """ - self.connection.database[collection].update_one( + query = { + "_id": ObjectId(representation_id) + } + + self.connection.database[project_name].update_one( query, update, upsert=True, array_filters=arr_filter ) - def _reset_site_for_file(self, collection, query, + def _reset_site_for_file(self, project_name, representation_id, elem, file_id, site_name): """ Resets 'site_name' for 'file_id' on 
representation in 'query' on - 'collection' + 'project_name' """ update = { "$set": {"files.$[f].sites.$[s]": elem} @@ -1660,9 +1750,9 @@ class SyncServerModule(OpenPypeModule, ITrayModule): {'f._id': file_id} ] - self._update_site(collection, query, update, arr_filter) + self._update_site(project_name, representation_id, update, arr_filter) - def _reset_site(self, collection, query, elem, site_name): + def _reset_site(self, project_name, representation_id, elem, site_name): """ Resets 'site_name' for all files of representation in 'query' """ @@ -1674,9 +1764,9 @@ class SyncServerModule(OpenPypeModule, ITrayModule): {'s.name': site_name} ] - self._update_site(collection, query, update, arr_filter) + self._update_site(project_name, representation_id, update, arr_filter) - def _remove_site(self, collection, query, representation, site_name): + def _remove_site(self, project_name, representation, site_name): """ Removes 'site_name' for 'representation' in 'query' @@ -1690,7 +1780,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule): break if not found: msg = "Site {} not found".format(site_name) - log.info(msg) + self.log.info(msg) raise ValueError(msg) update = { @@ -1698,10 +1788,11 @@ class SyncServerModule(OpenPypeModule, ITrayModule): } arr_filter = [] - self._update_site(collection, query, update, arr_filter) + self._update_site(project_name, representation["_id"], + update, arr_filter) - def _pause_unpause_site(self, collection, query, - representation, site_name, pause): + def _pause_unpause_site(self, project_name, representation, + site_name, pause): """ Pauses/unpauses all files for 'representation' based on 'pause' @@ -1716,7 +1807,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule): break if not found: msg = "Site {} not found".format(site_name) - log.info(msg) + self.log.info(msg) raise ValueError(msg) if pause: @@ -1733,12 +1824,13 @@ class SyncServerModule(OpenPypeModule, ITrayModule): {'s.name': site_name} ] - self._update_site(collection, query, update, arr_filter) + self._update_site(project_name, representation["_id"], + update, arr_filter) - def _add_site(self, collection, query, representation, elem, site_name, + def _add_site(self, project_name, representation, elem, site_name, force=False, file_id=None): """ - Adds 'site_name' to 'representation' on 'collection' + Adds 'site_name' to 'representation' on 'project_name' Args: representation (dict) @@ -1746,10 +1838,11 @@ class SyncServerModule(OpenPypeModule, ITrayModule): Use 'force' to remove existing or raises ValueError """ + representation_id = representation["_id"] reset_existing = False files = representation.get("files", []) if not files: - log.debug("No files for {}".format(representation["_id"])) + self.log.debug("No files for {}".format(representation_id)) return for repre_file in files: @@ -1759,13 +1852,14 @@ class SyncServerModule(OpenPypeModule, ITrayModule): for site in repre_file.get("sites"): if site["name"] == site_name: if force or site.get("error"): - self._reset_site_for_file(collection, query, + self._reset_site_for_file(project_name, + representation_id, elem, repre_file["_id"], site_name) reset_existing = True else: msg = "Site {} already present".format(site_name) - log.info(msg) + self.log.info(msg) raise SiteAlreadyPresentError(msg) if reset_existing: @@ -1785,14 +1879,15 @@ class SyncServerModule(OpenPypeModule, ITrayModule): {'f._id': file_id} ] - self._update_site(collection, query, update, arr_filter) + self._update_site(project_name, representation_id, + update, arr_filter) - 
def _remove_local_file(self, collection, representation_id, site_name):
+    def _remove_local_file(self, project_name, representation_id, site_name):
         """
             Removes all local files for 'site_name' of 'representation_id'

             Args:
-                collection (string): project name (must match DB)
+                project_name (string): project name (must match DB)
                 representation_id (string): MongoDB _id value
                 site_name (string): name of configured and active site

@@ -1808,21 +1903,17 @@
         provider_name = self.get_provider_for_site(site=site_name)

         if provider_name == 'local_drive':
-            query = {
-                "_id": ObjectId(representation_id)
-            }
-
-            representation = list(
-                self.connection.database[collection].find(query))
+            representation = get_representation_by_id(project_name,
+                                                      representation_id,
+                                                      fields=["files"])
             if not representation:
                 self.log.debug("No repre {} found".format(
                     representation_id))
                 return

-            representation = representation.pop()
             local_file_path = ''
             for file in representation.get("files"):
-                local_file_path = self.get_local_file_path(collection,
+                local_file_path = self.get_local_file_path(project_name,
                                                            site_name,
                                                            file.get("path", "")
                                                            )
@@ -1868,16 +1959,19 @@
             self.widget = SyncServerWindow(self)
             no_errors = True
         except ValueError:
-            log.info("No system setting for sync. Not syncing.", exc_info=True)
+            self.log.info(
+                "No system setting for sync. Not syncing.", exc_info=True
+            )
         except KeyError:
-            log.info((
+            self.log.info((
                 "There are not set presets for SyncServer OR "
                 "Credentials provided are invalid, "
                 "no syncing possible").
                 format(str(self.sync_project_settings)), exc_info=True)
         except:
-            log.error("Uncaught exception durin start of SyncServer",
-                      exc_info=True)
+            self.log.error(
+                "Uncaught exception during start of SyncServer",
+                exc_info=True)
         self.enabled = no_errors
         self.widget.show()
diff --git a/openpype/modules/sync_server/tray/app.py b/openpype/modules/sync_server/tray/app.py
index 96fad6a247..9b9768327e 100644
--- a/openpype/modules/sync_server/tray/app.py
+++ b/openpype/modules/sync_server/tray/app.py
@@ -2,7 +2,6 @@ from Qt import QtWidgets, QtCore, QtGui

 from openpype.tools.settings import style

-from openpype.lib import PypeLogger
 from openpype import resources

 from .widgets import (
@@ -10,8 +9,6 @@ from .widgets import (
     SyncRepresentationSummaryWidget
 )

-log = PypeLogger().get_logger("SyncServer")
-

 class SyncServerWindow(QtWidgets.QDialog):
     """
diff --git a/openpype/modules/sync_server/tray/delegates.py b/openpype/modules/sync_server/tray/delegates.py
index 5ab809a816..988eb40d28 100644
--- a/openpype/modules/sync_server/tray/delegates.py
+++ b/openpype/modules/sync_server/tray/delegates.py
@@ -1,8 +1,7 @@
 import os
 from Qt import QtCore, QtWidgets, QtGui

-from openpype.lib import PypeLogger
-from .
import lib +from openpype.lib import Logger from openpype.tools.utils.constants import ( LOCAL_PROVIDER_ROLE, @@ -16,7 +15,7 @@ from openpype.tools.utils.constants import ( EDIT_ICON_ROLE ) -log = PypeLogger().get_logger("SyncServer") +log = Logger.get_logger("SyncServer") class PriorityDelegate(QtWidgets.QStyledItemDelegate): diff --git a/openpype/modules/sync_server/tray/lib.py b/openpype/modules/sync_server/tray/lib.py index 87344be634..ff93815639 100644 --- a/openpype/modules/sync_server/tray/lib.py +++ b/openpype/modules/sync_server/tray/lib.py @@ -2,11 +2,6 @@ import attr import abc import six -from openpype.lib import PypeLogger - - -log = PypeLogger().get_logger("SyncServer") - STATUS = { 0: 'In Progress', 1: 'Queued', diff --git a/openpype/modules/sync_server/tray/models.py b/openpype/modules/sync_server/tray/models.py index 6d1e85c17a..d63d046508 100644 --- a/openpype/modules/sync_server/tray/models.py +++ b/openpype/modules/sync_server/tray/models.py @@ -9,8 +9,8 @@ import qtawesome from openpype.tools.utils.delegates import pretty_timestamp -from openpype.lib import PypeLogger -from openpype.api import get_local_site_id +from openpype.lib import Logger, get_local_site_id +from openpype.client import get_representation_by_id from . import lib @@ -32,7 +32,7 @@ from openpype.tools.utils.constants import ( ) -log = PypeLogger().get_logger("SyncServer") +log = Logger.get_logger("SyncServer") class _SyncRepresentationModel(QtCore.QAbstractTableModel): @@ -440,7 +440,7 @@ class SyncRepresentationSummaryModel(_SyncRepresentationModel): full text filtering. Allows pagination, most of heavy lifting is being done on DB side. - Single model matches to single collection. When project is changed, + Single model matches to single project. When project is changed, model is reset and refreshed. 
Args: @@ -919,11 +919,10 @@ class SyncRepresentationSummaryModel(_SyncRepresentationModel): repre_id = self.data(index, Qt.UserRole) - representation = list(self.dbcon.find({"type": "representation", - "_id": repre_id})) + representation = get_representation_by_id(self.project, repre_id) if representation: self.sync_server.update_db(self.project, None, None, - representation.pop(), + representation, get_local_site_id(), priority=value) self.is_editing = False @@ -1357,11 +1356,10 @@ class SyncRepresentationDetailModel(_SyncRepresentationModel): file_id = self.data(index, Qt.UserRole) updated_file = None - # conversion from cursor to list - representations = list(self.dbcon.find({"type": "representation", - "_id": self._id})) + representation = get_representation_by_id(self.project, self._id) + if not representation: + return - representation = representations.pop() for repre_file in representation["files"]: if repre_file["_id"] == file_id: updated_file = repre_file diff --git a/openpype/modules/sync_server/tray/widgets.py b/openpype/modules/sync_server/tray/widgets.py index b4ee447ac4..c40aa98f24 100644 --- a/openpype/modules/sync_server/tray/widgets.py +++ b/openpype/modules/sync_server/tray/widgets.py @@ -9,8 +9,7 @@ import qtawesome from openpype.tools.settings import style -from openpype.api import get_local_site_id -from openpype.lib import PypeLogger +from openpype.lib import Logger, get_local_site_id from openpype.tools.utils.delegates import pretty_timestamp @@ -36,7 +35,7 @@ from openpype.tools.utils.constants import ( TRIES_ROLE ) -log = PypeLogger().get_logger("SyncServer") +log = Logger.get_logger("SyncServer") class SyncProjectListWidget(QtWidgets.QWidget): diff --git a/openpype/modules/sync_server/utils.py b/openpype/modules/sync_server/utils.py index 03f362202f..4caa01e9d7 100644 --- a/openpype/modules/sync_server/utils.py +++ b/openpype/modules/sync_server/utils.py @@ -1,6 +1,8 @@ import time -from openpype.api import Logger -log = Logger().get_logger("SyncServer") + +from openpype.lib import Logger + +log = Logger.get_logger("SyncServer") class ResumableError(Exception): diff --git a/openpype/modules/timers_manager/idle_threads.py b/openpype/modules/timers_manager/idle_threads.py index 9ec27e659b..7242761143 100644 --- a/openpype/modules/timers_manager/idle_threads.py +++ b/openpype/modules/timers_manager/idle_threads.py @@ -2,7 +2,7 @@ import time from Qt import QtCore from pynput import mouse, keyboard -from openpype.lib import PypeLogger +from openpype.lib import Logger class IdleItem: @@ -31,7 +31,7 @@ class IdleManager(QtCore.QThread): def __init__(self): super(IdleManager, self).__init__() - self.log = PypeLogger.get_logger(self.__class__.__name__) + self.log = Logger.get_logger(self.__class__.__name__) self.signal_reset_timer.connect(self._reset_time) self.idle_item = IdleItem() diff --git a/openpype/modules/timers_manager/plugins/publish/start_timer.py b/openpype/modules/timers_manager/plugins/publish/start_timer.py new file mode 100644 index 0000000000..6408327ca1 --- /dev/null +++ b/openpype/modules/timers_manager/plugins/publish/start_timer.py @@ -0,0 +1,39 @@ +""" +Requires: + context -> system_settings + context -> openPypeModules +""" + +import pyblish.api + +from openpype.pipeline import legacy_io + + +class StartTimer(pyblish.api.ContextPlugin): + label = "Start Timer" + order = pyblish.api.IntegratorOrder + 1 + hosts = ["*"] + + def process(self, context): + timers_manager = context.data["openPypeModules"]["timers_manager"] + if not timers_manager.enabled: 
+ self.log.debug("TimersManager is disabled") + return + + modules_settings = context.data["system_settings"]["modules"] + if not modules_settings["timers_manager"]["disregard_publishing"]: + self.log.debug("Publish is not affecting running timers.") + return + + project_name = legacy_io.active_project() + asset_name = legacy_io.Session.get("AVALON_ASSET") + task_name = legacy_io.Session.get("AVALON_TASK") + if not project_name or not asset_name or not task_name: + self.log.info(( + "Current context does not contain all" + " required information to start a timer." + )) + return + timers_manager.start_timer_with_webserver( + project_name, asset_name, task_name, self.log + ) diff --git a/openpype/modules/timers_manager/plugins/publish/stop_timer.py b/openpype/modules/timers_manager/plugins/publish/stop_timer.py new file mode 100644 index 0000000000..a8674ff2ca --- /dev/null +++ b/openpype/modules/timers_manager/plugins/publish/stop_timer.py @@ -0,0 +1,27 @@ +""" +Requires: + context -> system_settings + context -> openPypeModules +""" + + +import pyblish.api + + +class StopTimer(pyblish.api.ContextPlugin): + label = "Stop Timer" + order = pyblish.api.ExtractorOrder - 0.49 + hosts = ["*"] + + def process(self, context): + timers_manager = context.data["openPypeModules"]["timers_manager"] + if not timers_manager.enabled: + self.log.debug("TimersManager is disabled") + return + + modules_settings = context.data["system_settings"]["modules"] + if not modules_settings["timers_manager"]["disregard_publishing"]: + self.log.debug("Publish is not affecting running timers.") + return + + timers_manager.stop_timer_with_webserver(self.log) diff --git a/openpype/modules/timers_manager/rest_api.py b/openpype/modules/timers_manager/rest_api.py index f16cb316c3..4a2e9e6575 100644 --- a/openpype/modules/timers_manager/rest_api.py +++ b/openpype/modules/timers_manager/rest_api.py @@ -1,9 +1,7 @@ import json from aiohttp.web_response import Response -from openpype.api import Logger - -log = Logger().get_logger("Event processor") +from openpype.lib import Logger class TimersManagerModuleRestApi: @@ -12,6 +10,7 @@ class TimersManagerModuleRestApi: happens in Workfile app. 
""" def __init__(self, user_module, server_manager): + self._log = None self.module = user_module self.server_manager = server_manager @@ -19,6 +18,12 @@ class TimersManagerModuleRestApi: self.register() + @property + def log(self): + if self._log is None: + self._log = Logger.get_logger(self.__ckass__.__name__) + return self._log + def register(self): self.server_manager.add_route( "POST", @@ -47,7 +52,7 @@ class TimersManagerModuleRestApi: "Payload must contain fields 'project_name," " 'asset_name' and 'task_name'" ) - log.error(msg) + self.log.error(msg) return Response(status=400, message=msg) self.module.stop_timers() @@ -73,7 +78,7 @@ class TimersManagerModuleRestApi: "Payload must contain fields 'project_name, 'asset_name'," " 'task_name'" ) - log.warning(message) + self.log.warning(message) return Response(text=message, status=404) time = self.module.get_task_time(project_name, asset_name, task_name) diff --git a/openpype/modules/timers_manager/timers_manager.py b/openpype/modules/timers_manager/timers_manager.py index 3453e4bc4c..c168e9534d 100644 --- a/openpype/modules/timers_manager/timers_manager.py +++ b/openpype/modules/timers_manager/timers_manager.py @@ -6,12 +6,14 @@ from openpype.client import get_asset_by_name from openpype.modules import OpenPypeModule from openpype_interfaces import ( ITrayService, - ILaunchHookPaths + IPluginPaths ) from openpype.lib.events import register_event_callback from .exceptions import InvalidContextError +TIMER_MODULE_DIR = os.path.dirname(os.path.abspath(__file__)) + class ExampleTimersManagerConnector: """Timers manager can handle timers of multiple modules/addons. @@ -33,6 +35,7 @@ class ExampleTimersManagerConnector: } ``` """ + # Not needed at all def __init__(self, module): # Store timer manager module to be able call it's methods when needed @@ -72,7 +75,11 @@ class ExampleTimersManagerConnector: self._timers_manager_module.timer_stopped(self._module.id) -class TimersManager(OpenPypeModule, ITrayService, ILaunchHookPaths): +class TimersManager( + OpenPypeModule, + ITrayService, + IPluginPaths +): """ Handles about Timers. Should be able to start/stop all timers at once. @@ -176,11 +183,18 @@ class TimersManager(OpenPypeModule, ITrayService, ILaunchHookPaths): ) def get_launch_hook_paths(self): - """Implementation of `ILaunchHookPaths`.""" - return os.path.join( - os.path.dirname(os.path.abspath(__file__)), - "launch_hooks" - ) + """Implementation for applications launch hooks.""" + + return [ + os.path.join(TIMER_MODULE_DIR, "launch_hooks") + ] + + def get_plugin_paths(self): + """Implementation of `IPluginPaths`.""" + + return { + "publish": [os.path.join(TIMER_MODULE_DIR, "plugins", "publish")] + } @staticmethod def get_timer_data_for_context( @@ -388,6 +402,7 @@ class TimersManager(OpenPypeModule, ITrayService, ILaunchHookPaths): logger (logging.Logger): Logger object. Using 'print' if not passed. """ + webserver_url = os.environ.get("OPENPYPE_WEBSERVER_URL") if not webserver_url: msg = "Couldn't find webserver url" @@ -415,6 +430,36 @@ class TimersManager(OpenPypeModule, ITrayService, ILaunchHookPaths): return requests.post(rest_api_url, json=data) + @staticmethod + def stop_timer_with_webserver(logger=None): + """Prepared method for calling stop timers on REST api. + + Args: + logger (logging.Logger): Logger used for logging messages. 
+        """
+
+        webserver_url = os.environ.get("OPENPYPE_WEBSERVER_URL")
+        if not webserver_url:
+            msg = "Couldn't find webserver url"
+            if logger is not None:
+                logger.warning(msg)
+            else:
+                print(msg)
+            return
+
+        rest_api_url = "{}/timers_manager/stop_timer".format(webserver_url)
+        try:
+            import requests
+        except Exception:
+            msg = "Couldn't stop timer ('requests' is not available)"
+            if logger is not None:
+                logger.warning(msg)
+            else:
+                print(msg)
+            return
+
+        return requests.post(rest_api_url)
+
     def on_host_install(self, host, host_name, project_name):
         self.log.debug("Installing task changed callback")
         register_event_callback("taskChanged", self._on_host_task_change)
diff --git a/openpype/modules/webserver/server.py b/openpype/modules/webserver/server.py
index 82b681f406..120925a362 100644
--- a/openpype/modules/webserver/server.py
+++ b/openpype/modules/webserver/server.py
@@ -4,16 +4,16 @@ import asyncio

 from aiohttp import web

-from openpype.lib import PypeLogger
+from openpype.lib import Logger

 from .cors_middleware import cors_middleware

-log = PypeLogger.get_logger("WebServer")
-

 class WebServerManager:
     """Manger that care about web server thread."""

     def __init__(self, port=None, host=None):
+        self._log = None
+
         self.port = port or 8079
         self.host = host or "localhost"

@@ -33,6 +33,12 @@ class WebServerManager:

         self.webserver_thread = WebServerThread(self)

+    @property
+    def log(self):
+        if self._log is None:
+            self._log = Logger.get_logger(self.__class__.__name__)
+        return self._log
+
     @property
     def url(self):
         return "http://{}:{}".format(self.host, self.port)
@@ -51,12 +57,12 @@ class WebServerManager:
         if not self.is_running:
             return
         try:
-            log.debug("Stopping Web server")
+            self.log.debug("Stopping Web server")
             self.webserver_thread.is_running = False
             self.webserver_thread.stop()

         except Exception:
-            log.warning(
+            self.log.warning(
                 "Error has happened during Killing Web server",
                 exc_info=True
             )
@@ -74,7 +80,10 @@ class WebServerManager:

 class WebServerThread(threading.Thread):
     """ Listener for requests in thread."""
+
     def __init__(self, manager):
+        self._log = None
+
         super(WebServerThread, self).__init__()

         self.is_running = False
@@ -84,6 +93,12 @@ class WebServerThread(threading.Thread):
         self.site = None
         self.tasks = []

+    @property
+    def log(self):
+        if self._log is None:
+            self._log = Logger.get_logger(self.__class__.__name__)
+        return self._log
+
     @property
     def port(self):
         return self.manager.port
@@ -96,13 +111,13 @@ class WebServerThread(threading.Thread):
         self.is_running = True

         try:
-            log.info("Starting WebServer server")
+            self.log.info("Starting WebServer server")
             self.loop = asyncio.new_event_loop()  # create new loop for thread
             asyncio.set_event_loop(self.loop)

             self.loop.run_until_complete(self.start_server())
-            log.debug(
+            self.log.debug(
                 "Running Web server on URL: \"localhost:{}\"".format(self.port)
             )
@@ -110,7 +125,7 @@ class WebServerThread(threading.Thread):
             self.loop.run_forever()
         except Exception:
-            log.warning(
+            self.log.warning(
                 "Web Server service has failed", exc_info=True
             )
         finally:
@@ -118,7 +133,7 @@ class WebServerThread(threading.Thread):
         self.is_running = False
         self.manager.thread_stopped()
-        log.info("Web server stopped")
+        self.log.info("Web server stopped")

     async def start_server(self):
         """ Starts runner and TCPsite """
@@ -138,17 +153,17 @@ class WebServerThread(threading.Thread):
         while self.is_running:
             while self.tasks:
                 task = self.tasks.pop(0)
-                log.debug("waiting for task {}".format(task))
+                self.log.debug("waiting for task {}".format(task))
                 await
task - log.debug("returned value {}".format(task.result)) + self.log.debug("returned value {}".format(task.result)) await asyncio.sleep(0.5) - log.debug("Starting shutdown") + self.log.debug("Starting shutdown") await self.site.stop() - log.debug("Site stopped") + self.log.debug("Site stopped") await self.runner.cleanup() - log.debug("Runner stopped") + self.log.debug("Runner stopped") tasks = [ task for task in asyncio.all_tasks() @@ -156,7 +171,9 @@ class WebServerThread(threading.Thread): ] list(map(lambda task: task.cancel(), tasks)) # cancel all the tasks results = await asyncio.gather(*tasks, return_exceptions=True) - log.debug(f'Finished awaiting cancelled tasks, results: {results}...') + self.log.debug( + f'Finished awaiting cancelled tasks, results: {results}...' + ) await self.loop.shutdown_asyncgens() # to really make sure everything else has time to stop await asyncio.sleep(0.07) diff --git a/openpype/modules/webserver/webserver_module.py b/openpype/modules/webserver/webserver_module.py index 686bd27bfd..16861abd29 100644 --- a/openpype/modules/webserver/webserver_module.py +++ b/openpype/modules/webserver/webserver_module.py @@ -53,9 +53,12 @@ class WebServerModule(OpenPypeModule, ITrayService): try: module.webserver_initialization(self.server_manager) except Exception: - self.log.warning(( - "Failed to connect module \"{}\" to webserver." - ).format(module.name)) + self.log.warning( + ( + "Failed to connect module \"{}\" to webserver." + ).format(module.name), + exc_info=True + ) def tray_init(self): self.create_server_manager() diff --git a/openpype/pipeline/anatomy.py b/openpype/pipeline/anatomy.py index 08db4749b3..cb6e07154b 100644 --- a/openpype/pipeline/anatomy.py +++ b/openpype/pipeline/anatomy.py @@ -14,9 +14,9 @@ from openpype.lib.path_templates import ( TemplatesDict, FormatObject, ) -from openpype.lib.log import PypeLogger +from openpype.lib.log import Logger -log = PypeLogger.get_logger(__name__) +log = Logger.get_logger(__name__) class ProjectNotSet(Exception): diff --git a/openpype/pipeline/context_tools.py b/openpype/pipeline/context_tools.py index e719e46514..00fe353208 100644 --- a/openpype/pipeline/context_tools.py +++ b/openpype/pipeline/context_tools.py @@ -10,10 +10,23 @@ import pyblish.api from pyblish.lib import MessageHandler import openpype +from openpype.client import ( + get_project, + get_asset_by_id, + get_asset_by_name, + version_is_latest, +) +from openpype.lib.events import emit_event from openpype.modules import load_modules, ModulesManager from openpype.settings import get_project_settings -from openpype.lib import filter_pyblish_plugins + +from .publish.lib import filter_pyblish_plugins from .anatomy import Anatomy +from .template_data import get_template_data_with_names +from .workfile import ( + get_workfile_template_key, + get_custom_workfile_template_by_string_context, +) from . import ( legacy_io, register_loader_plugin_path, @@ -240,29 +253,7 @@ def registered_host(): def deregister_host(): - _registered_host["_"] = default_host() - - -def default_host(): - """A default host, in place of anything better - - This may be considered as reference for the - interface a host must implement. It also ensures - that the system runs, even when nothing is there - to support it. 
- - """ - - host = types.ModuleType("defaultHost") - - def ls(): - return list() - - host.__dict__.update({ - "ls": ls - }) - - return host + _registered_host["_"] = None def debug_host(): @@ -304,3 +295,254 @@ def debug_host(): }) return host + + +def get_current_project(fields=None): + """Helper function to get project document based on global Session. + + This function should be called only in process where host is installed. + + Returns: + dict: Project document. + None: Project is not set. + """ + + project_name = legacy_io.active_project() + return get_project(project_name, fields=fields) + + +def get_current_project_asset(asset_name=None, asset_id=None, fields=None): + """Helper function to get asset document based on global Session. + + This function should be called only in process where host is installed. + + Asset is found out based on passed asset name or id (not both). Asset name + is not used for filtering if asset id is passed. When both asset name and + id are missing then asset name from current process is used. + + Args: + asset_name (str): Name of asset used for filter. + asset_id (Union[str, ObjectId]): Asset document id. If entered then + is used as only filter. + fields (Union[List[str], None]): Limit returned data of asset documents + to specific keys. + + Returns: + dict: Asset document. + None: Asset is not set or not exist. + """ + + project_name = legacy_io.active_project() + if asset_id: + return get_asset_by_id(project_name, asset_id, fields=fields) + + if not asset_name: + asset_name = legacy_io.Session.get("AVALON_ASSET") + # Skip if is not set even on context + if not asset_name: + return None + return get_asset_by_name(project_name, asset_name, fields=fields) + + +def is_representation_from_latest(representation): + """Return whether the representation is from latest version + + Args: + representation (dict): The representation document from the database. + + Returns: + bool: Whether the representation is of latest version. + """ + + project_name = legacy_io.active_project() + return version_is_latest(project_name, representation["parent"]) + + +def get_template_data_from_session(session=None, system_settings=None): + """Template data for template fill from session keys. + + Args: + session (Union[Dict[str, str], None]): The Session to use. If not + provided use the currently active global Session. + system_settings (Union[Dict[str, Any], Any]): Prepared system settings. + Optional are auto received if not passed. + + Returns: + Dict[str, Any]: All available data from session. + """ + + if session is None: + session = legacy_io.Session + + project_name = session["AVALON_PROJECT"] + asset_name = session["AVALON_ASSET"] + task_name = session["AVALON_TASK"] + host_name = session["AVALON_APP"] + + return get_template_data_with_names( + project_name, asset_name, task_name, host_name, system_settings + ) + + +def get_workdir_from_session(session=None, template_key=None): + """Template data for template fill from session keys. + + Args: + session (Union[Dict[str, str], None]): The Session to use. If not + provided use the currently active global Session. + template_key (str): Prepared template key from which workdir is + calculated. + + Returns: + str: Workdir path. 
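+
+    Example:
+        A minimal illustrative sketch; the returned path depends on the
+        project anatomy templates, so the value shown below is hypothetical:
+
+            workdir = get_workdir_from_session()
+            # e.g. "P:/projects/MyProject/assets/hero/work/modeling"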
+ """ + + if session is None: + session = legacy_io.Session + project_name = session["AVALON_PROJECT"] + host_name = session["AVALON_APP"] + anatomy = Anatomy(project_name) + template_data = get_template_data_from_session(session) + anatomy_filled = anatomy.format(template_data) + + if not template_key: + task_type = template_data["task"]["type"] + template_key = get_workfile_template_key( + task_type, + host_name, + project_name=project_name + ) + path = anatomy_filled[template_key]["folder"] + if path: + path = os.path.normpath(path) + return path + + +def get_custom_workfile_template_from_session( + session=None, project_settings=None +): + """Filter and fill workfile template profiles by current context. + + Current context is defined by `legacy_io.Session`. That's why this + function should be used only inside host where context is set and stable. + + Args: + session (Union[None, Dict[str, str]]): Session from which are taken + data. + project_settings(Dict[str, Any]): Template profiles from settings. + + Returns: + str: Path to template or None if none of profiles match current + context. (Existence of formatted path is not validated.) + """ + + if session is None: + session = legacy_io.Session + + return get_custom_workfile_template_by_string_context( + session["AVALON_PROJECT"], + session["AVALON_ASSET"], + session["AVALON_TASK"], + session["AVALON_APP"], + project_settings=project_settings + ) + + +def compute_session_changes( + session, asset_doc, task_name, template_key=None +): + """Compute the changes for a session object on task under asset. + + Function does not change the session object, only returns changes. + + Args: + session (Dict[str, str]): The initial session to compute changes to. + This is required for computing the full Work Directory, as that + also depends on the values that haven't changed. + asset_doc (Dict[str, Any]): Asset document to switch to. + task_name (str): Name of task to switch to. + template_key (Union[str, None]): Prepare workfile template key in + anatomy templates. + + Returns: + Dict[str, str]: Changes in the Session dictionary. + """ + + changes = {} + + # Get asset document and asset + if not asset_doc: + task_name = None + asset_name = None + else: + asset_name = asset_doc["name"] + + # Detect any changes compared session + mapping = { + "AVALON_ASSET": asset_name, + "AVALON_TASK": task_name, + } + changes = { + key: value + for key, value in mapping.items() + if value != session.get(key) + } + if not changes: + return changes + + # Compute work directory (with the temporary changed session so far) + changed_session = session.copy() + changed_session.update(changes) + + workdir = None + if asset_doc: + workdir = get_workdir_from_session( + changed_session, template_key + ) + + changes["AVALON_WORKDIR"] = workdir + + return changes + + +def change_current_context(asset_doc, task_name, template_key=None): + """Update active Session to a new task work area. + + This updates the live Session to a different task under asset. + + Args: + asset_doc (Dict[str, Any]): The asset document to set. + task_name (str): The task to set under asset. + template_key (Union[str, None]): Prepared template key to be used for + workfile template in Anatomy. + + Returns: + Dict[str, str]: The changed key, values in the current Session. + """ + + changes = compute_session_changes( + legacy_io.Session, + asset_doc, + task_name, + template_key=template_key + ) + + # Update the Session and environments. Pop from environments all keys with + # value set to None. 
+ for key, value in changes.items(): + legacy_io.Session[key] = value + if value is None: + os.environ.pop(key, None) + else: + os.environ[key] = value + + data = changes.copy() + # Convert env keys to human readable keys + data["project_name"] = legacy_io.Session["AVALON_PROJECT"] + data["asset_name"] = legacy_io.Session["AVALON_ASSET"] + data["task_name"] = legacy_io.Session["AVALON_TASK"] + + # Emit session change + emit_event("taskChanged", data) + + return changes diff --git a/openpype/pipeline/create/__init__.py b/openpype/pipeline/create/__init__.py index 1beeb4267b..4b91951a08 100644 --- a/openpype/pipeline/create/__init__.py +++ b/openpype/pipeline/create/__init__.py @@ -1,15 +1,25 @@ from .constants import ( - SUBSET_NAME_ALLOWED_SYMBOLS + SUBSET_NAME_ALLOWED_SYMBOLS, + DEFAULT_SUBSET_TEMPLATE, ) + +from .subset_name import ( + TaskNotSetError, + get_subset_name, +) + from .creator_plugins import ( CreatorError, BaseCreator, Creator, AutoCreator, + HiddenCreator, + + discover_legacy_creator_plugins, + get_legacy_creator_by_name, discover_creator_plugins, - discover_legacy_creator_plugins, register_creator_plugin, deregister_creator_plugin, register_creator_plugin_path, @@ -29,15 +39,22 @@ from .legacy_create import ( __all__ = ( "SUBSET_NAME_ALLOWED_SYMBOLS", + "DEFAULT_SUBSET_TEMPLATE", + + "TaskNotSetError", + "get_subset_name", "CreatorError", "BaseCreator", "Creator", "AutoCreator", + "HiddenCreator", + + "discover_legacy_creator_plugins", + "get_legacy_creator_by_name", "discover_creator_plugins", - "discover_legacy_creator_plugins", "register_creator_plugin", "deregister_creator_plugin", "register_creator_plugin_path", diff --git a/openpype/pipeline/create/constants.py b/openpype/pipeline/create/constants.py index bfbbccfd12..3af9651947 100644 --- a/openpype/pipeline/create/constants.py +++ b/openpype/pipeline/create/constants.py @@ -1,6 +1,8 @@ SUBSET_NAME_ALLOWED_SYMBOLS = "a-zA-Z0-9_." 
+DEFAULT_SUBSET_TEMPLATE = "{family}{Variant}" __all__ = ( "SUBSET_NAME_ALLOWED_SYMBOLS", + "DEFAULT_SUBSET_TEMPLATE", ) diff --git a/openpype/pipeline/create/context.py b/openpype/pipeline/create/context.py index 9b55c3b21e..eaaed39357 100644 --- a/openpype/pipeline/create/context.py +++ b/openpype/pipeline/create/context.py @@ -6,6 +6,7 @@ import inspect from uuid import uuid4 from contextlib import contextmanager +from openpype.client import get_assets from openpype.host import INewPublisher from openpype.pipeline import legacy_io from openpype.pipeline.mongodb import ( @@ -1082,15 +1083,10 @@ class CreateContext: for asset_name in task_names_by_asset_name.keys() if asset_name is not None ] - asset_docs = list(self.dbcon.find( - { - "type": "asset", - "name": {"$in": asset_names} - }, - { - "name": True, - "data.tasks": True - } + asset_docs = list(get_assets( + self.project_name, + asset_names=asset_names, + fields=["name", "data.tasks"] )) task_names_by_asset_name = {} diff --git a/openpype/pipeline/create/creator_plugins.py b/openpype/pipeline/create/creator_plugins.py index 52c76db5ef..9e1530a6a7 100644 --- a/openpype/pipeline/create/creator_plugins.py +++ b/openpype/pipeline/create/creator_plugins.py @@ -1,3 +1,4 @@ +import os import copy from abc import ( @@ -7,10 +8,8 @@ from abc import ( ) import six -from openpype.lib import ( - get_subset_name_with_asset_doc, - set_plugin_attributes_from_settings, -) +from openpype.settings import get_system_settings, get_project_settings +from openpype.lib import get_subset_name_with_asset_doc from openpype.pipeline.plugin_discover import ( discover, register_plugin, @@ -416,6 +415,12 @@ class Creator(BaseCreator): return self.pre_create_attr_defs +class HiddenCreator(BaseCreator): + @abstractmethod + def create(self, instance_data, source_data): + pass + + class AutoCreator(BaseCreator): """Creator which is automatically triggered without user interaction. @@ -432,11 +437,55 @@ def discover_creator_plugins(): def discover_legacy_creator_plugins(): + from openpype.lib import Logger + + log = Logger.get_logger("CreatorDiscover") + plugins = discover(LegacyCreator) - set_plugin_attributes_from_settings(plugins, LegacyCreator) + project_name = os.environ.get("AVALON_PROJECT") + system_settings = get_system_settings() + project_settings = get_project_settings(project_name) + for plugin in plugins: + try: + plugin.apply_settings(project_settings, system_settings) + except Exception: + log.warning( + "Failed to apply settings to loader {}".format( + plugin.__name__ + ), + exc_info=True + ) return plugins +def get_legacy_creator_by_name(creator_name, case_sensitive=False): + """Find creator plugin by name. + + Args: + creator_name (str): Name of creator class that should be returned. + case_sensitive (bool): Match of creator plugin name is case sensitive. + Set to `False` by default. + + Returns: + Creator: Return first matching plugin or `None`. 
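+
+    Example:
+        Illustrative sketch only; "CreateRender" is a hypothetical creator
+        class name used to demonstrate the case insensitive lookup:
+
+            creator = get_legacy_creator_by_name("createrender")
+            if creator is None:
+                raise ValueError("Creator plugin 'CreateRender' was not found")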
+ """ + + # Lower input creator name if is not case sensitive + if not case_sensitive: + creator_name = creator_name.lower() + + for creator_plugin in discover_legacy_creator_plugins(): + _creator_name = creator_plugin.__name__ + + # Lower creator plugin name if is not case sensitive + if not case_sensitive: + _creator_name = _creator_name.lower() + + if _creator_name == creator_name: + return creator_plugin + return None + + def register_creator_plugin(plugin): if issubclass(plugin, BaseCreator): register_plugin(BaseCreator, plugin) diff --git a/openpype/pipeline/create/legacy_create.py b/openpype/pipeline/create/legacy_create.py index 46e0e3d663..2764b3cb95 100644 --- a/openpype/pipeline/create/legacy_create.py +++ b/openpype/pipeline/create/legacy_create.py @@ -5,6 +5,7 @@ Renamed classes and functions - 'create' -> 'legacy_create' """ +import os import logging import collections @@ -37,6 +38,48 @@ class LegacyCreator(object): self.data.update(data or {}) + @classmethod + def apply_settings(cls, project_settings, system_settings): + """Apply OpenPype settings to a plugin class.""" + + host_name = os.environ.get("AVALON_APP") + plugin_type = "create" + plugin_type_settings = ( + project_settings + .get(host_name, {}) + .get(plugin_type, {}) + ) + global_type_settings = ( + project_settings + .get("global", {}) + .get(plugin_type, {}) + ) + if not global_type_settings and not plugin_type_settings: + return + + plugin_name = cls.__name__ + + plugin_settings = None + # Look for plugin settings in host specific settings + if plugin_name in plugin_type_settings: + plugin_settings = plugin_type_settings[plugin_name] + + # Look for plugin settings in global settings + elif plugin_name in global_type_settings: + plugin_settings = global_type_settings[plugin_name] + + if not plugin_settings: + return + + print(">>> We have preset for {}".format(plugin_name)) + for option, value in plugin_settings.items(): + if option == "enabled" and value is False: + setattr(cls, "active", False) + print(" - is disabled by preset") + else: + setattr(cls, option, value) + print(" - setting `{}`: `{}`".format(option, value)) + def process(self): pass diff --git a/openpype/pipeline/create/subset_name.py b/openpype/pipeline/create/subset_name.py new file mode 100644 index 0000000000..f508263708 --- /dev/null +++ b/openpype/pipeline/create/subset_name.py @@ -0,0 +1,109 @@ +import os + +from openpype.settings import get_project_settings +from openpype.lib import filter_profiles, prepare_template_data +from openpype.pipeline import legacy_io + +from .constants import DEFAULT_SUBSET_TEMPLATE + + +class TaskNotSetError(KeyError): + def __init__(self, msg=None): + if not msg: + msg = "Creator's subset name template requires task name." + super(TaskNotSetError, self).__init__(msg) + + +def get_subset_name( + family, + variant, + task_name, + asset_doc, + project_name=None, + host_name=None, + default_template=None, + dynamic_data=None, + project_settings=None +): + """Calculate subset name based on passed context and OpenPype settings. + + Subst name templates are defined in `project_settings/global/tools/creator + /subset_name_profiles` where are profiles with host name, family, task name + and task type filters. If context does not match any profile then + `DEFAULT_SUBSET_TEMPLATE` is used as default template. + + That's main reason why so many arguments are required to calculate subset + name. + + Args: + family (str): Instance family. + variant (str): In most of cases it is user input during creation. 
+ task_name (str): Task name on which context is instance created. + asset_doc (dict): Queried asset document with it's tasks in data. + Used to get task type. + project_name (str): Name of project on which is instance created. + Important for project settings that are loaded. + host_name (str): One of filtering criteria for template profile + filters. + default_template (str): Default template if any profile does not match + passed context. Constant 'DEFAULT_SUBSET_TEMPLATE' is used if + is not passed. + dynamic_data (dict): Dynamic data specific for a creator which creates + instance. + dbcon (AvalonMongoDB): Mongo connection to be able query asset document + if 'asset_doc' is not passed. + """ + + if not family: + return "" + + if not host_name: + host_name = os.environ["AVALON_APP"] + + # Use only last part of class family value split by dot (`.`) + family = family.rsplit(".", 1)[-1] + + if project_name is None: + project_name = legacy_io.Session["AVALON_PROJECT"] + + asset_tasks = asset_doc.get("data", {}).get("tasks") or {} + task_info = asset_tasks.get(task_name) or {} + task_type = task_info.get("type") + + # Get settings + if not project_settings: + project_settings = get_project_settings(project_name) + tools_settings = project_settings["global"]["tools"] + profiles = tools_settings["creator"]["subset_name_profiles"] + filtering_criteria = { + "families": family, + "hosts": host_name, + "tasks": task_name, + "task_types": task_type + } + + matching_profile = filter_profiles(profiles, filtering_criteria) + template = None + if matching_profile: + template = matching_profile["template"] + + # Make sure template is set (matching may have empty string) + if not template: + template = default_template or DEFAULT_SUBSET_TEMPLATE + + # Simple check of task name existence for template with {task} in + # - missing task should be possible only in Standalone publisher + if not task_name and "{task" in template.lower(): + raise TaskNotSetError() + + fill_pairs = { + "variant": variant, + "family": family, + "task": task_name + } + if dynamic_data: + # Dynamic data may override default values + for key, value in dynamic_data.items(): + fill_pairs[key] = value + + return template.format(**prepare_template_data(fill_pairs)) diff --git a/openpype/pipeline/editorial.py b/openpype/pipeline/editorial.py index f62a1842e0..564d78ea6f 100644 --- a/openpype/pipeline/editorial.py +++ b/openpype/pipeline/editorial.py @@ -263,16 +263,17 @@ def get_media_range_with_retimes(otio_clip, handle_start, handle_end): "retime": True, "speed": time_scalar, "timewarps": time_warp_nodes, - "handleStart": round(handle_start), - "handleEnd": round(handle_end) + "handleStart": int(round(handle_start)), + "handleEnd": int(round(handle_end)) } } returning_dict = { "mediaIn": media_in_trimmed, "mediaOut": media_out_trimmed, - "handleStart": round(handle_start), - "handleEnd": round(handle_end) + "handleStart": int(round(handle_start)), + "handleEnd": int(round(handle_end)), + "speed": time_scalar } # add version data only if retime diff --git a/openpype/pipeline/load/__init__.py b/openpype/pipeline/load/__init__.py index 6e7612d4c1..b6bdd13d50 100644 --- a/openpype/pipeline/load/__init__.py +++ b/openpype/pipeline/load/__init__.py @@ -16,6 +16,7 @@ from .utils import ( switch_container, get_loader_identifier, + get_loaders_by_name, get_representation_path_from_context, get_representation_path, @@ -24,6 +25,10 @@ from .utils import ( loaders_from_repre_context, loaders_from_representation, + + any_outdated_containers, + 
get_outdated_containers, + filter_containers, ) from .plugins import ( @@ -57,6 +62,7 @@ __all__ = ( "switch_container", "get_loader_identifier", + "get_loaders_by_name", "get_representation_path_from_context", "get_representation_path", @@ -66,6 +72,10 @@ __all__ = ( "loaders_from_repre_context", "loaders_from_representation", + "any_outdated_containers", + "get_outdated_containers", + "filter_containers", + # plugins.py "LoaderPlugin", "SubsetLoaderPlugin", diff --git a/openpype/pipeline/load/plugins.py b/openpype/pipeline/load/plugins.py index a30a2188a4..8cba8d8217 100644 --- a/openpype/pipeline/load/plugins.py +++ b/openpype/pipeline/load/plugins.py @@ -1,6 +1,8 @@ +import os import logging -from openpype.lib import set_plugin_attributes_from_settings +from openpype.settings import get_system_settings, get_project_settings +from openpype.pipeline import legacy_io from openpype.pipeline.plugin_discover import ( discover, register_plugin, @@ -37,6 +39,46 @@ class LoaderPlugin(list): def __init__(self, context): self.fname = self.filepath_from_context(context) + @classmethod + def apply_settings(cls, project_settings, system_settings): + host_name = os.environ.get("AVALON_APP") + plugin_type = "load" + plugin_type_settings = ( + project_settings + .get(host_name, {}) + .get(plugin_type, {}) + ) + global_type_settings = ( + project_settings + .get("global", {}) + .get(plugin_type, {}) + ) + if not global_type_settings and not plugin_type_settings: + return + + plugin_name = cls.__name__ + + plugin_settings = None + # Look for plugin settings in host specific settings + if plugin_name in plugin_type_settings: + plugin_settings = plugin_type_settings[plugin_name] + + # Look for plugin settings in global settings + elif plugin_name in global_type_settings: + plugin_settings = global_type_settings[plugin_name] + + if not plugin_settings: + return + + print(">>> We have preset for {}".format(plugin_name)) + for option, value in plugin_settings.items(): + if option == "enabled" and value is False: + setattr(cls, "active", False) + print(" - is disabled by preset") + else: + setattr(cls, option, value) + print(" - setting `{}`: `{}`".format(option, value)) + @classmethod def get_representations(cls): return cls.representations @@ -110,9 +152,25 @@ class SubsetLoaderPlugin(LoaderPlugin): pass -def discover_loader_plugins(): +def discover_loader_plugins(project_name=None): + from openpype.lib import Logger + + log = Logger.get_logger("LoaderDiscover") plugins = discover(LoaderPlugin) - set_plugin_attributes_from_settings(plugins, LoaderPlugin) + if not project_name: + project_name = legacy_io.active_project() + system_settings = get_system_settings() + project_settings = get_project_settings(project_name) + for plugin in plugins: + try: + plugin.apply_settings(project_settings, system_settings) + except Exception: + log.warning( + "Failed to apply settings to loader {}".format( + plugin.__name__ + ), + exc_info=True + ) return plugins diff --git a/openpype/pipeline/load/utils.py b/openpype/pipeline/load/utils.py index 2c213aff6f..99d6876d4b 100644 --- a/openpype/pipeline/load/utils.py +++ b/openpype/pipeline/load/utils.py @@ -4,8 +4,10 @@ import copy import getpass import logging import inspect +import collections import numbers +from openpype.host import ILoadHost from openpype.client import ( get_project, get_assets, @@ -15,6 +17,7 @@ from openpype.client import ( get_last_version_by_subset_id, get_hero_version_by_subset_id, get_version_by_name, + get_last_versions, get_representations, 
get_representation_by_id, get_representation_by_name, @@ -28,6 +31,11 @@ from openpype.pipeline import ( log = logging.getLogger(__name__) +ContainersFilterResult = collections.namedtuple( + "ContainersFilterResult", + ["latest", "outdated", "not_found", "invalid"] +) + class HeroVersionType(object): def __init__(self, version): @@ -214,13 +222,20 @@ def get_representation_context(representation): project_name, representation ) + if not representation: + raise AssertionError("Representation was not found in database") + version, subset, asset, project = get_representation_parents( project_name, representation ) - - assert all([representation, version, subset, asset, project]), ( - "This is a bug" - ) + if not version: + raise AssertionError("Version was not found in database") + if not subset: + raise AssertionError("Subset was not found in database") + if not asset: + raise AssertionError("Asset was not found in database") + if not project: + raise AssertionError("Project was not found in database") context = { "project": { @@ -361,6 +376,20 @@ def get_loader_identifier(loader): return loader.__name__ +def get_loaders_by_name(): + from .plugins import discover_loader_plugins + + loaders_by_name = {} + for loader in discover_loader_plugins(): + loader_name = loader.__name__ + if loader_name in loaders_by_name: + raise KeyError( + "Duplicated loader name {}!".format(loader_name) + ) + loaders_by_name[loader_name] = loader + return loaders_by_name + + def _get_container_loader(container): """Return the Loader corresponding to the container""" from .plugins import discover_loader_plugins @@ -685,3 +714,164 @@ def loaders_from_representation(loaders, representation): context = get_representation_context(representation) return loaders_from_repre_context(loaders, context) + + +def any_outdated_containers(host=None, project_name=None): + """Check if there are any outdated containers in the scene.""" + + if get_outdated_containers(host, project_name): + return True + return False + + +def get_outdated_containers(host=None, project_name=None): + """Collect outdated containers from host scene. + + Currently registered host and project in global session are used if + arguments are not passed. + + Args: + host (ModuleType): Host implementation with 'ls' function available. + project_name (str): Name of project whose context is used. + """ + + if host is None: + from openpype.pipeline import registered_host + host = registered_host() + + if project_name is None: + project_name = legacy_io.active_project() + + if isinstance(host, ILoadHost): + containers = host.get_containers() + else: + containers = host.ls() + return filter_containers(containers, project_name).outdated + + +def filter_containers(containers, project_name): + """Filter containers and split them into 4 categories. + + Categories are 'latest', 'outdated', 'invalid' and 'not_found'. + The 'latest' containers are from the last version, 'outdated' are not, + 'invalid' are invalid containers (invalid content) and 'not_found' has + some missing entity in the database. + + Args: + containers (Iterable[dict]): List of containers referenced in the scene. + project_name (str): Name of project in which context should look for + versions. + + Returns: + ContainersFilterResult: Named tuple with 'latest', 'outdated', + 'invalid' and 'not_found' containers.
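+
+    Example:
+        A minimal sketch of consuming the result; 'host' stands for any
+        registered host integration with an 'ls' implementation:
+
+            result = filter_containers(host.ls(), project_name)
+            for container in result.outdated:
+                print(container["objectName"])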
+ """ + + # Make sure containers is list that won't change + containers = list(containers) + + outdated_containers = [] + uptodate_containers = [] + not_found_containers = [] + invalid_containers = [] + output = ContainersFilterResult( + uptodate_containers, + outdated_containers, + not_found_containers, + invalid_containers + ) + # Query representation docs to get it's version ids + repre_ids = { + container["representation"] + for container in containers + if container["representation"] + } + if not repre_ids: + if containers: + invalid_containers.extend(containers) + return output + + repre_docs = get_representations( + project_name, + representation_ids=repre_ids, + fields=["_id", "parent"] + ) + # Store representations by stringified representation id + repre_docs_by_str_id = {} + repre_docs_by_version_id = collections.defaultdict(list) + for repre_doc in repre_docs: + repre_id = str(repre_doc["_id"]) + version_id = repre_doc["parent"] + repre_docs_by_str_id[repre_id] = repre_doc + repre_docs_by_version_id[version_id].append(repre_doc) + + # Query version docs to get it's subset ids + # - also query hero version to be able identify if representation + # belongs to existing version + version_docs = get_versions( + project_name, + version_ids=repre_docs_by_version_id.keys(), + hero=True, + fields=["_id", "parent", "type"] + ) + verisons_by_id = {} + versions_by_subset_id = collections.defaultdict(list) + hero_version_ids = set() + for version_doc in version_docs: + version_id = version_doc["_id"] + # Store versions by their ids + verisons_by_id[version_id] = version_doc + # There's no need to query subsets for hero versions + # - they are considered as latest? + if version_doc["type"] == "hero_version": + hero_version_ids.add(version_id) + continue + subset_id = version_doc["parent"] + versions_by_subset_id[subset_id].append(version_doc) + + last_versions = get_last_versions( + project_name, + subset_ids=versions_by_subset_id.keys(), + fields=["_id"] + ) + # Figure out which versions are outdated + outdated_version_ids = set() + for subset_id, last_version_doc in last_versions.items(): + for version_doc in versions_by_subset_id[subset_id]: + version_id = version_doc["_id"] + if version_id != last_version_doc["_id"]: + outdated_version_ids.add(version_id) + + # Based on all collected data figure out which containers are outdated + # - log out if there are missing representation or version documents + for container in containers: + container_name = container["objectName"] + repre_id = container["representation"] + if not repre_id: + invalid_containers.append(container) + continue + + repre_doc = repre_docs_by_str_id.get(repre_id) + if not repre_doc: + log.debug(( + "Container '{}' has an invalid representation." + " It is missing in the database." + ).format(container_name)) + not_found_containers.append(container) + continue + + version_id = repre_doc["parent"] + if version_id in outdated_version_ids: + outdated_containers.append(container) + + elif version_id not in verisons_by_id: + log.debug(( + "Representation on container '{}' has an invalid version." + " It is missing in the database." 
+ ).format(container_name)) + not_found_containers.append(container) + + else: + uptodate_containers.append(container) + + return output diff --git a/openpype/pipeline/mongodb.py b/openpype/pipeline/mongodb.py index dab5bb9e13..be2b67a5e7 100644 --- a/openpype/pipeline/mongodb.py +++ b/openpype/pipeline/mongodb.py @@ -5,6 +5,8 @@ import logging import pymongo from uuid import uuid4 +from openpype.client import OpenPypeMongoConnection + from . import schema @@ -156,8 +158,6 @@ class AvalonMongoDB: @property def mongo_client(self): - from openpype.lib import OpenPypeMongoConnection - return OpenPypeMongoConnection.get_mongo_client() @property diff --git a/openpype/pipeline/publish/abstract_collect_render.py b/openpype/pipeline/publish/abstract_collect_render.py index 2e537227c3..ccb2415346 100644 --- a/openpype/pipeline/publish/abstract_collect_render.py +++ b/openpype/pipeline/publish/abstract_collect_render.py @@ -63,6 +63,8 @@ class RenderInstance(object): family = attr.ib(default="renderlayer") families = attr.ib(default=["renderlayer"]) # list of families + # True if should be rendered on farm, eg not integrate + farm = attr.ib(default=False) # format settings multipartExr = attr.ib(default=False) # flag for multipart exrs diff --git a/openpype/pipeline/publish/lib.py b/openpype/pipeline/publish/lib.py index 739b2c8806..9060a0bf4b 100644 --- a/openpype/pipeline/publish/lib.py +++ b/openpype/pipeline/publish/lib.py @@ -6,6 +6,10 @@ import xml.etree.ElementTree import six import pyblish.plugin +import pyblish.api + +from openpype.lib import Logger +from openpype.settings import get_project_settings, get_system_settings class DiscoverResult: @@ -180,3 +184,132 @@ def publish_plugins_discover(paths=None): result.plugins = plugins return result + + +def filter_pyblish_plugins(plugins): + """Pyblish plugin filter which applies OpenPype settings. + + Apply OpenPype settings on discovered plugins. On plugin with implemented + class method 'def apply_settings(cls, project_settings, system_settings)' + is called the method. Default behavior looks for plugin name and current + host name to look for + + Args: + plugins (List[pyblish.plugin.Plugin]): Discovered plugins on which + are applied settings. + """ + + log = Logger.get_logger("filter_pyblish_plugins") + + # TODO: Don't use host from 'pyblish.api' but from defined host by us. 
+ # - kept becau on farm is probably used host 'shell' which propably + # affect how settings are applied there + host = pyblish.api.current_host() + project_name = os.environ.get("AVALON_PROJECT") + + project_setting = get_project_settings(project_name) + system_settings = get_system_settings() + + # iterate over plugins + for plugin in plugins[:]: + if hasattr(plugin, "apply_settings"): + try: + # Use classmethod 'apply_settings' + # - can be used to target settings from custom settings place + # - skip default behavior when successful + plugin.apply_settings(project_setting, system_settings) + continue + + except Exception: + log.warning( + ( + "Failed to apply settings on plugin {}" + ).format(plugin.__name__), + exc_info=True + ) + + try: + config_data = ( + project_setting + [host] + ["publish"] + [plugin.__name__] + ) + except KeyError: + # host determined from path + file = os.path.normpath(inspect.getsourcefile(plugin)) + file = os.path.normpath(file) + + split_path = file.split(os.path.sep) + if len(split_path) < 4: + log.warning( + 'plugin path too short to extract host {}'.format(file) + ) + continue + + host_from_file = split_path[-4] + plugin_kind = split_path[-2] + + # TODO: change after all plugins are moved one level up + if host_from_file == "openpype": + host_from_file = "global" + + try: + config_data = ( + project_setting + [host_from_file] + [plugin_kind] + [plugin.__name__] + ) + except KeyError: + continue + + for option, value in config_data.items(): + if option == "enabled" and value is False: + log.info('removing plugin {}'.format(plugin.__name__)) + plugins.remove(plugin) + else: + log.info('setting {}:{} on plugin {}'.format( + option, value, plugin.__name__)) + + setattr(plugin, option, value) + + +def find_close_plugin(close_plugin_name, log): + if close_plugin_name: + plugins = pyblish.api.discover() + for plugin in plugins: + if plugin.__name__ == close_plugin_name: + return plugin + + log.debug("Close plugin not found, app might not close.") + + +def remote_publish(log, close_plugin_name=None, raise_error=False): + """Loops through all plugins, logs to console. Used for tests. + + Args: + log (openpype.lib.Logger) + close_plugin_name (str): name of plugin with responsibility to + close host app + """ + # Error exit as soon as any error occurs. + error_format = "Failed {plugin.__name__}: {error} -- {error.traceback}" + + close_plugin = find_close_plugin(close_plugin_name, log) + + for result in pyblish.util.publish_iter(): + for record in result["records"]: + log.info("{}: {}".format( + result["plugin"].label, record.msg)) + + if result["error"]: + error_message = error_format.format(**result) + log.error(error_message) + if close_plugin: # close host app explicitly after error + context = pyblish.api.Context() + close_plugin().process(context) + if raise_error: + # Fatal Error is because of Deadline + error_message = "Fatal Error: " + error_format.format(**result) + raise RuntimeError(error_message) diff --git a/openpype/pipeline/template_data.py b/openpype/pipeline/template_data.py new file mode 100644 index 0000000000..824a25127c --- /dev/null +++ b/openpype/pipeline/template_data.py @@ -0,0 +1,228 @@ +from openpype.client import get_project, get_asset_by_name +from openpype.settings import get_system_settings +from openpype.lib.local_settings import get_openpype_username + + +def get_general_template_data(system_settings=None): + """General template data based on system settings or machine. 
+ + Output contains formatting keys: + - 'studio[name]' - Studio name filled from system settings + - 'studio[code]' - Studio code filled from system settings + - 'user' - User's name using 'get_openpype_username' + + Args: + system_settings (Dict[str, Any]): System settings. + """ + + if not system_settings: + system_settings = get_system_settings() + studio_name = system_settings["general"]["studio_name"] + studio_code = system_settings["general"]["studio_code"] + return { + "studio": { + "name": studio_name, + "code": studio_code + }, + "user": get_openpype_username() + } + + +def get_project_template_data(project_doc): + """Extract data from project document that are used in templates. + + Project document must have 'name' and (at this moment) optional + key 'data.code'. + + Output contains formatting keys: + - 'project[name]' - Project name + - 'project[code]' - Project code + + Args: + project_doc (Dict[str, Any]): Queried project document. + + Returns: + Dict[str, Dict[str, str]]: Template data based on project document. + """ + + project_code = project_doc.get("data", {}).get("code") + return { + "project": { + "name": project_doc["name"], + "code": project_code + } + } + + +def get_asset_template_data(asset_doc, project_name): + """Extract data from asset document that are used in templates. + + Output dictionary contains keys: + - 'asset' - asset name + - 'hierarchy' - parent asset names joined with '/' + - 'parent' - direct parent name, project name used if is under project + + Required document fields: + Asset: 'name', 'data.parents' + + Args: + asset_doc (Dict[str, Any]): Queried asset document. + project_name (str): Is used for 'parent' key if asset doc does not have + any. + + Returns: + Dict[str, str]: Data that are based on asset document and can be used + in templates. + """ + + asset_parents = asset_doc["data"]["parents"] + hierarchy = "/".join(asset_parents) + if asset_parents: + parent_name = asset_parents[-1] + else: + parent_name = project_name + + return { + "asset": asset_doc["name"], + "hierarchy": hierarchy, + "parent": parent_name + } + + +def get_task_type(asset_doc, task_name): + """Get task type based on asset document and task name. + + Required document fields: + Asset: 'data.tasks' + + Args: + asset_doc (Dict[str, Any]): Queried asset document. + task_name (str): Task name which is under asset. + + Returns: + str: Task type name. + None: Task was not found on asset document. + """ + + asset_tasks_info = asset_doc["data"]["tasks"] + return asset_tasks_info.get(task_name, {}).get("type") + + +def get_task_template_data(project_doc, asset_doc, task_name): + """"Extract task specific data from project and asset documents. + + Required document fields: + Project: 'config.tasks' + Asset: 'data.tasks'. + + Args: + project_doc (Dict[str, Any]): Queried project document. + asset_doc (Dict[str, Any]): Queried asset document. + tas_name (str): Name of task for which data should be returned. + + Returns: + Dict[str, Dict[str, str]]: Template data + """ + + project_task_types = project_doc["config"]["tasks"] + task_type = get_task_type(asset_doc, task_name) + task_code = project_task_types.get(task_type, {}).get("short_name") + + return { + "task": { + "name": task_name, + "type": task_type, + "short": task_code, + } + } + + +def get_template_data( + project_doc, + asset_doc=None, + task_name=None, + host_name=None, + system_settings=None +): + """Prepare data for templates filling from entered documents and info. 
+ + This function does not "auto fill" any values except system settings and + it's on purpose. + + Universal function to receive template data from passed arguments. Only + required argument is project document all other arguments are optional + and their values won't be added to template data if are not passed. + + Required document fields: + Project: 'name', 'data.code', 'config.tasks' + Asset: 'name', 'data.parents', 'data.tasks' + + Args: + project_doc (Dict[str, Any]): Mongo document of project from MongoDB. + asset_doc (Dict[str, Any]): Mongo document of asset from MongoDB. + task_name (Union[str, None]): Task name under passed asset. + host_name (Union[str, None]): Used to fill '{app}' key. + system_settings (Union[Dict, None]): Prepared system settings. + They're queried if not passed (may be slower). + + Returns: + Dict[str, Any]: Data prepared for filling workdir template. + """ + + template_data = get_general_template_data(system_settings) + template_data.update(get_project_template_data(project_doc)) + if asset_doc: + template_data.update(get_asset_template_data( + asset_doc, project_doc["name"] + )) + if task_name: + template_data.update(get_task_template_data( + project_doc, asset_doc, task_name + )) + + if host_name: + template_data["app"] = host_name + + return template_data + + +def get_template_data_with_names( + project_name, + asset_name=None, + task_name=None, + host_name=None, + system_settings=None +): + """Prepare data for templates filling from entered entity names and info. + + Copy of 'get_template_data' but based on entity names instead of documents. + Only difference is that documents are queried. + + Args: + project_name (str): Project name for which template data are + calculated. + asset_name (Union[str, None]): Asset name for which template data are + calculated. + task_name (Union[str, None]): Task name under passed asset. + host_name (Union[str, None]):Used to fill '{app}' key. + because workdir template may contain `{app}` key. + system_settings (Union[Dict, None]): Prepared system settings. + They're queried if not passed. + + Returns: + Dict[str, Any]: Data prepared for filling workdir template. + """ + + project_doc = get_project( + project_name, fields=["name", "data.code", "config.tasks"] + ) + asset_doc = None + if asset_name: + asset_doc = get_asset_by_name( + project_name, + asset_name, + fields=["name", "data.parents", "data.tasks"] + ) + return get_template_data( + project_doc, asset_doc, task_name, host_name, system_settings + ) diff --git a/openpype/pipeline/thumbnail.py b/openpype/pipeline/thumbnail.py index ec97b36954..eb383b16d9 100644 --- a/openpype/pipeline/thumbnail.py +++ b/openpype/pipeline/thumbnail.py @@ -2,6 +2,7 @@ import os import copy import logging +from openpype.client import get_project from . 
import legacy_io from .plugin_discover import ( discover, @@ -85,13 +86,8 @@ class TemplateResolver(ThumbnailResolver): self.log.debug("Thumbnail entity does not have set template") return - project = self.dbcon.find_one( - {"type": "project"}, - { - "name": True, - "data.code": True - } - ) + project_name = self.dbcon.active_project() + project = get_project(project_name, fields=["name", "data.code"]) template_data = copy.deepcopy( thumbnail_entity["data"].get("template_data") or {} diff --git a/openpype/pipeline/workfile/__init__.py b/openpype/pipeline/workfile/__init__.py new file mode 100644 index 0000000000..0aad29b6f9 --- /dev/null +++ b/openpype/pipeline/workfile/__init__.py @@ -0,0 +1,30 @@ +from .path_resolving import ( + get_workfile_template_key_from_context, + get_workfile_template_key, + get_workdir_with_workdir_data, + get_workdir, + + get_last_workfile_with_version, + get_last_workfile, + + get_custom_workfile_template, + get_custom_workfile_template_by_string_context, +) + +from .build_workfile import BuildWorkfile + + +__all__ = ( + "get_workfile_template_key_from_context", + "get_workfile_template_key", + "get_workdir_with_workdir_data", + "get_workdir", + + "get_last_workfile_with_version", + "get_last_workfile", + + "get_custom_workfile_template", + "get_custom_workfile_template_by_string_context", + + "BuildWorkfile", +) diff --git a/openpype/pipeline/workfile/abstract_template_loader.py b/openpype/pipeline/workfile/abstract_template_loader.py new file mode 100644 index 0000000000..05a98a1ddc --- /dev/null +++ b/openpype/pipeline/workfile/abstract_template_loader.py @@ -0,0 +1,526 @@ +import os +from abc import ABCMeta, abstractmethod + +import six +import logging +from functools import reduce + +from openpype.client import get_asset_by_name +from openpype.settings import get_project_settings +from openpype.lib import ( + StringTemplate, + Logger, + filter_profiles, + get_linked_assets, +) +from openpype.pipeline import legacy_io, Anatomy +from openpype.pipeline.load import ( + get_loaders_by_name, + get_representation_context, + load_with_repre_context, +) + +from .build_template_exceptions import ( + TemplateAlreadyImported, + TemplateLoadingFailed, + TemplateProfileNotFound, + TemplateNotFound +) + +log = logging.getLogger(__name__) + + +def update_representations(entities, entity): + if entity['context']['subset'] not in entities: + entities[entity['context']['subset']] = entity + else: + current = entities[entity['context']['subset']] + incomming = entity + entities[entity['context']['subset']] = max( + current, incomming, + key=lambda entity: entity["context"].get("version", -1)) + + return entities + + +def parse_loader_args(loader_args): + if not loader_args: + return dict() + try: + parsed_args = eval(loader_args) + if not isinstance(parsed_args, dict): + return dict() + else: + return parsed_args + except Exception as err: + print( + "Error while parsing loader arguments '{}'.\n{}: {}\n\n" + "Continuing with default arguments. . .".format( + loader_args, + err.__class__.__name__, + err)) + return dict() + + +@six.add_metaclass(ABCMeta) +class AbstractTemplateLoader: + """ + Abstraction of Template Loader. + Properties: + template_path : property to get current template path + Methods: + import_template : Abstract Method. Used to load template, + depending on current host + get_template_nodes : Abstract Method. Used to query nodes acting + as placeholders. 
Depending on current host + """ + + _log = None + + def __init__(self, placeholder_class): + # TODO template loader should expect host as and argument + # - host have all responsibility for most of code (also provide + # placeholder class) + # - also have responsibility for current context + # - this won't work in DCCs where multiple workfiles with + # different contexts can be opened at single time + # - template loader should have ability to change context + project_name = legacy_io.active_project() + asset_name = legacy_io.Session["AVALON_ASSET"] + + self.loaders_by_name = get_loaders_by_name() + self.current_asset = asset_name + self.project_name = project_name + self.host_name = legacy_io.Session["AVALON_APP"] + self.task_name = legacy_io.Session["AVALON_TASK"] + self.placeholder_class = placeholder_class + self.current_asset_doc = get_asset_by_name(project_name, asset_name) + self.task_type = ( + self.current_asset_doc + .get("data", {}) + .get("tasks", {}) + .get(self.task_name, {}) + .get("type") + ) + + self.log.info( + "BUILDING ASSET FROM TEMPLATE :\n" + "Starting templated build for {asset} in {project}\n\n" + "Asset : {asset}\n" + "Task : {task_name} ({task_type})\n" + "Host : {host}\n" + "Project : {project}\n".format( + asset=self.current_asset, + host=self.host_name, + project=self.project_name, + task_name=self.task_name, + task_type=self.task_type + )) + # Skip if there is no loader + if not self.loaders_by_name: + self.log.warning( + "There is no registered loaders. No assets will be loaded") + return + + @property + def log(self): + if self._log is None: + self._log = Logger.get_logger(self.__class__.__name__) + return self._log + + def template_already_imported(self, err_msg): + """In case template was already loaded. + Raise the error as a default action. + Override this method in your template loader implementation + to manage this case.""" + self.log.error("{}: {}".format( + err_msg.__class__.__name__, + err_msg)) + raise TemplateAlreadyImported(err_msg) + + def template_loading_failed(self, err_msg): + """In case template loading failed + Raise the error as a default action. + Override this method in your template loader implementation + to manage this case. + """ + self.log.error("{}: {}".format( + err_msg.__class__.__name__, + err_msg)) + raise TemplateLoadingFailed(err_msg) + + @property + def template_path(self): + """ + Property returning template path. Avoiding setter. + Getting template path from open pype settings based on current avalon + session and solving the path variables if needed. 
+ Returns: + str: Solved template path + Raises: + TemplateProfileNotFound: No profile found from settings for + current avalon session + KeyError: Could not solve path because a key does not exists + in avalon context + TemplateNotFound: Solved path does not exists on current filesystem + """ + project_name = self.project_name + host_name = self.host_name + task_name = self.task_name + task_type = self.task_type + + anatomy = Anatomy(project_name) + project_settings = get_project_settings(project_name) + + build_info = project_settings[host_name]["templated_workfile_build"] + profile = filter_profiles( + build_info["profiles"], + { + "task_types": task_type, + "tasks": task_name + } + ) + + if not profile: + raise TemplateProfileNotFound( + "No matching profile found for task '{}' of type '{}' " + "with host '{}'".format(task_name, task_type, host_name) + ) + + path = profile["path"] + if not path: + raise TemplateLoadingFailed( + "Template path is not set.\n" + "Path need to be set in {}\\Template Workfile Build " + "Settings\\Profiles".format(host_name.title())) + + # Try fill path with environments and anatomy roots + fill_data = { + key: value + for key, value in os.environ.items() + } + fill_data["root"] = anatomy.roots + result = StringTemplate.format_template(path, fill_data) + if result.solved: + path = result.normalized() + + if path and os.path.exists(path): + self.log.info("Found template at: '{}'".format(path)) + return path + + solved_path = None + while True: + try: + solved_path = anatomy.path_remapper(path) + except KeyError as missing_key: + raise KeyError( + "Could not solve key '{}' in template path '{}'".format( + missing_key, path)) + + if solved_path is None: + solved_path = path + if solved_path == path: + break + path = solved_path + + solved_path = os.path.normpath(solved_path) + if not os.path.exists(solved_path): + raise TemplateNotFound( + "Template found in openPype settings for task '{}' with host " + "'{}' does not exists. 
(Not found : {})".format( + task_name, host_name, solved_path)) + + self.log.info("Found template at: '{}'".format(solved_path)) + + return solved_path + + def populate_template(self, ignored_ids=None): + """ + Use template placeholders to load assets and parent them in hierarchy + Arguments : + ignored_ids : + Returns: + None + """ + + loaders_by_name = self.loaders_by_name + current_asset_doc = self.current_asset_doc + linked_assets = get_linked_assets(current_asset_doc) + + ignored_ids = ignored_ids or [] + placeholders = self.get_placeholders() + self.log.debug("Placeholders found in template: {}".format( + [placeholder.name for placeholder in placeholders] + )) + for placeholder in placeholders: + self.log.debug("Start to processing placeholder {}".format( + placeholder.name + )) + placeholder_representations = self.get_placeholder_representations( + placeholder, + current_asset_doc, + linked_assets + ) + + if not placeholder_representations: + self.log.info( + "There's no representation for this placeholder: " + "{}".format(placeholder.name) + ) + continue + + for representation in placeholder_representations: + self.preload(placeholder, loaders_by_name, representation) + + if self.load_data_is_incorrect( + placeholder, + representation, + ignored_ids): + continue + + self.log.info( + "Loading {}_{} with loader {}\n" + "Loader arguments used : {}".format( + representation['context']['asset'], + representation['context']['subset'], + placeholder.loader_name, + placeholder.loader_args)) + + try: + container = self.load( + placeholder, loaders_by_name, representation) + except Exception: + self.load_failed(placeholder, representation) + else: + self.load_succeed(placeholder, container) + finally: + self.postload(placeholder) + + def get_placeholder_representations( + self, placeholder, current_asset_doc, linked_asset_docs + ): + placeholder_representations = placeholder.get_representations( + current_asset_doc, + linked_asset_docs + ) + for repre_doc in reduce( + update_representations, + placeholder_representations, + dict() + ).values(): + yield repre_doc + + def load_data_is_incorrect( + self, placeholder, last_representation, ignored_ids): + if not last_representation: + self.log.warning(placeholder.err_message()) + return True + if (str(last_representation['_id']) in ignored_ids): + print("Ignoring : ", last_representation['_id']) + return True + return False + + def preload(self, placeholder, loaders_by_name, last_representation): + pass + + def load(self, placeholder, loaders_by_name, last_representation): + repre = get_representation_context(last_representation) + return load_with_repre_context( + loaders_by_name[placeholder.loader_name], + repre, + options=parse_loader_args(placeholder.loader_args)) + + def load_succeed(self, placeholder, container): + placeholder.parent_in_hierarchy(container) + + def load_failed(self, placeholder, last_representation): + self.log.warning( + "Got error trying to load {}:{} with {}".format( + last_representation['context']['asset'], + last_representation['context']['subset'], + placeholder.loader_name + ), + exc_info=True + ) + + def postload(self, placeholder): + placeholder.clean() + + def update_missing_containers(self): + loaded_containers_ids = self.get_loaded_containers_by_id() + self.populate_template(ignored_ids=loaded_containers_ids) + + def get_placeholders(self): + placeholders = map(self.placeholder_class, self.get_template_nodes()) + valid_placeholders = filter( + lambda i: i.is_valid, + placeholders + ) + sorted_placeholders = 
list(sorted( + valid_placeholders, + key=lambda i: i.order + )) + return sorted_placeholders + + @abstractmethod + def get_loaded_containers_by_id(self): + """ + Collect already loaded containers for updating scene + Return: + dict (string, node): A dictionnary id as key + and containers as value + """ + pass + + @abstractmethod + def import_template(self, template_path): + """ + Import template in current host + Args: + template_path (str): fullpath to current task and + host's template file + Return: + None + """ + pass + + @abstractmethod + def get_template_nodes(self): + """ + Returning a list of nodes acting as host placeholders for + templating. The data representation is by user. + AbstractLoadTemplate (and LoadTemplate) won't directly manipulate nodes + Args : + None + Returns: + list(AnyNode): Solved template path + """ + pass + + +@six.add_metaclass(ABCMeta) +class AbstractPlaceholder: + """Abstraction of placeholders logic. + + Properties: + required_keys: A list of mandatory keys to decribe placeholder + and assets to load. + optional_keys: A list of optional keys to decribe + placeholder and assets to load + loader_name: Name of linked loader to use while loading assets + + Args: + identifier (str): Placeholder identifier. Should be possible to be + used as identifier in "a scene" (e.g. unique node name). + """ + + required_keys = { + "builder_type", + "family", + "representation", + "order", + "loader", + "loader_args" + } + optional_keys = {} + + def __init__(self, identifier): + self._log = None + self._name = identifier + self.get_data(identifier) + + @property + def log(self): + if self._log is None: + self._log = Logger.get_logger(repr(self)) + return self._log + + def __repr__(self): + return "< {} {} >".format(self.__class__.__name__, self.name) + + @property + def name(self): + return self._name + + @property + def loader_args(self): + return self.data["loader_args"] + + @property + def builder_type(self): + return self.data["builder_type"] + + @property + def order(self): + return self.data["order"] + + @property + def loader_name(self): + """Return placeholder loader name. + + Returns: + str: Loader name that will be used to load placeholder + representations. + """ + + return self.data["loader"] + + @property + def is_valid(self): + """Test validity of placeholder. + + i.e.: every required key exists in placeholder data + + Returns: + bool: True if every key is in data + """ + + if set(self.required_keys).issubset(self.data.keys()): + self.log.debug("Valid placeholder : {}".format(self.name)) + return True + self.log.info("Placeholder is not valid : {}".format(self.name)) + return False + + @abstractmethod + def parent_in_hierarchy(self, container): + """Place loaded container in correct hierarchy given by placeholder + + Args: + container (Dict[str, Any]): Loaded container created by loader. + """ + + pass + + @abstractmethod + def clean(self): + """Clean placeholder from hierarchy after loading assets.""" + + pass + + @abstractmethod + def get_representations(self, current_asset_doc, linked_asset_docs): + """Query representations based on placeholder data. + + Args: + current_asset_doc (Dict[str, Any]): Document of current + context asset. + linked_asset_docs (List[Dict[str, Any]]): Documents of assets + linked to current context asset. + + Returns: + Iterable[Dict[str, Any]]: Representations that are matching + placeholder filters. + """ + + pass + + @abstractmethod + def get_data(self, identifier): + """Collect information about placeholder by identifier. 
+ + Args: + identifier (str): A unique placeholder identifier defined by + implementation. + """ + + pass diff --git a/openpype/pipeline/workfile/build_template.py b/openpype/pipeline/workfile/build_template.py new file mode 100644 index 0000000000..e6396578c5 --- /dev/null +++ b/openpype/pipeline/workfile/build_template.py @@ -0,0 +1,68 @@ +from importlib import import_module +from openpype.lib import classes_from_module +from openpype.host import HostBase +from openpype.pipeline import registered_host + +from .abstract_template_loader import ( + AbstractPlaceholder, + AbstractTemplateLoader) + +from .build_template_exceptions import ( + TemplateLoadingFailed, + TemplateAlreadyImported, + MissingHostTemplateModule, + MissingTemplatePlaceholderClass, + MissingTemplateLoaderClass +) + +_module_path_format = 'openpype.hosts.{host}.api.template_loader' + + +def build_workfile_template(*args): + template_loader = build_template_loader() + try: + template_loader.import_template(template_loader.template_path) + except TemplateAlreadyImported as err: + template_loader.template_already_imported(err) + except TemplateLoadingFailed as err: + template_loader.template_loading_failed(err) + else: + template_loader.populate_template() + + +def update_workfile_template(args): + template_loader = build_template_loader() + template_loader.update_missing_containers() + + +def build_template_loader(): + # TODO refactor to use advantage of 'HostBase' and don't import dynamically + # - hosts should have methods that gives option to return builders + host = registered_host() + if isinstance(host, HostBase): + host_name = host.name + else: + host_name = host.__name__.partition('.')[2] + module_path = _module_path_format.format(host=host_name) + module = import_module(module_path) + if not module: + raise MissingHostTemplateModule( + "No template loader found for host {}".format(host_name)) + + template_loader_class = classes_from_module( + AbstractTemplateLoader, + module + ) + template_placeholder_class = classes_from_module( + AbstractPlaceholder, + module + ) + + if not template_loader_class: + raise MissingTemplateLoaderClass() + template_loader_class = template_loader_class[0] + + if not template_placeholder_class: + raise MissingTemplatePlaceholderClass() + template_placeholder_class = template_placeholder_class[0] + return template_loader_class(template_placeholder_class) diff --git a/openpype/pipeline/workfile/build_template_exceptions.py b/openpype/pipeline/workfile/build_template_exceptions.py new file mode 100644 index 0000000000..7a5075e3dc --- /dev/null +++ b/openpype/pipeline/workfile/build_template_exceptions.py @@ -0,0 +1,35 @@ +class MissingHostTemplateModule(Exception): + """Error raised when expected module does not exists""" + pass + + +class MissingTemplatePlaceholderClass(Exception): + """Error raised when module doesn't implement a placeholder class""" + pass + + +class MissingTemplateLoaderClass(Exception): + """Error raised when module doesn't implement a template loader class""" + pass + + +class TemplateNotFound(Exception): + """Exception raised when template does not exist.""" + pass + + +class TemplateProfileNotFound(Exception): + """Exception raised when current profile + doesn't match any template profile""" + pass + + +class TemplateAlreadyImported(Exception): + """Error raised when Template was already imported by host for + this session""" + pass + + +class TemplateLoadingFailed(Exception): + """Error raised whend Template loader was unable to load the template""" + pass diff 
--git a/openpype/pipeline/workfile/build_workfile.py b/openpype/pipeline/workfile/build_workfile.py new file mode 100644 index 0000000000..bb6fcb4189 --- /dev/null +++ b/openpype/pipeline/workfile/build_workfile.py @@ -0,0 +1,693 @@ +import os +import re +import collections +import json + +from openpype.client import ( + get_asset_by_name, + get_subsets, + get_last_versions, + get_representations, +) +from openpype.settings import get_project_settings +from openpype.lib import ( + get_linked_assets, + filter_profiles, + Logger, +) +from openpype.pipeline import legacy_io +from openpype.pipeline.load import ( + discover_loader_plugins, + IncompatibleLoaderError, + load_container, +) + + +class BuildWorkfile: + """Wrapper for build workfile process. + + Load representations for current context by build presets. Build presets + are host related, since each host has it's loaders. + """ + + _log = None + + @property + def log(self): + if self._log is None: + self._log = Logger.get_logger(self.__class__.__name__) + return self._log + + @staticmethod + def map_subsets_by_family(subsets): + subsets_by_family = collections.defaultdict(list) + for subset in subsets: + family = subset["data"].get("family") + if not family: + families = subset["data"].get("families") + if not families: + continue + family = families[0] + + subsets_by_family[family].append(subset) + return subsets_by_family + + def process(self): + """Main method of this wrapper. + + Building of workfile is triggered and is possible to implement + post processing of loaded containers if necessary. + + Returns: + List[Dict[str, Any]]: Loaded containers during build. + """ + + return self.build_workfile() + + def build_workfile(self): + """Prepares and load containers into workfile. + + Loads latest versions of current and linked assets to workfile by logic + stored in Workfile profiles from presets. Profiles are set by host, + filtered by current task name and used by families. + + Each family can specify representation names and loaders for + representations and first available and successful loaded + representation is returned as container. + + At the end you'll get list of loaded containers per each asset. + + loaded_containers [{ + "asset_entity": , + "containers": [, , ...] + }, { + "asset_entity": , + "containers": [, ...] + }, { + ... + }] + + Returns: + List[Dict[str, Any]]: Loaded containers during build. 
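A minimal usage sketch of the wrapper defined in this new module; it only calls `BuildWorkfile.process()` and walks the documented output structure, assuming it runs inside a host with a registered context:

```python
from openpype.pipeline.workfile.build_workfile import BuildWorkfile

# Build the workfile for the current context and report what was loaded.
loaded_containers = BuildWorkfile().process()
for item in loaded_containers:
    asset_doc = item["asset_entity"]
    print(asset_doc["name"], "->", len(item["containers"]), "containers")
```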
+ """ + + loaded_containers = [] + + # Get current asset name and entity + project_name = legacy_io.active_project() + current_asset_name = legacy_io.Session["AVALON_ASSET"] + current_asset_entity = get_asset_by_name( + project_name, current_asset_name + ) + # Skip if asset was not found + if not current_asset_entity: + print("Asset entity with name `{}` was not found".format( + current_asset_name + )) + return loaded_containers + + # Prepare available loaders + loaders_by_name = {} + for loader in discover_loader_plugins(): + loader_name = loader.__name__ + if loader_name in loaders_by_name: + raise KeyError( + "Duplicated loader name {0}!".format(loader_name) + ) + loaders_by_name[loader_name] = loader + + # Skip if there are any loaders + if not loaders_by_name: + self.log.warning("There are no registered loaders.") + return loaded_containers + + # Get current task name + current_task_name = legacy_io.Session["AVALON_TASK"] + + # Load workfile presets for task + self.build_presets = self.get_build_presets( + current_task_name, current_asset_entity + ) + + # Skip if there are any presets for task + if not self.build_presets: + self.log.warning( + "Current task `{}` does not have any loading preset.".format( + current_task_name + ) + ) + return loaded_containers + + # Get presets for loading current asset + current_context_profiles = self.build_presets.get("current_context") + # Get presets for loading linked assets + link_context_profiles = self.build_presets.get("linked_assets") + # Skip if both are missing + if not current_context_profiles and not link_context_profiles: + self.log.warning( + "Current task `{}` has empty loading preset.".format( + current_task_name + ) + ) + return loaded_containers + + elif not current_context_profiles: + self.log.warning(( + "Current task `{}` doesn't have any loading" + " preset for it's context." + ).format(current_task_name)) + + elif not link_context_profiles: + self.log.warning(( + "Current task `{}` doesn't have any" + "loading preset for it's linked assets." + ).format(current_task_name)) + + # Prepare assets to process by workfile presets + assets = [] + current_asset_id = None + if current_context_profiles: + # Add current asset entity if preset has current context set + assets.append(current_asset_entity) + current_asset_id = current_asset_entity["_id"] + + if link_context_profiles: + # Find and append linked assets if preset has set linked mapping + link_assets = get_linked_assets(current_asset_entity) + if link_assets: + assets.extend(link_assets) + + # Skip if there are no assets. This can happen if only linked mapping + # is set and there are no links for his asset. + if not assets: + self.log.warning( + "Asset does not have linked assets. Nothing to process." 
+ ) + return loaded_containers + + # Prepare entities from database for assets + prepared_entities = self._collect_last_version_repres(assets) + + # Load containers by prepared entities and presets + # - Current asset containers + if current_asset_id and current_asset_id in prepared_entities: + current_context_data = prepared_entities.pop(current_asset_id) + loaded_data = self.load_containers_by_asset_data( + current_context_data, current_context_profiles, loaders_by_name + ) + if loaded_data: + loaded_containers.append(loaded_data) + + # - Linked assets container + for linked_asset_data in prepared_entities.values(): + loaded_data = self.load_containers_by_asset_data( + linked_asset_data, link_context_profiles, loaders_by_name + ) + if loaded_data: + loaded_containers.append(loaded_data) + + # Return list of loaded containers + return loaded_containers + + def get_build_presets(self, task_name, asset_doc): + """ Returns presets to build workfile for task name. + + Presets are loaded for current project set in + io.Session["AVALON_PROJECT"], filtered by registered host + and entered task name. + + Args: + task_name (str): Task name used for filtering build presets. + + Returns: + Dict[str, Any]: preset per entered task name + """ + + host_name = os.environ["AVALON_APP"] + project_settings = get_project_settings( + legacy_io.Session["AVALON_PROJECT"] + ) + + host_settings = project_settings.get(host_name) or {} + # Get presets for host + wb_settings = host_settings.get("workfile_builder") + if not wb_settings: + # backward compatibility + wb_settings = host_settings.get("workfile_build") or {} + + builder_profiles = wb_settings.get("profiles") + if not builder_profiles: + return None + + task_type = ( + asset_doc + .get("data", {}) + .get("tasks", {}) + .get(task_name, {}) + .get("type") + ) + filter_data = { + "task_types": task_type, + "tasks": task_name + } + return filter_profiles(builder_profiles, filter_data) + + def _filter_build_profiles(self, build_profiles, loaders_by_name): + """ Filter build profiles by loaders and prepare process data. + + Valid profile must have "loaders", "families" and "repre_names" keys + with valid values. + - "loaders" expects list of strings representing possible loaders. + - "families" expects list of strings for filtering + by main subset family. + - "repre_names" expects list of strings for filtering by + representation name. + + Lowered "families" and "repre_names" are prepared for each profile with + all required keys. + + Args: + build_profiles (Dict[str, Any]): Profiles for building workfile. + loaders_by_name (Dict[str, LoaderPlugin]): Available loaders + per name. + + Returns: + List[Dict[str, Any]]: Filtered and prepared profiles. 
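To make the filtering concrete, a profile that would pass `_filter_build_profiles` could look like the sketch below; the values are illustrative, not shipped settings:

```python
# Illustrative build profile; these are the keys the method validates.
profile = {
    "loaders": ["ReferenceLoader"],           # at least one must be registered
    "families": ["model", "rig"],             # matched against subset family
    "repre_names": ["ma", "abc"],             # matched against representation name
    "subset_name_filters": ["^modelMain.*"],  # optional regex filtering
}
# The method then adds lowered copies used for later matching:
# profile["families_lowered"] and profile["repre_names_lowered"].
```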
+ """ + + valid_profiles = [] + for profile in build_profiles: + # Check loaders + profile_loaders = profile.get("loaders") + if not profile_loaders: + self.log.warning(( + "Build profile has missing loaders configuration: {0}" + ).format(json.dumps(profile, indent=4))) + continue + + # Check if any loader is available + loaders_match = False + for loader_name in profile_loaders: + if loader_name in loaders_by_name: + loaders_match = True + break + + if not loaders_match: + self.log.warning(( + "All loaders from Build profile are not available: {0}" + ).format(json.dumps(profile, indent=4))) + continue + + # Check families + profile_families = profile.get("families") + if not profile_families: + self.log.warning(( + "Build profile is missing families configuration: {0}" + ).format(json.dumps(profile, indent=4))) + continue + + # Check representation names + profile_repre_names = profile.get("repre_names") + if not profile_repre_names: + self.log.warning(( + "Build profile is missing" + " representation names filtering: {0}" + ).format(json.dumps(profile, indent=4))) + continue + + # Prepare lowered families and representation names + profile["families_lowered"] = [ + fam.lower() for fam in profile_families + ] + profile["repre_names_lowered"] = [ + name.lower() for name in profile_repre_names + ] + + valid_profiles.append(profile) + + return valid_profiles + + def _prepare_profile_for_subsets(self, subsets, profiles): + """Select profile for each subset by it's data. + + Profiles are filtered for each subset individually. + Profile is filtered by subset's family, optionally by name regex and + representation names set in profile. + It is possible to not find matching profile for subset, in that case + subset is skipped and it is possible that none of subsets have + matching profile. + + Args: + subsets (List[Dict[str, Any]]): Subset documents. + profiles (List[Dict[str, Any]]): Build profiles. + + Returns: + Dict[str, Any]: Profile by subset's id. + """ + + # Prepare subsets + subsets_by_family = self.map_subsets_by_family(subsets) + + profiles_per_subset_id = {} + for family, subsets in subsets_by_family.items(): + family_low = family.lower() + for profile in profiles: + # Skip profile if does not contain family + if family_low not in profile["families_lowered"]: + continue + + # Precompile name filters as regexes + profile_regexes = profile.get("subset_name_filters") + if profile_regexes: + _profile_regexes = [] + for regex in profile_regexes: + _profile_regexes.append(re.compile(regex)) + profile_regexes = _profile_regexes + + # TODO prepare regex compilation + for subset in subsets: + # Verify regex filtering (optional) + if profile_regexes: + valid = False + for pattern in profile_regexes: + if re.match(pattern, subset["name"]): + valid = True + break + + if not valid: + continue + + profiles_per_subset_id[subset["_id"]] = profile + + # break profiles loop on finding the first matching profile + break + return profiles_per_subset_id + + def load_containers_by_asset_data( + self, asset_entity_data, build_profiles, loaders_by_name + ): + """Load containers for entered asset entity by Build profiles. + + Args: + asset_entity_data (Dict[str, Any]): Prepared data with subsets, + last versions and representations for specific asset. + build_profiles (Dict[str, Any]): Build profiles. + loaders_by_name (Dict[str, LoaderPlugin]): Available loaders + per name. + + Returns: + Dict[str, Any]: Output contains asset document + and loaded containers. 
+ """ + + # Make sure all data are not empty + if not asset_entity_data or not build_profiles or not loaders_by_name: + return + + asset_entity = asset_entity_data["asset_entity"] + + valid_profiles = self._filter_build_profiles( + build_profiles, loaders_by_name + ) + if not valid_profiles: + self.log.warning( + "There are not valid Workfile profiles. Skipping process." + ) + return + + self.log.debug("Valid Workfile profiles: {}".format(valid_profiles)) + + subsets_by_id = {} + version_by_subset_id = {} + repres_by_version_id = {} + for subset_id, in_data in asset_entity_data["subsets"].items(): + subset_entity = in_data["subset_entity"] + subsets_by_id[subset_entity["_id"]] = subset_entity + + version_data = in_data["version"] + version_entity = version_data["version_entity"] + version_by_subset_id[subset_id] = version_entity + repres_by_version_id[version_entity["_id"]] = ( + version_data["repres"] + ) + + if not subsets_by_id: + self.log.warning("There are not subsets for asset {0}".format( + asset_entity["name"] + )) + return + + profiles_per_subset_id = self._prepare_profile_for_subsets( + subsets_by_id.values(), valid_profiles + ) + if not profiles_per_subset_id: + self.log.warning("There are not valid subsets.") + return + + valid_repres_by_subset_id = collections.defaultdict(list) + for subset_id, profile in profiles_per_subset_id.items(): + profile_repre_names = profile["repre_names_lowered"] + + version_entity = version_by_subset_id[subset_id] + version_id = version_entity["_id"] + repres = repres_by_version_id[version_id] + for repre in repres: + repre_name_low = repre["name"].lower() + if repre_name_low in profile_repre_names: + valid_repres_by_subset_id[subset_id].append(repre) + + # DEBUG message + msg = "Valid representations for Asset: `{}`".format( + asset_entity["name"] + ) + for subset_id, repres in valid_repres_by_subset_id.items(): + subset = subsets_by_id[subset_id] + msg += "\n# Subset Name/ID: `{}`/{}".format( + subset["name"], subset_id + ) + for repre in repres: + msg += "\n## Repre name: `{}`".format(repre["name"]) + + self.log.debug(msg) + + containers = self._load_containers( + valid_repres_by_subset_id, subsets_by_id, + profiles_per_subset_id, loaders_by_name + ) + + return { + "asset_entity": asset_entity, + "containers": containers + } + + def _load_containers( + self, repres_by_subset_id, subsets_by_id, + profiles_per_subset_id, loaders_by_name + ): + """Real load by collected data happens here. + + Loading of representations per subset happens here. Each subset can + loads one representation. Loading is tried in specific order. + Representations are tried to load by names defined in configuration. + If subset has representation matching representation name each loader + is tried to load it until any is successful. If none of them was + successful then next representation name is tried. + Subset process loop ends when any representation is loaded or + all matching representations were already tried. + + Args: + repres_by_subset_id (Dict[str, Dict[str, Any]]): Available + representations mapped by their parent (subset) id. + subsets_by_id (Dict[str, Dict[str, Any]]): Subset documents + mapped by their id. + profiles_per_subset_id (Dict[str, Dict[str, Any]]): Build profiles + mapped by subset id. + loaders_by_name (Dict[str, LoaderPlugin]): Available loaders + per name. + + Returns: + List[Dict[str, Any]]: Objects of loaded containers. + """ + + loaded_containers = [] + + # Get subset id order from build presets. 
+ build_presets = self.build_presets.get("current_context", []) + build_presets += self.build_presets.get("linked_assets", []) + subset_ids_ordered = [] + for preset in build_presets: + for preset_family in preset["families"]: + for id, subset in subsets_by_id.items(): + if preset_family not in subset["data"].get("families", []): + continue + + subset_ids_ordered.append(id) + + # Order representations from subsets. + print("repres_by_subset_id", repres_by_subset_id) + representations_ordered = [] + representations = [] + for id in subset_ids_ordered: + for subset_id, repres in repres_by_subset_id.items(): + if repres in representations: + continue + + if id == subset_id: + representations_ordered.append((subset_id, repres)) + representations.append(repres) + + print("representations", representations) + + # Load ordered representations. + for subset_id, repres in representations_ordered: + subset_name = subsets_by_id[subset_id]["name"] + + profile = profiles_per_subset_id[subset_id] + loaders_last_idx = len(profile["loaders"]) - 1 + repre_names_last_idx = len(profile["repre_names_lowered"]) - 1 + + repre_by_low_name = { + repre["name"].lower(): repre for repre in repres + } + + is_loaded = False + for repre_name_idx, profile_repre_name in enumerate( + profile["repre_names_lowered"] + ): + # Break iteration if representation was already loaded + if is_loaded: + break + + repre = repre_by_low_name.get(profile_repre_name) + if not repre: + continue + + for loader_idx, loader_name in enumerate(profile["loaders"]): + if is_loaded: + break + + loader = loaders_by_name.get(loader_name) + if not loader: + continue + try: + container = load_container( + loader, + repre["_id"], + name=subset_name + ) + loaded_containers.append(container) + is_loaded = True + + except Exception as exc: + if exc == IncompatibleLoaderError: + self.log.info(( + "Loader `{}` is not compatible with" + " representation `{}`" + ).format(loader_name, repre["name"])) + + else: + self.log.error( + "Unexpected error happened during loading", + exc_info=True + ) + + msg = "Loading failed." + if loader_idx < loaders_last_idx: + msg += " Trying next loader." + elif repre_name_idx < repre_names_last_idx: + msg += ( + " Loading of subset `{}` was not successful." + ).format(subset_name) + else: + msg += " Trying next representation." + self.log.info(msg) + + return loaded_containers + + def _collect_last_version_repres(self, asset_docs): + """Collect subsets, versions and representations for asset_entities. + + Args: + asset_docs (List[Dict[str, Any]]): Asset entities for which + want to find data. + + Returns: + Dict[str, Any]: collected entities + + Example output: + ``` + { + {Asset ID}: { + "asset_entity": , + "subsets": { + {Subset ID}: { + "subset_entity": , + "version": { + "version_entity": , + "repres": [ + , , ... + ] + } + }, + ... + } + }, + ... 
+ } + output[asset_id]["subsets"][subset_id]["version"]["repres"] + ``` + """ + + output = {} + if not asset_docs: + return output + + asset_docs_by_ids = {asset["_id"]: asset for asset in asset_docs} + + project_name = legacy_io.active_project() + subsets = list(get_subsets( + project_name, asset_ids=asset_docs_by_ids.keys() + )) + subset_entity_by_ids = {subset["_id"]: subset for subset in subsets} + + last_version_by_subset_id = get_last_versions( + project_name, subset_entity_by_ids.keys() + ) + last_version_docs_by_id = { + version["_id"]: version + for version in last_version_by_subset_id.values() + } + repre_docs = get_representations( + project_name, version_ids=last_version_docs_by_id.keys() + ) + + for repre_doc in repre_docs: + version_id = repre_doc["parent"] + version_doc = last_version_docs_by_id[version_id] + + subset_id = version_doc["parent"] + subset_doc = subset_entity_by_ids[subset_id] + + asset_id = subset_doc["parent"] + asset_doc = asset_docs_by_ids[asset_id] + + if asset_id not in output: + output[asset_id] = { + "asset_entity": asset_doc, + "subsets": {} + } + + if subset_id not in output[asset_id]["subsets"]: + output[asset_id]["subsets"][subset_id] = { + "subset_entity": subset_doc, + "version": { + "version_entity": version_doc, + "repres": [] + } + } + + output[asset_id]["subsets"][subset_id]["version"]["repres"].append( + repre_doc + ) + + return output diff --git a/openpype/pipeline/workfile/path_resolving.py b/openpype/pipeline/workfile/path_resolving.py new file mode 100644 index 0000000000..6d9e72dbd2 --- /dev/null +++ b/openpype/pipeline/workfile/path_resolving.py @@ -0,0 +1,469 @@ +import os +import re +import copy +import platform + +from openpype.client import get_project, get_asset_by_name +from openpype.settings import get_project_settings +from openpype.lib import ( + filter_profiles, + Logger, + StringTemplate, +) +from openpype.pipeline import Anatomy +from openpype.pipeline.template_data import get_template_data + + +def get_workfile_template_key_from_context( + asset_name, task_name, host_name, project_name, project_settings=None +): + """Helper function to get template key for workfile template. + + Do the same as `get_workfile_template_key` but returns value for "session + context". + + Args: + asset_name(str): Name of asset document. + task_name(str): Task name for which is template key retrieved. + Must be available on asset document under `data.tasks`. + host_name(str): Name of host implementation for which is workfile + used. + project_name(str): Project name where asset and task is. + project_settings(Dict[str, Any]): Project settings for passed + 'project_name'. Not required at all but makes function faster. + """ + + asset_doc = get_asset_by_name( + project_name, asset_name, fields=["data.tasks"] + ) + asset_tasks = asset_doc.get("data", {}).get("tasks") or {} + task_info = asset_tasks.get(task_name) or {} + task_type = task_info.get("type") + + return get_workfile_template_key( + task_type, host_name, project_name, project_settings + ) + + +def get_workfile_template_key( + task_type, host_name, project_name, project_settings=None +): + """Workfile template key which should be used to get workfile template. + + Function is using profiles from project settings to return right template + for passet task type and host name. + + Args: + task_type(str): Name of task type. + host_name(str): Name of host implementation (e.g. "maya", "nuke", ...) + project_name(str): Name of project in which context should look for + settings. 
+ project_settings(Dict[str, Any]): Prepared project settings for + project name. Optional to make processing faster. + """ + + default = "work" + if not task_type or not host_name: + return default + + if not project_settings: + project_settings = get_project_settings(project_name) + + try: + profiles = ( + project_settings + ["global"] + ["tools"] + ["Workfiles"] + ["workfile_template_profiles"] + ) + except Exception: + profiles = [] + + if not profiles: + return default + + profile_filter = { + "task_types": task_type, + "hosts": host_name + } + profile = filter_profiles(profiles, profile_filter) + if profile: + return profile["workfile_template"] or default + return default + + +def get_workdir_with_workdir_data( + workdir_data, + project_name, + anatomy=None, + template_key=None, + project_settings=None +): + """Fill workdir path from entered data and project's anatomy. + + It is possible to pass only project's name instead of project's anatomy but + one of them **must** be entered. It is preferred to enter anatomy if is + available as initialization of a new Anatomy object may be time consuming. + + Args: + workdir_data (Dict[str, Any]): Data to fill workdir template. + project_name (str): Project's name. + anatomy (Anatomy): Anatomy object for specific project. Faster + processing if is passed. + template_key (str): Key of work templates in anatomy templates. If not + passed `get_workfile_template_key_from_context` is used to get it. + project_settings(Dict[str, Any]): Prepared project settings for + project name. Optional to make processing faster. Ans id used only + if 'template_key' is not passed. + + Returns: + TemplateResult: Workdir path. + """ + + if not anatomy: + anatomy = Anatomy(project_name) + + if not template_key: + template_key = get_workfile_template_key( + workdir_data["task"]["type"], + workdir_data["app"], + workdir_data["project"]["name"], + project_settings + ) + + anatomy_filled = anatomy.format(workdir_data) + # Output is TemplateResult object which contain useful data + output = anatomy_filled[template_key]["folder"] + if output: + return output.normalized() + return output + + +def get_workdir( + project_doc, + asset_doc, + task_name, + host_name, + anatomy=None, + template_key=None, + project_settings=None +): + """Fill workdir path from entered data and project's anatomy. + + Args: + project_doc (Dict[str, Any]): Mongo document of project from MongoDB. + asset_doc (Dict[str, Any]): Mongo document of asset from MongoDB. + task_name (str): Task name for which are workdir data preapred. + host_name (str): Host which is used to workdir. This is required + because workdir template may contain `{app}` key. In `Session` + is stored under `AVALON_APP` key. + anatomy (Anatomy): Optional argument. Anatomy object is created using + project name from `project_doc`. It is preferred to pass this + argument as initialization of a new Anatomy object may be time + consuming. + template_key (str): Key of work templates in anatomy templates. Default + value is defined in `get_workdir_with_workdir_data`. + project_settings(Dict[str, Any]): Prepared project settings for + project name. Optional to make processing faster. Ans id used only + if 'template_key' is not passed. + + Returns: + TemplateResult: Workdir path. 
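A hedged usage sketch of this function: the project, asset and task names below are placeholders, and the asset is assumed to exist and to carry the task in its `data.tasks`:

```python
from openpype.client import get_project, get_asset_by_name
from openpype.pipeline.template_data import get_template_data
from openpype.pipeline.workfile.path_resolving import (
    get_workdir_with_workdir_data,
)

project_name = "demo_project"                               # placeholder
project_doc = get_project(project_name)
asset_doc = get_asset_by_name(project_name, "demo_asset")   # placeholder

workdir_data = get_template_data(project_doc, asset_doc, "modeling", "maya")
workdir = get_workdir_with_workdir_data(workdir_data, project_name)
print(workdir)  # TemplateResult behaving like the resolved folder path
```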
+ """ + + if not anatomy: + anatomy = Anatomy(project_doc["name"]) + + workdir_data = get_template_data( + project_doc, asset_doc, task_name, host_name + ) + # Output is TemplateResult object which contain useful data + return get_workdir_with_workdir_data( + workdir_data, + anatomy.project_name, + anatomy, + template_key, + project_settings + ) + + +def get_last_workfile_with_version( + workdir, file_template, fill_data, extensions +): + """Return last workfile version. + + Usign workfile template and it's filling data find most possible last + version of workfile which was created for the context. + + Functionality is fully based on knowing which keys are optional or what + values are expected as value. + + The last modified file is used if more files can be considered as + last workfile. + + Args: + workdir (str): Path to dir where workfiles are stored. + file_template (str): Template of file name. + fill_data (Dict[str, Any]): Data for filling template. + extensions (Iterable[str]): All allowed file extensions of workfile. + + Returns: + Tuple[Union[str, None], Union[int, None]]: Last workfile with version + if there is any workfile otherwise None for both. + """ + + if not os.path.exists(workdir): + return None, None + + dotted_extensions = set() + for ext in extensions: + if not ext.startswith("."): + ext = ".{}".format(ext) + dotted_extensions.add(ext) + + # Fast match on extension + filenames = [ + filename + for filename in os.listdir(workdir) + if os.path.splitext(filename)[-1] in dotted_extensions + ] + + # Build template without optionals, version to digits only regex + # and comment to any definable value. + # Escape extensions dot for regex + regex_exts = [ + "\\" + ext + for ext in dotted_extensions + ] + ext_expression = "(?:" + "|".join(regex_exts) + ")" + + # Replace `.{ext}` with `{ext}` so we are sure there is not dot at the end + file_template = re.sub(r"\.?{ext}", ext_expression, file_template) + # Replace optional keys with optional content regex + file_template = re.sub(r"<.*?>", r".*?", file_template) + # Replace `{version}` with group regex + file_template = re.sub(r"{version.*?}", r"([0-9]+)", file_template) + file_template = re.sub(r"{comment.*?}", r".+?", file_template) + file_template = StringTemplate.format_strict_template( + file_template, fill_data + ) + + # Match with ignore case on Windows due to the Windows + # OS not being case-sensitive. This avoids later running + # into the error that the file did exist if it existed + # with a different upper/lower-case. 
+ kwargs = {} + if platform.system().lower() == "windows": + kwargs["flags"] = re.IGNORECASE + + # Get highest version among existing matching files + version = None + output_filenames = [] + for filename in sorted(filenames): + match = re.match(file_template, filename, **kwargs) + if not match: + continue + + file_version = int(match.group(1)) + if version is None or file_version > version: + output_filenames[:] = [] + version = file_version + + if file_version == version: + output_filenames.append(filename) + + output_filename = None + if output_filenames: + if len(output_filenames) == 1: + output_filename = output_filenames[0] + else: + last_time = None + for _output_filename in output_filenames: + full_path = os.path.join(workdir, _output_filename) + mod_time = os.path.getmtime(full_path) + if last_time is None or last_time < mod_time: + output_filename = _output_filename + last_time = mod_time + + return output_filename, version + + +def get_last_workfile( + workdir, file_template, fill_data, extensions, full_path=False +): + """Return last workfile filename. + + Returns file with version 1 if there is not workfile yet. + + Args: + workdir(str): Path to dir where workfiles are stored. + file_template(str): Template of file name. + fill_data(Dict[str, Any]): Data for filling template. + extensions(Iterable[str]): All allowed file extensions of workfile. + full_path(bool): Full path to file is returned if set to True. + + Returns: + str: Last or first workfile as filename of full path to filename. + """ + + filename, version = get_last_workfile_with_version( + workdir, file_template, fill_data, extensions + ) + if filename is None: + data = copy.deepcopy(fill_data) + data["version"] = 1 + data.pop("comment", None) + if not data.get("ext"): + data["ext"] = extensions[0] + data["ext"] = data["ext"].replace('.', '') + filename = StringTemplate.format_strict_template(file_template, data) + + if full_path: + return os.path.normpath(os.path.join(workdir, filename)) + + return filename + + +def get_custom_workfile_template( + project_doc, + asset_doc, + task_name, + host_name, + anatomy=None, + project_settings=None +): + """Filter and fill workfile template profiles by passed context. + + Custom workfile template can be used as first version of workfiles. + Template is a file on a disk which is set in settings. Expected settings + structure to have this feature enabled is: + project settings + |- + |- workfile_builder + |- create_first_version - a bool which must be set to 'True' + |- custom_templates - profiles based on task name/type which + points to a file which is copied as + first workfile + + It is expected that passed argument are already queried documents of + project and asset as parents of processing task name. + + Args: + project_doc (Dict[str, Any]): Project document from MongoDB. + asset_doc (Dict[str, Any]): Asset document from MongoDB. + task_name (str): Name of task for which templates are filtered. + host_name (str): Name of host. + anatomy (Anatomy): Optionally passed anatomy object for passed project + name. + project_settings(Dict[str, Any]): Preloaded project settings. + + Returns: + str: Path to template or None if none of profiles match current + context. Existence of formatted path is not validated. + None: If no profile is matching context. 
+ """ + + log = Logger.get_logger("CustomWorkfileResolve") + + project_name = project_doc["name"] + if project_settings is None: + project_settings = get_project_settings(project_name) + + host_settings = project_settings.get(host_name) + if not host_settings: + log.info("Host \"{}\" doesn't have settings".format(host_name)) + return None + + workfile_builder_settings = host_settings.get("workfile_builder") + if not workfile_builder_settings: + log.info(( + "Seems like old version of settings is used." + " Can't access custom templates in host \"{}\"." + ).format(host_name)) + return + + if not workfile_builder_settings["create_first_version"]: + log.info(( + "Project \"{}\" has turned off to create first workfile for" + " host \"{}\"" + ).format(project_name, host_name)) + return + + # Backwards compatibility + template_profiles = workfile_builder_settings.get("custom_templates") + if not template_profiles: + log.info( + "Custom templates are not filled. Skipping template copy." + ) + return + + if anatomy is None: + anatomy = Anatomy(project_name) + + # get project, asset, task anatomy context data + anatomy_context_data = get_template_data( + project_doc, asset_doc, task_name, host_name + ) + # add root dict + anatomy_context_data["root"] = anatomy.roots + + # get task type for the task in context + current_task_type = anatomy_context_data["task"]["type"] + + # get path from matching profile + matching_item = filter_profiles( + template_profiles, + {"task_types": current_task_type} + ) + # when path is available try to format it in case + # there are some anatomy template strings + if matching_item: + # extend anatomy context with os.environ to + # also allow formatting against env + full_context_data = os.environ.copy() + full_context_data.update(anatomy_context_data) + + template = matching_item["path"][platform.system().lower()] + return StringTemplate.format_strict_template( + template, full_context_data + ).normalized() + + return None + + +def get_custom_workfile_template_by_string_context( + project_name, + asset_name, + task_name, + host_name, + anatomy=None, + project_settings=None +): + """Filter and fill workfile template profiles by passed context. + + Passed context are string representations of project, asset and task. + Function will query documents of project and asset to be able use + `get_custom_workfile_template` for rest of logic. + + Args: + project_name(str): Project name. + asset_name(str): Asset name. + task_name(str): Task name. + host_name (str): Name of host. + anatomy(Anatomy): Optionally prepared anatomy object for passed + project. + project_settings(Dict[str, Any]): Preloaded project settings. + + Returns: + str: Path to template or None if none of profiles match current + context. (Existence of formatted path is not validated.) + None: If no profile is matching context. 
+ """ + + project_doc = get_project(project_name) + asset_doc = get_asset_by_name(project_name, asset_name) + + return get_custom_workfile_template( + project_doc, asset_doc, task_name, host_name, anatomy, project_settings + ) diff --git a/openpype/plugins/load/delivery.py b/openpype/plugins/load/delivery.py index 7585ea4c59..f6e1d4f06b 100644 --- a/openpype/plugins/load/delivery.py +++ b/openpype/plugins/load/delivery.py @@ -4,10 +4,10 @@ from collections import defaultdict from Qt import QtWidgets, QtCore, QtGui from openpype.client import get_representations -from openpype.lib import config from openpype.pipeline import load, Anatomy from openpype import resources, style +from openpype.lib.dateutils import get_datetime_data from openpype.lib.delivery import ( sizeof_fmt, path_from_representation, @@ -160,7 +160,7 @@ class DeliveryOptionsDialog(QtWidgets.QDialog): selected_repres = self._get_selected_repres() - datetime_data = config.get_datetime_data() + datetime_data = get_datetime_data() template_name = self.dropdown.currentText() format_dict = get_format_dict(self.anatomy, self.root_line_edit.text()) for repre in self._representations: diff --git a/openpype/plugins/publish/collect_anatomy_context_data.py b/openpype/plugins/publish/collect_anatomy_context_data.py index 0794adfb67..8433816908 100644 --- a/openpype/plugins/publish/collect_anatomy_context_data.py +++ b/openpype/plugins/publish/collect_anatomy_context_data.py @@ -15,10 +15,8 @@ Provides: import json import pyblish.api -from openpype.lib import ( - get_system_general_anatomy_data -) from openpype.pipeline import legacy_io +from openpype.pipeline.template_data import get_template_data class CollectAnatomyContextData(pyblish.api.ContextPlugin): @@ -33,11 +31,15 @@ class CollectAnatomyContextData(pyblish.api.ContextPlugin): "asset": "AssetName", "hierarchy": "path/to/asset", "task": "Working", + "user": "MeDespicable", + # Duplicated entry "username": "MeDespicable", + # Current host name + "app": "maya" + *** OPTIONAL *** - "app": "maya" # Current application base name - + mutliple keys from `datetimeData` # see it's collector + + mutliple keys from `datetimeData` (See it's collector) } """ @@ -45,52 +47,26 @@ class CollectAnatomyContextData(pyblish.api.ContextPlugin): label = "Collect Anatomy Context Data" def process(self, context): + host_name = context.data["hostName"] + system_settings = context.data["system_settings"] project_entity = context.data["projectEntity"] - context_data = { - "project": { - "name": project_entity["name"], - "code": project_entity["data"].get("code") - }, - "username": context.data["user"], - "app": context.data["hostName"] - } - - context.data["anatomyData"] = context_data - - # add system general settings anatomy data - system_general_data = get_system_general_anatomy_data() - context_data.update(system_general_data) - - datetime_data = context.data.get("datetimeData") or {} - context_data.update(datetime_data) - asset_entity = context.data.get("assetEntity") + task_name = None if asset_entity: task_name = legacy_io.Session["AVALON_TASK"] - asset_tasks = asset_entity["data"]["tasks"] - task_type = asset_tasks.get(task_name, {}).get("type") + anatomy_data = get_template_data( + project_entity, asset_entity, task_name, host_name, system_settings + ) + anatomy_data.update(context.data.get("datetimeData") or {}) - project_task_types = project_entity["config"]["tasks"] - task_code = project_task_types.get(task_type, {}).get("short_name") + username = context.data["user"] + anatomy_data["user"] = 
username + # Backwards compatibility for 'username' key + anatomy_data["username"] = username - asset_parents = asset_entity["data"]["parents"] - hierarchy = "/".join(asset_parents) - - parent_name = project_entity["name"] - if asset_parents: - parent_name = asset_parents[-1] - - context_data.update({ - "asset": asset_entity["name"], - "parent": parent_name, - "hierarchy": hierarchy, - "task": { - "name": task_name, - "type": task_type, - "short": task_code, - } - }) + # Store + context.data["anatomyData"] = anatomy_data self.log.info("Global anatomy Data collected") - self.log.debug(json.dumps(context_data, indent=4)) + self.log.debug(json.dumps(anatomy_data, indent=4)) diff --git a/openpype/plugins/publish/collect_anatomy_object.py b/openpype/plugins/publish/collect_anatomy_object.py index b1415098b6..725cae2b14 100644 --- a/openpype/plugins/publish/collect_anatomy_object.py +++ b/openpype/plugins/publish/collect_anatomy_object.py @@ -1,29 +1,32 @@ """Collect Anatomy object. Requires: - os.environ -> AVALON_PROJECT + context -> projectName Provides: context -> anatomy (openpype.pipeline.anatomy.Anatomy) """ -import os + import pyblish.api -from openpype.pipeline import Anatomy +from openpype.pipeline import Anatomy, KnownPublishError class CollectAnatomyObject(pyblish.api.ContextPlugin): - """Collect Anatomy object into Context""" + """Collect Anatomy object into Context. + + Order offset could be changed to '-0.45'. + """ order = pyblish.api.CollectorOrder - 0.4 label = "Collect Anatomy Object" def process(self, context): - project_name = os.environ.get("AVALON_PROJECT") + project_name = context.data.get("projectName") if project_name is None: - raise AssertionError( - "Environment `AVALON_PROJECT` is not set." + raise KnownPublishError(( + "Project name is not set in 'projectName'." "Could not initialize project's Anatomy." - ) + )) context.data["anatomy"] = Anatomy(project_name) diff --git a/openpype/plugins/publish/collect_avalon_entities.py b/openpype/plugins/publish/collect_avalon_entities.py index 6cd0d136e8..3b05b6ae98 100644 --- a/openpype/plugins/publish/collect_avalon_entities.py +++ b/openpype/plugins/publish/collect_avalon_entities.py @@ -1,35 +1,38 @@ """Collect Anatomy and global anatomy data. Requires: - session -> AVALON_PROJECT, AVALON_ASSET + session -> AVALON_ASSET + context -> projectName Provides: - context -> projectEntity - project entity from database - context -> assetEntity - asset entity from database + context -> projectEntity - Project document from database. + context -> assetEntity - Asset document from database only if 'asset' is + set in context. """ import pyblish.api from openpype.client import get_project, get_asset_by_name -from openpype.pipeline import legacy_io +from openpype.pipeline import legacy_io, KnownPublishError class CollectAvalonEntities(pyblish.api.ContextPlugin): - """Collect Anatomy into Context""" + """Collect Anatomy into Context.""" order = pyblish.api.CollectorOrder - 0.1 label = "Collect Avalon Entities" def process(self, context): legacy_io.install() - project_name = legacy_io.Session["AVALON_PROJECT"] + project_name = context.data["projectName"] asset_name = legacy_io.Session["AVALON_ASSET"] task_name = legacy_io.Session["AVALON_TASK"] project_entity = get_project(project_name) - assert project_entity, ( - "Project '{0}' was not found." 
- ).format(project_name) + if not project_entity: + raise KnownPublishError( + "Project '{0}' was not found.".format(project_name) + ) self.log.debug("Collected Project \"{}\"".format(project_entity)) context.data["projectEntity"] = project_entity diff --git a/openpype/plugins/publish/collect_context_label.py b/openpype/plugins/publish/collect_context_label.py index 8cf71882aa..6cdeba8418 100644 --- a/openpype/plugins/publish/collect_context_label.py +++ b/openpype/plugins/publish/collect_context_label.py @@ -1,5 +1,6 @@ """ -Requires: +Optional: + context -> hostName (str) context -> currentFile (str) Provides: context -> label (str) @@ -16,16 +17,27 @@ class CollectContextLabel(pyblish.api.ContextPlugin): label = "Context Label" def process(self, context): + # Add ability to use custom context label + label = context.data.get("label") + if label: + self.log.debug("Context label is already set to \"{}\"".format( + label + )) + return - # Get last registered host - host = pyblish.api.registered_hosts()[-1] + host_name = context.data.get("hostName") + if not host_name: + host_name = pyblish.api.registered_hosts()[-1] + # Use host name as base for label + label = host_name.title() - # Get scene name from "currentFile" - path = context.data.get("currentFile") or "" - base = os.path.basename(path) + # Get scene name from "currentFile" and use basename as ending of label + path = context.data.get("currentFile") + if path: + label += " - {}".format(os.path.basename(path)) # Set label - label = "{host} - {scene}".format(host=host.title(), scene=base) - if host == "standalonepublisher": - label = host.title() context.data["label"] = label + self.log.debug("Context label is changed to \"{}\"".format( + label + )) diff --git a/openpype/plugins/publish/collect_current_context.py b/openpype/plugins/publish/collect_current_context.py new file mode 100644 index 0000000000..7e42700d7d --- /dev/null +++ b/openpype/plugins/publish/collect_current_context.py @@ -0,0 +1,47 @@ +""" +Provides: + context -> projectName (str) + context -> asset (str) + context -> task (str) +""" + +import pyblish.api +from openpype.pipeline import legacy_io + + +class CollectCurrentContext(pyblish.api.ContextPlugin): + """Collect project context into publish context data. + + Plugin does not override any value if is already set. + """ + + order = pyblish.api.CollectorOrder - 0.5 + label = "Collect Current context" + + def process(self, context): + # Make sure 'legacy_io' is intalled + legacy_io.install() + + # Check if values are already set + project_name = context.data.get("projectName") + asset_name = context.data.get("asset") + task_name = context.data.get("task") + if not project_name: + project_name = legacy_io.current_project() + context.data["projectName"] = project_name + + if not asset_name: + asset_name = legacy_io.Session.get("AVALON_ASSET") + context.data["asset"] = asset_name + + if not task_name: + task_name = legacy_io.Session.get("AVALON_TASK") + context.data["task"] = task_name + + # QUESTION should we be explicit with keys? 
(the same on instances) + # - 'asset' -> 'assetName' + # - 'task' -> 'taskName' + + self.log.info(( + "Collected project context\nProject: {}\nAsset: {}\nTask: {}" + ).format(project_name, asset_name, task_name)) diff --git a/openpype/plugins/publish/collect_datetime_data.py b/openpype/plugins/publish/collect_datetime_data.py index 1675ae1a98..b3178ca3d2 100644 --- a/openpype/plugins/publish/collect_datetime_data.py +++ b/openpype/plugins/publish/collect_datetime_data.py @@ -5,14 +5,14 @@ Provides: """ import pyblish.api -from openpype.api import config +from openpype.lib.dateutils import get_datetime_data class CollectDateTimeData(pyblish.api.ContextPlugin): - order = pyblish.api.CollectorOrder + order = pyblish.api.CollectorOrder - 0.5 label = "Collect DateTime data" def process(self, context): key = "datetimeData" if key not in context.data: - context.data[key] = config.get_datetime_data() + context.data[key] = get_datetime_data() diff --git a/openpype/plugins/publish/collect_from_create_context.py b/openpype/plugins/publish/collect_from_create_context.py index d2be633cbe..9236c698ed 100644 --- a/openpype/plugins/publish/collect_from_create_context.py +++ b/openpype/plugins/publish/collect_from_create_context.py @@ -19,6 +19,9 @@ class CollectFromCreateContext(pyblish.api.ContextPlugin): if not create_context: return + project_name = create_context.project_name + if project_name: + context.data["projectName"] = project_name for created_instance in create_context.instances: instance_data = created_instance.data_to_store() if instance_data["active"]: @@ -44,7 +47,7 @@ class CollectFromCreateContext(pyblish.api.ContextPlugin): "subset": subset, "asset": in_data["asset"], "task": in_data["task"], - "label": subset, + "label": in_data.get("label") or subset, "name": subset, "family": in_data["family"], "families": instance_families, diff --git a/openpype/plugins/publish/collect_hierarchy.py b/openpype/plugins/publish/collect_hierarchy.py index 91d5162d62..687397be8a 100644 --- a/openpype/plugins/publish/collect_hierarchy.py +++ b/openpype/plugins/publish/collect_hierarchy.py @@ -1,7 +1,5 @@ import pyblish.api -from openpype.pipeline import legacy_io - class CollectHierarchy(pyblish.api.ContextPlugin): """Collecting hierarchy from `parents`. 
@@ -20,7 +18,7 @@ class CollectHierarchy(pyblish.api.ContextPlugin): def process(self, context): temp_context = {} - project_name = legacy_io.Session["AVALON_PROJECT"] + project_name = context.data["projectName"] final_context = {} final_context[project_name] = {} final_context[project_name]['entity_type'] = 'Project' diff --git a/openpype/plugins/publish/collect_machine_name.py b/openpype/plugins/publish/collect_machine_name.py index 72ef68f8ed..8c25966031 100644 --- a/openpype/plugins/publish/collect_machine_name.py +++ b/openpype/plugins/publish/collect_machine_name.py @@ -11,7 +11,7 @@ import pyblish.api class CollectMachineName(pyblish.api.ContextPlugin): label = "Local Machine Name" - order = pyblish.api.CollectorOrder + order = pyblish.api.CollectorOrder - 0.5 hosts = ["*"] def process(self, context): diff --git a/openpype/plugins/publish/collect_modules.py b/openpype/plugins/publish/collect_modules.py index 2f6cb1ef0e..d76096bcd9 100644 --- a/openpype/plugins/publish/collect_modules.py +++ b/openpype/plugins/publish/collect_modules.py @@ -7,7 +7,7 @@ import pyblish.api class CollectModules(pyblish.api.ContextPlugin): """Collect OpenPype modules.""" - order = pyblish.api.CollectorOrder - 0.45 + order = pyblish.api.CollectorOrder - 0.5 label = "OpenPype Modules" def process(self, context): diff --git a/openpype/plugins/publish/collect_otio_frame_ranges.py b/openpype/plugins/publish/collect_otio_frame_ranges.py index c86e777850..40e89e29bc 100644 --- a/openpype/plugins/publish/collect_otio_frame_ranges.py +++ b/openpype/plugins/publish/collect_otio_frame_ranges.py @@ -23,7 +23,7 @@ class CollectOtioFrameRanges(pyblish.api.InstancePlugin): label = "Collect OTIO Frame Ranges" order = pyblish.api.CollectorOrder - 0.08 families = ["shot", "clip"] - hosts = ["resolve", "hiero", "flame"] + hosts = ["resolve", "hiero", "flame", "traypublisher"] def process(self, instance): # get basic variables diff --git a/openpype/plugins/publish/collect_otio_subset_resources.py b/openpype/plugins/publish/collect_otio_subset_resources.py index fc6a9b50f2..3387cd1176 100644 --- a/openpype/plugins/publish/collect_otio_subset_resources.py +++ b/openpype/plugins/publish/collect_otio_subset_resources.py @@ -116,13 +116,13 @@ class CollectOtioSubsetResources(pyblish.api.InstancePlugin): # check in two way if it is sequence if hasattr(otio.schema, "ImageSequenceReference"): # for OpenTimelineIO 0.13 and newer - if isinstance(media_ref, - otio.schema.ImageSequenceReference): - is_sequence = True - else: - # for OpenTimelineIO 0.12 and older - if metadata.get("padding"): + if isinstance( + media_ref, + otio.schema.ImageSequenceReference + ): is_sequence = True + elif metadata.get("padding"): + is_sequence = True self.log.info( "frame_start-frame_end: {}-{}".format(frame_start, frame_end)) @@ -139,11 +139,9 @@ class CollectOtioSubsetResources(pyblish.api.InstancePlugin): padding=media_ref.frame_zero_padding ) collection.indexes.update( - [i for i in range(a_frame_start_h, (a_frame_end_h + 1))]) + list(range(a_frame_start_h, (a_frame_end_h + 1))) + ) - self.log.debug(collection) - repre = self._create_representation( - frame_start, frame_end, collection=collection) else: # in case it is file sequence but not new OTIO schema # `ImageSequenceReference` @@ -152,9 +150,9 @@ class CollectOtioSubsetResources(pyblish.api.InstancePlugin): path, trimmed_media_range_h, metadata) self.staging_dir, collection = collection_data - self.log.debug(collection) - repre = self._create_representation( - frame_start, frame_end, 
collection=collection) + self.log.debug(collection) + repre = self._create_representation( + frame_start, frame_end, collection=collection) else: _trim = False dirname, filename = os.path.split(media_ref.target_url) @@ -198,7 +196,7 @@ class CollectOtioSubsetResources(pyblish.api.InstancePlugin): if kwargs.get("collection"): collection = kwargs.get("collection") - files = [f for f in collection] + files = list(collection) ext = collection.format("{tail}") representation_data.update({ "name": ext[1:], @@ -220,7 +218,5 @@ class CollectOtioSubsetResources(pyblish.api.InstancePlugin): }) if kwargs.get("trim") is True: - representation_data.update({ - "tags": ["trim"] - }) + representation_data["tags"] = ["trim"] return representation_data diff --git a/openpype/plugins/publish/collect_rendered_files.py b/openpype/plugins/publish/collect_rendered_files.py index 670e57ed10..8f8d0a5eeb 100644 --- a/openpype/plugins/publish/collect_rendered_files.py +++ b/openpype/plugins/publish/collect_rendered_files.py @@ -1,7 +1,7 @@ """Loads publishing context from json and continues in publish process. Requires: - anatomy -> context["anatomy"] *(pyblish.api.CollectorOrder - 0.11) + anatomy -> context["anatomy"] *(pyblish.api.CollectorOrder - 0.4) Provides: context, instances -> All data from previous publishing process. @@ -12,7 +12,7 @@ import json import pyblish.api -from openpype.pipeline import legacy_io +from openpype.pipeline import legacy_io, KnownPublishError class CollectRenderedFiles(pyblish.api.ContextPlugin): @@ -20,7 +20,12 @@ class CollectRenderedFiles(pyblish.api.ContextPlugin): This collector will try to find json files in provided `OPENPYPE_PUBLISH_DATA`. Those files _MUST_ share same context. + Note: + We should split this collector and move the part which handle reading + of file and it's context from session data before collect anatomy + and instance creation dependent on anatomy can be done here. """ + order = pyblish.api.CollectorOrder - 0.2 # Keep "filesequence" for backwards compatibility of older jobs targets = ["filesequence", "farm"] @@ -118,23 +123,20 @@ class CollectRenderedFiles(pyblish.api.ContextPlugin): def process(self, context): self._context = context - assert os.environ.get("OPENPYPE_PUBLISH_DATA"), ( - "Missing `OPENPYPE_PUBLISH_DATA`") + if not os.environ.get("OPENPYPE_PUBLISH_DATA"): + raise KnownPublishError("Missing `OPENPYPE_PUBLISH_DATA`") + + # QUESTION + # Do we support (or want support) multiple files in the variable? + # - what if they have different context? paths = os.environ["OPENPYPE_PUBLISH_DATA"].split(os.pathsep) - project_name = os.environ.get("AVALON_PROJECT") - if project_name is None: - raise AssertionError( - "Environment `AVALON_PROJECT` was not found." - "Could not set project `root` which may cause issues." 
- ) - - # TODO root filling should happen after collect Anatomy + # Using already collected Anatomy + anatomy = context.data["anatomy"] self.log.info("Getting root setting for project \"{}\"".format( - project_name + anatomy.project_name )) - anatomy = context.data["anatomy"] self.log.info("anatomy: {}".format(anatomy.roots)) try: session_is_set = False diff --git a/openpype/plugins/publish/collect_resources_path.py b/openpype/plugins/publish/collect_resources_path.py index 8bdf70b529..00f65b8b67 100644 --- a/openpype/plugins/publish/collect_resources_path.py +++ b/openpype/plugins/publish/collect_resources_path.py @@ -13,8 +13,6 @@ import copy import pyblish.api -from openpype.pipeline import legacy_io - class CollectResourcesPath(pyblish.api.InstancePlugin): """Generate directory path where the files and resources will be stored""" @@ -58,7 +56,6 @@ class CollectResourcesPath(pyblish.api.InstancePlugin): "effect", "staticMesh", "skeletalMesh" - ] def process(self, instance): @@ -86,11 +83,10 @@ class CollectResourcesPath(pyblish.api.InstancePlugin): else: # solve deprecated situation when `folder` key is not underneath # `publish` anatomy - project_name = legacy_io.Session["AVALON_PROJECT"] self.log.warning(( "Deprecation warning: Anatomy does not have set `folder`" " key underneath `publish` (in global of for project `{}`)." - ).format(project_name)) + ).format(anatomy.project_name)) file_path = anatomy_filled["publish"]["path"] # Directory diff --git a/openpype/plugins/publish/extract_hierarchy_avalon.py b/openpype/plugins/publish/extract_hierarchy_avalon.py index 8d447ba595..6b4e5f48c5 100644 --- a/openpype/plugins/publish/extract_hierarchy_avalon.py +++ b/openpype/plugins/publish/extract_hierarchy_avalon.py @@ -30,9 +30,15 @@ class ExtractHierarchyToAvalon(pyblish.api.ContextPlugin): self.log.debug("__ hierarchy_context: {}".format(hierarchy_context)) self.project = None - self.import_to_avalon(project_name, hierarchy_context) + self.import_to_avalon(context, project_name, hierarchy_context) - def import_to_avalon(self, project_name, input_data, parent=None): + def import_to_avalon( + self, + context, + project_name, + input_data, + parent=None, + ): for name in input_data: self.log.info("input_data[name]: {}".format(input_data[name])) entity_data = input_data[name] @@ -127,12 +133,19 @@ class ExtractHierarchyToAvalon(pyblish.api.ContextPlugin): if unarchive_entity is None: # Create entity if doesn"t exist entity = self.create_avalon_asset( - project_name, name, data + name, data ) else: # Unarchive if entity was archived entity = self.unarchive_entity(unarchive_entity, data) + # make sure all relative instances have correct avalon data + self._set_avalon_data_to_relative_instances( + context, + project_name, + entity + ) + if update_data: # Update entity data with input data legacy_io.update_many( @@ -142,7 +155,7 @@ class ExtractHierarchyToAvalon(pyblish.api.ContextPlugin): if "childs" in entity_data: self.import_to_avalon( - project_name, entity_data["childs"], entity + context, project_name, entity_data["childs"], entity ) def unarchive_entity(self, entity, data): @@ -159,20 +172,52 @@ class ExtractHierarchyToAvalon(pyblish.api.ContextPlugin): {"_id": entity["_id"]}, new_entity ) + return new_entity - def create_avalon_asset(self, project_name, name, data): - item = { + def create_avalon_asset(self, name, data): + asset_doc = { "schema": "openpype:asset-3.0", "name": name, "parent": self.project["_id"], "type": "asset", "data": data } - self.log.debug("Creating asset: 
{}".format(item)) - entity_id = legacy_io.insert_one(item).inserted_id + self.log.debug("Creating asset: {}".format(asset_doc)) + asset_doc["_id"] = legacy_io.insert_one(asset_doc).inserted_id - return get_asset_by_id(project_name, entity_id) + return asset_doc + + def _set_avalon_data_to_relative_instances( + self, + context, + project_name, + asset_doc + ): + for instance in context: + # Skip instance if has filled asset entity + if instance.data.get("assetEntity"): + continue + asset_name = asset_doc["name"] + inst_asset_name = instance.data["asset"] + + if asset_name == inst_asset_name: + instance.data["assetEntity"] = asset_doc + + # get parenting data + parents = asset_doc["data"].get("parents") or list() + + # equire only relative parent + parent_name = project_name + if parents: + parent_name = parents[-1] + + # update avalon data on instance + instance.data["anatomyData"].update({ + "hierarchy": "/".join(parents), + "task": {}, + "parent": parent_name + }) def _get_active_assets(self, context): """ Returns only asset dictionary. diff --git a/openpype/plugins/publish/extract_otio_audio_tracks.py b/openpype/plugins/publish/extract_otio_audio_tracks.py index 00c1748cdc..ed30a2f0f5 100644 --- a/openpype/plugins/publish/extract_otio_audio_tracks.py +++ b/openpype/plugins/publish/extract_otio_audio_tracks.py @@ -57,15 +57,7 @@ class ExtractOtioAudioTracks(pyblish.api.ContextPlugin): audio_inputs.insert(0, empty) # create cmd - cmd = path_to_subprocess_arg(self.ffmpeg_path) + " " - cmd += self.create_cmd(audio_inputs) - cmd += path_to_subprocess_arg(audio_temp_fpath) - - # run subprocess - self.log.debug("Executing: {}".format(cmd)) - openpype.api.run_subprocess( - cmd, shell=True, logger=self.log - ) + self.mix_audio(audio_inputs, audio_temp_fpath) # remove empty os.remove(empty["mediaPath"]) @@ -245,46 +237,80 @@ class ExtractOtioAudioTracks(pyblish.api.ContextPlugin): "durationSec": max_duration_sec } - def create_cmd(self, inputs): + def mix_audio(self, audio_inputs, audio_temp_fpath): """Creating multiple input cmd string Args: - inputs (list): list of input dicts. Order mater. + audio_inputs (list): list of input dicts. Order mater. 
Returns: str: the command body - """ + + longest_input = 0 + for audio_input in audio_inputs: + audio_len = audio_input["durationSec"] + if audio_len > longest_input: + longest_input = audio_len + # create cmd segments - _inputs = "" - _filters = "-filter_complex \"" - _channels = "" - for index, input in enumerate(inputs): - input_format = input.copy() - input_format.update({"i": index}) - input_format["mediaPath"] = path_to_subprocess_arg( - input_format["mediaPath"] + input_args = [] + filters = [] + tag_names = [] + for index, audio_input in enumerate(audio_inputs): + input_args.extend([ + "-ss", str(audio_input["startSec"]), + "-t", str(audio_input["durationSec"]), + "-i", audio_input["mediaPath"] + ]) + + # Output tag of a filtered audio input + tag_name = "[r{}]".format(index) + tag_names.append(tag_name) + # Delay in audio by delay in item + filters.append("[{}]adelay={}:all=1{}".format( + index, audio_input["delayMilSec"], tag_name + )) + + # Mixing filter + # - dropout transition (when audio will get loader) is set to be + # higher then any input audio item + # - volume is set to number of inputs - each mix adds 1/n volume + # where n is input inder (to get more info read ffmpeg docs and + # send a giftcard to contributor) + filters.append( + ( + "{}amix=inputs={}:duration=first:" + "dropout_transition={},volume={}[a]" + ).format( + "".join(tag_names), + len(audio_inputs), + (longest_input * 1000) + 1000, + len(audio_inputs), ) + ) - _inputs += ( - "-ss {startSec} " - "-t {durationSec} " - "-i {mediaPath} " - ).format(**input_format) + # Store filters to a file (separated by ',') + # - this is to avoid "too long" command issue in ffmpeg + with tempfile.NamedTemporaryFile( + delete=False, mode="w", suffix=".txt" + ) as tmp_file: + filters_tmp_filepath = tmp_file.name + tmp_file.write(",".join(filters)) - _filters += "[{i}]adelay={delayMilSec}:all=1[r{i}]; ".format( - **input_format) - _channels += "[r{}]".format(index) + args = [self.ffmpeg_path] + args.extend(input_args) + args.extend([ + "-filter_complex_script", filters_tmp_filepath, + "-map", "[a]" + ]) + args.append(audio_temp_fpath) - # merge all cmd segments together - cmd = _inputs + _filters + _channels - cmd += str( - "amix=inputs={inputs}:duration=first:" - "dropout_transition=1000,volume={inputs}[a]\" " - ).format(inputs=len(inputs)) - cmd += "-map \"[a]\" " + # run subprocess + self.log.debug("Executing: {}".format(args)) + openpype.api.run_subprocess(args, logger=self.log) - return cmd + os.remove(filters_tmp_filepath) def create_temp_file(self, name): """Create temp wav file diff --git a/openpype/plugins/publish/extract_otio_file.py b/openpype/plugins/publish/extract_otio_file.py index 3bd217d5d4..4d310ce109 100644 --- a/openpype/plugins/publish/extract_otio_file.py +++ b/openpype/plugins/publish/extract_otio_file.py @@ -12,7 +12,7 @@ class ExtractOTIOFile(openpype.api.Extractor): label = "Extract OTIO file" order = pyblish.api.ExtractorOrder - 0.45 families = ["workfile"] - hosts = ["resolve", "hiero"] + hosts = ["resolve", "hiero", "traypublisher"] def process(self, instance): # create representation data diff --git a/openpype/plugins/publish/extract_review.py b/openpype/plugins/publish/extract_review.py index 1b6e2a1d61..27117510b2 100644 --- a/openpype/plugins/publish/extract_review.py +++ b/openpype/plugins/publish/extract_review.py @@ -360,6 +360,7 @@ class ExtractReview(pyblish.api.InstancePlugin): os.unlink(f) new_repre.update({ + "fps": temp_data["fps"], "name": "{}_{}".format(output_name, output_ext), 
"outputName": output_name, "outputDef": output_def, @@ -1209,7 +1210,6 @@ class ExtractReview(pyblish.api.InstancePlugin): # Get instance data pixel_aspect = temp_data["pixel_aspect"] - if reformat_in_baking: self.log.debug(( "Using resolution from input. It is already " @@ -1229,6 +1229,10 @@ class ExtractReview(pyblish.api.InstancePlugin): # - settings value can't have None but has value of 0 output_width = output_def.get("width") or output_width or None output_height = output_def.get("height") or output_height or None + # Force to use input resolution if output resolution was not defined + # in settings. Resolution from instance is not used when + # 'use_input_res' is set to 'True'. + use_input_res = False # Overscal color overscan_color_value = "black" @@ -1240,6 +1244,17 @@ class ExtractReview(pyblish.api.InstancePlugin): ) self.log.debug("Overscan color: `{}`".format(overscan_color_value)) + # Scale input to have proper pixel aspect ratio + # - scale width by the pixel aspect ratio + scale_pixel_aspect = output_def.get("scale_pixel_aspect", True) + if scale_pixel_aspect and pixel_aspect != 1: + # Change input width after pixel aspect + input_width = int(input_width * pixel_aspect) + use_input_res = True + filters.append(( + "scale={}x{}:flags=lanczos".format(input_width, input_height) + )) + # Convert overscan value video filters overscan_crop = output_def.get("overscan_crop") overscan = OverscanCrop( @@ -1250,13 +1265,10 @@ class ExtractReview(pyblish.api.InstancePlugin): # resolution by it's values if overscan_crop_filters: filters.extend(overscan_crop_filters) + # Change input resolution after overscan crop input_width = overscan.width() input_height = overscan.height() - # Use output resolution as inputs after cropping to skip usage of - # instance data resolution - if output_width is None or output_height is None: - output_width = input_width - output_height = input_height + use_input_res = True # Make sure input width and height is not an odd number input_width_is_odd = bool(input_width % 2 != 0) @@ -1282,8 +1294,10 @@ class ExtractReview(pyblish.api.InstancePlugin): self.log.debug("input_width: `{}`".format(input_width)) self.log.debug("input_height: `{}`".format(input_height)) - # Use instance resolution if output definition has not set it. 
- if output_width is None or output_height is None: + # Use instance resolution if output definition has not set it + # - use instance resolution only if there were not scale changes + # that may massivelly affect output 'use_input_res' + if not use_input_res and output_width is None or output_height is None: output_width = temp_data["resolution_width"] output_height = temp_data["resolution_height"] @@ -1325,7 +1339,6 @@ class ExtractReview(pyblish.api.InstancePlugin): output_width == input_width and output_height == input_height and not letter_box_enabled - and pixel_aspect == 1 ): self.log.debug( "Output resolution is same as input's" @@ -1335,66 +1348,16 @@ class ExtractReview(pyblish.api.InstancePlugin): new_repre["resolutionHeight"] = input_height return filters - # defining image ratios - input_res_ratio = ( - (float(input_width) * pixel_aspect) / input_height - ) - output_res_ratio = float(output_width) / float(output_height) - self.log.debug("input_res_ratio: `{}`".format(input_res_ratio)) - self.log.debug("output_res_ratio: `{}`".format(output_res_ratio)) - - # Round ratios to 2 decimal places for comparing - input_res_ratio = round(input_res_ratio, 2) - output_res_ratio = round(output_res_ratio, 2) - - # get scale factor - scale_factor_by_width = ( - float(output_width) / (input_width * pixel_aspect) - ) - scale_factor_by_height = ( - float(output_height) / input_height - ) - - self.log.debug( - "scale_factor_by_with: `{}`".format(scale_factor_by_width) - ) - self.log.debug( - "scale_factor_by_height: `{}`".format(scale_factor_by_height) - ) - # scaling none square pixels and 1920 width - if ( - input_height != output_height - or input_width != output_width - or pixel_aspect != 1 - ): - if input_res_ratio < output_res_ratio: - self.log.debug( - "Input's resolution ratio is lower then output's" - ) - width_scale = int(input_width * scale_factor_by_height) - width_half_pad = int((output_width - width_scale) / 2) - height_scale = output_height - height_half_pad = 0 - else: - self.log.debug("Input is heigher then output") - width_scale = output_width - width_half_pad = 0 - height_scale = int(input_height * scale_factor_by_width) - height_half_pad = int((output_height - height_scale) / 2) - - self.log.debug("width_scale: `{}`".format(width_scale)) - self.log.debug("width_half_pad: `{}`".format(width_half_pad)) - self.log.debug("height_scale: `{}`".format(height_scale)) - self.log.debug("height_half_pad: `{}`".format(height_half_pad)) - + if input_height != output_height or input_width != output_width: filters.extend([ - "scale={}x{}:flags=lanczos".format( - width_scale, height_scale - ), - "pad={}:{}:{}:{}:{}".format( + ( + "scale={}x{}" + ":flags=lanczos" + ":force_original_aspect_ratio=decrease" + ).format(output_width, output_height), + "pad={}:{}:(ow-iw)/2:(oh-ih)/2:{}".format( output_width, output_height, - width_half_pad, height_half_pad, overscan_color_value ), "setsar=1" @@ -1496,6 +1459,8 @@ class ExtractReview(pyblish.api.InstancePlugin): output = -1 regexes = self.compile_list_of_regexes(in_list) for regex in regexes: + if not value: + continue if re.match(regex, value): output = 1 break diff --git a/openpype/plugins/publish/extract_review_slate.py b/openpype/plugins/publish/extract_review_slate.py index 28685c2e90..69043ee261 100644 --- a/openpype/plugins/publish/extract_review_slate.py +++ b/openpype/plugins/publish/extract_review_slate.py @@ -285,36 +285,34 @@ class ExtractReviewSlate(openpype.api.Extractor): audio_channels, audio_sample_rate, audio_channel_layout, + 
input_frame_rate ) # replace slate with silent slate for concat slate_v_path = slate_silent_path - # create ffmpeg concat text file path - conc_text_file = input_file.replace(ext, "") + "_concat" + ".txt" - conc_text_path = os.path.join( - os.path.normpath(stagingdir), conc_text_file) - _remove_at_end.append(conc_text_path) - self.log.debug("__ conc_text_path: {}".format(conc_text_path)) - - new_line = "\n" - with open(conc_text_path, "w") as conc_text_f: - conc_text_f.writelines([ - "file {}".format( - slate_v_path.replace("\\", "/")), - new_line, - "file {}".format(input_path.replace("\\", "/")) - ]) - - # concat slate and videos together + # concat slate and videos together with concat filter + # this will reencode the output + if input_audio: + fmap = [ + "-filter_complex", + "[0:v] [0:a] [1:v] [1:a] concat=n=2:v=1:a=1 [v] [a]", + "-map", '[v]', + "-map", '[a]' + ] + else: + fmap = [ + "-filter_complex", + "[0:v] [1:v] concat=n=2:v=1:a=0 [v]", + "-map", '[v]' + ] concat_args = [ ffmpeg_path, "-y", - "-f", "concat", - "-safe", "0", - "-i", conc_text_path, - "-c", "copy", + "-i", slate_v_path, + "-i", input_path, ] + concat_args.extend(fmap) if offset_timecode: concat_args.extend(["-timecode", offset_timecode]) # NOTE: Added because of OP Atom demuxers @@ -322,12 +320,18 @@ class ExtractReviewSlate(openpype.api.Extractor): # - keep format of output if format_args: concat_args.extend(format_args) + + if codec_args: + concat_args.extend(codec_args) + # Use arguments from ffmpeg preset source_ffmpeg_cmd = repre.get("ffmpeg_cmd") if source_ffmpeg_cmd: copy_args = ( "-metadata", "-metadata:s:v:0", + "-b:v", + "-b:a", ) args = source_ffmpeg_cmd.split(" ") for indx, arg in enumerate(args): @@ -335,12 +339,14 @@ class ExtractReviewSlate(openpype.api.Extractor): concat_args.append(arg) # assumes arg has one parameter concat_args.append(args[indx + 1]) + # add final output path concat_args.append(output_path) # ffmpeg concat subprocess self.log.debug( - "Executing concat: {}".format(" ".join(concat_args)) + "Executing concat filter: {}".format + (" ".join(concat_args)) ) openpype.api.run_subprocess( concat_args, logger=self.log @@ -488,9 +494,10 @@ class ExtractReviewSlate(openpype.api.Extractor): audio_channels, audio_sample_rate, audio_channel_layout, + input_frame_rate ): # Get duration of one frame in micro seconds - items = audio_sample_rate.split("/") + items = input_frame_rate.split("/") if len(items) == 1: one_frame_duration = 1.0 / float(items[0]) elif len(items) == 2: diff --git a/openpype/plugins/publish/extract_thumbnail.py b/openpype/plugins/publish/extract_thumbnail.py index 7933595b89..14b43beae8 100644 --- a/openpype/plugins/publish/extract_thumbnail.py +++ b/openpype/plugins/publish/extract_thumbnail.py @@ -1,4 +1,5 @@ import os +import tempfile import pyblish.api from openpype.lib import ( @@ -8,8 +9,6 @@ from openpype.lib import ( run_subprocess, path_to_subprocess_arg, - - execute, ) @@ -20,7 +19,7 @@ class ExtractThumbnail(pyblish.api.InstancePlugin): order = pyblish.api.ExtractorOrder families = [ "imagesequence", "render", "render2d", "prerender", - "source", "plate", "take" + "source", "clip", "take" ] hosts = ["shell", "fusion", "resolve", "traypublisher"] enabled = False @@ -29,7 +28,27 @@ class ExtractThumbnail(pyblish.api.InstancePlugin): ffmpeg_args = None def process(self, instance): - self.log.info("subset {}".format(instance.data['subset'])) + subset_name = instance.data["subset"] + instance_repres = instance.data.get("representations") + if not instance_repres: + 
self.log.debug((
+                "Instance {} does not have representations. Skipping"
+            ).format(subset_name))
+            return
+
+        self.log.info(
+            "Processing instance with subset name {}".format(subset_name)
+        )
+
+        # Skip if instance has 'review' key in data set to 'False'
+        if not self._is_review_instance(instance):
+            self.log.info("Skipping - no review set on instance.")
+            return
+
+        # Check if already has thumbnail created
+        if self._already_has_thumbnail(instance_repres):
+            self.log.info("Thumbnail representation already present.")
+            return
 
         # skip crypto passes.
         # TODO: This is just a quick fix and has its own side-effects - it is
@@ -37,20 +56,29 @@ class ExtractThumbnail(pyblish.api.InstancePlugin):
         # This must be solved properly, maybe using tags on
         # representation that can be determined much earlier and
         # with better precision.
-        if 'crypto' in instance.data['subset'].lower():
+        if "crypto" in subset_name.lower():
             self.log.info("Skipping crypto passes.")
             return
 
-        # Skip if review not set.
-        if not instance.data.get("review", True):
-            self.log.info("Skipping - no review set on instance.")
-            return
-
-        if self._already_has_thumbnail(instance):
-            self.log.info("Thumbnail representation already present.")
-            return
-
         filtered_repres = self._get_filtered_repres(instance)
+        if not filtered_repres:
+            self.log.info((
+                "Instance doesn't have representations"
+                " that can be used as a source for thumbnail. Skipping"
+            ))
+            return
+
+        # Create temp directory for thumbnail
+        # - this is to avoid "override" of source file
+        dst_staging = tempfile.mkdtemp(prefix="pyblish_tmp_")
+        self.log.debug(
+            "Create temp directory {} for thumbnail".format(dst_staging)
+        )
+        # Store new staging to cleanup paths
+        instance.context.data["cleanupFullPaths"].append(dst_staging)
+
+        thumbnail_created = False
+        oiio_supported = is_oiio_supported()
         for repre in filtered_repres:
             repre_files = repre["files"]
             if not isinstance(repre_files, (list, tuple)):
@@ -59,41 +87,43 @@ class ExtractThumbnail(pyblish.api.InstancePlugin):
                 file_index = int(float(len(repre_files)) * 0.5)
                 input_file = repre_files[file_index]
 
-            stagingdir = os.path.normpath(repre["stagingDir"])
-
-            full_input_path = os.path.join(stagingdir, input_file)
+            src_staging = os.path.normpath(repre["stagingDir"])
+            full_input_path = os.path.join(src_staging, input_file)
             self.log.info("input {}".format(full_input_path))
 
             filename = os.path.splitext(input_file)[0]
-            if not filename.endswith('.'):
-                filename += "."
-            jpeg_file = filename + "jpg"
-            full_output_path = os.path.join(stagingdir, jpeg_file)
+            jpeg_file = filename + ".jpg"
+            full_output_path = os.path.join(dst_staging, jpeg_file)
 
-            thumbnail_created = False
-            # Try to use FFMPEG if OIIO is not supported (for cases when
-            # oiiotool isn't available)
-            if not is_oiio_supported():
-                thumbnail_created = self.create_thumbnail_ffmpeg(full_input_path, full_output_path)  # noqa
-            else:
+            if oiio_supported:
+                self.log.info("Trying to convert with OIIO")
                 # If the input can read by OIIO then use OIIO method for
                 # conversion otherwise use ffmpeg
-                self.log.info("Trying to convert with OIIO")  # noqa
-                thumbnail_created = self.create_thumbnail_oiio(full_input_path, full_output_path)  # noqa
+                thumbnail_created = self.create_thumbnail_oiio(
+                    full_input_path, full_output_path
+                )
 
-            if not thumbnail_created:
-                self.log.info("Converting with FFMPEG because input can't be read by OIIO.")  # noqa
-                thumbnail_created = self.create_thumbnail_ffmpeg(full_input_path, full_output_path)  # noqa
-
-            # Skip the rest of the process if the thumbnail wasn't created
+            # Try to use FFMPEG if OIIO is not supported or for cases when
+            # oiiotool isn't available
             if not thumbnail_created:
-                self.log.warning("Thumbanil has not been created.")
-                return
+                if oiio_supported:
+                    self.log.info((
+                        "Converting with FFMPEG because input"
+                        " can't be read by OIIO."
+                    ))
+
+                thumbnail_created = self.create_thumbnail_ffmpeg(
+                    full_input_path, full_output_path
+                )
+
+            # Skip representation and try next one if wasn't created
+            if not thumbnail_created:
+                continue
 
             new_repre = {
                 "name": "thumbnail",
                 "ext": "jpg",
                 "files": jpeg_file,
-                "stagingDir": stagingdir,
+                "stagingDir": dst_staging,
                 "thumbnail": True,
                 "tags": ["thumbnail"]
             }
@@ -106,12 +136,21 @@ class ExtractThumbnail(pyblish.api.InstancePlugin):
             # There is no need to create more then one thumbnail
             break
 
-    def _already_has_thumbnail(self, instance):
-        for repre in instance.data.get("representations", []):
+        if not thumbnail_created:
+            self.log.warning("Thumbnail has not been created.")
+
+    def _is_review_instance(self, instance):
+        # TODO: We should probably handle "not creating" of thumbnail
+        # other way than checking for "review" key on instance data?
+        if instance.data.get("review", True):
+            return True
+        return False
+
+    def _already_has_thumbnail(self, repres):
+        for repre in repres:
             self.log.info("repre {}".format(repre))
             if repre["name"] == "thumbnail":
                 return True
-
         return False
 
     def _get_filtered_repres(self, instance):
@@ -136,12 +175,12 @@ class ExtractThumbnail(pyblish.api.InstancePlugin):
     def create_thumbnail_oiio(self, src_path, dst_path):
         self.log.info("outputting {}".format(dst_path))
         oiio_tool_path = get_oiio_tools_path()
-        oiio_cmd = [oiio_tool_path, "-a",
-                    src_path, "-o",
-                    dst_path
-                    ]
-        subprocess_exr = " ".join(oiio_cmd)
-        self.log.info(f"running: {subprocess_exr}")
+        oiio_cmd = [
+            oiio_tool_path,
+            "-a", src_path,
+            "-o", dst_path
+        ]
+        self.log.info("running: {}".format(" ".join(oiio_cmd)))
         try:
             run_subprocess(oiio_cmd, logger=self.log)
             return True
diff --git a/openpype/hosts/standalonepublisher/plugins/publish/extract_trim_video_audio.py b/openpype/plugins/publish/extract_trim_video_audio.py
similarity index 74%
rename from openpype/hosts/standalonepublisher/plugins/publish/extract_trim_video_audio.py
rename to openpype/plugins/publish/extract_trim_video_audio.py
index 51dc84e9a2..06817c4b5a 100644
--- a/openpype/hosts/standalonepublisher/plugins/publish/extract_trim_video_audio.py
+++ b/openpype/plugins/publish/extract_trim_video_audio.py
@@ -14,7 +14,7 @@ class ExtractTrimVideoAudio(openpype.api.Extractor):
     # must be before `ExtractThumbnailSP`
     order = pyblish.api.ExtractorOrder - 0.01
     label = "Extract Trim Video/Audio"
-    hosts = ["standalonepublisher"]
+    hosts = ["standalonepublisher", "traypublisher"]
     families = ["clip", "trimming"]
 
     # make sure it is enabled only if at least both families are available
@@ -40,6 +40,21 @@ class ExtractTrimVideoAudio(openpype.api.Extractor):
         fps = instance.data["fps"]
         video_file_path = instance.data["editorialSourcePath"]
         extensions = instance.data.get("extensions", ["mov"])
+        output_file_type = instance.data.get("outputFileType")
+        reviewable = "review" in instance.data["families"]
+
+        frame_start = int(instance.data["frameStart"])
+        frame_end = int(instance.data["frameEnd"])
+        handle_start = instance.data["handleStart"]
+        handle_end = instance.data["handleEnd"]
+
+        clip_start_h = float(instance.data["clipInH"])
+        _dur = instance.data["clipDuration"]
+        handle_dur = (handle_start + handle_end)
+        clip_dur_h = float(_dur + handle_dur)
+
+        if output_file_type:
+            extensions = [output_file_type]
 
         for ext in extensions:
             self.log.info("Processing ext: `{}`".format(ext))
@@ -49,16 +64,10 @@ class ExtractTrimVideoAudio(openpype.api.Extractor):
             clip_trimed_path = os.path.join(
                 staging_dir, instance.data["name"] + ext)
 
-            # # check video file metadata
-            # input_data = plib.get_ffprobe_streams(video_file_path)[0]
-            # self.log.debug(f"__ input_data: `{input_data}`")
-
-            start = float(instance.data["clipInH"])
-            dur = float(instance.data["clipDurationH"])
 
             if ext == ".wav":
                 # offset time as ffmpeg is having bug
-                start += 0.5
+                clip_start_h += 0.5
                 # remove "review" from families
                 instance.data["families"] = [
                     fml for fml in instance.data["families"]
@@ -67,9 +76,9 @@ class ExtractTrimVideoAudio(openpype.api.Extractor):
 
             ffmpeg_args = [
                 ffmpeg_path,
-                "-ss", str(start / fps),
+                "-ss", str(clip_start_h / fps),
                 "-i", video_file_path,
-                "-t", str(dur / fps)
+                "-t", str(clip_dur_h / fps)
             ]
             if ext in [".mov", ".mp4"]:
                 ffmpeg_args.extend([
@@ -98,14 +107,15 @@ class ExtractTrimVideoAudio(openpype.api.Extractor):
                 "ext": ext[1:],
                 "files": os.path.basename(clip_trimed_path),
                 "stagingDir": staging_dir,
-                "frameStart": int(instance.data["frameStart"]),
-                "frameEnd": int(instance.data["frameEnd"]),
-                "frameStartFtrack": int(instance.data["frameStartH"]),
-                "frameEndFtrack": int(instance.data["frameEndH"]),
+                "frameStart": frame_start,
+                "frameEnd": frame_end,
+                "frameStartFtrack": frame_start - handle_start,
+                "frameEndFtrack": frame_end + handle_end,
                 "fps": fps,
+                "tags": []
             }
 
-            if ext in [".mov", ".mp4"]:
+            if ext in [".mov", ".mp4"] and reviewable:
                 repre.update({
                     "thumbnail": True,
                     "tags": ["review", "ftrackreview", "delete"]})
diff --git a/openpype/plugins/publish/help/validate_containers.xml b/openpype/plugins/publish/help/validate_containers.xml
new file mode 100644
index 0000000000..5d18bb4c19
--- /dev/null
+++ b/openpype/plugins/publish/help/validate_containers.xml
@@ -0,0 +1,23 @@
+
+
+
+Not up-to-date assets
+
+## Outdated containers found
+
+Scene contains one or more outdated loaded containers, e.g. versions loaded into the scene by the Loader are not the latest ones.
+
+### How to repair?
+
+Use 'Scene Inventory' and update all highlighted old containers to the latest version OR
+ refresh Publish and switch the 'Validate Containers' toggle on the 'Options' tab.
+
+ WARNING: Skipping this validator will result in publishing (and probably rendering) old versions of loaded assets.
+
+
+### __Detailed Info__ (optional)
+
+This validates whether you're working with the latest versions of published content loaded into your scene. This protects you from using outdated versions of an asset.
+
+
+
\ No newline at end of file
diff --git a/openpype/plugins/publish/integrate.py b/openpype/plugins/publish/integrate.py
new file mode 100644
index 0000000000..f99c718f8a
--- /dev/null
+++ b/openpype/plugins/publish/integrate.py
@@ -0,0 +1,907 @@
+import os
+import logging
+import sys
+import copy
+import clique
+import six
+
+from openpype.client.operations import (
+    OperationsSession,
+    new_subset_document,
+    new_version_doc,
+    new_representation_doc,
+    prepare_subset_update_data,
+    prepare_version_update_data,
+    prepare_representation_update_data,
+)
+from bson.objectid import ObjectId
+import pyblish.api
+
+from openpype.client import (
+    get_representations,
+    get_subset_by_name,
+    get_version_by_name,
+)
+from openpype.lib import source_hash
+from openpype.lib.profiles_filtering import filter_profiles
+from openpype.lib.file_transaction import FileTransaction
+from openpype.pipeline import legacy_io
+from openpype.pipeline.publish import KnownPublishError
+
+log = logging.getLogger(__name__)
+
+
+def get_instance_families(instance):
+    """Get all families of the instance"""
+    # todo: move this to lib?
+    family = instance.data.get("family")
+    families = []
+    if family:
+        families.append(family)
+
+    for _family in (instance.data.get("families") or []):
+        if _family not in families:
+            families.append(_family)
+
+    return families
+
+
+def get_frame_padded(frame, padding):
+    """Return frame number as string with `padding` amount of padded zeros"""
+    return "{frame:0{padding}d}".format(padding=padding, frame=frame)
+
+
+class IntegrateAsset(pyblish.api.InstancePlugin):
+    """Register publish in the database and transfer files to destinations.
+
+    Steps:
+        1) Register the subset and version
+        2) Transfer the representation files to the destination
+        3) Register the representation
+
+    Requires:
+        instance.data['representations'] - must be a list and each member
+        must be a dictionary with the following data:
+            'files': list of filenames for sequence, string for single file.
+                Only the filename is allowed, without the folder path.
+ 'stagingDir': "path/to/folder/with/files" + 'name': representation name (usually the same as extension) + 'ext': file extension + optional data + "frameStart" + "frameEnd" + 'fps' + "data": additional metadata for each representation. + """ + + label = "Integrate Asset" + order = pyblish.api.IntegratorOrder + families = ["workfile", + "pointcache", + "camera", + "animation", + "model", + "mayaAscii", + "mayaScene", + "setdress", + "layout", + "ass", + "vdbcache", + "scene", + "vrayproxy", + "vrayscene_layer", + "render", + "prerender", + "imagesequence", + "review", + "rendersetup", + "rig", + "plate", + "look", + "audio", + "yetiRig", + "yeticache", + "nukenodes", + "gizmo", + "source", + "matchmove", + "image", + "assembly", + "fbx", + "textures", + "action", + "harmony.template", + "harmony.palette", + "editorial", + "background", + "camerarig", + "redshiftproxy", + "effect", + "xgen", + "hda", + "usd", + "staticMesh", + "skeletalMesh", + "mvLook", + "mvUsd", + "mvUsdComposition", + "mvUsdOverride", + "simpleUnrealTexture" + ] + + default_template_name = "publish" + + # Representation context keys that should always be written to + # the database even if not used by the destination template + db_representation_context_keys = [ + "project", "asset", "task", "subset", "version", "representation", + "family", "hierarchy", "username", "output" + ] + skip_host_families = [] + + def process(self, instance): + if self._temp_skip_instance_by_settings(instance): + return + + # Mark instance as processed for legacy integrator + instance.data["processedWithNewIntegrator"] = True + + # Instance should be integrated on a farm + if instance.data.get("farm"): + self.log.info( + "Instance is marked to be processed on farm. Skipping") + return + + filtered_repres = self.filter_representations(instance) + # Skip instance if there are not representations to integrate + # all representations should not be integrated + if not filtered_repres: + self.log.warning(( + "Skipping, there are no representations" + " to integrate for instance {}" + ).format(instance.data["family"])) + return + + file_transactions = FileTransaction(log=self.log) + try: + self.register(instance, file_transactions, filtered_repres) + except Exception: + # clean destination + # todo: preferably we'd also rollback *any* changes to the database + file_transactions.rollback() + self.log.critical("Error when registering", exc_info=True) + six.reraise(*sys.exc_info()) + + # Finalizing can't rollback safely so no use for moving it to + # the try, except. + file_transactions.finalize() + + def _temp_skip_instance_by_settings(self, instance): + """Decide if instance will be processed with new or legacy integrator. + + This is temporary solution until we test all usecases with new (this) + integrator plugin. 
+        """
+
+        host_name = instance.context.data["hostName"]
+        instance_family = instance.data["family"]
+        instance_families = set(instance.data.get("families") or [])
+
+        skip = False
+        for item in self.skip_host_families:
+            if host_name not in item["host"]:
+                continue
+
+            families = set(item["families"])
+            if instance_family in families:
+                skip = True
+                break
+
+            for family in instance_families:
+                if family in families:
+                    skip = True
+                    break
+
+            if skip:
+                break
+
+        if skip:
+            self.log.debug("Instance is marked to be skipped by settings.")
+        return skip
+
+    def filter_representations(self, instance):
+        # Prepare representations that should be integrated
+        repres = instance.data.get("representations")
+        # Raise error if instance doesn't have any representations
+        if not repres:
+            raise KnownPublishError(
+                "Instance {} has no representations to integrate".format(
+                    instance.data["family"]
+                )
+            )
+
+        # Validate type of stored representations
+        if not isinstance(repres, (list, tuple)):
+            raise TypeError(
+                "Instance 'files' must be a list, got: {0} {1}".format(
+                    str(type(repres)), str(repres)
+                )
+            )
+
+        # Filter representations
+        filtered_repres = []
+        for repre in repres:
+            if "delete" in repre.get("tags", []):
+                continue
+            filtered_repres.append(repre)
+
+        return filtered_repres
+
+    def register(self, instance, file_transactions, filtered_repres):
+        project_name = legacy_io.active_project()
+
+        instance_stagingdir = instance.data.get("stagingDir")
+        if not instance_stagingdir:
+            self.log.info((
+                "{0} is missing reference to staging directory."
+                " Will try to get it from representation."
+            ).format(instance))
+
+        else:
+            self.log.debug(
+                "Establishing staging directory "
+                "@ {0}".format(instance_stagingdir)
+            )
+
+        template_name = self.get_template_name(instance)
+
+        op_session = OperationsSession()
+        subset = self.prepare_subset(
+            instance, op_session, project_name
+        )
+        version = self.prepare_version(
+            instance, op_session, subset, project_name
+        )
+        instance.data["versionEntity"] = version
+
+        # Get existing representations (if any)
+        existing_repres_by_name = {
+            repre_doc["name"].lower(): repre_doc
+            for repre_doc in get_representations(
+                project_name,
+                version_ids=[version["_id"]],
+                fields=["_id", "name"]
+            )
+        }
+
+        # Prepare all representations
+        prepared_representations = []
+        for repre in filtered_repres:
+            # todo: reduce/simplify what is returned from this function
+            prepared = self.prepare_representation(
+                repre,
+                template_name,
+                existing_repres_by_name,
+                version,
+                instance_stagingdir,
+                instance)
+
+            for src, dst in prepared["transfers"]:
+                # todo: add support for hardlink transfers
+                file_transactions.add(src, dst)
+
+            prepared_representations.append(prepared)
+
+        # Each instance can also have pre-defined transfers not explicitly
+        # part of a representation - like texture resources used by a
+        # .ma representation. Those destination paths are pre-defined, etc.
+        # todo: should we move or simplify this logic?
+ resource_destinations = set() + for src, dst in instance.data.get("transfers", []): + file_transactions.add(src, dst, mode=FileTransaction.MODE_COPY) + resource_destinations.add(os.path.abspath(dst)) + + for src, dst in instance.data.get("hardlinks", []): + file_transactions.add(src, dst, mode=FileTransaction.MODE_HARDLINK) + resource_destinations.add(os.path.abspath(dst)) + + # Bulk write to the database + # We write the subset and version to the database before the File + # Transaction to reduce the chances of another publish trying to + # publish to the same version number since that chance can greatly + # increase if the file transaction takes a long time. + op_session.commit() + + self.log.info("Subset {subset[name]} and Version {version[name]} " + "written to database..".format(subset=subset, + version=version)) + + # Process all file transfers of all integrations now + self.log.debug("Integrating source files to destination ...") + file_transactions.process() + self.log.debug( + "Backed up existing files: {}".format(file_transactions.backups)) + self.log.debug( + "Transferred files: {}".format(file_transactions.transferred)) + self.log.debug("Retrieving Representation Site Sync information ...") + + # Get the accessible sites for Site Sync + modules_by_name = instance.context.data["openPypeModules"] + sync_server_module = modules_by_name["sync_server"] + sites = sync_server_module.compute_resource_sync_sites( + project_name=instance.data["projectEntity"]["name"] + ) + self.log.debug("Sync Server Sites: {}".format(sites)) + + # Compute the resource file infos once (files belonging to the + # version instance instead of an individual representation) so + # we can re-use those file infos per representation + anatomy = instance.context.data["anatomy"] + resource_file_infos = self.get_files_info(resource_destinations, + sites=sites, + anatomy=anatomy) + + # Finalize the representations now the published files are integrated + # Get 'files' info for representations and its attached resources + new_repre_names_low = set() + for prepared in prepared_representations: + repre_doc = prepared["representation"] + repre_update_data = prepared["repre_doc_update_data"] + transfers = prepared["transfers"] + destinations = [dst for src, dst in transfers] + repre_doc["files"] = self.get_files_info( + destinations, sites=sites, anatomy=anatomy + ) + + # Add the version resource file infos to each representation + repre_doc["files"] += resource_file_infos + + # Set up representation for writing to the database. 
Since + # we *might* be overwriting an existing entry if the version + # already existed we'll use ReplaceOnce with `upsert=True` + if repre_update_data is None: + op_session.create_entity( + project_name, repre_doc["type"], repre_doc + ) + else: + op_session.update_entity( + project_name, + repre_doc["type"], + repre_doc["_id"], + repre_update_data + ) + + new_repre_names_low.add(repre_doc["name"].lower()) + + # Delete any existing representations that didn't get any new data + # if the instance is not set to append mode + if not instance.data.get("append", False): + for name, existing_repres in existing_repres_by_name.items(): + if name not in new_repre_names_low: + # We add the exact representation name because `name` is + # lowercase for name matching only and not in the database + op_session.delete_entity( + project_name, "representation", existing_repres["_id"] + ) + + self.log.debug("{}".format(op_session.to_data())) + op_session.commit() + + # Backwards compatibility + # todo: can we avoid the need to store this? + instance.data["published_representations"] = { + p["representation"]["_id"]: p for p in prepared_representations + } + + self.log.info("Registered {} representations" + "".format(len(prepared_representations))) + + def prepare_subset(self, instance, op_session, project_name): + asset_doc = instance.data["assetEntity"] + subset_name = instance.data["subset"] + family = instance.data["family"] + self.log.debug("Subset: {}".format(subset_name)) + + # Get existing subset if it exists + existing_subset_doc = get_subset_by_name( + project_name, subset_name, asset_doc["_id"] + ) + + # Define subset data + data = { + "families": get_instance_families(instance) + } + + subset_group = instance.data.get("subsetGroup") + if subset_group: + data["subsetGroup"] = subset_group + + subset_id = None + if existing_subset_doc: + subset_id = existing_subset_doc["_id"] + subset_doc = new_subset_document( + subset_name, family, asset_doc["_id"], data, subset_id + ) + + if existing_subset_doc is None: + # Create a new subset + self.log.info("Subset '%s' not found, creating ..." % subset_name) + op_session.create_entity( + project_name, subset_doc["type"], subset_doc + ) + + else: + # Update existing subset data with new data and set in database. 
+ # We also change the found subset in-place so we don't need to + # re-query the subset afterwards + subset_doc["data"].update(data) + update_data = prepare_subset_update_data( + existing_subset_doc, subset_doc + ) + op_session.update_entity( + project_name, + subset_doc["type"], + subset_doc["_id"], + update_data + ) + + self.log.info("Prepared subset: {}".format(subset_name)) + return subset_doc + + def prepare_version(self, instance, op_session, subset_doc, project_name): + version_number = instance.data["version"] + + existing_version = get_version_by_name( + project_name, + version_number, + subset_doc["_id"], + fields=["_id"] + ) + version_id = None + if existing_version: + version_id = existing_version["_id"] + + version_data = self.create_version_data(instance) + version_doc = new_version_doc( + version_number, + subset_doc["_id"], + version_data, + version_id + ) + + if existing_version: + self.log.debug("Updating existing version ...") + update_data = prepare_version_update_data( + existing_version, version_doc + ) + op_session.update_entity( + project_name, + version_doc["type"], + version_doc["_id"], + update_data + ) + else: + self.log.debug("Creating new version ...") + op_session.create_entity( + project_name, version_doc["type"], version_doc + ) + + self.log.info("Prepared version: v{0:03d}".format(version_doc["name"])) + + return version_doc + + def prepare_representation(self, repre, + template_name, + existing_repres_by_name, + version, + instance_stagingdir, + instance): + + # pre-flight validations + if repre["ext"].startswith("."): + raise KnownPublishError(( + "Extension must not start with a dot '.': {}" + ).format(repre["ext"])) + + if repre.get("transfers"): + raise KnownPublishError(( + "Representation is not allowed to have transfers" + "data before integration. They are computed in " + "the integrator. 
Got: {}" + ).format(repre["transfers"])) + + # create template data for Anatomy + template_data = copy.deepcopy(instance.data["anatomyData"]) + + # required representation keys + files = repre["files"] + template_data["representation"] = repre["name"] + template_data["ext"] = repre["ext"] + + # optionals + # retrieve additional anatomy data from representation if exists + for key, anatomy_key in { + # Representation Key: Anatomy data key + "resolutionWidth": "resolution_width", + "resolutionHeight": "resolution_height", + "fps": "fps", + "outputName": "output", + "originalBasename": "originalBasename" + }.items(): + # Allow to take value from representation + # if not found also consider instance.data + value = repre.get(key) + if value is None: + value = instance.data.get(key) + + if value is not None: + template_data[anatomy_key] = value + + stagingdir = repre.get("stagingDir") + if not stagingdir: + # Fall back to instance staging dir if not explicitly + # set for representation in the instance + self.log.debug(( + "Representation uses instance staging dir: {}" + ).format(instance_stagingdir)) + stagingdir = instance_stagingdir + + if not stagingdir: + raise KnownPublishError( + "No staging directory set for representation: {}".format(repre) + ) + + self.log.debug("Anatomy template name: {}".format(template_name)) + anatomy = instance.context.data["anatomy"] + publish_template_category = anatomy.templates[template_name] + template = os.path.normpath(publish_template_category["path"]) + + is_udim = bool(repre.get("udim")) + + is_sequence_representation = isinstance(files, (list, tuple)) + if is_sequence_representation: + # Collection of files (sequence) + if any(os.path.isabs(fname) for fname in files): + raise KnownPublishError("Given file names contain full paths") + + src_collections, remainders = clique.assemble(files) + if len(files) < 2 or len(src_collections) != 1 or remainders: + raise KnownPublishError(( + "Files of representation does not contain proper" + " sequence files.\nCollected collections: {}" + "\nCollected remainders: {}" + ).format( + ", ".join([str(col) for col in src_collections]), + ", ".join([str(rem) for rem in remainders]) + )) + + src_collection = src_collections[0] + destination_indexes = list(src_collection.indexes) + # Use last frame for minimum padding + # - that should cover both 'udim' and 'frame' minimum padding + destination_padding = len(str(destination_indexes[-1])) + if not is_udim: + # Change padding for frames if template has defined higher + # padding. + template_padding = int( + publish_template_category["frame_padding"] + ) + if template_padding > destination_padding: + destination_padding = template_padding + + # If the representation has `frameStart` set it renumbers the + # frame indices of the published collection. It will start from + # that `frameStart` index instead. Thus if that frame start + # differs from the collection we want to shift the destination + # frame indices from the source collection. + repre_frame_start = repre.get("frameStart") + if repre_frame_start is not None: + index_frame_start = int(repre["frameStart"]) + # Shift destination sequence to the start frame + destination_indexes = [ + index_frame_start + idx + for idx in range(len(destination_indexes)) + ] + + # To construct the destination template with anatomy we require + # a Frame or UDIM tile set for the template data. We use the first + # index of the destination for that because that could've shifted + # from the source indexes, etc. 
+ first_index_padded = get_frame_padded( + frame=destination_indexes[0], + padding=destination_padding + ) + + # Construct destination collection from template + repre_context = None + dst_filepaths = [] + for index in destination_indexes: + if is_udim: + template_data["udim"] = index + else: + template_data["frame"] = index + anatomy_filled = anatomy.format(template_data) + template_filled = anatomy_filled[template_name]["path"] + dst_filepaths.append(template_filled) + if repre_context is None: + self.log.debug( + "Template filled: {}".format(str(template_filled)) + ) + repre_context = template_filled.used_values + + # Make sure context contains frame + # NOTE: Frame would not be available only if template does not + # contain '{frame}' in template -> Do we want support it? + if not is_udim: + repre_context["frame"] = first_index_padded + + # Update the destination indexes and padding + dst_collection = clique.assemble(dst_filepaths)[0][0] + dst_collection.padding = destination_padding + if len(src_collection.indexes) != len(dst_collection.indexes): + raise KnownPublishError(( + "This is a bug. Source sequence frames length" + " does not match integration frames length" + )) + + # Multiple file transfers + transfers = [] + for src_file_name, dst in zip(src_collection, dst_collection): + src = os.path.join(stagingdir, src_file_name) + transfers.append((src, dst)) + + else: + # Single file + fname = files + if os.path.isabs(fname): + self.log.error( + "Filename in representation is filepath {}".format(fname) + ) + raise KnownPublishError( + "This is a bug. Representation file name is full path" + ) + + # Manage anatomy template data + template_data.pop("frame", None) + if is_udim: + template_data["udim"] = repre["udim"][0] + + # Construct destination filepath from template + anatomy_filled = anatomy.format(template_data) + template_filled = anatomy_filled[template_name]["path"] + repre_context = template_filled.used_values + dst = os.path.normpath(template_filled) + + # Single file transfer + src = os.path.join(stagingdir, fname) + transfers = [(src, dst)] + + # todo: Are we sure the assumption each representation + # ends up in the same folder is valid? + if not instance.data.get("publishDir"): + instance.data["publishDir"] = ( + anatomy_filled + [template_name] + ["folder"] + ) + + for key in self.db_representation_context_keys: + # Also add these values to the context even if not used by the + # destination template + value = template_data.get(key) + if value is not None: + repre_context[key] = value + + # Explicitly store the full list even though template data might + # have a different value because it uses just a single udim tile + if repre.get("udim"): + repre_context["udim"] = repre.get("udim") # store list + + # Use previous representation's id if there is a name match + existing = existing_repres_by_name.get(repre["name"].lower()) + repre_id = None + if existing: + repre_id = existing["_id"] + + # Store first transferred destination as published path data + # - used primarily for reviews that are integrated to custom modules + # TODO we should probably store all integrated files + # related to the representation? 
+ published_path = transfers[0][1] + repre["published_path"] = published_path + + # todo: `repre` is not the actual `representation` entity + # we should simplify/clarify difference between data above + # and the actual representation entity for the database + data = repre.get("data", {}) + data.update({"path": published_path, "template": template}) + repre_doc = new_representation_doc( + repre["name"], version["_id"], repre_context, data, repre_id + ) + update_data = None + if repre_id is not None: + update_data = prepare_representation_update_data( + existing, repre_doc + ) + + return { + "representation": repre_doc, + "repre_doc_update_data": update_data, + "anatomy_data": template_data, + "transfers": transfers, + # todo: avoid the need for 'published_files' used by Integrate Hero + # backwards compatibility + "published_files": [transfer[1] for transfer in transfers] + } + + def create_version_data(self, instance): + """Create the data dictionary for the version + + Args: + instance: the current instance being published + + Returns: + dict: the required information for version["data"] + """ + + context = instance.context + + # create relative source path for DB + if "source" in instance.data: + source = instance.data["source"] + else: + source = context.data["currentFile"] + anatomy = instance.context.data["anatomy"] + source = self.get_rootless_path(anatomy, source) + self.log.debug("Source: {}".format(source)) + + version_data = { + "families": get_instance_families(instance), + "time": context.data["time"], + "author": context.data["user"], + "source": source, + "comment": context.data.get("comment"), + "machine": context.data.get("machine"), + "fps": instance.data.get("fps", context.data.get("fps")) + } + + # todo: preferably we wouldn't need this "if dict" etc. logic and + # instead be able to rely what the input value is if it's set. + intent_value = context.data.get("intent") + if intent_value and isinstance(intent_value, dict): + intent_value = intent_value.get("value") + + if intent_value: + version_data["intent"] = intent_value + + # Include optional data if present in + optionals = [ + "frameStart", "frameEnd", "step", "handles", + "handleEnd", "handleStart", "sourceHashes" + ] + for key in optionals: + if key in instance.data: + version_data[key] = instance.data[key] + + # Include instance.data[versionData] directly + version_data_instance = instance.data.get("versionData") + if version_data_instance: + version_data.update(version_data_instance) + + return version_data + + def get_template_name(self, instance): + """Return anatomy template name to use for integration""" + # Define publish template name from profiles + filter_criteria = self.get_profile_filter_criteria(instance) + template_name_profiles = self._get_template_name_profiles(instance) + profile = filter_profiles( + template_name_profiles, + filter_criteria, + logger=self.log + ) + + if profile: + return profile["template_name"] + return self.default_template_name + + def _get_template_name_profiles(self, instance): + """Receive profiles for publish template keys. + + Reuse template name profiles from legacy integrator. Goal is to move + the profile settings out of plugin settings but until that happens we + want to be able set it at one place and don't break backwards + compatibility (more then once). 
+ """ + + return ( + instance.context.data["project_settings"] + ["global"] + ["publish"] + ["IntegrateAssetNew"] + ["template_name_profiles"] + ) + + def get_profile_filter_criteria(self, instance): + """Return filter criteria for `filter_profiles`""" + + # Anatomy data is pre-filled by Collectors + anatomy_data = instance.data["anatomyData"] + + # Task can be optional in anatomy data + task = anatomy_data.get("task", {}) + + # Return filter criteria + return { + "families": anatomy_data["family"], + "tasks": task.get("name"), + "task_types": task.get("type"), + "hosts": instance.context.data["hostName"], + } + + def get_rootless_path(self, anatomy, path): + """Returns, if possible, path without absolute portion from root + (eg. 'c:\' or '/opt/..') + + This information is platform dependent and shouldn't be captured. + Example: + 'c:/projects/MyProject1/Assets/publish...' > + '{root}/MyProject1/Assets...' + + Args: + anatomy: anatomy part from instance + path: path (absolute) + Returns: + path: modified path if possible, or unmodified path + + warning logged + """ + + success, rootless_path = anatomy.find_root_template_from_path(path) + if success: + path = rootless_path + else: + self.log.warning(( + "Could not find root path for remapping \"{}\"." + " This may cause issues on farm." + ).format(path)) + return path + + def get_files_info(self, destinations, sites, anatomy): + """Prepare 'files' info portion for representations. + + Arguments: + destinations (list): List of transferred file destinations + sites (list): array of published locations + anatomy: anatomy part from instance + Returns: + output_resources: array of dictionaries to be added to 'files' key + in representation + """ + + file_infos = [] + for file_path in destinations: + file_info = self.prepare_file_info(file_path, anatomy, sites=sites) + file_infos.append(file_info) + return file_infos + + def prepare_file_info(self, path, anatomy, sites): + """ Prepare information for one file (asset or resource) + + Arguments: + path: destination url of published file + anatomy: anatomy part from instance + sites: array of published locations, + [ {'name':'studio', 'created_dt':date} by default + keys expected ['studio', 'site1', 'gdrive1'] + + Returns: + dict: file info dictionary + """ + + return { + "_id": ObjectId(), + "path": self.get_rootless_path(anatomy, path), + "size": os.path.getsize(path), + "hash": source_hash(path), + "sites": sites + } diff --git a/openpype/plugins/publish/integrate_hero_version.py b/openpype/plugins/publish/integrate_hero_version.py index 5f97a9bd41..7d698ff98d 100644 --- a/openpype/plugins/publish/integrate_hero_version.py +++ b/openpype/plugins/publish/integrate_hero_version.py @@ -71,7 +71,7 @@ class IntegrateHeroVersion(pyblish.api.InstancePlugin): template_key = self._get_template_key(instance) anatomy = instance.context.data["anatomy"] - project_name = legacy_io.Session["AVALON_PROJECT"] + project_name = anatomy.project_name if template_key not in anatomy.templates: self.log.warning(( "!!! 
Anatomy of project \"{}\" does not have set" @@ -313,13 +313,9 @@ class IntegrateHeroVersion(pyblish.api.InstancePlugin): } repre_context = template_filled.used_values for key in self.db_representation_context_keys: - if ( - key in repre_context or - key not in anatomy_data - ): - continue - - repre_context[key] = anatomy_data[key] + value = anatomy_data.get(key) + if value is not None: + repre_context[key] = value # Prepare new repre repre = copy.deepcopy(repre_info["representation"]) @@ -454,7 +450,6 @@ class IntegrateHeroVersion(pyblish.api.InstancePlugin): ) if bulk_writes: - project_name = legacy_io.Session["AVALON_PROJECT"] legacy_io.database[project_name].bulk_write( bulk_writes ) @@ -517,11 +512,10 @@ class IntegrateHeroVersion(pyblish.api.InstancePlugin): anatomy_filled = anatomy.format(template_data) # solve deprecated situation when `folder` key is not underneath # `publish` anatomy - project_name = legacy_io.Session["AVALON_PROJECT"] self.log.warning(( "Deprecation warning: Anatomy does not have set `folder`" " key underneath `publish` (in global of for project `{}`)." - ).format(project_name)) + ).format(anatomy.project_name)) file_path = anatomy_filled[template_key]["path"] # Directory diff --git a/openpype/plugins/publish/integrate_new.py b/openpype/plugins/publish/integrate_legacy.py similarity index 99% rename from openpype/plugins/publish/integrate_new.py rename to openpype/plugins/publish/integrate_legacy.py index f870220421..b90b61f587 100644 --- a/openpype/plugins/publish/integrate_new.py +++ b/openpype/plugins/publish/integrate_legacy.py @@ -69,8 +69,9 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): "data": additional metadata for each representation. """ - label = "Integrate Asset New" - order = pyblish.api.IntegratorOrder + label = "Integrate Asset (legacy)" + # Make sure it happens after new integrator + order = pyblish.api.IntegratorOrder + 0.00001 families = ["workfile", "pointcache", "camera", @@ -101,7 +102,6 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): "source", "matchmove", "image", - "source", "assembly", "fbx", "textures", @@ -142,6 +142,10 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): subset_grouping_profiles = None def process(self, instance): + if instance.data.get("processedWithNewIntegrator"): + self.log.info("Instance was already processed with new integrator") + return + for ef in self.exclude_families: if ( instance.data["family"] == ef or diff --git a/openpype/plugins/publish/integrate_subset_group.py b/openpype/plugins/publish/integrate_subset_group.py new file mode 100644 index 0000000000..a24ebba3a5 --- /dev/null +++ b/openpype/plugins/publish/integrate_subset_group.py @@ -0,0 +1,98 @@ +"""Produces instance.data["subsetGroup"] data used during integration. + +Requires: + dict -> context["anatomyData"] *(pyblish.api.CollectorOrder + 0.49) + +Provides: + instance -> subsetGroup (str) + +""" +import pyblish.api + +from openpype.lib.profiles_filtering import filter_profiles +from openpype.lib import ( + prepare_template_data, + StringTemplate, + TemplateUnsolved +) + + +class IntegrateSubsetGroup(pyblish.api.InstancePlugin): + """Integrate Subset Group for publish.""" + + # Run after CollectAnatomyInstanceData + order = pyblish.api.IntegratorOrder - 0.1 + label = "Subset Group" + + # Attributes set by settings + subset_grouping_profiles = None + + def process(self, instance): + """Look into subset group profiles set by settings. + + Attribute 'subset_grouping_profiles' is defined by OpenPype settings. 
+ """ + + # Skip if 'subset_grouping_profiles' is empty + if not self.subset_grouping_profiles: + return + + if instance.data.get("subsetGroup"): + # If subsetGroup is already set then allow that value to remain + self.log.debug(( + "Skipping collect subset group due to existing value: {}" + ).format(instance.data["subsetGroup"])) + return + + # Skip if there is no matching profile + filter_criteria = self.get_profile_filter_criteria(instance) + profile = filter_profiles( + self.subset_grouping_profiles, + filter_criteria, + logger=self.log + ) + + if not profile: + return + + template = profile["template"] + + fill_pairs = prepare_template_data({ + "family": filter_criteria["families"], + "task": filter_criteria["tasks"], + "host": filter_criteria["hosts"], + "subset": instance.data["subset"], + "renderlayer": instance.data.get("renderlayer") + }) + + filled_template = None + try: + filled_template = StringTemplate.format_strict_template( + template, fill_pairs + ) + except (KeyError, TemplateUnsolved): + keys = fill_pairs.keys() + self.log.warning(( + "Subset grouping failed. Only {} are expected in Settings" + ).format(','.join(keys))) + + if filled_template: + instance.data["subsetGroup"] = filled_template + + def get_profile_filter_criteria(self, instance): + """Return filter criteria for `filter_profiles`""" + # TODO: This logic is used in much more plug-ins in one way or another + # Maybe better suited for lib? + # Anatomy data is pre-filled by Collectors + anatomy_data = instance.data["anatomyData"] + + # Task can be optional in anatomy data + task = anatomy_data.get("task", {}) + + # Return filter criteria + return { + "families": anatomy_data["family"], + "tasks": task.get("name"), + "hosts": instance.context.data["hostName"], + "task_types": task.get("type") + } diff --git a/openpype/plugins/publish/integrate_thumbnail.py b/openpype/plugins/publish/integrate_thumbnail.py index fd50858a91..8ae0dd2d60 100644 --- a/openpype/plugins/publish/integrate_thumbnail.py +++ b/openpype/plugins/publish/integrate_thumbnail.py @@ -39,9 +39,8 @@ class IntegrateThumbnails(pyblish.api.InstancePlugin): ) return - project_name = legacy_io.Session["AVALON_PROJECT"] - anatomy = instance.context.data["anatomy"] + project_name = anatomy.project_name if "publish" not in anatomy.templates: self.log.warning("Anatomy is missing the \"publish\" key!") return diff --git a/openpype/plugins/publish/start_timer.py b/openpype/plugins/publish/start_timer.py deleted file mode 100644 index 112d92bef0..0000000000 --- a/openpype/plugins/publish/start_timer.py +++ /dev/null @@ -1,14 +0,0 @@ -import pyblish.api - -from openpype.lib import change_timer_to_current_context - - -class StartTimer(pyblish.api.ContextPlugin): - label = "Start Timer" - order = pyblish.api.IntegratorOrder + 1 - hosts = ["*"] - - def process(self, context): - modules_settings = context.data["system_settings"]["modules"] - if modules_settings["timers_manager"]["disregard_publishing"]: - change_timer_to_current_context() diff --git a/openpype/plugins/publish/stop_timer.py b/openpype/plugins/publish/stop_timer.py deleted file mode 100644 index 414e43a3c4..0000000000 --- a/openpype/plugins/publish/stop_timer.py +++ /dev/null @@ -1,17 +0,0 @@ -import os -import requests - -import pyblish.api - - -class StopTimer(pyblish.api.ContextPlugin): - label = "Stop Timer" - order = pyblish.api.ExtractorOrder - 0.49 - hosts = ["*"] - - def process(self, context): - modules_settings = context.data["system_settings"]["modules"] - if 
modules_settings["timers_manager"]["disregard_publishing"]: - webserver_url = os.environ.get("OPENPYPE_WEBSERVER_URL") - rest_api_url = "{}/timers_manager/stop_timer".format(webserver_url) - requests.post(rest_api_url) diff --git a/openpype/plugins/publish/validate_asset_docs.py b/openpype/plugins/publish/validate_asset_docs.py index bc1f9b9e6c..9a1ca5b8de 100644 --- a/openpype/plugins/publish/validate_asset_docs.py +++ b/openpype/plugins/publish/validate_asset_docs.py @@ -24,6 +24,10 @@ class ValidateAssetDocs(pyblish.api.InstancePlugin): if instance.data.get("assetEntity"): self.log.info("Instance has set asset document in its data.") + elif instance.data.get("newAssetPublishing"): + # skip if it is editorial + self.log.info("Editorial instance is no need to check...") + else: raise PublishValidationError(( "Instance \"{}\" doesn't have asset document " diff --git a/openpype/plugins/publish/validate_containers.py b/openpype/plugins/publish/validate_containers.py index ce91bd3396..79759450e1 100644 --- a/openpype/plugins/publish/validate_containers.py +++ b/openpype/plugins/publish/validate_containers.py @@ -1,5 +1,9 @@ import pyblish.api -import openpype.lib +from openpype.pipeline.load import any_outdated_containers +from openpype.pipeline import ( + PublishXmlValidationError, + OptionalPyblishPluginMixin +) class ShowInventory(pyblish.api.Action): @@ -14,15 +18,21 @@ class ShowInventory(pyblish.api.Action): host_tools.show_scene_inventory() -class ValidateContainers(pyblish.api.ContextPlugin): +class ValidateContainers(OptionalPyblishPluginMixin, + pyblish.api.ContextPlugin): + """Containers are must be updated to latest version on publish.""" label = "Validate Containers" order = pyblish.api.ValidatorOrder - hosts = ["maya", "houdini", "nuke", "harmony", "photoshop"] + hosts = ["maya", "houdini", "nuke", "harmony", "photoshop", "aftereffects"] optional = True actions = [ShowInventory] def process(self, context): - if openpype.lib.any_outdated(): - raise ValueError("There are outdated containers in the scene.") + if not self.is_active(context.data): + return + + if any_outdated_containers(): + msg = "There are outdated containers in the scene." + raise PublishXmlValidationError(self, msg) diff --git a/openpype/plugins/publish/validate_editorial_asset_name.py b/openpype/plugins/publish/validate_editorial_asset_name.py index 702e87b58d..694788c414 100644 --- a/openpype/plugins/publish/validate_editorial_asset_name.py +++ b/openpype/plugins/publish/validate_editorial_asset_name.py @@ -19,7 +19,8 @@ class ValidateEditorialAssetName(pyblish.api.ContextPlugin): "hiero", "standalonepublisher", "resolve", - "flame" + "flame", + "traypublisher" ] def process(self, context): diff --git a/openpype/pype_commands.py b/openpype/pype_commands.py index 124eacbe39..fe46a4bc54 100644 --- a/openpype/pype_commands.py +++ b/openpype/pype_commands.py @@ -5,19 +5,6 @@ import sys import json import time -from openpype.lib import PypeLogger -from openpype.api import get_app_environments_for_context -from openpype.lib.plugin_tools import get_batch_asset_task_info -from openpype.lib.remote_publish import ( - get_webpublish_conn, - start_webpublish_log, - publish_and_log, - fail_batch, - find_variant_key, - get_task_data, - IN_PROGRESS_STATUS -) - class PypeCommands: """Class implementing commands used by Pype. 
@@ -26,10 +13,11 @@ class PypeCommands: """ @staticmethod def launch_tray(): - PypeLogger.set_process_name("Tray") - + from openpype.lib import Logger from openpype.tools import tray + Logger.set_process_name("Tray") + tray.main() @staticmethod @@ -46,10 +34,12 @@ class PypeCommands: @staticmethod def add_modules(click_func): """Modules/Addons can add their cli commands dynamically.""" + + from openpype.lib import Logger from openpype.modules import ModulesManager manager = ModulesManager() - log = PypeLogger.get_logger("AddModulesCLI") + log = Logger.get_logger("CLI-AddModules") for module in manager.modules: try: module.cli(click_func) @@ -71,14 +61,9 @@ class PypeCommands: @staticmethod def launch_webpublisher_webservercli(*args, **kwargs): - from openpype.hosts.webpublisher.webserver_service.webserver_cli \ - import (run_webserver) - return run_webserver(*args, **kwargs) + from openpype.hosts.webpublisher.webserver_service import run_webserver - @staticmethod - def launch_standalone_publisher(): - from openpype.tools import standalonepublish - standalonepublish.main() + return run_webserver(*args, **kwargs) @staticmethod def launch_traypublisher(): @@ -100,10 +85,11 @@ class PypeCommands: Raises: RuntimeError: When there is no path to process. """ + + from openpype.lib import Logger + from openpype.lib.applications import get_app_environments_for_context from openpype.modules import ModulesManager from openpype.pipeline import install_openpype_plugins - - from openpype.api import Logger from openpype.tools.utils.host_tools import show_publish from openpype.tools.utils.lib import qt_app_context @@ -111,7 +97,7 @@ class PypeCommands: import pyblish.api import pyblish.util - log = Logger.get_logger() + log = Logger.get_logger("CLI-publish") install_openpype_plugins() @@ -170,7 +156,7 @@ class PypeCommands: log.info("Publish finished.") @staticmethod - def remotepublishfromapp(project, batch_path, host_name, + def remotepublishfromapp(project_name, batch_path, host_name, user_email, targets=None): """Opens installed variant of 'host' and run remote publish there. @@ -189,8 +175,8 @@ class PypeCommands: Runs publish process as user would, in automatic fashion. Args: - project (str): project to publish (only single context is expected - per call of remotepublish + project_name (str): project to publish (only single context is + expected per call of remotepublish batch_path (str): Path batch folder. Contains subfolders with resources (workfile, another subfolder 'renders' etc.) 
host_name (str): 'photoshop' @@ -199,78 +185,14 @@ class PypeCommands: targets (list): Pyblish targets (to choose validator for example) """ - import pyblish.api - from openpype.api import Logger - from openpype.lib import ApplicationManager - log = Logger.get_logger() - - log.info("remotepublishphotoshop command") - - task_data = get_task_data(batch_path) - - workfile_path = os.path.join(batch_path, - task_data["task"], - task_data["files"][0]) - - print("workfile_path {}".format(workfile_path)) - - batch_id = task_data["batch"] - dbcon = get_webpublish_conn() - # safer to start logging here, launch might be broken altogether - _id = start_webpublish_log(dbcon, batch_id, user_email) - - batches_in_progress = list(dbcon.find({"status": IN_PROGRESS_STATUS})) - if len(batches_in_progress) > 1: - fail_batch(_id, batches_in_progress, dbcon) - print("Another batch running, probably stuck, ask admin for help") - - asset, task_name, _ = get_batch_asset_task_info(task_data["context"]) - - application_manager = ApplicationManager() - found_variant_key = find_variant_key(application_manager, host_name) - app_name = "{}/{}".format(host_name, found_variant_key) - - # must have for proper launch of app - env = get_app_environments_for_context( - project, - asset, - task_name, - app_name + from openpype.hosts.webpublisher.cli_functions import ( + cli_publish_from_app ) - print("env:: {}".format(env)) - os.environ.update(env) - os.environ["OPENPYPE_PUBLISH_DATA"] = batch_path - # must pass identifier to update log lines for a batch - os.environ["BATCH_LOG_ID"] = str(_id) - os.environ["HEADLESS_PUBLISH"] = 'true' # to use in app lib - os.environ["USER_EMAIL"] = user_email - - pyblish.api.register_host(host_name) - if targets: - if isinstance(targets, str): - targets = [targets] - current_targets = os.environ.get("PYBLISH_TARGETS", "").split( - os.pathsep) - for target in targets: - current_targets.append(target) - - os.environ["PYBLISH_TARGETS"] = os.pathsep.join( - set(current_targets)) - - data = { - "last_workfile_path": workfile_path, - "start_last_workfile": True, - "project_name": project, - "asset_name": asset, - "task_name": task_name - } - - launched_app = application_manager.launch(app_name, **data) - - while launched_app.poll() is None: - time.sleep(0.5) + cli_publish_from_app( + project_name, batch_path, host_name, user_email, targets + ) @staticmethod def remotepublish(project, batch_path, user_email, targets=None): @@ -294,46 +216,12 @@ class PypeCommands: Raises: RuntimeError: When there is no path to process. 
""" - if not batch_path: - raise RuntimeError("No publish paths specified") - # Register target and host - import pyblish.api - import pyblish.util + from openpype.hosts.webpublisher.cli_functions import ( + cli_publish + ) - from openpype.pipeline import install_host - from openpype.hosts.webpublisher import api as webpublisher - - log = PypeLogger.get_logger() - - log.info("remotepublish command") - - host_name = "webpublisher" - os.environ["OPENPYPE_PUBLISH_DATA"] = batch_path - os.environ["AVALON_PROJECT"] = project - os.environ["AVALON_APP"] = host_name - os.environ["USER_EMAIL"] = user_email - os.environ["HEADLESS_PUBLISH"] = 'true' # to use in app lib - - pyblish.api.register_host(host_name) - - if targets: - if isinstance(targets, str): - targets = [targets] - for target in targets: - pyblish.api.register_target(target) - - install_host(webpublisher) - - log.info("Running publish ...") - - _, batch_id = os.path.split(batch_path) - dbcon = get_webpublish_conn() - _id = start_webpublish_log(dbcon, batch_id, user_email) - - publish_and_log(dbcon, _id, log, batch_id=batch_id) - - log.info("Publish finished.") + cli_publish(project, batch_path, user_email, targets) @staticmethod def extractenvironments(output_json_path, project, asset, task, app, @@ -342,8 +230,10 @@ class PypeCommands: Called by Deadline plugin to propagate environment into render jobs. """ + + from openpype.lib.applications import get_app_environments_for_context + if all((project, asset, task, app)): - from openpype.api import get_app_environments_for_context env = get_app_environments_for_context( project, asset, task, app, env_group ) @@ -445,7 +335,6 @@ class PypeCommands: sync_server_module.server_init() sync_server_module.server_start() - import time while True: time.sleep(1.0) diff --git a/openpype/resources/app_icons/shotgrid.png b/openpype/resources/app_icons/shotgrid.png new file mode 100644 index 0000000000..6d0cc047f9 Binary files /dev/null and b/openpype/resources/app_icons/shotgrid.png differ diff --git a/openpype/scripts/fusion_switch_shot.py b/openpype/scripts/fusion_switch_shot.py index 245fc665f0..fc22f060a2 100644 --- a/openpype/scripts/fusion_switch_shot.py +++ b/openpype/scripts/fusion_switch_shot.py @@ -3,6 +3,8 @@ import re import sys import logging +from openpype.client import get_asset_by_name, get_versions + # Pipeline imports from openpype.hosts.fusion import api import openpype.hosts.fusion.api.lib as fusion_lib @@ -15,13 +17,10 @@ from openpype.pipeline import ( legacy_io, ) -from openpype.lib.avalon_context import get_workdir_from_session +from openpype.pipeline.context_tools import get_workdir_from_session log = logging.getLogger("Update Slap Comp") -self = sys.modules[__name__] -self._project = None - def _format_version_folder(folder): """Format a version folder based on the filepath @@ -131,8 +130,8 @@ def update_frame_range(comp, representations): """ version_ids = [r["parent"] for r in representations] - versions = legacy_io.find({"type": "version", "_id": {"$in": version_ids}}) - versions = list(versions) + project_name = legacy_io.active_project() + versions = list(get_versions(project_name, version_ids=version_ids)) start = min(v["data"]["frameStart"] for v in versions) end = max(v["data"]["frameEnd"] for v in versions) @@ -162,15 +161,10 @@ def switch(asset_name, filepath=None, new=True): # Assert asset name exists # It is better to do this here then to wait till switch_shot does it - asset = legacy_io.find_one({"type": "asset", "name": asset_name}) + project_name = 
legacy_io.active_project() + asset = get_asset_by_name(project_name, asset_name) assert asset, "Could not find '%s' in the database" % asset_name - # Get current project - self._project = legacy_io.find_one({ - "type": "project", - "name": legacy_io.Session["AVALON_PROJECT"] - }) - # Go to comp if not filepath: current_comp = api.get_current_comp() diff --git a/openpype/scripts/remote_publish.py b/openpype/scripts/remote_publish.py index d322f369d1..37df35e36c 100644 --- a/openpype/scripts/remote_publish.py +++ b/openpype/scripts/remote_publish.py @@ -1,11 +1,12 @@ try: - from openpype.api import Logger - import openpype.lib.remote_publish + from openpype.lib import Logger + from openpype.pipeline.publish.lib import remote_publish except ImportError as exc: # Ensure Deadline fails by output an error that contains "Fatal Error:" raise ImportError("Fatal Error: %s" % exc) + if __name__ == "__main__": # Perform remote publish with thorough error checking log = Logger.get_logger(__name__) - openpype.lib.remote_publish.publish(log, raise_error=True) + remote_publish(log, raise_error=True) diff --git a/openpype/settings/defaults/project_settings/ftrack.json b/openpype/settings/defaults/project_settings/ftrack.json index 70cda68cb4..41bed7751b 100644 --- a/openpype/settings/defaults/project_settings/ftrack.json +++ b/openpype/settings/defaults/project_settings/ftrack.json @@ -96,10 +96,6 @@ "mapping": {}, "asset_types_to_skip": [] }, - "first_version_status": { - "enabled": true, - "status": "" - }, "next_task_update": { "enabled": true, "mapping": { @@ -301,7 +297,9 @@ "traypublisher" ], "families": [ - "plate" + "plate", + "review", + "audio" ], "task_types": [], "tasks": [], @@ -432,6 +430,9 @@ "enabled": false, "custom_attribute_keys": [] }, + "IntegrateHierarchyToFtrack": { + "create_task_status_profiles": [] + }, "IntegrateFtrackNote": { "enabled": true, "note_template": "{intent}: {comment}", @@ -447,6 +448,9 @@ "enabled": false, "ftrack_custom_attributes": {} }, + "IntegrateFtrackComponentOverwrite": { + "enabled": true + }, "IntegrateFtrackInstance": { "family_mapping": { "camera": "cam", @@ -483,7 +487,11 @@ "usd": "usd" }, "keep_first_subset_name_for_review": true, - "asset_versions_status_profiles": [] + "asset_versions_status_profiles": [], + "additional_metadata_keys": [] + }, + "IntegrateFtrackFarmStatus": { + "farm_status_profiles": [] } } } \ No newline at end of file diff --git a/openpype/settings/defaults/project_settings/global.json b/openpype/settings/defaults/project_settings/global.json index 6131ea1939..0ff9363ba7 100644 --- a/openpype/settings/defaults/project_settings/global.json +++ b/openpype/settings/defaults/project_settings/global.json @@ -85,6 +85,7 @@ ], "width": 0, "height": 0, + "scale_pixel_aspect": true, "bg_color": [ 0, 0, @@ -159,7 +160,27 @@ } ] }, + "IntegrateSubsetGroup": { + "subset_grouping_profiles": [ + { + "families": [], + "hosts": [], + "task_types": [], + "tasks": [], + "template": "" + } + ] + }, "IntegrateAssetNew": { + "subset_grouping_profiles": [ + { + "families": [], + "hosts": [], + "task_types": [], + "tasks": [], + "template": "" + } + ], "template_name_profiles": [ { "families": [], @@ -202,17 +223,11 @@ "tasks": [], "template_name": "maya2unreal" } - ], - "subset_grouping_profiles": [ - { - "families": [], - "hosts": [], - "task_types": [], - "tasks": [], - "template": "" - } ] }, + "IntegrateAsset": { + "skip_host_families": [] + }, "IntegrateHeroVersion": { "enabled": true, "optional": true, diff --git 
a/openpype/settings/defaults/project_settings/maya.json b/openpype/settings/defaults/project_settings/maya.json index 5976c6a823..28f6d23e4d 100644 --- a/openpype/settings/defaults/project_settings/maya.json +++ b/openpype/settings/defaults/project_settings/maya.json @@ -31,6 +31,38 @@ } ] }, + "RenderSettings": { + "apply_render_settings": true, + "default_render_image_folder": "renders", + "enable_all_lights": false, + "aov_separator": "underscore", + "reset_current_frame": false, + "arnold_renderer": { + "image_prefix": "maya///_", + "image_format": "exr", + "multilayer_exr": true, + "tiled": true, + "aov_list": [], + "additional_options": [] + }, + "vray_renderer": { + "image_prefix": "maya///", + "engine": "1", + "image_format": "png", + "aov_list": [], + "additional_options": [] + }, + "redshift_renderer": { + "image_prefix": "maya///", + "primary_gi_engine": "0", + "secondary_gi_engine": "0", + "image_format": "iff", + "multilayer_exr": true, + "force_combine": true, + "aov_list": [], + "additional_options": [] + } + }, "create": { "CreateLook": { "enabled": true, @@ -43,9 +75,7 @@ "enabled": true, "defaults": [ "Main" - ], - "aov_separator": "underscore", - "default_render_image_folder": "renders" + ] }, "CreateUnrealStaticMesh": { "enabled": true, @@ -70,6 +100,20 @@ "enabled": true, "publish_mip_map": true }, + "CreateAnimation": { + "enabled": true, + "write_color_sets": false, + "defaults": [ + "Main" + ] + }, + "CreatePointCache": { + "enabled": true, + "write_color_sets": false, + "defaults": [ + "Main" + ] + }, "CreateMultiverseUsd": { "enabled": true, "defaults": [ @@ -88,12 +132,6 @@ "Main" ] }, - "CreateAnimation": { - "enabled": true, - "defaults": [ - "Main" - ] - }, "CreateAss": { "enabled": true, "defaults": [ @@ -132,12 +170,6 @@ "Sculpt" ] }, - "CreatePointCache": { - "enabled": true, - "defaults": [ - "Main" - ] - }, "CreateRenderSetup": { "enabled": true, "defaults": [ @@ -205,10 +237,15 @@ "enabled": true, "optional": true, "active": true, - "exclude_families": ["model", "rig", "staticMesh"] + "exclude_families": [ + "model", + "rig", + "staticMesh" + ] }, "ValidateShaderName": { "enabled": false, + "optional": true, "regex": "(?P.*)_(.*)_SHD" }, "ValidateShadingEngine": { @@ -222,6 +259,7 @@ }, "ValidateLoadedPlugin": { "enabled": false, + "optional": true, "whitelist_native_plugins": false, "authorized_plugins": [] }, @@ -236,6 +274,7 @@ }, "ValidateUnrealStaticMeshName": { "enabled": true, + "optional": true, "validate_mesh": false, "validate_collision": true }, @@ -252,6 +291,81 @@ "redshift_render_attributes": [], "renderman_render_attributes": [] }, + "ValidateCurrentRenderLayerIsRenderable": { + "enabled": true, + "optional": false, + "active": true + }, + "ValidateRenderImageRule": { + "enabled": true, + "optional": false, + "active": true + }, + "ValidateRenderNoDefaultCameras": { + "enabled": true, + "optional": false, + "active": true + }, + "ValidateRenderSingleCamera": { + "enabled": true, + "optional": false, + "active": true + }, + "ValidateRenderLayerAOVs": { + "enabled": true, + "optional": false, + "active": true + }, + "ValidateStepSize": { + "enabled": true, + "optional": false, + "active": true + }, + "ValidateVRayDistributedRendering": { + "enabled": true, + "optional": false, + "active": true + }, + "ValidateVrayReferencedAOVs": { + "enabled": true, + "optional": false, + "active": true + }, + "ValidateVRayTranslatorEnabled": { + "enabled": true, + "optional": false, + "active": true + }, + "ValidateVrayProxy": { + "enabled": true, + 
"optional": false, + "active": true + }, + "ValidateVrayProxyMembers": { + "enabled": true, + "optional": false, + "active": true + }, + "ValidateYetiRenderScriptCallbacks": { + "enabled": true, + "optional": false, + "active": true + }, + "ValidateYetiRigCacheState": { + "enabled": true, + "optional": false, + "active": true + }, + "ValidateYetiRigInputShapesInInstance": { + "enabled": true, + "optional": false, + "active": true + }, + "ValidateYetiRigSettings": { + "enabled": true, + "optional": false, + "active": true + }, "ValidateModelName": { "enabled": false, "database": true, @@ -270,6 +384,7 @@ }, "ValidateTransformNamingSuffix": { "enabled": true, + "optional": true, "SUFFIX_NAMING_TABLE": { "mesh": [ "_GEO", @@ -293,7 +408,7 @@ "ALLOW_IF_NOT_IN_SUFFIX_TABLE": true }, "ValidateColorSets": { - "enabled": false, + "enabled": true, "optional": true, "active": true }, @@ -337,6 +452,16 @@ "optional": true, "active": true }, + "ValidateMeshNoNegativeScale": { + "enabled": true, + "optional": false, + "active": true + }, + "ValidateMeshNonZeroEdgeLength": { + "enabled": true, + "optional": true, + "active": true + }, "ValidateMeshNormalsUnlocked": { "enabled": false, "optional": true, @@ -359,22 +484,22 @@ }, "ValidateNoNamespace": { "enabled": true, - "optional": true, + "optional": false, "active": true }, "ValidateNoNullTransforms": { "enabled": true, - "optional": true, + "optional": false, "active": true }, "ValidateNoUnknownNodes": { "enabled": true, - "optional": true, + "optional": false, "active": true }, "ValidateNodeNoGhosting": { "enabled": false, - "optional": true, + "optional": false, "active": true }, "ValidateShapeDefaultNames": { @@ -402,6 +527,21 @@ "optional": true, "active": true }, + "ValidateNoVRayMesh": { + "enabled": true, + "optional": false, + "active": true + }, + "ValidateUnrealMeshTriangulated": { + "enabled": false, + "optional": true, + "active": true + }, + "ValidateAlembicVisibleOnly": { + "enabled": true, + "optional": false, + "active": true + }, "ExtractAlembic": { "enabled": true, "families": [ @@ -425,8 +565,34 @@ "optional": true, "active": true }, + "ValidateAnimationContent": { + "enabled": true, + "optional": false, + "active": true + }, + "ValidateOutRelatedNodeIds": { + "enabled": true, + "optional": false, + "active": true + }, + "ValidateRigControllersArnoldAttributes": { + "enabled": true, + "optional": false, + "active": true + }, + "ValidateSkeletalMeshHierarchy": { + "enabled": true, + "optional": false, + "active": true + }, + "ValidateSkinclusterDeformerSet": { + "enabled": true, + "optional": false, + "active": true + }, "ValidateRigOutSetNodeIds": { "enabled": true, + "optional": false, "allow_history_only": false }, "ValidateCameraAttributes": { @@ -439,14 +605,44 @@ "optional": true, "active": true }, + "ValidateAssemblyNamespaces": { + "enabled": true, + "optional": false, + "active": true + }, + "ValidateAssemblyModelTransforms": { + "enabled": true, + "optional": false, + "active": true + }, "ValidateAssRelativePaths": { "enabled": true, + "optional": false, + "active": true + }, + "ValidateInstancerContent": { + "enabled": true, + "optional": false, + "active": true + }, + "ValidateInstancerFrameRanges": { + "enabled": true, + "optional": false, + "active": true + }, + "ValidateNoDefaultCameras": { + "enabled": true, + "optional": false, + "active": true + }, + "ValidateUnrealUpAxis": { + "enabled": false, "optional": true, "active": true }, "ValidateCameraContents": { "enabled": true, - "optional": true, + "optional": false, 
"validate_shapes": true }, "ExtractPlayblast": { @@ -772,6 +968,9 @@ } ] }, + "templated_workfile_build": { + "profiles": [] + }, "filters": { "preset 1": { "ValidateNoAnimation": false, @@ -781,4 +980,4 @@ "ValidateNoAnimation": false } } -} \ No newline at end of file +} diff --git a/openpype/settings/defaults/project_settings/nuke.json b/openpype/settings/defaults/project_settings/nuke.json index 3e29122074..f40ec1fe9e 100644 --- a/openpype/settings/defaults/project_settings/nuke.json +++ b/openpype/settings/defaults/project_settings/nuke.json @@ -131,7 +131,7 @@ "write" ] }, - "ValidateInstanceInContext": { + "ValidateCorrectAssetName": { "enabled": true, "optional": true, "active": true diff --git a/openpype/settings/defaults/project_settings/photoshop.json b/openpype/settings/defaults/project_settings/photoshop.json index d9b7a8083f..552c2c9cad 100644 --- a/openpype/settings/defaults/project_settings/photoshop.json +++ b/openpype/settings/defaults/project_settings/photoshop.json @@ -8,7 +8,7 @@ }, "publish": { "CollectColorCodedInstances": { - "create_flatten_image": false, + "create_flatten_image": "no", "flatten_subset_template": "", "color_code_mapping": [] }, @@ -32,6 +32,7 @@ }, "ExtractReview": { "make_image_sequence": false, + "max_downscale_size": 8192, "jpg_options": { "tags": [] }, diff --git a/openpype/settings/defaults/project_settings/shotgrid.json b/openpype/settings/defaults/project_settings/shotgrid.json new file mode 100644 index 0000000000..774bce714b --- /dev/null +++ b/openpype/settings/defaults/project_settings/shotgrid.json @@ -0,0 +1,22 @@ +{ + "shotgrid_project_id": 0, + "shotgrid_server": "", + "event": { + "enabled": false + }, + "fields": { + "asset": { + "type": "sg_asset_type" + }, + "sequence": { + "episode_link": "episode" + }, + "shot": { + "episode_link": "sg_episode", + "sequence_link": "sg_sequence" + }, + "task": { + "step": "step" + } + } +} \ No newline at end of file diff --git a/openpype/settings/defaults/project_settings/traypublisher.json b/openpype/settings/defaults/project_settings/traypublisher.json index 8bf3e3b306..5db2a79772 100644 --- a/openpype/settings/defaults/project_settings/traypublisher.json +++ b/openpype/settings/defaults/project_settings/traypublisher.json @@ -236,9 +236,70 @@ "extensions": [] } ], + "editorial_creators": { + "editorial_simple": { + "default_variants": [ + "Main" + ], + "clip_name_tokenizer": { + "_sequence_": "(sc\\d{3})", + "_shot_": "(sh\\d{3})" + }, + "shot_rename": { + "enabled": true, + "shot_rename_template": "{project[code]}_{_sequence_}_{_shot_}" + }, + "shot_hierarchy": { + "enabled": true, + "parents_path": "{project}/{folder}/{sequence}", + "parents": [ + { + "type": "Project", + "name": "project", + "value": "{project[name]}" + }, + { + "type": "Folder", + "name": "folder", + "value": "shots" + }, + { + "type": "Sequence", + "name": "sequence", + "value": "{_sequence_}" + } + ] + }, + "shot_add_tasks": {}, + "family_presets": [ + { + "family": "review", + "variant": "Reference", + "review": true, + "output_file_type": ".mp4" + }, + { + "family": "plate", + "variant": "", + "review": false, + "output_file_type": ".mov" + }, + { + "family": "audio", + "variant": "", + "review": false, + "output_file_type": ".wav" + } + ] + } + }, "BatchMovieCreator": { - "default_variants": ["Main"], - "default_tasks": ["Compositing"], + "default_variants": [ + "Main" + ], + "default_tasks": [ + "Compositing" + ], "extensions": [ ".mov" ] diff --git a/openpype/settings/defaults/project_settings/unreal.json 
b/openpype/settings/defaults/project_settings/unreal.json index dad61cd1f0..c5f5cdf719 100644 --- a/openpype/settings/defaults/project_settings/unreal.json +++ b/openpype/settings/defaults/project_settings/unreal.json @@ -1,4 +1,5 @@ { + "level_sequences_for_layouts": false, "project_setup": { "dev_mode": true } diff --git a/openpype/settings/defaults/project_settings/webpublisher.json b/openpype/settings/defaults/project_settings/webpublisher.json index 77168c25e6..cba472514e 100644 --- a/openpype/settings/defaults/project_settings/webpublisher.json +++ b/openpype/settings/defaults/project_settings/webpublisher.json @@ -1,4 +1,13 @@ { + "timeout_profiles": [ + { + "hosts": [ + "photoshop" + ], + "task_types": [], + "timeout": 600 + } + ], "publish": { "CollectPublishedFiles": { "task_type_to_family": { diff --git a/openpype/settings/defaults/system_settings/general.json b/openpype/settings/defaults/system_settings/general.json index a06947ba77..909ffc1ee4 100644 --- a/openpype/settings/defaults/system_settings/general.json +++ b/openpype/settings/defaults/system_settings/general.json @@ -2,11 +2,7 @@ "studio_name": "Studio name", "studio_code": "stu", "admin_password": "", - "environment": { - "__environment_keys__": { - "global": [] - } - }, + "environment": {}, "log_to_server": true, "disk_mapping": { "windows": [], diff --git a/openpype/settings/defaults/system_settings/modules.json b/openpype/settings/defaults/system_settings/modules.json index 8cd4114cb0..c84d23d3fc 100644 --- a/openpype/settings/defaults/system_settings/modules.json +++ b/openpype/settings/defaults/system_settings/modules.json @@ -26,13 +26,14 @@ "linux": [] }, "intent": { + "allow_empty_intent": true, + "empty_intent_label": "", "items": { - "-": "-", "wip": "WIP", "final": "Final", "test": "Test" }, - "default": "-" + "default": "" }, "custom_attributes": { "show": { @@ -135,6 +136,13 @@ "enabled": false, "server": "" }, + "shotgrid": { + "enabled": false, + "leecher_manager_url": "http://127.0.0.1:3000", + "leecher_backend_url": "http://127.0.0.1:8090", + "filter_projects_by_login": true, + "shotgrid_settings": {} + }, "timers_manager": { "enabled": true, "auto_stop": true, diff --git a/openpype/settings/entities/__init__.py b/openpype/settings/entities/__init__.py index a173e2454f..b2cb2204f4 100644 --- a/openpype/settings/entities/__init__.py +++ b/openpype/settings/entities/__init__.py @@ -107,6 +107,7 @@ from .enum_entity import ( TaskTypeEnumEntity, DeadlineUrlEnumEntity, AnatomyTemplatesEnumEntity, + ShotgridUrlEnumEntity ) from .list_entity import ListEntity @@ -171,6 +172,7 @@ __all__ = ( "ToolsEnumEntity", "TaskTypeEnumEntity", "DeadlineUrlEnumEntity", + "ShotgridUrlEnumEntity", "AnatomyTemplatesEnumEntity", "ListEntity", diff --git a/openpype/settings/entities/base_entity.py b/openpype/settings/entities/base_entity.py index 741f13c49b..f28fefdf5a 100644 --- a/openpype/settings/entities/base_entity.py +++ b/openpype/settings/entities/base_entity.py @@ -15,7 +15,7 @@ from .exceptions import ( EntitySchemaError ) -from openpype.lib import PypeLogger +from openpype.lib import Logger @six.add_metaclass(ABCMeta) @@ -478,7 +478,7 @@ class BaseItemEntity(BaseEntity): def log(self): """Auto created logger for debugging or warnings.""" if self._log is None: - self._log = PypeLogger.get_logger(self.__class__.__name__) + self._log = Logger.get_logger(self.__class__.__name__) return self._log @abstractproperty diff --git a/openpype/settings/entities/enum_entity.py b/openpype/settings/entities/enum_entity.py index 
03998677ce..defe4aa1f0 100644 --- a/openpype/settings/entities/enum_entity.py +++ b/openpype/settings/entities/enum_entity.py @@ -1,10 +1,7 @@ import copy from .input_entities import InputEntity from .exceptions import EntitySchemaError -from .lib import ( - NOT_SET, - STRING_TYPE -) +from .lib import NOT_SET, STRING_TYPE class BaseEnumEntity(InputEntity): @@ -26,7 +23,7 @@ class BaseEnumEntity(InputEntity): for item in self.enum_items: key = tuple(item.keys())[0] if key in enum_keys: - reason = "Key \"{}\" is more than once in enum items.".format( + reason = 'Key "{}" is more than once in enum items.'.format( key ) raise EntitySchemaError(self, reason) @@ -34,7 +31,7 @@ class BaseEnumEntity(InputEntity): enum_keys.add(key) if not isinstance(key, STRING_TYPE): - reason = "Key \"{}\" has invalid type {}, expected {}.".format( + reason = 'Key "{}" has invalid type {}, expected {}.'.format( key, type(key), STRING_TYPE ) raise EntitySchemaError(self, reason) @@ -59,7 +56,7 @@ class BaseEnumEntity(InputEntity): for item in check_values: if item not in self.valid_keys: raise ValueError( - "{} Invalid value \"{}\". Expected one of: {}".format( + '{} Invalid value "{}". Expected one of: {}'.format( self.path, item, self.valid_keys ) ) @@ -84,7 +81,7 @@ class EnumEntity(BaseEnumEntity): self.valid_keys = set(all_keys) if self.multiselection: - self.valid_value_types = (list, ) + self.valid_value_types = (list,) value_on_not_set = [] if enum_default: if not isinstance(enum_default, list): @@ -109,7 +106,7 @@ class EnumEntity(BaseEnumEntity): self.value_on_not_set = key break - self.valid_value_types = (STRING_TYPE, ) + self.valid_value_types = (STRING_TYPE,) # GUI attribute self.placeholder = self.schema_data.get("placeholder") @@ -152,6 +149,7 @@ class HostsEnumEntity(BaseEnumEntity): Host name is not the same as application name. Host name defines implementation instead of application name. """ + schema_types = ["hosts-enum"] all_host_names = [ "aftereffects", @@ -211,7 +209,7 @@ class HostsEnumEntity(BaseEnumEntity): self.valid_keys = valid_keys if self.multiselection: - self.valid_value_types = (list, ) + self.valid_value_types = (list,) self.value_on_not_set = [] else: for key in valid_keys: @@ -219,7 +217,7 @@ class HostsEnumEntity(BaseEnumEntity): self.value_on_not_set = key break - self.valid_value_types = (STRING_TYPE, ) + self.valid_value_types = (STRING_TYPE,) # GUI attribute self.placeholder = self.schema_data.get("placeholder") @@ -227,14 +225,10 @@ class HostsEnumEntity(BaseEnumEntity): def schema_validations(self): if self.hosts_filter: enum_len = len(self.enum_items) - if ( - enum_len == 0 - or (enum_len == 1 and self.use_empty_value) - ): - joined_filters = ", ".join([ - '"{}"'.format(item) - for item in self.hosts_filter - ]) + if enum_len == 0 or (enum_len == 1 and self.use_empty_value): + joined_filters = ", ".join( + ['"{}"'.format(item) for item in self.hosts_filter] + ) reason = ( "All host names were removed after applying" " host filters. 
{}" @@ -247,24 +241,25 @@ class HostsEnumEntity(BaseEnumEntity): invalid_filters.add(item) if invalid_filters: - joined_filters = ", ".join([ - '"{}"'.format(item) - for item in self.hosts_filter - ]) - expected_hosts = ", ".join([ - '"{}"'.format(item) - for item in self.all_host_names - ]) - self.log.warning(( - "Host filters containt invalid host names:" - " \"{}\" Expected values are {}" - ).format(joined_filters, expected_hosts)) + joined_filters = ", ".join( + ['"{}"'.format(item) for item in self.hosts_filter] + ) + expected_hosts = ", ".join( + ['"{}"'.format(item) for item in self.all_host_names] + ) + self.log.warning( + ( + "Host filters containt invalid host names:" + ' "{}" Expected values are {}' + ).format(joined_filters, expected_hosts) + ) super(HostsEnumEntity, self).schema_validations() class AppsEnumEntity(BaseEnumEntity): """Enum of applications for project anatomy attributes.""" + schema_types = ["apps-enum"] def _item_initialization(self): @@ -272,7 +267,7 @@ class AppsEnumEntity(BaseEnumEntity): self.value_on_not_set = [] self.enum_items = [] self.valid_keys = set() - self.valid_value_types = (list, ) + self.valid_value_types = (list,) self.placeholder = None def _get_enum_values(self): @@ -353,7 +348,7 @@ class ToolsEnumEntity(BaseEnumEntity): self.value_on_not_set = [] self.enum_items = [] self.valid_keys = set() - self.valid_value_types = (list, ) + self.valid_value_types = (list,) self.placeholder = None def _get_enum_values(self): @@ -410,10 +405,10 @@ class TaskTypeEnumEntity(BaseEnumEntity): def _item_initialization(self): self.multiselection = self.schema_data.get("multiselection", True) if self.multiselection: - self.valid_value_types = (list, ) + self.valid_value_types = (list,) self.value_on_not_set = [] else: - self.valid_value_types = (STRING_TYPE, ) + self.valid_value_types = (STRING_TYPE,) self.value_on_not_set = "" self.enum_items = [] @@ -508,7 +503,8 @@ class DeadlineUrlEnumEntity(BaseEnumEntity): enum_items_list = [] for server_name, url_entity in deadline_urls_entity.items(): enum_items_list.append( - {server_name: "{}: {}".format(server_name, url_entity.value)}) + {server_name: "{}: {}".format(server_name, url_entity.value)} + ) valid_keys.add(server_name) return enum_items_list, valid_keys @@ -531,6 +527,50 @@ class DeadlineUrlEnumEntity(BaseEnumEntity): self._current_value = tuple(self.valid_keys)[0] +class ShotgridUrlEnumEntity(BaseEnumEntity): + schema_types = ["shotgrid_url-enum"] + + def _item_initialization(self): + self.multiselection = False + + self.enum_items = [] + self.valid_keys = set() + + self.valid_value_types = (STRING_TYPE,) + self.value_on_not_set = "" + + # GUI attribute + self.placeholder = self.schema_data.get("placeholder") + + def _get_enum_values(self): + shotgrid_settings = self.get_entity_from_path( + "system_settings/modules/shotgrid/shotgrid_settings" + ) + + valid_keys = set() + enum_items_list = [] + for server_name, settings in shotgrid_settings.items(): + enum_items_list.append( + { + server_name: "{}: {}".format( + server_name, settings["shotgrid_url"].value + ) + } + ) + valid_keys.add(server_name) + return enum_items_list, valid_keys + + def set_override_state(self, *args, **kwargs): + super(ShotgridUrlEnumEntity, self).set_override_state(*args, **kwargs) + + self.enum_items, self.valid_keys = self._get_enum_values() + if not self.valid_keys: + self._current_value = "" + + elif self._current_value not in self.valid_keys: + self._current_value = tuple(self.valid_keys)[0] + + class 
AnatomyTemplatesEnumEntity(BaseEnumEntity): schema_types = ["anatomy-templates-enum"] diff --git a/openpype/settings/entities/schemas/projects_schema/schema_main.json b/openpype/settings/entities/schemas/projects_schema/schema_main.json index 6c07209de3..80b1baad1b 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_main.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_main.json @@ -62,6 +62,10 @@ "type": "schema", "name": "schema_project_ftrack" }, + { + "type": "schema", + "name": "schema_project_shotgrid" + }, { "type": "schema", "name": "schema_project_kitsu" diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_ftrack.json b/openpype/settings/entities/schemas/projects_schema/schema_project_ftrack.json index e008fd85ee..da414cc961 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_ftrack.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_ftrack.json @@ -299,24 +299,6 @@ } ] }, - { - "type": "dict", - "key": "first_version_status", - "label": "Set status on first created version", - "checkbox_key": "enabled", - "children": [ - { - "type": "boolean", - "key": "enabled", - "label": "Enabled" - }, - { - "type": "text", - "key": "status", - "label": "Status" - } - ] - }, { "type": "dict", "key": "next_task_update", @@ -841,6 +823,44 @@ } ] }, + { + "type": "dict", + "key": "IntegrateHierarchyToFtrack", + "label": "Integrate Hierarchy to ftrack", + "is_group": true, + "collapsible": true, + "children": [ + { + "type": "label", + "label": "Set task status on new task creation. Ftrack's default status is used otherwise." + }, + { + "type": "list", + "key": "create_task_status_profiles", + "object_type": { + "type": "dict", + "children": [ + { + "key": "task_types", + "label": "Task types", + "type": "task-types-enum" + }, + { + "key": "task_names", + "label": "Task names", + "type": "list", + "object_type": "text" + }, + { + "type": "text", + "key": "status_name", + "label": "Status name" + } + ] + } + } + ] + }, { "type": "dict", "collapsible": true, @@ -930,10 +950,25 @@ } ] }, + { + "type": "dict", + "collapsible": true, + "checkbox_key": "enabled", + "key": "IntegrateFtrackComponentOverwrite", + "label": "IntegrateFtrackComponentOverwrite", + "is_group": true, + "children": [ + { + "type": "boolean", + "key": "enabled", + "label": "Enabled" + } + ] + }, { "type": "dict", "key": "IntegrateFtrackInstance", - "label": "IntegrateFtrackInstance", + "label": "Integrate Ftrack Instance", "is_group": true, "children": [ { @@ -986,6 +1021,82 @@ } ] } + }, + { + "key": "additional_metadata_keys", + "label": "Additional metadata keys on components", + "type": "enum", + "multiselection": true, + "enum_items": [ + {"openpype_version": "OpenPype version"}, + {"frame_start": "Frame start"}, + {"frame_end": "Frame end"}, + {"duration": "Duration"}, + {"width": "Resolution width"}, + {"height": "Resolution height"}, + {"fps": "FPS"}, + {"code": "Codec"} + ] + } + ] + }, + { + "type": "dict", + "key": "IntegrateFtrackFarmStatus", + "label": "Integrate Ftrack Farm Status", + "children": [ + { + "type": "label", + "label": "Change status of task when it's subset is submitted to farm" + }, + { + "type": "list", + "collapsible": true, + "key": "farm_status_profiles", + "label": "Farm status profiles", + "use_label_wrap": true, + "object_type": { + "type": "dict", + "children": [ + { + "key": "hosts", + "label": "Host names", + "type": "hosts-enum", + "multiselection": true + }, + { + "key": 
"task_types", + "label": "Task types", + "type": "task-types-enum" + }, + { + "key": "task_names", + "label": "Task names", + "type": "list", + "object_type": "text" + }, + { + "key": "families", + "label": "Families", + "type": "list", + "object_type": "text" + }, + { + "key": "subsets", + "label": "Subset names", + "type": "list", + "object_type": "text" + }, + { + "type": "separator" + }, + { + "key": "status_name", + "label": "Status name", + "type": "text" + } + ] + } } ] } diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_maya.json b/openpype/settings/entities/schemas/projects_schema/schema_project_maya.json index 40e98b0333..816874779e 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_maya.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_maya.json @@ -57,6 +57,10 @@ "type": "schema", "name": "schema_scriptsmenu" }, + { + "type": "schema", + "name": "schema_maya_render_settings" + }, { "type": "schema", "name": "schema_maya_create" @@ -73,6 +77,10 @@ "type": "schema", "name": "schema_workfile_build" }, + { + "type": "schema", + "name": "schema_templated_workfile_build" + }, { "type": "schema", "name": "schema_publish_gui_filter" diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_photoshop.json b/openpype/settings/entities/schemas/projects_schema/schema_project_photoshop.json index badf94229b..7aa49c99a4 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_photoshop.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_photoshop.json @@ -45,9 +45,15 @@ "label": "Set color for publishable layers, set its resulting family and template for subset name. \nCan create flatten image from published instances.(Applicable only for remote publishing!)" }, { - "type": "boolean", "key": "create_flatten_image", - "label": "Create flatten image" + "label": "Create flatten image", + "type": "enum", + "multiselection": false, + "enum_items": [ + { "flatten_with_images": "Flatten with images" }, + { "flatten_only": "Flatten only" }, + { "no": "No" } + ] }, { "type": "text", @@ -186,6 +192,15 @@ "key": "make_image_sequence", "label": "Makes an image sequence instead of a flatten image" }, + { + "type": "number", + "key": "max_downscale_size", + "label": "Maximum size of sources for review", + "tooltip": "FFMpeg can only handle limited resolution for creation of review and/or thumbnail", + "minimum": 300, + "maximum": 16384, + "decimal": 0 + }, { "type": "dict", "collapsible": false, diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_shotgrid.json b/openpype/settings/entities/schemas/projects_schema/schema_project_shotgrid.json new file mode 100644 index 0000000000..4faeca89f3 --- /dev/null +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_shotgrid.json @@ -0,0 +1,98 @@ +{ + "type": "dict", + "key": "shotgrid", + "label": "Shotgrid", + "collapsible": true, + "is_file": true, + "children": [ + { + "type": "number", + "key": "shotgrid_project_id", + "label": "Shotgrid project id" + }, + { + "type": "shotgrid_url-enum", + "key": "shotgrid_server", + "label": "Shotgrid Server" + }, + { + "type": "dict", + "key": "event", + "label": "Event Handler", + "collapsible": true, + "checkbox_key": "enabled", + "children": [ + { + "type": "boolean", + "key": "enabled", + "label": "Enabled" + } + ] + }, + { + "type": "dict", + "key": "fields", + "label": "Fields Template", + "collapsible": true, + "children": 
[ + { + "type": "dict", + "key": "asset", + "label": "Asset", + "collapsible": true, + "children": [ + { + "type": "text", + "key": "type", + "label": "Asset Type" + } + ] + }, + { + "type": "dict", + "key": "sequence", + "label": "Sequence", + "collapsible": true, + "children": [ + { + "type": "text", + "key": "episode_link", + "label": "Episode link" + } + ] + }, + { + "type": "dict", + "key": "shot", + "label": "Shot", + "collapsible": true, + "children": [ + { + "type": "text", + "key": "episode_link", + "label": "Episode link" + }, + { + "type": "text", + "key": "sequence_link", + "label": "Sequence link" + } + ] + }, + { + "type": "dict", + "key": "task", + "label": "Task", + "collapsible": true, + "children": [ + { + "type": "text", + "key": "step", + "label": "Step link" + } + ] + } + ] + } + ] +} diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_traypublisher.json b/openpype/settings/entities/schemas/projects_schema/schema_project_traypublisher.json index 8f0f864dc2..7c61aeed50 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_traypublisher.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_traypublisher.json @@ -84,7 +84,197 @@ ] } }, - { + { + "type": "dict", + "collapsible": true, + "key": "editorial_creators", + "label": "Editorial creator plugins", + "use_label_wrap": true, + "collapsible_key": true, + "children": [ + { + "type": "dict", + "collapsible": true, + "key": "editorial_simple", + "label": "Editorial simple creator", + "use_label_wrap": true, + "collapsible_key": true, + "children": [ + + { + "type": "list", + "key": "default_variants", + "label": "Default variants", + "object_type": { + "type": "text" + } + }, + { + "type": "splitter" + }, + { + "type": "collapsible-wrap", + "label": "Shot metadata creator", + "collapsible": true, + "collapsed": true, + "children": [ + { + "key": "clip_name_tokenizer", + "label": "Clip name tokenizer", + "type": "dict-modifiable", + "highlight_content": true, + "tooltip": "Using Regex expression to create tokens. \nThose can be used later in \"Shot rename\" creator \nor \"Shot hierarchy\". \n\nTokens should be decorated with \"_\" on each side", + "object_type": { + "type": "text" + } + }, + { + "type": "dict", + "key": "shot_rename", + "label": "Shot rename", + "checkbox_key": "enabled", + "children": [ + { + "type": "boolean", + "key": "enabled", + "label": "Enabled" + }, + { + "type": "text", + "key": "shot_rename_template", + "label": "Shot rename template", + "tooltip":"Template only supports Anatomy keys and Tokens \nfrom \"Clip name tokenizer\"" + } + ] + }, + { + "type": "dict", + "key": "shot_hierarchy", + "label": "Shot hierarchy", + "checkbox_key": "enabled", + "children": [ + { + "type": "boolean", + "key": "enabled", + "label": "Enabled" + }, + { + "type": "text", + "key": "parents_path", + "label": "Parents path template", + "tooltip": "Using keys from \"Token to parent convertor\" or tokens directly" + }, + { + "key": "parents", + "label": "Token to parent convertor", + "type": "list", + "highlight_content": true, + "tooltip": "The left side is key to be used in template. 
\nThe right is value build from Tokens comming from \n\"Clip name tokenizer\"", + "object_type": { + "type": "dict", + "children": [ + { + "type": "enum", + "key": "type", + "label": "Parent type", + "enum_items": [ + {"Project": "Project"}, + {"Folder": "Folder"}, + {"Episode": "Episode"}, + {"Sequence": "Sequence"} + ] + }, + { + "type": "text", + "key": "name", + "label": "Parent token name", + "tooltip": "Unique name used in \"Parent path template\"" + }, + { + "type": "text", + "key": "value", + "label": "Parent name value", + "tooltip": "Template where any text, Anatomy keys and Tokens could be used" + } + ] + } + } + ] + }, + { + "key": "shot_add_tasks", + "label": "Add tasks to shot", + "type": "dict-modifiable", + "highlight_content": true, + "object_type": { + "type": "dict", + "children": [ + { + "type": "task-types-enum", + "key": "type", + "label": "Task type", + "multiselection": false + } + ] + } + } + ] + }, + { + "type": "collapsible-wrap", + "label": "Shot's subset creator", + "collapsible": true, + "collapsed": true, + "children": [ + { + "type": "list", + "key": "family_presets", + "label": "Family presets", + "object_type": { + "type": "dict", + "children": [ + { + "type": "enum", + "key": "family", + "label": "Family", + "enum_items": [ + {"review": "review"}, + {"plate": "plate"}, + {"audio": "audio"} + ] + }, + { + "type": "text", + "key": "variant", + "label": "Variant", + "placeholder": "< Inherited >" + }, + { + "type": "boolean", + "key": "review", + "label": "Review", + "default": true + }, + { + "type": "enum", + "key": "output_file_type", + "label": "Integrating file type", + "enum_items": [ + {".mp4": "MP4"}, + {".mov": "MOV"}, + {".wav": "WAV"} + ] + } + ] + } + } + ] + } + ] + } + ] + }, + { "type": "dict", "collapsible": true, "key": "BatchMovieCreator", diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_unreal.json b/openpype/settings/entities/schemas/projects_schema/schema_project_unreal.json index 4e197e9fc8..d26b5c1ccf 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_unreal.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_unreal.json @@ -5,6 +5,11 @@ "label": "Unreal Engine", "is_file": true, "children": [ + { + "type": "boolean", + "key": "level_sequences_for_layouts", + "label": "Generate level sequences when loading layouts" + }, { "type": "dict", "collapsible": true, diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_webpublisher.json b/openpype/settings/entities/schemas/projects_schema/schema_project_webpublisher.json index b76a0fa844..2ef7a05b21 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_webpublisher.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_webpublisher.json @@ -5,6 +5,38 @@ "label": "Web Publisher", "is_file": true, "children": [ + { + "type": "list", + "collapsible": true, + "use_label_wrap": true, + "key": "timeout_profiles", + "label": "Timeout profiles", + "object_type": { + "type": "dict", + "children": [ + { + "key": "hosts", + "label": "Host names", + "type": "hosts-enum", + "multiselection": true + }, + { + "key": "task_types", + "label": "Task types", + "type": "task-types-enum", + "multiselection": true + }, + { + "type": "separator" + }, + { + "type": "number", + "key": "timeout", + "label": "Timeout (sec)" + } + ] + } + }, { "type": "dict", "collapsible": true, diff --git 
a/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_publish.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_publish.json index a3cbf0cfcd..e1aa230b49 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_publish.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_publish.json @@ -319,6 +319,15 @@ "minimum": 0, "maximum": 100000 }, + { + "type": "label", + "label": "Rescale input when it's pixel aspect ratio is not 1. Usefull for anamorph reviews." + }, + { + "key": "scale_pixel_aspect", + "label": "Scale pixel aspect", + "type": "boolean" + }, { "type": "label", "label": "Background color is used only when input have transparency and Alpha is higher than 0." @@ -528,10 +537,111 @@ { "type": "dict", "collapsible": true, - "key": "IntegrateAssetNew", - "label": "IntegrateAssetNew", + "key": "IntegrateSubsetGroup", + "label": "Integrate Subset Group", "is_group": true, "children": [ + { + "type": "list", + "key": "subset_grouping_profiles", + "label": "Subset grouping profiles", + "use_label_wrap": true, + "object_type": { + "type": "dict", + "children": [ + { + "type": "label", + "label": "Set all published instances as a part of specific group named according to 'Template'.
Implemented all variants of placeholders [{task},{family},{host},{subset},{renderlayer}]" + }, + { + "key": "families", + "label": "Families", + "type": "list", + "object_type": "text" + }, + { + "type": "hosts-enum", + "key": "hosts", + "label": "Hosts", + "multiselection": true + }, + { + "key": "task_types", + "label": "Task types", + "type": "task-types-enum" + }, + { + "key": "tasks", + "label": "Task names", + "type": "list", + "object_type": "text" + }, + { + "type": "separator" + }, + { + "type": "text", + "key": "template", + "label": "Template" + } + ] + } + } + ] + }, + { + "type": "dict", + "collapsible": true, + "key": "IntegrateAssetNew", + "label": "IntegrateAsset (Legacy)", + "is_group": true, + "children": [ + { + "type": "label", + "label": "NOTE: Subset grouping profiles settings were moved to Integrate Subset Group. Please move values there." + }, + { + "type": "list", + "key": "subset_grouping_profiles", + "label": "Subset grouping profiles (DEPRECATED)", + "use_label_wrap": true, + "object_type": { + "type": "dict", + "children": [ + { + "key": "families", + "label": "Families", + "type": "list", + "object_type": "text" + }, + { + "type": "hosts-enum", + "key": "hosts", + "label": "Hosts", + "multiselection": true + }, + { + "key": "task_types", + "label": "Task types", + "type": "task-types-enum" + }, + { + "key": "tasks", + "label": "Task names", + "type": "list", + "object_type": "text" + }, + { + "type": "separator" + }, + { + "type": "text", + "key": "template", + "label": "Template" + } + ] + } + }, { "type": "list", "key": "template_name_profiles", @@ -577,49 +687,34 @@ } ] } - }, + } + ] + }, + { + "type": "dict", + "collapsible": true, + "key": "IntegrateAsset", + "label": "Integrate Asset", + "is_group": true, + "children": [ { "type": "list", - "key": "subset_grouping_profiles", - "label": "Subset grouping profiles", + "key": "skip_host_families", + "label": "Skip hosts and families", "use_label_wrap": true, "object_type": { "type": "dict", "children": [ { - "type": "label", - "label": "Set all published instances as a part of specific group named according to 'Template'.
Implemented all variants of placeholders [{task},{family},{host},{subset},{renderlayer}]" + "type": "hosts-enum", + "key": "host", + "label": "Host" }, { + "type": "list", "key": "families", "label": "Families", - "type": "list", "object_type": "text" - }, - { - "type": "hosts-enum", - "key": "hosts", - "label": "Hosts", - "multiselection": true - }, - { - "key": "task_types", - "label": "Task types", - "type": "task-types-enum" - }, - { - "key": "tasks", - "label": "Task names", - "type": "list", - "object_type": "text" - }, - { - "type": "separator" - }, - { - "type": "text", - "key": "template", - "label": "Template" } ] } diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_create.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_create.json index 09287a8b50..431add28df 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_create.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_create.json @@ -29,42 +29,9 @@ } ] }, - { - "type": "dict", - "collapsible": true, - "key": "CreateRender", - "label": "Create Render", - "checkbox_key": "enabled", - "children": [ - { - "type": "boolean", - "key": "enabled", - "label": "Enabled" - }, - { - "type": "list", - "key": "defaults", - "label": "Default Subsets", - "object_type": "text" - }, - { - "key": "aov_separator", - "label": "AOV Separator character", - "type": "enum", - "multiselection": false, - "default": "underscore", - "enum_items": [ - {"dash": "- (dash)"}, - {"underscore": "_ (underscore)"}, - {"dot": ". (dot)"} - ] - }, - { - "type": "text", - "key": "default_render_image_folder", - "label": "Default render image folder" - } - ] + { + "type": "schema", + "name": "schema_maya_create_render" }, { "type": "dict", @@ -143,6 +110,57 @@ } ] }, + { + "type": "dict", + "collapsible": true, + "key": "CreateAnimation", + "label": "Create Animation", + "checkbox_key": "enabled", + "children": [ + { + "type": "boolean", + "key": "enabled", + "label": "Enabled" + }, + { + "type": "boolean", + "key": "write_color_sets", + "label": "Write Color Sets" + }, + { + "type": "list", + "key": "defaults", + "label": "Default Subsets", + "object_type": "text" + } + ] + }, + { + "type": "dict", + "collapsible": true, + "key": "CreatePointCache", + "label": "Create Point Cache", + "checkbox_key": "enabled", + "children": [ + { + "type": "boolean", + "key": "enabled", + "label": "Enabled" + }, + { + "type": "boolean", + "key": "write_color_sets", + "label": "Write Color Sets" + }, + { + "type": "list", + "key": "defaults", + "label": "Default Subsets", + "object_type": "text" + } + ] + }, + { "type": "schema_template", "name": "template_create_plugin", @@ -159,10 +177,6 @@ "key": "CreateMultiverseUsdOver", "label": "Create Multiverse USD Override" }, - { - "key": "CreateAnimation", - "label": "Create Animation" - }, { "key": "CreateAss", "label": "Create Ass" @@ -187,10 +201,6 @@ "key": "CreateModel", "label": "Create Model" }, - { - "key": "CreatePointCache", - "label": "Create Cache" - }, { "key": "CreateRenderSetup", "label": "Create Render Setup" diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_create_render.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_create_render.json new file mode 100644 index 0000000000..68ad7ad63d --- /dev/null +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_create_render.json @@ -0,0 +1,20 @@ +{ + "type": "dict", + 
"collapsible": true, + "key": "CreateRender", + "label": "Create Render", + "checkbox_key": "enabled", + "children": [ + { + "type": "boolean", + "key": "enabled", + "label": "Enabled" + }, + { + "type": "list", + "key": "defaults", + "label": "Default Subsets", + "object_type": "text" + } + ] +} \ No newline at end of file diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_publish.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_publish.json index 84182973a1..53247f6bd4 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_publish.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_publish.json @@ -107,6 +107,11 @@ "key": "enabled", "label": "Enabled" }, + { + "type": "boolean", + "key": "optional", + "label": "Optional" + }, { "type": "label", "label": "Shader name regex can use named capture group asset to validate against current asset name.

Example:
^.*(?P<asset>.+)_SHD

" @@ -159,6 +164,11 @@ "key": "enabled", "label": "Enabled" }, + { + "type": "boolean", + "key": "optional", + "label": "Optional" + }, { "type": "boolean", "key": "whitelist_native_plugins", @@ -246,6 +256,11 @@ "key": "enabled", "label": "Enabled" }, + { + "type": "boolean", + "key": "optional", + "label": "Optional" + }, { "type": "boolean", "key": "validate_mesh", @@ -332,6 +347,72 @@ } ] }, + { + "type": "schema_template", + "name": "template_publish_plugin", + "template_data": [ + { + "key": "ValidateCurrentRenderLayerIsRenderable", + "label": "Validate Current Render Layer Has Renderable Camera" + }, + { + "key": "ValidateRenderImageRule", + "label": "Validate Images File Rule (Workspace)" + }, + { + "key": "ValidateRenderNoDefaultCameras", + "label": "Validate No Default Cameras Renderable" + }, + { + "key": "ValidateRenderSingleCamera", + "label": "Validate Render Single Camera" + }, + { + "key": "ValidateRenderLayerAOVs", + "label": "Validate Render Passes / AOVs Are Registered" + }, + { + "key": "ValidateStepSize", + "label": "Validate Step Size" + }, + { + "key": "ValidateVRayDistributedRendering", + "label": "VRay Distributed Rendering" + }, + { + "key": "ValidateVrayReferencedAOVs", + "label": "VRay Referenced AOVs" + }, + { + "key": "ValidateVRayTranslatorEnabled", + "label": "VRay Translator Settings" + }, + { + "key": "ValidateVrayProxy", + "label": "VRay Proxy Settings" + }, + { + "key": "ValidateVrayProxyMembers", + "label": "VRay Proxy Members" + }, + { + "key": "ValidateYetiRenderScriptCallbacks", + "label": "Yeti Render Script Callbacks" + }, + { + "key": "ValidateYetiRigCacheState", + "label": "Yeti Rig Cache State" + }, + { + "key": "ValidateYetiRigInputShapesInInstance", + "label": "Yeti Rig Input Shapes In Instance" + }, + { + "key": "ValidateYetiRigSettings", + "label": "Yeti Rig Settings" + } + ] + }, { "type": "collapsible-wrap", "label": "Model", @@ -416,6 +497,11 @@ "key": "enabled", "label": "Enabled" }, + { + "type": "boolean", + "key": "optional", + "label": "Optional" + }, { "type": "label", "label": "Validates transform suffix based on the type of its children shapes." 
@@ -472,6 +558,14 @@ "key": "ValidateMeshNonManifold", "label": "ValidateMeshNonManifold" }, + { + "key": "ValidateMeshNoNegativeScale", + "label": "Validate Mesh No Negative Scale" + }, + { + "key": "ValidateMeshNonZeroEdgeLength", + "label": "Validate Mesh Edge Length Non Zero" + }, { "key": "ValidateMeshNormalsUnlocked", "label": "ValidateMeshNormalsUnlocked" @@ -525,6 +619,18 @@ { "key": "ValidateUniqueNames", "label": "ValidateUniqueNames" + }, + { + "key": "ValidateNoVRayMesh", + "label": "Validate No V-Ray Proxies (VRayMesh)" + }, + { + "key": "ValidateUnrealMeshTriangulated", + "label": "Validate if Mesh is Triangulated" + }, + { + "key": "ValidateAlembicVisibleOnly", + "label": "Validate Alembic visible node" } ] }, @@ -573,6 +679,26 @@ { "key": "ValidateRigControllers", "label": "Validate Rig Controllers" + }, + { + "key": "ValidateAnimationContent", + "label": "Validate Animation Content" + }, + { + "key": "ValidateOutRelatedNodeIds", + "label": "Validate Animation Out Set Related Node Ids" + }, + { + "key": "ValidateRigControllersArnoldAttributes", + "label": "Validate Rig Controllers (Arnold Attributes)" + }, + { + "key": "ValidateSkeletalMeshHierarchy", + "label": "Validate Skeletal Mesh Top Node" + }, + { + "key": "ValidateSkinclusterDeformerSet", + "label": "Validate Skincluster Deformer Relationships" } ] }, @@ -589,6 +715,11 @@ "key": "enabled", "label": "Enabled" }, + { + "type": "boolean", + "key": "optional", + "label": "Optional" + }, { "type": "boolean", "key": "allow_history_only", @@ -611,9 +742,33 @@ "key": "ValidateAssemblyName", "label": "Validate Assembly Name" }, + { + "key": "ValidateAssemblyNamespaces", + "label": "Validate Assembly Namespaces" + }, + { + "key": "ValidateAssemblyModelTransforms", + "label": "Validate Assembly Model Transforms" + }, { "key": "ValidateAssRelativePaths", "label": "ValidateAssRelativePaths" + }, + { + "key": "ValidateInstancerContent", + "label": "Validate Instancer Content" + }, + { + "key": "ValidateInstancerFrameRanges", + "label": "Validate Instancer Cache Frame Ranges" + }, + { + "key": "ValidateNoDefaultCameras", + "label": "Validate No Default Cameras" + }, + { + "key": "ValidateUnrealUpAxis", + "label": "Validate Unreal Up-Axis check" } ] }, diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_render_settings.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_render_settings.json new file mode 100644 index 0000000000..6ee02ca78f --- /dev/null +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_render_settings.json @@ -0,0 +1,423 @@ +{ + "type": "dict", + "collapsible": true, + "key": "RenderSettings", + "label": "Render Settings", + "children": [ + { + "type": "boolean", + "key": "apply_render_settings", + "label": "Apply Render Settings on creation" + }, + { + "type": "text", + "key": "default_render_image_folder", + "label": "Default render image folder" + }, + { + "type": "boolean", + "key": "enable_all_lights", + "label": "Include all lights in Render Setup Layers by default" + }, + { + "key": "aov_separator", + "label": "AOV Separator character", + "type": "enum", + "multiselection": false, + "default": "underscore", + "enum_items": [ + {"dash": "- (dash)"}, + {"underscore": "_ (underscore)"}, + {"dot": ". 
(dot)"} + ] + }, + { + "key": "reset_current_frame", + "label": "Reset Current Frame", + "type": "boolean" + }, + { + "type": "dict", + "collapsible": true, + "key": "arnold_renderer", + "label": "Arnold Renderer", + "is_group": true, + "children": [ + { + "key": "image_prefix", + "label": "Image prefix template", + "type": "text" + }, + { + "key": "image_format", + "label": "Output Image Format", + "type": "enum", + "multiselection": false, + "defaults": "exr", + "enum_items": [ + {"jpeg": "jpeg"}, + {"png": "png"}, + {"deepexr": "deep exr"}, + {"tif": "tif"}, + {"exr": "exr"}, + {"maya": "maya"}, + {"mtoa_shaders": "mtoa_shaders"} + ] + }, + { + "key": "multilayer_exr", + "label": "Multilayer (exr)", + "type": "boolean" + }, + { + "key": "tiled", + "label": "Tiled (tif, exr)", + "type": "boolean" + }, + { + "key": "aov_list", + "label": "AOVs to create", + "type": "enum", + "multiselection": true, + "defaults": "empty", + "enum_items": [ + {"empty": "< empty >"}, + {"ID": "ID"}, + {"N": "N"}, + {"P": "P"}, + {"Pref": "Pref"}, + {"RGBA": "RGBA"}, + {"Z": "Z"}, + {"albedo": "albedo"}, + {"background": "background"}, + {"coat": "coat"}, + {"coat_albedo": "coat_albedo"}, + {"coat_direct": "coat_direct"}, + {"coat_indirect": "coat_indirect"}, + {"cputime": "cputime"}, + {"crypto_asset": "crypto_asset"}, + {"crypto_material": "cypto_material"}, + {"crypto_object": "crypto_object"}, + {"diffuse": "diffuse"}, + {"diffuse_albedo": "diffuse_albedo"}, + {"diffuse_direct": "diffuse_direct"}, + {"diffuse_indirect": "diffuse_indirect"}, + {"direct": "direct"}, + {"emission": "emission"}, + {"highlight": "highlight"}, + {"indirect": "indirect"}, + {"motionvector": "motionvector"}, + {"opacity": "opacity"}, + {"raycount": "raycount"}, + {"rim_light": "rim_light"}, + {"shadow": "shadow"}, + {"shadow_diff": "shadow_diff"}, + {"shadow_mask": "shadow_mask"}, + {"shadow_matte": "shadow_matte"}, + {"sheen": "sheen"}, + {"sheen_albedo": "sheen_albedo"}, + {"sheen_direct": "sheen_direct"}, + {"sheen_indirect": "sheen_indirect"}, + {"specular": "specular"}, + {"specular_albedo": "specular_albedo"}, + {"specular_direct": "specular_direct"}, + {"specular_indirect": "specular_indirect"}, + {"sss": "sss"}, + {"sss_albedo": "sss_albedo"}, + {"sss_direct": "sss_direct"}, + {"sss_indirect": "sss_indirect"}, + {"transmission": "transmission"}, + {"transmission_albedo": "transmission_albedo"}, + {"transmission_direct": "transmission_direct"}, + {"transmission_indirect": "transmission_indirect"}, + {"volume": "volume"}, + {"volume_Z": "volume_Z"}, + {"volume_albedo": "volume_albedo"}, + {"volume_direct": "volume_direct"}, + {"volume_indirect": "volume_indirect"}, + {"volume_opacity": "volume_opacity"} + ] + }, + { + "type": "label", + "label": "Add additional options - put attribute and value, like AASamples" + }, + { + "type": "dict-modifiable", + "store_as_list": true, + "key": "additional_options", + "label": "Additional Renderer Options", + "use_label_wrap": true, + "object_type": { + "type": "text" + } + } + ] + }, + { + "type": "dict", + "collapsible": true, + "key": "vray_renderer", + "label": "V-Ray Renderer", + "is_group": true, + "children": [ + { + "key": "image_prefix", + "label": "Image prefix template", + "type": "text" + }, + { + "key": "engine", + "label": "Production Engine", + "type": "enum", + "multiselection": false, + "defaults": "1", + "enum_items": [ + {"1": "V-Ray"}, + {"2": "V-Ray GPU"} + ] + }, + { + "key": "image_format", + "label": "Output Image Format", + "type": "enum", + "multiselection": 
false, + "defaults": "exr", + "enum_items": [ + {"png": "png"}, + {"jpg": "jpg"}, + {"vrimg": "vrimg"}, + {"hdr": "hdr"}, + {"exr": "exr"}, + {"exr (multichannel)": "exr (multichannel)"}, + {"exr (deep)": "exr (deep)"}, + {"tga": "tga"}, + {"bmp": "bmp"}, + {"sgi": "sgi"} + ] + }, + { + "key": "aov_list", + "label": "AOVs to create", + "type": "enum", + "multiselection": true, + "defaults": "empty", + "enum_items": [ + {"empty": "< empty >"}, + {"atmosphereChannel": "atmosphere"}, + {"backgroundChannel": "background"}, + {"bumpNormalsChannel": "bumpnormals"}, + {"causticsChannel": "caustics"}, + {"coatFilterChannel": "coat_filter"}, + {"coatGlossinessChannel": "coatGloss"}, + {"coatReflectionChannel": "coat_reflection"}, + {"vrayCoatChannel": "coat_specular"}, + {"CoverageChannel": "coverage"}, + {"cryptomatteChannel": "cryptomatte"}, + {"customColor": "custom_color"}, + {"drBucketChannel": "DR"}, + {"denoiserChannel": "denoiser"}, + {"diffuseChannel": "diffuse"}, + {"ExtraTexElement": "extraTex"}, + {"giChannel": "GI"}, + {"LightMixElement": "None"}, + {"lightingChannel": "lighting"}, + {"LightingAnalysisChannel": "LightingAnalysis"}, + {"materialIDChannel": "materialID"}, + {"MaterialSelectElement": "materialSelect"}, + {"matteShadowChannel": "matteShadow"}, + {"MultiMatteElement": "multimatte"}, + {"multimatteIDChannel": "multimatteID"}, + {"normalsChannel": "normals"}, + {"nodeIDChannel": "objectId"}, + {"objectSelectChannel": "objectSelect"}, + {"rawCoatFilterChannel": "raw_coat_filter"}, + {"rawCoatReflectionChannel": "raw_coat_reflection"}, + {"rawDiffuseFilterChannel": "rawDiffuseFilter"}, + {"rawGiChannel": "rawGI"}, + {"rawLightChannel": "rawLight"}, + {"rawReflectionChannel": "rawReflection"}, + {"rawReflectionFilterChannel": "rawReflectionFilter"}, + {"rawRefractionChannel": "rawRefraction"}, + {"rawRefractionFilterChannel": "rawRefractionFilter"}, + {"rawShadowChannel": "rawShadow"}, + {"rawSheenFilterChannel": "raw_sheen_filter"}, + {"rawSheenReflectionChannel": "raw_sheen_reflection"}, + {"rawTotalLightChannel": "rawTotalLight"}, + {"reflectIORChannel": "reflIOR"}, + {"reflectChannel": "reflect"}, + {"reflectionFilterChannel": "reflectionFilter"}, + {"reflectGlossinessChannel": "reflGloss"}, + {"refractChannel": "refract"}, + {"refractionFilterChannel": "refractionFilter"}, + {"refractGlossinessChannel": "refrGloss"}, + {"renderIDChannel": "renderId"}, + {"FastSSS2Channel": "SSS"}, + {"sampleRateChannel": "sampleRate"}, + {"samplerInfo": "samplerInfo"}, + {"selfIllumChannel": "selfIllum"}, + {"shadowChannel": "shadow"}, + {"sheenFilterChannel": "sheen_filter"}, + {"sheenGlossinessChannel": "sheenGloss"}, + {"sheenReflectionChannel": "sheen_reflection"}, + {"vraySheenChannel": "sheen_specular"}, + {"specularChannel": "specular"}, + {"Toon": "Toon"}, + {"toonLightingChannel": "toonLighting"}, + {"toonSpecularChannel": "toonSpecular"}, + {"totalLightChannel": "totalLight"}, + {"unclampedColorChannel": "unclampedColor"}, + {"VRScansPaintMaskChannel": "VRScansPaintMask"}, + {"VRScansZoneMaskChannel": "VRScansZoneMask"}, + {"velocityChannel": "velocity"}, + {"zdepthChannel": "zDepth"}, + {"LightSelectElement": "lightselect"} + ] + }, + { + "type": "label", + "label": "Add additional options - put attribute and value, like aaFilterSize" + }, + { + "type": "dict-modifiable", + "store_as_list": true, + "key": "additional_options", + "label": "Additional Renderer Options", + "use_label_wrap": true, + "object_type": { + "type": "text" + } + } + ] + }, + { + "type": "dict", + 
"collapsible": true, + "key": "redshift_renderer", + "label": "Redshift Renderer", + "is_group": true, + "children": [ + { + "key": "image_prefix", + "label": "Image prefix template", + "type": "text" + }, + { + "key": "primary_gi_engine", + "label": "Primary GI Engine", + "type": "enum", + "multiselection": false, + "defaults": "0", + "enum_items": [ + {"0": "None"}, + {"1": "Photon Map"}, + {"2": "Irradiance Cache"}, + {"3": "Brute Force"} + ] + }, + { + "key": "secondary_gi_engine", + "label": "Secondary GI Engine", + "type": "enum", + "multiselection": false, + "defaults": "0", + "enum_items": [ + {"0": "None"}, + {"1": "Photon Map"}, + {"2": "Irradiance Cache"}, + {"3": "Brute Force"} + ] + }, + { + "key": "image_format", + "label": "Output Image Format", + "type": "enum", + "multiselection": false, + "defaults": "exr", + "enum_items": [ + {"iff": "Maya IFF"}, + {"exr": "OpenEXR"}, + {"tif": "TIFF"}, + {"png": "PNG"}, + {"tga": "Targa"}, + {"jpg": "JPEG"} + ] + }, + { + "key": "multilayer_exr", + "label": "Multilayer (exr)", + "type": "boolean" + }, + { + "key": "force_combine", + "label": "Force combine beauty and AOVs", + "type": "boolean" + }, + { + "key": "aov_list", + "label": "AOVs to create", + "type": "enum", + "multiselection": true, + "defaults": "empty", + "enum_items": [ + {"empty": "< none >"}, + {"AO": "Ambient Occlusion"}, + {"Background": "Background"}, + {"Beauty": "Beauty"}, + {"BumpNormals": "Bump Normals"}, + {"Caustics": "Caustics"}, + {"CausticsRaw": "Caustics Raw"}, + {"Cryptomatte": "Cryptomatte"}, + {"Custom": "Custom"}, + {"Z": "Depth"}, + {"DiffuseFilter": "Diffuse Filter"}, + {"DiffuseLighting": "Diffuse Lighting"}, + {"DiffuseLightingRaw": "Diffuse Lighting Raw"}, + {"Emission": "Emission"}, + {"GI": "Global Illumination"}, + {"GIRaw": "Global Illumination Raw"}, + {"Matte": "Matte"}, + {"MotionVectors": "Ambient Occlusion"}, + {"N": "Normals"}, + {"ID": "ObjectID"}, + {"ObjectBumpNormal": "Object-Space Bump Normals"}, + {"ObjectPosition": "Object-Space Positions"}, + {"PuzzleMatte": "Puzzle Matte"}, + {"Reflections": "Reflections"}, + {"ReflectionsFilter": "Reflections Filter"}, + {"ReflectionsRaw": "Reflections Raw"}, + {"Refractions": "Refractions"}, + {"RefractionsFilter": "Refractions Filter"}, + {"RefractionsRaw": "Refractions Filter"}, + {"Shadows": "Shadows"}, + {"SpecularLighting": "Specular Lighting"}, + {"SSS": "Sub Surface Scatter"}, + {"SSSRaw": "Sub Surface Scatter Raw"}, + {"TotalDiffuseLightingRaw": "Total Diffuse Lighting Raw"}, + {"TotalTransLightingRaw": "Total Translucency Filter"}, + {"TransTint": "Translucency Filter"}, + {"TransGIRaw": "Translucency Lighting Raw"}, + {"VolumeFogEmission": "Volume Fog Emission"}, + {"VolumeFogTint": "Volume Fog Tint"}, + {"VolumeLighting": "Volume Lighting"}, + {"P": "World Position"} + ] + }, + { + "type": "label", + "label": "Add additional options - put attribute and value, like reflectionMaxTraceDepth" + }, + { + "type": "dict-modifiable", + "store_as_list": true, + "key": "additional_options", + "label": "Additional Renderer Options", + "use_label_wrap": true, + "object_type": { + "type": "text" + } + } + ] + } + ] +} diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_nuke_publish.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_nuke_publish.json index 575bfe79e7..e5827a92c4 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_nuke_publish.json +++ 
b/openpype/settings/entities/schemas/projects_schema/schemas/schema_nuke_publish.json @@ -61,8 +61,8 @@ "name": "template_publish_plugin", "template_data": [ { - "key": "ValidateInstanceInContext", - "label": "Validate Instance In Context" + "key": "ValidateCorrectAssetName", + "label": "Validate Correct Asset name" } ] }, diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_representation_tags.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_representation_tags.json index 484fbf9d07..a4b28f47bc 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_representation_tags.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_representation_tags.json @@ -13,6 +13,9 @@ { "ftrackreview": "Add review to Ftrack" }, + { + "shotgridreview": "Add review to Shotgrid" + }, { "delete": "Delete output" }, diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_templated_workfile_build.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_templated_workfile_build.json new file mode 100644 index 0000000000..a591facf98 --- /dev/null +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_templated_workfile_build.json @@ -0,0 +1,35 @@ +{ + "type": "dict", + "collapsible": true, + "key": "templated_workfile_build", + "label": "Templated Workfile Build Settings", + "children": [ + { + "type": "list", + "key": "profiles", + "label": "Profiles", + "object_type": { + "type": "dict", + "children": [ + { + "key": "task_types", + "label": "Task types", + "type": "task-types-enum" + }, + { + "key": "tasks", + "label": "Task names", + "type": "list", + "object_type": "text" + }, + { + "key": "path", + "label": "Path to template", + "type": "text", + "object_type": "text" + } + ] + } + } + ] +} diff --git a/openpype/settings/entities/schemas/system_schema/module_settings/schema_ftrack.json b/openpype/settings/entities/schemas/system_schema/module_settings/schema_ftrack.json index 654ddf2938..7c5774415c 100644 --- a/openpype/settings/entities/schemas/system_schema/module_settings/schema_ftrack.json +++ b/openpype/settings/entities/schemas/system_schema/module_settings/schema_ftrack.json @@ -50,8 +50,15 @@ "is_group": true, "children": [ { - "type": "label", - "label": "Intent" + "type": "boolean", + "key": "allow_empty_intent", + "label": "Allow empty intent" + }, + { + "type": "text", + "key": "empty_intent_label", + "label": "Empty item label", + "placeholder": "< Not set >" }, { "type": "dict-modifiable", @@ -64,7 +71,8 @@ { "key": "default", "type": "text", - "label": "Default Intent" + "label": "Default Intent", + "placeholder": "< First available >" }, { "type": "separator" diff --git a/openpype/settings/entities/schemas/system_schema/schema_modules.json b/openpype/settings/entities/schemas/system_schema/schema_modules.json index d22b9016a7..952b38040c 100644 --- a/openpype/settings/entities/schemas/system_schema/schema_modules.json +++ b/openpype/settings/entities/schemas/system_schema/schema_modules.json @@ -48,6 +48,60 @@ "type": "schema", "name": "schema_kitsu" }, + { + "type": "dict", + "key": "shotgrid", + "label": "Shotgrid", + "collapsible": true, + "checkbox_key": "enabled", + "children": [ + { + "type": "boolean", + "key": "enabled", + "label": "Enabled" + }, + { + "type": "text", + "key": "leecher_manager_url", + "label": "Shotgrid Leecher Manager URL" + }, + { + "type": "text", + "key": "leecher_backend_url", + "label": "Shotgrid Leecher Backend URL" 
+ }, + { + "type": "boolean", + "key": "filter_projects_by_login", + "label": "Filter projects by SG login" + }, + { + "type": "dict-modifiable", + "key": "shotgrid_settings", + "label": "Shotgrid Servers", + "object_type": { + "type": "dict", + "children": [ + { + "key": "shotgrid_url", + "label": "Server URL", + "type": "text" + }, + { + "key": "shotgrid_script_name", + "label": "Script Name", + "type": "text" + }, + { + "key": "shotgrid_script_key", + "label": "Script api key", + "type": "text" + } + ] + } + } + ] + }, { "type": "dict", "key": "timers_manager", diff --git a/openpype/settings/handlers.py b/openpype/settings/handlers.py index c99fc6080b..def8c16ea7 100644 --- a/openpype/settings/handlers.py +++ b/openpype/settings/handlers.py @@ -7,6 +7,8 @@ from abc import ABCMeta, abstractmethod import six import openpype.version +from openpype.client.mongo import OpenPypeMongoConnection +from openpype.client.entities import get_project_connection, get_project from .constants import ( GLOBAL_SETTINGS_KEY, @@ -20,6 +22,164 @@ from .constants import ( ) +class SettingsStateInfo: + """Helper state information about some settings state. + + Is used to hold information about last saved and last opened UI. Keep + information about the time when that happened and on which machine under + which user and on which openpype version. + + To create currrent machine and time information use 'create_new' method. + """ + + timestamp_format = "%Y-%m-%d %H:%M:%S.%f" + + def __init__( + self, + openpype_version, + settings_type, + project_name, + timestamp, + hostname, + hostip, + username, + system_name, + local_id + ): + self.openpype_version = openpype_version + self.settings_type = settings_type + self.project_name = project_name + + timestamp_obj = None + if timestamp: + timestamp_obj = datetime.datetime.strptime( + timestamp, self.timestamp_format + ) + self.timestamp = timestamp + self.timestamp_obj = timestamp_obj + self.hostname = hostname + self.hostip = hostip + self.username = username + self.system_name = system_name + self.local_id = local_id + + def copy(self): + return self.from_data(self.to_data()) + + @classmethod + def create_new( + cls, openpype_version, settings_type=None, project_name=None + ): + """Create information about this machine for current time.""" + + from openpype.lib.pype_info import get_workstation_info + + now = datetime.datetime.now() + workstation_info = get_workstation_info() + + return cls( + openpype_version, + settings_type, + project_name, + now.strftime(cls.timestamp_format), + workstation_info["hostname"], + workstation_info["hostip"], + workstation_info["username"], + workstation_info["system_name"], + workstation_info["local_id"] + ) + + @classmethod + def from_data(cls, data): + """Create object from data.""" + + return cls( + data["openpype_version"], + data["settings_type"], + data["project_name"], + data["timestamp"], + data["hostname"], + data["hostip"], + data["username"], + data["system_name"], + data["local_id"] + ) + + def to_data(self): + data = self.to_document_data() + data.update({ + "openpype_version": self.openpype_version, + "settings_type": self.settings_type, + "project_name": self.project_name + }) + return data + + @classmethod + def create_new_empty(cls, openpype_version, settings_type=None): + return cls( + openpype_version, + settings_type, + None, + None, + None, + None, + None, + None, + None + ) + + @classmethod + def from_document(cls, openpype_version, settings_type, document): + document = document or {} + project_name = 
document.get("project_name") + last_saved_info = document.get("last_saved_info") + if last_saved_info: + copy_last_saved_info = copy.deepcopy(last_saved_info) + copy_last_saved_info.update({ + "openpype_version": openpype_version, + "settings_type": settings_type, + "project_name": project_name, + }) + return cls.from_data(copy_last_saved_info) + return cls( + openpype_version, + settings_type, + project_name, + None, + None, + None, + None, + None, + None + ) + + def to_document_data(self): + return { + "timestamp": self.timestamp, + "hostname": self.hostname, + "hostip": self.hostip, + "username": self.username, + "system_name": self.system_name, + "local_id": self.local_id, + } + + def __eq__(self, other): + if not isinstance(other, SettingsStateInfo): + return False + + if other.timestamp_obj != self.timestamp_obj: + return False + + return ( + self.openpype_version == other.openpype_version + and self.hostname == other.hostname + and self.hostip == other.hostip + and self.username == other.username + and self.system_name == other.system_name + and self.local_id == other.local_id + ) + + @six.add_metaclass(ABCMeta) class SettingsHandler: @abstractmethod @@ -224,7 +384,7 @@ class SettingsHandler: """OpenPype versions that have any studio project anatomy overrides. Returns: - list: OpenPype versions strings. + List[str]: OpenPype versions strings. """ pass @@ -235,7 +395,7 @@ class SettingsHandler: """OpenPype versions that have any studio project settings overrides. Returns: - list: OpenPype versions strings. + List[str]: OpenPype versions strings. """ pass @@ -249,8 +409,87 @@ class SettingsHandler: project_name(str): Name of project. Returns: - list: OpenPype versions strings. + List[str]: OpenPype versions strings. """ + + pass + + @abstractmethod + def get_system_last_saved_info(self): + """State of last system settings overrides at the moment when called. + + This method must provide most recent data so using cached data is not + the way. + + Returns: + SettingsStateInfo: Information about system settings overrides. + """ + + pass + + @abstractmethod + def get_project_last_saved_info(self, project_name): + """State of last project settings overrides at the moment when called. + + This method must provide most recent data so using cached data is not + the way. + + Args: + project_name (Union[None, str]): Project name for which state + should be returned. + + Returns: + SettingsStateInfo: Information about project settings overrides. + """ + + pass + + # UI related calls + @abstractmethod + def get_last_opened_info(self): + """Get information about last opened UI. + + Last opened UI is empty if there is noone who would have opened UI at + the moment when called. + + Returns: + Union[None, SettingsStateInfo]: Information about machine who had + opened Settings UI. + """ + + pass + + @abstractmethod + def opened_settings_ui(self): + """Callback called when settings UI is opened. + + Information about this machine must be available when + 'get_last_opened_info' is called from anywhere until + 'closed_settings_ui' is called again. + + Returns: + SettingsStateInfo: Object representing information about this + machine. Must be passed to 'closed_settings_ui' when finished. + """ + + pass + + @abstractmethod + def closed_settings_ui(self, info_obj): + """Callback called when settings UI is closed. + + From the moment this method is called the information about this + machine is removed and no more available when 'get_last_opened_info' + is called. 
+ + Callback should validate if this machine is still stored as opened ui + before changing any value. + + Args: + info_obj (SettingsStateInfo): Object created when + 'opened_settings_ui' was called. + """ + pass @@ -283,19 +522,22 @@ class CacheValues: self.data = None self.creation_time = None self.version = None + self.last_saved_info = None def data_copy(self): if not self.data: return {} return copy.deepcopy(self.data) - def update_data(self, data, version=None): + def update_data(self, data, version): self.data = data self.creation_time = datetime.datetime.now() - if version is not None: - self.version = version + self.version = version - def update_from_document(self, document, version=None): + def update_last_saved_info(self, last_saved_info): + self.last_saved_info = last_saved_info + + def update_from_document(self, document, version): data = {} if document: if "data" in document: @@ -304,9 +546,9 @@ class CacheValues: value = document["value"] if value: data = json.loads(value) + self.data = data - if version is not None: - self.version = version + self.version = version def to_json_string(self): return json.dumps(self.data or {}) @@ -318,6 +560,9 @@ class CacheValues: delta = (datetime.datetime.now() - self.creation_time).seconds return delta > self.cache_lifetime + def set_outdated(self): + self.create_time = None + class MongoSettingsHandler(SettingsHandler): """Settings handler that use mongo for storing and loading of settings.""" @@ -337,9 +582,6 @@ class MongoSettingsHandler(SettingsHandler): def __init__(self): # Get mongo connection - from openpype.lib import OpenPypeMongoConnection - from openpype.pipeline import AvalonMongoDB - settings_collection = OpenPypeMongoConnection.get_mongo_client() self._anatomy_keys = None @@ -362,7 +604,6 @@ class MongoSettingsHandler(SettingsHandler): self.collection_name = collection_name self.collection = settings_collection[database_name][collection_name] - self.avalon_db = AvalonMongoDB() self.system_settings_cache = CacheValues() self.project_settings_cache = collections.defaultdict(CacheValues) @@ -511,6 +752,14 @@ class MongoSettingsHandler(SettingsHandler): # Update cache self.system_settings_cache.update_data(data, self._current_version) + last_saved_info = SettingsStateInfo.create_new( + self._current_version, + SYSTEM_SETTINGS_KEY + ) + self.system_settings_cache.update_last_saved_info( + last_saved_info + ) + # Get copy of just updated cache system_settings_data = self.system_settings_cache.data_copy() @@ -519,20 +768,29 @@ class MongoSettingsHandler(SettingsHandler): system_settings_data ) - # Store system settings - self.collection.replace_one( + system_settings_doc = self.collection.find_one( { "type": self._system_settings_key, "version": self._current_version }, - { - "type": self._system_settings_key, - "data": system_settings_data, - "version": self._current_version - }, - upsert=True + {"_id": True} ) + # Store system settings + new_system_settings_doc = { + "type": self._system_settings_key, + "version": self._current_version, + "data": system_settings_data, + "last_saved_info": last_saved_info.to_document_data() + } + if not system_settings_doc: + self.collection.insert_one(new_system_settings_doc) + else: + self.collection.update_one( + {"_id": system_settings_doc["_id"]}, + {"$set": new_system_settings_doc} + ) + # Store global settings self.collection.replace_one( { @@ -564,8 +822,19 @@ class MongoSettingsHandler(SettingsHandler): data_cache = self.project_settings_cache[project_name] 
data_cache.update_data(overrides, self._current_version) + last_saved_info = SettingsStateInfo.create_new( + self._current_version, + PROJECT_SETTINGS_KEY, + project_name + ) + + data_cache.update_last_saved_info(last_saved_info) + self._save_project_data( - project_name, self._project_settings_key, data_cache + project_name, + self._project_settings_key, + data_cache, + last_saved_info ) def save_project_anatomy(self, project_name, anatomy_data): @@ -583,8 +852,16 @@ class MongoSettingsHandler(SettingsHandler): self._save_project_anatomy_data(project_name, data_cache) else: + last_saved_info = SettingsStateInfo.create_new( + self._current_version, + PROJECT_ANATOMY_KEY, + project_name + ) self._save_project_data( - project_name, self._project_anatomy_key, data_cache + project_name, + self._project_anatomy_key, + data_cache, + last_saved_info ) @classmethod @@ -607,16 +884,14 @@ class MongoSettingsHandler(SettingsHandler): new_data = data_cache.data_copy() # Prepare avalon project document - collection = self.avalon_db.database[project_name] - project_doc = collection.find_one({ - "type": "project" - }) + project_doc = get_project(project_name) if not project_doc: raise ValueError(( "Project document of project \"{}\" does not exists." " Create project first." ).format(project_name)) + collection = get_project_connection(project_name) # Project's data update_dict_data = {} project_doc_data = project_doc.get("data") or {} @@ -667,28 +942,39 @@ class MongoSettingsHandler(SettingsHandler): {"$set": update_dict} ) - def _save_project_data(self, project_name, doc_type, data_cache): + def _save_project_data( + self, project_name, doc_type, data_cache, last_saved_info + ): is_default = bool(project_name is None) - replace_filter = { + query_filter = { "type": doc_type, "is_default": is_default, "version": self._current_version } - replace_data = { + + new_project_settings_doc = { "type": doc_type, "data": data_cache.data, "is_default": is_default, - "version": self._current_version + "version": self._current_version, + "last_saved_info": last_saved_info.to_data() } - if not is_default: - replace_filter["project_name"] = project_name - replace_data["project_name"] = project_name - self.collection.replace_one( - replace_filter, - replace_data, - upsert=True + if not is_default: + query_filter["project_name"] = project_name + new_project_settings_doc["project_name"] = project_name + + project_settings_doc = self.collection.find_one( + query_filter, + {"_id": True} ) + if project_settings_doc: + self.collection.update_one( + {"_id": project_settings_doc["_id"]}, + {"$set": new_project_settings_doc} + ) + else: + self.collection.insert_one(new_project_settings_doc) def _get_versions_order_doc(self, projection=None): # TODO cache @@ -1015,19 +1301,11 @@ class MongoSettingsHandler(SettingsHandler): globals_document = self.collection.find_one({ "type": GLOBAL_SETTINGS_KEY }) - document = ( - self._get_studio_system_settings_overrides_for_version() + document, version = self._get_system_settings_overrides_doc() + + last_saved_info = SettingsStateInfo.from_document( + version, SYSTEM_SETTINGS_KEY, document ) - if document is None: - document = self._find_closest_system_settings() - - version = None - if document: - if document["type"] == self._system_settings_key: - version = document["version"] - else: - version = LEGACY_SETTINGS_VERSION - merged_document = self._apply_global_settings( document, globals_document ) @@ -1035,6 +1313,9 @@ class MongoSettingsHandler(SettingsHandler): 
self.system_settings_cache.update_from_document( merged_document, version ) + self.system_settings_cache.update_last_saved_info( + last_saved_info + ) cache = self.system_settings_cache data = cache.data_copy() @@ -1042,24 +1323,43 @@ class MongoSettingsHandler(SettingsHandler): return data, cache.version return data + def _get_system_settings_overrides_doc(self): + document = ( + self._get_studio_system_settings_overrides_for_version() + ) + if document is None: + document = self._find_closest_system_settings() + + version = None + if document: + if document["type"] == self._system_settings_key: + version = document["version"] + else: + version = LEGACY_SETTINGS_VERSION + + return document, version + + def get_system_last_saved_info(self): + # Make sure settings are recaches + self.system_settings_cache.set_outdated() + self.get_studio_system_settings_overrides(False) + + return self.system_settings_cache.last_saved_info.copy() + def _get_project_settings_overrides(self, project_name, return_version): if self.project_settings_cache[project_name].is_outdated: - document = self._get_project_settings_overrides_for_version( + document, version = self._get_project_settings_overrides_doc( project_name ) - if document is None: - document = self._find_closest_project_settings(project_name) - - version = None - if document: - if document["type"] == self._project_settings_key: - version = document["version"] - else: - version = LEGACY_SETTINGS_VERSION - self.project_settings_cache[project_name].update_from_document( document, version ) + last_saved_info = SettingsStateInfo.from_document( + version, PROJECT_SETTINGS_KEY, document + ) + self.project_settings_cache[project_name].update_last_saved_info( + last_saved_info + ) cache = self.project_settings_cache[project_name] data = cache.data_copy() @@ -1067,6 +1367,29 @@ class MongoSettingsHandler(SettingsHandler): return data, cache.version return data + def _get_project_settings_overrides_doc(self, project_name): + document = self._get_project_settings_overrides_for_version( + project_name + ) + if document is None: + document = self._find_closest_project_settings(project_name) + + version = None + if document: + if document["type"] == self._project_settings_key: + version = document["version"] + else: + version = LEGACY_SETTINGS_VERSION + + return document, version + + def get_project_last_saved_info(self, project_name): + # Make sure settings are recaches + self.project_settings_cache[project_name].set_outdated() + self._get_project_settings_overrides(project_name, False) + + return self.project_settings_cache[project_name].last_saved_info.copy() + def get_studio_project_settings_overrides(self, return_version): """Studio overrides of default project settings.""" return self._get_project_settings_overrides(None, return_version) @@ -1144,9 +1467,9 @@ class MongoSettingsHandler(SettingsHandler): self.project_anatomy_cache[project_name].update_from_document( document, version ) + else: - collection = self.avalon_db.database[project_name] - project_doc = collection.find_one({"type": "project"}) + project_doc = get_project(project_name) self.project_anatomy_cache[project_name].update_data( self.project_doc_to_anatomy_data(project_doc), self._current_version @@ -1364,6 +1687,64 @@ class MongoSettingsHandler(SettingsHandler): return output return self._sort_versions(output) + def get_last_opened_info(self): + doc = self.collection.find_one({ + "type": "last_opened_settings_ui", + "version": self._current_version + }) or {} + info_data = doc.get("info") + 
if not info_data: + return None + + # Fill not available information + info_data["openpype_version"] = self._current_version + info_data["settings_type"] = None + info_data["project_name"] = None + return SettingsStateInfo.from_data(info_data) + + def opened_settings_ui(self): + doc_filter = { + "type": "last_opened_settings_ui", + "version": self._current_version + } + + opened_info = SettingsStateInfo.create_new(self._current_version) + new_doc_data = copy.deepcopy(doc_filter) + new_doc_data["info"] = opened_info.to_document_data() + + doc = self.collection.find_one( + doc_filter, + {"_id": True} + ) + if doc: + self.collection.update_one( + {"_id": doc["_id"]}, + {"$set": new_doc_data} + ) + else: + self.collection.insert_one(new_doc_data) + return opened_info + + def closed_settings_ui(self, info_obj): + doc_filter = { + "type": "last_opened_settings_ui", + "version": self._current_version + } + doc = self.collection.find_one(doc_filter) or {} + info_data = doc.get("info") + if not info_data: + return + + info_data["openpype_version"] = self._current_version + info_data["settings_type"] = None + info_data["project_name"] = None + current_info = SettingsStateInfo.from_data(info_data) + if current_info == info_obj: + self.collection.update_one( + {"_id": doc["_id"]}, + {"$set": {"info": None}} + ) + class MongoLocalSettingsHandler(LocalSettingsHandler): """Settings handler that use mongo for store and load local settings. @@ -1410,7 +1791,7 @@ class MongoLocalSettingsHandler(LocalSettingsHandler): """ data = data or {} - self.local_settings_cache.update_data(data) + self.local_settings_cache.update_data(data, None) self.collection.replace_one( { @@ -1433,6 +1814,6 @@ class MongoLocalSettingsHandler(LocalSettingsHandler): "site_id": self.local_site_id }) - self.local_settings_cache.update_from_document(document) + self.local_settings_cache.update_from_document(document, None) return self.local_settings_cache.data_copy() diff --git a/openpype/settings/lib.py b/openpype/settings/lib.py index 6df41112c8..5eaddf6e6e 100644 --- a/openpype/settings/lib.py +++ b/openpype/settings/lib.py @@ -91,6 +91,31 @@ def calculate_changes(old_value, new_value): return changes +@require_handler +def get_system_last_saved_info(): + return _SETTINGS_HANDLER.get_system_last_saved_info() + + +@require_handler +def get_project_last_saved_info(project_name): + return _SETTINGS_HANDLER.get_project_last_saved_info(project_name) + + +@require_handler +def get_last_opened_info(): + return _SETTINGS_HANDLER.get_last_opened_info() + + +@require_handler +def opened_settings_ui(): + return _SETTINGS_HANDLER.opened_settings_ui() + + +@require_handler +def closed_settings_ui(info_obj): + return _SETTINGS_HANDLER.closed_settings_ui(info_obj) + + @require_handler def save_studio_settings(data): """Save studio overrides of system settings. 
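
The new `SettingsHandler` callbacks and the module-level wrappers added to `openpype/settings/lib.py` above form a small "who has the Settings UI open" lock. A rough sketch of the intended lifecycle, with UI plumbing and error handling omitted (an illustration, not the Settings tool's actual code):

```python
from openpype.settings.lib import (
    get_last_opened_info,
    opened_settings_ui,
    closed_settings_ui,
)

# Warn when another machine already holds the "last opened" record
# for the current OpenPype version.
existing = get_last_opened_info()
if existing is not None:
    print("Settings UI is already opened by {} on {}".format(
        existing.username, existing.hostname
    ))

# Register this machine as the one with the UI open. The returned
# SettingsStateInfo must be passed back on close so the handler can
# verify the record still belongs to this session before clearing it.
session_info = opened_settings_ui()
try:
    # ... run the Settings UI ...
    pass
finally:
    closed_settings_ui(session_info)
```
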
diff --git a/openpype/tests/test_lib_restructuralization.py b/openpype/tests/test_lib_restructuralization.py index ccccc76a08..c8952e5a1c 100644 --- a/openpype/tests/test_lib_restructuralization.py +++ b/openpype/tests/test_lib_restructuralization.py @@ -22,7 +22,6 @@ def test_backward_compatibility(printer): from openpype.lib import any_outdated from openpype.lib import get_asset from openpype.lib import get_linked_assets - from openpype.lib import get_latest_version from openpype.lib import get_ffprobe_streams from openpype.hosts.fusion.lib import switch_item diff --git a/openpype/tools/launcher/models.py b/openpype/tools/launcher/models.py index 3f899cc05e..6d40d21f96 100644 --- a/openpype/tools/launcher/models.py +++ b/openpype/tools/launcher/models.py @@ -10,6 +10,7 @@ from Qt import QtCore, QtGui import qtawesome from openpype.client import ( + get_projects, get_project, get_assets, ) @@ -527,7 +528,7 @@ class LauncherModel(QtCore.QObject): current_project = self.project_name project_names = set() project_docs_by_name = {} - for project_doc in self._dbcon.projects(only_active=True): + for project_doc in get_projects(): project_name = project_doc["name"] project_names.add(project_name) project_docs_by_name[project_name] = project_doc diff --git a/openpype/tools/libraryloader/app.py b/openpype/tools/libraryloader/app.py index 5f4d10d796..d2af1b7151 100644 --- a/openpype/tools/libraryloader/app.py +++ b/openpype/tools/libraryloader/app.py @@ -3,7 +3,7 @@ import sys from Qt import QtWidgets, QtCore, QtGui from openpype import style -from openpype.client import get_project +from openpype.client import get_projects, get_project from openpype.pipeline import AvalonMongoDB from openpype.tools.utils import lib as tools_lib from openpype.tools.loader.widgets import ( @@ -239,7 +239,7 @@ class LibraryLoaderWindow(QtWidgets.QDialog): def get_filtered_projects(self): projects = list() - for project in self.dbcon.projects(): + for project in get_projects(fields=["name", "data.library_project"]): is_library = project.get("data", {}).get("library_project", False) if ( (is_library and self.show_libraries) or diff --git a/openpype/tools/loader/model.py b/openpype/tools/loader/model.py index a5174bd804..929e497890 100644 --- a/openpype/tools/loader/model.py +++ b/openpype/tools/loader/model.py @@ -272,15 +272,17 @@ class SubsetsModel(TreeModel, BaseRepresentationModel): # update availability on active site when version changes if self.sync_server.enabled and version_doc: - query = self._repre_per_version_pipeline( - [version_doc["_id"]], - self.active_site, - self.remote_site + repres_info = list( + self.sync_server.get_repre_info_for_versions( + project_name, + [version_doc["_id"]], + self.active_site, + self.remote_site + ) ) - docs = list(self.dbcon.aggregate(query)) - if docs: - repre = docs.pop() - version_doc["data"].update(self._get_repre_dict(repre)) + if repres_info: + version_doc["data"].update( + self._get_repre_dict(repres_info[0])) self.set_version(index, version_doc) @@ -472,29 +474,34 @@ class SubsetsModel(TreeModel, BaseRepresentationModel): last_versions_by_subset_id[subset_id] = hero_version - repre_info = {} + repre_info_by_version_id = {} if self.sync_server.enabled: - version_ids = set() + versions_by_id = {} for _subset_id, doc in last_versions_by_subset_id.items(): - version_ids.add(doc["_id"]) + versions_by_id[doc["_id"]] = doc - query = self._repre_per_version_pipeline( - list(version_ids), self.active_site, self.remote_site + repres_info = 
self.sync_server.get_repre_info_for_versions( + project_name, + list(versions_by_id.keys()), + self.active_site, + self.remote_site ) - - for doc in self.dbcon.aggregate(query): + for repre_info in repres_info: if self._doc_fetching_stop: return + + version_id = repre_info["_id"] + doc = versions_by_id[version_id] doc["active_provider"] = self.active_provider doc["remote_provider"] = self.remote_provider - repre_info[doc["_id"]] = doc + repre_info_by_version_id[version_id] = repre_info self._doc_payload = { "asset_docs_by_id": asset_docs_by_id, "subset_docs_by_id": subset_docs_by_id, "subset_families": subset_families, "last_versions_by_subset_id": last_versions_by_subset_id, - "repre_info_by_version_id": repre_info + "repre_info_by_version_id": repre_info_by_version_id } self.doc_fetched.emit() @@ -827,83 +834,6 @@ class SubsetsModel(TreeModel, BaseRepresentationModel): return data - def _repre_per_version_pipeline(self, version_ids, - active_site, remote_site): - query = [ - {"$match": {"parent": {"$in": version_ids}, - "type": "representation", - "files.sites.name": {"$exists": 1}}}, - {"$unwind": "$files"}, - {'$addFields': { - 'order_local': { - '$filter': { - 'input': '$files.sites', 'as': 'p', - 'cond': {'$eq': ['$$p.name', active_site]} - } - } - }}, - {'$addFields': { - 'order_remote': { - '$filter': { - 'input': '$files.sites', 'as': 'p', - 'cond': {'$eq': ['$$p.name', remote_site]} - } - } - }}, - {'$addFields': { - 'progress_local': {"$arrayElemAt": [{ - '$cond': [ - {'$size': "$order_local.progress"}, - "$order_local.progress", - # if exists created_dt count is as available - {'$cond': [ - {'$size': "$order_local.created_dt"}, - [1], - [0] - ]} - ]}, - 0 - ]} - }}, - {'$addFields': { - 'progress_remote': {"$arrayElemAt": [{ - '$cond': [ - {'$size': "$order_remote.progress"}, - "$order_remote.progress", - # if exists created_dt count is as available - {'$cond': [ - {'$size': "$order_remote.created_dt"}, - [1], - [0] - ]} - ]}, - 0 - ]} - }}, - {'$group': { # first group by repre - '_id': '$_id', - 'parent': {'$first': '$parent'}, - 'avail_ratio_local': { - '$first': { - '$divide': [{'$sum': "$progress_local"}, {'$sum': 1}] - } - }, - 'avail_ratio_remote': { - '$first': { - '$divide': [{'$sum': "$progress_remote"}, {'$sum': 1}] - } - } - }}, - {'$group': { # second group by parent, eg version_id - '_id': '$parent', - 'repre_count': {'$sum': 1}, # total representations - # fully available representation for site - 'avail_repre_local': {'$sum': "$avail_ratio_local"}, - 'avail_repre_remote': {'$sum': "$avail_ratio_remote"}, - }}, - ] - return query - class GroupMemberFilterProxyModel(QtCore.QSortFilterProxyModel): """Provide the feature of filtering group by the acceptance of members diff --git a/openpype/tools/loader/widgets.py b/openpype/tools/loader/widgets.py index 13e18b3757..cbf5720803 100644 --- a/openpype/tools/loader/widgets.py +++ b/openpype/tools/loader/widgets.py @@ -17,6 +17,7 @@ from openpype.client import ( get_thumbnail_id_from_source, get_thumbnail, ) +from openpype.client.operations import OperationsSession, REMOVED_VALUE from openpype.pipeline import HeroVersionType, Anatomy from openpype.pipeline.thumbnail import get_thumbnail_binary from openpype.pipeline.load import ( @@ -434,7 +435,8 @@ class SubsetWidget(QtWidgets.QWidget): # Get all representation->loader combinations available for the # index under the cursor, so we can list the user the options. 
- available_loaders = discover_loader_plugins() + project_name = self.dbcon.active_project() + available_loaders = discover_loader_plugins(project_name) if self.tool_name: available_loaders = lib.remove_tool_name_from_loaders( available_loaders, self.tool_name @@ -566,12 +568,12 @@ class SubsetWidget(QtWidgets.QWidget): # Trigger project_name = self.dbcon.active_project() - subset_names_by_version_id = collections.defaultdict(set) + subset_name_by_version_id = dict() for item in items: version_id = item["version_document"]["_id"] - subset_names_by_version_id[version_id].add(item["subset"]) + subset_name_by_version_id[version_id] = item["subset"] - version_ids = set(subset_names_by_version_id.keys()) + version_ids = set(subset_name_by_version_id.keys()) repre_docs = get_representations( project_name, representation_names=[representation_name], @@ -583,14 +585,15 @@ class SubsetWidget(QtWidgets.QWidget): for repre_doc in repre_docs: repre_ids.append(repre_doc["_id"]) + # keep only version ids without representation with that name version_id = repre_doc["parent"] - if version_id not in version_ids: - version_ids.remove(version_id) + version_ids.discard(version_id) - for version_id in version_ids: + if version_ids: + # report versions that didn't have valid representation joined_subset_names = ", ".join([ - '"{}"'.format(subset) - for subset in subset_names_by_version_id[version_id] + '"{}"'.format(subset_name_by_version_id[version_id]) + for version_id in version_ids ]) self.echo("Subsets {} don't have representation '{}'".format( joined_subset_names, representation_name @@ -612,26 +615,30 @@ class SubsetWidget(QtWidgets.QWidget): box.show() def group_subsets(self, name, asset_ids, items): - field = "data.subsetGroup" + subset_ids = { + item["_id"] + for item in items + if item.get("_id") + } + if not subset_ids: + return if name: - update = {"$set": {field: name}} self.echo("Group subsets to '%s'.." % name) else: - update = {"$unset": {field: ""}} self.echo("Ungroup subsets..") - subsets = list() - for item in items: - subsets.append(item["subset"]) + project_name = self.dbcon.active_project() + op_session = OperationsSession() + for subset_id in subset_ids: + op_session.update_entity( + project_name, + "subset", + subset_id, + {"data.subsetGroup": name or REMOVED_VALUE} + ) - for asset_id in asset_ids: - filtr = { - "type": "subset", - "parent": asset_id, - "name": {"$in": subsets}, - } - self.dbcon.update_many(filtr, update) + op_session.commit() def echo(self, message): print(message) @@ -1330,7 +1337,8 @@ class RepresentationWidget(QtWidgets.QWidget): selected_side = self._get_selected_side(point_index, rows) # Get all representation->loader combinations available for the # index under the cursor, so we can list the user the options. 
- available_loaders = discover_loader_plugins() + project_name = self.dbcon.active_project() + available_loaders = discover_loader_plugins(project_name) filtered_loaders = [] for loader in available_loaders: @@ -1544,6 +1552,11 @@ def _load_representations_by_loader(loader, repre_contexts, return for repre_context in repre_contexts.values(): + version_doc = repre_context["version"] + if version_doc["type"] == "hero_version": + version_name = "Hero" + else: + version_name = version_doc.get("name") try: if data_by_repre_id: _id = repre_context["representation"]["_id"] @@ -1561,7 +1574,7 @@ def _load_representations_by_loader(loader, repre_contexts, None, repre_context["representation"]["name"], repre_context["subset"]["name"], - repre_context["version"]["name"] + version_name )) except Exception as exc: @@ -1574,7 +1587,7 @@ def _load_representations_by_loader(loader, repre_contexts, formatted_traceback, repre_context["representation"]["name"], repre_context["subset"]["name"], - repre_context["version"]["name"] + version_name )) return error_info diff --git a/openpype/tools/project_manager/project_manager/model.py b/openpype/tools/project_manager/project_manager/model.py index c5bde5aaec..6f40140e5e 100644 --- a/openpype/tools/project_manager/project_manager/model.py +++ b/openpype/tools/project_manager/project_manager/model.py @@ -8,14 +8,13 @@ from pymongo import UpdateOne, DeleteOne from Qt import QtCore, QtGui from openpype.client import ( + get_projects, get_project, get_assets, get_asset_ids_with_subsets, ) -from openpype.lib import ( - CURRENT_DOC_SCHEMAS, - PypeLogger, -) +from openpype.client.operations import CURRENT_ASSET_DOC_SCHEMA +from openpype.lib import Logger from .constants import ( IDENTIFIER_ROLE, @@ -54,12 +53,8 @@ class ProjectModel(QtGui.QStandardItemModel): self._items_by_name[None] = none_project new_project_items.append(none_project) - project_docs = self.dbcon.projects( - projection={"name": 1}, - only_active=True - ) project_names = set() - for project_doc in project_docs: + for project_doc in get_projects(fields=["name"]): project_name = project_doc.get("name") if not project_name: continue @@ -206,7 +201,7 @@ class HierarchyModel(QtCore.QAbstractItemModel): @property def log(self): if self._log is None: - self._log = PypeLogger.get_logger("ProjectManagerModel") + self._log = Logger.get_logger("ProjectManagerModel") return self._log @property @@ -1964,7 +1959,7 @@ class AssetItem(BaseItem): } schema_name = ( self._origin_asset_doc.get("schema") - or CURRENT_DOC_SCHEMAS["asset"] + or CURRENT_ASSET_DOC_SCHEMA ) doc = { diff --git a/openpype/tools/project_manager/project_manager/widgets.py b/openpype/tools/project_manager/project_manager/widgets.py index 371d1ba2ef..d0715f204d 100644 --- a/openpype/tools/project_manager/project_manager/widgets.py +++ b/openpype/tools/project_manager/project_manager/widgets.py @@ -5,8 +5,8 @@ from .constants import ( NAME_ALLOWED_SYMBOLS, NAME_REGEX ) -from openpype.lib import ( - create_project, +from openpype.lib import create_project +from openpype.client.operations import ( PROJECT_NAME_ALLOWED_SYMBOLS, PROJECT_NAME_REGEX ) diff --git a/openpype/tools/publisher/publish_report_viewer/model.py b/openpype/tools/publisher/publish_report_viewer/model.py index bd03376c55..704feeb4bd 100644 --- a/openpype/tools/publisher/publish_report_viewer/model.py +++ b/openpype/tools/publisher/publish_report_viewer/model.py @@ -1,9 +1,9 @@ import uuid -import html from Qt import QtCore, QtGui import pyblish.api +from openpype.tools.utils.lib import 
html_escape from .constants import ( ITEM_ID_ROLE, ITEM_IS_GROUP_ROLE, @@ -46,7 +46,7 @@ class InstancesModel(QtGui.QStandardItemModel): all_removed = True for instance_item in instance_items: item = QtGui.QStandardItem(instance_item.label) - instance_label = html.escape(instance_item.label) + instance_label = html_escape(instance_item.label) item.setData(instance_label, ITEM_LABEL_ROLE) item.setData(instance_item.errored, ITEM_ERRORED_ROLE) item.setData(instance_item.id, ITEM_ID_ROLE) diff --git a/openpype/tools/publisher/publish_report_viewer/report_items.py b/openpype/tools/publisher/publish_report_viewer/report_items.py index 8a01569723..206f999bac 100644 --- a/openpype/tools/publisher/publish_report_viewer/report_items.py +++ b/openpype/tools/publisher/publish_report_viewer/report_items.py @@ -79,7 +79,7 @@ class PublishReport: context_data = data["context"] context_data["name"] = "context" - context_data["label"] = context_data["label"] or "Context" + context_data["label"] = context_data.get("label") or "Context" logs = [] plugins_items_by_id = {} diff --git a/openpype/tools/publisher/widgets/card_view_widgets.py b/openpype/tools/publisher/widgets/card_view_widgets.py index bd591138f4..fa391f4ba0 100644 --- a/openpype/tools/publisher/widgets/card_view_widgets.py +++ b/openpype/tools/publisher/widgets/card_view_widgets.py @@ -22,13 +22,13 @@ Only one item can be selected at a time. import re import collections -import html from Qt import QtWidgets, QtCore from openpype.widgets.nice_checkbox import NiceCheckbox from openpype.tools.utils import BaseClickableFrame +from openpype.tools.utils.lib import html_escape from .widgets import ( AbstractInstanceView, ContextWarningLabel, @@ -308,7 +308,7 @@ class InstanceCardWidget(CardWidget): self._last_variant = variant self._last_subset_name = subset_name # Make `variant` bold - label = html.escape(self.instance.label) + label = html_escape(self.instance.label) found_parts = set(re.findall(variant, label, re.IGNORECASE)) if found_parts: for part in found_parts: diff --git a/openpype/tools/publisher/widgets/create_dialog.py b/openpype/tools/publisher/widgets/create_dialog.py index d4740b2493..173df7d5c8 100644 --- a/openpype/tools/publisher/widgets/create_dialog.py +++ b/openpype/tools/publisher/widgets/create_dialog.py @@ -11,10 +11,10 @@ except Exception: from Qt import QtWidgets, QtCore, QtGui from openpype.client import get_asset_by_name, get_subsets -from openpype.lib import TaskNotSetError from openpype.pipeline.create import ( CreatorError, - SUBSET_NAME_ALLOWED_SYMBOLS + SUBSET_NAME_ALLOWED_SYMBOLS, + TaskNotSetError, ) from openpype.tools.utils import ( ErrorMessageBox, diff --git a/openpype/tools/publisher/widgets/list_view_widgets.py b/openpype/tools/publisher/widgets/list_view_widgets.py index 3e4fd5b72d..6e31ba635b 100644 --- a/openpype/tools/publisher/widgets/list_view_widgets.py +++ b/openpype/tools/publisher/widgets/list_view_widgets.py @@ -23,12 +23,12 @@ selection can be enabled disabled using checkbox or keyboard key presses: ``` """ import collections -import html from Qt import QtWidgets, QtCore, QtGui from openpype.style import get_objected_colors from openpype.widgets.nice_checkbox import NiceCheckbox +from openpype.tools.utils.lib import html_escape from .widgets import AbstractInstanceView from ..constants import ( INSTANCE_ID_ROLE, @@ -114,7 +114,7 @@ class InstanceListItemWidget(QtWidgets.QWidget): self.instance = instance - instance_label = html.escape(instance.label) + instance_label = html_escape(instance.label) 
subset_name_label = QtWidgets.QLabel(instance_label, self) subset_name_label.setObjectName("ListViewSubsetName") @@ -181,7 +181,7 @@ class InstanceListItemWidget(QtWidgets.QWidget): # Check subset name label = self.instance.label if label != self._instance_label_widget.text(): - self._instance_label_widget.setText(html.escape(label)) + self._instance_label_widget.setText(html_escape(label)) # Check active state self.set_active(self.instance["active"]) # Check valid states diff --git a/openpype/tools/publisher/widgets/publish_widget.py b/openpype/tools/publisher/widgets/publish_widget.py index 80d0265dd3..b32b5381d1 100644 --- a/openpype/tools/publisher/widgets/publish_widget.py +++ b/openpype/tools/publisher/widgets/publish_widget.py @@ -335,14 +335,12 @@ class PublishFrame(QtWidgets.QFrame): if instance is None: new_name = ( context.data.get("label") - or getattr(context, "label", None) or context.data.get("name") or "Context" ) else: new_name = ( instance.data.get("label") - or getattr(instance, "label", None) or instance.data["name"] ) diff --git a/openpype/tools/publisher/widgets/widgets.py b/openpype/tools/publisher/widgets/widgets.py index 5a5f8c4c37..aa7e3be687 100644 --- a/openpype/tools/publisher/widgets/widgets.py +++ b/openpype/tools/publisher/widgets/widgets.py @@ -6,7 +6,6 @@ import collections from Qt import QtWidgets, QtCore, QtGui import qtawesome -from openpype.lib import TaskNotSetError from openpype.widgets.attribute_defs import create_widget_for_attr_def from openpype.tools import resources from openpype.tools.flickcharm import FlickCharm @@ -17,7 +16,10 @@ from openpype.tools.utils import ( BaseClickableFrame, set_style_property, ) -from openpype.pipeline.create import SUBSET_NAME_ALLOWED_SYMBOLS +from openpype.pipeline.create import ( + SUBSET_NAME_ALLOWED_SYMBOLS, + TaskNotSetError, +) from .assets_widget import AssetsDialog from .tasks_widget import TasksModel from .icons import ( diff --git a/openpype/tools/pyblish_pype/control.py b/openpype/tools/pyblish_pype/control.py index f657936b79..05e53a989a 100644 --- a/openpype/tools/pyblish_pype/control.py +++ b/openpype/tools/pyblish_pype/control.py @@ -244,7 +244,6 @@ class Controller(QtCore.QObject): self.context.optional = False self.context.data["publish"] = True - self.context.data["label"] = "Context" self.context.data["name"] = "context" self.context.data["host"] = reversed(pyblish.api.registered_hosts()) diff --git a/openpype/tools/pyblish_pype/model.py b/openpype/tools/pyblish_pype/model.py index 2931a379b3..1479d91bb5 100644 --- a/openpype/tools/pyblish_pype/model.py +++ b/openpype/tools/pyblish_pype/model.py @@ -86,7 +86,7 @@ class IntentModel(QtGui.QStandardItemModel): First and default value is {"< Not Set >": None} """ - default_item = {"< Not Set >": None} + default_empty_label = "< Not set >" def __init__(self, parent=None): super(IntentModel, self).__init__(parent) @@ -102,27 +102,39 @@ class IntentModel(QtGui.QStandardItemModel): self._item_count = 0 self.default_index = 0 - intents_preset = ( + intent_settings = ( get_system_settings() .get("modules", {}) .get("ftrack", {}) .get("intent", {}) ) - default = intents_preset.get("default") - items = intents_preset.get("items", {}) + items = intent_settings.get("items", {}) if not items: return - for idx, item_value in enumerate(items.keys()): + allow_empty_intent = intent_settings.get("allow_empty_intent", True) + empty_intent_label = ( + intent_settings.get("empty_intent_label") + or self.default_empty_label + ) + listed_items = list(items.items()) + if 
allow_empty_intent: + listed_items.insert(0, ("", empty_intent_label)) + + default = intent_settings.get("default") + + for idx, item in enumerate(listed_items): + item_value = item[0] if item_value == default: self.default_index = idx break - self.add_items(items) + self._add_items(listed_items) - def add_items(self, items): - for value, label in items.items(): + def _add_items(self, items): + for item in items: + value, label = item new_item = QtGui.QStandardItem() new_item.setData(label, QtCore.Qt.DisplayRole) new_item.setData(value, Roles.IntentItemValue) @@ -584,11 +596,6 @@ class InstanceItem(QtGui.QStandardItem): instance._logs = [] instance.optional = getattr(instance, "optional", True) instance.data["publish"] = instance.data.get("publish", True) - instance.data["label"] = ( - instance.data.get("label") - or getattr(instance, "label", None) - or instance.data["name"] - ) family = self.data(Roles.FamiliesRole)[0] self.setData( @@ -604,9 +611,16 @@ class InstanceItem(QtGui.QStandardItem): def data(self, role=QtCore.Qt.DisplayRole): if role == QtCore.Qt.DisplayRole: + label = None if settings.UseLabel: - return self.instance.data["label"] - return self.instance.data["name"] + label = self.instance.data.get("label") + + if not label: + if self.is_context: + label = "Context" + else: + label = self.instance.data["name"] + return label if role == QtCore.Qt.DecorationRole: icon_name = self.instance.data.get("icon") or "file" diff --git a/openpype/tools/pyblish_pype/window.py b/openpype/tools/pyblish_pype/window.py index 78590259bc..e167405325 100644 --- a/openpype/tools/pyblish_pype/window.py +++ b/openpype/tools/pyblish_pype/window.py @@ -523,6 +523,7 @@ class Window(QtWidgets.QDialog): instance_item.setData(enable_value, Roles.IsEnabledRole) def _add_intent_to_context(self): + context_value = None if ( self.intent_model.has_items and "intent" not in self.controller.context.data @@ -530,11 +531,17 @@ class Window(QtWidgets.QDialog): idx = self.intent_model.index(self.intent_box.currentIndex(), 0) intent_value = self.intent_model.data(idx, Roles.IntentItemValue) intent_label = self.intent_model.data(idx, QtCore.Qt.DisplayRole) + if intent_value: + context_value = { + "value": intent_value, + "label": intent_label + } - self.controller.context.data["intent"] = { - "value": intent_value, - "label": intent_label - } + # Unset intent if is set to empty value + if context_value is None: + self.controller.context.data.pop("intent", None) + else: + self.controller.context.data["intent"] = context_value def on_instance_toggle(self, index, state=None): """An item is requesting to be toggled""" diff --git a/openpype/tools/sceneinventory/view.py b/openpype/tools/sceneinventory/view.py index 63d181b2d6..e0e43aaba7 100644 --- a/openpype/tools/sceneinventory/view.py +++ b/openpype/tools/sceneinventory/view.py @@ -551,16 +551,16 @@ class SceneInventoryView(QtWidgets.QTreeView): "toggle": selection_model.Toggle, }[options.get("mode", "select")] - for item in iter_model_rows(model, 0): - item = item.data(InventoryModel.ItemRole) + for index in iter_model_rows(model, 0): + item = index.data(InventoryModel.ItemRole) if item.get("isGroupNode"): continue name = item.get("objectName") if name in object_names: - self.scrollTo(item) # Ensure item is visible + self.scrollTo(index) # Ensure item is visible flags = select_mode | selection_model.Rows - selection_model.select(item, flags) + selection_model.select(index, flags) object_names.remove(name) diff --git a/openpype/tools/sceneinventory/window.py 
b/openpype/tools/sceneinventory/window.py index 054c2a2daa..463280b71c 100644 --- a/openpype/tools/sceneinventory/window.py +++ b/openpype/tools/sceneinventory/window.py @@ -4,8 +4,9 @@ import sys from Qt import QtWidgets, QtCore import qtawesome -from openpype.pipeline import legacy_io from openpype import style +from openpype.client import get_projects +from openpype.pipeline import legacy_io from openpype.tools.utils.delegates import VersionDelegate from openpype.tools.utils.lib import ( qt_app_context, @@ -195,8 +196,7 @@ def show(root=None, debug=False, parent=None, items=None): if not os.environ.get("AVALON_PROJECT"): any_project = next( - project for project in legacy_io.projects() - if project.get("active", True) is not False + project for project in get_projects() ) project_name = any_project["name"] diff --git a/openpype/tools/settings/settings/categories.py b/openpype/tools/settings/settings/categories.py index f42027d9e2..f4b2c13a12 100644 --- a/openpype/tools/settings/settings/categories.py +++ b/openpype/tools/settings/settings/categories.py @@ -36,6 +36,11 @@ from openpype.settings.entities.op_version_entity import ( ) from openpype.settings import SaveWarningExc +from openpype.settings.lib import ( + get_system_last_saved_info, + get_project_last_saved_info, +) +from .dialogs import SettingsLastSavedChanged, SettingsControlTaken from .widgets import ( ProjectListWidget, VersionAction @@ -115,12 +120,19 @@ class SettingsCategoryWidget(QtWidgets.QWidget): "settings to update them to you current running OpenPype version." ) - def __init__(self, user_role, parent=None): + def __init__(self, controller, parent=None): super(SettingsCategoryWidget, self).__init__(parent) - self.user_role = user_role + self._controller = controller + controller.event_system.add_callback( + "edit.mode.changed", + self._edit_mode_changed + ) self.entity = None + self._edit_mode = None + self._last_saved_info = None + self._reset_crashed = False self._state = CategoryState.Idle @@ -191,6 +203,31 @@ class SettingsCategoryWidget(QtWidgets.QWidget): ) raise TypeError("Unknown type: {}".format(label)) + def _edit_mode_changed(self, event): + self.set_edit_mode(event["edit_mode"]) + + def set_edit_mode(self, enabled): + if enabled is self._edit_mode: + return + + was_false = self._edit_mode is False + self._edit_mode = enabled + + self.save_btn.setEnabled(enabled and not self._reset_crashed) + if enabled: + tooltip = ( + "Someone else has opened settings UI." + "\nTry hit refresh to check if settings are already available." 
+ ) + else: + tooltip = "Save settings" + + self.save_btn.setToolTip(tooltip) + + # Reset when last saved information has changed + if was_false and not self._check_last_saved_info(): + self.reset() + @property def state(self): return self._state @@ -286,7 +323,7 @@ class SettingsCategoryWidget(QtWidgets.QWidget): footer_layout = QtWidgets.QHBoxLayout(footer_widget) footer_layout.setContentsMargins(5, 5, 5, 5) - if self.user_role == "developer": + if self._controller.user_role == "developer": self._add_developer_ui(footer_layout, footer_widget) footer_layout.addWidget(empty_label, 1) @@ -434,6 +471,9 @@ class SettingsCategoryWidget(QtWidgets.QWidget): self.set_state(CategoryState.Idle) def save(self): + if not self._edit_mode: + return + if not self.items_are_valid(): return @@ -664,14 +704,16 @@ class SettingsCategoryWidget(QtWidgets.QWidget): ) def _on_reset_crash(self): + self._reset_crashed = True self.save_btn.setEnabled(False) if self.breadcrumbs_model is not None: self.breadcrumbs_model.set_entity(None) def _on_reset_success(self): + self._reset_crashed = False if not self.save_btn.isEnabled(): - self.save_btn.setEnabled(True) + self.save_btn.setEnabled(self._edit_mode) if self.breadcrumbs_model is not None: path = self.breadcrumbs_bar.path() @@ -716,7 +758,24 @@ class SettingsCategoryWidget(QtWidgets.QWidget): """Callback on any tab widget save.""" return + def _check_last_saved_info(self): + raise NotImplementedError(( + "{} does not have implemented '_check_last_saved_info'" + ).format(self.__class__.__name__)) + def _save(self): + self._controller.update_last_opened_info() + if not self._controller.opened_info: + dialog = SettingsControlTaken(self._last_saved_info, self) + dialog.exec_() + return + + if not self._check_last_saved_info(): + dialog = SettingsLastSavedChanged(self._last_saved_info, self) + dialog.exec_() + if dialog.result() == 0: + return + # Don't trigger restart if defaults are modified if self.is_modifying_defaults: require_restart = False @@ -775,6 +834,13 @@ class SystemWidget(SettingsCategoryWidget): self._actions = [] super(SystemWidget, self).__init__(*args, **kwargs) + def _check_last_saved_info(self): + if self.is_modifying_defaults: + return True + + last_saved_info = get_system_last_saved_info() + return self._last_saved_info == last_saved_info + def contain_category_key(self, category): if category == "system_settings": return True @@ -789,6 +855,10 @@ class SystemWidget(SettingsCategoryWidget): ) entity.on_change_callbacks.append(self._on_entity_change) self.entity = entity + last_saved_info = None + if not self.is_modifying_defaults: + last_saved_info = get_system_last_saved_info() + self._last_saved_info = last_saved_info try: if self.is_modifying_defaults: entity.set_defaults_state() @@ -822,6 +892,13 @@ class ProjectWidget(SettingsCategoryWidget): def __init__(self, *args, **kwargs): super(ProjectWidget, self).__init__(*args, **kwargs) + def _check_last_saved_info(self): + if self.is_modifying_defaults: + return True + + last_saved_info = get_project_last_saved_info(self.project_name) + return self._last_saved_info == last_saved_info + def contain_category_key(self, category): if category in ("project_settings", "project_anatomy"): return True @@ -901,6 +978,11 @@ class ProjectWidget(SettingsCategoryWidget): entity.on_change_callbacks.append(self._on_entity_change) self.project_list_widget.set_entity(entity) self.entity = entity + + last_saved_info = None + if not self.is_modifying_defaults: + last_saved_info = 
get_project_last_saved_info(self.project_name) + self._last_saved_info = last_saved_info try: if self.is_modifying_defaults: self.entity.set_defaults_state() diff --git a/openpype/tools/settings/settings/dialogs.py b/openpype/tools/settings/settings/dialogs.py new file mode 100644 index 0000000000..b1b4daa1a0 --- /dev/null +++ b/openpype/tools/settings/settings/dialogs.py @@ -0,0 +1,202 @@ +from Qt import QtWidgets, QtCore + +from openpype.tools.utils.delegates import pretty_date + + +class BaseInfoDialog(QtWidgets.QDialog): + width = 600 + height = 400 + + def __init__(self, message, title, info_obj, parent=None): + super(BaseInfoDialog, self).__init__(parent) + self._result = 0 + self._info_obj = info_obj + + self.setWindowTitle(title) + + message_label = QtWidgets.QLabel(message, self) + message_label.setWordWrap(True) + + separator_widget_1 = QtWidgets.QFrame(self) + separator_widget_2 = QtWidgets.QFrame(self) + for separator_widget in ( + separator_widget_1, + separator_widget_2 + ): + separator_widget.setObjectName("Separator") + separator_widget.setMinimumHeight(1) + separator_widget.setMaximumHeight(1) + + other_information = QtWidgets.QWidget(self) + other_information_layout = QtWidgets.QFormLayout(other_information) + other_information_layout.setContentsMargins(0, 0, 0, 0) + for label, value in ( + ("Username", info_obj.username), + ("Host name", info_obj.hostname), + ("Host IP", info_obj.hostip), + ("System name", info_obj.system_name), + ("Local ID", info_obj.local_id), + ): + other_information_layout.addRow( + label, + QtWidgets.QLabel(value or "N/A", other_information) + ) + + timestamp_label = QtWidgets.QLabel( + pretty_date(info_obj.timestamp_obj), other_information + ) + other_information_layout.addRow("Time", timestamp_label) + + footer_widget = QtWidgets.QWidget(self) + buttons_widget = QtWidgets.QWidget(footer_widget) + + buttons_layout = QtWidgets.QHBoxLayout(buttons_widget) + buttons_layout.setContentsMargins(0, 0, 0, 0) + buttons = self.get_buttons(buttons_widget) + for button in buttons: + buttons_layout.addWidget(button, 1) + + footer_layout = QtWidgets.QHBoxLayout(footer_widget) + footer_layout.setContentsMargins(0, 0, 0, 0) + footer_layout.addStretch(1) + footer_layout.addWidget(buttons_widget, 0) + + layout = QtWidgets.QVBoxLayout(self) + layout.addWidget(message_label, 0) + layout.addWidget(separator_widget_1, 0) + layout.addStretch(1) + layout.addWidget(other_information, 0, QtCore.Qt.AlignHCenter) + layout.addStretch(1) + layout.addWidget(separator_widget_2, 0) + layout.addWidget(footer_widget, 0) + + timestamp_timer = QtCore.QTimer() + timestamp_timer.setInterval(1000) + timestamp_timer.timeout.connect(self._on_timestamp_timer) + + self._timestamp_label = timestamp_label + self._timestamp_timer = timestamp_timer + + def showEvent(self, event): + super(BaseInfoDialog, self).showEvent(event) + self._timestamp_timer.start() + self.resize(self.width, self.height) + + def closeEvent(self, event): + self._timestamp_timer.stop() + super(BaseInfoDialog, self).closeEvent(event) + + def _on_timestamp_timer(self): + self._timestamp_label.setText( + pretty_date(self._info_obj.timestamp_obj) + ) + + def result(self): + return self._result + + def get_buttons(self, parent): + return [] + + +class SettingsUIOpenedElsewhere(BaseInfoDialog): + def __init__(self, info_obj, parent=None): + title = "Someone else has opened Settings UI" + message = ( + "Someone else has opened Settings UI which could cause data loss." + " Please contact the person on the other side." + "
\n\nYou can continue in view-only mode." +            " All changes in view mode will be lost." +            "\n\nYou can take control which will cause that" +            " all changes of settings on the other side will be lost.\n
" + ) + super(SettingsUIOpenedElsewhere, self).__init__( + message, title, info_obj, parent + ) + + def _on_take_control(self): + self._result = 1 + self.close() + + def _on_view_mode(self): + self._result = 0 + self.close() + + def get_buttons(self, parent): + take_control_btn = QtWidgets.QPushButton( + "Take control", parent + ) + view_mode_btn = QtWidgets.QPushButton( + "View only", parent + ) + + take_control_btn.clicked.connect(self._on_take_control) + view_mode_btn.clicked.connect(self._on_view_mode) + + return [ + take_control_btn, + view_mode_btn + ] + + +class SettingsLastSavedChanged(BaseInfoDialog): + width = 500 + height = 300 + + def __init__(self, info_obj, parent=None): + title = "Settings has changed" + message = ( + "Settings has changed while you had opened this settings session." + "
\n\n
It is recommended to refresh settings" + " and re-apply changes in the new session." + ) + super(SettingsLastSavedChanged, self).__init__( + message, title, info_obj, parent + ) + + def _on_save(self): + self._result = 1 + self.close() + + def _on_close(self): + self._result = 0 + self.close() + + def get_buttons(self, parent): + close_btn = QtWidgets.QPushButton( + "Close", parent + ) + save_btn = QtWidgets.QPushButton( + "Save anyway", parent + ) + + close_btn.clicked.connect(self._on_close) + save_btn.clicked.connect(self._on_save) + + return [ + close_btn, + save_btn + ] + + +class SettingsControlTaken(BaseInfoDialog): + width = 500 + height = 300 + + def __init__(self, info_obj, parent=None): + title = "Settings control taken" + message = ( + "Someone took control over your settings." + "
\n\n
It is not possible to save changes of currently" + " opened session. Copy changes you want to keep and hit refresh." + ) + super(SettingsControlTaken, self).__init__( + message, title, info_obj, parent + ) + + def _on_confirm(self): + self.close() + + def get_buttons(self, parent): + confirm_btn = QtWidgets.QPushButton("Understand", parent) + confirm_btn.clicked.connect(self._on_confirm) + return [confirm_btn] diff --git a/openpype/tools/settings/settings/widgets.py b/openpype/tools/settings/settings/widgets.py index 88d923c16a..1a4a6877b0 100644 --- a/openpype/tools/settings/settings/widgets.py +++ b/openpype/tools/settings/settings/widgets.py @@ -3,6 +3,7 @@ import uuid from Qt import QtWidgets, QtCore, QtGui import qtawesome +from openpype.client import get_projects from openpype.pipeline import AvalonMongoDB from openpype.style import get_objected_colors from openpype.tools.utils.widgets import ImageButton @@ -783,8 +784,6 @@ class ProjectModel(QtGui.QStandardItemModel): self.setColumnCount(2) - self.dbcon = None - self._only_active = only_active self._default_item = None self._items_by_name = {} @@ -828,9 +827,6 @@ class ProjectModel(QtGui.QStandardItemModel): index = self.index(index.row(), 0, index.parent()) return super(ProjectModel, self).flags(index) - def set_dbcon(self, dbcon): - self.dbcon = dbcon - def refresh(self): # Change id of versions refresh self._version_refresh_id = uuid.uuid4() @@ -846,31 +842,30 @@ class ProjectModel(QtGui.QStandardItemModel): self._default_item.setData("", PROJECT_VERSION_ROLE) project_names = set() - if self.dbcon is not None: - for project_doc in self.dbcon.projects( - projection={"name": 1, "data.active": 1}, - only_active=self._only_active - ): - project_name = project_doc["name"] - project_names.add(project_name) - if project_name in self._items_by_name: - item = self._items_by_name[project_name] - else: - item = QtGui.QStandardItem(project_name) + for project_doc in get_projects( + inactive=not self._only_active, + fields=["name", "data.active"] + ): + project_name = project_doc["name"] + project_names.add(project_name) + if project_name in self._items_by_name: + item = self._items_by_name[project_name] + else: + item = QtGui.QStandardItem(project_name) - self._items_by_name[project_name] = item - new_items.append(item) + self._items_by_name[project_name] = item + new_items.append(item) - is_active = project_doc.get("data", {}).get("active", True) - item.setData(project_name, PROJECT_NAME_ROLE) - item.setData(is_active, PROJECT_IS_ACTIVE_ROLE) - item.setData("", PROJECT_VERSION_ROLE) - item.setData(False, PROJECT_IS_SELECTED_ROLE) + is_active = project_doc.get("data", {}).get("active", True) + item.setData(project_name, PROJECT_NAME_ROLE) + item.setData(is_active, PROJECT_IS_ACTIVE_ROLE) + item.setData("", PROJECT_VERSION_ROLE) + item.setData(False, PROJECT_IS_SELECTED_ROLE) - if not is_active: - font = item.font() - font.setItalic(True) - item.setFont(font) + if not is_active: + font = item.font() + font.setItalic(True) + item.setFont(font) root_item = self.invisibleRootItem() for project_name in tuple(self._items_by_name.keys()): @@ -1067,8 +1062,6 @@ class ProjectListWidget(QtWidgets.QWidget): self.project_model = project_model self.inactive_chk = inactive_chk - self.dbcon = None - def set_entity(self, entity): self._entity = entity @@ -1211,15 +1204,6 @@ class ProjectListWidget(QtWidgets.QWidget): selected_project = index.data(PROJECT_NAME_ROLE) break - if not self.dbcon: - try: - self.dbcon = AvalonMongoDB() - self.dbcon.install() - 
except Exception: - self.dbcon = None - self.current_project = None - - self.project_model.set_dbcon(self.dbcon) self.project_model.refresh() self.project_proxy.sort(0) diff --git a/openpype/tools/settings/settings/window.py b/openpype/tools/settings/settings/window.py index 22778e4a5b..77a2f64dac 100644 --- a/openpype/tools/settings/settings/window.py +++ b/openpype/tools/settings/settings/window.py @@ -1,4 +1,18 @@ from Qt import QtWidgets, QtGui, QtCore + +from openpype import style + +from openpype.lib import is_admin_password_required +from openpype.lib.events import EventSystem +from openpype.widgets import PasswordDialog + +from openpype.settings.lib import ( + get_last_opened_info, + opened_settings_ui, + closed_settings_ui, +) + +from .dialogs import SettingsUIOpenedElsewhere from .categories import ( CategoryState, SystemWidget, @@ -10,10 +24,80 @@ from .widgets import ( SettingsTabWidget ) from .search_dialog import SearchEntitiesDialog -from openpype import style -from openpype.lib import is_admin_password_required -from openpype.widgets import PasswordDialog + +class SettingsController: + """Controller for settings tools. + + Added when tool was finished for checks of last opened in settings + categories and being able communicated with main widget logic. + """ + + def __init__(self, user_role): + self._user_role = user_role + self._event_system = EventSystem() + + self._opened_info = None + self._last_opened_info = None + self._edit_mode = None + + @property + def user_role(self): + return self._user_role + + @property + def event_system(self): + return self._event_system + + @property + def opened_info(self): + return self._opened_info + + @property + def last_opened_info(self): + return self._last_opened_info + + @property + def edit_mode(self): + return self._edit_mode + + def ui_closed(self): + if self._opened_info is not None: + closed_settings_ui(self._opened_info) + + self._opened_info = None + self._edit_mode = None + + def set_edit_mode(self, enabled): + if self._edit_mode is enabled: + return + + opened_info = None + if enabled: + opened_info = opened_settings_ui() + self._last_opened_info = opened_info + + self._opened_info = opened_info + self._edit_mode = enabled + + self.event_system.emit( + "edit.mode.changed", + {"edit_mode": enabled}, + "controller" + ) + + def update_last_opened_info(self): + last_opened_info = get_last_opened_info() + enabled = False + if ( + last_opened_info is None + or self._opened_info == last_opened_info + ): + enabled = True + + self._last_opened_info = last_opened_info + + self.set_edit_mode(enabled) class MainWidget(QtWidgets.QWidget): @@ -21,17 +105,25 @@ class MainWidget(QtWidgets.QWidget): widget_width = 1000 widget_height = 600 + window_title = "OpenPype Settings" def __init__(self, user_role, parent=None, reset_on_show=True): super(MainWidget, self).__init__(parent) + controller = SettingsController(user_role) + + # Object referencing to this machine and time when UI was opened + # - is used on close event + self._main_reset = False + self._controller = controller + self._user_passed = False self._reset_on_show = reset_on_show self._password_dialog = None self.setObjectName("SettingsMainWidget") - self.setWindowTitle("OpenPype Settings") + self.setWindowTitle(self.window_title) self.resize(self.widget_width, self.widget_height) @@ -41,8 +133,8 @@ class MainWidget(QtWidgets.QWidget): header_tab_widget = SettingsTabWidget(parent=self) - studio_widget = SystemWidget(user_role, header_tab_widget) - project_widget = 
ProjectWidget(user_role, header_tab_widget) + studio_widget = SystemWidget(controller, header_tab_widget) + project_widget = ProjectWidget(controller, header_tab_widget) tab_widgets = [ studio_widget, @@ -64,6 +156,11 @@ class MainWidget(QtWidgets.QWidget): self._shadow_widget = ShadowWidget("Working...", self) self._shadow_widget.setVisible(False) + controller.event_system.add_callback( + "edit.mode.changed", + self._edit_mode_changed + ) + header_tab_widget.currentChanged.connect(self._on_tab_changed) search_dialog.path_clicked.connect(self._on_search_path_clicked) @@ -74,7 +171,7 @@ class MainWidget(QtWidgets.QWidget): self._on_restart_required ) tab_widget.reset_started.connect(self._on_reset_started) - tab_widget.reset_started.connect(self._on_reset_finished) + tab_widget.reset_finished.connect(self._on_reset_finished) tab_widget.full_path_requested.connect(self._on_full_path_request) header_tab_widget.context_menu_requested.connect( @@ -131,11 +228,31 @@ class MainWidget(QtWidgets.QWidget): def showEvent(self, event): super(MainWidget, self).showEvent(event) + if self._reset_on_show: self._reset_on_show = False # Trigger reset with 100ms delay QtCore.QTimer.singleShot(100, self.reset) + def closeEvent(self, event): + self._controller.ui_closed() + + super(MainWidget, self).closeEvent(event) + + def _check_on_reset(self): + self._controller.update_last_opened_info() + if self._controller.edit_mode: + return + + # if self._edit_mode is False: + # return + + dialog = SettingsUIOpenedElsewhere( + self._controller.last_opened_info, self + ) + dialog.exec_() + self._controller.set_edit_mode(dialog.result() == 1) + def _show_password_dialog(self): if self._password_dialog: self._password_dialog.open() @@ -176,8 +293,11 @@ class MainWidget(QtWidgets.QWidget): if self._reset_on_show: self._reset_on_show = False + self._main_reset = True for tab_widget in self.tab_widgets: tab_widget.reset() + self._main_reset = False + self._check_on_reset() def _update_search_dialog(self, clear=False): if self._search_dialog.isVisible(): @@ -187,6 +307,12 @@ class MainWidget(QtWidgets.QWidget): entity = widget.entity self._search_dialog.set_root_entity(entity) + def _edit_mode_changed(self, event): + title = self.window_title + if not event["edit_mode"]: + title += " [View only]" + self.setWindowTitle(title) + def _on_tab_changed(self): self._update_search_dialog() @@ -221,6 +347,9 @@ class MainWidget(QtWidgets.QWidget): if current_widget is widget: self._update_search_dialog() + if not self._main_reset: + self._check_on_reset() + def keyPressEvent(self, event): if event.matches(QtGui.QKeySequence.Find): # todo: search in all widgets (or in active)? 
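The settings changes above (categories.py, dialogs.py and window.py) all hang off one mechanism: SettingsController emits an "edit.mode.changed" event and every category widget flips its save button and tooltip from the callback. A minimal standalone sketch of that flow, using a tiny stand-in event system instead of openpype.lib.events.EventSystem and a hypothetical ExampleCategoryWidget, could look like this:

    # Stand-in for openpype.lib.events.EventSystem; only the
    # add_callback/emit usage mirrors what the diff above shows.
    import collections


    class MiniEventSystem:
        def __init__(self):
            self._callbacks = collections.defaultdict(list)

        def add_callback(self, topic, callback):
            self._callbacks[topic].append(callback)

        def emit(self, topic, data, source):
            # The real EventSystem passes an event object that supports item
            # access (event["edit_mode"]); a plain dict is enough to show the flow.
            for callback in self._callbacks[topic]:
                callback(data)


    class ExampleCategoryWidget:
        """Hypothetical widget reacting to edit-mode changes."""

        def __init__(self, event_system):
            self.save_enabled = None
            event_system.add_callback(
                "edit.mode.changed", self._edit_mode_changed
            )

        def _edit_mode_changed(self, event):
            # Mirrors SettingsCategoryWidget._edit_mode_changed / set_edit_mode
            self.save_enabled = event["edit_mode"]


    event_system = MiniEventSystem()
    widget = ExampleCategoryWidget(event_system)

    # Controller side: another session took control -> switch to view only.
    event_system.emit("edit.mode.changed", {"edit_mode": False}, "controller")
    print(widget.save_enabled)  # False -> save disabled, window title gets "[View only]"

On top of this, the category widgets compare a last-saved info snapshot before writing, so a stale session gets the SettingsLastSavedChanged or SettingsControlTaken dialog instead of silently overwriting someone else's changes.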
diff --git a/openpype/tools/standalonepublish/app.py b/openpype/tools/standalonepublish/app.py index 3ceeb3ad48..081235c91c 100644 --- a/openpype/tools/standalonepublish/app.py +++ b/openpype/tools/standalonepublish/app.py @@ -236,7 +236,7 @@ def main(): signal.signal(signal.SIGTERM, signal_handler) modules_manager = ModulesManager() - module = modules_manager.modules_by_name["standalonepublish_tool"] + module = modules_manager.modules_by_name["standalonepublisher"] window = Window(module.publish_paths) window.show() diff --git a/openpype/tools/standalonepublish/widgets/widget_asset.py b/openpype/tools/standalonepublish/widgets/widget_asset.py index 73114f7960..77d756a606 100644 --- a/openpype/tools/standalonepublish/widgets/widget_asset.py +++ b/openpype/tools/standalonepublish/widgets/widget_asset.py @@ -3,6 +3,7 @@ from Qt import QtWidgets, QtCore import qtawesome from openpype.client import ( + get_projects, get_project, get_asset_by_id, ) @@ -291,9 +292,7 @@ class AssetWidget(QtWidgets.QWidget): def _set_projects(self): project_names = list() - for doc in self.dbcon.projects(projection={"name": 1}, - only_active=True): - + for doc in get_projects(fields=["name"]): project_name = doc.get("name") if project_name: project_names.append(project_name) @@ -320,8 +319,7 @@ class AssetWidget(QtWidgets.QWidget): def on_project_change(self): projects = list() - for project in self.dbcon.projects(projection={"name": 1}, - only_active=True): + for project in get_projects(fields=["name"]): projects.append(project['name']) project_name = self.combo_projects.currentText() if project_name in projects: diff --git a/openpype/tools/standalonepublish/widgets/widget_family.py b/openpype/tools/standalonepublish/widgets/widget_family.py index 1736be84ab..eab66d75b3 100644 --- a/openpype/tools/standalonepublish/widgets/widget_family.py +++ b/openpype/tools/standalonepublish/widgets/widget_family.py @@ -8,10 +8,12 @@ from openpype.client import ( get_subsets, get_last_version_by_subset_id, ) -from openpype.api import get_project_settings +from openpype.settings import get_project_settings from openpype.pipeline import LegacyCreator -from openpype.lib import TaskNotSetError -from openpype.pipeline.create import SUBSET_NAME_ALLOWED_SYMBOLS +from openpype.pipeline.create import ( + SUBSET_NAME_ALLOWED_SYMBOLS, + TaskNotSetError, +) from . import HelpRole, FamilyRole, ExistsRole, PluginRole, PluginKeyRole from . 
import FamilyDescriptionWidget diff --git a/openpype/tools/tray/pype_tray.py b/openpype/tools/tray/pype_tray.py index 4e5db06a92..85bc00ead6 100644 --- a/openpype/tools/tray/pype_tray.py +++ b/openpype/tools/tray/pype_tray.py @@ -10,19 +10,19 @@ from Qt import QtCore, QtGui, QtWidgets import openpype.version from openpype.api import ( - Logger, resources, get_system_settings ) -from openpype.lib import ( - get_openpype_execute_args, +from openpype.lib import get_openpype_execute_args, Logger +from openpype.lib.openpype_version import ( op_version_control_available, + get_expected_version, + get_installed_version, is_current_version_studio_latest, is_current_version_higher_than_expected, is_running_from_build, is_running_staging, - get_expected_version, - get_openpype_version + get_openpype_version, ) from openpype.modules import TrayModulesManager from openpype import style @@ -329,6 +329,25 @@ class TrayManager: self._version_dialog.close() return + installed_version = get_installed_version() + expected_version = get_expected_version() + + # Request new build if is needed + if ( + # Backwards compatibility + not hasattr(expected_version, "is_compatible") + or not expected_version.is_compatible(installed_version) + ): + if ( + self._version_dialog is not None + and self._version_dialog.isVisible() + ): + self._version_dialog.close() + + dialog = BuildVersionDialog() + dialog.exec_() + return + if self._version_dialog is None: self._version_dialog = VersionUpdateDialog() self._version_dialog.restart_requested.connect( @@ -338,7 +357,6 @@ class TrayManager: self._outdated_version_ignored ) - expected_version = get_expected_version() current_version = get_openpype_version() current_is_higher = is_current_version_higher_than_expected() diff --git a/openpype/tools/utils/lib.py b/openpype/tools/utils/lib.py index ea1362945f..97b680b77e 100644 --- a/openpype/tools/utils/lib.py +++ b/openpype/tools/utils/lib.py @@ -2,6 +2,7 @@ import os import sys import contextlib import collections +import traceback from Qt import QtWidgets, QtCore, QtGui import qtawesome @@ -37,6 +38,19 @@ def center_window(window): window.move(geo.topLeft()) +def html_escape(text): + """Basic escape of html syntax symbols in text.""" + + return ( + text + .replace("&", "&") + .replace("<", "<") + .replace(">", ">") + .replace('"', """) + .replace("'", "'") + ) + + def set_style_property(widget, property_name, property_value): """Set widget's property that may affect style. 
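One readability note on the openpype/tools/utils/lib.py hunk above: the HTML entities inside the new html_escape helper render literally in this view, so every replace call looks like a no-op. The intended substitutions are presumably the standard HTML entities (the exact entity used for the quote characters is an assumption here):

    # Presumed content of openpype.tools.utils.lib.html_escape; the entity
    # strings ("&amp;" etc.) are what the un-escaped diff text above stands for.
    # Replacing "&" first matters, otherwise the other entities get double-escaped.
    def html_escape(text):
        """Basic escape of html syntax symbols in text."""
        return (
            text
            .replace("&", "&amp;")
            .replace("<", "&lt;")
            .replace(">", "&gt;")
            .replace('"', "&quot;")
            .replace("'", "&#039;")
        )


    print(html_escape('<b>"A & B"</b>'))  # &lt;b&gt;&quot;A &amp; B&quot;&lt;/b&gt;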
@@ -430,10 +444,6 @@ class FamilyConfigCache: if profiles: # Make sure connection is installed # - accessing attribute which does not have auto-install - self.dbcon.install() - database = getattr(self.dbcon, "database", None) - if database is None: - database = self.dbcon._database asset_doc = get_asset_by_name( project_name, asset_name, fields=["data.tasks"] ) or {} @@ -634,7 +644,11 @@ class DynamicQThread(QtCore.QThread): def create_qthread(func, *args, **kwargs): class Thread(QtCore.QThread): def run(self): - func(*args, **kwargs) + try: + func(*args, **kwargs) + except BaseException: + traceback.print_exception(*sys.exc_info()) + raise return Thread() diff --git a/openpype/tools/utils/models.py b/openpype/tools/utils/models.py index 8991614fe1..1faccef4dd 100644 --- a/openpype/tools/utils/models.py +++ b/openpype/tools/utils/models.py @@ -3,6 +3,7 @@ import logging import Qt from Qt import QtCore, QtGui +from openpype.client import get_projects from .constants import ( PROJECT_IS_ACTIVE_ROLE, PROJECT_NAME_ROLE, @@ -296,29 +297,29 @@ class ProjectModel(QtGui.QStandardItemModel): self._default_item = item project_names = set() - if self.dbcon is not None: - for project_doc in self.dbcon.projects( - projection={"name": 1, "data.active": 1}, - only_active=self._only_active - ): - project_name = project_doc["name"] - project_names.add(project_name) - if project_name in self._items_by_name: - item = self._items_by_name[project_name] - else: - item = QtGui.QStandardItem(project_name) + project_docs = get_projects( + inactive=not self._only_active, + fields=["name", "data.active"] + ) + for project_doc in project_docs: + project_name = project_doc["name"] + project_names.add(project_name) + if project_name in self._items_by_name: + item = self._items_by_name[project_name] + else: + item = QtGui.QStandardItem(project_name) - self._items_by_name[project_name] = item - new_items.append(item) + self._items_by_name[project_name] = item + new_items.append(item) - is_active = project_doc.get("data", {}).get("active", True) - item.setData(project_name, PROJECT_NAME_ROLE) - item.setData(is_active, PROJECT_IS_ACTIVE_ROLE) + is_active = project_doc.get("data", {}).get("active", True) + item.setData(project_name, PROJECT_NAME_ROLE) + item.setData(is_active, PROJECT_IS_ACTIVE_ROLE) - if not is_active: - font = item.font() - font.setItalic(True) - item.setFont(font) + if not is_active: + font = item.font() + font.setItalic(True) + item.setFont(font) root_item = self.invisibleRootItem() for project_name in tuple(self._items_by_name.keys()): diff --git a/openpype/tools/workfiles/files_widget.py b/openpype/tools/workfiles/files_widget.py index 34692b7102..a5d5b14bb6 100644 --- a/openpype/tools/workfiles/files_widget.py +++ b/openpype/tools/workfiles/files_widget.py @@ -12,18 +12,19 @@ from openpype.tools.utils import PlaceholderLineEdit from openpype.tools.utils.delegates import PrettyTimeDelegate from openpype.lib import ( emit_event, - get_workfile_template_key, create_workdir_extra_folders, ) -from openpype.lib.avalon_context import ( - update_current_task, - compute_session_changes -) from openpype.pipeline import ( registered_host, legacy_io, Anatomy, ) +from openpype.pipeline.context_tools import ( + compute_session_changes, + change_current_context +) +from openpype.pipeline.workfile import get_workfile_template_key + from .model import ( WorkAreaFilesModel, PublishFilesModel, @@ -407,8 +408,8 @@ class FilesWidget(QtWidgets.QWidget): ) changes = compute_session_changes( session, - 
asset=self._get_asset_doc(), - task=self._task_name, + self._get_asset_doc(), + self._task_name, template_key=self.template_key ) session.update(changes) @@ -421,8 +422,8 @@ class FilesWidget(QtWidgets.QWidget): session = legacy_io.Session.copy() changes = compute_session_changes( session, - asset=self._get_asset_doc(), - task=self._task_name, + self._get_asset_doc(), + self._task_name, template_key=self.template_key ) if not changes: @@ -430,9 +431,9 @@ class FilesWidget(QtWidgets.QWidget): # to avoid any unwanted Task Changed callbacks to be triggered. return - update_current_task( - asset=self._get_asset_doc(), - task=self._task_name, + change_current_context( + self._get_asset_doc(), + self._task_name, template_key=self.template_key ) diff --git a/openpype/tools/workfiles/model.py b/openpype/tools/workfiles/model.py index d5b7cef339..9a7fd659a9 100644 --- a/openpype/tools/workfiles/model.py +++ b/openpype/tools/workfiles/model.py @@ -299,7 +299,6 @@ class PublishFilesModel(QtGui.QStandardItemModel): self.project_name, asset_ids=[self._asset_id], fields=["_id", "name"] - ) subset_ids = [subset_doc["_id"] for subset_doc in subset_docs] @@ -329,7 +328,9 @@ class PublishFilesModel(QtGui.QStandardItemModel): # extension extensions = [ext.replace(".", "") for ext in self._file_extensions] repre_docs = get_representations( - self.project_name, version_ids, extensions + self.project_name, + version_ids=version_ids, + context_filters={"ext": extensions} ) # Filter queried representations by task name if task is set diff --git a/openpype/tools/workfiles/save_as_dialog.py b/openpype/tools/workfiles/save_as_dialog.py index b62fd2c889..cded4eb1a5 100644 --- a/openpype/tools/workfiles/save_as_dialog.py +++ b/openpype/tools/workfiles/save_as_dialog.py @@ -5,18 +5,12 @@ import logging from Qt import QtWidgets, QtCore -from openpype.client import ( - get_project, - get_asset_by_name, -) -from openpype.lib import ( - get_last_workfile_with_version, - get_workdir_data, -) from openpype.pipeline import ( registered_host, legacy_io, ) +from openpype.pipeline.workfile import get_last_workfile_with_version +from openpype.pipeline.template_data import get_template_data_with_names from openpype.tools.utils import PlaceholderLineEdit log = logging.getLogger(__name__) @@ -30,16 +24,10 @@ def build_workfile_data(session): asset_name = session["AVALON_ASSET"] task_name = session["AVALON_TASK"] host_name = session["AVALON_APP"] - project_doc = get_project( - project_name, fields=["name", "data.code", "config.tasks"] - ) - asset_doc = get_asset_by_name( - project_name, - asset_name, - fields=["name", "data.tasks", "data.parents"] - ) - data = get_workdir_data(project_doc, asset_doc, task_name, host_name) + data = get_template_data_with_names( + project_name, asset_name, task_name, host_name + ) data.update({ "version": 1, "comment": "", diff --git a/openpype/tools/workfiles/window.py b/openpype/tools/workfiles/window.py index 0b0d67e589..de42b80d64 100644 --- a/openpype/tools/workfiles/window.py +++ b/openpype/tools/workfiles/window.py @@ -1,18 +1,20 @@ import os import datetime +import copy from Qt import QtCore, QtWidgets, QtGui from openpype.client import ( - get_asset_by_id, get_asset_by_name, get_workfile_info, ) +from openpype.client.operations import ( + OperationsSession, + new_workfile_info_doc, + prepare_workfile_info_update_data, +) from openpype import style from openpype import resources -from openpype.lib import ( - create_workfile_doc, - save_workfile_data_to_doc, -) +from openpype.pipeline import 
Anatomy from openpype.pipeline import legacy_io from openpype.tools.utils.assets_widget import SingleSelectAssetsWidget from openpype.tools.utils.tasks_widget import TasksWidget @@ -324,10 +326,23 @@ class Window(QtWidgets.QWidget): workfile_doc, data = self.side_panel.get_workfile_data() if not workfile_doc: filepath = self.files_widget._get_selected_filepath() - self._create_workfile_doc(filepath, force=True) - workfile_doc = self._get_current_workfile_doc() + workfile_doc = self._create_workfile_doc(filepath) - save_workfile_data_to_doc(workfile_doc, data, legacy_io) + new_workfile_doc = copy.deepcopy(workfile_doc) + new_workfile_doc["data"] = data + update_data = prepare_workfile_info_update_data( + workfile_doc, new_workfile_doc + ) + if not update_data: + return + + project_name = legacy_io.active_project() + + session = OperationsSession() + session.update_entity( + project_name, "workfile", workfile_doc["_id"], update_data + ) + session.commit() def _get_current_workfile_doc(self, filepath=None): if filepath is None: @@ -343,20 +358,32 @@ class Window(QtWidgets.QWidget): project_name, asset_id, task_name, filename ) - def _create_workfile_doc(self, filepath, force=False): - workfile_doc = None - if not force: - workfile_doc = self._get_current_workfile_doc(filepath) + def _create_workfile_doc(self, filepath): + workfile_doc = self._get_current_workfile_doc(filepath) + if workfile_doc: + return workfile_doc - if not workfile_doc: - workdir, filename = os.path.split(filepath) - asset_id = self.assets_widget.get_selected_asset_id() - project_name = legacy_io.active_project() - asset_doc = get_asset_by_id(project_name, asset_id) - task_name = self.tasks_widget.get_selected_task_name() - create_workfile_doc( - asset_doc, task_name, filename, workdir, legacy_io - ) + workdir, filename = os.path.split(filepath) + + project_name = legacy_io.active_project() + asset_id = self.assets_widget.get_selected_asset_id() + task_name = self.tasks_widget.get_selected_task_name() + + anatomy = Anatomy(project_name) + success, rootless_dir = anatomy.find_root_template_from_path(workdir) + filepath = "/".join([ + os.path.normpath(rootless_dir).replace("\\", "/"), + filename + ]) + + workfile_doc = new_workfile_info_doc( + filename, asset_id, task_name, [filepath] + ) + + session = OperationsSession() + session.create_entity(project_name, "workfile", workfile_doc) + session.commit() + return workfile_doc def refresh(self): # Refresh asset widget diff --git a/openpype/vendor/python/common/capture.py b/openpype/vendor/python/common/capture.py index 71b86a5f1a..86c1c60e56 100644 --- a/openpype/vendor/python/common/capture.py +++ b/openpype/vendor/python/common/capture.py @@ -665,7 +665,10 @@ def _applied_camera_options(options, panel): _iteritems = getattr(options, "iteritems", options.items) for opt, value in _iteritems(): - _safe_setAttr(camera + "." + opt, value) + if cmds.getAttr(camera + "." + opt, lock=True): + continue + else: + _safe_setAttr(camera + "." + opt, value) try: yield @@ -673,7 +676,11 @@ def _applied_camera_options(options, panel): if old_options: _iteritems = getattr(old_options, "iteritems", old_options.items) for opt, value in _iteritems(): - _safe_setAttr(camera + "." + opt, value) + # + if cmds.getAttr(camera + "." + opt, lock=True): + continue + else: + _safe_setAttr(camera + "." 
+ opt, value) @contextlib.contextmanager diff --git a/openpype/vendor/python/python_2/attr/__init__.py b/openpype/vendor/python/python_2/attr/__init__.py new file mode 100644 index 0000000000..f95c96dd57 --- /dev/null +++ b/openpype/vendor/python/python_2/attr/__init__.py @@ -0,0 +1,80 @@ +# SPDX-License-Identifier: MIT + +from __future__ import absolute_import, division, print_function + +import sys + +from functools import partial + +from . import converters, exceptions, filters, setters, validators +from ._cmp import cmp_using +from ._config import get_run_validators, set_run_validators +from ._funcs import asdict, assoc, astuple, evolve, has, resolve_types +from ._make import ( + NOTHING, + Attribute, + Factory, + attrib, + attrs, + fields, + fields_dict, + make_class, + validate, +) +from ._version_info import VersionInfo + + +__version__ = "21.4.0" +__version_info__ = VersionInfo._from_version_string(__version__) + +__title__ = "attrs" +__description__ = "Classes Without Boilerplate" +__url__ = "https://www.attrs.org/" +__uri__ = __url__ +__doc__ = __description__ + " <" + __uri__ + ">" + +__author__ = "Hynek Schlawack" +__email__ = "hs@ox.cx" + +__license__ = "MIT" +__copyright__ = "Copyright (c) 2015 Hynek Schlawack" + + +s = attributes = attrs +ib = attr = attrib +dataclass = partial(attrs, auto_attribs=True) # happy Easter ;) + +__all__ = [ + "Attribute", + "Factory", + "NOTHING", + "asdict", + "assoc", + "astuple", + "attr", + "attrib", + "attributes", + "attrs", + "cmp_using", + "converters", + "evolve", + "exceptions", + "fields", + "fields_dict", + "filters", + "get_run_validators", + "has", + "ib", + "make_class", + "resolve_types", + "s", + "set_run_validators", + "setters", + "validate", + "validators", +] + +if sys.version_info[:2] >= (3, 6): + from ._next_gen import define, field, frozen, mutable # noqa: F401 + + __all__.extend(("define", "field", "frozen", "mutable")) diff --git a/openpype/vendor/python/python_2/attr/__init__.pyi b/openpype/vendor/python/python_2/attr/__init__.pyi new file mode 100644 index 0000000000..c0a2126503 --- /dev/null +++ b/openpype/vendor/python/python_2/attr/__init__.pyi @@ -0,0 +1,484 @@ +import sys + +from typing import ( + Any, + Callable, + Dict, + Generic, + List, + Mapping, + Optional, + Sequence, + Tuple, + Type, + TypeVar, + Union, + overload, +) + +# `import X as X` is required to make these public +from . import converters as converters +from . import exceptions as exceptions +from . import filters as filters +from . import setters as setters +from . 
import validators as validators +from ._version_info import VersionInfo + +__version__: str +__version_info__: VersionInfo +__title__: str +__description__: str +__url__: str +__uri__: str +__author__: str +__email__: str +__license__: str +__copyright__: str + +_T = TypeVar("_T") +_C = TypeVar("_C", bound=type) + +_EqOrderType = Union[bool, Callable[[Any], Any]] +_ValidatorType = Callable[[Any, Attribute[_T], _T], Any] +_ConverterType = Callable[[Any], Any] +_FilterType = Callable[[Attribute[_T], _T], bool] +_ReprType = Callable[[Any], str] +_ReprArgType = Union[bool, _ReprType] +_OnSetAttrType = Callable[[Any, Attribute[Any], Any], Any] +_OnSetAttrArgType = Union[ + _OnSetAttrType, List[_OnSetAttrType], setters._NoOpType +] +_FieldTransformer = Callable[ + [type, List[Attribute[Any]]], List[Attribute[Any]] +] +_CompareWithType = Callable[[Any, Any], bool] +# FIXME: in reality, if multiple validators are passed they must be in a list +# or tuple, but those are invariant and so would prevent subtypes of +# _ValidatorType from working when passed in a list or tuple. +_ValidatorArgType = Union[_ValidatorType[_T], Sequence[_ValidatorType[_T]]] + +# _make -- + +NOTHING: object + +# NOTE: Factory lies about its return type to make this possible: +# `x: List[int] # = Factory(list)` +# Work around mypy issue #4554 in the common case by using an overload. +if sys.version_info >= (3, 8): + from typing import Literal + @overload + def Factory(factory: Callable[[], _T]) -> _T: ... + @overload + def Factory( + factory: Callable[[Any], _T], + takes_self: Literal[True], + ) -> _T: ... + @overload + def Factory( + factory: Callable[[], _T], + takes_self: Literal[False], + ) -> _T: ... + +else: + @overload + def Factory(factory: Callable[[], _T]) -> _T: ... + @overload + def Factory( + factory: Union[Callable[[Any], _T], Callable[[], _T]], + takes_self: bool = ..., + ) -> _T: ... + +# Static type inference support via __dataclass_transform__ implemented as per: +# https://github.com/microsoft/pyright/blob/1.1.135/specs/dataclass_transforms.md +# This annotation must be applied to all overloads of "define" and "attrs" +# +# NOTE: This is a typing construct and does not exist at runtime. Extensions +# wrapping attrs decorators should declare a separate __dataclass_transform__ +# signature in the extension module using the specification linked above to +# provide pyright support. +def __dataclass_transform__( + *, + eq_default: bool = True, + order_default: bool = False, + kw_only_default: bool = False, + field_descriptors: Tuple[Union[type, Callable[..., Any]], ...] = (()), +) -> Callable[[_T], _T]: ... + +class Attribute(Generic[_T]): + name: str + default: Optional[_T] + validator: Optional[_ValidatorType[_T]] + repr: _ReprArgType + cmp: _EqOrderType + eq: _EqOrderType + order: _EqOrderType + hash: Optional[bool] + init: bool + converter: Optional[_ConverterType] + metadata: Dict[Any, Any] + type: Optional[Type[_T]] + kw_only: bool + on_setattr: _OnSetAttrType + def evolve(self, **changes: Any) -> "Attribute[Any]": ... + +# NOTE: We had several choices for the annotation to use for type arg: +# 1) Type[_T] +# - Pros: Handles simple cases correctly +# - Cons: Might produce less informative errors in the case of conflicting +# TypeVars e.g. `attr.ib(default='bad', type=int)` +# 2) Callable[..., _T] +# - Pros: Better error messages than #1 for conflicting TypeVars +# - Cons: Terrible error messages for validator checks. +# e.g. 
attr.ib(type=int, validator=validate_str) +# -> error: Cannot infer function type argument +# 3) type (and do all of the work in the mypy plugin) +# - Pros: Simple here, and we could customize the plugin with our own errors. +# - Cons: Would need to write mypy plugin code to handle all the cases. +# We chose option #1. + +# `attr` lies about its return type to make the following possible: +# attr() -> Any +# attr(8) -> int +# attr(validator=) -> Whatever the callable expects. +# This makes this type of assignments possible: +# x: int = attr(8) +# +# This form catches explicit None or no default but with no other arguments +# returns Any. +@overload +def attrib( + default: None = ..., + validator: None = ..., + repr: _ReprArgType = ..., + cmp: Optional[_EqOrderType] = ..., + hash: Optional[bool] = ..., + init: bool = ..., + metadata: Optional[Mapping[Any, Any]] = ..., + type: None = ..., + converter: None = ..., + factory: None = ..., + kw_only: bool = ..., + eq: Optional[_EqOrderType] = ..., + order: Optional[_EqOrderType] = ..., + on_setattr: Optional[_OnSetAttrArgType] = ..., +) -> Any: ... + +# This form catches an explicit None or no default and infers the type from the +# other arguments. +@overload +def attrib( + default: None = ..., + validator: Optional[_ValidatorArgType[_T]] = ..., + repr: _ReprArgType = ..., + cmp: Optional[_EqOrderType] = ..., + hash: Optional[bool] = ..., + init: bool = ..., + metadata: Optional[Mapping[Any, Any]] = ..., + type: Optional[Type[_T]] = ..., + converter: Optional[_ConverterType] = ..., + factory: Optional[Callable[[], _T]] = ..., + kw_only: bool = ..., + eq: Optional[_EqOrderType] = ..., + order: Optional[_EqOrderType] = ..., + on_setattr: Optional[_OnSetAttrArgType] = ..., +) -> _T: ... + +# This form catches an explicit default argument. +@overload +def attrib( + default: _T, + validator: Optional[_ValidatorArgType[_T]] = ..., + repr: _ReprArgType = ..., + cmp: Optional[_EqOrderType] = ..., + hash: Optional[bool] = ..., + init: bool = ..., + metadata: Optional[Mapping[Any, Any]] = ..., + type: Optional[Type[_T]] = ..., + converter: Optional[_ConverterType] = ..., + factory: Optional[Callable[[], _T]] = ..., + kw_only: bool = ..., + eq: Optional[_EqOrderType] = ..., + order: Optional[_EqOrderType] = ..., + on_setattr: Optional[_OnSetAttrArgType] = ..., +) -> _T: ... + +# This form covers type=non-Type: e.g. forward references (str), Any +@overload +def attrib( + default: Optional[_T] = ..., + validator: Optional[_ValidatorArgType[_T]] = ..., + repr: _ReprArgType = ..., + cmp: Optional[_EqOrderType] = ..., + hash: Optional[bool] = ..., + init: bool = ..., + metadata: Optional[Mapping[Any, Any]] = ..., + type: object = ..., + converter: Optional[_ConverterType] = ..., + factory: Optional[Callable[[], _T]] = ..., + kw_only: bool = ..., + eq: Optional[_EqOrderType] = ..., + order: Optional[_EqOrderType] = ..., + on_setattr: Optional[_OnSetAttrArgType] = ..., +) -> Any: ... +@overload +def field( + *, + default: None = ..., + validator: None = ..., + repr: _ReprArgType = ..., + hash: Optional[bool] = ..., + init: bool = ..., + metadata: Optional[Mapping[Any, Any]] = ..., + converter: None = ..., + factory: None = ..., + kw_only: bool = ..., + eq: Optional[bool] = ..., + order: Optional[bool] = ..., + on_setattr: Optional[_OnSetAttrArgType] = ..., +) -> Any: ... + +# This form catches an explicit None or no default and infers the type from the +# other arguments. 
+@overload +def field( + *, + default: None = ..., + validator: Optional[_ValidatorArgType[_T]] = ..., + repr: _ReprArgType = ..., + hash: Optional[bool] = ..., + init: bool = ..., + metadata: Optional[Mapping[Any, Any]] = ..., + converter: Optional[_ConverterType] = ..., + factory: Optional[Callable[[], _T]] = ..., + kw_only: bool = ..., + eq: Optional[_EqOrderType] = ..., + order: Optional[_EqOrderType] = ..., + on_setattr: Optional[_OnSetAttrArgType] = ..., +) -> _T: ... + +# This form catches an explicit default argument. +@overload +def field( + *, + default: _T, + validator: Optional[_ValidatorArgType[_T]] = ..., + repr: _ReprArgType = ..., + hash: Optional[bool] = ..., + init: bool = ..., + metadata: Optional[Mapping[Any, Any]] = ..., + converter: Optional[_ConverterType] = ..., + factory: Optional[Callable[[], _T]] = ..., + kw_only: bool = ..., + eq: Optional[_EqOrderType] = ..., + order: Optional[_EqOrderType] = ..., + on_setattr: Optional[_OnSetAttrArgType] = ..., +) -> _T: ... + +# This form covers type=non-Type: e.g. forward references (str), Any +@overload +def field( + *, + default: Optional[_T] = ..., + validator: Optional[_ValidatorArgType[_T]] = ..., + repr: _ReprArgType = ..., + hash: Optional[bool] = ..., + init: bool = ..., + metadata: Optional[Mapping[Any, Any]] = ..., + converter: Optional[_ConverterType] = ..., + factory: Optional[Callable[[], _T]] = ..., + kw_only: bool = ..., + eq: Optional[_EqOrderType] = ..., + order: Optional[_EqOrderType] = ..., + on_setattr: Optional[_OnSetAttrArgType] = ..., +) -> Any: ... +@overload +@__dataclass_transform__(order_default=True, field_descriptors=(attrib, field)) +def attrs( + maybe_cls: _C, + these: Optional[Dict[str, Any]] = ..., + repr_ns: Optional[str] = ..., + repr: bool = ..., + cmp: Optional[_EqOrderType] = ..., + hash: Optional[bool] = ..., + init: bool = ..., + slots: bool = ..., + frozen: bool = ..., + weakref_slot: bool = ..., + str: bool = ..., + auto_attribs: bool = ..., + kw_only: bool = ..., + cache_hash: bool = ..., + auto_exc: bool = ..., + eq: Optional[_EqOrderType] = ..., + order: Optional[_EqOrderType] = ..., + auto_detect: bool = ..., + collect_by_mro: bool = ..., + getstate_setstate: Optional[bool] = ..., + on_setattr: Optional[_OnSetAttrArgType] = ..., + field_transformer: Optional[_FieldTransformer] = ..., + match_args: bool = ..., +) -> _C: ... +@overload +@__dataclass_transform__(order_default=True, field_descriptors=(attrib, field)) +def attrs( + maybe_cls: None = ..., + these: Optional[Dict[str, Any]] = ..., + repr_ns: Optional[str] = ..., + repr: bool = ..., + cmp: Optional[_EqOrderType] = ..., + hash: Optional[bool] = ..., + init: bool = ..., + slots: bool = ..., + frozen: bool = ..., + weakref_slot: bool = ..., + str: bool = ..., + auto_attribs: bool = ..., + kw_only: bool = ..., + cache_hash: bool = ..., + auto_exc: bool = ..., + eq: Optional[_EqOrderType] = ..., + order: Optional[_EqOrderType] = ..., + auto_detect: bool = ..., + collect_by_mro: bool = ..., + getstate_setstate: Optional[bool] = ..., + on_setattr: Optional[_OnSetAttrArgType] = ..., + field_transformer: Optional[_FieldTransformer] = ..., + match_args: bool = ..., +) -> Callable[[_C], _C]: ... 
+@overload +@__dataclass_transform__(field_descriptors=(attrib, field)) +def define( + maybe_cls: _C, + *, + these: Optional[Dict[str, Any]] = ..., + repr: bool = ..., + hash: Optional[bool] = ..., + init: bool = ..., + slots: bool = ..., + frozen: bool = ..., + weakref_slot: bool = ..., + str: bool = ..., + auto_attribs: bool = ..., + kw_only: bool = ..., + cache_hash: bool = ..., + auto_exc: bool = ..., + eq: Optional[bool] = ..., + order: Optional[bool] = ..., + auto_detect: bool = ..., + getstate_setstate: Optional[bool] = ..., + on_setattr: Optional[_OnSetAttrArgType] = ..., + field_transformer: Optional[_FieldTransformer] = ..., + match_args: bool = ..., +) -> _C: ... +@overload +@__dataclass_transform__(field_descriptors=(attrib, field)) +def define( + maybe_cls: None = ..., + *, + these: Optional[Dict[str, Any]] = ..., + repr: bool = ..., + hash: Optional[bool] = ..., + init: bool = ..., + slots: bool = ..., + frozen: bool = ..., + weakref_slot: bool = ..., + str: bool = ..., + auto_attribs: bool = ..., + kw_only: bool = ..., + cache_hash: bool = ..., + auto_exc: bool = ..., + eq: Optional[bool] = ..., + order: Optional[bool] = ..., + auto_detect: bool = ..., + getstate_setstate: Optional[bool] = ..., + on_setattr: Optional[_OnSetAttrArgType] = ..., + field_transformer: Optional[_FieldTransformer] = ..., + match_args: bool = ..., +) -> Callable[[_C], _C]: ... + +mutable = define +frozen = define # they differ only in their defaults + +# TODO: add support for returning NamedTuple from the mypy plugin +class _Fields(Tuple[Attribute[Any], ...]): + def __getattr__(self, name: str) -> Attribute[Any]: ... + +def fields(cls: type) -> _Fields: ... +def fields_dict(cls: type) -> Dict[str, Attribute[Any]]: ... +def validate(inst: Any) -> None: ... +def resolve_types( + cls: _C, + globalns: Optional[Dict[str, Any]] = ..., + localns: Optional[Dict[str, Any]] = ..., + attribs: Optional[List[Attribute[Any]]] = ..., +) -> _C: ... + +# TODO: add support for returning a proper attrs class from the mypy plugin +# we use Any instead of _CountingAttr so that e.g. `make_class('Foo', +# [attr.ib()])` is valid +def make_class( + name: str, + attrs: Union[List[str], Tuple[str, ...], Dict[str, Any]], + bases: Tuple[type, ...] = ..., + repr_ns: Optional[str] = ..., + repr: bool = ..., + cmp: Optional[_EqOrderType] = ..., + hash: Optional[bool] = ..., + init: bool = ..., + slots: bool = ..., + frozen: bool = ..., + weakref_slot: bool = ..., + str: bool = ..., + auto_attribs: bool = ..., + kw_only: bool = ..., + cache_hash: bool = ..., + auto_exc: bool = ..., + eq: Optional[_EqOrderType] = ..., + order: Optional[_EqOrderType] = ..., + collect_by_mro: bool = ..., + on_setattr: Optional[_OnSetAttrArgType] = ..., + field_transformer: Optional[_FieldTransformer] = ..., +) -> type: ... + +# _funcs -- + +# TODO: add support for returning TypedDict from the mypy plugin +# FIXME: asdict/astuple do not honor their factory args. Waiting on one of +# these: +# https://github.com/python/mypy/issues/4236 +# https://github.com/python/typing/issues/253 +# XXX: remember to fix attrs.asdict/astuple too! +def asdict( + inst: Any, + recurse: bool = ..., + filter: Optional[_FilterType[Any]] = ..., + dict_factory: Type[Mapping[Any, Any]] = ..., + retain_collection_types: bool = ..., + value_serializer: Optional[ + Callable[[type, Attribute[Any], Any], Any] + ] = ..., + tuple_keys: Optional[bool] = ..., +) -> Dict[str, Any]: ... 
+ +# TODO: add support for returning NamedTuple from the mypy plugin +def astuple( + inst: Any, + recurse: bool = ..., + filter: Optional[_FilterType[Any]] = ..., + tuple_factory: Type[Sequence[Any]] = ..., + retain_collection_types: bool = ..., +) -> Tuple[Any, ...]: ... +def has(cls: type) -> bool: ... +def assoc(inst: _T, **changes: Any) -> _T: ... +def evolve(inst: _T, **changes: Any) -> _T: ... + +# _config -- + +def set_run_validators(run: bool) -> None: ... +def get_run_validators() -> bool: ... + +# aliases -- + +s = attributes = attrs +ib = attr = attrib +dataclass = attrs # Technically, partial(attrs, auto_attribs=True) ;) diff --git a/openpype/vendor/python/python_2/attr/_cmp.py b/openpype/vendor/python/python_2/attr/_cmp.py new file mode 100644 index 0000000000..6cffa4dbab --- /dev/null +++ b/openpype/vendor/python/python_2/attr/_cmp.py @@ -0,0 +1,154 @@ +# SPDX-License-Identifier: MIT + +from __future__ import absolute_import, division, print_function + +import functools + +from ._compat import new_class +from ._make import _make_ne + + +_operation_names = {"eq": "==", "lt": "<", "le": "<=", "gt": ">", "ge": ">="} + + +def cmp_using( + eq=None, + lt=None, + le=None, + gt=None, + ge=None, + require_same_type=True, + class_name="Comparable", +): + """ + Create a class that can be passed into `attr.ib`'s ``eq``, ``order``, and + ``cmp`` arguments to customize field comparison. + + The resulting class will have a full set of ordering methods if + at least one of ``{lt, le, gt, ge}`` and ``eq`` are provided. + + :param Optional[callable] eq: `callable` used to evaluate equality + of two objects. + :param Optional[callable] lt: `callable` used to evaluate whether + one object is less than another object. + :param Optional[callable] le: `callable` used to evaluate whether + one object is less than or equal to another object. + :param Optional[callable] gt: `callable` used to evaluate whether + one object is greater than another object. + :param Optional[callable] ge: `callable` used to evaluate whether + one object is greater than or equal to another object. + + :param bool require_same_type: When `True`, equality and ordering methods + will return `NotImplemented` if objects are not of the same type. + + :param Optional[str] class_name: Name of class. Defaults to 'Comparable'. + + See `comparison` for more details. + + .. versionadded:: 21.1.0 + """ + + body = { + "__slots__": ["value"], + "__init__": _make_init(), + "_requirements": [], + "_is_comparable_to": _is_comparable_to, + } + + # Add operations. + num_order_functions = 0 + has_eq_function = False + + if eq is not None: + has_eq_function = True + body["__eq__"] = _make_operator("eq", eq) + body["__ne__"] = _make_ne() + + if lt is not None: + num_order_functions += 1 + body["__lt__"] = _make_operator("lt", lt) + + if le is not None: + num_order_functions += 1 + body["__le__"] = _make_operator("le", le) + + if gt is not None: + num_order_functions += 1 + body["__gt__"] = _make_operator("gt", gt) + + if ge is not None: + num_order_functions += 1 + body["__ge__"] = _make_operator("ge", ge) + + type_ = new_class(class_name, (object,), {}, lambda ns: ns.update(body)) + + # Add same type requirement. + if require_same_type: + type_._requirements.append(_check_same_type) + + # Add total ordering if at least one operation was defined. + if 0 < num_order_functions < 4: + if not has_eq_function: + # functools.total_ordering requires __eq__ to be defined, + # so raise early error here to keep a nice stack. 
+ raise ValueError( + "eq must be define is order to complete ordering from " + "lt, le, gt, ge." + ) + type_ = functools.total_ordering(type_) + + return type_ + + +def _make_init(): + """ + Create __init__ method. + """ + + def __init__(self, value): + """ + Initialize object with *value*. + """ + self.value = value + + return __init__ + + +def _make_operator(name, func): + """ + Create operator method. + """ + + def method(self, other): + if not self._is_comparable_to(other): + return NotImplemented + + result = func(self.value, other.value) + if result is NotImplemented: + return NotImplemented + + return result + + method.__name__ = "__%s__" % (name,) + method.__doc__ = "Return a %s b. Computed by attrs." % ( + _operation_names[name], + ) + + return method + + +def _is_comparable_to(self, other): + """ + Check whether `other` is comparable to `self`. + """ + for func in self._requirements: + if not func(self, other): + return False + return True + + +def _check_same_type(self, other): + """ + Return True if *self* and *other* are of the same type, False otherwise. + """ + return other.value.__class__ is self.value.__class__ diff --git a/openpype/vendor/python/python_2/attr/_cmp.pyi b/openpype/vendor/python/python_2/attr/_cmp.pyi new file mode 100644 index 0000000000..e71aaff7a1 --- /dev/null +++ b/openpype/vendor/python/python_2/attr/_cmp.pyi @@ -0,0 +1,13 @@ +from typing import Type + +from . import _CompareWithType + +def cmp_using( + eq: Optional[_CompareWithType], + lt: Optional[_CompareWithType], + le: Optional[_CompareWithType], + gt: Optional[_CompareWithType], + ge: Optional[_CompareWithType], + require_same_type: bool, + class_name: str, +) -> Type: ... diff --git a/openpype/vendor/python/python_2/attr/_compat.py b/openpype/vendor/python/python_2/attr/_compat.py new file mode 100644 index 0000000000..dc0cb02b64 --- /dev/null +++ b/openpype/vendor/python/python_2/attr/_compat.py @@ -0,0 +1,261 @@ +# SPDX-License-Identifier: MIT + +from __future__ import absolute_import, division, print_function + +import platform +import sys +import threading +import types +import warnings + + +PY2 = sys.version_info[0] == 2 +PYPY = platform.python_implementation() == "PyPy" +PY36 = sys.version_info[:2] >= (3, 6) +HAS_F_STRINGS = PY36 +PY310 = sys.version_info[:2] >= (3, 10) + + +if PYPY or PY36: + ordered_dict = dict +else: + from collections import OrderedDict + + ordered_dict = OrderedDict + + +if PY2: + from collections import Mapping, Sequence + + from UserDict import IterableUserDict + + # We 'bundle' isclass instead of using inspect as importing inspect is + # fairly expensive (order of 10-15 ms for a modern machine in 2016) + def isclass(klass): + return isinstance(klass, (type, types.ClassType)) + + def new_class(name, bases, kwds, exec_body): + """ + A minimal stub of types.new_class that we need for make_class. + """ + ns = {} + exec_body(ns) + + return type(name, bases, ns) + + # TYPE is used in exceptions, repr(int) is different on Python 2 and 3. + TYPE = "type" + + def iteritems(d): + return d.iteritems() + + # Python 2 is bereft of a read-only dict proxy, so we make one! + class ReadOnlyDict(IterableUserDict): + """ + Best-effort read-only dict wrapper. + """ + + def __setitem__(self, key, val): + # We gently pretend we're a Python 3 mappingproxy. + raise TypeError( + "'mappingproxy' object does not support item assignment" + ) + + def update(self, _): + # We gently pretend we're a Python 3 mappingproxy. 
+ raise AttributeError( + "'mappingproxy' object has no attribute 'update'" + ) + + def __delitem__(self, _): + # We gently pretend we're a Python 3 mappingproxy. + raise TypeError( + "'mappingproxy' object does not support item deletion" + ) + + def clear(self): + # We gently pretend we're a Python 3 mappingproxy. + raise AttributeError( + "'mappingproxy' object has no attribute 'clear'" + ) + + def pop(self, key, default=None): + # We gently pretend we're a Python 3 mappingproxy. + raise AttributeError( + "'mappingproxy' object has no attribute 'pop'" + ) + + def popitem(self): + # We gently pretend we're a Python 3 mappingproxy. + raise AttributeError( + "'mappingproxy' object has no attribute 'popitem'" + ) + + def setdefault(self, key, default=None): + # We gently pretend we're a Python 3 mappingproxy. + raise AttributeError( + "'mappingproxy' object has no attribute 'setdefault'" + ) + + def __repr__(self): + # Override to be identical to the Python 3 version. + return "mappingproxy(" + repr(self.data) + ")" + + def metadata_proxy(d): + res = ReadOnlyDict() + res.data.update(d) # We blocked update, so we have to do it like this. + return res + + def just_warn(*args, **kw): # pragma: no cover + """ + We only warn on Python 3 because we are not aware of any concrete + consequences of not setting the cell on Python 2. + """ + +else: # Python 3 and later. + from collections.abc import Mapping, Sequence # noqa + + def just_warn(*args, **kw): + """ + We only warn on Python 3 because we are not aware of any concrete + consequences of not setting the cell on Python 2. + """ + warnings.warn( + "Running interpreter doesn't sufficiently support code object " + "introspection. Some features like bare super() or accessing " + "__class__ will not work with slotted classes.", + RuntimeWarning, + stacklevel=2, + ) + + def isclass(klass): + return isinstance(klass, type) + + TYPE = "class" + + def iteritems(d): + return d.items() + + new_class = types.new_class + + def metadata_proxy(d): + return types.MappingProxyType(dict(d)) + + +def make_set_closure_cell(): + """Return a function of two arguments (cell, value) which sets + the value stored in the closure cell `cell` to `value`. + """ + # pypy makes this easy. (It also supports the logic below, but + # why not do the easy/fast thing?) + if PYPY: + + def set_closure_cell(cell, value): + cell.__setstate__((value,)) + + return set_closure_cell + + # Otherwise gotta do it the hard way. + + # Create a function that will set its first cellvar to `value`. + def set_first_cellvar_to(value): + x = value + return + + # This function will be eliminated as dead code, but + # not before its reference to `x` forces `x` to be + # represented as a closure cell rather than a local. + def force_x_to_be_a_cell(): # pragma: no cover + return x + + try: + # Extract the code object and make sure our assumptions about + # the closure behavior are correct. + if PY2: + co = set_first_cellvar_to.func_code + else: + co = set_first_cellvar_to.__code__ + if co.co_cellvars != ("x",) or co.co_freevars != (): + raise AssertionError # pragma: no cover + + # Convert this code object to a code object that sets the + # function's first _freevar_ (not cellvar) to the argument. + if sys.version_info >= (3, 8): + # CPython 3.8+ has an incompatible CodeType signature + # (added a posonlyargcount argument) but also added + # CodeType.replace() to do this without counting parameters. 
+ set_first_freevar_code = co.replace( + co_cellvars=co.co_freevars, co_freevars=co.co_cellvars + ) + else: + args = [co.co_argcount] + if not PY2: + args.append(co.co_kwonlyargcount) + args.extend( + [ + co.co_nlocals, + co.co_stacksize, + co.co_flags, + co.co_code, + co.co_consts, + co.co_names, + co.co_varnames, + co.co_filename, + co.co_name, + co.co_firstlineno, + co.co_lnotab, + # These two arguments are reversed: + co.co_cellvars, + co.co_freevars, + ] + ) + set_first_freevar_code = types.CodeType(*args) + + def set_closure_cell(cell, value): + # Create a function using the set_first_freevar_code, + # whose first closure cell is `cell`. Calling it will + # change the value of that cell. + setter = types.FunctionType( + set_first_freevar_code, {}, "setter", (), (cell,) + ) + # And call it to set the cell. + setter(value) + + # Make sure it works on this interpreter: + def make_func_with_cell(): + x = None + + def func(): + return x # pragma: no cover + + return func + + if PY2: + cell = make_func_with_cell().func_closure[0] + else: + cell = make_func_with_cell().__closure__[0] + set_closure_cell(cell, 100) + if cell.cell_contents != 100: + raise AssertionError # pragma: no cover + + except Exception: + return just_warn + else: + return set_closure_cell + + +set_closure_cell = make_set_closure_cell() + +# Thread-local global to track attrs instances which are already being repr'd. +# This is needed because there is no other (thread-safe) way to pass info +# about the instances that are already being repr'd through the call stack +# in order to ensure we don't perform infinite recursion. +# +# For instance, if an instance contains a dict which contains that instance, +# we need to know that we're already repr'ing the outside instance from within +# the dict's repr() call. +# +# This lives here rather than in _make.py so that the functions in _make.py +# don't have a direct reference to the thread-local in their globals dict. +# If they have such a reference, it breaks cloudpickle. +repr_context = threading.local() diff --git a/openpype/vendor/python/python_2/attr/_config.py b/openpype/vendor/python/python_2/attr/_config.py new file mode 100644 index 0000000000..fc9be29d00 --- /dev/null +++ b/openpype/vendor/python/python_2/attr/_config.py @@ -0,0 +1,33 @@ +# SPDX-License-Identifier: MIT + +from __future__ import absolute_import, division, print_function + + +__all__ = ["set_run_validators", "get_run_validators"] + +_run_validators = True + + +def set_run_validators(run): + """ + Set whether or not validators are run. By default, they are run. + + .. deprecated:: 21.3.0 It will not be removed, but it also will not be + moved to new ``attrs`` namespace. Use `attrs.validators.set_disabled()` + instead. + """ + if not isinstance(run, bool): + raise TypeError("'run' must be bool.") + global _run_validators + _run_validators = run + + +def get_run_validators(): + """ + Return whether or not validators are run. + + .. deprecated:: 21.3.0 It will not be removed, but it also will not be + moved to new ``attrs`` namespace. Use `attrs.validators.get_disabled()` + instead. 
+ """ + return _run_validators diff --git a/openpype/vendor/python/python_2/attr/_funcs.py b/openpype/vendor/python/python_2/attr/_funcs.py new file mode 100644 index 0000000000..4c90085a40 --- /dev/null +++ b/openpype/vendor/python/python_2/attr/_funcs.py @@ -0,0 +1,422 @@ +# SPDX-License-Identifier: MIT + +from __future__ import absolute_import, division, print_function + +import copy + +from ._compat import iteritems +from ._make import NOTHING, _obj_setattr, fields +from .exceptions import AttrsAttributeNotFoundError + + +def asdict( + inst, + recurse=True, + filter=None, + dict_factory=dict, + retain_collection_types=False, + value_serializer=None, +): + """ + Return the ``attrs`` attribute values of *inst* as a dict. + + Optionally recurse into other ``attrs``-decorated classes. + + :param inst: Instance of an ``attrs``-decorated class. + :param bool recurse: Recurse into classes that are also + ``attrs``-decorated. + :param callable filter: A callable whose return code determines whether an + attribute or element is included (``True``) or dropped (``False``). Is + called with the `attrs.Attribute` as the first argument and the + value as the second argument. + :param callable dict_factory: A callable to produce dictionaries from. For + example, to produce ordered dictionaries instead of normal Python + dictionaries, pass in ``collections.OrderedDict``. + :param bool retain_collection_types: Do not convert to ``list`` when + encountering an attribute whose type is ``tuple`` or ``set``. Only + meaningful if ``recurse`` is ``True``. + :param Optional[callable] value_serializer: A hook that is called for every + attribute or dict key/value. It receives the current instance, field + and value and must return the (updated) value. The hook is run *after* + the optional *filter* has been applied. + + :rtype: return type of *dict_factory* + + :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs`` + class. + + .. versionadded:: 16.0.0 *dict_factory* + .. versionadded:: 16.1.0 *retain_collection_types* + .. versionadded:: 20.3.0 *value_serializer* + .. versionadded:: 21.3.0 If a dict has a collection for a key, it is + serialized as a tuple. 
+ """ + attrs = fields(inst.__class__) + rv = dict_factory() + for a in attrs: + v = getattr(inst, a.name) + if filter is not None and not filter(a, v): + continue + + if value_serializer is not None: + v = value_serializer(inst, a, v) + + if recurse is True: + if has(v.__class__): + rv[a.name] = asdict( + v, + recurse=True, + filter=filter, + dict_factory=dict_factory, + retain_collection_types=retain_collection_types, + value_serializer=value_serializer, + ) + elif isinstance(v, (tuple, list, set, frozenset)): + cf = v.__class__ if retain_collection_types is True else list + rv[a.name] = cf( + [ + _asdict_anything( + i, + is_key=False, + filter=filter, + dict_factory=dict_factory, + retain_collection_types=retain_collection_types, + value_serializer=value_serializer, + ) + for i in v + ] + ) + elif isinstance(v, dict): + df = dict_factory + rv[a.name] = df( + ( + _asdict_anything( + kk, + is_key=True, + filter=filter, + dict_factory=df, + retain_collection_types=retain_collection_types, + value_serializer=value_serializer, + ), + _asdict_anything( + vv, + is_key=False, + filter=filter, + dict_factory=df, + retain_collection_types=retain_collection_types, + value_serializer=value_serializer, + ), + ) + for kk, vv in iteritems(v) + ) + else: + rv[a.name] = v + else: + rv[a.name] = v + return rv + + +def _asdict_anything( + val, + is_key, + filter, + dict_factory, + retain_collection_types, + value_serializer, +): + """ + ``asdict`` only works on attrs instances, this works on anything. + """ + if getattr(val.__class__, "__attrs_attrs__", None) is not None: + # Attrs class. + rv = asdict( + val, + recurse=True, + filter=filter, + dict_factory=dict_factory, + retain_collection_types=retain_collection_types, + value_serializer=value_serializer, + ) + elif isinstance(val, (tuple, list, set, frozenset)): + if retain_collection_types is True: + cf = val.__class__ + elif is_key: + cf = tuple + else: + cf = list + + rv = cf( + [ + _asdict_anything( + i, + is_key=False, + filter=filter, + dict_factory=dict_factory, + retain_collection_types=retain_collection_types, + value_serializer=value_serializer, + ) + for i in val + ] + ) + elif isinstance(val, dict): + df = dict_factory + rv = df( + ( + _asdict_anything( + kk, + is_key=True, + filter=filter, + dict_factory=df, + retain_collection_types=retain_collection_types, + value_serializer=value_serializer, + ), + _asdict_anything( + vv, + is_key=False, + filter=filter, + dict_factory=df, + retain_collection_types=retain_collection_types, + value_serializer=value_serializer, + ), + ) + for kk, vv in iteritems(val) + ) + else: + rv = val + if value_serializer is not None: + rv = value_serializer(None, None, rv) + + return rv + + +def astuple( + inst, + recurse=True, + filter=None, + tuple_factory=tuple, + retain_collection_types=False, +): + """ + Return the ``attrs`` attribute values of *inst* as a tuple. + + Optionally recurse into other ``attrs``-decorated classes. + + :param inst: Instance of an ``attrs``-decorated class. + :param bool recurse: Recurse into classes that are also + ``attrs``-decorated. + :param callable filter: A callable whose return code determines whether an + attribute or element is included (``True``) or dropped (``False``). Is + called with the `attrs.Attribute` as the first argument and the + value as the second argument. + :param callable tuple_factory: A callable to produce tuples from. For + example, to produce lists instead of tuples. 
+ :param bool retain_collection_types: Do not convert to ``list`` + or ``dict`` when encountering an attribute which type is + ``tuple``, ``dict`` or ``set``. Only meaningful if ``recurse`` is + ``True``. + + :rtype: return type of *tuple_factory* + + :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs`` + class. + + .. versionadded:: 16.2.0 + """ + attrs = fields(inst.__class__) + rv = [] + retain = retain_collection_types # Very long. :/ + for a in attrs: + v = getattr(inst, a.name) + if filter is not None and not filter(a, v): + continue + if recurse is True: + if has(v.__class__): + rv.append( + astuple( + v, + recurse=True, + filter=filter, + tuple_factory=tuple_factory, + retain_collection_types=retain, + ) + ) + elif isinstance(v, (tuple, list, set, frozenset)): + cf = v.__class__ if retain is True else list + rv.append( + cf( + [ + astuple( + j, + recurse=True, + filter=filter, + tuple_factory=tuple_factory, + retain_collection_types=retain, + ) + if has(j.__class__) + else j + for j in v + ] + ) + ) + elif isinstance(v, dict): + df = v.__class__ if retain is True else dict + rv.append( + df( + ( + astuple( + kk, + tuple_factory=tuple_factory, + retain_collection_types=retain, + ) + if has(kk.__class__) + else kk, + astuple( + vv, + tuple_factory=tuple_factory, + retain_collection_types=retain, + ) + if has(vv.__class__) + else vv, + ) + for kk, vv in iteritems(v) + ) + ) + else: + rv.append(v) + else: + rv.append(v) + + return rv if tuple_factory is list else tuple_factory(rv) + + +def has(cls): + """ + Check whether *cls* is a class with ``attrs`` attributes. + + :param type cls: Class to introspect. + :raise TypeError: If *cls* is not a class. + + :rtype: bool + """ + return getattr(cls, "__attrs_attrs__", None) is not None + + +def assoc(inst, **changes): + """ + Copy *inst* and apply *changes*. + + :param inst: Instance of a class with ``attrs`` attributes. + :param changes: Keyword changes in the new copy. + + :return: A copy of inst with *changes* incorporated. + + :raise attr.exceptions.AttrsAttributeNotFoundError: If *attr_name* couldn't + be found on *cls*. + :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs`` + class. + + .. deprecated:: 17.1.0 + Use `attrs.evolve` instead if you can. + This function will not be removed du to the slightly different approach + compared to `attrs.evolve`. + """ + import warnings + + warnings.warn( + "assoc is deprecated and will be removed after 2018/01.", + DeprecationWarning, + stacklevel=2, + ) + new = copy.copy(inst) + attrs = fields(inst.__class__) + for k, v in iteritems(changes): + a = getattr(attrs, k, NOTHING) + if a is NOTHING: + raise AttrsAttributeNotFoundError( + "{k} is not an attrs attribute on {cl}.".format( + k=k, cl=new.__class__ + ) + ) + _obj_setattr(new, k, v) + return new + + +def evolve(inst, **changes): + """ + Create a new instance, based on *inst* with *changes* applied. + + :param inst: Instance of a class with ``attrs`` attributes. + :param changes: Keyword changes in the new copy. + + :return: A copy of inst with *changes* incorporated. + + :raise TypeError: If *attr_name* couldn't be found in the class + ``__init__``. + :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs`` + class. + + .. versionadded:: 17.1.0 + """ + cls = inst.__class__ + attrs = fields(cls) + for a in attrs: + if not a.init: + continue + attr_name = a.name # To deal with private attributes. 
+ init_name = attr_name if attr_name[0] != "_" else attr_name[1:] + if init_name not in changes: + changes[init_name] = getattr(inst, attr_name) + + return cls(**changes) + + +def resolve_types(cls, globalns=None, localns=None, attribs=None): + """ + Resolve any strings and forward annotations in type annotations. + + This is only required if you need concrete types in `Attribute`'s *type* + field. In other words, you don't need to resolve your types if you only + use them for static type checking. + + With no arguments, names will be looked up in the module in which the class + was created. If this is not what you want, e.g. if the name only exists + inside a method, you may pass *globalns* or *localns* to specify other + dictionaries in which to look up these names. See the docs of + `typing.get_type_hints` for more details. + + :param type cls: Class to resolve. + :param Optional[dict] globalns: Dictionary containing global variables. + :param Optional[dict] localns: Dictionary containing local variables. + :param Optional[list] attribs: List of attribs for the given class. + This is necessary when calling from inside a ``field_transformer`` + since *cls* is not an ``attrs`` class yet. + + :raise TypeError: If *cls* is not a class. + :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs`` + class and you didn't pass any attribs. + :raise NameError: If types cannot be resolved because of missing variables. + + :returns: *cls* so you can use this function also as a class decorator. + Please note that you have to apply it **after** `attrs.define`. That + means the decorator has to come in the line **before** `attrs.define`. + + .. versionadded:: 20.1.0 + .. versionadded:: 21.1.0 *attribs* + + """ + # Since calling get_type_hints is expensive we cache whether we've + # done it already. + if getattr(cls, "__attrs_types_resolved__", None) != cls: + import typing + + hints = typing.get_type_hints(cls, globalns=globalns, localns=localns) + for field in fields(cls) if attribs is None else attribs: + if field.name in hints: + # Since fields have been frozen we must work around it. + _obj_setattr(field, "type", hints[field.name]) + # We store the class we resolved so that subclasses know they haven't + # been resolved. + cls.__attrs_types_resolved__ = cls + + # Return the class so you can use it as a decorator too. + return cls diff --git a/openpype/vendor/python/python_2/attr/_make.py b/openpype/vendor/python/python_2/attr/_make.py new file mode 100644 index 0000000000..d46f8a3e7a --- /dev/null +++ b/openpype/vendor/python/python_2/attr/_make.py @@ -0,0 +1,3173 @@ +# SPDX-License-Identifier: MIT + +from __future__ import absolute_import, division, print_function + +import copy +import inspect +import linecache +import sys +import warnings + +from operator import itemgetter + +# We need to import _compat itself in addition to the _compat members to avoid +# having the thread-local in the globals here. +from . import _compat, _config, setters +from ._compat import ( + HAS_F_STRINGS, + PY2, + PY310, + PYPY, + isclass, + iteritems, + metadata_proxy, + new_class, + ordered_dict, + set_closure_cell, +) +from .exceptions import ( + DefaultAlreadySetError, + FrozenInstanceError, + NotAnAttrsClassError, + PythonTooOldError, + UnannotatedAttributeError, +) + + +if not PY2: + import typing + + +# This is used at least twice, so cache it here. 
+_obj_setattr = object.__setattr__ +_init_converter_pat = "__attr_converter_%s" +_init_factory_pat = "__attr_factory_{}" +_tuple_property_pat = ( + " {attr_name} = _attrs_property(_attrs_itemgetter({index}))" +) +_classvar_prefixes = ( + "typing.ClassVar", + "t.ClassVar", + "ClassVar", + "typing_extensions.ClassVar", +) +# we don't use a double-underscore prefix because that triggers +# name mangling when trying to create a slot for the field +# (when slots=True) +_hash_cache_field = "_attrs_cached_hash" + +_empty_metadata_singleton = metadata_proxy({}) + +# Unique object for unequivocal getattr() defaults. +_sentinel = object() + +_ng_default_on_setattr = setters.pipe(setters.convert, setters.validate) + + +class _Nothing(object): + """ + Sentinel class to indicate the lack of a value when ``None`` is ambiguous. + + ``_Nothing`` is a singleton. There is only ever one of it. + + .. versionchanged:: 21.1.0 ``bool(NOTHING)`` is now False. + """ + + _singleton = None + + def __new__(cls): + if _Nothing._singleton is None: + _Nothing._singleton = super(_Nothing, cls).__new__(cls) + return _Nothing._singleton + + def __repr__(self): + return "NOTHING" + + def __bool__(self): + return False + + def __len__(self): + return 0 # __bool__ for Python 2 + + +NOTHING = _Nothing() +""" +Sentinel to indicate the lack of a value when ``None`` is ambiguous. +""" + + +class _CacheHashWrapper(int): + """ + An integer subclass that pickles / copies as None + + This is used for non-slots classes with ``cache_hash=True``, to avoid + serializing a potentially (even likely) invalid hash value. Since ``None`` + is the default value for uncalculated hashes, whenever this is copied, + the copy's value for the hash should automatically reset. + + See GH #613 for more details. + """ + + if PY2: + # For some reason `type(None)` isn't callable in Python 2, but we don't + # actually need a constructor for None objects, we just need any + # available function that returns None. + def __reduce__(self, _none_constructor=getattr, _args=(0, "", None)): + return _none_constructor, _args + + else: + + def __reduce__(self, _none_constructor=type(None), _args=()): + return _none_constructor, _args + + +def attrib( + default=NOTHING, + validator=None, + repr=True, + cmp=None, + hash=None, + init=True, + metadata=None, + type=None, + converter=None, + factory=None, + kw_only=False, + eq=None, + order=None, + on_setattr=None, +): + """ + Create a new attribute on a class. + + .. warning:: + + Does *not* do anything unless the class is also decorated with + `attr.s`! + + :param default: A value that is used if an ``attrs``-generated ``__init__`` + is used and no value is passed while instantiating or the attribute is + excluded using ``init=False``. + + If the value is an instance of `attrs.Factory`, its callable will be + used to construct a new value (useful for mutable data types like lists + or dicts). + + If a default is not set (or set manually to `attrs.NOTHING`), a value + *must* be supplied when instantiating; otherwise a `TypeError` + will be raised. + + The default can also be set using decorator notation as shown below. + + :type default: Any value + + :param callable factory: Syntactic sugar for + ``default=attr.Factory(factory)``. + + :param validator: `callable` that is called by ``attrs``-generated + ``__init__`` methods after the instance has been initialized. They + receive the initialized instance, the :func:`~attrs.Attribute`, and the + passed value. 
+ + The return value is *not* inspected so the validator has to throw an + exception itself. + + If a `list` is passed, its items are treated as validators and must + all pass. + + Validators can be globally disabled and re-enabled using + `get_run_validators`. + + The validator can also be set using decorator notation as shown below. + + :type validator: `callable` or a `list` of `callable`\\ s. + + :param repr: Include this attribute in the generated ``__repr__`` + method. If ``True``, include the attribute; if ``False``, omit it. By + default, the built-in ``repr()`` function is used. To override how the + attribute value is formatted, pass a ``callable`` that takes a single + value and returns a string. Note that the resulting string is used + as-is, i.e. it will be used directly *instead* of calling ``repr()`` + (the default). + :type repr: a `bool` or a `callable` to use a custom function. + + :param eq: If ``True`` (default), include this attribute in the + generated ``__eq__`` and ``__ne__`` methods that check two instances + for equality. To override how the attribute value is compared, + pass a ``callable`` that takes a single value and returns the value + to be compared. + :type eq: a `bool` or a `callable`. + + :param order: If ``True`` (default), include this attributes in the + generated ``__lt__``, ``__le__``, ``__gt__`` and ``__ge__`` methods. + To override how the attribute value is ordered, + pass a ``callable`` that takes a single value and returns the value + to be ordered. + :type order: a `bool` or a `callable`. + + :param cmp: Setting *cmp* is equivalent to setting *eq* and *order* to the + same value. Must not be mixed with *eq* or *order*. + :type cmp: a `bool` or a `callable`. + + :param Optional[bool] hash: Include this attribute in the generated + ``__hash__`` method. If ``None`` (default), mirror *eq*'s value. This + is the correct behavior according the Python spec. Setting this value + to anything else than ``None`` is *discouraged*. + :param bool init: Include this attribute in the generated ``__init__`` + method. It is possible to set this to ``False`` and set a default + value. In that case this attributed is unconditionally initialized + with the specified default value or factory. + :param callable converter: `callable` that is called by + ``attrs``-generated ``__init__`` methods to convert attribute's value + to the desired format. It is given the passed-in value, and the + returned value will be used as the new value of the attribute. The + value is converted before being passed to the validator, if any. + :param metadata: An arbitrary mapping, to be used by third-party + components. See `extending_metadata`. + :param type: The type of the attribute. In Python 3.6 or greater, the + preferred method to specify the type is using a variable annotation + (see `PEP 526 `_). + This argument is provided for backward compatibility. + Regardless of the approach used, the type will be stored on + ``Attribute.type``. + + Please note that ``attrs`` doesn't do anything with this metadata by + itself. You can use it as part of your own code or for + `static type checking `. + :param kw_only: Make this attribute keyword-only (Python 3+) + in the generated ``__init__`` (if ``init`` is ``False``, this + parameter is ignored). + :param on_setattr: Allows to overwrite the *on_setattr* setting from + `attr.s`. If left `None`, the *on_setattr* value from `attr.s` is used. 
+ Set to `attrs.setters.NO_OP` to run **no** `setattr` hooks for this + attribute -- regardless of the setting in `attr.s`. + :type on_setattr: `callable`, or a list of callables, or `None`, or + `attrs.setters.NO_OP` + + .. versionadded:: 15.2.0 *convert* + .. versionadded:: 16.3.0 *metadata* + .. versionchanged:: 17.1.0 *validator* can be a ``list`` now. + .. versionchanged:: 17.1.0 + *hash* is ``None`` and therefore mirrors *eq* by default. + .. versionadded:: 17.3.0 *type* + .. deprecated:: 17.4.0 *convert* + .. versionadded:: 17.4.0 *converter* as a replacement for the deprecated + *convert* to achieve consistency with other noun-based arguments. + .. versionadded:: 18.1.0 + ``factory=f`` is syntactic sugar for ``default=attr.Factory(f)``. + .. versionadded:: 18.2.0 *kw_only* + .. versionchanged:: 19.2.0 *convert* keyword argument removed. + .. versionchanged:: 19.2.0 *repr* also accepts a custom callable. + .. deprecated:: 19.2.0 *cmp* Removal on or after 2021-06-01. + .. versionadded:: 19.2.0 *eq* and *order* + .. versionadded:: 20.1.0 *on_setattr* + .. versionchanged:: 20.3.0 *kw_only* backported to Python 2 + .. versionchanged:: 21.1.0 + *eq*, *order*, and *cmp* also accept a custom callable + .. versionchanged:: 21.1.0 *cmp* undeprecated + """ + eq, eq_key, order, order_key = _determine_attrib_eq_order( + cmp, eq, order, True + ) + + if hash is not None and hash is not True and hash is not False: + raise TypeError( + "Invalid value for hash. Must be True, False, or None." + ) + + if factory is not None: + if default is not NOTHING: + raise ValueError( + "The `default` and `factory` arguments are mutually " + "exclusive." + ) + if not callable(factory): + raise ValueError("The `factory` argument must be a callable.") + default = Factory(factory) + + if metadata is None: + metadata = {} + + # Apply syntactic sugar by auto-wrapping. + if isinstance(on_setattr, (list, tuple)): + on_setattr = setters.pipe(*on_setattr) + + if validator and isinstance(validator, (list, tuple)): + validator = and_(*validator) + + if converter and isinstance(converter, (list, tuple)): + converter = pipe(*converter) + + return _CountingAttr( + default=default, + validator=validator, + repr=repr, + cmp=None, + hash=hash, + init=init, + converter=converter, + metadata=metadata, + type=type, + kw_only=kw_only, + eq=eq, + eq_key=eq_key, + order=order, + order_key=order_key, + on_setattr=on_setattr, + ) + + +def _compile_and_eval(script, globs, locs=None, filename=""): + """ + "Exec" the script with the given global (globs) and local (locs) variables. + """ + bytecode = compile(script, filename, "exec") + eval(bytecode, globs, locs) + + +def _make_method(name, script, filename, globs=None): + """ + Create the method with the script given and return the method object. + """ + locs = {} + if globs is None: + globs = {} + + # In order of debuggers like PDB being able to step through the code, + # we add a fake linecache entry. + count = 1 + base_filename = filename + while True: + linecache_tuple = ( + len(script), + None, + script.splitlines(True), + filename, + ) + old_val = linecache.cache.setdefault(filename, linecache_tuple) + if old_val == linecache_tuple: + break + else: + filename = "{}-{}>".format(base_filename[:-1], count) + count += 1 + + _compile_and_eval(script, globs, locs, filename) + + return locs[name] + + +def _make_attr_tuple_class(cls_name, attr_names): + """ + Create a tuple subclass to hold `Attribute`s for an `attrs` class. + + The subclass is a bare tuple with properties for names. 
+ + class MyClassAttributes(tuple): + __slots__ = () + x = property(itemgetter(0)) + """ + attr_class_name = "{}Attributes".format(cls_name) + attr_class_template = [ + "class {}(tuple):".format(attr_class_name), + " __slots__ = ()", + ] + if attr_names: + for i, attr_name in enumerate(attr_names): + attr_class_template.append( + _tuple_property_pat.format(index=i, attr_name=attr_name) + ) + else: + attr_class_template.append(" pass") + globs = {"_attrs_itemgetter": itemgetter, "_attrs_property": property} + _compile_and_eval("\n".join(attr_class_template), globs) + return globs[attr_class_name] + + +# Tuple class for extracted attributes from a class definition. +# `base_attrs` is a subset of `attrs`. +_Attributes = _make_attr_tuple_class( + "_Attributes", + [ + # all attributes to build dunder methods for + "attrs", + # attributes that have been inherited + "base_attrs", + # map inherited attributes to their originating classes + "base_attrs_map", + ], +) + + +def _is_class_var(annot): + """ + Check whether *annot* is a typing.ClassVar. + + The string comparison hack is used to avoid evaluating all string + annotations which would put attrs-based classes at a performance + disadvantage compared to plain old classes. + """ + annot = str(annot) + + # Annotation can be quoted. + if annot.startswith(("'", '"')) and annot.endswith(("'", '"')): + annot = annot[1:-1] + + return annot.startswith(_classvar_prefixes) + + +def _has_own_attribute(cls, attrib_name): + """ + Check whether *cls* defines *attrib_name* (and doesn't just inherit it). + + Requires Python 3. + """ + attr = getattr(cls, attrib_name, _sentinel) + if attr is _sentinel: + return False + + for base_cls in cls.__mro__[1:]: + a = getattr(base_cls, attrib_name, None) + if attr is a: + return False + + return True + + +def _get_annotations(cls): + """ + Get annotations for *cls*. + """ + if _has_own_attribute(cls, "__annotations__"): + return cls.__annotations__ + + return {} + + +def _counter_getter(e): + """ + Key function for sorting to avoid re-creating a lambda for every class. + """ + return e[1].counter + + +def _collect_base_attrs(cls, taken_attr_names): + """ + Collect attr.ibs from base classes of *cls*, except *taken_attr_names*. + """ + base_attrs = [] + base_attr_map = {} # A dictionary of base attrs to their classes. + + # Traverse the MRO and collect attributes. + for base_cls in reversed(cls.__mro__[1:-1]): + for a in getattr(base_cls, "__attrs_attrs__", []): + if a.inherited or a.name in taken_attr_names: + continue + + a = a.evolve(inherited=True) + base_attrs.append(a) + base_attr_map[a.name] = base_cls + + # For each name, only keep the freshest definition i.e. the furthest at the + # back. base_attr_map is fine because it gets overwritten with every new + # instance. + filtered = [] + seen = set() + for a in reversed(base_attrs): + if a.name in seen: + continue + filtered.insert(0, a) + seen.add(a.name) + + return filtered, base_attr_map + + +def _collect_base_attrs_broken(cls, taken_attr_names): + """ + Collect attr.ibs from base classes of *cls*, except *taken_attr_names*. + + N.B. *taken_attr_names* will be mutated. + + Adhere to the old incorrect behavior. + + Notably it collects from the front and considers inherited attributes which + leads to the buggy behavior reported in #428. + """ + base_attrs = [] + base_attr_map = {} # A dictionary of base attrs to their classes. + + # Traverse the MRO and collect attributes. 
+ for base_cls in cls.__mro__[1:-1]: + for a in getattr(base_cls, "__attrs_attrs__", []): + if a.name in taken_attr_names: + continue + + a = a.evolve(inherited=True) + taken_attr_names.add(a.name) + base_attrs.append(a) + base_attr_map[a.name] = base_cls + + return base_attrs, base_attr_map + + +def _transform_attrs( + cls, these, auto_attribs, kw_only, collect_by_mro, field_transformer +): + """ + Transform all `_CountingAttr`s on a class into `Attribute`s. + + If *these* is passed, use that and don't look for them on the class. + + *collect_by_mro* is True, collect them in the correct MRO order, otherwise + use the old -- incorrect -- order. See #428. + + Return an `_Attributes`. + """ + cd = cls.__dict__ + anns = _get_annotations(cls) + + if these is not None: + ca_list = [(name, ca) for name, ca in iteritems(these)] + + if not isinstance(these, ordered_dict): + ca_list.sort(key=_counter_getter) + elif auto_attribs is True: + ca_names = { + name + for name, attr in cd.items() + if isinstance(attr, _CountingAttr) + } + ca_list = [] + annot_names = set() + for attr_name, type in anns.items(): + if _is_class_var(type): + continue + annot_names.add(attr_name) + a = cd.get(attr_name, NOTHING) + + if not isinstance(a, _CountingAttr): + if a is NOTHING: + a = attrib() + else: + a = attrib(default=a) + ca_list.append((attr_name, a)) + + unannotated = ca_names - annot_names + if len(unannotated) > 0: + raise UnannotatedAttributeError( + "The following `attr.ib`s lack a type annotation: " + + ", ".join( + sorted(unannotated, key=lambda n: cd.get(n).counter) + ) + + "." + ) + else: + ca_list = sorted( + ( + (name, attr) + for name, attr in cd.items() + if isinstance(attr, _CountingAttr) + ), + key=lambda e: e[1].counter, + ) + + own_attrs = [ + Attribute.from_counting_attr( + name=attr_name, ca=ca, type=anns.get(attr_name) + ) + for attr_name, ca in ca_list + ] + + if collect_by_mro: + base_attrs, base_attr_map = _collect_base_attrs( + cls, {a.name for a in own_attrs} + ) + else: + base_attrs, base_attr_map = _collect_base_attrs_broken( + cls, {a.name for a in own_attrs} + ) + + if kw_only: + own_attrs = [a.evolve(kw_only=True) for a in own_attrs] + base_attrs = [a.evolve(kw_only=True) for a in base_attrs] + + attrs = base_attrs + own_attrs + + # Mandatory vs non-mandatory attr order only matters when they are part of + # the __init__ signature and when they aren't kw_only (which are moved to + # the end and can be mandatory or non-mandatory in any order, as they will + # be specified as keyword args anyway). Check the order of those attrs: + had_default = False + for a in (a for a in attrs if a.init is not False and a.kw_only is False): + if had_default is True and a.default is NOTHING: + raise ValueError( + "No mandatory attributes allowed after an attribute with a " + "default value or factory. Attribute in question: %r" % (a,) + ) + + if had_default is False and a.default is not NOTHING: + had_default = True + + if field_transformer is not None: + attrs = field_transformer(cls, attrs) + + # Create AttrsClass *after* applying the field_transformer since it may + # add or remove attributes! + attr_names = [a.name for a in attrs] + AttrsClass = _make_attr_tuple_class(cls.__name__, attr_names) + + return _Attributes((AttrsClass(attrs), base_attrs, base_attr_map)) + + +if PYPY: + + def _frozen_setattrs(self, name, value): + """ + Attached to frozen classes as __setattr__. 
+ """ + if isinstance(self, BaseException) and name in ( + "__cause__", + "__context__", + ): + BaseException.__setattr__(self, name, value) + return + + raise FrozenInstanceError() + +else: + + def _frozen_setattrs(self, name, value): + """ + Attached to frozen classes as __setattr__. + """ + raise FrozenInstanceError() + + +def _frozen_delattrs(self, name): + """ + Attached to frozen classes as __delattr__. + """ + raise FrozenInstanceError() + + +class _ClassBuilder(object): + """ + Iteratively build *one* class. + """ + + __slots__ = ( + "_attr_names", + "_attrs", + "_base_attr_map", + "_base_names", + "_cache_hash", + "_cls", + "_cls_dict", + "_delete_attribs", + "_frozen", + "_has_pre_init", + "_has_post_init", + "_is_exc", + "_on_setattr", + "_slots", + "_weakref_slot", + "_wrote_own_setattr", + "_has_custom_setattr", + ) + + def __init__( + self, + cls, + these, + slots, + frozen, + weakref_slot, + getstate_setstate, + auto_attribs, + kw_only, + cache_hash, + is_exc, + collect_by_mro, + on_setattr, + has_custom_setattr, + field_transformer, + ): + attrs, base_attrs, base_map = _transform_attrs( + cls, + these, + auto_attribs, + kw_only, + collect_by_mro, + field_transformer, + ) + + self._cls = cls + self._cls_dict = dict(cls.__dict__) if slots else {} + self._attrs = attrs + self._base_names = set(a.name for a in base_attrs) + self._base_attr_map = base_map + self._attr_names = tuple(a.name for a in attrs) + self._slots = slots + self._frozen = frozen + self._weakref_slot = weakref_slot + self._cache_hash = cache_hash + self._has_pre_init = bool(getattr(cls, "__attrs_pre_init__", False)) + self._has_post_init = bool(getattr(cls, "__attrs_post_init__", False)) + self._delete_attribs = not bool(these) + self._is_exc = is_exc + self._on_setattr = on_setattr + + self._has_custom_setattr = has_custom_setattr + self._wrote_own_setattr = False + + self._cls_dict["__attrs_attrs__"] = self._attrs + + if frozen: + self._cls_dict["__setattr__"] = _frozen_setattrs + self._cls_dict["__delattr__"] = _frozen_delattrs + + self._wrote_own_setattr = True + elif on_setattr in ( + _ng_default_on_setattr, + setters.validate, + setters.convert, + ): + has_validator = has_converter = False + for a in attrs: + if a.validator is not None: + has_validator = True + if a.converter is not None: + has_converter = True + + if has_validator and has_converter: + break + if ( + ( + on_setattr == _ng_default_on_setattr + and not (has_validator or has_converter) + ) + or (on_setattr == setters.validate and not has_validator) + or (on_setattr == setters.convert and not has_converter) + ): + # If class-level on_setattr is set to convert + validate, but + # there's no field to convert or validate, pretend like there's + # no on_setattr. + self._on_setattr = None + + if getstate_setstate: + ( + self._cls_dict["__getstate__"], + self._cls_dict["__setstate__"], + ) = self._make_getstate_setstate() + + def __repr__(self): + return "<_ClassBuilder(cls={cls})>".format(cls=self._cls.__name__) + + def build_class(self): + """ + Finalize class based on the accumulated configuration. + + Builder cannot be used after calling this method. + """ + if self._slots is True: + return self._create_slots_class() + else: + return self._patch_original_class() + + def _patch_original_class(self): + """ + Apply accumulated methods and return the class. + """ + cls = self._cls + base_names = self._base_names + + # Clean class of attribute definitions (`attr.ib()`s). 
+ if self._delete_attribs: + for name in self._attr_names: + if ( + name not in base_names + and getattr(cls, name, _sentinel) is not _sentinel + ): + try: + delattr(cls, name) + except AttributeError: + # This can happen if a base class defines a class + # variable and we want to set an attribute with the + # same name by using only a type annotation. + pass + + # Attach our dunder methods. + for name, value in self._cls_dict.items(): + setattr(cls, name, value) + + # If we've inherited an attrs __setattr__ and don't write our own, + # reset it to object's. + if not self._wrote_own_setattr and getattr( + cls, "__attrs_own_setattr__", False + ): + cls.__attrs_own_setattr__ = False + + if not self._has_custom_setattr: + cls.__setattr__ = object.__setattr__ + + return cls + + def _create_slots_class(self): + """ + Build and return a new class with a `__slots__` attribute. + """ + cd = { + k: v + for k, v in iteritems(self._cls_dict) + if k not in tuple(self._attr_names) + ("__dict__", "__weakref__") + } + + # If our class doesn't have its own implementation of __setattr__ + # (either from the user or by us), check the bases, if one of them has + # an attrs-made __setattr__, that needs to be reset. We don't walk the + # MRO because we only care about our immediate base classes. + # XXX: This can be confused by subclassing a slotted attrs class with + # XXX: a non-attrs class and subclass the resulting class with an attrs + # XXX: class. See `test_slotted_confused` for details. For now that's + # XXX: OK with us. + if not self._wrote_own_setattr: + cd["__attrs_own_setattr__"] = False + + if not self._has_custom_setattr: + for base_cls in self._cls.__bases__: + if base_cls.__dict__.get("__attrs_own_setattr__", False): + cd["__setattr__"] = object.__setattr__ + break + + # Traverse the MRO to collect existing slots + # and check for an existing __weakref__. + existing_slots = dict() + weakref_inherited = False + for base_cls in self._cls.__mro__[1:-1]: + if base_cls.__dict__.get("__weakref__", None) is not None: + weakref_inherited = True + existing_slots.update( + { + name: getattr(base_cls, name) + for name in getattr(base_cls, "__slots__", []) + } + ) + + base_names = set(self._base_names) + + names = self._attr_names + if ( + self._weakref_slot + and "__weakref__" not in getattr(self._cls, "__slots__", ()) + and "__weakref__" not in names + and not weakref_inherited + ): + names += ("__weakref__",) + + # We only add the names of attributes that aren't inherited. + # Setting __slots__ to inherited attributes wastes memory. + slot_names = [name for name in names if name not in base_names] + # There are slots for attributes from current class + # that are defined in parent classes. + # As their descriptors may be overriden by a child class, + # we collect them here and update the class dict + reused_slots = { + slot: slot_descriptor + for slot, slot_descriptor in iteritems(existing_slots) + if slot in slot_names + } + slot_names = [name for name in slot_names if name not in reused_slots] + cd.update(reused_slots) + if self._cache_hash: + slot_names.append(_hash_cache_field) + cd["__slots__"] = tuple(slot_names) + + qualname = getattr(self._cls, "__qualname__", None) + if qualname is not None: + cd["__qualname__"] = qualname + + # Create new class based on old class and our methods. + cls = type(self._cls)(self._cls.__name__, self._cls.__bases__, cd) + + # The following is a fix for + # . 
On Python 3, + # if a method mentions `__class__` or uses the no-arg super(), the + # compiler will bake a reference to the class in the method itself + # as `method.__closure__`. Since we replace the class with a + # clone, we rewrite these references so it keeps working. + for item in cls.__dict__.values(): + if isinstance(item, (classmethod, staticmethod)): + # Class- and staticmethods hide their functions inside. + # These might need to be rewritten as well. + closure_cells = getattr(item.__func__, "__closure__", None) + elif isinstance(item, property): + # Workaround for property `super()` shortcut (PY3-only). + # There is no universal way for other descriptors. + closure_cells = getattr(item.fget, "__closure__", None) + else: + closure_cells = getattr(item, "__closure__", None) + + if not closure_cells: # Catch None or the empty list. + continue + for cell in closure_cells: + try: + match = cell.cell_contents is self._cls + except ValueError: # ValueError: Cell is empty + pass + else: + if match: + set_closure_cell(cell, cls) + + return cls + + def add_repr(self, ns): + self._cls_dict["__repr__"] = self._add_method_dunders( + _make_repr(self._attrs, ns, self._cls) + ) + return self + + def add_str(self): + repr = self._cls_dict.get("__repr__") + if repr is None: + raise ValueError( + "__str__ can only be generated if a __repr__ exists." + ) + + def __str__(self): + return self.__repr__() + + self._cls_dict["__str__"] = self._add_method_dunders(__str__) + return self + + def _make_getstate_setstate(self): + """ + Create custom __setstate__ and __getstate__ methods. + """ + # __weakref__ is not writable. + state_attr_names = tuple( + an for an in self._attr_names if an != "__weakref__" + ) + + def slots_getstate(self): + """ + Automatically created by attrs. + """ + return tuple(getattr(self, name) for name in state_attr_names) + + hash_caching_enabled = self._cache_hash + + def slots_setstate(self, state): + """ + Automatically created by attrs. + """ + __bound_setattr = _obj_setattr.__get__(self, Attribute) + for name, value in zip(state_attr_names, state): + __bound_setattr(name, value) + + # The hash code cache is not included when the object is + # serialized, but it still needs to be initialized to None to + # indicate that the first call to __hash__ should be a cache + # miss. 
+ if hash_caching_enabled: + __bound_setattr(_hash_cache_field, None) + + return slots_getstate, slots_setstate + + def make_unhashable(self): + self._cls_dict["__hash__"] = None + return self + + def add_hash(self): + self._cls_dict["__hash__"] = self._add_method_dunders( + _make_hash( + self._cls, + self._attrs, + frozen=self._frozen, + cache_hash=self._cache_hash, + ) + ) + + return self + + def add_init(self): + self._cls_dict["__init__"] = self._add_method_dunders( + _make_init( + self._cls, + self._attrs, + self._has_pre_init, + self._has_post_init, + self._frozen, + self._slots, + self._cache_hash, + self._base_attr_map, + self._is_exc, + self._on_setattr, + attrs_init=False, + ) + ) + + return self + + def add_match_args(self): + self._cls_dict["__match_args__"] = tuple( + field.name + for field in self._attrs + if field.init and not field.kw_only + ) + + def add_attrs_init(self): + self._cls_dict["__attrs_init__"] = self._add_method_dunders( + _make_init( + self._cls, + self._attrs, + self._has_pre_init, + self._has_post_init, + self._frozen, + self._slots, + self._cache_hash, + self._base_attr_map, + self._is_exc, + self._on_setattr, + attrs_init=True, + ) + ) + + return self + + def add_eq(self): + cd = self._cls_dict + + cd["__eq__"] = self._add_method_dunders( + _make_eq(self._cls, self._attrs) + ) + cd["__ne__"] = self._add_method_dunders(_make_ne()) + + return self + + def add_order(self): + cd = self._cls_dict + + cd["__lt__"], cd["__le__"], cd["__gt__"], cd["__ge__"] = ( + self._add_method_dunders(meth) + for meth in _make_order(self._cls, self._attrs) + ) + + return self + + def add_setattr(self): + if self._frozen: + return self + + sa_attrs = {} + for a in self._attrs: + on_setattr = a.on_setattr or self._on_setattr + if on_setattr and on_setattr is not setters.NO_OP: + sa_attrs[a.name] = a, on_setattr + + if not sa_attrs: + return self + + if self._has_custom_setattr: + # We need to write a __setattr__ but there already is one! + raise ValueError( + "Can't combine custom __setattr__ with on_setattr hooks." + ) + + # docstring comes from _add_method_dunders + def __setattr__(self, name, val): + try: + a, hook = sa_attrs[name] + except KeyError: + nval = val + else: + nval = hook(self, a, val) + + _obj_setattr(self, name, nval) + + self._cls_dict["__attrs_own_setattr__"] = True + self._cls_dict["__setattr__"] = self._add_method_dunders(__setattr__) + self._wrote_own_setattr = True + + return self + + def _add_method_dunders(self, method): + """ + Add __module__ and __qualname__ to a *method* if possible. + """ + try: + method.__module__ = self._cls.__module__ + except AttributeError: + pass + + try: + method.__qualname__ = ".".join( + (self._cls.__qualname__, method.__name__) + ) + except AttributeError: + pass + + try: + method.__doc__ = "Method generated by attrs for class %s." % ( + self._cls.__qualname__, + ) + except AttributeError: + pass + + return method + + +_CMP_DEPRECATION = ( + "The usage of `cmp` is deprecated and will be removed on or after " + "2021-06-01. Please use `eq` and `order` instead." +) + + +def _determine_attrs_eq_order(cmp, eq, order, default_eq): + """ + Validate the combination of *cmp*, *eq*, and *order*. Derive the effective + values of eq and order. If *eq* is None, set it to *default_eq*. + """ + if cmp is not None and any((eq is not None, order is not None)): + raise ValueError("Don't mix `cmp` with `eq' and `order`.") + + # cmp takes precedence due to bw-compatibility. 
+ if cmp is not None: + return cmp, cmp + + # If left None, equality is set to the specified default and ordering + # mirrors equality. + if eq is None: + eq = default_eq + + if order is None: + order = eq + + if eq is False and order is True: + raise ValueError("`order` can only be True if `eq` is True too.") + + return eq, order + + +def _determine_attrib_eq_order(cmp, eq, order, default_eq): + """ + Validate the combination of *cmp*, *eq*, and *order*. Derive the effective + values of eq and order. If *eq* is None, set it to *default_eq*. + """ + if cmp is not None and any((eq is not None, order is not None)): + raise ValueError("Don't mix `cmp` with `eq' and `order`.") + + def decide_callable_or_boolean(value): + """ + Decide whether a key function is used. + """ + if callable(value): + value, key = True, value + else: + key = None + return value, key + + # cmp takes precedence due to bw-compatibility. + if cmp is not None: + cmp, cmp_key = decide_callable_or_boolean(cmp) + return cmp, cmp_key, cmp, cmp_key + + # If left None, equality is set to the specified default and ordering + # mirrors equality. + if eq is None: + eq, eq_key = default_eq, None + else: + eq, eq_key = decide_callable_or_boolean(eq) + + if order is None: + order, order_key = eq, eq_key + else: + order, order_key = decide_callable_or_boolean(order) + + if eq is False and order is True: + raise ValueError("`order` can only be True if `eq` is True too.") + + return eq, eq_key, order, order_key + + +def _determine_whether_to_implement( + cls, flag, auto_detect, dunders, default=True +): + """ + Check whether we should implement a set of methods for *cls*. + + *flag* is the argument passed into @attr.s like 'init', *auto_detect* the + same as passed into @attr.s and *dunders* is a tuple of attribute names + whose presence signal that the user has implemented it themselves. + + Return *default* if no reason for either for or against is found. + + auto_detect must be False on Python 2. + """ + if flag is True or flag is False: + return flag + + if flag is None and auto_detect is False: + return default + + # Logically, flag is None and auto_detect is True here. + for dunder in dunders: + if _has_own_attribute(cls, dunder): + return False + + return default + + +def attrs( + maybe_cls=None, + these=None, + repr_ns=None, + repr=None, + cmp=None, + hash=None, + init=None, + slots=False, + frozen=False, + weakref_slot=True, + str=False, + auto_attribs=False, + kw_only=False, + cache_hash=False, + auto_exc=False, + eq=None, + order=None, + auto_detect=False, + collect_by_mro=False, + getstate_setstate=None, + on_setattr=None, + field_transformer=None, + match_args=True, +): + r""" + A class decorator that adds `dunder + `_\ -methods according to the + specified attributes using `attr.ib` or the *these* argument. + + :param these: A dictionary of name to `attr.ib` mappings. This is + useful to avoid the definition of your attributes within the class body + because you can't (e.g. if you want to add ``__repr__`` methods to + Django models) or don't want to. + + If *these* is not ``None``, ``attrs`` will *not* search the class body + for attributes and will *not* remove any attributes from it. + + If *these* is an ordered dict (`dict` on Python 3.6+, + `collections.OrderedDict` otherwise), the order is deduced from + the order of the attributes inside *these*. Otherwise the order + of the definition of the attributes is used. 
+ + :type these: `dict` of `str` to `attr.ib` + + :param str repr_ns: When using nested classes, there's no way in Python 2 + to automatically detect that. Therefore it's possible to set the + namespace explicitly for a more meaningful ``repr`` output. + :param bool auto_detect: Instead of setting the *init*, *repr*, *eq*, + *order*, and *hash* arguments explicitly, assume they are set to + ``True`` **unless any** of the involved methods for one of the + arguments is implemented in the *current* class (i.e. it is *not* + inherited from some base class). + + So for example by implementing ``__eq__`` on a class yourself, + ``attrs`` will deduce ``eq=False`` and will create *neither* + ``__eq__`` *nor* ``__ne__`` (but Python classes come with a sensible + ``__ne__`` by default, so it *should* be enough to only implement + ``__eq__`` in most cases). + + .. warning:: + + If you prevent ``attrs`` from creating the ordering methods for you + (``order=False``, e.g. by implementing ``__le__``), it becomes + *your* responsibility to make sure its ordering is sound. The best + way is to use the `functools.total_ordering` decorator. + + + Passing ``True`` or ``False`` to *init*, *repr*, *eq*, *order*, + *cmp*, or *hash* overrides whatever *auto_detect* would determine. + + *auto_detect* requires Python 3. Setting it ``True`` on Python 2 raises + an `attrs.exceptions.PythonTooOldError`. + + :param bool repr: Create a ``__repr__`` method with a human readable + representation of ``attrs`` attributes.. + :param bool str: Create a ``__str__`` method that is identical to + ``__repr__``. This is usually not necessary except for + `Exception`\ s. + :param Optional[bool] eq: If ``True`` or ``None`` (default), add ``__eq__`` + and ``__ne__`` methods that check two instances for equality. + + They compare the instances as if they were tuples of their ``attrs`` + attributes if and only if the types of both classes are *identical*! + :param Optional[bool] order: If ``True``, add ``__lt__``, ``__le__``, + ``__gt__``, and ``__ge__`` methods that behave like *eq* above and + allow instances to be ordered. If ``None`` (default) mirror value of + *eq*. + :param Optional[bool] cmp: Setting *cmp* is equivalent to setting *eq* + and *order* to the same value. Must not be mixed with *eq* or *order*. + :param Optional[bool] hash: If ``None`` (default), the ``__hash__`` method + is generated according how *eq* and *frozen* are set. + + 1. If *both* are True, ``attrs`` will generate a ``__hash__`` for you. + 2. If *eq* is True and *frozen* is False, ``__hash__`` will be set to + None, marking it unhashable (which it is). + 3. If *eq* is False, ``__hash__`` will be left untouched meaning the + ``__hash__`` method of the base class will be used (if base class is + ``object``, this means it will fall back to id-based hashing.). + + Although not recommended, you can decide for yourself and force + ``attrs`` to create one (e.g. if the class is immutable even though you + didn't freeze it programmatically) by passing ``True`` or not. Both of + these cases are rather special and should be used carefully. + + See our documentation on `hashing`, Python's documentation on + `object.__hash__`, and the `GitHub issue that led to the default \ + behavior `_ for more + details. + :param bool init: Create a ``__init__`` method that initializes the + ``attrs`` attributes. Leading underscores are stripped for the argument + name. If a ``__attrs_pre_init__`` method exists on the class, it will + be called before the class is initialized. 
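# Sketch of the hash/eq/frozen interplay described a few lines above
# (editorially added example; the class names are invented).
import attr

@attr.s(frozen=True)     # eq=True by default; frozen + eq -> __hash__ is generated
class Point(object):
    x = attr.ib()
    y = attr.ib()

assert len({Point(1, 2), Point(1, 2)}) == 1

@attr.s                  # eq=True but not frozen -> __hash__ is set to None
class Mutable(object):
    x = attr.ib()

# hash(Mutable(1))       # raises TypeError: unhashable type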
If a ``__attrs_post_init__`` + method exists on the class, it will be called after the class is fully + initialized. + + If ``init`` is ``False``, an ``__attrs_init__`` method will be + injected instead. This allows you to define a custom ``__init__`` + method that can do pre-init work such as ``super().__init__()``, + and then call ``__attrs_init__()`` and ``__attrs_post_init__()``. + :param bool slots: Create a `slotted class ` that's more + memory-efficient. Slotted classes are generally superior to the default + dict classes, but have some gotchas you should know about, so we + encourage you to read the `glossary entry `. + :param bool frozen: Make instances immutable after initialization. If + someone attempts to modify a frozen instance, + `attr.exceptions.FrozenInstanceError` is raised. + + .. note:: + + 1. This is achieved by installing a custom ``__setattr__`` method + on your class, so you can't implement your own. + + 2. True immutability is impossible in Python. + + 3. This *does* have a minor a runtime performance `impact + ` when initializing new instances. In other words: + ``__init__`` is slightly slower with ``frozen=True``. + + 4. If a class is frozen, you cannot modify ``self`` in + ``__attrs_post_init__`` or a self-written ``__init__``. You can + circumvent that limitation by using + ``object.__setattr__(self, "attribute_name", value)``. + + 5. Subclasses of a frozen class are frozen too. + + :param bool weakref_slot: Make instances weak-referenceable. This has no + effect unless ``slots`` is also enabled. + :param bool auto_attribs: If ``True``, collect `PEP 526`_-annotated + attributes (Python 3.6 and later only) from the class body. + + In this case, you **must** annotate every field. If ``attrs`` + encounters a field that is set to an `attr.ib` but lacks a type + annotation, an `attr.exceptions.UnannotatedAttributeError` is + raised. Use ``field_name: typing.Any = attr.ib(...)`` if you don't + want to set a type. + + If you assign a value to those attributes (e.g. ``x: int = 42``), that + value becomes the default value like if it were passed using + ``attr.ib(default=42)``. Passing an instance of `attrs.Factory` also + works as expected in most cases (see warning below). + + Attributes annotated as `typing.ClassVar`, and attributes that are + neither annotated nor set to an `attr.ib` are **ignored**. + + .. warning:: + For features that use the attribute name to create decorators (e.g. + `validators `), you still *must* assign `attr.ib` to + them. Otherwise Python will either not find the name or try to use + the default value to call e.g. ``validator`` on it. + + These errors can be quite confusing and probably the most common bug + report on our bug tracker. + + .. _`PEP 526`: https://www.python.org/dev/peps/pep-0526/ + :param bool kw_only: Make all attributes keyword-only (Python 3+) + in the generated ``__init__`` (if ``init`` is ``False``, this + parameter is ignored). + :param bool cache_hash: Ensure that the object's hash code is computed + only once and stored on the object. If this is set to ``True``, + hashing must be either explicitly or implicitly enabled for this + class. If the hash code is cached, avoid any reassignments of + fields involved in hash code computation or mutations of the objects + those fields point to after object creation. If such changes occur, + the behavior of the object's hash code is undefined. 
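# Small sketch of auto_attribs and kw_only as documented above. Requires
# Python 3.6+ for annotations; names are illustrative only.
import typing
import attr

@attr.s(auto_attribs=True, kw_only=True)
class Config(object):
    host: str
    port: int = 8080
    tags: typing.List[str] = attr.Factory(list)

c = Config(host="localhost")     # every argument must be passed by keyword
assert c.port == 8080 and c.tags == []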
+ :param bool auto_exc: If the class subclasses `BaseException` + (which implicitly includes any subclass of any exception), the + following happens to behave like a well-behaved Python exceptions + class: + + - the values for *eq*, *order*, and *hash* are ignored and the + instances compare and hash by the instance's ids (N.B. ``attrs`` will + *not* remove existing implementations of ``__hash__`` or the equality + methods. It just won't add own ones.), + - all attributes that are either passed into ``__init__`` or have a + default value are additionally available as a tuple in the ``args`` + attribute, + - the value of *str* is ignored leaving ``__str__`` to base classes. + :param bool collect_by_mro: Setting this to `True` fixes the way ``attrs`` + collects attributes from base classes. The default behavior is + incorrect in certain cases of multiple inheritance. It should be on by + default but is kept off for backward-compatibility. + + See issue `#428 `_ for + more details. + + :param Optional[bool] getstate_setstate: + .. note:: + This is usually only interesting for slotted classes and you should + probably just set *auto_detect* to `True`. + + If `True`, ``__getstate__`` and + ``__setstate__`` are generated and attached to the class. This is + necessary for slotted classes to be pickleable. If left `None`, it's + `True` by default for slotted classes and ``False`` for dict classes. + + If *auto_detect* is `True`, and *getstate_setstate* is left `None`, + and **either** ``__getstate__`` or ``__setstate__`` is detected directly + on the class (i.e. not inherited), it is set to `False` (this is usually + what you want). + + :param on_setattr: A callable that is run whenever the user attempts to set + an attribute (either by assignment like ``i.x = 42`` or by using + `setattr` like ``setattr(i, "x", 42)``). It receives the same arguments + as validators: the instance, the attribute that is being modified, and + the new value. + + If no exception is raised, the attribute is set to the return value of + the callable. + + If a list of callables is passed, they're automatically wrapped in an + `attrs.setters.pipe`. + + :param Optional[callable] field_transformer: + A function that is called with the original class object and all + fields right before ``attrs`` finalizes the class. You can use + this, e.g., to automatically add converters or validators to + fields based on their types. See `transform-fields` for more details. + + :param bool match_args: + If `True` (default), set ``__match_args__`` on the class to support + `PEP 634 `_ (Structural + Pattern Matching). It is a tuple of all positional-only ``__init__`` + parameter names on Python 3.10 and later. Ignored on older Python + versions. + + .. versionadded:: 16.0.0 *slots* + .. versionadded:: 16.1.0 *frozen* + .. versionadded:: 16.3.0 *str* + .. versionadded:: 16.3.0 Support for ``__attrs_post_init__``. + .. versionchanged:: 17.1.0 + *hash* supports ``None`` as value which is also the default now. + .. versionadded:: 17.3.0 *auto_attribs* + .. versionchanged:: 18.1.0 + If *these* is passed, no attributes are deleted from the class body. + .. versionchanged:: 18.1.0 If *these* is ordered, the order is retained. + .. versionadded:: 18.2.0 *weakref_slot* + .. deprecated:: 18.2.0 + ``__lt__``, ``__le__``, ``__gt__``, and ``__ge__`` now raise a + `DeprecationWarning` if the classes compared are subclasses of + each other. ``__eq`` and ``__ne__`` never tried to compared subclasses + to each other. + .. 
versionchanged:: 19.2.0 + ``__lt__``, ``__le__``, ``__gt__``, and ``__ge__`` now do not consider + subclasses comparable anymore. + .. versionadded:: 18.2.0 *kw_only* + .. versionadded:: 18.2.0 *cache_hash* + .. versionadded:: 19.1.0 *auto_exc* + .. deprecated:: 19.2.0 *cmp* Removal on or after 2021-06-01. + .. versionadded:: 19.2.0 *eq* and *order* + .. versionadded:: 20.1.0 *auto_detect* + .. versionadded:: 20.1.0 *collect_by_mro* + .. versionadded:: 20.1.0 *getstate_setstate* + .. versionadded:: 20.1.0 *on_setattr* + .. versionadded:: 20.3.0 *field_transformer* + .. versionchanged:: 21.1.0 + ``init=False`` injects ``__attrs_init__`` + .. versionchanged:: 21.1.0 Support for ``__attrs_pre_init__`` + .. versionchanged:: 21.1.0 *cmp* undeprecated + .. versionadded:: 21.3.0 *match_args* + """ + if auto_detect and PY2: + raise PythonTooOldError( + "auto_detect only works on Python 3 and later." + ) + + eq_, order_ = _determine_attrs_eq_order(cmp, eq, order, None) + hash_ = hash # work around the lack of nonlocal + + if isinstance(on_setattr, (list, tuple)): + on_setattr = setters.pipe(*on_setattr) + + def wrap(cls): + + if getattr(cls, "__class__", None) is None: + raise TypeError("attrs only works with new-style classes.") + + is_frozen = frozen or _has_frozen_base_class(cls) + is_exc = auto_exc is True and issubclass(cls, BaseException) + has_own_setattr = auto_detect and _has_own_attribute( + cls, "__setattr__" + ) + + if has_own_setattr and is_frozen: + raise ValueError("Can't freeze a class with a custom __setattr__.") + + builder = _ClassBuilder( + cls, + these, + slots, + is_frozen, + weakref_slot, + _determine_whether_to_implement( + cls, + getstate_setstate, + auto_detect, + ("__getstate__", "__setstate__"), + default=slots, + ), + auto_attribs, + kw_only, + cache_hash, + is_exc, + collect_by_mro, + on_setattr, + has_own_setattr, + field_transformer, + ) + if _determine_whether_to_implement( + cls, repr, auto_detect, ("__repr__",) + ): + builder.add_repr(repr_ns) + if str is True: + builder.add_str() + + eq = _determine_whether_to_implement( + cls, eq_, auto_detect, ("__eq__", "__ne__") + ) + if not is_exc and eq is True: + builder.add_eq() + if not is_exc and _determine_whether_to_implement( + cls, order_, auto_detect, ("__lt__", "__le__", "__gt__", "__ge__") + ): + builder.add_order() + + builder.add_setattr() + + if ( + hash_ is None + and auto_detect is True + and _has_own_attribute(cls, "__hash__") + ): + hash = False + else: + hash = hash_ + if hash is not True and hash is not False and hash is not None: + # Can't use `hash in` because 1 == True for example. + raise TypeError( + "Invalid value for hash. Must be True, False, or None." + ) + elif hash is False or (hash is None and eq is False) or is_exc: + # Don't do anything. Should fall back to __object__'s __hash__ + # which is by id. + if cache_hash: + raise TypeError( + "Invalid value for cache_hash. To use hash caching," + " hashing must be either explicitly or implicitly " + "enabled." + ) + elif hash is True or ( + hash is None and eq is True and is_frozen is True + ): + # Build a __hash__ if told so, or if it's safe. + builder.add_hash() + else: + # Raise TypeError on attempts to hash. + if cache_hash: + raise TypeError( + "Invalid value for cache_hash. To use hash caching," + " hashing must be either explicitly or implicitly " + "enabled." 
+ ) + builder.make_unhashable() + + if _determine_whether_to_implement( + cls, init, auto_detect, ("__init__",) + ): + builder.add_init() + else: + builder.add_attrs_init() + if cache_hash: + raise TypeError( + "Invalid value for cache_hash. To use hash caching," + " init must be True." + ) + + if ( + PY310 + and match_args + and not _has_own_attribute(cls, "__match_args__") + ): + builder.add_match_args() + + return builder.build_class() + + # maybe_cls's type depends on the usage of the decorator. It's a class + # if it's used as `@attrs` but ``None`` if used as `@attrs()`. + if maybe_cls is None: + return wrap + else: + return wrap(maybe_cls) + + +_attrs = attrs +""" +Internal alias so we can use it in functions that take an argument called +*attrs*. +""" + + +if PY2: + + def _has_frozen_base_class(cls): + """ + Check whether *cls* has a frozen ancestor by looking at its + __setattr__. + """ + return ( + getattr(cls.__setattr__, "__module__", None) + == _frozen_setattrs.__module__ + and cls.__setattr__.__name__ == _frozen_setattrs.__name__ + ) + +else: + + def _has_frozen_base_class(cls): + """ + Check whether *cls* has a frozen ancestor by looking at its + __setattr__. + """ + return cls.__setattr__ == _frozen_setattrs + + +def _generate_unique_filename(cls, func_name): + """ + Create a "filename" suitable for a function being generated. + """ + unique_filename = "".format( + func_name, + cls.__module__, + getattr(cls, "__qualname__", cls.__name__), + ) + return unique_filename + + +def _make_hash(cls, attrs, frozen, cache_hash): + attrs = tuple( + a for a in attrs if a.hash is True or (a.hash is None and a.eq is True) + ) + + tab = " " + + unique_filename = _generate_unique_filename(cls, "hash") + type_hash = hash(unique_filename) + + hash_def = "def __hash__(self" + hash_func = "hash((" + closing_braces = "))" + if not cache_hash: + hash_def += "):" + else: + if not PY2: + hash_def += ", *" + + hash_def += ( + ", _cache_wrapper=" + + "__import__('attr._make')._make._CacheHashWrapper):" + ) + hash_func = "_cache_wrapper(" + hash_func + closing_braces += ")" + + method_lines = [hash_def] + + def append_hash_computation_lines(prefix, indent): + """ + Generate the code for actually computing the hash code. + Below this will either be returned directly or used to compute + a value which is then cached, depending on the value of cache_hash + """ + + method_lines.extend( + [ + indent + prefix + hash_func, + indent + " %d," % (type_hash,), + ] + ) + + for a in attrs: + method_lines.append(indent + " self.%s," % a.name) + + method_lines.append(indent + " " + closing_braces) + + if cache_hash: + method_lines.append(tab + "if self.%s is None:" % _hash_cache_field) + if frozen: + append_hash_computation_lines( + "object.__setattr__(self, '%s', " % _hash_cache_field, tab * 2 + ) + method_lines.append(tab * 2 + ")") # close __setattr__ + else: + append_hash_computation_lines( + "self.%s = " % _hash_cache_field, tab * 2 + ) + method_lines.append(tab + "return self.%s" % _hash_cache_field) + else: + append_hash_computation_lines("return ", tab) + + script = "\n".join(method_lines) + return _make_method("__hash__", script, unique_filename) + + +def _add_hash(cls, attrs): + """ + Add a hash method to *cls*. + """ + cls.__hash__ = _make_hash(cls, attrs, frozen=False, cache_hash=False) + return cls + + +def _make_ne(): + """ + Create __ne__ method. + """ + + def __ne__(self, other): + """ + Check equality and either forward a NotImplemented or + return the result negated. 
+ """ + result = self.__eq__(other) + if result is NotImplemented: + return NotImplemented + + return not result + + return __ne__ + + +def _make_eq(cls, attrs): + """ + Create __eq__ method for *cls* with *attrs*. + """ + attrs = [a for a in attrs if a.eq] + + unique_filename = _generate_unique_filename(cls, "eq") + lines = [ + "def __eq__(self, other):", + " if other.__class__ is not self.__class__:", + " return NotImplemented", + ] + + # We can't just do a big self.x = other.x and... clause due to + # irregularities like nan == nan is false but (nan,) == (nan,) is true. + globs = {} + if attrs: + lines.append(" return (") + others = [" ) == ("] + for a in attrs: + if a.eq_key: + cmp_name = "_%s_key" % (a.name,) + # Add the key function to the global namespace + # of the evaluated function. + globs[cmp_name] = a.eq_key + lines.append( + " %s(self.%s)," + % ( + cmp_name, + a.name, + ) + ) + others.append( + " %s(other.%s)," + % ( + cmp_name, + a.name, + ) + ) + else: + lines.append(" self.%s," % (a.name,)) + others.append(" other.%s," % (a.name,)) + + lines += others + [" )"] + else: + lines.append(" return True") + + script = "\n".join(lines) + + return _make_method("__eq__", script, unique_filename, globs) + + +def _make_order(cls, attrs): + """ + Create ordering methods for *cls* with *attrs*. + """ + attrs = [a for a in attrs if a.order] + + def attrs_to_tuple(obj): + """ + Save us some typing. + """ + return tuple( + key(value) if key else value + for value, key in ( + (getattr(obj, a.name), a.order_key) for a in attrs + ) + ) + + def __lt__(self, other): + """ + Automatically created by attrs. + """ + if other.__class__ is self.__class__: + return attrs_to_tuple(self) < attrs_to_tuple(other) + + return NotImplemented + + def __le__(self, other): + """ + Automatically created by attrs. + """ + if other.__class__ is self.__class__: + return attrs_to_tuple(self) <= attrs_to_tuple(other) + + return NotImplemented + + def __gt__(self, other): + """ + Automatically created by attrs. + """ + if other.__class__ is self.__class__: + return attrs_to_tuple(self) > attrs_to_tuple(other) + + return NotImplemented + + def __ge__(self, other): + """ + Automatically created by attrs. + """ + if other.__class__ is self.__class__: + return attrs_to_tuple(self) >= attrs_to_tuple(other) + + return NotImplemented + + return __lt__, __le__, __gt__, __ge__ + + +def _add_eq(cls, attrs=None): + """ + Add equality methods to *cls* with *attrs*. + """ + if attrs is None: + attrs = cls.__attrs_attrs__ + + cls.__eq__ = _make_eq(cls, attrs) + cls.__ne__ = _make_ne() + + return cls + + +if HAS_F_STRINGS: + + def _make_repr(attrs, ns, cls): + unique_filename = _generate_unique_filename(cls, "repr") + # Figure out which attributes to include, and which function to use to + # format them. The a.repr value can be either bool or a custom + # callable. + attr_names_with_reprs = tuple( + (a.name, (repr if a.repr is True else a.repr), a.init) + for a in attrs + if a.repr is not False + ) + globs = { + name + "_repr": r + for name, r, _ in attr_names_with_reprs + if r != repr + } + globs["_compat"] = _compat + globs["AttributeError"] = AttributeError + globs["NOTHING"] = NOTHING + attribute_fragments = [] + for name, r, i in attr_names_with_reprs: + accessor = ( + "self." 
+ name + if i + else 'getattr(self, "' + name + '", NOTHING)' + ) + fragment = ( + "%s={%s!r}" % (name, accessor) + if r == repr + else "%s={%s_repr(%s)}" % (name, name, accessor) + ) + attribute_fragments.append(fragment) + repr_fragment = ", ".join(attribute_fragments) + + if ns is None: + cls_name_fragment = ( + '{self.__class__.__qualname__.rsplit(">.", 1)[-1]}' + ) + else: + cls_name_fragment = ns + ".{self.__class__.__name__}" + + lines = [ + "def __repr__(self):", + " try:", + " already_repring = _compat.repr_context.already_repring", + " except AttributeError:", + " already_repring = {id(self),}", + " _compat.repr_context.already_repring = already_repring", + " else:", + " if id(self) in already_repring:", + " return '...'", + " else:", + " already_repring.add(id(self))", + " try:", + " return f'%s(%s)'" % (cls_name_fragment, repr_fragment), + " finally:", + " already_repring.remove(id(self))", + ] + + return _make_method( + "__repr__", "\n".join(lines), unique_filename, globs=globs + ) + +else: + + def _make_repr(attrs, ns, _): + """ + Make a repr method that includes relevant *attrs*, adding *ns* to the + full name. + """ + + # Figure out which attributes to include, and which function to use to + # format them. The a.repr value can be either bool or a custom + # callable. + attr_names_with_reprs = tuple( + (a.name, repr if a.repr is True else a.repr) + for a in attrs + if a.repr is not False + ) + + def __repr__(self): + """ + Automatically created by attrs. + """ + try: + already_repring = _compat.repr_context.already_repring + except AttributeError: + already_repring = set() + _compat.repr_context.already_repring = already_repring + + if id(self) in already_repring: + return "..." + real_cls = self.__class__ + if ns is None: + qualname = getattr(real_cls, "__qualname__", None) + if qualname is not None: # pragma: no cover + # This case only happens on Python 3.5 and 3.6. We exclude + # it from coverage, because we don't want to slow down our + # test suite by running them under coverage too for this + # one line. + class_name = qualname.rsplit(">.", 1)[-1] + else: + class_name = real_cls.__name__ + else: + class_name = ns + "." + real_cls.__name__ + + # Since 'self' remains on the stack (i.e.: strongly referenced) + # for the duration of this call, it's safe to depend on id(...) + # stability, and not need to track the instance and therefore + # worry about properties like weakref- or hash-ability. + already_repring.add(id(self)) + try: + result = [class_name, "("] + first = True + for name, attr_repr in attr_names_with_reprs: + if first: + first = False + else: + result.append(", ") + result.extend( + (name, "=", attr_repr(getattr(self, name, NOTHING))) + ) + return "".join(result) + ")" + finally: + already_repring.remove(id(self)) + + return __repr__ + + +def _add_repr(cls, ns=None, attrs=None): + """ + Add a repr method to *cls*. + """ + if attrs is None: + attrs = cls.__attrs_attrs__ + + cls.__repr__ = _make_repr(attrs, ns, cls) + return cls + + +def fields(cls): + """ + Return the tuple of ``attrs`` attributes for a class. + + The tuple also allows accessing the fields by their names (see below for + examples). + + :param type cls: Class to introspect. + + :raise TypeError: If *cls* is not a class. + :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs`` + class. + + :rtype: tuple (with name accessors) of `attrs.Attribute` + + .. versionchanged:: 16.2.0 Returned tuple allows accessing the fields + by name. 
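# Sketch of the per-field repr callable handled by _make_repr above
# (example only; the masked-token idea and names are invented).
import attr

@attr.s
class Account(object):
    user = attr.ib()
    token = attr.ib(repr=lambda value: "***")   # custom repr for one field

repr(Account("ada", "s3cr3t"))
# -> "Account(user='ada', token=***)"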
+ """ + if not isclass(cls): + raise TypeError("Passed object must be a class.") + attrs = getattr(cls, "__attrs_attrs__", None) + if attrs is None: + raise NotAnAttrsClassError( + "{cls!r} is not an attrs-decorated class.".format(cls=cls) + ) + return attrs + + +def fields_dict(cls): + """ + Return an ordered dictionary of ``attrs`` attributes for a class, whose + keys are the attribute names. + + :param type cls: Class to introspect. + + :raise TypeError: If *cls* is not a class. + :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs`` + class. + + :rtype: an ordered dict where keys are attribute names and values are + `attrs.Attribute`\\ s. This will be a `dict` if it's + naturally ordered like on Python 3.6+ or an + :class:`~collections.OrderedDict` otherwise. + + .. versionadded:: 18.1.0 + """ + if not isclass(cls): + raise TypeError("Passed object must be a class.") + attrs = getattr(cls, "__attrs_attrs__", None) + if attrs is None: + raise NotAnAttrsClassError( + "{cls!r} is not an attrs-decorated class.".format(cls=cls) + ) + return ordered_dict(((a.name, a) for a in attrs)) + + +def validate(inst): + """ + Validate all attributes on *inst* that have a validator. + + Leaves all exceptions through. + + :param inst: Instance of a class with ``attrs`` attributes. + """ + if _config._run_validators is False: + return + + for a in fields(inst.__class__): + v = a.validator + if v is not None: + v(inst, a, getattr(inst, a.name)) + + +def _is_slot_cls(cls): + return "__slots__" in cls.__dict__ + + +def _is_slot_attr(a_name, base_attr_map): + """ + Check if the attribute name comes from a slot class. + """ + return a_name in base_attr_map and _is_slot_cls(base_attr_map[a_name]) + + +def _make_init( + cls, + attrs, + pre_init, + post_init, + frozen, + slots, + cache_hash, + base_attr_map, + is_exc, + cls_on_setattr, + attrs_init, +): + has_cls_on_setattr = ( + cls_on_setattr is not None and cls_on_setattr is not setters.NO_OP + ) + + if frozen and has_cls_on_setattr: + raise ValueError("Frozen classes can't use on_setattr.") + + needs_cached_setattr = cache_hash or frozen + filtered_attrs = [] + attr_dict = {} + for a in attrs: + if not a.init and a.default is NOTHING: + continue + + filtered_attrs.append(a) + attr_dict[a.name] = a + + if a.on_setattr is not None: + if frozen is True: + raise ValueError("Frozen classes can't use on_setattr.") + + needs_cached_setattr = True + elif has_cls_on_setattr and a.on_setattr is not setters.NO_OP: + needs_cached_setattr = True + + unique_filename = _generate_unique_filename(cls, "init") + + script, globs, annotations = _attrs_to_init_script( + filtered_attrs, + frozen, + slots, + pre_init, + post_init, + cache_hash, + base_attr_map, + is_exc, + needs_cached_setattr, + has_cls_on_setattr, + attrs_init, + ) + if cls.__module__ in sys.modules: + # This makes typing.get_type_hints(CLS.__init__) resolve string types. + globs.update(sys.modules[cls.__module__].__dict__) + + globs.update({"NOTHING": NOTHING, "attr_dict": attr_dict}) + + if needs_cached_setattr: + # Save the lookup overhead in __init__ if we need to circumvent + # setattr hooks. + globs["_cached_setattr"] = _obj_setattr + + init = _make_method( + "__attrs_init__" if attrs_init else "__init__", + script, + unique_filename, + globs, + ) + init.__annotations__ = annotations + + return init + + +def _setattr(attr_name, value_var, has_on_setattr): + """ + Use the cached object.setattr to set *attr_name* to *value_var*. 
+ """ + return "_setattr('%s', %s)" % (attr_name, value_var) + + +def _setattr_with_converter(attr_name, value_var, has_on_setattr): + """ + Use the cached object.setattr to set *attr_name* to *value_var*, but run + its converter first. + """ + return "_setattr('%s', %s(%s))" % ( + attr_name, + _init_converter_pat % (attr_name,), + value_var, + ) + + +def _assign(attr_name, value, has_on_setattr): + """ + Unless *attr_name* has an on_setattr hook, use normal assignment. Otherwise + relegate to _setattr. + """ + if has_on_setattr: + return _setattr(attr_name, value, True) + + return "self.%s = %s" % (attr_name, value) + + +def _assign_with_converter(attr_name, value_var, has_on_setattr): + """ + Unless *attr_name* has an on_setattr hook, use normal assignment after + conversion. Otherwise relegate to _setattr_with_converter. + """ + if has_on_setattr: + return _setattr_with_converter(attr_name, value_var, True) + + return "self.%s = %s(%s)" % ( + attr_name, + _init_converter_pat % (attr_name,), + value_var, + ) + + +if PY2: + + def _unpack_kw_only_py2(attr_name, default=None): + """ + Unpack *attr_name* from _kw_only dict. + """ + if default is not None: + arg_default = ", %s" % default + else: + arg_default = "" + return "%s = _kw_only.pop('%s'%s)" % ( + attr_name, + attr_name, + arg_default, + ) + + def _unpack_kw_only_lines_py2(kw_only_args): + """ + Unpack all *kw_only_args* from _kw_only dict and handle errors. + + Given a list of strings "{attr_name}" and "{attr_name}={default}" + generates list of lines of code that pop attrs from _kw_only dict and + raise TypeError similar to builtin if required attr is missing or + extra key is passed. + + >>> print("\n".join(_unpack_kw_only_lines_py2(["a", "b=42"]))) + try: + a = _kw_only.pop('a') + b = _kw_only.pop('b', 42) + except KeyError as _key_error: + raise TypeError( + ... + if _kw_only: + raise TypeError( + ... + """ + lines = ["try:"] + lines.extend( + " " + _unpack_kw_only_py2(*arg.split("=")) + for arg in kw_only_args + ) + lines += """\ +except KeyError as _key_error: + raise TypeError( + '__init__() missing required keyword-only argument: %s' % _key_error + ) +if _kw_only: + raise TypeError( + '__init__() got an unexpected keyword argument %r' + % next(iter(_kw_only)) + ) +""".split( + "\n" + ) + return lines + + +def _attrs_to_init_script( + attrs, + frozen, + slots, + pre_init, + post_init, + cache_hash, + base_attr_map, + is_exc, + needs_cached_setattr, + has_cls_on_setattr, + attrs_init, +): + """ + Return a script of an initializer for *attrs* and a dict of globals. + + The globals are expected by the generated script. + + If *frozen* is True, we cannot set the attributes directly so we use + a cached ``object.__setattr__``. + """ + lines = [] + if pre_init: + lines.append("self.__attrs_pre_init__()") + + if needs_cached_setattr: + lines.append( + # Circumvent the __setattr__ descriptor to save one lookup per + # assignment. + # Note _setattr will be used again below if cache_hash is True + "_setattr = _cached_setattr.__get__(self, self.__class__)" + ) + + if frozen is True: + if slots is True: + fmt_setter = _setattr + fmt_setter_with_converter = _setattr_with_converter + else: + # Dict frozen classes assign directly to __dict__. + # But only if the attribute doesn't come from an ancestor slot + # class. 
+ # Note _inst_dict will be used again below if cache_hash is True + lines.append("_inst_dict = self.__dict__") + + def fmt_setter(attr_name, value_var, has_on_setattr): + if _is_slot_attr(attr_name, base_attr_map): + return _setattr(attr_name, value_var, has_on_setattr) + + return "_inst_dict['%s'] = %s" % (attr_name, value_var) + + def fmt_setter_with_converter( + attr_name, value_var, has_on_setattr + ): + if has_on_setattr or _is_slot_attr(attr_name, base_attr_map): + return _setattr_with_converter( + attr_name, value_var, has_on_setattr + ) + + return "_inst_dict['%s'] = %s(%s)" % ( + attr_name, + _init_converter_pat % (attr_name,), + value_var, + ) + + else: + # Not frozen. + fmt_setter = _assign + fmt_setter_with_converter = _assign_with_converter + + args = [] + kw_only_args = [] + attrs_to_validate = [] + + # This is a dictionary of names to validator and converter callables. + # Injecting this into __init__ globals lets us avoid lookups. + names_for_globals = {} + annotations = {"return": None} + + for a in attrs: + if a.validator: + attrs_to_validate.append(a) + + attr_name = a.name + has_on_setattr = a.on_setattr is not None or ( + a.on_setattr is not setters.NO_OP and has_cls_on_setattr + ) + arg_name = a.name.lstrip("_") + + has_factory = isinstance(a.default, Factory) + if has_factory and a.default.takes_self: + maybe_self = "self" + else: + maybe_self = "" + + if a.init is False: + if has_factory: + init_factory_name = _init_factory_pat.format(a.name) + if a.converter is not None: + lines.append( + fmt_setter_with_converter( + attr_name, + init_factory_name + "(%s)" % (maybe_self,), + has_on_setattr, + ) + ) + conv_name = _init_converter_pat % (a.name,) + names_for_globals[conv_name] = a.converter + else: + lines.append( + fmt_setter( + attr_name, + init_factory_name + "(%s)" % (maybe_self,), + has_on_setattr, + ) + ) + names_for_globals[init_factory_name] = a.default.factory + else: + if a.converter is not None: + lines.append( + fmt_setter_with_converter( + attr_name, + "attr_dict['%s'].default" % (attr_name,), + has_on_setattr, + ) + ) + conv_name = _init_converter_pat % (a.name,) + names_for_globals[conv_name] = a.converter + else: + lines.append( + fmt_setter( + attr_name, + "attr_dict['%s'].default" % (attr_name,), + has_on_setattr, + ) + ) + elif a.default is not NOTHING and not has_factory: + arg = "%s=attr_dict['%s'].default" % (arg_name, attr_name) + if a.kw_only: + kw_only_args.append(arg) + else: + args.append(arg) + + if a.converter is not None: + lines.append( + fmt_setter_with_converter( + attr_name, arg_name, has_on_setattr + ) + ) + names_for_globals[ + _init_converter_pat % (a.name,) + ] = a.converter + else: + lines.append(fmt_setter(attr_name, arg_name, has_on_setattr)) + + elif has_factory: + arg = "%s=NOTHING" % (arg_name,) + if a.kw_only: + kw_only_args.append(arg) + else: + args.append(arg) + lines.append("if %s is not NOTHING:" % (arg_name,)) + + init_factory_name = _init_factory_pat.format(a.name) + if a.converter is not None: + lines.append( + " " + + fmt_setter_with_converter( + attr_name, arg_name, has_on_setattr + ) + ) + lines.append("else:") + lines.append( + " " + + fmt_setter_with_converter( + attr_name, + init_factory_name + "(" + maybe_self + ")", + has_on_setattr, + ) + ) + names_for_globals[ + _init_converter_pat % (a.name,) + ] = a.converter + else: + lines.append( + " " + fmt_setter(attr_name, arg_name, has_on_setattr) + ) + lines.append("else:") + lines.append( + " " + + fmt_setter( + attr_name, + init_factory_name + "(" + maybe_self 
+ ")", + has_on_setattr, + ) + ) + names_for_globals[init_factory_name] = a.default.factory + else: + if a.kw_only: + kw_only_args.append(arg_name) + else: + args.append(arg_name) + + if a.converter is not None: + lines.append( + fmt_setter_with_converter( + attr_name, arg_name, has_on_setattr + ) + ) + names_for_globals[ + _init_converter_pat % (a.name,) + ] = a.converter + else: + lines.append(fmt_setter(attr_name, arg_name, has_on_setattr)) + + if a.init is True: + if a.type is not None and a.converter is None: + annotations[arg_name] = a.type + elif a.converter is not None and not PY2: + # Try to get the type from the converter. + sig = None + try: + sig = inspect.signature(a.converter) + except (ValueError, TypeError): # inspect failed + pass + if sig: + sig_params = list(sig.parameters.values()) + if ( + sig_params + and sig_params[0].annotation + is not inspect.Parameter.empty + ): + annotations[arg_name] = sig_params[0].annotation + + if attrs_to_validate: # we can skip this if there are no validators. + names_for_globals["_config"] = _config + lines.append("if _config._run_validators is True:") + for a in attrs_to_validate: + val_name = "__attr_validator_" + a.name + attr_name = "__attr_" + a.name + lines.append( + " %s(self, %s, self.%s)" % (val_name, attr_name, a.name) + ) + names_for_globals[val_name] = a.validator + names_for_globals[attr_name] = a + + if post_init: + lines.append("self.__attrs_post_init__()") + + # because this is set only after __attrs_post_init is called, a crash + # will result if post-init tries to access the hash code. This seemed + # preferable to setting this beforehand, in which case alteration to + # field values during post-init combined with post-init accessing the + # hash code would result in silent bugs. + if cache_hash: + if frozen: + if slots: + # if frozen and slots, then _setattr defined above + init_hash_cache = "_setattr('%s', %s)" + else: + # if frozen and not slots, then _inst_dict defined above + init_hash_cache = "_inst_dict['%s'] = %s" + else: + init_hash_cache = "self.%s = %s" + lines.append(init_hash_cache % (_hash_cache_field, "None")) + + # For exceptions we rely on BaseException.__init__ for proper + # initialization. + if is_exc: + vals = ",".join("self." + a.name for a in attrs if a.init) + + lines.append("BaseException.__init__(self, %s)" % (vals,)) + + args = ", ".join(args) + if kw_only_args: + if PY2: + lines = _unpack_kw_only_lines_py2(kw_only_args) + lines + + args += "%s**_kw_only" % (", " if args else "",) # leading comma + else: + args += "%s*, %s" % ( + ", " if args else "", # leading comma + ", ".join(kw_only_args), # kw_only args + ) + return ( + """\ +def {init_name}(self, {args}): + {lines} +""".format( + init_name=("__attrs_init__" if attrs_init else "__init__"), + args=args, + lines="\n ".join(lines) if lines else "pass", + ), + names_for_globals, + annotations, + ) + + +class Attribute(object): + """ + *Read-only* representation of an attribute. + + The class has *all* arguments of `attr.ib` (except for ``factory`` + which is only syntactic sugar for ``default=Factory(...)`` plus the + following: + + - ``name`` (`str`): The name of the attribute. + - ``inherited`` (`bool`): Whether or not that attribute has been inherited + from a base class. + - ``eq_key`` and ``order_key`` (`typing.Callable` or `None`): The callables + that are used for comparing and ordering objects by this attribute, + respectively. These are set by passing a callable to `attr.ib`'s ``eq``, + ``order``, or ``cmp`` arguments. 
See also :ref:`comparison customization + `. + + Instances of this class are frequently used for introspection purposes + like: + + - `fields` returns a tuple of them. + - Validators get them passed as the first argument. + - The :ref:`field transformer ` hook receives a list of + them. + + .. versionadded:: 20.1.0 *inherited* + .. versionadded:: 20.1.0 *on_setattr* + .. versionchanged:: 20.2.0 *inherited* is not taken into account for + equality checks and hashing anymore. + .. versionadded:: 21.1.0 *eq_key* and *order_key* + + For the full version history of the fields, see `attr.ib`. + """ + + __slots__ = ( + "name", + "default", + "validator", + "repr", + "eq", + "eq_key", + "order", + "order_key", + "hash", + "init", + "metadata", + "type", + "converter", + "kw_only", + "inherited", + "on_setattr", + ) + + def __init__( + self, + name, + default, + validator, + repr, + cmp, # XXX: unused, remove along with other cmp code. + hash, + init, + inherited, + metadata=None, + type=None, + converter=None, + kw_only=False, + eq=None, + eq_key=None, + order=None, + order_key=None, + on_setattr=None, + ): + eq, eq_key, order, order_key = _determine_attrib_eq_order( + cmp, eq_key or eq, order_key or order, True + ) + + # Cache this descriptor here to speed things up later. + bound_setattr = _obj_setattr.__get__(self, Attribute) + + # Despite the big red warning, people *do* instantiate `Attribute` + # themselves. + bound_setattr("name", name) + bound_setattr("default", default) + bound_setattr("validator", validator) + bound_setattr("repr", repr) + bound_setattr("eq", eq) + bound_setattr("eq_key", eq_key) + bound_setattr("order", order) + bound_setattr("order_key", order_key) + bound_setattr("hash", hash) + bound_setattr("init", init) + bound_setattr("converter", converter) + bound_setattr( + "metadata", + ( + metadata_proxy(metadata) + if metadata + else _empty_metadata_singleton + ), + ) + bound_setattr("type", type) + bound_setattr("kw_only", kw_only) + bound_setattr("inherited", inherited) + bound_setattr("on_setattr", on_setattr) + + def __setattr__(self, name, value): + raise FrozenInstanceError() + + @classmethod + def from_counting_attr(cls, name, ca, type=None): + # type holds the annotated value. deal with conflicts: + if type is None: + type = ca.type + elif ca.type is not None: + raise ValueError( + "Type annotation and type argument cannot both be present" + ) + inst_dict = { + k: getattr(ca, k) + for k in Attribute.__slots__ + if k + not in ( + "name", + "validator", + "default", + "type", + "inherited", + ) # exclude methods and deprecated alias + } + return cls( + name=name, + validator=ca._validator, + default=ca._default, + type=type, + cmp=None, + inherited=False, + **inst_dict + ) + + @property + def cmp(self): + """ + Simulate the presence of a cmp attribute and warn. + """ + warnings.warn(_CMP_DEPRECATION, DeprecationWarning, stacklevel=2) + + return self.eq and self.order + + # Don't use attr.evolve since fields(Attribute) doesn't work + def evolve(self, **changes): + """ + Copy *self* and apply *changes*. + + This works similarly to `attr.evolve` but that function does not work + with ``Attribute``. + + It is mainly meant to be used for `transform-fields`. + + .. versionadded:: 20.3.0 + """ + new = copy.copy(self) + + new._setattrs(changes.items()) + + return new + + # Don't use _add_pickle since fields(Attribute) doesn't work + def __getstate__(self): + """ + Play nice with pickle. 
+ """ + return tuple( + getattr(self, name) if name != "metadata" else dict(self.metadata) + for name in self.__slots__ + ) + + def __setstate__(self, state): + """ + Play nice with pickle. + """ + self._setattrs(zip(self.__slots__, state)) + + def _setattrs(self, name_values_pairs): + bound_setattr = _obj_setattr.__get__(self, Attribute) + for name, value in name_values_pairs: + if name != "metadata": + bound_setattr(name, value) + else: + bound_setattr( + name, + metadata_proxy(value) + if value + else _empty_metadata_singleton, + ) + + +_a = [ + Attribute( + name=name, + default=NOTHING, + validator=None, + repr=True, + cmp=None, + eq=True, + order=False, + hash=(name != "metadata"), + init=True, + inherited=False, + ) + for name in Attribute.__slots__ +] + +Attribute = _add_hash( + _add_eq( + _add_repr(Attribute, attrs=_a), + attrs=[a for a in _a if a.name != "inherited"], + ), + attrs=[a for a in _a if a.hash and a.name != "inherited"], +) + + +class _CountingAttr(object): + """ + Intermediate representation of attributes that uses a counter to preserve + the order in which the attributes have been defined. + + *Internal* data structure of the attrs library. Running into is most + likely the result of a bug like a forgotten `@attr.s` decorator. + """ + + __slots__ = ( + "counter", + "_default", + "repr", + "eq", + "eq_key", + "order", + "order_key", + "hash", + "init", + "metadata", + "_validator", + "converter", + "type", + "kw_only", + "on_setattr", + ) + __attrs_attrs__ = tuple( + Attribute( + name=name, + default=NOTHING, + validator=None, + repr=True, + cmp=None, + hash=True, + init=True, + kw_only=False, + eq=True, + eq_key=None, + order=False, + order_key=None, + inherited=False, + on_setattr=None, + ) + for name in ( + "counter", + "_default", + "repr", + "eq", + "order", + "hash", + "init", + "on_setattr", + ) + ) + ( + Attribute( + name="metadata", + default=None, + validator=None, + repr=True, + cmp=None, + hash=False, + init=True, + kw_only=False, + eq=True, + eq_key=None, + order=False, + order_key=None, + inherited=False, + on_setattr=None, + ), + ) + cls_counter = 0 + + def __init__( + self, + default, + validator, + repr, + cmp, + hash, + init, + converter, + metadata, + type, + kw_only, + eq, + eq_key, + order, + order_key, + on_setattr, + ): + _CountingAttr.cls_counter += 1 + self.counter = _CountingAttr.cls_counter + self._default = default + self._validator = validator + self.converter = converter + self.repr = repr + self.eq = eq + self.eq_key = eq_key + self.order = order + self.order_key = order_key + self.hash = hash + self.init = init + self.metadata = metadata + self.type = type + self.kw_only = kw_only + self.on_setattr = on_setattr + + def validator(self, meth): + """ + Decorator that adds *meth* to the list of validators. + + Returns *meth* unchanged. + + .. versionadded:: 17.1.0 + """ + if self._validator is None: + self._validator = meth + else: + self._validator = and_(self._validator, meth) + return meth + + def default(self, meth): + """ + Decorator that allows to set the default for an attribute. + + Returns *meth* unchanged. + + :raises DefaultAlreadySetError: If default has been set before. + + .. versionadded:: 17.1.0 + """ + if self._default is not NOTHING: + raise DefaultAlreadySetError() + + self._default = Factory(meth, takes_self=True) + + return meth + + +_CountingAttr = _add_eq(_add_repr(_CountingAttr)) + + +class Factory(object): + """ + Stores a factory callable. 
+ + If passed as the default value to `attrs.field`, the factory is used to + generate a new value. + + :param callable factory: A callable that takes either none or exactly one + mandatory positional argument depending on *takes_self*. + :param bool takes_self: Pass the partially initialized instance that is + being initialized as a positional argument. + + .. versionadded:: 17.1.0 *takes_self* + """ + + __slots__ = ("factory", "takes_self") + + def __init__(self, factory, takes_self=False): + """ + `Factory` is part of the default machinery so if we want a default + value here, we have to implement it ourselves. + """ + self.factory = factory + self.takes_self = takes_self + + def __getstate__(self): + """ + Play nice with pickle. + """ + return tuple(getattr(self, name) for name in self.__slots__) + + def __setstate__(self, state): + """ + Play nice with pickle. + """ + for name, value in zip(self.__slots__, state): + setattr(self, name, value) + + +_f = [ + Attribute( + name=name, + default=NOTHING, + validator=None, + repr=True, + cmp=None, + eq=True, + order=False, + hash=True, + init=True, + inherited=False, + ) + for name in Factory.__slots__ +] + +Factory = _add_hash(_add_eq(_add_repr(Factory, attrs=_f), attrs=_f), attrs=_f) + + +def make_class(name, attrs, bases=(object,), **attributes_arguments): + """ + A quick way to create a new class called *name* with *attrs*. + + :param str name: The name for the new class. + + :param attrs: A list of names or a dictionary of mappings of names to + attributes. + + If *attrs* is a list or an ordered dict (`dict` on Python 3.6+, + `collections.OrderedDict` otherwise), the order is deduced from + the order of the names or attributes inside *attrs*. Otherwise the + order of the definition of the attributes is used. + :type attrs: `list` or `dict` + + :param tuple bases: Classes that the new class will subclass. + + :param attributes_arguments: Passed unmodified to `attr.s`. + + :return: A new class with *attrs*. + :rtype: type + + .. versionadded:: 17.1.0 *bases* + .. versionchanged:: 18.1.0 If *attrs* is ordered, the order is retained. + """ + if isinstance(attrs, dict): + cls_dict = attrs + elif isinstance(attrs, (list, tuple)): + cls_dict = dict((a, attrib()) for a in attrs) + else: + raise TypeError("attrs argument must be a dict or a list.") + + pre_init = cls_dict.pop("__attrs_pre_init__", None) + post_init = cls_dict.pop("__attrs_post_init__", None) + user_init = cls_dict.pop("__init__", None) + + body = {} + if pre_init is not None: + body["__attrs_pre_init__"] = pre_init + if post_init is not None: + body["__attrs_post_init__"] = post_init + if user_init is not None: + body["__init__"] = user_init + + type_ = new_class(name, bases, {}, lambda ns: ns.update(body)) + + # For pickling to work, the __module__ variable needs to be set to the + # frame where the class is created. Bypass this step in environments where + # sys._getframe is not defined (Jython for example) or sys._getframe is not + # defined for arguments greater than 0 (IronPython). + try: + type_.__module__ = sys._getframe(1).f_globals.get( + "__name__", "__main__" + ) + except (AttributeError, ValueError): + pass + + # We do it here for proper warnings with meaningful stacklevel. 
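# Quick sketch of make_class() as documented above (class names invented).
import attr

Point = attr.make_class("Point", ["x", "y"], frozen=True)
assert Point(1, 2) == Point(1, 2)

# Attributes can also be passed fully specified:
Line = attr.make_class("Line", {"a": attr.ib(default=0), "b": attr.ib(default=0)})
assert Line().a == 0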
+ cmp = attributes_arguments.pop("cmp", None) + ( + attributes_arguments["eq"], + attributes_arguments["order"], + ) = _determine_attrs_eq_order( + cmp, + attributes_arguments.get("eq"), + attributes_arguments.get("order"), + True, + ) + + return _attrs(these=cls_dict, **attributes_arguments)(type_) + + +# These are required by within this module so we define them here and merely +# import into .validators / .converters. + + +@attrs(slots=True, hash=True) +class _AndValidator(object): + """ + Compose many validators to a single one. + """ + + _validators = attrib() + + def __call__(self, inst, attr, value): + for v in self._validators: + v(inst, attr, value) + + +def and_(*validators): + """ + A validator that composes multiple validators into one. + + When called on a value, it runs all wrapped validators. + + :param callables validators: Arbitrary number of validators. + + .. versionadded:: 17.1.0 + """ + vals = [] + for validator in validators: + vals.extend( + validator._validators + if isinstance(validator, _AndValidator) + else [validator] + ) + + return _AndValidator(tuple(vals)) + + +def pipe(*converters): + """ + A converter that composes multiple converters into one. + + When called on a value, it runs all wrapped converters, returning the + *last* value. + + Type annotations will be inferred from the wrapped converters', if + they have any. + + :param callables converters: Arbitrary number of converters. + + .. versionadded:: 20.1.0 + """ + + def pipe_converter(val): + for converter in converters: + val = converter(val) + + return val + + if not PY2: + if not converters: + # If the converter list is empty, pipe_converter is the identity. + A = typing.TypeVar("A") + pipe_converter.__annotations__ = {"val": A, "return": A} + else: + # Get parameter type. + sig = None + try: + sig = inspect.signature(converters[0]) + except (ValueError, TypeError): # inspect failed + pass + if sig: + params = list(sig.parameters.values()) + if ( + params + and params[0].annotation is not inspect.Parameter.empty + ): + pipe_converter.__annotations__["val"] = params[ + 0 + ].annotation + # Get return type. + sig = None + try: + sig = inspect.signature(converters[-1]) + except (ValueError, TypeError): # inspect failed + pass + if sig and sig.return_annotation is not inspect.Signature().empty: + pipe_converter.__annotations__[ + "return" + ] = sig.return_annotation + + return pipe_converter diff --git a/openpype/vendor/python/python_2/attr/_next_gen.py b/openpype/vendor/python/python_2/attr/_next_gen.py new file mode 100644 index 0000000000..068253688c --- /dev/null +++ b/openpype/vendor/python/python_2/attr/_next_gen.py @@ -0,0 +1,216 @@ +# SPDX-License-Identifier: MIT + +""" +These are Python 3.6+-only and keyword-only APIs that call `attr.s` and +`attr.ib` with different default values. +""" + + +from functools import partial + +from . import setters +from ._funcs import asdict as _asdict +from ._funcs import astuple as _astuple +from ._make import ( + NOTHING, + _frozen_setattrs, + _ng_default_on_setattr, + attrib, + attrs, +) +from .exceptions import UnannotatedAttributeError + + +def define( + maybe_cls=None, + *, + these=None, + repr=None, + hash=None, + init=None, + slots=True, + frozen=False, + weakref_slot=True, + str=False, + auto_attribs=None, + kw_only=False, + cache_hash=False, + auto_exc=True, + eq=None, + order=False, + auto_detect=True, + getstate_setstate=None, + on_setattr=None, + field_transformer=None, + match_args=True, +): + r""" + Define an ``attrs`` class. 
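# Sketch of the next-generation define()/field() API from _next_gen.py
# (Python 3.6+ only; this module is skipped on Python 2; names invented).
import attr

@attr.define                       # slots=True, auto_detect=True, converters on setattr
class Node:
    name: str
    children: list = attr.field(factory=list)

root = Node("root")
root.children.append(Node("leaf"))
# attr.frozen is the same recipe with frozen=True; attr.mutable is an alias of define.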
+ + Differences to the classic `attr.s` that it uses underneath: + + - Automatically detect whether or not *auto_attribs* should be `True` + (c.f. *auto_attribs* parameter). + - If *frozen* is `False`, run converters and validators when setting an + attribute by default. + - *slots=True* (see :term:`slotted classes` for potentially surprising + behaviors) + - *auto_exc=True* + - *auto_detect=True* + - *order=False* + - *match_args=True* + - Some options that were only relevant on Python 2 or were kept around for + backwards-compatibility have been removed. + + Please note that these are all defaults and you can change them as you + wish. + + :param Optional[bool] auto_attribs: If set to `True` or `False`, it behaves + exactly like `attr.s`. If left `None`, `attr.s` will try to guess: + + 1. If any attributes are annotated and no unannotated `attrs.fields`\ s + are found, it assumes *auto_attribs=True*. + 2. Otherwise it assumes *auto_attribs=False* and tries to collect + `attrs.fields`\ s. + + For now, please refer to `attr.s` for the rest of the parameters. + + .. versionadded:: 20.1.0 + .. versionchanged:: 21.3.0 Converters are also run ``on_setattr``. + """ + + def do_it(cls, auto_attribs): + return attrs( + maybe_cls=cls, + these=these, + repr=repr, + hash=hash, + init=init, + slots=slots, + frozen=frozen, + weakref_slot=weakref_slot, + str=str, + auto_attribs=auto_attribs, + kw_only=kw_only, + cache_hash=cache_hash, + auto_exc=auto_exc, + eq=eq, + order=order, + auto_detect=auto_detect, + collect_by_mro=True, + getstate_setstate=getstate_setstate, + on_setattr=on_setattr, + field_transformer=field_transformer, + match_args=match_args, + ) + + def wrap(cls): + """ + Making this a wrapper ensures this code runs during class creation. + + We also ensure that frozen-ness of classes is inherited. + """ + nonlocal frozen, on_setattr + + had_on_setattr = on_setattr not in (None, setters.NO_OP) + + # By default, mutable classes convert & validate on setattr. + if frozen is False and on_setattr is None: + on_setattr = _ng_default_on_setattr + + # However, if we subclass a frozen class, we inherit the immutability + # and disable on_setattr. + for base_cls in cls.__bases__: + if base_cls.__setattr__ is _frozen_setattrs: + if had_on_setattr: + raise ValueError( + "Frozen classes can't use on_setattr " + "(frozen-ness was inherited)." + ) + + on_setattr = setters.NO_OP + break + + if auto_attribs is not None: + return do_it(cls, auto_attribs) + + try: + return do_it(cls, True) + except UnannotatedAttributeError: + return do_it(cls, False) + + # maybe_cls's type depends on the usage of the decorator. It's a class + # if it's used as `@attrs` but ``None`` if used as `@attrs()`. + if maybe_cls is None: + return wrap + else: + return wrap(maybe_cls) + + +mutable = define +frozen = partial(define, frozen=True, on_setattr=None) + + +def field( + *, + default=NOTHING, + validator=None, + repr=True, + hash=None, + init=True, + metadata=None, + converter=None, + factory=None, + kw_only=False, + eq=None, + order=None, + on_setattr=None, +): + """ + Identical to `attr.ib`, except keyword-only and with some arguments + removed. + + .. 
versionadded:: 20.1.0 + """ + return attrib( + default=default, + validator=validator, + repr=repr, + hash=hash, + init=init, + metadata=metadata, + converter=converter, + factory=factory, + kw_only=kw_only, + eq=eq, + order=order, + on_setattr=on_setattr, + ) + + +def asdict(inst, *, recurse=True, filter=None, value_serializer=None): + """ + Same as `attr.asdict`, except that collections types are always retained + and dict is always used as *dict_factory*. + + .. versionadded:: 21.3.0 + """ + return _asdict( + inst=inst, + recurse=recurse, + filter=filter, + value_serializer=value_serializer, + retain_collection_types=True, + ) + + +def astuple(inst, *, recurse=True, filter=None): + """ + Same as `attr.astuple`, except that collections types are always retained + and `tuple` is always used as the *tuple_factory*. + + .. versionadded:: 21.3.0 + """ + return _astuple( + inst=inst, recurse=recurse, filter=filter, retain_collection_types=True + ) diff --git a/openpype/vendor/python/python_2/attr/_version_info.py b/openpype/vendor/python/python_2/attr/_version_info.py new file mode 100644 index 0000000000..cdaeec37a1 --- /dev/null +++ b/openpype/vendor/python/python_2/attr/_version_info.py @@ -0,0 +1,87 @@ +# SPDX-License-Identifier: MIT + +from __future__ import absolute_import, division, print_function + +from functools import total_ordering + +from ._funcs import astuple +from ._make import attrib, attrs + + +@total_ordering +@attrs(eq=False, order=False, slots=True, frozen=True) +class VersionInfo(object): + """ + A version object that can be compared to tuple of length 1--4: + + >>> attr.VersionInfo(19, 1, 0, "final") <= (19, 2) + True + >>> attr.VersionInfo(19, 1, 0, "final") < (19, 1, 1) + True + >>> vi = attr.VersionInfo(19, 2, 0, "final") + >>> vi < (19, 1, 1) + False + >>> vi < (19,) + False + >>> vi == (19, 2,) + True + >>> vi == (19, 2, 1) + False + + .. versionadded:: 19.2 + """ + + year = attrib(type=int) + minor = attrib(type=int) + micro = attrib(type=int) + releaselevel = attrib(type=str) + + @classmethod + def _from_version_string(cls, s): + """ + Parse *s* and return a _VersionInfo. + """ + v = s.split(".") + if len(v) == 3: + v.append("final") + + return cls( + year=int(v[0]), minor=int(v[1]), micro=int(v[2]), releaselevel=v[3] + ) + + def _ensure_tuple(self, other): + """ + Ensure *other* is a tuple of a valid length. + + Returns a possibly transformed *other* and ourselves as a tuple of + the same length as *other*. + """ + + if self.__class__ is other.__class__: + other = astuple(other) + + if not isinstance(other, tuple): + raise NotImplementedError + + if not (1 <= len(other) <= 4): + raise NotImplementedError + + return astuple(self)[: len(other)], other + + def __eq__(self, other): + try: + us, them = self._ensure_tuple(other) + except NotImplementedError: + return NotImplemented + + return us == them + + def __lt__(self, other): + try: + us, them = self._ensure_tuple(other) + except NotImplementedError: + return NotImplemented + + # Since alphabetically "dev0" < "final" < "post1" < "post2", we don't + # have to do anything special with releaselevel for now. + return us < them diff --git a/openpype/vendor/python/python_2/attr/_version_info.pyi b/openpype/vendor/python/python_2/attr/_version_info.pyi new file mode 100644 index 0000000000..45ced08633 --- /dev/null +++ b/openpype/vendor/python/python_2/attr/_version_info.pyi @@ -0,0 +1,9 @@ +class VersionInfo: + @property + def year(self) -> int: ... + @property + def minor(self) -> int: ... 
+ @property + def micro(self) -> int: ... + @property + def releaselevel(self) -> str: ... diff --git a/openpype/vendor/python/python_2/attr/converters.py b/openpype/vendor/python/python_2/attr/converters.py new file mode 100644 index 0000000000..1fb6c05d7b --- /dev/null +++ b/openpype/vendor/python/python_2/attr/converters.py @@ -0,0 +1,155 @@ +# SPDX-License-Identifier: MIT + +""" +Commonly useful converters. +""" + +from __future__ import absolute_import, division, print_function + +from ._compat import PY2 +from ._make import NOTHING, Factory, pipe + + +if not PY2: + import inspect + import typing + + +__all__ = [ + "default_if_none", + "optional", + "pipe", + "to_bool", +] + + +def optional(converter): + """ + A converter that allows an attribute to be optional. An optional attribute + is one which can be set to ``None``. + + Type annotations will be inferred from the wrapped converter's, if it + has any. + + :param callable converter: the converter that is used for non-``None`` + values. + + .. versionadded:: 17.1.0 + """ + + def optional_converter(val): + if val is None: + return None + return converter(val) + + if not PY2: + sig = None + try: + sig = inspect.signature(converter) + except (ValueError, TypeError): # inspect failed + pass + if sig: + params = list(sig.parameters.values()) + if params and params[0].annotation is not inspect.Parameter.empty: + optional_converter.__annotations__["val"] = typing.Optional[ + params[0].annotation + ] + if sig.return_annotation is not inspect.Signature.empty: + optional_converter.__annotations__["return"] = typing.Optional[ + sig.return_annotation + ] + + return optional_converter + + +def default_if_none(default=NOTHING, factory=None): + """ + A converter that allows to replace ``None`` values by *default* or the + result of *factory*. + + :param default: Value to be used if ``None`` is passed. Passing an instance + of `attrs.Factory` is supported, however the ``takes_self`` option + is *not*. + :param callable factory: A callable that takes no parameters whose result + is used if ``None`` is passed. + + :raises TypeError: If **neither** *default* or *factory* is passed. + :raises TypeError: If **both** *default* and *factory* are passed. + :raises ValueError: If an instance of `attrs.Factory` is passed with + ``takes_self=True``. + + .. versionadded:: 18.2.0 + """ + if default is NOTHING and factory is None: + raise TypeError("Must pass either `default` or `factory`.") + + if default is not NOTHING and factory is not None: + raise TypeError( + "Must pass either `default` or `factory` but not both." + ) + + if factory is not None: + default = Factory(factory) + + if isinstance(default, Factory): + if default.takes_self: + raise ValueError( + "`takes_self` is not supported by default_if_none." + ) + + def default_if_none_converter(val): + if val is not None: + return val + + return default.factory() + + else: + + def default_if_none_converter(val): + if val is not None: + return val + + return default + + return default_if_none_converter + + +def to_bool(val): + """ + Convert "boolean" strings (e.g., from env. vars.) to real booleans. + + Values mapping to :code:`True`: + + - :code:`True` + - :code:`"true"` / :code:`"t"` + - :code:`"yes"` / :code:`"y"` + - :code:`"on"` + - :code:`"1"` + - :code:`1` + + Values mapping to :code:`False`: + + - :code:`False` + - :code:`"false"` / :code:`"f"` + - :code:`"no"` / :code:`"n"` + - :code:`"off"` + - :code:`"0"` + - :code:`0` + + :raises ValueError: for any other value. + + .. 
versionadded:: 21.3.0 + """ + if isinstance(val, str): + val = val.lower() + truthy = {True, "true", "t", "yes", "y", "on", "1", 1} + falsy = {False, "false", "f", "no", "n", "off", "0", 0} + try: + if val in truthy: + return True + if val in falsy: + return False + except TypeError: + # Raised when "val" is not hashable (e.g., lists) + pass + raise ValueError("Cannot convert value to bool: {}".format(val)) diff --git a/openpype/vendor/python/python_2/attr/converters.pyi b/openpype/vendor/python/python_2/attr/converters.pyi new file mode 100644 index 0000000000..0f58088a37 --- /dev/null +++ b/openpype/vendor/python/python_2/attr/converters.pyi @@ -0,0 +1,13 @@ +from typing import Callable, Optional, TypeVar, overload + +from . import _ConverterType + +_T = TypeVar("_T") + +def pipe(*validators: _ConverterType) -> _ConverterType: ... +def optional(converter: _ConverterType) -> _ConverterType: ... +@overload +def default_if_none(default: _T) -> _ConverterType: ... +@overload +def default_if_none(*, factory: Callable[[], _T]) -> _ConverterType: ... +def to_bool(val: str) -> bool: ... diff --git a/openpype/vendor/python/python_2/attr/exceptions.py b/openpype/vendor/python/python_2/attr/exceptions.py new file mode 100644 index 0000000000..b2f1edc32a --- /dev/null +++ b/openpype/vendor/python/python_2/attr/exceptions.py @@ -0,0 +1,94 @@ +# SPDX-License-Identifier: MIT + +from __future__ import absolute_import, division, print_function + + +class FrozenError(AttributeError): + """ + A frozen/immutable instance or attribute have been attempted to be + modified. + + It mirrors the behavior of ``namedtuples`` by using the same error message + and subclassing `AttributeError`. + + .. versionadded:: 20.1.0 + """ + + msg = "can't set attribute" + args = [msg] + + +class FrozenInstanceError(FrozenError): + """ + A frozen instance has been attempted to be modified. + + .. versionadded:: 16.1.0 + """ + + +class FrozenAttributeError(FrozenError): + """ + A frozen attribute has been attempted to be modified. + + .. versionadded:: 20.1.0 + """ + + +class AttrsAttributeNotFoundError(ValueError): + """ + An ``attrs`` function couldn't find an attribute that the user asked for. + + .. versionadded:: 16.2.0 + """ + + +class NotAnAttrsClassError(ValueError): + """ + A non-``attrs`` class has been passed into an ``attrs`` function. + + .. versionadded:: 16.2.0 + """ + + +class DefaultAlreadySetError(RuntimeError): + """ + A default has been set using ``attr.ib()`` and is attempted to be reset + using the decorator. + + .. versionadded:: 17.1.0 + """ + + +class UnannotatedAttributeError(RuntimeError): + """ + A class with ``auto_attribs=True`` has an ``attr.ib()`` without a type + annotation. + + .. versionadded:: 17.3.0 + """ + + +class PythonTooOldError(RuntimeError): + """ + It was attempted to use an ``attrs`` feature that requires a newer Python + version. + + .. versionadded:: 18.2.0 + """ + + +class NotCallableError(TypeError): + """ + A ``attr.ib()`` requiring a callable has been set with a value + that is not callable. + + .. 
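The converters added above are plain callables, so a short behavioural sketch may help (illustrative only, not part of the diff):

```python
# Illustrative only -- not part of the vendored diff.
from attr import converters

# to_bool(): maps "boolean" strings to real booleans, raises ValueError otherwise.
assert converters.to_bool("yes") is True
assert converters.to_bool("0") is False

# default_if_none(): replaces an explicit None with a fallback value.
fps_or_default = converters.default_if_none(default=25)
assert fps_or_default(None) == 25
assert fps_or_default(30) == 30

# optional(): runs the wrapped converter only for non-None values.
maybe_int = converters.optional(int)
assert maybe_int(None) is None
assert maybe_int("7") == 7
```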
versionadded:: 19.2.0 + """ + + def __init__(self, msg, value): + super(TypeError, self).__init__(msg, value) + self.msg = msg + self.value = value + + def __str__(self): + return str(self.msg) diff --git a/openpype/vendor/python/python_2/attr/exceptions.pyi b/openpype/vendor/python/python_2/attr/exceptions.pyi new file mode 100644 index 0000000000..f2680118b4 --- /dev/null +++ b/openpype/vendor/python/python_2/attr/exceptions.pyi @@ -0,0 +1,17 @@ +from typing import Any + +class FrozenError(AttributeError): + msg: str = ... + +class FrozenInstanceError(FrozenError): ... +class FrozenAttributeError(FrozenError): ... +class AttrsAttributeNotFoundError(ValueError): ... +class NotAnAttrsClassError(ValueError): ... +class DefaultAlreadySetError(RuntimeError): ... +class UnannotatedAttributeError(RuntimeError): ... +class PythonTooOldError(RuntimeError): ... + +class NotCallableError(TypeError): + msg: str = ... + value: Any = ... + def __init__(self, msg: str, value: Any) -> None: ... diff --git a/openpype/vendor/python/python_2/attr/filters.py b/openpype/vendor/python/python_2/attr/filters.py new file mode 100644 index 0000000000..a1978a8775 --- /dev/null +++ b/openpype/vendor/python/python_2/attr/filters.py @@ -0,0 +1,54 @@ +# SPDX-License-Identifier: MIT + +""" +Commonly useful filters for `attr.asdict`. +""" + +from __future__ import absolute_import, division, print_function + +from ._compat import isclass +from ._make import Attribute + + +def _split_what(what): + """ + Returns a tuple of `frozenset`s of classes and attributes. + """ + return ( + frozenset(cls for cls in what if isclass(cls)), + frozenset(cls for cls in what if isinstance(cls, Attribute)), + ) + + +def include(*what): + """ + Include *what*. + + :param what: What to include. + :type what: `list` of `type` or `attrs.Attribute`\\ s + + :rtype: `callable` + """ + cls, attrs = _split_what(what) + + def include_(attribute, value): + return value.__class__ in cls or attribute in attrs + + return include_ + + +def exclude(*what): + """ + Exclude *what*. + + :param what: What to exclude. + :type what: `list` of classes or `attrs.Attribute`\\ s. + + :rtype: `callable` + """ + cls, attrs = _split_what(what) + + def exclude_(attribute, value): + return value.__class__ not in cls and attribute not in attrs + + return exclude_ diff --git a/openpype/vendor/python/python_2/attr/filters.pyi b/openpype/vendor/python/python_2/attr/filters.pyi new file mode 100644 index 0000000000..993866865e --- /dev/null +++ b/openpype/vendor/python/python_2/attr/filters.pyi @@ -0,0 +1,6 @@ +from typing import Any, Union + +from . import Attribute, _FilterType + +def include(*what: Union[type, Attribute[Any]]) -> _FilterType[Any]: ... +def exclude(*what: Union[type, Attribute[Any]]) -> _FilterType[Any]: ... diff --git a/openpype/vendor/python/python_2/attr/py.typed b/openpype/vendor/python/python_2/attr/py.typed new file mode 100644 index 0000000000..e69de29bb2 diff --git a/openpype/vendor/python/python_2/attr/setters.py b/openpype/vendor/python/python_2/attr/setters.py new file mode 100644 index 0000000000..b1cbb5d83e --- /dev/null +++ b/openpype/vendor/python/python_2/attr/setters.py @@ -0,0 +1,79 @@ +# SPDX-License-Identifier: MIT + +""" +Commonly used hooks for on_setattr. +""" + +from __future__ import absolute_import, division, print_function + +from . import _config +from .exceptions import FrozenAttributeError + + +def pipe(*setters): + """ + Run all *setters* and return the return value of the last one. + + .. 
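A short sketch of the `attr.filters` helpers added above, used to trim `attr.asdict` output (illustrative only, not part of the diff; the `Instance` class and its attribute names are made up for the example):

```python
# Illustrative only -- not part of the vendored diff.
import attr
from attr import filters


@attr.s(auto_attribs=True)
class Instance:
    subset: str
    family: str
    thumbnail: bytes = b""


inst = Instance(subset="modelMain", family="model")

# Drop heavy attributes when serializing, e.g. for logging or a database payload.
light = attr.asdict(inst, filter=filters.exclude(attr.fields(Instance).thumbnail))
assert light == {"subset": "modelMain", "family": "model"}
```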
versionadded:: 20.1.0 + """ + + def wrapped_pipe(instance, attrib, new_value): + rv = new_value + + for setter in setters: + rv = setter(instance, attrib, rv) + + return rv + + return wrapped_pipe + + +def frozen(_, __, ___): + """ + Prevent an attribute to be modified. + + .. versionadded:: 20.1.0 + """ + raise FrozenAttributeError() + + +def validate(instance, attrib, new_value): + """ + Run *attrib*'s validator on *new_value* if it has one. + + .. versionadded:: 20.1.0 + """ + if _config._run_validators is False: + return new_value + + v = attrib.validator + if not v: + return new_value + + v(instance, attrib, new_value) + + return new_value + + +def convert(instance, attrib, new_value): + """ + Run *attrib*'s converter -- if it has one -- on *new_value* and return the + result. + + .. versionadded:: 20.1.0 + """ + c = attrib.converter + if c: + return c(new_value) + + return new_value + + +NO_OP = object() +""" +Sentinel for disabling class-wide *on_setattr* hooks for certain attributes. + +Does not work in `pipe` or within lists. + +.. versionadded:: 20.1.0 +""" diff --git a/openpype/vendor/python/python_2/attr/setters.pyi b/openpype/vendor/python/python_2/attr/setters.pyi new file mode 100644 index 0000000000..3f5603c2b0 --- /dev/null +++ b/openpype/vendor/python/python_2/attr/setters.pyi @@ -0,0 +1,19 @@ +from typing import Any, NewType, NoReturn, TypeVar, cast + +from . import Attribute, _OnSetAttrType + +_T = TypeVar("_T") + +def frozen( + instance: Any, attribute: Attribute[Any], new_value: Any +) -> NoReturn: ... +def pipe(*setters: _OnSetAttrType) -> _OnSetAttrType: ... +def validate(instance: Any, attribute: Attribute[_T], new_value: _T) -> _T: ... + +# convert is allowed to return Any, because they can be chained using pipe. +def convert( + instance: Any, attribute: Attribute[Any], new_value: Any +) -> Any: ... + +_NoOpType = NewType("_NoOpType", object) +NO_OP: _NoOpType diff --git a/openpype/vendor/python/python_2/attr/validators.py b/openpype/vendor/python/python_2/attr/validators.py new file mode 100644 index 0000000000..0b0c8342f2 --- /dev/null +++ b/openpype/vendor/python/python_2/attr/validators.py @@ -0,0 +1,561 @@ +# SPDX-License-Identifier: MIT + +""" +Commonly useful validators. +""" + +from __future__ import absolute_import, division, print_function + +import operator +import re + +from contextlib import contextmanager + +from ._config import get_run_validators, set_run_validators +from ._make import _AndValidator, and_, attrib, attrs +from .exceptions import NotCallableError + + +try: + Pattern = re.Pattern +except AttributeError: # Python <3.7 lacks a Pattern type. + Pattern = type(re.compile("")) + + +__all__ = [ + "and_", + "deep_iterable", + "deep_mapping", + "disabled", + "ge", + "get_disabled", + "gt", + "in_", + "instance_of", + "is_callable", + "le", + "lt", + "matches_re", + "max_len", + "optional", + "provides", + "set_disabled", +] + + +def set_disabled(disabled): + """ + Globally disable or enable running validators. + + By default, they are run. + + :param disabled: If ``True``, disable running all validators. + :type disabled: bool + + .. warning:: + + This function is not thread-safe! + + .. versionadded:: 21.3.0 + """ + set_run_validators(not disabled) + + +def get_disabled(): + """ + Return a bool indicating whether validators are currently disabled or not. + + :return: ``True`` if validators are currently disabled. + :rtype: bool + + .. 
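The `on_setattr` hooks added above compose via `setters.pipe`; a brief sketch of how they are wired onto a class (illustrative only, not part of the diff):

```python
# Illustrative only -- not part of the vendored diff.
import attr
from attr import setters
from attr.validators import instance_of


@attr.s(on_setattr=setters.pipe(setters.convert, setters.validate))
class Shot:
    frame_start = attr.ib(converter=int, validator=instance_of(int))


shot = Shot(frame_start="1001")
shot.frame_start = "1002"        # converted and re-validated on every assignment
assert shot.frame_start == 1002
```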
versionadded:: 21.3.0 + """ + return not get_run_validators() + + +@contextmanager +def disabled(): + """ + Context manager that disables running validators within its context. + + .. warning:: + + This context manager is not thread-safe! + + .. versionadded:: 21.3.0 + """ + set_run_validators(False) + try: + yield + finally: + set_run_validators(True) + + +@attrs(repr=False, slots=True, hash=True) +class _InstanceOfValidator(object): + type = attrib() + + def __call__(self, inst, attr, value): + """ + We use a callable class to be able to change the ``__repr__``. + """ + if not isinstance(value, self.type): + raise TypeError( + "'{name}' must be {type!r} (got {value!r} that is a " + "{actual!r}).".format( + name=attr.name, + type=self.type, + actual=value.__class__, + value=value, + ), + attr, + self.type, + value, + ) + + def __repr__(self): + return "".format( + type=self.type + ) + + +def instance_of(type): + """ + A validator that raises a `TypeError` if the initializer is called + with a wrong type for this particular attribute (checks are performed using + `isinstance` therefore it's also valid to pass a tuple of types). + + :param type: The type to check for. + :type type: type or tuple of types + + :raises TypeError: With a human readable error message, the attribute + (of type `attrs.Attribute`), the expected type, and the value it + got. + """ + return _InstanceOfValidator(type) + + +@attrs(repr=False, frozen=True, slots=True) +class _MatchesReValidator(object): + pattern = attrib() + match_func = attrib() + + def __call__(self, inst, attr, value): + """ + We use a callable class to be able to change the ``__repr__``. + """ + if not self.match_func(value): + raise ValueError( + "'{name}' must match regex {pattern!r}" + " ({value!r} doesn't)".format( + name=attr.name, pattern=self.pattern.pattern, value=value + ), + attr, + self.pattern, + value, + ) + + def __repr__(self): + return "".format( + pattern=self.pattern + ) + + +def matches_re(regex, flags=0, func=None): + r""" + A validator that raises `ValueError` if the initializer is called + with a string that doesn't match *regex*. + + :param regex: a regex string or precompiled pattern to match against + :param int flags: flags that will be passed to the underlying re function + (default 0) + :param callable func: which underlying `re` function to call (options + are `re.fullmatch`, `re.search`, `re.match`, default + is ``None`` which means either `re.fullmatch` or an emulation of + it on Python 2). For performance reasons, they won't be used directly + but on a pre-`re.compile`\ ed pattern. + + .. versionadded:: 19.2.0 + .. versionchanged:: 21.3.0 *regex* can be a pre-compiled pattern. 
+ """ + fullmatch = getattr(re, "fullmatch", None) + valid_funcs = (fullmatch, None, re.search, re.match) + if func not in valid_funcs: + raise ValueError( + "'func' must be one of {}.".format( + ", ".join( + sorted( + e and e.__name__ or "None" for e in set(valid_funcs) + ) + ) + ) + ) + + if isinstance(regex, Pattern): + if flags: + raise TypeError( + "'flags' can only be used with a string pattern; " + "pass flags to re.compile() instead" + ) + pattern = regex + else: + pattern = re.compile(regex, flags) + + if func is re.match: + match_func = pattern.match + elif func is re.search: + match_func = pattern.search + elif fullmatch: + match_func = pattern.fullmatch + else: # Python 2 fullmatch emulation (https://bugs.python.org/issue16203) + pattern = re.compile( + r"(?:{})\Z".format(pattern.pattern), pattern.flags + ) + match_func = pattern.match + + return _MatchesReValidator(pattern, match_func) + + +@attrs(repr=False, slots=True, hash=True) +class _ProvidesValidator(object): + interface = attrib() + + def __call__(self, inst, attr, value): + """ + We use a callable class to be able to change the ``__repr__``. + """ + if not self.interface.providedBy(value): + raise TypeError( + "'{name}' must provide {interface!r} which {value!r} " + "doesn't.".format( + name=attr.name, interface=self.interface, value=value + ), + attr, + self.interface, + value, + ) + + def __repr__(self): + return "".format( + interface=self.interface + ) + + +def provides(interface): + """ + A validator that raises a `TypeError` if the initializer is called + with an object that does not provide the requested *interface* (checks are + performed using ``interface.providedBy(value)`` (see `zope.interface + `_). + + :param interface: The interface to check for. + :type interface: ``zope.interface.Interface`` + + :raises TypeError: With a human readable error message, the attribute + (of type `attrs.Attribute`), the expected interface, and the + value it got. + """ + return _ProvidesValidator(interface) + + +@attrs(repr=False, slots=True, hash=True) +class _OptionalValidator(object): + validator = attrib() + + def __call__(self, inst, attr, value): + if value is None: + return + + self.validator(inst, attr, value) + + def __repr__(self): + return "".format( + what=repr(self.validator) + ) + + +def optional(validator): + """ + A validator that makes an attribute optional. An optional attribute is one + which can be set to ``None`` in addition to satisfying the requirements of + the sub-validator. + + :param validator: A validator (or a list of validators) that is used for + non-``None`` values. + :type validator: callable or `list` of callables. + + .. versionadded:: 15.1.0 + .. versionchanged:: 17.1.0 *validator* can be a list of validators. + """ + if isinstance(validator, list): + return _OptionalValidator(_AndValidator(validator)) + return _OptionalValidator(validator) + + +@attrs(repr=False, slots=True, hash=True) +class _InValidator(object): + options = attrib() + + def __call__(self, inst, attr, value): + try: + in_options = value in self.options + except TypeError: # e.g. `1 in "abc"` + in_options = False + + if not in_options: + raise ValueError( + "'{name}' must be in {options!r} (got {value!r})".format( + name=attr.name, options=self.options, value=value + ) + ) + + def __repr__(self): + return "".format( + options=self.options + ) + + +def in_(options): + """ + A validator that raises a `ValueError` if the initializer is called + with a value that does not belong in the options provided. 
The check is + performed using ``value in options``. + + :param options: Allowed options. + :type options: list, tuple, `enum.Enum`, ... + + :raises ValueError: With a human readable error message, the attribute (of + type `attrs.Attribute`), the expected options, and the value it + got. + + .. versionadded:: 17.1.0 + """ + return _InValidator(options) + + +@attrs(repr=False, slots=False, hash=True) +class _IsCallableValidator(object): + def __call__(self, inst, attr, value): + """ + We use a callable class to be able to change the ``__repr__``. + """ + if not callable(value): + message = ( + "'{name}' must be callable " + "(got {value!r} that is a {actual!r})." + ) + raise NotCallableError( + msg=message.format( + name=attr.name, value=value, actual=value.__class__ + ), + value=value, + ) + + def __repr__(self): + return "" + + +def is_callable(): + """ + A validator that raises a `attr.exceptions.NotCallableError` if the + initializer is called with a value for this particular attribute + that is not callable. + + .. versionadded:: 19.1.0 + + :raises `attr.exceptions.NotCallableError`: With a human readable error + message containing the attribute (`attrs.Attribute`) name, + and the value it got. + """ + return _IsCallableValidator() + + +@attrs(repr=False, slots=True, hash=True) +class _DeepIterable(object): + member_validator = attrib(validator=is_callable()) + iterable_validator = attrib( + default=None, validator=optional(is_callable()) + ) + + def __call__(self, inst, attr, value): + """ + We use a callable class to be able to change the ``__repr__``. + """ + if self.iterable_validator is not None: + self.iterable_validator(inst, attr, value) + + for member in value: + self.member_validator(inst, attr, member) + + def __repr__(self): + iterable_identifier = ( + "" + if self.iterable_validator is None + else " {iterable!r}".format(iterable=self.iterable_validator) + ) + return ( + "" + ).format( + iterable_identifier=iterable_identifier, + member=self.member_validator, + ) + + +def deep_iterable(member_validator, iterable_validator=None): + """ + A validator that performs deep validation of an iterable. + + :param member_validator: Validator to apply to iterable members + :param iterable_validator: Validator to apply to iterable itself + (optional) + + .. versionadded:: 19.1.0 + + :raises TypeError: if any sub-validators fail + """ + return _DeepIterable(member_validator, iterable_validator) + + +@attrs(repr=False, slots=True, hash=True) +class _DeepMapping(object): + key_validator = attrib(validator=is_callable()) + value_validator = attrib(validator=is_callable()) + mapping_validator = attrib(default=None, validator=optional(is_callable())) + + def __call__(self, inst, attr, value): + """ + We use a callable class to be able to change the ``__repr__``. + """ + if self.mapping_validator is not None: + self.mapping_validator(inst, attr, value) + + for key in value: + self.key_validator(inst, attr, key) + self.value_validator(inst, attr, value[key]) + + def __repr__(self): + return ( + "" + ).format(key=self.key_validator, value=self.value_validator) + + +def deep_mapping(key_validator, value_validator, mapping_validator=None): + """ + A validator that performs deep validation of a dictionary. + + :param key_validator: Validator to apply to dictionary keys + :param value_validator: Validator to apply to dictionary values + :param mapping_validator: Validator to apply to top-level mapping + attribute (optional) + + .. 
versionadded:: 19.1.0 + + :raises TypeError: if any sub-validators fail + """ + return _DeepMapping(key_validator, value_validator, mapping_validator) + + +@attrs(repr=False, frozen=True, slots=True) +class _NumberValidator(object): + bound = attrib() + compare_op = attrib() + compare_func = attrib() + + def __call__(self, inst, attr, value): + """ + We use a callable class to be able to change the ``__repr__``. + """ + if not self.compare_func(value, self.bound): + raise ValueError( + "'{name}' must be {op} {bound}: {value}".format( + name=attr.name, + op=self.compare_op, + bound=self.bound, + value=value, + ) + ) + + def __repr__(self): + return "".format( + op=self.compare_op, bound=self.bound + ) + + +def lt(val): + """ + A validator that raises `ValueError` if the initializer is called + with a number larger or equal to *val*. + + :param val: Exclusive upper bound for values + + .. versionadded:: 21.3.0 + """ + return _NumberValidator(val, "<", operator.lt) + + +def le(val): + """ + A validator that raises `ValueError` if the initializer is called + with a number greater than *val*. + + :param val: Inclusive upper bound for values + + .. versionadded:: 21.3.0 + """ + return _NumberValidator(val, "<=", operator.le) + + +def ge(val): + """ + A validator that raises `ValueError` if the initializer is called + with a number smaller than *val*. + + :param val: Inclusive lower bound for values + + .. versionadded:: 21.3.0 + """ + return _NumberValidator(val, ">=", operator.ge) + + +def gt(val): + """ + A validator that raises `ValueError` if the initializer is called + with a number smaller or equal to *val*. + + :param val: Exclusive lower bound for values + + .. versionadded:: 21.3.0 + """ + return _NumberValidator(val, ">", operator.gt) + + +@attrs(repr=False, frozen=True, slots=True) +class _MaxLengthValidator(object): + max_length = attrib() + + def __call__(self, inst, attr, value): + """ + We use a callable class to be able to change the ``__repr__``. + """ + if len(value) > self.max_length: + raise ValueError( + "Length of '{name}' must be <= {max}: {len}".format( + name=attr.name, max=self.max_length, len=len(value) + ) + ) + + def __repr__(self): + return "".format(max=self.max_length) + + +def max_len(length): + """ + A validator that raises `ValueError` if the initializer is called + with a string or iterable that is longer than *length*. + + :param int length: Maximum length of the string or iterable + + .. versionadded:: 21.3.0 + """ + return _MaxLengthValidator(length) diff --git a/openpype/vendor/python/python_2/attr/validators.pyi b/openpype/vendor/python/python_2/attr/validators.pyi new file mode 100644 index 0000000000..5e00b85433 --- /dev/null +++ b/openpype/vendor/python/python_2/attr/validators.pyi @@ -0,0 +1,78 @@ +from typing import ( + Any, + AnyStr, + Callable, + Container, + ContextManager, + Iterable, + List, + Mapping, + Match, + Optional, + Pattern, + Tuple, + Type, + TypeVar, + Union, + overload, +) + +from . import _ValidatorType + +_T = TypeVar("_T") +_T1 = TypeVar("_T1") +_T2 = TypeVar("_T2") +_T3 = TypeVar("_T3") +_I = TypeVar("_I", bound=Iterable) +_K = TypeVar("_K") +_V = TypeVar("_V") +_M = TypeVar("_M", bound=Mapping) + +def set_disabled(run: bool) -> None: ... +def get_disabled() -> bool: ... +def disabled() -> ContextManager[None]: ... + +# To be more precise on instance_of use some overloads. +# If there are more than 3 items in the tuple then we fall back to Any +@overload +def instance_of(type: Type[_T]) -> _ValidatorType[_T]: ... 
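Combining the validators from this hunk, a usage sketch (illustrative only, not part of the diff; the `Version` class and its field names are made up for the example):

```python
# Illustrative only -- not part of the vendored diff.
import attr
from attr import validators as v


@attr.s(auto_attribs=True)
class Version:
    name: str = attr.ib(validator=[v.instance_of(str), v.max_len(64)])
    status: str = attr.ib(validator=v.in_(("latest", "hero", "approved")))
    priority: int = attr.ib(validator=[v.ge(0), v.le(100)])
    tags: list = attr.ib(
        factory=list,
        validator=v.deep_iterable(
            member_validator=v.instance_of(str),
            iterable_validator=v.instance_of(list),
        ),
    )


ok = Version(name="renderMain", status="latest", priority=50, tags=["review"])

try:
    Version(name="renderMain", status="wip", priority=50)
except ValueError:
    pass  # in_() rejects values outside the allowed options
```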
+@overload +def instance_of(type: Tuple[Type[_T]]) -> _ValidatorType[_T]: ... +@overload +def instance_of( + type: Tuple[Type[_T1], Type[_T2]] +) -> _ValidatorType[Union[_T1, _T2]]: ... +@overload +def instance_of( + type: Tuple[Type[_T1], Type[_T2], Type[_T3]] +) -> _ValidatorType[Union[_T1, _T2, _T3]]: ... +@overload +def instance_of(type: Tuple[type, ...]) -> _ValidatorType[Any]: ... +def provides(interface: Any) -> _ValidatorType[Any]: ... +def optional( + validator: Union[_ValidatorType[_T], List[_ValidatorType[_T]]] +) -> _ValidatorType[Optional[_T]]: ... +def in_(options: Container[_T]) -> _ValidatorType[_T]: ... +def and_(*validators: _ValidatorType[_T]) -> _ValidatorType[_T]: ... +def matches_re( + regex: Union[Pattern[AnyStr], AnyStr], + flags: int = ..., + func: Optional[ + Callable[[AnyStr, AnyStr, int], Optional[Match[AnyStr]]] + ] = ..., +) -> _ValidatorType[AnyStr]: ... +def deep_iterable( + member_validator: _ValidatorType[_T], + iterable_validator: Optional[_ValidatorType[_I]] = ..., +) -> _ValidatorType[_I]: ... +def deep_mapping( + key_validator: _ValidatorType[_K], + value_validator: _ValidatorType[_V], + mapping_validator: Optional[_ValidatorType[_M]] = ..., +) -> _ValidatorType[_M]: ... +def is_callable() -> _ValidatorType[_T]: ... +def lt(val: _T) -> _ValidatorType[_T]: ... +def le(val: _T) -> _ValidatorType[_T]: ... +def ge(val: _T) -> _ValidatorType[_T]: ... +def gt(val: _T) -> _ValidatorType[_T]: ... +def max_len(length: int) -> _ValidatorType[_T]: ... diff --git a/openpype/vendor/python/python_2/attrs/__init__.py b/openpype/vendor/python/python_2/attrs/__init__.py new file mode 100644 index 0000000000..a704b8b56b --- /dev/null +++ b/openpype/vendor/python/python_2/attrs/__init__.py @@ -0,0 +1,70 @@ +# SPDX-License-Identifier: MIT + +from attr import ( + NOTHING, + Attribute, + Factory, + __author__, + __copyright__, + __description__, + __doc__, + __email__, + __license__, + __title__, + __url__, + __version__, + __version_info__, + assoc, + cmp_using, + define, + evolve, + field, + fields, + fields_dict, + frozen, + has, + make_class, + mutable, + resolve_types, + validate, +) +from attr._next_gen import asdict, astuple + +from . import converters, exceptions, filters, setters, validators + + +__all__ = [ + "__author__", + "__copyright__", + "__description__", + "__doc__", + "__email__", + "__license__", + "__title__", + "__url__", + "__version__", + "__version_info__", + "asdict", + "assoc", + "astuple", + "Attribute", + "cmp_using", + "converters", + "define", + "evolve", + "exceptions", + "Factory", + "field", + "fields_dict", + "fields", + "filters", + "frozen", + "has", + "make_class", + "mutable", + "NOTHING", + "resolve_types", + "setters", + "validate", + "validators", +] diff --git a/openpype/vendor/python/python_2/attrs/__init__.pyi b/openpype/vendor/python/python_2/attrs/__init__.pyi new file mode 100644 index 0000000000..7426fa5ddb --- /dev/null +++ b/openpype/vendor/python/python_2/attrs/__init__.pyi @@ -0,0 +1,63 @@ +from typing import ( + Any, + Callable, + Dict, + Mapping, + Optional, + Sequence, + Tuple, + Type, +) + +# Because we need to type our own stuff, we have to make everything from +# attr explicitly public too. 
+from attr import __author__ as __author__ +from attr import __copyright__ as __copyright__ +from attr import __description__ as __description__ +from attr import __email__ as __email__ +from attr import __license__ as __license__ +from attr import __title__ as __title__ +from attr import __url__ as __url__ +from attr import __version__ as __version__ +from attr import __version_info__ as __version_info__ +from attr import _FilterType +from attr import assoc as assoc +from attr import Attribute as Attribute +from attr import define as define +from attr import evolve as evolve +from attr import Factory as Factory +from attr import exceptions as exceptions +from attr import field as field +from attr import fields as fields +from attr import fields_dict as fields_dict +from attr import frozen as frozen +from attr import has as has +from attr import make_class as make_class +from attr import mutable as mutable +from attr import NOTHING as NOTHING +from attr import resolve_types as resolve_types +from attr import setters as setters +from attr import validate as validate +from attr import validators as validators + +# TODO: see definition of attr.asdict/astuple +def asdict( + inst: Any, + recurse: bool = ..., + filter: Optional[_FilterType[Any]] = ..., + dict_factory: Type[Mapping[Any, Any]] = ..., + retain_collection_types: bool = ..., + value_serializer: Optional[ + Callable[[type, Attribute[Any], Any], Any] + ] = ..., + tuple_keys: bool = ..., +) -> Dict[str, Any]: ... + +# TODO: add support for returning NamedTuple from the mypy plugin +def astuple( + inst: Any, + recurse: bool = ..., + filter: Optional[_FilterType[Any]] = ..., + tuple_factory: Type[Sequence[Any]] = ..., + retain_collection_types: bool = ..., +) -> Tuple[Any, ...]: ... diff --git a/openpype/vendor/python/python_2/attrs/converters.py b/openpype/vendor/python/python_2/attrs/converters.py new file mode 100644 index 0000000000..edfa8d3c16 --- /dev/null +++ b/openpype/vendor/python/python_2/attrs/converters.py @@ -0,0 +1,3 @@ +# SPDX-License-Identifier: MIT + +from attr.converters import * # noqa diff --git a/openpype/vendor/python/python_2/attrs/exceptions.py b/openpype/vendor/python/python_2/attrs/exceptions.py new file mode 100644 index 0000000000..bd9efed202 --- /dev/null +++ b/openpype/vendor/python/python_2/attrs/exceptions.py @@ -0,0 +1,3 @@ +# SPDX-License-Identifier: MIT + +from attr.exceptions import * # noqa diff --git a/openpype/vendor/python/python_2/attrs/filters.py b/openpype/vendor/python/python_2/attrs/filters.py new file mode 100644 index 0000000000..52959005b0 --- /dev/null +++ b/openpype/vendor/python/python_2/attrs/filters.py @@ -0,0 +1,3 @@ +# SPDX-License-Identifier: MIT + +from attr.filters import * # noqa diff --git a/openpype/vendor/python/python_2/attrs/py.typed b/openpype/vendor/python/python_2/attrs/py.typed new file mode 100644 index 0000000000..e69de29bb2 diff --git a/openpype/vendor/python/python_2/attrs/setters.py b/openpype/vendor/python/python_2/attrs/setters.py new file mode 100644 index 0000000000..9b50770804 --- /dev/null +++ b/openpype/vendor/python/python_2/attrs/setters.py @@ -0,0 +1,3 @@ +# SPDX-License-Identifier: MIT + +from attr.setters import * # noqa diff --git a/openpype/vendor/python/python_2/attrs/validators.py b/openpype/vendor/python/python_2/attrs/validators.py new file mode 100644 index 0000000000..ab2c9b3024 --- /dev/null +++ b/openpype/vendor/python/python_2/attrs/validators.py @@ -0,0 +1,3 @@ +# SPDX-License-Identifier: MIT + +from attr.validators import * # noqa diff --git 
a/openpype/version.py b/openpype/version.py index dd5ad97449..7894bb8bf4 100644 --- a/openpype/version.py +++ b/openpype/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring Pype version.""" -__version__ = "3.12.2-nightly.2" +__version__ = "3.14.1-nightly.3" diff --git a/poetry.lock b/poetry.lock index c4f061743e..c68ffdf1ea 100644 --- a/poetry.lock +++ b/poetry.lock @@ -46,7 +46,7 @@ aiohttp = ">=3,<4" [[package]] name = "aiohttp-middlewares" -version = "2.0.0" +version = "2.1.0" description = "Collection of useful middlewares for aiohttp applications." category = "main" optional = false @@ -90,7 +90,14 @@ version = "1.4.4" description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." category = "main" optional = false -python-versions = "*" +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +develop = false + +[package.source] +type = "git" +url = "https://github.com/ActiveState/appdirs.git" +reference = "master" +resolved_reference = "193a2cbba58cce2542882fcedd0e49f6763672ed" [[package]] name = "arrow" @@ -113,6 +120,7 @@ python-versions = ">=3.6.2" [package.dependencies] lazy-object-proxy = ">=1.4.0" +typed-ast = {version = ">=1.4.0,<2.0", markers = "implementation_name == \"cpython\" and python_version < \"3.8\""} typing-extensions = {version = ">=3.10", markers = "python_version < \"3.10\""} wrapt = ">=1.11,<2" @@ -134,17 +142,17 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" [[package]] name = "attrs" -version = "21.4.0" +version = "22.1.0" description = "Classes Without Boilerplate" category = "main" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +python-versions = ">=3.5" [package.extras] -dev = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface", "furo", "sphinx", "sphinx-notfound-page", "pre-commit", "cloudpickle"] +dev = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "mypy (>=0.900,!=0.940)", "pytest-mypy-plugins", "zope.interface", "furo", "sphinx", "sphinx-notfound-page", "pre-commit", "cloudpickle"] docs = ["furo", "sphinx", "zope.interface", "sphinx-notfound-page"] -tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface", "cloudpickle"] -tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "cloudpickle"] +tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "mypy (>=0.900,!=0.940)", "pytest-mypy-plugins", "zope.interface", "cloudpickle"] +tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "mypy (>=0.900,!=0.940)", "pytest-mypy-plugins", "cloudpickle"] [[package]] name = "autopep8" @@ -285,7 +293,7 @@ python-versions = "*" [[package]] name = "coverage" -version = "6.4.2" +version = "6.4.3" description = "Code coverage measurement for Python" category = "dev" optional = false @@ -433,7 +441,7 @@ pyflakes = ">=2.3.0,<2.4.0" [[package]] name = "frozenlist" -version = "1.3.0" +version = "1.3.1" description = "A list-like structure which implements collections.abc.MutableSequence" category = "main" optional = false @@ -441,19 +449,20 @@ python-versions = ">=3.7" [[package]] name = "ftrack-python-api" -version = "2.0.0" +version = "2.3.3" description = "Python API for ftrack." 
category = "main" optional = false -python-versions = ">=2.7.9, <4.0" +python-versions = ">=2.7.9, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, < 3.10" [package.dependencies] +appdirs = ">=1,<2" arrow = ">=0.4.4,<1" -clique = ">=1.2.0,<2" +clique = "1.6.1" future = ">=0.16.0,<1" pyparsing = ">=2.0,<3" requests = ">=2,<3" -six = ">=1,<2" +six = ">=1.13.0,<2" termcolor = ">=1.1.0,<2" websocket-client = ">=0.40.0,<1" @@ -539,7 +548,7 @@ uritemplate = ">=3.0.0,<4dev" [[package]] name = "google-auth" -version = "2.9.1" +version = "2.10.0" description = "Google Authentication Library" category = "main" optional = false @@ -672,8 +681,8 @@ optional = false python-versions = ">=3.7" [package.extras] -test = ["pytest", "pytest-trio", "pytest-asyncio (>=0.17)", "testpath", "trio", "async-timeout"] -trio = ["trio", "async-generator"] +trio = ["async-generator", "trio"] +test = ["async-timeout", "trio", "testpath", "pytest-asyncio (>=0.17)", "pytest-trio", "pytest"] [[package]] name = "jinja2" @@ -900,7 +909,7 @@ python-versions = "*" [[package]] name = "protobuf" -version = "4.21.2" +version = "4.21.5" description = "" category = "main" optional = false @@ -1242,7 +1251,7 @@ python-versions = "*" [[package]] name = "pytz" -version = "2022.1" +version = "2022.2" description = "World timezone definitions, modern and historical" category = "dev" optional = false @@ -1354,6 +1363,21 @@ category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +[[package]] +name = "shotgun-api3" +version = "3.3.3" +description = "Shotgun Python API" +category = "main" +optional = false +python-versions = "*" +develop = false + +[package.source] +type = "git" +url = "https://github.com/shotgunsoftware/python-api.git" +reference = "v3.3.3" +resolved_reference = "b9f066c0edbea6e0733242e18f32f75489064840" + [[package]] name = "six" version = "1.16.0" @@ -1364,7 +1388,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" [[package]] name = "slack-sdk" -version = "3.18.0" +version = "3.18.1" description = "The Slack API Platform SDK for Python" category = "main" optional = false @@ -1443,9 +1467,9 @@ docutils = "*" sphinx = "*" [package.extras] +test = ["pytest-cov", "pytest (>=3.0.0)"] +lint = ["pylint", "flake8", "black"] dev = ["pre-commit"] -lint = ["black", "flake8", "pylint"] -test = ["pytest (>=3.0.0)", "pytest-cov"] [[package]] name = "sphinx-rtd-theme" @@ -1596,7 +1620,7 @@ python-versions = ">=3.6,<4.0" name = "typing-extensions" version = "4.3.0" description = "Backported and Experimental Type Hints for Python 3.7+" -category = "dev" +category = "main" optional = false python-versions = ">=3.7" @@ -1610,7 +1634,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" [[package]] name = "urllib3" -version = "1.26.10" +version = "1.26.11" description = "HTTP library with thread-safe connection pooling, file post, and more." 
category = "main" optional = false @@ -1667,11 +1691,11 @@ ujson = ["ujson"] [[package]] name = "yarl" -version = "1.7.2" +version = "1.8.1" description = "Yet another URL library" category = "main" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" [package.dependencies] idna = ">=2.0" @@ -1774,10 +1798,7 @@ aiohttp-json-rpc = [ {file = "aiohttp-json-rpc-0.13.3.tar.gz", hash = "sha256:6237a104478c22c6ef96c7227a01d6832597b414e4b79a52d85593356a169e99"}, {file = "aiohttp_json_rpc-0.13.3-py3-none-any.whl", hash = "sha256:4fbd197aced61bd2df7ae3237ead7d3e08833c2ccf48b8581e1828c95ebee680"}, ] -aiohttp-middlewares = [ - {file = "aiohttp-middlewares-2.0.0.tar.gz", hash = "sha256:e08ba04dc0e8fe379aa5e9444a68485c275677ee1e18c55cbb855de0c3629502"}, - {file = "aiohttp_middlewares-2.0.0-py3-none-any.whl", hash = "sha256:29cf1513176b4013844711975ff520e26a8a5d8f9fefbbddb5e91224a86b043e"}, -] +aiohttp-middlewares = [] aiosignal = [ {file = "aiosignal-1.2.0-py3-none-any.whl", hash = "sha256:26e62109036cd181df6e6ad646f91f0dcfd05fe16d0cb924138ff2ab75d64e3a"}, {file = "aiosignal-1.2.0.tar.gz", hash = "sha256:78ed67db6c7b7ced4f98e495e572106d5c432a93e1ddd1bf475e1dc05f5b7df2"}, @@ -1790,10 +1811,7 @@ ansicon = [ {file = "ansicon-1.89.0-py2.py3-none-any.whl", hash = "sha256:f1def52d17f65c2c9682cf8370c03f541f410c1752d6a14029f97318e4b9dfec"}, {file = "ansicon-1.89.0.tar.gz", hash = "sha256:e4d039def5768a47e4afec8e89e83ec3ae5a26bf00ad851f914d1240b444d2b1"}, ] -appdirs = [ - {file = "appdirs-1.4.4-py2.py3-none-any.whl", hash = "sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128"}, - {file = "appdirs-1.4.4.tar.gz", hash = "sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41"}, -] +appdirs = [] arrow = [ {file = "arrow-0.17.0-py2.py3-none-any.whl", hash = "sha256:e098abbd9af3665aea81bdd6c869e93af4feb078e98468dd351c383af187aac5"}, {file = "arrow-0.17.0.tar.gz", hash = "sha256:ff08d10cda1d36c68657d6ad20d74fbea493d980f8b2d45344e00d6ed2bf6ed4"}, @@ -2031,14 +2049,8 @@ cx-logging = [ {file = "cx_Logging-3.0-cp39-cp39-win_amd64.whl", hash = "sha256:302e9c4f65a936c288a4fa59a90e7e142d9ef994aa29676731acafdcccdbb3f5"}, {file = "cx_Logging-3.0.tar.gz", hash = "sha256:ba8a7465facf7b98d8f494030fb481a2e8aeee29dc191e10383bb54ed42bdb34"}, ] -deprecated = [ - {file = "Deprecated-1.2.13-py2.py3-none-any.whl", hash = "sha256:64756e3e14c8c5eea9795d93c524551432a0be75629f8f29e67ab8caf076c76d"}, - {file = "Deprecated-1.2.13.tar.gz", hash = "sha256:43ac5335da90c31c24ba028af536a91d41d53f9e6901ddb021bcc572ce44e38d"}, -] -dill = [ - {file = "dill-0.3.5.1-py2.py3-none-any.whl", hash = "sha256:33501d03270bbe410c72639b350e941882a8b0fd55357580fbc873fba0c59302"}, - {file = "dill-0.3.5.1.tar.gz", hash = "sha256:d75e41f3eff1eee599d738e76ba8f4ad98ea229db8b085318aa2b3333a208c86"}, -] +deprecated = [] +dill = [] dnspython = [ {file = "dnspython-2.2.1-py3-none-any.whl", hash = "sha256:a851e51367fb93e9e1361732c1d60dab63eff98712e503ea7d92e6eccb109b4f"}, {file = "dnspython-2.2.1.tar.gz", hash = "sha256:0f7569a4a6ff151958b64304071d370daa3243d15941a7beedf0c9fe5105603e"}, @@ -2047,11 +2059,7 @@ docutils = [ {file = "docutils-0.17.1-py2.py3-none-any.whl", hash = "sha256:cf316c8370a737a022b72b56874f6602acf974a37a9fba42ec2876387549fc61"}, {file = "docutils-0.17.1.tar.gz", hash = "sha256:686577d2e4c32380bb50cbb22f575ed742d58168cee37e99117a854bcd88f125"}, ] -dropbox = [ - {file = "dropbox-11.33.0-py2-none-any.whl", hash = "sha256:3ee9024631b80f18938556d5e27cbdede26d6dc0b73aeaa90fc075ce96c950b1"}, - {file = 
"dropbox-11.33.0-py3-none-any.whl", hash = "sha256:1a0cbc22b0d1dae96e18b37e3520e5c289de7eb1303935db40e4dbfc9bb9e59b"}, - {file = "dropbox-11.33.0.tar.gz", hash = "sha256:7c638b521169a460de38b9eaeb204fe918874f72d6c3eed005d064b6f37da9c1"}, -] +dropbox = [] enlighten = [ {file = "enlighten-1.10.2-py2.py3-none-any.whl", hash = "sha256:b237fe562b320bf9f1d4bb76d0c98e0daf914372a76ab87c35cd02f57aa9d8c1"}, {file = "enlighten-1.10.2.tar.gz", hash = "sha256:7a5b83cd0f4d095e59d80c648ebb5f7ffca0cd8bcf7ae6639828ee1ad000632a"}, @@ -2063,77 +2071,12 @@ flake8 = [ {file = "flake8-3.9.2-py2.py3-none-any.whl", hash = "sha256:bf8fd333346d844f616e8d47905ef3a3384edae6b4e9beb0c5101e25e3110907"}, {file = "flake8-3.9.2.tar.gz", hash = "sha256:07528381786f2a6237b061f6e96610a4167b226cb926e2aa2b6b1d78057c576b"}, ] -frozenlist = [ - {file = "frozenlist-1.3.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d2257aaba9660f78c7b1d8fea963b68f3feffb1a9d5d05a18401ca9eb3e8d0a3"}, - {file = "frozenlist-1.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:4a44ebbf601d7bac77976d429e9bdb5a4614f9f4027777f9e54fd765196e9d3b"}, - {file = "frozenlist-1.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:45334234ec30fc4ea677f43171b18a27505bfb2dba9aca4398a62692c0ea8868"}, - {file = "frozenlist-1.3.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:47be22dc27ed933d55ee55845d34a3e4e9f6fee93039e7f8ebadb0c2f60d403f"}, - {file = "frozenlist-1.3.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:03a7dd1bfce30216a3f51a84e6dd0e4a573d23ca50f0346634916ff105ba6e6b"}, - {file = "frozenlist-1.3.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:691ddf6dc50480ce49f68441f1d16a4c3325887453837036e0fb94736eae1e58"}, - {file = "frozenlist-1.3.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bde99812f237f79eaf3f04ebffd74f6718bbd216101b35ac7955c2d47c17da02"}, - {file = "frozenlist-1.3.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6a202458d1298ced3768f5a7d44301e7c86defac162ace0ab7434c2e961166e8"}, - {file = "frozenlist-1.3.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b9e3e9e365991f8cc5f5edc1fd65b58b41d0514a6a7ad95ef5c7f34eb49b3d3e"}, - {file = "frozenlist-1.3.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:04cb491c4b1c051734d41ea2552fde292f5f3a9c911363f74f39c23659c4af78"}, - {file = "frozenlist-1.3.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:436496321dad302b8b27ca955364a439ed1f0999311c393dccb243e451ff66aa"}, - {file = "frozenlist-1.3.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:754728d65f1acc61e0f4df784456106e35afb7bf39cfe37227ab00436fb38676"}, - {file = "frozenlist-1.3.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6eb275c6385dd72594758cbe96c07cdb9bd6becf84235f4a594bdf21e3596c9d"}, - {file = "frozenlist-1.3.0-cp310-cp310-win32.whl", hash = "sha256:e30b2f9683812eb30cf3f0a8e9f79f8d590a7999f731cf39f9105a7c4a39489d"}, - {file = "frozenlist-1.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:f7353ba3367473d1d616ee727945f439e027f0bb16ac1a750219a8344d1d5d3c"}, - {file = "frozenlist-1.3.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:88aafd445a233dbbf8a65a62bc3249a0acd0d81ab18f6feb461cc5a938610d24"}, - {file = "frozenlist-1.3.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4406cfabef8f07b3b3af0f50f70938ec06d9f0fc26cbdeaab431cbc3ca3caeaa"}, - {file = 
"frozenlist-1.3.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8cf829bd2e2956066dd4de43fd8ec881d87842a06708c035b37ef632930505a2"}, - {file = "frozenlist-1.3.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:603b9091bd70fae7be28bdb8aa5c9990f4241aa33abb673390a7f7329296695f"}, - {file = "frozenlist-1.3.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:25af28b560e0c76fa41f550eacb389905633e7ac02d6eb3c09017fa1c8cdfde1"}, - {file = "frozenlist-1.3.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94c7a8a9fc9383b52c410a2ec952521906d355d18fccc927fca52ab575ee8b93"}, - {file = "frozenlist-1.3.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:65bc6e2fece04e2145ab6e3c47428d1bbc05aede61ae365b2c1bddd94906e478"}, - {file = "frozenlist-1.3.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:3f7c935c7b58b0d78c0beea0c7358e165f95f1fd8a7e98baa40d22a05b4a8141"}, - {file = "frozenlist-1.3.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd89acd1b8bb4f31b47072615d72e7f53a948d302b7c1d1455e42622de180eae"}, - {file = "frozenlist-1.3.0-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:6983a31698490825171be44ffbafeaa930ddf590d3f051e397143a5045513b01"}, - {file = "frozenlist-1.3.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:adac9700675cf99e3615eb6a0eb5e9f5a4143c7d42c05cea2e7f71c27a3d0846"}, - {file = "frozenlist-1.3.0-cp37-cp37m-win32.whl", hash = "sha256:0c36e78b9509e97042ef869c0e1e6ef6429e55817c12d78245eb915e1cca7468"}, - {file = "frozenlist-1.3.0-cp37-cp37m-win_amd64.whl", hash = "sha256:57f4d3f03a18facacb2a6bcd21bccd011e3b75d463dc49f838fd699d074fabd1"}, - {file = "frozenlist-1.3.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:8c905a5186d77111f02144fab5b849ab524f1e876a1e75205cd1386a9be4b00a"}, - {file = "frozenlist-1.3.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b5009062d78a8c6890d50b4e53b0ddda31841b3935c1937e2ed8c1bda1c7fb9d"}, - {file = "frozenlist-1.3.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2fdc3cd845e5a1f71a0c3518528bfdbfe2efaf9886d6f49eacc5ee4fd9a10953"}, - {file = "frozenlist-1.3.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:92e650bd09b5dda929523b9f8e7f99b24deac61240ecc1a32aeba487afcd970f"}, - {file = "frozenlist-1.3.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:40dff8962b8eba91fd3848d857203f0bd704b5f1fa2b3fc9af64901a190bba08"}, - {file = "frozenlist-1.3.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:768efd082074bb203c934e83a61654ed4931ef02412c2fbdecea0cff7ecd0274"}, - {file = "frozenlist-1.3.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:006d3595e7d4108a12025ddf415ae0f6c9e736e726a5db0183326fd191b14c5e"}, - {file = "frozenlist-1.3.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:871d42623ae15eb0b0e9df65baeee6976b2e161d0ba93155411d58ff27483ad8"}, - {file = "frozenlist-1.3.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:aff388be97ef2677ae185e72dc500d19ecaf31b698986800d3fc4f399a5e30a5"}, - {file = "frozenlist-1.3.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:9f892d6a94ec5c7b785e548e42722e6f3a52f5f32a8461e82ac3e67a3bd073f1"}, - {file = "frozenlist-1.3.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = 
"sha256:e982878792c971cbd60ee510c4ee5bf089a8246226dea1f2138aa0bb67aff148"}, - {file = "frozenlist-1.3.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:c6c321dd013e8fc20735b92cb4892c115f5cdb82c817b1e5b07f6b95d952b2f0"}, - {file = "frozenlist-1.3.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:30530930410855c451bea83f7b272fb1c495ed9d5cc72895ac29e91279401db3"}, - {file = "frozenlist-1.3.0-cp38-cp38-win32.whl", hash = "sha256:40ec383bc194accba825fbb7d0ef3dda5736ceab2375462f1d8672d9f6b68d07"}, - {file = "frozenlist-1.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:f20baa05eaa2bcd5404c445ec51aed1c268d62600362dc6cfe04fae34a424bd9"}, - {file = "frozenlist-1.3.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:0437fe763fb5d4adad1756050cbf855bbb2bf0d9385c7bb13d7a10b0dd550486"}, - {file = "frozenlist-1.3.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b684c68077b84522b5c7eafc1dc735bfa5b341fb011d5552ebe0968e22ed641c"}, - {file = "frozenlist-1.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:93641a51f89473837333b2f8100f3f89795295b858cd4c7d4a1f18e299dc0a4f"}, - {file = "frozenlist-1.3.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d6d32ff213aef0fd0bcf803bffe15cfa2d4fde237d1d4838e62aec242a8362fa"}, - {file = "frozenlist-1.3.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:31977f84828b5bb856ca1eb07bf7e3a34f33a5cddce981d880240ba06639b94d"}, - {file = "frozenlist-1.3.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3c62964192a1c0c30b49f403495911298810bada64e4f03249ca35a33ca0417a"}, - {file = "frozenlist-1.3.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4eda49bea3602812518765810af732229b4291d2695ed24a0a20e098c45a707b"}, - {file = "frozenlist-1.3.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:acb267b09a509c1df5a4ca04140da96016f40d2ed183cdc356d237286c971b51"}, - {file = "frozenlist-1.3.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:e1e26ac0a253a2907d654a37e390904426d5ae5483150ce3adedb35c8c06614a"}, - {file = "frozenlist-1.3.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f96293d6f982c58ebebb428c50163d010c2f05de0cde99fd681bfdc18d4b2dc2"}, - {file = "frozenlist-1.3.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:e84cb61b0ac40a0c3e0e8b79c575161c5300d1d89e13c0e02f76193982f066ed"}, - {file = "frozenlist-1.3.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:ff9310f05b9d9c5c4dd472983dc956901ee6cb2c3ec1ab116ecdde25f3ce4951"}, - {file = "frozenlist-1.3.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d26b650b71fdc88065b7a21f8ace70175bcf3b5bdba5ea22df4bfd893e795a3b"}, - {file = "frozenlist-1.3.0-cp39-cp39-win32.whl", hash = "sha256:01a73627448b1f2145bddb6e6c2259988bb8aee0fb361776ff8604b99616cd08"}, - {file = "frozenlist-1.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:772965f773757a6026dea111a15e6e2678fbd6216180f82a48a40b27de1ee2ab"}, - {file = "frozenlist-1.3.0.tar.gz", hash = "sha256:ce6f2ba0edb7b0c1d8976565298ad2deba6f8064d2bebb6ffce2ca896eb35b0b"}, -] -ftrack-python-api = [ - {file = "ftrack-python-api-2.0.0.tar.gz", hash = "sha256:dd6f02c31daf5a10078196dc9eac4671e4297c762fbbf4df98de668ac12281d9"}, - {file = "ftrack_python_api-2.0.0-py2.py3-none-any.whl", hash = "sha256:d0df0f2df4b53947272f95e179ec98b477ee425bf4217b37bb59030ad989771e"}, -] +frozenlist = [] +ftrack-python-api = [] future = [ {file = "future-0.18.2.tar.gz", hash = 
"sha256:b1bead90b70cf6ec3f0710ae53a525360fa360d306a86583adc6bf83a4db537d"}, ] -gazu = [ - {file = "gazu-0.8.30-py2.py3-none-any.whl", hash = "sha256:d692927a11314151bc33e7d67edee634053f70a3b09e4500dfc6626bfea18753"}, -] +gazu = [] gitdb = [ {file = "gitdb-4.0.9-py3-none-any.whl", hash = "sha256:8033ad4e853066ba6ca92050b9df2f89301b8fc8bf7e9324d412a63f8bf1a8fd"}, {file = "gitdb-4.0.9.tar.gz", hash = "sha256:bac2fd45c0a1c9cf619e63a90d62bdc63892ef92387424b855792a6cabe789aa"}, @@ -2158,10 +2101,7 @@ google-auth-httplib2 = [ {file = "google-auth-httplib2-0.1.0.tar.gz", hash = "sha256:a07c39fd632becacd3f07718dfd6021bf396978f03ad3ce4321d060015cc30ac"}, {file = "google_auth_httplib2-0.1.0-py2.py3-none-any.whl", hash = "sha256:31e49c36c6b5643b57e82617cb3e021e3e1d2df9da63af67252c02fa9c1f4a10"}, ] -googleapis-common-protos = [ - {file = "googleapis-common-protos-1.56.4.tar.gz", hash = "sha256:c25873c47279387cfdcbdafa36149887901d36202cb645a0e4f29686bf6e4417"}, - {file = "googleapis_common_protos-1.56.4-py2.py3-none-any.whl", hash = "sha256:8eb2cbc91b69feaf23e32452a7ae60e791e09967d81d4fcc7fc388182d1bd394"}, -] +googleapis-common-protos = [] httplib2 = [ {file = "httplib2-0.20.4-py3-none-any.whl", hash = "sha256:8b6a905cb1c79eefd03f8669fd993c36dc341f7c558f056cb5a33b5c2f458543"}, {file = "httplib2-0.20.4.tar.gz", hash = "sha256:58a98e45b4b1a48273073f905d2961666ecf0fbac4250ea5b47aef259eb5c585"}, @@ -2178,6 +2118,9 @@ importlib-metadata = [ {file = "importlib_metadata-4.12.0-py3-none-any.whl", hash = "sha256:7401a975809ea1fdc658c3aa4f78cc2195a0e019c5cbc4c06122884e9ae80c23"}, {file = "importlib_metadata-4.12.0.tar.gz", hash = "sha256:637245b8bab2b6502fcbc752cc4b7a6f6243bb02b31c5c26156ad103d3d45670"}, ] +idna = [] +imagesize = [] +importlib-metadata = [] iniconfig = [ {file = "iniconfig-1.1.1-py2.py3-none-any.whl", hash = "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3"}, {file = "iniconfig-1.1.1.tar.gz", hash = "sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32"}, @@ -2190,18 +2133,12 @@ jedi = [ {file = "jedi-0.13.3-py2.py3-none-any.whl", hash = "sha256:2c6bcd9545c7d6440951b12b44d373479bf18123a401a52025cf98563fbd826c"}, {file = "jedi-0.13.3.tar.gz", hash = "sha256:2bb0603e3506f708e792c7f4ad8fc2a7a9d9c2d292a358fbbd58da531695595b"}, ] -jeepney = [ - {file = "jeepney-0.8.0-py3-none-any.whl", hash = "sha256:c0a454ad016ca575060802ee4d590dd912e35c122fa04e70306de3d076cce755"}, - {file = "jeepney-0.8.0.tar.gz", hash = "sha256:5efe48d255973902f6badc3ce55e2aa6c5c3b3bc642059ef3a91247bcfcc5806"}, -] +jeepney = [] jinja2 = [ {file = "Jinja2-2.11.3-py2.py3-none-any.whl", hash = "sha256:03e47ad063331dd6a3f04a43eddca8a966a26ba0c5b7207a9a9e4e08f1b29419"}, {file = "Jinja2-2.11.3.tar.gz", hash = "sha256:a6d58433de0ae800347cab1fa3043cebbabe8baa9d29e668f1c768cb87a333c6"}, ] -jinxed = [ - {file = "jinxed-1.2.0-py2.py3-none-any.whl", hash = "sha256:cfc2b2e4e3b4326954d546ba6d6b9a7a796ddcb0aef8d03161d005177eb0d48b"}, - {file = "jinxed-1.2.0.tar.gz", hash = "sha256:032acda92d5c57cd216033cbbd53de731e6ed50deb63eb4781336ca55f72cda5"}, -] +jinxed = [] jsonschema = [ {file = "jsonschema-2.6.0-py2.py3-none-any.whl", hash = "sha256:000e68abd33c972a5248544925a0cae7d1125f9bf6c58280d37546b946769a08"}, {file = "jsonschema-2.6.0.tar.gz", hash = "sha256:6ff5f3180870836cae40f06fa10419f557208175f13ad7bc26caa77beb1f6e02"}, @@ -2253,46 +2190,40 @@ log4mongo = [ {file = "log4mongo-1.7.0.tar.gz", hash = "sha256:dc374617206162a0b14167fbb5feac01dbef587539a235dadba6200362984a68"}, ] markupsafe = [ - {file = 
"MarkupSafe-2.1.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:86b1f75c4e7c2ac2ccdaec2b9022845dbb81880ca318bb7a0a01fbf7813e3812"}, - {file = "MarkupSafe-2.1.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f121a1420d4e173a5d96e47e9a0c0dcff965afdf1626d28de1460815f7c4ee7a"}, - {file = "MarkupSafe-2.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a49907dd8420c5685cfa064a1335b6754b74541bbb3706c259c02ed65b644b3e"}, - {file = "MarkupSafe-2.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10c1bfff05d95783da83491be968e8fe789263689c02724e0c691933c52994f5"}, - {file = "MarkupSafe-2.1.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b7bd98b796e2b6553da7225aeb61f447f80a1ca64f41d83612e6139ca5213aa4"}, - {file = "MarkupSafe-2.1.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b09bf97215625a311f669476f44b8b318b075847b49316d3e28c08e41a7a573f"}, - {file = "MarkupSafe-2.1.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:694deca8d702d5db21ec83983ce0bb4b26a578e71fbdbd4fdcd387daa90e4d5e"}, - {file = "MarkupSafe-2.1.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:efc1913fd2ca4f334418481c7e595c00aad186563bbc1ec76067848c7ca0a933"}, - {file = "MarkupSafe-2.1.1-cp310-cp310-win32.whl", hash = "sha256:4a33dea2b688b3190ee12bd7cfa29d39c9ed176bda40bfa11099a3ce5d3a7ac6"}, - {file = "MarkupSafe-2.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:dda30ba7e87fbbb7eab1ec9f58678558fd9a6b8b853530e176eabd064da81417"}, - {file = "MarkupSafe-2.1.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:671cd1187ed5e62818414afe79ed29da836dde67166a9fac6d435873c44fdd02"}, - {file = "MarkupSafe-2.1.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3799351e2336dc91ea70b034983ee71cf2f9533cdff7c14c90ea126bfd95d65a"}, - {file = "MarkupSafe-2.1.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e72591e9ecd94d7feb70c1cbd7be7b3ebea3f548870aa91e2732960fa4d57a37"}, - {file = "MarkupSafe-2.1.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6fbf47b5d3728c6aea2abb0589b5d30459e369baa772e0f37a0320185e87c980"}, - {file = "MarkupSafe-2.1.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d5ee4f386140395a2c818d149221149c54849dfcfcb9f1debfe07a8b8bd63f9a"}, - {file = "MarkupSafe-2.1.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:bcb3ed405ed3222f9904899563d6fc492ff75cce56cba05e32eff40e6acbeaa3"}, - {file = "MarkupSafe-2.1.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:e1c0b87e09fa55a220f058d1d49d3fb8df88fbfab58558f1198e08c1e1de842a"}, - {file = "MarkupSafe-2.1.1-cp37-cp37m-win32.whl", hash = "sha256:8dc1c72a69aa7e082593c4a203dcf94ddb74bb5c8a731e4e1eb68d031e8498ff"}, - {file = "MarkupSafe-2.1.1-cp37-cp37m-win_amd64.whl", hash = "sha256:97a68e6ada378df82bc9f16b800ab77cbf4b2fada0081794318520138c088e4a"}, - {file = "MarkupSafe-2.1.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:e8c843bbcda3a2f1e3c2ab25913c80a3c5376cd00c6e8c4a86a89a28c8dc5452"}, - {file = "MarkupSafe-2.1.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0212a68688482dc52b2d45013df70d169f542b7394fc744c02a57374a4207003"}, - {file = "MarkupSafe-2.1.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e576a51ad59e4bfaac456023a78f6b5e6e7651dcd383bcc3e18d06f9b55d6d1"}, - {file = "MarkupSafe-2.1.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:4b9fe39a2ccc108a4accc2676e77da025ce383c108593d65cc909add5c3bd601"}, - {file = "MarkupSafe-2.1.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:96e37a3dc86e80bf81758c152fe66dbf60ed5eca3d26305edf01892257049925"}, - {file = "MarkupSafe-2.1.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6d0072fea50feec76a4c418096652f2c3238eaa014b2f94aeb1d56a66b41403f"}, - {file = "MarkupSafe-2.1.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:089cf3dbf0cd6c100f02945abeb18484bd1ee57a079aefd52cffd17fba910b88"}, - {file = "MarkupSafe-2.1.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6a074d34ee7a5ce3effbc526b7083ec9731bb3cbf921bbe1d3005d4d2bdb3a63"}, - {file = "MarkupSafe-2.1.1-cp38-cp38-win32.whl", hash = "sha256:421be9fbf0ffe9ffd7a378aafebbf6f4602d564d34be190fc19a193232fd12b1"}, - {file = "MarkupSafe-2.1.1-cp38-cp38-win_amd64.whl", hash = "sha256:fc7b548b17d238737688817ab67deebb30e8073c95749d55538ed473130ec0c7"}, - {file = "MarkupSafe-2.1.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e04e26803c9c3851c931eac40c695602c6295b8d432cbe78609649ad9bd2da8a"}, - {file = "MarkupSafe-2.1.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b87db4360013327109564f0e591bd2a3b318547bcef31b468a92ee504d07ae4f"}, - {file = "MarkupSafe-2.1.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:99a2a507ed3ac881b975a2976d59f38c19386d128e7a9a18b7df6fff1fd4c1d6"}, - {file = "MarkupSafe-2.1.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:56442863ed2b06d19c37f94d999035e15ee982988920e12a5b4ba29b62ad1f77"}, - {file = "MarkupSafe-2.1.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3ce11ee3f23f79dbd06fb3d63e2f6af7b12db1d46932fe7bd8afa259a5996603"}, - {file = "MarkupSafe-2.1.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:33b74d289bd2f5e527beadcaa3f401e0df0a89927c1559c8566c066fa4248ab7"}, - {file = "MarkupSafe-2.1.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:43093fb83d8343aac0b1baa75516da6092f58f41200907ef92448ecab8825135"}, - {file = "MarkupSafe-2.1.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8e3dcf21f367459434c18e71b2a9532d96547aef8a871872a5bd69a715c15f96"}, - {file = "MarkupSafe-2.1.1-cp39-cp39-win32.whl", hash = "sha256:d4306c36ca495956b6d568d276ac11fdd9c30a36f1b6eb928070dc5360b22e1c"}, - {file = "MarkupSafe-2.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:46d00d6cfecdde84d40e572d63735ef81423ad31184100411e6e3388d405e247"}, - {file = "MarkupSafe-2.1.1.tar.gz", hash = "sha256:7f91197cc9e48f989d12e4e6fbc46495c446636dfc81b9ccf50bb0ec74b91d4b"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:f9081981fe268bd86831e5c75f7de206ef275defcb82bc70740ae6dc507aee51"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:0955295dd5eec6cb6cc2fe1698f4c6d84af2e92de33fbcac4111913cd100a6ff"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:0446679737af14f45767963a1a9ef7620189912317d095f2d9ffa183a4d25d2b"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:f826e31d18b516f653fe296d967d700fddad5901ae07c622bb3705955e1faa94"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:fa130dd50c57d53368c9d59395cb5526eda596d3ffe36666cd81a44d56e48872"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:905fec760bd2fa1388bb5b489ee8ee5f7291d692638ea5f67982d968366bef9f"}, + {file = 
"MarkupSafe-2.0.1-cp36-cp36m-win32.whl", hash = "sha256:6c4ca60fa24e85fe25b912b01e62cb969d69a23a5d5867682dd3e80b5b02581d"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:b2f4bf27480f5e5e8ce285a8c8fd176c0b03e93dcc6646477d4630e83440c6a9"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0717a7390a68be14b8c793ba258e075c6f4ca819f15edfc2a3a027c823718567"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:6557b31b5e2c9ddf0de32a691f2312a32f77cd7681d8af66c2692efdbef84c18"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:49e3ceeabbfb9d66c3aef5af3a60cc43b85c33df25ce03d0031a608b0a8b2e3f"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:d7f9850398e85aba693bb640262d3611788b1f29a79f0c93c565694658f4071f"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:6a7fae0dd14cf60ad5ff42baa2e95727c3d81ded453457771d02b7d2b3f9c0c2"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:b7f2d075102dc8c794cbde1947378051c4e5180d52d276987b8d28a3bd58c17d"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-win32.whl", hash = "sha256:a30e67a65b53ea0a5e62fe23682cfe22712e01f453b95233b25502f7c61cb415"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:611d1ad9a4288cf3e3c16014564df047fe08410e628f89805e475368bd304914"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:be98f628055368795d818ebf93da628541e10b75b41c559fdf36d104c5787066"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux1_i686.whl", hash = "sha256:1d609f577dc6e1aa17d746f8bd3c31aa4d258f4070d61b2aa5c4166c1539de35"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7d91275b0245b1da4d4cfa07e0faedd5b0812efc15b702576d103293e252af1b"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:01a9b8ea66f1658938f65b93a85ebe8bc016e6769611be228d797c9d998dd298"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:47ab1e7b91c098ab893b828deafa1203de86d0bc6ab587b160f78fe6c4011f75"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:97383d78eb34da7e1fa37dd273c20ad4320929af65d156e35a5e2d89566d9dfb"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-win32.whl", hash = "sha256:023cb26ec21ece8dc3907c0e8320058b2e0cb3c55cf9564da612bc325bed5e64"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:984d76483eb32f1bcb536dc27e4ad56bba4baa70be32fa87152832cdd9db0833"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:2ef54abee730b502252bcdf31b10dacb0a416229b72c18b19e24a4509f273d26"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3c112550557578c26af18a1ccc9e090bfe03832ae994343cfdacd287db6a6ae7"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux1_i686.whl", hash = "sha256:53edb4da6925ad13c07b6d26c2a852bd81e364f95301c66e930ab2aef5b5ddd8"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:f5653a225f31e113b152e56f154ccbe59eeb1c7487b39b9d9f9cdb58e6c79dc5"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:4efca8f86c54b22348a5467704e3fec767b2db12fc39c6d963168ab1d3fc9135"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:ab3ef638ace319fa26553db0624c4699e31a28bb2a835c5faca8f8acf6a5a902"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux2014_aarch64.whl", hash = 
"sha256:f8ba0e8349a38d3001fae7eadded3f6606f0da5d748ee53cc1dab1d6527b9509"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-win32.whl", hash = "sha256:10f82115e21dc0dfec9ab5c0223652f7197feb168c940f3ef61563fc2d6beb74"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:693ce3f9e70a6cf7d2fb9e6c9d8b204b6b39897a2c4a1aa65728d5ac97dcc1d8"}, + {file = "MarkupSafe-2.0.1.tar.gz", hash = "sha256:594c67807fb16238b30c44bdf74f36c02cdf22d1c8cda91ef8a0ed8dabf5620a"}, ] mccabe = [ {file = "mccabe-0.6.1-py2.py3-none-any.whl", hash = "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42"}, @@ -2364,10 +2295,7 @@ packaging = [ {file = "packaging-21.3-py3-none-any.whl", hash = "sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522"}, {file = "packaging-21.3.tar.gz", hash = "sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb"}, ] -paramiko = [ - {file = "paramiko-2.11.0-py2.py3-none-any.whl", hash = "sha256:655f25dc8baf763277b933dfcea101d636581df8d6b9774d1fb653426b72c270"}, - {file = "paramiko-2.11.0.tar.gz", hash = "sha256:003e6bee7c034c21fbb051bf83dc0a9ee4106204dd3c53054c71452cc4ec3938"}, -] +paramiko = [] parso = [ {file = "parso-0.8.3-py2.py3-none-any.whl", hash = "sha256:c001d4636cd3aecdaf33cbb40aebb59b094be2a74c556778ef5576c175e19e75"}, {file = "parso-0.8.3.tar.gz", hash = "sha256:8c07be290bb59f03588915921e29e8a50002acaf2cdc5fa0e0114f91709fafa0"}, @@ -2376,70 +2304,8 @@ pathlib2 = [ {file = "pathlib2-2.3.7.post1-py2.py3-none-any.whl", hash = "sha256:5266a0fd000452f1b3467d782f079a4343c63aaa119221fbdc4e39577489ca5b"}, {file = "pathlib2-2.3.7.post1.tar.gz", hash = "sha256:9fe0edad898b83c0c3e199c842b27ed216645d2e177757b2dd67384d4113c641"}, ] -pillow = [ - {file = "Pillow-9.2.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:a9c9bc489f8ab30906d7a85afac4b4944a572a7432e00698a7239f44a44e6efb"}, - {file = "Pillow-9.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:510cef4a3f401c246cfd8227b300828715dd055463cdca6176c2e4036df8bd4f"}, - {file = "Pillow-9.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7888310f6214f19ab2b6df90f3f06afa3df7ef7355fc025e78a3044737fab1f5"}, - {file = "Pillow-9.2.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:831e648102c82f152e14c1a0938689dbb22480c548c8d4b8b248b3e50967b88c"}, - {file = "Pillow-9.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1cc1d2451e8a3b4bfdb9caf745b58e6c7a77d2e469159b0d527a4554d73694d1"}, - {file = "Pillow-9.2.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:136659638f61a251e8ed3b331fc6ccd124590eeff539de57c5f80ef3a9594e58"}, - {file = "Pillow-9.2.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:6e8c66f70fb539301e064f6478d7453e820d8a2c631da948a23384865cd95544"}, - {file = "Pillow-9.2.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:37ff6b522a26d0538b753f0b4e8e164fdada12db6c6f00f62145d732d8a3152e"}, - {file = "Pillow-9.2.0-cp310-cp310-win32.whl", hash = "sha256:c79698d4cd9318d9481d89a77e2d3fcaeff5486be641e60a4b49f3d2ecca4e28"}, - {file = "Pillow-9.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:254164c57bab4b459f14c64e93df11eff5ded575192c294a0c49270f22c5d93d"}, - {file = "Pillow-9.2.0-cp311-cp311-macosx_10_10_universal2.whl", hash = "sha256:408673ed75594933714482501fe97e055a42996087eeca7e5d06e33218d05aa8"}, - {file = "Pillow-9.2.0-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:727dd1389bc5cb9827cbd1f9d40d2c2a1a0c9b32dd2261db522d22a604a6eec9"}, - {file = 
"Pillow-9.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50dff9cc21826d2977ef2d2a205504034e3a4563ca6f5db739b0d1026658e004"}, - {file = "Pillow-9.2.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cb6259196a589123d755380b65127ddc60f4c64b21fc3bb46ce3a6ea663659b0"}, - {file = "Pillow-9.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b0554af24df2bf96618dac71ddada02420f946be943b181108cac55a7a2dcd4"}, - {file = "Pillow-9.2.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:15928f824870535c85dbf949c09d6ae7d3d6ac2d6efec80f3227f73eefba741c"}, - {file = "Pillow-9.2.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:bdd0de2d64688ecae88dd8935012c4a72681e5df632af903a1dca8c5e7aa871a"}, - {file = "Pillow-9.2.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d5b87da55a08acb586bad5c3aa3b86505f559b84f39035b233d5bf844b0834b1"}, - {file = "Pillow-9.2.0-cp311-cp311-win32.whl", hash = "sha256:b6d5e92df2b77665e07ddb2e4dbd6d644b78e4c0d2e9272a852627cdba0d75cf"}, - {file = "Pillow-9.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:6bf088c1ce160f50ea40764f825ec9b72ed9da25346216b91361eef8ad1b8f8c"}, - {file = "Pillow-9.2.0-cp37-cp37m-macosx_10_10_x86_64.whl", hash = "sha256:2c58b24e3a63efd22554c676d81b0e57f80e0a7d3a5874a7e14ce90ec40d3069"}, - {file = "Pillow-9.2.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eef7592281f7c174d3d6cbfbb7ee5984a671fcd77e3fc78e973d492e9bf0eb3f"}, - {file = "Pillow-9.2.0-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dcd7b9c7139dc8258d164b55696ecd16c04607f1cc33ba7af86613881ffe4ac8"}, - {file = "Pillow-9.2.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a138441e95562b3c078746a22f8fca8ff1c22c014f856278bdbdd89ca36cff1b"}, - {file = "Pillow-9.2.0-cp37-cp37m-manylinux_2_28_aarch64.whl", hash = "sha256:93689632949aff41199090eff5474f3990b6823404e45d66a5d44304e9cdc467"}, - {file = "Pillow-9.2.0-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:f3fac744f9b540148fa7715a435d2283b71f68bfb6d4aae24482a890aed18b59"}, - {file = "Pillow-9.2.0-cp37-cp37m-win32.whl", hash = "sha256:fa768eff5f9f958270b081bb33581b4b569faabf8774726b283edb06617101dc"}, - {file = "Pillow-9.2.0-cp37-cp37m-win_amd64.whl", hash = "sha256:69bd1a15d7ba3694631e00df8de65a8cb031911ca11f44929c97fe05eb9b6c1d"}, - {file = "Pillow-9.2.0-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:030e3460861488e249731c3e7ab59b07c7853838ff3b8e16aac9561bb345da14"}, - {file = "Pillow-9.2.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:74a04183e6e64930b667d321524e3c5361094bb4af9083db5c301db64cd341f3"}, - {file = "Pillow-9.2.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2d33a11f601213dcd5718109c09a52c2a1c893e7461f0be2d6febc2879ec2402"}, - {file = "Pillow-9.2.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1fd6f5e3c0e4697fa7eb45b6e93996299f3feee73a3175fa451f49a74d092b9f"}, - {file = "Pillow-9.2.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a647c0d4478b995c5e54615a2e5360ccedd2f85e70ab57fbe817ca613d5e63b8"}, - {file = "Pillow-9.2.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:4134d3f1ba5f15027ff5c04296f13328fecd46921424084516bdb1b2548e66ff"}, - {file = "Pillow-9.2.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:bc431b065722a5ad1dfb4df354fb9333b7a582a5ee39a90e6ffff688d72f27a1"}, - {file = "Pillow-9.2.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = 
"sha256:1536ad017a9f789430fb6b8be8bf99d2f214c76502becc196c6f2d9a75b01b76"}, - {file = "Pillow-9.2.0-cp38-cp38-win32.whl", hash = "sha256:2ad0d4df0f5ef2247e27fc790d5c9b5a0af8ade9ba340db4a73bb1a4a3e5fb4f"}, - {file = "Pillow-9.2.0-cp38-cp38-win_amd64.whl", hash = "sha256:ec52c351b35ca269cb1f8069d610fc45c5bd38c3e91f9ab4cbbf0aebc136d9c8"}, - {file = "Pillow-9.2.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:0ed2c4ef2451de908c90436d6e8092e13a43992f1860275b4d8082667fbb2ffc"}, - {file = "Pillow-9.2.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4ad2f835e0ad81d1689f1b7e3fbac7b01bb8777d5a985c8962bedee0cc6d43da"}, - {file = "Pillow-9.2.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ea98f633d45f7e815db648fd7ff0f19e328302ac36427343e4432c84432e7ff4"}, - {file = "Pillow-9.2.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7761afe0126d046974a01e030ae7529ed0ca6a196de3ec6937c11df0df1bc91c"}, - {file = "Pillow-9.2.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9a54614049a18a2d6fe156e68e188da02a046a4a93cf24f373bffd977e943421"}, - {file = "Pillow-9.2.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:5aed7dde98403cd91d86a1115c78d8145c83078e864c1de1064f52e6feb61b20"}, - {file = "Pillow-9.2.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:13b725463f32df1bfeacbf3dd197fb358ae8ebcd8c5548faa75126ea425ccb60"}, - {file = "Pillow-9.2.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:808add66ea764ed97d44dda1ac4f2cfec4c1867d9efb16a33d158be79f32b8a4"}, - {file = "Pillow-9.2.0-cp39-cp39-win32.whl", hash = "sha256:337a74fd2f291c607d220c793a8135273c4c2ab001b03e601c36766005f36885"}, - {file = "Pillow-9.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:fac2d65901fb0fdf20363fbd345c01958a742f2dc62a8dd4495af66e3ff502a4"}, - {file = "Pillow-9.2.0-pp37-pypy37_pp73-macosx_10_10_x86_64.whl", hash = "sha256:ad2277b185ebce47a63f4dc6302e30f05762b688f8dc3de55dbae4651872cdf3"}, - {file = "Pillow-9.2.0-pp37-pypy37_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7c7b502bc34f6e32ba022b4a209638f9e097d7a9098104ae420eb8186217ebbb"}, - {file = "Pillow-9.2.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d1f14f5f691f55e1b47f824ca4fdcb4b19b4323fe43cc7bb105988cad7496be"}, - {file = "Pillow-9.2.0-pp37-pypy37_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:dfe4c1fedfde4e2fbc009d5ad420647f7730d719786388b7de0999bf32c0d9fd"}, - {file = "Pillow-9.2.0-pp38-pypy38_pp73-macosx_10_10_x86_64.whl", hash = "sha256:f07f1f00e22b231dd3d9b9208692042e29792d6bd4f6639415d2f23158a80013"}, - {file = "Pillow-9.2.0-pp38-pypy38_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1802f34298f5ba11d55e5bb09c31997dc0c6aed919658dfdf0198a2fe75d5490"}, - {file = "Pillow-9.2.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17d4cafe22f050b46d983b71c707162d63d796a1235cdf8b9d7a112e97b15bac"}, - {file = "Pillow-9.2.0-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:96b5e6874431df16aee0c1ba237574cb6dff1dcb173798faa6a9d8b399a05d0e"}, - {file = "Pillow-9.2.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:0030fdbd926fb85844b8b92e2f9449ba89607231d3dd597a21ae72dc7fe26927"}, - {file = "Pillow-9.2.0.tar.gz", hash = "sha256:75e636fd3e0fb872693f23ccb8a5ff2cd578801251f3a4f6854c6a5d437d3c04"}, -] -platformdirs = [ - {file = "platformdirs-2.5.2-py3-none-any.whl", hash = "sha256:027d8e83a2d7de06bbac4e5ef7e023c02b863d7ea5d079477e722bb41ab25788"}, - {file = "platformdirs-2.5.2.tar.gz", 
hash = "sha256:58c8abb07dcb441e6ee4b11d8df0ac856038f944ab98b7be6b27b2a3c7feef19"}, -] +pillow = [] +platformdirs = [] pluggy = [ {file = "pluggy-1.0.0-py2.py3-none-any.whl", hash = "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3"}, {file = "pluggy-1.0.0.tar.gz", hash = "sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159"}, @@ -2452,22 +2318,7 @@ prefixed = [ {file = "prefixed-0.3.2-py2.py3-none-any.whl", hash = "sha256:5e107306462d63f2f03c529dbf11b0026fdfec621a9a008ca639d71de22995c3"}, {file = "prefixed-0.3.2.tar.gz", hash = "sha256:ca48277ba5fa8346dd4b760847da930c7b84416387c39e93affef086add2c029"}, ] -protobuf = [ - {file = "protobuf-4.21.2-cp310-abi3-win32.whl", hash = "sha256:d622dc75e289e8b3031dd8b4e87df508f11a6b3d86a49fb50256af7ce030d35b"}, - {file = "protobuf-4.21.2-cp310-abi3-win_amd64.whl", hash = "sha256:4758b9c22ad0486639a68cea58d38571f233019a73212d78476ec648f68a49a3"}, - {file = "protobuf-4.21.2-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:e3d3df3292ab4bae85213b9ebef566b5aedb45f97425a92fac5b2e431d31e71c"}, - {file = "protobuf-4.21.2-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:29eaf8e9db33bc3bae14576ad61370aa2b64ea5d6e6cd705042692e5e0404b10"}, - {file = "protobuf-4.21.2-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:ef0768a609a02b2b412fa0f59f1242f1597e9bb15188d043f3fde09115ca6c69"}, - {file = "protobuf-4.21.2-cp37-cp37m-win32.whl", hash = "sha256:5f8c7488e74024fa12b46aab4258f707d7d6e94c8d322d7c45cc13770f66ab59"}, - {file = "protobuf-4.21.2-cp37-cp37m-win_amd64.whl", hash = "sha256:57a593e40257ab4f164fe6e171651b1386c98f8ec5f5a8643642889c50d4f3c4"}, - {file = "protobuf-4.21.2-cp38-cp38-win32.whl", hash = "sha256:b82ac05b0651a4d2b9d56f5aeef3d711f5858eb4b71c13d77553739e5930a74a"}, - {file = "protobuf-4.21.2-cp38-cp38-win_amd64.whl", hash = "sha256:f2f43ae8dff452aee3026b59ea0a09245ab2529a55a0984992e76bcf848610e1"}, - {file = "protobuf-4.21.2-cp39-cp39-win32.whl", hash = "sha256:7b2dcca25d88ec77358eed3d031c8260b5bf3023fff03a31c9584591c5910833"}, - {file = "protobuf-4.21.2-cp39-cp39-win_amd64.whl", hash = "sha256:095fda15fe04a79c9f0edab09b424be46dd057b15986d235b84c8cea91659df7"}, - {file = "protobuf-4.21.2-py2.py3-none-any.whl", hash = "sha256:9b42afb67e19010cdda057e439574ccd944902ea14b0d52ba0bfba2aad50858d"}, - {file = "protobuf-4.21.2-py3-none-any.whl", hash = "sha256:853708afc3a7eed4df28a8d4bd4812f829f8d736c104dd8d584ccff27969e311"}, - {file = "protobuf-4.21.2.tar.gz", hash = "sha256:863f65e137d9de4a76cac39ae731a19bea1c30997f512ecf0dc9348112313401"}, -] +protobuf = [] py = [ {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"}, {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"}, @@ -2526,14 +2377,8 @@ pyflakes = [ {file = "pyflakes-2.3.1-py2.py3-none-any.whl", hash = "sha256:7893783d01b8a89811dd72d7dfd4d84ff098e5eed95cfa8905b22bbffe52efc3"}, {file = "pyflakes-2.3.1.tar.gz", hash = "sha256:f5bc8ecabc05bb9d291eb5203d6810b49040f6ff446a756326104746cc00c1db"}, ] -pygments = [ - {file = "Pygments-2.12.0-py3-none-any.whl", hash = "sha256:dc9c10fb40944260f6ed4c688ece0cd2048414940f1cea51b8b226318411c519"}, - {file = "Pygments-2.12.0.tar.gz", hash = "sha256:5eb116118f9612ff1ee89ac96437bb6b49e8f04d8a13b514ba26f620208e26eb"}, -] -pylint = [ - {file = "pylint-2.14.5-py3-none-any.whl", hash = "sha256:fabe30000de7d07636d2e82c9a518ad5ad7908590fe135ace169b44839c15f90"}, - {file = "pylint-2.14.5.tar.gz", 
hash = "sha256:487ce2192eee48211269a0e976421f334cf94de1806ca9d0a99449adcdf0285e"}, -] +pygments = [] +pylint = [] pymongo = [ {file = "pymongo-3.12.3-cp27-cp27m-macosx_10_14_intel.whl", hash = "sha256:c164eda0be9048f83c24b9b2656900041e069ddf72de81c17d874d0c32f6079f"}, {file = "pymongo-3.12.3-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:a055d29f1302892a9389a382bed10a3f77708bcf3e49bfb76f7712fa5f391cc6"}, @@ -2660,42 +2505,10 @@ pynput = [ {file = "pynput-1.7.6-py3.9.egg", hash = "sha256:264429fbe676e98e9050ad26a7017453bdd08768adb25cafb918347cf9f1eb4a"}, {file = "pynput-1.7.6.tar.gz", hash = "sha256:3a5726546da54116b687785d38b1db56997ce1d28e53e8d22fc656d8b92e533c"}, ] -pyobjc-core = [ - {file = "pyobjc-core-8.5.tar.gz", hash = "sha256:704c275439856c0d1287469f0d589a7d808d48b754a93d9ce5415d4eaf06d576"}, - {file = "pyobjc_core-8.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0c234143b48334443f5adcf26e668945a6d47bc1fa6223e80918c6c735a029d9"}, - {file = "pyobjc_core-8.5-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:1486ee533f0d76f666804ce89723ada4db56bfde55e56151ba512d3f849857f8"}, - {file = "pyobjc_core-8.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:412de06dfa728301c04b3e46fd7453320a8ae8b862e85236e547cd797a73b490"}, - {file = "pyobjc_core-8.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0b3e09cccb1be574a82cc9f929ae27fc4283eccc75496cb5d51534caa6bb83a3"}, - {file = "pyobjc_core-8.5-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:eeafe21f879666ab7f57efcc6b007c9f5f8733d367b7e380c925203ed83f000d"}, - {file = "pyobjc_core-8.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c0071686976d7ea8c14690950e504a13cb22b4ebb2bc7b5ec47c1c1c0f6eff41"}, -] -pyobjc-framework-applicationservices = [ - {file = "pyobjc-framework-ApplicationServices-8.5.tar.gz", hash = "sha256:fa3015ef8e3add90af3447d7fdcc7f8dd083cc2a1d58f99a569480a2df10d2b1"}, - {file = "pyobjc_framework_ApplicationServices-8.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:436b16ebe448a829a8312e10208eec81a2adcae1fff674dbcc3262e1bd76e0ca"}, - {file = "pyobjc_framework_ApplicationServices-8.5-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:408958d14aa7fcf46f2163754c211078bc63be1368934d86188202914dce077d"}, - {file = "pyobjc_framework_ApplicationServices-8.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:1d6cd4ce192859a22e208da4d7177a1c3ceb1ef2f64c339fd881102b1210cadd"}, - {file = "pyobjc_framework_ApplicationServices-8.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0251d092adb1d2d116fd9f147ceef0e53b158a46c21245131c40b9d7b786d0db"}, - {file = "pyobjc_framework_ApplicationServices-8.5-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:9742e69fe6d4545d0e02b0ad0a7a2432bc9944569ee07d6e90ffa5ef614df9f7"}, - {file = "pyobjc_framework_ApplicationServices-8.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:16f5677c14ea903c6aaca1dd121521825c39e816cae696d6ae32c0b287252ab2"}, -] -pyobjc-framework-cocoa = [ - {file = "pyobjc-framework-Cocoa-8.5.tar.gz", hash = "sha256:569bd3a020f64b536fb2d1c085b37553e50558c9f907e08b73ffc16ae68e1861"}, - {file = "pyobjc_framework_Cocoa-8.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7a7c160416696bf6035dfcdf0e603aaa52858d6afcddfcc5ab41733619ac2529"}, - {file = "pyobjc_framework_Cocoa-8.5-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:6ceba444282030be8596b812260e8d28b671254a51052ad778d32da6e17db847"}, - {file = "pyobjc_framework_Cocoa-8.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = 
"sha256:f46b2b161b8dd40c7b9e00bc69636c3e6480b2704a69aee22ee0154befbe163a"}, - {file = "pyobjc_framework_Cocoa-8.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b31d425aee8698cbf62b187338f5ca59427fa4dca2153a73866f7cb410713119"}, - {file = "pyobjc_framework_Cocoa-8.5-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:898359ac1f76eedec8aa156847682378a8950824421c40edb89391286e607dc4"}, - {file = "pyobjc_framework_Cocoa-8.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:baa2947f76b119a3360973d74d57d6dada87ac527bab9a88f31596af392f123c"}, -] -pyobjc-framework-quartz = [ - {file = "pyobjc-framework-Quartz-8.5.tar.gz", hash = "sha256:d2bc5467a792ddc04814f12a1e9c2fcaf699a1c3ad3d4264cfdce6b9c7b10624"}, - {file = "pyobjc_framework_Quartz-8.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e9f0fb663f7872c9de94169031ac42b91ad01bd4cad49a9f1a0164be8f028426"}, - {file = "pyobjc_framework_Quartz-8.5-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:567eec91287cfe9a1b6433717192c585935de8f3daa28d82ce72fdd6c7ac00f6"}, - {file = "pyobjc_framework_Quartz-8.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:9f910ab41a712ffc7a8c3e3716a2d6f39ea4419004b26a2fd2d2f740ff5c262c"}, - {file = "pyobjc_framework_Quartz-8.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:29d07066781628278bf0e5278abcfc96ef6724c66c5629a0b4c214d319a82e55"}, - {file = "pyobjc_framework_Quartz-8.5-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:72abcde1a3d72be11f2c881c9b9872044c8f2de86d2047b67fe771713638b107"}, - {file = "pyobjc_framework_Quartz-8.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:8809b9a2df2f461697bdb45b6d1b5a4f881f88f09450e3990858e64e3e26c530"}, -] +pyobjc-core = [] +pyobjc-framework-applicationservices = [] +pyobjc-framework-cocoa = [] +pyobjc-framework-quartz = [] pyparsing = [ {file = "pyparsing-2.4.7-py2.py3-none-any.whl", hash = "sha256:ef9d7589ef3c200abe66653d3f1ab1033c3c419ae9b9bdb1240a85b024efc88b"}, {file = "pyparsing-2.4.7.tar.gz", hash = "sha256:c203ec8783bf771a155b207279b9bccb8dea02d8f0c9e5f8ead507bc3246ecc1"}, @@ -2719,14 +2532,8 @@ python-dateutil = [ {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, ] -python-engineio = [ - {file = "python-engineio-3.14.2.tar.gz", hash = "sha256:eab4553f2804c1ce97054c8b22cf0d5a9ab23128075248b97e1a5b2f29553085"}, - {file = "python_engineio-3.14.2-py2.py3-none-any.whl", hash = "sha256:5a9e6086d192463b04a1428ff1f85b6ba631bbb19d453b144ffc04f530542b84"}, -] -python-socketio = [ - {file = "python-socketio-4.6.1.tar.gz", hash = "sha256:cd1f5aa492c1eb2be77838e837a495f117e17f686029ebc03d62c09e33f4fa10"}, - {file = "python_socketio-4.6.1-py2.py3-none-any.whl", hash = "sha256:5a21da53fdbdc6bb6c8071f40e13d100e0b279ad997681c2492478e06f370523"}, -] +python-engineio = [] +python-socketio = [] python-xlib = [ {file = "python-xlib-0.31.tar.gz", hash = "sha256:74d83a081f532bc07f6d7afcd6416ec38403d68f68b9b9dc9e1f28fbf2d799e9"}, {file = "python_xlib-0.31-py2.py3-none-any.whl", hash = "sha256:1ec6ce0de73d9e6592ead666779a5732b384e5b8fb1f1886bd0a81cafa477759"}, @@ -2734,10 +2541,7 @@ python-xlib = [ python3-xlib = [ {file = "python3-xlib-0.15.tar.gz", hash = "sha256:dc4245f3ae4aa5949c1d112ee4723901ade37a96721ba9645f2bfa56e5b383f8"}, ] -pytz = [ - {file = "pytz-2022.1-py2.py3-none-any.whl", hash = 
"sha256:e68985985296d9a66a881eb3193b0906246245294a881e7c8afe623866ac6a5c"}, - {file = "pytz-2022.1.tar.gz", hash = "sha256:1e760e2fe6a8163bc0b3d9a19c4f84342afa0a2affebfaa84b01b978a02ecaa7"}, -] +pytz = [] pywin32 = [ {file = "pywin32-301-cp35-cp35m-win32.whl", hash = "sha256:93367c96e3a76dfe5003d8291ae16454ca7d84bb24d721e0b74a07610b7be4a7"}, {file = "pywin32-301-cp35-cp35m-win_amd64.whl", hash = "sha256:9635df6998a70282bd36e7ac2a5cef9ead1627b0a63b17c731312c7a0daebb72"}, @@ -2754,10 +2558,7 @@ pywin32-ctypes = [ {file = "pywin32-ctypes-0.2.0.tar.gz", hash = "sha256:24ffc3b341d457d48e8922352130cf2644024a4ff09762a2261fd34c36ee5942"}, {file = "pywin32_ctypes-0.2.0-py2.py3-none-any.whl", hash = "sha256:9dc2d991b3479cc2df15930958b674a48a227d5361d413827a4cfd0b5876fc98"}, ] -"qt.py" = [ - {file = "Qt.py-1.3.7-py2.py3-none-any.whl", hash = "sha256:150099d1c6f64c9621a2c9d79d45102ec781c30ee30ee69fc082c6e9be7324fe"}, - {file = "Qt.py-1.3.7.tar.gz", hash = "sha256:803c7bdf4d6230f9a466be19d55934a173eabb61406d21cb91e80c2a3f773b1f"}, -] +"qt.py" = [] qtawesome = [ {file = "QtAwesome-0.7.3-py2.py3-none-any.whl", hash = "sha256:ddf4530b4af71cec13b24b88a4cdb56ec85b1e44c43c42d0698804c7137b09b0"}, {file = "QtAwesome-0.7.3.tar.gz", hash = "sha256:b98b9038d19190e83ab26d91c4d8fc3a36591ee2bc7f5016d4438b8240d097bd"}, @@ -2770,22 +2571,14 @@ recommonmark = [ {file = "recommonmark-0.7.1-py2.py3-none-any.whl", hash = "sha256:1b1db69af0231efce3fa21b94ff627ea33dee7079a01dd0a7f8482c3da148b3f"}, {file = "recommonmark-0.7.1.tar.gz", hash = "sha256:bdb4db649f2222dcd8d2d844f0006b958d627f732415d399791ee436a3686d67"}, ] -requests = [ - {file = "requests-2.27.1-py2.py3-none-any.whl", hash = "sha256:f22fa1e554c9ddfd16e6e41ac79759e17be9e492b3587efa038054674760e72d"}, - {file = "requests-2.27.1.tar.gz", hash = "sha256:68d7c56fd5a8999887728ef304a6d12edc7be74f1cfa47714fc8b414525c9a61"}, -] -rsa = [ - {file = "rsa-4.9-py3-none-any.whl", hash = "sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7"}, - {file = "rsa-4.9.tar.gz", hash = "sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21"}, -] -secretstorage = [ - {file = "SecretStorage-3.3.2-py3-none-any.whl", hash = "sha256:755dc845b6ad76dcbcbc07ea3da75ae54bb1ea529eb72d15f83d26499a5df319"}, - {file = "SecretStorage-3.3.2.tar.gz", hash = "sha256:0a8eb9645b320881c222e827c26f4cfcf55363e8b374a021981ef886657a912f"}, -] +requests = [] +rsa = [] +secretstorage = [] semver = [ {file = "semver-2.13.0-py2.py3-none-any.whl", hash = "sha256:ced8b23dceb22134307c1b8abfa523da14198793d9787ac838e70e29e77458d4"}, {file = "semver-2.13.0.tar.gz", hash = "sha256:fa0fe2722ee1c3f57eac478820c3a5ae2f624af8264cbdf9000c980ff7f75e3f"}, ] +shotgun-api3 = [] six = [ {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, @@ -2886,72 +2679,7 @@ websocket-client = [ {file = "websocket-client-0.59.0.tar.gz", hash = "sha256:d376bd60eace9d437ab6d7ee16f4ab4e821c9dae591e1b783c58ebd8aaf80c5c"}, {file = "websocket_client-0.59.0-py2.py3-none-any.whl", hash = "sha256:2e50d26ca593f70aba7b13a489435ef88b8fc3b5c5643c1ce8808ff9b40f0b32"}, ] -wrapt = [ - {file = "wrapt-1.14.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:1b376b3f4896e7930f1f772ac4b064ac12598d1c38d04907e696cc4d794b43d3"}, - {file = "wrapt-1.14.1-cp27-cp27m-manylinux1_i686.whl", hash = 
"sha256:903500616422a40a98a5a3c4ff4ed9d0066f3b4c951fa286018ecdf0750194ef"}, - {file = "wrapt-1.14.1-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:5a9a0d155deafd9448baff28c08e150d9b24ff010e899311ddd63c45c2445e28"}, - {file = "wrapt-1.14.1-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:ddaea91abf8b0d13443f6dac52e89051a5063c7d014710dcb4d4abb2ff811a59"}, - {file = "wrapt-1.14.1-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:36f582d0c6bc99d5f39cd3ac2a9062e57f3cf606ade29a0a0d6b323462f4dd87"}, - {file = "wrapt-1.14.1-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:7ef58fb89674095bfc57c4069e95d7a31cfdc0939e2a579882ac7d55aadfd2a1"}, - {file = "wrapt-1.14.1-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:e2f83e18fe2f4c9e7db597e988f72712c0c3676d337d8b101f6758107c42425b"}, - {file = "wrapt-1.14.1-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:ee2b1b1769f6707a8a445162ea16dddf74285c3964f605877a20e38545c3c462"}, - {file = "wrapt-1.14.1-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:833b58d5d0b7e5b9832869f039203389ac7cbf01765639c7309fd50ef619e0b1"}, - {file = "wrapt-1.14.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:80bb5c256f1415f747011dc3604b59bc1f91c6e7150bd7db03b19170ee06b320"}, - {file = "wrapt-1.14.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:07f7a7d0f388028b2df1d916e94bbb40624c59b48ecc6cbc232546706fac74c2"}, - {file = "wrapt-1.14.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:02b41b633c6261feff8ddd8d11c711df6842aba629fdd3da10249a53211a72c4"}, - {file = "wrapt-1.14.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2fe803deacd09a233e4762a1adcea5db5d31e6be577a43352936179d14d90069"}, - {file = "wrapt-1.14.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:257fd78c513e0fb5cdbe058c27a0624c9884e735bbd131935fd49e9fe719d310"}, - {file = "wrapt-1.14.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:4fcc4649dc762cddacd193e6b55bc02edca674067f5f98166d7713b193932b7f"}, - {file = "wrapt-1.14.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:11871514607b15cfeb87c547a49bca19fde402f32e2b1c24a632506c0a756656"}, - {file = "wrapt-1.14.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8ad85f7f4e20964db4daadcab70b47ab05c7c1cf2a7c1e51087bfaa83831854c"}, - {file = "wrapt-1.14.1-cp310-cp310-win32.whl", hash = "sha256:a9a52172be0b5aae932bef82a79ec0a0ce87288c7d132946d645eba03f0ad8a8"}, - {file = "wrapt-1.14.1-cp310-cp310-win_amd64.whl", hash = "sha256:6d323e1554b3d22cfc03cd3243b5bb815a51f5249fdcbb86fda4bf62bab9e164"}, - {file = "wrapt-1.14.1-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:43ca3bbbe97af00f49efb06e352eae40434ca9d915906f77def219b88e85d907"}, - {file = "wrapt-1.14.1-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:6b1a564e6cb69922c7fe3a678b9f9a3c54e72b469875aa8018f18b4d1dd1adf3"}, - {file = "wrapt-1.14.1-cp35-cp35m-manylinux2010_i686.whl", hash = "sha256:00b6d4ea20a906c0ca56d84f93065b398ab74b927a7a3dbd470f6fc503f95dc3"}, - {file = "wrapt-1.14.1-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:a85d2b46be66a71bedde836d9e41859879cc54a2a04fad1191eb50c2066f6e9d"}, - {file = "wrapt-1.14.1-cp35-cp35m-win32.whl", hash = "sha256:dbcda74c67263139358f4d188ae5faae95c30929281bc6866d00573783c422b7"}, - {file = "wrapt-1.14.1-cp35-cp35m-win_amd64.whl", hash = "sha256:b21bb4c09ffabfa0e85e3a6b623e19b80e7acd709b9f91452b8297ace2a8ab00"}, - {file = "wrapt-1.14.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = 
"sha256:9e0fd32e0148dd5dea6af5fee42beb949098564cc23211a88d799e434255a1f4"}, - {file = "wrapt-1.14.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9736af4641846491aedb3c3f56b9bc5568d92b0692303b5a305301a95dfd38b1"}, - {file = "wrapt-1.14.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5b02d65b9ccf0ef6c34cba6cf5bf2aab1bb2f49c6090bafeecc9cd81ad4ea1c1"}, - {file = "wrapt-1.14.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21ac0156c4b089b330b7666db40feee30a5d52634cc4560e1905d6529a3897ff"}, - {file = "wrapt-1.14.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:9f3e6f9e05148ff90002b884fbc2a86bd303ae847e472f44ecc06c2cd2fcdb2d"}, - {file = "wrapt-1.14.1-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:6e743de5e9c3d1b7185870f480587b75b1cb604832e380d64f9504a0535912d1"}, - {file = "wrapt-1.14.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:d79d7d5dc8a32b7093e81e97dad755127ff77bcc899e845f41bf71747af0c569"}, - {file = "wrapt-1.14.1-cp36-cp36m-win32.whl", hash = "sha256:81b19725065dcb43df02b37e03278c011a09e49757287dca60c5aecdd5a0b8ed"}, - {file = "wrapt-1.14.1-cp36-cp36m-win_amd64.whl", hash = "sha256:b014c23646a467558be7da3d6b9fa409b2c567d2110599b7cf9a0c5992b3b471"}, - {file = "wrapt-1.14.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:88bd7b6bd70a5b6803c1abf6bca012f7ed963e58c68d76ee20b9d751c74a3248"}, - {file = "wrapt-1.14.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b5901a312f4d14c59918c221323068fad0540e34324925c8475263841dbdfe68"}, - {file = "wrapt-1.14.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d77c85fedff92cf788face9bfa3ebaa364448ebb1d765302e9af11bf449ca36d"}, - {file = "wrapt-1.14.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d649d616e5c6a678b26d15ece345354f7c2286acd6db868e65fcc5ff7c24a77"}, - {file = "wrapt-1.14.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7d2872609603cb35ca513d7404a94d6d608fc13211563571117046c9d2bcc3d7"}, - {file = "wrapt-1.14.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:ee6acae74a2b91865910eef5e7de37dc6895ad96fa23603d1d27ea69df545015"}, - {file = "wrapt-1.14.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:2b39d38039a1fdad98c87279b48bc5dce2c0ca0d73483b12cb72aa9609278e8a"}, - {file = "wrapt-1.14.1-cp37-cp37m-win32.whl", hash = "sha256:60db23fa423575eeb65ea430cee741acb7c26a1365d103f7b0f6ec412b893853"}, - {file = "wrapt-1.14.1-cp37-cp37m-win_amd64.whl", hash = "sha256:709fe01086a55cf79d20f741f39325018f4df051ef39fe921b1ebe780a66184c"}, - {file = "wrapt-1.14.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8c0ce1e99116d5ab21355d8ebe53d9460366704ea38ae4d9f6933188f327b456"}, - {file = "wrapt-1.14.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:e3fb1677c720409d5f671e39bac6c9e0e422584e5f518bfd50aa4cbbea02433f"}, - {file = "wrapt-1.14.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:642c2e7a804fcf18c222e1060df25fc210b9c58db7c91416fb055897fc27e8cc"}, - {file = "wrapt-1.14.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7b7c050ae976e286906dd3f26009e117eb000fb2cf3533398c5ad9ccc86867b1"}, - {file = "wrapt-1.14.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:ef3f72c9666bba2bab70d2a8b79f2c6d2c1a42a7f7e2b0ec83bb2f9e383950af"}, - {file = "wrapt-1.14.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:01c205616a89d09827986bc4e859bcabd64f5a0662a7fe95e0d359424e0e071b"}, - {file = "wrapt-1.14.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:5a0f54ce2c092aaf439813735584b9537cad479575a09892b8352fea5e988dc0"}, - {file = "wrapt-1.14.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2cf71233a0ed05ccdabe209c606fe0bac7379fdcf687f39b944420d2a09fdb57"}, - {file = "wrapt-1.14.1-cp38-cp38-win32.whl", hash = "sha256:aa31fdcc33fef9eb2552cbcbfee7773d5a6792c137b359e82879c101e98584c5"}, - {file = "wrapt-1.14.1-cp38-cp38-win_amd64.whl", hash = "sha256:d1967f46ea8f2db647c786e78d8cc7e4313dbd1b0aca360592d8027b8508e24d"}, - {file = "wrapt-1.14.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3232822c7d98d23895ccc443bbdf57c7412c5a65996c30442ebe6ed3df335383"}, - {file = "wrapt-1.14.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:988635d122aaf2bdcef9e795435662bcd65b02f4f4c1ae37fbee7401c440b3a7"}, - {file = "wrapt-1.14.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9cca3c2cdadb362116235fdbd411735de4328c61425b0aa9f872fd76d02c4e86"}, - {file = "wrapt-1.14.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d52a25136894c63de15a35bc0bdc5adb4b0e173b9c0d07a2be9d3ca64a332735"}, - {file = "wrapt-1.14.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40e7bc81c9e2b2734ea4bc1aceb8a8f0ceaac7c5299bc5d69e37c44d9081d43b"}, - {file = "wrapt-1.14.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b9b7a708dd92306328117d8c4b62e2194d00c365f18eff11a9b53c6f923b01e3"}, - {file = "wrapt-1.14.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:6a9a25751acb379b466ff6be78a315e2b439d4c94c1e99cb7266d40a537995d3"}, - {file = "wrapt-1.14.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:34aa51c45f28ba7f12accd624225e2b1e5a3a45206aa191f6f9aac931d9d56fe"}, - {file = "wrapt-1.14.1-cp39-cp39-win32.whl", hash = "sha256:dee0ce50c6a2dd9056c20db781e9c1cfd33e77d2d569f5d1d9321c641bb903d5"}, - {file = "wrapt-1.14.1-cp39-cp39-win_amd64.whl", hash = "sha256:dee60e1de1898bde3b238f18340eec6148986da0455d8ba7848d50470a7a32fb"}, - {file = "wrapt-1.14.1.tar.gz", hash = "sha256:380a85cf89e0e69b7cfbe2ea9f765f004ff419f34194018a6827ac0e3edfed4d"}, -] +wrapt = [] wsrpc-aiohttp = [ {file = "wsrpc-aiohttp-3.2.0.tar.gz", hash = "sha256:f467abc51bcdc760fc5aeb7041abdeef46eeca3928dc43dd6e7fa7a533563818"}, {file = "wsrpc_aiohttp-3.2.0-py3-none-any.whl", hash = "sha256:fa9b0bf5cb056898cb5c9f64cbc5eacb8a5dd18ab1b7f0cd4a2208b4a7fde282"}, diff --git a/pyproject.toml b/pyproject.toml index 186da55688..6ac5170e88 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "OpenPype" -version = "3.12.2-nightly.2" # OpenPype +version = "3.14.1-nightly.3" # OpenPype description = "Open VFX and Animation pipeline with support." 
authors = ["OpenPype Team "] license = "MIT License" @@ -33,13 +33,14 @@ aiohttp = "^3.7" aiohttp_json_rpc = "*" # TVPaint server acre = { git = "https://github.com/pypeclub/acre.git" } opentimelineio = { version = "0.14.0.dev1", source = "openpype" } -appdirs = "^1.4.3" +appdirs = { git = "https://github.com/ActiveState/appdirs.git", branch = "master" } blessed = "^1.17" # openpype terminal formatting coolname = "*" clique = "1.6.*" Click = "^7" dnspython = "^2.1.0" -ftrack-python-api = "2.0.*" +ftrack-python-api = "^2.3.3" +shotgun_api3 = {git = "https://github.com/shotgunsoftware/python-api.git", rev = "v3.3.3"} gazu = "^0.8.28" google-api-python-client = "^1.12.8" # sync server google support (should be separate?) jsonschema = "^2.6.0" @@ -70,7 +71,6 @@ pysftp = "^0.2.9" dropbox = "^11.20.0" aiohttp-middlewares = "^2.0.0" - [tool.poetry.dev-dependencies] flake8 = "^3.7" autopep8 = "^1.4" @@ -79,13 +79,14 @@ cx_freeze = "~6.9" GitPython = "^3.1.17" jedi = "^0.13" Jinja2 = "^2.11" +markupsafe = "2.0.1" pycodestyle = "^2.5.0" pydocstyle = "^3.0.0" pylint = "^2.4.4" pytest = "^6.1" pytest-cov = "*" pytest-print = "*" -Sphinx = "*" +Sphinx = "5.0.1" sphinx-rtd-theme = "*" sphinxcontrib-websupport = "*" sphinx-qt-documentation = "*" @@ -141,6 +142,10 @@ hash = "3894dec7e4e521463891a869586850e8605f5fd604858b674c87323bf33e273d" url = "https://distribute.openpype.io/thirdparty/oiio-2.2.0-darwin.tgz" hash = "sha256:..." +[openpype.thirdparty.ocioconfig] +url = "https://distribute.openpype.io/thirdparty/OpenColorIO-Configs-1.0.2.zip" +hash = "4ac17c1f7de83465e6f51dd352d7117e07e765b66d00443257916c828e35b6ce" + [tool.pyright] include = [ "igniter", diff --git a/setup.py b/setup.py index 8b5a545c16..eab0187983 100644 --- a/setup.py +++ b/setup.py @@ -152,7 +152,7 @@ build_exe_options = dict( ) bdist_mac_options = dict( - bundle_name="OpenPype", + bundle_name=f"OpenPype {__version__}", iconfile=mac_icon_path ) diff --git a/start.py b/start.py index ace33ab92a..d1198a85e4 100644 --- a/start.py +++ b/start.py @@ -103,6 +103,9 @@ import site import distutils.spawn from pathlib import Path + +silent_mode = False + # OPENPYPE_ROOT is variable pointing to build (or code) directory # WARNING `OPENPYPE_ROOT` must be defined before igniter import # - igniter changes cwd which cause that filepath of this script won't lead @@ -138,40 +141,44 @@ if sys.__stdout__: term = blessed.Terminal() def _print(message: str): + if silent_mode: + return if message.startswith("!!! "): - print("{}{}".format(term.orangered2("!!! "), message[4:])) + print(f'{term.orangered2("!!! ")}{message[4:]}') return if message.startswith(">>> "): - print("{}{}".format(term.aquamarine3(">>> "), message[4:])) + print(f'{term.aquamarine3(">>> ")}{message[4:]}') return if message.startswith("--- "): - print("{}{}".format(term.darkolivegreen3("--- "), message[4:])) + print(f'{term.darkolivegreen3("--- ")}{message[4:]}') return if message.startswith("*** "): - print("{}{}".format(term.gold("*** "), message[4:])) + print(f'{term.gold("*** ")}{message[4:]}') return if message.startswith(" - "): - print("{}{}".format(term.wheat(" - "), message[4:])) + print(f'{term.wheat(" - ")}{message[4:]}') return if message.startswith(" . "): - print("{}{}".format(term.tan(" . "), message[4:])) + print(f'{term.tan(" . ")}{message[4:]}') return if message.startswith(" - "): - print("{}{}".format(term.seagreen3(" - "), message[7:])) + print(f'{term.seagreen3(" - ")}{message[7:]}') return if message.startswith(" ! "): - print("{}{}".format(term.goldenrod(" ! 
"), message[7:])) + print(f'{term.goldenrod(" ! ")}{message[7:]}') return if message.startswith(" * "): - print("{}{}".format(term.aquamarine1(" * "), message[7:])) + print(f'{term.aquamarine1(" * ")}{message[7:]}') return if message.startswith(" "): - print("{}{}".format(term.darkseagreen3(" "), message[4:])) + print(f'{term.darkseagreen3(" ")}{message[4:]}') return print(message) else: def _print(message: str): + if silent_mode: + return print(message) @@ -187,9 +194,8 @@ else: if "--headless" in sys.argv: os.environ["OPENPYPE_HEADLESS_MODE"] = "1" sys.argv.remove("--headless") -else: - if os.getenv("OPENPYPE_HEADLESS_MODE") != "1": - os.environ.pop("OPENPYPE_HEADLESS_MODE", None) +elif os.getenv("OPENPYPE_HEADLESS_MODE") != "1": + os.environ.pop("OPENPYPE_HEADLESS_MODE", None) # Enabled logging debug mode when "--debug" is passed if "--verbose" in sys.argv: @@ -203,8 +209,8 @@ if "--verbose" in sys.argv: value = sys.argv.pop(idx) else: raise RuntimeError(( - "Expect value after \"--verbose\" argument. {}" - ).format(expected_values)) + f"Expect value after \"--verbose\" argument. {expected_values}" + )) log_level = None low_value = value.lower() @@ -225,8 +231,9 @@ if "--verbose" in sys.argv: if log_level is None: raise RuntimeError(( - "Unexpected value after \"--verbose\" argument \"{}\". {}" - ).format(value, expected_values)) + "Unexpected value after \"--verbose\" " + f"argument \"{value}\". {expected_values}" + )) os.environ["OPENPYPE_LOG_LEVEL"] = str(log_level) @@ -242,13 +249,14 @@ from igniter.tools import ( get_openpype_global_settings, get_openpype_path_from_settings, validate_mongo_connection, - OpenPypeVersionNotFound + OpenPypeVersionNotFound, + OpenPypeVersionIncompatible ) # noqa from igniter.bootstrap_repos import OpenPypeVersion # noqa: E402 bootstrap = BootstrapRepos() silent_commands = {"run", "igniter", "standalonepublisher", - "extractenvironments"} + "extractenvironments", "version"} def list_versions(openpype_versions: list, local_version=None) -> None: @@ -270,8 +278,11 @@ def set_openpype_global_environments() -> None: general_env = get_general_environments() + # first resolve general environment because merge doesn't expect + # values to be list. 
+ # TODO: switch to OpenPype environment functions merged_env = acre.merge( - acre.parse(general_env), + acre.compute(acre.parse(general_env), cleanup=False), dict(os.environ) ) env = acre.compute( @@ -333,34 +344,33 @@ def run_disk_mapping_commands(settings): destination = destination.rstrip('/') source = source.rstrip('/') - if low_platform == "windows": - args = ["subst", destination, source] - elif low_platform == "darwin": - scr = "do shell script \"ln -s {} {}\" with administrator privileges".format(source, destination) # noqa: E501 + if low_platform == "darwin": + scr = f'do shell script "ln -s {source} {destination}" with administrator privileges' # noqa + args = ["osascript", "-e", scr] + elif low_platform == "windows": + args = ["subst", destination, source] else: args = ["sudo", "ln", "-s", source, destination] - _print("disk mapping args:: {}".format(args)) + _print(f"*** disk mapping arguments: {args}") try: if not os.path.exists(destination): output = subprocess.Popen(args) if output.returncode and output.returncode != 0: - exc_msg = "Executing was not successful: \"{}\"".format( - args) + exc_msg = f'Executing was not successful: "{args}"' raise RuntimeError(exc_msg) except TypeError as exc: - _print("Error {} in mapping drive {}, {}".format(str(exc), - source, - destination)) + _print( + f"Error {str(exc)} in mapping drive {source}, {destination}") raise def set_avalon_environments(): """Set avalon specific environments. - These are non modifiable environments for avalon workflow that must be set + These are non-modifiable environments for avalon workflow that must be set before avalon module is imported because avalon works with globals set with environment variables. """ @@ -505,7 +515,7 @@ def _process_arguments() -> tuple: ) if m and m.group('version'): use_version = m.group('version') - _print(">>> Requested version [ {} ]".format(use_version)) + _print(f">>> Requested version [ {use_version} ]") if "+staging" in use_version: use_staging = True break @@ -611,14 +621,17 @@ def _determine_mongodb() -> str: try: openpype_mongo = bootstrap.secure_registry.get_item( "openPypeMongo") - except ValueError: - raise RuntimeError("Missing MongoDB url") + except ValueError as e: + raise RuntimeError("Missing MongoDB url") from e return openpype_mongo def _initialize_environment(openpype_version: OpenPypeVersion) -> None: version_path = openpype_version.path + if not version_path: + _print(f"!!! Version {openpype_version} doesn't have path set.") + raise ValueError("No path set in specified OpenPype version.") os.environ["OPENPYPE_VERSION"] = str(openpype_version) # set OPENPYPE_REPOS_ROOT to point to currently used OpenPype version. 
os.environ["OPENPYPE_REPOS_ROOT"] = os.path.normpath( @@ -676,7 +689,7 @@ def _find_frozen_openpype(use_version: str = None, # Collect OpenPype versions installed_version = OpenPypeVersion.get_installed_version() # Expected version that should be used by studio settings - # - this option is used only if version is not explictly set and if + # - this option is used only if version is not explicitly set and if # studio has set explicit version in settings studio_version = OpenPypeVersion.get_expected_studio_version(use_staging) @@ -684,41 +697,40 @@ def _find_frozen_openpype(use_version: str = None, # Specific version is defined if use_version.lower() == "latest": # Version says to use latest version - _print("Finding latest version defined by use version") + _print(">>> Finding latest version defined by use version") openpype_version = bootstrap.find_latest_openpype_version( - use_staging - ) + use_staging) else: - _print("Finding specified version \"{}\"".format(use_version)) + _print(f">>> Finding specified version \"{use_version}\"") openpype_version = bootstrap.find_openpype_version( use_version, use_staging ) if openpype_version is None: raise OpenPypeVersionNotFound( - "Requested version \"{}\" was not found.".format( - use_version - ) + f"Requested version \"{use_version}\" was not found." ) elif studio_version is not None: # Studio has defined a version to use - _print("Finding studio version \"{}\"".format(studio_version)) + _print(f">>> Finding studio version \"{studio_version}\"") openpype_version = bootstrap.find_openpype_version( - studio_version, use_staging - ) + studio_version, use_staging) if openpype_version is None: raise OpenPypeVersionNotFound(( - "Requested OpenPype version \"{}\" defined by settings" + "Requested OpenPype version " + f"\"{studio_version}\" defined by settings" " was not found." - ).format(studio_version)) + )) else: # Default behavior to use latest version - _print("Finding latest version") + _print(( + ">>> Finding latest version " + f"with [ {installed_version} ]")) openpype_version = bootstrap.find_latest_openpype_version( - use_staging - ) + use_staging) + if openpype_version is None: if use_staging: reason = "Didn't find any staging versions." @@ -736,6 +748,23 @@ def _find_frozen_openpype(use_version: str = None, _initialize_environment(openpype_version) return version_path + in_headless_mode = os.getenv("OPENPYPE_HEADLESS_MODE") == "1" + if not installed_version.is_compatible(openpype_version): + message = "Version {} is not compatible with installed version {}." 
+        # Show UI to user
+        if not in_headless_mode:
+            igniter.show_message_dialog(
+                "Incompatible OpenPype installation",
+                message.format(
+                    "{}".format(openpype_version),
+                    "{}".format(installed_version)
+                )
+            )
+        # Raise incompatible error
+        raise OpenPypeVersionIncompatible(
+            message.format(openpype_version, installed_version)
+        )
+
     # test if latest detected is installed (in user data dir)
     is_inside = False
     try:
@@ -748,7 +777,7 @@ def _find_frozen_openpype(use_version: str = None,
 
     if not is_inside:
         # install latest version to user data dir
-        if os.getenv("OPENPYPE_HEADLESS_MODE") == "1":
+        if in_headless_mode:
             version_path = bootstrap.install_version(
                 openpype_version, force=True
             )
@@ -798,7 +827,7 @@ def _bootstrap_from_code(use_version, use_staging):
 
     if getattr(sys, 'frozen', False):
         local_version = bootstrap.get_version(Path(_openpype_root))
-        switch_str = f" - will switch to {use_version}" if use_version else ""
+        switch_str = f" - will switch to {use_version}" if use_version and use_version != local_version else ""  # noqa
         _print(f" - booting version: {local_version}{switch_str}")
         assert local_version
     else:
@@ -813,11 +842,8 @@ def _bootstrap_from_code(use_version, use_staging):
             use_version, use_staging
        )
         if version_to_use is None:
-            raise OpenPypeVersionNotFound(
-                "Requested version \"{}\" was not found.".format(
-                    use_version
-                )
-            )
+            raise OpenPypeVersionIncompatible(
+                f"Requested version \"{use_version}\" was not found.")
     else:
         # Staging version should be used
         version_to_use = bootstrap.find_latest_openpype_version(
@@ -903,7 +929,7 @@ def _boot_validate_versions(use_version, local_version):
         use_version, openpype_versions
     )
     valid, message = bootstrap.validate_openpype_version(version_path)
-    _print("{}{}".format(">>> " if valid else "!!! ", message))
+    _print(f'{">>> " if valid else "!!! "}{message}')
 
 
 def _boot_print_versions(use_staging, local_version, openpype_root):
@@ -914,13 +940,29 @@ def _boot_print_versions(use_staging, local_version, openpype_root):
         _print("--- This will list only staging versions detected.")
         _print(" To see other version, omit --use-staging argument.")
 
-    openpype_versions = bootstrap.find_openpype(include_zips=True,
-                                                staging=use_staging)
     if getattr(sys, 'frozen', False):
         local_version = bootstrap.get_version(Path(openpype_root))
     else:
         local_version = OpenPypeVersion.get_installed_version_str()
 
+    compatible_with = OpenPypeVersion(version=local_version)
+    if "--all" in sys.argv:
+        compatible_with = None
+        _print("--- Showing all versions (even those not compatible).")
+    else:
+        _print(("--- Showing only compatible versions "
+                f"with [ {compatible_with.major}.{compatible_with.minor} ]"))
+
+    openpype_versions = bootstrap.find_openpype(
+        include_zips=True,
+        staging=use_staging,
+    )
+    if compatible_with is not None:
+        openpype_versions = [
+            version for version in openpype_versions
+            if version.is_compatible(compatible_with)
+        ]
+
     list_versions(openpype_versions, local_version)
 
 
@@ -937,6 +979,9 @@ def _boot_handle_missing_version(local_version, use_staging, message):
 
 def boot():
     """Bootstrap OpenPype."""
+    global silent_mode
+    if any(arg in silent_commands for arg in sys.argv):
+        silent_mode = True
 
     # ------------------------------------------------------------------------
     # Set environment to OpenPype root path
@@ -1040,7 +1085,7 @@ def boot():
         if not result[0]:
             _print(f"!!! 
Invalid version: {result[1]}") sys.exit(1) - _print(f"--- version is valid") + _print("--- version is valid") else: try: version_path = _bootstrap_from_code(use_version, use_staging) @@ -1113,8 +1158,12 @@ def boot(): def get_info(use_staging=None) -> list: """Print additional information to console.""" - from openpype.lib.mongo import get_default_components - from openpype.lib.log import PypeLogger + from openpype.client.mongo import get_default_components + try: + from openpype.lib.log import Logger + except ImportError: + # Backwards compatibility for 'PypeLogger' + from openpype.lib.log import PypeLogger as Logger components = get_default_components() @@ -1141,14 +1190,14 @@ def get_info(use_staging=None) -> list: os.environ.get("MUSTER_REST_URL"))) # Reinitialize - PypeLogger.initialize() + Logger.initialize() mongo_components = get_default_components() if mongo_components["host"]: inf.append(("Logging to MongoDB", mongo_components["host"])) inf.append((" - port", mongo_components["port"] or "")) - inf.append((" - database", PypeLogger.log_database_name)) - inf.append((" - collection", PypeLogger.log_collection_name)) + inf.append((" - database", Logger.log_database_name)) + inf.append((" - collection", Logger.log_collection_name)) inf.append((" - user", mongo_components["username"] or "")) if mongo_components["auth_db"]: inf.append((" - auth source", mongo_components["auth_db"])) @@ -1157,8 +1206,7 @@ def get_info(use_staging=None) -> list: formatted = [] for info in inf: padding = (maximum - len(info[0])) + 1 - formatted.append( - "... {}:{}[ {} ]".format(info[0], " " * padding, info[1])) + formatted.append(f'... {info[0]}:{" " * padding}[ {info[1]} ]') return formatted diff --git a/tests/integration/hosts/aftereffects/test_publish_in_aftereffects_multiframe.py b/tests/integration/hosts/aftereffects/test_publish_in_aftereffects_multiframe.py new file mode 100644 index 0000000000..c882e0f9b2 --- /dev/null +++ b/tests/integration/hosts/aftereffects/test_publish_in_aftereffects_multiframe.py @@ -0,0 +1,64 @@ +import logging + +from tests.lib.assert_classes import DBAssert +from tests.integration.hosts.aftereffects.lib import AfterEffectsTestClass + +log = logging.getLogger("test_publish_in_aftereffects") + + +class TestPublishInAfterEffects(AfterEffectsTestClass): + """Basic test case for publishing in AfterEffects + + Should publish 5 frames + """ + PERSIST = True + + TEST_FILES = [ + ("12aSDRjthn4X3yw83gz_0FZJcRRiVDEYT", + "test_aftereffects_publish_multiframe.zip", + "") + ] + + APP = "aftereffects" + APP_VARIANT = "" + + APP_NAME = "{}/{}".format(APP, APP_VARIANT) + + TIMEOUT = 120 # publish timeout + + def test_db_asserts(self, dbcon, publish_finished): + """Host and input data dependent expected results in DB.""" + print("test_db_asserts") + failures = [] + + failures.append(DBAssert.count_of_types(dbcon, "version", 2)) + + failures.append( + DBAssert.count_of_types(dbcon, "version", 0, name={"$ne": 1})) + + failures.append( + DBAssert.count_of_types(dbcon, "subset", 1, + name="imageMainBackgroundcopy")) + + failures.append( + DBAssert.count_of_types(dbcon, "subset", 1, + name="workfileTest_task")) + + failures.append( + DBAssert.count_of_types(dbcon, "subset", 1, + name="reviewTesttask")) + + failures.append( + DBAssert.count_of_types(dbcon, "representation", 4)) + + additional_args = {"context.subset": "renderTestTaskDefault", + "context.ext": "png"} + failures.append( + DBAssert.count_of_types(dbcon, "representation", 1, + additional_args=additional_args)) + + assert not 
any(failures) + + +if __name__ == "__main__": + test_case = TestPublishInAfterEffects() diff --git a/tests/lib/testing_classes.py b/tests/lib/testing_classes.py index f991f02227..64676f62f4 100644 --- a/tests/lib/testing_classes.py +++ b/tests/lib/testing_classes.py @@ -12,8 +12,6 @@ import platform from tests.lib.db_handler import DBHandler from tests.lib.file_handler import RemoteFileHandler -from openpype.lib.remote_publish import find_variant_key - class BaseTest: """Empty base test class""" @@ -210,7 +208,10 @@ class PublishTest(ModuleUnitTest): application_manager = ApplicationManager() if not app_variant: - app_variant = find_variant_key(application_manager, self.APP) + variant = ( + application_manager.find_latest_available_variant_for_group( + self.APP)) + app_variant = variant.name yield "{}/{}".format(self.APP, app_variant) @@ -314,30 +315,22 @@ class PublishTest(ModuleUnitTest): Compares only presence, not size nor content! """ - published_dir_base = download_test_data - published_dir = os.path.join(output_folder_url, - self.PROJECT, - self.ASSET, - self.TASK, - "**") - expected_dir_base = os.path.join(published_dir_base, + published_dir_base = output_folder_url + expected_dir_base = os.path.join(download_test_data, "expected") - expected_dir = os.path.join(expected_dir_base, - self.PROJECT, - self.ASSET, - self.TASK, - "**") - print("Comparing published:'{}' : expected:'{}'".format(published_dir, - expected_dir)) - published = set(f.replace(published_dir_base, '') for f in - glob.glob(published_dir, recursive=True) if - f != published_dir_base and os.path.exists(f)) - expected = set(f.replace(expected_dir_base, '') for f in - glob.glob(expected_dir, recursive=True) if - f != expected_dir_base and os.path.exists(f)) - not_matched = expected.difference(published) - assert not not_matched, "Missing {} files".format(not_matched) + print("Comparing published:'{}' : expected:'{}'".format( + published_dir_base, expected_dir_base)) + published = set(f.replace(published_dir_base, '') for f in + glob.glob(published_dir_base + "\\**", recursive=True) + if f != published_dir_base and os.path.exists(f)) + expected = set(f.replace(expected_dir_base, '') for f in + glob.glob(expected_dir_base + "\\**", recursive=True) + if f != expected_dir_base and os.path.exists(f)) + + not_matched = expected.symmetric_difference(published) + assert not not_matched, "Missing {} files".format( + "\n".join(sorted(not_matched))) class HostFixtures(PublishTest): @@ -350,4 +343,4 @@ class HostFixtures(PublishTest): @pytest.fixture(scope="module") def startup_scripts(self, monkeypatch_session, download_test_data): """"Adds init scripts (like userSetup) to expected location""" - raise NotImplementedError \ No newline at end of file + raise NotImplementedError diff --git a/tools/build.sh b/tools/build.sh index b5aa22dfc0..4f0e4bdcf0 100755 --- a/tools/build.sh +++ b/tools/build.sh @@ -193,15 +193,15 @@ if [ "$disable_submodule_update" == 1 ]; then if [[ "$OSTYPE" == "darwin"* ]]; then # fix code signing issue - codesign --remove-signature "$openpype_root/build/OpenPype.app/Contents/MacOS/lib/Python" + codesign --remove-signature "$openpype_root/build/OpenPype $openpype_version.app/Contents/MacOS/lib/Python" if command -v create-dmg > /dev/null 2>&1; then create-dmg \ - --volname "OpenPype Installer" \ + --volname "OpenPype $openpype_version Installer" \ --window-pos 200 120 \ --window-size 600 300 \ --app-drop-link 100 50 \ - "$openpype_root/build/OpenPype-Installer.dmg" \ - "$openpype_root/build/OpenPype.app" + 
"$openpype_root/build/OpenPype-Installer-$openpype_version.dmg" \ + "$openpype_root/build/OpenPype $openpype_version.app" else echo -e "${BIYellow}!!!${RST} ${BIWhite}create-dmg${RST} command is not available." fi diff --git a/tools/build_dependencies.py b/tools/build_dependencies.py index d3566dd289..d186ead881 100644 --- a/tools/build_dependencies.py +++ b/tools/build_dependencies.py @@ -29,6 +29,7 @@ import shutil import blessed import enlighten import time +import re term = blessed.Terminal() @@ -52,7 +53,7 @@ def _print(msg: str, type: int = 0) -> None: else: header = term.darkolivegreen3("--- ") - print("{}{}".format(header, msg)) + print(f"{header}{msg}") def count_folders(path: Path) -> int: @@ -95,16 +96,22 @@ assert site_pkg, "No venv site-packages are found." _print(f"Working with: {site_pkg}", 2) openpype_root = Path(os.path.dirname(__file__)).parent +version = {} +with open(openpype_root / "openpype" / "version.py") as fp: + exec(fp.read(), version) + +version_match = re.search(r"(\d+\.\d+.\d+).*", version["__version__"]) +openpype_version = version_match[1] # create full path if platform.system().lower() == "darwin": build_dir = openpype_root.joinpath( "build", - "OpenPype.app", + f"OpenPype {openpype_version}.app", "Contents", "MacOS") else: - build_subdir = "exe.{}-{}".format(get_platform(), sys.version[0:3]) + build_subdir = f"exe.{get_platform()}-{sys.version[:3]}" build_dir = openpype_root / "build" / build_subdir _print(f"Using build at {build_dir}", 2) diff --git a/tools/create_zip.py b/tools/create_zip.py index 2fc351469a..6392428f58 100644 --- a/tools/create_zip.py +++ b/tools/create_zip.py @@ -61,7 +61,7 @@ def _print(msg: str, message_type: int = 0) -> None: else: header = term.darkolivegreen3("--- ") - print("{}{}".format(header, msg)) + print(f"{header}{msg}") if __name__ == "__main__": diff --git a/tools/fetch_thirdparty_libs.py b/tools/fetch_thirdparty_libs.py index b616beab27..421cc32dbd 100644 --- a/tools/fetch_thirdparty_libs.py +++ b/tools/fetch_thirdparty_libs.py @@ -109,13 +109,20 @@ except AttributeError: for k, v in thirdparty.items(): _print(f"processing {k}") - destination_path = openpype_root / "vendor" / "bin" / k / platform_name - url = v.get(platform_name).get("url") + destination_path = openpype_root / "vendor" / "bin" / k + if not v.get(platform_name): _print(("missing definition for current " - f"platform [ {platform_name} ]"), 1) - sys.exit(1) + f"platform [ {platform_name} ]"), 2) + _print("trying to get universal url for all platforms") + url = v.get("url") + if not url: + _print("cannot get url", 1) + sys.exit(1) + else: + url = v.get(platform_name).get("url") + destination_path = destination_path / platform_name parsed_url = urlparse(url) @@ -147,7 +154,13 @@ for k, v in thirdparty.items(): # get file with checksum _print("Calculating sha256 ...", 2) calc_checksum = sha256_sum(temp_file) - if v.get(platform_name).get("hash") != calc_checksum: + + if v.get(platform_name): + item_hash = v.get(platform_name).get("hash") + else: + item_hash = v.get("hash") + + if item_hash != calc_checksum: _print("Downloaded files checksum invalid.") sys.exit(1) diff --git a/website/docs/admin_hosts_maya.md b/website/docs/admin_hosts_maya.md index 93bf32798f..0e77f29fc2 100644 --- a/website/docs/admin_hosts_maya.md +++ b/website/docs/admin_hosts_maya.md @@ -120,3 +120,54 @@ raw json. You can configure path mapping using Maya `dirmap` command. This will add bi-directional mapping between list of paths specified in **Settings**. 
You can find it in **Settings -> Project Settings -> Maya -> Maya Directory Mapping** ![Dirmap settings](assets/maya-admin_dirmap_settings.png) + +## Templated Build Workfile + +Build a workfile from a template designed by users, which helps keep the subset hierarchy and imports homogeneous. The template is stored as a file that is easy to define, change and customize for production needs. + + **1. Make a template** + +Make your template. Add families and everything needed for your tasks. Here is an example template for the modeling task using a placeholder to import a gauge. + +![maya outliner](assets/maya-workfile-outliner.png) + +If needed, you can add placeholders when the template needs to load some assets. **OpenPype > Template Builder > Create Placeholder** + +![create placeholder](assets/maya-create_placeholder.png) + +- **Configure placeholders** + +Fill in the necessary fields (the optional fields are regex filters). + +![new place holder](assets/maya-placeholder_new.png) + + + - Builder type: Whether the placeholder should load representations of the current asset or of linked assets + + - Representation: Representation that will be loaded (ex: ma, abc, png, etc...) + + - Family: Family of the representation to load (main, look, image, etc ...) + + - Loader: Placeholder loader name that will be used to load corresponding representations + + - Order: Priority for the current placeholder loader (lowest priority is processed first, highest last) + +- **Save your template** + + + **2. Configure Template** + +- **Go to Studio settings > Project > Your DCC > Templated Build Settings** +- Add a profile for your task and enter the path to your template + +![setting build template](assets/settings/template_build_workfile.png) + +**3. Build your workfile** + +- Open Maya + +- Build your workfile + +![maya build template](assets/maya-build_workfile_from_template.png) + + diff --git a/website/docs/admin_openpype_commands.md b/website/docs/admin_openpype_commands.md index 53fc12410f..85f661d51e 100644 --- a/website/docs/admin_openpype_commands.md +++ b/website/docs/admin_openpype_commands.md @@ -40,7 +40,6 @@ For more information [see here](admin_use.md#run-openpype). | module | Run command line arguments for modules. | | | repack-version | Tool to re-create version zip. | [📑](#repack-version-arguments) | | tray | Launch OpenPype Tray. | [📑](#tray-arguments) -| eventserver | This should be ideally used by system service (such as systemd or upstart on linux and window service). | [📑](#eventserver-arguments) | | launch | Launch application in Pype environment. | [📑](#launch-arguments) | | publish | Pype takes JSON from provided path and use it to publish data in it. | [📑](#publish-arguments) | | extractenvironments | Extract environment variables for entered context to a json file. | [📑](#extractenvironments-arguments) | @@ -48,7 +47,6 @@ For more information [see here](admin_use.md#run-openpype). | interactive | Start python like interactive console session. | | | projectmanager | Launch Project Manager UI | [📑](#projectmanager-arguments) | | settings | Open Settings UI | [📑](#settings-arguments) | -| standalonepublisher | Open Standalone Publisher UI | [📑](#standalonepublisher-arguments) | --- ### `tray` arguments {#tray-arguments} ```shell openpype_console tray ``` --- -### `launch` arguments {#eventserver-arguments} -You have to set either proper environment variables to provide URL and credentials or use -option to specify them.
-| Argument | Description | -| --- | --- | -| `--ftrack-url` | URL to ftrack server (can be set with `FTRACK_SERVER`) | -| `--ftrack-user` |user name to log in to ftrack (can be set with `FTRACK_API_USER`) | -| `--ftrack-api-key` | ftrack api key (can be set with `FTRACK_API_KEY`) | -| `--legacy` | run event server without mongo storing | -| `--clockify-api-key` | Clockify API key (can be set with `CLOCKIFY_API_KEY`) | -| `--clockify-workspace` | Clockify workspace (can be set with `CLOCKIFY_WORKSPACE`) | - -To run ftrack event server: -```shell -openpype_console eventserver --ftrack-url= --ftrack-user= --ftrack-api-key= -``` - ---- ### `launch` arguments {#launch-arguments} | Argument | Description | @@ -159,12 +139,6 @@ openpypeconsole settings ``` --- -### `standalonepublisher` arguments {#standalonepublisher-arguments} -`standalonepublisher` has no command-line arguments. -```shell -openpype_console standalonepublisher -``` - ### `repack-version` arguments {#repack-version-arguments} Takes path to unzipped and possibly modified OpenPype version. Files will be zipped, checksums recalculated and version will be determined by folder name diff --git a/website/docs/admin_releases.md b/website/docs/admin_releases.md new file mode 100644 index 0000000000..bba5a22110 --- /dev/null +++ b/website/docs/admin_releases.md @@ -0,0 +1,9 @@ +--- +id: admin_releases +title: Releases +sidebar_label: Releases +--- + +Information about releases can be found on GitHub [Releases page](https://github.com/pypeclub/OpenPype/releases). + +You can find features and bugfixes in the codebase or full changelog for advanced users. diff --git a/website/docs/artist_concepts.md b/website/docs/artist_concepts.md index 9005cffe87..7582540811 100644 --- a/website/docs/artist_concepts.md +++ b/website/docs/artist_concepts.md @@ -10,6 +10,8 @@ sidebar_label: Key Concepts In our pipeline all the main entities the project is made from are internally considered *'Assets'*. Episode, sequence, shot, character, prop, etc. All of these behave identically in the pipeline. Asset names need to be absolutely unique within the project because they are their key identifier. +OpenPype has a limitation regarding duplicated names. Name of assets must be unique across whole project. + ### Subset Usually, an asset needs to be created in multiple *'flavours'*. A character might have multiple different looks, model needs to be published in different resolutions, a standard animation rig might not be usable in a crowd system and so on. 'Subsets' are here to accommodate all this variety that might be needed within a single asset. A model might have subset: *'main'*, *'proxy'*, *'sculpt'*, while data of *'look'* family could have subsets *'main'*, *'dirty'*, *'damaged'*. Subsets have some recommendations for their names, but ultimately it's up to the artist to use them for separation of publishes when needed. @@ -24,6 +26,11 @@ A numbered iteration of a given subset. Each version contains at least one [repr Each published variant can come out of the software in multiple representations. All of them hold exactly the same data, but in different formats. A model, for example, might be saved as `.OBJ`, Alembic, Maya geometry or as all of them, to be ready for pickup in any other applications supporting these formats. + +#### Naming convention + +At this moment names of assets, tasks, subsets or representations can contain only letters, numbers and underscore. + ### Family Each published [subset][3b89d8e0] can have exactly one family assigned to it. 
Family determines the type of data that the subset holds. Family doesn't dictate the file type, but can enforce certain technical specifications. For example OpenPype default configuration expects `model` family to only contain geometry without any shaders or joints when it is published. diff --git a/website/docs/artist_hosts_unreal.md b/website/docs/artist_hosts_unreal.md index 1ff09893e3..45a0c8bb6f 100644 --- a/website/docs/artist_hosts_unreal.md +++ b/website/docs/artist_hosts_unreal.md @@ -8,6 +8,20 @@ sidebar_label: Unreal OpenPype supports Unreal in similar ways as in other DCCs Yet there are few specific you need to be aware of. +### Creating the Unreal project + +Selecting a task and opening it with Unreal will generate the Unreal project, if it hasn't been created before. +By default, OpenPype includes the plugin that will be built together with the project. + +Alternatively, the Environment variable `"OPENPYPE_UNREAL_PLUGIN"` can be set to the path of a compiled version of the plugin. +The version of the compiled plugin must match the version of Unreal with which the project is being created. + +:::note +Unreal version 5.0 onwards requires the following Environment variable: + +`"UE_PYTHONPATH": "{PYTHONPATH}"` +::: + ### Project naming Unreal doesn't support project names starting with non-alphabetic character. So names like `123_myProject` are @@ -15,9 +29,9 @@ invalid. If OpenPype detects such name it automatically prepends letter **P** to ## OpenPype global tools -OpenPype global tools can be found in *Window* main menu: +OpenPype global tools can be found in Unreal's toolbar and in the *Tools* main menu: -![Unreal OpenPype Menu](assets/unreal-avalon_tools.jpg) +![Unreal OpenPype Menu](assets/unreal_openpype_tools.png) - [Create](artist_tools.md#creator) - [Load](artist_tools.md#loader) @@ -31,10 +45,118 @@ OpenPype global tools can be found in *Window* main menu: To import Static Mesh model, just choose **OpenPype → Load ...** and select your mesh. Static meshes are transferred as FBX files as specified in [Unreal Engine 4 Static Mesh Pipeline](https://docs.unrealengine.com/en-US/Engine/Content/Importing/FBX/StaticMeshes/index.html). This action will create new folder with subset name (`unrealStaticMeshMain_CON` for example) and put all data into it. Inside, you can find: -![Unreal Container Content](assets/unreal-container.jpg) +![Unreal Container Content](assets/unreal_container.jpg) -In this case there is **lambert1**, material pulled from Maya when this static mesh was published, **unrealStaticMeshCube** is the geometry itself, **unrealStaticMeshCube_CON** is a *AssetContainer* type and is there to mark this directory as Avalon Container (to track changes) and to hold OpenPype metadata. +In this case there is **lambert1**, material pulled from Maya when this static mesh was published, **antennaA_modelMain** is the geometry itself, **modelMain_v002_CON** is a *AssetContainer* type and is there to mark this directory as Avalon Container (to track changes) and to hold OpenPype metadata. ### Publishing -Publishing of Static Mesh works in similar ways. Select your mesh in *Content Browser* and **OpenPype → Create ...**. This will create folder named by subset you've chosen - for example **unrealStaticMeshDefault_INS**. It this folder is that mesh and *Avalon Publish Instance* asset marking this folder as publishable instance and holding important metadata on it. 
If you want to publish this instance, go **OpenPype → Publish ...** \ No newline at end of file +Publishing of a Static Mesh works in similar ways. Select your mesh in the *Content Browser* and **OpenPype → Create ...**. This will create a folder named after the subset you've chosen - for example **unrealStaticMeshDefault_INS**. In this folder are the mesh and an *Avalon Publish Instance* asset marking this folder as a publishable instance and holding important metadata on it. If you want to publish this instance, go **OpenPype → Publish ...** + +## Layout + +There are two different layout options in Unreal, depending on the type of project you are working on. +One only imports the layout, and saves it in a level. +The other uses [Master Sequences](https://docs.unrealengine.com/4.27/en-US/AnimatingObjects/Sequencer/Overview/TracksShot/) to track the whole level sequence hierarchy. +You can choose in the Project Settings if you want to generate the level sequences. + +![Unreal OP Settings Level Sequence](assets/unreal_setting_level_sequence.png) + +### Loading + +To load a layout, click on the OpenPype icon in Unreal’s main taskbar, and select **Load**. + +![Unreal OP Tools Load](assets/unreal_openpype_tools_load.png) + +Select the task on the left, then right click on the layout asset and select **Load Layout**. + +![Unreal Layout Load](assets/unreal_load_layout.png) + +If you need to load multiple layouts, you can select more than one task on the left, and you can load them together. + +![Unreal Layout Load Batch](assets/unreal_load_layout_batch.png) + +### Navigating the project + +The layout will be imported into the directory `/Content/OpenPype` and will be split into two subfolders: +- *Assets*, which will contain all the rigs and models contained in the layout; +- *Asset name* (in the following example, *episode 2*), a folder named after the **asset** of the current **task**. + +![Unreal Layout Loading Result](assets/unreal_layout_loading_result.png) + +If you chose to generate the level sequences, in the second folder you will find the master level for the task (usually an episode), the level sequence and the folders for all the scenes in the episodes. +Otherwise you will find the level generated for the loaded layout. + +#### Layout without level sequences + +In the layout folder, you will find the level with the imported layout and an object of *AssetContainer* type. The latter is there to mark this directory as an Avalon Container (to track changes) and to hold OpenPype metadata. + +![Unreal Layout Loading No Sequence](assets/unreal_layout_loading_no_sequence.png) + +The layout level will, and should, contain only the data included in the layout. To add lighting or other elements, like an environment, you have to create a master level and add the layout level as a [streaming level](https://docs.unrealengine.com/5.0/en-US/level-streaming-in-unreal-engine/). + +Create the master level and open it. Then, open the *Levels* window (from the menu **Windows → Levels**). Click on **Levels → Add Existing** and select the layout level and the other levels you wish to include in the scene. The following example shows a master level to which a light level and the layout level have been added. + +![Unreal Add Level](assets/unreal_add_level.png) +![Unreal Level List](assets/unreal_level_list_no_sequences.png) + +#### Layout with level sequences + +In the episode folder, you will find the master level for the episode, the master level sequence and the folders for all the scenes in the episodes.
+ +After opening the master level, open the *Levels* window (from the menu **Windows → Levels**), and you will see the list of the levels of each shot of the episode for which a layout has been loaded. + +![Unreal Level List](assets/unreal_level_list.png) + +If it has not been added already, you will need to add the environment to the level. Click on **Levels → Add Existing** and select the level with the environment (check with the studio where it is located). + +![Unreal Add Level](assets/unreal_add_level.png) + +After adding the environment level to the master level, you will need to set it as always loaded by right clicking it, and selecting **Change Streaming Method** and selecting **Always Loaded**. + +![Unreal Level Streaming Method](assets/unreal_level_streaming_method.png) + +### Update layouts + +To manage loaded layouts, click on the OpenPype icon in Unreal’s main taskbar, and select **Manage**. + +![Unreal OP Tools Manage](assets/unreal_openpype_tools_manage.png) + +You will get a list of all the assets that have been loaded in the project. +The version number will be in red if it isn’t the latest version. Right click on the element, and select Update if you need to update the layout. + +:::note +**DO NOT** update rigs or models imported with a layout. Update only the layout. +::: + +## Rendering + +:::note +The rendering requires a layout loaded with the option to create the level sequences **on**. +::: + +To render and publish an episode, a scene or a shot, you will need to create a publish instance. The publish instance for the rendering is based on one level sequence. That means that if you want to render the whole episode, you will need to create it for the level sequence of the episode, but if you want to render just one shot, you will need to create it for that shot. + +Navigate to the folder that contains the level sequence that you need to render. Select the level sequence, and then click on the OpenPype icon in Unreal’s main taskbar, and select **Create**. + +![Unreal OP Tools Create](assets/unreal_openpype_tools_create.png) + +In the Instance Creator, select **Unreal - Render**, give it a name, and click **Create**. + +![Unreal OP Instance Creator](assets/unreal_create_render.png) + +The render instance will be created in `/Content/OpenPype/PublishInstances`. + +Select the instance you need to render, and then click on the OpenPype icon in Unreal’s main taskbar, and select **Render**. You can render more than one instance at a time, if needed. Just select all the instances that you need to render before selecting the **Render** button from the OpenPype menu. + +![Unreal OP Tools Render](assets/unreal_openpype_tools_render.png) + +Once the render is finished, click on the OpenPype icon in Unreal’s main taskbar, and select **Publish**. + +![Unreal OP Tools Publish](assets/unreal_openpype_tools_publish.png) + +On the left, you will see the render instances. They will be automatically reorganised to have an instance for each shot. So, for example, if you have created the render instance for the whole episode, here you will have an instance for each shot in the episode. + +![Unreal Publish Render](assets/unreal_publish_render.png) + +Click on the play button in the bottom right, and it will start the publishing process. 
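+
+For pipelines that prefer to script the level assembly steps described above (adding an existing level to a master level and setting its streaming method), a rough Unreal Python sketch follows. This is not part of the OpenPype API, and the asset path used here is purely illustrative.
+
+```python
+import unreal
+
+# world of the currently opened master level
+world = unreal.EditorLevelLibrary.get_editor_world()
+
+# rough equivalent of "Levels → Add Existing" followed by
+# "Change Streaming Method → Always Loaded"
+unreal.EditorLevelUtils.add_level_to_world(
+    world,
+    "/Game/Environments/ForestEnv",   # illustrative package path
+    unreal.LevelStreamingAlwaysLoaded,
+)
+```
+
+The *Levels* window described in the Layout sections achieves the same result interactively.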
diff --git a/website/docs/assets/maya-build_workfile_from_template.png b/website/docs/assets/maya-build_workfile_from_template.png new file mode 100644 index 0000000000..7ef87861fe Binary files /dev/null and b/website/docs/assets/maya-build_workfile_from_template.png differ diff --git a/website/docs/assets/maya-create_placeholder.png b/website/docs/assets/maya-create_placeholder.png new file mode 100644 index 0000000000..3f49fe2e2b Binary files /dev/null and b/website/docs/assets/maya-create_placeholder.png differ diff --git a/website/docs/assets/maya-placeholder_new.png b/website/docs/assets/maya-placeholder_new.png new file mode 100644 index 0000000000..106a5275cd Binary files /dev/null and b/website/docs/assets/maya-placeholder_new.png differ diff --git a/website/docs/assets/maya-workfile-outliner.png b/website/docs/assets/maya-workfile-outliner.png new file mode 100644 index 0000000000..fbd1bbd03b Binary files /dev/null and b/website/docs/assets/maya-workfile-outliner.png differ diff --git a/website/docs/assets/settings/template_build_workfile.png b/website/docs/assets/settings/template_build_workfile.png new file mode 100644 index 0000000000..1bea5b01f5 Binary files /dev/null and b/website/docs/assets/settings/template_build_workfile.png differ diff --git a/website/docs/assets/settings_dev.png b/website/docs/assets/settings_dev.png new file mode 100644 index 0000000000..4d0359461e Binary files /dev/null and b/website/docs/assets/settings_dev.png differ diff --git a/website/docs/assets/unreal-avalon_tools.jpg b/website/docs/assets/unreal-avalon_tools.jpg deleted file mode 100644 index 531fbe516a..0000000000 Binary files a/website/docs/assets/unreal-avalon_tools.jpg and /dev/null differ diff --git a/website/docs/assets/unreal-container.jpg b/website/docs/assets/unreal-container.jpg deleted file mode 100644 index f0c0a61e95..0000000000 Binary files a/website/docs/assets/unreal-container.jpg and /dev/null differ diff --git a/website/docs/assets/unreal_add_level.png b/website/docs/assets/unreal_add_level.png new file mode 100644 index 0000000000..caeef03d10 Binary files /dev/null and b/website/docs/assets/unreal_add_level.png differ diff --git a/website/docs/assets/unreal_container.jpg b/website/docs/assets/unreal_container.jpg new file mode 100644 index 0000000000..0fda640b00 Binary files /dev/null and b/website/docs/assets/unreal_container.jpg differ diff --git a/website/docs/assets/unreal_create_render.png b/website/docs/assets/unreal_create_render.png new file mode 100644 index 0000000000..2e3ef20b35 Binary files /dev/null and b/website/docs/assets/unreal_create_render.png differ diff --git a/website/docs/assets/unreal_layout_loading_no_sequence.png b/website/docs/assets/unreal_layout_loading_no_sequence.png new file mode 100644 index 0000000000..ed05d77f53 Binary files /dev/null and b/website/docs/assets/unreal_layout_loading_no_sequence.png differ diff --git a/website/docs/assets/unreal_layout_loading_result.png b/website/docs/assets/unreal_layout_loading_result.png new file mode 100644 index 0000000000..55b329110b Binary files /dev/null and b/website/docs/assets/unreal_layout_loading_result.png differ diff --git a/website/docs/assets/unreal_level_list.png b/website/docs/assets/unreal_level_list.png new file mode 100644 index 0000000000..2fc0c1bfc7 Binary files /dev/null and b/website/docs/assets/unreal_level_list.png differ diff --git a/website/docs/assets/unreal_level_list_no_sequences.png b/website/docs/assets/unreal_level_list_no_sequences.png new file mode 100644 index 
0000000000..7ed912b68b Binary files /dev/null and b/website/docs/assets/unreal_level_list_no_sequences.png differ diff --git a/website/docs/assets/unreal_level_streaming_method.png b/website/docs/assets/unreal_level_streaming_method.png new file mode 100644 index 0000000000..8f817abd2e Binary files /dev/null and b/website/docs/assets/unreal_level_streaming_method.png differ diff --git a/website/docs/assets/unreal_level_streaming_method_no_sequences.png b/website/docs/assets/unreal_level_streaming_method_no_sequences.png new file mode 100644 index 0000000000..77a2754ded Binary files /dev/null and b/website/docs/assets/unreal_level_streaming_method_no_sequences.png differ diff --git a/website/docs/assets/unreal_load_layout.png b/website/docs/assets/unreal_load_layout.png new file mode 100644 index 0000000000..ffad60ae9b Binary files /dev/null and b/website/docs/assets/unreal_load_layout.png differ diff --git a/website/docs/assets/unreal_load_layout_batch.png b/website/docs/assets/unreal_load_layout_batch.png new file mode 100644 index 0000000000..dd2f2f3e8f Binary files /dev/null and b/website/docs/assets/unreal_load_layout_batch.png differ diff --git a/website/docs/assets/unreal_openpype_tools.png b/website/docs/assets/unreal_openpype_tools.png new file mode 100644 index 0000000000..bf7d850ab2 Binary files /dev/null and b/website/docs/assets/unreal_openpype_tools.png differ diff --git a/website/docs/assets/unreal_openpype_tools_create.png b/website/docs/assets/unreal_openpype_tools_create.png new file mode 100644 index 0000000000..9cfb95f2a1 Binary files /dev/null and b/website/docs/assets/unreal_openpype_tools_create.png differ diff --git a/website/docs/assets/unreal_openpype_tools_load.png b/website/docs/assets/unreal_openpype_tools_load.png new file mode 100644 index 0000000000..4909feac3b Binary files /dev/null and b/website/docs/assets/unreal_openpype_tools_load.png differ diff --git a/website/docs/assets/unreal_openpype_tools_manage.png b/website/docs/assets/unreal_openpype_tools_manage.png new file mode 100644 index 0000000000..af7b182842 Binary files /dev/null and b/website/docs/assets/unreal_openpype_tools_manage.png differ diff --git a/website/docs/assets/unreal_openpype_tools_publish.png b/website/docs/assets/unreal_openpype_tools_publish.png new file mode 100644 index 0000000000..ab4c10c4ca Binary files /dev/null and b/website/docs/assets/unreal_openpype_tools_publish.png differ diff --git a/website/docs/assets/unreal_openpype_tools_render.png b/website/docs/assets/unreal_openpype_tools_render.png new file mode 100644 index 0000000000..377dc2951e Binary files /dev/null and b/website/docs/assets/unreal_openpype_tools_render.png differ diff --git a/website/docs/assets/unreal_publish_render.png b/website/docs/assets/unreal_publish_render.png new file mode 100644 index 0000000000..674b0ac30e Binary files /dev/null and b/website/docs/assets/unreal_publish_render.png differ diff --git a/website/docs/assets/unreal_setting_level_sequence.png b/website/docs/assets/unreal_setting_level_sequence.png new file mode 100644 index 0000000000..5a8adc6257 Binary files /dev/null and b/website/docs/assets/unreal_setting_level_sequence.png differ diff --git a/website/docs/changelog.md b/website/docs/changelog.md deleted file mode 100644 index 448592b930..0000000000 --- a/website/docs/changelog.md +++ /dev/null @@ -1,1138 +0,0 @@ ---- -id: changelog -title: Changelog -sidebar_label: Changelog ---- - -## [2.18.0](https://github.com/pypeclub/openpype/tree/2.18.0) -_**release date:** (2021-05-18)_ - 
-[Full Changelog](https://github.com/pypeclub/openpype/compare/2.17.3...2.18.0) - -**Enhancements:** - -- Use SubsetLoader and multiple contexts for delete_old_versions [\#1484](ttps://github.com/pypeclub/OpenPype/pull/1484)) -- TVPaint: Increment workfile version on successful publish. [\#1489](https://github.com/pypeclub/OpenPype/pull/1489) -- Maya: Use of multiple deadline servers [\#1483](https://github.com/pypeclub/OpenPype/pull/1483) - -**Fixed bugs:** - -- Use instance frame start instead of timeline. [\#1486](https://github.com/pypeclub/OpenPype/pull/1486) -- Maya: Redshift - set proper start frame on proxy [\#1480](https://github.com/pypeclub/OpenPype/pull/1480) -- Maya: wrong collection of playblasted frames [\#1517](https://github.com/pypeclub/OpenPype/pull/1517) -- Existing subsets hints in creator [\#1502](https://github.com/pypeclub/OpenPype/pull/1502) - - -### [2.17.3](https://github.com/pypeclub/openpype/tree/2.17.3) -_**release date:** (2021-05-06)_ - -[Full Changelog](https://github.com/pypeclub/openpype/compare/CI/3.0.0-rc.3...2.17.3) - -**Fixed bugs:** - -- Nuke: workfile version synced to db version always [\#1479](https://github.com/pypeclub/OpenPype/pull/1479) - -### [2.17.2](https://github.com/pypeclub/openpype/tree/2.17.2) -_**release date:** (2021-05-04)_ - -[Full Changelog](https://github.com/pypeclub/openpype/compare/CI/3.0.0-rc.1...2.17.2) - -**Enhancements:** - -- Forward/Backward compatible apps and tools with OpenPype 3 [\#1463](https://github.com/pypeclub/OpenPype/pull/1463) - -### [2.17.1](https://github.com/pypeclub/openpype/tree/2.17.1) -_**release date:** (2021-04-30)_ - -[Full Changelog](https://github.com/pypeclub/openpype/compare/2.17.0...2.17.1) - -**Enhancements:** - -- Faster settings UI loading [\#1442](https://github.com/pypeclub/OpenPype/pull/1442) -- Nuke: deadline submission with gpu [\#1414](https://github.com/pypeclub/OpenPype/pull/1414) -- TVPaint frame range definition [\#1424](https://github.com/pypeclub/OpenPype/pull/1424) -- PS - group all published instances [\#1415](https://github.com/pypeclub/OpenPype/pull/1415) -- Add task name to context pop up. [\#1383](https://github.com/pypeclub/OpenPype/pull/1383) -- Enhance review letterbox feature. [\#1371](https://github.com/pypeclub/OpenPype/pull/1371) -- AE add duration validation [\#1363](https://github.com/pypeclub/OpenPype/pull/1363) - -**Fixed bugs:** - -- Houdini menu filename [\#1417](https://github.com/pypeclub/OpenPype/pull/1417) -- Nuke: fixing undo for loaded mov and sequence [\#1433](https://github.com/pypeclub/OpenPype/pull/1433) -- AE - validation for duration was 1 frame shorter [\#1426](https://github.com/pypeclub/OpenPype/pull/1426) - -**Merged pull requests:** - -- Maya: Vray - problem getting all file nodes for look publishing [\#1399](https://github.com/pypeclub/OpenPype/pull/1399) -- Maya: Support for Redshift proxies [\#1360](https://github.com/pypeclub/OpenPype/pull/1360) - -## [2.17.0](https://github.com/pypeclub/openpype/tree/2.17.0) -_**release date:** (2021-04-20)_ - -[Full Changelog](https://github.com/pypeclub/openpype/compare/CI/3.0.0-beta.2...2.17.0) - -**Enhancements:** - -- Forward compatible ftrack group [\#1243](https://github.com/pypeclub/OpenPype/pull/1243) -- Maya: Make tx option configurable with presets [\#1328](https://github.com/pypeclub/OpenPype/pull/1328) -- TVPaint asset name validation [\#1302](https://github.com/pypeclub/OpenPype/pull/1302) -- TV Paint: Set initial project settings. 
[\#1299](https://github.com/pypeclub/OpenPype/pull/1299) -- TV Paint: Validate mark in and out. [\#1298](https://github.com/pypeclub/OpenPype/pull/1298) -- Validate project settings [\#1297](https://github.com/pypeclub/OpenPype/pull/1297) -- After Effects: added SubsetManager [\#1234](https://github.com/pypeclub/OpenPype/pull/1234) -- Show error message in pyblish UI [\#1206](https://github.com/pypeclub/OpenPype/pull/1206) - -**Fixed bugs:** - -- Hiero: fixing source frame from correct object [\#1362](https://github.com/pypeclub/OpenPype/pull/1362) -- Nuke: fix colourspace, prerenders and nuke panes opening [\#1308](https://github.com/pypeclub/OpenPype/pull/1308) -- AE remove orphaned instance from workfile - fix self.stub [\#1282](https://github.com/pypeclub/OpenPype/pull/1282) -- Nuke: deadline submission with search replaced env values from preset [\#1194](https://github.com/pypeclub/OpenPype/pull/1194) -- Ftrack custom attributes in bulks [\#1312](https://github.com/pypeclub/OpenPype/pull/1312) -- Ftrack optional pypclub role [\#1303](https://github.com/pypeclub/OpenPype/pull/1303) -- After Effects: remove orphaned instances [\#1275](https://github.com/pypeclub/OpenPype/pull/1275) -- Avalon schema names [\#1242](https://github.com/pypeclub/OpenPype/pull/1242) -- Handle duplication of Task name [\#1226](https://github.com/pypeclub/OpenPype/pull/1226) -- Modified path of plugin loads for Harmony and TVPaint [\#1217](https://github.com/pypeclub/OpenPype/pull/1217) -- Regex checks in profiles filtering [\#1214](https://github.com/pypeclub/OpenPype/pull/1214) -- Update custom ftrack session attributes [\#1202](https://github.com/pypeclub/OpenPype/pull/1202) -- Nuke: write node colorspace ignore `default\(\)` label [\#1199](https://github.com/pypeclub/OpenPype/pull/1199) - -## [2.16.0](https://github.com/pypeclub/pype/tree/2.16.0) - - _**release date:** 2021-03-22_ - -[Full Changelog](https://github.com/pypeclub/pype/compare/2.15.3...2.16.0) - -**Enhancements:** - -- Nuke: deadline submit limit group filter [\#1167](https://github.com/pypeclub/pype/pull/1167) -- Maya: support for Deadline Group and Limit Groups - backport 2.x [\#1156](https://github.com/pypeclub/pype/pull/1156) -- Maya: fixes for Redshift support [\#1152](https://github.com/pypeclub/pype/pull/1152) -- Nuke: adding preset for a Read node name to all img and mov Loaders [\#1146](https://github.com/pypeclub/pype/pull/1146) -- nuke deadline submit with environ var from presets overrides [\#1142](https://github.com/pypeclub/pype/pull/1142) -- Change timers after task change [\#1138](https://github.com/pypeclub/pype/pull/1138) -- Nuke: shortcuts for Pype menu [\#1127](https://github.com/pypeclub/pype/pull/1127) -- Nuke: workfile template [\#1124](https://github.com/pypeclub/pype/pull/1124) -- Sites local settings by site name [\#1117](https://github.com/pypeclub/pype/pull/1117) -- Reset loader's asset selection on context change [\#1106](https://github.com/pypeclub/pype/pull/1106) -- Bulk mov render publishing [\#1101](https://github.com/pypeclub/pype/pull/1101) -- Photoshop: mark publishable instances [\#1093](https://github.com/pypeclub/pype/pull/1093) -- Added ability to define BG color for extract review [\#1088](https://github.com/pypeclub/pype/pull/1088) -- TVPaint extractor enhancement [\#1080](https://github.com/pypeclub/pype/pull/1080) -- Photoshop: added support for .psb in workfiles [\#1078](https://github.com/pypeclub/pype/pull/1078) -- Optionally add task to subset name 
[\#1072](https://github.com/pypeclub/pype/pull/1072) -- Only extend clip range when collecting. [\#1008](https://github.com/pypeclub/pype/pull/1008) -- Collect audio for farm reviews. [\#1073](https://github.com/pypeclub/pype/pull/1073) - - -**Fixed bugs:** - -- Fix path spaces in jpeg extractor [\#1174](https://github.com/pypeclub/pype/pull/1174) -- Maya: Bugfix: superclass for CreateCameraRig [\#1166](https://github.com/pypeclub/pype/pull/1166) -- Maya: Submit to Deadline - fix typo in condition [\#1163](https://github.com/pypeclub/pype/pull/1163) -- Avoid dot in repre extension [\#1125](https://github.com/pypeclub/pype/pull/1125) -- Fix versions variable usage in standalone publisher [\#1090](https://github.com/pypeclub/pype/pull/1090) -- Collect instance data fix subset query [\#1082](https://github.com/pypeclub/pype/pull/1082) -- Fix getting the camera name. [\#1067](https://github.com/pypeclub/pype/pull/1067) -- Nuke: Ensure "NUKE\_TEMP\_DIR" is not part of the Deadline job environment. [\#1064](https://github.com/pypeclub/pype/pull/1064) - -### [2.15.3](https://github.com/pypeclub/pype/tree/2.15.3) - - _**release date:** 2021-02-26_ - -[Full Changelog](https://github.com/pypeclub/pype/compare/2.15.2...2.15.3) - -**Enhancements:** - -- Maya: speedup renderable camera collection [\#1053](https://github.com/pypeclub/pype/pull/1053) -- Harmony - add regex search to filter allowed task names for collectin… [\#1047](https://github.com/pypeclub/pype/pull/1047) - -**Fixed bugs:** - -- Ftrack integrate hierarchy fix [\#1085](https://github.com/pypeclub/pype/pull/1085) -- Explicit subset filter in anatomy instance data [\#1059](https://github.com/pypeclub/pype/pull/1059) -- TVPaint frame offset [\#1057](https://github.com/pypeclub/pype/pull/1057) -- Auto fix unicode strings [\#1046](https://github.com/pypeclub/pype/pull/1046) - -### [2.15.2](https://github.com/pypeclub/pype/tree/2.15.2) - - _**release date:** 2021-02-19_ - -[Full Changelog](https://github.com/pypeclub/pype/compare/2.15.1...2.15.2) - -**Enhancements:** - -- Maya: Vray scene publishing [\#1013](https://github.com/pypeclub/pype/pull/1013) - -**Fixed bugs:** - -- Fix entity move under project [\#1040](https://github.com/pypeclub/pype/pull/1040) -- smaller nuke fixes from production [\#1036](https://github.com/pypeclub/pype/pull/1036) -- TVPaint thumbnail extract fix [\#1031](https://github.com/pypeclub/pype/pull/1031) - -### [2.15.1](https://github.com/pypeclub/pype/tree/2.15.1) - - _**release date:** 2021-02-12_ - -[Full Changelog](https://github.com/pypeclub/pype/compare/2.15.0...2.15.1) - -**Enhancements:** - -- Delete version as loader action [\#1011](https://github.com/pypeclub/pype/pull/1011) -- Delete old versions [\#445](https://github.com/pypeclub/pype/pull/445) - -**Fixed bugs:** - -- PS - remove obsolete functions from pywin32 [\#1006](https://github.com/pypeclub/pype/pull/1006) -- Clone description of review session objects. 
[\#922](https://github.com/pypeclub/pype/pull/922) - -## [2.15.0](https://github.com/pypeclub/pype/tree/2.15.0) - - _**release date:** 2021-02-09_ - -[Full Changelog](https://github.com/pypeclub/pype/compare/2.14.6...2.15.0) - -**Enhancements:** - -- Resolve - loading and updating clips [\#932](https://github.com/pypeclub/pype/pull/932) -- Release/2.15.0 [\#926](https://github.com/pypeclub/pype/pull/926) -- Photoshop: add option for template.psd and prelaunch hook [\#894](https://github.com/pypeclub/pype/pull/894) -- Nuke: deadline presets [\#993](https://github.com/pypeclub/pype/pull/993) -- Maya: Alembic only set attributes that exists. [\#986](https://github.com/pypeclub/pype/pull/986) -- Harmony: render local and handle fixes [\#981](https://github.com/pypeclub/pype/pull/981) -- PSD Bulk export of ANIM group [\#965](https://github.com/pypeclub/pype/pull/965) -- AE - added prelaunch hook for opening last or workfile from template [\#944](https://github.com/pypeclub/pype/pull/944) -- PS - safer handling of loading of workfile [\#941](https://github.com/pypeclub/pype/pull/941) -- Maya: Handling Arnold referenced AOVs [\#938](https://github.com/pypeclub/pype/pull/938) -- TVPaint: switch layer IDs for layer names during identification [\#903](https://github.com/pypeclub/pype/pull/903) -- TVPaint audio/sound loader [\#893](https://github.com/pypeclub/pype/pull/893) -- Clone review session with children. [\#891](https://github.com/pypeclub/pype/pull/891) -- Simple compositing data packager for freelancers [\#884](https://github.com/pypeclub/pype/pull/884) -- Harmony deadline submission [\#881](https://github.com/pypeclub/pype/pull/881) -- Maya: Optionally hide image planes from reviews. [\#840](https://github.com/pypeclub/pype/pull/840) -- Maya: handle referenced AOVs for Vray [\#824](https://github.com/pypeclub/pype/pull/824) -- DWAA/DWAB support on windows [\#795](https://github.com/pypeclub/pype/pull/795) -- Unreal: animation, layout and setdress updates [\#695](https://github.com/pypeclub/pype/pull/695) - -**Fixed bugs:** - -- Maya: Looks - disable hardlinks [\#995](https://github.com/pypeclub/pype/pull/995) -- Fix Ftrack custom attribute update [\#982](https://github.com/pypeclub/pype/pull/982) -- Prores ks in burnin script [\#960](https://github.com/pypeclub/pype/pull/960) -- terminal.py crash on import [\#839](https://github.com/pypeclub/pype/pull/839) -- Extract review handle bizarre pixel aspect ratio [\#990](https://github.com/pypeclub/pype/pull/990) -- Nuke: add nuke related env var to sumbission [\#988](https://github.com/pypeclub/pype/pull/988) -- Nuke: missing preset's variable [\#984](https://github.com/pypeclub/pype/pull/984) -- Get creator by name fix [\#979](https://github.com/pypeclub/pype/pull/979) -- Fix update of project's tasks on Ftrack sync [\#972](https://github.com/pypeclub/pype/pull/972) -- nuke: wrong frame offset in mov loader [\#971](https://github.com/pypeclub/pype/pull/971) -- Create project structure action fix multiroot [\#967](https://github.com/pypeclub/pype/pull/967) -- PS: remove pywin installation from hook [\#964](https://github.com/pypeclub/pype/pull/964) -- Prores ks in burnin script [\#959](https://github.com/pypeclub/pype/pull/959) -- Subset family is now stored in subset document [\#956](https://github.com/pypeclub/pype/pull/956) -- DJV new version arguments [\#954](https://github.com/pypeclub/pype/pull/954) -- TV Paint: Fix single frame Sequence [\#953](https://github.com/pypeclub/pype/pull/953) -- nuke: missing `file` knob update 
[\#933](https://github.com/pypeclub/pype/pull/933) -- Photoshop: Create from single layer was failing [\#920](https://github.com/pypeclub/pype/pull/920) -- Nuke: baking mov with correct colorspace inherited from write [\#909](https://github.com/pypeclub/pype/pull/909) -- Launcher fix actions discover [\#896](https://github.com/pypeclub/pype/pull/896) -- Get the correct file path for the updated mov. [\#889](https://github.com/pypeclub/pype/pull/889) -- Maya: Deadline submitter - shared data access violation [\#831](https://github.com/pypeclub/pype/pull/831) -- Maya: Take into account vray master AOV switch [\#822](https://github.com/pypeclub/pype/pull/822) - -**Merged pull requests:** - -- Refactor blender to 3.0 format [\#934](https://github.com/pypeclub/pype/pull/934) - -### [2.14.6](https://github.com/pypeclub/pype/tree/2.14.6) - - _**release date:** 2021-01-15_ - -[Full Changelog](https://github.com/pypeclub/pype/compare/2.14.5...2.14.6) - -**Fixed bugs:** - -- Nuke: improving of hashing path [\#885](https://github.com/pypeclub/pype/pull/885) - -**Merged pull requests:** - -- Hiero: cut videos with correct secons [\#892](https://github.com/pypeclub/pype/pull/892) -- Faster sync to avalon preparation [\#869](https://github.com/pypeclub/pype/pull/869) - -### [2.14.5](https://github.com/pypeclub/pype/tree/2.14.5) - - _**release date:** 2021-01-06_ - -[Full Changelog](https://github.com/pypeclub/pype/compare/2.14.4...2.14.5) - -**Merged pull requests:** - -- Pype logger refactor [\#866](https://github.com/pypeclub/pype/pull/866) - -### [2.14.4](https://github.com/pypeclub/pype/tree/2.14.4) - - _**release date:** 2020-12-18_ - -[Full Changelog](https://github.com/pypeclub/pype/compare/2.14.3...2.14.4) - -**Merged pull requests:** - -- Fix - AE - added explicit cast to int [\#837](https://github.com/pypeclub/pype/pull/837) - -### [2.14.3](https://github.com/pypeclub/pype/tree/2.14.3) - - _**release date:** 2020-12-16_ - -[Full Changelog](https://github.com/pypeclub/pype/compare/2.14.2...2.14.3) - -**Fixed bugs:** - -- TVPaint repair invalid metadata [\#809](https://github.com/pypeclub/pype/pull/809) -- Feature/push hier value to nonhier action [\#807](https://github.com/pypeclub/pype/pull/807) -- Harmony: fix palette and image sequence loader [\#806](https://github.com/pypeclub/pype/pull/806) - -**Merged pull requests:** - -- respecting space in path [\#823](https://github.com/pypeclub/pype/pull/823) - -### [2.14.2](https://github.com/pypeclub/pype/tree/2.14.2) - - _**release date:** 2020-12-04_ - -[Full Changelog](https://github.com/pypeclub/pype/compare/2.14.1...2.14.2) - -**Enhancements:** - -- Collapsible wrapper in settings [\#767](https://github.com/pypeclub/pype/pull/767) - -**Fixed bugs:** - -- Harmony: template extraction and palettes thumbnails on mac [\#768](https://github.com/pypeclub/pype/pull/768) -- TVPaint store context to workfile metadata \(764\) [\#766](https://github.com/pypeclub/pype/pull/766) -- Extract review audio cut fix [\#763](https://github.com/pypeclub/pype/pull/763) - -**Merged pull requests:** - -- AE: fix publish after background load [\#781](https://github.com/pypeclub/pype/pull/781) -- TVPaint store members key [\#769](https://github.com/pypeclub/pype/pull/769) - -### [2.14.1](https://github.com/pypeclub/pype/tree/2.14.1) - - _**release date:** 2020-11-27_ - -[Full Changelog](https://github.com/pypeclub/pype/compare/2.14.0...2.14.1) - -**Enhancements:** - -- Settings required keys in modifiable dict [\#770](https://github.com/pypeclub/pype/pull/770) -- 
Extract review may not add audio to output [\#761](https://github.com/pypeclub/pype/pull/761) - -**Fixed bugs:** - -- After Effects: frame range, file format and render source scene fixes [\#760](https://github.com/pypeclub/pype/pull/760) -- Hiero: trimming review with clip event number [\#754](https://github.com/pypeclub/pype/pull/754) -- TVPaint: fix updating of loaded subsets [\#752](https://github.com/pypeclub/pype/pull/752) -- Maya: Vray handling of default aov [\#748](https://github.com/pypeclub/pype/pull/748) -- Maya: multiple renderable cameras in layer didn't work [\#744](https://github.com/pypeclub/pype/pull/744) -- Ftrack integrate custom attributes fix [\#742](https://github.com/pypeclub/pype/pull/742) - - - -## [2.14.0](https://github.com/pypeclub/pype/tree/2.14.0) - - _**release date:** 2020-11-24_ - -[Full Changelog](https://github.com/pypeclub/pype/compare/2.13.7...2.14.0) - -**Enhancements:** - -- Ftrack: Event for syncing shot or asset status with tasks.[\#736](https://github.com/pypeclub/pype/pull/736) -- Maya: add camera rig publishing option [\#721](https://github.com/pypeclub/pype/pull/721) -- Maya: Ask user to select non-default camera from scene or create a new. [\#678](https://github.com/pypeclub/pype/pull/678) -- Maya: Camera name can be added to burnins. [\#674](https://github.com/pypeclub/pype/pull/674) -- Sort instances by label in pyblish gui [\#719](https://github.com/pypeclub/pype/pull/719) -- Synchronize ftrack hierarchical and shot attributes [\#716](https://github.com/pypeclub/pype/pull/716) -- Standalone Publisher: Publish editorial from separate image sequences [\#699](https://github.com/pypeclub/pype/pull/699) -- Render publish plugins abstraction [\#687](https://github.com/pypeclub/pype/pull/687) -- TV Paint: image loader with options [\#675](https://github.com/pypeclub/pype/pull/675) -- **TV Paint (Beta):** initial implementation of creators and local rendering [\#693](https://github.com/pypeclub/pype/pull/693) -- **After Effects (Beta):** base integration with loaders [\#667](https://github.com/pypeclub/pype/pull/667) -- Harmony: Javascript refactoring and overall stability improvements [\#666](https://github.com/pypeclub/pype/pull/666) - -**Fixed bugs:** - -- TVPaint: extract review fix [\#740](https://github.com/pypeclub/pype/pull/740) -- After Effects: Review were not being sent to ftrack [\#738](https://github.com/pypeclub/pype/pull/738) -- Maya: vray proxy was not loading [\#722](https://github.com/pypeclub/pype/pull/722) -- Maya: Vray expected file fixes [\#682](https://github.com/pypeclub/pype/pull/682) - -**Deprecated:** - -- Removed artist view from pyblish gui [\#717](https://github.com/pypeclub/pype/pull/717) -- Maya: disable legacy override check for cameras [\#715](https://github.com/pypeclub/pype/pull/715) - - - - -### [2.13.7](https://github.com/pypeclub/pype/tree/2.13.7) - - _**release date:** 2020-11-19_ - -[Full Changelog](https://github.com/pypeclub/pype/compare/2.13.6...2.13.7) - -**Merged pull requests:** - -- fix\(SP\): getting fps from context instead of nonexistent entity [\#729](https://github.com/pypeclub/pype/pull/729) - - - - -### [2.13.6](https://github.com/pypeclub/pype/tree/2.13.6) - - _**release date:** 2020-11-15_ - -[Full Changelog](https://github.com/pypeclub/pype/compare/2.13.5...2.13.6) - -**Fixed bugs:** - -- Maya workfile version wasn't syncing with renders properly [\#711](https://github.com/pypeclub/pype/pull/711) -- Maya: Fix for publishing multiple cameras with review from the same scene 
[\#710](https://github.com/pypeclub/pype/pull/710) - - - - -### [2.13.5](https://github.com/pypeclub/pype/tree/2.13.5) - - _**release date:** 2020-11-12_ - -[Full Changelog](https://github.com/pypeclub/pype/compare/2.13.4...2.13.5) - - -**Fixed bugs:** - -- Wrong thumbnail file was picked when publishing sequence in standalone publisher [\#703](https://github.com/pypeclub/pype/pull/703) -- Fix: Burnin data pass and FFmpeg tool check [\#701](https://github.com/pypeclub/pype/pull/701) - - - - -### [2.13.4](https://github.com/pypeclub/pype/tree/2.13.4) - - _**release date:** 2020-11-09_ - -[Full Changelog](https://github.com/pypeclub/pype/compare/2.13.3...2.13.4) - - -**Fixed bugs:** - -- Photoshop unhiding hidden layers [\#688](https://github.com/pypeclub/pype/issues/688) -- Nuke: Favorite directories "shot dir" "project dir" - not working \#684 [\#685](https://github.com/pypeclub/pype/pull/685) - - - - - -### [2.13.3](https://github.com/pypeclub/pype/tree/2.13.3) - - _**release date:** _2020-11-03_ - -[Full Changelog](https://github.com/pypeclub/pype/compare/2.13.2...2.13.3) - -**Fixed bugs:** - -- Fix ffmpeg executable path with spaces [\#680](https://github.com/pypeclub/pype/pull/680) -- Hotfix: Added default version number [\#679](https://github.com/pypeclub/pype/pull/679) - - - - -### [2.13.2](https://github.com/pypeclub/pype/tree/2.13.2) - - _**release date:** 2020-10-28_ - -[Full Changelog](https://github.com/pypeclub/pype/compare/2.13.1...2.13.2) - -**Fixed bugs:** - -- Nuke: wrong conditions when fixing legacy write nodes [\#665](https://github.com/pypeclub/pype/pull/665) - - - - -### [2.13.1](https://github.com/pypeclub/pype/tree/2.13.1) - - _**release date:** 2020-10-23_ - -[Full Changelog](https://github.com/pypeclub/pype/compare/2.13.0...2.13.1) - -**Fixed bugs:** - -- Photoshop: Layer name is not propagating to metadata [\#654](https://github.com/pypeclub/pype/issues/654) -- Photoshop: Loader in fails with "can't set attribute" [\#650](https://github.com/pypeclub/pype/issues/650) -- Hiero: Review video file adding one frame to the end [\#659](https://github.com/pypeclub/pype/issues/659) - - - -## [2.13.0](https://github.com/pypeclub/pype/tree/2.13.0) - - _**release date:** 2020-10-16_ - -[Full Changelog](https://github.com/pypeclub/pype/compare/2.12.5...2.13.0) - -**Enhancements:** - -- Deadline Output Folder [\#636](https://github.com/pypeclub/pype/issues/636) -- Nuke Camera Loader [\#565](https://github.com/pypeclub/pype/issues/565) -- Deadline publish job shows publishing output folder [\#649](https://github.com/pypeclub/pype/pull/649) -- Get latest version in lib [\#642](https://github.com/pypeclub/pype/pull/642) -- Improved publishing of multiple representation from SP [\#638](https://github.com/pypeclub/pype/pull/638) -- TvPaint: launch shot work file from within Ftrack [\#631](https://github.com/pypeclub/pype/pull/631) -- Add mp4 support for RV action. 
[\#628](https://github.com/pypeclub/pype/pull/628) -- Maya: allow renders to have version synced with workfile [\#618](https://github.com/pypeclub/pype/pull/618) -- Renaming nukestudio host folder to hiero [\#617](https://github.com/pypeclub/pype/pull/617) -- Harmony: More efficient publishing [\#615](https://github.com/pypeclub/pype/pull/615) -- Ftrack server action improvement [\#608](https://github.com/pypeclub/pype/pull/608) -- Deadline user defaults to pype username if present [\#607](https://github.com/pypeclub/pype/pull/607) -- Standalone publisher now has icon [\#606](https://github.com/pypeclub/pype/pull/606) -- Nuke render write targeting knob improvement [\#603](https://github.com/pypeclub/pype/pull/603) -- Animated pyblish gui [\#602](https://github.com/pypeclub/pype/pull/602) -- Maya: Deadline - make use of asset dependencies optional [\#591](https://github.com/pypeclub/pype/pull/591) -- Nuke: Publishing, loading and updating alembic cameras [\#575](https://github.com/pypeclub/pype/pull/575) -- Maya: add look assigner to pype menu even if scriptsmenu is not available [\#573](https://github.com/pypeclub/pype/pull/573) -- Store task types in the database [\#572](https://github.com/pypeclub/pype/pull/572) -- Maya: Tiled EXRs to scanline EXRs render option [\#512](https://github.com/pypeclub/pype/pull/512) -- Fusion: basic integration refresh [\#452](https://github.com/pypeclub/pype/pull/452) - -**Fixed bugs:** - -- Burnin script did not propagate ffmpeg output [\#640](https://github.com/pypeclub/pype/issues/640) -- Pyblish-pype spacer in terminal wasn't transparent [\#646](https://github.com/pypeclub/pype/pull/646) -- Lib subprocess without logger [\#645](https://github.com/pypeclub/pype/pull/645) -- Nuke: prevent crash if we only have single frame in sequence [\#644](https://github.com/pypeclub/pype/pull/644) -- Burnin script logs better output [\#641](https://github.com/pypeclub/pype/pull/641) -- Missing audio on farm submission. [\#639](https://github.com/pypeclub/pype/pull/639) -- review from imagesequence error [\#633](https://github.com/pypeclub/pype/pull/633) -- Hiero: wrong order of fps clip instance data collecting [\#627](https://github.com/pypeclub/pype/pull/627) -- Add source for review instances. 
[\#625](https://github.com/pypeclub/pype/pull/625) -- Task processing in event sync [\#623](https://github.com/pypeclub/pype/pull/623) -- sync to avalon doesn t remove renamed task [\#619](https://github.com/pypeclub/pype/pull/619) -- Intent publish setting wasn't working with default value [\#562](https://github.com/pypeclub/pype/pull/562) -- Maya: Updating a look where the shader name changed, leaves the geo without a shader [\#514](https://github.com/pypeclub/pype/pull/514) - - -### [2.12.5](https://github.com/pypeclub/pype/tree/2.12.5) - -_**release date:** 2020-10-14_ - -[Full Changelog](https://github.com/pypeclub/pype/compare/2.12.4...2.12.5) - -**Fixed Bugs:** - -- Harmony: Disable application launch logic [\#637](https://github.com/pypeclub/pype/pull/637) - -### [2.12.4](https://github.com/pypeclub/pype/tree/2.12.4) - -_**release date:** 2020-10-08_ - -[Full Changelog](https://github.com/pypeclub/pype/compare/2.12.3...2.12.4) - -**Fixed bugs:** - -- Sync to avalon doesn't remove renamed task [\#605](https://github.com/pypeclub/pype/issues/605) - - -**Merged pull requests:** - -- NukeStudio: small fixes [\#622](https://github.com/pypeclub/pype/pull/622) -- NukeStudio: broken order of plugins [\#620](https://github.com/pypeclub/pype/pull/620) - -### [2.12.3](https://github.com/pypeclub/pype/tree/2.12.3) - -_**release date:** 2020-10-06_ - -[Full Changelog](https://github.com/pypeclub/pype/compare/2.12.2...2.12.3) - -**Fixed bugs:** - -- Harmony: empty scene contamination [\#583](https://github.com/pypeclub/pype/issues/583) -- Edit publishing in SP doesn't respect shot selection for publishing [\#542](https://github.com/pypeclub/pype/issues/542) -- Pathlib breaks compatibility with python2 hosts [\#281](https://github.com/pypeclub/pype/issues/281) -- Maya: fix maya scene type preset exception [\#569](https://github.com/pypeclub/pype/pull/569) -- Standalone publisher editorial plugins interfering [\#580](https://github.com/pypeclub/pype/pull/580) - -### [2.12.2](https://github.com/pypeclub/pype/tree/2.12.2) - -_**release date:** 2020-09-25_ - -[Full Changelog](https://github.com/pypeclub/pype/compare/2.12.1...2.12.2) - -**Fixed bugs:** - -- Harmony: Saving heavy scenes will crash [\#507](https://github.com/pypeclub/pype/issues/507) -- Extract review a representation name with `\*\_burnin` [\#388](https://github.com/pypeclub/pype/issues/388) -- Hierarchy data was not considering active instances [\#551](https://github.com/pypeclub/pype/pull/551) - -### [2.12.1](https://github.com/pypeclub/pype/tree/2.12.1) - -_**release date:** 2020-09-15_ - -[Full Changelog](https://github.com/pypeclub/pype/compare/2.12.0...2.12.1) - -**Fixed bugs:** - -- dependency security alert ! [\#484](https://github.com/pypeclub/pype/issues/484) -- Maya: RenderSetup is missing update [\#106](https://github.com/pypeclub/pype/issues/106) -- \ extract effects creates new instance [\#78](https://github.com/pypeclub/pype/issues/78) - - - - -## [2.12.0](https://github.com/pypeclub/pype/tree/2.12.0) ## - -_**release date:** 09 Sept 2020_ - -[Full Changelog](https://github.com/pypeclub/pype/compare/2.11.8...2.12.0) - -**Enhancements:** - -- Pype now uses less mongo connections [\#509](https://github.com/pypeclub/pype/pull/509) -- Nuke: adding image loader [\#499](https://github.com/pypeclub/pype/pull/499) -- Completely new application launcher [\#443](https://github.com/pypeclub/pype/pull/443) -- Maya: Optional skip review on renders. 
[\#441](https://github.com/pypeclub/pype/pull/441) -- Ftrack: Option to push status from task to latest version [\#440](https://github.com/pypeclub/pype/pull/440) -- Maya: Properly containerize image plane loads. [\#434](https://github.com/pypeclub/pype/pull/434) -- Option to keep the review files. [\#426](https://github.com/pypeclub/pype/pull/426) -- Maya: Isolate models during preview publishing [\#425](https://github.com/pypeclub/pype/pull/425) -- Ftrack attribute group is backwards compatible [\#418](https://github.com/pypeclub/pype/pull/418) -- Maya: Publishing of tile renderings on Deadline [\#398](https://github.com/pypeclub/pype/pull/398) -- Slightly better logging gui [\#383](https://github.com/pypeclub/pype/pull/383) -- Standalonepublisher: editorial family features expansion [\#411](https://github.com/pypeclub/pype/pull/411) - -**Fixed bugs:** - -- Maya: Fix tile order for Draft Tile Assembler [\#511](https://github.com/pypeclub/pype/pull/511) -- Remove extra dash [\#501](https://github.com/pypeclub/pype/pull/501) -- Fix: strip dot from repre names in single frame renders [\#498](https://github.com/pypeclub/pype/pull/498) -- Better handling of destination during integrating [\#485](https://github.com/pypeclub/pype/pull/485) -- Fix: allow thumbnail creation for single frame renders [\#460](https://github.com/pypeclub/pype/pull/460) -- added missing argument to launch\_application in ftrack app handler [\#453](https://github.com/pypeclub/pype/pull/453) -- Burnins: Copy bit rate of input video to match quality. [\#448](https://github.com/pypeclub/pype/pull/448) -- Standalone publisher is now independent from tray [\#442](https://github.com/pypeclub/pype/pull/442) -- Bugfix/empty enumerator attributes [\#436](https://github.com/pypeclub/pype/pull/436) -- Fixed wrong order of "other" category collapssing in publisher [\#435](https://github.com/pypeclub/pype/pull/435) -- Multiple reviews where being overwritten to one. 
[\#424](https://github.com/pypeclub/pype/pull/424) -- Cleanup plugin fail on instances without staging dir [\#420](https://github.com/pypeclub/pype/pull/420) -- deprecated -intra parameter in ffmpeg to new `-g` [\#417](https://github.com/pypeclub/pype/pull/417) -- Delivery action can now work with entered path [\#397](https://github.com/pypeclub/pype/pull/397) - - - - - -### [2.11.8](https://github.com/pypeclub/pype/tree/2.11.8) ## - -_**release date:** 27 Aug 2020_ - -[Full Changelog](https://github.com/pypeclub/pype/compare/2.11.7...2.11.8) - -**Fixed bugs:** - -- pyblish pype - other group is collapsed before plugins are done [\#431](https://github.com/pypeclub/pype/issues/431) -- Alpha white edges in harmony on PNGs [\#412](https://github.com/pypeclub/pype/issues/412) -- harmony image loader picks wrong representations [\#404](https://github.com/pypeclub/pype/issues/404) -- Clockify crash when response contain symbol not allowed by UTF-8 [\#81](https://github.com/pypeclub/pype/issues/81) - - - - -### [2.11.7](https://github.com/pypeclub/pype/tree/2.11.7) ## - -_**release date:** 21 Aug 2020_ - - -[Full Changelog](https://github.com/pypeclub/pype/compare/2.11.6...2.11.7) - -**Fixed bugs:** - -- Clean Up Baked Movie [\#369](https://github.com/pypeclub/pype/issues/369) -- celaction last workfile wasn't picked up correctly [\#459](https://github.com/pypeclub/pype/pull/459) - - - -### [2.11.5](https://github.com/pypeclub/pype/tree/2.11.5) ## - -_**release date:** 13 Aug 2020_ - -[Full Changelog](https://github.com/pypeclub/pype/compare/2.11.4...2.11.5) - -**Enhancements:** - -- Standalone publisher now only groups sequence if the extension is known [\#439](https://github.com/pypeclub/pype/pull/439) - -**Fixed bugs:** - -- Logs have been disable for editorial by default to speed up publishing [\#433](https://github.com/pypeclub/pype/pull/433) -- Various fixes for celaction [\#430](https://github.com/pypeclub/pype/pull/430) -- Harmony: invalid variable scope in validate scene settings [\#428](https://github.com/pypeclub/pype/pull/428) -- Harmomny: new representation name for audio was not accepted [\#427](https://github.com/pypeclub/pype/pull/427) - - - - -### [2.11.3](https://github.com/pypeclub/pype/tree/2.11.3) ## - -_**release date:** 4 Aug 2020_ - -[Full Changelog](https://github.com/pypeclub/pype/compare/2.11.2...2.11.3) - -**Fixed bugs:** - -- Harmony: publishing performance issues [\#408](https://github.com/pypeclub/pype/pull/408) - - - - -## 2.11.0 ## - -_**release date:** 27 July 2020_ - -**new:** -- _(blender)_ namespace support [\#341](https://github.com/pypeclub/pype/pull/341) -- _(blender)_ start end frames [\#330](https://github.com/pypeclub/pype/pull/330) -- _(blender)_ camera asset [\#322](https://github.com/pypeclub/pype/pull/322) -- _(pype)_ toggle instances per family in pyblish GUI [\#320](https://github.com/pypeclub/pype/pull/320) -- _(pype)_ current release version is now shown in the tray menu [#379](https://github.com/pypeclub/pype/pull/379) - - -**improved:** -- _(resolve)_ tagging for publish [\#239](https://github.com/pypeclub/pype/issues/239) -- _(pype)_ Support publishing a subset of shots with standalone editorial [\#336](https://github.com/pypeclub/pype/pull/336) -- _(harmony)_ Basic support for palettes [\#324](https://github.com/pypeclub/pype/pull/324) -- _(photoshop)_ Flag outdated containers on startup and publish. 
[\#309](https://github.com/pypeclub/pype/pull/309) -- _(harmony)_ Flag Outdated containers [\#302](https://github.com/pypeclub/pype/pull/302) -- _(photoshop)_ Publish review [\#298](https://github.com/pypeclub/pype/pull/298) -- _(pype)_ Optional Last workfile launch [\#365](https://github.com/pypeclub/pype/pull/365) - - -**fixed:** -- _(premiere)_ workflow fixes [\#346](https://github.com/pypeclub/pype/pull/346) -- _(pype)_ pype-setup does not work with space in path [\#327](https://github.com/pypeclub/pype/issues/327) -- _(ftrack)_ Ftrack delete action cause circular error [\#206](https://github.com/pypeclub/pype/issues/206) -- _(nuke)_ Priority was forced to 50 [\#345](https://github.com/pypeclub/pype/pull/345) -- _(nuke)_ Fix ValidateNukeWriteKnobs [\#340](https://github.com/pypeclub/pype/pull/340) -- _(maya)_ If camera attributes are connected, we can ignore them. [\#339](https://github.com/pypeclub/pype/pull/339) -- _(pype)_ stop appending of tools environment to existing env [\#337](https://github.com/pypeclub/pype/pull/337) -- _(ftrack)_ Ftrack timeout needs to look at AVALON\_TIMEOUT [\#325](https://github.com/pypeclub/pype/pull/325) -- _(harmony)_ Only zip files are supported. [\#310](https://github.com/pypeclub/pype/pull/310) -- _(pype)_ hotfix/Fix event server mongo uri [\#305](https://github.com/pypeclub/pype/pull/305) -- _(photoshop)_ Subset was not named or validated correctly. [\#304](https://github.com/pypeclub/pype/pull/304) - - - - - -## 2.10.0 ## - -_**release date:** 17 June 2020_ - -**new:** -- _(harmony)_ **Toon Boom Harmony** has been greatly extended to support rigging, scene build, animation and rendering workflows. [#270](https://github.com/pypeclub/pype/issues/270) [#271](https://github.com/pypeclub/pype/issues/271) [#190](https://github.com/pypeclub/pype/issues/190) [#191](https://github.com/pypeclub/pype/issues/191) [#172](https://github.com/pypeclub/pype/issues/172) [#168](https://github.com/pypeclub/pype/issues/168) -- _(pype)_ Added support for rudimentary **edl publishing** into individual shots. [#265](https://github.com/pypeclub/pype/issues/265) -- _(celaction)_ Simple **Celaction** integration has been added with support for workfiles and rendering. [#255](https://github.com/pypeclub/pype/issues/255) -- _(maya)_ Support for multiple job types when submitting to the farm. We can now render Maya or Standalone render jobs for Vray and Arnold (limited support for arnold) [#204](https://github.com/pypeclub/pype/issues/204) -- _(photoshop)_ Added initial support for Photoshop [#232](https://github.com/pypeclub/pype/issues/232) - -**improved:** -- _(blender)_ Updated support for rigs and added support Layout family [#233](https://github.com/pypeclub/pype/issues/233) [#226](https://github.com/pypeclub/pype/issues/226) -- _(premiere)_ It is now possible to choose different storage root for workfiles of different task types. [#255](https://github.com/pypeclub/pype/issues/255) -- _(maya)_ Support for unmerged AOVs in Redshift multipart EXRs [#197](https://github.com/pypeclub/pype/issues/197) -- _(pype)_ Pype repository has been refactored in preparation for 3.0 release [#169](https://github.com/pypeclub/pype/issues/169) -- _(deadline)_ All file dependencies are now passed to deadline from maya to prevent premature start of rendering if caches or textures haven't been coppied over yet. [#195](https://github.com/pypeclub/pype/issues/195) -- _(nuke)_ Script validation can now be made optional. 
[#194](https://github.com/pypeclub/pype/issues/194) -- _(pype)_ Publishing can now be stopped at any time. [#194](https://github.com/pypeclub/pype/issues/194) - -**fix:** -- _(pype)_ Pyblish-lite has been integrated into pype repository, plus various publishing GUI fixes. [#274](https://github.com/pypeclub/pype/issues/274) [#275](https://github.com/pypeclub/pype/issues/275) [#268](https://github.com/pypeclub/pype/issues/268) [#227](https://github.com/pypeclub/pype/issues/227) [#238](https://github.com/pypeclub/pype/issues/238) -- _(maya)_ Alembic extractor was getting wrong frame range type in certain scenarios [#254](https://github.com/pypeclub/pype/issues/254) -- _(maya)_ Attaching a render to subset in maya was not passing validation in certain scenarios [#256](https://github.com/pypeclub/pype/issues/256) -- _(ftrack)_ Various small fixes to ftrack sync [#263](https://github.com/pypeclub/pype/issues/263) [#259](https://github.com/pypeclub/pype/issues/259) -- _(maya)_ Look extraction is now able to skp invalid connections in shaders [#207](https://github.com/pypeclub/pype/issues/207) - - - - - -## 2.9.0 ## - -_**release date:** 25 May 2020_ - -**new:** -- _(pype)_ Support for **Multiroot projects**. You can now store project data on multiple physical or virtual storages and target individual publishes to these locations. For instance render can be stored on a faster storage than the rest of the project. [#145](https://github.com/pypeclub/pype/issues/145), [#38](https://github.com/pypeclub/pype/issues/38) -- _(harmony)_ Basic implementation of **Toon Boom Harmony** has been added. [#142](https://github.com/pypeclub/pype/issues/142) -- _(pype)_ OSX support is in public beta now. There are issues to be expected, but the main implementation should be functional. [#141](https://github.com/pypeclub/pype/issues/141) - - -**improved:** - -- _(pype)_ **Review extractor** has been completely rebuilt. It now supports granular filtering so you can create **multiple outputs** for different tasks, families or hosts. [#103](https://github.com/pypeclub/pype/issues/103), [#166](https://github.com/pypeclub/pype/issues/166), [#165](https://github.com/pypeclub/pype/issues/165) -- _(pype)_ **Burnin** generation had been extended to **support same multi-output filtering** as review extractor [#103](https://github.com/pypeclub/pype/issues/103) -- _(pype)_ Publishing file templates can now be specified in config for each individual family [#114](https://github.com/pypeclub/pype/issues/114) -- _(pype)_ Studio specific plugins can now be appended to pype standard publishing plugins. [#112](https://github.com/pypeclub/pype/issues/112) -- _(nukestudio)_ Reviewable clips no longer need to be previously cut, exported and re-imported to timeline. **Pype can now dynamically cut reviewable quicktimes** from continuous offline footage during publishing. [#23](https://github.com/pypeclub/pype/issues/23) -- _(deadline)_ Deadline can now correctly differentiate between staging and production pype. [#154](https://github.com/pypeclub/pype/issues/154) -- _(deadline)_ `PYPE_PYTHON_EXE` env variable can now be used to direct publishing to explicit python installation. [#120](https://github.com/pypeclub/pype/issues/120) -- _(nuke)_ Nuke now check for new version of loaded data on file open. [#140](https://github.com/pypeclub/pype/issues/140) -- _(nuke)_ frame range and limit checkboxes are now exposed on write node. 
[#119](https://github.com/pypeclub/pype/issues/119) - - - -**fix:** - -- _(nukestudio)_ Project Location was using backslashes which was breaking nukestudio native exporting in certains configurations [#82](https://github.com/pypeclub/pype/issues/82) -- _(nukestudio)_ Duplicity in hierarchy tags was prone to throwing publishing error [#130](https://github.com/pypeclub/pype/issues/130), [#144](https://github.com/pypeclub/pype/issues/144) -- _(ftrack)_ multiple stability improvements [#157](https://github.com/pypeclub/pype/issues/157), [#159](https://github.com/pypeclub/pype/issues/159), [#128](https://github.com/pypeclub/pype/issues/128), [#118](https://github.com/pypeclub/pype/issues/118), [#127](https://github.com/pypeclub/pype/issues/127) -- _(deadline)_ multipart EXRs were stopping review publishing on the farm. They are still not supported for automatic review generation, but the publish will go through correctly without the quicktime. [#155](https://github.com/pypeclub/pype/issues/155) -- _(deadline)_ If deadline is non-responsive it will no longer freeze host when publishing [#149](https://github.com/pypeclub/pype/issues/149) -- _(deadline)_ Sometimes deadline was trying to launch render before all the source data was coppied over. [#137](https://github.com/pypeclub/pype/issues/137) _(harmony)_ Basic implementation of **Toon Boom Harmony** has been added. [#142](https://github.com/pypeclub/pype/issues/142) -- _(nuke)_ Filepath knob wasn't updated properly. [#131](https://github.com/pypeclub/pype/issues/131) -- _(maya)_ When extracting animation, the "Write Color Set" options on the instance were not respected. [#108](https://github.com/pypeclub/pype/issues/108) -- _(maya)_ Attribute overrides for AOV only worked for the legacy render layers. Now it works for new render setup as well [#132](https://github.com/pypeclub/pype/issues/132) -- _(maya)_ Stability and usability improvements in yeti workflow [#104](https://github.com/pypeclub/pype/issues/104) - - - - - -## 2.8.0 ## - -_**release date:** 20 April 2020_ - -**new:** - -- _(pype)_ Option to generate slates from json templates. [PYPE-628] [#26](https://github.com/pypeclub/pype/issues/26) -- _(pype)_ It is now possible to automate loading of published subsets into any scene. Documentation will follow :). [PYPE-611] [#24](https://github.com/pypeclub/pype/issues/24) - -**fix:** - -- _(maya)_ Some Redshift render tokens could break publishing. [PYPE-778] [#33](https://github.com/pypeclub/pype/issues/33) -- _(maya)_ Publish was not preserving maya file extension. [#39](https://github.com/pypeclub/pype/issues/39) -- _(maya)_ Rig output validator was failing on nodes without shapes. [#40](https://github.com/pypeclub/pype/issues/40) -- _(maya)_ Yeti caches can now be properly versioned up in the scene inventory. [#40](https://github.com/pypeclub/pype/issues/40) -- _(nuke)_ Build first workfiles was not accepting jpeg sequences. [#34](https://github.com/pypeclub/pype/issues/34) -- _(deadline)_ Trying to generate ffmpeg review from multipart EXRs no longer crashes publishing. [PYPE-781] -- _(deadline)_ Render publishing is more stable in multiplatform environments. [PYPE-775] - - - - - -## 2.7.0 ## - -_**release date:** 30 March 2020_ - -**new:** - -- _(maya)_ Artist can now choose to load multiple references of the same subset at once [PYPE-646, PYPS-81] -- _(nuke)_ Option to use named OCIO colorspaces for review colour baking. [PYPS-82] -- _(pype)_ Pype can now work with `master` versions for publishing and loading. 
These are non-versioned publishes that are overwritten with the latest version during publish. These are now supported in all the GUIs, but their publishing is deactivated by default. [PYPE-653] -- _(blender)_ Added support for basic blender workflow. We currently support `rig`, `model` and `animation` families. [PYPE-768] -- _(pype)_ Source timecode can now be used in burn-ins. [PYPE-777] -- _(pype)_ Review outputs profiles can now specify delivery resolution different than project setting [PYPE-759] -- _(nuke)_ Bookmark to current context is now added automatically to all nuke browser windows. [PYPE-712] - -**change:** - -- _(maya)_ It is now possible to publish camera without. baking. Keep in mind that unbaked cameras can't be guaranteed to work in other hosts. [PYPE-595] -- _(maya)_ All the renders from maya are now grouped in the loader by their Layer name. [PYPE-482] -- _(nuke/hiero)_ Any publishes from nuke and hiero can now be versioned independently of the workfile. [PYPE-728] - - -**fix:** - -- _(nuke)_ Mixed slashes caused issues in ocio config path. -- _(pype)_ Intent field in pyblish GUI was passing label instead of value to ftrack. [PYPE-733] -- _(nuke)_ Publishing of pre-renders was inconsistent. [PYPE-766] -- _(maya)_ Handles and frame ranges were inconsistent in various places during publishing. -- _(nuke)_ Nuke was crashing if it ran into certain missing knobs. For example DPX output missing `autocrop` [PYPE-774] -- _(deadline)_ Project overrides were not working properly with farm render publishing. -- _(hiero)_ Problems with single frame plates publishing. -- _(maya)_ Redshift RenderPass token were breaking render publishing. [PYPE-778] -- _(nuke)_ Build first workfile was not accepting jpeg sequences. -- _(maya)_ Multipart (Multilayer) EXRs were breaking review publishing due to FFMPEG incompatiblity [PYPE-781] - - - - -## 2.6.0 ## - -_**release date:** 9 March 2020_ - -**change:** -- _(maya)_ render publishing has been simplified and made more robust. Render setup layers are now automatically added to publishing subsets and `render globals` family has been replaced with simple `render` [PYPE-570] -- _(avalon)_ change context and workfiles apps, have been merged into one, that allows both actions to be performed at the same time. [PYPE-747] -- _(pype)_ thumbnails are now automatically propagate to asset from the last published subset in the loader -- _(ftrack)_ publishing comment and intent are now being published to ftrack note as well as describtion. [PYPE-727] -- _(pype)_ when overriding existing version new old representations are now overriden, instead of the new ones just being appended. (to allow this behaviour, the version validator need to be disabled. [PYPE-690]) -- _(pype)_ burnin preset has been significantly simplified. It now doesn't require passing function to each field, but only need the actual text template. to use this, all the current burnin PRESETS MUST BE UPDATED for all the projects. -- _(ftrack)_ credentials are now stored on a per server basis, so it's possible to switch between ftrack servers without having to log in and out. [PYPE-723] - - -**new:** -- _(pype)_ production and development deployments now have different colour of the tray icon. Orange for Dev and Green for production [PYPE-718] -- _(maya)_ renders can now be attached to a publishable subset rather than creating their own subset. 
For example it is possible to create a reviewable `look` or `model` render and have it correctly attached as a representation of the subsets [PYPE-451] -- _(maya)_ after saving current scene into a new context (as a new shot for instance), all the scene publishing subsets data gets re-generated automatically to match the new context [PYPE-532] -- _(pype)_ we now support project specific publish, load and create plugins [PYPE-740] -- _(ftrack)_ new action that allow archiving/deleting old published versions. User can keep how many of the latest version to keep when the action is ran. [PYPE-748, PYPE-715] -- _(ftrack)_ it is now possible to monitor and restart ftrack event server using ftrack action. [PYPE-658] -- _(pype)_ validator that prevent accidental overwrites of previously published versions. [PYPE-680] -- _(avalon)_ avalon core updated to version 5.6.0 -- _(maya)_ added validator to make sure that relative paths are used when publishing arnold standins. -- _(nukestudio)_ it is now possible to extract and publish audio family from clip in nuke studio [PYPE-682] - -**fix**: -- _(maya)_ maya set framerange button was ignoring handles [PYPE-719] -- _(ftrack)_ sync to avalon was sometime crashing when ran on empty project -- _(nukestudio)_ publishing same shots after they've been previously archived/deleted would result in a crash. [PYPE-737] -- _(nuke)_ slate workflow was breaking in certain scenarios. [PYPE-730] -- _(pype)_ rendering publish workflow has been significantly improved to prevent error resulting from implicit render collection. [PYPE-665, PYPE-746] -- _(pype)_ launching application on a non-synced project resulted in obscure [PYPE-528] -- _(pype)_ missing keys in burnins no longer result in an error. [PYPE-706] -- _(ftrack)_ create folder structure action was sometimes failing for project managers due to wrong permissions. -- _(Nukestudio)_ using `source` in the start frame tag could result in wrong frame range calculation -- _(ftrack)_ sync to avalon action and event have been improved by catching more edge cases and provessing them properly. - - - - -## 2.5.0 ## - -_**release date:** 11 Feb 2020_ - -**change:** -- _(pype)_ added many logs for easier debugging -- _(pype)_ review presets can now be separated between 2d and 3d renders [PYPE-693] -- _(pype)_ anatomy module has been greatly improved to allow for more dynamic pulblishing and faster debugging [PYPE-685] -- _(pype)_ avalon schemas have been moved from `pype-config` to `pype` repository, for simplification. [PYPE-670] -- _(ftrack)_ updated to latest ftrack API -- _(ftrack)_ publishing comments now appear in ftrack also as a note on version with customisable category [PYPE-645] -- _(ftrack)_ delete asset/subset action had been improved. It is now able to remove multiple entities and descendants of the selected entities [PYPE-361, PYPS-72] -- _(workfiles)_ added date field to workfiles app [PYPE-603] -- _(maya)_ old deprecated loader have been removed in favour of a single unified reference loader (old scenes will upgrade automatically to the new loader upon opening) [PYPE-633, PYPE-697] -- _(avalon)_ core updated to 5.5.15 [PYPE-671] -- _(nuke)_ library loader is now available in nuke [PYPE-698] - - -**new:** -- _(pype)_ added pype render wrapper to allow rendering on mixed platform farms. [PYPE-634] -- _(pype)_ added `pype launch` command. It let's admin run applications with dynamically built environment based on the given context. 
[PYPE-634] -- _(pype)_ added support for extracting review sequences with burnins [PYPE-657] -- _(publish)_ users can now set intent next to a comment when publishing. This will then be reflected on an attribute in ftrack. [PYPE-632] -- _(burnin)_ timecode can now be added to burnin -- _(burnin)_ datetime keys can now be added to burnin and anatomy [PYPE-651] -- _(burnin)_ anatomy templates can now be used in burnins. [PYPE=626] -- _(nuke)_ new validator for render resolution -- _(nuke)_ support for attach slate to nuke renders [PYPE-630] -- _(nuke)_ png sequences were added to loaders -- _(maya)_ added maya 2020 compatibility [PYPE-677] -- _(maya)_ ability to publish and load .ASS standin sequences [PYPS-54] -- _(pype)_ thumbnails can now be published and are visible in the loader. `AVALON_THUMBNAIL_ROOT` environment variable needs to be set for this to work [PYPE-573, PYPE-132] -- _(blender)_ base implementation of blender was added with publishing and loading of .blend files [PYPE-612] -- _(ftrack)_ new action for preparing deliveries [PYPE-639] - - -**fix**: -- _(burnin)_ more robust way of finding ffmpeg for burnins. -- _(pype)_ improved UNC paths remapping when sending to farm. -- _(pype)_ float frames sometimes made their way to representation context in database, breaking loaders [PYPE-668] -- _(pype)_ `pype install --force` was failing sometimes [PYPE-600] -- _(pype)_ padding in published files got calculated wrongly sometimes. It is now instead being always read from project anatomy. [PYPE-667] -- _(publish)_ comment publishing was failing in certain situations -- _(ftrack)_ multiple edge case scenario fixes in auto sync and sync-to-avalon action -- _(ftrack)_ sync to avalon now works on empty projects -- _(ftrack)_ thumbnail update event was failing when deleting entities [PYPE-561] -- _(nuke)_ loader applies proper colorspaces from Presets -- _(nuke)_ publishing handles didn't always work correctly [PYPE-686] -- _(maya)_ assembly publishing and loading wasn't working correctly - - - - - - -## 2.4.0 ## - -_**release date:** 9 Dec 2019_ - -**change:** -- _(ftrack)_ version to status ftrack event can now be configured from Presets - - based on preset `presets/ftracc/ftrack_config.json["status_version_to_task"]` -- _(ftrack)_ sync to avalon event has been completely re-written. It now supports most of the project management situations on ftrack including moving, renaming and deleting entities, updating attributes and working with tasks. -- _(ftrack)_ sync to avalon action has been also re-writen. It is now much faster (up to 100 times depending on a project structure), has much better logging and reporting on encountered problems, and is able to handle much more complex situations. -- _(ftrack)_ sync to avalon trigger by checking `auto-sync` toggle on ftrack [PYPE-504] -- _(pype)_ various new features in the REST api -- _(pype)_ new visual identity used across pype -- _(pype)_ started moving all requirements to pip installation rather than vendorising them in pype repository. Due to a few yet unreleased packages, this means that pype can temporarily be only installed in the offline mode. - -**new:** -- _(nuke)_ support for publishing gizmos and loading them as viewer processes -- _(nuke)_ support for publishing nuke nodes from backdrops and loading them back -- _(pype)_ burnins can now work with start and end frames as keys - - use keys `{frame_start}`, `{frame_end}` and `{current_frame}` in burnin preset to use them. 
[PYPS-44,PYPS-73, PYPE-602] -- _(pype)_ option to filter logs by user and level in loggin GUI -- _(pype)_ image family added to standalone publisher [PYPE-574] -- _(pype)_ matchmove family added to standalone publisher [PYPE-574] -- _(nuke)_ validator for comparing arbitrary knobs with values from presets -- _(maya)_ option to force maya to copy textures in the new look publish rather than hardlinking them -- _(pype)_ comments from pyblish GUI are now being added to ftrack version -- _(maya)_ validator for checking outdated containers in the scene -- _(maya)_ option to publish and load arnold standin sequence [PYPE-579, PYPS-54] - -**fix**: -- _(pype)_ burnins were not respecting codec of the input video -- _(nuke)_ lot's of various nuke and nuke studio fixes across the board [PYPS-45] -- _(pype)_ workfiles app is not launching with the start of the app by default [PYPE-569] -- _(ftrack)_ ftrack integration during publishing was failing under certain situations [PYPS-66] -- _(pype)_ minor fixes in REST api -- _(ftrack)_ status change event was crashing when the target status was missing [PYPS-68] -- _(ftrack)_ actions will try to reconnect if they fail for some reason -- _(maya)_ problems with fps mapping when using float FPS values -- _(deadline)_ overall improvements to deadline publishing -- _(setup)_ environment variables are now remapped on the fly based on the platform pype is running on. This fixes many issues in mixed platform environments. - - - - -## 2.3.6 # - -_**release date:** 27 Nov 2019_ - -**hotfix**: -- _(ftrack)_ was hiding important debug logo -- _(nuke)_ crashes during workfile publishing -- _(ftrack)_ event server crashes because of signal problems -- _(muster)_ problems with muster render submissions -- _(ftrack)_ thumbnail update event syntax errors - - - - -## 2.3.0 ## - -_release date: 6 Oct 2019_ - -**new**: -- _(maya)_ support for yeti rigs and yeti caches -- _(maya)_ validator for comparing arbitrary attributes against ftrack -- _(pype)_ burnins can now show current date and time -- _(muster)_ pools can now be set in render globals in maya -- _(pype)_ Rest API has been implemented in beta stage -- _(nuke)_ LUT loader has been added -- _(pype)_ rudimentary user module has been added as preparation for user management -- _(pype)_ a simple logging GUI has been added to pype tray -- _(nuke)_ nuke can now bake input process into mov -- _(maya)_ imported models now have selection handle displayed by defaulting -- _(avalon)_ it's is now possible to load multiple assets at once using loader -- _(maya)_ added ability to automatically connect yeti rig to a mesh upon loading - -**changed**: -- _(ftrack)_ event server now runs two parallel processes and is able to keep queue of events to process. 
-- _(nuke)_ task name is now added to all rendered subsets -- _(pype)_ adding more families to standalone publisher -- _(pype)_ standalone publisher now uses pyblish-lite -- _(pype)_ standalone publisher can now create review quicktimes -- _(ftrack)_ queries to ftrack were sped up -- _(ftrack)_ multiple ftrack action have been deprecated -- _(avalon)_ avalon upstream has been updated to 5.5.0 -- _(nukestudio)_ published transforms can now be animated -- - -**fix**: -- _(maya)_ fps popup button didn't work in some cases -- _(maya)_ geometry instances and references in maya were losing shader assignments -- _(muster)_ muster rendering templates were not working correctly -- _(maya)_ arnold tx texture conversion wasn't respecting colorspace set by the artist -- _(pype)_ problems with avalon db sync -- _(maya)_ ftrack was rounding FPS making it inconsistent -- _(pype)_ wrong icon names in Creator -- _(maya)_ scene inventory wasn't showing anything if representation was removed from database after it's been loaded to the scene -- _(nukestudio)_ multiple bugs squashed -- _(loader)_ loader was taking long time to show all the loading action when first launcher in maya - -## 2.2.0 ## -_**release date:** 8 Sept 2019_ - -**new**: -- _(pype)_ add customisable workflow for creating quicktimes from renders or playblasts -- _(nuke)_ option to choose deadline chunk size on write nodes -- _(nukestudio)_ added option to publish soft effects (subTrackItems) from NukeStudio as subsets including LUT files. these can then be loaded in nuke or NukeStudio -- _(nuke)_ option to build nuke script from previously published latest versions of plate and render subsets. -- _(nuke)_ nuke writes now have deadline tab. -- _(ftrack)_ Prepare Project action can now be used for creating the base folder structure on disk and in ftrack, setting up all the initial project attributes and it automatically prepares `pype_project_config` folder for the given project. -- _(clockify)_ Added support for time tracking in clockify. This currently in addition to ftrack time logs, but does not completely replace them. -- _(pype)_ any attributes in Creator and Loader plugins can now be customised using pype preset system - -**changed**: -- nukestudio now uses workio API for workfiles -- _(maya)_ "FIX FPS" prompt in maya now appears in the middle of the screen -- _(muster)_ can now be configured with custom templates -- _(pype)_ global publishing plugins can now be configured using presets as well as host specific ones - - -**fix**: -- wrong version retrieval from path in certain scenarios -- nuke reset resolution wasn't working in certain scenarios - -## 2.1.0 ## -_release date: 6 Aug 2019_ - -A large cleanup release. Most of the change are under the hood. - -**new**: -- _(pype)_ add customisable workflow for creating quicktimes from renders or playblasts -- _(pype)_ Added configurable option to add burnins to any generated quicktimes -- _(ftrack)_ Action that identifies what machines pype is running on. -- _(system)_ unify subprocess calls -- _(maya)_ add audio to review quicktimes -- _(nuke)_ add crop before write node to prevent overscan problems in ffmpeg -- **Nuke Studio** publishing and workfiles support -- **Muster** render manager support -- _(nuke)_ Framerange, FPS and Resolution are set automatically at startup -- _(maya)_ Ability to load published sequences as image planes -- _(system)_ Ftrack event that sets asset folder permissions based on task assignees in ftrack. 
-- _(maya)_ Pyblish plugin that allow validation of maya attributes -- _(system)_ added better startup logging to tray debug, including basic connection information -- _(avalon)_ option to group published subsets to groups in the loader -- _(avalon)_ loader family filters are working now - -**changed**: -- change multiple key attributes to unify their behaviour across the pipeline - - `frameRate` to `fps` - - `startFrame` to `frameStart` - - `endFrame` to `frameEnd` - - `fstart` to `frameStart` - - `fend` to `frameEnd` - - `handle_start` to `handleStart` - - `handle_end` to `handleEnd` - - `resolution_width` to `resolutionWidth` - - `resolution_height` to `resolutionHeight` - - `pixel_aspect` to `pixelAspect` - -- _(nuke)_ write nodes are now created inside group with only some attributes editable by the artist -- rendered frames are now deleted from temporary location after their publishing is finished. -- _(ftrack)_ RV action can now be launched from any entity -- after publishing only refresh button is now available in pyblish UI -- added context instance pyblish-lite so that artist knows if context plugin fails -- _(avalon)_ allow opening selected files using enter key -- _(avalon)_ core updated to v5.2.9 with our forked changes on top - -**fix**: -- faster hierarchy retrieval from db -- _(nuke)_ A lot of stability enhancements -- _(nuke studio)_ A lot of stability enhancements -- _(nuke)_ now only renders a single write node on farm -- _(ftrack)_ pype would crash when launcher project level task -- work directory was sometimes not being created correctly -- major pype.lib cleanup. Removing of unused functions, merging those that were doing the same and general house cleaning. -- _(avalon)_ subsets in maya 2019 weren't behaving correctly in the outliner diff --git a/website/docs/dev_publishing.md b/website/docs/dev_publishing.md index 8ee3b7e85f..f11a2c3047 100644 --- a/website/docs/dev_publishing.md +++ b/website/docs/dev_publishing.md @@ -66,7 +66,7 @@ Another optional function is **get_current_context**. This function is handy in Main responsibility of create plugin is to create, update, collect and remove instance metadata and propagate changes to create context. Has access to **CreateContext** (`self.create_context`) that discovered the plugin so has also access to other creators and instances. Create plugins have a lot of responsibility so it is recommended to implement common code per host. #### *BaseCreator* -Base implementation of creator plugin. It is not recommended to use this class as base for production plugins but rather use one of **AutoCreator** and **Creator** variants. +Base implementation of creator plugin. It is not recommended to use this class as base for production plugins but rather use one of **HiddenCreator**, **AutoCreator** and **Creator** variants. **Abstractions** - **`family`** (class attr) - Tells what kind of instance will be created. @@ -92,7 +92,7 @@ def collect_instances(self): self._add_instance_to_context(instance) ``` -- **`create`** (method) - Create a new object of **CreatedInstance** store its metadata to the workfile and add the instance into the created context. Failed Creating should raise **CreatorError** if an error happens that artists can fix or give them some useful information. Triggers and implementation differs for **Creator** and **AutoCreator**. +- **`create`** (method) - Create a new object of **CreatedInstance** store its metadata to the workfile and add the instance into the created context. 
Failed creation should raise **CreatorError** if an error happens that artists can fix, or to give them some useful information. Triggers and implementation differ for **Creator**, **HiddenCreator** and **AutoCreator**.
 
 - **`update_instances`** (method) - Update data of instances. Receives tuple with **instance** and **changes**.
 
 ```python
@@ -172,11 +172,11 @@ class RenderLayerCreator(Creator):
     icon = "fa5.building"
 ```
 
-- **`get_instance_attr_defs`** (method) - Attribute definitions of instance. Creator can define attribute values with default values for each instance. These attributes may affect how instances will be instance processed during publishing. Attribute defiitions can be used from `openpype.pipeline.lib.attribute_definitions` (NOTE: Will be moved to `openpype.lib.attribute_definitions` soon). Attribute definitions define basic types of values for different cases e.g. boolean, number, string, enumerator, etc. Default implementation returns **instance_attr_defs**.
+- **`get_instance_attr_defs`** (method) - Attribute definitions of instance. Creator can define attribute values with default values for each instance. These attributes may affect how instances will be processed during publishing. Attribute definitions can be used from `openpype.lib.attribute_definitions`. Attribute definitions define basic types of values for different cases e.g. boolean, number, string, enumerator, etc. Default implementation returns **instance_attr_defs**.
 - **`instance_attr_defs`** (attr) - Attribute for default implementation of **get_instance_attr_defs**.
 
 ```python
-from openpype.pipeline import attribute_definitions
+from openpype.lib import attribute_definitions
 
 
 class RenderLayerCreator(Creator):
@@ -199,6 +199,20 @@ class RenderLayerCreator(Creator):
 
 - **`get_dynamic_data`** (method) - Can be used to extend data for subset templates which may be required in some cases.
 
+#### *HiddenCreator*
+Creator which is not shown in UI, so an artist can't trigger it directly, but it is available to other creators. This creator is primarily meant for cases when a single creation should produce different types of instances. For example during editorial publishing the input is a single edl file, but 2 or more kinds of instances should be created, each with a different family, attributes and abilities. Arguments for creation are limited to `instance_data` and `source_data`. Data in `instance_data` should follow what is sent to other creators and `source_data` can be used to send custom data defined by the main creator. It is expected that a `HiddenCreator` has a specific main or "parent" creator.
+
+```python
+from openpype.client import get_asset_by_name
+
+
+def create(self, instance_data, source_data):
+    variant = instance_data["variant"]
+    task_name = instance_data["task"]
+    asset_name = instance_data["asset"]
+    asset_doc = get_asset_by_name(self.project_name, asset_name)
+    self.get_subset_name(
+        variant, task_name, asset_doc, self.project_name, self.host_name)
+```
+
+
 #### *AutoCreator*
 Creator that is triggered on reset of create context. Can be used for families that are expected to be created automatically without artist interaction (e.g. **workfile**). Method `create` is triggered after collecting all creators. 
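Below is a minimal, illustrative sketch of how such an automatic creator can be wrapped into a class; it is not the documented implementation. The class name, identifier and `_variant_name` attribute are placeholders, and it assumes `AutoCreator` can be imported from `openpype.pipeline.create` next to `Creator`. The `create` body shown in the example below would live inside a class like this.

```python
from openpype.pipeline.create import AutoCreator


class CreateWorkfile(AutoCreator):
    """Workfile instance is (re)created on every reset of create context."""

    # Class-level identification attributes (see 'BaseCreator' abstractions)
    family = "workfile"
    identifier = "io.openpype.creators.example.workfile"
    label = "Workfile"

    # Variant used when the instance is created without artist interaction
    # (the documented example fills it from settings)
    _variant_name = "Main"

    def collect_instances(self):
        # Host specific: read instance metadata stored in the workfile and
        # re-create instances with 'self._add_instance_to_context'.
        pass

    def create(self):
        # Triggered automatically after all creators collected instances.
        # See the example below: find an existing workfile instance, create
        # it when missing, or update its asset/task when context changed.
        pass
```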
@@ -234,14 +248,14 @@ def create(self): # - variant can be filled from settings variant = self._variant_name # Only place where we can look for current context - project_name = io.Session["AVALON_PROJECT"] - asset_name = io.Session["AVALON_ASSET"] - task_name = io.Session["AVALON_TASK"] - host_name = io.Session["AVALON_APP"] + project_name = self.project_name + asset_name = legacy_io.Session["AVALON_ASSET"] + task_name = legacy_io.Session["AVALON_TASK"] + host_name = legacy_io.Session["AVALON_APP"] # Create new instance if does not exist yet if existing_instance is None: - asset_doc = io.find_one({"type": "asset", "name": asset_name}) + asset_doc = get_asset_by_name(project_name, asset_name) subset_name = self.get_subset_name( variant, task_name, asset_doc, project_name, host_name ) @@ -264,7 +278,7 @@ def create(self): existing_instance["asset"] != asset_name or existing_instance["task"] != task_name ): - asset_doc = io.find_one({"type": "asset", "name": asset_name}) + asset_doc = get_asset_by_name(project_name, asset_name) subset_name = self.get_subset_name( variant, task_name, asset_doc, project_name, host_name ) @@ -297,7 +311,8 @@ class BulkRenderCreator(Creator): - **`pre_create_attr_defs`** (attr) - Attribute for default implementation of **get_pre_create_attr_defs**. ```python -from openpype.pipeline import Creator, attribute_definitions +from openpype.lib import attribute_definitions +from openpype.pipeline.create import Creator class CreateRender(Creator): @@ -470,10 +485,8 @@ Possible attribute definitions can be found in `openpype/pipeline/lib/attribute_ ```python import pyblish.api -from openpype.pipeline import ( - OpenPypePyblishPluginMixin, - attribute_definitions, -) +from openpype.lib import attribute_definitions +from openpype.pipeline import OpenPypePyblishPluginMixin # Example context plugin diff --git a/website/docs/dev_settings.md b/website/docs/dev_settings.md new file mode 100644 index 0000000000..94590345e8 --- /dev/null +++ b/website/docs/dev_settings.md @@ -0,0 +1,899 @@ +--- +id: dev_settings +title: Settings +sidebar_label: Settings +--- + +Settings give the ability to change how OpenPype behaves in certain situations. Settings are split into 3 categories **system settings**, **project anatomy** and **project settings**. Project anatomy and project settings are grouped into a single category but there is a technical difference (explained later). Only difference in system and project settings is that system settings can't be technically handled on a project level or their values must be available no matter in which project the values are received. Settings have headless entities or settings UI. + +There is one more category **local settings** but they don't have ability to be changed or defined easily. Local settings can change how settings work per machine, can affect both system and project settings but they're hardcoded for predefined values at this moment. + +## Settings schemas +System and project settings are defined by settings schemas. Schema defines the structure of output value, what value types output will contain, how settings are stored and how its UI input will look. + +## Settings values +Output of settings is a json serializable value. There are 3 possible types of value **default values**, **studio overrides** and **project overrides**. Default values must be always available for all settings schemas, their values are stored to code. Default values are what everyone who just installed OpenPype will use as default values. 
It is a good practice to set example values, but they should be actually relevant.
+
+Setting overrides is what makes settings a powerful tool. Overrides contain only a part of the settings with additional metadata that describes which parts of the settings values should be replaced by the override values. Using overrides gives the ability to save only specific values and use default values for the rest. It is especially useful in project settings, which have up to 2 levels of overrides. Project settings use **default values** as a base on which **studio overrides** and then **project overrides** are applied. In practice it is possible to save only studio overrides, which affect all projects. Changes in studio overrides are then propagated to all projects without project overrides. Values can also be locked on project level so studio overrides are not used.
+
+## Settings storage
+As was mentioned, default values are stored in repository files. Overrides are stored in the Mongo database. The value in mongo contains only overrides with metadata, so its content on its own is useless and must be used in combination with default values. System settings and project settings are stored in a special collection. A single document represents one set of overrides together with the OpenPype version for which it was stored. Settings are versioned and are loaded in a specific order - overrides for the current OpenPype version, or the first lower version available. If there are no overrides with the same or a lower version then the first higher version is used. If there are no overrides at all then no overrides are applied.
+
+Project anatomy is stored in the project document, thus it is not versioned and its values are always overridden. Any changes in the anatomy schema may have a drastic effect on production and OpenPype updates.
+
+## Settings schema items
+As was mentioned, schema items define the output type of values, how they are stored and how they look in UI.
+- schemas are (by default) defined by json files
+- OpenPype core system settings schemas are stored in `~/openpype/settings/entities/schemas/system_schema/` and project settings in `~/openpype/settings/entities/schemas/projects_schema/`
+ - both contain `schema_main.json` which are the entry points
+- OpenPype modules/addons can define their settings schemas using `BaseModuleSettingsDef`, in that case some functionality may be slightly modified
+- a single schema item is represented by a dictionary (object) in json which has a `"type"` key
+ - **type** is the only common key which is required for all schema items
+- each item may have "input modifiers" (other keys in the dictionary) which may be required or optional based on the type
+- there are special keys across all items
+ - `"is_file"` - this key is used when default values are stored in the file. 
Its value matches the filename where values are stored + - key is validated, must be unique in hierarchy otherwise it won't be possible to store default values + - make sense to fill it only if it's value if `true` + + - `"is_group"` - define that all values under a key in settings hierarchy will be overridden if any value is modified + - this key is not allowed for all inputs as they may not have technical ability to handle it + - key is validated, must be unique in hierarchy and is automatically filled on last possible item if is not defined in schemas + - make sense to fill it only if it's value if `true` +- all entities can have set `"tooltip"` key with description which will be shown in UI on hover + +### Inner schema +Settings schemas are big json files which would become unmanageable if they were in a single file. To be able to split them into multiple files to help organize them special types `schema` and `template` were added. Both types are related to a different file by filename. If a json file contains a dictionary it is considered as `schema` if it contains a list it is considered as a `template`. + +#### schema +Schema item is replaced by content of entered schema name. It is recommended that the schema file is used only once in settings hierarchy. Templates are meant for reusing. +- schema must have `"name"` key which is name of schema that should be used + +```javascript +{ + "type": "schema", + "name": "my_schema_name" +} +``` + +#### template +Templates are almost the same as schema items but can contain one or more items which can be formatted with additional data or some keys can be skipped if needed. Templates are meant for reusing the same schemas with ability to modify content. + +- legacy name is `schema_template` (still usable) +- template must have `"name"` key which is name of template file that should be used +- to fill formatting keys use `"template_data"` +- all items in template, except `__default_values__`, will replace `template` item in original schema +- template may contain other templates + +```javascript +// Example template json file content +[ + { + // Define default values for formatting values + // - gives ability to set the value but have default value + "__default_values__": { + "multipath_executables": true + } + }, { + "type": "raw-json", + "label": "{host_label} Environments", + "key": "{host_name}_environments" + }, { + "type": "path", + "key": "{host_name}_executables", + "label": "{host_label} - Full paths to executables", + "multiplatform": "{multipath_executables}", + "multipath": true + } +] +``` +```javascript +// Example usage of the template in schema +{ + "type": "dict", + "key": "template_examples", + "label": "Schema template examples", + "children": [ + { + "type": "template", + "name": "example_template", + "template_data": [ + { + "host_label": "Maya 2019", + "host_name": "maya_2019", + "multipath_executables": false + }, + { + "host_label": "Maya 2020", + "host_name": "maya_2020" + }, + { + "host_label": "Maya 2021", + "host_name": "maya_2021" + } + ] + } + ] +} +``` +```javascript +// The same schema defined without templates +{ + "type": "dict", + "key": "template_examples", + "label": "Schema template examples", + "children": [ + { + "type": "raw-json", + "label": "Maya 2019 Environments", + "key": "maya_2019_environments" + }, { + "type": "path", + "key": "maya_2019_executables", + "label": "Maya 2019 - Full paths to executables", + "multiplatform": false, + "multipath": true + }, { + "type": "raw-json", + "label": "Maya 
2020 Environments", + "key": "maya_2020_environments" + }, { + "type": "path", + "key": "maya_2020_executables", + "label": "Maya 2020 - Full paths to executables", + "multiplatform": true, + "multipath": true + }, { + "type": "raw-json", + "label": "Maya 2021 Environments", + "key": "maya_2021_environments" + }, { + "type": "path", + "key": "maya_2021_executables", + "label": "Maya 2021 - Full paths to executables", + "multiplatform": true, + "multipath": true + } + ] +} +``` + +Template data can be used only to fill templates in values but not in keys. It is also possible to define default values for unfilled fields to do so one of the items in the list must be a dictionary with key "__default_values__"` and value as dictionary with default key: values (as in example above). +```javascript +{ + ... + // Allowed + "key": "{to_fill}" + ... + // Not allowed + "{to_fill}": "value" + ... +} +``` + +Because formatting values can be only string it is possible to use formatting values which are replaced with different types. +```javascript +// Template data +{ + "template_data": { + "executable_multiplatform": { + "type": "schema", + "name": "my_multiplatform_schema" + } + } +} +// Template content +{ + ... + // Allowed - value is replaced with dictionary + "multiplatform": "{executable_multiplatform}" + ... + // Not allowed - there is no way how it could be replaced + "multiplatform": "{executable_multiplatform}_enhanced_string" + ... +} +``` + +#### dynamic_schema +Dynamic schema item marks a place in settings schema where schemas defined by `BaseModuleSettingsDef` can be placed. +- example: +```javascript +{ + "type": "dynamic_schema", + "name": "project_settings/global" +} +``` +- `BaseModuleSettingsDef` with implemented `get_settings_schemas` can return a dictionary where key defines a dynamic schema name and value schemas that will be put there +- dynamic schemas work almost the same way as templates + - one item can be replaced by multiple items (or by 0 items) +- goal is to dynamically load settings of OpenPype modules without having their schemas or default values in core repository + - values of these schemas are saved using the `BaseModuleSettingsDef` methods +- we recommend to use `JsonFilesSettingsDef` which has full implementation of storing default values to json files + - requires only to implement method `get_settings_root_path` which should return path to root directory where settings schema can be found and default values will be saved + +### Basic Dictionary inputs +These inputs wraps another inputs into {key: value} relation + +#### dict +- this is dictionary type wrapping more inputs with keys defined in schema +- may be used as dynamic children (e.g. in [list](#list) or [dict-modifiable](#dict-modifiable)) + - in that case the only key modifier is `children` which is a list of its keys + - USAGE: e.g. List of dictionaries where each dictionary has the same structure. 
+
+### Basic Dictionary inputs
+These inputs wrap other inputs into a {key: value} relation.
+
+#### dict
+- this is a dictionary type wrapping more inputs, with keys defined in the schema
+- may be used as dynamic children (e.g. in [list](#list) or [dict-modifiable](#dict-modifiable))
+  - in that case the only key modifier is `children`, which is a list of its keys
+  - USAGE: e.g. a list of dictionaries where each dictionary has the same structure
+- if it is not used as dynamic children then it must have a `"key"` defined under which its values are stored
+- may be with or without a `"label"` (only for GUI)
+  - `"label"` must be set to be able to mark the item as a group with the `"is_group"` key set to True
+- an item with a label can visually wrap its children
+  - this option is enabled by default; to turn it off set `"use_label_wrap"` to `False`
+  - the label wrap is collapsible by default
+    - that can be set with the key `"collapsible"` to `True`/`False`
+    - the key `"collapsed"` (`True`/`False`) sets whether it is collapsed when the GUI is opened (Default: `False`)
+  - it is possible to add a lighter background with `"highlight_content"` (Default: `False`)
+    - the lighter background has its limits; after 3-4 nested highlighted items there is not much difference in the color
+- the output is a dictionary `{the "key": children values}`
+```javascript
+// Example
+{
+    "key": "applications",
+    "type": "dict",
+    "label": "Applications",
+    "collapsible": true,
+    "highlight_content": true,
+    "is_group": true,
+    "is_file": true,
+    "children": [
+        ...ITEMS...
+    ]
+}
+
+// Without label
+{
+    "type": "dict",
+    "key": "global",
+    "children": [
+        ...ITEMS...
+    ]
+}
+
+// When used as widget
+{
+    "type": "list",
+    "key": "profiles",
+    "label": "Profiles",
+    "object_type": {
+        "type": "dict",
+        "children": [
+            {
+                "key": "families",
+                "label": "Families",
+                "type": "list",
+                "object_type": "text"
+            }, {
+                "key": "hosts",
+                "label": "Hosts",
+                "type": "list",
+                "object_type": "text"
+            }
+            ...
+        ]
+    }
+}
+```
+
+#### dict-roots
+- the entity can be used only in Project settings
+- keys of the dictionary are based on the current project roots
+- they are not updated "live"; it is required to save the root changes and then
+  modify values on this entity
+  # TODO do live updates
+```javascript
+{
+    "type": "dict-roots",
+    "key": "roots",
+    "label": "Roots",
+    "object_type": {
+        "type": "path",
+        "multiplatform": true,
+        "multipath": false
+    }
+}
+```
+
+#### dict-conditional
+- is similar to `dict` but always has one enum entity available
+  - the enum entity has single selection and its value defines the other children entities
+- each value of the enumerator has defined children that will be used
+  - there is no way to share entities across multiple enum items
+- the value from the enumerator is also stored next to the other values
+  - to define the key under which the enum value will be stored use `enum_key`
+  - `enum_key` must match the key regex and no enum item can have a child with the same key
+  - `enum_label` is the label of the entity for UI purposes
+- enum items are defined with `enum_children`
+  - it is a list where each item represents a single item of the enum
+  - all items in `enum_children` must have at least a `key` key, which represents the value stored under `enum_key`
+  - enum items can define a `label` for UI purposes
+  - most importantly an item can define a `children` key holding the definitions of its children (the `children` value works the same way as in `dict`)
+- to set the default value of `enum_key` use `enum_default`
+- the entity must have a `"label"` defined if it is not used as a widget
+- it is set as a group if no parent is a group (it can't have children marked as groups)
+- may be with or without a `"label"` (only for GUI)
+  - `"label"` must be set to be able to mark the item as a group with the `"is_group"` key set to True
+- an item with a label can visually wrap its children
+  - this option is enabled by default; to turn it off set `"use_label_wrap"` to `False`
+  - the label wrap is collapsible by default
+    - that can be set with the key `"collapsible"` to `True`/`False`
+    - the key `"collapsed"` (`True`/`False`) sets whether it is collapsed when the GUI is opened (Default: `False`)
+  - it is possible to add a lighter background with `"highlight_content"` (Default: `False`)
+    - the lighter background has its limits; after 3-4 nested highlighted items there is not much difference in the color
+- for UI purposes `enum_is_horizontal` was added, which makes the combobox appear next to the children inputs instead of on top of them (Default: `False`)
+  - this can be extended with `enum_on_right`, which moves the combobox to the right side next to the children widgets (Default: `False`)
+- the output is a dictionary `{the "key": children values}`
+- using this type as a template item for the list type makes it possible to create infinite hierarchies
+
+```javascript
+// Example
+{
+    "type": "dict-conditional",
+    "key": "my_key",
+    "label": "My Key",
+    "enum_key": "type",
+    "enum_label": "label",
+    "enum_children": [
+        // Each item must be a dictionary with 'key'
+        {
+            "key": "action",
+            "label": "Action",
+            "children": [
+                {
+                    "type": "text",
+                    "key": "key",
+                    "label": "Key"
+                },
+                {
+                    "type": "text",
+                    "key": "label",
+                    "label": "Label"
+                },
+                {
+                    "type": "text",
+                    "key": "command",
+                    "label": "Command"
+                }
+            ]
+        },
+        {
+            "key": "menu",
+            "label": "Menu",
+            "children": [
+                {
+                    "key": "children",
+                    "label": "Children",
+                    "type": "list",
+                    "object_type": "text"
+                }
+            ]
+        },
+        {
+            // Separator does not have children as "separator" value is enough
+            "key": "separator",
+            "label": "Separator"
+        }
+    ]
+}
+```
+
+How the output of the schema could look on save:
+```javascript
+{
+    "type": "separator"
+}
+
+{
+    "type": "action",
+    "key": "action_1",
+    "label": "Action 1",
+    "command": "run command -arg"
+}
+
+{
+    "type": "menu",
+    "children": [
+        "child_1",
+        "child_2"
+    ]
+}
+```
+
+### Inputs for setting any kind of value (`Pure` inputs)
+- all inputs must have a `"key"` defined if they are not used as a dynamic item
+  - they can also have a `"label"` defined
+
+#### boolean
+- simple checkbox, nothing more to set
+```javascript
+{
+    "type": "boolean",
+    "key": "my_boolean_key",
+    "label": "Do you want to use Pype?"
+}
+```
+
+#### number
+- number input, can be used for both integer and float
+  - the key `"decimal"` defines how many decimal places will be used, 0 is for integer input (Default: `0`)
+  - the key `"minimum"` sets the minimum allowed number to enter (Default: `-99999`)
+  - the key `"maximum"` sets the maximum allowed number to enter (Default: `99999`)
+- the key `"steps"` will change the single step value of UI inputs (using arrows and wheel scroll)
+- for UI it is possible to show a slider; to enable this option set `show_slider` to `true`
+```javascript
+{
+    "type": "number",
+    "key": "fps",
+    "label": "Frame rate (FPS)",
+    "decimal": 2,
+    "minimum": 1,
+    "maximum": 300000
+}
+```
+
+```javascript
+{
+    "type": "number",
+    "key": "ratio",
+    "label": "Ratio",
+    "decimal": 3,
+    "minimum": 0,
+    "maximum": 1,
+    "show_slider": true
+}
+```
+
+#### text
+- simple text input
+  - the key `"multiline"` allows entering multiple lines of text (Default: `False`)
+  - the key `"placeholder"` allows showing a placeholder text inside the input when it is empty (Default: `None`)
+  - both modifiers are shown in the second example below
+
+```javascript
+{
+    "type": "text",
+    "key": "deadline_pool",
+    "label": "Deadline pool"
+}
+```
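+
+A second, hypothetical sketch showing the `multiline` and `placeholder` modifiers described above (the key and label are made up for illustration):
+
+```javascript
+{
+    // Key and label below are only illustrative
+    "type": "text",
+    "key": "render_notes",
+    "label": "Render notes",
+    "multiline": true,
+    "placeholder": "Notes passed along with the publish..."
+}
+```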
+
+#### path-input
+- please do not use this input directly in a schema (use `path` instead)
+- this input is implemented to add additional features to the text input
+- it is meant to be used by the proxy input `path`
+
+#### raw-json
+- a slightly enhanced text input for raw json
+- can store a dictionary (`{}`) or a list (`[]`) but not both
+  - by default it stores a dictionary; to change it to a list set `is_list` to `True`
+- has validation of the json format
+- the output can be stored as a string
+  - this allows any keys in the dictionary
+  - set the key `store_as_string` to `true` (see the second example below)
+  - code using that setting must expect the value to be a string and use the json module to convert it to python types
+
+```javascript
+{
+    "type": "raw-json",
+    "key": "profiles",
+    "label": "Extract Review profiles",
+    "is_list": true
+}
+```
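+
+A second, hypothetical sketch showing the `store_as_string` modifier (the key and label are made up for illustration):
+
+```javascript
+{
+    // Key and label below are only illustrative
+    "type": "raw-json",
+    "key": "custom_attributes",
+    "label": "Custom attributes",
+    "store_as_string": true
+}
+```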
+
+#### enum
+- enumeration of values that are predefined in the schema
+- multiselection can be allowed by setting the key `"multiselection"` to `True` (Default: `False`)
+- values are defined as a list under the key `"enum_items"`
+  - each item in the list is a simple dictionary where the value is the label and the key is the value which will be stored
+  - it should be possible to enter a single dictionary if the order of items doesn't matter
+- it is possible to set the default selected value/s with the `default` attribute (see the second example below)
+  - it is recommended to use this option only in single selection mode
+  - in the end this option is only used when defining the default settings value or in dynamic items
+
+```javascript
+{
+    "key": "tags",
+    "label": "Tags",
+    "type": "enum",
+    "multiselection": true,
+    "enum_items": [
+        {"burnin": "Add burnins"},
+        {"ftrackreview": "Add to Ftrack"},
+        {"delete": "Delete output"},
+        {"slate-frame": "Add slate frame"},
+        {"no-handles": "Skip handle frames"}
+    ]
+}
+```
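+
+A second, hypothetical sketch showing single selection with a `default` value (the key, label and items are made up for illustration):
+
+```javascript
+{
+    // Key, label and enum items below are only illustrative
+    "key": "quality",
+    "label": "Output quality",
+    "type": "enum",
+    "multiselection": false,
+    "default": "high",
+    "enum_items": [
+        {"low": "Low"},
+        {"medium": "Medium"},
+        {"high": "High"}
+    ]
+}
+```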
+
+#### anatomy-templates-enum
+- enumeration of all available anatomy template keys
+- has only single selection mode
+- it is possible to define a default value with `default`
+  - `"work"` is used if a default value is not specified
+- enum values are not updated on the fly; it is required to save the templates and
+  reset settings to recache the values
+```javascript
+{
+    "key": "host",
+    "label": "Host name",
+    "type": "anatomy-templates-enum",
+    "default": "publish"
+}
+```
+
+#### hosts-enum
+- enumeration of available hosts
+- multiselection can be allowed by setting the key `"multiselection"` to `True` (Default: `False`)
+- it is possible to add an empty value (represented by an empty string) by setting `"use_empty_value"` to `True` (Default: `False`)
+- it is possible to set `"custom_labels"` for host names, where the key `""` is the empty value (Default: `{}`)
+- to filter host names it is required to define `"hosts_filter"`, which is a list of host names that will be available
+  - do not pass an empty string if `use_empty_value` is enabled
+  - ignoring host names would be more dangerous in some cases
+```javascript
+{
+    "key": "host",
+    "label": "Host name",
+    "type": "hosts-enum",
+    "multiselection": false,
+    "use_empty_value": true,
+    "custom_labels": {
+        "": "N/A",
+        "nuke": "Nuke"
+    },
+    "hosts_filter": [
+        "nuke"
+    ]
+}
+```
+
+#### apps-enum
+- enumeration of available applications and their variants from system settings
+  - applications without a host name are excluded
+- can be used only in project settings
+- has only `multiselection`
+- used only in project anatomy
+```javascript
+{
+    "type": "apps-enum",
+    "key": "applications",
+    "label": "Applications"
+}
+```
+
+#### tools-enum
+- enumeration of available tools and their variants from system settings
+- can be used only in project settings
+- has only `multiselection`
+- used only in project anatomy
+```javascript
+{
+    "type": "tools-enum",
+    "key": "tools_env",
+    "label": "Tools"
+}
+```
+
+#### task-types-enum
+- enumeration of task types from the current project
+- enum values are not updated on the fly; modifications of task types on the project require a save and reset to be propagated to this enum
+- `multiselection` is set to `True` by default but can be changed to `False` in the schema (see the sketch below)
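+
+The source has no example for this entity, so here is a hypothetical minimal sketch following the pattern of the other enum entities (the key and label are made up for illustration):
+
+```javascript
+{
+    // Key and label below are only illustrative
+    "key": "task_types",
+    "label": "Task types",
+    "type": "task-types-enum",
+    "multiselection": false
+}
+```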
+
+#### deadline_url-enum
+- Deadline module specific enumerator using Deadline system settings to fill its values
+- TODO: move this type to the deadline module
+
+### Inputs for setting value using Pure inputs
+- these inputs also have a required `"key"`
+- the attribute `"label"` is required in a few cases
+  - when the item is marked `as_group` or when `use_label_wrap` is used
+- they use Pure inputs "as widgets"
+
+#### list
+- the output is a list
+- items can be added and removed
+- items in the list must be of the same type
+- to wrap the item in a collapsible widget with a label on top set `use_label_wrap` to `True`
+  - when this is used `collapsible` and `collapsed` can be set (the same as for the `dict` item)
+- the type of items is defined with the key `"object_type"`
+- there are 3 possible ways to set the type:
+  1.) a dictionary with item modifiers (the `number` input has `minimum`, `maximum` and `decimals`); in that case the item type must be set as the value of `"type"` (example below)
+  2.) the item type name as a string without modifiers (e.g. [text](#text))
+  3.) an enhancement of 1.): there is also support for the `template` type, but be careful about endless loops of templates
+  - the goal of using `template` is to easily change the same item definitions in multiple lists
+
+1.) with item modifiers
+```javascript
+{
+    "type": "list",
+    "key": "exclude_ports",
+    "label": "Exclude ports",
+    "object_type": {
+        "type": "number",   // number item type
+        "minimum": 1,       // minimum modifier
+        "maximum": 65535    // maximum modifier
+    }
+}
+```
+
+2.) without modifiers
+```javascript
+{
+    "type": "list",
+    "key": "exclude_ports",
+    "label": "Exclude ports",
+    "object_type": "text"
+}
+```
+
+3.) with template definition
+```javascript
+// Schema of list item where template is used
+{
+    "type": "list",
+    "key": "menu_items",
+    "label": "Menu Items",
+    "object_type": {
+        "type": "template",
+        "name": "template_object_example"
+    }
+}
+
+// WARNING:
+// In this example the template uses itself inside, which will work in `list`
+// but may cause an issue in other entity types (e.g. `dict`).
+
+'template_object_example.json' :
+[
+    {
+        "type": "dict-conditional",
+        "use_label_wrap": true,
+        "collapsible": true,
+        "key": "menu_items",
+        "label": "Menu items",
+        "enum_key": "type",
+        "enum_label": "Type",
+        "enum_children": [
+            {
+                "key": "action",
+                "label": "Action",
+                "children": [
+                    {
+                        "type": "text",
+                        "key": "key",
+                        "label": "Key"
+                    }
+                ]
+            }, {
+                "key": "menu",
+                "label": "Menu",
+                "children": [
+                    {
+                        "key": "children",
+                        "label": "Children",
+                        "type": "list",
+                        "object_type": {
+                            "type": "template",
+                            "name": "template_object_example"
+                        }
+                    }
+                ]
+            }
+        ]
+    }
+]
+```
+
+#### dict-modifiable
+- one of the dictionary inputs; this one is only used as a value input
+- items in this input can be removed and added the same way as in the `list` input
+- value items in the dictionary must be of the same type
+- required keys may be defined under `"required_keys"` (see the third example below)
+  - required keys must be defined as a list (e.g. `["key_1"]`) and are moved to the top
+  - these keys can't be removed or edited (it is possible to edit the label if the item is collapsible)
+- the type of items is defined with the key `"object_type"`
+  - there are 2 possible ways to set the object type (examples below):
+    1. just a type name as a string without modifiers (e.g. `"text"`)
+    2. a full type with modifiers as a dictionary (the `number` input has `minimum`, `maximum` and `decimals`); in that case the item type must be set as the value of `"type"`
+- this input can be collapsible
+  - `"use_label_wrap"` must be set to `True` (default behavior)
+  - that can be set with the key `"collapsible"` as `True`/`False` (Default: `True`)
+  - the key `"collapsed"` (`True`/`False`) sets whether it is collapsed when the GUI is opened (Default: `False`)
+
+1. **Object type** without modifiers
+```javascript
+{
+    "type": "dict-modifiable",
+    "object_type": "text",
+    "is_group": true,
+    "key": "templates_mapping",
+    "label": "Muster - Templates mapping",
+    "is_file": true
+}
+```
+
+2. **Object type** with item modifiers
+```javascript
+{
+    "type": "dict-modifiable",
+    "object_type": {
+        "type": "number",
+        "minimum": 0,
+        "maximum": 300
+    },
+    "is_group": true,
+    "key": "templates_mapping",
+    "label": "Muster - Templates mapping",
+    "is_file": true
+}
+```
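+
+A third, hypothetical sketch showing the `required_keys` modifier mentioned above (the key, label and required key are made up for illustration):
+
+```javascript
+{
+    // Key, label and required key below are only illustrative
+    "type": "dict-modifiable",
+    "object_type": "text",
+    "key": "limit_groups",
+    "label": "Limit groups",
+    "required_keys": ["default"],
+    "is_group": true
+}
+```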
+
+#### path
+- input for paths, uses `path-input` internally
+- has 2 input modifiers: `"multiplatform"` and `"multipath"`
+  - `"multiplatform"` - adds `"windows"`, `"linux"` and `"darwin"` path inputs (the result is a dictionary)
+  - `"multipath"` - makes it possible to enter multiple paths
+  - if both are enabled the result is a dictionary with lists
+
+```javascript
+{
+    "type": "path",
+    "key": "ffmpeg_path",
+    "label": "FFmpeg path",
+    "multiplatform": true,
+    "multipath": true
+}
+```
+
+#### list-strict
+- input for a strict number of items in a list
+- each child item can be a different type with different possible modifiers
+- it is possible to display them in a horizontal or vertical layout
+  - key `"horizontal"` as `True`/`False` (Default: `True`)
+- each child may have a `"label"` defined which is shown next to the input
+  - the label does not reflect modifications or overrides (TODO)
+- children items are defined under the key `"object_types"` which is a list of dictionaries
+  - the key `"children"` is not used because it is reserved for hierarchy validations in the schema
+- USAGE: for colors, transformations, etc. Custom numbers and different modifiers
+  give the ability to define whether a color is HUE or RGB, 0-255, 0-1, 0-100, etc.
+
+```javascript
+{
+    "type": "list-strict",
+    "key": "color",
+    "label": "Color",
+    "object_types": [
+        {
+            "label": "Red",
+            "type": "number",
+            "minimum": 0,
+            "maximum": 255,
+            "decimal": 0
+        }, {
+            "label": "Green",
+            "type": "number",
+            "minimum": 0,
+            "maximum": 255,
+            "decimal": 0
+        }, {
+            "label": "Blue",
+            "type": "number",
+            "minimum": 0,
+            "maximum": 255,
+            "decimal": 0
+        }, {
+            "label": "Alpha",
+            "type": "number",
+            "minimum": 0,
+            "maximum": 1,
+            "decimal": 6
+        }
+    ]
+}
+```
+
+#### color
+- pre-implemented entity to store and load color values
+- the entity stores and expects a list of 4 integers in range 0-255
+  - the integers represent rgba [Red, Green, Blue, Alpha]
+- has a modifier `"use_alpha"` which can be `True`/`False`
+  - when it is set to `False` alpha is always `255` and the alpha slider is not visible in the UI
+
+```javascript
+{
+    "type": "color",
+    "key": "bg_color",
+    "label": "Background Color"
+}
+```
+
+### Anatomy
+Anatomy represents data stored on the project document. The item takes care of the **Project Anatomy**.
+
+#### anatomy
+- the entity is just an enhanced [dict](#dict) item
+- anatomy always has all keys overridden with overrides
+
+### Noninteractive items
+Items used only for UI purposes.
+
+#### label
+- adds a label with a note or explanation
+- it is possible to use html tags inside the label
+- set `work_wrap` to `true`/`false` if you want to enable word wrapping in the UI (default: `false`)
+
+```javascript
+{
+    "type": "label",
+    "label": "RED LABEL: Normal label"
+}
+```
+
+#### separator
+- the legacy name is `splitter` (still usable)
+- visual separator of items (more a divider than a separator)
+
+```javascript
+{
+    "type": "separator"
+}
+```
+
+### Proxy wrappers
+- should wrap multiple inputs only visually
+- they do not have a `"key"` and do not allow the `"is_file"` or `"is_group"` modifiers to be enabled
+- can't be used as a widget (the first item in e.g. `list`, `dict-modifiable`, etc.)
+
+#### form
+- wraps inputs into a form-like layout
+- should be used only for Pure inputs
+
+```javascript
+{
+    "type": "dict-form",
+    "children": [
+        {
+            "type": "text",
+            "key": "deadline_department",
+            "label": "Deadline department"
+        }, {
+            "type": "number",
+            "key": "deadline_priority",
+            "label": "Deadline priority"
+        }, {
+            ...
+        }
+    ]
+}
+```
+
+
+#### collapsible-wrap
+- wraps inputs into a collapsible widget
+  - looks like `dict` but does not hold a `"key"`
+- should be used only for Pure inputs
+
+```javascript
+{
+    "type": "collapsible-wrap",
+    "label": "Collapsible example",
+    "children": [
+        {
+            "type": "text",
+            "key": "_example_input_collapsible",
+            "label": "Example input in collapsible wrapper"
+        }, {
+            ...
+        }
+    ]
+}
+```
+
+
+## How to add new settings
+Always start by modifying or adding a new schema and don't worry about values. When you think the schema is ready to use, launch OpenPype settings in development mode using `poetry run python ./start.py settings --dev` or the prepared script in `~/openpype/tools/run_settings(.sh|.ps1)`. Settings opened in development mode have the checkbox `Modify defaults` available in the bottom left corner. When it is checked, default values are modified and saved on `Save`. This is the recommended approach for creating default settings instead of modifying the files directly.
+
+![Modify default settings](assets/settings_dev.png)
diff --git a/website/docs/module_ftrack.md b/website/docs/module_ftrack.md index 667782754f..6d5529b512 100644 --- a/website/docs/module_ftrack.md +++ b/website/docs/module_ftrack.md @@ -13,7 +13,7 @@ Ftrack is currently the main project management option for OpenPype. This docume ## Prepare Ftrack for OpenPype ### Server URL -If you want to connect Ftrack to OpenPype you might need to make few changes in Ftrack settings. These changes would take a long time to do manually, so we prepared a few Ftrack actions to help you out. First, you'll need to launch OpenPype settings, enable [Ftrack module](admin_settings_system.md#Ftrack), and enter the address to your Ftrack server. +If you want to connect Ftrack to OpenPype you might need to make few changes in Ftrack settings. These changes would take a long time to do manually, so we prepared a few Ftrack actions to help you out. First, you'll need to launch OpenPype settings, enable [Ftrack module](admin_settings_system.md#Ftrack), and enter the address to your Ftrack server. ### Login Once your server is configured, restart OpenPype and you should be prompted to enter your [Ftrack credentials](artist_ftrack.md#How-to-use-Ftrack-in-OpenPype) to be able to run our Ftrack actions. If you are already logged in to Ftrack in your browser, it is enough to press `Ftrack login` and it will connect automatically. @@ -26,7 +26,7 @@ You can only use our Ftrack Actions and publish to Ftrack if each artist is logg ### Custom Attributes After successfully connecting OpenPype with you Ftrack, you can right click on any project in Ftrack and you should see a bunch of actions available. The most important one is called `OpenPype Admin` and contains multiple options inside. -To prepare Ftrack for working with OpenPype you'll need to run [OpenPype Admin - Create/Update Custom Attributes](manager_ftrack_actions.md#create-update-avalon-attributes), which creates and sets the Custom Attributes necessary for OpenPype to function.
+To prepare Ftrack for working with OpenPype you'll need to run [OpenPype Admin - Create/Update Custom Attributes](manager_ftrack_actions.md#create-update-avalon-attributes), which creates and sets the Custom Attributes necessary for OpenPype to function. @@ -34,7 +34,7 @@ To prepare Ftrack for working with OpenPype you'll need to run [OpenPype Admin - Ftrack Event Server is the key to automation of many tasks like _status change_, _thumbnail update_, _automatic synchronization to Avalon database_ and many more. Event server should run at all times to perform the required processing as it is not possible to catch some of them retrospectively with enough certainty. ### Running event server -There are specific launch arguments for event server. With `openpype_console eventserver` you can launch event server but without prior preparation it will terminate immediately. The reason is that event server requires 3 pieces of information: _Ftrack server url_, _paths to events_ and _credentials (Username and API key)_. Ftrack server URL and Event path are set from OpenPype's environments by default, but the credentials must be done separatelly for security reasons. +There are specific launch arguments for event server. With `openpype_console module ftrack eventserver` you can launch event server but without prior preparation it will terminate immediately. The reason is that event server requires 3 pieces of information: _Ftrack server url_, _paths to events_ and _credentials (Username and API key)_. Ftrack server URL and Event path are set from OpenPype's environments by default, but the credentials must be done separatelly for security reasons. @@ -53,7 +53,7 @@ There are specific launch arguments for event server. With `openpype_console eve - **`--ftrack-api-key "00000aaa-11bb-22cc-33dd-444444eeeee"`** : User's API key - `--ftrack-url "https://yourdomain.ftrackapp.com/"` : Ftrack server URL _(it is not needed to enter if you have set `FTRACK_SERVER` in OpenPype' environments)_ -So if you want to use OpenPype's environments then you can launch event server for first time with these arguments `openpype_console.exe eventserver --ftrack-user "my.username" --ftrack-api-key "00000aaa-11bb-22cc-33dd-444444eeeee" --store-credentials`. Since that time, if everything was entered correctly, you can launch event server with `openpype_console.exe eventserver`. +So if you want to use OpenPype's environments then you can launch event server for first time with these arguments `openpype_console.exe module ftrack eventserver --ftrack-user "my.username" --ftrack-api-key "00000aaa-11bb-22cc-33dd-444444eeeee" --store-credentials`. Since that time, if everything was entered correctly, you can launch event server with `openpype_console.exe module ftrack eventserver`. @@ -72,7 +72,7 @@ We do not recommend setting your Ftrack user and api key environments in a persi ### Where to run event server -We recommend you to run event server on stable server machine with ability to connect to Avalon database and Ftrack web server. Best practice we recommend is to run event server as service. It can be Windows or Linux. +We recommend you to run event server on stable server machine with ability to connect to OpenPype database and Ftrack web server. Best practice we recommend is to run event server as service. It can be Windows or Linux. :::important Event server should **not** run more than once! It may cause major issues. @@ -99,11 +99,10 @@ Event server should **not** run more than once! It may cause major issues. 
- add content to the file: ```sh #!/usr/bin/env bash -export OPENPYPE_DEBUG=1 export OPENPYPE_MONGO= pushd /mnt/path/to/openpype -./openpype_console eventserver --ftrack-user --ftrack-api-key +./openpype_console module ftrack eventserver --ftrack-user --ftrack-api-key --debug ``` - change file permission: `sudo chmod 0755 /opt/openpype/run_event_server.sh` @@ -140,14 +139,13 @@ WantedBy=multi-user.target - create service file: `openpype-ftrack-eventserver.bat` -- add content to the service file: +- add content to the service file: ```sh @echo off -set OPENPYPE_DEBUG=1 set OPENPYPE_MONGO= pushd \\path\to\openpype -openpype_console.exe eventserver --ftrack-user --ftrack-api-key +openpype_console.exe module ftrack eventserver --ftrack-user --ftrack-api-key --debug ``` - download and install `nssm.cc` - create Windows service according to nssm.cc manual @@ -174,7 +172,7 @@ This event updates entities on their changes Ftrack. When new entity is created Deleting an entity by Ftrack's default is not processed for security reasons _(to delete entity use [Delete Asset/Subset action](manager_ftrack_actions.md#delete-asset-subset))_. ::: -### Synchronize Hierarchical and Entity Attributes +### Synchronize Hierarchical and Entity Attributes Auto-synchronization of hierarchical attributes from Ftrack entities. @@ -190,7 +188,7 @@ Change status of next task from `Not started` to `Ready` when previous task is a Multiple detailed rules for next task update can be configured in the settings. -### Delete Avalon ID from new entity +### Delete Avalon ID from new entity Is used to remove value from `Avalon/Mongo Id` Custom Attribute when entity is created. @@ -215,7 +213,7 @@ This event handler allows setting of different status to a first created Asset V This is useful for example if first version publish doesn't contain any actual reviewable work, but is only used for roundtrip conform check, in which case this version could receive status `pending conform` instead of standard `pending review` ### Update status on next task -Change status on next task by task types order when task status state changed to "Done". All tasks with the same Task mapping of next task status changes From → To. Some status can be ignored. +Change status on next task by task types order when task status state changed to "Done". All tasks with the same Task mapping of next task status changes From → To. Some status can be ignored. ## Publish plugins @@ -238,7 +236,7 @@ Add Ftrack Family: enabled #### Advanced adding if additional families present -In special cases adding 'ftrack' based on main family ('Families' set higher) is not enough. +In special cases adding 'ftrack' based on main family ('Families' set higher) is not enough. (For example upload to Ftrack for 'plate' main family should only happen if 'review' is contained in instance 'families', not added in other cases. ) -![Collect Ftrack Family](assets/ftrack/ftrack-collect-advanced.png) \ No newline at end of file +![Collect Ftrack Family](assets/ftrack/ftrack-collect-advanced.png) diff --git a/website/docs/system_introduction.md b/website/docs/system_introduction.md index 71c5d64aa8..05627b5359 100644 --- a/website/docs/system_introduction.md +++ b/website/docs/system_introduction.md @@ -17,7 +17,7 @@ various usage scenarios. You can find detailed breakdown of technical requirements [here](dev_requirements), but in general OpenPype should be able to operate in most studios fairly quickly. 
The main obstacles are usually related to workflows and habits, that -might now be fully compatible with what OpenPype is expecting or enforcing. +might not be fully compatible with what OpenPype is expecting or enforcing. It is recommended to go through artists [key concepts](artist_concepts) to get idea about basics. Keep in mind that if you run into any workflows that are not supported, it's usually just because we haven't hit that particular case and it can most likely be added upon request. @@ -48,24 +48,3 @@ to the table - Some DCCs do not support using Environment variables in file paths. This will make it very hard to maintain full multiplatform compatibility as well variable storage roots. - Relying on VPN connection and using it to work directly of network storage will be painfully slow. - - -## Repositories - -### [OpenPype](https://github.com/pypeclub/pype) - -This is where vast majority of the code that works with your data lives. It acts -as Avalon-Config, if we're speaking in avalon terms. - -Avalon gives us the ability to work with a certain host, say Maya, in a standardized manner, but OpenPype defines **how** we work with all the data, allows most of the behavior to be configured on a very granular level and provides a comprehensive build and installation tools for it. - -Thanks to that, we are able to maintain one codebase for vast majority of the features across all our clients deployments while keeping the option to tailor the pipeline to each individual studio. - -### [Avalon-core](https://github.com/pypeclub/avalon-core) - -Avalon-core is the heart of OpenPype. It provides the base functionality including key GUIs (albeit expanded and modified by us), database connection, standards for data structures, working with entities and some universal tools. - -Avalon is being actively developed and maintained by a community of studios and TDs from around the world, with Pype Club team being an active contributor as well. - -Due to the extensive work we've done on OpenPype and the need to react quickly to production needs, we -maintain our own fork of avalon-core, which is kept up to date with upstream changes as much as possible. diff --git a/website/docs/upgrade_notes.md b/website/docs/upgrade_notes.md deleted file mode 100644 index 8231cf997d..0000000000 --- a/website/docs/upgrade_notes.md +++ /dev/null @@ -1,165 +0,0 @@ ---- -id: update_notes -title: Update Notes -sidebar_label: Update Notes ---- - - - -## **Updating to 2.13.0** ## - -### MongoDB - -**Must** - -Due to changes in how tasks are stored in the database (we added task types and possibility of more arbitrary data.), we must take a few precautions when updating. -1. Make sure that ftrack event server with sync to avalon is NOT running during the update. -2. Any project that is to be worked on with 2.13 must be synced from ftrack to avalon with the updated sync to avalon action, or using and updated event server sync to avalon event. - -If 2.12 event servers runs when trying to update the project sync with 2.13, it will override any changes. - -### Nuke Studio / hiero - -Make sure to re-generate pype tags and replace any `task` tags on your shots with the new ones. This will allow you to make multiple tasks of the same type, but with different task name at the same time. - -### Nuke - -Due to a minor update to nuke write node, artists will be prompted to update their write nodes before being able to publish any old shots. 
There is a "repair" action for this in the publisher, so it doesn't have to be done manually. - - - - -## **Updating to 2.12.0** ## - -### Apps and tools - -**Must** - -run Create/Update Custom attributes action (to update custom attributes group) -check if studio has set custom intent values and move values to ~/config/presets/global/intent.json - -**Optional** - -Set true/false on application and tools by studio usage (eliminate app list in Ftrack and time for registering Ftrack ations) - - - - -## **Updating to 2.11.0** ## - -### Maya in deadline - -We added or own maya deadline plugin to make render management easier. It operates the same as standard mayaBatch in deadline, but allow us to separate Pype sumitted jobs from standard submitter. You'll need to follow this guide to update this [install pype deadline](https://pype.club/docs/admin_hosts#pype-dealine-supplement-code) - - - - -## **Updating to 2.9.0** ## - -### Review and Burnin PRESETS - -This release introduces a major update to working with review and burnin presets. They can now be much more granular and can target extremely specific usecases. The change is backwards compatible with previous format of review and burnin presets, however we highly recommend updating all the presets to the new format. Documentation on what this looks like can be found on pype main [documentation page](https://pype.club/docs/admin_presets_plugins#publishjson). - -### Multiroot and storages - -With the support of multiroot projects, we removed the old `storage.json` from configuration and replaced it with simpler `config/anatomy/roots.json`. This is a required change, but only needs to be done once per studio during the update to 2.9.0. [Read More](https://pype.club/docs/next/admin_config#roots) - - - - -## **Updating to 2.7.0** ## - -### Master Versions -To activate `master` version workflow you need to activate `integrateMasterVersion` plugin in the `config/presets/plugins/global/publish.json` - -``` -"IntegrateMasterVersion": {"enabled": true}, -``` - -### Ftrack - -Make sure that `intent` attributes in ftrack is set correctly. It should follow this setup unless you have your custom values -``` -{ - "label": "Intent", - "key": "intent", - "type": "enumerator", - "entity_type": "assetversion", - "group": "avalon", - "config": { - "multiselect": false, - "data": [ - {"test": "Test"}, - {"wip": "WIP"}, - {"final": "Final"} - ] - } -``` - - - - -## **Updating to 2.6.0** ## - -### Dev vs Prod - -If you want to differentiate between dev and prod deployments of pype, you need to add `config.ini` file to `pype-setup/pypeapp` folder with content. - -``` -[Default] -dev=true -``` - -### Ftrack - -You will have to log in to ftrack in pype after the update. You should be automatically prompted with the ftrack login window when you launch 2.6 release for the first time. - -Event server has to be restarted after the update to enable the ability to control it via action. - -### Presets - -There is a major change in the way how burnin presets are being stored. We simplified the preset format, however that means the currently running production configs need to be tweaked to match the new format. 
- -:::note Example of converting burnin preset from 2.5 to 2.6 - -2.5 burnin preset - -``` -"burnins":{ - "TOP_LEFT": { - "function": "text", - "text": "{dd}/{mm}/{yyyy}" - }, - "TOP_CENTERED": { - "function": "text", - "text": "" - }, - "TOP_RIGHT": { - "function": "text", - "text": "v{version:0>3}" - }, - "BOTTOM_LEFT": { - "function": "text", - "text": "{frame_start}-{current_frame}-{frame_end}" - }, - "BOTTOM_CENTERED": { - "function": "text", - "text": "{asset}" - }, - "BOTTOM_RIGHT": { - "function": "frame_numbers", - "text": "{username}" - } -``` - -2.6 burnin preset -``` -"burnins":{ - "TOP_LEFT": "{dd}/{mm}/{yyyy}", - "TOP_CENTER": "", - "TOP_RIGHT": "v{version:0>3}" - "BOTTOM_LEFT": "{frame_start}-{current_frame}-{frame_end}", - "BOTTOM_CENTERED": "{asset}", - "BOTTOM_RIGHT": "{username}" -} -``` diff --git a/website/sidebars.js b/website/sidebars.js index 9d60a5811c..920a3134f6 100644 --- a/website/sidebars.js +++ b/website/sidebars.js @@ -109,11 +109,7 @@ module.exports = { "admin_hosts_tvpaint" ], }, - { - type: "category", - label: "Releases", - items: ["changelog", "update_notes"], - }, + "admin_releases", { type: "category", collapsed: false, @@ -152,6 +148,7 @@ module.exports = { "dev_build", "dev_testing", "dev_contribute", + "dev_settings", { type: "category", label: "Hosts integrations", diff --git a/website/src/css/custom.css b/website/src/css/custom.css index e8dd86256b..58c9305bc7 100644 --- a/website/src/css/custom.css +++ b/website/src/css/custom.css @@ -196,12 +196,12 @@ html[data-theme='dark'] .header-github-link::before { padding: 20px } -.showcase .client { +.showcase .studio { display: flex; justify-content: space-between; } -.showcase .client img { +.showcase .studio img { max-height: 110px; padding: 20px; max-width: 160px; diff --git a/website/src/pages/index.js b/website/src/pages/index.js index 0886706015..52302ec285 100644 --- a/website/src/pages/index.js +++ b/website/src/pages/index.js @@ -65,13 +65,17 @@ const collab = [ image: '/img/clothcat.png', infoLink: 'https://www.clothcatanimation.com/' }, { - title: 'Ellipse Studio', - image: '/img/ellipse-studio.png', - infoLink: 'http://www.dargaudmedia.com' + title: 'Ellipse Animation', + image: '/img/ellipse_animation.svg', + infoLink: 'http://www.ellipseanimation.com' }, { title: 'J Cube Inc', image: '/img/jcube_logo_bw.png', infoLink: 'https://j-cube.jp' + }, { + title: 'Normaal Animation', + image: '/img/logo_normaal.png', + infoLink: 'https://j-cube.jp' } ]; @@ -153,7 +157,32 @@ const studios = [ title: "IGG Canada", image: "/img/igg-logo.png", infoLink: "https://www.igg.com/", - } + }, + { + title: "Agora Studio", + image: "/img/agora_studio.png", + infoLink: "https://agora.studio/", + }, + { + title: "Lucan Visuals", + image: "/img/lucan_Logo_On_White-HR.png", + infoLink: "https://www.lucan.tv/", + }, + { + title: "No Ghost", + image: "/img/noghost.png", + infoLink: "https://www.noghost.co.uk/", + }, + { + title: "Static VFX", + image: "/img/staticvfx.png", + infoLink: "http://www.staticvfx.com/", + }, + { + title: "Method n Madness", + image: "/img/methodmadness.png", + infoLink: "https://www.methodnmadness.com/", +} ]; function Service({imageUrl, title, description}) { @@ -166,10 +195,10 @@ function Service({imageUrl, title, description}) { ); } -function Client({title, image, infoLink}) { +function Studio({title, image, infoLink}) { const imgUrl = useBaseUrl(image); return ( - + ); @@ -465,7 +494,7 @@ function Home() {

Studios using openPype

{studios.map((props, idx) => ( - + ))}
diff --git a/website/static/img/NoGhost_Logo_black.svg b/website/static/img/NoGhost_Logo_black.svg new file mode 100644 index 0000000000..b499b1621f --- /dev/null +++ b/website/static/img/NoGhost_Logo_black.svg @@ -0,0 +1,31 @@ + + + + + + + + + + + + + diff --git a/website/static/img/agora_studio.png b/website/static/img/agora_studio.png new file mode 100644 index 0000000000..48b07b8775 Binary files /dev/null and b/website/static/img/agora_studio.png differ diff --git a/website/static/img/ellipse_animation.svg b/website/static/img/ellipse_animation.svg new file mode 100644 index 0000000000..c1caaa6726 --- /dev/null +++ b/website/static/img/ellipse_animation.svg @@ -0,0 +1,9 @@ + + + + + + + + + diff --git a/website/static/img/igg-logo.png b/website/static/img/igg-logo.png index 3c7f7718f7..9fc7a7f84f 100644 Binary files a/website/static/img/igg-logo.png and b/website/static/img/igg-logo.png differ diff --git a/website/static/img/logo_normaal.png b/website/static/img/logo_normaal.png new file mode 100644 index 0000000000..711847c9f2 Binary files /dev/null and b/website/static/img/logo_normaal.png differ diff --git a/website/static/img/lucan_Logo_On_White-HR.png b/website/static/img/lucan_Logo_On_White-HR.png new file mode 100644 index 0000000000..c86030e1e7 Binary files /dev/null and b/website/static/img/lucan_Logo_On_White-HR.png differ diff --git a/website/static/img/methodmadness.png b/website/static/img/methodmadness.png new file mode 100644 index 0000000000..9dd0681d4a Binary files /dev/null and b/website/static/img/methodmadness.png differ diff --git a/website/static/img/noghost.png b/website/static/img/noghost.png new file mode 100644 index 0000000000..febaedcae8 Binary files /dev/null and b/website/static/img/noghost.png differ diff --git a/website/static/img/staticvfx.png b/website/static/img/staticvfx.png new file mode 100644 index 0000000000..41efd7f120 Binary files /dev/null and b/website/static/img/staticvfx.png differ