diff --git a/.github/workflows/prerelease.yml b/.github/workflows/prerelease.yml index d0853e74d6..82f9a6ae9d 100644 --- a/.github/workflows/prerelease.yml +++ b/.github/workflows/prerelease.yml @@ -47,7 +47,7 @@ jobs: enhancementLabel: '**πŸš€ Enhancements**' bugsLabel: '**πŸ› Bug fixes**' deprecatedLabel: '**⚠️ Deprecations**' - addSections: '{"documentation":{"prefix":"### πŸ“– Documentation","labels":["documentation"]},"tests":{"prefix":"### βœ… Testing","labels":["tests"]}}' + addSections: '{"documentation":{"prefix":"### πŸ“– Documentation","labels":["documentation"]},"tests":{"prefix":"### βœ… Testing","labels":["tests"]},"feature":{"prefix":"### πŸ†• New features","labels":["feature"]}}' issues: false issuesWoLabels: false sinceTag: "3.0.0" diff --git a/.gitmodules b/.gitmodules index 52f2fc0750..e1b0917e9d 100644 --- a/.gitmodules +++ b/.gitmodules @@ -4,9 +4,9 @@ [submodule "repos/avalon-unreal-integration"] path = repos/avalon-unreal-integration url = https://github.com/pypeclub/avalon-unreal-integration.git -[submodule "openpype/modules/ftrack/python2_vendor/ftrack-python-api"] - path = openpype/modules/ftrack/python2_vendor/ftrack-python-api - url = https://bitbucket.org/ftrack/ftrack-python-api.git -[submodule "openpype/modules/ftrack/python2_vendor/arrow"] - path = openpype/modules/ftrack/python2_vendor/arrow - url = https://github.com/arrow-py/arrow.git \ No newline at end of file +[submodule "openpype/modules/default_modules/ftrack/python2_vendor/arrow"] + path = openpype/modules/default_modules/ftrack/python2_vendor/arrow + url = https://github.com/arrow-py/arrow.git +[submodule "openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api"] + path = openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api + url = https://bitbucket.org/ftrack/ftrack-python-api.git \ No newline at end of file diff --git a/CHANGELOG.md b/CHANGELOG.md index ef4ddeeb59..e1737458b2 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,10 
+1,35 @@ # Changelog -## [3.3.1-nightly.1](https://github.com/pypeclub/OpenPype/tree/HEAD) +## [3.4.0-nightly.4](https://github.com/pypeclub/OpenPype/tree/HEAD) -[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.3.0...HEAD) +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.3.1...HEAD) -**πŸ› Bug fixes** +**Merged pull requests:** + +- Ftrack: Fix hosts attribute in collect ftrack username [\#1972](https://github.com/pypeclub/OpenPype/pull/1972) +- Removed deprecated submodules [\#1967](https://github.com/pypeclub/OpenPype/pull/1967) +- Launcher: Fix crashes on action click [\#1964](https://github.com/pypeclub/OpenPype/pull/1964) +- Settings: Minor fixes in UI and missing default values [\#1963](https://github.com/pypeclub/OpenPype/pull/1963) +- Blender: Toggle system console works on windows [\#1962](https://github.com/pypeclub/OpenPype/pull/1962) +- Resolve path when adding to zip [\#1960](https://github.com/pypeclub/OpenPype/pull/1960) +- Bump url-parse from 1.5.1 to 1.5.3 in /website [\#1958](https://github.com/pypeclub/OpenPype/pull/1958) +- Global: Avalon Host name collector [\#1949](https://github.com/pypeclub/OpenPype/pull/1949) +- Global: Define hosts in CollectSceneVersion [\#1948](https://github.com/pypeclub/OpenPype/pull/1948) +- Maya: Add Xgen family support [\#1947](https://github.com/pypeclub/OpenPype/pull/1947) +- Add face sets to exported alembics [\#1942](https://github.com/pypeclub/OpenPype/pull/1942) +- Bump path-parse from 1.0.6 to 1.0.7 in /website [\#1933](https://github.com/pypeclub/OpenPype/pull/1933) +- \#1894 - adds host to template\_name\_profiles for filtering [\#1915](https://github.com/pypeclub/OpenPype/pull/1915) +- Environments: Tool environments in alphabetical order [\#1910](https://github.com/pypeclub/OpenPype/pull/1910) +- Disregard publishing time. 
[\#1888](https://github.com/pypeclub/OpenPype/pull/1888) +- Feature/webpublisher backend [\#1876](https://github.com/pypeclub/OpenPype/pull/1876) +- Dynamic modules [\#1872](https://github.com/pypeclub/OpenPype/pull/1872) +- Houdini: add Camera, Point Cache, Composite, Redshift ROP and VDB Cache support [\#1821](https://github.com/pypeclub/OpenPype/pull/1821) + +## [3.3.1](https://github.com/pypeclub/OpenPype/tree/3.3.1) (2021-08-20) + +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.3.1-nightly.1...3.3.1) + +**Merged pull requests:** - TVPaint: Fixed rendered frame indexes [\#1946](https://github.com/pypeclub/OpenPype/pull/1946) - Maya: Menu actions fix [\#1945](https://github.com/pypeclub/OpenPype/pull/1945) @@ -15,81 +40,57 @@ [Full Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.3.0-nightly.11...3.3.0) -**πŸš€ Enhancements** +**Merged pull requests:** - Python console interpreter [\#1940](https://github.com/pypeclub/OpenPype/pull/1940) +- Fix - make AE workfile publish to Ftrack configurable [\#1937](https://github.com/pypeclub/OpenPype/pull/1937) +- Fix - ftrack family was added incorrectly in some cases [\#1935](https://github.com/pypeclub/OpenPype/pull/1935) +- Settings UI: Breadcrumbs in settings [\#1932](https://github.com/pypeclub/OpenPype/pull/1932) +- Fix - Deadline publish on Linux started Tray instead of headless publishing [\#1930](https://github.com/pypeclub/OpenPype/pull/1930) +- Maya: Validate Model Name - repair accident deletion in settings defaults [\#1929](https://github.com/pypeclub/OpenPype/pull/1929) - Global: Updated logos and Default settings [\#1927](https://github.com/pypeclub/OpenPype/pull/1927) +- Nuke: submit to farm failed due `ftrack` family remove [\#1926](https://github.com/pypeclub/OpenPype/pull/1926) - Check for missing ✨ Python when using `pyenv` [\#1925](https://github.com/pypeclub/OpenPype/pull/1925) - Maya: Scene patching 🩹on submission to Deadline 
[\#1923](https://github.com/pypeclub/OpenPype/pull/1923) +- Fix - validate takes repre\["files"\] as list all the time [\#1922](https://github.com/pypeclub/OpenPype/pull/1922) - Settings: Default values for enum [\#1920](https://github.com/pypeclub/OpenPype/pull/1920) - Settings UI: Modifiable dict view enhance [\#1919](https://github.com/pypeclub/OpenPype/pull/1919) +- standalone: validator asset parents [\#1917](https://github.com/pypeclub/OpenPype/pull/1917) +- Nuke: update video file crassing [\#1916](https://github.com/pypeclub/OpenPype/pull/1916) +- Fix - texture validators for workfiles triggers only for textures workfiles [\#1914](https://github.com/pypeclub/OpenPype/pull/1914) - submodules: avalon-core update [\#1911](https://github.com/pypeclub/OpenPype/pull/1911) +- Settings UI: List order works as expected [\#1906](https://github.com/pypeclub/OpenPype/pull/1906) +- Add support for multiple Deadline β˜ οΈβž– servers [\#1905](https://github.com/pypeclub/OpenPype/pull/1905) +- Hiero: loaded clip was not set colorspace from version data [\#1904](https://github.com/pypeclub/OpenPype/pull/1904) +- Pyblish UI: Fix collecting stage processing [\#1903](https://github.com/pypeclub/OpenPype/pull/1903) +- Burnins: Use input's bitrate in h624 [\#1902](https://github.com/pypeclub/OpenPype/pull/1902) - Feature AE local render [\#1901](https://github.com/pypeclub/OpenPype/pull/1901) - Ftrack: Where I run action enhancement [\#1900](https://github.com/pypeclub/OpenPype/pull/1900) - Ftrack: Private project server actions [\#1899](https://github.com/pypeclub/OpenPype/pull/1899) - Support nested studio plugins paths. 
[\#1898](https://github.com/pypeclub/OpenPype/pull/1898) +- Bug: fixed python detection [\#1893](https://github.com/pypeclub/OpenPype/pull/1893) - Settings: global validators with options [\#1892](https://github.com/pypeclub/OpenPype/pull/1892) - Settings: Conditional dict enum positioning [\#1891](https://github.com/pypeclub/OpenPype/pull/1891) +- global: integrate name missing default template [\#1890](https://github.com/pypeclub/OpenPype/pull/1890) +- publisher: editorial plugins fixes [\#1889](https://github.com/pypeclub/OpenPype/pull/1889) - Expose stop timer through rest api. [\#1886](https://github.com/pypeclub/OpenPype/pull/1886) - TVPaint: Increment workfile [\#1885](https://github.com/pypeclub/OpenPype/pull/1885) - Allow Multiple Notes to run on tasks. [\#1882](https://github.com/pypeclub/OpenPype/pull/1882) +- Normalize path returned from Workfiles. [\#1880](https://github.com/pypeclub/OpenPype/pull/1880) - Prepare for pyside2 [\#1869](https://github.com/pypeclub/OpenPype/pull/1869) - Filter hosts in settings host-enum [\#1868](https://github.com/pypeclub/OpenPype/pull/1868) - Local actions with process identifier [\#1867](https://github.com/pypeclub/OpenPype/pull/1867) - Workfile tool start at host launch support [\#1865](https://github.com/pypeclub/OpenPype/pull/1865) -- Maya: support for configurable `dirmap` πŸ—ΊοΈ [\#1859](https://github.com/pypeclub/OpenPype/pull/1859) -- Settings list can use template or schema as object type [\#1815](https://github.com/pypeclub/OpenPype/pull/1815) - -**πŸ› Bug fixes** - -- Fix - ftrack family was added incorrectly in some cases [\#1935](https://github.com/pypeclub/OpenPype/pull/1935) -- Fix - Deadline publish on Linux started Tray instead of headless publishing [\#1930](https://github.com/pypeclub/OpenPype/pull/1930) -- Maya: Validate Model Name - repair accident deletion in settings defaults [\#1929](https://github.com/pypeclub/OpenPype/pull/1929) -- Nuke: submit to farm failed due `ftrack` family remove 
[\#1926](https://github.com/pypeclub/OpenPype/pull/1926) -- Fix - validate takes repre\["files"\] as list all the time [\#1922](https://github.com/pypeclub/OpenPype/pull/1922) -- standalone: validator asset parents [\#1917](https://github.com/pypeclub/OpenPype/pull/1917) -- Nuke: update video file crassing [\#1916](https://github.com/pypeclub/OpenPype/pull/1916) -- Fix - texture validators for workfiles triggers only for textures workfiles [\#1914](https://github.com/pypeclub/OpenPype/pull/1914) -- Fix - validators for textures workfiles trigger only for textures workfiles [\#1913](https://github.com/pypeclub/OpenPype/pull/1913) -- Settings UI: List order works as expected [\#1906](https://github.com/pypeclub/OpenPype/pull/1906) -- Hiero: loaded clip was not set colorspace from version data [\#1904](https://github.com/pypeclub/OpenPype/pull/1904) -- Pyblish UI: Fix collecting stage processing [\#1903](https://github.com/pypeclub/OpenPype/pull/1903) -- Burnins: Use input's bitrate in h624 [\#1902](https://github.com/pypeclub/OpenPype/pull/1902) -- Bug: fixed python detection [\#1893](https://github.com/pypeclub/OpenPype/pull/1893) -- global: integrate name missing default template [\#1890](https://github.com/pypeclub/OpenPype/pull/1890) -- publisher: editorial plugins fixes [\#1889](https://github.com/pypeclub/OpenPype/pull/1889) -- Normalize path returned from Workfiles. 
[\#1880](https://github.com/pypeclub/OpenPype/pull/1880) -- Workfiles tool event arguments fix [\#1862](https://github.com/pypeclub/OpenPype/pull/1862) -- Maya: don't add reference members as connections to the container set πŸ“¦ [\#1855](https://github.com/pypeclub/OpenPype/pull/1855) -- Settings error dialog on show [\#1798](https://github.com/pypeclub/OpenPype/pull/1798) - -**Merged pull requests:** - -- Fix - make AE workfile publish to Ftrack configurable [\#1937](https://github.com/pypeclub/OpenPype/pull/1937) -- Settings UI: Breadcrumbs in settings [\#1932](https://github.com/pypeclub/OpenPype/pull/1932) -- Add support for multiple Deadline β˜ οΈβž– servers [\#1905](https://github.com/pypeclub/OpenPype/pull/1905) - Maya: add support for `RedshiftNormalMap` node, fix `tx` linear space πŸš€ [\#1863](https://github.com/pypeclub/OpenPype/pull/1863) -- Maya: expected files -\> render products βš™οΈ overhaul [\#1812](https://github.com/pypeclub/OpenPype/pull/1812) +- Workfiles tool event arguments fix [\#1862](https://github.com/pypeclub/OpenPype/pull/1862) +- Maya: support for configurable `dirmap` πŸ—ΊοΈ [\#1859](https://github.com/pypeclub/OpenPype/pull/1859) +- Maya: don't add reference members as connections to the container set πŸ“¦ [\#1855](https://github.com/pypeclub/OpenPype/pull/1855) +- Settings list can use template or schema as object type [\#1815](https://github.com/pypeclub/OpenPype/pull/1815) ## [3.2.0](https://github.com/pypeclub/OpenPype/tree/3.2.0) (2021-07-13) [Full Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.2.0-nightly.7...3.2.0) -**πŸš€ Enhancements** - -- Nuke: ftrack family plugin settings preset [\#1805](https://github.com/pypeclub/OpenPype/pull/1805) -- Standalone publisher last project [\#1799](https://github.com/pypeclub/OpenPype/pull/1799) -- Ftrack Multiple notes as server action [\#1795](https://github.com/pypeclub/OpenPype/pull/1795) - -**πŸ› Bug fixes** - -- nuke: fixing wrong name of family folder when `used 
existing frames` [\#1803](https://github.com/pypeclub/OpenPype/pull/1803) -- Collect ftrack family bugs [\#1801](https://github.com/pypeclub/OpenPype/pull/1801) - -**Merged pull requests:** - -- Build: don't add Poetry to `PATH` [\#1808](https://github.com/pypeclub/OpenPype/pull/1808) - ## [2.18.4](https://github.com/pypeclub/OpenPype/tree/2.18.4) (2021-06-24) [Full Changelog](https://github.com/pypeclub/OpenPype/compare/2.18.3...2.18.4) diff --git a/Dockerfile b/Dockerfile index 74ab06a114..cef83b5811 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,7 +1,9 @@ # Build Pype docker image -FROM centos:7 AS system_builder -ARG OPENPYPE_PYTHON_VERSION=3.7.10 +FROM debian:bookworm-slim AS builder +ARG OPENPYPE_PYTHON_VERSION=3.7.12 +LABEL maintainer="info@openpype.io" +LABEL description="Docker Image to build and run OpenPype" LABEL org.opencontainers.image.name="pypeclub/openpype" LABEL org.opencontainers.image.title="OpenPype Docker Image" LABEL org.opencontainers.image.url="https://openpype.io/" @@ -9,73 +11,59 @@ LABEL org.opencontainers.image.source="https://github.com/pypeclub/pype" USER root -# update base -RUN yum -y install deltarpm \ - && yum -y update \ - && yum clean all +ARG DEBIAN_FRONTEND=noninteractive -# add tools we need -RUN yum -y install https://dl.fedoraproject.org/pub/epel/epel-release-latest-7.noarch.rpm \ - && yum -y install centos-release-scl \ - && yum -y install \ +# update base +RUN apt-get update \ + && apt-get install -y --no-install-recommends \ + ca-certificates \ bash \ - which \ git \ - devtoolset-7-gcc* \ - gcc-c++ \ - make \ cmake \ + make \ curl \ wget \ - gcc \ - zlib-devel \ - bzip2 \ - bzip2-devel \ - readline-devel \ - sqlite sqlite-devel \ - openssl-devel \ - tk-devel libffi-devel \ - qt5-qtbase-devel \ - autoconf \ - automake \ - ncurses-libs \ - && yum clean all + build-essential \ + checkinstall \ + libssl-dev \ + zlib1g-dev \ + libbz2-dev \ + libreadline-dev \ + libsqlite3-dev \ + llvm \ + libncursesw5-dev \ + xz-utils \ + 
tk-dev \ + libxml2-dev \ + libxmlsec1-dev \ + libffi-dev \ + liblzma-dev \ + patchelf -# we need to build our own patchelf -WORKDIR /temp-patchelf -RUN git clone https://github.com/NixOS/patchelf.git . \ - && source scl_source enable devtoolset-7 \ - && ./bootstrap.sh \ - && ./configure \ - && make \ - && make install - +SHELL ["/bin/bash", "-c"] -RUN curl https://pyenv.run | bash -ENV PYTHON_CONFIGURE_OPTS --enable-shared +RUN mkdir /opt/openpype -RUN echo 'export PATH="$HOME/.pyenv/bin:$PATH"'>> $HOME/.bashrc \ +RUN curl https://pyenv.run | bash \ + && echo 'export PATH="$HOME/.pyenv/bin:$PATH"'>> $HOME/.bashrc \ && echo 'eval "$(pyenv init -)"' >> $HOME/.bashrc \ && echo 'eval "$(pyenv virtualenv-init -)"' >> $HOME/.bashrc \ && echo 'eval "$(pyenv init --path)"' >> $HOME/.bashrc \ - && source $HOME/.bashrc \ - && pyenv install ${OPENPYPE_PYTHON_VERSION} + && source $HOME/.bashrc && pyenv install ${OPENPYPE_PYTHON_VERSION} + +COPY . /opt/openpype/ + +RUN chmod +x /opt/openpype/tools/create_env.sh && chmod +x /opt/openpype/tools/build.sh WORKDIR /opt/openpype -COPY . /opt/openpype/ -RUN rm -rf /opt/openpype/.poetry || echo "No Poetry installed yet." 
\ - && chmod +x /opt/openpype/tools/create_env.sh \ - && chmod +x /opt/openpype/tools/build.sh \ + +RUN cd /opt/openpype \ && source $HOME/.bashrc \ && pyenv local ${OPENPYPE_PYTHON_VERSION} RUN source $HOME/.bashrc \ && ./tools/create_env.sh \ - && source $HOME/.bashrc \ && ./tools/fetch_thirdparty_libs.sh RUN source $HOME/.bashrc \ - && bash ./tools/build.sh \ - && cp /usr/lib64/libffi* ./build/exe.linux-x86_64-3.7/lib \ - && cp /usr/lib64/libssl* ./build/exe.linux-x86_64-3.7/lib \ - && cp /usr/lib64/libcrypto* ./build/exe.linux-x86_64-3.7/lib + && bash ./tools/build.sh diff --git a/Dockerfile.centos7 b/Dockerfile.centos7 new file mode 100644 index 0000000000..0095ddff53 --- /dev/null +++ b/Dockerfile.centos7 @@ -0,0 +1,98 @@ +# Build Pype docker image +FROM centos:7 AS builder +ARG OPENPYPE_PYTHON_VERSION=3.7.10 + +LABEL org.opencontainers.image.name="pypeclub/openpype" +LABEL org.opencontainers.image.title="OpenPype Docker Image" +LABEL org.opencontainers.image.url="https://openpype.io/" +LABEL org.opencontainers.image.source="https://github.com/pypeclub/pype" + +USER root + +# update base +RUN yum -y install deltarpm \ + && yum -y update \ + && yum clean all + +# add tools we need +RUN yum -y install https://dl.fedoraproject.org/pub/epel/epel-release-latest-7.noarch.rpm \ + && yum -y install centos-release-scl \ + && yum -y install \ + bash \ + which \ + git \ + make \ + devtoolset-7 \ + cmake \ + curl \ + wget \ + gcc \ + zlib-devel \ + bzip2 \ + bzip2-devel \ + readline-devel \ + sqlite sqlite-devel \ + openssl-devel \ + openssl-libs \ + tk-devel libffi-devel \ + qt5-qtbase-devel \ + patchelf \ + automake \ + autoconf \ + ncurses \ + ncurses-devel \ + && yum clean all + +# we need to build our own patchelf +WORKDIR /temp-patchelf +RUN git clone https://github.com/NixOS/patchelf.git . 
\ + && source scl_source enable devtoolset-7 \ + && ./bootstrap.sh \ + && ./configure \ + && make \ + && make install + +RUN mkdir /opt/openpype +# RUN useradd -m pype +# RUN chown pype /opt/openpype +# USER pype + +RUN curl https://pyenv.run | bash +# ENV PYTHON_CONFIGURE_OPTS --enable-shared + +RUN echo 'export PATH="$HOME/.pyenv/bin:$PATH"'>> $HOME/.bashrc \ + && echo 'eval "$(pyenv init -)"' >> $HOME/.bashrc \ + && echo 'eval "$(pyenv virtualenv-init -)"' >> $HOME/.bashrc \ + && echo 'eval "$(pyenv init --path)"' >> $HOME/.bashrc +RUN source $HOME/.bashrc && pyenv install ${OPENPYPE_PYTHON_VERSION} + +COPY . /opt/openpype/ +RUN rm -rf /opt/openpype/.poetry || echo "No Poetry installed yet." +# USER root +# RUN chown -R pype /opt/openpype +RUN chmod +x /opt/openpype/tools/create_env.sh && chmod +x /opt/openpype/tools/build.sh + +# USER pype + +WORKDIR /opt/openpype + +RUN cd /opt/openpype \ + && source $HOME/.bashrc \ + && pyenv local ${OPENPYPE_PYTHON_VERSION} + +RUN source $HOME/.bashrc \ + && ./tools/create_env.sh + +RUN source $HOME/.bashrc \ + && ./tools/fetch_thirdparty_libs.sh + +RUN source $HOME/.bashrc \ + && bash ./tools/build.sh + +RUN cp /usr/lib64/libffi* ./build/exe.linux-x86_64-3.7/lib \ + && cp /usr/lib64/libssl* ./build/exe.linux-x86_64-3.7/lib \ + && cp /usr/lib64/libcrypto* ./build/exe.linux-x86_64-3.7/lib \ + && cp /root/.pyenv/versions/${OPENPYPE_PYTHON_VERSION}/lib/libpython* ./build/exe.linux-x86_64-3.7/lib + +RUN cd /opt/openpype \ + && rm -rf ./vendor/bin diff --git a/README.md b/README.md index 209af24c75..0e450fc48d 100644 --- a/README.md +++ b/README.md @@ -133,6 +133,12 @@ Easiest way to build OpenPype on Linux is using [Docker](https://www.docker.com/ sudo ./tools/docker_build.sh ``` +This will by default use Debian as base image. If you need to make Centos 7 compatible build, please run: + +```sh +sudo ./tools/docker_build.sh centos7 +``` + If all is successful, you'll find built OpenPype in `./build/` folder. 
#### Manual build @@ -158,6 +164,11 @@ you'll need also additional libraries for Qt5: ```sh sudo apt install qt5-default ``` +or if you are on Ubuntu > 20.04, there is no `qt5-default` packages so you need to install its content individually: + +```sh +sudo apt-get install qtbase5-dev qtchooser qt5-qmake qtbase5-dev-tools +```
diff --git a/igniter/__init__.py b/igniter/__init__.py index 20bf9be106..defd45e233 100644 --- a/igniter/__init__.py +++ b/igniter/__init__.py @@ -12,6 +12,9 @@ from .version import __version__ as version def open_dialog(): """Show Igniter dialog.""" + if os.getenv("OPENPYPE_HEADLESS_MODE"): + print("!!! Can't open dialog in headless mode. Exiting.") + sys.exit(1) from Qt import QtWidgets, QtCore from .install_dialog import InstallDialog @@ -28,8 +31,31 @@ def open_dialog(): return d.result() +def open_update_window(openpype_version): + """Open update window.""" + if os.getenv("OPENPYPE_HEADLESS_MODE"): + print("!!! Can't open dialog in headless mode. Exiting.") + sys.exit(1) + from Qt import QtWidgets, QtCore + from .update_window import UpdateWindow + + scale_attr = getattr(QtCore.Qt, "AA_EnableHighDpiScaling", None) + if scale_attr is not None: + QtWidgets.QApplication.setAttribute(scale_attr) + + app = QtWidgets.QApplication(sys.argv) + + d = UpdateWindow(version=openpype_version) + d.open() + + app.exec_() + version_path = d.get_version_path() + return version_path + + __all__ = [ "BootstrapRepos", "open_dialog", + "open_update_window", "version" ] diff --git a/igniter/bootstrap_repos.py b/igniter/bootstrap_repos.py index 8c081b8614..f7f35824c8 100644 --- a/igniter/bootstrap_repos.py +++ b/igniter/bootstrap_repos.py @@ -9,6 +9,7 @@ import sys import tempfile from pathlib import Path from typing import Union, Callable, List, Tuple +import hashlib from zipfile import ZipFile, BadZipFile @@ -28,6 +29,25 @@ LOG_WARNING = 1 LOG_ERROR = 3 +def sha256sum(filename): + """Calculate sha256 for content of the file. + + Args: + filename (str): Path to file. 
+ + Returns: + str: hex encoded sha256 + + """ + h = hashlib.sha256() + b = bytearray(128 * 1024) + mv = memoryview(b) + with open(filename, 'rb', buffering=0) as f: + for n in iter(lambda: f.readinto(mv), 0): + h.update(mv[:n]) + return h.hexdigest() + + class OpenPypeVersion(semver.VersionInfo): """Class for storing information about OpenPype version. @@ -261,7 +281,8 @@ class BootstrapRepos: self.live_repo_dir = Path(Path(__file__).parent / ".." / "repos") @staticmethod - def get_version_path_from_list(version: str, version_list: list) -> Path: + def get_version_path_from_list( + version: str, version_list: list) -> Union[Path, None]: """Get path for specific version in list of OpenPype versions. Args: @@ -275,6 +296,7 @@ class BootstrapRepos: for v in version_list: if str(v) == version: return v.path + return None @staticmethod def get_local_live_version() -> str: @@ -487,6 +509,7 @@ class BootstrapRepos: openpype_root = openpype_path.resolve() # generate list of filtered paths dir_filter = [openpype_root / f for f in self.openpype_filter] + checksums = [] file: Path for file in openpype_list: @@ -508,12 +531,119 @@ class BootstrapRepos: processed_path = file self._print(f"- processing {processed_path}") - zip_file.write(file, file.relative_to(openpype_root)) + checksums.append( + ( + sha256sum(file.as_posix()), + file.resolve().relative_to(openpype_root) + ) + ) + zip_file.write( + file, file.resolve().relative_to(openpype_root)) + checksums_str = "" + for c in checksums: + checksums_str += "{}:{}\n".format(c[0], c[1]) + zip_file.writestr("checksums", checksums_str) # test if zip is ok zip_file.testzip() self._progress_callback(100) + def validate_openpype_version(self, path: Path) -> tuple: + """Validate version directory or zip file. + + This will load `checksums` file if present, calculate checksums + of existing files in given path and compare. It will also compare + lists of files together for missing files. 
+ + Args: + path (Path): Path to OpenPype version to validate. + + Returns: + tuple(bool, str): with version validity as first item + and string with reason as second. + + """ + if not path.exists(): + return False, "Path doesn't exist" + + if path.is_file(): + return self._validate_zip(path) + return self._validate_dir(path) + + @staticmethod + def _validate_zip(path: Path) -> tuple: + """Validate content of zip file.""" + with ZipFile(path, "r") as zip_file: + # read checksums + try: + checksums_data = zip_file.read("checksums").decode("utf-8") + except KeyError: + # FIXME: This should be set to False sometimes in the future + return True, "Cannot read checksums for archive." + + # split it to the list of tuples + checksums = [ + tuple(line.split(":")) + for line in checksums_data.split("\n") if line + ] + + # calculate and compare checksums in the zip file + for file in checksums: + h = hashlib.sha256() + try: + h.update(zip_file.read(file[1])) + except KeyError: + return False, f"Missing file [ {file[1]} ]" + if h.hexdigest() != file[0]: + return False, f"Invalid checksum on {file[1]}" + + # get list of files in zip minus `checksums` file itself + # and turn in to set to compare against list of files + # from checksum file. If difference exists, something is + # wrong + files_in_zip = zip_file.namelist() + files_in_zip.remove("checksums") + files_in_zip = set(files_in_zip) + files_in_checksum = set([file[1] for file in checksums]) + diff = files_in_zip.difference(files_in_checksum) + if diff: + return False, f"Missing files {diff}" + + return True, "All ok" + + @staticmethod + def _validate_dir(path: Path) -> tuple: + checksums_file = Path(path / "checksums") + if not checksums_file.exists(): + # FIXME: This should be set to False sometimes in the future + return True, "Cannot read checksums for archive." 
+ checksums_data = checksums_file.read_text() + checksums = [ + tuple(line.split(":")) + for line in checksums_data.split("\n") if line + ] + files_in_dir = [ + file.relative_to(path).as_posix() + for file in path.iterdir() if file.is_file() + ] + files_in_dir.remove("checksums") + files_in_dir = set(files_in_dir) + files_in_checksum = set([file[1] for file in checksums]) + + for file in checksums: + try: + current = sha256sum((path / file[1]).as_posix()) + except FileNotFoundError: + return False, f"Missing file [ {file[1]} ]" + + if file[0] != current: + return False, f"Invalid checksum on {file[1]}" + diff = files_in_dir.difference(files_in_checksum) + if diff: + return False, f"Missing files {diff}" + + return True, "All ok" + @staticmethod def add_paths_from_archive(archive: Path) -> None: """Add first-level directory and 'repos' as paths to :mod:`sys.path`. @@ -837,6 +967,7 @@ class BootstrapRepos: # test if destination directory already exist, if so lets delete it. if destination.exists() and force: + self._print("removing existing directory") try: shutil.rmtree(destination) except OSError as e: @@ -846,6 +977,7 @@ class BootstrapRepos: raise OpenPypeVersionIOError( f"cannot remove existing {destination}") from e elif destination.exists() and not force: + self._print("destination directory already exists") raise OpenPypeVersionExists(f"{destination} already exist.") else: # create destination parent directories even if they don't exist. @@ -855,6 +987,7 @@ class BootstrapRepos: if openpype_version.path.is_dir(): # create zip inside temporary directory. 
self._print("Creating zip from directory ...") + self._progress_callback(0) with tempfile.TemporaryDirectory() as temp_dir: temp_zip = \ Path(temp_dir) / f"openpype-v{openpype_version}.zip" @@ -880,13 +1013,16 @@ class BootstrapRepos: raise OpenPypeVersionInvalid("Invalid file format") if not self.is_inside_user_data(openpype_version.path): + self._progress_callback(35) openpype_version.path = self._copy_zip( openpype_version.path, destination) # extract zip there self._print("extracting zip to destination ...") with ZipFile(openpype_version.path, "r") as zip_ref: + self._progress_callback(75) zip_ref.extractall(destination) + self._progress_callback(100) return destination diff --git a/igniter/install_dialog.py b/igniter/install_dialog.py index 1ec8cc6768..1fe67e3397 100644 --- a/igniter/install_dialog.py +++ b/igniter/install_dialog.py @@ -14,21 +14,13 @@ from .tools import ( validate_mongo_connection, get_openpype_path_from_db ) + +from .nice_progress_bar import NiceProgressBar from .user_settings import OpenPypeSecureRegistry +from .tools import load_stylesheet from .version import __version__ -def load_stylesheet(): - stylesheet_path = os.path.join( - os.path.dirname(__file__), - "stylesheet.css" - ) - with open(stylesheet_path, "r") as file_stream: - stylesheet = file_stream.read() - - return stylesheet - - class ButtonWithOptions(QtWidgets.QFrame): option_clicked = QtCore.Signal(str) @@ -91,25 +83,6 @@ class ButtonWithOptions(QtWidgets.QFrame): self.option_clicked.emit(self._default_value) -class NiceProgressBar(QtWidgets.QProgressBar): - def __init__(self, parent=None): - super(NiceProgressBar, self).__init__(parent) - self._real_value = 0 - - def setValue(self, value): - self._real_value = value - if value != 0 and value < 11: - value = 11 - - super(NiceProgressBar, self).setValue(value) - - def value(self): - return self._real_value - - def text(self): - return "{} %".format(self._real_value) - - class ConsoleWidget(QtWidgets.QWidget): def __init__(self, 
parent=None): super(ConsoleWidget, self).__init__(parent) diff --git a/igniter/nice_progress_bar.py b/igniter/nice_progress_bar.py new file mode 100644 index 0000000000..47d695a101 --- /dev/null +++ b/igniter/nice_progress_bar.py @@ -0,0 +1,20 @@ +from Qt import QtCore, QtGui, QtWidgets # noqa + + +class NiceProgressBar(QtWidgets.QProgressBar): + def __init__(self, parent=None): + super(NiceProgressBar, self).__init__(parent) + self._real_value = 0 + + def setValue(self, value): + self._real_value = value + if value != 0 and value < 11: + value = 11 + + super(NiceProgressBar, self).setValue(value) + + def value(self): + return self._real_value + + def text(self): + return "{} %".format(self._real_value) diff --git a/igniter/tools.py b/igniter/tools.py index 529d535c25..c934289064 100644 --- a/igniter/tools.py +++ b/igniter/tools.py @@ -248,3 +248,15 @@ def get_openpype_path_from_db(url: str) -> Union[str, None]: if os.path.exists(path): return path return None + + +def load_stylesheet() -> str: + """Load css style sheet. + + Returns: + str: content of the stylesheet + + """ + stylesheet_path = Path(__file__).parent.resolve() / "stylesheet.css" + + return stylesheet_path.read_text() diff --git a/igniter/update_thread.py b/igniter/update_thread.py new file mode 100644 index 0000000000..f4fc729faf --- /dev/null +++ b/igniter/update_thread.py @@ -0,0 +1,61 @@ +# -*- coding: utf-8 -*- +"""Working thread for update.""" +from Qt.QtCore import QThread, Signal, QObject # noqa + +from .bootstrap_repos import ( + BootstrapRepos, + OpenPypeVersion +) + + +class UpdateThread(QThread): + """Install Worker thread. + + This class takes care of finding OpenPype version on user entered path + (or loading this path from database). If nothing is entered by user, + OpenPype will create its zip files from repositories that comes with it. + + If path contains plain repositories, they are zipped and installed to + user data dir. 
+ + """ + progress = Signal(int) + message = Signal((str, bool)) + + def __init__(self, parent=None): + self._result = None + self._openpype_version = None + QThread.__init__(self, parent) + + def set_version(self, openpype_version: OpenPypeVersion): + self._openpype_version = openpype_version + + def result(self): + """Result of finished installation.""" + return self._result + + def _set_result(self, value): + if self._result is not None: + raise AssertionError("BUG: Result was set more than once!") + self._result = value + + def run(self): + """Thread entry point. + + Using :class:`BootstrapRepos` to either install OpenPype as zip files + or copy them from location specified by user or retrieved from + database. + """ + bs = BootstrapRepos( + progress_callback=self.set_progress, message=self.message) + version_path = bs.install_version(self._openpype_version) + self._set_result(version_path) + + def set_progress(self, progress: int) -> None: + """Helper to set progress bar. + + Args: + progress (int): Progress in percents. + + """ + self.progress.emit(progress) diff --git a/igniter/update_window.py b/igniter/update_window.py new file mode 100644 index 0000000000..d7908c240b --- /dev/null +++ b/igniter/update_window.py @@ -0,0 +1,136 @@ +# -*- coding: utf-8 -*- +"""Progress window to show when OpenPype is updating/installing locally.""" +import os +from .update_thread import UpdateThread +from Qt import QtCore, QtGui, QtWidgets # noqa +from .bootstrap_repos import OpenPypeVersion +from .nice_progress_bar import NiceProgressBar +from .tools import load_stylesheet + + +class UpdateWindow(QtWidgets.QDialog): + """OpenPype update window.""" + + _width = 500 + _height = 100 + + def __init__(self, version: OpenPypeVersion, parent=None): + super(UpdateWindow, self).__init__(parent) + self._openpype_version = version + self._result_version_path = None + + self.setWindowTitle( + f"OpenPype is updating ..." 
+ ) + self.setModal(True) + self.setWindowFlags( + QtCore.Qt.WindowMinimizeButtonHint + ) + + current_dir = os.path.dirname(os.path.abspath(__file__)) + roboto_font_path = os.path.join(current_dir, "RobotoMono-Regular.ttf") + poppins_font_path = os.path.join(current_dir, "Poppins") + icon_path = os.path.join(current_dir, "openpype_icon.png") + + # Install roboto font + QtGui.QFontDatabase.addApplicationFont(roboto_font_path) + for filename in os.listdir(poppins_font_path): + if os.path.splitext(filename)[1] == ".ttf": + QtGui.QFontDatabase.addApplicationFont(filename) + + # Load logo + pixmap_openpype_logo = QtGui.QPixmap(icon_path) + # Set logo as icon of window + self.setWindowIcon(QtGui.QIcon(pixmap_openpype_logo)) + + self._pixmap_openpype_logo = pixmap_openpype_logo + + self._update_thread = None + + self.resize(QtCore.QSize(self._width, self._height)) + self._init_ui() + + # Set stylesheet + self.setStyleSheet(load_stylesheet()) + self._run_update() + + def _init_ui(self): + + # Main info + # -------------------------------------------------------------------- + main_label = QtWidgets.QLabel( + f"OpenPype is updating to {self._openpype_version}", self) + main_label.setWordWrap(True) + main_label.setObjectName("MainLabel") + + # Progress bar + # -------------------------------------------------------------------- + progress_bar = NiceProgressBar(self) + progress_bar.setAlignment(QtCore.Qt.AlignCenter) + progress_bar.setTextVisible(False) + + # add all to main + main = QtWidgets.QVBoxLayout(self) + main.addSpacing(15) + main.addWidget(main_label, 0) + main.addSpacing(15) + main.addWidget(progress_bar, 0) + main.addSpacing(15) + + self._progress_bar = progress_bar + + def _run_update(self): + """Start install process. + + This will once again validate entered path and mongo if ok, start + working thread that will do actual job. 
+ """ + # Check if install thread is not already running + if self._update_thread and self._update_thread.isRunning(): + return + self._progress_bar.setRange(0, 0) + update_thread = UpdateThread(self) + update_thread.set_version(self._openpype_version) + update_thread.message.connect(self.update_console) + update_thread.progress.connect(self._update_progress) + update_thread.finished.connect(self._installation_finished) + + self._update_thread = update_thread + + update_thread.start() + + def get_version_path(self): + return self._result_version_path + + def _installation_finished(self): + status = self._update_thread.result() + self._result_version_path = status + self._progress_bar.setRange(0, 1) + self._update_progress(100) + QtWidgets.QApplication.processEvents() + self.done(0) + + def _update_progress(self, progress: int): + # not updating progress as we are not able to determine it + # correctly now. Progress bar is set to un-deterministic mode + # until we are able to get progress in better way. + """ + self._progress_bar.setRange(0, 0) + self._progress_bar.setValue(progress) + text_visible = self._progress_bar.isTextVisible() + if progress == 0: + if text_visible: + self._progress_bar.setTextVisible(False) + elif not text_visible: + self._progress_bar.setTextVisible(True) + """ + return + + def update_console(self, msg: str, error: bool = False) -> None: + """Display message in console. + + Args: + msg (str): message. + error (bool): if True, print it red. 
+ """ + print(msg) diff --git a/openpype/__init__.py b/openpype/__init__.py index e7462e14e9..9d55006a67 100644 --- a/openpype/__init__.py +++ b/openpype/__init__.py @@ -68,6 +68,10 @@ def patched_discover(superclass): def install(): """Install Pype to Avalon.""" from pyblish.lib import MessageHandler + from openpype.modules import load_modules + + # Make sure modules are loaded + load_modules() def modified_emit(obj, record): """Method replacing `emit` in Pyblish's MessageHandler.""" diff --git a/openpype/cli.py b/openpype/cli.py index ec5b04c468..18cc1c63cd 100644 --- a/openpype/cli.py +++ b/openpype/cli.py @@ -18,6 +18,8 @@ from .pype_commands import PypeCommands @click.option("--list-versions", is_flag=True, expose_value=False, help=("list all detected versions. Use With `--use-staging " "to list staging versions.")) +@click.option("--validate-version", expose_value=False, + help="validate given version integrity") def main(ctx): """Pype is main command serving as entry point to pipeline system. @@ -94,6 +96,31 @@ def eventserver(debug, ) +@main.command() +@click.option("-d", "--debug", is_flag=True, help="Print debug messages") +@click.option("-h", "--host", help="Host", default=None) +@click.option("-p", "--port", help="Port", default=None) +@click.option("-e", "--executable", help="Executable") +@click.option("-u", "--upload_dir", help="Upload dir") +def webpublisherwebserver(debug, executable, upload_dir, host=None, port=None): + """Starts webserver for communication with Webpublish FR via command line + + OP must be congigured on a machine, eg. OPENPYPE_MONGO filled AND + FTRACK_BOT_API_KEY provided with api key from Ftrack. + + Expect "pype.club" user created on Ftrack. 
+ """ + if debug: + os.environ['OPENPYPE_DEBUG'] = "3" + + PypeCommands().launch_webpublisher_webservercli( + upload_dir=upload_dir, + executable=executable, + host=host, + port=port + ) + + @main.command() @click.argument("output_json_path") @click.option("--project", help="Project name", default=None) @@ -131,6 +158,25 @@ def publish(debug, paths, targets): PypeCommands.publish(list(paths), targets) +@main.command() +@click.argument("path") +@click.option("-d", "--debug", is_flag=True, help="Print debug messages") +@click.option("-h", "--host", help="Host") +@click.option("-u", "--user", help="User email address") +@click.option("-p", "--project", help="Project") +@click.option("-t", "--targets", help="Targets", default=None, + multiple=True) +def remotepublish(debug, project, path, host, targets=None, user=None): + """Start CLI publishing. + + Publish collects json from paths provided as an argument. + More than one path is allowed. + """ + if debug: + os.environ['OPENPYPE_DEBUG'] = '3' + PypeCommands.remotepublish(project, path, host, user, targets=targets) + + @main.command() @click.option("-d", "--debug", is_flag=True, help="Print debug messages") @click.option("-p", "--project", required=True, diff --git a/openpype/hosts/blender/api/plugin.py b/openpype/hosts/blender/api/plugin.py index de30da3319..50b73ade2b 100644 --- a/openpype/hosts/blender/api/plugin.py +++ b/openpype/hosts/blender/api/plugin.py @@ -5,11 +5,12 @@ from typing import Dict, List, Optional import bpy -from avalon import api -import avalon.blender +from avalon import api, blender +from avalon.blender import ops +from avalon.blender.pipeline import AVALON_CONTAINERS from openpype.api import PypeCreatorMixin -VALID_EXTENSIONS = [".blend", ".json", ".abc"] +VALID_EXTENSIONS = [".blend", ".json", ".abc", ".fbx"] def asset_name( @@ -27,32 +28,24 @@ def get_unique_number( asset: str, subset: str ) -> str: """Return a unique number based on the asset name.""" - avalon_containers = [ - c for c in 
bpy.data.collections - if c.name == 'AVALON_CONTAINERS' - ] - containers = [] - # First, add the children of avalon containers - for c in avalon_containers: - containers.extend(c.children) - # then keep looping to include all the children - for c in containers: - containers.extend(c.children) - container_names = [ - c.name for c in containers - ] + avalon_container = bpy.data.collections.get(AVALON_CONTAINERS) + if not avalon_container: + return "01" + asset_groups = avalon_container.all_objects + + container_names = [c.name for c in asset_groups if c.type == 'EMPTY'] count = 1 - name = f"{asset}_{count:0>2}_{subset}_CON" + name = f"{asset}_{count:0>2}_{subset}" while name in container_names: count += 1 - name = f"{asset}_{count:0>2}_{subset}_CON" + name = f"{asset}_{count:0>2}_{subset}" return f"{count:0>2}" def prepare_data(data, container_name): name = data.name local_data = data.make_local() - local_data.name = f"{name}:{container_name}" + local_data.name = f"{container_name}:{name}" return local_data @@ -102,7 +95,7 @@ def get_local_collection_with_name(name): return None -class Creator(PypeCreatorMixin, avalon.blender.Creator): +class Creator(PypeCreatorMixin, blender.Creator): pass @@ -173,6 +166,16 @@ class AssetLoader(api.Loader): name: Optional[str] = None, namespace: Optional[str] = None, options: Optional[Dict] = None) -> Optional[bpy.types.Collection]: + """ Run the loader on Blender main thread""" + mti = ops.MainThreadItem(self._load, context, name, namespace, options) + ops.execute_in_main_thread(mti) + + def _load(self, + context: dict, + name: Optional[str] = None, + namespace: Optional[str] = None, + options: Optional[Dict] = None + ) -> Optional[bpy.types.Collection]: """Load asset via database Arguments: @@ -218,16 +221,26 @@ class AssetLoader(api.Loader): # loader=self.__class__.__name__, # ) - asset = context["asset"]["name"] - subset = context["subset"]["name"] - instance_name = asset_name(asset, subset, unique_number) + '_CON' + # asset = 
context["asset"]["name"] + # subset = context["subset"]["name"] + # instance_name = asset_name(asset, subset, unique_number) + '_CON' - return self._get_instance_collection(instance_name, nodes) + # return self._get_instance_collection(instance_name, nodes) + + def exec_update(self, container: Dict, representation: Dict): + """Must be implemented by a sub-class""" + raise NotImplementedError("Must be implemented by a sub-class") def update(self, container: Dict, representation: Dict): + """ Run the update on Blender main thread""" + mti = ops.MainThreadItem(self.exec_update, container, representation) + ops.execute_in_main_thread(mti) + + def exec_remove(self, container: Dict) -> bool: """Must be implemented by a sub-class""" raise NotImplementedError("Must be implemented by a sub-class") def remove(self, container: Dict) -> bool: - """Must be implemented by a sub-class""" - raise NotImplementedError("Must be implemented by a sub-class") + """ Run the remove on Blender main thread""" + mti = ops.MainThreadItem(self.exec_remove, container) + ops.execute_in_main_thread(mti) diff --git a/openpype/hosts/blender/hooks/pre_windows_console.py b/openpype/hosts/blender/hooks/pre_windows_console.py new file mode 100644 index 0000000000..d6be45b225 --- /dev/null +++ b/openpype/hosts/blender/hooks/pre_windows_console.py @@ -0,0 +1,28 @@ +import subprocess +from openpype.lib import PreLaunchHook + + +class BlenderConsoleWindows(PreLaunchHook): + """Foundry applications have specific way how to launch them. + + Blender is executed "like" python process so it is required to pass + `CREATE_NEW_CONSOLE` flag on windows to trigger creation of new console. + At the same time the newly created console won't create it's own stdout + and stderr handlers so they should not be redirected to DEVNULL. 
+ """ + + # Should be as last hook because must change launch arguments to string + order = 1000 + app_groups = ["blender"] + platforms = ["windows"] + + def execute(self): + # Change `creationflags` to CREATE_NEW_CONSOLE + # - on Windows will blender create new window using it's console + # Set `stdout` and `stderr` to None so new created console does not + # have redirected output to DEVNULL in build + self.launch_context.kwargs.update({ + "creationflags": subprocess.CREATE_NEW_CONSOLE, + "stdout": None, + "stderr": None + }) diff --git a/openpype/hosts/blender/plugins/create/create_animation.py b/openpype/hosts/blender/plugins/create/create_animation.py index 9aebf7e9b7..f7887b7e80 100644 --- a/openpype/hosts/blender/plugins/create/create_animation.py +++ b/openpype/hosts/blender/plugins/create/create_animation.py @@ -2,11 +2,13 @@ import bpy -from avalon import api, blender -import openpype.hosts.blender.api.plugin +from avalon import api +from avalon.blender import lib, ops +from avalon.blender.pipeline import AVALON_INSTANCES +from openpype.hosts.blender.api import plugin -class CreateAnimation(openpype.hosts.blender.api.plugin.Creator): +class CreateAnimation(plugin.Creator): """Animation output for character rigs""" name = "animationMain" @@ -15,16 +17,36 @@ class CreateAnimation(openpype.hosts.blender.api.plugin.Creator): icon = "male" def process(self): + """ Run the creator on Blender main thread""" + mti = ops.MainThreadItem(self._process) + ops.execute_in_main_thread(mti) + + def _process(self): + # Get Instance Containter or create it if it does not exist + instances = bpy.data.collections.get(AVALON_INSTANCES) + if not instances: + instances = bpy.data.collections.new(name=AVALON_INSTANCES) + bpy.context.scene.collection.children.link(instances) + + # Create instance object + # name = self.name + # if not name: asset = self.data["asset"] subset = self.data["subset"] - name = openpype.hosts.blender.api.plugin.asset_name(asset, subset) - collection = 
bpy.data.collections.new(name=name) - bpy.context.scene.collection.children.link(collection) + name = plugin.asset_name(asset, subset) + # asset_group = bpy.data.objects.new(name=name, object_data=None) + # asset_group.empty_display_type = 'SINGLE_ARROW' + asset_group = bpy.data.collections.new(name=name) + instances.children.link(asset_group) self.data['task'] = api.Session.get('AVALON_TASK') - blender.lib.imprint(collection, self.data) + lib.imprint(asset_group, self.data) if (self.options or {}).get("useSelection"): - for obj in blender.lib.get_selection(): - collection.objects.link(obj) + selected = lib.get_selection() + for obj in selected: + asset_group.objects.link(obj) + elif (self.options or {}).get("asset_group"): + obj = (self.options or {}).get("asset_group") + asset_group.objects.link(obj) - return collection + return asset_group diff --git a/openpype/hosts/blender/plugins/create/create_layout.py b/openpype/hosts/blender/plugins/create/create_layout.py index 5404cec587..831261f027 100644 --- a/openpype/hosts/blender/plugins/create/create_layout.py +++ b/openpype/hosts/blender/plugins/create/create_layout.py @@ -3,11 +3,12 @@ import bpy from avalon import api -from avalon.blender import lib -import openpype.hosts.blender.api.plugin +from avalon.blender import lib, ops +from avalon.blender.pipeline import AVALON_INSTANCES +from openpype.hosts.blender.api import plugin -class CreateLayout(openpype.hosts.blender.api.plugin.Creator): +class CreateLayout(plugin.Creator): """Layout output for character rigs""" name = "layoutMain" @@ -16,13 +17,34 @@ class CreateLayout(openpype.hosts.blender.api.plugin.Creator): icon = "cubes" def process(self): + """ Run the creator on Blender main thread""" + mti = ops.MainThreadItem(self._process) + ops.execute_in_main_thread(mti) + def _process(self): + # Get Instance Containter or create it if it does not exist + instances = bpy.data.collections.get(AVALON_INSTANCES) + if not instances: + instances = 
bpy.data.collections.new(name=AVALON_INSTANCES) + bpy.context.scene.collection.children.link(instances) + + # Create instance object asset = self.data["asset"] subset = self.data["subset"] - name = openpype.hosts.blender.api.plugin.asset_name(asset, subset) - collection = bpy.context.collection - collection.name = name + name = plugin.asset_name(asset, subset) + asset_group = bpy.data.objects.new(name=name, object_data=None) + asset_group.empty_display_type = 'SINGLE_ARROW' + instances.objects.link(asset_group) self.data['task'] = api.Session.get('AVALON_TASK') - lib.imprint(collection, self.data) + lib.imprint(asset_group, self.data) - return collection + # Add selected objects to instance + if (self.options or {}).get("useSelection"): + bpy.context.view_layer.objects.active = asset_group + selected = lib.get_selection() + for obj in selected: + obj.select_set(True) + selected.append(asset_group) + bpy.ops.object.parent_set(keep_transform=True) + + return asset_group diff --git a/openpype/hosts/blender/plugins/create/create_model.py b/openpype/hosts/blender/plugins/create/create_model.py index 921d86513b..e778f5b74f 100644 --- a/openpype/hosts/blender/plugins/create/create_model.py +++ b/openpype/hosts/blender/plugins/create/create_model.py @@ -3,11 +3,12 @@ import bpy from avalon import api -from avalon.blender import lib -import openpype.hosts.blender.api.plugin +from avalon.blender import lib, ops +from avalon.blender.pipeline import AVALON_INSTANCES +from openpype.hosts.blender.api import plugin -class CreateModel(openpype.hosts.blender.api.plugin.Creator): +class CreateModel(plugin.Creator): """Polygonal static geometry""" name = "modelMain" @@ -16,17 +17,34 @@ class CreateModel(openpype.hosts.blender.api.plugin.Creator): icon = "cube" def process(self): + """ Run the creator on Blender main thread""" + mti = ops.MainThreadItem(self._process) + ops.execute_in_main_thread(mti) + def _process(self): + # Get Instance Containter or create it if it does not exist 
+ instances = bpy.data.collections.get(AVALON_INSTANCES) + if not instances: + instances = bpy.data.collections.new(name=AVALON_INSTANCES) + bpy.context.scene.collection.children.link(instances) + + # Create instance object asset = self.data["asset"] subset = self.data["subset"] - name = openpype.hosts.blender.api.plugin.asset_name(asset, subset) - collection = bpy.data.collections.new(name=name) - bpy.context.scene.collection.children.link(collection) + name = plugin.asset_name(asset, subset) + asset_group = bpy.data.objects.new(name=name, object_data=None) + asset_group.empty_display_type = 'SINGLE_ARROW' + instances.objects.link(asset_group) self.data['task'] = api.Session.get('AVALON_TASK') - lib.imprint(collection, self.data) + lib.imprint(asset_group, self.data) + # Add selected objects to instance if (self.options or {}).get("useSelection"): - for obj in lib.get_selection(): - collection.objects.link(obj) + bpy.context.view_layer.objects.active = asset_group + selected = lib.get_selection() + for obj in selected: + obj.select_set(True) + selected.append(asset_group) + bpy.ops.object.parent_set(keep_transform=True) - return collection + return asset_group diff --git a/openpype/hosts/blender/plugins/create/create_rig.py b/openpype/hosts/blender/plugins/create/create_rig.py index 116fb9f742..2e1c71f570 100644 --- a/openpype/hosts/blender/plugins/create/create_rig.py +++ b/openpype/hosts/blender/plugins/create/create_rig.py @@ -3,11 +3,12 @@ import bpy from avalon import api -from avalon.blender import lib -import openpype.hosts.blender.api.plugin +from avalon.blender import lib, ops +from avalon.blender.pipeline import AVALON_INSTANCES +from openpype.hosts.blender.api import plugin -class CreateRig(openpype.hosts.blender.api.plugin.Creator): +class CreateRig(plugin.Creator): """Artist-friendly rig with controls to direct motion""" name = "rigMain" @@ -16,26 +17,34 @@ class CreateRig(openpype.hosts.blender.api.plugin.Creator): icon = "wheelchair" def 
process(self): + """ Run the creator on Blender main thread""" + mti = ops.MainThreadItem(self._process) + ops.execute_in_main_thread(mti) + def _process(self): + # Get Instance Containter or create it if it does not exist + instances = bpy.data.collections.get(AVALON_INSTANCES) + if not instances: + instances = bpy.data.collections.new(name=AVALON_INSTANCES) + bpy.context.scene.collection.children.link(instances) + + # Create instance object asset = self.data["asset"] subset = self.data["subset"] - name = openpype.hosts.blender.api.plugin.asset_name(asset, subset) - collection = bpy.data.collections.new(name=name) - bpy.context.scene.collection.children.link(collection) + name = plugin.asset_name(asset, subset) + asset_group = bpy.data.objects.new(name=name, object_data=None) + asset_group.empty_display_type = 'SINGLE_ARROW' + instances.objects.link(asset_group) self.data['task'] = api.Session.get('AVALON_TASK') - lib.imprint(collection, self.data) - - # Add the rig object and all the children meshes to - # a set and link them all at the end to avoid duplicates. - # Blender crashes if trying to link an object that is already linked. - # This links automatically the children meshes if they were not - # selected, and doesn't link them twice if they, insted, - # were manually selected by the user. 
+ lib.imprint(asset_group, self.data) + # Add selected objects to instance if (self.options or {}).get("useSelection"): - for obj in lib.get_selection(): - for child in obj.users_collection[0].children: - collection.children.link(child) - collection.objects.link(obj) + bpy.context.view_layer.objects.active = asset_group + selected = lib.get_selection() + for obj in selected: + obj.select_set(True) + selected.append(asset_group) + bpy.ops.object.parent_set(keep_transform=True) - return collection + return asset_group diff --git a/openpype/hosts/blender/plugins/load/load_abc.py b/openpype/hosts/blender/plugins/load/load_abc.py index 4248cffd69..92656fac9e 100644 --- a/openpype/hosts/blender/plugins/load/load_abc.py +++ b/openpype/hosts/blender/plugins/load/load_abc.py @@ -4,9 +4,14 @@ from pathlib import Path from pprint import pformat from typing import Dict, List, Optional -from avalon import api, blender import bpy -import openpype.hosts.blender.api.plugin as plugin + +from avalon import api +from avalon.blender import lib +from avalon.blender.pipeline import AVALON_CONTAINERS +from avalon.blender.pipeline import AVALON_CONTAINER_ID +from avalon.blender.pipeline import AVALON_PROPERTY +from openpype.hosts.blender.api import plugin class CacheModelLoader(plugin.AssetLoader): @@ -21,24 +26,30 @@ class CacheModelLoader(plugin.AssetLoader): families = ["model", "pointcache"] representations = ["abc"] - label = "Link Alembic" + label = "Load Alembic" icon = "code-fork" color = "orange" - def _remove(self, objects, container): - for obj in list(objects): + def _remove(self, asset_group): + objects = list(asset_group.children) + empties = [] + + for obj in objects: if obj.type == 'MESH': + for material_slot in list(obj.material_slots): + bpy.data.materials.remove(material_slot.material) bpy.data.meshes.remove(obj.data) elif obj.type == 'EMPTY': - bpy.data.objects.remove(obj) + objects.extend(obj.children) + empties.append(obj) - bpy.data.collections.remove(container) + 
for empty in empties: + bpy.data.objects.remove(empty) - def _process(self, libpath, container_name, parent_collection): + def _process(self, libpath, asset_group, group_name): bpy.ops.object.select_all(action='DESELECT') - view_layer = bpy.context.view_layer - view_layer_collection = view_layer.active_layer_collection.collection + collection = bpy.context.view_layer.active_layer_collection.collection relative = bpy.context.preferences.filepaths.use_relative_paths bpy.ops.wm.alembic_import( @@ -46,34 +57,61 @@ class CacheModelLoader(plugin.AssetLoader): relative_path=relative ) - parent = parent_collection + parent = bpy.context.scene.collection - if parent is None: - parent = bpy.context.scene.collection + imported = lib.get_selection() - model_container = bpy.data.collections.new(container_name) - parent.children.link(model_container) - for obj in bpy.context.selected_objects: - model_container.objects.link(obj) - view_layer_collection.objects.unlink(obj) + empties = [obj for obj in imported if obj.type == 'EMPTY'] + container = None + + for empty in empties: + if not empty.parent: + container = empty + break + + assert container, "No asset group found" + + # Children must be linked before parents, + # otherwise the hierarchy will break + objects = [] + nodes = list(container.children) + + for obj in nodes: + obj.parent = asset_group + + bpy.data.objects.remove(container) + + for obj in nodes: + objects.append(obj) + nodes.extend(list(obj.children)) + + objects.reverse() + + for obj in objects: + parent.objects.link(obj) + collection.objects.unlink(obj) + + for obj in objects: name = obj.name - obj.name = f"{name}:{container_name}" + obj.name = f"{group_name}:{name}" + if obj.type != 'EMPTY': + name_data = obj.data.name + obj.data.name = f"{group_name}:{name_data}" - # Groups are imported as Empty objects in Blender - if obj.type == 'MESH': - data_name = obj.data.name - obj.data.name = f"{data_name}:{container_name}" + for material_slot in obj.material_slots: + 
name_mat = material_slot.material.name + material_slot.material.name = f"{group_name}:{name_mat}" - if not obj.get(blender.pipeline.AVALON_PROPERTY): - obj[blender.pipeline.AVALON_PROPERTY] = dict() + if not obj.get(AVALON_PROPERTY): + obj[AVALON_PROPERTY] = dict() - avalon_info = obj[blender.pipeline.AVALON_PROPERTY] - avalon_info.update({"container_name": container_name}) + avalon_info = obj[AVALON_PROPERTY] + avalon_info.update({"container_name": group_name}) bpy.ops.object.select_all(action='DESELECT') - return model_container + return objects def process_asset( self, context: dict, name: str, namespace: Optional[str] = None, @@ -91,47 +129,41 @@ class CacheModelLoader(plugin.AssetLoader): asset = context["asset"]["name"] subset = context["subset"]["name"] - lib_container = plugin.asset_name( - asset, subset - ) - unique_number = plugin.get_unique_number( - asset, subset - ) + asset_name = plugin.asset_name(asset, subset) + unique_number = plugin.get_unique_number(asset, subset) + group_name = plugin.asset_name(asset, subset, unique_number) namespace = namespace or f"{asset}_{unique_number}" - container_name = plugin.asset_name( - asset, subset, unique_number - ) - container = bpy.data.collections.new(lib_container) - container.name = container_name - blender.pipeline.containerise_existing( - container, - name, - namespace, - context, - self.__class__.__name__, - ) + avalon_container = bpy.data.collections.get(AVALON_CONTAINERS) + if not avalon_container: + avalon_container = bpy.data.collections.new(name=AVALON_CONTAINERS) + bpy.context.scene.collection.children.link(avalon_container) - container_metadata = container.get( - blender.pipeline.AVALON_PROPERTY) + asset_group = bpy.data.objects.new(group_name, object_data=None) + avalon_container.objects.link(asset_group) - container_metadata["libpath"] = libpath - container_metadata["lib_container"] = lib_container + objects = self._process(libpath, asset_group, group_name) - obj_container = self._process( - 
libpath, container_name, None) + bpy.context.scene.collection.objects.link(asset_group) - container_metadata["obj_container"] = obj_container + asset_group[AVALON_PROPERTY] = { + "schema": "openpype:container-2.0", + "id": AVALON_CONTAINER_ID, + "name": name, + "namespace": namespace or '', + "loader": str(self.__class__.__name__), + "representation": str(context["representation"]["_id"]), + "libpath": libpath, + "asset_name": asset_name, + "parent": str(context["representation"]["parent"]), + "family": context["representation"]["context"]["family"], + "objectName": group_name + } - # Save the list of objects in the metadata container - container_metadata["objects"] = obj_container.all_objects + self[:] = objects + return objects - nodes = list(container.objects) - nodes.append(container) - self[:] = nodes - return nodes - - def update(self, container: Dict, representation: Dict): + def exec_update(self, container: Dict, representation: Dict): """Update the loaded asset. This will remove all objects of the current collection, load the new @@ -143,9 +175,8 @@ class CacheModelLoader(plugin.AssetLoader): Warning: No nested collections are supported at the moment! """ - collection = bpy.data.collections.get( - container["objectName"] - ) + object_name = container["objectName"] + asset_group = bpy.data.objects.get(object_name) libpath = Path(api.get_representation_path(representation)) extension = libpath.suffix.lower() @@ -155,12 +186,9 @@ class CacheModelLoader(plugin.AssetLoader): pformat(representation, indent=2), ) - assert collection, ( + assert asset_group, ( f"The asset is not loaded: {container['objectName']}" ) - assert not (collection.children), ( - "Nested collections are not supported." 
- ) assert libpath, ( "No existing library file found for {container['objectName']}" ) @@ -171,45 +199,34 @@ class CacheModelLoader(plugin.AssetLoader): f"Unsupported file: {libpath}" ) - collection_metadata = collection.get( - blender.pipeline.AVALON_PROPERTY) - collection_libpath = collection_metadata["libpath"] + metadata = asset_group.get(AVALON_PROPERTY) + group_libpath = metadata["libpath"] - obj_container = plugin.get_local_collection_with_name( - collection_metadata["obj_container"].name - ) - objects = obj_container.all_objects - - container_name = obj_container.name - - normalized_collection_libpath = ( - str(Path(bpy.path.abspath(collection_libpath)).resolve()) + normalized_group_libpath = ( + str(Path(bpy.path.abspath(group_libpath)).resolve()) ) normalized_libpath = ( str(Path(bpy.path.abspath(str(libpath))).resolve()) ) self.log.debug( - "normalized_collection_libpath:\n %s\nnormalized_libpath:\n %s", - normalized_collection_libpath, + "normalized_group_libpath:\n %s\nnormalized_libpath:\n %s", + normalized_group_libpath, normalized_libpath, ) - if normalized_collection_libpath == normalized_libpath: + if normalized_group_libpath == normalized_libpath: self.log.info("Library already loaded, not updating...") return - parent = plugin.get_parent_collection(obj_container) + mat = asset_group.matrix_basis.copy() + self._remove(asset_group) - self._remove(objects, obj_container) + self._process(str(libpath), asset_group, object_name) + asset_group.matrix_basis = mat - obj_container = self._process( - str(libpath), container_name, parent) + metadata["libpath"] = str(libpath) + metadata["representation"] = str(representation["_id"]) - collection_metadata["obj_container"] = obj_container - collection_metadata["objects"] = obj_container.all_objects - collection_metadata["libpath"] = str(libpath) - collection_metadata["representation"] = str(representation["_id"]) - - def remove(self, container: Dict) -> bool: + def exec_remove(self, container: Dict) -> bool: 
"""Remove an existing container from a Blender scene. Arguments: @@ -222,25 +239,14 @@ class CacheModelLoader(plugin.AssetLoader): Warning: No nested collections are supported at the moment! """ - collection = bpy.data.collections.get( - container["objectName"] - ) - if not collection: + object_name = container["objectName"] + asset_group = bpy.data.objects.get(object_name) + + if not asset_group: return False - assert not (collection.children), ( - "Nested collections are not supported." - ) - collection_metadata = collection.get( - blender.pipeline.AVALON_PROPERTY) + self._remove(asset_group) - obj_container = plugin.get_local_collection_with_name( - collection_metadata["obj_container"].name - ) - objects = obj_container.all_objects - - self._remove(objects, obj_container) - - bpy.data.collections.remove(collection) + bpy.data.objects.remove(asset_group) return True diff --git a/openpype/hosts/blender/plugins/load/load_animation.py b/openpype/hosts/blender/plugins/load/load_animation.py index 4025fdfa74..47c48248b2 100644 --- a/openpype/hosts/blender/plugins/load/load_animation.py +++ b/openpype/hosts/blender/plugins/load/load_animation.py @@ -1,20 +1,19 @@ """Load an animation in Blender.""" import logging -from pathlib import Path -from pprint import pformat from typing import Dict, List, Optional -from avalon import api, blender import bpy -import openpype.hosts.blender.api.plugin + +from avalon.blender.pipeline import AVALON_PROPERTY +from openpype.hosts.blender.api import plugin logger = logging.getLogger("openpype").getChild( "blender").getChild("load_animation") -class BlendAnimationLoader(openpype.hosts.blender.api.plugin.AssetLoader): +class BlendAnimationLoader(plugin.AssetLoader): """Load animations from a .blend file. 
Warning: @@ -29,67 +28,6 @@ class BlendAnimationLoader(openpype.hosts.blender.api.plugin.AssetLoader): icon = "code-fork" color = "orange" - def _remove(self, objects, lib_container): - for obj in list(objects): - if obj.type == 'ARMATURE': - bpy.data.armatures.remove(obj.data) - elif obj.type == 'MESH': - bpy.data.meshes.remove(obj.data) - - bpy.data.collections.remove(bpy.data.collections[lib_container]) - - def _process(self, libpath, lib_container, container_name): - - relative = bpy.context.preferences.filepaths.use_relative_paths - with bpy.data.libraries.load( - libpath, link=True, relative=relative - ) as (_, data_to): - data_to.collections = [lib_container] - - scene = bpy.context.scene - - scene.collection.children.link(bpy.data.collections[lib_container]) - - anim_container = scene.collection.children[lib_container].make_local() - - meshes = [obj for obj in anim_container.objects if obj.type == 'MESH'] - armatures = [ - obj for obj in anim_container.objects if obj.type == 'ARMATURE'] - - # Should check if there is only an armature? - - objects_list = [] - - # Link meshes first, then armatures. - # The armature is unparented for all the non-local meshes, - # when it is made local. 
- for obj in meshes + armatures: - - obj = obj.make_local() - - obj.data.make_local() - - anim_data = obj.animation_data - - if anim_data is not None and anim_data.action is not None: - - anim_data.action.make_local() - - if not obj.get(blender.pipeline.AVALON_PROPERTY): - - obj[blender.pipeline.AVALON_PROPERTY] = dict() - - avalon_info = obj[blender.pipeline.AVALON_PROPERTY] - avalon_info.update({"container_name": container_name}) - - objects_list.append(obj) - - anim_container.pop(blender.pipeline.AVALON_PROPERTY) - - bpy.ops.object.select_all(action='DESELECT') - - return objects_list - def process_asset( self, context: dict, name: str, namespace: Optional[str] = None, options: Optional[Dict] = None @@ -101,148 +39,32 @@ class BlendAnimationLoader(openpype.hosts.blender.api.plugin.AssetLoader): context: Full parenthood of representation to load options: Additional settings dictionary """ - libpath = self.fname - asset = context["asset"]["name"] - subset = context["subset"]["name"] - lib_container = openpype.hosts.blender.api.plugin.asset_name(asset, subset) - container_name = openpype.hosts.blender.api.plugin.asset_name( - asset, subset, namespace - ) - container = bpy.data.collections.new(lib_container) - container.name = container_name - blender.pipeline.containerise_existing( - container, - name, - namespace, - context, - self.__class__.__name__, - ) + with bpy.data.libraries.load( + libpath, link=True, relative=False + ) as (data_from, data_to): + data_to.objects = data_from.objects + data_to.actions = data_from.actions - container_metadata = container.get( - blender.pipeline.AVALON_PROPERTY) + container = data_to.objects[0] - container_metadata["libpath"] = libpath - container_metadata["lib_container"] = lib_container + assert container, "No asset group found" - objects_list = self._process( - libpath, lib_container, container_name) + target_namespace = container.get(AVALON_PROPERTY).get('namespace') - # Save the list of objects in the metadata container - 
container_metadata["objects"] = objects_list + action = data_to.actions[0].make_local().copy() - nodes = list(container.objects) - nodes.append(container) - self[:] = nodes - return nodes + for obj in bpy.data.objects: + if obj.get(AVALON_PROPERTY) and obj.get(AVALON_PROPERTY).get( + 'namespace') == target_namespace: + if obj.children[0]: + if not obj.children[0].animation_data: + obj.children[0].animation_data_create() + obj.children[0].animation_data.action = action + break - def update(self, container: Dict, representation: Dict): - """Update the loaded asset. + bpy.data.objects.remove(container) - This will remove all objects of the current collection, load the new - ones and add them to the collection. - If the objects of the collection are used in another collection they - will not be removed, only unlinked. Normally this should not be the - case though. - - Warning: - No nested collections are supported at the moment! - """ - - collection = bpy.data.collections.get( - container["objectName"] - ) - - libpath = Path(api.get_representation_path(representation)) - extension = libpath.suffix.lower() - - logger.info( - "Container: %s\nRepresentation: %s", - pformat(container, indent=2), - pformat(representation, indent=2), - ) - - assert collection, ( - f"The asset is not loaded: {container['objectName']}" - ) - assert not (collection.children), ( - "Nested collections are not supported." 
- ) - assert libpath, ( - "No existing library file found for {container['objectName']}" - ) - assert libpath.is_file(), ( - f"The file doesn't exist: {libpath}" - ) - assert extension in openpype.hosts.blender.api.plugin.VALID_EXTENSIONS, ( - f"Unsupported file: {libpath}" - ) - - collection_metadata = collection.get( - blender.pipeline.AVALON_PROPERTY) - - collection_libpath = collection_metadata["libpath"] - normalized_collection_libpath = ( - str(Path(bpy.path.abspath(collection_libpath)).resolve()) - ) - normalized_libpath = ( - str(Path(bpy.path.abspath(str(libpath))).resolve()) - ) - logger.debug( - "normalized_collection_libpath:\n %s\nnormalized_libpath:\n %s", - normalized_collection_libpath, - normalized_libpath, - ) - if normalized_collection_libpath == normalized_libpath: - logger.info("Library already loaded, not updating...") - return - - objects = collection_metadata["objects"] - lib_container = collection_metadata["lib_container"] - - self._remove(objects, lib_container) - - objects_list = self._process( - str(libpath), lib_container, collection.name) - - # Save the list of objects in the metadata container - collection_metadata["objects"] = objects_list - collection_metadata["libpath"] = str(libpath) - collection_metadata["representation"] = str(representation["_id"]) - - bpy.ops.object.select_all(action='DESELECT') - - def remove(self, container: Dict) -> bool: - """Remove an existing container from a Blender scene. - - Arguments: - container (openpype:container-1.0): Container to remove, - from `host.ls()`. - - Returns: - bool: Whether the container was deleted. - - Warning: - No nested collections are supported at the moment! - """ - - collection = bpy.data.collections.get( - container["objectName"] - ) - if not collection: - return False - assert not (collection.children), ( - "Nested collections are not supported." 
- ) - - collection_metadata = collection.get( - blender.pipeline.AVALON_PROPERTY) - objects = collection_metadata["objects"] - lib_container = collection_metadata["lib_container"] - - self._remove(objects, lib_container) - - bpy.data.collections.remove(collection) - - return True + library = bpy.data.libraries.get(bpy.path.basename(libpath)) + bpy.data.libraries.remove(library) diff --git a/openpype/hosts/blender/plugins/load/load_fbx.py b/openpype/hosts/blender/plugins/load/load_fbx.py new file mode 100644 index 0000000000..b80dc69adc --- /dev/null +++ b/openpype/hosts/blender/plugins/load/load_fbx.py @@ -0,0 +1,273 @@ +"""Load an asset in Blender from an Alembic file.""" + +from pathlib import Path +from pprint import pformat +from typing import Dict, List, Optional + +import bpy + +from avalon import api +from avalon.blender import lib +from avalon.blender.pipeline import AVALON_CONTAINERS +from avalon.blender.pipeline import AVALON_CONTAINER_ID +from avalon.blender.pipeline import AVALON_PROPERTY +from openpype.hosts.blender.api import plugin + + +class FbxModelLoader(plugin.AssetLoader): + """Load FBX models. + + Stores the imported asset in an empty named after the asset. 
+ """ + + families = ["model", "rig"] + representations = ["fbx"] + + label = "Load FBX" + icon = "code-fork" + color = "orange" + + def _remove(self, asset_group): + objects = list(asset_group.children) + + for obj in objects: + if obj.type == 'MESH': + for material_slot in list(obj.material_slots): + if material_slot.material: + bpy.data.materials.remove(material_slot.material) + bpy.data.meshes.remove(obj.data) + elif obj.type == 'ARMATURE': + objects.extend(obj.children) + bpy.data.armatures.remove(obj.data) + elif obj.type == 'CURVE': + bpy.data.curves.remove(obj.data) + elif obj.type == 'EMPTY': + objects.extend(obj.children) + bpy.data.objects.remove(obj) + + def _process(self, libpath, asset_group, group_name, action): + bpy.ops.object.select_all(action='DESELECT') + + collection = bpy.context.view_layer.active_layer_collection.collection + + bpy.ops.import_scene.fbx(filepath=libpath) + + parent = bpy.context.scene.collection + + imported = lib.get_selection() + + empties = [obj for obj in imported if obj.type == 'EMPTY'] + + container = None + + for empty in empties: + if not empty.parent: + container = empty + break + + assert container, "No asset group found" + + # Children must be linked before parents, + # otherwise the hierarchy will break + objects = [] + nodes = list(container.children) + + for obj in nodes: + obj.parent = asset_group + + bpy.data.objects.remove(container) + + for obj in nodes: + objects.append(obj) + nodes.extend(list(obj.children)) + + objects.reverse() + + for obj in objects: + parent.objects.link(obj) + collection.objects.unlink(obj) + + for obj in objects: + name = obj.name + obj.name = f"{group_name}:{name}" + if obj.type != 'EMPTY': + name_data = obj.data.name + obj.data.name = f"{group_name}:{name_data}" + + if obj.type == 'MESH': + for material_slot in obj.material_slots: + name_mat = material_slot.material.name + material_slot.material.name = f"{group_name}:{name_mat}" + elif obj.type == 'ARMATURE': + anim_data = 
obj.animation_data + if action is not None: + anim_data.action = action + elif anim_data.action is not None: + name_action = anim_data.action.name + anim_data.action.name = f"{group_name}:{name_action}" + + if not obj.get(AVALON_PROPERTY): + obj[AVALON_PROPERTY] = dict() + + avalon_info = obj[AVALON_PROPERTY] + avalon_info.update({"container_name": group_name}) + + bpy.ops.object.select_all(action='DESELECT') + + return objects + + def process_asset( + self, context: dict, name: str, namespace: Optional[str] = None, + options: Optional[Dict] = None + ) -> Optional[List]: + """ + Arguments: + name: Use pre-defined name + namespace: Use pre-defined namespace + context: Full parenthood of representation to load + options: Additional settings dictionary + """ + libpath = self.fname + asset = context["asset"]["name"] + subset = context["subset"]["name"] + + asset_name = plugin.asset_name(asset, subset) + unique_number = plugin.get_unique_number(asset, subset) + group_name = plugin.asset_name(asset, subset, unique_number) + namespace = namespace or f"{asset}_{unique_number}" + + avalon_container = bpy.data.collections.get(AVALON_CONTAINERS) + if not avalon_container: + avalon_container = bpy.data.collections.new(name=AVALON_CONTAINERS) + bpy.context.scene.collection.children.link(avalon_container) + + asset_group = bpy.data.objects.new(group_name, object_data=None) + avalon_container.objects.link(asset_group) + + objects = self._process(libpath, asset_group, group_name, None) + + objects = [] + nodes = list(asset_group.children) + + for obj in nodes: + objects.append(obj) + nodes.extend(list(obj.children)) + + bpy.context.scene.collection.objects.link(asset_group) + + asset_group[AVALON_PROPERTY] = { + "schema": "openpype:container-2.0", + "id": AVALON_CONTAINER_ID, + "name": name, + "namespace": namespace or '', + "loader": str(self.__class__.__name__), + "representation": str(context["representation"]["_id"]), + "libpath": libpath, + "asset_name": asset_name, + 
"parent": str(context["representation"]["parent"]), + "family": context["representation"]["context"]["family"], + "objectName": group_name + } + + self[:] = objects + return objects + + def exec_update(self, container: Dict, representation: Dict): + """Update the loaded asset. + + This will remove all objects of the current collection, load the new + ones and add them to the collection. + If the objects of the collection are used in another collection they + will not be removed, only unlinked. Normally this should not be the + case though. + + Warning: + No nested collections are supported at the moment! + """ + object_name = container["objectName"] + asset_group = bpy.data.objects.get(object_name) + libpath = Path(api.get_representation_path(representation)) + extension = libpath.suffix.lower() + + self.log.info( + "Container: %s\nRepresentation: %s", + pformat(container, indent=2), + pformat(representation, indent=2), + ) + + assert asset_group, ( + f"The asset is not loaded: {container['objectName']}" + ) + assert libpath, ( + "No existing library file found for {container['objectName']}" + ) + assert libpath.is_file(), ( + f"The file doesn't exist: {libpath}" + ) + assert extension in plugin.VALID_EXTENSIONS, ( + f"Unsupported file: {libpath}" + ) + + metadata = asset_group.get(AVALON_PROPERTY) + group_libpath = metadata["libpath"] + + normalized_group_libpath = ( + str(Path(bpy.path.abspath(group_libpath)).resolve()) + ) + normalized_libpath = ( + str(Path(bpy.path.abspath(str(libpath))).resolve()) + ) + self.log.debug( + "normalized_group_libpath:\n %s\nnormalized_libpath:\n %s", + normalized_group_libpath, + normalized_libpath, + ) + if normalized_group_libpath == normalized_libpath: + self.log.info("Library already loaded, not updating...") + return + + # Get the armature of the rig + objects = asset_group.children + armatures = [obj for obj in objects if obj.type == 'ARMATURE'] + action = None + + if armatures: + armature = armatures[0] + + if 
armature.animation_data and armature.animation_data.action: + action = armature.animation_data.action + + mat = asset_group.matrix_basis.copy() + self._remove(asset_group) + + self._process(str(libpath), asset_group, object_name, action) + + asset_group.matrix_basis = mat + + metadata["libpath"] = str(libpath) + metadata["representation"] = str(representation["_id"]) + + def exec_remove(self, container: Dict) -> bool: + """Remove an existing container from a Blender scene. + + Arguments: + container (openpype:container-1.0): Container to remove, + from `host.ls()`. + + Returns: + bool: Whether the container was deleted. + + Warning: + No nested collections are supported at the moment! + """ + object_name = container["objectName"] + asset_group = bpy.data.objects.get(object_name) + + if not asset_group: + return False + + self._remove(asset_group) + + bpy.data.objects.remove(asset_group) + + return True diff --git a/openpype/hosts/blender/plugins/load/load_layout.py b/openpype/hosts/blender/plugins/load/load_layout.py deleted file mode 100644 index 2092be9139..0000000000 --- a/openpype/hosts/blender/plugins/load/load_layout.py +++ /dev/null @@ -1,664 +0,0 @@ -"""Load a layout in Blender.""" - -import json -from logging import log, warning -import math - -import logging -from pathlib import Path -from pprint import pformat -from typing import Dict, List, Optional - -from avalon import api, blender, pipeline -import bpy -import openpype.hosts.blender.api.plugin as plugin -from openpype.lib import get_creator_by_name - - -class BlendLayoutLoader(plugin.AssetLoader): - """Load layout from a .blend file.""" - - families = ["layout"] - representations = ["blend"] - - label = "Link Layout" - icon = "code-fork" - color = "orange" - - def _remove(self, objects, obj_container): - for obj in list(objects): - if obj.type == 'ARMATURE': - bpy.data.armatures.remove(obj.data) - elif obj.type == 'MESH': - bpy.data.meshes.remove(obj.data) - elif obj.type == 'CAMERA': - 
bpy.data.cameras.remove(obj.data) - elif obj.type == 'CURVE': - bpy.data.curves.remove(obj.data) - - for element_container in obj_container.children: - for child in element_container.children: - bpy.data.collections.remove(child) - bpy.data.collections.remove(element_container) - - bpy.data.collections.remove(obj_container) - - def _process(self, libpath, lib_container, container_name, actions): - relative = bpy.context.preferences.filepaths.use_relative_paths - with bpy.data.libraries.load( - libpath, link=True, relative=relative - ) as (_, data_to): - data_to.collections = [lib_container] - - scene = bpy.context.scene - - scene.collection.children.link(bpy.data.collections[lib_container]) - - layout_container = scene.collection.children[lib_container].make_local() - layout_container.name = container_name - - objects_local_types = ['MESH', 'CAMERA', 'CURVE'] - - objects = [] - armatures = [] - - containers = list(layout_container.children) - - for container in layout_container.children: - if container.name == blender.pipeline.AVALON_CONTAINERS: - containers.remove(container) - - for container in containers: - container.make_local() - objects.extend([ - obj for obj in container.objects - if obj.type in objects_local_types - ]) - armatures.extend([ - obj for obj in container.objects - if obj.type == 'ARMATURE' - ]) - containers.extend(list(container.children)) - - # Link meshes first, then armatures. - # The armature is unparented for all the non-local meshes, - # when it is made local. 
- for obj in objects + armatures: - local_obj = obj.make_local() - if obj.data: - obj.data.make_local() - - if not local_obj.get(blender.pipeline.AVALON_PROPERTY): - local_obj[blender.pipeline.AVALON_PROPERTY] = dict() - - avalon_info = local_obj[blender.pipeline.AVALON_PROPERTY] - avalon_info.update({"container_name": container_name}) - - action = actions.get(local_obj.name, None) - - if local_obj.type == 'ARMATURE' and action is not None: - local_obj.animation_data.action = action - - layout_container.pop(blender.pipeline.AVALON_PROPERTY) - - bpy.ops.object.select_all(action='DESELECT') - - return layout_container - - def process_asset( - self, context: dict, name: str, namespace: Optional[str] = None, - options: Optional[Dict] = None - ) -> Optional[List]: - """ - Arguments: - name: Use pre-defined name - namespace: Use pre-defined namespace - context: Full parenthood of representation to load - options: Additional settings dictionary - """ - - libpath = self.fname - asset = context["asset"]["name"] - subset = context["subset"]["name"] - lib_container = plugin.asset_name( - asset, subset - ) - unique_number = plugin.get_unique_number( - asset, subset - ) - namespace = namespace or f"{asset}_{unique_number}" - container_name = plugin.asset_name( - asset, subset, unique_number - ) - - container = bpy.data.collections.new(lib_container) - container.name = container_name - blender.pipeline.containerise_existing( - container, - name, - namespace, - context, - self.__class__.__name__, - ) - - container_metadata = container.get( - blender.pipeline.AVALON_PROPERTY) - - container_metadata["libpath"] = libpath - container_metadata["lib_container"] = lib_container - - obj_container = self._process( - libpath, lib_container, container_name, {}) - - container_metadata["obj_container"] = obj_container - - # Save the list of objects in the metadata container - container_metadata["objects"] = obj_container.all_objects - - # nodes = list(container.objects) - # 
nodes.append(container) - nodes = [container] - self[:] = nodes - return nodes - - def update(self, container: Dict, representation: Dict): - """Update the loaded asset. - - This will remove all objects of the current collection, load the new - ones and add them to the collection. - If the objects of the collection are used in another collection they - will not be removed, only unlinked. Normally this should not be the - case though. - - Warning: - No nested collections are supported at the moment! - """ - collection = bpy.data.collections.get( - container["objectName"] - ) - - libpath = Path(api.get_representation_path(representation)) - extension = libpath.suffix.lower() - - self.log.info( - "Container: %s\nRepresentation: %s", - pformat(container, indent=2), - pformat(representation, indent=2), - ) - - assert collection, ( - f"The asset is not loaded: {container['objectName']}" - ) - assert not (collection.children), ( - "Nested collections are not supported." - ) - assert libpath, ( - "No existing library file found for {container['objectName']}" - ) - assert libpath.is_file(), ( - f"The file doesn't exist: {libpath}" - ) - assert extension in plugin.VALID_EXTENSIONS, ( - f"Unsupported file: {libpath}" - ) - - collection_metadata = collection.get( - blender.pipeline.AVALON_PROPERTY) - collection_libpath = collection_metadata["libpath"] - objects = collection_metadata["objects"] - lib_container = collection_metadata["lib_container"] - obj_container = collection_metadata["obj_container"] - - normalized_collection_libpath = ( - str(Path(bpy.path.abspath(collection_libpath)).resolve()) - ) - normalized_libpath = ( - str(Path(bpy.path.abspath(str(libpath))).resolve()) - ) - self.log.debug( - "normalized_collection_libpath:\n %s\nnormalized_libpath:\n %s", - normalized_collection_libpath, - normalized_libpath, - ) - if normalized_collection_libpath == normalized_libpath: - self.log.info("Library already loaded, not updating...") - return - - actions = {} - - for obj 
in objects: - if obj.type == 'ARMATURE': - if obj.animation_data and obj.animation_data.action: - actions[obj.name] = obj.animation_data.action - - self._remove(objects, obj_container) - - obj_container = self._process( - str(libpath), lib_container, collection.name, actions) - - # Save the list of objects in the metadata container - collection_metadata["obj_container"] = obj_container - collection_metadata["objects"] = obj_container.all_objects - collection_metadata["libpath"] = str(libpath) - collection_metadata["representation"] = str(representation["_id"]) - - bpy.ops.object.select_all(action='DESELECT') - - def remove(self, container: Dict) -> bool: - """Remove an existing container from a Blender scene. - - Arguments: - container (openpype:container-1.0): Container to remove, - from `host.ls()`. - - Returns: - bool: Whether the container was deleted. - - Warning: - No nested collections are supported at the moment! - """ - - collection = bpy.data.collections.get( - container["objectName"] - ) - if not collection: - return False - assert not (collection.children), ( - "Nested collections are not supported." 
- ) - - collection_metadata = collection.get( - blender.pipeline.AVALON_PROPERTY) - objects = collection_metadata["objects"] - obj_container = collection_metadata["obj_container"] - - self._remove(objects, obj_container) - - bpy.data.collections.remove(collection) - - return True - - -class UnrealLayoutLoader(plugin.AssetLoader): - """Load layout published from Unreal.""" - - families = ["layout"] - representations = ["json"] - - label = "Link Layout" - icon = "code-fork" - color = "orange" - - animation_creator_name = "CreateAnimation" - - def _remove_objects(self, objects): - for obj in list(objects): - if obj.type == 'ARMATURE': - bpy.data.armatures.remove(obj.data) - elif obj.type == 'MESH': - bpy.data.meshes.remove(obj.data) - elif obj.type == 'CAMERA': - bpy.data.cameras.remove(obj.data) - elif obj.type == 'CURVE': - bpy.data.curves.remove(obj.data) - else: - self.log.error( - f"Object {obj.name} of type {obj.type} not recognized.") - - def _remove_collections(self, collection): - if collection.children: - for child in collection.children: - self._remove_collections(child) - bpy.data.collections.remove(child) - - def _remove(self, layout_container): - layout_container_metadata = layout_container.get( - blender.pipeline.AVALON_PROPERTY) - - if layout_container.children: - for child in layout_container.children: - child_container = child.get(blender.pipeline.AVALON_PROPERTY) - child_container['objectName'] = child.name - api.remove(child_container) - - for c in bpy.data.collections: - metadata = c.get('avalon') - if metadata: - print("metadata.get('id')") - print(metadata.get('id')) - if metadata and metadata.get('id') == 'pyblish.avalon.instance': - print("metadata.get('dependencies')") - print(metadata.get('dependencies')) - print("layout_container_metadata.get('representation')") - print(layout_container_metadata.get('representation')) - if metadata.get('dependencies') == layout_container_metadata.get('representation'): - - for child in c.children: - 
bpy.data.collections.remove(child) - bpy.data.collections.remove(c) - break - - def _get_loader(self, loaders, family): - name = "" - if family == 'rig': - name = "BlendRigLoader" - elif family == 'model': - name = "BlendModelLoader" - - if name == "": - return None - - for loader in loaders: - if loader.__name__ == name: - return loader - - return None - - def set_transform(self, obj, transform): - location = transform.get('translation') - rotation = transform.get('rotation') - scale = transform.get('scale') - - # Y position is inverted in sign because Unreal and Blender have the - # Y axis mirrored - obj.location = ( - location.get('x'), - location.get('y'), - location.get('z') - ) - obj.rotation_euler = ( - rotation.get('x'), - rotation.get('y'), - rotation.get('z') - ) - obj.scale = ( - scale.get('x'), - scale.get('y'), - scale.get('z') - ) - - def _process( - self, libpath, layout_container, container_name, representation, - actions, parent_collection - ): - with open(libpath, "r") as fp: - data = json.load(fp) - - scene = bpy.context.scene - layout_collection = bpy.data.collections.new(container_name) - scene.collection.children.link(layout_collection) - - parent = parent_collection - - if parent is None: - parent = scene.collection - - all_loaders = api.discover(api.Loader) - - avalon_container = bpy.data.collections.get( - blender.pipeline.AVALON_CONTAINERS) - - for element in data: - reference = element.get('reference') - family = element.get('family') - - loaders = api.loaders_from_representation(all_loaders, reference) - loader = self._get_loader(loaders, family) - - if not loader: - continue - - instance_name = element.get('instance_name') - - element_container = api.load( - loader, - reference, - namespace=instance_name - ) - - if not element_container: - continue - - avalon_container.children.unlink(element_container) - layout_container.children.link(element_container) - - element_metadata = element_container.get( - blender.pipeline.AVALON_PROPERTY) - 
- # Unlink the object's collection from the scene collection and - # link it in the layout collection - element_collection = element_metadata.get('obj_container') - scene.collection.children.unlink(element_collection) - layout_collection.children.link(element_collection) - - objects = element_metadata.get('objects') - element_metadata['instance_name'] = instance_name - - objects_to_transform = [] - - creator_plugin = get_creator_by_name(self.animation_creator_name) - if not creator_plugin: - raise ValueError("Creator plugin \"{}\" was not found.".format( - self.animation_creator_name - )) - - if family == 'rig': - for o in objects: - if o.type == 'ARMATURE': - objects_to_transform.append(o) - # Create an animation subset for each rig - o.select_set(True) - asset = api.Session["AVALON_ASSET"] - c = api.create( - creator_plugin, - name="animation_" + element_collection.name, - asset=asset, - options={"useSelection": True}, - data={"dependencies": representation}) - scene.collection.children.unlink(c) - parent.children.link(c) - o.select_set(False) - break - elif family == 'model': - objects_to_transform = objects - - for o in objects_to_transform: - self.set_transform(o, element.get('transform')) - - if actions: - if o.type == 'ARMATURE': - action = actions.get(instance_name, None) - - if action: - if o.animation_data is None: - o.animation_data_create() - o.animation_data.action = action - - return layout_collection - - def process_asset(self, - context: dict, - name: str, - namespace: Optional[str] = None, - options: Optional[Dict] = None): - """ - Arguments: - name: Use pre-defined name - namespace: Use pre-defined namespace - context: Full parenthood of representation to load - options: Additional settings dictionary - """ - libpath = self.fname - asset = context["asset"]["name"] - subset = context["subset"]["name"] - lib_container = plugin.asset_name( - asset, subset - ) - unique_number = plugin.get_unique_number( - asset, subset - ) - namespace = namespace or 
f"{asset}_{unique_number}" - container_name = plugin.asset_name( - asset, subset, unique_number - ) - - layout_container = bpy.data.collections.new(container_name) - blender.pipeline.containerise_existing( - layout_container, - name, - namespace, - context, - self.__class__.__name__, - ) - - container_metadata = layout_container.get( - blender.pipeline.AVALON_PROPERTY) - - container_metadata["libpath"] = libpath - container_metadata["lib_container"] = lib_container - - layout_collection = self._process( - libpath, layout_container, container_name, - str(context["representation"]["_id"]), None, None) - - container_metadata["obj_container"] = layout_collection - - # Save the list of objects in the metadata container - container_metadata["objects"] = layout_collection.all_objects - - nodes = [layout_container] - self[:] = nodes - return nodes - - def update(self, container: Dict, representation: Dict): - """Update the loaded asset. - - This will remove all objects of the current collection, load the new - ones and add them to the collection. - If the objects of the collection are used in another collection they - will not be removed, only unlinked. Normally this should not be the - case though. 
- """ - layout_container = bpy.data.collections.get( - container["objectName"] - ) - if not layout_container: - return False - - libpath = Path(api.get_representation_path(representation)) - extension = libpath.suffix.lower() - - self.log.info( - "Container: %s\nRepresentation: %s", - pformat(container, indent=2), - pformat(representation, indent=2), - ) - - assert layout_container, ( - f"The asset is not loaded: {container['objectName']}" - ) - assert libpath, ( - "No existing library file found for {container['objectName']}" - ) - assert libpath.is_file(), ( - f"The file doesn't exist: {libpath}" - ) - assert extension in plugin.VALID_EXTENSIONS, ( - f"Unsupported file: {libpath}" - ) - - layout_container_metadata = layout_container.get( - blender.pipeline.AVALON_PROPERTY) - collection_libpath = layout_container_metadata["libpath"] - lib_container = layout_container_metadata["lib_container"] - obj_container = plugin.get_local_collection_with_name( - layout_container_metadata["obj_container"].name - ) - objects = obj_container.all_objects - - container_name = obj_container.name - - normalized_collection_libpath = ( - str(Path(bpy.path.abspath(collection_libpath)).resolve()) - ) - normalized_libpath = ( - str(Path(bpy.path.abspath(str(libpath))).resolve()) - ) - self.log.debug( - "normalized_collection_libpath:\n %s\nnormalized_libpath:\n %s", - normalized_collection_libpath, - normalized_libpath, - ) - if normalized_collection_libpath == normalized_libpath: - self.log.info("Library already loaded, not updating...") - return - - actions = {} - - for obj in objects: - if obj.type == 'ARMATURE': - if obj.animation_data and obj.animation_data.action: - obj_cont_name = obj.get( - blender.pipeline.AVALON_PROPERTY).get('container_name') - obj_cont = plugin.get_local_collection_with_name( - obj_cont_name) - element_metadata = obj_cont.get( - blender.pipeline.AVALON_PROPERTY) - instance_name = element_metadata.get('instance_name') - actions[instance_name] = 
obj.animation_data.action - - self._remove(layout_container) - - bpy.data.collections.remove(obj_container) - - creator_plugin = get_creator_by_name(self.setdress_creator_name) - if not creator_plugin: - raise ValueError("Creator plugin \"{}\" was not found.".format( - self.setdress_creator_name - )) - - parent = api.create( - creator_plugin, - name="animation", - asset=api.Session["AVALON_ASSET"], - options={"useSelection": True}, - data={"dependencies": str(representation["_id"])}) - - layout_collection = self._process( - libpath, layout_container, container_name, - str(representation["_id"]), actions, parent) - - layout_container_metadata["obj_container"] = layout_collection - layout_container_metadata["objects"] = layout_collection.all_objects - layout_container_metadata["libpath"] = str(libpath) - layout_container_metadata["representation"] = str( - representation["_id"]) - - def remove(self, container: Dict) -> bool: - """Remove an existing container from a Blender scene. - - Arguments: - container (openpype:container-1.0): Container to remove, - from `host.ls()`. - - Returns: - bool: Whether the container was deleted. 
- """ - layout_container = bpy.data.collections.get( - container["objectName"] - ) - if not layout_container: - return False - - layout_container_metadata = layout_container.get( - blender.pipeline.AVALON_PROPERTY) - obj_container = plugin.get_local_collection_with_name( - layout_container_metadata["obj_container"].name - ) - - self._remove(layout_container) - - bpy.data.collections.remove(obj_container) - bpy.data.collections.remove(layout_container) - - return True diff --git a/openpype/hosts/blender/plugins/load/load_layout_blend.py b/openpype/hosts/blender/plugins/load/load_layout_blend.py new file mode 100644 index 0000000000..85cb4dfbd3 --- /dev/null +++ b/openpype/hosts/blender/plugins/load/load_layout_blend.py @@ -0,0 +1,337 @@ +"""Load a layout in Blender.""" + +from pathlib import Path +from pprint import pformat +from typing import Dict, List, Optional + +import bpy + +from avalon import api +from avalon.blender.pipeline import AVALON_CONTAINERS +from avalon.blender.pipeline import AVALON_CONTAINER_ID +from avalon.blender.pipeline import AVALON_PROPERTY +from openpype.hosts.blender.api import plugin + + +class BlendLayoutLoader(plugin.AssetLoader): + """Load layout from a .blend file.""" + + families = ["layout"] + representations = ["blend"] + + label = "Link Layout" + icon = "code-fork" + color = "orange" + + def _remove(self, asset_group): + objects = list(asset_group.children) + + for obj in objects: + if obj.type == 'MESH': + for material_slot in list(obj.material_slots): + if material_slot.material: + bpy.data.materials.remove(material_slot.material) + bpy.data.meshes.remove(obj.data) + elif obj.type == 'ARMATURE': + objects.extend(obj.children) + bpy.data.armatures.remove(obj.data) + elif obj.type == 'CURVE': + bpy.data.curves.remove(obj.data) + elif obj.type == 'EMPTY': + objects.extend(obj.children) + bpy.data.objects.remove(obj) + + def _remove_asset_and_library(self, asset_group): + libpath = asset_group.get(AVALON_PROPERTY).get('libpath') + + 
# Check how many assets use the same library + count = 0 + for obj in bpy.data.collections.get(AVALON_CONTAINERS).all_objects: + if obj.get(AVALON_PROPERTY).get('libpath') == libpath: + count += 1 + + self._remove(asset_group) + + bpy.data.objects.remove(asset_group) + + # If it is the last object to use that library, remove it + if count == 1: + library = bpy.data.libraries.get(bpy.path.basename(libpath)) + bpy.data.libraries.remove(library) + + def _process(self, libpath, asset_group, group_name, actions): + with bpy.data.libraries.load( + libpath, link=True, relative=False + ) as (data_from, data_to): + data_to.objects = data_from.objects + + parent = bpy.context.scene.collection + + empties = [obj for obj in data_to.objects if obj.type == 'EMPTY'] + + container = None + + for empty in empties: + if empty.get(AVALON_PROPERTY): + container = empty + break + + assert container, "No asset group found" + + # Children must be linked before parents, + # otherwise the hierarchy will break + objects = [] + nodes = list(container.children) + + for obj in nodes: + obj.parent = asset_group + + for obj in nodes: + objects.append(obj) + nodes.extend(list(obj.children)) + + objects.reverse() + + constraints = [] + + armatures = [obj for obj in objects if obj.type == 'ARMATURE'] + + for armature in armatures: + for bone in armature.pose.bones: + for constraint in bone.constraints: + if hasattr(constraint, 'target'): + constraints.append(constraint) + + for obj in objects: + parent.objects.link(obj) + + for obj in objects: + local_obj = plugin.prepare_data(obj, group_name) + + action = None + + if actions: + action = actions.get(local_obj.name, None) + + if local_obj.type == 'MESH': + plugin.prepare_data(local_obj.data, group_name) + + if obj != local_obj: + for constraint in constraints: + if constraint.target == obj: + constraint.target = local_obj + + for material_slot in local_obj.material_slots: + if material_slot.material: + plugin.prepare_data(material_slot.material, 
group_name) + elif local_obj.type == 'ARMATURE': + plugin.prepare_data(local_obj.data, group_name) + + if action is not None: + local_obj.animation_data.action = action + elif local_obj.animation_data.action is not None: + plugin.prepare_data( + local_obj.animation_data.action, group_name) + + # Set link the drivers to the local object + if local_obj.data.animation_data: + for d in local_obj.data.animation_data.drivers: + for v in d.driver.variables: + for t in v.targets: + t.id = local_obj + + if not local_obj.get(AVALON_PROPERTY): + local_obj[AVALON_PROPERTY] = dict() + + avalon_info = local_obj[AVALON_PROPERTY] + avalon_info.update({"container_name": group_name}) + + objects.reverse() + + bpy.data.orphans_purge(do_local_ids=False) + + bpy.ops.object.select_all(action='DESELECT') + + return objects + + def process_asset( + self, context: dict, name: str, namespace: Optional[str] = None, + options: Optional[Dict] = None + ) -> Optional[List]: + """ + Arguments: + name: Use pre-defined name + namespace: Use pre-defined namespace + context: Full parenthood of representation to load + options: Additional settings dictionary + """ + libpath = self.fname + asset = context["asset"]["name"] + subset = context["subset"]["name"] + + asset_name = plugin.asset_name(asset, subset) + unique_number = plugin.get_unique_number(asset, subset) + group_name = plugin.asset_name(asset, subset, unique_number) + namespace = namespace or f"{asset}_{unique_number}" + + avalon_container = bpy.data.collections.get(AVALON_CONTAINERS) + if not avalon_container: + avalon_container = bpy.data.collections.new(name=AVALON_CONTAINERS) + bpy.context.scene.collection.children.link(avalon_container) + + asset_group = bpy.data.objects.new(group_name, object_data=None) + asset_group.empty_display_type = 'SINGLE_ARROW' + avalon_container.objects.link(asset_group) + + objects = self._process(libpath, asset_group, group_name, None) + + for child in asset_group.children: + if child.get(AVALON_PROPERTY): + 
avalon_container.objects.link(child) + + bpy.context.scene.collection.objects.link(asset_group) + + asset_group[AVALON_PROPERTY] = { + "schema": "openpype:container-2.0", + "id": AVALON_CONTAINER_ID, + "name": name, + "namespace": namespace or '', + "loader": str(self.__class__.__name__), + "representation": str(context["representation"]["_id"]), + "libpath": libpath, + "asset_name": asset_name, + "parent": str(context["representation"]["parent"]), + "family": context["representation"]["context"]["family"], + "objectName": group_name + } + + self[:] = objects + return objects + + def update(self, container: Dict, representation: Dict): + """Update the loaded asset. + + This will remove all objects of the current collection, load the new + ones and add them to the collection. + If the objects of the collection are used in another collection they + will not be removed, only unlinked. Normally this should not be the + case though. + + Warning: + No nested collections are supported at the moment! 
+ """ + object_name = container["objectName"] + asset_group = bpy.data.objects.get(object_name) + libpath = Path(api.get_representation_path(representation)) + extension = libpath.suffix.lower() + + self.log.info( + "Container: %s\nRepresentation: %s", + pformat(container, indent=2), + pformat(representation, indent=2), + ) + + assert asset_group, ( + f"The asset is not loaded: {container['objectName']}" + ) + assert libpath, ( + "No existing library file found for {container['objectName']}" + ) + assert libpath.is_file(), ( + f"The file doesn't exist: {libpath}" + ) + assert extension in plugin.VALID_EXTENSIONS, ( + f"Unsupported file: {libpath}" + ) + + metadata = asset_group.get(AVALON_PROPERTY) + group_libpath = metadata["libpath"] + + normalized_group_libpath = ( + str(Path(bpy.path.abspath(group_libpath)).resolve()) + ) + normalized_libpath = ( + str(Path(bpy.path.abspath(str(libpath))).resolve()) + ) + self.log.debug( + "normalized_group_libpath:\n %s\nnormalized_libpath:\n %s", + normalized_group_libpath, + normalized_libpath, + ) + if normalized_group_libpath == normalized_libpath: + self.log.info("Library already loaded, not updating...") + return + + actions = {} + + for obj in asset_group.children: + obj_meta = obj.get(AVALON_PROPERTY) + if obj_meta.get('family') == 'rig': + rig = None + for child in obj.children: + if child.type == 'ARMATURE': + rig = child + break + if not rig: + raise Exception("No armature in the rig asset group.") + if rig.animation_data and rig.animation_data.action: + instance_name = obj_meta.get('instance_name') + actions[instance_name] = rig.animation_data.action + + mat = asset_group.matrix_basis.copy() + + # Remove the children of the asset_group first + for child in list(asset_group.children): + self._remove_asset_and_library(child) + + # Check how many assets use the same library + count = 0 + for obj in bpy.data.collections.get(AVALON_CONTAINERS).objects: + if obj.get(AVALON_PROPERTY).get('libpath') == group_libpath: + 
count += 1 + + self._remove(asset_group) + + # If it is the last object to use that library, remove it + if count == 1: + library = bpy.data.libraries.get(bpy.path.basename(group_libpath)) + bpy.data.libraries.remove(library) + + self._process(str(libpath), asset_group, object_name, actions) + + avalon_container = bpy.data.collections.get(AVALON_CONTAINERS) + for child in asset_group.children: + if child.get(AVALON_PROPERTY): + avalon_container.objects.link(child) + + asset_group.matrix_basis = mat + + metadata["libpath"] = str(libpath) + metadata["representation"] = str(representation["_id"]) + + def exec_remove(self, container: Dict) -> bool: + """Remove an existing container from a Blender scene. + + Arguments: + container (openpype:container-1.0): Container to remove, + from `host.ls()`. + + Returns: + bool: Whether the container was deleted. + + Warning: + No nested collections are supported at the moment! + """ + object_name = container["objectName"] + asset_group = bpy.data.objects.get(object_name) + + if not asset_group: + return False + + # Remove the children of the asset_group first + for child in list(asset_group.children): + self._remove_asset_and_library(child) + + self._remove_asset_and_library(asset_group) + + return True diff --git a/openpype/hosts/blender/plugins/load/load_layout_json.py b/openpype/hosts/blender/plugins/load/load_layout_json.py new file mode 100644 index 0000000000..1a4dbbb5cb --- /dev/null +++ b/openpype/hosts/blender/plugins/load/load_layout_json.py @@ -0,0 +1,259 @@ +"""Load a layout in Blender.""" + +from pathlib import Path +from pprint import pformat +from typing import Dict, Optional + +import bpy +import json + +from avalon import api +from avalon.blender.pipeline import AVALON_CONTAINERS +from avalon.blender.pipeline import AVALON_CONTAINER_ID +from avalon.blender.pipeline import AVALON_PROPERTY +from avalon.blender.pipeline import AVALON_INSTANCES +from openpype.hosts.blender.api import plugin + + +class 
JsonLayoutLoader(plugin.AssetLoader): + """Load layout published from Unreal.""" + + families = ["layout"] + representations = ["json"] + + label = "Load Layout" + icon = "code-fork" + color = "orange" + + animation_creator_name = "CreateAnimation" + + def _remove(self, asset_group): + objects = list(asset_group.children) + + for obj in objects: + api.remove(obj.get(AVALON_PROPERTY)) + + def _remove_animation_instances(self, asset_group): + instances = bpy.data.collections.get(AVALON_INSTANCES) + if instances: + for obj in list(asset_group.children): + anim_collection = instances.children.get( + obj.name + "_animation") + if anim_collection: + bpy.data.collections.remove(anim_collection) + + def _get_loader(self, loaders, family): + name = "" + if family == 'rig': + name = "BlendRigLoader" + elif family == 'model': + name = "BlendModelLoader" + + if name == "": + return None + + for loader in loaders: + if loader.__name__ == name: + return loader + + return None + + def _process(self, libpath, asset, asset_group, actions): + bpy.ops.object.select_all(action='DESELECT') + + with open(libpath, "r") as fp: + data = json.load(fp) + + all_loaders = api.discover(api.Loader) + + for element in data: + reference = element.get('reference') + family = element.get('family') + + loaders = api.loaders_from_representation(all_loaders, reference) + loader = self._get_loader(loaders, family) + + if not loader: + continue + + instance_name = element.get('instance_name') + + action = None + + if actions: + action = actions.get(instance_name, None) + + options = { + 'parent': asset_group, + 'transform': element.get('transform'), + 'action': action, + 'create_animation': True if family == 'rig' else False, + 'animation_asset': asset + } + + # This should return the loaded asset, but the load call will be + # added to the queue to run in the Blender main thread, so + # at this time it will not return anything. 
The assets will be + # loaded in the next Blender cycle, so we use the options to + # set the transform, parent and assign the action, if there is one. + api.load( + loader, + reference, + namespace=instance_name, + options=options + ) + + def process_asset(self, + context: dict, + name: str, + namespace: Optional[str] = None, + options: Optional[Dict] = None): + """ + Arguments: + name: Use pre-defined name + namespace: Use pre-defined namespace + context: Full parenthood of representation to load + options: Additional settings dictionary + """ + libpath = self.fname + asset = context["asset"]["name"] + subset = context["subset"]["name"] + + asset_name = plugin.asset_name(asset, subset) + unique_number = plugin.get_unique_number(asset, subset) + group_name = plugin.asset_name(asset, subset, unique_number) + namespace = namespace or f"{asset}_{unique_number}" + + avalon_container = bpy.data.collections.get(AVALON_CONTAINERS) + if not avalon_container: + avalon_container = bpy.data.collections.new(name=AVALON_CONTAINERS) + bpy.context.scene.collection.children.link(avalon_container) + + asset_group = bpy.data.objects.new(group_name, object_data=None) + asset_group.empty_display_type = 'SINGLE_ARROW' + avalon_container.objects.link(asset_group) + + self._process(libpath, asset, asset_group, None) + + bpy.context.scene.collection.objects.link(asset_group) + + asset_group[AVALON_PROPERTY] = { + "schema": "openpype:container-2.0", + "id": AVALON_CONTAINER_ID, + "name": name, + "namespace": namespace or '', + "loader": str(self.__class__.__name__), + "representation": str(context["representation"]["_id"]), + "libpath": libpath, + "asset_name": asset_name, + "parent": str(context["representation"]["parent"]), + "family": context["representation"]["context"]["family"], + "objectName": group_name + } + + self[:] = asset_group.children + return asset_group.children + + def exec_update(self, container: Dict, representation: Dict): + """Update the loaded asset. 
 + + This will remove all objects of the current collection, load the new + ones and add them to the collection. + If the objects of the collection are used in another collection they + will not be removed, only unlinked. Normally this should not be the + case though. + """ + object_name = container["objectName"] + asset_group = bpy.data.objects.get(object_name) + libpath = Path(api.get_representation_path(representation)) + extension = libpath.suffix.lower() + + self.log.info( + "Container: %s\nRepresentation: %s", + pformat(container, indent=2), + pformat(representation, indent=2), + ) + + assert asset_group, ( + f"The asset is not loaded: {container['objectName']}" + ) + assert libpath, ( + f"No existing library file found for {container['objectName']}" + ) + assert libpath.is_file(), ( + f"The file doesn't exist: {libpath}" + ) + assert extension in plugin.VALID_EXTENSIONS, ( + f"Unsupported file: {libpath}" + ) + + metadata = asset_group.get(AVALON_PROPERTY) + group_libpath = metadata["libpath"] + + normalized_group_libpath = ( + str(Path(bpy.path.abspath(group_libpath)).resolve()) + ) + normalized_libpath = ( + str(Path(bpy.path.abspath(str(libpath))).resolve()) + ) + self.log.debug( + "normalized_group_libpath:\n %s\nnormalized_libpath:\n %s", + normalized_group_libpath, + normalized_libpath, + ) + if normalized_group_libpath == normalized_libpath: + self.log.info("Library already loaded, not updating...") + return + + actions = {} + + for obj in asset_group.children: + obj_meta = obj.get(AVALON_PROPERTY) + if obj_meta.get('family') == 'rig': + rig = None + for child in obj.children: + if child.type == 'ARMATURE': + rig = child + break + if not rig: + raise Exception("No armature in the rig asset group.") + if rig.animation_data and rig.animation_data.action: + namespace = obj_meta.get('namespace') + actions[namespace] = rig.animation_data.action + + mat = asset_group.matrix_basis.copy() + + self._remove_animation_instances(asset_group) + + 
self._remove(asset_group) + + self._process(str(libpath), asset_group, actions) + + asset_group.matrix_basis = mat + + metadata["libpath"] = str(libpath) + metadata["representation"] = str(representation["_id"]) + + def exec_remove(self, container: Dict) -> bool: + """Remove an existing container from a Blender scene. + + Arguments: + container (openpype:container-1.0): Container to remove, + from `host.ls()`. + + Returns: + bool: Whether the container was deleted. + """ + object_name = container["objectName"] + asset_group = bpy.data.objects.get(object_name) + + if not asset_group: + return False + + self._remove_animation_instances(asset_group) + + self._remove(asset_group) + + bpy.data.objects.remove(asset_group) + + return True diff --git a/openpype/hosts/blender/plugins/load/load_model.py b/openpype/hosts/blender/plugins/load/load_model.py index 35a241b98e..af5591c299 100644 --- a/openpype/hosts/blender/plugins/load/load_model.py +++ b/openpype/hosts/blender/plugins/load/load_model.py @@ -1,13 +1,16 @@ """Load a model asset in Blender.""" -import logging from pathlib import Path from pprint import pformat from typing import Dict, List, Optional -from avalon import api, blender import bpy -import openpype.hosts.blender.api.plugin as plugin + +from avalon import api +from avalon.blender.pipeline import AVALON_CONTAINERS +from avalon.blender.pipeline import AVALON_CONTAINER_ID +from avalon.blender.pipeline import AVALON_PROPERTY +from openpype.hosts.blender.api import plugin class BlendModelLoader(plugin.AssetLoader): @@ -24,52 +27,75 @@ class BlendModelLoader(plugin.AssetLoader): icon = "code-fork" color = "orange" - def _remove(self, objects, container): - for obj in list(objects): - for material_slot in list(obj.material_slots): - bpy.data.materials.remove(material_slot.material) - bpy.data.meshes.remove(obj.data) + def _remove(self, asset_group): + objects = list(asset_group.children) - bpy.data.collections.remove(container) + for obj in objects: + if 
obj.type == 'MESH': + for material_slot in list(obj.material_slots): + bpy.data.materials.remove(material_slot.material) + bpy.data.meshes.remove(obj.data) + elif obj.type == 'EMPTY': + objects.extend(obj.children) + bpy.data.objects.remove(obj) - def _process( - self, libpath, lib_container, container_name, - parent_collection - ): - relative = bpy.context.preferences.filepaths.use_relative_paths + def _process(self, libpath, asset_group, group_name): with bpy.data.libraries.load( - libpath, link=True, relative=relative - ) as (_, data_to): - data_to.collections = [lib_container] + libpath, link=True, relative=False + ) as (data_from, data_to): + data_to.objects = data_from.objects - parent = parent_collection + parent = bpy.context.scene.collection - if parent is None: - parent = bpy.context.scene.collection + empties = [obj for obj in data_to.objects if obj.type == 'EMPTY'] - parent.children.link(bpy.data.collections[lib_container]) + container = None - model_container = parent.children[lib_container].make_local() - model_container.name = container_name + for empty in empties: + if empty.get(AVALON_PROPERTY): + container = empty + break - for obj in model_container.objects: - local_obj = plugin.prepare_data(obj, container_name) - plugin.prepare_data(local_obj.data, container_name) + assert container, "No asset group found" - for material_slot in local_obj.material_slots: - plugin.prepare_data(material_slot.material, container_name) + # Children must be linked before parents, + # otherwise the hierarchy will break + objects = [] + nodes = list(container.children) - if not obj.get(blender.pipeline.AVALON_PROPERTY): - local_obj[blender.pipeline.AVALON_PROPERTY] = dict() + for obj in nodes: + obj.parent = asset_group - avalon_info = local_obj[blender.pipeline.AVALON_PROPERTY] - avalon_info.update({"container_name": container_name}) + for obj in nodes: + objects.append(obj) + nodes.extend(list(obj.children)) - model_container.pop(blender.pipeline.AVALON_PROPERTY) + 
objects.reverse() + + for obj in objects: + parent.objects.link(obj) + + for obj in objects: + local_obj = plugin.prepare_data(obj, group_name) + if local_obj.type != 'EMPTY': + plugin.prepare_data(local_obj.data, group_name) + + for material_slot in local_obj.material_slots: + plugin.prepare_data(material_slot.material, group_name) + + if not local_obj.get(AVALON_PROPERTY): + local_obj[AVALON_PROPERTY] = dict() + + avalon_info = local_obj[AVALON_PROPERTY] + avalon_info.update({"container_name": group_name}) + + objects.reverse() + + bpy.data.orphans_purge(do_local_ids=False) bpy.ops.object.select_all(action='DESELECT') - return model_container + return objects def process_asset( self, context: dict, name: str, namespace: Optional[str] = None, @@ -82,54 +108,80 @@ class BlendModelLoader(plugin.AssetLoader): context: Full parenthood of representation to load options: Additional settings dictionary """ - libpath = self.fname asset = context["asset"]["name"] subset = context["subset"]["name"] - lib_container = plugin.asset_name( - asset, subset - ) - unique_number = plugin.get_unique_number( - asset, subset - ) + asset_name = plugin.asset_name(asset, subset) + unique_number = plugin.get_unique_number(asset, subset) + group_name = plugin.asset_name(asset, subset, unique_number) namespace = namespace or f"{asset}_{unique_number}" - container_name = plugin.asset_name( - asset, subset, unique_number - ) - container = bpy.data.collections.new(lib_container) - container.name = container_name - blender.pipeline.containerise_existing( - container, - name, - namespace, - context, - self.__class__.__name__, - ) + avalon_container = bpy.data.collections.get(AVALON_CONTAINERS) + if not avalon_container: + avalon_container = bpy.data.collections.new(name=AVALON_CONTAINERS) + bpy.context.scene.collection.children.link(avalon_container) - metadata = container.get(blender.pipeline.AVALON_PROPERTY) + asset_group = bpy.data.objects.new(group_name, object_data=None) + 
asset_group.empty_display_type = 'SINGLE_ARROW' + avalon_container.objects.link(asset_group) - metadata["libpath"] = libpath - metadata["lib_container"] = lib_container + bpy.ops.object.select_all(action='DESELECT') - obj_container = self._process( - libpath, lib_container, container_name, None) + if options is not None: + parent = options.get('parent') + transform = options.get('transform') - metadata["obj_container"] = obj_container + if parent and transform: + location = transform.get('translation') + rotation = transform.get('rotation') + scale = transform.get('scale') - # Save the list of objects in the metadata container - metadata["objects"] = obj_container.all_objects + asset_group.location = ( + location.get('x'), + location.get('y'), + location.get('z') + ) + asset_group.rotation_euler = ( + rotation.get('x'), + rotation.get('y'), + rotation.get('z') + ) + asset_group.scale = ( + scale.get('x'), + scale.get('y'), + scale.get('z') + ) - metadata["parent"] = str(context["representation"]["parent"]) - metadata["family"] = context["representation"]["context"]["family"] + bpy.context.view_layer.objects.active = parent + asset_group.select_set(True) - nodes = list(container.objects) - nodes.append(container) - self[:] = nodes - return nodes + bpy.ops.object.parent_set(keep_transform=True) - def update(self, container: Dict, representation: Dict): + bpy.ops.object.select_all(action='DESELECT') + + objects = self._process(libpath, asset_group, group_name) + + bpy.context.scene.collection.objects.link(asset_group) + + asset_group[AVALON_PROPERTY] = { + "schema": "openpype:container-2.0", + "id": AVALON_CONTAINER_ID, + "name": name, + "namespace": namespace or '', + "loader": str(self.__class__.__name__), + "representation": str(context["representation"]["_id"]), + "libpath": libpath, + "asset_name": asset_name, + "parent": str(context["representation"]["parent"]), + "family": context["representation"]["context"]["family"], + "objectName": group_name + } + + 
self[:] = objects + return objects + + def exec_update(self, container: Dict, representation: Dict): """Update the loaded asset. This will remove all objects of the current collection, load the new @@ -137,13 +189,9 @@ class BlendModelLoader(plugin.AssetLoader): If the objects of the collection are used in another collection they will not be removed, only unlinked. Normally this should not be the case though. - - Warning: - No nested collections are supported at the moment! """ - collection = bpy.data.collections.get( - container["objectName"] - ) + object_name = container["objectName"] + asset_group = bpy.data.objects.get(object_name) libpath = Path(api.get_representation_path(representation)) extension = libpath.suffix.lower() @@ -153,12 +201,9 @@ class BlendModelLoader(plugin.AssetLoader): pformat(representation, indent=2), ) - assert collection, ( + assert asset_group, ( f"The asset is not loaded: {container['objectName']}" ) - assert not (collection.children), ( - "Nested collections are not supported." 
- ) assert libpath, ( "No existing library file found for {container['objectName']}" ) @@ -169,47 +214,47 @@ class BlendModelLoader(plugin.AssetLoader): f"Unsupported file: {libpath}" ) - collection_metadata = collection.get( - blender.pipeline.AVALON_PROPERTY) - collection_libpath = collection_metadata["libpath"] - lib_container = collection_metadata["lib_container"] + metadata = asset_group.get(AVALON_PROPERTY) + group_libpath = metadata["libpath"] - obj_container = plugin.get_local_collection_with_name( - collection_metadata["obj_container"].name - ) - objects = obj_container.all_objects - - container_name = obj_container.name - - normalized_collection_libpath = ( - str(Path(bpy.path.abspath(collection_libpath)).resolve()) + normalized_group_libpath = ( + str(Path(bpy.path.abspath(group_libpath)).resolve()) ) normalized_libpath = ( str(Path(bpy.path.abspath(str(libpath))).resolve()) ) self.log.debug( - "normalized_collection_libpath:\n %s\nnormalized_libpath:\n %s", - normalized_collection_libpath, + "normalized_group_libpath:\n %s\nnormalized_libpath:\n %s", + normalized_group_libpath, normalized_libpath, ) - if normalized_collection_libpath == normalized_libpath: + if normalized_group_libpath == normalized_libpath: self.log.info("Library already loaded, not updating...") return - parent = plugin.get_parent_collection(obj_container) + # Check how many assets use the same library + count = 0 + for obj in bpy.data.collections.get(AVALON_CONTAINERS).objects: + if obj.get(AVALON_PROPERTY).get('libpath') == group_libpath: + count += 1 - self._remove(objects, obj_container) + mat = asset_group.matrix_basis.copy() - obj_container = self._process( - str(libpath), lib_container, container_name, parent) + self._remove(asset_group) - # Save the list of objects in the metadata container - collection_metadata["obj_container"] = obj_container - collection_metadata["objects"] = obj_container.all_objects - collection_metadata["libpath"] = str(libpath) - 
collection_metadata["representation"] = str(representation["_id"]) + # If it is the last object to use that library, remove it + if count == 1: + library = bpy.data.libraries.get(bpy.path.basename(group_libpath)) + bpy.data.libraries.remove(library) - def remove(self, container: Dict) -> bool: + self._process(str(libpath), asset_group, object_name) + + asset_group.matrix_basis = mat + + metadata["libpath"] = str(libpath) + metadata["representation"] = str(representation["_id"]) + + def exec_remove(self, container: Dict) -> bool: """Remove an existing container from a Blender scene. Arguments: @@ -218,29 +263,27 @@ class BlendModelLoader(plugin.AssetLoader): Returns: bool: Whether the container was deleted. - - Warning: - No nested collections are supported at the moment! """ - collection = bpy.data.collections.get( - container["objectName"] - ) - if not collection: + object_name = container["objectName"] + asset_group = bpy.data.objects.get(object_name) + libpath = asset_group.get(AVALON_PROPERTY).get('libpath') + + # Check how many assets use the same library + count = 0 + for obj in bpy.data.collections.get(AVALON_CONTAINERS).objects: + if obj.get(AVALON_PROPERTY).get('libpath') == libpath: + count += 1 + + if not asset_group: return False - assert not (collection.children), ( - "Nested collections are not supported." 
- ) - collection_metadata = collection.get( - blender.pipeline.AVALON_PROPERTY) + self._remove(asset_group) - obj_container = plugin.get_local_collection_with_name( - collection_metadata["obj_container"].name - ) - objects = obj_container.all_objects + bpy.data.objects.remove(asset_group) - self._remove(objects, obj_container) - - bpy.data.collections.remove(collection) + # If it is the last object to use that library, remove it + if count == 1: + library = bpy.data.libraries.get(bpy.path.basename(libpath)) + bpy.data.libraries.remove(library) return True diff --git a/openpype/hosts/blender/plugins/load/load_rig.py b/openpype/hosts/blender/plugins/load/load_rig.py index b6be8f4cf6..5573c081e1 100644 --- a/openpype/hosts/blender/plugins/load/load_rig.py +++ b/openpype/hosts/blender/plugins/load/load_rig.py @@ -1,21 +1,21 @@ """Load a rig asset in Blender.""" -import logging from pathlib import Path from pprint import pformat from typing import Dict, List, Optional -from avalon import api, blender import bpy -import openpype.hosts.blender.api.plugin as plugin + +from avalon import api +from avalon.blender.pipeline import AVALON_CONTAINERS +from avalon.blender.pipeline import AVALON_CONTAINER_ID +from avalon.blender.pipeline import AVALON_PROPERTY +from openpype import lib +from openpype.hosts.blender.api import plugin class BlendRigLoader(plugin.AssetLoader): - """Load rigs from a .blend file. - - Because they come from a .blend file we can simply link the collection that - contains the model. There is no further need to 'containerise' it. 
- """ + """Load rigs from a .blend file.""" families = ["rig"] representations = ["blend"] @@ -24,105 +24,113 @@ class BlendRigLoader(plugin.AssetLoader): icon = "code-fork" color = "orange" - def _remove(self, objects, obj_container): - for obj in list(objects): - if obj.type == 'ARMATURE': - bpy.data.armatures.remove(obj.data) - elif obj.type == 'MESH': + def _remove(self, asset_group): + objects = list(asset_group.children) + + for obj in objects: + if obj.type == 'MESH': + for material_slot in list(obj.material_slots): + if material_slot.material: + bpy.data.materials.remove(material_slot.material) bpy.data.meshes.remove(obj.data) + elif obj.type == 'ARMATURE': + objects.extend(obj.children) + bpy.data.armatures.remove(obj.data) elif obj.type == 'CURVE': bpy.data.curves.remove(obj.data) + elif obj.type == 'EMPTY': + objects.extend(obj.children) + bpy.data.objects.remove(obj) - for child in obj_container.children: - bpy.data.collections.remove(child) - - bpy.data.collections.remove(obj_container) - - def make_local_and_metadata(self, obj, collection_name): - local_obj = plugin.prepare_data(obj, collection_name) - plugin.prepare_data(local_obj.data, collection_name) - - if not local_obj.get(blender.pipeline.AVALON_PROPERTY): - local_obj[blender.pipeline.AVALON_PROPERTY] = dict() - - avalon_info = local_obj[blender.pipeline.AVALON_PROPERTY] - avalon_info.update({"container_name": collection_name + '_CON'}) - - return local_obj - - def _process( - self, libpath, lib_container, collection_name, - action, parent_collection - ): - relative = bpy.context.preferences.filepaths.use_relative_paths + def _process(self, libpath, asset_group, group_name, action): with bpy.data.libraries.load( - libpath, link=True, relative=relative - ) as (_, data_to): - data_to.collections = [lib_container] + libpath, link=True, relative=False + ) as (data_from, data_to): + data_to.objects = data_from.objects - parent = parent_collection + parent = bpy.context.scene.collection - if parent 
is None: - parent = bpy.context.scene.collection + empties = [obj for obj in data_to.objects if obj.type == 'EMPTY'] - parent.children.link(bpy.data.collections[lib_container]) + container = None - rig_container = parent.children[lib_container].make_local() - rig_container.name = collection_name + for empty in empties: + if empty.get(AVALON_PROPERTY): + container = empty + break + assert container, "No asset group found" + + # Children must be linked before parents, + # otherwise the hierarchy will break objects = [] - armatures = [ - obj for obj in rig_container.objects - if obj.type == 'ARMATURE' - ] + nodes = list(container.children) - for child in rig_container.children: - local_child = plugin.prepare_data(child, collection_name) - objects.extend(local_child.objects) + for obj in nodes: + obj.parent = asset_group - # for obj in bpy.data.objects: - # obj.select_set(False) + for obj in nodes: + objects.append(obj) + nodes.extend(list(obj.children)) + + objects.reverse() constraints = [] + armatures = [obj for obj in objects if obj.type == 'ARMATURE'] + for armature in armatures: for bone in armature.pose.bones: for constraint in bone.constraints: if hasattr(constraint, 'target'): constraints.append(constraint) - # Link armatures after other objects. - # The armature is unparented for all the non-local meshes, - # when it is made local. 
for obj in objects: - local_obj = self.make_local_and_metadata(obj, collection_name) + parent.objects.link(obj) - if obj != local_obj: - for constraint in constraints: - if constraint.target == obj: - constraint.target = local_obj + for obj in objects: + local_obj = plugin.prepare_data(obj, group_name) - for armature in armatures: - local_obj = self.make_local_and_metadata(armature, collection_name) + if local_obj.type == 'MESH': + plugin.prepare_data(local_obj.data, group_name) - if action is not None: - local_obj.animation_data.action = action - elif local_obj.animation_data.action is not None: - plugin.prepare_data( - local_obj.animation_data.action, collection_name) + if obj != local_obj: + for constraint in constraints: + if constraint.target == obj: + constraint.target = local_obj - # Set link the drivers to the local object - if local_obj.data.animation_data: - for d in local_obj.data.animation_data.drivers: - for v in d.driver.variables: - for t in v.targets: - t.id = local_obj + for material_slot in local_obj.material_slots: + if material_slot.material: + plugin.prepare_data(material_slot.material, group_name) + elif local_obj.type == 'ARMATURE': + plugin.prepare_data(local_obj.data, group_name) - rig_container.pop(blender.pipeline.AVALON_PROPERTY) + if action is not None: + local_obj.animation_data.action = action + elif local_obj.animation_data.action is not None: + plugin.prepare_data( + local_obj.animation_data.action, group_name) + + # Set link the drivers to the local object + if local_obj.data.animation_data: + for d in local_obj.data.animation_data.drivers: + for v in d.driver.variables: + for t in v.targets: + t.id = local_obj + + if not local_obj.get(AVALON_PROPERTY): + local_obj[AVALON_PROPERTY] = dict() + + avalon_info = local_obj[AVALON_PROPERTY] + avalon_info.update({"container_name": group_name}) + + objects.reverse() + + bpy.data.orphans_purge(do_local_ids=False) bpy.ops.object.select_all(action='DESELECT') - return rig_container + return 
objects def process_asset( self, context: dict, name: str, namespace: Optional[str] = None, @@ -138,61 +146,111 @@ class BlendRigLoader(plugin.AssetLoader): libpath = self.fname asset = context["asset"]["name"] subset = context["subset"]["name"] - lib_container = plugin.asset_name( - asset, subset - ) - unique_number = plugin.get_unique_number( - asset, subset - ) + + asset_name = plugin.asset_name(asset, subset) + unique_number = plugin.get_unique_number(asset, subset) + group_name = plugin.asset_name(asset, subset, unique_number) namespace = namespace or f"{asset}_{unique_number}" - collection_name = plugin.asset_name( - asset, subset, unique_number - ) - container = bpy.data.collections.new(collection_name) - blender.pipeline.containerise_existing( - container, - name, - namespace, - context, - self.__class__.__name__, - ) + avalon_container = bpy.data.collections.get(AVALON_CONTAINERS) + if not avalon_container: + avalon_container = bpy.data.collections.new(name=AVALON_CONTAINERS) + bpy.context.scene.collection.children.link(avalon_container) - metadata = container.get(blender.pipeline.AVALON_PROPERTY) + asset_group = bpy.data.objects.new(group_name, object_data=None) + asset_group.empty_display_type = 'SINGLE_ARROW' + avalon_container.objects.link(asset_group) - metadata["libpath"] = libpath - metadata["lib_container"] = lib_container + action = None - obj_container = self._process( - libpath, lib_container, collection_name, None, None) + bpy.ops.object.select_all(action='DESELECT') - metadata["obj_container"] = obj_container - # Save the list of objects in the metadata container - metadata["objects"] = obj_container.all_objects + create_animation = False - metadata["parent"] = str(context["representation"]["parent"]) - metadata["family"] = context["representation"]["context"]["family"] + if options is not None: + parent = options.get('parent') + transform = options.get('transform') + action = options.get('action') + create_animation = 
options.get('create_animation') - nodes = list(container.objects) - nodes.append(container) - self[:] = nodes - return nodes + if parent and transform: + location = transform.get('translation') + rotation = transform.get('rotation') + scale = transform.get('scale') - def update(self, container: Dict, representation: Dict): + asset_group.location = ( + location.get('x'), + location.get('y'), + location.get('z') + ) + asset_group.rotation_euler = ( + rotation.get('x'), + rotation.get('y'), + rotation.get('z') + ) + asset_group.scale = ( + scale.get('x'), + scale.get('y'), + scale.get('z') + ) + + bpy.context.view_layer.objects.active = parent + asset_group.select_set(True) + + bpy.ops.object.parent_set(keep_transform=True) + + bpy.ops.object.select_all(action='DESELECT') + + objects = self._process(libpath, asset_group, group_name, action) + + if create_animation: + creator_plugin = lib.get_creator_by_name("CreateAnimation") + if not creator_plugin: + raise ValueError("Creator plugin \"CreateAnimation\" was " + "not found.") + + asset_group.select_set(True) + + animation_asset = options.get('animation_asset') + + api.create( + creator_plugin, + name=namespace + "_animation", + # name=f"{unique_number}_{subset}_animation", + asset=animation_asset, + options={"useSelection": False, "asset_group": asset_group}, + data={"dependencies": str(context["representation"]["_id"])} + ) + + bpy.ops.object.select_all(action='DESELECT') + + bpy.context.scene.collection.objects.link(asset_group) + + asset_group[AVALON_PROPERTY] = { + "schema": "openpype:container-2.0", + "id": AVALON_CONTAINER_ID, + "name": name, + "namespace": namespace or '', + "loader": str(self.__class__.__name__), + "representation": str(context["representation"]["_id"]), + "libpath": libpath, + "asset_name": asset_name, + "parent": str(context["representation"]["parent"]), + "family": context["representation"]["context"]["family"], + "objectName": group_name + } + + self[:] = objects + return objects + + def 
exec_update(self, container: Dict, representation: Dict): """Update the loaded asset. - This will remove all objects of the current collection, load the new - ones and add them to the collection. - If the objects of the collection are used in another collection they - will not be removed, only unlinked. Normally this should not be the - case though. - - Warning: - No nested collections are supported at the moment! + This will remove all children of the asset group, load the new ones + and add them as children of the group. """ - collection = bpy.data.collections.get( - container["objectName"] - ) + object_name = container["objectName"] + asset_group = bpy.data.objects.get(object_name) libpath = Path(api.get_representation_path(representation)) extension = libpath.suffix.lower() @@ -202,12 +260,9 @@ class BlendRigLoader(plugin.AssetLoader): pformat(representation, indent=2), ) - assert collection, ( + assert asset_group, ( f"The asset is not loaded: {container['objectName']}" ) - assert not (collection.children), ( - "Nested collections are not supported." 
- ) assert libpath, ( "No existing library file found for {container['objectName']}" ) @@ -218,89 +273,84 @@ class BlendRigLoader(plugin.AssetLoader): f"Unsupported file: {libpath}" ) - collection_metadata = collection.get( - blender.pipeline.AVALON_PROPERTY) - collection_libpath = collection_metadata["libpath"] - lib_container = collection_metadata["lib_container"] + metadata = asset_group.get(AVALON_PROPERTY) + group_libpath = metadata["libpath"] - obj_container = plugin.get_local_collection_with_name( - collection_metadata["obj_container"].name - ) - objects = obj_container.all_objects - - container_name = obj_container.name - - normalized_collection_libpath = ( - str(Path(bpy.path.abspath(collection_libpath)).resolve()) + normalized_group_libpath = ( + str(Path(bpy.path.abspath(group_libpath)).resolve()) ) normalized_libpath = ( str(Path(bpy.path.abspath(str(libpath))).resolve()) ) self.log.debug( - "normalized_collection_libpath:\n %s\nnormalized_libpath:\n %s", - normalized_collection_libpath, + "normalized_group_libpath:\n %s\nnormalized_libpath:\n %s", + normalized_group_libpath, normalized_libpath, ) - if normalized_collection_libpath == normalized_libpath: + if normalized_group_libpath == normalized_libpath: self.log.info("Library already loaded, not updating...") return + # Check how many assets use the same library + count = 0 + for obj in bpy.data.collections.get(AVALON_CONTAINERS).objects: + if obj.get(AVALON_PROPERTY).get('libpath') == group_libpath: + count += 1 + # Get the armature of the rig - armatures = [obj for obj in objects if obj.type == 'ARMATURE'] - assert(len(armatures) == 1) + objects = asset_group.children + armature = [obj for obj in objects if obj.type == 'ARMATURE'][0] action = None - if armatures[0].animation_data and armatures[0].animation_data.action: - action = armatures[0].animation_data.action + if armature.animation_data and armature.animation_data.action: + action = armature.animation_data.action - parent = 
plugin.get_parent_collection(obj_container) + mat = asset_group.matrix_basis.copy() - self._remove(objects, obj_container) + self._remove(asset_group) - obj_container = self._process( - str(libpath), lib_container, container_name, action, parent) + # If it is the last object to use that library, remove it + if count == 1: + library = bpy.data.libraries.get(bpy.path.basename(group_libpath)) + bpy.data.libraries.remove(library) - # Save the list of objects in the metadata container - collection_metadata["obj_container"] = obj_container - collection_metadata["objects"] = obj_container.all_objects - collection_metadata["libpath"] = str(libpath) - collection_metadata["representation"] = str(representation["_id"]) + self._process(str(libpath), asset_group, object_name, action) - bpy.ops.object.select_all(action='DESELECT') + asset_group.matrix_basis = mat - def remove(self, container: Dict) -> bool: - """Remove an existing container from a Blender scene. + metadata["libpath"] = str(libpath) + metadata["representation"] = str(representation["_id"]) + + def exec_remove(self, container: Dict) -> bool: + """Remove an existing asset group from a Blender scene. Arguments: container (openpype:container-1.0): Container to remove, from `host.ls()`. Returns: - bool: Whether the container was deleted. - - Warning: - No nested collections are supported at the moment! + bool: Whether the asset group was deleted. """ + object_name = container["objectName"] + asset_group = bpy.data.objects.get(object_name) + libpath = asset_group.get(AVALON_PROPERTY).get('libpath') - collection = bpy.data.collections.get( - container["objectName"] - ) - if not collection: + # Check how many assets use the same library + count = 0 + for obj in bpy.data.collections.get(AVALON_CONTAINERS).objects: + if obj.get(AVALON_PROPERTY).get('libpath') == libpath: + count += 1 + + if not asset_group: return False - assert not (collection.children), ( - "Nested collections are not supported." 
- ) - collection_metadata = collection.get( - blender.pipeline.AVALON_PROPERTY) + self._remove(asset_group) - obj_container = plugin.get_local_collection_with_name( - collection_metadata["obj_container"].name - ) - objects = obj_container.all_objects + bpy.data.objects.remove(asset_group) - self._remove(objects, obj_container) - - bpy.data.collections.remove(collection) + # If it is the last object to use that library, remove it + if count == 1: + library = bpy.data.libraries.get(bpy.path.basename(libpath)) + bpy.data.libraries.remove(library) return True diff --git a/openpype/hosts/blender/plugins/publish/collect_instances.py b/openpype/hosts/blender/plugins/publish/collect_instances.py index 1d3693216d..0d683dace4 100644 --- a/openpype/hosts/blender/plugins/publish/collect_instances.py +++ b/openpype/hosts/blender/plugins/publish/collect_instances.py @@ -5,6 +5,7 @@ import json import pyblish.api from avalon.blender.pipeline import AVALON_PROPERTY +from avalon.blender.pipeline import AVALON_INSTANCES class CollectInstances(pyblish.api.ContextPlugin): @@ -14,6 +15,20 @@ class CollectInstances(pyblish.api.ContextPlugin): label = "Collect Instances" order = pyblish.api.CollectorOrder + @staticmethod + def get_asset_groups() -> Generator: + """Return all 'model' collections. + + Check if the family is 'model' and if it doesn't have the + representation set. If the representation is set, it is a loaded model + and we don't want to publish it. + """ + instances = bpy.data.collections.get(AVALON_INSTANCES) + for obj in instances.objects: + avalon_prop = obj.get(AVALON_PROPERTY) or dict() + if avalon_prop.get('id') == 'pyblish.avalon.instance': + yield obj + @staticmethod def get_collections() -> Generator: """Return all 'model' collections. 
@@ -29,8 +44,35 @@ class CollectInstances(pyblish.api.ContextPlugin): def process(self, context): """Collect the models from the current Blender scene.""" + asset_groups = self.get_asset_groups() collections = self.get_collections() + for group in asset_groups: + avalon_prop = group[AVALON_PROPERTY] + asset = avalon_prop['asset'] + family = avalon_prop['family'] + subset = avalon_prop['subset'] + task = avalon_prop['task'] + name = f"{asset}_{subset}" + instance = context.create_instance( + name=name, + family=family, + families=[family], + subset=subset, + asset=asset, + task=task, + ) + objects = list(group.children) + members = set() + for obj in objects: + objects.extend(list(obj.children)) + members.add(obj) + members.add(group) + instance[:] = list(members) + self.log.debug(json.dumps(instance.data, indent=4)) + for obj in instance: + self.log.debug(obj) + for collection in collections: avalon_prop = collection[AVALON_PROPERTY] asset = avalon_prop['asset'] @@ -47,6 +89,12 @@ class CollectInstances(pyblish.api.ContextPlugin): task=task, ) members = list(collection.objects) + if family == "animation": + for obj in collection.objects: + if obj.type == 'EMPTY' and obj.get(AVALON_PROPERTY): + for child in obj.children: + if child.type == 'ARMATURE': + members.append(child) members.append(collection) instance[:] = members self.log.debug(json.dumps(instance.data, indent=4)) diff --git a/openpype/hosts/blender/plugins/publish/extract_abc.py b/openpype/hosts/blender/plugins/publish/extract_abc.py index a6315908fc..4696da3db4 100644 --- a/openpype/hosts/blender/plugins/publish/extract_abc.py +++ b/openpype/hosts/blender/plugins/publish/extract_abc.py @@ -1,12 +1,13 @@ import os -import openpype.api -import openpype.hosts.blender.api.plugin +from openpype import api +from openpype.hosts.blender.api import plugin +from avalon.blender.pipeline import AVALON_PROPERTY import bpy -class ExtractABC(openpype.api.Extractor): +class ExtractABC(api.Extractor): """Extract as 
ABC.""" label = "Extract ABC" @@ -16,7 +17,6 @@ class ExtractABC(openpype.api.Extractor): def process(self, instance): # Define extract output file path - stagingdir = self.staging_dir(instance) filename = f"{instance.name}.abc" filepath = os.path.join(stagingdir, filename) @@ -28,57 +28,29 @@ class ExtractABC(openpype.api.Extractor): # Perform extraction self.log.info("Performing extraction..") - collections = [ - obj for obj in instance if type(obj) is bpy.types.Collection] - - assert len(collections) == 1, "There should be one and only one " \ - "collection collected for this asset" - - old_active_layer_collection = view_layer.active_layer_collection - - layers = view_layer.layer_collection.children - - # Get the layer collection from the collection we need to export. - # This is needed because in Blender you can only set the active - # collection with the layer collection, and there is no way to get - # the layer collection from the collection - # (but there is the vice versa). - layer_collections = [ - layer for layer in layers if layer.collection == collections[0]] - - assert len(layer_collections) == 1 - - view_layer.active_layer_collection = layer_collections[0] - - old_scale = scene.unit_settings.scale_length - bpy.ops.object.select_all(action='DESELECT') - selected = list() + selected = [] + asset_group = None for obj in instance: - try: - obj.select_set(True) - selected.append(obj) - except: - continue + obj.select_set(True) + selected.append(obj) + if obj.get(AVALON_PROPERTY): + asset_group = obj - new_context = openpype.hosts.blender.api.plugin.create_blender_context( - active=selected[0], selected=selected) - - # We set the scale of the scene for the export - scene.unit_settings.scale_length = 0.01 + context = plugin.create_blender_context( + active=asset_group, selected=selected) # We export the abc bpy.ops.wm.alembic_export( - new_context, + context, filepath=filepath, - selected=True + selected=True, + flatten=False ) - 
view_layer.active_layer_collection = old_active_layer_collection - - scene.unit_settings.scale_length = old_scale + bpy.ops.object.select_all(action='DESELECT') if "representations" not in instance.data: instance.data["representations"] = [] diff --git a/openpype/hosts/blender/plugins/publish/extract_blend.py b/openpype/hosts/blender/plugins/publish/extract_blend.py index 890c8b5ffd..6687c9fe76 100644 --- a/openpype/hosts/blender/plugins/publish/extract_blend.py +++ b/openpype/hosts/blender/plugins/publish/extract_blend.py @@ -1,6 +1,8 @@ import os -import avalon.blender.workio +import bpy + +# import avalon.blender.workio import openpype.api @@ -9,7 +11,7 @@ class ExtractBlend(openpype.api.Extractor): label = "Extract Blend" hosts = ["blender"] - families = ["model", "camera", "rig", "action", "layout", "animation"] + families = ["model", "camera", "rig", "action", "layout"] optional = True def process(self, instance): @@ -22,15 +24,12 @@ class ExtractBlend(openpype.api.Extractor): # Perform extraction self.log.info("Performing extraction..") - # Just save the file to a temporary location. At least for now it's no - # problem to have (possibly) extra stuff in the file. 
- avalon.blender.workio.save_file(filepath, copy=True) - # - # # Store reference for integration - # if "files" not in instance.data: - # instance.data["files"] = list() - # - # # instance.data["files"].append(filename) + data_blocks = set() + + for obj in instance: + data_blocks.add(obj) + + bpy.data.libraries.write(filepath, data_blocks) if "representations" not in instance.data: instance.data["representations"] = [] diff --git a/openpype/hosts/blender/plugins/publish/extract_blend_animation.py b/openpype/hosts/blender/plugins/publish/extract_blend_animation.py new file mode 100644 index 0000000000..239ca53f98 --- /dev/null +++ b/openpype/hosts/blender/plugins/publish/extract_blend_animation.py @@ -0,0 +1,53 @@ +import os + +import bpy + +import openpype.api + + +class ExtractBlendAnimation(openpype.api.Extractor): + """Extract a blend file.""" + + label = "Extract Blend" + hosts = ["blender"] + families = ["animation"] + optional = True + + def process(self, instance): + # Define extract output file path + + stagingdir = self.staging_dir(instance) + filename = f"{instance.name}.blend" + filepath = os.path.join(stagingdir, filename) + + # Perform extraction + self.log.info("Performing extraction..") + + data_blocks = set() + + for obj in instance: + if isinstance(obj, bpy.types.Object) and obj.type == 'EMPTY': + child = obj.children[0] + if child and child.type == 'ARMATURE': + if not obj.animation_data: + obj.animation_data_create() + obj.animation_data.action = child.animation_data.action + obj.animation_data_clear() + data_blocks.add(child.animation_data.action) + data_blocks.add(obj) + + bpy.data.libraries.write(filepath, data_blocks) + + if "representations" not in instance.data: + instance.data["representations"] = [] + + representation = { + 'name': 'blend', + 'ext': 'blend', + 'files': filename, + "stagingDir": stagingdir, + } + instance.data["representations"].append(representation) + + self.log.info("Extracted instance '%s' to: %s", + instance.name, 
representation) diff --git a/openpype/hosts/blender/plugins/publish/extract_fbx.py b/openpype/hosts/blender/plugins/publish/extract_fbx.py index 05149eacc1..b91f2a75ef 100644 --- a/openpype/hosts/blender/plugins/publish/extract_fbx.py +++ b/openpype/hosts/blender/plugins/publish/extract_fbx.py @@ -1,11 +1,13 @@ import os -import openpype.api +from openpype import api +from openpype.hosts.blender.api import plugin +from avalon.blender.pipeline import AVALON_PROPERTY import bpy -class ExtractFBX(openpype.api.Extractor): +class ExtractFBX(api.Extractor): """Extract as FBX.""" label = "Extract FBX" @@ -15,71 +17,56 @@ class ExtractFBX(openpype.api.Extractor): def process(self, instance): # Define extract output file path - stagingdir = self.staging_dir(instance) filename = f"{instance.name}.fbx" filepath = os.path.join(stagingdir, filename) - context = bpy.context - scene = context.scene - view_layer = context.view_layer - # Perform extraction self.log.info("Performing extraction..") - collections = [ - obj for obj in instance if type(obj) is bpy.types.Collection] + bpy.ops.object.select_all(action='DESELECT') - assert len(collections) == 1, "There should be one and only one " \ - "collection collected for this asset" + selected = [] + asset_group = None - old_active_layer_collection = view_layer.active_layer_collection + for obj in instance: + obj.select_set(True) + selected.append(obj) + if obj.get(AVALON_PROPERTY): + asset_group = obj - layers = view_layer.layer_collection.children - - # Get the layer collection from the collection we need to export. - # This is needed because in Blender you can only set the active - # collection with the layer collection, and there is no way to get - # the layer collection from the collection - # (but there is the vice versa). 
- layer_collections = [ - layer for layer in layers if layer.collection == collections[0]] - - assert len(layer_collections) == 1 - - view_layer.active_layer_collection = layer_collections[0] - - old_scale = scene.unit_settings.scale_length - - # We set the scale of the scene for the export - scene.unit_settings.scale_length = 0.01 + context = plugin.create_blender_context( + active=asset_group, selected=selected) new_materials = [] + new_materials_objs = [] + objects = list(asset_group.children) - for obj in collections[0].all_objects: - if obj.type == 'MESH': + for obj in objects: + objects.extend(obj.children) + if obj.type == 'MESH' and len(obj.data.materials) == 0: mat = bpy.data.materials.new(obj.name) obj.data.materials.append(mat) new_materials.append(mat) + new_materials_objs.append(obj) # We export the fbx bpy.ops.export_scene.fbx( + context, filepath=filepath, - use_active_collection=True, + use_active_collection=False, + use_selection=True, mesh_smooth_type='FACE', add_leaf_bones=False ) - view_layer.active_layer_collection = old_active_layer_collection - - scene.unit_settings.scale_length = old_scale + bpy.ops.object.select_all(action='DESELECT') for mat in new_materials: bpy.data.materials.remove(mat) - for obj in collections[0].all_objects: - if obj.type == 'MESH': - obj.data.materials.pop() + for obj in new_materials_objs: + obj.data.materials.pop() if "representations" not in instance.data: instance.data["representations"] = [] diff --git a/openpype/hosts/blender/plugins/publish/extract_fbx_animation.py b/openpype/hosts/blender/plugins/publish/extract_fbx_animation.py index 8312114c7b..16443b760c 100644 --- a/openpype/hosts/blender/plugins/publish/extract_fbx_animation.py +++ b/openpype/hosts/blender/plugins/publish/extract_fbx_animation.py @@ -1,14 +1,16 @@ import os import json -import openpype.api - import bpy import bpy_extras import bpy_extras.anim_utils +from openpype import api +from openpype.hosts.blender.api import plugin +from 
avalon.blender.pipeline import AVALON_PROPERTY -class ExtractAnimationFBX(openpype.api.Extractor): + +class ExtractAnimationFBX(api.Extractor): """Extract as animation.""" label = "Extract FBX" @@ -20,33 +22,26 @@ class ExtractAnimationFBX(openpype.api.Extractor): # Define extract output file path stagingdir = self.staging_dir(instance) - context = bpy.context - scene = context.scene - # Perform extraction self.log.info("Performing extraction..") - collections = [ - obj for obj in instance if type(obj) is bpy.types.Collection] + # The first collection object in the instance is taken, as there + # should be only one that contains the asset group. + collection = [ + obj for obj in instance if type(obj) is bpy.types.Collection][0] - assert len(collections) == 1, "There should be one and only one " \ - "collection collected for this asset" + # Again, the first object in the collection is taken , as there + # should be only the asset group in the collection. + asset_group = collection.objects[0] - old_scale = scene.unit_settings.scale_length + armature = [ + obj for obj in asset_group.children if obj.type == 'ARMATURE'][0] - # We set the scale of the scene for the export - scene.unit_settings.scale_length = 0.01 - - armatures = [ - obj for obj in collections[0].objects if obj.type == 'ARMATURE'] - - assert len(collections) == 1, "There should be one and only one " \ - "armature collected for this asset" - - armature = armatures[0] + asset_group_name = asset_group.name + asset_group.name = asset_group.get(AVALON_PROPERTY).get("asset_name") armature_name = armature.name - original_name = armature_name.split(':')[0] + original_name = armature_name.split(':')[1] armature.name = original_name object_action_pairs = [] @@ -89,27 +84,29 @@ class ExtractAnimationFBX(openpype.api.Extractor): for obj in bpy.data.objects: obj.select_set(False) + asset_group.select_set(True) armature.select_set(True) fbx_filename = f"{instance.name}_{armature.name}.fbx" filepath = 
os.path.join(stagingdir, fbx_filename) - override = bpy.context.copy() - override['selected_objects'] = [armature] + override = plugin.create_blender_context( + active=asset_group, selected=[asset_group, armature]) bpy.ops.export_scene.fbx( override, filepath=filepath, + use_active_collection=False, use_selection=True, bake_anim_use_nla_strips=False, bake_anim_use_all_actions=False, add_leaf_bones=False, armature_nodetype='ROOT', - object_types={'ARMATURE'} + object_types={'EMPTY', 'ARMATURE'} ) armature.name = armature_name + asset_group.name = asset_group_name + asset_group.select_set(False) armature.select_set(False) - scene.unit_settings.scale_length = old_scale - # We delete the baked action and set the original one back for i in range(0, len(object_action_pairs)): pair = object_action_pairs[i] @@ -125,18 +122,20 @@ class ExtractAnimationFBX(openpype.api.Extractor): json_filename = f"{instance.name}.json" json_path = os.path.join(stagingdir, json_filename) - json_dict = {} + json_dict = { + "instance_name": asset_group.get(AVALON_PROPERTY).get("namespace") + } - collection = instance.data.get("name") - container = None - for obj in bpy.data.collections[collection].objects: - if obj.type == "ARMATURE": - container_name = obj.get("avalon").get("container_name") - container = bpy.data.collections[container_name] - if container: - json_dict = { - "instance_name": container.get("avalon").get("instance_name") - } + # collection = instance.data.get("name") + # container = None + # for obj in bpy.data.collections[collection].objects: + # if obj.type == "ARMATURE": + # container_name = obj.get("avalon").get("container_name") + # container = bpy.data.collections[container_name] + # if container: + # json_dict = { + # "instance_name": container.get("avalon").get("instance_name") + # } with open(json_path, "w+") as file: json.dump(json_dict, fp=file, indent=2) @@ -159,6 +158,5 @@ class ExtractAnimationFBX(openpype.api.Extractor): 
instance.data["representations"].append(fbx_representation) instance.data["representations"].append(json_representation) - self.log.info("Extracted instance '{}' to: {}".format( instance.name, fbx_representation)) diff --git a/openpype/hosts/blender/plugins/publish/extract_layout.py b/openpype/hosts/blender/plugins/publish/extract_layout.py index c6c9bf67f5..cd081b4479 100644 --- a/openpype/hosts/blender/plugins/publish/extract_layout.py +++ b/openpype/hosts/blender/plugins/publish/extract_layout.py @@ -3,7 +3,8 @@ import json import bpy -from avalon import blender, io +from avalon import io +from avalon.blender.pipeline import AVALON_PROPERTY import openpype.api @@ -24,52 +25,49 @@ class ExtractLayout(openpype.api.Extractor): json_data = [] - for collection in instance: - for asset in collection.children: - collection = bpy.data.collections[asset.name] - container = bpy.data.collections[asset.name + '_CON'] - metadata = container.get(blender.pipeline.AVALON_PROPERTY) + asset_group = bpy.data.objects[str(instance)] - parent = metadata["parent"] - family = metadata["family"] + for asset in asset_group.children: + metadata = asset.get(AVALON_PROPERTY) - self.log.debug("Parent: {}".format(parent)) - blend = io.find_one( - { - "type": "representation", - "parent": io.ObjectId(parent), - "name": "blend" - }, - projection={"_id": True}) - blend_id = blend["_id"] + parent = metadata["parent"] + family = metadata["family"] - json_element = {} - json_element["reference"] = str(blend_id) - json_element["family"] = family - json_element["instance_name"] = asset.name - json_element["asset_name"] = metadata["lib_container"] - json_element["file_path"] = metadata["libpath"] + self.log.debug("Parent: {}".format(parent)) + blend = io.find_one( + { + "type": "representation", + "parent": io.ObjectId(parent), + "name": "blend" + }, + projection={"_id": True}) + blend_id = blend["_id"] - obj = collection.objects[0] + json_element = {} + json_element["reference"] = str(blend_id) + 
json_element["family"] = family + json_element["instance_name"] = asset.name + json_element["asset_name"] = metadata["asset_name"] + json_element["file_path"] = metadata["libpath"] - json_element["transform"] = { - "translation": { - "x": obj.location.x, - "y": obj.location.y, - "z": obj.location.z - }, - "rotation": { - "x": obj.rotation_euler.x, - "y": obj.rotation_euler.y, - "z": obj.rotation_euler.z, - }, - "scale": { - "x": obj.scale.x, - "y": obj.scale.y, - "z": obj.scale.z - } + json_element["transform"] = { + "translation": { + "x": asset.location.x, + "y": asset.location.y, + "z": asset.location.z + }, + "rotation": { + "x": asset.rotation_euler.x, + "y": asset.rotation_euler.y, + "z": asset.rotation_euler.z, + }, + "scale": { + "x": asset.scale.x, + "y": asset.scale.y, + "z": asset.scale.z } - json_data.append(json_element) + } + json_data.append(json_element) json_filename = "{}.json".format(instance.name) json_path = os.path.join(stagingdir, json_filename) diff --git a/openpype/hosts/blender/plugins/publish/validate_no_colons_in_name.py b/openpype/hosts/blender/plugins/publish/validate_no_colons_in_name.py new file mode 100644 index 0000000000..261ff864d5 --- /dev/null +++ b/openpype/hosts/blender/plugins/publish/validate_no_colons_in_name.py @@ -0,0 +1,39 @@ +from typing import List + +import pyblish.api +import openpype.hosts.blender.api.action + + +class ValidateNoColonsInName(pyblish.api.InstancePlugin): + """There cannot be colons in names + + Object or bone names cannot include colons. Other software do not + handle colons correctly. 
+ + """ + + order = openpype.api.ValidateContentsOrder + hosts = ["blender"] + families = ["model", "rig"] + version = (0, 1, 0) + label = "No Colons in names" + actions = [openpype.hosts.blender.api.action.SelectInvalidAction] + + @classmethod + def get_invalid(cls, instance) -> List: + invalid = [] + for obj in [obj for obj in instance]: + if ':' in obj.name: + invalid.append(obj) + if obj.type == 'ARMATURE': + for bone in obj.data.bones: + if ':' in bone.name: + invalid.append(obj) + break + return invalid + + def process(self, instance): + invalid = self.get_invalid(instance) + if invalid: + raise RuntimeError( + f"Objects found with colon in name: {invalid}") diff --git a/openpype/hosts/blender/plugins/publish/validate_transform_zero.py b/openpype/hosts/blender/plugins/publish/validate_transform_zero.py new file mode 100644 index 0000000000..7456dbc423 --- /dev/null +++ b/openpype/hosts/blender/plugins/publish/validate_transform_zero.py @@ -0,0 +1,40 @@ +from typing import List + +import mathutils + +import pyblish.api +import openpype.hosts.blender.api.action + + +class ValidateTransformZero(pyblish.api.InstancePlugin): + """Transforms can't have any values + + To solve this issue, try freezing the transforms. So long + as the transforms, rotation and scale values are zero, + you're all good. 
+ + """ + + order = openpype.api.ValidateContentsOrder + hosts = ["blender"] + families = ["model"] + category = "geometry" + version = (0, 1, 0) + label = "Transform Zero" + actions = [openpype.hosts.blender.api.action.SelectInvalidAction] + + _identity = mathutils.Matrix() + + @classmethod + def get_invalid(cls, instance) -> List: + invalid = [] + for obj in [obj for obj in instance]: + if obj.matrix_basis != cls._identity: + invalid.append(obj) + return invalid + + def process(self, instance): + invalid = self.get_invalid(instance) + if invalid: + raise RuntimeError( + f"Object found in instance is not in Object Mode: {invalid}") diff --git a/openpype/hosts/houdini/api/__init__.py b/openpype/hosts/houdini/api/__init__.py index 21f4ae41c3..7328236b97 100644 --- a/openpype/hosts/houdini/api/__init__.py +++ b/openpype/hosts/houdini/api/__init__.py @@ -1,17 +1,21 @@ import os +import sys import logging +import contextlib import hou from pyblish import api as pyblish - from avalon import api as avalon -from avalon.houdini import pipeline as houdini import openpype.hosts.houdini from openpype.hosts.houdini.api import lib -from openpype.lib import any_outdated +from openpype.lib import ( + any_outdated +) + +from .lib import get_asset_fps log = logging.getLogger("openpype.hosts.houdini") @@ -22,6 +26,7 @@ LOAD_PATH = os.path.join(PLUGINS_DIR, "load") CREATE_PATH = os.path.join(PLUGINS_DIR, "create") INVENTORY_PATH = os.path.join(PLUGINS_DIR, "inventory") + def install(): pyblish.register_plugin_path(PUBLISH_PATH) @@ -29,19 +34,28 @@ def install(): avalon.register_plugin_path(avalon.Creator, CREATE_PATH) log.info("Installing callbacks ... 
") - avalon.on("init", on_init) + # avalon.on("init", on_init) avalon.before("save", before_save) avalon.on("save", on_save) avalon.on("open", on_open) + avalon.on("new", on_new) pyblish.register_callback("instanceToggled", on_pyblish_instance_toggled) log.info("Setting default family states for loader..") - avalon.data["familiesStateToggled"] = ["imagesequence"] + avalon.data["familiesStateToggled"] = [ + "imagesequence", + "review" + ] + # add houdini vendor packages + hou_pythonpath = os.path.join(os.path.dirname(HOST_DIR), "vendor") -def on_init(*args): - houdini.on_houdini_initialize() + sys.path.append(hou_pythonpath) + + # Set asset FPS for the empty scene directly after launch of Houdini + # so it initializes into the correct scene FPS + _set_asset_fps() def before_save(*args): @@ -59,10 +73,18 @@ def on_save(*args): def on_open(*args): + if not hou.isUIAvailable(): + log.debug("Batch mode detected, ignoring `on_open` callbacks..") + return + avalon.logger.info("Running callback on open..") + # Validate FPS after update_task_from_path to + # ensure it is using correct FPS for the asset + lib.validate_fps() + if any_outdated(): - from ..widgets import popup + from openpype.widgets import popup log.warning("Scene has outdated content.") @@ -70,7 +92,7 @@ def on_open(*args): parent = hou.ui.mainQtWindow() if parent is None: log.info("Skipping outdated content pop-up " - "because Maya window can't be found.") + "because Houdini window can't be found.") else: # Show outdated pop-up @@ -79,15 +101,52 @@ def on_open(*args): tool.show(parent=parent) dialog = popup.Popup(parent=parent) - dialog.setWindowTitle("Maya scene has outdated content") + dialog.setWindowTitle("Houdini scene has outdated content") dialog.setMessage("There are outdated containers in " - "your Maya scene.") - dialog.on_show.connect(_on_show_inventory) + "your Houdini scene.") + dialog.on_clicked.connect(_on_show_inventory) dialog.show() +def on_new(_): + """Set project resolution and fps when 
create a new file""" + avalon.logger.info("Running callback on new..") + _set_asset_fps() + + +def _set_asset_fps(): + """Set Houdini scene FPS to the default required for current asset""" + + # Set new scene fps + fps = get_asset_fps() + print("Setting scene FPS to %i" % fps) + lib.set_scene_fps(fps) + + def on_pyblish_instance_toggled(instance, new_value, old_value): """Toggle saver tool passthrough states on instance toggles.""" + @contextlib.contextmanager + def main_take(no_update=True): + """Enter root take during context""" + original_take = hou.takes.currentTake() + original_update_mode = hou.updateModeSetting() + root = hou.takes.rootTake() + has_changed = False + try: + if original_take != root: + has_changed = True + if no_update: + hou.setUpdateMode(hou.updateMode.Manual) + hou.takes.setCurrentTake(root) + yield + finally: + if has_changed: + if no_update: + hou.setUpdateMode(original_update_mode) + hou.takes.setCurrentTake(original_take) + + if not instance.data.get("_allowToggleBypass", True): + return nodes = instance[:] if not nodes: @@ -96,8 +155,20 @@ def on_pyblish_instance_toggled(instance, new_value, old_value): # Assume instance node is first node instance_node = nodes[0] + if not hasattr(instance_node, "isBypassed"): + # Likely not a node that can actually be bypassed + log.debug("Can't bypass node: %s", instance_node.path()) + return + if instance_node.isBypassed() != (not old_value): print("%s old bypass state didn't match old instance state, " "updating anyway.." % instance_node.path()) - instance_node.bypass(not new_value) + try: + # Go into the main take, because when in another take changing + # the bypass state of a note cannot be done due to it being locked + # by default. 
+ with main_take(no_update=True): + instance_node.bypass(not new_value) + except hou.PermissionError as exc: + log.warning("%s - %s", instance_node.path(), exc) diff --git a/openpype/hosts/houdini/api/lib.py b/openpype/hosts/houdini/api/lib.py index 1f0f90811f..53f0e59ea9 100644 --- a/openpype/hosts/houdini/api/lib.py +++ b/openpype/hosts/houdini/api/lib.py @@ -1,14 +1,19 @@ import uuid - +import logging from contextlib import contextmanager -import hou - -from openpype import lib - +from openpype.api import get_asset from avalon import api, io from avalon.houdini import lib as houdini +import hou + +log = logging.getLogger(__name__) + + +def get_asset_fps(): + """Return current asset fps.""" + return get_asset()["data"].get("fps") def set_id(node, unique_id, overwrite=False): @@ -171,10 +176,10 @@ def get_output_parameter(node): node_type = node.type().name() if node_type == "geometry": return node.parm("sopoutput") - elif node_type == "alembic": return node.parm("filename") - + elif node_type == "comp": + return node.parm("copoutput") else: raise TypeError("Node type '%s' not supported" % node_type) @@ -205,7 +210,7 @@ def validate_fps(): """ - fps = lib.get_asset()["data"]["fps"] + fps = get_asset_fps() current_fps = hou.fps() # returns float if current_fps != fps: @@ -217,18 +222,123 @@ def validate_fps(): if parent is None: pass else: - dialog = popup.Popup2(parent=parent) + dialog = popup.Popup(parent=parent) dialog.setModal(True) - dialog.setWindowTitle("Houdini scene not in line with project") - dialog.setMessage("The FPS is out of sync, please fix it") + dialog.setWindowTitle("Houdini scene does not match project FPS") + dialog.setMessage("Scene %i FPS does not match project %i FPS" % + (current_fps, fps)) + dialog.setButtonText("Fix") - # Set new text for button (add optional argument for the popup?) 
- toggle = dialog.widgets["toggle"] - toggle.setEnabled(False) - dialog.on_show.connect(lambda: set_scene_fps(fps)) + # on_show is the Fix button clicked callback + dialog.on_clicked.connect(lambda: set_scene_fps(fps)) dialog.show() return False return True + + +def create_remote_publish_node(force=True): + """Function to create a remote publish node in /out + + This is a hacked "Shell" node that does *nothing* except for triggering + `colorbleed.lib.publish_remote()` as pre-render script. + + All default attributes of the Shell node are hidden to the Artist to + avoid confusion. + + Additionally some custom attributes are added that can be collected + by a Collector to set specific settings for the publish, e.g. whether + to separate the jobs per instance or process in one single job. + + """ + + cmd = "import colorbleed.lib; colorbleed.lib.publish_remote()" + + existing = hou.node("/out/REMOTE_PUBLISH") + if existing: + if force: + log.warning("Removing existing '/out/REMOTE_PUBLISH' node..") + existing.destroy() + else: + raise RuntimeError("Node already exists /out/REMOTE_PUBLISH. " + "Please remove manually or set `force` to " + "True.") + + # Create the shell node + out = hou.node("/out") + node = out.createNode("shell", node_name="REMOTE_PUBLISH") + node.moveToGoodPosition() + + # Set color make it stand out (avalon/pyblish color) + node.setColor(hou.Color(0.439, 0.709, 0.933)) + + # Set the pre-render script + node.setParms({ + "prerender": cmd, + "lprerender": "python" # command language + }) + + # Lock the attributes to ensure artists won't easily mess things up. 
+ node.parm("prerender").lock(True) + node.parm("lprerender").lock(True) + + # Lock up the actual shell command + command_parm = node.parm("command") + command_parm.set("") + command_parm.lock(True) + shellexec_parm = node.parm("shellexec") + shellexec_parm.set(False) + shellexec_parm.lock(True) + + # Get the node's parm template group so we can customize it + template = node.parmTemplateGroup() + + # Hide default tabs + template.hideFolder("Shell", True) + template.hideFolder("Scripts", True) + + # Hide default settings + template.hide("execute", True) + template.hide("renderdialog", True) + template.hide("trange", True) + template.hide("f", True) + template.hide("take", True) + + # Add custom settings to this node. + parm_folder = hou.FolderParmTemplate("folder", "Submission Settings") + + # Separate Jobs per Instance + parm = hou.ToggleParmTemplate(name="separateJobPerInstance", + label="Separate Job per Instance", + default_value=False) + parm_folder.addParmTemplate(parm) + + # Add our custom Submission Settings folder + template.append(parm_folder) + + # Apply template back to the node + node.setParmTemplateGroup(template) + + +def render_rop(ropnode): + """Render ROP node utility for Publishing. + + This renders a ROP node with the settings we want during Publishing. + """ + # Print verbose when in batch mode without UI + verbose = not hou.isUIAvailable() + + # Render + try: + ropnode.render(verbose=verbose, + # Allow Deadline to capture completion percentage + output_progress=verbose) + except hou.Error as exc: + # The hou.Error is not inherited from a Python Exception class, + # so we explicitly capture the houdini error, otherwise pyblish + # will remain hanging. 
+ import traceback + traceback.print_exc() + raise RuntimeError("Render failed: {0}".format(exc)) diff --git a/openpype/hosts/houdini/api/plugin.py b/openpype/hosts/houdini/api/plugin.py index 9820ed49c3..efdaa60084 100644 --- a/openpype/hosts/houdini/api/plugin.py +++ b/openpype/hosts/houdini/api/plugin.py @@ -1,6 +1,26 @@ +# -*- coding: utf-8 -*- +"""Houdini specific Avalon/Pyblish plugin definitions.""" +import sys from avalon import houdini +import six + +import hou from openpype.api import PypeCreatorMixin -class Creator(PypeCreatorMixin, houdini.Creator): +class OpenPypeCreatorError(Exception): pass + + +class Creator(PypeCreatorMixin, houdini.Creator): + def process(self): + try: + # re-raise as standard Python exception so + # Avalon can catch it + instance = super(Creator, self).process() + self._process(instance) + except hou.Error as er: + six.reraise( + OpenPypeCreatorError, + OpenPypeCreatorError("Creator error: {}".format(er)), + sys.exc_info()[2]) diff --git a/openpype/hosts/houdini/api/usd.py b/openpype/hosts/houdini/api/usd.py new file mode 100644 index 0000000000..850ffb60e5 --- /dev/null +++ b/openpype/hosts/houdini/api/usd.py @@ -0,0 +1,255 @@ +"""Houdini-specific USD Library functions.""" + +import contextlib + +import logging +from Qt import QtCore, QtGui +from avalon.tools.widgets import AssetWidget +from avalon import style + +from pxr import Sdf + + +log = logging.getLogger(__name__) + + +def pick_asset(node): + """Show a user interface to select an Asset in the project + + When double clicking an asset it will set the Asset value in the + 'asset' parameter. + + """ + + pos = QtGui.QCursor.pos() + + parm = node.parm("asset_name") + if not parm: + log.error("Node has no 'asset' parameter: %s", node) + return + + # Construct the AssetWidget as a frameless popup so it automatically + # closes when clicked outside of it. 
+ global tool + tool = AssetWidget(silo_creatable=False) + tool.setContentsMargins(5, 5, 5, 5) + tool.setWindowTitle("Pick Asset") + tool.setStyleSheet(style.load_stylesheet()) + tool.setWindowFlags(QtCore.Qt.FramelessWindowHint | QtCore.Qt.Popup) + tool.refresh() + + # Select the current asset if there is any + name = parm.eval() + if name: + from avalon import io + + db_asset = io.find_one({"name": name, "type": "asset"}) + if db_asset: + silo = db_asset.get("silo") + if silo: + tool.set_silo(silo) + tool.select_assets([name], expand=True) + + # Show cursor (top right of window) near cursor + tool.resize(250, 400) + tool.move(tool.mapFromGlobal(pos) - QtCore.QPoint(tool.width(), 0)) + + def set_parameter_callback(index): + name = index.data(tool.model.DocumentRole)["name"] + parm.set(name) + tool.close() + + tool.view.doubleClicked.connect(set_parameter_callback) + tool.show() + + +def add_usd_output_processor(ropnode, processor): + """Add USD Output Processor to USD Rop node. + + Args: + ropnode (hou.RopNode): The USD Rop node. + processor (str): The output processor name. This is the basename of + the python file that contains the Houdini USD Output Processor. + + """ + + import loputils + + loputils.handleOutputProcessorAdd( + { + "node": ropnode, + "parm": ropnode.parm("outputprocessors"), + "script_value": processor, + } + ) + + +def remove_usd_output_processor(ropnode, processor): + """Removes USD Output Processor from USD Rop node. + + Args: + ropnode (hou.RopNode): The USD Rop node. + processor (str): The output processor name. This is the basename of + the python file that contains the Houdini USD Output Processor. 
+ + """ + import loputils + + parm = ropnode.parm(processor + "_remove") + if not parm: + raise RuntimeError( + "Output Processor %s does not " + "exist on %s" % (processor, ropnode.name()) + ) + + loputils.handleOutputProcessorRemove({"node": ropnode, "parm": parm}) + + +@contextlib.contextmanager +def outputprocessors(ropnode, processors=tuple(), disable_all_others=True): + """Context manager to temporarily add Output Processors to USD ROP node. + + Args: + ropnode (hou.RopNode): The USD Rop node. + processors (tuple or list): The processors to add. + disable_all_others (bool, Optional): Whether to disable all + output processors currently on the ROP node that are not in the + `processors` list passed to this function. + + """ + # TODO: Add support for forcing the correct Order of the processors + + original = [] + prefix = "enableoutputprocessor_" + processor_parms = ropnode.globParms(prefix + "*") + for parm in processor_parms: + original.append((parm, parm.eval())) + + if disable_all_others: + for parm in processor_parms: + parm.set(False) + + added = [] + for processor in processors: + + parm = ropnode.parm(prefix + processor) + if parm: + # If processor already exists, just enable it + parm.set(True) + + else: + # Else add the new processor + add_usd_output_processor(ropnode, processor) + added.append(processor) + + try: + yield + finally: + + # Remove newly added processors + for processor in added: + remove_usd_output_processor(ropnode, processor) + + # Revert to original values + for parm, value in original: + if parm: + parm.set(value) + + +def get_usd_rop_loppath(node): + + # Get sop path + node_type = node.type().name() + if node_type == "usd": + return node.parm("loppath").evalAsNode() + + elif node_type in {"usd_rop", "usdrender_rop"}: + # Inside Solaris e.g. 
/stage (not in ROP context) + # When incoming connection is present it takes it directly + inputs = node.inputs() + if inputs: + return inputs[0] + else: + return node.parm("loppath").evalAsNode() + + +def get_layer_save_path(layer): + """Get custom HoudiniLayerInfo->HoudiniSavePath from SdfLayer. + + Args: + layer (pxr.Sdf.Layer): The Layer to retrieve the save pah data from. + + Returns: + str or None: Path to save to when data exists. + + """ + hou_layer_info = layer.rootPrims.get("HoudiniLayerInfo") + if not hou_layer_info: + return + + save_path = hou_layer_info.customData.get("HoudiniSavePath", None) + if save_path: + # Unfortunately this doesn't actually resolve the full absolute path + return layer.ComputeAbsolutePath(save_path) + + +def get_referenced_layers(layer): + """Return SdfLayers for all external references of the current layer + + Args: + layer (pxr.Sdf.Layer): The Layer to retrieve the save pah data from. + + Returns: + list: List of pxr.Sdf.Layer that are external references to this layer + + """ + + layers = [] + for layer_id in layer.GetExternalReferences(): + layer = Sdf.Layer.Find(layer_id) + if not layer: + # A file may not be in memory and is + # referenced from disk. As such it cannot + # be found. We will ignore those layers. 
+ continue + + layers.append(layer) + + return layers + + +def iter_layer_recursive(layer): + """Recursively iterate all 'external' referenced layers""" + + layers = get_referenced_layers(layer) + traversed = set(layers) # Avoid recursion to itself (if even possible) + traverse = list(layers) + for layer in traverse: + + # Include children layers (recursion) + children_layers = get_referenced_layers(layer) + children_layers = [x for x in children_layers if x not in traversed] + traverse.extend(children_layers) + traversed.update(children_layers) + + yield layer + + +def get_configured_save_layers(usd_rop): + + lop_node = get_usd_rop_loppath(usd_rop) + stage = lop_node.stage(apply_viewport_overrides=False) + if not stage: + raise RuntimeError( + "No valid USD stage for ROP node: " "%s" % usd_rop.path() + ) + + root_layer = stage.GetRootLayer() + + save_layers = [] + for layer in iter_layer_recursive(root_layer): + save_path = get_layer_save_path(layer) + if save_path is not None: + save_layers.append(layer) + + return save_layers diff --git a/openpype/hosts/houdini/hooks/set_paths.py b/openpype/hosts/houdini/hooks/set_paths.py new file mode 100644 index 0000000000..cd2f98fb76 --- /dev/null +++ b/openpype/hosts/houdini/hooks/set_paths.py @@ -0,0 +1,18 @@ +from openpype.lib import PreLaunchHook +import os + + +class SetPath(PreLaunchHook): + """Set current dir to workdir. + + Hook `GlobalHostDataHook` must be executed before this hook. 
+ """ + app_groups = ["houdini"] + + def execute(self): + workdir = self.launch_context.env.get("AVALON_WORKDIR", "") + if not workdir: + self.log.warning("BUG: Workdir is not filled.") + return + + os.chdir(workdir) diff --git a/openpype/hosts/houdini/plugins/create/create_alembic_camera.py b/openpype/hosts/houdini/plugins/create/create_alembic_camera.py index adcfb48539..eef86005f5 100644 --- a/openpype/hosts/houdini/plugins/create/create_alembic_camera.py +++ b/openpype/hosts/houdini/plugins/create/create_alembic_camera.py @@ -18,28 +18,29 @@ class CreateAlembicCamera(plugin.Creator): # Set node type to create for output self.data.update({"node_type": "alembic"}) - def process(self): - instance = super(CreateAlembicCamera, self).process() + def _process(self, instance): + """Creator main entry point. + Args: + instance (hou.Node): Created Houdini instance. + + """ parms = { "filename": "$HIP/pyblish/%s.abc" % self.name, - "use_sop_path": False + "use_sop_path": False, } if self.nodes: node = self.nodes[0] path = node.path() - # Split the node path into the first root and the remainder # So we can set the root and objects parameters correctly _, root, remainder = path.split("/", 2) - parms.update({ - "root": "/" + root, - "objects": remainder - }) + parms.update({"root": "/" + root, "objects": remainder}) instance.setParms(parms) # Lock the Use Sop Path setting so the # user doesn't accidentally enable it. 
instance.parm("use_sop_path").lock(True) + instance.parm("trange").set(1) diff --git a/openpype/hosts/houdini/plugins/create/create_composite.py b/openpype/hosts/houdini/plugins/create/create_composite.py new file mode 100644 index 0000000000..e278708076 --- /dev/null +++ b/openpype/hosts/houdini/plugins/create/create_composite.py @@ -0,0 +1,44 @@ +from openpype.hosts.houdini.api import plugin + + +class CreateCompositeSequence(plugin.Creator): + """Composite ROP to Image Sequence""" + + label = "Composite (Image Sequence)" + family = "imagesequence" + icon = "gears" + + def __init__(self, *args, **kwargs): + super(CreateCompositeSequence, self).__init__(*args, **kwargs) + + # Remove the active, we are checking the bypass flag of the nodes + self.data.pop("active", None) + + # Type of ROP node to create + self.data.update({"node_type": "comp"}) + + def _process(self, instance): + """Creator main entry point. + + Args: + instance (hou.Node): Created Houdini instance. + + """ + parms = {"copoutput": "$HIP/pyblish/%s.$F4.exr" % self.name} + + if self.nodes: + node = self.nodes[0] + parms.update({"coppath": node.path()}) + + instance.setParms(parms) + + # Lock any parameters in this list + to_lock = ["prim_to_detail_pattern"] + for name in to_lock: + try: + parm = instance.parm(name) + parm.lock(True) + except AttributeError: + # missing lock pattern + self.log.debug( + "missing lock pattern {}".format(name)) diff --git a/openpype/hosts/houdini/plugins/create/create_pointcache.py b/openpype/hosts/houdini/plugins/create/create_pointcache.py index 6be854ac28..feb683edf6 100644 --- a/openpype/hosts/houdini/plugins/create/create_pointcache.py +++ b/openpype/hosts/houdini/plugins/create/create_pointcache.py @@ -17,21 +17,29 @@ class CreatePointCache(plugin.Creator): self.data.update({"node_type": "alembic"}) - def process(self): - instance = super(CreatePointCache, self).process() + def _process(self, instance): + """Creator main entry point. 
- parms = {"use_sop_path": True, # Export single node from SOP Path - "build_from_path": True, # Direct path of primitive in output - "path_attrib": "path", # Pass path attribute for output - "prim_to_detail_pattern": "cbId", - "format": 2, # Set format to Ogawa - "filename": "$HIP/pyblish/%s.abc" % self.name} + Args: + instance (hou.Node): Created Houdini instance. + + """ + parms = { + "use_sop_path": True, # Export single node from SOP Path + "build_from_path": True, # Direct path of primitive in output + "path_attrib": "path", # Pass path attribute for output + "prim_to_detail_pattern": "cbId", + "format": 2, # Set format to Ogawa + "facesets": 0, # No face sets (by default exclude them) + "filename": "$HIP/pyblish/%s.abc" % self.name, + } if self.nodes: node = self.nodes[0] parms.update({"sop_path": node.path()}) instance.setParms(parms) + instance.parm("trange").set(1) # Lock any parameters in this list to_lock = ["prim_to_detail_pattern"] diff --git a/openpype/hosts/houdini/plugins/create/create_redshift_rop.py b/openpype/hosts/houdini/plugins/create/create_redshift_rop.py new file mode 100644 index 0000000000..6949ca169b --- /dev/null +++ b/openpype/hosts/houdini/plugins/create/create_redshift_rop.py @@ -0,0 +1,70 @@ +import hou +from openpype.hosts.houdini.api import plugin + + +class CreateRedshiftROP(plugin.Creator): + """Redshift ROP""" + + label = "Redshift ROP" + family = "redshift_rop" + icon = "magic" + defaults = ["master"] + + def __init__(self, *args, **kwargs): + super(CreateRedshiftROP, self).__init__(*args, **kwargs) + + # Clear the family prefix from the subset + subset = self.data["subset"] + subset_no_prefix = subset[len(self.family):] + subset_no_prefix = subset_no_prefix[0].lower() + subset_no_prefix[1:] + self.data["subset"] = subset_no_prefix + + # Add chunk size attribute + self.data["chunkSize"] = 10 + + # Remove the active, we are checking the bypass flag of the nodes + self.data.pop("active", None) + + self.data.update({"node_type": 
"Redshift_ROP"}) + + def _process(self, instance): + """Creator main entry point. + + Args: + instance (hou.Node): Created Houdini instance. + + """ + basename = instance.name() + instance.setName(basename + "_ROP", unique_name=True) + + # Also create the linked Redshift IPR Rop + try: + ipr_rop = self.parent.createNode( + "Redshift_IPR", node_name=basename + "_IPR" + ) + except hou.OperationFailed: + raise Exception(("Cannot create Redshift node. Is Redshift " + "installed and enabled?")) + + # Move it to directly under the Redshift ROP + ipr_rop.setPosition(instance.position() + hou.Vector2(0, -1)) + + # Set the linked rop to the Redshift ROP + ipr_rop.parm("linked_rop").set(ipr_rop.relativePathTo(instance)) + + prefix = '${HIP}/render/${HIPNAME}/`chs("subset")`.${AOV}.$F4.exr' + parms = { + # Render frame range + "trange": 1, + # Redshift ROP settings + "RS_outputFileNamePrefix": prefix, + "RS_outputMultilayerMode": 0, # no multi-layered exr + "RS_outputBeautyAOVSuffix": "beauty", + } + instance.setParms(parms) + + # Lock some Avalon attributes + to_lock = ["family", "id"] + for name in to_lock: + parm = instance.parm(name) + parm.lock(True) diff --git a/openpype/hosts/houdini/plugins/create/create_usd.py b/openpype/hosts/houdini/plugins/create/create_usd.py new file mode 100644 index 0000000000..5bcb7840c0 --- /dev/null +++ b/openpype/hosts/houdini/plugins/create/create_usd.py @@ -0,0 +1,47 @@ +from openpype.hosts.houdini.api import plugin + + +class CreateUSD(plugin.Creator): + """Universal Scene Description""" + + label = "USD (experimental)" + family = "usd" + icon = "gears" + enabled = False + + def __init__(self, *args, **kwargs): + super(CreateUSD, self).__init__(*args, **kwargs) + + # Remove the active, we are checking the bypass flag of the nodes + self.data.pop("active", None) + + self.data.update({"node_type": "usd"}) + + def _process(self, instance): + """Creator main entry point. + + Args: + instance (hou.Node): Created Houdini instance. 
+ + """ + parms = { + "lopoutput": "$HIP/pyblish/%s.usd" % self.name, + "enableoutputprocessor_simplerelativepaths": False, + } + + if self.nodes: + node = self.nodes[0] + parms.update({"loppath": node.path()}) + + instance.setParms(parms) + + # Lock any parameters in this list + to_lock = [ + "fileperframe", + # Lock some Avalon attributes + "family", + "id", + ] + for name in to_lock: + parm = instance.parm(name) + parm.lock(True) diff --git a/openpype/hosts/houdini/plugins/create/create_usdrender.py b/openpype/hosts/houdini/plugins/create/create_usdrender.py new file mode 100644 index 0000000000..cb3fe3f02b --- /dev/null +++ b/openpype/hosts/houdini/plugins/create/create_usdrender.py @@ -0,0 +1,42 @@ +import hou +from openpype.hosts.houdini.api import plugin + + +class CreateUSDRender(plugin.Creator): + """USD Render ROP in /stage""" + + label = "USD Render (experimental)" + family = "usdrender" + icon = "magic" + + def __init__(self, *args, **kwargs): + super(CreateUSDRender, self).__init__(*args, **kwargs) + + self.parent = hou.node("/stage") + + # Remove the active, we are checking the bypass flag of the nodes + self.data.pop("active", None) + + self.data.update({"node_type": "usdrender"}) + + def _process(self, instance): + """Creator main entry point. + + Args: + instance (hou.Node): Created Houdini instance. 
+ + """ + parms = { + # Render frame range + "trange": 1 + } + if self.nodes: + node = self.nodes[0] + parms.update({"loppath": node.path()}) + instance.setParms(parms) + + # Lock some Avalon attributes + to_lock = ["family", "id"] + for name in to_lock: + parm = instance.parm(name) + parm.lock(True) diff --git a/openpype/hosts/houdini/plugins/create/create_vbd_cache.py b/openpype/hosts/houdini/plugins/create/create_vbd_cache.py index f8f3bbf9c3..242c21fc72 100644 --- a/openpype/hosts/houdini/plugins/create/create_vbd_cache.py +++ b/openpype/hosts/houdini/plugins/create/create_vbd_cache.py @@ -18,11 +18,18 @@ class CreateVDBCache(plugin.Creator): # Set node type to create for output self.data["node_type"] = "geometry" - def process(self): - instance = super(CreateVDBCache, self).process() + def _process(self, instance): + """Creator main entry point. - parms = {"sopoutput": "$HIP/pyblish/%s.$F4.vdb" % self.name, - "initsim": True} + Args: + instance (hou.Node): Created Houdini instance. + + """ + parms = { + "sopoutput": "$HIP/pyblish/%s.$F4.vdb" % self.name, + "initsim": True, + "trange": 1 + } if self.nodes: node = self.nodes[0] diff --git a/openpype/hosts/houdini/plugins/load/actions.py b/openpype/hosts/houdini/plugins/load/actions.py new file mode 100644 index 0000000000..6e9410ff58 --- /dev/null +++ b/openpype/hosts/houdini/plugins/load/actions.py @@ -0,0 +1,86 @@ +"""A module containing generic loader actions that will display in the Loader. 
+ +""" + +from avalon import api + + +class SetFrameRangeLoader(api.Loader): + """Set Houdini frame range""" + + families = [ + "animation", + "camera", + "pointcache", + "vdbcache", + "usd", + ] + representations = ["abc", "vdb", "usd"] + + label = "Set frame range" + order = 11 + icon = "clock-o" + color = "white" + + def load(self, context, name, namespace, data): + + import hou + + version = context["version"] + version_data = version.get("data", {}) + + start = version_data.get("startFrame", None) + end = version_data.get("endFrame", None) + + if start is None or end is None: + print( + "Skipping setting frame range because start or " + "end frame data is missing.." + ) + return + + hou.playbar.setFrameRange(start, end) + hou.playbar.setPlaybackRange(start, end) + + +class SetFrameRangeWithHandlesLoader(api.Loader): + """Set Maya frame range including pre- and post-handles""" + + families = [ + "animation", + "camera", + "pointcache", + "vdbcache", + "usd", + ] + representations = ["abc", "vdb", "usd"] + + label = "Set frame range (with handles)" + order = 12 + icon = "clock-o" + color = "white" + + def load(self, context, name, namespace, data): + + import hou + + version = context["version"] + version_data = version.get("data", {}) + + start = version_data.get("startFrame", None) + end = version_data.get("endFrame", None) + + if start is None or end is None: + print( + "Skipping setting frame range because start or " + "end frame data is missing.." 
+ ) + return + + # Include handles + handles = version_data.get("handles", 0) + start -= handles + end += handles + + hou.playbar.setFrameRange(start, end) + hou.playbar.setPlaybackRange(start, end) diff --git a/openpype/hosts/houdini/plugins/load/load_alembic.py b/openpype/hosts/houdini/plugins/load/load_alembic.py index 8fc2b6a61a..cd0f0f0d2d 100644 --- a/openpype/hosts/houdini/plugins/load/load_alembic.py +++ b/openpype/hosts/houdini/plugins/load/load_alembic.py @@ -6,9 +6,7 @@ from avalon.houdini import pipeline, lib class AbcLoader(api.Loader): """Specific loader of Alembic for the avalon.animation family""" - families = ["model", - "animation", - "pointcache"] + families = ["model", "animation", "pointcache", "gpuCache"] label = "Load Alembic" representations = ["abc"] order = -10 @@ -68,8 +66,9 @@ class AbcLoader(api.Loader): null = container.createNode("null", node_name="OUT".format(name)) null.setInput(0, normal_node) - # Set display on last node - null.setDisplayFlag(True) + # Ensure display flag is on the Alembic input node and not on the OUT + # node to optimize "debug" displaying in the viewport. 
+ alembic.setDisplayFlag(True) # Set new position for unpack node else it gets cluttered nodes = [container, alembic, unpack, normal_node, null] @@ -78,18 +77,22 @@ class AbcLoader(api.Loader): self[:] = nodes - return pipeline.containerise(node_name, - namespace, - nodes, - context, - self.__class__.__name__) + return pipeline.containerise( + node_name, + namespace, + nodes, + context, + self.__class__.__name__, + suffix="", + ) def update(self, container, representation): node = container["node"] try: - alembic_node = next(n for n in node.children() if - n.type().name() == "alembic") + alembic_node = next( + n for n in node.children() if n.type().name() == "alembic" + ) except StopIteration: self.log.error("Could not find node of type `alembic`") return diff --git a/openpype/hosts/houdini/plugins/load/load_camera.py b/openpype/hosts/houdini/plugins/load/load_camera.py index a3d67f6e5e..83246b7d97 100644 --- a/openpype/hosts/houdini/plugins/load/load_camera.py +++ b/openpype/hosts/houdini/plugins/load/load_camera.py @@ -1,8 +1,79 @@ from avalon import api - from avalon.houdini import pipeline, lib +ARCHIVE_EXPRESSION = ('__import__("_alembic_hom_extensions")' + '.alembicGetCameraDict') + + +def transfer_non_default_values(src, dest, ignore=None): + """Copy parm from src to dest. + + Because the Alembic Archive rebuilds the entire node + hierarchy on triggering "Build Hierarchy" we want to + preserve any local tweaks made by the user on the camera + for ease of use. That could be a background image, a + resolution change or even Redshift camera parameters. + + We try to do so by finding all Parms that exist on both + source and destination node, include only those that both + are not at their default value, they must be visible, + we exclude those that have the special "alembic archive" + channel expression and ignore certain Parm types. 
+ + """ + import hou + + src.updateParmStates() + + for parm in src.allParms(): + + if ignore and parm.name() in ignore: + continue + + # If destination parm does not exist, ignore.. + dest_parm = dest.parm(parm.name()) + if not dest_parm: + continue + + # Ignore values that are currently at default + if parm.isAtDefault() and dest_parm.isAtDefault(): + continue + + if not parm.isVisible(): + # Ignore hidden parameters, assume they + # are implementation details + continue + + expression = None + try: + expression = parm.expression() + except hou.OperationFailed: + # No expression present + pass + + if expression is not None and ARCHIVE_EXPRESSION in expression: + # Assume it's part of the automated connections that the + # Alembic Archive makes on loading of the camera and thus we do + # not want to transfer the expression + continue + + # Ignore folders, separators, etc. + ignore_types = { + hou.parmTemplateType.Toggle, + hou.parmTemplateType.Menu, + hou.parmTemplateType.Button, + hou.parmTemplateType.FolderSet, + hou.parmTemplateType.Separator, + hou.parmTemplateType.Label, + } + if parm.parmTemplate().type() in ignore_types: + continue + + print("Preserving attribute: %s" % parm.name()) + dest_parm.setFromParm(parm) + + class CameraLoader(api.Loader): """Specific loader of Alembic for the avalon.animation family""" @@ -30,7 +101,7 @@ class CameraLoader(api.Loader): counter = 1 asset_name = context["asset"]["name"] - namespace = namespace if namespace else asset_name + namespace = namespace or asset_name formatted = "{}_{}".format(namespace, name) if namespace else name node_name = "{0}_{1:03d}".format(formatted, counter) @@ -59,7 +130,8 @@ class CameraLoader(api.Loader): namespace, nodes, context, - self.__class__.__name__) + self.__class__.__name__, + suffix="") def update(self, container, representation): @@ -73,14 +145,40 @@ class CameraLoader(api.Loader): node.setParms({"fileName": file_path, "representation": str(representation["_id"])}) + # Store the cam 
temporarily next to the Alembic Archive + # so that we can preserve parm values the user set on it + # after build hierarchy was triggered. + old_camera = self._get_camera(node) + temp_camera = old_camera.copyTo(node.parent()) + # Rebuild node.parm("buildHierarchy").pressButton() + # Apply values to the new camera + new_camera = self._get_camera(node) + transfer_non_default_values(temp_camera, + new_camera, + # The hidden uniform scale attribute + # gets a default connection to + # "icon_scale" just skip that completely + ignore={"scale"}) + + temp_camera.destroy() + def remove(self, container): node = container["node"] node.destroy() + def _get_camera(self, node): + import hou + cameras = node.recursiveGlob("*", + filter=hou.nodeTypeFilter.ObjCamera, + include_subnets=False) + + assert len(cameras) == 1, "Camera instance must have only one camera" + return cameras[0] + def create_and_connect(self, node, node_type, name=None): """Create a node within a node which and connect it to the input @@ -93,27 +191,10 @@ class CameraLoader(api.Loader): hou.Node """ + if name: + new_node = node.createNode(node_type, node_name=name) + else: + new_node = node.createNode(node_type) - import hou - - try: - - if name: - new_node = node.createNode(node_type, node_name=name) - else: - new_node = node.createNode(node_type) - - new_node.moveToGoodPosition() - - try: - input_node = next(i for i in node.allItems() if - isinstance(i, hou.SubnetIndirectInput)) - except StopIteration: - return new_node - - new_node.setInput(0, input_node) - return new_node - - except Exception: - raise RuntimeError("Could not created node type `%s` in node `%s`" - % (node_type, node)) + new_node.moveToGoodPosition() + return new_node diff --git a/openpype/hosts/houdini/plugins/load/load_image.py b/openpype/hosts/houdini/plugins/load/load_image.py new file mode 100644 index 0000000000..4ff2777d77 --- /dev/null +++ b/openpype/hosts/houdini/plugins/load/load_image.py @@ -0,0 +1,123 @@ +import os + +from 
avalon import api +from avalon.houdini import pipeline, lib + +import hou + + +def get_image_avalon_container(): + """The COP2 files must be in a COP2 network. + + So we maintain a single entry point within AVALON_CONTAINERS, + just for ease of use. + + """ + + path = pipeline.AVALON_CONTAINERS + avalon_container = hou.node(path) + if not avalon_container: + # Let's create avalon container secretly + # but make sure the pipeline still is built the + # way we anticipate it was built, asserting it. + assert path == "/obj/AVALON_CONTAINERS" + + parent = hou.node("/obj") + avalon_container = parent.createNode( + "subnet", node_name="AVALON_CONTAINERS" + ) + + image_container = hou.node(path + "/IMAGES") + if not image_container: + image_container = avalon_container.createNode( + "cop2net", node_name="IMAGES" + ) + image_container.moveToGoodPosition() + + return image_container + + +class ImageLoader(api.Loader): + """Specific loader of Alembic for the avalon.animation family""" + + families = ["colorbleed.imagesequence"] + label = "Load Image (COP2)" + representations = ["*"] + order = -10 + + icon = "code-fork" + color = "orange" + + def load(self, context, name=None, namespace=None, data=None): + + # Format file name, Houdini only wants forward slashes + file_path = os.path.normpath(self.fname) + file_path = file_path.replace("\\", "/") + file_path = self._get_file_sequence(file_path) + + # Get the root node + parent = get_image_avalon_container() + + # Define node name + namespace = namespace if namespace else context["asset"]["name"] + node_name = "{}_{}".format(namespace, name) if namespace else name + + node = parent.createNode("file", node_name=node_name) + node.moveToGoodPosition() + + node.setParms({"filename1": file_path}) + + # Imprint it manually + data = { + "schema": "avalon-core:container-2.0", + "id": pipeline.AVALON_CONTAINER_ID, + "name": node_name, + "namespace": namespace, + "loader": str(self.__class__.__name__), + "representation": 
str(context["representation"]["_id"]), + } + + # todo: add folder="Avalon" + lib.imprint(node, data) + + return node + + def update(self, container, representation): + + node = container["node"] + + # Update the file path + file_path = api.get_representation_path(representation) + file_path = file_path.replace("\\", "/") + file_path = self._get_file_sequence(file_path) + + # Update attributes + node.setParms( + { + "filename1": file_path, + "representation": str(representation["_id"]), + } + ) + + def remove(self, container): + + node = container["node"] + + # Let's clean up the IMAGES COP2 network + # if it ends up being empty and we deleted + # the last file node. Store the parent + # before we delete the node. + parent = node.parent() + + node.destroy() + + if not parent.children(): + parent.destroy() + + def _get_file_sequence(self, root): + files = sorted(os.listdir(root)) + + first_fname = files[0] + prefix, padding, suffix = first_fname.rsplit(".", 2) + fname = ".".join([prefix, "$F{}".format(len(padding)), suffix]) + return os.path.join(root, fname).replace("\\", "/") diff --git a/openpype/hosts/houdini/plugins/load/load_usd_layer.py b/openpype/hosts/houdini/plugins/load/load_usd_layer.py new file mode 100644 index 0000000000..7483101409 --- /dev/null +++ b/openpype/hosts/houdini/plugins/load/load_usd_layer.py @@ -0,0 +1,80 @@ +from avalon import api +from avalon.houdini import pipeline, lib + + +class USDSublayerLoader(api.Loader): + """Sublayer USD file in Solaris""" + + families = [ + "colorbleed.usd", + "colorbleed.pointcache", + "colorbleed.animation", + "colorbleed.camera", + "usdCamera", + ] + label = "Sublayer USD" + representations = ["usd", "usda", "usdlc", "usdnc", "abc"] + order = 1 + + icon = "code-fork" + color = "orange" + + def load(self, context, name=None, namespace=None, data=None): + + import os + import hou + + # Format file name, Houdini only wants forward slashes + file_path = os.path.normpath(self.fname) + file_path = 
file_path.replace("\\", "/") + + # Get the root node + stage = hou.node("/stage") + + # Define node name + namespace = namespace if namespace else context["asset"]["name"] + node_name = "{}_{}".format(namespace, name) if namespace else name + + # Create USD reference + container = stage.createNode("sublayer", node_name=node_name) + container.setParms({"filepath1": file_path}) + container.moveToGoodPosition() + + # Imprint it manually + data = { + "schema": "avalon-core:container-2.0", + "id": pipeline.AVALON_CONTAINER_ID, + "name": node_name, + "namespace": namespace, + "loader": str(self.__class__.__name__), + "representation": str(context["representation"]["_id"]), + } + + # todo: add folder="Avalon" + lib.imprint(container, data) + + return container + + def update(self, container, representation): + + node = container["node"] + + # Update the file path + file_path = api.get_representation_path(representation) + file_path = file_path.replace("\\", "/") + + # Update attributes + node.setParms( + { + "filepath1": file_path, + "representation": str(representation["_id"]), + } + ) + + # Reload files + node.parm("reload").pressButton() + + def remove(self, container): + + node = container["node"] + node.destroy() diff --git a/openpype/hosts/houdini/plugins/load/load_usd_reference.py b/openpype/hosts/houdini/plugins/load/load_usd_reference.py new file mode 100644 index 0000000000..cab3cb5269 --- /dev/null +++ b/openpype/hosts/houdini/plugins/load/load_usd_reference.py @@ -0,0 +1,80 @@ +from avalon import api +from avalon.houdini import pipeline, lib + + +class USDReferenceLoader(api.Loader): + """Reference USD file in Solaris""" + + families = [ + "colorbleed.usd", + "colorbleed.pointcache", + "colorbleed.animation", + "colorbleed.camera", + "usdCamera", + ] + label = "Reference USD" + representations = ["usd", "usda", "usdlc", "usdnc", "abc"] + order = -8 + + icon = "code-fork" + color = "orange" + + def load(self, context, name=None, namespace=None, data=None): + + 
import os + import hou + + # Format file name, Houdini only wants forward slashes + file_path = os.path.normpath(self.fname) + file_path = file_path.replace("\\", "/") + + # Get the root node + stage = hou.node("/stage") + + # Define node name + namespace = namespace if namespace else context["asset"]["name"] + node_name = "{}_{}".format(namespace, name) if namespace else name + + # Create USD reference + container = stage.createNode("reference", node_name=node_name) + container.setParms({"filepath1": file_path}) + container.moveToGoodPosition() + + # Imprint it manually + data = { + "schema": "avalon-core:container-2.0", + "id": pipeline.AVALON_CONTAINER_ID, + "name": node_name, + "namespace": namespace, + "loader": str(self.__class__.__name__), + "representation": str(context["representation"]["_id"]), + } + + # todo: add folder="Avalon" + lib.imprint(container, data) + + return container + + def update(self, container, representation): + + node = container["node"] + + # Update the file path + file_path = api.get_representation_path(representation) + file_path = file_path.replace("\\", "/") + + # Update attributes + node.setParms( + { + "filepath1": file_path, + "representation": str(representation["_id"]), + } + ) + + # Reload files + node.parm("reload").pressButton() + + def remove(self, container): + + node = container["node"] + node.destroy() diff --git a/openpype/hosts/houdini/plugins/load/load_vdb.py b/openpype/hosts/houdini/plugins/load/load_vdb.py new file mode 100644 index 0000000000..5f7e400b39 --- /dev/null +++ b/openpype/hosts/houdini/plugins/load/load_vdb.py @@ -0,0 +1,110 @@ +import os +import re +from avalon import api + +from avalon.houdini import pipeline + + +class VdbLoader(api.Loader): + """Specific loader of Alembic for the avalon.animation family""" + + families = ["vdbcache"] + label = "Load VDB" + representations = ["vdb"] + order = -10 + icon = "code-fork" + color = "orange" + + def load(self, context, name=None, namespace=None, 
data=None): + + import hou + + # Get the root node + obj = hou.node("/obj") + + # Define node name + namespace = namespace if namespace else context["asset"]["name"] + node_name = "{}_{}".format(namespace, name) if namespace else name + + # Create a new geo node + container = obj.createNode("geo", node_name=node_name) + + # Remove the file node, it only loads static meshes + # Houdini 17 has removed the file node from the geo node + file_node = container.node("file1") + if file_node: + file_node.destroy() + + # Explicitly create a file node + file_node = container.createNode("file", node_name=node_name) + file_node.setParms({"file": self.format_path(self.fname)}) + + # Set display on last node + file_node.setDisplayFlag(True) + + nodes = [container, file_node] + self[:] = nodes + + return pipeline.containerise( + node_name, + namespace, + nodes, + context, + self.__class__.__name__, + suffix="", + ) + + def format_path(self, path): + """Format file path correctly for single vdb or vdb sequence.""" + if not os.path.exists(path): + raise RuntimeError("Path does not exist: %s" % path) + + # The path is either a single file or sequence in a folder. 
+ is_single_file = os.path.isfile(path) + if is_single_file: + filename = path + else: + # The path points to the publish .vdb sequence folder so we + # find the first file in there that ends with .vdb + files = sorted(os.listdir(path)) + first = next((x for x in files if x.endswith(".vdb")), None) + if first is None: + raise RuntimeError( + "Couldn't find first .vdb file of " + "sequence in: %s" % path + ) + + # Set .vdb to $F.vdb + first = re.sub(r"\.(\d+)\.vdb$", ".$F.vdb", first) + + filename = os.path.join(path, first) + + filename = os.path.normpath(filename) + filename = filename.replace("\\", "/") + + return filename + + def update(self, container, representation): + + node = container["node"] + try: + file_node = next( + n for n in node.children() if n.type().name() == "file" + ) + except StopIteration: + self.log.error("Could not find node of type `alembic`") + return + + # Update the file path + file_path = api.get_representation_path(representation) + file_path = self.format_path(file_path) + + file_node.setParms({"fileName": file_path}) + + # Update attribute + node.setParms({"representation": str(representation["_id"])}) + + def remove(self, container): + + node = container["node"] + node.destroy() diff --git a/openpype/hosts/houdini/plugins/load/show_usdview.py b/openpype/hosts/houdini/plugins/load/show_usdview.py new file mode 100644 index 0000000000..f23974094e --- /dev/null +++ b/openpype/hosts/houdini/plugins/load/show_usdview.py @@ -0,0 +1,43 @@ +from avalon import api + + +class ShowInUsdview(api.Loader): + """Open USD file in usdview""" + + families = ["colorbleed.usd"] + label = "Show in usdview" + representations = ["usd", "usda", "usdlc", "usdnc"] + order = 10 + + icon = "code-fork" + color = "white" + + def load(self, context, name=None, namespace=None, data=None): + + import os + import subprocess + + import avalon.lib as lib + + usdview = lib.which("usdview") + + filepath = os.path.normpath(self.fname) + filepath = filepath.replace("\\", 
"/") + + if not os.path.exists(filepath): + self.log.error("File does not exist: %s" % filepath) + return + + self.log.info("Start houdini variant of usdview...") + + # For now avoid some pipeline environment variables that initialize + # Avalon in Houdini as it is redundant for usdview and slows boot time + env = os.environ.copy() + env.pop("PYTHONPATH", None) + env.pop("HOUDINI_SCRIPT_PATH", None) + env.pop("HOUDINI_MENU_PATH", None) + + # Force string to avoid unicode issues + env = {str(key): str(value) for key, value in env.items()} + + subprocess.Popen([usdview, filepath, "--renderer", "GL"], env=env) diff --git a/openpype/hosts/houdini/plugins/publish/collect_active_state.py b/openpype/hosts/houdini/plugins/publish/collect_active_state.py new file mode 100644 index 0000000000..1193f0cd19 --- /dev/null +++ b/openpype/hosts/houdini/plugins/publish/collect_active_state.py @@ -0,0 +1,38 @@ +import pyblish.api + + +class CollectInstanceActiveState(pyblish.api.InstancePlugin): + """Collect default active state for instance from its node bypass state. + + This is done at the very end of the CollectorOrder so that any required + collecting of data iterating over instances (with InstancePlugin) will + actually collect the data for when the user enables the state in the UI. + Otherwise potentially required data might have skipped collecting. 
+ + """ + + order = pyblish.api.CollectorOrder + 0.299 + families = ["*"] + hosts = ["houdini"] + label = "Instance Active State" + + def process(self, instance): + + # Must have node to check for bypass state + if len(instance) == 0: + return + + # Check bypass state and reverse + node = instance[0] + active = not node.isBypassed() + + # Set instance active state + instance.data.update( + { + "active": active, + # temporarily translation of `active` to `publish` till + # issue has been resolved: + # https://github.com/pyblish/pyblish-base/issues/307 + "publish": active, + } + ) diff --git a/openpype/hosts/houdini/plugins/publish/collect_current_file.py b/openpype/hosts/houdini/plugins/publish/collect_current_file.py index b35a943833..c0b987ebbc 100644 --- a/openpype/hosts/houdini/plugins/publish/collect_current_file.py +++ b/openpype/hosts/houdini/plugins/publish/collect_current_file.py @@ -9,7 +9,7 @@ class CollectHoudiniCurrentFile(pyblish.api.ContextPlugin): order = pyblish.api.CollectorOrder - 0.5 label = "Houdini Current File" - hosts = ['houdini'] + hosts = ["houdini"] def process(self, context): """Inject the current working file""" @@ -27,8 +27,10 @@ class CollectHoudiniCurrentFile(pyblish.api.ContextPlugin): # could have existed already. We will allow it if the file exists, # but show a warning for this edge case to clarify the potential # false positive. - self.log.warning("Current file is 'untitled.hip' and we are " - "unable to detect whether the current scene is " - "saved correctly.") + self.log.warning( + "Current file is 'untitled.hip' and we are " + "unable to detect whether the current scene is " + "saved correctly." 
+ ) - context.data['currentFile'] = filepath + context.data["currentFile"] = filepath diff --git a/openpype/hosts/houdini/plugins/publish/collect_frames.py b/openpype/hosts/houdini/plugins/publish/collect_frames.py index 1d664aeaeb..ef77c3230b 100644 --- a/openpype/hosts/houdini/plugins/publish/collect_frames.py +++ b/openpype/hosts/houdini/plugins/publish/collect_frames.py @@ -6,11 +6,11 @@ from openpype.hosts.houdini.api import lib class CollectFrames(pyblish.api.InstancePlugin): - """Collect all frames which would be a resukl""" + """Collect all frames which would be saved from the ROP nodes""" order = pyblish.api.CollectorOrder label = "Collect Frames" - families = ["vdbcache"] + families = ["vdbcache", "imagesequence"] def process(self, instance): @@ -19,10 +19,17 @@ class CollectFrames(pyblish.api.InstancePlugin): output_parm = lib.get_output_parameter(ropnode) output = output_parm.eval() + _, ext = os.path.splitext(output) file_name = os.path.basename(output) - match = re.match("(\w+)\.(\d+)\.vdb", file_name) result = file_name + # Get the filename pattern match from the output + # path so we can compute all frames that would + # come out from rendering the ROP node if there + # is a frame pattern in the name + pattern = r"\w+\.(\d+)" + re.escape(ext) + match = re.match(pattern, file_name) + start_frame = instance.data.get("frameStart", None) end_frame = instance.data.get("frameEnd", None) @@ -31,10 +38,12 @@ class CollectFrames(pyblish.api.InstancePlugin): # Check if frames are bigger than 1 (file collection) # override the result if end_frame - start_frame > 1: - result = self.create_file_list(match, - int(start_frame), - int(end_frame)) + result = self.create_file_list( + match, int(start_frame), int(end_frame) + ) + # todo: `frames` currently conflicts with "explicit frames" for a + # for a custom frame list. So this should be refactored. 
instance.data.update({"frames": result}) def create_file_list(self, match, start_frame, end_frame): @@ -50,17 +59,24 @@ class CollectFrames(pyblish.api.InstancePlugin): """ + # Get the padding length + frame = match.group(1) + padding = len(frame) + + # Get the parts of the filename surrounding the frame number + # so we can put our own frame numbers in. + span = match.span(1) + prefix = match.string[: span[0]] + suffix = match.string[span[1]:] + + # Generate filenames for all frames result = [] + for i in range(start_frame, end_frame + 1): - padding = len(match.group(2)) - name = match.group(1) - padding_format = "{number:0{width}d}" + # Format frame number by the padding amount + str_frame = "{number:0{width}d}".format(number=i, width=padding) - count = start_frame - while count <= end_frame: - str_count = padding_format.format(number=count, width=padding) - file_name = "{}.{}.vdb".format(name, str_count) + file_name = prefix + str_frame + suffix result.append(file_name) - count += 1 return result diff --git a/openpype/hosts/houdini/plugins/publish/collect_inputs.py b/openpype/hosts/houdini/plugins/publish/collect_inputs.py new file mode 100644 index 0000000000..39e2737e8c --- /dev/null +++ b/openpype/hosts/houdini/plugins/publish/collect_inputs.py @@ -0,0 +1,120 @@ +import avalon.api as api +import pyblish.api + + +def collect_input_containers(nodes): + """Collect containers that contain any of the node in `nodes`. + + This will return any loaded Avalon container that contains at least one of + the nodes. As such, the Avalon container is an input for it. Or in short, + there are member nodes of that container. + + Returns: + list: Input avalon containers + + """ + + # Lookup by node ids + lookup = frozenset(nodes) + + containers = [] + host = api.registered_host() + for container in host.ls(): + + node = container["node"] + + # Usually the loaded containers don't have any complex references + # and the contained children should be all we need. 
So we disregard + # checking for .references() on the nodes. + members = set(node.allSubChildren()) + members.add(node) # include the node itself + + # If there's an intersection + if not lookup.isdisjoint(members): + containers.append(container) + + return containers + + +def iter_upstream(node): + """Yields all upstream inputs for the current node. + + This includes all `node.inputAncestors()` but also traverses through all + `node.references()` for the node itself and for any of the upstream nodes. + This method has no max-depth and will collect all upstream inputs. + + Yields: + hou.Node: The upstream nodes, including references. + + """ + + upstream = node.inputAncestors( + include_ref_inputs=True, follow_subnets=True + ) + + # Initialize process queue with the node's ancestors itself + queue = list(upstream) + collected = set(upstream) + + # Traverse upstream references for all nodes and yield them as we + # process the queue. + while queue: + upstream_node = queue.pop() + yield upstream_node + + # Find its references that are not collected yet. + references = upstream_node.references() + references = [n for n in references if n not in collected] + + queue.extend(references) + collected.update(references) + + # Include the references' ancestors that have not been collected yet. + for reference in references: + ancestors = reference.inputAncestors( + include_ref_inputs=True, follow_subnets=True + ) + ancestors = [n for n in ancestors if n not in collected] + + queue.extend(ancestors) + collected.update(ancestors) + + +class CollectUpstreamInputs(pyblish.api.InstancePlugin): + """Collect source input containers used for this publish. + + This will include `inputs` data of which loaded publishes were used in the + generation of this publish. This leaves an upstream trace to what was used + as input. 
+ + """ + + label = "Collect Inputs" + order = pyblish.api.CollectorOrder + 0.4 + hosts = ["houdini"] + + def process(self, instance): + # We can't get the "inputAncestors" directly from the ROP + # node, so we find the related output node (set in SOP/COP path) + # and include that together with its ancestors + output = instance.data.get("output_node") + + if output is None: + # If no valid output node is set then ignore it as validation + # will be checking those cases. + self.log.debug( + "No output node found, skipping " "collecting of inputs.." + ) + return + + # Collect all upstream parents + nodes = list(iter_upstream(output)) + nodes.append(output) + + # Collect containers for the given set of nodes + containers = collect_input_containers(nodes) + + inputs = [c["representation"] for c in containers] + instance.data["inputs"] = inputs + + self.log.info("Collected inputs: %s" % inputs) diff --git a/openpype/hosts/houdini/plugins/publish/collect_instances.py b/openpype/hosts/houdini/plugins/publish/collect_instances.py index 2e294face2..1b36526783 100644 --- a/openpype/hosts/houdini/plugins/publish/collect_instances.py +++ b/openpype/hosts/houdini/plugins/publish/collect_instances.py @@ -31,6 +31,13 @@ class CollectInstances(pyblish.api.ContextPlugin): def process(self, context): nodes = hou.node("/out").children() + + # Include instances in USD stage only when it exists so it + # remains backwards compatible with version before houdini 18 + stage = hou.node("/stage") + if stage: + nodes += stage.recursiveGlob("*", filter=hou.nodeTypeFilter.Rop) + for node in nodes: if not node.parm("id"): @@ -55,6 +62,8 @@ class CollectInstances(pyblish.api.ContextPlugin): # Create nice name if the instance has a frame range. 
label = data.get("name", node.name()) + label += " (%s)" % data["asset"] # include asset in name + if "frameStart" in data and "frameEnd" in data: frames = "[{frameStart} - {frameEnd}]".format(**data) label = "{} {}".format(label, frames) diff --git a/openpype/hosts/houdini/plugins/publish/collect_instances_usd_layered.py b/openpype/hosts/houdini/plugins/publish/collect_instances_usd_layered.py new file mode 100644 index 0000000000..7df5e8b6f2 --- /dev/null +++ b/openpype/hosts/houdini/plugins/publish/collect_instances_usd_layered.py @@ -0,0 +1,152 @@ +import hou +import pyblish.api +from avalon.houdini import lib +import openpype.hosts.houdini.api.usd as hou_usdlib +import openpype.lib.usdlib as usdlib + + +class CollectInstancesUsdLayered(pyblish.api.ContextPlugin): + """Collect Instances from a ROP Network and its configured layer paths. + + The output nodes of the ROP node will only be published when *any* of the + layers remain set to 'publish' by the user. + + This works differently from most of our Avalon instances in the pipeline. + As opposed to storing `pyblish.avalon.instance` as id on the node we store + `pyblish.avalon.usdlayered`. + + Additionally this instance has no need for storing family, asset, subset + or name on the nodes. Instead all information is retrieved solely from + the output filepath, which is an Avalon URI: + avalon://{asset}/{subset}.{representation} + + Each final ROP node is considered a dependency for any of the Configured + Save Path layers it sets along the way. As such, the instances shown in + the Pyblish UI are solely the configured layers. The encapsulating usd + files are generated whenever *any* of the dependencies is published. 
+ + These dependency instances are stored in: + instance.data["publishDependencies"] + + """ + + order = pyblish.api.CollectorOrder - 0.01 + label = "Collect Instances (USD Configured Layers)" + hosts = ["houdini"] + + def process(self, context): + + stage = hou.node("/stage") + if not stage: + # Likely Houdini version <18 + return + + nodes = stage.recursiveGlob("*", filter=hou.nodeTypeFilter.Rop) + for node in nodes: + + if not node.parm("id"): + continue + + if node.evalParm("id") != "pyblish.avalon.usdlayered": + continue + + has_family = node.evalParm("family") + assert has_family, "'%s' is missing 'family'" % node.name() + + self.process_node(node, context) + + def sort_by_family(instance): + """Sort by family""" + return instance.data.get("families", instance.data.get("family")) + + # Sort/grouped by family (preserving local index) + context[:] = sorted(context, key=sort_by_family) + + return context + + def process_node(self, node, context): + + # Allow a single ROP node or a full ROP network of USD ROP nodes + # to be processed as a single entry that should "live together" on + # a publish. + if node.type().name() == "ropnet": + # All rop nodes inside ROP Network + ropnodes = node.recursiveGlob("*", filter=hou.nodeTypeFilter.Rop) + else: + # A single node + ropnodes = [node] + + data = lib.read(node) + + # Don't use the explicit "colorbleed.usd.layered" family for publishing + # instead use the "colorbleed.usd" family to integrate. + data["publishFamilies"] = ["colorbleed.usd"] + + # For now group ALL of them into USD Layer subset group + # Allow this subset to be grouped into a USD Layer on creation + data["subsetGroup"] = "USD Layer" + + instances = list() + dependencies = [] + for ropnode in ropnodes: + + # Create a dependency instance per ROP Node. 
+ lopoutput = ropnode.evalParm("lopoutput") + dependency_save_data = self.get_save_data(lopoutput) + dependency = context.create_instance(dependency_save_data["name"]) + dependency.append(ropnode) + dependency.data.update(data) + dependency.data.update(dependency_save_data) + dependency.data["family"] = "colorbleed.usd.dependency" + dependency.data["optional"] = False + dependencies.append(dependency) + + # Hide the dependency instance from the context + context.pop() + + # Get all configured layers for this USD ROP node + # and create a Pyblish instance for each one + layers = hou_usdlib.get_configured_save_layers(ropnode) + for layer in layers: + save_path = hou_usdlib.get_layer_save_path(layer) + save_data = self.get_save_data(save_path) + if not save_data: + continue + self.log.info(save_path) + + instance = context.create_instance(save_data["name"]) + instance[:] = [node] + + # Set the instance data + instance.data.update(data) + instance.data.update(save_data) + instance.data["usdLayer"] = layer + + # Don't allow the Pyblish `instanceToggled` we have installed + # to set this node to bypass. 
+ instance.data["_allowToggleBypass"] = False + + instances.append(instance) + + # Store the collected ROP node dependencies + self.log.debug("Collected dependencies: %s" % (dependencies,)) + for instance in instances: + instance.data["publishDependencies"] = dependencies + + def get_save_data(self, save_path): + + # Resolve Avalon URI + uri_data = usdlib.parse_avalon_uri(save_path) + if not uri_data: + self.log.warning("Non Avalon URI Layer Path: %s" % save_path) + return {} + + # Collect asset + subset from URI + name = "{subset} ({asset})".format(**uri_data) + fname = "{asset}_{subset}.{ext}".format(**uri_data) + + data = dict(uri_data) + data["usdSavePath"] = save_path + data["usdFilename"] = fname + data["name"] = name + return data diff --git a/openpype/hosts/houdini/plugins/publish/collect_output_node.py b/openpype/hosts/houdini/plugins/publish/collect_output_node.py index c0587d5336..938ee81cc3 100644 --- a/openpype/hosts/houdini/plugins/publish/collect_output_node.py +++ b/openpype/hosts/houdini/plugins/publish/collect_output_node.py @@ -2,13 +2,20 @@ import pyblish.api class CollectOutputSOPPath(pyblish.api.InstancePlugin): - """Collect the out node's SOP Path value.""" + """Collect the out node's SOP/COP Path value.""" order = pyblish.api.CollectorOrder - families = ["pointcache", - "vdbcache"] + families = [ + "pointcache", + "camera", + "vdbcache", + "imagesequence", + "usd", + "usdrender", + ] + hosts = ["houdini"] - label = "Collect Output SOP Path" + label = "Collect Output Node Path" def process(self, instance): @@ -17,12 +24,44 @@ class CollectOutputSOPPath(pyblish.api.InstancePlugin): node = instance[0] # Get sop path - if node.type().name() == "alembic": - sop_path_parm = "sop_path" + node_type = node.type().name() + if node_type == "geometry": + out_node = node.parm("soppath").evalAsNode() + + elif node_type == "alembic": + + # Alembic can switch between using SOP Path or object + if node.parm("use_sop_path").eval(): + out_node = 
node.parm("sop_path").evalAsNode() + else: + root = node.parm("root").eval() + objects = node.parm("objects").eval() + path = root + "/" + objects + out_node = hou.node(path) + + elif node_type == "comp": + out_node = node.parm("coppath").evalAsNode() + + elif node_type == "usd" or node_type == "usdrender": + out_node = node.parm("loppath").evalAsNode() + + elif node_type == "usd_rop" or node_type == "usdrender_rop": + # Inside Solaris e.g. /stage (not in ROP context) + # When incoming connection is present it takes it directly + inputs = node.inputs() + if inputs: + out_node = inputs[0] + else: + out_node = node.parm("loppath").evalAsNode() + else: - sop_path_parm = "soppath" + raise ValueError( + "ROP node type '%s' is" " not supported." % node_type + ) - sop_path = node.parm(sop_path_parm).eval() - out_node = hou.node(sop_path) + if not out_node: + self.log.warning("No output node collected.") + return + self.log.debug("Output node: %s" % out_node.path()) instance.data["output_node"] = out_node diff --git a/openpype/hosts/houdini/plugins/publish/collect_redshift_rop.py b/openpype/hosts/houdini/plugins/publish/collect_redshift_rop.py new file mode 100644 index 0000000000..72b554b567 --- /dev/null +++ b/openpype/hosts/houdini/plugins/publish/collect_redshift_rop.py @@ -0,0 +1,135 @@ +import re +import os + +import hou +import pyblish.api + + +def get_top_referenced_parm(parm): + + processed = set() # disallow infinite loop + while True: + if parm.path() in processed: + raise RuntimeError("Parameter references result in cycle.") + + processed.add(parm.path()) + + ref = parm.getReferencedParm() + if ref.path() == parm.path(): + # It returns itself when it doesn't reference + # another parameter + return ref + else: + parm = ref + + +def evalParmNoFrame(node, parm, pad_character="#"): + + parameter = node.parm(parm) + assert parameter, "Parameter does not exist: %s.%s" % (node, parm) + + # If the parameter has a parameter reference, then get that + # parameter 
instead as otherwise `unexpandedString()` fails. + parameter = get_top_referenced_parm(parameter) + + # Substitute out the frame numbering with padded characters + try: + raw = parameter.unexpandedString() + except hou.Error as exc: + print("Failed: %s" % parameter) + raise RuntimeError(exc) + + def replace(match): + padding = 1 + n = match.group(2) + if n and int(n): + padding = int(n) + return pad_character * padding + + expression = re.sub(r"(\$F([0-9]*))", replace, raw) + + with hou.ScriptEvalContext(parameter): + return hou.expandStringAtFrame(expression, 0) + + +class CollectRedshiftROPRenderProducts(pyblish.api.InstancePlugin): + """Collect USD Render Products + + Collects the instance.data["files"] for the render products. + + Provides: + instance -> files + + """ + + label = "Redshift ROP Render Products" + order = pyblish.api.CollectorOrder + 0.4 + hosts = ["houdini"] + families = ["redshift_rop"] + + def process(self, instance): + + rop = instance[0] + + # Collect chunkSize + chunk_size_parm = rop.parm("chunkSize") + if chunk_size_parm: + chunk_size = int(chunk_size_parm.eval()) + instance.data["chunkSize"] = chunk_size + self.log.debug("Chunk Size: %s" % chunk_size) + + default_prefix = evalParmNoFrame(rop, "RS_outputFileNamePrefix") + beauty_suffix = rop.evalParm("RS_outputBeautyAOVSuffix") + render_products = [] + + # Default beauty AOV + beauty_product = self.get_render_product_name( + prefix=default_prefix, suffix=beauty_suffix + ) + render_products.append(beauty_product) + + num_aovs = rop.evalParm("RS_aov") + for index in range(num_aovs): + i = index + 1 + + # Skip disabled AOVs + if not rop.evalParm("RS_aovEnable_%s" % i): + continue + + aov_suffix = rop.evalParm("RS_aovSuffix_%s" % i) + aov_prefix = evalParmNoFrame(rop, "RS_aovCustomPrefix_%s" % i) + if not aov_prefix: + aov_prefix = default_prefix + + aov_product = self.get_render_product_name(aov_prefix, aov_suffix) + render_products.append(aov_product) + + for product in render_products: + 
self.log.debug("Found render product: %s" % product) + + filenames = list(render_products) + instance.data["files"] = filenames + + def get_render_product_name(self, prefix, suffix): + """Return the output filename using the AOV prefix and suffix""" + + # When AOV is explicitly defined in prefix we just swap it out + # directly with the AOV suffix to embed it. + # Note: ${AOV} seems to be evaluated in the parameter as %AOV% + has_aov_in_prefix = "%AOV%" in prefix + if has_aov_in_prefix: + # It seems that when some special separator characters are present + # before the %AOV% token that Redshift will secretly remove it if + # there is no suffix for the current product, for example: + # foo_%AOV% -> foo.exr + pattern = "%AOV%" if suffix else "[._-]?%AOV%" + product_name = re.sub(pattern, suffix, prefix, flags=re.IGNORECASE) + else: + if suffix: + # Add ".{suffix}" before the extension + prefix_base, ext = os.path.splitext(prefix) + product_name = prefix_base + "." + suffix + ext + else: + product_name = prefix + + return product_name diff --git a/openpype/hosts/houdini/plugins/publish/collect_remote_publish.py b/openpype/hosts/houdini/plugins/publish/collect_remote_publish.py new file mode 100644 index 0000000000..3ae16efe56 --- /dev/null +++ b/openpype/hosts/houdini/plugins/publish/collect_remote_publish.py @@ -0,0 +1,30 @@ +import pyblish.api +import openpype.api + +import hou +from avalon.houdini import lib + + +class CollectRemotePublishSettings(pyblish.api.ContextPlugin): + """Collect custom settings of the Remote Publish node.""" + + order = pyblish.api.CollectorOrder + families = ["*"] + hosts = ["houdini"] + targets = ["deadline"] + label = "Remote Publish Submission Settings" + actions = [openpype.api.RepairAction] + + def process(self, context): + + node = hou.node("/out/REMOTE_PUBLISH") + if not node: + return + + attributes = lib.read(node) + + # Debug the settings we have collected + for key, value in sorted(attributes.items()): + 
self.log.debug("Collected %s: %s" % (key, value)) + + context.data.update(attributes) diff --git a/openpype/hosts/houdini/plugins/publish/collect_render_products.py b/openpype/hosts/houdini/plugins/publish/collect_render_products.py new file mode 100644 index 0000000000..d7163b43c0 --- /dev/null +++ b/openpype/hosts/houdini/plugins/publish/collect_render_products.py @@ -0,0 +1,133 @@ +import re +import os + +import hou +import pxr.UsdRender + +import pyblish.api + + +def get_var_changed(variable=None): + """Return changed variables and operators that use it. + + Note: `varchange` hscript states that it forces a recook of the nodes + that use Variables. That was tested in Houdini + 18.0.391. + + Args: + variable (str, Optional): A specific variable to query the operators + for. When None is provided it will return all variables that have + had recent changes and require a recook. Defaults to None. + + Returns: + dict: Variable that changed with the operators that use it. + + """ + cmd = "varchange -V" + if variable: + cmd += " {0}".format(variable) + output, _ = hou.hscript(cmd) + + changed = {} + for line in output.split("Variable: "): + if not line.strip(): + continue + + split = line.split() + var = split[0] + operators = split[1:] + changed[var] = operators + + return changed + + +class CollectRenderProducts(pyblish.api.InstancePlugin): + """Collect USD Render Products.""" + + label = "Collect Render Products" + order = pyblish.api.CollectorOrder + 0.4 + hosts = ["houdini"] + families = ["usdrender"] + + def process(self, instance): + + node = instance.data.get("output_node") + if not node: + rop_path = instance[0].path() + raise RuntimeError( + "No output node found. Make sure to connect an " + "input to the USD ROP: %s" % rop_path + ) + + # Workaround Houdini 18.0.391 bug where $HIPNAME doesn't automatically + # update after scene save. 
+ if hou.applicationVersion() == (18, 0, 391): + self.log.debug( + "Checking for recook to workaround " "$HIPNAME refresh bug..." + ) + changed = get_var_changed("HIPNAME").get("HIPNAME") + if changed: + self.log.debug("Recooking for $HIPNAME refresh bug...") + for operator in changed: + hou.node(operator).cook(force=True) + + # Make sure to recook any 'cache' nodes in the history chain + chain = [node] + chain.extend(node.inputAncestors()) + for input_node in chain: + if input_node.type().name() == "cache": + input_node.cook(force=True) + + stage = node.stage() + + filenames = [] + for prim in stage.Traverse(): + + if not prim.IsA(pxr.UsdRender.Product): + continue + + # Get Render Product Name + product = pxr.UsdRender.Product(prim) + + # We force taking it from any random time sample as opposed to + # "default" that the USD Api falls back to since that won't return + # time sampled values if they were set per time sample. + name = product.GetProductNameAttr().Get(time=0) + dirname = os.path.dirname(name) + basename = os.path.basename(name) + + dollarf_regex = r"(\$F([0-9]?))" + frame_regex = r"^(.+\.)([0-9]+)(\.[a-zA-Z]+)$" + if re.match(dollarf_regex, basename): + # TODO: Confirm this actually is allowed USD stages and HUSK + # Substitute $F + def replace(match): + """Replace $F4 with padded #.""" + padding = int(match.group(2)) if match.group(2) else 1 + return "#" * padding + + filename_base = re.sub(dollarf_regex, replace, basename) + filename = os.path.join(dirname, filename_base) + else: + # Substitute basename.0001.ext + def replace(match): + prefix, frame, ext = match.groups() + padding = "#" * len(frame) + return prefix + padding + ext + + filename_base = re.sub(frame_regex, replace, basename) + filename = os.path.join(dirname, filename_base) + filename = filename.replace("\\", "/") + + assert "#" in filename, ( + "Couldn't resolve render product name " + "with frame number: %s" % name + ) + + filenames.append(filename) + + prim_path = 
str(prim.GetPath()) + self.log.info("Collected %s name: %s" % (prim_path, filename)) + + # Filenames for Deadline + instance.data["files"] = filenames diff --git a/openpype/hosts/houdini/plugins/publish/collect_usd_bootstrap.py b/openpype/hosts/houdini/plugins/publish/collect_usd_bootstrap.py new file mode 100644 index 0000000000..66dfba64df --- /dev/null +++ b/openpype/hosts/houdini/plugins/publish/collect_usd_bootstrap.py @@ -0,0 +1,110 @@ +import pyblish.api + +from avalon import io +import openpype.lib.usdlib as usdlib + + +class CollectUsdBootstrap(pyblish.api.InstancePlugin): + """Collect special Asset/Shot bootstrap instances if those are needed. + + Some specific subsets are intended to be part of the default structure + of an "Asset" or "Shot" in our USD pipeline. For example, for an Asset + we layer a Model and Shade USD file over each other and expose that in + a Asset USD file, ready to use. + + On the first publish of any of the components of a Asset or Shot the + missing pieces are bootstrapped and generated in the pipeline too. This + means that on the very first publish of your model the Asset USD file + will exist too. + + """ + + order = pyblish.api.CollectorOrder + 0.35 + label = "Collect USD Bootstrap" + hosts = ["houdini"] + families = ["usd", "usd.layered"] + + def process(self, instance): + + # Detect whether the current subset is a subset in a pipeline + def get_bootstrap(instance): + instance_subset = instance.data["subset"] + for name, layers in usdlib.PIPELINE.items(): + if instance_subset in set(layers): + return name # e.g. 
"asset" + break + else: + return + + bootstrap = get_bootstrap(instance) + if bootstrap: + self.add_bootstrap(instance, bootstrap) + + # Check if any of the dependencies requires a bootstrap + for dependency in instance.data.get("publishDependencies", list()): + bootstrap = get_bootstrap(dependency) + if bootstrap: + self.add_bootstrap(dependency, bootstrap) + + def add_bootstrap(self, instance, bootstrap): + + self.log.debug("Add bootstrap for: %s" % bootstrap) + + asset = io.find_one({"name": instance.data["asset"], "type": "asset"}) + assert asset, "Asset must exist: %s" % asset + + # Check which are not about to be created and don't exist yet + required = {"shot": ["usdShot"], "asset": ["usdAsset"]}.get(bootstrap) + + require_all_layers = instance.data.get("requireAllLayers", False) + if require_all_layers: + # USD files load fine in usdview and Houdini even when layered or + # referenced files do not exist. So by default we don't require + # the layers to exist. + layers = usdlib.PIPELINE.get(bootstrap) + if layers: + required += list(layers) + + self.log.debug("Checking required bootstrap: %s" % required) + for subset in required: + if self._subset_exists(instance, subset, asset): + continue + + self.log.debug( + "Creating {0} USD bootstrap: {1} {2}".format( + bootstrap, asset["name"], subset + ) + ) + + new = instance.context.create_instance(subset) + new.data["subset"] = subset + new.data["label"] = "{0} ({1})".format(subset, asset["name"]) + new.data["family"] = "usd.bootstrap" + new.data["comment"] = "Automated bootstrap USD file." 
+ new.data["publishFamilies"] = ["usd"] + + # Do not allow the user to toggle this instance + new.data["optional"] = False + + # Copy some data from the instance for which we bootstrap + for key in ["asset"]: + new.data[key] = instance.data[key] + + def _subset_exists(self, instance, subset, asset): + """Return whether subset exists in current context or in database.""" + # Allow it to be created during this publish session + context = instance.context + for inst in context: + if ( + inst.data["subset"] == subset + and inst.data["asset"] == asset["name"] + ): + return True + + # Or, if they already exist in the database we can + # skip them too. + return bool( + io.find_one( + {"name": subset, "type": "subset", "parent": asset["_id"]} + ) + ) diff --git a/openpype/hosts/houdini/plugins/publish/collect_usd_layers.py b/openpype/hosts/houdini/plugins/publish/collect_usd_layers.py new file mode 100644 index 0000000000..8be6ead1b1 --- /dev/null +++ b/openpype/hosts/houdini/plugins/publish/collect_usd_layers.py @@ -0,0 +1,61 @@ +import os + +import pyblish.api +import openpype.hosts.houdini.api.usd as usdlib + + +class CollectUsdLayers(pyblish.api.InstancePlugin): + """Collect the USD Layers that have configured save paths.""" + + order = pyblish.api.CollectorOrder + 0.35 + label = "Collect USD Layers" + hosts = ["houdini"] + families = ["usd"] + + def process(self, instance): + + output = instance.data.get("output_node") + if not output: + self.log.debug("No output node found..") + return + + rop_node = instance[0] + + save_layers = [] + for layer in usdlib.get_configured_save_layers(rop_node): + + info = layer.rootPrims.get("HoudiniLayerInfo") + save_path = info.customData.get("HoudiniSavePath") + creator = info.customData.get("HoudiniCreatorNode") + + self.log.debug("Found configured save path: " + "%s -> %s" % (layer, save_path)) + + # Log node that configured this save path + if creator: + self.log.debug("Created by: %s" % creator) + + save_layers.append((layer, 
save_path)) + + # Store on the instance + instance.data["usdConfiguredSavePaths"] = save_layers + + # Create configured layer instances so User can disable updating + # specific configured layers for publishing. + context = instance.context + for layer, save_path in save_layers: + name = os.path.basename(save_path) + label = "{0} -> {1}".format(instance.data["name"], name) + layer_inst = context.create_instance(name) + + family = "colorbleed.usdlayer" + layer_inst.data["family"] = family + layer_inst.data["families"] = [family] + layer_inst.data["subset"] = "__stub__" + layer_inst.data["label"] = label + layer_inst.data["asset"] = instance.data["asset"] + layer_inst.append(instance[0]) # include same USD ROP + layer_inst.append((layer, save_path)) # include layer data + + # Allow this subset to be grouped into a USD Layer on creation + layer_inst.data["subsetGroup"] = "USD Layer" diff --git a/openpype/hosts/houdini/plugins/publish/collect_workscene_fps.py b/openpype/hosts/houdini/plugins/publish/collect_workscene_fps.py index c145eea519..6f6cc978cd 100644 --- a/openpype/hosts/houdini/plugins/publish/collect_workscene_fps.py +++ b/openpype/hosts/houdini/plugins/publish/collect_workscene_fps.py @@ -3,7 +3,7 @@ import hou class CollectWorksceneFPS(pyblish.api.ContextPlugin): - """Get the FPS of the work scene""" + """Get the FPS of the work scene.""" label = "Workscene FPS" order = pyblish.api.CollectorOrder diff --git a/openpype/hosts/houdini/plugins/publish/extract_alembic.py b/openpype/hosts/houdini/plugins/publish/extract_alembic.py index b251ebdc90..83b790407f 100644 --- a/openpype/hosts/houdini/plugins/publish/extract_alembic.py +++ b/openpype/hosts/houdini/plugins/publish/extract_alembic.py @@ -2,6 +2,7 @@ import os import pyblish.api import openpype.api +from openpype.hosts.houdini.api.lib import render_rop class ExtractAlembic(openpype.api.Extractor): @@ -13,29 +14,20 @@ class ExtractAlembic(openpype.api.Extractor): def process(self, instance): - import hou - 
ropnode = instance[0] # Get the filename from the filename parameter output = ropnode.evalParm("filename") staging_dir = os.path.dirname(output) - # instance.data["stagingDir"] = staging_dir + instance.data["stagingDir"] = staging_dir file_name = os.path.basename(output) # We run the render self.log.info("Writing alembic '%s' to '%s'" % (file_name, staging_dir)) - try: - ropnode.render() - except hou.Error as exc: - # The hou.Error is not inherited from a Python Exception class, - # so we explicitly capture the houdini error, otherwise pyblish - # will remain hanging. - import traceback - traceback.print_exc() - raise RuntimeError("Render failed: {0}".format(exc)) + + render_rop(ropnode) if "representations" not in instance.data: instance.data["representations"] = [] diff --git a/openpype/hosts/houdini/plugins/publish/extract_composite.py b/openpype/hosts/houdini/plugins/publish/extract_composite.py new file mode 100644 index 0000000000..f300b6d28d --- /dev/null +++ b/openpype/hosts/houdini/plugins/publish/extract_composite.py @@ -0,0 +1,35 @@ +import os + +import pyblish.api +import openpype.api + +from openpype.hosts.houdini.api.lib import render_rop + + +class ExtractComposite(openpype.api.Extractor): + + order = pyblish.api.ExtractorOrder + label = "Extract Composite (Image Sequence)" + hosts = ["houdini"] + families = ["imagesequence"] + + def process(self, instance): + + ropnode = instance[0] + + # Get the filename from the copoutput parameter + # `.evalParm(parameter)` will make sure all tokens are resolved + output = ropnode.evalParm("copoutput") + staging_dir = os.path.dirname(output) + instance.data["stagingDir"] = staging_dir + file_name = os.path.basename(output) + + self.log.info("Writing comp '%s' to '%s'" % (file_name, staging_dir)) + + render_rop(ropnode) + + if "files" not in instance.data: + instance.data["files"] = [] + + frames = instance.data["frames"] + instance.data["files"].append(frames) diff --git 
a/openpype/hosts/houdini/plugins/publish/extract_usd.py b/openpype/hosts/houdini/plugins/publish/extract_usd.py new file mode 100644 index 0000000000..0fc26900fb --- /dev/null +++ b/openpype/hosts/houdini/plugins/publish/extract_usd.py @@ -0,0 +1,42 @@ +import os + +import pyblish.api +import openpype.api +from openpype.hosts.houdini.api.lib import render_rop + + +class ExtractUSD(openpype.api.Extractor): + + order = pyblish.api.ExtractorOrder + label = "Extract USD" + hosts = ["houdini"] + families = ["usd", + "usdModel", + "usdSetDress"] + + def process(self, instance): + + ropnode = instance[0] + + # Get the filename from the filename parameter + output = ropnode.evalParm("lopoutput") + staging_dir = os.path.dirname(output) + instance.data["stagingDir"] = staging_dir + file_name = os.path.basename(output) + + self.log.info("Writing USD '%s' to '%s'" % (file_name, staging_dir)) + + render_rop(ropnode) + + assert os.path.exists(output), "Output does not exist: %s" % output + + if "representations" not in instance.data: + instance.data["representations"] = [] + + representation = { + 'name': 'usd', + 'ext': 'usd', + 'files': file_name, + "stagingDir": staging_dir, + } + instance.data["representations"].append(representation) diff --git a/openpype/hosts/houdini/plugins/publish/extract_usd_layered.py b/openpype/hosts/houdini/plugins/publish/extract_usd_layered.py new file mode 100644 index 0000000000..645bd05d4b --- /dev/null +++ b/openpype/hosts/houdini/plugins/publish/extract_usd_layered.py @@ -0,0 +1,315 @@ +import os +import contextlib +import hou +import sys +from collections import deque + +import pyblish.api +import openpype.api + +import openpype.hosts.houdini.api.usd as hou_usdlib +from openpype.hosts.houdini.api.lib import render_rop + + +class ExitStack(object): + """Context manager for dynamic management of a stack of exit callbacks. 
+ + For example: + + with ExitStack() as stack: + files = [stack.enter_context(open(fname)) for fname in filenames] + # All opened files will automatically be closed at the end of + # the with statement, even if attempts to open files later + # in the list raise an exception + + """ + + def __init__(self): + self._exit_callbacks = deque() + + def pop_all(self): + """Preserve the context stack by transferring it to a new instance""" + new_stack = type(self)() + new_stack._exit_callbacks = self._exit_callbacks + self._exit_callbacks = deque() + return new_stack + + def _push_cm_exit(self, cm, cm_exit): + """Helper to correctly register callbacks to __exit__ methods""" + + def _exit_wrapper(*exc_details): + return cm_exit(cm, *exc_details) + + _exit_wrapper.__self__ = cm + self.push(_exit_wrapper) + + def push(self, exit): + """Registers a callback with the standard __exit__ method signature. + + Can suppress exceptions the same way __exit__ methods can. + + Also accepts any object with an __exit__ method (registering a call + to the method instead of the object itself) + + """ + # We use an unbound method rather than a bound method to follow + # the standard lookup behaviour for special methods + _cb_type = type(exit) + try: + exit_method = _cb_type.__exit__ + except AttributeError: + # Not a context manager, so assume its a callable + self._exit_callbacks.append(exit) + else: + self._push_cm_exit(exit, exit_method) + return exit # Allow use as a decorator + + def callback(self, callback, *args, **kwds): + """Registers an arbitrary callback and arguments. + + Cannot suppress exceptions. 
+ """ + + def _exit_wrapper(exc_type, exc, tb): + callback(*args, **kwds) + + # We changed the signature, so using @wraps is not appropriate, but + # setting __wrapped__ may still help with introspection + _exit_wrapper.__wrapped__ = callback + self.push(_exit_wrapper) + return callback # Allow use as a decorator + + def enter_context(self, cm): + """Enters the supplied context manager + + If successful, also pushes its __exit__ method as a callback and + returns the result of the __enter__ method. + """ + # We look up the special methods on the type to match the with + # statement + _cm_type = type(cm) + _exit = _cm_type.__exit__ + result = _cm_type.__enter__(cm) + self._push_cm_exit(cm, _exit) + return result + + def close(self): + """Immediately unwind the context stack""" + self.__exit__(None, None, None) + + def __enter__(self): + return self + + def __exit__(self, *exc_details): + # We manipulate the exception state so it behaves as though + # we were actually nesting multiple with statements + frame_exc = sys.exc_info()[1] + + def _fix_exception_context(new_exc, old_exc): + while 1: + exc_context = new_exc.__context__ + if exc_context in (None, frame_exc): + break + new_exc = exc_context + new_exc.__context__ = old_exc + + # Callbacks are invoked in LIFO order to match the behaviour of + # nested context managers + suppressed_exc = False + while self._exit_callbacks: + cb = self._exit_callbacks.pop() + try: + if cb(*exc_details): + suppressed_exc = True + exc_details = (None, None, None) + except Exception: + new_exc_details = sys.exc_info() + # simulate the stack of exceptions by setting the context + _fix_exception_context(new_exc_details[1], exc_details[1]) + if not self._exit_callbacks: + raise + exc_details = new_exc_details + return suppressed_exc + + +@contextlib.contextmanager +def parm_values(overrides): + """Override Parameter values during the context.""" + + originals = [] + try: + for parm, value in overrides: + originals.append((parm, 
parm.eval())) + parm.set(value) + yield + finally: + for parm, value in originals: + # Parameter might not exist anymore so first + # check whether it's still valid + if hou.parm(parm.path()): + parm.set(value) + + +class ExtractUSDLayered(openpype.api.Extractor): + + order = pyblish.api.ExtractorOrder + label = "Extract Layered USD" + hosts = ["houdini"] + families = ["usdLayered", "usdShade"] + + # Force Output Processors so it will always save any file + # into our unique staging directory with processed Avalon paths + output_processors = ["avalon_uri_processor", "stagingdir_processor"] + + def process(self, instance): + + self.log.info("Extracting: %s" % instance) + + staging_dir = self.staging_dir(instance) + fname = instance.data.get("usdFilename") + + # The individual rop nodes are collected as "publishDependencies" + dependencies = instance.data["publishDependencies"] + ropnodes = [dependency[0] for dependency in dependencies] + assert all( + node.type().name() in {"usd", "usd_rop"} for node in ropnodes + ) + + # Main ROP node, either a USD Rop or ROP network with + # multiple USD ROPs + node = instance[0] + + # Collect any output dependencies that have not been processed yet + # during extraction of other instances + outputs = [fname] + active_dependencies = [ + dep + for dep in dependencies + if dep.data.get("publish", True) + and not dep.data.get("_isExtracted", False) + ] + for dependency in active_dependencies: + outputs.append(dependency.data["usdFilename"]) + + pattern = r"*[/\]{0} {0}" + save_pattern = " ".join(pattern.format(fname) for fname in outputs) + + # Run a stack of context managers before we start the render to + # temporarily adjust USD ROP settings for our publish output. + rop_overrides = { + # This sets staging directory on the processor to force our + # output files to end up in the Staging Directory. 
+ "stagingdiroutputprocessor_stagingDir": staging_dir, + # Force the Avalon URI Output Processor to refactor paths for + # references, payloads and layers to published paths. + "avalonurioutputprocessor_use_publish_paths": True, + # Only write out specific USD files based on our outputs + "savepattern": save_pattern, + } + overrides = list() + with ExitStack() as stack: + + for ropnode in ropnodes: + manager = hou_usdlib.outputprocessors( + ropnode, + processors=self.output_processors, + disable_all_others=True, + ) + stack.enter_context(manager) + + # Some of these must be added after we enter the output + # processor context manager because those parameters only + # exist when the Output Processor is added to the ROP node. + for name, value in rop_overrides.items(): + parm = ropnode.parm(name) + assert parm, "Parm not found: %s.%s" % ( + ropnode.path(), + name, + ) + overrides.append((parm, value)) + + stack.enter_context(parm_values(overrides)) + + # Render the single ROP node or the full ROP network + render_rop(node) + + # Assert all output files in the Staging Directory + for output_fname in outputs: + path = os.path.join(staging_dir, output_fname) + assert os.path.exists(path), "Output file must exist: %s" % path + + # Set up the dependency for publish if they have new content + # compared to previous publishes + for dependency in active_dependencies: + dependency_fname = dependency.data["usdFilename"] + + filepath = os.path.join(staging_dir, dependency_fname) + similar = self._compare_with_latest_publish(dependency, filepath) + if similar: + # Deactivate this dependency + self.log.debug( + "Dependency matches previous publish version," + " deactivating %s for publish" % dependency + ) + dependency.data["publish"] = False + else: + self.log.debug("Extracted dependency: %s" % dependency) + # This dependency should be published + dependency.data["files"] = [dependency_fname] + dependency.data["stagingDir"] = staging_dir + dependency.data["_isExtracted"] = True 
+ + # Store the created files on the instance + if "files" not in instance.data: + instance.data["files"] = [] + instance.data["files"].append(fname) + + def _compare_with_latest_publish(self, dependency, new_file): + + from avalon import api, io + import filecmp + + _, ext = os.path.splitext(new_file) + + # Compare this dependency with the latest published version + # to detect whether we should make this into a new publish + # version. If not, skip it. + asset = io.find_one( + {"name": dependency.data["asset"], "type": "asset"} + ) + subset = io.find_one( + { + "name": dependency.data["subset"], + "type": "subset", + "parent": asset["_id"], + } + ) + if not subset: + # Subset doesn't exist yet. Definitely new file + self.log.debug("No existing subset..") + return False + + version = io.find_one( + {"type": "version", "parent": subset["_id"], }, + sort=[("name", -1)] + ) + if not version: + self.log.debug("No existing version..") + return False + + representation = io.find_one( + { + "name": ext.lstrip("."), + "type": "representation", + "parent": version["_id"], + } + ) + if not representation: + self.log.debug("No existing representation..") + return False + + old_file = api.get_representation_path(representation) + if not os.path.exists(old_file): + return False + + return filecmp.cmp(old_file, new_file) diff --git a/openpype/hosts/houdini/plugins/publish/extract_vdb_cache.py b/openpype/hosts/houdini/plugins/publish/extract_vdb_cache.py index f480fe6236..78794acc97 100644 --- a/openpype/hosts/houdini/plugins/publish/extract_vdb_cache.py +++ b/openpype/hosts/houdini/plugins/publish/extract_vdb_cache.py @@ -2,6 +2,7 @@ import os import pyblish.api import openpype.api +from openpype.hosts.houdini.api.lib import render_rop class ExtractVDBCache(openpype.api.Extractor): @@ -13,8 +14,6 @@ class ExtractVDBCache(openpype.api.Extractor): def process(self, instance): - import hou - ropnode = instance[0] # Get the filename from the filename parameter @@ -25,15 +24,8 @@ 
class ExtractVDBCache(openpype.api.Extractor): file_name = os.path.basename(sop_output) self.log.info("Writing VDB '%s' to '%s'" % (file_name, staging_dir)) - try: - ropnode.render() - except hou.Error as exc: - # The hou.Error is not inherited from a Python Exception class, - # so we explicitly capture the houdini error, otherwise pyblish - # will remain hanging. - import traceback - traceback.print_exc() - raise RuntimeError("Render failed: {0}".format(exc)) + + render_rop(ropnode) output = instance.data["frames"] @@ -41,9 +33,9 @@ class ExtractVDBCache(openpype.api.Extractor): instance.data["representations"] = [] representation = { - 'name': 'mov', - 'ext': 'mov', - 'files': output, + "name": "vdb", + "ext": "vdb", + "files": output, "stagingDir": staging_dir, } instance.data["representations"].append(representation) diff --git a/openpype/hosts/houdini/plugins/publish/increment_current_file.py b/openpype/hosts/houdini/plugins/publish/increment_current_file.py new file mode 100644 index 0000000000..31c2954ee7 --- /dev/null +++ b/openpype/hosts/houdini/plugins/publish/increment_current_file.py @@ -0,0 +1,51 @@ +import pyblish.api +import avalon.api + +from openpype.api import version_up +from openpype.action import get_errored_plugins_from_data + + +class IncrementCurrentFile(pyblish.api.InstancePlugin): + """Increment the current file. + + Saves the current scene with an increased version number. 
+ + """ + + label = "Increment current file" + order = pyblish.api.IntegratorOrder + 9.0 + hosts = ["houdini"] + families = ["colorbleed.usdrender", "redshift_rop"] + targets = ["local"] + + def process(self, instance): + + # This should be a ContextPlugin, but this is a workaround + # for a bug in pyblish to run once for a family: issue #250 + context = instance.context + key = "__hasRun{}".format(self.__class__.__name__) + if context.data.get(key, False): + return + else: + context.data[key] = True + + context = instance.context + errored_plugins = get_errored_plugins_from_data(context) + if any( + plugin.__name__ == "HoudiniSubmitPublishDeadline" + for plugin in errored_plugins + ): + raise RuntimeError( + "Skipping incrementing current file because " + "submission to deadline failed." + ) + + # Filename must not have changed since collecting + host = avalon.api.registered_host() + current_file = host.current_file() + assert ( + context.data["currentFile"] == current_file + ), "Collected filename from current scene name." + + new_filepath = version_up(current_file) + host.save(new_filepath) diff --git a/openpype/hosts/houdini/plugins/publish/increment_current_file_deadline.py b/openpype/hosts/houdini/plugins/publish/increment_current_file_deadline.py new file mode 100644 index 0000000000..faa015f739 --- /dev/null +++ b/openpype/hosts/houdini/plugins/publish/increment_current_file_deadline.py @@ -0,0 +1,35 @@ +import pyblish.api + +import hou +from openpype.api import version_up +from openpype.action import get_errored_plugins_from_data + + +class IncrementCurrentFileDeadline(pyblish.api.ContextPlugin): + """Increment the current file. + + Saves the current scene with an increased version number. 
+ + """ + + label = "Increment current file" + order = pyblish.api.IntegratorOrder + 9.0 + hosts = ["houdini"] + targets = ["deadline"] + + def process(self, context): + + errored_plugins = get_errored_plugins_from_data(context) + if any( + plugin.__name__ == "HoudiniSubmitPublishDeadline" + for plugin in errored_plugins + ): + raise RuntimeError( + "Skipping incrementing current file because " + "submission to deadline failed." + ) + + current_filepath = context.data["currentFile"] + new_filepath = version_up(current_filepath) + + hou.hipFile.save(file_name=new_filepath, save_to_recent_files=True) diff --git a/openpype/hosts/houdini/plugins/publish/save_scene.py b/openpype/hosts/houdini/plugins/publish/save_scene.py new file mode 100644 index 0000000000..1b12efa603 --- /dev/null +++ b/openpype/hosts/houdini/plugins/publish/save_scene.py @@ -0,0 +1,37 @@ +import pyblish.api +import avalon.api + + +class SaveCurrentScene(pyblish.api.InstancePlugin): + """Save current scene""" + + label = "Save current file" + order = pyblish.api.IntegratorOrder - 0.49 + hosts = ["houdini"] + families = ["usdrender", + "redshift_rop"] + targets = ["local"] + + def process(self, instance): + + # This should be a ContextPlugin, but this is a workaround + # for a bug in pyblish to run once for a family: issue #250 + context = instance.context + key = "__hasRun{}".format(self.__class__.__name__) + if context.data.get(key, False): + return + else: + context.data[key] = True + + # Filename must not have changed since collecting + host = avalon.api.registered_host() + current_file = host.current_file() + assert context.data['currentFile'] == current_file, ( + "Collected filename from current scene name." 
+ ) + + if host.has_unsaved_changes(): + self.log.info("Saving current file..") + host.save_file(current_file) + else: + self.log.debug("No unsaved changes, skipping file save..") diff --git a/openpype/hosts/houdini/plugins/publish/save_scene_deadline.py b/openpype/hosts/houdini/plugins/publish/save_scene_deadline.py new file mode 100644 index 0000000000..a0efd0610c --- /dev/null +++ b/openpype/hosts/houdini/plugins/publish/save_scene_deadline.py @@ -0,0 +1,23 @@ +import pyblish.api + + +class SaveCurrentSceneDeadline(pyblish.api.ContextPlugin): + """Save current scene""" + + label = "Save current file" + order = pyblish.api.IntegratorOrder - 0.49 + hosts = ["houdini"] + targets = ["deadline"] + + def process(self, context): + import hou + + assert ( + context.data["currentFile"] == hou.hipFile.path() + ), "Collected filename from current scene name." + + if hou.hipFile.hasUnsavedChanges(): + self.log.info("Saving current file..") + hou.hipFile.save(save_to_recent_files=True) + else: + self.log.debug("No unsaved changes, skipping file save..") diff --git a/openpype/hosts/houdini/plugins/publish/valiate_vdb_input_node.py b/openpype/hosts/houdini/plugins/publish/valiate_vdb_input_node.py index 7b23d73ac7..0ae1bc94eb 100644 --- a/openpype/hosts/houdini/plugins/publish/valiate_vdb_input_node.py +++ b/openpype/hosts/houdini/plugins/publish/valiate_vdb_input_node.py @@ -3,7 +3,7 @@ import openpype.api class ValidateVDBInputNode(pyblish.api.InstancePlugin): - """Validate that the node connected to the output node is of type VDB + """Validate that the node connected to the output node is of type VDB. 
Regardless of the amount of VDBs create the output will need to have an equal amount of VDBs, points, primitives and vertices @@ -24,8 +24,9 @@ class ValidateVDBInputNode(pyblish.api.InstancePlugin): def process(self, instance): invalid = self.get_invalid(instance) if invalid: - raise RuntimeError("Node connected to the output node is not" - "of type VDB!") + raise RuntimeError( + "Node connected to the output node is not" "of type VDB!" + ) @classmethod def get_invalid(cls, instance): diff --git a/openpype/hosts/houdini/plugins/publish/validate_abc_primitive_to_detail.py b/openpype/hosts/houdini/plugins/publish/validate_abc_primitive_to_detail.py new file mode 100644 index 0000000000..8fe1b44b7a --- /dev/null +++ b/openpype/hosts/houdini/plugins/publish/validate_abc_primitive_to_detail.py @@ -0,0 +1,132 @@ +import pyblish.api +import openpype.api + +from collections import defaultdict + + +class ValidateAbcPrimitiveToDetail(pyblish.api.InstancePlugin): + """Validate Alembic ROP Primitive to Detail attribute is consistent. + + The Alembic ROP crashes Houdini whenever an attribute in the "Primitive to + Detail" parameter exists on only a part of the primitives that belong to + the same hierarchy path. Whenever it encounters inconsistent values, + specifically where some are empty as opposed to others then Houdini + crashes. (Tested in Houdini 17.5.229) + + """ + + order = openpype.api.ValidateContentsOrder + 0.1 + families = ["pointcache"] + hosts = ["houdini"] + label = "Validate Primitive to Detail (Abc)" + + def process(self, instance): + invalid = self.get_invalid(instance) + if invalid: + raise RuntimeError( + "Primitives found with inconsistent primitive " + "to detail attributes. See log." + ) + + @classmethod + def get_invalid(cls, instance): + + output = instance.data["output_node"] + + rop = instance[0] + pattern = rop.parm("prim_to_detail_pattern").eval().strip() + if not pattern: + cls.log.debug( + "Alembic ROP has no 'Primitive to Detail' pattern. 
" + "Validation is ignored.." + ) + return + + build_from_path = rop.parm("build_from_path").eval() + if not build_from_path: + cls.log.debug( + "Alembic ROP has 'Build from Path' disabled. " + "Validation is ignored.." + ) + return + + path_attr = rop.parm("path_attrib").eval() + if not path_attr: + cls.log.error( + "The Alembic ROP node has no Path Attribute" + "value set, but 'Build Hierarchy from Attribute'" + "is enabled." + ) + return [rop.path()] + + # Let's assume each attribute is explicitly named for now and has no + # wildcards for Primitive to Detail. This simplifies the check. + cls.log.debug("Checking Primitive to Detail pattern: %s" % pattern) + cls.log.debug("Checking with path attribute: %s" % path_attr) + + # Check if the primitive attribute exists + frame = instance.data.get("startFrame", 0) + geo = output.geometryAtFrame(frame) + + # If there are no primitives on the start frame then it might be + # something that is emitted over time. As such we can't actually + # validate whether the attributes exist, because they won't exist + # yet. In that case, just warn the user and allow it. + if len(geo.iterPrims()) == 0: + cls.log.warning( + "No primitives found on current frame. Validation" + " for Primitive to Detail will be skipped." 
+ ) + return + + attrib = geo.findPrimAttrib(path_attr) + if not attrib: + cls.log.info( + "Geometry Primitives are missing " + "path attribute: `%s`" % path_attr + ) + return [output.path()] + + # Ensure at least a single string value is present + if not attrib.strings(): + cls.log.info( + "Primitive path attribute has no " + "string values: %s" % path_attr + ) + return [output.path()] + + paths = None + for attr in pattern.split(" "): + if not attr.strip(): + # Ignore empty values + continue + + # Check if the primitive attribute exists + attrib = geo.findPrimAttrib(attr) + if not attrib: + # It is allowed to not have the attribute at all + continue + + # The issue can only happen if at least one string attribute is + # present. So we ignore cases with no values whatsoever. + if not attrib.strings(): + continue + + check = defaultdict(set) + values = geo.primStringAttribValues(attr) + if paths is None: + paths = geo.primStringAttribValues(path_attr) + + for path, value in zip(paths, values): + check[path].add(value) + + for path, values in check.items(): + # Whenever a single path has multiple values for the + # Primitive to Detail attribute then we consider it + # inconsistent and invalidate the ROP node's content. + if len(values) > 1: + cls.log.warning( + "Path has multiple values: %s (path: %s)" + % (list(values), path) + ) + return [output.path()] diff --git a/openpype/hosts/houdini/plugins/publish/validate_alembic_face_sets.py b/openpype/hosts/houdini/plugins/publish/validate_alembic_face_sets.py new file mode 100644 index 0000000000..e9126ffef0 --- /dev/null +++ b/openpype/hosts/houdini/plugins/publish/validate_alembic_face_sets.py @@ -0,0 +1,37 @@ +import pyblish.api +import openpype.api + + +class ValidateAlembicROPFaceSets(pyblish.api.InstancePlugin): + """Validate Face Sets are disabled for extraction to pointcache. 
+ + When groups are saved as Face Sets with the Alembic these show up + as shadingEngine connections in Maya - however, with animated groups + these connections in Maya won't work as expected, it won't update per + frame. Additionally, it can break shader assignments in some cases + where it requires to first break this connection to allow a shader to + be assigned. + + It is allowed to include Face Sets, so only an issue is logged to + identify that it could introduce issues down the pipeline. + + """ + + order = openpype.api.ValidateContentsOrder + 0.1 + families = ["pointcache"] + hosts = ["houdini"] + label = "Validate Alembic ROP Face Sets" + + def process(self, instance): + + rop = instance[0] + facesets = rop.parm("facesets").eval() + + # 0 = No Face Sets + # 1 = Save Non-Empty Groups as Face Sets + # 2 = Save All Groups As Face Sets + if facesets != 0: + self.log.warning( + "Alembic ROP saves 'Face Sets' for Geometry. " + "Are you sure you want this?" + ) diff --git a/openpype/hosts/houdini/plugins/publish/validate_alembic_input_node.py b/openpype/hosts/houdini/plugins/publish/validate_alembic_input_node.py index e8596b739d..17c9da837a 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_alembic_input_node.py +++ b/openpype/hosts/houdini/plugins/publish/validate_alembic_input_node.py @@ -1,9 +1,9 @@ import pyblish.api -import openpype.api +import colorbleed.api class ValidateAlembicInputNode(pyblish.api.InstancePlugin): - """Validate that the node connected to the output is correct + """Validate that the node connected to the output is correct. 
The connected node cannot be of the following types for Alembic: - VDB @@ -11,7 +11,7 @@ class ValidateAlembicInputNode(pyblish.api.InstancePlugin): """ - order = openpype.api.ValidateContentsOrder + 0.1 + order = colorbleed.api.ValidateContentsOrder + 0.1 families = ["pointcache"] hosts = ["houdini"] label = "Validate Input Node (Abc)" @@ -19,19 +19,35 @@ class ValidateAlembicInputNode(pyblish.api.InstancePlugin): def process(self, instance): invalid = self.get_invalid(instance) if invalid: - raise RuntimeError("Node connected to the output node incorrect") + raise RuntimeError( + "Primitive types found that are not supported" + "for Alembic output." + ) @classmethod def get_invalid(cls, instance): - invalid_nodes = ["VDB", "Volume"] + invalid_prim_types = ["VDB", "Volume"] node = instance.data["output_node"] - prims = node.geometry().prims() + if not hasattr(node, "geometry"): + # In the case someone has explicitly set an Object + # node instead of a SOP node in Geometry context + # then for now we ignore - this allows us to also + # export object transforms. + cls.log.warning("No geometry output node found, skipping check..") + return - for prim in prims: - prim_type = prim.type().name() - if prim_type in invalid_nodes: - cls.log.error("Found a primitive which is of type '%s' !" - % prim_type) - return [instance] + frame = instance.data.get("startFrame", 0) + geo = node.geometryAtFrame(frame) + + invalid = False + for prim_type in invalid_prim_types: + if geo.countPrimType(prim_type) > 0: + cls.log.error( + "Found a primitive which is of type '%s' !" 
% prim_type + ) + invalid = True + + if invalid: + return [instance] diff --git a/openpype/hosts/houdini/plugins/publish/validate_animation_settings.py b/openpype/hosts/houdini/plugins/publish/validate_animation_settings.py index a42c3696da..5eb8f93d03 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_animation_settings.py +++ b/openpype/hosts/houdini/plugins/publish/validate_animation_settings.py @@ -29,8 +29,9 @@ class ValidateAnimationSettings(pyblish.api.InstancePlugin): invalid = self.get_invalid(instance) if invalid: - raise RuntimeError("Output settings do no match for '%s'" % - instance) + raise RuntimeError( + "Output settings do no match for '%s'" % instance + ) @classmethod def get_invalid(cls, instance): diff --git a/openpype/hosts/houdini/plugins/publish/validate_bypass.py b/openpype/hosts/houdini/plugins/publish/validate_bypass.py index 9118ae0e8c..79c67c3008 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_bypass.py +++ b/openpype/hosts/houdini/plugins/publish/validate_bypass.py @@ -18,12 +18,17 @@ class ValidateBypassed(pyblish.api.InstancePlugin): def process(self, instance): + if len(instance) == 0: + # Ignore instances without any nodes + # e.g. in memory bootstrap instances + return + invalid = self.get_invalid(instance) if invalid: rop = invalid[0] raise RuntimeError( - "ROP node %s is set to bypass, publishing cannot continue.." % - rop.path() + "ROP node %s is set to bypass, publishing cannot continue.." 
+ % rop.path() ) @classmethod diff --git a/openpype/hosts/houdini/plugins/publish/validate_camera_rop.py b/openpype/hosts/houdini/plugins/publish/validate_camera_rop.py index ca75579267..a0919e1323 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_camera_rop.py +++ b/openpype/hosts/houdini/plugins/publish/validate_camera_rop.py @@ -6,9 +6,9 @@ class ValidateCameraROP(pyblish.api.InstancePlugin): """Validate Camera ROP settings.""" order = openpype.api.ValidateContentsOrder - families = ['camera'] - hosts = ['houdini'] - label = 'Camera ROP' + families = ["camera"] + hosts = ["houdini"] + label = "Camera ROP" def process(self, instance): @@ -16,8 +16,10 @@ class ValidateCameraROP(pyblish.api.InstancePlugin): node = instance[0] if node.parm("use_sop_path").eval(): - raise RuntimeError("Alembic ROP for Camera export should not be " - "set to 'Use Sop Path'. Please disable.") + raise RuntimeError( + "Alembic ROP for Camera export should not be " + "set to 'Use Sop Path'. Please disable." + ) # Get the root and objects parameter of the Alembic ROP node root = node.parm("root").eval() @@ -34,8 +36,8 @@ class ValidateCameraROP(pyblish.api.InstancePlugin): if not camera: raise ValueError("Camera path does not exist: %s" % path) - if not camera.type().name() == "cam": - raise ValueError("Object set in Alembic ROP is not a camera: " - "%s (type: %s)" % (camera, camera.type().name())) - - + if camera.type().name() != "cam": + raise ValueError( + "Object set in Alembic ROP is not a camera: " + "%s (type: %s)" % (camera, camera.type().name()) + ) diff --git a/openpype/hosts/houdini/plugins/publish/validate_cop_output_node.py b/openpype/hosts/houdini/plugins/publish/validate_cop_output_node.py new file mode 100644 index 0000000000..543539ffe3 --- /dev/null +++ b/openpype/hosts/houdini/plugins/publish/validate_cop_output_node.py @@ -0,0 +1,60 @@ +import pyblish.api + + +class ValidateCopOutputNode(pyblish.api.InstancePlugin): + """Validate the instance COP Output Node. 
+ + This will ensure: + - The COP Path is set. + - The COP Path refers to an existing object. + - The COP Path node is a COP node. + + """ + + order = pyblish.api.ValidatorOrder + families = ["imagesequence"] + hosts = ["houdini"] + label = "Validate COP Output Node" + + def process(self, instance): + + invalid = self.get_invalid(instance) + if invalid: + raise RuntimeError( + "Output node(s) `%s` are incorrect. " + "See plug-in log for details." % invalid + ) + + @classmethod + def get_invalid(cls, instance): + + import hou + + output_node = instance.data["output_node"] + + if output_node is None: + node = instance[0] + cls.log.error( + "COP Output node in '%s' does not exist. " + "Ensure a valid COP output path is set." % node.path() + ) + + return [node.path()] + + # Output node must be a Sop node. + if not isinstance(output_node, hou.CopNode): + cls.log.error( + "Output node %s is not a COP node. " + "COP Path must point to a COP node, " + "instead found category type: %s" + % (output_node.path(), output_node.type().category().name()) + ) + return [output_node.path()] + + # For the sake of completeness also assert the category type + # is Cop2 to avoid potential edge case scenarios even though + # the isinstance check above should be stricter than this category + assert output_node.type().category().name() == "Cop2", ( + "Output node %s is not of category Cop2. This is a bug.." + % output_node.path() + ) diff --git a/openpype/hosts/houdini/plugins/publish/validate_file_extension.py b/openpype/hosts/houdini/plugins/publish/validate_file_extension.py new file mode 100644 index 0000000000..b26d28a1e7 --- /dev/null +++ b/openpype/hosts/houdini/plugins/publish/validate_file_extension.py @@ -0,0 +1,59 @@ +import os +import pyblish.api + +from openpype.hosts.houdini.api import lib + + +class ValidateFileExtension(pyblish.api.InstancePlugin): + """Validate the output file extension fits the output family. 
+ + File extensions: + - Pointcache must be .abc + - Camera must be .abc + - VDB must be .vdb + + """ + + order = pyblish.api.ValidatorOrder + families = ["pointcache", "camera", "vdbcache"] + hosts = ["houdini"] + label = "Output File Extension" + + family_extensions = { + "pointcache": ".abc", + "camera": ".abc", + "vdbcache": ".vdb", + } + + def process(self, instance): + + invalid = self.get_invalid(instance) + if invalid: + raise RuntimeError( + "ROP node has incorrect " "file extension: %s" % invalid + ) + + @classmethod + def get_invalid(cls, instance): + + # Get ROP node from instance + node = instance[0] + + # Create lookup for current family in instance + families = [] + family = instance.data.get("family", None) + if family: + families.append(family) + families = set(families) + + # Perform extension check + output = lib.get_output_parameter(node).eval() + _, output_extension = os.path.splitext(output) + + for family in families: + extension = cls.family_extensions.get(family, None) + if extension is None: + raise RuntimeError("Unsupported family: %s" % family) + + if output_extension != extension: + return [node.path()] diff --git a/openpype/hosts/houdini/plugins/publish/validate_frame_token.py b/openpype/hosts/houdini/plugins/publish/validate_frame_token.py new file mode 100644 index 0000000000..76b5910576 --- /dev/null +++ b/openpype/hosts/houdini/plugins/publish/validate_frame_token.py @@ -0,0 +1,51 @@ +import pyblish.api + +from openpype.hosts.houdini.api import lib + + +class ValidateFrameToken(pyblish.api.InstancePlugin): + """Validate if the unexpanded string contains the frame ('$F') token. + + This validator will *only* check the output parameter of the node if + the Valid Frame Range is not set to 'Render Current Frame' + + Rules: + If you render out a frame range it is mandatory to have the + frame token - '$F4' or similar - to ensure that each frame gets + written. 
If this is not the case you will override the same file + every time a frame is written out. + + Examples: + Good: 'my_vbd_cache.$F4.vdb' + Bad: 'my_vbd_cache.vdb' + + """ + + order = pyblish.api.ValidatorOrder + label = "Validate Frame Token" + families = ["vdbcache"] + + def process(self, instance): + + invalid = self.get_invalid(instance) + if invalid: + raise RuntimeError( + "Output settings do no match for '%s'" % instance + ) + + @classmethod + def get_invalid(cls, instance): + + node = instance[0] + + # Check trange parm, 0 means Render Current Frame + frame_range = node.evalParm("trange") + if frame_range == 0: + return [] + + output_parm = lib.get_output_parameter(node) + unexpanded_str = output_parm.unexpandedString() + + if "$F" not in unexpanded_str: + cls.log.error("No frame token found in '%s'" % node.path()) + return [instance] diff --git a/openpype/hosts/houdini/plugins/publish/validate_houdini_license_category.py b/openpype/hosts/houdini/plugins/publish/validate_houdini_license_category.py new file mode 100644 index 0000000000..f5f03aa844 --- /dev/null +++ b/openpype/hosts/houdini/plugins/publish/validate_houdini_license_category.py @@ -0,0 +1,30 @@ +import pyblish.api + + +class ValidateHoudiniCommercialLicense(pyblish.api.InstancePlugin): + """Validate the Houdini instance runs a Commercial license. + + When extracting USD files from a non-commercial Houdini license, even with + Houdini Indie license, the resulting files will get "scrambled" with + a license protection and get a special .usdnc or .usdlc suffix. + + This currently breaks the Subset/representation pipeline so we disallow + any publish with those licenses. Only the commercial license is valid. 
+ + """ + + order = pyblish.api.ValidatorOrder + families = ["usd"] + hosts = ["houdini"] + label = "Houdini Commercial License" + + def process(self, instance): + + import hou + + license = hou.licenseCategory() + if license != hou.licenseCategoryType.Commercial: + raise RuntimeError( + "USD Publishing requires a full Commercial " + "license. You are on: %s" % license + ) diff --git a/openpype/hosts/houdini/plugins/publish/validate_mkpaths_toggled.py b/openpype/hosts/houdini/plugins/publish/validate_mkpaths_toggled.py index a735f4b64b..cd72877949 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_mkpaths_toggled.py +++ b/openpype/hosts/houdini/plugins/publish/validate_mkpaths_toggled.py @@ -6,18 +6,18 @@ class ValidateIntermediateDirectoriesChecked(pyblish.api.InstancePlugin): """Validate Create Intermediate Directories is enabled on ROP node.""" order = openpype.api.ValidateContentsOrder - families = ['pointcache', - 'camera', - 'vdbcache'] - hosts = ['houdini'] - label = 'Create Intermediate Directories Checked' + families = ["pointcache", "camera", "vdbcache"] + hosts = ["houdini"] + label = "Create Intermediate Directories Checked" def process(self, instance): invalid = self.get_invalid(instance) if invalid: - raise RuntimeError("Found ROP node with Create Intermediate " - "Directories turned off: %s" % invalid) + raise RuntimeError( + "Found ROP node with Create Intermediate " + "Directories turned off: %s" % invalid + ) @classmethod def get_invalid(cls, instance): diff --git a/openpype/hosts/houdini/plugins/publish/validate_no_errors.py b/openpype/hosts/houdini/plugins/publish/validate_no_errors.py new file mode 100644 index 0000000000..f58e5f8d7d --- /dev/null +++ b/openpype/hosts/houdini/plugins/publish/validate_no_errors.py @@ -0,0 +1,65 @@ +import pyblish.api +import openpype.api +import hou + + +def cook_in_range(node, start, end): + current = hou.intFrame() + if start >= current >= end: + # Allow cooking current frame since we're in frame 
range + node.cook(force=False) + else: + node.cook(force=False, frame_range=(start, start)) + + +def get_errors(node): + """Get cooking errors. + + If node already has errors check whether it needs to recook + If so, then recook first to see if that solves it. + + """ + if node.errors() and node.needsToCook(): + node.cook() + + return node.errors() + + +class ValidateNoErrors(pyblish.api.InstancePlugin): + """Validate the Instance has no current cooking errors.""" + + order = openpype.api.ValidateContentsOrder + hosts = ["houdini"] + label = "Validate no errors" + + def process(self, instance): + + validate_nodes = [] + + if len(instance) > 0: + validate_nodes.append(instance[0]) + output_node = instance.data.get("output_node") + if output_node: + validate_nodes.append(output_node) + + for node in validate_nodes: + self.log.debug("Validating for errors: %s" % node.path()) + errors = get_errors(node) + + if errors: + # If there are current errors, then try an unforced cook + # to see whether the error will disappear. 
+ self.log.debug( + "Recooking to revalidate error " + "is up to date for: %s" % node.path() + ) + current_frame = hou.intFrame() + start = instance.data.get("frameStart", current_frame) + end = instance.data.get("frameEnd", current_frame) + cook_in_range(node, start=start, end=end) + + # Check for errors again after the forced recook + errors = get_errors(node) + if errors: + self.log.error(errors) + raise RuntimeError("Node has errors: %s" % node.path()) diff --git a/openpype/hosts/houdini/plugins/publish/validate_outnode_exists.py b/openpype/hosts/houdini/plugins/publish/validate_outnode_exists.py deleted file mode 100644 index bfa2d38f1a..0000000000 --- a/openpype/hosts/houdini/plugins/publish/validate_outnode_exists.py +++ /dev/null @@ -1,50 +0,0 @@ -import pyblish.api -import openpype.api - - -class ValidatOutputNodeExists(pyblish.api.InstancePlugin): - """Validate if node attribute Create intermediate Directories is turned on - - Rules: - * The node must have Create intermediate Directories turned on to - ensure the output file will be created - - """ - - order = openpype.api.ValidateContentsOrder - families = ["*"] - hosts = ['houdini'] - label = "Output Node Exists" - - def process(self, instance): - invalid = self.get_invalid(instance) - if invalid: - raise RuntimeError("Could not find output node(s)!") - - @classmethod - def get_invalid(cls, instance): - - import hou - - result = set() - - node = instance[0] - if node.type().name() == "alembic": - soppath_parm = "sop_path" - else: - # Fall back to geometry node - soppath_parm = "soppath" - - sop_path = node.parm(soppath_parm).eval() - output_node = hou.node(sop_path) - - if output_node is None: - cls.log.error("Node at '%s' does not exist" % sop_path) - result.add(node.path()) - - # Added cam as this is a legit output type (cameras can't - if output_node.type().name() not in ["output", "cam"]: - cls.log.error("SOP Path does not end path at output node") - result.add(node.path()) - - return result diff 
--git a/openpype/hosts/houdini/plugins/publish/validate_output_node.py b/openpype/hosts/houdini/plugins/publish/validate_output_node.py index 5e20ee40d6..0b60ab5c48 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_output_node.py +++ b/openpype/hosts/houdini/plugins/publish/validate_output_node.py @@ -14,8 +14,7 @@ class ValidateOutputNode(pyblish.api.InstancePlugin): """ order = pyblish.api.ValidatorOrder - families = ["pointcache", - "vdbcache"] + families = ["pointcache", "vdbcache"] hosts = ["houdini"] label = "Validate Output Node" @@ -23,8 +22,10 @@ class ValidateOutputNode(pyblish.api.InstancePlugin): invalid = self.get_invalid(instance) if invalid: - raise RuntimeError("Output node(s) `%s` are incorrect. " - "See plug-in log for details." % invalid) + raise RuntimeError( + "Output node(s) `%s` are incorrect. " + "See plug-in log for details." % invalid + ) @classmethod def get_invalid(cls, instance): @@ -35,39 +36,42 @@ class ValidateOutputNode(pyblish.api.InstancePlugin): if output_node is None: node = instance[0] - cls.log.error("SOP Output node in '%s' does not exist. " - "Ensure a valid SOP output path is set." - % node.path()) + cls.log.error( + "SOP Output node in '%s' does not exist. " + "Ensure a valid SOP output path is set." % node.path() + ) return [node.path()] # Output node must be a Sop node. if not isinstance(output_node, hou.SopNode): - cls.log.error("Output node %s is not a SOP node. " - "SOP Path must point to a SOP node, " - "instead found category type: %s" % ( - output_node.path(), - output_node.type().category().name() - ) - ) + cls.log.error( + "Output node %s is not a SOP node. 
" + "SOP Path must point to a SOP node, " + "instead found category type: %s" + % (output_node.path(), output_node.type().category().name()) + ) return [output_node.path()] # For the sake of completeness also assert the category type # is Sop to avoid potential edge case scenarios even though # the isinstance check above should be stricter than this category assert output_node.type().category().name() == "Sop", ( - "Output node %s is not of category Sop. This is a bug.." % - output_node.path() + "Output node %s is not of category Sop. This is a bug.." + % output_node.path() ) # Check if output node has incoming connections if not output_node.inputConnections(): - cls.log.error("Output node `%s` has no incoming connections" - % output_node.path()) + cls.log.error( + "Output node `%s` has no incoming connections" + % output_node.path() + ) return [output_node.path()] # Ensure the output node has at least Geometry data if not output_node.geometry(): - cls.log.error("Output node `%s` has no geometry data." - % output_node.path()) + cls.log.error( + "Output node `%s` has no geometry data." % output_node.path() + ) return [output_node.path()] diff --git a/openpype/hosts/houdini/plugins/publish/validate_primitive_hierarchy_paths.py b/openpype/hosts/houdini/plugins/publish/validate_primitive_hierarchy_paths.py index 608e236198..3c15532be8 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_primitive_hierarchy_paths.py +++ b/openpype/hosts/houdini/plugins/publish/validate_primitive_hierarchy_paths.py @@ -19,8 +19,9 @@ class ValidatePrimitiveHierarchyPaths(pyblish.api.InstancePlugin): def process(self, instance): invalid = self.get_invalid(instance) if invalid: - raise RuntimeError("See log for details. " - "Invalid nodes: {0}".format(invalid)) + raise RuntimeError( + "See log for details. 
" "Invalid nodes: {0}".format(invalid) + ) @classmethod def get_invalid(cls, instance): @@ -28,48 +29,68 @@ class ValidatePrimitiveHierarchyPaths(pyblish.api.InstancePlugin): import hou output = instance.data["output_node"] - prims = output.geometry().prims() rop = instance[0] build_from_path = rop.parm("build_from_path").eval() if not build_from_path: - cls.log.debug("Alembic ROP has 'Build from Path' disabled. " - "Validation is ignored..") + cls.log.debug( + "Alembic ROP has 'Build from Path' disabled. " + "Validation is ignored.." + ) return path_attr = rop.parm("path_attrib").eval() if not path_attr: - cls.log.error("The Alembic ROP node has no Path Attribute" - "value set, but 'Build Hierarchy from Attribute'" - "is enabled.") + cls.log.error( + "The Alembic ROP node has no Path Attribute" + "value set, but 'Build Hierarchy from Attribute'" + "is enabled." + ) return [rop.path()] cls.log.debug("Checking for attribute: %s" % path_attr) - missing_attr = [] - invalid_attr = [] - for prim in prims: + # Check if the primitive attribute exists + frame = instance.data.get("startFrame", 0) + geo = output.geometryAtFrame(frame) - try: - path = prim.stringAttribValue(path_attr) - except hou.OperationFailed: - # Attribute does not exist. - missing_attr.append(prim) - continue + # If there are no primitives on the current frame then we can't + # check whether the path names are correct. So we'll just issue a + # warning that the check can't be done consistently and skip + # validation. + if len(geo.iterPrims()) == 0: + cls.log.warning( + "No primitives found on current frame. Validation" + " for primitive hierarchy paths will be skipped," + " thus can't be validated." + ) + return - if not path: - # Empty path value is invalid. 
- invalid_attr.append(prim) - continue - - if missing_attr: - cls.log.info("Prims are missing attribute `%s`" % path_attr) - - if invalid_attr: - cls.log.info("Prims have no value for attribute `%s` " - "(%s of %s prims)" % (path_attr, - len(invalid_attr), - len(prims))) - - if missing_attr or invalid_attr: + # Check if there are any values for the primitives + attrib = geo.findPrimAttrib(path_attr) + if not attrib: + cls.log.info( + "Geometry Primitives are missing " + "path attribute: `%s`" % path_attr + ) + return [output.path()] + + # Ensure at least a single string value is present + if not attrib.strings(): + cls.log.info( + "Primitive path attribute has no " + "string values: %s" % path_attr + ) + return [output.path()] + + paths = geo.primStringAttribValues(path_attr) + # Ensure all primitives are set to a valid path + # Collect all invalid primitive numbers + invalid_prims = [i for i, path in enumerate(paths) if not path] + if invalid_prims: + num_prims = len(geo.iterPrims()) # faster than len(geo.prims()) + cls.log.info( + "Prims have no value for attribute `%s` " + "(%s of %s prims)" % (path_attr, len(invalid_prims), num_prims) + ) return [output.path()] diff --git a/openpype/hosts/houdini/plugins/publish/validate_remote_publish.py b/openpype/hosts/houdini/plugins/publish/validate_remote_publish.py new file mode 100644 index 0000000000..95c66edff0 --- /dev/null +++ b/openpype/hosts/houdini/plugins/publish/validate_remote_publish.py @@ -0,0 +1,43 @@ +import pyblish.api +import openpype.api + +from openpype.hosts.houdini.api import lib + +import hou + + +class ValidateRemotePublishOutNode(pyblish.api.ContextPlugin): + """Validate the remote publish out node exists for Deadline to trigger.""" + + order = pyblish.api.ValidatorOrder - 0.4 + families = ["*"] + hosts = ["houdini"] + targets = ["deadline"] + label = "Remote Publish ROP node" + actions = [openpype.api.RepairContextAction] + + def process(self, context): + + cmd = "import colorbleed.lib; 
colorbleed.lib.publish_remote()" + + node = hou.node("/out/REMOTE_PUBLISH") + if not node: + raise RuntimeError("Missing REMOTE_PUBLISH node.") + + # We ensure it's a shell node and that it has the pre-render script + # set correctly. Plus the shell script it will trigger should be + # completely empty (doing nothing) + assert node.type().name() == "shell", "Must be shell ROP node" + assert node.parm("command").eval() == "", "Must have no command" + assert not node.parm("shellexec").eval(), "Must not execute in shell" + assert ( + node.parm("prerender").eval() == cmd + ), "REMOTE_PUBLISH node does not have correct prerender script." + assert ( + node.parm("lprerender").eval() == "python" + ), "REMOTE_PUBLISH node prerender script type not set to 'python'" + + @classmethod + def repair(cls, context): + """(Re)create the node if it fails to pass validation.""" + lib.create_remote_publish_node(force=True) diff --git a/openpype/hosts/houdini/plugins/publish/validate_remote_publish_enabled.py b/openpype/hosts/houdini/plugins/publish/validate_remote_publish_enabled.py new file mode 100644 index 0000000000..b681fd0ee1 --- /dev/null +++ b/openpype/hosts/houdini/plugins/publish/validate_remote_publish_enabled.py @@ -0,0 +1,35 @@ +import pyblish.api +import openpype.api + +import hou + + +class ValidateRemotePublishEnabled(pyblish.api.ContextPlugin): + """Validate the remote publish node is *not* bypassed.""" + + order = pyblish.api.ValidatorOrder - 0.39 + families = ["*"] + hosts = ["houdini"] + targets = ["deadline"] + label = "Remote Publish ROP enabled" + actions = [openpype.api.RepairContextAction] + + def process(self, context): + + node = hou.node("/out/REMOTE_PUBLISH") + if not node: + raise RuntimeError("Missing REMOTE_PUBLISH node.") + + if node.isBypassed(): + raise RuntimeError("REMOTE_PUBLISH must not be bypassed.") + + @classmethod + def repair(cls, context): + """(Re)create the node if it fails to pass validation.""" + + node = hou.node("/out/REMOTE_PUBLISH") 
+ if not node: + raise RuntimeError("Missing REMOTE_PUBLISH node.") + + cls.log.info("Disabling bypass on /out/REMOTE_PUBLISH") + node.bypass(False) diff --git a/openpype/hosts/houdini/plugins/publish/validate_sop_output_node.py b/openpype/hosts/houdini/plugins/publish/validate_sop_output_node.py new file mode 100644 index 0000000000..a5a07b1b1a --- /dev/null +++ b/openpype/hosts/houdini/plugins/publish/validate_sop_output_node.py @@ -0,0 +1,80 @@ +import pyblish.api + + +class ValidateSopOutputNode(pyblish.api.InstancePlugin): + """Validate the instance SOP Output Node. + + This will ensure: + - The SOP Path is set. + - The SOP Path refers to an existing object. + - The SOP Path node is a SOP node. + - The SOP Path node has at least one input connection (has an input) + - The SOP Path has geometry data. + + """ + + order = pyblish.api.ValidatorOrder + families = ["pointcache", "vdbcache"] + hosts = ["houdini"] + label = "Validate Output Node" + + def process(self, instance): + + invalid = self.get_invalid(instance) + if invalid: + raise RuntimeError( + "Output node(s) `%s` are incorrect. " + "See plug-in log for details." % invalid + ) + + @classmethod + def get_invalid(cls, instance): + + import hou + + output_node = instance.data["output_node"] + + if output_node is None: + node = instance[0] + cls.log.error( + "SOP Output node in '%s' does not exist. " + "Ensure a valid SOP output path is set." % node.path() + ) + + return [node.path()] + + # Output node must be a Sop node. + if not isinstance(output_node, hou.SopNode): + cls.log.error( + "Output node %s is not a SOP node. 
" + "SOP Path must point to a SOP node, " + "instead found category type: %s" + % (output_node.path(), output_node.type().category().name()) + ) + return [output_node.path()] + + # For the sake of completeness also assert the category type + # is Sop to avoid potential edge case scenarios even though + # the isinstance check above should be stricter than this category + assert output_node.type().category().name() == "Sop", ( + "Output node %s is not of category Sop. This is a bug.." + % output_node.path() + ) + + # Ensure the node is cooked and succeeds to cook so we can correctly + # check for its geometry data. + if output_node.needsToCook(): + cls.log.debug("Cooking node: %s" % output_node.path()) + try: + output_node.cook() + except hou.Error as exc: + cls.log.error("Cook failed: %s" % exc) + cls.log.error(output_node.errors()[0]) + return [output_node.path()] + + # Ensure the output node has at least Geometry data + if not output_node.geometry(): + cls.log.error( + "Output node `%s` has no geometry data." % output_node.path() + ) + return [output_node.path()] diff --git a/openpype/hosts/houdini/plugins/publish/validate_usd_layer_path_backslashes.py b/openpype/hosts/houdini/plugins/publish/validate_usd_layer_path_backslashes.py new file mode 100644 index 0000000000..ac0181aed2 --- /dev/null +++ b/openpype/hosts/houdini/plugins/publish/validate_usd_layer_path_backslashes.py @@ -0,0 +1,50 @@ +import pyblish.api + +import openpype.hosts.houdini.api.usd as hou_usdlib + + +class ValidateUSDLayerPathBackslashes(pyblish.api.InstancePlugin): + """Validate USD loaded paths have no backslashes. + + This is a crucial validation for HUSK USD rendering as Houdini's + USD Render ROP will fail to write out a .usd file for rendering that + correctly preserves the backslashes, e.g. it will incorrectly convert a + '\t' to a TAB character disallowing HUSK to find those specific files. + + This validation is redundant for usdModel since that flattens the model + before write. 
As such it will never have any used layers with a path. + + """ + + order = pyblish.api.ValidatorOrder + families = ["usdSetDress", "usdShade", "usd", "usdrender"] + hosts = ["houdini"] + label = "USD Layer path backslashes" + optional = True + + def process(self, instance): + + rop = instance[0] + lop_path = hou_usdlib.get_usd_rop_loppath(rop) + stage = lop_path.stage(apply_viewport_overrides=False) + + invalid = [] + for layer in stage.GetUsedLayers(): + references = layer.externalReferences + + for ref in references: + + # Ignore anonymous layers + if ref.startswith("anon:"): + continue + + # If any backslashes in the path consider it invalid + if "\\" in ref: + self.log.error("Found invalid path: %s" % ref) + invalid.append(layer) + + if invalid: + raise RuntimeError( + "Loaded layers have backslashes. " + "This is invalid for HUSK USD rendering." + ) diff --git a/openpype/hosts/houdini/plugins/publish/validate_usd_model_and_shade.py b/openpype/hosts/houdini/plugins/publish/validate_usd_model_and_shade.py new file mode 100644 index 0000000000..2fd2f5eb9f --- /dev/null +++ b/openpype/hosts/houdini/plugins/publish/validate_usd_model_and_shade.py @@ -0,0 +1,76 @@ +import pyblish.api + +import openpype.hosts.houdini.api.usd as hou_usdlib + + +from pxr import UsdShade, UsdRender, UsdLux + + +def fullname(o): + """Get fully qualified class name""" + module = o.__module__ + if module is None or module == str.__module__: + return o.__name__ + return module + "." + o.__name__ + + +class ValidateUsdModel(pyblish.api.InstancePlugin): + """Validate USD Model. + + Disallow Shaders, Render settings, products and vars and Lux lights. 
+ + """ + + order = pyblish.api.ValidatorOrder + families = ["usdModel"] + hosts = ["houdini"] + label = "Validate USD Model" + optional = True + + disallowed = [ + UsdShade.Shader, + UsdRender.Settings, + UsdRender.Product, + UsdRender.Var, + UsdLux.Light, + ] + + def process(self, instance): + + rop = instance[0] + lop_path = hou_usdlib.get_usd_rop_loppath(rop) + stage = lop_path.stage(apply_viewport_overrides=False) + + invalid = [] + for prim in stage.Traverse(): + + for klass in self.disallowed: + if klass(prim): + # Get full class name without pxr. prefix + name = fullname(klass).split("pxr.", 1)[-1] + path = str(prim.GetPath()) + self.log.warning("Disallowed %s: %s" % (name, path)) + + invalid.append(prim) + + if invalid: + prim_paths = sorted([str(prim.GetPath()) for prim in invalid]) + raise RuntimeError("Found invalid primitives: %s" % prim_paths) + + +class ValidateUsdShade(ValidateUsdModel): + """Validate usdShade. + + Disallow Render settings, products, vars and Lux lights. + + """ + + families = ["usdShade"] + label = "Validate USD Shade" + + disallowed = [ + UsdRender.Settings, + UsdRender.Product, + UsdRender.Var, + UsdLux.Light, + ] diff --git a/openpype/hosts/houdini/plugins/publish/validate_usd_output_node.py b/openpype/hosts/houdini/plugins/publish/validate_usd_output_node.py new file mode 100644 index 0000000000..1f10fafdf4 --- /dev/null +++ b/openpype/hosts/houdini/plugins/publish/validate_usd_output_node.py @@ -0,0 +1,52 @@ +import pyblish.api + + +class ValidateUSDOutputNode(pyblish.api.InstancePlugin): + """Validate the instance USD LOPs Output Node. + + This will ensure: + - The LOP Path is set. + - The LOP Path refers to an existing object. + - The LOP Path node is a LOP node. 
+ + """ + + order = pyblish.api.ValidatorOrder + families = ["usd"] + hosts = ["houdini"] + label = "Validate Output Node (USD)" + + def process(self, instance): + + invalid = self.get_invalid(instance) + if invalid: + raise RuntimeError( + "Output node(s) `%s` are incorrect. " + "See plug-in log for details." % invalid + ) + + @classmethod + def get_invalid(cls, instance): + + import hou + + output_node = instance.data["output_node"] + + if output_node is None: + node = instance[0] + cls.log.error( + "USD node '%s' LOP path does not exist. " + "Ensure a valid LOP path is set." % node.path() + ) + + return [node.path()] + + # Output node must be a Sop node. + if not isinstance(output_node, hou.LopNode): + cls.log.error( + "Output node %s is not a LOP node. " + "LOP Path must point to a LOP node, " + "instead found category type: %s" + % (output_node.path(), output_node.type().category().name()) + ) + return [output_node.path()] diff --git a/openpype/hosts/houdini/plugins/publish/validate_usd_render_product_names.py b/openpype/hosts/houdini/plugins/publish/validate_usd_render_product_names.py new file mode 100644 index 0000000000..36336a03ae --- /dev/null +++ b/openpype/hosts/houdini/plugins/publish/validate_usd_render_product_names.py @@ -0,0 +1,31 @@ +import pyblish.api + +import os + + +class ValidateUSDRenderProductNames(pyblish.api.InstancePlugin): + """Validate USD Render Product names are correctly set absolute paths.""" + + order = pyblish.api.ValidatorOrder + families = ["usdrender"] + hosts = ["houdini"] + label = "Validate USD Render Product Names" + optional = True + + def process(self, instance): + + invalid = [] + for filepath in instance.data["files"]: + + if not filepath: + invalid.append("Detected empty output filepath.") + + if not os.path.isabs(filepath): + invalid.append( + "Output file path is not " "absolute path: %s" % filepath + ) + + if invalid: + for message in invalid: + self.log.error(message) + raise RuntimeError("USD Render Paths are 
invalid.") diff --git a/openpype/hosts/houdini/plugins/publish/validate_usd_setdress.py b/openpype/hosts/houdini/plugins/publish/validate_usd_setdress.py new file mode 100644 index 0000000000..fb1094e6b5 --- /dev/null +++ b/openpype/hosts/houdini/plugins/publish/validate_usd_setdress.py @@ -0,0 +1,54 @@ +import pyblish.api + +import openpype.hosts.houdini.api.usd as hou_usdlib + + +class ValidateUsdSetDress(pyblish.api.InstancePlugin): + """Validate USD Set Dress. + + Must only have references or payloads. May not generate new mesh or + flattened meshes. + + """ + + order = pyblish.api.ValidatorOrder + families = ["usdSetDress"] + hosts = ["houdini"] + label = "Validate USD Set Dress" + optional = True + + def process(self, instance): + + from pxr import UsdGeom + + rop = instance[0] + lop_path = hou_usdlib.get_usd_rop_loppath(rop) + stage = lop_path.stage(apply_viewport_overrides=False) + + invalid = [] + for node in stage.Traverse(): + + if UsdGeom.Mesh(node): + # This solely checks whether there is any USD involved + # in this Prim's Stack and doesn't accurately tell us + # whether it was generated locally or not. + # TODO: More accurately track whether the Prim was created + # in the local scene + stack = node.GetPrimStack() + for sdf in stack: + path = sdf.layer.realPath + if path: + break + else: + prim_path = node.GetPath() + self.log.error( + "%s is not referenced geometry." % prim_path + ) + invalid.append(node) + + if invalid: + raise RuntimeError( + "SetDress contains local geometry. " + "This is not allowed, it must be an assembly " + "of referenced assets." 
+ ) diff --git a/openpype/hosts/houdini/plugins/publish/validate_usd_shade_model_exists.py b/openpype/hosts/houdini/plugins/publish/validate_usd_shade_model_exists.py new file mode 100644 index 0000000000..fcfbf6b22d --- /dev/null +++ b/openpype/hosts/houdini/plugins/publish/validate_usd_shade_model_exists.py @@ -0,0 +1,41 @@ +import re + +import pyblish.api +import openpype.api + +from avalon import io + + +class ValidateUSDShadeModelExists(pyblish.api.InstancePlugin): + """Validate the Instance has no current cooking errors.""" + + order = openpype.api.ValidateContentsOrder + hosts = ["houdini"] + families = ["usdShade"] + label = "USD Shade model exists" + + def process(self, instance): + + asset = instance.data["asset"] + subset = instance.data["subset"] + + # Assume shading variation starts after a dot separator + shade_subset = subset.split(".", 1)[0] + model_subset = re.sub("^usdShade", "usdModel", shade_subset) + + asset_doc = io.find_one({"name": asset, "type": "asset"}) + if not asset_doc: + raise RuntimeError("Asset does not exist: %s" % asset) + + subset_doc = io.find_one( + { + "name": model_subset, + "type": "subset", + "parent": asset_doc["_id"], + } + ) + if not subset_doc: + raise RuntimeError( + "USD Model subset not found: " + "%s (%s)" % (model_subset, asset) + ) diff --git a/openpype/hosts/houdini/plugins/publish/validate_usd_shade_workspace.py b/openpype/hosts/houdini/plugins/publish/validate_usd_shade_workspace.py new file mode 100644 index 0000000000..a77ca2f3cb --- /dev/null +++ b/openpype/hosts/houdini/plugins/publish/validate_usd_shade_workspace.py @@ -0,0 +1,63 @@ +import pyblish.api +import openpype.api + +import hou + + +class ValidateUsdShadeWorkspace(pyblish.api.InstancePlugin): + """Validate USD Shading Workspace is correct version. + + There have been some issues with outdated/erroneous Shading Workspaces + so this is to confirm everything is set as it should. 
+ + """ + + order = openpype.api.ValidateContentsOrder + hosts = ["houdini"] + families = ["usdShade"] + label = "USD Shade Workspace" + + def process(self, instance): + + rop = instance[0] + workspace = rop.parent() + + definition = workspace.type().definition() + name = definition.nodeType().name() + library = definition.libraryFilePath() + + all_definitions = hou.hda.definitionsInFile(library) + node_type, version = name.rsplit(":", 1) + version = float(version) + + highest = version + for other_definition in all_definitions: + other_name = other_definition.nodeType().name() + other_node_type, other_version = other_name.rsplit(":", 1) + other_version = float(other_version) + + if node_type != other_node_type: + continue + + # Get highest version + highest = max(highest, other_version) + + if version != highest: + raise RuntimeError( + "Shading Workspace is not the latest version." + " Found %s. Latest is %s." % (version, highest) + ) + + # There were some issues with the editable node not having the right + # configured path. So for now let's assure that is correct to.from + value = ( + 'avalon://`chs("../asset_name")`/' + 'usdShade`chs("../model_variantname1")`.usd' + ) + rop_value = rop.parm("lopoutput").rawValue() + if rop_value != value: + raise RuntimeError( + "Shading Workspace has invalid 'lopoutput'" + " parameter value. The Shading Workspace" + " needs to be reset to its default values." + ) diff --git a/openpype/hosts/houdini/plugins/publish/validate_vdb_input_node.py b/openpype/hosts/houdini/plugins/publish/validate_vdb_input_node.py index 7b23d73ac7..0ae1bc94eb 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_vdb_input_node.py +++ b/openpype/hosts/houdini/plugins/publish/validate_vdb_input_node.py @@ -3,7 +3,7 @@ import openpype.api class ValidateVDBInputNode(pyblish.api.InstancePlugin): - """Validate that the node connected to the output node is of type VDB + """Validate that the node connected to the output node is of type VDB. 
Regardless of the amount of VDBs create the output will need to have an equal amount of VDBs, points, primitives and vertices @@ -24,8 +24,9 @@ class ValidateVDBInputNode(pyblish.api.InstancePlugin): def process(self, instance): invalid = self.get_invalid(instance) if invalid: - raise RuntimeError("Node connected to the output node is not" - "of type VDB!") + raise RuntimeError( + "Node connected to the output node is not" "of type VDB!" + ) @classmethod def get_invalid(cls, instance): diff --git a/openpype/hosts/houdini/plugins/publish/validate_vdb_output_node.py b/openpype/hosts/houdini/plugins/publish/validate_vdb_output_node.py new file mode 100644 index 0000000000..1ba840b71d --- /dev/null +++ b/openpype/hosts/houdini/plugins/publish/validate_vdb_output_node.py @@ -0,0 +1,73 @@ +import pyblish.api +import openpype.api +import hou + + +class ValidateVDBOutputNode(pyblish.api.InstancePlugin): + """Validate that the node connected to the output node is of type VDB. + + Regardless of the amount of VDBs create the output will need to have an + equal amount of VDBs, points, primitives and vertices + + A VDB is an inherited type of Prim, holds the following data: + - Primitives: 1 + - Points: 1 + - Vertices: 1 + - VDBs: 1 + + """ + + order = openpype.api.ValidateContentsOrder + 0.1 + families = ["vdbcache"] + hosts = ["houdini"] + label = "Validate Output Node (VDB)" + + def process(self, instance): + invalid = self.get_invalid(instance) + if invalid: + raise RuntimeError( + "Node connected to the output node is not" " of type VDB!" + ) + + @classmethod + def get_invalid(cls, instance): + + node = instance.data["output_node"] + if node is None: + cls.log.error( + "SOP path is not correctly set on " + "ROP node '%s'." % instance[0].path() + ) + return [instance] + + frame = instance.data.get("frameStart", 0) + geometry = node.geometryAtFrame(frame) + if geometry is None: + # No geometry data on this node, maybe the node hasn't cooked? 
+ cls.log.error( + "SOP node has no geometry data. " + "Is it cooked? %s" % node.path() + ) + return [node] + + prims = geometry.prims() + nr_of_prims = len(prims) + + # All primitives must be hou.VDB + invalid_prim = False + for prim in prims: + if not isinstance(prim, hou.VDB): + cls.log.error("Found non-VDB primitive: %s" % prim) + invalid_prim = True + if invalid_prim: + return [instance] + + nr_of_points = len(geometry.points()) + if nr_of_points != nr_of_prims: + cls.log.error("The number of primitives and points do not match") + return [instance] + + for prim in prims: + if prim.numVertices() != 1: + cls.log.error("Found primitive with more than 1 vertex!") + return [instance] diff --git a/openpype/hosts/houdini/startup/scripts/123.py b/openpype/hosts/houdini/startup/scripts/123.py index 6d90b8352e..4233d68c15 100644 --- a/openpype/hosts/houdini/startup/scripts/123.py +++ b/openpype/hosts/houdini/startup/scripts/123.py @@ -1,5 +1,4 @@ from avalon import api, houdini -import hou def main(): diff --git a/openpype/modules/ftrack/python2_vendor/backports.functools_lru_cache/backports/__init__.py b/openpype/hosts/houdini/vendor/husdoutputprocessors/__init__.py similarity index 100% rename from openpype/modules/ftrack/python2_vendor/backports.functools_lru_cache/backports/__init__.py rename to openpype/hosts/houdini/vendor/husdoutputprocessors/__init__.py diff --git a/openpype/hosts/houdini/vendor/husdoutputprocessors/avalon_uri_processor.py b/openpype/hosts/houdini/vendor/husdoutputprocessors/avalon_uri_processor.py new file mode 100644 index 0000000000..4071eb3e0c --- /dev/null +++ b/openpype/hosts/houdini/vendor/husdoutputprocessors/avalon_uri_processor.py @@ -0,0 +1,168 @@ +import hou +import husdoutputprocessors.base as base +import os +import re +import logging + +import colorbleed.usdlib as usdlib + + +def _get_project_publish_template(): + """Return publish template from database for current project""" + from avalon import io + project = 
io.find_one({"type": "project"}, + projection={"config.template.publish": True}) + return project["config"]["template"]["publish"] + + +class AvalonURIOutputProcessor(base.OutputProcessorBase): + """Process Avalon URIs into their full path equivalents. + + """ + + _parameters = None + _param_prefix = 'avalonurioutputprocessor_' + _parms = { + "use_publish_paths": _param_prefix + "use_publish_paths" + } + + def __init__(self): + """ There is only one object of each output processor class that is + ever created in a Houdini session. Therefore be very careful + about what data gets put in this object. + """ + self._template = None + self._use_publish_paths = False + self._cache = dict() + + def displayName(self): + return 'Avalon URI Output Processor' + + def parameters(self): + + if not self._parameters: + parameters = hou.ParmTemplateGroup() + use_publish_path = hou.ToggleParmTemplate( + name=self._parms["use_publish_paths"], + label='Resolve Reference paths to publish paths', + default_value=False, + help=("When enabled any paths for Layers, References or " + "Payloads are resolved to published master versions.\n" + "This is usually only used by the publishing pipeline, " + "but can be used for testing too.")) + parameters.append(use_publish_path) + self._parameters = parameters.asDialogScript() + + return self._parameters + + def beginSave(self, config_node, t): + self._template = _get_project_publish_template() + + parm = self._parms["use_publish_paths"] + self._use_publish_paths = config_node.parm(parm).evalAtTime(t) + self._cache.clear() + + def endSave(self): + self._template = None + self._use_publish_paths = None + self._cache.clear() + + def processAsset(self, + asset_path, + asset_path_for_save, + referencing_layer_path, + asset_is_layer, + for_save): + """ + Args: + asset_path (str): The incoming file path you want to alter or not. + asset_path_for_save (bool): Whether the current path is a + referenced path in the USD file. 
When True, return the path + you want inside USD file. + referencing_layer_path (str): ??? + asset_is_layer (bool): Whether this asset is a USD layer file. + If this is False, the asset is something else (for example, + a texture or volume file). + for_save (bool): Whether the asset path is for a file to be saved + out. If so, then return actual written filepath. + + Returns: + The refactored asset path. + + """ + + # Retrieve from cache if this query occurred before (optimization) + cache_key = (asset_path, asset_path_for_save, asset_is_layer, for_save) + if cache_key in self._cache: + return self._cache[cache_key] + + relative_template = "{asset}_{subset}.{ext}" + uri_data = usdlib.parse_avalon_uri(asset_path) + if uri_data: + + if for_save: + # Set save output path to a relative path so other + # processors can potentially manage it easily? + path = relative_template.format(**uri_data) + + print("Avalon URI Resolver: %s -> %s" % (asset_path, path)) + self._cache[cache_key] = path + return path + + if self._use_publish_paths: + # Resolve to an Avalon published asset for embedded paths + path = self._get_usd_master_path(**uri_data) + else: + path = relative_template.format(**uri_data) + + print("Avalon URI Resolver: %s -> %s" % (asset_path, path)) + self._cache[cache_key] = path + return path + + self._cache[cache_key] = asset_path + return asset_path + + def _get_usd_master_path(self, + asset, + subset, + ext): + """Get the filepath for a .usd file of a subset. + + This will return the path to an unversioned master file generated by + `usd_master_file.py`. 
+ + """ + + from avalon import api, io + + PROJECT = api.Session["AVALON_PROJECT"] + asset_doc = io.find_one({"name": asset, + "type": "asset"}) + if not asset_doc: + raise RuntimeError("Invalid asset name: '%s'" % asset) + + root = api.registered_root() + path = self._template.format(**{ + "root": root, + "project": PROJECT, + "silo": asset_doc["silo"], + "asset": asset_doc["name"], + "subset": subset, + "representation": ext, + "version": 0 # stub version zero + }) + + # Remove the version folder + subset_folder = os.path.dirname(os.path.dirname(path)) + master_folder = os.path.join(subset_folder, "master") + fname = "{0}.{1}".format(subset, ext) + + return os.path.join(master_folder, fname).replace("\\", "/") + + +output_processor = AvalonURIOutputProcessor() + + +def usdOutputProcessor(): + return output_processor + diff --git a/openpype/hosts/houdini/vendor/husdoutputprocessors/stagingdir_processor.py b/openpype/hosts/houdini/vendor/husdoutputprocessors/stagingdir_processor.py new file mode 100644 index 0000000000..d8e36d5aa8 --- /dev/null +++ b/openpype/hosts/houdini/vendor/husdoutputprocessors/stagingdir_processor.py @@ -0,0 +1,90 @@ +import hou +import husdoutputprocessors.base as base +import os + + +class StagingDirOutputProcessor(base.OutputProcessorBase): + """Output all USD Rop file nodes into the Staging Directory + + Ignore any folders and paths set in the Configured Layers + and USD Rop node, just take the filename and save into a + single directory. 
+ + """ + theParameters = None + parameter_prefix = "stagingdiroutputprocessor_" + stagingdir_parm_name = parameter_prefix + "stagingDir" + + def __init__(self): + self.staging_dir = None + + def displayName(self): + return 'StagingDir Output Processor' + + def parameters(self): + if not self.theParameters: + parameters = hou.ParmTemplateGroup() + rootdirparm = hou.StringParmTemplate( + self.stagingdir_parm_name, + 'Staging Directory', 1, + string_type=hou.stringParmType.FileReference, + file_type=hou.fileType.Directory + ) + parameters.append(rootdirparm) + self.theParameters = parameters.asDialogScript() + return self.theParameters + + def beginSave(self, config_node, t): + + # Use the Root Directory parameter if it is set. + root_dir_parm = config_node.parm(self.stagingdir_parm_name) + if root_dir_parm: + self.staging_dir = root_dir_parm.evalAtTime(t) + + if not self.staging_dir: + out_file_parm = config_node.parm('lopoutput') + if out_file_parm: + self.staging_dir = out_file_parm.evalAtTime(t) + if self.staging_dir: + (self.staging_dir, filename) = os.path.split(self.staging_dir) + + def endSave(self): + self.staging_dir = None + + def processAsset(self, asset_path, + asset_path_for_save, + referencing_layer_path, + asset_is_layer, + for_save): + """ + Args: + asset_path (str): The incoming file path you want to alter or not. + asset_path_for_save (bool): Whether the current path is a + referenced path in the USD file. When True, return the path + you want inside USD file. + referencing_layer_path (str): ??? + asset_is_layer (bool): Whether this asset is a USD layer file. + If this is False, the asset is something else (for example, + a texture or volume file). + for_save (bool): Whether the asset path is for a file to be saved + out. If so, then return actual written filepath. + + Returns: + The refactored asset path. + + """ + + # Treat save paths as being relative to the output path. 
+ if for_save and self.staging_dir: + # Whenever we're processing a Save Path make sure to + # resolve it to the Staging Directory + filename = os.path.basename(asset_path) + return os.path.join(self.staging_dir, filename) + + return asset_path + + +output_processor = StagingDirOutputProcessor() +def usdOutputProcessor(): + return output_processor + diff --git a/openpype/hosts/maya/plugins/create/create_animation.py b/openpype/hosts/maya/plugins/create/create_animation.py index 5155aec0ab..7ce96166f7 100644 --- a/openpype/hosts/maya/plugins/create/create_animation.py +++ b/openpype/hosts/maya/plugins/create/create_animation.py @@ -24,6 +24,7 @@ class CreateAnimation(plugin.Creator): # Write vertex colors with the geometry. self.data["writeColorSets"] = False + self.data["writeFaceSets"] = False # Include only renderable visible shapes. # Skips locators and empty transforms diff --git a/openpype/hosts/maya/plugins/create/create_model.py b/openpype/hosts/maya/plugins/create/create_model.py index f1d9d22c1c..37faad23a0 100644 --- a/openpype/hosts/maya/plugins/create/create_model.py +++ b/openpype/hosts/maya/plugins/create/create_model.py @@ -15,6 +15,7 @@ class CreateModel(plugin.Creator): # Vertex colors with the geometry self.data["writeColorSets"] = False + self.data["writeFaceSets"] = False # Include attributes by attribute name or prefix self.data["attr"] = "" diff --git a/openpype/hosts/maya/plugins/create/create_pointcache.py b/openpype/hosts/maya/plugins/create/create_pointcache.py index 9afea731fd..d8e5fd43a7 100644 --- a/openpype/hosts/maya/plugins/create/create_pointcache.py +++ b/openpype/hosts/maya/plugins/create/create_pointcache.py @@ -20,6 +20,7 @@ class CreatePointCache(plugin.Creator): self.data.update(lib.collect_animation_data()) self.data["writeColorSets"] = False # Vertex colors with the geometry. + self.data["writeFaceSets"] = False # Vertex colors with the geometry. 
self.data["renderableOnly"] = False # Only renderable visible shapes self.data["visibleOnly"] = False # only nodes that are visible self.data["includeParentHierarchy"] = False # Include parent groups diff --git a/openpype/hosts/maya/plugins/publish/extract_animation.py b/openpype/hosts/maya/plugins/publish/extract_animation.py index b86ded1fb0..7ecc40a68d 100644 --- a/openpype/hosts/maya/plugins/publish/extract_animation.py +++ b/openpype/hosts/maya/plugins/publish/extract_animation.py @@ -57,7 +57,8 @@ class ExtractAnimation(openpype.api.Extractor): "uvWrite": True, "selection": True, "worldSpace": instance.data.get("worldSpace", True), - "writeColorSets": instance.data.get("writeColorSets", False) + "writeColorSets": instance.data.get("writeColorSets", False), + "writeFaceSets": instance.data.get("writeFaceSets", False) } if not instance.data.get("includeParentHierarchy", True): diff --git a/openpype/hosts/maya/plugins/publish/extract_model.py b/openpype/hosts/maya/plugins/publish/extract_model.py index 1773297826..40cc9427f3 100644 --- a/openpype/hosts/maya/plugins/publish/extract_model.py +++ b/openpype/hosts/maya/plugins/publish/extract_model.py @@ -28,6 +28,7 @@ class ExtractModel(openpype.api.Extractor): hosts = ["maya"] families = ["model"] scene_type = "ma" + optional = True def process(self, instance): """Plugin entry point.""" diff --git a/openpype/hosts/maya/plugins/publish/extract_pointcache.py b/openpype/hosts/maya/plugins/publish/extract_pointcache.py index ba716c0d18..630cc39398 100644 --- a/openpype/hosts/maya/plugins/publish/extract_pointcache.py +++ b/openpype/hosts/maya/plugins/publish/extract_pointcache.py @@ -38,6 +38,7 @@ class ExtractAlembic(openpype.api.Extractor): # Get extra export arguments writeColorSets = instance.data.get("writeColorSets", False) + writeFaceSets = instance.data.get("writeFaceSets", False) self.log.info("Extracting pointcache..") dirname = self.staging_dir(instance) @@ -53,6 +54,7 @@ class 
ExtractAlembic(openpype.api.Extractor): "writeVisibility": True, "writeCreases": True, "writeColorSets": writeColorSets, + "writeFaceSets": writeFaceSets, "uvWrite": True, "selection": True, "worldSpace": instance.data.get("worldSpace", True) diff --git a/openpype/hosts/standalonepublisher/plugins/publish/extract_harmony_zip.py b/openpype/hosts/standalonepublisher/plugins/publish/extract_harmony_zip.py index f7f96c7d03..adbac6ef09 100644 --- a/openpype/hosts/standalonepublisher/plugins/publish/extract_harmony_zip.py +++ b/openpype/hosts/standalonepublisher/plugins/publish/extract_harmony_zip.py @@ -11,6 +11,7 @@ import zipfile import pyblish.api from avalon import api, io import openpype.api +from openpype.lib import get_workfile_template_key_from_context class ExtractHarmonyZip(openpype.api.Extractor): @@ -65,10 +66,10 @@ class ExtractHarmonyZip(openpype.api.Extractor): # Get Task types and Statuses for creation if needed self.task_types = self._get_all_task_types(project_entity) - self.task_statuses = self.get_all_task_statuses(project_entity) + self.task_statuses = self._get_all_task_statuses(project_entity) # Get Statuses of AssetVersions - self.assetversion_statuses = self.get_all_assetversion_statuses( + self.assetversion_statuses = self._get_all_assetversion_statuses( project_entity ) @@ -233,18 +234,28 @@ class ExtractHarmonyZip(openpype.api.Extractor): "version": 1, "ext": "zip", } + host_name = "harmony" + template_name = get_workfile_template_key_from_context( + instance.data["asset"], + instance.data.get("task"), + host_name, + project_name=project_entity["name"], + dbcon=io + ) # Get a valid work filename first with version 1 - file_template = anatomy.templates["work"]["file"] + file_template = anatomy.templates[template_name]["file"] anatomy_filled = anatomy.format(data) - work_path = anatomy_filled["work"]["path"] + work_path = anatomy_filled[template_name]["path"] # Get the final work filename with the proper version data["version"] = 
api.last_workfile_with_version( - os.path.dirname(work_path), file_template, data, [".zip"] + os.path.dirname(work_path), + file_template, + data, + api.HOST_WORKFILE_EXTENSIONS[host_name] )[1] - work_path = anatomy_filled["work"]["path"] base_name = os.path.splitext(os.path.basename(work_path))[0] staging_work_path = os.path.join(os.path.dirname(staging_scene), diff --git a/openpype/hosts/tvpaint/plugins/publish/collect_instances.py b/openpype/hosts/tvpaint/plugins/publish/collect_instances.py index e496b144cd..dfa8f17ee9 100644 --- a/openpype/hosts/tvpaint/plugins/publish/collect_instances.py +++ b/openpype/hosts/tvpaint/plugins/publish/collect_instances.py @@ -9,7 +9,7 @@ from openpype.lib import get_subset_name class CollectInstances(pyblish.api.ContextPlugin): label = "Collect Instances" - order = pyblish.api.CollectorOrder - 1 + order = pyblish.api.CollectorOrder - 0.4 hosts = ["tvpaint"] def process(self, context): diff --git a/openpype/hosts/tvpaint/plugins/publish/collect_workfile.py b/openpype/hosts/tvpaint/plugins/publish/collect_workfile.py index b61fec895f..65e38ea258 100644 --- a/openpype/hosts/tvpaint/plugins/publish/collect_workfile.py +++ b/openpype/hosts/tvpaint/plugins/publish/collect_workfile.py @@ -8,7 +8,7 @@ from openpype.lib import get_subset_name class CollectWorkfile(pyblish.api.ContextPlugin): label = "Collect Workfile" - order = pyblish.api.CollectorOrder - 1 + order = pyblish.api.CollectorOrder - 0.4 hosts = ["tvpaint"] def process(self, context): diff --git a/openpype/hosts/tvpaint/plugins/publish/collect_workfile_data.py b/openpype/hosts/tvpaint/plugins/publish/collect_workfile_data.py index 79cc01740a..f4259f1b5f 100644 --- a/openpype/hosts/tvpaint/plugins/publish/collect_workfile_data.py +++ b/openpype/hosts/tvpaint/plugins/publish/collect_workfile_data.py @@ -39,7 +39,7 @@ class ResetTVPaintWorkfileMetadata(pyblish.api.Action): class CollectWorkfileData(pyblish.api.ContextPlugin): label = "Collect Workfile Data" - order = 
pyblish.api.CollectorOrder - 1.01 + order = pyblish.api.CollectorOrder - 0.45 hosts = ["tvpaint"] actions = [ResetTVPaintWorkfileMetadata] diff --git a/openpype/hosts/unreal/plugins/load/load_alembic_geometrycache.py b/openpype/hosts/unreal/plugins/load/load_alembic_geometrycache.py index a9279bf6e0..ad37a7a068 100644 --- a/openpype/hosts/unreal/plugins/load/load_alembic_geometrycache.py +++ b/openpype/hosts/unreal/plugins/load/load_alembic_geometrycache.py @@ -15,6 +15,46 @@ class PointCacheAlembicLoader(api.Loader): icon = "cube" color = "orange" + def get_task( + self, filename, asset_dir, asset_name, replace, frame_start, frame_end + ): + task = unreal.AssetImportTask() + options = unreal.AbcImportSettings() + gc_settings = unreal.AbcGeometryCacheSettings() + conversion_settings = unreal.AbcConversionSettings() + sampling_settings = unreal.AbcSamplingSettings() + + task.set_editor_property('filename', filename) + task.set_editor_property('destination_path', asset_dir) + task.set_editor_property('destination_name', asset_name) + task.set_editor_property('replace_existing', replace) + task.set_editor_property('automated', True) + task.set_editor_property('save', True) + + # set import options here + # Unreal 4.24 ignores the settings. 
It works with Unreal 4.26 + options.set_editor_property( + 'import_type', unreal.AlembicImportType.GEOMETRY_CACHE) + + gc_settings.set_editor_property('flatten_tracks', False) + + conversion_settings.set_editor_property('flip_u', False) + conversion_settings.set_editor_property('flip_v', True) + conversion_settings.set_editor_property( + 'scale', unreal.Vector(x=100.0, y=100.0, z=100.0)) + conversion_settings.set_editor_property( + 'rotation', unreal.Vector(x=-90.0, y=0.0, z=180.0)) + + sampling_settings.set_editor_property('frame_start', frame_start) + sampling_settings.set_editor_property('frame_end', frame_end) + + options.geometry_cache_settings = gc_settings + options.conversion_settings = conversion_settings + options.sampling_settings = sampling_settings + task.options = options + + return task + def load(self, context, name, namespace, data): """ Load and containerise representation into Content Browser. @@ -55,25 +95,17 @@ class PointCacheAlembicLoader(api.Loader): unreal.EditorAssetLibrary.make_directory(asset_dir) - task = unreal.AssetImportTask() + frame_start = context.get('asset').get('data').get('frameStart') + frame_end = context.get('asset').get('data').get('frameEnd') - task.set_editor_property('filename', self.fname) - task.set_editor_property('destination_path', asset_dir) - task.set_editor_property('destination_name', asset_name) - task.set_editor_property('replace_existing', False) - task.set_editor_property('automated', True) - task.set_editor_property('save', True) + # If frame start and end are the same, we increse the end frame by + # one, otherwise Unreal will not import it + if frame_start == frame_end: + frame_end += 1 - # set import options here - # Unreal 4.24 ignores the settings. 
It works with Unreal 4.26 - options = unreal.AbcImportSettings() - options.set_editor_property( - 'import_type', unreal.AlembicImportType.GEOMETRY_CACHE) + task = self.get_task( + self.fname, asset_dir, asset_name, False, frame_start, frame_end) - options.geometry_cache_settings.set_editor_property( - 'flatten_tracks', False) - - task.options = options unreal.AssetToolsHelpers.get_asset_tools().import_asset_tasks([task]) # noqa: E501 # Create Asset Container @@ -109,28 +141,11 @@ class PointCacheAlembicLoader(api.Loader): source_path = api.get_representation_path(representation) destination_path = container["namespace"] - task = unreal.AssetImportTask() + task = self.get_task(source_path, destination_path, name, True) - task.set_editor_property('filename', source_path) - task.set_editor_property('destination_path', destination_path) - # strip suffix - task.set_editor_property('destination_name', name) - task.set_editor_property('replace_existing', True) - task.set_editor_property('automated', True) - task.set_editor_property('save', True) - - # set import options here - # Unreal 4.24 ignores the settings. 
It works with Unreal 4.26 - options = unreal.AbcImportSettings() - options.set_editor_property( - 'import_type', unreal.AlembicImportType.GEOMETRY_CACHE) - - options.geometry_cache_settings.set_editor_property( - 'flatten_tracks', False) - - task.options = options # do import fbx and replace existing data unreal.AssetToolsHelpers.get_asset_tools().import_asset_tasks([task]) + container_path = "{}/{}".format(container["namespace"], container["objectName"]) # update metadata diff --git a/openpype/hosts/unreal/plugins/load/load_alembic_staticmesh.py b/openpype/hosts/unreal/plugins/load/load_alembic_staticmesh.py index 12b9320f72..ccec31b832 100644 --- a/openpype/hosts/unreal/plugins/load/load_alembic_staticmesh.py +++ b/openpype/hosts/unreal/plugins/load/load_alembic_staticmesh.py @@ -15,6 +15,39 @@ class StaticMeshAlembicLoader(api.Loader): icon = "cube" color = "orange" + def get_task(self, filename, asset_dir, asset_name, replace): + task = unreal.AssetImportTask() + options = unreal.AbcImportSettings() + sm_settings = unreal.AbcStaticMeshSettings() + conversion_settings = unreal.AbcConversionSettings() + + task.set_editor_property('filename', filename) + task.set_editor_property('destination_path', asset_dir) + task.set_editor_property('destination_name', asset_name) + task.set_editor_property('replace_existing', replace) + task.set_editor_property('automated', True) + task.set_editor_property('save', True) + + # set import options here + # Unreal 4.24 ignores the settings. 
It works with Unreal 4.26 + options.set_editor_property( + 'import_type', unreal.AlembicImportType.STATIC_MESH) + + sm_settings.set_editor_property('merge_meshes', True) + + conversion_settings.set_editor_property('flip_u', False) + conversion_settings.set_editor_property('flip_v', True) + conversion_settings.set_editor_property( + 'scale', unreal.Vector(x=100.0, y=100.0, z=100.0)) + conversion_settings.set_editor_property( + 'rotation', unreal.Vector(x=-90.0, y=0.0, z=180.0)) + + options.static_mesh_settings = sm_settings + options.conversion_settings = conversion_settings + task.options = options + + return task + def load(self, context, name, namespace, data): """ Load and containerise representation into Content Browser. @@ -55,22 +88,8 @@ class StaticMeshAlembicLoader(api.Loader): unreal.EditorAssetLibrary.make_directory(asset_dir) - task = unreal.AssetImportTask() + task = self.get_task(self.fname, asset_dir, asset_name, False) - task.set_editor_property('filename', self.fname) - task.set_editor_property('destination_path', asset_dir) - task.set_editor_property('destination_name', asset_name) - task.set_editor_property('replace_existing', False) - task.set_editor_property('automated', True) - task.set_editor_property('save', True) - - # set import options here - # Unreal 4.24 ignores the settings. 
It works with Unreal 4.26 - options = unreal.AbcImportSettings() - options.set_editor_property( - 'import_type', unreal.AlembicImportType.STATIC_MESH) - - task.options = options unreal.AssetToolsHelpers.get_asset_tools().import_asset_tasks([task]) # noqa: E501 # Create Asset Container @@ -106,25 +125,11 @@ class StaticMeshAlembicLoader(api.Loader): source_path = api.get_representation_path(representation) destination_path = container["namespace"] - task = unreal.AssetImportTask() + task = self.get_task(source_path, destination_path, name, True) - task.set_editor_property('filename', source_path) - task.set_editor_property('destination_path', destination_path) - # strip suffix - task.set_editor_property('destination_name', name) - task.set_editor_property('replace_existing', True) - task.set_editor_property('automated', True) - task.set_editor_property('save', True) - - # set import options here - # Unreal 4.24 ignores the settings. It works with Unreal 4.26 - options = unreal.AbcImportSettings() - options.set_editor_property( - 'import_type', unreal.AlembicImportType.STATIC_MESH) - - task.options = options # do import fbx and replace existing data unreal.AssetToolsHelpers.get_asset_tools().import_asset_tasks([task]) + container_path = "{}/{}".format(container["namespace"], container["objectName"]) # update metadata diff --git a/openpype/hosts/unreal/plugins/load/load_staticmeshfbx.py b/openpype/hosts/unreal/plugins/load/load_staticmeshfbx.py index dcb566fa4c..d25f84ea69 100644 --- a/openpype/hosts/unreal/plugins/load/load_staticmeshfbx.py +++ b/openpype/hosts/unreal/plugins/load/load_staticmeshfbx.py @@ -15,6 +15,31 @@ class StaticMeshFBXLoader(api.Loader): icon = "cube" color = "orange" + def get_task(self, filename, asset_dir, asset_name, replace): + task = unreal.AssetImportTask() + options = unreal.FbxImportUI() + import_data = unreal.FbxStaticMeshImportData() + + task.set_editor_property('filename', filename) + task.set_editor_property('destination_path', 
asset_dir) + task.set_editor_property('destination_name', asset_name) + task.set_editor_property('replace_existing', replace) + task.set_editor_property('automated', True) + task.set_editor_property('save', True) + + # set import options here + options.set_editor_property( + 'automated_import_should_detect_type', False) + options.set_editor_property('import_animations', False) + + import_data.set_editor_property('combine_meshes', True) + import_data.set_editor_property('remove_degenerates', False) + + options.static_mesh_import_data = import_data + task.options = options + + return task + def load(self, context, name, namespace, data): """ Load and containerise representation into Content Browser. @@ -55,22 +80,8 @@ class StaticMeshFBXLoader(api.Loader): unreal.EditorAssetLibrary.make_directory(asset_dir) - task = unreal.AssetImportTask() + task = self.get_task(self.fname, asset_dir, asset_name, False) - task.set_editor_property('filename', self.fname) - task.set_editor_property('destination_path', asset_dir) - task.set_editor_property('destination_name', asset_name) - task.set_editor_property('replace_existing', False) - task.set_editor_property('automated', True) - task.set_editor_property('save', True) - - # set import options here - options = unreal.FbxImportUI() - options.set_editor_property( - 'automated_import_should_detect_type', False) - options.set_editor_property('import_animations', False) - - task.options = options unreal.AssetToolsHelpers.get_asset_tools().import_asset_tasks([task]) # noqa: E501 # Create Asset Container @@ -102,29 +113,15 @@ class StaticMeshFBXLoader(api.Loader): return asset_content def update(self, container, representation): - name = container["name"] + name = container["asset_name"] source_path = api.get_representation_path(representation) destination_path = container["namespace"] - task = unreal.AssetImportTask() + task = self.get_task(source_path, destination_path, name, True) - task.set_editor_property('filename', source_path) 
- task.set_editor_property('destination_path', destination_path) - # strip suffix - task.set_editor_property('destination_name', name) - task.set_editor_property('replace_existing', True) - task.set_editor_property('automated', True) - task.set_editor_property('save', True) - - # set import options here - options = unreal.FbxImportUI() - options.set_editor_property( - 'automated_import_should_detect_type', False) - options.set_editor_property('import_animations', False) - - task.options = options # do import fbx and replace existing data unreal.AssetToolsHelpers.get_asset_tools().import_asset_tasks([task]) + container_path = "{}/{}".format(container["namespace"], container["objectName"]) # update metadata diff --git a/openpype/hosts/unreal/plugins/publish/extract_layout.py b/openpype/hosts/unreal/plugins/publish/extract_layout.py index 2d9f6eb3d1..a47187cf47 100644 --- a/openpype/hosts/unreal/plugins/publish/extract_layout.py +++ b/openpype/hosts/unreal/plugins/publish/extract_layout.py @@ -83,7 +83,7 @@ class ExtractLayout(openpype.api.Extractor): "z": transform.translation.z }, "rotation": { - "x": math.radians(transform.rotation.euler().x + 90.0), + "x": math.radians(transform.rotation.euler().x), "y": math.radians(transform.rotation.euler().y), "z": math.radians(180.0 - transform.rotation.euler().z) }, diff --git a/openpype/hosts/webpublisher/README.md b/openpype/hosts/webpublisher/README.md new file mode 100644 index 0000000000..0826e44490 --- /dev/null +++ b/openpype/hosts/webpublisher/README.md @@ -0,0 +1,6 @@ +Webpublisher +------------- + +Plugins meant for processing of Webpublisher. + +Gets triggered by calling openpype.cli.remotepublish with appropriate arguments. 
\ No newline at end of file diff --git a/openpype/modules/log_viewer/tray/__init__.py b/openpype/hosts/webpublisher/__init__.py similarity index 100% rename from openpype/modules/log_viewer/tray/__init__.py rename to openpype/hosts/webpublisher/__init__.py diff --git a/openpype/hosts/webpublisher/api/__init__.py b/openpype/hosts/webpublisher/api/__init__.py new file mode 100644 index 0000000000..e40d46d662 --- /dev/null +++ b/openpype/hosts/webpublisher/api/__init__.py @@ -0,0 +1,43 @@ +import os +import logging + +from avalon import api as avalon +from avalon import io +from pyblish import api as pyblish +import openpype.hosts.webpublisher + +log = logging.getLogger("openpype.hosts.webpublisher") + +HOST_DIR = os.path.dirname(os.path.abspath( + openpype.hosts.webpublisher.__file__)) +PLUGINS_DIR = os.path.join(HOST_DIR, "plugins") +PUBLISH_PATH = os.path.join(PLUGINS_DIR, "publish") +LOAD_PATH = os.path.join(PLUGINS_DIR, "load") +CREATE_PATH = os.path.join(PLUGINS_DIR, "create") + + +def application_launch(): + pass + + +def install(): + print("Installing Pype config...") + + pyblish.register_plugin_path(PUBLISH_PATH) + avalon.register_plugin_path(avalon.Loader, LOAD_PATH) + avalon.register_plugin_path(avalon.Creator, CREATE_PATH) + log.info(PUBLISH_PATH) + + io.install() + avalon.on("application.launched", application_launch) + + +def uninstall(): + pyblish.deregister_plugin_path(PUBLISH_PATH) + avalon.deregister_plugin_path(avalon.Loader, LOAD_PATH) + avalon.deregister_plugin_path(avalon.Creator, CREATE_PATH) + + +# to have required methods for interface +def ls(): + pass diff --git a/openpype/hosts/webpublisher/plugins/publish/collect_fps.py b/openpype/hosts/webpublisher/plugins/publish/collect_fps.py new file mode 100644 index 0000000000..79fe53176a --- /dev/null +++ b/openpype/hosts/webpublisher/plugins/publish/collect_fps.py @@ -0,0 +1,28 @@ +""" +Requires: + Nothing + +Provides: + Instance +""" + +import pyblish.api +from pprint import pformat + + +class 
CollectFPS(pyblish.api.InstancePlugin): + """ + Adds fps from context to instance because of ExtractReview + """ + + label = "Collect fps" + order = pyblish.api.CollectorOrder + 0.49 + hosts = ["webpublisher"] + + def process(self, instance): + fps = instance.context.data["fps"] + + instance.data.update({ + "fps": fps + }) + self.log.debug(f"instance.data: {pformat(instance.data)}") diff --git a/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py b/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py new file mode 100644 index 0000000000..6584120d97 --- /dev/null +++ b/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py @@ -0,0 +1,267 @@ +"""Loads publishing context from json and continues in publish process. + +Requires: + anatomy -> context["anatomy"] *(pyblish.api.CollectorOrder - 0.11) + +Provides: + context, instances -> All data from previous publishing process. +""" + +import os +import json +import clique + +import pyblish.api +from avalon import io +from openpype.lib import prepare_template_data + + +class CollectPublishedFiles(pyblish.api.ContextPlugin): + """ + This collector will try to find json files in provided + `OPENPYPE_PUBLISH_DATA`. Those files _MUST_ share same context. + + """ + # must be really early, context values are only in json file + order = pyblish.api.CollectorOrder - 0.490 + label = "Collect rendered frames" + host = ["webpublisher"] + + _context = None + + # from Settings + task_type_to_family = {} + + def _load_json(self, path): + path = path.strip('\"') + assert os.path.isfile(path), ( + "Path to json file doesn't exist. 
\"{}\"".format(path) + ) + data = None + with open(path, "r") as json_file: + try: + data = json.load(json_file) + except Exception as exc: + self.log.error( + "Error loading json: " + "{} - Exception: {}".format(path, exc) + ) + return data + + def _process_batch(self, dir_url): + task_subfolders = [ + os.path.join(dir_url, o) + for o in os.listdir(dir_url) + if os.path.isdir(os.path.join(dir_url, o))] + self.log.info("task_sub:: {}".format(task_subfolders)) + for task_dir in task_subfolders: + task_data = self._load_json(os.path.join(task_dir, + "manifest.json")) + self.log.info("task_data:: {}".format(task_data)) + ctx = task_data["context"] + task_type = "default_task_type" + task_name = None + + if ctx["type"] == "task": + items = ctx["path"].split('/') + asset = items[-2] + os.environ["AVALON_TASK"] = ctx["name"] + task_name = ctx["name"] + task_type = ctx["attributes"]["type"] + else: + asset = ctx["name"] + os.environ["AVALON_TASK"] = "" + + is_sequence = len(task_data["files"]) > 1 + + _, extension = os.path.splitext(task_data["files"][0]) + family, families, subset_template, tags = self._get_family( + self.task_type_to_family, + task_type, + is_sequence, + extension.replace(".", '')) + + subset = self._get_subset_name(family, subset_template, task_name, + task_data["variant"]) + + os.environ["AVALON_ASSET"] = asset + io.Session["AVALON_ASSET"] = asset + + instance = self._context.create_instance(subset) + instance.data["asset"] = asset + instance.data["subset"] = subset + instance.data["family"] = family + instance.data["families"] = families + instance.data["version"] = \ + self._get_last_version(asset, subset) + 1 + instance.data["stagingDir"] = task_dir + instance.data["source"] = "webpublisher" + + # to store logging info into DB openpype.webpublishes + instance.data["ctx_path"] = ctx["path"] + instance.data["batch_id"] = task_data["batch"] + + # to convert from email provided into Ftrack username + instance.data["user_email"] = task_data["user"] + + 
if is_sequence: + instance.data["representations"] = self._process_sequence( + task_data["files"], task_dir, tags + ) + instance.data["frameStart"] = \ + instance.data["representations"][0]["frameStart"] + instance.data["frameEnd"] = \ + instance.data["representations"][0]["frameEnd"] + else: + instance.data["representations"] = self._get_single_repre( + task_dir, task_data["files"], tags + ) + + self.log.info("instance.data:: {}".format(instance.data)) + + def _get_subset_name(self, family, subset_template, task_name, variant): + fill_pairs = { + "variant": variant, + "family": family, + "task": task_name + } + subset = subset_template.format(**prepare_template_data(fill_pairs)) + return subset + + def _get_single_repre(self, task_dir, files, tags): + _, ext = os.path.splitext(files[0]) + repre_data = { + "name": ext[1:], + "ext": ext[1:], + "files": files[0], + "stagingDir": task_dir, + "tags": tags + } + self.log.info("single file repre_data.data:: {}".format(repre_data)) + return [repre_data] + + def _process_sequence(self, files, task_dir, tags): + """Prepare reprentations for sequence of files.""" + collections, remainder = clique.assemble(files) + assert len(collections) == 1, \ + "Too many collections in {}".format(files) + + frame_start = list(collections[0].indexes)[0] + frame_end = list(collections[0].indexes)[-1] + ext = collections[0].tail + repre_data = { + "frameStart": frame_start, + "frameEnd": frame_end, + "name": ext[1:], + "ext": ext[1:], + "files": files, + "stagingDir": task_dir, + "tags": tags + } + self.log.info("sequences repre_data.data:: {}".format(repre_data)) + return [repre_data] + + def _get_family(self, settings, task_type, is_sequence, extension): + """Guess family based on input data. + + Args: + settings (dict): configuration per task_type + task_type (str): Animation|Art etc + is_sequence (bool): single file or sequence + extension (str): without '.' 
+ + Returns: + (family, [families], subset_template_name, tags) tuple + AssertionError if not matching family found + """ + task_obj = settings.get(task_type) + assert task_obj, "No family configuration for '{}'".format(task_type) + + found_family = None + for family, content in task_obj.items(): + if is_sequence != content["is_sequence"]: + continue + if extension in content["extensions"] or \ + '' in content["extensions"]: # all extensions setting + found_family = family + break + + msg = "No family found for combination of " +\ + "task_type: {}, is_sequence:{}, extension: {}".format( + task_type, is_sequence, extension) + assert found_family, msg + + return found_family, \ + content["families"], \ + content["subset_template_name"], \ + content["tags"] + + def _get_last_version(self, asset_name, subset_name): + """Returns version number or 0 for 'asset' and 'subset'""" + query = [ + { + "$match": {"type": "asset", "name": asset_name} + }, + { + "$lookup": + { + "from": os.environ["AVALON_PROJECT"], + "localField": "_id", + "foreignField": "parent", + "as": "subsets" + } + }, + { + "$unwind": "$subsets" + }, + { + "$match": {"subsets.type": "subset", + "subsets.name": subset_name}}, + { + "$lookup": + { + "from": os.environ["AVALON_PROJECT"], + "localField": "subsets._id", + "foreignField": "parent", + "as": "versions" + } + }, + { + "$unwind": "$versions" + }, + { + "$group": { + "_id": { + "asset_name": "$name", + "subset_name": "$subsets.name" + }, + 'version': {'$max': "$versions.name"} + } + } + ] + version = list(io.aggregate(query)) + + if version: + return version[0].get("version") or 0 + else: + return 0 + + def process(self, context): + self._context = context + + batch_dir = os.environ.get("OPENPYPE_PUBLISH_DATA") + + assert batch_dir, ( + "Missing `OPENPYPE_PUBLISH_DATA`") + + assert batch_dir, \ + "Folder {} doesn't exist".format(batch_dir) + + project_name = os.environ.get("AVALON_PROJECT") + if project_name is None: + raise AssertionError( + 
"Environment `AVALON_PROJECT` was not found." + "Could not set project `root` which may cause issues." + ) + + self._process_batch(batch_dir) diff --git a/openpype/hosts/webpublisher/plugins/publish/integrate_context_to_log.py b/openpype/hosts/webpublisher/plugins/publish/integrate_context_to_log.py new file mode 100644 index 0000000000..419c065e16 --- /dev/null +++ b/openpype/hosts/webpublisher/plugins/publish/integrate_context_to_log.py @@ -0,0 +1,38 @@ +import os + +import pyblish.api +from openpype.lib import OpenPypeMongoConnection + + +class IntegrateContextToLog(pyblish.api.ContextPlugin): + """ Adds context information to log document for displaying in front end""" + + label = "Integrate Context to Log" + order = pyblish.api.IntegratorOrder - 0.1 + hosts = ["webpublisher"] + + def process(self, context): + self.log.info("Integrate Context to Log") + + mongo_client = OpenPypeMongoConnection.get_mongo_client() + database_name = os.environ["OPENPYPE_DATABASE_NAME"] + dbcon = mongo_client[database_name]["webpublishes"] + + for instance in context: + self.log.info("ctx_path: {}".format(instance.data.get("ctx_path"))) + self.log.info("batch_id: {}".format(instance.data.get("batch_id"))) + if instance.data.get("ctx_path") and instance.data.get("batch_id"): + self.log.info("Updating log record") + dbcon.update_one( + { + "batch_id": instance.data.get("batch_id"), + "status": "in_progress" + }, + {"$set": + { + "path": instance.data.get("ctx_path") + + }} + ) + + return diff --git a/openpype/hosts/webpublisher/webserver_service/webpublish_routes.py b/openpype/hosts/webpublisher/webserver_service/webpublish_routes.py new file mode 100644 index 0000000000..0014d1b344 --- /dev/null +++ b/openpype/hosts/webpublisher/webserver_service/webpublish_routes.py @@ -0,0 +1,247 @@ +"""Routes and etc. 
for webpublisher API.""" +import os +import json +import datetime +from bson.objectid import ObjectId +import collections +from aiohttp.web_response import Response +import subprocess + +from avalon.api import AvalonMongoDB + +from openpype.lib import OpenPypeMongoConnection +from openpype_modules.avalon_apps.rest_api import _RestApiEndpoint + +from openpype.lib import PypeLogger + +log = PypeLogger.get_logger("WebServer") + + +class RestApiResource: + """Resource carrying needed info and Avalon DB connection for publish.""" + def __init__(self, server_manager, executable, upload_dir): + self.server_manager = server_manager + self.upload_dir = upload_dir + self.executable = executable + + self.dbcon = AvalonMongoDB() + self.dbcon.install() + + @staticmethod + def json_dump_handler(value): + if isinstance(value, datetime.datetime): + return value.isoformat() + if isinstance(value, ObjectId): + return str(value) + raise TypeError(value) + + @classmethod + def encode(cls, data): + return json.dumps( + data, + indent=4, + default=cls.json_dump_handler + ).encode("utf-8") + + +class OpenPypeRestApiResource(RestApiResource): + """Resource carrying OP DB connection for storing batch info into DB.""" + def __init__(self, ): + mongo_client = OpenPypeMongoConnection.get_mongo_client() + database_name = os.environ["OPENPYPE_DATABASE_NAME"] + self.dbcon = mongo_client[database_name]["webpublishes"] + + +class WebpublisherProjectsEndpoint(_RestApiEndpoint): + """Returns list of dict with project info (id, name).""" + async def get(self) -> Response: + output = [] + for project_name in self.dbcon.database.collection_names(): + project_doc = self.dbcon.database[project_name].find_one({ + "type": "project" + }) + if project_doc: + ret_val = { + "id": project_doc["_id"], + "name": project_doc["name"] + } + output.append(ret_val) + return Response( + status=200, + body=self.resource.encode(output), + content_type="application/json" + ) + + +class 
WebpublisherHiearchyEndpoint(_RestApiEndpoint): + """Returns dictionary with context tree from assets.""" + async def get(self, project_name) -> Response: + query_projection = { + "_id": 1, + "data.tasks": 1, + "data.visualParent": 1, + "data.entityType": 1, + "name": 1, + "type": 1, + } + + asset_docs = self.dbcon.database[project_name].find( + {"type": "asset"}, + query_projection + ) + asset_docs_by_id = { + asset_doc["_id"]: asset_doc + for asset_doc in asset_docs + } + + asset_docs_by_parent_id = collections.defaultdict(list) + for asset_doc in asset_docs_by_id.values(): + parent_id = asset_doc["data"].get("visualParent") + asset_docs_by_parent_id[parent_id].append(asset_doc) + + assets = collections.defaultdict(list) + + for parent_id, children in asset_docs_by_parent_id.items(): + for child in children: + node = assets.get(child["_id"]) + if not node: + node = Node(child["_id"], + child["data"].get("entityType", "Folder"), + child["name"]) + assets[child["_id"]] = node + + tasks = child["data"].get("tasks", {}) + for t_name, t_con in tasks.items(): + task_node = TaskNode("task", t_name) + task_node["attributes"]["type"] = t_con.get("type") + + task_node.parent = node + + parent_node = assets.get(parent_id) + if not parent_node: + asset_doc = asset_docs_by_id.get(parent_id) + if asset_doc: # regular node + parent_node = Node(parent_id, + asset_doc["data"].get("entityType", + "Folder"), + asset_doc["name"]) + else: # root + parent_node = Node(parent_id, + "project", + project_name) + assets[parent_id] = parent_node + node.parent = parent_node + + roots = [x for x in assets.values() if x.parent is None] + + return Response( + status=200, + body=self.resource.encode(roots[0]), + content_type="application/json" + ) + + +class Node(dict): + """Node element in context tree.""" + + def __init__(self, uid, node_type, name): + self._parent = None # pointer to parent Node + self["type"] = node_type + self["name"] = name + self['id'] = uid # keep reference to id # + 
self['children'] = [] # collection of pointers to child Nodes + + @property + def parent(self): + return self._parent # simply return the object at the _parent pointer + + @parent.setter + def parent(self, node): + self._parent = node + # add this node to parent's list of children + node['children'].append(self) + + +class TaskNode(Node): + """Special node type only for Tasks.""" + + def __init__(self, node_type, name): + self._parent = None + self["type"] = node_type + self["name"] = name + self["attributes"] = {} + + +class WebpublisherBatchPublishEndpoint(_RestApiEndpoint): + """Triggers headless publishing of batch.""" + async def post(self, request) -> Response: + output = {} + log.info("WebpublisherBatchPublishEndpoint called") + content = await request.json() + + batch_path = os.path.join(self.resource.upload_dir, + content["batch"]) + + openpype_app = self.resource.executable + args = [ + openpype_app, + 'remotepublish', + batch_path + ] + + if not openpype_app or not os.path.exists(openpype_app): + msg = "Non existent OpenPype executable {}".format(openpype_app) + raise RuntimeError(msg) + + add_args = { + "host": "webpublisher", + "project": content["project_name"], + "user": content["user"] + } + + for key, value in add_args.items(): + args.append("--{}".format(key)) + args.append(value) + + log.info("args:: {}".format(args)) + + subprocess.call(args) + return Response( + status=200, + body=self.resource.encode(output), + content_type="application/json" + ) + + +class WebpublisherTaskPublishEndpoint(_RestApiEndpoint): + """Prepared endpoint triggered after each task - for future development.""" + async def post(self, request) -> Response: + return Response( + status=200, + body=self.resource.encode([]), + content_type="application/json" + ) + + +class BatchStatusEndpoint(_RestApiEndpoint): + """Returns dict with info for batch_id.""" + async def get(self, batch_id) -> Response: + output = self.dbcon.find_one({"batch_id": batch_id}) + + return Response( + 
status=200, + body=self.resource.encode(output), + content_type="application/json" + ) + + +class PublishesStatusEndpoint(_RestApiEndpoint): + """Returns list of dict with batch info for user (email address).""" + async def get(self, user) -> Response: + output = list(self.dbcon.find({"user": user})) + + return Response( + status=200, + body=self.resource.encode(output), + content_type="application/json" + ) diff --git a/openpype/hosts/webpublisher/webserver_service/webserver_cli.py b/openpype/hosts/webpublisher/webserver_service/webserver_cli.py new file mode 100644 index 0000000000..d00d269059 --- /dev/null +++ b/openpype/hosts/webpublisher/webserver_service/webserver_cli.py @@ -0,0 +1,141 @@ +import time +import os +from datetime import datetime +import requests +import json + +from openpype.lib import PypeLogger + +from .webpublish_routes import ( + RestApiResource, + OpenPypeRestApiResource, + WebpublisherBatchPublishEndpoint, + WebpublisherTaskPublishEndpoint, + WebpublisherHiearchyEndpoint, + WebpublisherProjectsEndpoint, + BatchStatusEndpoint, + PublishesStatusEndpoint +) + + +log = PypeLogger().get_logger("webserver_gui") + + +def run_webserver(*args, **kwargs): + """Runs webserver in command line, adds routes.""" + from openpype.modules import ModulesManager + + manager = ModulesManager() + webserver_module = manager.modules_by_name["webserver"] + host = kwargs.get("host") or "localhost" + port = kwargs.get("port") or 8079 + server_manager = webserver_module.create_new_server_manager(port, host) + webserver_url = server_manager.url + + resource = RestApiResource(server_manager, + upload_dir=kwargs["upload_dir"], + executable=kwargs["executable"]) + projects_endpoint = WebpublisherProjectsEndpoint(resource) + server_manager.add_route( + "GET", + "/api/projects", + projects_endpoint.dispatch + ) + + hiearchy_endpoint = WebpublisherHiearchyEndpoint(resource) + server_manager.add_route( + "GET", + "/api/hierarchy/{project_name}", + hiearchy_endpoint.dispatch 
+ ) + + # triggers publish + webpublisher_task_publish_endpoint = \ + WebpublisherBatchPublishEndpoint(resource) + server_manager.add_route( + "POST", + "/api/webpublish/batch", + webpublisher_task_publish_endpoint.dispatch + ) + + webpublisher_batch_publish_endpoint = \ + WebpublisherTaskPublishEndpoint(resource) + server_manager.add_route( + "POST", + "/api/webpublish/task", + webpublisher_batch_publish_endpoint.dispatch + ) + + # reporting + openpype_resource = OpenPypeRestApiResource() + batch_status_endpoint = BatchStatusEndpoint(openpype_resource) + server_manager.add_route( + "GET", + "/api/batch_status/{batch_id}", + batch_status_endpoint.dispatch + ) + + user_status_endpoint = PublishesStatusEndpoint(openpype_resource) + server_manager.add_route( + "GET", + "/api/publishes/{user}", + user_status_endpoint.dispatch + ) + + server_manager.start_server() + last_reprocessed = time.time() + while True: + if time.time() - last_reprocessed > 20: + reprocess_failed(kwargs["upload_dir"], webserver_url) + last_reprocessed = time.time() + time.sleep(1.0) + + +def reprocess_failed(upload_dir, webserver_url): + # log.info("check_reprocesable_records") + from openpype.lib import OpenPypeMongoConnection + + mongo_client = OpenPypeMongoConnection.get_mongo_client() + database_name = os.environ["OPENPYPE_DATABASE_NAME"] + dbcon = mongo_client[database_name]["webpublishes"] + + results = dbcon.find({"status": "reprocess"}) + for batch in results: + batch_url = os.path.join(upload_dir, + batch["batch_id"], + "manifest.json") + log.info("batch:: {} {}".format(os.path.exists(batch_url), batch_url)) + if not os.path.exists(batch_url): + msg = "Manifest {} not found".format(batch_url) + print(msg) + dbcon.update_one( + {"_id": batch["_id"]}, + {"$set": + { + "finish_date": datetime.now(), + "status": "error", + "progress": 1, + "log": batch.get("log") + msg + }} + ) + continue + server_url = "{}/api/webpublish/batch".format(webserver_url) + + with open(batch_url) as f: + data = 
json.loads(f.read()) + + try: + r = requests.post(server_url, json=data) + log.info("response{}".format(r)) + except Exception: + log.info("exception", exc_info=True) + + dbcon.update_one( + {"_id": batch["_id"]}, + {"$set": + { + "finish_date": datetime.now(), + "status": "sent_for_reprocessing", + "progress": 1 + }} + ) diff --git a/openpype/lib/__init__.py b/openpype/lib/__init__.py index 12c04a4236..3d392dc745 100644 --- a/openpype/lib/__init__.py +++ b/openpype/lib/__init__.py @@ -52,9 +52,11 @@ from .vendor_bin_utils import ( ) from .python_module_tools import ( + import_filepath, modules_from_path, recursive_bases_from_class, - classes_from_module + classes_from_module, + import_module_from_dirpath ) from .avalon_context import ( @@ -69,6 +71,8 @@ from .avalon_context import ( get_linked_assets, get_latest_version, + get_workfile_template_key, + get_workfile_template_key_from_context, get_workdir_data, get_workdir, get_workdir_with_workdir_data, @@ -170,9 +174,11 @@ __all__ = [ "get_ffmpeg_tool_path", "ffprobe_streams", + "import_filepath", "modules_from_path", "recursive_bases_from_class", "classes_from_module", + "import_module_from_dirpath", "CURRENT_DOC_SCHEMAS", "PROJECT_NAME_ALLOWED_SYMBOLS", @@ -185,6 +191,8 @@ __all__ = [ "get_linked_assets", "get_latest_version", + "get_workfile_template_key", + "get_workfile_template_key_from_context", "get_workdir_data", "get_workdir", "get_workdir_with_workdir_data", diff --git a/openpype/lib/applications.py b/openpype/lib/applications.py index fe964d3bab..45b8e6468d 100644 --- a/openpype/lib/applications.py +++ b/openpype/lib/applications.py @@ -28,7 +28,8 @@ from . 
import ( from .local_settings import get_openpype_username from .avalon_context import ( get_workdir_data, - get_workdir_with_workdir_data + get_workdir_with_workdir_data, + get_workfile_template_key ) from .python_module_tools import ( @@ -1105,7 +1106,7 @@ def prepare_host_environments(data, implementation_envs=True): asset_doc = data.get("asset_doc") # Add tools environments groups_by_name = {} - tool_by_group_name = collections.defaultdict(list) + tool_by_group_name = collections.defaultdict(dict) if asset_doc: # Make sure each tool group can be added only once for key in asset_doc["data"].get("tools_env") or []: @@ -1113,12 +1114,14 @@ def prepare_host_environments(data, implementation_envs=True): if not tool: continue groups_by_name[tool.group.name] = tool.group - tool_by_group_name[tool.group.name].append(tool) + tool_by_group_name[tool.group.name][tool.name] = tool - for group_name, group in groups_by_name.items(): + for group_name in sorted(groups_by_name.keys()): + group = groups_by_name[group_name] environments.append(group.environment) added_env_keys.add(group_name) - for tool in tool_by_group_name[group_name]: + for tool_name in sorted(tool_by_group_name[group_name].keys()): + tool = tool_by_group_name[group_name][tool_name] environments.append(tool.environment) added_env_keys.add(tool.name) @@ -1223,8 +1226,12 @@ def prepare_context_environments(data): # Load project specific environments project_name = project_doc["name"] + project_settings = get_project_settings(project_name) + data["project_settings"] = project_settings # Apply project specific environments on current env value - apply_project_environments_value(project_name, data["env"]) + apply_project_environments_value( + project_name, data["env"], project_settings + ) app = data["app"] workdir_data = get_workdir_data( @@ -1234,8 +1241,20 @@ def prepare_context_environments(data): anatomy = data["anatomy"] + asset_tasks = asset_doc.get("data", {}).get("tasks") or {} + task_info = 
asset_tasks.get(task_name) or {} + task_type = task_info.get("type") + workfile_template_key = get_workfile_template_key( + task_type, + app.host_name, + project_name=project_name, + project_settings=project_settings + ) + try: - workdir = get_workdir_with_workdir_data(workdir_data, anatomy) + workdir = get_workdir_with_workdir_data( + workdir_data, anatomy, template_key=workfile_template_key + ) except Exception as exc: raise ApplicationLaunchFailed( @@ -1268,10 +1287,10 @@ def prepare_context_environments(data): ) data["env"].update(context_env) - _prepare_last_workfile(data, workdir) + _prepare_last_workfile(data, workdir, workfile_template_key) -def _prepare_last_workfile(data, workdir): +def _prepare_last_workfile(data, workdir, workfile_template_key): """last workfile workflow preparation. Function check if should care about last workfile workflow and tries @@ -1332,7 +1351,7 @@ def _prepare_last_workfile(data, workdir): if extensions: anatomy = data["anatomy"] # Find last workfile - file_template = anatomy.templates["work"]["file"] + file_template = anatomy.templates[workfile_template_key]["file"] workdir_data.update({ "version": 1, "user": get_openpype_username(), diff --git a/openpype/lib/avalon_context.py b/openpype/lib/avalon_context.py index c4217cc6d5..497348af33 100644 --- a/openpype/lib/avalon_context.py +++ b/openpype/lib/avalon_context.py @@ -344,6 +344,127 @@ def get_latest_version(asset_name, subset_name, dbcon=None, project_name=None): return version_doc +def get_workfile_template_key_from_context( + asset_name, task_name, host_name, project_name=None, + dbcon=None, project_settings=None +): + """Helper function to get template key for workfile template. + + Do the same as `get_workfile_template_key` but returns value for "session + context". + + It is required to pass one of 'dbcon' with already set project name or + 'project_name' arguments. + + Args: + asset_name(str): Name of asset document. 
+ task_name(str): Task name for which is template key retrieved. + Must be available on asset document under `data.tasks`. + host_name(str): Name of host implementation for which is workfile + used. + project_name(str): Project name where asset and task is. Not required + when 'dbcon' is passed. + dbcon(AvalonMongoDB): Connection to mongo with already set project + under `AVALON_PROJECT`. Not required when 'project_name' is passed. + project_settings(dict): Project settings for passed 'project_name'. + Not required at all but makes function faster. + Raises: + ValueError: When both 'dbcon' and 'project_name' were not + passed. + """ + if not dbcon: + if not project_name: + raise ValueError(( + "`get_workfile_template_key_from_context` requires to pass" + " one of 'dbcon' or 'project_name' arguments." + )) + from avalon.api import AvalonMongoDB + + dbcon = AvalonMongoDB() + dbcon.Session["AVALON_PROJECT"] = project_name + + elif not project_name: + project_name = dbcon.Session["AVALON_PROJECT"] + + asset_doc = dbcon.find_one( + { + "type": "asset", + "name": asset_name + }, + { + "data.tasks": 1 + } + ) + asset_tasks = asset_doc.get("data", {}).get("tasks") or {} + task_info = asset_tasks.get(task_name) or {} + task_type = task_info.get("type") + + return get_workfile_template_key( + task_type, host_name, project_name, project_settings + ) + + +def get_workfile_template_key( + task_type, host_name, project_name=None, project_settings=None +): + """Workfile template key which should be used to get workfile template. + + Function is using profiles from project settings to return right template + for passet task type and host name. + + One of 'project_name' or 'project_settings' must be passed it is preffered + to pass settings if are already available. + + Args: + task_type(str): Name of task type. + host_name(str): Name of host implementation (e.g. "maya", "nuke", ...) + project_name(str): Name of project in which context should look for + settings. 
Not required if `project_settings` are passed. + project_settings(dict): Prepare project settings for project name. + Not needed if `project_name` is passed. + + Raises: + ValueError: When both 'project_name' and 'project_settings' were not + passed. + """ + default = "work" + if not task_type or not host_name: + return default + + if not project_settings: + if not project_name: + raise ValueError(( + "`get_workfile_template_key` requires to pass" + " one of 'project_name' or 'project_settings' arguments." + )) + project_settings = get_project_settings(project_name) + + try: + profiles = ( + project_settings + ["global"] + ["tools"] + ["Workfiles"] + ["workfile_template_profiles"] + ) + except Exception: + profiles = [] + + if not profiles: + return default + + from .profiles_filtering import filter_profiles + + profile_filter = { + "task_types": task_type, + "hosts": host_name + } + profile = filter_profiles(profiles, profile_filter) + if profile: + return profile["workfile_template"] or default + return default + + def get_workdir_data(project_doc, asset_doc, task_name, host_name): """Prepare data for workdir template filling from entered information. @@ -373,7 +494,8 @@ def get_workdir_data(project_doc, asset_doc, task_name, host_name): def get_workdir_with_workdir_data( - workdir_data, anatomy=None, project_name=None, template_key=None + workdir_data, anatomy=None, project_name=None, + template_key=None, dbcon=None ): """Fill workdir path from entered data and project's anatomy. @@ -387,8 +509,10 @@ def get_workdir_with_workdir_data( `project_name` is entered. project_name (str): Project's name. Optional if `anatomy` is entered otherwise Anatomy object is created with using the project name. - template_key (str): Key of work templates in anatomy templates. By - default is seto to `"work"`. + template_key (str): Key of work templates in anatomy templates. If not + passed `get_workfile_template_key_from_context` is used to get it. 
+ dbcon(AvalonMongoDB): Mongo connection. Required only if 'template_key' + and 'project_name' are not passed. Returns: TemplateResult: Workdir path. @@ -406,7 +530,13 @@ def get_workdir_with_workdir_data( anatomy = Anatomy(project_name) if not template_key: - template_key = "work" + template_key = get_workfile_template_key_from_context( + workdir_data["asset"], + workdir_data["task"], + workdir_data["app"], + project_name=workdir_data["project"]["name"], + dbcon=dbcon + ) anatomy_filled = anatomy.format(workdir_data) # Output is TemplateResult object which contain usefull data @@ -447,7 +577,9 @@ def get_workdir( project_doc, asset_doc, task_name, host_name ) # Output is TemplateResult object which contain usefull data - return get_workdir_with_workdir_data(workdir_data, anatomy, template_key) + return get_workdir_with_workdir_data( + workdir_data, anatomy, template_key=template_key + ) @with_avalon @@ -516,7 +648,9 @@ def create_workfile_doc(asset_doc, task_name, filename, workdir, dbcon=None): # Prepare anatomy anatomy = Anatomy(project_doc["name"]) # Get workdir path (result is anatomy.TemplateResult) - template_workdir = get_workdir_with_workdir_data(workdir_data, anatomy) + template_workdir = get_workdir_with_workdir_data( + workdir_data, anatomy, dbcon=dbcon + ) template_workdir_path = str(template_workdir).replace("\\", "/") # Replace slashses in workdir path where workfile is located diff --git a/openpype/lib/profiles_filtering.py b/openpype/lib/profiles_filtering.py index c4410204dd..992d757059 100644 --- a/openpype/lib/profiles_filtering.py +++ b/openpype/lib/profiles_filtering.py @@ -165,7 +165,8 @@ def filter_profiles(profiles_data, key_values, keys_order=None, logger=None): if match == -1: profile_value = profile.get(key) or [] logger.debug( - "\"{}\" not found in {}".format(key, profile_value) + "\"{}\" not found in \"{}\": {}".format(value, key, + profile_value) ) profile_points = -1 break @@ -192,13 +193,13 @@ def filter_profiles(profiles_data, 
key_values, keys_order=None, logger=None): ]) if not matching_profiles: - logger.warning( + logger.info( "None of profiles match your setup. {}".format(log_parts) ) return None if len(matching_profiles) > 1: - logger.warning( + logger.info( "More than one profile match your setup. {}".format(log_parts) ) diff --git a/openpype/lib/python_module_tools.py b/openpype/lib/python_module_tools.py index 44a1007889..cb5f285ddd 100644 --- a/openpype/lib/python_module_tools.py +++ b/openpype/lib/python_module_tools.py @@ -9,6 +9,38 @@ log = logging.getLogger(__name__) PY3 = sys.version_info[0] == 3 +def import_filepath(filepath, module_name=None): + """Import python file as python module. + + Python 2 and Python 3 compatibility. + + Args: + filepath(str): Path to python file. + module_name(str): Name of loaded module. Only for Python 3. By default + is filled with filename of filepath. + """ + if module_name is None: + module_name = os.path.splitext(os.path.basename(filepath))[0] + + # Prepare module object where content of file will be parsed + module = types.ModuleType(module_name) + + if PY3: + # Use loader so module has full specs + module_loader = importlib.machinery.SourceFileLoader( + module_name, filepath + ) + module_loader.exec_module(module) + else: + # Execute module code and store content to module + with open(filepath) as _stream: + # Execute content and store it to module object + exec(_stream.read(), module.__dict__) + + module.__file__ = filepath + return module + + def modules_from_path(folder_path): """Get python scripts as modules from a path. 
@@ -55,23 +87,7 @@ def modules_from_path(folder_path): continue try: - # Prepare module object where content of file will be parsed - module = types.ModuleType(mod_name) - - if PY3: - # Use loader so module has full specs - module_loader = importlib.machinery.SourceFileLoader( - mod_name, full_path - ) - module_loader.exec_module(module) - else: - # Execute module code and store content to module - with open(full_path) as _stream: - # Execute content and store it to module object - exec(_stream.read(), module.__dict__) - - module.__file__ = full_path - + module = import_filepath(full_path, mod_name) modules.append((full_path, module)) except Exception: @@ -127,3 +143,96 @@ def classes_from_module(superclass, module): classes.append(obj) return classes + + +def _import_module_from_dirpath_py2(dirpath, module_name, dst_module_name): + """Import passed dirpath as python module using `imp`.""" + if dst_module_name: + full_module_name = "{}.{}".format(dst_module_name, module_name) + dst_module = sys.modules[dst_module_name] + else: + full_module_name = module_name + dst_module = None + + if full_module_name in sys.modules: + return sys.modules[full_module_name] + + import imp + + fp, pathname, description = imp.find_module(module_name, [dirpath]) + module = imp.load_module(full_module_name, fp, pathname, description) + if dst_module is not None: + setattr(dst_module, module_name, module) + + return module + + +def _import_module_from_dirpath_py3(dirpath, module_name, dst_module_name): + """Import passed dirpath as python module using Python 3 modules.""" + if dst_module_name: + full_module_name = "{}.{}".format(dst_module_name, module_name) + dst_module = sys.modules[dst_module_name] + else: + full_module_name = module_name + dst_module = None + + # Skip import if is already imported + if full_module_name in sys.modules: + return sys.modules[full_module_name] + + import importlib.util + from importlib._bootstrap_external import PathFinder + + # Find loader for passed 
path and name + loader = PathFinder.find_module(full_module_name, [dirpath]) + + # Load specs of module + spec = importlib.util.spec_from_loader( + full_module_name, loader, origin=dirpath + ) + + # Create module based on specs + module = importlib.util.module_from_spec(spec) + + # Store module to destination module and `sys.modules` + # WARNING this mus be done before module execution + if dst_module is not None: + setattr(dst_module, module_name, module) + + sys.modules[full_module_name] = module + + # Execute module import + loader.exec_module(module) + + return module + + +def import_module_from_dirpath(dirpath, folder_name, dst_module_name=None): + """Import passed directory as a python module. + + Python 2 and 3 compatible. + + Imported module can be assigned as a child attribute of already loaded + module from `sys.modules` if has support of `setattr`. That is not default + behavior of python modules so parent module must be a custom module with + that ability. + + It is not possible to reimport already cached module. If you need to + reimport module you have to remove it from caches manually. + + Args: + dirpath(str): Parent directory path of loaded folder. + folder_name(str): Folder name which should be imported inside passed + directory. + dst_module_name(str): Parent module name under which can be loaded + module added. 
+ """ + if PY3: + module = _import_module_from_dirpath_py3( + dirpath, folder_name, dst_module_name + ) + else: + module = _import_module_from_dirpath_py2( + dirpath, folder_name, dst_module_name + ) + return module diff --git a/openpype/lib/usdlib.py b/openpype/lib/usdlib.py new file mode 100644 index 0000000000..3ae7430c7b --- /dev/null +++ b/openpype/lib/usdlib.py @@ -0,0 +1,350 @@ +import os +import re +import logging + +try: + from pxr import Usd, UsdGeom, Sdf, Kind +except ImportError: + # Allow to fall back on Multiverse 6.3.0+ pxr usd library + from mvpxr import Usd, UsdGeom, Sdf, Kind + +from avalon import io, api + +log = logging.getLogger(__name__) + + +# The predefined steps order used for bootstrapping USD Shots and Assets. +# These are ordered in order from strongest to weakest opinions, like in USD. +PIPELINE = { + "shot": [ + "usdLighting", + "usdFx", + "usdSimulation", + "usdAnimation", + "usdLayout", + ], + "asset": ["usdShade", "usdModel"], +} + + +def create_asset( + filepath, asset_name, reference_layers, kind=Kind.Tokens.component +): + """ + Creates an asset file that consists of a top level layer and sublayers for + shading and geometry. + + Args: + filepath (str): Filepath where the asset.usd file will be saved. + reference_layers (list): USD Files to reference in the asset. + Note that the bottom layer (first file, like a model) would + be last in the list. The strongest layer will be the first + index. + asset_name (str): The name for the Asset identifier and default prim. + kind (pxr.Kind): A USD Kind for the root asset. + + """ + # Also see create_asset.py in PixarAnimationStudios/USD endToEnd example + + log.info("Creating asset at %s", filepath) + + # Make the layer ascii - good for readability, plus the file is small + root_layer = Sdf.Layer.CreateNew(filepath, args={"format": "usda"}) + stage = Usd.Stage.Open(root_layer) + + # Define a prim for the asset and make it the default for the stage. 
+ asset_prim = UsdGeom.Xform.Define(stage, "/%s" % asset_name).GetPrim() + stage.SetDefaultPrim(asset_prim) + + # Let viewing applications know how to orient a free camera properly + UsdGeom.SetStageUpAxis(stage, UsdGeom.Tokens.y) + + # Usually we will "loft up" the kind authored into the exported geometry + # layer rather than re-stamping here; we'll leave that for a later + # tutorial, and just be explicit here. + model = Usd.ModelAPI(asset_prim) + if kind: + model.SetKind(kind) + + model.SetAssetName(asset_name) + model.SetAssetIdentifier("%s/%s.usd" % (asset_name, asset_name)) + + # Add references to the asset prim + references = asset_prim.GetReferences() + for reference_filepath in reference_layers: + references.AddReference(reference_filepath) + + stage.GetRootLayer().Save() + + +def create_shot(filepath, layers, create_layers=False): + """Create a shot with separate layers for departments. + + Args: + filepath (str): Filepath where the asset.usd file will be saved. + layers (str): When provided this will be added verbatim in the + subLayerPaths layers. When the provided layer paths do not exist + they are generated using Sdf.Layer.CreateNew + create_layers (bool): Whether to create the stub layers on disk if + they do not exist yet. + + Returns: + str: The saved shot file path + + """ + # Also see create_shot.py in PixarAnimationStudios/USD endToEnd example + + stage = Usd.Stage.CreateNew(filepath) + log.info("Creating shot at %s" % filepath) + + for layer_path in layers: + if create_layers and not os.path.exists(layer_path): + # We use the Sdf API here to quickly create layers. Also, we're + # using it as a way to author the subLayerPaths as there is no + # way to do that directly in the Usd API. 
+ layer_folder = os.path.dirname(layer_path) + if not os.path.exists(layer_folder): + os.makedirs(layer_folder) + + Sdf.Layer.CreateNew(layer_path) + + stage.GetRootLayer().subLayerPaths.append(layer_path) + + # Lets viewing applications know how to orient a free camera properly + UsdGeom.SetStageUpAxis(stage, UsdGeom.Tokens.y) + stage.GetRootLayer().Save() + + return filepath + + +def create_model(filename, asset, variant_subsets): + """Create a USD Model file. + + For each of the variation paths it will payload the path and set its + relevant variation name. + + """ + + asset_doc = io.find_one({"name": asset, "type": "asset"}) + assert asset_doc, "Asset not found: %s" % asset + + variants = [] + for subset in variant_subsets: + prefix = "usdModel" + if subset.startswith(prefix): + # Strip off `usdModel_` + variant = subset[len(prefix):] + else: + raise ValueError( + "Model subsets must start " "with usdModel: %s" % subset + ) + + path = get_usd_master_path( + asset=asset_doc, subset=subset, representation="usd" + ) + variants.append((variant, path)) + + stage = _create_variants_file( + filename, + variants=variants, + variantset="model", + variant_prim="/root", + reference_prim="/root/geo", + as_payload=True, + ) + + UsdGeom.SetStageMetersPerUnit(stage, 1) + UsdGeom.SetStageUpAxis(stage, UsdGeom.Tokens.y) + + # modelAPI = Usd.ModelAPI(root_prim) + # modelAPI.SetKind(Kind.Tokens.component) + + # See http://openusd.org/docs/api/class_usd_model_a_p_i.html#details + # for more on assetInfo + # modelAPI.SetAssetName(asset) + # modelAPI.SetAssetIdentifier(asset) + + stage.GetRootLayer().Save() + + +def create_shade(filename, asset, variant_subsets): + """Create a master USD shade file for an asset. + + For each available model variation this should generate a reference + to a `usdShade_{modelVariant}` subset. 
+ + """ + + asset_doc = io.find_one({"name": asset, "type": "asset"}) + assert asset_doc, "Asset not found: %s" % asset + + variants = [] + + for subset in variant_subsets: + prefix = "usdModel" + if subset.startswith(prefix): + # Strip off `usdModel_` + variant = subset[len(prefix):] + else: + raise ValueError( + "Model subsets must start " "with usdModel: %s" % subset + ) + + shade_subset = re.sub("^usdModel", "usdShade", subset) + path = get_usd_master_path( + asset=asset_doc, subset=shade_subset, representation="usd" + ) + variants.append((variant, path)) + + stage = _create_variants_file( + filename, variants=variants, variantset="model", variant_prim="/root" + ) + + stage.GetRootLayer().Save() + + +def create_shade_variation(filename, asset, model_variant, shade_variants): + """Create the master Shade file for a specific model variant. + + This should reference all shade variants for the specific model variant. + + """ + + asset_doc = io.find_one({"name": asset, "type": "asset"}) + assert asset_doc, "Asset not found: %s" % asset + + variants = [] + for variant in shade_variants: + subset = "usdShade_{model}_{shade}".format( + model=model_variant, shade=variant + ) + path = get_usd_master_path( + asset=asset_doc, subset=subset, representation="usd" + ) + variants.append((variant, path)) + + stage = _create_variants_file( + filename, variants=variants, variantset="shade", variant_prim="/root" + ) + + stage.GetRootLayer().Save() + + +def _create_variants_file( + filename, + variants, + variantset, + default_variant=None, + variant_prim="/root", + reference_prim=None, + set_default_variant=True, + as_payload=False, + skip_variant_on_single_file=True, +): + + root_layer = Sdf.Layer.CreateNew(filename, args={"format": "usda"}) + stage = Usd.Stage.Open(root_layer) + + root_prim = stage.DefinePrim(variant_prim) + stage.SetDefaultPrim(root_prim) + + def _reference(path): + """Reference/Payload path depending on function arguments""" + + if reference_prim: + prim = 
stage.DefinePrim(reference_prim) + else: + prim = root_prim + + if as_payload: + # Payload + prim.GetPayloads().AddPayload(Sdf.Payload(path)) + else: + # Reference + prim.GetReferences().AddReference(Sdf.Reference(path)) + + assert variants, "Must have variants, got: %s" % variants + + log.info(filename) + + if skip_variant_on_single_file and len(variants) == 1: + # Reference directly, no variants + variant_path = variants[0][1] + _reference(variant_path) + + log.info("Non-variants..") + log.info("Path: %s" % variant_path) + + else: + # Variants + append = Usd.ListPositionBackOfAppendList + variant_set = root_prim.GetVariantSets().AddVariantSet( + variantset, append + ) + + for variant, variant_path in variants: + + if default_variant is None: + default_variant = variant + + variant_set.AddVariant(variant, append) + variant_set.SetVariantSelection(variant) + with variant_set.GetVariantEditContext(): + _reference(variant_path) + + log.info("Variants..") + log.info("Variant: %s" % variant) + log.info("Path: %s" % variant_path) + + if set_default_variant: + variant_set.SetVariantSelection(default_variant) + + return stage + + +def get_usd_master_path(asset, subset, representation): + """Get the filepath for a .usd file of a subset. + + This will return the path to an unversioned master file generated by + `usd_master_file.py`. 
+ + """ + + project = io.find_one( + {"type": "project"}, projection={"config.template.publish": True} + ) + template = project["config"]["template"]["publish"] + + if isinstance(asset, dict) and "silo" in asset and "name" in asset: + # Allow explicitly passing asset document + asset_doc = asset + else: + asset_doc = io.find_one({"name": asset, "type": "asset"}) + + path = template.format( + **{ + "root": api.registered_root(), + "project": api.Session["AVALON_PROJECT"], + "silo": asset_doc["silo"], + "asset": asset_doc["name"], + "subset": subset, + "representation": representation, + "version": 0, # stub version zero + } + ) + + # Remove the version folder + subset_folder = os.path.dirname(os.path.dirname(path)) + master_folder = os.path.join(subset_folder, "master") + fname = "{0}.{1}".format(subset, representation) + + return os.path.join(master_folder, fname).replace("\\", "/") + + +def parse_avalon_uri(uri): + # URI Pattern: avalon://{asset}/{subset}.{ext} + pattern = r"avalon://(?P[^/.]*)/(?P[^/]*)\.(?P.*)" + if uri.startswith("avalon://"): + match = re.match(pattern, uri) + if match: + return match.groupdict() diff --git a/openpype/modules/README.md b/openpype/modules/README.md index 818375461f..a3733518ac 100644 --- a/openpype/modules/README.md +++ b/openpype/modules/README.md @@ -1,7 +1,19 @@ -# Pype modules -Pype modules should contain separated logic of specific kind of implementation, like Ftrack connection and usage code or Deadline farm rendering. +# OpenPype modules/addons +OpenPype modules should contain separated logic of specific kind of implementation, like Ftrack connection and usage code or Deadline farm rendering or may contain only special plugins. Addons work the same way currently there is no difference in module and addon. 
-## Base class `PypeModule` +## Modules concept +- modules and addons are dynamically imported to virtual python module `openpype_modules` from which it is possible to import them no matter where is the modulo located +- modules or addons should never be imported directly even if you know possible full import path + - it is because all of their content must be imported in specific order and should not be imported without defined functions as it may also break few implementation parts + +### TODOs +- add module/addon manifest + - definition of module (not 100% defined content e.g. minimum require OpenPype version etc.) + - defying that folder is content of a module or an addon +- module/addon have it's settings schemas and default values outside OpenPype +- add general setting of paths to modules + +## Base class `OpenPypeModule` - abstract class as base for each module - implementation should be module's api withou GUI parts - may implement `get_global_environments` method which should return dictionary of environments that are globally appliable and value is the same for whole studio if launched at any workstation (except os specific paths) @@ -17,6 +29,16 @@ Pype modules should contain separated logic of specific kind of implementation, - interface is class that has defined abstract methods to implement and may contain preimplemented helper methods - module that inherit from an interface must implement those abstract methods otherwise won't be initialized - it is easy to find which module object inherited from which interfaces withh 100% chance they have implemented required methods +- interfaces can be defined in `interfaces.py` inside module directory + - the file can't use relative imports or import anything from other parts + of module itself at the header of file + - this is one of reasons why modules/addons can't be imported directly without using defined functions in OpenPype modules implementation + +## Base class `OpenPypeInterface` +- has nothing 
implemented +- has ABCMeta as metaclass +- is defined to be able find out classes which inherit from this base to be + able tell this is an Interface ## Global interfaces - few interfaces are implemented for global usage @@ -70,7 +92,7 @@ Pype modules should contain separated logic of specific kind of implementation, - Clockify has more inharitance it's class definition looks like ``` class ClockifyModule( - PypeModule, # Says it's Pype module so ModulesManager will try to initialize. + OpenPypeModule, # Says it's Pype module so ModulesManager will try to initialize. ITrayModule, # Says has special implementation when used in tray. IPluginPaths, # Says has plugin paths that want to register (paths to clockify actions for launcher). IFtrackEventHandlerPaths, # Says has Ftrack actions/events for user/server. diff --git a/openpype/modules/__init__.py b/openpype/modules/__init__.py index 068aeb98af..68b5f6c247 100644 --- a/openpype/modules/__init__.py +++ b/openpype/modules/__init__.py @@ -1,86 +1,35 @@ # -*- coding: utf-8 -*- from .base import ( - PypeModule, - ITrayModule, - ITrayAction, - ITrayService, - IPluginPaths, - ILaunchHookPaths, + OpenPypeModule, + OpenPypeAddOn, + OpenPypeInterface, + + load_modules, + ModulesManager, - TrayModulesManager + TrayModulesManager, + + BaseModuleSettingsDef, + ModuleSettingsDef, + JsonFilesSettingsDef, + + get_module_settings_defs ) -from .settings_action import ( - SettingsAction, - ISettingsChangeListener, - LocalSettingsAction -) -from .webserver import ( - WebServerModule, - IWebServerRoutes -) -from .idle_manager import ( - IdleManager, - IIdleManager -) -from .timers_manager import ( - TimersManager, - ITimersManager -) -from .avalon_apps import AvalonModule -from .launcher_action import LauncherAction -from .ftrack import ( - FtrackModule, - IFtrackEventHandlerPaths -) -from .clockify import ClockifyModule -from .log_viewer import LogViewModule -from .muster import MusterModule -from .deadline import DeadlineModule -from 
.project_manager_action import ProjectManagerAction -from .standalonepublish_action import StandAlonePublishAction -from .python_console_interpreter import PythonInterpreterAction -from .sync_server import SyncServerModule -from .slack import SlackIntegrationModule __all__ = ( - "PypeModule", - "ITrayModule", - "ITrayAction", - "ITrayService", - "IPluginPaths", - "ILaunchHookPaths", + "OpenPypeModule", + "OpenPypeAddOn", + "OpenPypeInterface", + + "load_modules", + "ModulesManager", "TrayModulesManager", - "SettingsAction", - "LocalSettingsAction", + "BaseModuleSettingsDef", + "ModuleSettingsDef", + "JsonFilesSettingsDef", - "WebServerModule", - "IWebServerRoutes", - - "IdleManager", - "IIdleManager", - - "TimersManager", - "ITimersManager", - - "AvalonModule", - "LauncherAction", - - "FtrackModule", - "IFtrackEventHandlerPaths", - - "ClockifyModule", - "IdleManager", - "LogViewModule", - "MusterModule", - "DeadlineModule", - "ProjectManagerAction", - "StandAlonePublishAction", - "PythonInterpreterAction", - - "SyncServerModule", - - "SlackIntegrationModule" + "get_module_settings_defs" ) diff --git a/openpype/modules/base.py b/openpype/modules/base.py index c7efbd5ab3..01c3cebe60 100644 --- a/openpype/modules/base.py +++ b/openpype/modules/base.py @@ -1,21 +1,381 @@ # -*- coding: utf-8 -*- """Base class for Pype Modules.""" +import os +import sys +import json import time import inspect import logging +import platform +import threading import collections from uuid import uuid4 from abc import ABCMeta, abstractmethod import six import openpype -from openpype.settings import get_system_settings +from openpype.settings import ( + get_system_settings, + SYSTEM_SETTINGS_KEY, + PROJECT_SETTINGS_KEY, + SCHEMA_KEY_SYSTEM_SETTINGS, + SCHEMA_KEY_PROJECT_SETTINGS +) + +from openpype.settings.lib import ( + get_studio_system_settings_overrides, + load_json_file +) from openpype.lib import PypeLogger -from openpype import resources + + +# Inherit from `object` for Python 2 
hosts +class _ModuleClass(object): + """Fake module class for storing OpenPype modules. + + Object of this class can be stored to `sys.modules` and used for storing + dynamically imported modules. + """ + def __init__(self, name): + # Call setattr on super class + super(_ModuleClass, self).__setattr__("name", name) + + # Where modules and interfaces are stored + super(_ModuleClass, self).__setattr__("__attributes__", dict()) + super(_ModuleClass, self).__setattr__("__defaults__", set()) + + super(_ModuleClass, self).__setattr__("_log", None) + + def __getattr__(self, attr_name): + if attr_name not in self.__attributes__: + if attr_name in ("__path__"): + return None + raise ImportError("No module named {}.{}".format( + self.name, attr_name + )) + return self.__attributes__[attr_name] + + def __iter__(self): + for module in self.values(): + yield module + + def __setattr__(self, attr_name, value): + if attr_name in self.__attributes__: + self.log.warning( + "Duplicated name \"{}\" in {}. Overriding.".format( + self.name, attr_name + ) + ) + self.__attributes__[attr_name] = value + + def __setitem__(self, key, value): + self.__setattr__(key, value) + + def __getitem__(self, key): + return getattr(self, key) + + @property + def log(self): + if self._log is None: + super(_ModuleClass, self).__setattr__( + "_log", PypeLogger.get_logger(self.name) + ) + return self._log + + def get(self, key, default=None): + return self.__attributes__.get(key, default) + + def keys(self): + return self.__attributes__.keys() + + def values(self): + return self.__attributes__.values() + + def items(self): + return self.__attributes__.items() + + +class _InterfacesClass(_ModuleClass): + """Fake module class for storing OpenPype interfaces. + + MissingInterface object is returned if interfaces does not exists. 
+ - this is because interfaces must be available even if are missing + implementation + """ + def __getattr__(self, attr_name): + if attr_name not in self.__attributes__: + # Fake Interface if is not missing + self.__attributes__[attr_name] = type( + attr_name, + (MissingInteface, ), + {} + ) + + return self.__attributes__[attr_name] + + +class _LoadCache: + interfaces_lock = threading.Lock() + modules_lock = threading.Lock() + interfaces_loaded = False + modules_loaded = False + + +def get_default_modules_dir(): + """Path to default OpenPype modules.""" + current_dir = os.path.abspath(os.path.dirname(__file__)) + + return os.path.join(current_dir, "default_modules") + + +def get_dynamic_modules_dirs(): + """Possible paths to OpenPype Addons of Modules. + + Paths are loaded from studio settings under: + `modules -> addon_paths -> {platform name}` + + Path may contain environment variable as a formatting string. + + They are not validated or checked their existence. + + Returns: + list: Paths loaded from studio overrides. + """ + output = [] + value = get_studio_system_settings_overrides() + for key in ("modules", "addon_paths", platform.system().lower()): + if key not in value: + return output + value = value[key] + + for path in value: + if not path: + continue + + try: + path = path.format(**os.environ) + except Exception: + pass + output.append(path) + return output + + +def get_module_dirs(): + """List of paths where OpenPype modules can be found.""" + _dirpaths = [] + _dirpaths.append(get_default_modules_dir()) + _dirpaths.extend(get_dynamic_modules_dirs()) + + dirpaths = [] + for path in _dirpaths: + if not path: + continue + normalized = os.path.normpath(path) + if normalized not in dirpaths: + dirpaths.append(normalized) + return dirpaths + + +def load_interfaces(force=False): + """Load interfaces from modules into `openpype_interfaces`. + + Only classes which inherit from `OpenPypeInterface` are loaded and stored. 
+ + Args: + force(bool): Force to load interfaces even if are already loaded. + This won't update already loaded and used (cached) interfaces. + """ + + if _LoadCache.interfaces_loaded and not force: + return + + if not _LoadCache.interfaces_lock.locked(): + with _LoadCache.interfaces_lock: + _load_interfaces() + _LoadCache.interfaces_loaded = True + else: + # If lock is locked wait until is finished + while _LoadCache.interfaces_lock.locked(): + time.sleep(0.1) + + +def _load_interfaces(): + # Key under which will be modules imported in `sys.modules` + from openpype.lib import import_filepath + + modules_key = "openpype_interfaces" + + sys.modules[modules_key] = openpype_interfaces = ( + _InterfacesClass(modules_key) + ) + + log = PypeLogger.get_logger("InterfacesLoader") + + dirpaths = get_module_dirs() + + interface_paths = [] + interface_paths.append( + os.path.join(get_default_modules_dir(), "interfaces.py") + ) + for dirpath in dirpaths: + if not os.path.exists(dirpath): + continue + + for filename in os.listdir(dirpath): + if filename in ("__pycache__", ): + continue + + full_path = os.path.join(dirpath, filename) + if not os.path.isdir(full_path): + continue + + interfaces_path = os.path.join(full_path, "interfaces.py") + if os.path.exists(interfaces_path): + interface_paths.append(interfaces_path) + + for full_path in interface_paths: + if not os.path.exists(full_path): + continue + + try: + # Prepare module object where content of file will be parsed + module = import_filepath(full_path) + + except Exception: + log.warning( + "Failed to load path: \"{0}\"".format(full_path), + exc_info=True + ) + continue + + for attr_name in dir(module): + attr = getattr(module, attr_name) + if ( + not inspect.isclass(attr) + or attr is OpenPypeInterface + or not issubclass(attr, OpenPypeInterface) + ): + continue + setattr(openpype_interfaces, attr_name, attr) + + +def load_modules(force=False): + """Load OpenPype modules as python modules. 
+ + Modules does not load only classes (like in Interfaces) because there must + be ability to use inner code of module and be able to import it from one + defined place. + + With this it is possible to import module's content from predefined module. + + Function makes sure that `load_interfaces` was triggered. Modules import + has specific order which can't be changed. + + Args: + force(bool): Force to load modules even if are already loaded. + This won't update already loaded and used (cached) modules. + """ + + if _LoadCache.modules_loaded and not force: + return + + # First load interfaces + # - modules must not be imported before interfaces + load_interfaces(force) + + if not _LoadCache.modules_lock.locked(): + with _LoadCache.modules_lock: + _load_modules() + _LoadCache.modules_loaded = True + else: + # If lock is locked wait until is finished + while _LoadCache.modules_lock.locked(): + time.sleep(0.1) + + +def _load_modules(): + # Import helper functions from lib + from openpype.lib import ( + import_filepath, + import_module_from_dirpath + ) + + # Key under which will be modules imported in `sys.modules` + modules_key = "openpype_modules" + + # Change `sys.modules` + sys.modules[modules_key] = openpype_modules = _ModuleClass(modules_key) + + log = PypeLogger.get_logger("ModulesLoader") + + # Look for OpenPype modules in paths defined with `get_module_dirs` + dirpaths = get_module_dirs() + + for dirpath in dirpaths: + if not os.path.exists(dirpath): + log.warning(( + "Could not find path when loading OpenPype modules \"{}\"" + ).format(dirpath)) + continue + + for filename in os.listdir(dirpath): + # Ignore filenames + if filename in ("__pycache__", ): + continue + + fullpath = os.path.join(dirpath, filename) + basename, ext = os.path.splitext(filename) + + # TODO add more logic how to define if folder is module or not + # - check manifest and content of manifest + try: + if os.path.isdir(fullpath): + import_module_from_dirpath(dirpath, filename, 
modules_key) + + elif ext in (".py", ): + module = import_filepath(fullpath) + setattr(openpype_modules, basename, module) + + except Exception: + log.error( + "Failed to import '{}'.".format(fullpath), + exc_info=True + ) + + +class _OpenPypeInterfaceMeta(ABCMeta): + """OpenPypeInterface meta class to print proper string.""" + def __str__(self): + return "<'OpenPypeInterface.{}'>".format(self.__name__) + + def __repr__(self): + return str(self) + + +@six.add_metaclass(_OpenPypeInterfaceMeta) +class OpenPypeInterface: + """Base class of Interface that can be used as Mixin with abstract parts. + + This is way how OpenPype module or addon can tell that has implementation + for specific part or for other module/addon. + + Child classes of OpenPypeInterface may be used as mixin in different + OpenPype modules which means they have to have implemented methods defined + in the interface. By default interface does not have any abstract parts. + """ + pass + + +class MissingInteface(OpenPypeInterface): + """Class representing missing interface class. + + Used when interface is not available from currently registered paths. + """ + pass @six.add_metaclass(ABCMeta) -class PypeModule: +class OpenPypeModule: """Base class of pype module. Attributes: @@ -38,7 +398,7 @@ class PypeModule: def __init__(self, manager, settings): self.manager = manager - self.log = PypeLogger().get_logger(self.name) + self.log = PypeLogger.get_logger(self.name) self.initialize(settings) @@ -70,267 +430,19 @@ class PypeModule: return {} -@six.add_metaclass(ABCMeta) -class IPluginPaths: - """Module has plugin paths to return. +class OpenPypeAddOn(OpenPypeModule): + # Enable Addon by default + enabled = True - Expected result is dictionary with keys "publish", "create", "load" or - "actions" and values as list or string. 
- { - "publish": ["path/to/publish_plugins"] - } - """ - # TODO validation of an output - @abstractmethod - def get_plugin_paths(self): + def initialize(self, module_settings): + """Initialization is not be required for most of addons.""" pass - -@six.add_metaclass(ABCMeta) -class ILaunchHookPaths: - """Module has launch hook paths to return. - - Expected result is list of paths. - ["path/to/launch_hooks_dir"] - """ - - @abstractmethod - def get_launch_hook_paths(self): + def connect_with_modules(self, enabled_modules): + """Do not require to implement connection with modules for addon.""" pass -@six.add_metaclass(ABCMeta) -class ITrayModule: - """Module has special procedures when used in Pype Tray. - - IMPORTANT: - The module still must be usable if is not used in tray even if - would do nothing. - """ - tray_initialized = False - _tray_manager = None - - @abstractmethod - def tray_init(self): - """Initialization part of tray implementation. - - Triggered between `initialization` and `connect_with_modules`. - - This is where GUIs should be loaded or tray specific parts should be - prepared. - """ - pass - - @abstractmethod - def tray_menu(self, tray_menu): - """Add module's action to tray menu.""" - pass - - @abstractmethod - def tray_start(self): - """Start procedure in Pype tray.""" - pass - - @abstractmethod - def tray_exit(self): - """Cleanup method which is executed on tray shutdown. - - This is place where all threads should be shut. - """ - pass - - def execute_in_main_thread(self, callback): - """ Pushes callback to the queue or process 'callback' on a main thread - - Some callbacks need to be processed on main thread (menu actions - must be added on main thread or they won't get triggered etc.) 
- """ - # called without initialized tray, still main thread needed - if not self.tray_initialized: - try: - callback = self._main_thread_callbacks.popleft() - callback() - except: - self.log.warning( - "Failed to execute {} in main thread".format(callback), - exc_info=True) - - return - self.manager.tray_manager.execute_in_main_thread(callback) - - def show_tray_message(self, title, message, icon=None, msecs=None): - """Show tray message. - - Args: - title (str): Title of message. - message (str): Content of message. - icon (QSystemTrayIcon.MessageIcon): Message's icon. Default is - Information icon, may differ by Qt version. - msecs (int): Duration of message visibility in miliseconds. - Default is 10000 msecs, may differ by Qt version. - """ - if self._tray_manager: - self._tray_manager.show_tray_message(title, message, icon, msecs) - - def add_doubleclick_callback(self, callback): - if hasattr(self.manager, "add_doubleclick_callback"): - self.manager.add_doubleclick_callback(self, callback) - - -class ITrayAction(ITrayModule): - """Implementation of Tray action. - - Add action to tray menu which will trigger `on_action_trigger`. - It is expected to be used for showing tools. - - Methods `tray_start`, `tray_exit` and `connect_with_modules` are overriden - as it's not expected that action will use them. But it is possible if - necessary. 
- """ - - admin_action = False - _admin_submenu = None - - @property - @abstractmethod - def label(self): - """Service label showed in menu.""" - pass - - @abstractmethod - def on_action_trigger(self): - """What happens on actions click.""" - pass - - def tray_menu(self, tray_menu): - from Qt import QtWidgets - - if self.admin_action: - menu = self.admin_submenu(tray_menu) - action = QtWidgets.QAction(self.label, menu) - menu.addAction(action) - if not menu.menuAction().isVisible(): - menu.menuAction().setVisible(True) - - else: - action = QtWidgets.QAction(self.label, tray_menu) - tray_menu.addAction(action) - - action.triggered.connect(self.on_action_trigger) - - def tray_start(self): - return - - def tray_exit(self): - return - - @staticmethod - def admin_submenu(tray_menu): - if ITrayAction._admin_submenu is None: - from Qt import QtWidgets - - admin_submenu = QtWidgets.QMenu("Admin", tray_menu) - admin_submenu.menuAction().setVisible(False) - ITrayAction._admin_submenu = admin_submenu - return ITrayAction._admin_submenu - - -class ITrayService(ITrayModule): - # Module's property - menu_action = None - - # Class properties - _services_submenu = None - _icon_failed = None - _icon_running = None - _icon_idle = None - - @property - @abstractmethod - def label(self): - """Service label showed in menu.""" - pass - - # TODO be able to get any sort of information to show/print - # @abstractmethod - # def get_service_info(self): - # pass - - @staticmethod - def services_submenu(tray_menu): - if ITrayService._services_submenu is None: - from Qt import QtWidgets - - services_submenu = QtWidgets.QMenu("Services", tray_menu) - services_submenu.menuAction().setVisible(False) - ITrayService._services_submenu = services_submenu - return ITrayService._services_submenu - - @staticmethod - def add_service_action(action): - ITrayService._services_submenu.addAction(action) - if not ITrayService._services_submenu.menuAction().isVisible(): - 
ITrayService._services_submenu.menuAction().setVisible(True) - - @staticmethod - def _load_service_icons(): - from Qt import QtGui - ITrayService._failed_icon = QtGui.QIcon( - resources.get_resource("icons", "circle_red.png") - ) - ITrayService._icon_running = QtGui.QIcon( - resources.get_resource("icons", "circle_green.png") - ) - ITrayService._icon_idle = QtGui.QIcon( - resources.get_resource("icons", "circle_orange.png") - ) - - @staticmethod - def get_icon_running(): - if ITrayService._icon_running is None: - ITrayService._load_service_icons() - return ITrayService._icon_running - - @staticmethod - def get_icon_idle(): - if ITrayService._icon_idle is None: - ITrayService._load_service_icons() - return ITrayService._icon_idle - - @staticmethod - def get_icon_failed(): - if ITrayService._failed_icon is None: - ITrayService._load_service_icons() - return ITrayService._failed_icon - - def tray_menu(self, tray_menu): - from Qt import QtWidgets - action = QtWidgets.QAction( - self.label, - self.services_submenu(tray_menu) - ) - self.menu_action = action - - self.add_service_action(action) - - self.set_service_running_icon() - - def set_service_running_icon(self): - """Change icon of an QAction to green circle.""" - if self.menu_action: - self.menu_action.setIcon(self.get_icon_running()) - - def set_service_failed_icon(self): - """Change icon of an QAction to red circle.""" - if self.menu_action: - self.menu_action.setIcon(self.get_icon_failed()) - - def set_service_idle_icon(self): - """Change icon of an QAction to orange circle.""" - if self.menu_action: - self.menu_action.setIcon(self.get_icon_idle()) - - class ModulesManager: """Manager of Pype modules helps to load and prepare them to work. 
@@ -357,6 +469,11 @@ class ModulesManager: def initialize_modules(self): """Import and initialize modules.""" + # Make sure modules are loaded + load_modules() + + import openpype_modules + self.log.debug("*** Pype modules initialization.") # Prepare settings for modules system_settings = getattr(self, "_system_settings", None) @@ -368,33 +485,43 @@ class ModulesManager: time_start = time.time() prev_start_time = time_start - # Go through globals in `pype.modules` - for name in dir(openpype.modules): - modules_item = getattr(openpype.modules, name, None) - # Filter globals that are not classes which inherit from PypeModule - if ( - not inspect.isclass(modules_item) - or modules_item is openpype.modules.PypeModule - or not issubclass(modules_item, openpype.modules.PypeModule) - ): - continue + module_classes = [] + for module in openpype_modules: + # Go through globals in `pype.modules` + for name in dir(module): + modules_item = getattr(module, name, None) + # Filter globals that are not classes which inherit from + # OpenPypeModule + if ( + not inspect.isclass(modules_item) + or modules_item is OpenPypeModule + or not issubclass(modules_item, OpenPypeModule) + ): + continue - # Check if class is abstract (Developing purpose) - if inspect.isabstract(modules_item): - # Find missing implementations by convetion on `abc` module - not_implemented = [] - for attr_name in dir(modules_item): - attr = getattr(modules_item, attr_name, None) - if attr and getattr(attr, "__isabstractmethod__", None): - not_implemented.append(attr_name) + # Check if class is abstract (Developing purpose) + if inspect.isabstract(modules_item): + # Find missing implementations by convetion on `abc` module + not_implemented = [] + for attr_name in dir(modules_item): + attr = getattr(modules_item, attr_name, None) + abs_method = getattr( + attr, "__isabstractmethod__", None + ) + if attr and abs_method: + not_implemented.append(attr_name) - # Log missing implementations - self.log.warning(( - 
"Skipping abstract Class: {}. Missing implementations: {}" - ).format(name, ", ".join(not_implemented))) - continue + # Log missing implementations + self.log.warning(( + "Skipping abstract Class: {}." + " Missing implementations: {}" + ).format(name, ", ".join(not_implemented))) + continue + module_classes.append(modules_item) + for modules_item in module_classes: try: + name = modules_item.__name__ # Try initialize module module = modules_item(self, modules_settings) # Store initialized object @@ -492,6 +619,8 @@ class ModulesManager: and "actions" each containing list of paths. """ # Output structure + from openpype_interfaces import IPluginPaths + output = { "publish": [], "create": [], @@ -544,6 +673,8 @@ class ModulesManager: Returns: list: Paths to launch hook directories. """ + from openpype_interfaces import ILaunchHookPaths + str_type = type("") expected_types = (list, tuple, set) @@ -711,6 +842,7 @@ class TrayModulesManager(ModulesManager): self.modules_by_id = {} self.modules_by_name = {} self._report = {} + self.tray_manager = None self.doubleclick_callbacks = {} @@ -743,6 +875,8 @@ class TrayModulesManager(ModulesManager): self.tray_menu(tray_menu) def get_enabled_tray_modules(self): + from openpype_interfaces import ITrayModule + output = [] for module in self.modules: if module.enabled and isinstance(module, ITrayModule): @@ -818,6 +952,8 @@ class TrayModulesManager(ModulesManager): self._report["Tray menu"] = report def start_modules(self): + from openpype_interfaces import ITrayService + report = {} time_start = time.time() prev_start_time = time_start @@ -856,3 +992,424 @@ class TrayModulesManager(ModulesManager): ), exc_info=True ) + + +def get_module_settings_defs(): + """Check loaded addons/modules for existence of thei settings definition. + + Check if OpenPype addon/module as python module has class that inherit + from `ModuleSettingsDef` in python module variables (imported + in `__init__py`). 
+ + Returns: + list: All valid and not abstract settings definitions from imported + openpype addons and modules. + """ + # Make sure modules are loaded + load_modules() + + import openpype_modules + + settings_defs = [] + + log = PypeLogger.get_logger("ModuleSettingsLoad") + + for raw_module in openpype_modules: + for attr_name in dir(raw_module): + attr = getattr(raw_module, attr_name) + if ( + not inspect.isclass(attr) + or attr is ModuleSettingsDef + or not issubclass(attr, ModuleSettingsDef) + ): + continue + + if inspect.isabstract(attr): + # Find missing implementations by convetion on `abc` module + not_implemented = [] + for attr_name in dir(attr): + attr = getattr(attr, attr_name, None) + abs_method = getattr( + attr, "__isabstractmethod__", None + ) + if attr and abs_method: + not_implemented.append(attr_name) + + # Log missing implementations + log.warning(( + "Skipping abstract Class: {} in module {}." + " Missing implementations: {}" + ).format( + attr_name, raw_module.__name__, ", ".join(not_implemented) + )) + continue + + settings_defs.append(attr) + + return settings_defs + + +@six.add_metaclass(ABCMeta) +class BaseModuleSettingsDef: + """Definition of settings for OpenPype module or AddOn.""" + _id = None + + @property + def id(self): + """ID created on initialization. + + ID should be per created object. Helps to store objects. + """ + if self._id is None: + self._id = uuid4() + return self._id + + @abstractmethod + def get_settings_schemas(self, schema_type): + """Setting schemas for passed schema type. + + These are main schemas by dynamic schema keys. If they're using + sub schemas or templates they should be loaded with + `get_dynamic_schemas`. + + Returns: + dict: Schema by `dynamic_schema` keys. + """ + pass + + @abstractmethod + def get_dynamic_schemas(self, schema_type): + """Settings schemas and templates that can be used anywhere. + + It is recommended to add prefix specific for addon/module to keys + (e.g. 
"my_addon/real_schema_name"). + + Returns: + dict: Schemas and templates by their keys. + """ + pass + + @abstractmethod + def get_defaults(self, top_key): + """Default values for passed top key. + + Top keys are (currently) "system_settings" or "project_settings". + + Should return exactly what was passed with `save_defaults`. + + Returns: + dict: Default values by path to first key in OpenPype defaults. + """ + pass + + @abstractmethod + def save_defaults(self, top_key, data): + """Save default values for passed top key. + + Top keys are (currently) "system_settings" or "project_settings". + + Passed data are by path to first key defined in main schemas. + """ + pass + + +class ModuleSettingsDef(BaseModuleSettingsDef): + """Settings definiton with separated system and procect settings parts. + + Reduce conditions that must be checked and adds predefined methods for + each case. + """ + def get_defaults(self, top_key): + """Split method into 2 methods by top key.""" + if top_key == SYSTEM_SETTINGS_KEY: + return self.get_default_system_settings() or {} + elif top_key == PROJECT_SETTINGS_KEY: + return self.get_default_project_settings() or {} + return {} + + def save_defaults(self, top_key, data): + """Split method into 2 methods by top key.""" + if top_key == SYSTEM_SETTINGS_KEY: + self.save_system_defaults(data) + elif top_key == PROJECT_SETTINGS_KEY: + self.save_project_defaults(data) + + def get_settings_schemas(self, schema_type): + """Split method into 2 methods by schema type.""" + if schema_type == SCHEMA_KEY_SYSTEM_SETTINGS: + return self.get_system_settings_schemas() or {} + elif schema_type == SCHEMA_KEY_PROJECT_SETTINGS: + return self.get_project_settings_schemas() or {} + return {} + + def get_dynamic_schemas(self, schema_type): + """Split method into 2 methods by schema type.""" + if schema_type == SCHEMA_KEY_SYSTEM_SETTINGS: + return self.get_system_dynamic_schemas() or {} + elif schema_type == SCHEMA_KEY_PROJECT_SETTINGS: + return 
self.get_project_dynamic_schemas() or {} + return {} + + @abstractmethod + def get_system_settings_schemas(self): + """Schemas and templates usable in system settings schemas. + + Returns: + dict: Schemas and templates by it's names. Names must be unique + across whole OpenPype. + """ + pass + + @abstractmethod + def get_project_settings_schemas(self): + """Schemas and templates usable in project settings schemas. + + Returns: + dict: Schemas and templates by it's names. Names must be unique + across whole OpenPype. + """ + pass + + @abstractmethod + def get_system_dynamic_schemas(self): + """System schemas by dynamic schema name. + + If dynamic schema name is not available in then schema will not used. + + Returns: + dict: Schemas or list of schemas by dynamic schema name. + """ + pass + + @abstractmethod + def get_project_dynamic_schemas(self): + """Project schemas by dynamic schema name. + + If dynamic schema name is not available in then schema will not used. + + Returns: + dict: Schemas or list of schemas by dynamic schema name. + """ + pass + + @abstractmethod + def get_default_system_settings(self): + """Default system settings values. + + Returns: + dict: Default values by path to first key. + """ + pass + + @abstractmethod + def get_default_project_settings(self): + """Default project settings values. + + Returns: + dict: Default values by path to first key. + """ + pass + + @abstractmethod + def save_system_defaults(self, data): + """Save default system settings values. + + Passed data are by path to first key defined in main schemas. + """ + pass + + @abstractmethod + def save_project_defaults(self, data): + """Save default project settings values. + + Passed data are by path to first key defined in main schemas. + """ + pass + + +class JsonFilesSettingsDef(ModuleSettingsDef): + """Preimplemented settings definition using json files and file structure. 
+ + Expected file structure: + β”• root + β”‚ + β”‚ # Default values + ┝ defaults + β”‚ ┝ system_settings.json + β”‚ β”• project_settings.json + β”‚ + β”‚ # Schemas for `dynamic_template` type + ┝ dynamic_schemas + β”‚ ┝ system_dynamic_schemas.json + β”‚ β”• project_dynamic_schemas.json + β”‚ + β”‚ # Schemas that can be used anywhere (enhancement for `dynamic_schemas`) + β”• schemas + ┝ system_schemas + β”‚ ┝ # Any schema or template files + β”‚ β”• ... + β”• project_schemas + ┝ # Any schema or template files + β”• ... + + Schemas can be loaded with prefix to avoid duplicated schema/template names + across all OpenPype addons/modules. Prefix can be defined with class + attribute `schema_prefix`. + + Only think which must be implemented in `get_settings_root_path` which + should return directory path to `root` (in structure graph above). + """ + # Possible way how to define `schemas` prefix + schema_prefix = "" + + @abstractmethod + def get_settings_root_path(self): + """Directory path where settings and it's schemas are located.""" + pass + + def __init__(self): + settings_root_dir = self.get_settings_root_path() + defaults_dir = os.path.join( + settings_root_dir, "defaults" + ) + dynamic_schemas_dir = os.path.join( + settings_root_dir, "dynamic_schemas" + ) + schemas_dir = os.path.join( + settings_root_dir, "schemas" + ) + + self.system_defaults_filepath = os.path.join( + defaults_dir, "system_settings.json" + ) + self.project_defaults_filepath = os.path.join( + defaults_dir, "project_settings.json" + ) + + self.system_dynamic_schemas_filepath = os.path.join( + dynamic_schemas_dir, "system_dynamic_schemas.json" + ) + self.project_dynamic_schemas_filepath = os.path.join( + dynamic_schemas_dir, "project_dynamic_schemas.json" + ) + + self.system_schemas_dir = os.path.join( + schemas_dir, "system_schemas" + ) + self.project_schemas_dir = os.path.join( + schemas_dir, "project_schemas" + ) + + def _load_json_file_data(self, path): + if os.path.exists(path): + return 
load_json_file(path) + return {} + + def get_default_system_settings(self): + """Default system settings values. + + Returns: + dict: Default values by path to first key. + """ + return self._load_json_file_data(self.system_defaults_filepath) + + def get_default_project_settings(self): + """Default project settings values. + + Returns: + dict: Default values by path to first key. + """ + return self._load_json_file_data(self.project_defaults_filepath) + + def _save_data_to_filepath(self, path, data): + dirpath = os.path.dirname(path) + if not os.path.exists(dirpath): + os.makedirs(dirpath) + + with open(path, "w") as file_stream: + json.dump(data, file_stream, indent=4) + + def save_system_defaults(self, data): + """Save default system settings values. + + Passed data are by path to first key defined in main schemas. + """ + self._save_data_to_filepath(self.system_defaults_filepath, data) + + def save_project_defaults(self, data): + """Save default project settings values. + + Passed data are by path to first key defined in main schemas. + """ + self._save_data_to_filepath(self.project_defaults_filepath, data) + + def get_system_dynamic_schemas(self): + """System schemas by dynamic schema name. + + If dynamic schema name is not available in then schema will not used. + + Returns: + dict: Schemas or list of schemas by dynamic schema name. + """ + return self._load_json_file_data(self.system_dynamic_schemas_filepath) + + def get_project_dynamic_schemas(self): + """Project schemas by dynamic schema name. + + If dynamic schema name is not available in then schema will not used. + + Returns: + dict: Schemas or list of schemas by dynamic schema name. 
+ """ + return self._load_json_file_data(self.project_dynamic_schemas_filepath) + + def _load_files_from_path(self, path): + output = {} + if not path or not os.path.exists(path): + return output + + if os.path.isfile(path): + filename = os.path.basename(path) + basename, ext = os.path.splitext(filename) + if ext == ".json": + if self.schema_prefix: + key = "{}/{}".format(self.schema_prefix, basename) + else: + key = basename + output[key] = self._load_json_file_data(path) + return output + + path = os.path.normpath(path) + for root, _, files in os.walk(path, topdown=False): + for filename in files: + basename, ext = os.path.splitext(filename) + if ext != ".json": + continue + + json_path = os.path.join(root, filename) + store_key = os.path.join( + root.replace(path, ""), basename + ).replace("\\", "/") + if self.schema_prefix: + store_key = "{}/{}".format(self.schema_prefix, store_key) + output[store_key] = self._load_json_file_data(json_path) + + return output + + def get_system_settings_schemas(self): + """Schemas and templates usable in system settings schemas. + + Returns: + dict: Schemas and templates by it's names. Names must be unique + across whole OpenPype. + """ + return self._load_files_from_path(self.system_schemas_dir) + + def get_project_settings_schemas(self): + """Schemas and templates usable in project settings schemas. + + Returns: + dict: Schemas and templates by it's names. Names must be unique + across whole OpenPype. 
+ """ + return self._load_files_from_path(self.project_schemas_dir) diff --git a/openpype/modules/avalon_apps/__init__.py b/openpype/modules/default_modules/avalon_apps/__init__.py similarity index 100% rename from openpype/modules/avalon_apps/__init__.py rename to openpype/modules/default_modules/avalon_apps/__init__.py diff --git a/openpype/modules/avalon_apps/avalon_app.py b/openpype/modules/default_modules/avalon_apps/avalon_app.py similarity index 95% rename from openpype/modules/avalon_apps/avalon_app.py rename to openpype/modules/default_modules/avalon_apps/avalon_app.py index 4e95f6e72b..53e06ec90a 100644 --- a/openpype/modules/avalon_apps/avalon_app.py +++ b/openpype/modules/default_modules/avalon_apps/avalon_app.py @@ -1,14 +1,14 @@ import os import openpype from openpype import resources -from .. import ( - PypeModule, +from openpype.modules import OpenPypeModule +from openpype_interfaces import ( ITrayModule, IWebServerRoutes ) -class AvalonModule(PypeModule, ITrayModule, IWebServerRoutes): +class AvalonModule(OpenPypeModule, ITrayModule, IWebServerRoutes): name = "avalon" def initialize(self, modules_settings): diff --git a/openpype/modules/avalon_apps/rest_api.py b/openpype/modules/default_modules/avalon_apps/rest_api.py similarity index 97% rename from openpype/modules/avalon_apps/rest_api.py rename to openpype/modules/default_modules/avalon_apps/rest_api.py index b77c256398..533050fc0c 100644 --- a/openpype/modules/avalon_apps/rest_api.py +++ b/openpype/modules/default_modules/avalon_apps/rest_api.py @@ -1,16 +1,13 @@ import os -import re import json import datetime -import bson from bson.objectid import ObjectId -import bson.json_util from aiohttp.web_response import Response from avalon.api import AvalonMongoDB -from openpype.modules.webserver.base_routes import RestApiEndpoint +from openpype_modules.webserver.base_routes import RestApiEndpoint class _RestApiEndpoint(RestApiEndpoint): diff --git a/openpype/modules/clockify/__init__.py 
b/openpype/modules/default_modules/clockify/__init__.py similarity index 100% rename from openpype/modules/clockify/__init__.py rename to openpype/modules/default_modules/clockify/__init__.py diff --git a/openpype/modules/clockify/clockify_api.py b/openpype/modules/default_modules/clockify/clockify_api.py similarity index 100% rename from openpype/modules/clockify/clockify_api.py rename to openpype/modules/default_modules/clockify/clockify_api.py diff --git a/openpype/modules/clockify/clockify_module.py b/openpype/modules/default_modules/clockify/clockify_module.py similarity index 98% rename from openpype/modules/clockify/clockify_module.py rename to openpype/modules/default_modules/clockify/clockify_module.py index e3751c46b8..a9e989f4ec 100644 --- a/openpype/modules/clockify/clockify_module.py +++ b/openpype/modules/default_modules/clockify/clockify_module.py @@ -7,8 +7,8 @@ from .constants import ( CLOCKIFY_FTRACK_USER_PATH, CLOCKIFY_FTRACK_SERVER_PATH ) -from openpype.modules import ( - PypeModule, +from openpype.modules import OpenPypeModule +from openpype_interfaces import ( ITrayModule, IPluginPaths, IFtrackEventHandlerPaths, @@ -17,7 +17,7 @@ from openpype.modules import ( class ClockifyModule( - PypeModule, + OpenPypeModule, ITrayModule, IPluginPaths, IFtrackEventHandlerPaths, diff --git a/openpype/modules/clockify/constants.py b/openpype/modules/default_modules/clockify/constants.py similarity index 100% rename from openpype/modules/clockify/constants.py rename to openpype/modules/default_modules/clockify/constants.py diff --git a/openpype/modules/clockify/ftrack/server/action_clockify_sync_server.py b/openpype/modules/default_modules/clockify/ftrack/server/action_clockify_sync_server.py similarity index 97% rename from openpype/modules/clockify/ftrack/server/action_clockify_sync_server.py rename to openpype/modules/default_modules/clockify/ftrack/server/action_clockify_sync_server.py index 495f87dc7e..c6b55947da 100644 --- 
a/openpype/modules/clockify/ftrack/server/action_clockify_sync_server.py +++ b/openpype/modules/default_modules/clockify/ftrack/server/action_clockify_sync_server.py @@ -1,7 +1,7 @@ import os import json -from openpype.modules.ftrack.lib import ServerAction -from openpype.modules.clockify.clockify_api import ClockifyAPI +from openpype_modules.ftrack.lib import ServerAction +from openpype_modules.clockify.clockify_api import ClockifyAPI class SyncClocifyServer(ServerAction): diff --git a/openpype/modules/clockify/ftrack/user/action_clockify_sync_local.py b/openpype/modules/default_modules/clockify/ftrack/user/action_clockify_sync_local.py similarity index 96% rename from openpype/modules/clockify/ftrack/user/action_clockify_sync_local.py rename to openpype/modules/default_modules/clockify/ftrack/user/action_clockify_sync_local.py index 4f4579a8bf..a430791906 100644 --- a/openpype/modules/clockify/ftrack/user/action_clockify_sync_local.py +++ b/openpype/modules/default_modules/clockify/ftrack/user/action_clockify_sync_local.py @@ -1,6 +1,6 @@ import json -from openpype.modules.ftrack.lib import BaseAction, statics_icon -from openpype.modules.clockify.clockify_api import ClockifyAPI +from openpype_modules.ftrack.lib import BaseAction, statics_icon +from openpype_modules.clockify.clockify_api import ClockifyAPI class SyncClocifyLocal(BaseAction): diff --git a/openpype/modules/clockify/launcher_actions/ClockifyStart.py b/openpype/modules/default_modules/clockify/launcher_actions/ClockifyStart.py similarity index 95% rename from openpype/modules/clockify/launcher_actions/ClockifyStart.py rename to openpype/modules/default_modules/clockify/launcher_actions/ClockifyStart.py index c431ea240d..db51964eb7 100644 --- a/openpype/modules/clockify/launcher_actions/ClockifyStart.py +++ b/openpype/modules/default_modules/clockify/launcher_actions/ClockifyStart.py @@ -1,6 +1,6 @@ from avalon import api, io from openpype.api import Logger -from openpype.modules.clockify.clockify_api 
import ClockifyAPI +from openpype_modules.clockify.clockify_api import ClockifyAPI log = Logger().get_logger(__name__) diff --git a/openpype/modules/clockify/launcher_actions/ClockifySync.py b/openpype/modules/default_modules/clockify/launcher_actions/ClockifySync.py similarity index 97% rename from openpype/modules/clockify/launcher_actions/ClockifySync.py rename to openpype/modules/default_modules/clockify/launcher_actions/ClockifySync.py index 1bb168a80b..02982d373a 100644 --- a/openpype/modules/clockify/launcher_actions/ClockifySync.py +++ b/openpype/modules/default_modules/clockify/launcher_actions/ClockifySync.py @@ -1,5 +1,5 @@ from avalon import api, io -from openpype.modules.clockify.clockify_api import ClockifyAPI +from openpype_modules.clockify.clockify_api import ClockifyAPI from openpype.api import Logger log = Logger().get_logger(__name__) diff --git a/openpype/modules/clockify/widgets.py b/openpype/modules/default_modules/clockify/widgets.py similarity index 100% rename from openpype/modules/clockify/widgets.py rename to openpype/modules/default_modules/clockify/widgets.py diff --git a/openpype/modules/deadline/__init__.py b/openpype/modules/default_modules/deadline/__init__.py similarity index 100% rename from openpype/modules/deadline/__init__.py rename to openpype/modules/default_modules/deadline/__init__.py diff --git a/openpype/modules/deadline/deadline_module.py b/openpype/modules/default_modules/deadline/deadline_module.py similarity index 88% rename from openpype/modules/deadline/deadline_module.py rename to openpype/modules/default_modules/deadline/deadline_module.py index a07cb1a660..ada5e8225a 100644 --- a/openpype/modules/deadline/deadline_module.py +++ b/openpype/modules/default_modules/deadline/deadline_module.py @@ -1,9 +1,9 @@ import os -from openpype.modules import ( - PypeModule, IPluginPaths) +from openpype.modules import OpenPypeModule +from openpype_interfaces import IPluginPaths -class DeadlineModule(PypeModule, IPluginPaths): 
+class DeadlineModule(OpenPypeModule, IPluginPaths): name = "deadline" def __init__(self, manager, settings): diff --git a/openpype/modules/deadline/plugins/publish/collect_deadline_server_from_instance.py b/openpype/modules/default_modules/deadline/plugins/publish/collect_deadline_server_from_instance.py similarity index 100% rename from openpype/modules/deadline/plugins/publish/collect_deadline_server_from_instance.py rename to openpype/modules/default_modules/deadline/plugins/publish/collect_deadline_server_from_instance.py diff --git a/openpype/modules/deadline/plugins/publish/collect_default_deadline_server.py b/openpype/modules/default_modules/deadline/plugins/publish/collect_default_deadline_server.py similarity index 100% rename from openpype/modules/deadline/plugins/publish/collect_default_deadline_server.py rename to openpype/modules/default_modules/deadline/plugins/publish/collect_default_deadline_server.py diff --git a/openpype/modules/deadline/plugins/publish/submit_aftereffects_deadline.py b/openpype/modules/default_modules/deadline/plugins/publish/submit_aftereffects_deadline.py similarity index 100% rename from openpype/modules/deadline/plugins/publish/submit_aftereffects_deadline.py rename to openpype/modules/default_modules/deadline/plugins/publish/submit_aftereffects_deadline.py diff --git a/openpype/modules/deadline/plugins/publish/submit_harmony_deadline.py b/openpype/modules/default_modules/deadline/plugins/publish/submit_harmony_deadline.py similarity index 100% rename from openpype/modules/deadline/plugins/publish/submit_harmony_deadline.py rename to openpype/modules/default_modules/deadline/plugins/publish/submit_harmony_deadline.py diff --git a/openpype/modules/default_modules/deadline/plugins/publish/submit_houdini_remote_publish.py b/openpype/modules/default_modules/deadline/plugins/publish/submit_houdini_remote_publish.py new file mode 100644 index 0000000000..9ada437716 --- /dev/null +++ 
b/openpype/modules/default_modules/deadline/plugins/publish/submit_houdini_remote_publish.py @@ -0,0 +1,153 @@ +import os +import json + +import hou + +from avalon import api, io +from avalon.vendor import requests + +import pyblish.api + + +class HoudiniSubmitPublishDeadline(pyblish.api.ContextPlugin): + """Submit Houdini scene to perform a local publish in Deadline. + + Publishing in Deadline can be helpful for scenes that publish very slow. + This way it can process in the background on another machine without the + Artist having to wait for the publish to finish on their local machine. + + Submission is done through the Deadline Web Service as + supplied via the environment variable AVALON_DEADLINE. + + """ + + label = "Submit Scene to Deadline" + order = pyblish.api.IntegratorOrder + hosts = ["houdini"] + families = ["*"] + targets = ["deadline"] + + def process(self, context): + + # Ensure no errors so far + assert all( + result["success"] for result in context.data["results"] + ), "Errors found, aborting integration.." + + # Deadline connection + AVALON_DEADLINE = api.Session.get( + "AVALON_DEADLINE", "http://localhost:8082" + ) + assert AVALON_DEADLINE, "Requires AVALON_DEADLINE" + + # Note that `publish` data member might change in the future. + # See: https://github.com/pyblish/pyblish-base/issues/307 + actives = [i for i in context if i.data["publish"]] + instance_names = sorted(instance.name for instance in actives) + + if not instance_names: + self.log.warning( + "No active instances found. " "Skipping submission.." 
+ ) + return + + scene = context.data["currentFile"] + scenename = os.path.basename(scene) + + # Get project code + project = io.find_one({"type": "project"}) + code = project["data"].get("code", project["name"]) + + job_name = "{scene} [PUBLISH]".format(scene=scenename) + batch_name = "{code} - {scene}".format(code=code, scene=scenename) + deadline_user = "roy" # todo: get deadline user dynamically + + # Get only major.minor version of Houdini, ignore patch version + version = hou.applicationVersionString() + version = ".".join(version.split(".")[:2]) + + # Generate the payload for Deadline submission + payload = { + "JobInfo": { + "Plugin": "Houdini", + "Pool": "houdini", # todo: remove hardcoded pool + "BatchName": batch_name, + "Comment": context.data.get("comment", ""), + "Priority": 50, + "Frames": "1-1", # Always trigger a single frame + "IsFrameDependent": False, + "Name": job_name, + "UserName": deadline_user, + # "Comment": instance.context.data.get("comment", ""), + # "InitialStatus": state + }, + "PluginInfo": { + "Build": None, # Don't force build + "IgnoreInputs": True, + # Inputs + "SceneFile": scene, + "OutputDriver": "/out/REMOTE_PUBLISH", + # Mandatory for Deadline + "Version": version, + }, + # Mandatory for Deadline, may be empty + "AuxFiles": [], + } + + # Process submission per individual instance if the submission + # is set to publish each instance as a separate job. Else submit + # a single job to process all instances. 
+ per_instance = context.data.get("separateJobPerInstance", False) + if per_instance: + # Submit a job per instance + job_name = payload["JobInfo"]["Name"] + for instance in instance_names: + # Clarify job name per submission (include instance name) + payload["JobInfo"]["Name"] = job_name + " - %s" % instance + self.submit_job( + payload, instances=[instance], deadline=AVALON_DEADLINE + ) + else: + # Submit a single job + self.submit_job( + payload, instances=instance_names, deadline=AVALON_DEADLINE + ) + + def submit_job(self, payload, instances, deadline): + + # Ensure we operate on a copy, a shallow copy is fine. + payload = payload.copy() + + # Include critical environment variables with submission + api.Session + keys = [ + # Submit along the current Avalon tool setup that we launched + # this application with so the Render Slave can build its own + # similar environment using it, e.g. "houdini17.5;pluginx2.3" + "AVALON_TOOLS", + ] + + environment = dict( + {key: os.environ[key] for key in keys if key in os.environ}, + **api.Session + ) + environment["PYBLISH_ACTIVE_INSTANCES"] = ",".join(instances) + + payload["JobInfo"].update( + { + "EnvironmentKeyValue%d" + % index: "{key}={value}".format( + key=key, value=environment[key] + ) + for index, key in enumerate(environment) + } + ) + + # Submit + self.log.info("Submitting..") + self.log.debug(json.dumps(payload, indent=4, sort_keys=True)) + + # E.g. 
http://192.168.0.1:8082/api/jobs + url = "{}/api/jobs".format(deadline) + response = requests.post(url, json=payload) + if not response.ok: + raise Exception(response.text) diff --git a/openpype/modules/default_modules/deadline/plugins/publish/submit_houdini_render_deadline.py b/openpype/modules/default_modules/deadline/plugins/publish/submit_houdini_render_deadline.py new file mode 100644 index 0000000000..f471d788b6 --- /dev/null +++ b/openpype/modules/default_modules/deadline/plugins/publish/submit_houdini_render_deadline.py @@ -0,0 +1,158 @@ +import os +import json +import getpass + +from avalon import api +from avalon.vendor import requests + +import pyblish.api + +import hou + + +class HoudiniSubmitRenderDeadline(pyblish.api.InstancePlugin): + """Submit Solaris USD Render ROPs to Deadline. + + Renders are submitted to a Deadline Web Service as + supplied via the environment variable AVALON_DEADLINE. + + Target "local": + Even though this does *not* render locally this is seen as + a 'local' submission as it is the regular way of submitting + a Houdini render locally. 
+ + """ + + label = "Submit Render to Deadline" + order = pyblish.api.IntegratorOrder + hosts = ["houdini"] + families = ["usdrender", + "redshift_rop"] + targets = ["local"] + + def process(self, instance): + + context = instance.context + code = context.data["code"] + filepath = context.data["currentFile"] + filename = os.path.basename(filepath) + comment = context.data.get("comment", "") + deadline_user = context.data.get("deadlineUser", getpass.getuser()) + jobname = "%s - %s" % (filename, instance.name) + + # Support code prefix label for batch name + batch_name = filename + if code: + batch_name = "{0} - {1}".format(code, batch_name) + + # Output driver to render + driver = instance[0] + + # StartFrame to EndFrame by byFrameStep + frames = "{start}-{end}x{step}".format( + start=int(instance.data["startFrame"]), + end=int(instance.data["endFrame"]), + step=int(instance.data["byFrameStep"]), + ) + + # Documentation for keys available at: + # https://docs.thinkboxsoftware.com + # /products/deadline/8.0/1_User%20Manual/manual + # /manual-submission.html#job-info-file-options + payload = { + "JobInfo": { + # Top-level group name + "BatchName": batch_name, + + # Job name, as seen in Monitor + "Name": jobname, + + # Arbitrary username, for visualisation in Monitor + "UserName": deadline_user, + + "Plugin": "Houdini", + "Pool": "houdini_redshift", # todo: remove hardcoded pool + "Frames": frames, + + "ChunkSize": instance.data.get("chunkSize", 10), + + "Comment": comment + }, + "PluginInfo": { + # Input + "SceneFile": filepath, + "OutputDriver": driver.path(), + + # Mandatory for Deadline + # Houdini version without patch number + "Version": hou.applicationVersionString().rsplit(".", 1)[0], + + "IgnoreInputs": True + }, + + # Mandatory for Deadline, may be empty + "AuxFiles": [] + } + + # Include critical environment variables with submission + api.Session + keys = [ + # Submit along the current Avalon tool setup that we launched + # this application with so the 
Render Slave can build its own + # similar environment using it, e.g. "maya2018;vray4.x;yeti3.1.9" + "AVALON_TOOLS", + ] + environment = dict({key: os.environ[key] for key in keys + if key in os.environ}, **api.Session) + + payload["JobInfo"].update({ + "EnvironmentKeyValue%d" % index: "{key}={value}".format( + key=key, + value=environment[key] + ) for index, key in enumerate(environment) + }) + + # Include OutputFilename entries + # The first entry also enables double-click to preview rendered + # frames from Deadline Monitor + output_data = {} + for i, filepath in enumerate(instance.data["files"]): + dirname = os.path.dirname(filepath) + fname = os.path.basename(filepath) + output_data["OutputDirectory%d" % i] = dirname.replace("\\", "/") + output_data["OutputFilename%d" % i] = fname + + # For now ensure destination folder exists otherwise HUSK + # will fail to render the output image. This is supposedly fixed + # in new production builds of Houdini + # TODO Remove this workaround with Houdini 18.0.391+ + if not os.path.exists(dirname): + self.log.info("Ensuring output directory exists: %s" % + dirname) + os.makedirs(dirname) + + payload["JobInfo"].update(output_data) + + self.submit(instance, payload) + + def submit(self, instance, payload): + + AVALON_DEADLINE = api.Session.get("AVALON_DEADLINE", + "http://localhost:8082") + assert AVALON_DEADLINE, "Requires AVALON_DEADLINE" + + plugin = payload["JobInfo"]["Plugin"] + self.log.info("Using Render Plugin : {}".format(plugin)) + + self.log.info("Submitting..") + self.log.debug(json.dumps(payload, indent=4, sort_keys=True)) + + # E.g. 
http://192.168.0.1:8082/api/jobs + url = "{}/api/jobs".format(AVALON_DEADLINE) + response = requests.post(url, json=payload) + if not response.ok: + raise Exception(response.text) + + # Store output dir for unified publisher (filesequence) + output_dir = os.path.dirname(instance.data["files"][0]) + instance.data["outputDir"] = output_dir + instance.data["deadlineSubmissionJob"] = response.json() diff --git a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py b/openpype/modules/default_modules/deadline/plugins/publish/submit_maya_deadline.py similarity index 100% rename from openpype/modules/deadline/plugins/publish/submit_maya_deadline.py rename to openpype/modules/default_modules/deadline/plugins/publish/submit_maya_deadline.py diff --git a/openpype/modules/deadline/plugins/publish/submit_nuke_deadline.py b/openpype/modules/default_modules/deadline/plugins/publish/submit_nuke_deadline.py similarity index 100% rename from openpype/modules/deadline/plugins/publish/submit_nuke_deadline.py rename to openpype/modules/default_modules/deadline/plugins/publish/submit_nuke_deadline.py diff --git a/openpype/modules/deadline/plugins/publish/submit_publish_job.py b/openpype/modules/default_modules/deadline/plugins/publish/submit_publish_job.py similarity index 100% rename from openpype/modules/deadline/plugins/publish/submit_publish_job.py rename to openpype/modules/default_modules/deadline/plugins/publish/submit_publish_job.py diff --git a/openpype/modules/deadline/plugins/publish/validate_deadline_connection.py b/openpype/modules/default_modules/deadline/plugins/publish/validate_deadline_connection.py similarity index 100% rename from openpype/modules/deadline/plugins/publish/validate_deadline_connection.py rename to openpype/modules/default_modules/deadline/plugins/publish/validate_deadline_connection.py diff --git a/openpype/modules/deadline/plugins/publish/validate_expected_and_rendered_files.py 
b/openpype/modules/default_modules/deadline/plugins/publish/validate_expected_and_rendered_files.py similarity index 100% rename from openpype/modules/deadline/plugins/publish/validate_expected_and_rendered_files.py rename to openpype/modules/default_modules/deadline/plugins/publish/validate_expected_and_rendered_files.py diff --git a/openpype/modules/ftrack/__init__.py b/openpype/modules/default_modules/ftrack/__init__.py similarity index 67% rename from openpype/modules/ftrack/__init__.py rename to openpype/modules/default_modules/ftrack/__init__.py index c1a557812c..7261254c6f 100644 --- a/openpype/modules/ftrack/__init__.py +++ b/openpype/modules/default_modules/ftrack/__init__.py @@ -1,11 +1,9 @@ from .ftrack_module import ( FtrackModule, - IFtrackEventHandlerPaths, FTRACK_MODULE_DIR ) __all__ = ( "FtrackModule", - "IFtrackEventHandlerPaths", "FTRACK_MODULE_DIR" ) diff --git a/openpype/modules/ftrack/event_handlers_server/action_clone_review_session.py b/openpype/modules/default_modules/ftrack/event_handlers_server/action_clone_review_session.py similarity index 98% rename from openpype/modules/ftrack/event_handlers_server/action_clone_review_session.py rename to openpype/modules/default_modules/ftrack/event_handlers_server/action_clone_review_session.py index 59c8bffb75..1ad7a17785 100644 --- a/openpype/modules/ftrack/event_handlers_server/action_clone_review_session.py +++ b/openpype/modules/default_modules/ftrack/event_handlers_server/action_clone_review_session.py @@ -1,6 +1,6 @@ import json -from openpype.modules.ftrack.lib import ServerAction +from openpype_modules.ftrack.lib import ServerAction def clone_review_session(session, entity): diff --git a/openpype/modules/ftrack/event_handlers_server/action_multiple_notes.py b/openpype/modules/default_modules/ftrack/event_handlers_server/action_multiple_notes.py similarity index 98% rename from openpype/modules/ftrack/event_handlers_server/action_multiple_notes.py rename to 
openpype/modules/default_modules/ftrack/event_handlers_server/action_multiple_notes.py index 9ad7b1a969..f9aac2c80a 100644 --- a/openpype/modules/ftrack/event_handlers_server/action_multiple_notes.py +++ b/openpype/modules/default_modules/ftrack/event_handlers_server/action_multiple_notes.py @@ -1,4 +1,4 @@ -from openpype.modules.ftrack.lib import ServerAction +from openpype_modules.ftrack.lib import ServerAction class MultipleNotesServer(ServerAction): diff --git a/openpype/modules/ftrack/event_handlers_server/action_prepare_project.py b/openpype/modules/default_modules/ftrack/event_handlers_server/action_prepare_project.py similarity index 99% rename from openpype/modules/ftrack/event_handlers_server/action_prepare_project.py rename to openpype/modules/default_modules/ftrack/event_handlers_server/action_prepare_project.py index 3a96ae3311..85317031b2 100644 --- a/openpype/modules/ftrack/event_handlers_server/action_prepare_project.py +++ b/openpype/modules/default_modules/ftrack/event_handlers_server/action_prepare_project.py @@ -4,7 +4,7 @@ from avalon.api import AvalonMongoDB from openpype.api import ProjectSettings from openpype.lib import create_project -from openpype.modules.ftrack.lib import ( +from openpype_modules.ftrack.lib import ( ServerAction, get_openpype_attr, CUST_ATTR_AUTO_SYNC diff --git a/openpype/modules/ftrack/event_handlers_server/action_private_project_detection.py b/openpype/modules/default_modules/ftrack/event_handlers_server/action_private_project_detection.py similarity index 97% rename from openpype/modules/ftrack/event_handlers_server/action_private_project_detection.py rename to openpype/modules/default_modules/ftrack/event_handlers_server/action_private_project_detection.py index 5213e10ba3..62772740cd 100644 --- a/openpype/modules/ftrack/event_handlers_server/action_private_project_detection.py +++ b/openpype/modules/default_modules/ftrack/event_handlers_server/action_private_project_detection.py @@ -1,4 +1,4 @@ -from 
openpype.modules.ftrack.lib import ServerAction +from openpype_modules.ftrack.lib import ServerAction class PrivateProjectDetectionAction(ServerAction): diff --git a/openpype/modules/ftrack/event_handlers_server/action_push_frame_values_to_task.py b/openpype/modules/default_modules/ftrack/event_handlers_server/action_push_frame_values_to_task.py similarity index 99% rename from openpype/modules/ftrack/event_handlers_server/action_push_frame_values_to_task.py rename to openpype/modules/default_modules/ftrack/event_handlers_server/action_push_frame_values_to_task.py index b38e18d089..3f63ce6fac 100644 --- a/openpype/modules/ftrack/event_handlers_server/action_push_frame_values_to_task.py +++ b/openpype/modules/default_modules/ftrack/event_handlers_server/action_push_frame_values_to_task.py @@ -2,7 +2,7 @@ import sys import json import collections import ftrack_api -from openpype.modules.ftrack.lib import ServerAction +from openpype_modules.ftrack.lib import ServerAction class PushHierValuesToNonHier(ServerAction): diff --git a/openpype/modules/ftrack/event_handlers_server/action_sync_to_avalon.py b/openpype/modules/default_modules/ftrack/event_handlers_server/action_sync_to_avalon.py similarity index 98% rename from openpype/modules/ftrack/event_handlers_server/action_sync_to_avalon.py rename to openpype/modules/default_modules/ftrack/event_handlers_server/action_sync_to_avalon.py index 8f78f998ac..d449c4b7df 100644 --- a/openpype/modules/ftrack/event_handlers_server/action_sync_to_avalon.py +++ b/openpype/modules/default_modules/ftrack/event_handlers_server/action_sync_to_avalon.py @@ -1,8 +1,8 @@ import time import traceback -from openpype.modules.ftrack.lib import ServerAction -from openpype.modules.ftrack.lib.avalon_sync import SyncEntitiesFactory +from openpype_modules.ftrack.lib import ServerAction +from openpype_modules.ftrack.lib.avalon_sync import SyncEntitiesFactory class SyncToAvalonServer(ServerAction): diff --git 
a/openpype/modules/ftrack/event_handlers_server/event_del_avalon_id_from_new.py b/openpype/modules/default_modules/ftrack/event_handlers_server/event_del_avalon_id_from_new.py similarity index 90% rename from openpype/modules/ftrack/event_handlers_server/event_del_avalon_id_from_new.py rename to openpype/modules/default_modules/ftrack/event_handlers_server/event_del_avalon_id_from_new.py index 078596cc2e..35b5d809fd 100644 --- a/openpype/modules/ftrack/event_handlers_server/event_del_avalon_id_from_new.py +++ b/openpype/modules/default_modules/ftrack/event_handlers_server/event_del_avalon_id_from_new.py @@ -1,6 +1,6 @@ -from openpype.modules.ftrack.lib import BaseEvent -from openpype.modules.ftrack.lib.avalon_sync import CUST_ATTR_ID_KEY -from openpype.modules.ftrack.event_handlers_server.event_sync_to_avalon import ( +from openpype_modules.ftrack.lib import BaseEvent +from openpype_modules.ftrack.lib.avalon_sync import CUST_ATTR_ID_KEY +from openpype_modules.ftrack.event_handlers_server.event_sync_to_avalon import ( SyncToAvalonEvent ) diff --git a/openpype/modules/ftrack/event_handlers_server/event_first_version_status.py b/openpype/modules/default_modules/ftrack/event_handlers_server/event_first_version_status.py similarity index 99% rename from openpype/modules/ftrack/event_handlers_server/event_first_version_status.py rename to openpype/modules/default_modules/ftrack/event_handlers_server/event_first_version_status.py index 511f62a207..ecc6c95d90 100644 --- a/openpype/modules/ftrack/event_handlers_server/event_first_version_status.py +++ b/openpype/modules/default_modules/ftrack/event_handlers_server/event_first_version_status.py @@ -1,4 +1,4 @@ -from openpype.modules.ftrack.lib import BaseEvent +from openpype_modules.ftrack.lib import BaseEvent class FirstVersionStatus(BaseEvent): diff --git a/openpype/modules/ftrack/event_handlers_server/event_next_task_update.py b/openpype/modules/default_modules/ftrack/event_handlers_server/event_next_task_update.py 
similarity index 99% rename from openpype/modules/ftrack/event_handlers_server/event_next_task_update.py rename to openpype/modules/default_modules/ftrack/event_handlers_server/event_next_task_update.py index ad62beb296..a65ae46545 100644 --- a/openpype/modules/ftrack/event_handlers_server/event_next_task_update.py +++ b/openpype/modules/default_modules/ftrack/event_handlers_server/event_next_task_update.py @@ -1,5 +1,5 @@ import collections -from openpype.modules.ftrack.lib import BaseEvent +from openpype_modules.ftrack.lib import BaseEvent class NextTaskUpdate(BaseEvent): diff --git a/openpype/modules/ftrack/event_handlers_server/event_push_frame_values_to_task.py b/openpype/modules/default_modules/ftrack/event_handlers_server/event_push_frame_values_to_task.py similarity index 99% rename from openpype/modules/ftrack/event_handlers_server/event_push_frame_values_to_task.py rename to openpype/modules/default_modules/ftrack/event_handlers_server/event_push_frame_values_to_task.py index 81719258e1..10b165e7f6 100644 --- a/openpype/modules/ftrack/event_handlers_server/event_push_frame_values_to_task.py +++ b/openpype/modules/default_modules/ftrack/event_handlers_server/event_push_frame_values_to_task.py @@ -2,7 +2,7 @@ import collections import datetime import ftrack_api -from openpype.modules.ftrack.lib import ( +from openpype_modules.ftrack.lib import ( BaseEvent, query_custom_attributes ) diff --git a/openpype/modules/ftrack/event_handlers_server/event_radio_buttons.py b/openpype/modules/default_modules/ftrack/event_handlers_server/event_radio_buttons.py similarity index 96% rename from openpype/modules/ftrack/event_handlers_server/event_radio_buttons.py rename to openpype/modules/default_modules/ftrack/event_handlers_server/event_radio_buttons.py index 1ebd7b68d2..99ad3aec37 100644 --- a/openpype/modules/ftrack/event_handlers_server/event_radio_buttons.py +++ b/openpype/modules/default_modules/ftrack/event_handlers_server/event_radio_buttons.py @@ -1,5 +1,5 @@ 
import ftrack_api -from openpype.modules.ftrack.lib import BaseEvent +from openpype_modules.ftrack.lib import BaseEvent class RadioButtons(BaseEvent): diff --git a/openpype/modules/ftrack/event_handlers_server/event_sync_to_avalon.py b/openpype/modules/default_modules/ftrack/event_handlers_server/event_sync_to_avalon.py similarity index 99% rename from openpype/modules/ftrack/event_handlers_server/event_sync_to_avalon.py rename to openpype/modules/default_modules/ftrack/event_handlers_server/event_sync_to_avalon.py index 1dd056adee..93a0404c0b 100644 --- a/openpype/modules/ftrack/event_handlers_server/event_sync_to_avalon.py +++ b/openpype/modules/default_modules/ftrack/event_handlers_server/event_sync_to_avalon.py @@ -17,7 +17,7 @@ import ftrack_api from avalon import schema from avalon.api import AvalonMongoDB -from openpype.modules.ftrack.lib import ( +from openpype_modules.ftrack.lib import ( get_openpype_attr, CUST_ATTR_ID_KEY, CUST_ATTR_AUTO_SYNC, diff --git a/openpype/modules/ftrack/event_handlers_server/event_task_to_parent_status.py b/openpype/modules/default_modules/ftrack/event_handlers_server/event_task_to_parent_status.py similarity index 99% rename from openpype/modules/ftrack/event_handlers_server/event_task_to_parent_status.py rename to openpype/modules/default_modules/ftrack/event_handlers_server/event_task_to_parent_status.py index 4192a4bed0..a0e039926e 100644 --- a/openpype/modules/ftrack/event_handlers_server/event_task_to_parent_status.py +++ b/openpype/modules/default_modules/ftrack/event_handlers_server/event_task_to_parent_status.py @@ -1,5 +1,5 @@ import collections -from openpype.modules.ftrack.lib import BaseEvent +from openpype_modules.ftrack.lib import BaseEvent class TaskStatusToParent(BaseEvent): diff --git a/openpype/modules/ftrack/event_handlers_server/event_task_to_version_status.py b/openpype/modules/default_modules/ftrack/event_handlers_server/event_task_to_version_status.py similarity index 99% rename from 
openpype/modules/ftrack/event_handlers_server/event_task_to_version_status.py rename to openpype/modules/default_modules/ftrack/event_handlers_server/event_task_to_version_status.py index f2d3723021..b77849c678 100644 --- a/openpype/modules/ftrack/event_handlers_server/event_task_to_version_status.py +++ b/openpype/modules/default_modules/ftrack/event_handlers_server/event_task_to_version_status.py @@ -1,5 +1,5 @@ import collections -from openpype.modules.ftrack.lib import BaseEvent +from openpype_modules.ftrack.lib import BaseEvent class TaskToVersionStatus(BaseEvent): diff --git a/openpype/modules/ftrack/event_handlers_server/event_thumbnail_updates.py b/openpype/modules/default_modules/ftrack/event_handlers_server/event_thumbnail_updates.py similarity index 99% rename from openpype/modules/ftrack/event_handlers_server/event_thumbnail_updates.py rename to openpype/modules/default_modules/ftrack/event_handlers_server/event_thumbnail_updates.py index cbeeeee5c5..64673f792c 100644 --- a/openpype/modules/ftrack/event_handlers_server/event_thumbnail_updates.py +++ b/openpype/modules/default_modules/ftrack/event_handlers_server/event_thumbnail_updates.py @@ -1,5 +1,5 @@ import collections -from openpype.modules.ftrack.lib import BaseEvent +from openpype_modules.ftrack.lib import BaseEvent class ThumbnailEvents(BaseEvent): diff --git a/openpype/modules/ftrack/event_handlers_server/event_user_assigment.py b/openpype/modules/default_modules/ftrack/event_handlers_server/event_user_assigment.py similarity index 98% rename from openpype/modules/ftrack/event_handlers_server/event_user_assigment.py rename to openpype/modules/default_modules/ftrack/event_handlers_server/event_user_assigment.py index a0734e14a1..efc1e76775 100644 --- a/openpype/modules/ftrack/event_handlers_server/event_user_assigment.py +++ b/openpype/modules/default_modules/ftrack/event_handlers_server/event_user_assigment.py @@ -2,8 +2,8 @@ import os import re import subprocess -from 
openpype.modules.ftrack.lib import BaseEvent -from openpype.modules.ftrack.lib.avalon_sync import CUST_ATTR_ID_KEY +from openpype_modules.ftrack.lib import BaseEvent +from openpype_modules.ftrack.lib.avalon_sync import CUST_ATTR_ID_KEY from avalon.api import AvalonMongoDB from bson.objectid import ObjectId diff --git a/openpype/modules/ftrack/event_handlers_server/event_version_to_task_statuses.py b/openpype/modules/default_modules/ftrack/event_handlers_server/event_version_to_task_statuses.py similarity index 99% rename from openpype/modules/ftrack/event_handlers_server/event_version_to_task_statuses.py rename to openpype/modules/default_modules/ftrack/event_handlers_server/event_version_to_task_statuses.py index f215bedcc2..e36c3eecd9 100644 --- a/openpype/modules/ftrack/event_handlers_server/event_version_to_task_statuses.py +++ b/openpype/modules/default_modules/ftrack/event_handlers_server/event_version_to_task_statuses.py @@ -1,4 +1,4 @@ -from openpype.modules.ftrack.lib import BaseEvent +from openpype_modules.ftrack.lib import BaseEvent class VersionToTaskStatus(BaseEvent): diff --git a/openpype/modules/ftrack/event_handlers_user/action_applications.py b/openpype/modules/default_modules/ftrack/event_handlers_user/action_applications.py similarity index 99% rename from openpype/modules/ftrack/event_handlers_user/action_applications.py rename to openpype/modules/default_modules/ftrack/event_handlers_user/action_applications.py index 74d14c2fc4..6d45d43958 100644 --- a/openpype/modules/ftrack/event_handlers_user/action_applications.py +++ b/openpype/modules/default_modules/ftrack/event_handlers_user/action_applications.py @@ -1,7 +1,7 @@ import os from uuid import uuid4 -from openpype.modules.ftrack.lib import BaseAction +from openpype_modules.ftrack.lib import BaseAction from openpype.lib import ( ApplicationManager, ApplicationLaunchFailed, diff --git a/openpype/modules/ftrack/event_handlers_user/action_batch_task_creation.py 
b/openpype/modules/default_modules/ftrack/event_handlers_user/action_batch_task_creation.py similarity index 98% rename from openpype/modules/ftrack/event_handlers_user/action_batch_task_creation.py rename to openpype/modules/default_modules/ftrack/event_handlers_user/action_batch_task_creation.py index b9f0e7c5d3..c7fb1af98b 100644 --- a/openpype/modules/ftrack/event_handlers_user/action_batch_task_creation.py +++ b/openpype/modules/default_modules/ftrack/event_handlers_user/action_batch_task_creation.py @@ -2,7 +2,7 @@ Taken from https://github.com/tokejepsen/ftrack-hooks/tree/master/batch_tasks """ -from openpype.modules.ftrack.lib import BaseAction, statics_icon +from openpype_modules.ftrack.lib import BaseAction, statics_icon class BatchTasksAction(BaseAction): diff --git a/openpype/modules/ftrack/event_handlers_user/action_clean_hierarchical_attributes.py b/openpype/modules/default_modules/ftrack/event_handlers_user/action_clean_hierarchical_attributes.py similarity index 98% rename from openpype/modules/ftrack/event_handlers_user/action_clean_hierarchical_attributes.py rename to openpype/modules/default_modules/ftrack/event_handlers_user/action_clean_hierarchical_attributes.py index 45cc9adf55..dc97ed972d 100644 --- a/openpype/modules/ftrack/event_handlers_user/action_clean_hierarchical_attributes.py +++ b/openpype/modules/default_modules/ftrack/event_handlers_user/action_clean_hierarchical_attributes.py @@ -1,6 +1,6 @@ import collections import ftrack_api -from openpype.modules.ftrack.lib import ( +from openpype_modules.ftrack.lib import ( BaseAction, statics_icon, get_openpype_attr diff --git a/openpype/modules/ftrack/event_handlers_user/action_client_review_sort.py b/openpype/modules/default_modules/ftrack/event_handlers_user/action_client_review_sort.py similarity index 97% rename from openpype/modules/ftrack/event_handlers_user/action_client_review_sort.py rename to openpype/modules/default_modules/ftrack/event_handlers_user/action_client_review_sort.py 
index 7c9a2881d6..5ad5f10e8e 100644 --- a/openpype/modules/ftrack/event_handlers_user/action_client_review_sort.py +++ b/openpype/modules/default_modules/ftrack/event_handlers_user/action_client_review_sort.py @@ -1,4 +1,4 @@ -from openpype.modules.ftrack.lib import BaseAction, statics_icon +from openpype_modules.ftrack.lib import BaseAction, statics_icon try: from functools import cmp_to_key except Exception: diff --git a/openpype/modules/ftrack/event_handlers_user/action_component_open.py b/openpype/modules/default_modules/ftrack/event_handlers_user/action_component_open.py similarity index 96% rename from openpype/modules/ftrack/event_handlers_user/action_component_open.py rename to openpype/modules/default_modules/ftrack/event_handlers_user/action_component_open.py index b3cdac0722..c731713c10 100644 --- a/openpype/modules/ftrack/event_handlers_user/action_component_open.py +++ b/openpype/modules/default_modules/ftrack/event_handlers_user/action_component_open.py @@ -1,7 +1,7 @@ import os import sys import subprocess -from openpype.modules.ftrack.lib import BaseAction, statics_icon +from openpype_modules.ftrack.lib import BaseAction, statics_icon class ComponentOpen(BaseAction): diff --git a/openpype/modules/ftrack/event_handlers_user/action_create_cust_attrs.py b/openpype/modules/default_modules/ftrack/event_handlers_user/action_create_cust_attrs.py similarity index 99% rename from openpype/modules/ftrack/event_handlers_user/action_create_cust_attrs.py rename to openpype/modules/default_modules/ftrack/event_handlers_user/action_create_cust_attrs.py index 63605eda5e..3869d8ad08 100644 --- a/openpype/modules/ftrack/event_handlers_user/action_create_cust_attrs.py +++ b/openpype/modules/default_modules/ftrack/event_handlers_user/action_create_cust_attrs.py @@ -2,7 +2,7 @@ import collections import json import arrow import ftrack_api -from openpype.modules.ftrack.lib import ( +from openpype_modules.ftrack.lib import ( BaseAction, statics_icon, @@ -43,7 +43,7 @@ 
dictionary level, task's attributes are nested more. group (string) - name of group - - based on attribute `openpype.modules.ftrack.lib.CUST_ATTR_GROUP` + - based on attribute `openpype_modules.ftrack.lib.CUST_ATTR_GROUP` - "pype" by default *** Required *************************************************************** diff --git a/openpype/modules/ftrack/event_handlers_user/action_create_folders.py b/openpype/modules/default_modules/ftrack/event_handlers_user/action_create_folders.py similarity index 99% rename from openpype/modules/ftrack/event_handlers_user/action_create_folders.py rename to openpype/modules/default_modules/ftrack/event_handlers_user/action_create_folders.py index 075b8d3d25..994dbd90e4 100644 --- a/openpype/modules/ftrack/event_handlers_user/action_create_folders.py +++ b/openpype/modules/default_modules/ftrack/event_handlers_user/action_create_folders.py @@ -1,5 +1,5 @@ import os -from openpype.modules.ftrack.lib import BaseAction, statics_icon +from openpype_modules.ftrack.lib import BaseAction, statics_icon from avalon import lib as avalonlib from openpype.api import ( Anatomy, diff --git a/openpype/modules/ftrack/event_handlers_user/action_create_project_structure.py b/openpype/modules/default_modules/ftrack/event_handlers_user/action_create_project_structure.py similarity index 99% rename from openpype/modules/ftrack/event_handlers_user/action_create_project_structure.py rename to openpype/modules/default_modules/ftrack/event_handlers_user/action_create_project_structure.py index 035a1c60de..121c9f652b 100644 --- a/openpype/modules/ftrack/event_handlers_user/action_create_project_structure.py +++ b/openpype/modules/default_modules/ftrack/event_handlers_user/action_create_project_structure.py @@ -2,7 +2,7 @@ import os import re import json -from openpype.modules.ftrack.lib import BaseAction, statics_icon +from openpype_modules.ftrack.lib import BaseAction, statics_icon from openpype.api import Anatomy, get_project_settings diff --git 
a/openpype/modules/ftrack/event_handlers_user/action_delete_asset.py b/openpype/modules/default_modules/ftrack/event_handlers_user/action_delete_asset.py similarity index 99% rename from openpype/modules/ftrack/event_handlers_user/action_delete_asset.py rename to openpype/modules/default_modules/ftrack/event_handlers_user/action_delete_asset.py index c20491349f..f860065b26 100644 --- a/openpype/modules/ftrack/event_handlers_user/action_delete_asset.py +++ b/openpype/modules/default_modules/ftrack/event_handlers_user/action_delete_asset.py @@ -4,7 +4,7 @@ from datetime import datetime from queue import Queue from bson.objectid import ObjectId -from openpype.modules.ftrack.lib import BaseAction, statics_icon +from openpype_modules.ftrack.lib import BaseAction, statics_icon from avalon.api import AvalonMongoDB diff --git a/openpype/modules/ftrack/event_handlers_user/action_delete_old_versions.py b/openpype/modules/default_modules/ftrack/event_handlers_user/action_delete_old_versions.py similarity index 99% rename from openpype/modules/ftrack/event_handlers_user/action_delete_old_versions.py rename to openpype/modules/default_modules/ftrack/event_handlers_user/action_delete_old_versions.py index dbddc7a95e..063f086e9c 100644 --- a/openpype/modules/ftrack/event_handlers_user/action_delete_old_versions.py +++ b/openpype/modules/default_modules/ftrack/event_handlers_user/action_delete_old_versions.py @@ -5,7 +5,7 @@ import uuid import clique from pymongo import UpdateOne -from openpype.modules.ftrack.lib import BaseAction, statics_icon +from openpype_modules.ftrack.lib import BaseAction, statics_icon from avalon.api import AvalonMongoDB from openpype.api import Anatomy diff --git a/openpype/modules/ftrack/event_handlers_user/action_delivery.py b/openpype/modules/default_modules/ftrack/event_handlers_user/action_delivery.py similarity index 99% rename from openpype/modules/ftrack/event_handlers_user/action_delivery.py rename to 
openpype/modules/default_modules/ftrack/event_handlers_user/action_delivery.py index 2e7599647a..1f28b18900 100644 --- a/openpype/modules/ftrack/event_handlers_user/action_delivery.py +++ b/openpype/modules/default_modules/ftrack/event_handlers_user/action_delivery.py @@ -6,8 +6,8 @@ import collections from bson.objectid import ObjectId from openpype.api import Anatomy, config -from openpype.modules.ftrack.lib import BaseAction, statics_icon -from openpype.modules.ftrack.lib.avalon_sync import CUST_ATTR_ID_KEY +from openpype_modules.ftrack.lib import BaseAction, statics_icon +from openpype_modules.ftrack.lib.avalon_sync import CUST_ATTR_ID_KEY from openpype.lib.delivery import ( path_from_representation, get_format_dict, diff --git a/openpype/modules/ftrack/event_handlers_user/action_djvview.py b/openpype/modules/default_modules/ftrack/event_handlers_user/action_djvview.py similarity index 98% rename from openpype/modules/ftrack/event_handlers_user/action_djvview.py rename to openpype/modules/default_modules/ftrack/event_handlers_user/action_djvview.py index c05fbed2d0..c603a2d200 100644 --- a/openpype/modules/ftrack/event_handlers_user/action_djvview.py +++ b/openpype/modules/default_modules/ftrack/event_handlers_user/action_djvview.py @@ -1,7 +1,7 @@ import os import subprocess from operator import itemgetter -from openpype.modules.ftrack.lib import BaseAction, statics_icon +from openpype_modules.ftrack.lib import BaseAction, statics_icon class DJVViewAction(BaseAction): diff --git a/openpype/modules/ftrack/event_handlers_user/action_job_killer.py b/openpype/modules/default_modules/ftrack/event_handlers_user/action_job_killer.py similarity index 98% rename from openpype/modules/ftrack/event_handlers_user/action_job_killer.py rename to openpype/modules/default_modules/ftrack/event_handlers_user/action_job_killer.py index 47ed1e7895..af24e0280d 100644 --- a/openpype/modules/ftrack/event_handlers_user/action_job_killer.py +++ 
b/openpype/modules/default_modules/ftrack/event_handlers_user/action_job_killer.py @@ -1,5 +1,5 @@ import json -from openpype.modules.ftrack.lib import BaseAction, statics_icon +from openpype_modules.ftrack.lib import BaseAction, statics_icon class JobKiller(BaseAction): diff --git a/openpype/modules/ftrack/event_handlers_user/action_multiple_notes.py b/openpype/modules/default_modules/ftrack/event_handlers_user/action_multiple_notes.py similarity index 98% rename from openpype/modules/ftrack/event_handlers_user/action_multiple_notes.py rename to openpype/modules/default_modules/ftrack/event_handlers_user/action_multiple_notes.py index f5af044de0..825fd97b06 100644 --- a/openpype/modules/ftrack/event_handlers_user/action_multiple_notes.py +++ b/openpype/modules/default_modules/ftrack/event_handlers_user/action_multiple_notes.py @@ -1,4 +1,4 @@ -from openpype.modules.ftrack.lib import BaseAction, statics_icon +from openpype_modules.ftrack.lib import BaseAction, statics_icon class MultipleNotes(BaseAction): diff --git a/openpype/modules/ftrack/event_handlers_user/action_prepare_project.py b/openpype/modules/default_modules/ftrack/event_handlers_user/action_prepare_project.py similarity index 99% rename from openpype/modules/ftrack/event_handlers_user/action_prepare_project.py rename to openpype/modules/default_modules/ftrack/event_handlers_user/action_prepare_project.py index 4b42500e8f..87d3329179 100644 --- a/openpype/modules/ftrack/event_handlers_user/action_prepare_project.py +++ b/openpype/modules/default_modules/ftrack/event_handlers_user/action_prepare_project.py @@ -4,7 +4,7 @@ from avalon.api import AvalonMongoDB from openpype.api import ProjectSettings from openpype.lib import create_project -from openpype.modules.ftrack.lib import ( +from openpype_modules.ftrack.lib import ( BaseAction, statics_icon, get_openpype_attr, diff --git a/openpype/modules/ftrack/event_handlers_user/action_rv.py 
b/openpype/modules/default_modules/ftrack/event_handlers_user/action_rv.py similarity index 99% rename from openpype/modules/ftrack/event_handlers_user/action_rv.py rename to openpype/modules/default_modules/ftrack/event_handlers_user/action_rv.py index 3172b74261..71d790f7e7 100644 --- a/openpype/modules/ftrack/event_handlers_user/action_rv.py +++ b/openpype/modules/default_modules/ftrack/event_handlers_user/action_rv.py @@ -3,7 +3,7 @@ import subprocess import traceback import json -from openpype.modules.ftrack.lib import BaseAction, statics_icon +from openpype_modules.ftrack.lib import BaseAction, statics_icon import ftrack_api from avalon import io, api diff --git a/openpype/modules/ftrack/event_handlers_user/action_seed.py b/openpype/modules/default_modules/ftrack/event_handlers_user/action_seed.py similarity index 99% rename from openpype/modules/ftrack/event_handlers_user/action_seed.py rename to openpype/modules/default_modules/ftrack/event_handlers_user/action_seed.py index 1f01f0af1d..4021d70c0a 100644 --- a/openpype/modules/ftrack/event_handlers_user/action_seed.py +++ b/openpype/modules/default_modules/ftrack/event_handlers_user/action_seed.py @@ -1,6 +1,6 @@ import os from operator import itemgetter -from openpype.modules.ftrack.lib import BaseAction, statics_icon +from openpype_modules.ftrack.lib import BaseAction, statics_icon class SeedDebugProject(BaseAction): diff --git a/openpype/modules/ftrack/event_handlers_user/action_store_thumbnails_to_avalon.py b/openpype/modules/default_modules/ftrack/event_handlers_user/action_store_thumbnails_to_avalon.py similarity index 99% rename from openpype/modules/ftrack/event_handlers_user/action_store_thumbnails_to_avalon.py rename to openpype/modules/default_modules/ftrack/event_handlers_user/action_store_thumbnails_to_avalon.py index 4464e51d3d..4820925844 100644 --- a/openpype/modules/ftrack/event_handlers_user/action_store_thumbnails_to_avalon.py +++ 
b/openpype/modules/default_modules/ftrack/event_handlers_user/action_store_thumbnails_to_avalon.py @@ -4,11 +4,11 @@ import json import requests from bson.objectid import ObjectId -from openpype.modules.ftrack.lib import BaseAction, statics_icon +from openpype_modules.ftrack.lib import BaseAction, statics_icon from openpype.api import Anatomy from avalon.api import AvalonMongoDB -from openpype.modules.ftrack.lib.avalon_sync import CUST_ATTR_ID_KEY +from openpype_modules.ftrack.lib.avalon_sync import CUST_ATTR_ID_KEY class StoreThumbnailsToAvalon(BaseAction): diff --git a/openpype/modules/ftrack/event_handlers_user/action_sync_to_avalon.py b/openpype/modules/default_modules/ftrack/event_handlers_user/action_sync_to_avalon.py similarity index 98% rename from openpype/modules/ftrack/event_handlers_user/action_sync_to_avalon.py rename to openpype/modules/default_modules/ftrack/event_handlers_user/action_sync_to_avalon.py index 89fac7cf80..d6ca561bbe 100644 --- a/openpype/modules/ftrack/event_handlers_user/action_sync_to_avalon.py +++ b/openpype/modules/default_modules/ftrack/event_handlers_user/action_sync_to_avalon.py @@ -1,8 +1,8 @@ import time import traceback -from openpype.modules.ftrack.lib import BaseAction, statics_icon -from openpype.modules.ftrack.lib.avalon_sync import SyncEntitiesFactory +from openpype_modules.ftrack.lib import BaseAction, statics_icon +from openpype_modules.ftrack.lib.avalon_sync import SyncEntitiesFactory class SyncToAvalonLocal(BaseAction): diff --git a/openpype/modules/ftrack/event_handlers_user/action_test.py b/openpype/modules/default_modules/ftrack/event_handlers_user/action_test.py similarity index 89% rename from openpype/modules/ftrack/event_handlers_user/action_test.py rename to openpype/modules/default_modules/ftrack/event_handlers_user/action_test.py index 206c67de50..bd71ba5bf9 100644 --- a/openpype/modules/ftrack/event_handlers_user/action_test.py +++ 
b/openpype/modules/default_modules/ftrack/event_handlers_user/action_test.py @@ -1,4 +1,4 @@ -from openpype.modules.ftrack.lib import BaseAction, statics_icon +from openpype_modules.ftrack.lib import BaseAction, statics_icon class TestAction(BaseAction): diff --git a/openpype/modules/ftrack/event_handlers_user/action_thumbnail_to_childern.py b/openpype/modules/default_modules/ftrack/event_handlers_user/action_thumbnail_to_childern.py similarity index 96% rename from openpype/modules/ftrack/event_handlers_user/action_thumbnail_to_childern.py rename to openpype/modules/default_modules/ftrack/event_handlers_user/action_thumbnail_to_childern.py index a12f25b57d..3b90960160 100644 --- a/openpype/modules/ftrack/event_handlers_user/action_thumbnail_to_childern.py +++ b/openpype/modules/default_modules/ftrack/event_handlers_user/action_thumbnail_to_childern.py @@ -1,5 +1,5 @@ import json -from openpype.modules.ftrack.lib import BaseAction, statics_icon +from openpype_modules.ftrack.lib import BaseAction, statics_icon class ThumbToChildren(BaseAction): diff --git a/openpype/modules/ftrack/event_handlers_user/action_thumbnail_to_parent.py b/openpype/modules/default_modules/ftrack/event_handlers_user/action_thumbnail_to_parent.py similarity index 97% rename from openpype/modules/ftrack/event_handlers_user/action_thumbnail_to_parent.py rename to openpype/modules/default_modules/ftrack/event_handlers_user/action_thumbnail_to_parent.py index 284723bb0f..2f0110b7aa 100644 --- a/openpype/modules/ftrack/event_handlers_user/action_thumbnail_to_parent.py +++ b/openpype/modules/default_modules/ftrack/event_handlers_user/action_thumbnail_to_parent.py @@ -1,5 +1,5 @@ import json -from openpype.modules.ftrack.lib import BaseAction, statics_icon +from openpype_modules.ftrack.lib import BaseAction, statics_icon class ThumbToParent(BaseAction): diff --git a/openpype/modules/ftrack/event_handlers_user/action_where_run_ask.py 
b/openpype/modules/default_modules/ftrack/event_handlers_user/action_where_run_ask.py similarity index 97% rename from openpype/modules/ftrack/event_handlers_user/action_where_run_ask.py rename to openpype/modules/default_modules/ftrack/event_handlers_user/action_where_run_ask.py index 2c427cfff7..0d69913996 100644 --- a/openpype/modules/ftrack/event_handlers_user/action_where_run_ask.py +++ b/openpype/modules/default_modules/ftrack/event_handlers_user/action_where_run_ask.py @@ -2,7 +2,7 @@ import platform import socket import getpass -from openpype.modules.ftrack.lib import BaseAction, statics_icon +from openpype_modules.ftrack.lib import BaseAction class ActionWhereIRun(BaseAction): diff --git a/openpype/modules/ftrack/ftrack_module.py b/openpype/modules/default_modules/ftrack/ftrack_module.py similarity index 96% rename from openpype/modules/ftrack/ftrack_module.py rename to openpype/modules/default_modules/ftrack/ftrack_module.py index ee139a500e..1de152535c 100644 --- a/openpype/modules/ftrack/ftrack_module.py +++ b/openpype/modules/default_modules/ftrack/ftrack_module.py @@ -1,35 +1,24 @@ import os import json import collections -from abc import ABCMeta, abstractmethod -import six import openpype -from openpype.modules import ( - PypeModule, +from openpype.modules import OpenPypeModule + +from openpype_interfaces import ( ITrayModule, IPluginPaths, ITimersManager, ILaunchHookPaths, - ISettingsChangeListener + ISettingsChangeListener, + IFtrackEventHandlerPaths ) from openpype.settings import SaveWarningExc FTRACK_MODULE_DIR = os.path.dirname(os.path.abspath(__file__)) -@six.add_metaclass(ABCMeta) -class IFtrackEventHandlerPaths: - """Other modules interface to return paths to ftrack event handlers. - - Expected output is dictionary with "server" and "user" keys. 
- """ - @abstractmethod - def get_event_handler_paths(self): - pass - - class FtrackModule( - PypeModule, + OpenPypeModule, ITrayModule, IPluginPaths, ITimersManager, @@ -242,7 +231,7 @@ class FtrackModule( return import ftrack_api - from openpype.modules.ftrack.lib import get_openpype_attr + from openpype_modules.ftrack.lib import get_openpype_attr try: session = self.create_ftrack_session() diff --git a/openpype/modules/ftrack/ftrack_server/__init__.py b/openpype/modules/default_modules/ftrack/ftrack_server/__init__.py similarity index 100% rename from openpype/modules/ftrack/ftrack_server/__init__.py rename to openpype/modules/default_modules/ftrack/ftrack_server/__init__.py diff --git a/openpype/modules/ftrack/ftrack_server/event_server_cli.py b/openpype/modules/default_modules/ftrack/ftrack_server/event_server_cli.py similarity index 97% rename from openpype/modules/ftrack/ftrack_server/event_server_cli.py rename to openpype/modules/default_modules/ftrack/ftrack_server/event_server_cli.py index 8bba22b475..d8e4d05580 100644 --- a/openpype/modules/ftrack/ftrack_server/event_server_cli.py +++ b/openpype/modules/default_modules/ftrack/ftrack_server/event_server_cli.py @@ -18,17 +18,10 @@ from openpype.lib import ( get_pype_execute_args, OpenPypeMongoConnection ) -from openpype.modules.ftrack import FTRACK_MODULE_DIR -from openpype.modules.ftrack.lib import ( - credentials, - get_ftrack_url_from_settings -) -from openpype.modules.ftrack.ftrack_server.lib import ( - check_ftrack_url, - get_ftrack_event_mongo_info -) - -from openpype.modules.ftrack.ftrack_server import socket_thread +from openpype_modules.ftrack import FTRACK_MODULE_DIR +from openpype_modules.ftrack.lib import credentials +from openpype_modules.ftrack.ftrack_server.lib import check_ftrack_url +from openpype_modules.ftrack.ftrack_server import socket_thread class MongoPermissionsError(Exception): diff --git a/openpype/modules/ftrack/ftrack_server/ftrack_server.py 
b/openpype/modules/default_modules/ftrack/ftrack_server/ftrack_server.py similarity index 100% rename from openpype/modules/ftrack/ftrack_server/ftrack_server.py rename to openpype/modules/default_modules/ftrack/ftrack_server/ftrack_server.py diff --git a/openpype/modules/ftrack/ftrack_server/lib.py b/openpype/modules/default_modules/ftrack/ftrack_server/lib.py similarity index 99% rename from openpype/modules/ftrack/ftrack_server/lib.py rename to openpype/modules/default_modules/ftrack/ftrack_server/lib.py index 88f849e765..e80d6a3a6b 100644 --- a/openpype/modules/ftrack/ftrack_server/lib.py +++ b/openpype/modules/default_modules/ftrack/ftrack_server/lib.py @@ -22,7 +22,7 @@ try: from weakref import WeakMethod except ImportError: from ftrack_api._weakref import WeakMethod -from openpype.modules.ftrack.lib import get_ftrack_event_mongo_info +from openpype_modules.ftrack.lib import get_ftrack_event_mongo_info from openpype.lib import OpenPypeMongoConnection from openpype.api import Logger diff --git a/openpype/modules/ftrack/ftrack_server/socket_thread.py b/openpype/modules/default_modules/ftrack/ftrack_server/socket_thread.py similarity index 100% rename from openpype/modules/ftrack/ftrack_server/socket_thread.py rename to openpype/modules/default_modules/ftrack/ftrack_server/socket_thread.py diff --git a/openpype/modules/default_modules/ftrack/interfaces.py b/openpype/modules/default_modules/ftrack/interfaces.py new file mode 100644 index 0000000000..16ce0d2e62 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/interfaces.py @@ -0,0 +1,12 @@ +from abc import abstractmethod +from openpype.modules import OpenPypeInterface + + +class IFtrackEventHandlerPaths(OpenPypeInterface): + """Other modules interface to return paths to ftrack event handlers. + + Expected output is dictionary with "server" and "user" keys. 
+ """ + @abstractmethod + def get_event_handler_paths(self): + pass diff --git a/openpype/modules/ftrack/launch_hooks/post_ftrack_changes.py b/openpype/modules/default_modules/ftrack/launch_hooks/post_ftrack_changes.py similarity index 100% rename from openpype/modules/ftrack/launch_hooks/post_ftrack_changes.py rename to openpype/modules/default_modules/ftrack/launch_hooks/post_ftrack_changes.py diff --git a/openpype/modules/ftrack/launch_hooks/pre_python2_vendor.py b/openpype/modules/default_modules/ftrack/launch_hooks/pre_python2_vendor.py similarity index 96% rename from openpype/modules/ftrack/launch_hooks/pre_python2_vendor.py rename to openpype/modules/default_modules/ftrack/launch_hooks/pre_python2_vendor.py index d34b6533fb..0dd894bebf 100644 --- a/openpype/modules/ftrack/launch_hooks/pre_python2_vendor.py +++ b/openpype/modules/default_modules/ftrack/launch_hooks/pre_python2_vendor.py @@ -1,6 +1,6 @@ import os from openpype.lib import PreLaunchHook -from openpype.modules.ftrack import FTRACK_MODULE_DIR +from openpype_modules.ftrack import FTRACK_MODULE_DIR class PrePython2Support(PreLaunchHook): diff --git a/openpype/modules/ftrack/lib/__init__.py b/openpype/modules/default_modules/ftrack/lib/__init__.py similarity index 100% rename from openpype/modules/ftrack/lib/__init__.py rename to openpype/modules/default_modules/ftrack/lib/__init__.py diff --git a/openpype/modules/ftrack/lib/avalon_sync.py b/openpype/modules/default_modules/ftrack/lib/avalon_sync.py similarity index 100% rename from openpype/modules/ftrack/lib/avalon_sync.py rename to openpype/modules/default_modules/ftrack/lib/avalon_sync.py diff --git a/openpype/modules/ftrack/lib/constants.py b/openpype/modules/default_modules/ftrack/lib/constants.py similarity index 100% rename from openpype/modules/ftrack/lib/constants.py rename to openpype/modules/default_modules/ftrack/lib/constants.py diff --git a/openpype/modules/ftrack/lib/credentials.py 
b/openpype/modules/default_modules/ftrack/lib/credentials.py similarity index 100% rename from openpype/modules/ftrack/lib/credentials.py rename to openpype/modules/default_modules/ftrack/lib/credentials.py diff --git a/openpype/modules/ftrack/lib/custom_attributes.json b/openpype/modules/default_modules/ftrack/lib/custom_attributes.json similarity index 100% rename from openpype/modules/ftrack/lib/custom_attributes.json rename to openpype/modules/default_modules/ftrack/lib/custom_attributes.json diff --git a/openpype/modules/ftrack/lib/custom_attributes.py b/openpype/modules/default_modules/ftrack/lib/custom_attributes.py similarity index 100% rename from openpype/modules/ftrack/lib/custom_attributes.py rename to openpype/modules/default_modules/ftrack/lib/custom_attributes.py diff --git a/openpype/modules/ftrack/lib/ftrack_action_handler.py b/openpype/modules/default_modules/ftrack/lib/ftrack_action_handler.py similarity index 100% rename from openpype/modules/ftrack/lib/ftrack_action_handler.py rename to openpype/modules/default_modules/ftrack/lib/ftrack_action_handler.py diff --git a/openpype/modules/ftrack/lib/ftrack_base_handler.py b/openpype/modules/default_modules/ftrack/lib/ftrack_base_handler.py similarity index 99% rename from openpype/modules/ftrack/lib/ftrack_base_handler.py rename to openpype/modules/default_modules/ftrack/lib/ftrack_base_handler.py index 7b7ebfb099..7027154d86 100644 --- a/openpype/modules/ftrack/lib/ftrack_base_handler.py +++ b/openpype/modules/default_modules/ftrack/lib/ftrack_base_handler.py @@ -10,7 +10,7 @@ from openpype.api import Logger from openpype.settings import get_project_settings import ftrack_api -from openpype.modules.ftrack import ftrack_server +from openpype_modules.ftrack import ftrack_server class MissingPermision(Exception): diff --git a/openpype/modules/ftrack/lib/ftrack_event_handler.py b/openpype/modules/default_modules/ftrack/lib/ftrack_event_handler.py similarity index 100% rename from 
openpype/modules/ftrack/lib/ftrack_event_handler.py rename to openpype/modules/default_modules/ftrack/lib/ftrack_event_handler.py diff --git a/openpype/modules/ftrack/lib/settings.py b/openpype/modules/default_modules/ftrack/lib/settings.py similarity index 100% rename from openpype/modules/ftrack/lib/settings.py rename to openpype/modules/default_modules/ftrack/lib/settings.py diff --git a/openpype/modules/ftrack/plugins/_unused_publish/integrate_ftrack_comments.py b/openpype/modules/default_modules/ftrack/plugins/_unused_publish/integrate_ftrack_comments.py similarity index 100% rename from openpype/modules/ftrack/plugins/_unused_publish/integrate_ftrack_comments.py rename to openpype/modules/default_modules/ftrack/plugins/_unused_publish/integrate_ftrack_comments.py diff --git a/openpype/modules/ftrack/plugins/publish/collect_ftrack_api.py b/openpype/modules/default_modules/ftrack/plugins/publish/collect_ftrack_api.py similarity index 100% rename from openpype/modules/ftrack/plugins/publish/collect_ftrack_api.py rename to openpype/modules/default_modules/ftrack/plugins/publish/collect_ftrack_api.py diff --git a/openpype/modules/ftrack/plugins/publish/collect_ftrack_family.py b/openpype/modules/default_modules/ftrack/plugins/publish/collect_ftrack_family.py similarity index 96% rename from openpype/modules/ftrack/plugins/publish/collect_ftrack_family.py rename to openpype/modules/default_modules/ftrack/plugins/publish/collect_ftrack_family.py index cc2a5b7d37..70030acad9 100644 --- a/openpype/modules/ftrack/plugins/publish/collect_ftrack_family.py +++ b/openpype/modules/default_modules/ftrack/plugins/publish/collect_ftrack_family.py @@ -68,9 +68,6 @@ class CollectFtrackFamily(pyblish.api.InstancePlugin): instance.data["families"].append("ftrack") else: instance.data["families"] = ["ftrack"] - else: - self.log.debug("Instance '{}' doesn't match any profile".format( - instance.data.get("family"))) def _get_add_ftrack_f_from_addit_filters(self, additional_filters, 
diff --git a/openpype/modules/default_modules/ftrack/plugins/publish/collect_username.py b/openpype/modules/default_modules/ftrack/plugins/publish/collect_username.py new file mode 100644 index 0000000000..39b7433e11 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/plugins/publish/collect_username.py @@ -0,0 +1,49 @@ +"""Loads publishing context from json and continues in publish process. + +Requires: + anatomy -> context["anatomy"] *(pyblish.api.CollectorOrder - 0.11) + +Provides: + context, instances -> All data from previous publishing process. +""" + +import ftrack_api +import os + +import pyblish.api + + +class CollectUsername(pyblish.api.ContextPlugin): + """ + Translates user email to Ftrack username. + + Emails in Ftrack are same as company's Slack, username is needed to + load data to Ftrack. + + Expects "pype.club" user created on Ftrack and FTRACK_BOT_API_KEY env + var set up. + + """ + order = pyblish.api.CollectorOrder - 0.488 + label = "Collect ftrack username" + hosts = ["webpublisher"] + + _context = None + + def process(self, context): + os.environ["FTRACK_API_USER"] = os.environ["FTRACK_BOT_API_USER"] + os.environ["FTRACK_API_KEY"] = os.environ["FTRACK_BOT_API_KEY"] + self.log.info("CollectUsername") + for instance in context: + email = instance.data["user_email"] + self.log.info("email:: {}".format(email)) + session = ftrack_api.Session(auto_connect_event_hub=False) + user = session.query("User where email like '{}'".format( + email)) + + if not user: + raise ValueError( + "Couldnt find user with {} email".format(email)) + + os.environ["FTRACK_API_USER"] = user[0].get("username") + break diff --git a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_api.py b/openpype/modules/default_modules/ftrack/plugins/publish/integrate_ftrack_api.py similarity index 100% rename from openpype/modules/ftrack/plugins/publish/integrate_ftrack_api.py rename to openpype/modules/default_modules/ftrack/plugins/publish/integrate_ftrack_api.py diff 
--git a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_component_overwrite.py b/openpype/modules/default_modules/ftrack/plugins/publish/integrate_ftrack_component_overwrite.py similarity index 100% rename from openpype/modules/ftrack/plugins/publish/integrate_ftrack_component_overwrite.py rename to openpype/modules/default_modules/ftrack/plugins/publish/integrate_ftrack_component_overwrite.py diff --git a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_instances.py b/openpype/modules/default_modules/ftrack/plugins/publish/integrate_ftrack_instances.py similarity index 100% rename from openpype/modules/ftrack/plugins/publish/integrate_ftrack_instances.py rename to openpype/modules/default_modules/ftrack/plugins/publish/integrate_ftrack_instances.py diff --git a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_note.py b/openpype/modules/default_modules/ftrack/plugins/publish/integrate_ftrack_note.py similarity index 100% rename from openpype/modules/ftrack/plugins/publish/integrate_ftrack_note.py rename to openpype/modules/default_modules/ftrack/plugins/publish/integrate_ftrack_note.py diff --git a/openpype/modules/ftrack/plugins/publish/integrate_hierarchy_ftrack.py b/openpype/modules/default_modules/ftrack/plugins/publish/integrate_hierarchy_ftrack.py similarity index 99% rename from openpype/modules/ftrack/plugins/publish/integrate_hierarchy_ftrack.py rename to openpype/modules/default_modules/ftrack/plugins/publish/integrate_hierarchy_ftrack.py index 118a73a636..fbd64d9f70 100644 --- a/openpype/modules/ftrack/plugins/publish/integrate_hierarchy_ftrack.py +++ b/openpype/modules/default_modules/ftrack/plugins/publish/integrate_hierarchy_ftrack.py @@ -4,12 +4,12 @@ import six import pyblish.api from avalon import io -# Copy of constant `openpype.modules.ftrack.lib.avalon_sync.CUST_ATTR_AUTO_SYNC` +# Copy of constant `openpype_modules.ftrack.lib.avalon_sync.CUST_ATTR_AUTO_SYNC` CUST_ATTR_AUTO_SYNC = "avalon_auto_sync" CUST_ATTR_GROUP = 
"openpype" -# Copy of `get_pype_attr` from openpype.modules.ftrack.lib +# Copy of `get_pype_attr` from openpype_modules.ftrack.lib # TODO import from openpype's ftrack module when possible to not break Python 2 def get_pype_attr(session, split_hierarchical=True): custom_attributes = [] diff --git a/openpype/modules/ftrack/plugins/publish/integrate_remove_components.py b/openpype/modules/default_modules/ftrack/plugins/publish/integrate_remove_components.py similarity index 100% rename from openpype/modules/ftrack/plugins/publish/integrate_remove_components.py rename to openpype/modules/default_modules/ftrack/plugins/publish/integrate_remove_components.py diff --git a/openpype/modules/ftrack/plugins/publish/validate_custom_ftrack_attributes.py b/openpype/modules/default_modules/ftrack/plugins/publish/validate_custom_ftrack_attributes.py similarity index 100% rename from openpype/modules/ftrack/plugins/publish/validate_custom_ftrack_attributes.py rename to openpype/modules/default_modules/ftrack/plugins/publish/validate_custom_ftrack_attributes.py diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/arrow b/openpype/modules/default_modules/ftrack/python2_vendor/arrow new file mode 160000 index 0000000000..b746fedf72 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/arrow @@ -0,0 +1 @@ +Subproject commit b746fedf7286c3755a46f07ab72f4c414cd41fc0 diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/backports.functools_lru_cache/backports/__init__.py b/openpype/modules/default_modules/ftrack/python2_vendor/backports.functools_lru_cache/backports/__init__.py new file mode 100644 index 0000000000..69e3be50da --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/backports.functools_lru_cache/backports/__init__.py @@ -0,0 +1 @@ +__path__ = __import__('pkgutil').extend_path(__path__, __name__) diff --git 
a/openpype/modules/ftrack/python2_vendor/backports.functools_lru_cache/backports/configparser/__init__.py b/openpype/modules/default_modules/ftrack/python2_vendor/backports.functools_lru_cache/backports/configparser/__init__.py similarity index 100% rename from openpype/modules/ftrack/python2_vendor/backports.functools_lru_cache/backports/configparser/__init__.py rename to openpype/modules/default_modules/ftrack/python2_vendor/backports.functools_lru_cache/backports/configparser/__init__.py diff --git a/openpype/modules/ftrack/python2_vendor/backports.functools_lru_cache/backports/configparser/helpers.py b/openpype/modules/default_modules/ftrack/python2_vendor/backports.functools_lru_cache/backports/configparser/helpers.py similarity index 100% rename from openpype/modules/ftrack/python2_vendor/backports.functools_lru_cache/backports/configparser/helpers.py rename to openpype/modules/default_modules/ftrack/python2_vendor/backports.functools_lru_cache/backports/configparser/helpers.py diff --git a/openpype/modules/ftrack/python2_vendor/backports.functools_lru_cache/backports/functools_lru_cache.py b/openpype/modules/default_modules/ftrack/python2_vendor/backports.functools_lru_cache/backports/functools_lru_cache.py similarity index 100% rename from openpype/modules/ftrack/python2_vendor/backports.functools_lru_cache/backports/functools_lru_cache.py rename to openpype/modules/default_modules/ftrack/python2_vendor/backports.functools_lru_cache/backports/functools_lru_cache.py diff --git a/openpype/modules/ftrack/python2_vendor/builtins/builtins/__init__.py b/openpype/modules/default_modules/ftrack/python2_vendor/builtins/builtins/__init__.py similarity index 100% rename from openpype/modules/ftrack/python2_vendor/builtins/builtins/__init__.py rename to openpype/modules/default_modules/ftrack/python2_vendor/builtins/builtins/__init__.py diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api 
b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api new file mode 160000 index 0000000000..d277f474ab --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api @@ -0,0 +1 @@ +Subproject commit d277f474ab016e7b53479c36af87cb861d0cc53e diff --git a/openpype/modules/ftrack/scripts/sub_event_processor.py b/openpype/modules/default_modules/ftrack/scripts/sub_event_processor.py similarity index 95% rename from openpype/modules/ftrack/scripts/sub_event_processor.py rename to openpype/modules/default_modules/ftrack/scripts/sub_event_processor.py index 0d94fa7264..51b45eb93b 100644 --- a/openpype/modules/ftrack/scripts/sub_event_processor.py +++ b/openpype/modules/default_modules/ftrack/scripts/sub_event_processor.py @@ -4,8 +4,8 @@ import signal import socket import datetime -from openpype.modules.ftrack.ftrack_server.ftrack_server import FtrackServer -from openpype.modules.ftrack.ftrack_server.lib import ( +from openpype_modules.ftrack.ftrack_server.ftrack_server import FtrackServer +from openpype_modules.ftrack.ftrack_server.lib import ( SocketSession, ProcessEventHub, TOPIC_STATUS_SERVER diff --git a/openpype/modules/ftrack/scripts/sub_event_status.py b/openpype/modules/default_modules/ftrack/scripts/sub_event_status.py similarity index 98% rename from openpype/modules/ftrack/scripts/sub_event_status.py rename to openpype/modules/default_modules/ftrack/scripts/sub_event_status.py index 24b9bfb789..8a2733b635 100644 --- a/openpype/modules/ftrack/scripts/sub_event_status.py +++ b/openpype/modules/default_modules/ftrack/scripts/sub_event_status.py @@ -7,8 +7,8 @@ import socket import datetime import ftrack_api -from openpype.modules.ftrack.ftrack_server.ftrack_server import FtrackServer -from openpype.modules.ftrack.ftrack_server.lib import ( +from openpype_modules.ftrack.ftrack_server.ftrack_server import FtrackServer +from openpype_modules.ftrack.ftrack_server.lib import ( SocketSession, StatusEventHub, 
TOPIC_STATUS_SERVER, diff --git a/openpype/modules/ftrack/scripts/sub_event_storer.py b/openpype/modules/default_modules/ftrack/scripts/sub_event_storer.py similarity index 96% rename from openpype/modules/ftrack/scripts/sub_event_storer.py rename to openpype/modules/default_modules/ftrack/scripts/sub_event_storer.py index 6e2990ef0b..a8649e0ccc 100644 --- a/openpype/modules/ftrack/scripts/sub_event_storer.py +++ b/openpype/modules/default_modules/ftrack/scripts/sub_event_storer.py @@ -6,14 +6,14 @@ import socket import pymongo import ftrack_api -from openpype.modules.ftrack.ftrack_server.ftrack_server import FtrackServer -from openpype.modules.ftrack.ftrack_server.lib import ( +from openpype_modules.ftrack.ftrack_server.ftrack_server import FtrackServer +from openpype_modules.ftrack.ftrack_server.lib import ( SocketSession, StorerEventHub, TOPIC_STATUS_SERVER, TOPIC_STATUS_SERVER_RESULT ) -from openpype.modules.ftrack.lib import get_ftrack_event_mongo_info +from openpype_modules.ftrack.lib import get_ftrack_event_mongo_info from openpype.lib import OpenPypeMongoConnection from openpype.api import Logger diff --git a/openpype/modules/ftrack/scripts/sub_legacy_server.py b/openpype/modules/default_modules/ftrack/scripts/sub_legacy_server.py similarity index 97% rename from openpype/modules/ftrack/scripts/sub_legacy_server.py rename to openpype/modules/default_modules/ftrack/scripts/sub_legacy_server.py index ae6aefa908..e3a623c376 100644 --- a/openpype/modules/ftrack/scripts/sub_legacy_server.py +++ b/openpype/modules/default_modules/ftrack/scripts/sub_legacy_server.py @@ -7,7 +7,7 @@ import threading import ftrack_api from openpype.api import Logger from openpype.modules import ModulesManager -from openpype.modules.ftrack.ftrack_server.ftrack_server import FtrackServer +from openpype_modules.ftrack.ftrack_server.ftrack_server import FtrackServer log = Logger().get_logger("Event Server Legacy") diff --git a/openpype/modules/ftrack/scripts/sub_user_server.py 
b/openpype/modules/default_modules/ftrack/scripts/sub_user_server.py similarity index 93% rename from openpype/modules/ftrack/scripts/sub_user_server.py rename to openpype/modules/default_modules/ftrack/scripts/sub_user_server.py index 971a31b703..a3701a0950 100644 --- a/openpype/modules/ftrack/scripts/sub_user_server.py +++ b/openpype/modules/default_modules/ftrack/scripts/sub_user_server.py @@ -2,8 +2,8 @@ import sys import signal import socket -from openpype.modules.ftrack.ftrack_server.ftrack_server import FtrackServer -from openpype.modules.ftrack.ftrack_server.lib import ( +from openpype_modules.ftrack.ftrack_server.ftrack_server import FtrackServer +from openpype_modules.ftrack.ftrack_server.lib import ( SocketSession, SocketBaseEventHub ) diff --git a/openpype/modules/ftrack/tray/__init__.py b/openpype/modules/default_modules/ftrack/tray/__init__.py similarity index 100% rename from openpype/modules/ftrack/tray/__init__.py rename to openpype/modules/default_modules/ftrack/tray/__init__.py diff --git a/openpype/modules/ftrack/tray/ftrack_tray.py b/openpype/modules/default_modules/ftrack/tray/ftrack_tray.py similarity index 100% rename from openpype/modules/ftrack/tray/ftrack_tray.py rename to openpype/modules/default_modules/ftrack/tray/ftrack_tray.py diff --git a/openpype/modules/ftrack/tray/login_dialog.py b/openpype/modules/default_modules/ftrack/tray/login_dialog.py similarity index 99% rename from openpype/modules/ftrack/tray/login_dialog.py rename to openpype/modules/default_modules/ftrack/tray/login_dialog.py index cc5689bee5..6384621c8e 100644 --- a/openpype/modules/ftrack/tray/login_dialog.py +++ b/openpype/modules/default_modules/ftrack/tray/login_dialog.py @@ -1,7 +1,7 @@ import os import requests from openpype import style -from openpype.modules.ftrack.lib import credentials +from openpype_modules.ftrack.lib import credentials from . 
import login_tools from openpype import resources from Qt import QtCore, QtGui, QtWidgets diff --git a/openpype/modules/ftrack/tray/login_tools.py b/openpype/modules/default_modules/ftrack/tray/login_tools.py similarity index 100% rename from openpype/modules/ftrack/tray/login_tools.py rename to openpype/modules/default_modules/ftrack/tray/login_tools.py diff --git a/openpype/modules/idle_manager/__init__.py b/openpype/modules/default_modules/idle_manager/__init__.py similarity index 54% rename from openpype/modules/idle_manager/__init__.py rename to openpype/modules/default_modules/idle_manager/__init__.py index 651f360c50..9d6e10bf39 100644 --- a/openpype/modules/idle_manager/__init__.py +++ b/openpype/modules/default_modules/idle_manager/__init__.py @@ -1,10 +1,8 @@ from .idle_module import ( - IdleManager, - IIdleManager + IdleManager ) __all__ = ( "IdleManager", - "IIdleManager" ) diff --git a/openpype/modules/idle_manager/idle_module.py b/openpype/modules/default_modules/idle_manager/idle_module.py similarity index 74% rename from openpype/modules/idle_manager/idle_module.py rename to openpype/modules/default_modules/idle_manager/idle_module.py index 5dd5160aa7..1a6d71a961 100644 --- a/openpype/modules/idle_manager/idle_module.py +++ b/openpype/modules/default_modules/idle_manager/idle_module.py @@ -1,38 +1,14 @@ import platform import collections -from abc import ABCMeta, abstractmethod -import six - -from openpype.modules import PypeModule, ITrayService +from openpype.modules import OpenPypeModule +from openpype_interfaces import ( + ITrayService, + IIdleManager +) -@six.add_metaclass(ABCMeta) -class IIdleManager: - """Other modules interface to return callbacks by idle time in seconds. - - Expected output is dictionary with seconds as keys and callback/s - as value, value may be callback of list of callbacks. 
- EXAMPLE: - ``` - { - 60: self.on_minute_idle - } - ``` - """ - idle_manager = None - - @abstractmethod - def callbacks_by_idle_time(self): - pass - - @property - def idle_time(self): - if self.idle_manager: - return self.idle_manager.idle_time - - -class IdleManager(PypeModule, ITrayService): +class IdleManager(OpenPypeModule, ITrayService): """ Measure user's idle time in seconds. Idle time resets on keyboard/mouse input. Is able to emit signals at specific time idle. diff --git a/openpype/modules/idle_manager/idle_threads.py b/openpype/modules/default_modules/idle_manager/idle_threads.py similarity index 100% rename from openpype/modules/idle_manager/idle_threads.py rename to openpype/modules/default_modules/idle_manager/idle_threads.py diff --git a/openpype/modules/default_modules/idle_manager/interfaces.py b/openpype/modules/default_modules/idle_manager/interfaces.py new file mode 100644 index 0000000000..71cd17a64a --- /dev/null +++ b/openpype/modules/default_modules/idle_manager/interfaces.py @@ -0,0 +1,26 @@ +from abc import abstractmethod +from openpype.modules import OpenPypeInterface + + +class IIdleManager(OpenPypeInterface): + """Other modules interface to return callbacks by idle time in seconds. + + Expected output is dictionary with seconds as keys and callback/s + as value, value may be callback of list of callbacks. 
+ EXAMPLE: + ``` + { + 60: self.on_minute_idle + } + ``` + """ + idle_manager = None + + @abstractmethod + def callbacks_by_idle_time(self): + pass + + @property + def idle_time(self): + if self.idle_manager: + return self.idle_manager.idle_time diff --git a/openpype/modules/default_modules/interfaces.py b/openpype/modules/default_modules/interfaces.py new file mode 100644 index 0000000000..a60c5fa606 --- /dev/null +++ b/openpype/modules/default_modules/interfaces.py @@ -0,0 +1,265 @@ +from abc import abstractmethod + +from openpype import resources + +from openpype.modules import OpenPypeInterface + + +class IPluginPaths(OpenPypeInterface): + """Module has plugin paths to return. + + Expected result is dictionary with keys "publish", "create", "load" or + "actions" and values as list or string. + { + "publish": ["path/to/publish_plugins"] + } + """ + # TODO validation of an output + @abstractmethod + def get_plugin_paths(self): + pass + + +class ILaunchHookPaths(OpenPypeInterface): + """Module has launch hook paths to return. + + Expected result is list of paths. + ["path/to/launch_hooks_dir"] + """ + + @abstractmethod + def get_launch_hook_paths(self): + pass + + +class ITrayModule(OpenPypeInterface): + """Module has special procedures when used in Pype Tray. + + IMPORTANT: + The module still must be usable if is not used in tray even if + would do nothing. + """ + tray_initialized = False + _tray_manager = None + + @abstractmethod + def tray_init(self): + """Initialization part of tray implementation. + + Triggered between `initialization` and `connect_with_modules`. + + This is where GUIs should be loaded or tray specific parts should be + prepared. + """ + pass + + @abstractmethod + def tray_menu(self, tray_menu): + """Add module's action to tray menu.""" + pass + + @abstractmethod + def tray_start(self): + """Start procedure in Pype tray.""" + pass + + @abstractmethod + def tray_exit(self): + """Cleanup method which is executed on tray shutdown. 
+ + This is place where all threads should be shut. + """ + pass + + def execute_in_main_thread(self, callback): + """ Pushes callback to the queue or process 'callback' on a main thread + + Some callbacks need to be processed on main thread (menu actions + must be added on main thread or they won't get triggered etc.) + """ + if not self.tray_initialized: + # TODO Called without initialized tray, still main thread needed + try: + callback() + + except Exception: + self.log.warning( + "Failed to execute {} in main thread".format(callback), + exc_info=True) + + return + self.manager.tray_manager.execute_in_main_thread(callback) + + def show_tray_message(self, title, message, icon=None, msecs=None): + """Show tray message. + + Args: + title (str): Title of message. + message (str): Content of message. + icon (QSystemTrayIcon.MessageIcon): Message's icon. Default is + Information icon, may differ by Qt version. + msecs (int): Duration of message visibility in miliseconds. + Default is 10000 msecs, may differ by Qt version. + """ + if self._tray_manager: + self._tray_manager.show_tray_message(title, message, icon, msecs) + + def add_doubleclick_callback(self, callback): + if hasattr(self.manager, "add_doubleclick_callback"): + self.manager.add_doubleclick_callback(self, callback) + + +class ITrayAction(ITrayModule): + """Implementation of Tray action. + + Add action to tray menu which will trigger `on_action_trigger`. + It is expected to be used for showing tools. + + Methods `tray_start`, `tray_exit` and `connect_with_modules` are overriden + as it's not expected that action will use them. But it is possible if + necessary. 
+ """ + + admin_action = False + _admin_submenu = None + + @property + @abstractmethod + def label(self): + """Service label showed in menu.""" + pass + + @abstractmethod + def on_action_trigger(self): + """What happens on actions click.""" + pass + + def tray_menu(self, tray_menu): + from Qt import QtWidgets + + if self.admin_action: + menu = self.admin_submenu(tray_menu) + action = QtWidgets.QAction(self.label, menu) + menu.addAction(action) + if not menu.menuAction().isVisible(): + menu.menuAction().setVisible(True) + + else: + action = QtWidgets.QAction(self.label, tray_menu) + tray_menu.addAction(action) + + action.triggered.connect(self.on_action_trigger) + + def tray_start(self): + return + + def tray_exit(self): + return + + @staticmethod + def admin_submenu(tray_menu): + if ITrayAction._admin_submenu is None: + from Qt import QtWidgets + + admin_submenu = QtWidgets.QMenu("Admin", tray_menu) + admin_submenu.menuAction().setVisible(False) + ITrayAction._admin_submenu = admin_submenu + return ITrayAction._admin_submenu + + +class ITrayService(ITrayModule): + # Module's property + menu_action = None + + # Class properties + _services_submenu = None + _icon_failed = None + _icon_running = None + _icon_idle = None + + @property + @abstractmethod + def label(self): + """Service label showed in menu.""" + pass + + # TODO be able to get any sort of information to show/print + # @abstractmethod + # def get_service_info(self): + # pass + + @staticmethod + def services_submenu(tray_menu): + if ITrayService._services_submenu is None: + from Qt import QtWidgets + + services_submenu = QtWidgets.QMenu("Services", tray_menu) + services_submenu.menuAction().setVisible(False) + ITrayService._services_submenu = services_submenu + return ITrayService._services_submenu + + @staticmethod + def add_service_action(action): + ITrayService._services_submenu.addAction(action) + if not ITrayService._services_submenu.menuAction().isVisible(): + 
ITrayService._services_submenu.menuAction().setVisible(True) + + @staticmethod + def _load_service_icons(): + from Qt import QtGui + + ITrayService._failed_icon = QtGui.QIcon( + resources.get_resource("icons", "circle_red.png") + ) + ITrayService._icon_running = QtGui.QIcon( + resources.get_resource("icons", "circle_green.png") + ) + ITrayService._icon_idle = QtGui.QIcon( + resources.get_resource("icons", "circle_orange.png") + ) + + @staticmethod + def get_icon_running(): + if ITrayService._icon_running is None: + ITrayService._load_service_icons() + return ITrayService._icon_running + + @staticmethod + def get_icon_idle(): + if ITrayService._icon_idle is None: + ITrayService._load_service_icons() + return ITrayService._icon_idle + + @staticmethod + def get_icon_failed(): + if ITrayService._failed_icon is None: + ITrayService._load_service_icons() + return ITrayService._failed_icon + + def tray_menu(self, tray_menu): + from Qt import QtWidgets + + action = QtWidgets.QAction( + self.label, + self.services_submenu(tray_menu) + ) + self.menu_action = action + + self.add_service_action(action) + + self.set_service_running_icon() + + def set_service_running_icon(self): + """Change icon of an QAction to green circle.""" + if self.menu_action: + self.menu_action.setIcon(self.get_icon_running()) + + def set_service_failed_icon(self): + """Change icon of an QAction to red circle.""" + if self.menu_action: + self.menu_action.setIcon(self.get_icon_failed()) + + def set_service_idle_icon(self): + """Change icon of an QAction to orange circle.""" + if self.menu_action: + self.menu_action.setIcon(self.get_icon_idle()) diff --git a/openpype/modules/launcher_action.py b/openpype/modules/default_modules/launcher_action.py similarity index 89% rename from openpype/modules/launcher_action.py rename to openpype/modules/default_modules/launcher_action.py index 0059ff021b..e3252e3842 100644 --- a/openpype/modules/launcher_action.py +++ 
b/openpype/modules/default_modules/launcher_action.py @@ -1,7 +1,8 @@ -from . import PypeModule, ITrayAction +from openpype.modules import OpenPypeModule +from openpype_interfaces import ITrayAction -class LauncherAction(PypeModule, ITrayAction): +class LauncherAction(OpenPypeModule, ITrayAction): label = "Launcher" name = "launcher_tool" diff --git a/openpype/modules/log_viewer/__init__.py b/openpype/modules/default_modules/log_viewer/__init__.py similarity index 100% rename from openpype/modules/log_viewer/__init__.py rename to openpype/modules/default_modules/log_viewer/__init__.py diff --git a/openpype/modules/log_viewer/log_view_module.py b/openpype/modules/default_modules/log_viewer/log_view_module.py similarity index 89% rename from openpype/modules/log_viewer/log_view_module.py rename to openpype/modules/default_modules/log_viewer/log_view_module.py index dde482b04c..bc1a98f4ad 100644 --- a/openpype/modules/log_viewer/log_view_module.py +++ b/openpype/modules/default_modules/log_viewer/log_view_module.py @@ -1,8 +1,9 @@ from openpype.api import Logger -from .. 
import PypeModule, ITrayModule +from openpype.modules import OpenPypeModule +from openpype_interfaces import ITrayModule -class LogViewModule(PypeModule, ITrayModule): +class LogViewModule(OpenPypeModule, ITrayModule): name = "log_viewer" def initialize(self, modules_settings): diff --git a/openpype/modules/sync_server/providers/__init__.py b/openpype/modules/default_modules/log_viewer/tray/__init__.py similarity index 100% rename from openpype/modules/sync_server/providers/__init__.py rename to openpype/modules/default_modules/log_viewer/tray/__init__.py diff --git a/openpype/modules/log_viewer/tray/app.py b/openpype/modules/default_modules/log_viewer/tray/app.py similarity index 100% rename from openpype/modules/log_viewer/tray/app.py rename to openpype/modules/default_modules/log_viewer/tray/app.py diff --git a/openpype/modules/log_viewer/tray/models.py b/openpype/modules/default_modules/log_viewer/tray/models.py similarity index 100% rename from openpype/modules/log_viewer/tray/models.py rename to openpype/modules/default_modules/log_viewer/tray/models.py diff --git a/openpype/modules/log_viewer/tray/widgets.py b/openpype/modules/default_modules/log_viewer/tray/widgets.py similarity index 100% rename from openpype/modules/log_viewer/tray/widgets.py rename to openpype/modules/default_modules/log_viewer/tray/widgets.py diff --git a/openpype/modules/muster/__init__.py b/openpype/modules/default_modules/muster/__init__.py similarity index 100% rename from openpype/modules/muster/__init__.py rename to openpype/modules/default_modules/muster/__init__.py diff --git a/openpype/modules/muster/muster.py b/openpype/modules/default_modules/muster/muster.py similarity index 97% rename from openpype/modules/muster/muster.py rename to openpype/modules/default_modules/muster/muster.py index 1a82926802..a0e72006af 100644 --- a/openpype/modules/muster/muster.py +++ b/openpype/modules/default_modules/muster/muster.py @@ -2,14 +2,14 @@ import os import json import appdirs import 
requests -from .. import ( - PypeModule, +from openpype.modules import OpenPypeModule +from openpype_interfaces import ( ITrayModule, IWebServerRoutes ) -class MusterModule(PypeModule, ITrayModule, IWebServerRoutes): +class MusterModule(OpenPypeModule, ITrayModule, IWebServerRoutes): """ Module handling Muster Render credentials. This will display dialog asking for user credentials for Muster if not already specified. diff --git a/openpype/modules/muster/rest_api.py b/openpype/modules/default_modules/muster/rest_api.py similarity index 100% rename from openpype/modules/muster/rest_api.py rename to openpype/modules/default_modules/muster/rest_api.py diff --git a/openpype/modules/muster/widget_login.py b/openpype/modules/default_modules/muster/widget_login.py similarity index 100% rename from openpype/modules/muster/widget_login.py rename to openpype/modules/default_modules/muster/widget_login.py diff --git a/openpype/modules/project_manager_action.py b/openpype/modules/default_modules/project_manager_action.py similarity index 92% rename from openpype/modules/project_manager_action.py rename to openpype/modules/default_modules/project_manager_action.py index 1387aa258c..c1f984a8cb 100644 --- a/openpype/modules/project_manager_action.py +++ b/openpype/modules/default_modules/project_manager_action.py @@ -1,7 +1,8 @@ -from . 
import PypeModule, ITrayAction +from openpype.modules import OpenPypeModule +from openpype_interfaces import ITrayAction -class ProjectManagerAction(PypeModule, ITrayAction): +class ProjectManagerAction(OpenPypeModule, ITrayAction): label = "Project Manager (beta)" name = "project_manager" admin_action = True diff --git a/openpype/modules/python_console_interpreter/__init__.py b/openpype/modules/default_modules/python_console_interpreter/__init__.py similarity index 100% rename from openpype/modules/python_console_interpreter/__init__.py rename to openpype/modules/default_modules/python_console_interpreter/__init__.py diff --git a/openpype/modules/python_console_interpreter/module.py b/openpype/modules/default_modules/python_console_interpreter/module.py similarity index 83% rename from openpype/modules/python_console_interpreter/module.py rename to openpype/modules/default_modules/python_console_interpreter/module.py index b37f35dfe0..f4df3fb6d8 100644 --- a/openpype/modules/python_console_interpreter/module.py +++ b/openpype/modules/default_modules/python_console_interpreter/module.py @@ -1,7 +1,8 @@ -from .. 
import PypeModule, ITrayAction +from openpype.modules import OpenPypeModule +from openpype_interfaces import ITrayAction -class PythonInterpreterAction(PypeModule, ITrayAction): +class PythonInterpreterAction(OpenPypeModule, ITrayAction): label = "Console" name = "python_interpreter" admin_action = True @@ -25,7 +26,7 @@ class PythonInterpreterAction(PypeModule, ITrayAction): if self._interpreter_window: return - from openpype.modules.python_console_interpreter.window import ( + from openpype_modules.python_console_interpreter.window import ( PythonInterpreterWidget ) diff --git a/openpype/modules/python_console_interpreter/window/__init__.py b/openpype/modules/default_modules/python_console_interpreter/window/__init__.py similarity index 100% rename from openpype/modules/python_console_interpreter/window/__init__.py rename to openpype/modules/default_modules/python_console_interpreter/window/__init__.py diff --git a/openpype/modules/python_console_interpreter/window/widgets.py b/openpype/modules/default_modules/python_console_interpreter/window/widgets.py similarity index 100% rename from openpype/modules/python_console_interpreter/window/widgets.py rename to openpype/modules/default_modules/python_console_interpreter/window/widgets.py diff --git a/openpype/modules/default_modules/settings_module/__init__.py b/openpype/modules/default_modules/settings_module/__init__.py new file mode 100644 index 0000000000..95510eba9d --- /dev/null +++ b/openpype/modules/default_modules/settings_module/__init__.py @@ -0,0 +1,9 @@ +from .settings_action import ( + LocalSettingsAction, + SettingsAction +) + +__all__ = ( + "LocalSettingsAction", + "SettingsAction" +) diff --git a/openpype/modules/default_modules/settings_module/interfaces.py b/openpype/modules/default_modules/settings_module/interfaces.py new file mode 100644 index 0000000000..42db395649 --- /dev/null +++ b/openpype/modules/default_modules/settings_module/interfaces.py @@ -0,0 +1,30 @@ +from abc import 
abstractmethod +from openpype.modules import OpenPypeInterface + + +class ISettingsChangeListener(OpenPypeInterface): + """Listener notified when settings are saved. + + Implementations react to saves of system settings, project + settings and project anatomy. Each callback receives the old + value, the new value, the computed changes and metadata of the + new value; project related callbacks also receive the project + name. + """ + @abstractmethod + def on_system_settings_save( + self, old_value, new_value, changes, new_value_metadata + ): + pass + + @abstractmethod + def on_project_settings_save( + self, old_value, new_value, changes, project_name, new_value_metadata + ): + pass + + @abstractmethod + def on_project_anatomy_save( + self, old_value, new_value, changes, project_name, new_value_metadata + ): + pass diff --git a/openpype/modules/settings_action.py b/openpype/modules/default_modules/settings_module/settings_action.py similarity index 80% rename from openpype/modules/settings_action.py rename to openpype/modules/default_modules/settings_module/settings_action.py index 9db4a252bc..7140c57bab 100644 --- a/openpype/modules/settings_action.py +++ b/openpype/modules/default_modules/settings_module/settings_action.py @@ -1,40 +1,8 @@ -from abc import ABCMeta, abstractmethod - -import six - -from . import PypeModule, ITrayAction +from openpype.modules import OpenPypeModule +from openpype_interfaces import ITrayAction -@six.add_metaclass(ABCMeta) -class ISettingsChangeListener: - """Module has plugin paths to return. - - Expected result is dictionary with keys "publish", "create", "load" or - "actions" and values as list or string.
- { - "publish": ["path/to/publish_plugins"] - } - """ - @abstractmethod - def on_system_settings_save( - self, old_value, new_value, changes, new_value_metadata - ): - pass - - @abstractmethod - def on_project_settings_save( - self, old_value, new_value, changes, project_name, new_value_metadata - ): - pass - - @abstractmethod - def on_project_anatomy_save( - self, old_value, new_value, changes, project_name, new_value_metadata - ): - pass - - -class SettingsAction(PypeModule, ITrayAction): +class SettingsAction(OpenPypeModule, ITrayAction): """Action to show Setttings tool.""" name = "settings" label = "Studio Settings" @@ -103,7 +71,7 @@ class SettingsAction(PypeModule, ITrayAction): self.settings_window.reset() -class LocalSettingsAction(PypeModule, ITrayAction): +class LocalSettingsAction(OpenPypeModule, ITrayAction): """Action to show Setttings tool.""" name = "local_settings" label = "Settings" diff --git a/openpype/modules/slack/README.md b/openpype/modules/default_modules/slack/README.md similarity index 100% rename from openpype/modules/slack/README.md rename to openpype/modules/default_modules/slack/README.md diff --git a/openpype/modules/slack/__init__.py b/openpype/modules/default_modules/slack/__init__.py similarity index 100% rename from openpype/modules/slack/__init__.py rename to openpype/modules/default_modules/slack/__init__.py diff --git a/openpype/modules/slack/launch_hooks/pre_python2_vendor.py b/openpype/modules/default_modules/slack/launch_hooks/pre_python2_vendor.py similarity index 95% rename from openpype/modules/slack/launch_hooks/pre_python2_vendor.py rename to openpype/modules/default_modules/slack/launch_hooks/pre_python2_vendor.py index a2c1f8a9e0..0f4bc22a34 100644 --- a/openpype/modules/slack/launch_hooks/pre_python2_vendor.py +++ b/openpype/modules/default_modules/slack/launch_hooks/pre_python2_vendor.py @@ -1,6 +1,6 @@ import os from openpype.lib import PreLaunchHook -from openpype.modules.slack import SLACK_MODULE_DIR +from 
openpype_modules.slack import SLACK_MODULE_DIR class PrePython2Support(PreLaunchHook): diff --git a/openpype/modules/slack/manifest.yml b/openpype/modules/default_modules/slack/manifest.yml similarity index 100% rename from openpype/modules/slack/manifest.yml rename to openpype/modules/default_modules/slack/manifest.yml diff --git a/openpype/modules/slack/plugins/publish/collect_slack_family.py b/openpype/modules/default_modules/slack/plugins/publish/collect_slack_family.py similarity index 100% rename from openpype/modules/slack/plugins/publish/collect_slack_family.py rename to openpype/modules/default_modules/slack/plugins/publish/collect_slack_family.py diff --git a/openpype/modules/slack/plugins/publish/integrate_slack_api.py b/openpype/modules/default_modules/slack/plugins/publish/integrate_slack_api.py similarity index 100% rename from openpype/modules/slack/plugins/publish/integrate_slack_api.py rename to openpype/modules/default_modules/slack/plugins/publish/integrate_slack_api.py diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/.appveyor.yml b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/.appveyor.yml similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/.appveyor.yml rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/.appveyor.yml diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/.coveragerc b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/.coveragerc similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/.coveragerc rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/.coveragerc diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/.flake8 b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/.flake8 similarity index 100% rename from 
openpype/modules/slack/python2_vendor/python-slack-sdk-1/.flake8 rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/.flake8 diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/.github/contributing.md b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/.github/contributing.md similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/.github/contributing.md rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/.github/contributing.md diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/.github/issue_template.md b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/.github/issue_template.md similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/.github/issue_template.md rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/.github/issue_template.md diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/.github/maintainers_guide.md b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/.github/maintainers_guide.md similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/.github/maintainers_guide.md rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/.github/maintainers_guide.md diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/.github/pull_request_template.md b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/.github/pull_request_template.md similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/.github/pull_request_template.md rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/.github/pull_request_template.md diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/.gitignore 
b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/.gitignore similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/.gitignore rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/.gitignore diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/.travis.yml b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/.travis.yml similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/.travis.yml rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/.travis.yml diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/LICENSE b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/LICENSE similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/LICENSE rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/LICENSE diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/MANIFEST.in b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/MANIFEST.in similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/MANIFEST.in rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/MANIFEST.in diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/README.rst b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/README.rst similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/README.rst rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/README.rst diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs-src/.gitignore b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs-src/.gitignore similarity index 100% rename from 
openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs-src/.gitignore rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs-src/.gitignore diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs-src/Makefile b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs-src/Makefile similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs-src/Makefile rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs-src/Makefile diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs-src/_themes/slack/conf.py b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs-src/_themes/slack/conf.py similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs-src/_themes/slack/conf.py rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs-src/_themes/slack/conf.py diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs-src/_themes/slack/layout.html b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs-src/_themes/slack/layout.html similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs-src/_themes/slack/layout.html rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs-src/_themes/slack/layout.html diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs-src/_themes/slack/localtoc.html b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs-src/_themes/slack/localtoc.html similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs-src/_themes/slack/localtoc.html rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs-src/_themes/slack/localtoc.html diff --git 
a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs-src/_themes/slack/relations.html b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs-src/_themes/slack/relations.html similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs-src/_themes/slack/relations.html rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs-src/_themes/slack/relations.html diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs-src/_themes/slack/sidebar.html b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs-src/_themes/slack/sidebar.html similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs-src/_themes/slack/sidebar.html rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs-src/_themes/slack/sidebar.html diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs-src/_themes/slack/static/default.css_t b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs-src/_themes/slack/static/default.css_t similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs-src/_themes/slack/static/default.css_t rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs-src/_themes/slack/static/default.css_t diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs-src/_themes/slack/static/docs.css_t b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs-src/_themes/slack/static/docs.css_t similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs-src/_themes/slack/static/docs.css_t rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs-src/_themes/slack/static/docs.css_t diff --git 
a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs-src/_themes/slack/static/pygments.css_t b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs-src/_themes/slack/static/pygments.css_t similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs-src/_themes/slack/static/pygments.css_t rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs-src/_themes/slack/static/pygments.css_t diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs-src/_themes/slack/theme.conf b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs-src/_themes/slack/theme.conf similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs-src/_themes/slack/theme.conf rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs-src/_themes/slack/theme.conf diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs-src/about.rst b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs-src/about.rst similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs-src/about.rst rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs-src/about.rst diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs-src/auth.rst b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs-src/auth.rst similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs-src/auth.rst rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs-src/auth.rst diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs-src/basic_usage.rst b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs-src/basic_usage.rst similarity index 100% rename from 
openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs-src/basic_usage.rst rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs-src/basic_usage.rst diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs-src/changelog.rst b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs-src/changelog.rst similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs-src/changelog.rst rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs-src/changelog.rst diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs-src/conf.py b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs-src/conf.py similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs-src/conf.py rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs-src/conf.py diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs-src/conversations.rst b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs-src/conversations.rst similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs-src/conversations.rst rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs-src/conversations.rst diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs-src/faq.rst b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs-src/faq.rst similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs-src/faq.rst rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs-src/faq.rst diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs-src/index.rst b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs-src/index.rst similarity 
index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs-src/index.rst rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs-src/index.rst diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs-src/make.bat b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs-src/make.bat similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs-src/make.bat rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs-src/make.bat diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs-src/metadata.rst b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs-src/metadata.rst similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs-src/metadata.rst rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs-src/metadata.rst diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs-src/real_time_messaging.rst b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs-src/real_time_messaging.rst similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs-src/real_time_messaging.rst rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs-src/real_time_messaging.rst diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs.sh b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs.sh similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs.sh rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs.sh diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/.buildinfo b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/.buildinfo similarity index 100% 
rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/.buildinfo rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/.buildinfo diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/.nojekyll b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/.nojekyll similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/.nojekyll rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/.nojekyll diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/ajax-loader.gif b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/ajax-loader.gif similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/ajax-loader.gif rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/ajax-loader.gif diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/basic.css b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/basic.css similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/basic.css rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/basic.css diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/classic.css b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/classic.css similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/classic.css rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/classic.css diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/comment-bright.png 
b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/comment-bright.png similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/comment-bright.png rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/comment-bright.png diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/comment-close.png b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/comment-close.png similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/comment-close.png rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/comment-close.png diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/comment.png b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/comment.png similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/comment.png rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/comment.png diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/default.css b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/default.css similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/default.css rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/default.css diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/docs.css b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/docs.css similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/docs.css rename to 
openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/docs.css diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/doctools.js b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/doctools.js similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/doctools.js rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/doctools.js diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/documentation_options.js b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/documentation_options.js similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/documentation_options.js rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/documentation_options.js diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/down-pressed.png b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/down-pressed.png similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/down-pressed.png rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/down-pressed.png diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/down.png b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/down.png similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/down.png rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/down.png diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/file.png 
b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/file.png similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/file.png rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/file.png diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/jquery-3.2.1.js b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/jquery-3.2.1.js similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/jquery-3.2.1.js rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/jquery-3.2.1.js diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/jquery.js b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/jquery.js similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/jquery.js rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/jquery.js diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/language_data.js b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/language_data.js similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/language_data.js rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/language_data.js diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/minus.png b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/minus.png similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/minus.png rename to 
openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/minus.png diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/plus.png b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/plus.png similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/plus.png rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/plus.png diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/pygments.css b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/pygments.css similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/pygments.css rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/pygments.css diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/searchtools.js b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/searchtools.js similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/searchtools.js rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/searchtools.js diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/sidebar.js b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/sidebar.js similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/sidebar.js rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/sidebar.js diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/underscore-1.3.1.js b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/underscore-1.3.1.js similarity index 
100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/underscore-1.3.1.js rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/underscore-1.3.1.js diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/underscore.js b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/underscore.js similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/underscore.js rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/underscore.js diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/up-pressed.png b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/up-pressed.png similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/up-pressed.png rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/up-pressed.png diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/up.png b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/up.png similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/up.png rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/up.png diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/websupport.js b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/websupport.js similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/websupport.js rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/websupport.js diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/about.html 
b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/about.html similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/about.html rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/about.html diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/auth.html b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/auth.html similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/auth.html rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/auth.html diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/basic_usage.html b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/basic_usage.html similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/basic_usage.html rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/basic_usage.html diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/changelog.html b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/changelog.html similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/changelog.html rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/changelog.html diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/conversations.html b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/conversations.html similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/conversations.html rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/conversations.html diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/faq.html 
b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/faq.html similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/faq.html rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/faq.html diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/genindex.html b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/genindex.html similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/genindex.html rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/genindex.html diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/index.html b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/index.html similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/index.html rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/index.html diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/metadata.html b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/metadata.html similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/metadata.html rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/metadata.html diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/objects.inv b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/objects.inv similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/objects.inv rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/objects.inv diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/real_time_messaging.html 
b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/real_time_messaging.html similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/real_time_messaging.html rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/real_time_messaging.html diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/search.html b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/search.html similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/search.html rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/search.html diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/searchindex.js b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/searchindex.js similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/searchindex.js rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/searchindex.js diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/requirements.txt b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/requirements.txt similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/requirements.txt rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/requirements.txt diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/setup.cfg b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/setup.cfg similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/setup.cfg rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/setup.cfg diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/setup.py 
b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/setup.py similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/setup.py rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/setup.py diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/slackclient/__init__.py b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/slackclient/__init__.py similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/slackclient/__init__.py rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/slackclient/__init__.py diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/slackclient/channel.py b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/slackclient/channel.py similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/slackclient/channel.py rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/slackclient/channel.py diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/slackclient/client.py b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/slackclient/client.py similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/slackclient/client.py rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/slackclient/client.py diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/slackclient/exceptions.py b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/slackclient/exceptions.py similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/slackclient/exceptions.py rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/slackclient/exceptions.py diff --git 
a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/slackclient/im.py b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/slackclient/im.py similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/slackclient/im.py rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/slackclient/im.py diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/slackclient/server.py b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/slackclient/server.py similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/slackclient/server.py rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/slackclient/server.py diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/slackclient/slackrequest.py b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/slackclient/slackrequest.py similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/slackclient/slackrequest.py rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/slackclient/slackrequest.py diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/slackclient/user.py b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/slackclient/user.py similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/slackclient/user.py rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/slackclient/user.py diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/slackclient/util.py b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/slackclient/util.py similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/slackclient/util.py rename to 
openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/slackclient/util.py diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/slackclient/version.py b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/slackclient/version.py similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/slackclient/version.py rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/slackclient/version.py diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/test_requirements.txt b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/test_requirements.txt similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/test_requirements.txt rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/test_requirements.txt diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/tests/conftest.py b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/tests/conftest.py similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/tests/conftest.py rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/tests/conftest.py diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/tests/data/channel.created.json b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/tests/data/channel.created.json similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/tests/data/channel.created.json rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/tests/data/channel.created.json diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/tests/data/im.created.json b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/tests/data/im.created.json similarity index 100% rename from 
openpype/modules/slack/python2_vendor/python-slack-sdk-1/tests/data/im.created.json rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/tests/data/im.created.json diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/tests/data/rtm.start.json b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/tests/data/rtm.start.json similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/tests/data/rtm.start.json rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/tests/data/rtm.start.json diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/tests/data/slack_logo.png b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/tests/data/slack_logo.png similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/tests/data/slack_logo.png rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/tests/data/slack_logo.png diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/tests/test_channel.py b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/tests/test_channel.py similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/tests/test_channel.py rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/tests/test_channel.py diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/tests/test_server.py b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/tests/test_server.py similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/tests/test_server.py rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/tests/test_server.py diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/tests/test_slackclient.py 
b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/tests/test_slackclient.py similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/tests/test_slackclient.py rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/tests/test_slackclient.py diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/tests/test_slackrequest.py b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/tests/test_slackrequest.py similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/tests/test_slackrequest.py rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/tests/test_slackrequest.py diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/tox.ini b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/tox.ini similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/tox.ini rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/tox.ini diff --git a/openpype/modules/slack/slack_module.py b/openpype/modules/default_modules/slack/slack_module.py similarity index 80% rename from openpype/modules/slack/slack_module.py rename to openpype/modules/default_modules/slack/slack_module.py index 9dd5a3d02b..e3f7b4ad19 100644 --- a/openpype/modules/slack/slack_module.py +++ b/openpype/modules/default_modules/slack/slack_module.py @@ -1,11 +1,14 @@ import os -from openpype.modules import ( - PypeModule, IPluginPaths, ILaunchHookPaths) +from openpype.modules import OpenPypeModule +from openpype_interfaces import ( + IPluginPaths, + ILaunchHookPaths +) SLACK_MODULE_DIR = os.path.dirname(os.path.abspath(__file__)) -class SlackIntegrationModule(PypeModule, IPluginPaths, ILaunchHookPaths): +class SlackIntegrationModule(OpenPypeModule, IPluginPaths, ILaunchHookPaths): """Allows sending notification to Slack channels during 
publishing.""" name = "slack" diff --git a/openpype/modules/standalonepublish_action.py b/openpype/modules/default_modules/standalonepublish_action.py similarity index 90% rename from openpype/modules/standalonepublish_action.py rename to openpype/modules/default_modules/standalonepublish_action.py index 4f87f9704c..9321a415a9 100644 --- a/openpype/modules/standalonepublish_action.py +++ b/openpype/modules/default_modules/standalonepublish_action.py @@ -2,10 +2,11 @@ import os import platform import subprocess from openpype.lib import get_pype_execute_args -from . import PypeModule, ITrayAction +from openpype.modules import OpenPypeModule +from openpype_interfaces import ITrayAction -class StandAlonePublishAction(PypeModule, ITrayAction): +class StandAlonePublishAction(OpenPypeModule, ITrayAction): label = "Publish" name = "standalonepublish_tool" diff --git a/openpype/modules/sync_server/README.md b/openpype/modules/default_modules/sync_server/README.md similarity index 100% rename from openpype/modules/sync_server/README.md rename to openpype/modules/default_modules/sync_server/README.md diff --git a/openpype/modules/default_modules/sync_server/__init__.py b/openpype/modules/default_modules/sync_server/__init__.py new file mode 100644 index 0000000000..430ab53c91 --- /dev/null +++ b/openpype/modules/default_modules/sync_server/__init__.py @@ -0,0 +1,6 @@ +from .sync_server_module import SyncServerModule + + +__all__ = ( + "SyncServerModule", +) diff --git a/openpype/modules/default_modules/sync_server/providers/__init__.py b/openpype/modules/default_modules/sync_server/providers/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/openpype/modules/sync_server/providers/abstract_provider.py b/openpype/modules/default_modules/sync_server/providers/abstract_provider.py similarity index 100% rename from openpype/modules/sync_server/providers/abstract_provider.py rename to 
openpype/modules/default_modules/sync_server/providers/abstract_provider.py diff --git a/openpype/modules/sync_server/providers/gdrive.py b/openpype/modules/default_modules/sync_server/providers/gdrive.py similarity index 100% rename from openpype/modules/sync_server/providers/gdrive.py rename to openpype/modules/default_modules/sync_server/providers/gdrive.py diff --git a/openpype/modules/sync_server/providers/lib.py b/openpype/modules/default_modules/sync_server/providers/lib.py similarity index 100% rename from openpype/modules/sync_server/providers/lib.py rename to openpype/modules/default_modules/sync_server/providers/lib.py diff --git a/openpype/modules/sync_server/providers/local_drive.py b/openpype/modules/default_modules/sync_server/providers/local_drive.py similarity index 100% rename from openpype/modules/sync_server/providers/local_drive.py rename to openpype/modules/default_modules/sync_server/providers/local_drive.py diff --git a/openpype/modules/sync_server/providers/resources/folder.png b/openpype/modules/default_modules/sync_server/providers/resources/folder.png similarity index 100% rename from openpype/modules/sync_server/providers/resources/folder.png rename to openpype/modules/default_modules/sync_server/providers/resources/folder.png diff --git a/openpype/modules/sync_server/providers/resources/gdrive.png b/openpype/modules/default_modules/sync_server/providers/resources/gdrive.png similarity index 100% rename from openpype/modules/sync_server/providers/resources/gdrive.png rename to openpype/modules/default_modules/sync_server/providers/resources/gdrive.png diff --git a/openpype/modules/sync_server/providers/resources/local_drive.png b/openpype/modules/default_modules/sync_server/providers/resources/local_drive.png similarity index 100% rename from openpype/modules/sync_server/providers/resources/local_drive.png rename to openpype/modules/default_modules/sync_server/providers/resources/local_drive.png diff --git 
a/openpype/modules/sync_server/providers/resources/studio.png b/openpype/modules/default_modules/sync_server/providers/resources/studio.png similarity index 100% rename from openpype/modules/sync_server/providers/resources/studio.png rename to openpype/modules/default_modules/sync_server/providers/resources/studio.png diff --git a/openpype/modules/sync_server/resources/paused.png b/openpype/modules/default_modules/sync_server/resources/paused.png similarity index 100% rename from openpype/modules/sync_server/resources/paused.png rename to openpype/modules/default_modules/sync_server/resources/paused.png diff --git a/openpype/modules/sync_server/resources/synced.png b/openpype/modules/default_modules/sync_server/resources/synced.png similarity index 100% rename from openpype/modules/sync_server/resources/synced.png rename to openpype/modules/default_modules/sync_server/resources/synced.png diff --git a/openpype/modules/sync_server/sync_server.py b/openpype/modules/default_modules/sync_server/sync_server.py similarity index 100% rename from openpype/modules/sync_server/sync_server.py rename to openpype/modules/default_modules/sync_server/sync_server.py diff --git a/openpype/modules/sync_server/sync_server_module.py b/openpype/modules/default_modules/sync_server/sync_server_module.py similarity index 99% rename from openpype/modules/sync_server/sync_server_module.py rename to openpype/modules/default_modules/sync_server/sync_server_module.py index 15de4b12e9..e65a410551 100644 --- a/openpype/modules/sync_server/sync_server_module.py +++ b/openpype/modules/default_modules/sync_server/sync_server_module.py @@ -7,7 +7,8 @@ import copy from avalon.api import AvalonMongoDB -from .. 
import PypeModule, ITrayModule +from openpype.modules import OpenPypeModule +from openpype_interfaces import ITrayModule from openpype.api import ( Anatomy, get_project_settings, @@ -28,7 +29,7 @@ from .utils import time_function, SyncStatus, EditableScopes log = PypeLogger().get_logger("SyncServer") -class SyncServerModule(PypeModule, ITrayModule): +class SyncServerModule(OpenPypeModule, ITrayModule): """ Synchronization server that is syncing published files from local to any of implemented providers (like GDrive, S3 etc.) diff --git a/openpype/modules/sync_server/tray/app.py b/openpype/modules/default_modules/sync_server/tray/app.py similarity index 98% rename from openpype/modules/sync_server/tray/app.py rename to openpype/modules/default_modules/sync_server/tray/app.py index dd2b4be749..106076d81c 100644 --- a/openpype/modules/sync_server/tray/app.py +++ b/openpype/modules/default_modules/sync_server/tray/app.py @@ -5,7 +5,7 @@ from openpype.tools.settings import style from openpype.lib import PypeLogger from openpype import resources -from openpype.modules.sync_server.tray.widgets import ( +from .widgets import ( SyncProjectListWidget, SyncRepresentationSummaryWidget ) diff --git a/openpype/modules/sync_server/tray/delegates.py b/openpype/modules/default_modules/sync_server/tray/delegates.py similarity index 98% rename from openpype/modules/sync_server/tray/delegates.py rename to openpype/modules/default_modules/sync_server/tray/delegates.py index 9316ec2c3e..461b9fffb3 100644 --- a/openpype/modules/sync_server/tray/delegates.py +++ b/openpype/modules/default_modules/sync_server/tray/delegates.py @@ -2,7 +2,7 @@ import os from Qt import QtCore, QtWidgets, QtGui from openpype.lib import PypeLogger -from openpype.modules.sync_server.tray import lib +from . 
import lib log = PypeLogger().get_logger("SyncServer") diff --git a/openpype/modules/sync_server/tray/lib.py b/openpype/modules/default_modules/sync_server/tray/lib.py similarity index 100% rename from openpype/modules/sync_server/tray/lib.py rename to openpype/modules/default_modules/sync_server/tray/lib.py diff --git a/openpype/modules/sync_server/tray/models.py b/openpype/modules/default_modules/sync_server/tray/models.py similarity index 99% rename from openpype/modules/sync_server/tray/models.py rename to openpype/modules/default_modules/sync_server/tray/models.py index efef039b8b..8c86d3b98f 100644 --- a/openpype/modules/sync_server/tray/models.py +++ b/openpype/modules/default_modules/sync_server/tray/models.py @@ -11,7 +11,7 @@ from avalon.vendor import qtawesome from openpype.lib import PypeLogger from openpype.api import get_local_site_id -from openpype.modules.sync_server.tray import lib +from . import lib log = PypeLogger().get_logger("SyncServer") diff --git a/openpype/modules/sync_server/tray/widgets.py b/openpype/modules/default_modules/sync_server/tray/widgets.py similarity index 99% rename from openpype/modules/sync_server/tray/widgets.py rename to openpype/modules/default_modules/sync_server/tray/widgets.py index d38416fbce..c9160733a0 100644 --- a/openpype/modules/sync_server/tray/widgets.py +++ b/openpype/modules/default_modules/sync_server/tray/widgets.py @@ -17,13 +17,13 @@ from openpype.lib import PypeLogger from avalon.tools.delegates import pretty_timestamp from avalon.vendor import qtawesome -from openpype.modules.sync_server.tray.models import ( +from .models import ( SyncRepresentationSummaryModel, SyncRepresentationDetailModel ) -from openpype.modules.sync_server.tray import lib -from openpype.modules.sync_server.tray import delegates +from . import lib +from . 
import delegates log = PypeLogger().get_logger("SyncServer") @@ -187,7 +187,7 @@ class _SyncRepresentationWidget(QtWidgets.QWidget): detail_window = SyncServerDetailWindow( self.sync_server, _id, self.model.project, parent=self) detail_window.exec() - + def _on_context_menu(self, point): """ Shows menu with loader actions on Right-click. diff --git a/openpype/modules/sync_server/utils.py b/openpype/modules/default_modules/sync_server/utils.py similarity index 100% rename from openpype/modules/sync_server/utils.py rename to openpype/modules/default_modules/sync_server/utils.py diff --git a/openpype/modules/timers_manager/__init__.py b/openpype/modules/default_modules/timers_manager/__init__.py similarity index 51% rename from openpype/modules/timers_manager/__init__.py rename to openpype/modules/default_modules/timers_manager/__init__.py index 1b565cc59a..5d7a4166d3 100644 --- a/openpype/modules/timers_manager/__init__.py +++ b/openpype/modules/default_modules/timers_manager/__init__.py @@ -1,9 +1,7 @@ from .timers_manager import ( - ITimersManager, TimersManager ) __all__ = ( - "ITimersManager", - "TimersManager" + "TimersManager", ) diff --git a/openpype/modules/default_modules/timers_manager/interfaces.py b/openpype/modules/default_modules/timers_manager/interfaces.py new file mode 100644 index 0000000000..179013cffe --- /dev/null +++ b/openpype/modules/default_modules/timers_manager/interfaces.py @@ -0,0 +1,26 @@ +from abc import abstractmethod +from openpype.modules import OpenPypeInterface + + +class ITimersManager(OpenPypeInterface): + timer_manager_module = None + + @abstractmethod + def stop_timer(self): + pass + + @abstractmethod + def start_timer(self, data): + pass + + def timer_started(self, data): + if not self.timer_manager_module: + return + + self.timer_manager_module.timer_started(self.id, data) + + def timer_stopped(self): + if not self.timer_manager_module: + return + + self.timer_manager_module.timer_stopped(self.id) diff --git 
a/openpype/modules/timers_manager/rest_api.py b/openpype/modules/default_modules/timers_manager/rest_api.py similarity index 100% rename from openpype/modules/timers_manager/rest_api.py rename to openpype/modules/default_modules/timers_manager/rest_api.py diff --git a/openpype/modules/timers_manager/timers_manager.py b/openpype/modules/default_modules/timers_manager/timers_manager.py similarity index 90% rename from openpype/modules/timers_manager/timers_manager.py rename to openpype/modules/default_modules/timers_manager/timers_manager.py index 92edd5aeaa..80f448095f 100644 --- a/openpype/modules/timers_manager/timers_manager.py +++ b/openpype/modules/default_modules/timers_manager/timers_manager.py @@ -1,37 +1,18 @@ import os import collections -from abc import ABCMeta, abstractmethod -import six -from .. import PypeModule, ITrayService, IIdleManager, IWebServerRoutes +from openpype.modules import OpenPypeModule +from openpype_interfaces import ( + ITimersManager, + ITrayService, + IIdleManager, + IWebServerRoutes +) from avalon.api import AvalonMongoDB -@six.add_metaclass(ABCMeta) -class ITimersManager: - timer_manager_module = None - - @abstractmethod - def stop_timer(self): - pass - - @abstractmethod - def start_timer(self, data): - pass - - def timer_started(self, data): - if not self.timer_manager_module: - return - - self.timer_manager_module.timer_started(self.id, data) - - def timer_stopped(self): - if not self.timer_manager_module: - return - - self.timer_manager_module.timer_stopped(self.id) - - -class TimersManager(PypeModule, ITrayService, IIdleManager, IWebServerRoutes): +class TimersManager( + OpenPypeModule, ITrayService, IIdleManager, IWebServerRoutes +): """ Handles about Timers. Should be able to start/stop all timers at once. 
diff --git a/openpype/modules/timers_manager/widget_user_idle.py b/openpype/modules/default_modules/timers_manager/widget_user_idle.py similarity index 100% rename from openpype/modules/timers_manager/widget_user_idle.py rename to openpype/modules/default_modules/timers_manager/widget_user_idle.py diff --git a/openpype/modules/webserver/__init__.py b/openpype/modules/default_modules/webserver/__init__.py similarity index 52% rename from openpype/modules/webserver/__init__.py rename to openpype/modules/default_modules/webserver/__init__.py index defd115e57..899b97d6d4 100644 --- a/openpype/modules/webserver/__init__.py +++ b/openpype/modules/default_modules/webserver/__init__.py @@ -1,10 +1,8 @@ from .webserver_module import ( - WebServerModule, - IWebServerRoutes + WebServerModule ) __all__ = ( "WebServerModule", - "IWebServerRoutes" ) diff --git a/openpype/modules/webserver/base_routes.py b/openpype/modules/default_modules/webserver/base_routes.py similarity index 100% rename from openpype/modules/webserver/base_routes.py rename to openpype/modules/default_modules/webserver/base_routes.py diff --git a/openpype/modules/webserver/host_console_listener.py b/openpype/modules/default_modules/webserver/host_console_listener.py similarity index 99% rename from openpype/modules/webserver/host_console_listener.py rename to openpype/modules/default_modules/webserver/host_console_listener.py index 01a8af643e..bcf4cadf6a 100644 --- a/openpype/modules/webserver/host_console_listener.py +++ b/openpype/modules/default_modules/webserver/host_console_listener.py @@ -5,7 +5,7 @@ import logging from concurrent.futures import CancelledError from Qt import QtWidgets -from openpype.modules import ITrayService +from openpype_interfaces import ITrayService log = logging.getLogger(__name__) diff --git a/openpype/modules/default_modules/webserver/interfaces.py b/openpype/modules/default_modules/webserver/interfaces.py new file mode 100644 index 0000000000..779361a9ec --- /dev/null +++ 
b/openpype/modules/default_modules/webserver/interfaces.py @@ -0,0 +1,9 @@ +from abc import abstractmethod +from openpype.modules import OpenPypeInterface + + +class IWebServerRoutes(OpenPypeInterface): + """Other modules interface to register their routes.""" + @abstractmethod + def webserver_initialization(self, server_manager): + pass diff --git a/openpype/modules/webserver/server.py b/openpype/modules/default_modules/webserver/server.py similarity index 92% rename from openpype/modules/webserver/server.py rename to openpype/modules/default_modules/webserver/server.py index 65c5795995..83a29e074e 100644 --- a/openpype/modules/webserver/server.py +++ b/openpype/modules/default_modules/webserver/server.py @@ -10,8 +10,9 @@ log = PypeLogger.get_logger("WebServer") class WebServerManager: """Manger that care about web server thread.""" - def __init__(self, module): - self.module = module + def __init__(self, port=None, host=None): + self.port = port or 8079 + self.host = host or "localhost" self.client = None self.handlers = {} @@ -24,8 +25,8 @@ class WebServerManager: self.webserver_thread = WebServerThread(self) @property - def port(self): - return self.module.port + def url(self): + return "http://{}:{}".format(self.host, self.port) def add_route(self, *args, **kwargs): self.app.router.add_route(*args, **kwargs) @@ -78,6 +79,10 @@ class WebServerThread(threading.Thread): def port(self): return self.manager.port + @property + def host(self): + return self.manager.host + def run(self): self.is_running = True @@ -110,7 +115,7 @@ class WebServerThread(threading.Thread): """ Starts runner and TCPsite """ self.runner = web.AppRunner(self.manager.app) await self.runner.setup() - self.site = web.TCPSite(self.runner, 'localhost', self.port) + self.site = web.TCPSite(self.runner, self.host, self.port) await self.site.start() def stop(self): diff --git a/openpype/modules/webserver/webserver_module.py b/openpype/modules/default_modules/webserver/webserver_module.py 
similarity index 77% rename from openpype/modules/webserver/webserver_module.py rename to openpype/modules/default_modules/webserver/webserver_module.py index b61619acde..5bfb2d6390 100644 --- a/openpype/modules/webserver/webserver_module.py +++ b/openpype/modules/default_modules/webserver/webserver_module.py @@ -1,31 +1,27 @@ import os import socket -from abc import ABCMeta, abstractmethod - -import six from openpype import resources -from .. import PypeModule, ITrayService +from openpype.modules import OpenPypeModule +from openpype_interfaces import ( + ITrayService, + IWebServerRoutes +) -@six.add_metaclass(ABCMeta) -class IWebServerRoutes: - """Other modules interface to register their routes.""" - @abstractmethod - def webserver_initialization(self, server_manager): - pass - - -class WebServerModule(PypeModule, ITrayService): +class WebServerModule(OpenPypeModule, ITrayService): name = "webserver" label = "WebServer" + webserver_url_env = "OPENPYPE_WEBSERVER_URL" + def initialize(self, _module_settings): self.enabled = True self.server_manager = None self._host_listener = None self.port = self.find_free_port() + self.webserver_url = None def connect_with_modules(self, enabled_modules): if not self.server_manager: @@ -50,14 +46,12 @@ class WebServerModule(PypeModule, ITrayService): static_prefix = "/res" self.server_manager.add_static(static_prefix, resources.RESOURCES_DIR) - webserver_url = "http://localhost:{}".format(self.port) - os.environ["OPENPYPE_WEBSERVER_URL"] = webserver_url os.environ["OPENPYPE_STATICS_SERVER"] = "{}{}".format( - webserver_url, static_prefix + self.webserver_url, static_prefix ) def _add_listeners(self): - from openpype.modules.webserver import host_console_listener + from openpype_modules.webserver import host_console_listener self._host_listener = host_console_listener.HostListener( self.server_manager, self @@ -71,17 +65,34 @@ class WebServerModule(PypeModule, ITrayService): if self.server_manager: 
self.server_manager.stop_server() + @staticmethod + def create_new_server_manager(port=None, host=None): + """Create webserver manager for passed port and host. + + Args: + port(int): Port on which wil webserver listen. + host(str): Host name or IP address. Default is 'localhost'. + + Returns: + WebServerManager: Prepared manager. + """ + from .server import WebServerManager + + return WebServerManager(port, host) + def create_server_manager(self): if self.server_manager: return - from .server import WebServerManager - - self.server_manager = WebServerManager(self) + self.server_manager = self.create_new_server_manager(self.port) self.server_manager.on_stop_callbacks.append( self.set_service_failed_icon ) + webserver_url = self.server_manager.url + os.environ[self.webserver_url_env] = str(webserver_url) + self.webserver_url = webserver_url + @staticmethod def find_free_port( port_from=None, port_to=None, exclude_ports=None, host=None diff --git a/openpype/modules/sync_server/__init__.py b/openpype/modules/sync_server/__init__.py deleted file mode 100644 index a814f0db62..0000000000 --- a/openpype/modules/sync_server/__init__.py +++ /dev/null @@ -1,5 +0,0 @@ -from openpype.modules.sync_server.sync_server_module import SyncServerModule - - -def tray_init(tray_widget, main_widget): - return SyncServerModule() diff --git a/openpype/plugins/publish/collect_anatomy_context_data.py b/openpype/plugins/publish/collect_anatomy_context_data.py index f121760e27..ec88d5669d 100644 --- a/openpype/plugins/publish/collect_anatomy_context_data.py +++ b/openpype/plugins/publish/collect_anatomy_context_data.py @@ -62,23 +62,10 @@ class CollectAnatomyContextData(pyblish.api.ContextPlugin): "asset": asset_entity["name"], "hierarchy": hierarchy.replace("\\", "/"), "task": task_name, - "username": context.data["user"] + "username": context.data["user"], + "app": context.data["hostName"] } - # Use AVALON_APP as first if available it is the same as host name - # - only if is not defined 
use AVALON_APP_NAME (e.g. on Farm) and - # set it back to AVALON_APP env variable - host_name = os.environ.get("AVALON_APP") - if not host_name: - app_manager = ApplicationManager() - app_name = os.environ.get("AVALON_APP_NAME") - if app_name: - app = app_manager.applications.get(app_name) - if app: - host_name = app.host_name - os.environ["AVALON_APP"] = host_name - context_data["app"] = host_name - datetime_data = context.data.get("datetimeData") or {} context_data.update(datetime_data) diff --git a/openpype/plugins/publish/collect_host_name.py b/openpype/plugins/publish/collect_host_name.py new file mode 100644 index 0000000000..b731e3ed26 --- /dev/null +++ b/openpype/plugins/publish/collect_host_name.py @@ -0,0 +1,37 @@ +""" +Requires: + None +Provides: + context -> host (str) +""" +import os +import pyblish.api + +from openpype.lib import ApplicationManager + + +class CollectHostName(pyblish.api.ContextPlugin): + """Collect avalon host name to context.""" + + label = "Collect Host Name" + order = pyblish.api.CollectorOrder - 0.5 + + def process(self, context): + host_name = context.data.get("hostName") + # Don't override value if is already set + if host_name: + return + + # Use AVALON_APP as first if available it is the same as host name + # - only if is not defined use AVALON_APP_NAME (e.g. 
on Farm) and + # set it back to AVALON_APP env variable + host_name = os.environ.get("AVALON_APP") + if not host_name: + app_name = os.environ.get("AVALON_APP_NAME") + if app_name: + app_manager = ApplicationManager() + app = app_manager.applications.get(app_name) + if app: + host_name = app.host_name + + context.data["hostName"] = host_name diff --git a/openpype/plugins/publish/collect_scene_version.py b/openpype/plugins/publish/collect_scene_version.py index 669e6752f3..ca12f2900c 100644 --- a/openpype/plugins/publish/collect_scene_version.py +++ b/openpype/plugins/publish/collect_scene_version.py @@ -11,14 +11,22 @@ class CollectSceneVersion(pyblish.api.ContextPlugin): order = pyblish.api.CollectorOrder label = 'Collect Version' + hosts = [ + "aftereffects", + "blender", + "celaction", + "fusion", + "harmony", + "hiero", + "houdini", + "maya", + "nuke", + "photoshop", + "resolve", + "tvpaint" + ] def process(self, context): - if "standalonepublisher" in context.data.get("host", []): - return - - if "unreal" in pyblish.api.registered_hosts(): - return - assert context.data.get('currentFile'), "Cannot get current file" filename = os.path.basename(context.data.get('currentFile')) diff --git a/openpype/plugins/publish/extract_burnin.py b/openpype/plugins/publish/extract_burnin.py index 91e0a0f3ec..625125321c 100644 --- a/openpype/plugins/publish/extract_burnin.py +++ b/openpype/plugins/publish/extract_burnin.py @@ -45,6 +45,7 @@ class ExtractBurnin(openpype.api.Extractor): "fusion", "aftereffects", "tvpaint", + "webpublisher", "aftereffects" # "resolve" ] @@ -96,7 +97,7 @@ class ExtractBurnin(openpype.api.Extractor): def main_process(self, instance): # TODO get these data from context - host_name = os.environ["AVALON_APP"] + host_name = instance.context.data["hostName"] task_name = os.environ["AVALON_TASK"] family = self.main_family_from_instance(instance) diff --git a/openpype/plugins/publish/extract_jpeg.py b/openpype/plugins/publish/extract_jpeg_exr.py similarity 
index 96% rename from openpype/plugins/publish/extract_jpeg.py rename to openpype/plugins/publish/extract_jpeg_exr.py index b1289217e6..ae691285b5 100644 --- a/openpype/plugins/publish/extract_jpeg.py +++ b/openpype/plugins/publish/extract_jpeg_exr.py @@ -17,7 +17,7 @@ class ExtractJpegEXR(pyblish.api.InstancePlugin): "imagesequence", "render", "render2d", "source", "plate", "take" ] - hosts = ["shell", "fusion", "resolve"] + hosts = ["shell", "fusion", "resolve", "webpublisher"] enabled = False # presetable attribute @@ -95,7 +95,7 @@ class ExtractJpegEXR(pyblish.api.InstancePlugin): # use same input args like with mov jpeg_items.extend(ffmpeg_args.get("input") or []) # input file - jpeg_items.append("-i {}".format(full_input_path)) + jpeg_items.append("-i \"{}\"".format(full_input_path)) # output arguments from presets jpeg_items.extend(ffmpeg_args.get("output") or []) @@ -104,7 +104,7 @@ class ExtractJpegEXR(pyblish.api.InstancePlugin): jpeg_items.append("-vframes 1") # output file - jpeg_items.append(full_output_path) + jpeg_items.append("\"{}\"".format(full_output_path)) subprocess_jpeg = " ".join(jpeg_items) diff --git a/openpype/plugins/publish/extract_review.py b/openpype/plugins/publish/extract_review.py index bdcd3b8e60..78cbea10be 100644 --- a/openpype/plugins/publish/extract_review.py +++ b/openpype/plugins/publish/extract_review.py @@ -45,6 +45,7 @@ class ExtractReview(pyblish.api.InstancePlugin): "fusion", "tvpaint", "resolve", + "webpublisher", "aftereffects" ] @@ -89,7 +90,7 @@ class ExtractReview(pyblish.api.InstancePlugin): instance.data["representations"].remove(repre) def main_process(self, instance): - host_name = os.environ["AVALON_APP"] + host_name = instance.context.data["hostName"] task_name = os.environ["AVALON_TASK"] family = self.main_family_from_instance(instance) diff --git a/openpype/plugins/publish/integrate_new.py b/openpype/plugins/publish/integrate_new.py index 67e9f9ca19..f9e9b43f08 100644 --- 
a/openpype/plugins/publish/integrate_new.py +++ b/openpype/plugins/publish/integrate_new.py @@ -301,7 +301,9 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): task_name = io.Session.get("AVALON_TASK") family = self.main_family_from_instance(instance) - key_values = {"families": family, "tasks": task_name} + key_values = {"families": family, + "tasks": task_name, + "hosts": instance.data["anatomyData"]["app"]} profile = filter_profiles(self.template_name_profiles, key_values, logger=self.log) diff --git a/openpype/plugins/publish/start_timer.py b/openpype/plugins/publish/start_timer.py new file mode 100644 index 0000000000..6312294bf1 --- /dev/null +++ b/openpype/plugins/publish/start_timer.py @@ -0,0 +1,15 @@ +import pyblish.api + +from openpype.api import get_system_settings +from openpype.lib import change_timer_to_current_context + + +class StartTimer(pyblish.api.ContextPlugin): + label = "Start Timer" + order = pyblish.api.IntegratorOrder + 1 + hosts = ["*"] + + def process(self, context): + modules_settings = get_system_settings()["modules"] + if modules_settings["timers_manager"]["disregard_publishing"]: + change_timer_to_current_context() diff --git a/openpype/plugins/publish/stop_timer.py b/openpype/plugins/publish/stop_timer.py new file mode 100644 index 0000000000..81afd16378 --- /dev/null +++ b/openpype/plugins/publish/stop_timer.py @@ -0,0 +1,19 @@ +import os +import requests + +import pyblish.api + +from openpype.api import get_system_settings + + +class StopTimer(pyblish.api.ContextPlugin): + label = "Stop Timer" + order = pyblish.api.ExtractorOrder - 0.5 + hosts = ["*"] + + def process(self, context): + modules_settings = get_system_settings()["modules"] + if modules_settings["timers_manager"]["disregard_publishing"]: + webserver_url = os.environ.get("OPENPYPE_WEBSERVER_URL") + rest_api_url = "{}/timers_manager/stop_timer".format(webserver_url) + requests.post(rest_api_url) diff --git a/openpype/pype_commands.py b/openpype/pype_commands.py 
index 7c47d8c613..c18fe36667 100644 --- a/openpype/pype_commands.py +++ b/openpype/pype_commands.py @@ -3,7 +3,7 @@ import os import sys import json -from pathlib import Path +from datetime import datetime from openpype.lib import PypeLogger from openpype.api import get_app_environments_for_context @@ -35,11 +35,17 @@ class PypeCommands: @staticmethod def launch_eventservercli(*args): - from openpype.modules.ftrack.ftrack_server.event_server_cli import ( + from openpype_modules.ftrack.ftrack_server.event_server_cli import ( run_event_server ) return run_event_server(*args) + @staticmethod + def launch_webpublisher_webservercli(*args, **kwargs): + from openpype.hosts.webpublisher.webserver_service.webserver_cli \ + import (run_webserver) + return run_webserver(*args, **kwargs) + @staticmethod def launch_standalone_publisher(): from openpype.tools import standalonepublish @@ -104,6 +110,123 @@ class PypeCommands: log.info("Publish finished.") uninstall() + @staticmethod + def remotepublish(project, batch_path, host, user, targets=None): + """Start headless publishing. + + Publish use json from passed paths argument. + + Args: + project (str): project to publish (only single context is expected + per call of remotepublish + batch_path (str): Path batch folder. Contains subfolders with + resources (workfile, another subfolder 'renders' etc.) + targets (string): What module should be targeted + (to choose validator for example) + host (string) + user (string): email address for webpublisher + + Raises: + RuntimeError: When there is no path to process. 
+ """ + if not batch_path: + raise RuntimeError("No publish paths specified") + + from openpype import install, uninstall + from openpype.api import Logger + from openpype.lib import OpenPypeMongoConnection + + # Register target and host + import pyblish.api + import pyblish.util + + log = Logger.get_logger() + + log.info("remotepublish command") + + install() + + if host: + pyblish.api.register_host(host) + + if targets: + if isinstance(targets, str): + targets = [targets] + for target in targets: + pyblish.api.register_target(target) + + os.environ["OPENPYPE_PUBLISH_DATA"] = batch_path + os.environ["AVALON_PROJECT"] = project + os.environ["AVALON_APP"] = host + + import avalon.api + from openpype.hosts.webpublisher import api as webpublisher + + avalon.api.install(webpublisher) + + log.info("Running publish ...") + + # Error exit as soon as any error occurs. + error_format = "Failed {plugin.__name__}: {error} -- {error.traceback}" + + mongo_client = OpenPypeMongoConnection.get_mongo_client() + database_name = os.environ["OPENPYPE_DATABASE_NAME"] + dbcon = mongo_client[database_name]["webpublishes"] + + _, batch_id = os.path.split(batch_path) + _id = dbcon.insert_one({ + "batch_id": batch_id, + "start_date": datetime.now(), + "user": user, + "status": "in_progress" + }).inserted_id + + log_lines = [] + for result in pyblish.util.publish_iter(): + for record in result["records"]: + log_lines.append("{}: {}".format( + result["plugin"].label, record.msg)) + + if result["error"]: + log.error(error_format.format(**result)) + uninstall() + log_lines.append(error_format.format(**result)) + dbcon.update_one( + {"_id": _id}, + {"$set": + { + "finish_date": datetime.now(), + "status": "error", + "log": os.linesep.join(log_lines) + + }} + ) + sys.exit(1) + else: + dbcon.update_one( + {"_id": _id}, + {"$set": + { + "progress": max(result["progress"], 0.95), + "log": os.linesep.join(log_lines) + }} + ) + + dbcon.update_one( + {"_id": _id}, + {"$set": + { + "finish_date": 
datetime.now(), + "status": "finished_ok", + "progress": 1, + "log": os.linesep.join(log_lines) + }} + ) + + log.info("Publish finished.") + uninstall() + + @staticmethod def extractenvironments(output_json_path, project, asset, task, app): env = os.environ.copy() if all((project, asset, task, app)): diff --git a/openpype/settings/__init__.py b/openpype/settings/__init__.py index b5810deef4..0adb5db0bd 100644 --- a/openpype/settings/__init__.py +++ b/openpype/settings/__init__.py @@ -1,7 +1,21 @@ +from .constants import ( + GLOBAL_SETTINGS_KEY, + SYSTEM_SETTINGS_KEY, + PROJECT_SETTINGS_KEY, + PROJECT_ANATOMY_KEY, + LOCAL_SETTING_KEY, + + SCHEMA_KEY_SYSTEM_SETTINGS, + SCHEMA_KEY_PROJECT_SETTINGS, + + KEY_ALLOWED_SYMBOLS, + KEY_REGEX +) from .exceptions import ( SaveWarningExc ) from .lib import ( + get_general_environments, get_system_settings, get_project_settings, get_current_project_settings, @@ -16,8 +30,21 @@ from .entities import ( __all__ = ( + "GLOBAL_SETTINGS_KEY", + "SYSTEM_SETTINGS_KEY", + "PROJECT_SETTINGS_KEY", + "PROJECT_ANATOMY_KEY", + "LOCAL_SETTING_KEY", + + "SCHEMA_KEY_SYSTEM_SETTINGS", + "SCHEMA_KEY_PROJECT_SETTINGS", + + "KEY_ALLOWED_SYMBOLS", + "KEY_REGEX", + "SaveWarningExc", + "get_general_environments", "get_system_settings", "get_project_settings", "get_current_project_settings", diff --git a/openpype/settings/constants.py b/openpype/settings/constants.py index a53e88a91e..2ea19ead4b 100644 --- a/openpype/settings/constants.py +++ b/openpype/settings/constants.py @@ -14,13 +14,17 @@ METADATA_KEYS = ( M_DYNAMIC_KEY_LABEL ) -# File where studio's system overrides are stored +# Keys where studio's system overrides are stored GLOBAL_SETTINGS_KEY = "global_settings" SYSTEM_SETTINGS_KEY = "system_settings" PROJECT_SETTINGS_KEY = "project_settings" PROJECT_ANATOMY_KEY = "project_anatomy" LOCAL_SETTING_KEY = "local_settings" +# Schema hub names +SCHEMA_KEY_SYSTEM_SETTINGS = "system_schema" +SCHEMA_KEY_PROJECT_SETTINGS = "projects_schema" + 
DEFAULT_PROJECT_KEY = "__default_project__" KEY_ALLOWED_SYMBOLS = "a-zA-Z0-9-_ " @@ -39,6 +43,9 @@ __all__ = ( "PROJECT_ANATOMY_KEY", "LOCAL_SETTING_KEY", + "SCHEMA_KEY_SYSTEM_SETTINGS", + "SCHEMA_KEY_PROJECT_SETTINGS", + "DEFAULT_PROJECT_KEY", "KEY_ALLOWED_SYMBOLS", diff --git a/openpype/settings/defaults/project_settings/global.json b/openpype/settings/defaults/project_settings/global.json index aab8c2196c..a53ae14914 100644 --- a/openpype/settings/defaults/project_settings/global.json +++ b/openpype/settings/defaults/project_settings/global.json @@ -151,6 +151,7 @@ "template_name_profiles": [ { "families": [], + "hosts": [], "tasks": [], "template_name": "publish" }, @@ -160,6 +161,7 @@ "render", "prerender" ], + "hosts": [], "tasks": [], "template_name": "render" } @@ -249,6 +251,13 @@ ] }, "Workfiles": { + "workfile_template_profiles": [ + { + "task_types": [], + "hosts": [], + "workfile_template": "work" + } + ], "last_workfile_on_startup": [ { "hosts": [], diff --git a/openpype/settings/defaults/project_settings/houdini.json b/openpype/settings/defaults/project_settings/houdini.json index 811a446e59..809c732d6f 100644 --- a/openpype/settings/defaults/project_settings/houdini.json +++ b/openpype/settings/defaults/project_settings/houdini.json @@ -1,4 +1,46 @@ { + "create": { + "CreateAlembicCamera": { + "enabled": true, + "defaults": [] + }, + "CreateCompositeSequence": { + "enabled": true, + "defaults": [] + }, + "CreatePointCache": { + "enabled": true, + "defaults": [] + }, + "CreateRedshiftROP": { + "enabled": true, + "defaults": [] + }, + "CreateRemotePublish": { + "enabled": true, + "defaults": [] + }, + "CreateVDBCache": { + "enabled": true, + "defaults": [] + }, + "CreateUSD": { + "enabled": false, + "defaults": [] + }, + "CreateUSDModel": { + "enabled": false, + "defaults": [] + }, + "USDCreateShadingWorkspace": { + "enabled": false, + "defaults": [] + }, + "CreateUSDRender": { + "enabled": false, + "defaults": [] + } + }, "publish": { 
"ValidateContainers": { "enabled": true, diff --git a/openpype/settings/defaults/project_settings/webpublisher.json b/openpype/settings/defaults/project_settings/webpublisher.json new file mode 100644 index 0000000000..f57b79a609 --- /dev/null +++ b/openpype/settings/defaults/project_settings/webpublisher.json @@ -0,0 +1,120 @@ +{ + "publish": { + "CollectPublishedFiles": { + "task_type_to_family": { + "Animation": { + "workfile": { + "is_sequence": false, + "extensions": [ + "tvp" + ], + "families": [], + "tags": [], + "subset_template_name": "" + }, + "render": { + "is_sequence": true, + "extensions": [ + "png", + "exr", + "tiff", + "tif" + ], + "families": [ + "review" + ], + "tags": [ + "review" + ], + "subset_template_name": "" + } + }, + "Compositing": { + "workfile": { + "is_sequence": false, + "extensions": [ + "aep" + ], + "families": [], + "tags": [], + "subset_template_name": "" + }, + "render": { + "is_sequence": true, + "extensions": [ + "png", + "exr", + "tiff", + "tif" + ], + "families": [ + "review" + ], + "tags": [ + "review" + ], + "subset_template_name": "" + } + }, + "Layout": { + "workfile": { + "is_sequence": false, + "extensions": [ + "psd" + ], + "families": [], + "tags": [], + "subset_template_name": "" + }, + "image": { + "is_sequence": false, + "extensions": [ + "png", + "jpg", + "jpeg", + "tiff", + "tif" + ], + "families": [ + "review" + ], + "tags": [ + "review" + ], + "subset_template_name": "" + } + }, + "default_task_type": { + "workfile": { + "is_sequence": false, + "extensions": [ + "tvp" + ], + "families": [], + "tags": [], + "subset_template_name": "{family}{Variant}" + }, + "render": { + "is_sequence": true, + "extensions": [ + "png", + "exr", + "tiff", + "tif" + ], + "families": [ + "review" + ], + "tags": [ + "review" + ], + "subset_template_name": "{family}{Variant}" + } + }, + "__dynamic_keys_labels__": { + "default_task_type": "Default task type" + } + } + } + } +} \ No newline at end of file diff --git 
a/openpype/settings/defaults/system_settings/modules.json b/openpype/settings/defaults/system_settings/modules.json index 3a70b90590..229b867327 100644 --- a/openpype/settings/defaults/system_settings/modules.json +++ b/openpype/settings/defaults/system_settings/modules.json @@ -1,4 +1,9 @@ { + "addon_paths": { + "windows": [], + "darwin": [], + "linux": [] + }, "avalon": { "AVALON_TIMEOUT": 1000, "AVALON_THUMBNAIL_ROOT": { @@ -128,7 +133,8 @@ "enabled": true, "auto_stop": true, "full_time": 15.0, - "message_time": 0.5 + "message_time": 0.5, + "disregard_publishing": false }, "clockify": { "enabled": false, @@ -173,4 +179,4 @@ "slack": { "enabled": false } -} \ No newline at end of file +} diff --git a/openpype/settings/entities/__init__.py b/openpype/settings/entities/__init__.py index 9cda702e9a..8c30d5044c 100644 --- a/openpype/settings/entities/__init__.py +++ b/openpype/settings/entities/__init__.py @@ -106,7 +106,8 @@ from .enum_entity import ( ToolsEnumEntity, TaskTypeEnumEntity, ProvidersEnum, - DeadlineUrlEnumEntity + DeadlineUrlEnumEntity, + AnatomyTemplatesEnumEntity ) from .list_entity import ListEntity @@ -162,6 +163,7 @@ __all__ = ( "TaskTypeEnumEntity", "ProvidersEnum", "DeadlineUrlEnumEntity", + "AnatomyTemplatesEnumEntity", "ListEntity", diff --git a/openpype/settings/entities/base_entity.py b/openpype/settings/entities/base_entity.py index 851684520b..0e8274d374 100644 --- a/openpype/settings/entities/base_entity.py +++ b/openpype/settings/entities/base_entity.py @@ -104,6 +104,12 @@ class BaseItemEntity(BaseEntity): self.is_group = False # Entity's value will be stored into file with name of it's key self.is_file = False + # Default values are not stored to an openpype file + # - these must not be set through schemas directly + self.dynamic_schema_id = None + self.is_dynamic_schema_node = False + self.is_in_dynamic_schema_node = False + # Reference to parent entity which has `is_group` == True # - stays as None if none of parents is group 
self.group_item = None @@ -255,13 +261,22 @@ class BaseItemEntity(BaseEntity): ) # Group item can be only once in on hierarchy branch. - if self.is_group and self.group_item: + if self.is_group and self.group_item is not None: raise SchemeGroupHierarchyBug(self) + # Group item can be only once in on hierarchy branch. + if self.group_item is not None and self.is_dynamic_schema_node: + reason = ( + "Dynamic schema is inside grouped item {}." + " Change group hierarchy or remove dynamic" + " schema to be able work properly." + ).format(self.group_item.path) + raise EntitySchemaError(self, reason) + # Validate that env group entities will be stored into file. # - env group entities must store metadata which is not possible if # metadata would be outside of file - if not self.file_item and self.is_env_group: + if self.file_item is None and self.is_env_group: reason = ( "Environment item is not inside file" " item so can't store metadata for defaults." @@ -478,7 +493,15 @@ class BaseItemEntity(BaseEntity): @abstractmethod def settings_value(self): - """Value of an item without key.""" + """Value of an item without key without dynamic items.""" + pass + + @abstractmethod + def collect_dynamic_schema_entities(self): + """Collect entities that are on top of dynamically added schemas. + + This method make sence only when defaults are saved. 
+ """ pass @abstractmethod @@ -808,6 +831,12 @@ class ItemEntity(BaseItemEntity): self.is_dynamic_item = is_dynamic_item self.is_file = self.schema_data.get("is_file", False) + # These keys have underscore as they must not be set in schemas + self.dynamic_schema_id = self.schema_data.get( + "_dynamic_schema_id", None + ) + self.is_dynamic_schema_node = self.dynamic_schema_id is not None + self.is_group = self.schema_data.get("is_group", False) self.is_in_dynamic_item = bool( not self.is_dynamic_item @@ -837,10 +866,20 @@ class ItemEntity(BaseItemEntity): self._require_restart_on_change = require_restart_on_change # File item reference - if self.parent.is_file: - self.file_item = self.parent - elif self.parent.file_item: - self.file_item = self.parent.file_item + if not self.is_dynamic_schema_node: + self.is_in_dynamic_schema_node = ( + self.parent.is_dynamic_schema_node + or self.parent.is_in_dynamic_schema_node + ) + + if ( + not self.is_dynamic_schema_node + and not self.is_in_dynamic_schema_node + ): + if self.parent.is_file: + self.file_item = self.parent + elif self.parent.file_item: + self.file_item = self.parent.file_item # Group item reference if self.parent.is_group: @@ -891,6 +930,18 @@ class ItemEntity(BaseItemEntity): def root_key(self): return self.root_item.root_key + @abstractmethod + def collect_dynamic_schema_entities(self, collector): + """Collect entities that are on top of dynamically added schemas. + + This method make sence only when defaults are saved. + + Args: + collector(DynamicSchemaValueCollector): Object where dynamic + entities are stored. 
+ """ + pass + def schema_validations(self): if not self.label and self.use_label_wrap: reason = ( @@ -899,7 +950,12 @@ class ItemEntity(BaseItemEntity): ) raise EntitySchemaError(self, reason) - if self.is_file and self.file_item is not None: + if ( + not self.is_dynamic_schema_node + and not self.is_in_dynamic_schema_node + and self.is_file + and self.file_item is not None + ): reason = ( "Entity has set `is_file` to true but" " it's parent is already marked as file item." diff --git a/openpype/settings/entities/dict_conditional.py b/openpype/settings/entities/dict_conditional.py index 988464d059..d7b416921c 100644 --- a/openpype/settings/entities/dict_conditional.py +++ b/openpype/settings/entities/dict_conditional.py @@ -469,6 +469,10 @@ class DictConditionalEntity(ItemEntity): return True return False + def collect_dynamic_schema_entities(self, collector): + if self.is_dynamic_schema_node: + collector.add_entity(self) + def settings_value(self): if self._override_state is OverrideState.NOT_DEFINED: return NOT_SET @@ -482,13 +486,7 @@ class DictConditionalEntity(ItemEntity): output = {} for key, child_obj in children_items: - child_value = child_obj.settings_value() - if not child_obj.is_file and not child_obj.file_item: - for _key, _value in child_value.items(): - new_key = "/".join([key, _key]) - output[new_key] = _value - else: - output[key] = child_value + output[key] = child_obj.settings_value() return output if self.is_group: diff --git a/openpype/settings/entities/dict_immutable_keys_entity.py b/openpype/settings/entities/dict_immutable_keys_entity.py index 73b08f101a..57e21ff5f3 100644 --- a/openpype/settings/entities/dict_immutable_keys_entity.py +++ b/openpype/settings/entities/dict_immutable_keys_entity.py @@ -330,15 +330,32 @@ class DictImmutableKeysEntity(ItemEntity): return True return False + def collect_dynamic_schema_entities(self, collector): + for child_obj in self.non_gui_children.values(): + 
child_obj.collect_dynamic_schema_entities(collector) + + if self.is_dynamic_schema_node: + collector.add_entity(self) + def settings_value(self): if self._override_state is OverrideState.NOT_DEFINED: return NOT_SET if self._override_state is OverrideState.DEFAULTS: + is_dynamic_schema_node = ( + self.is_dynamic_schema_node or self.is_in_dynamic_schema_node + ) output = {} for key, child_obj in self.non_gui_children.items(): + if child_obj.is_dynamic_schema_node: + continue + child_value = child_obj.settings_value() - if not child_obj.is_file and not child_obj.file_item: + if ( + not is_dynamic_schema_node + and not child_obj.is_file + and not child_obj.file_item + ): for _key, _value in child_value.items(): new_key = "/".join([key, _key]) output[new_key] = _value diff --git a/openpype/settings/entities/dict_mutable_keys_entity.py b/openpype/settings/entities/dict_mutable_keys_entity.py index c3df935269..f75fb23d82 100644 --- a/openpype/settings/entities/dict_mutable_keys_entity.py +++ b/openpype/settings/entities/dict_mutable_keys_entity.py @@ -261,7 +261,7 @@ class DictMutableKeysEntity(EndpointEntity): raise EntitySchemaError(self, reason) # TODO Ability to store labels should be defined with different key - if self.collapsible_key and not self.file_item: + if self.collapsible_key and self.file_item is None: reason = ( "Modifiable dictionary with collapsible keys is not under" " file item so can't store metadata." 
diff --git a/openpype/settings/entities/enum_entity.py b/openpype/settings/entities/enum_entity.py index 917e376904..cb532c5ae0 100644 --- a/openpype/settings/entities/enum_entity.py +++ b/openpype/settings/entities/enum_entity.py @@ -419,7 +419,7 @@ class ProvidersEnum(BaseEnumEntity): self.placeholder = None def _get_enum_values(self): - from openpype.modules.sync_server.providers import lib as lib_providers + from openpype_modules.sync_server.providers import lib as lib_providers providers = lib_providers.factory.providers @@ -458,27 +458,19 @@ class DeadlineUrlEnumEntity(BaseEnumEntity): self.valid_value_types = (list,) self.value_on_not_set = [] else: - for key in self.valid_keys: - if self.value_on_not_set is NOT_SET: - self.value_on_not_set = key - break - self.valid_value_types = (STRING_TYPE,) + self.value_on_not_set = "" # GUI attribute self.placeholder = self.schema_data.get("placeholder") def _get_enum_values(self): - system_settings_entity = self.get_entity_from_path("system_settings") + deadline_urls_entity = self.get_entity_from_path( + "system_settings/modules/deadline/deadline_urls" + ) valid_keys = set() enum_items_list = [] - deadline_urls_entity = ( - system_settings_entity - ["modules"] - ["deadline"] - ["deadline_urls"] - ) for server_name, url_entity in deadline_urls_entity.items(): enum_items_list.append( {server_name: "{}: {}".format(server_name, url_entity.value)}) @@ -489,8 +481,82 @@ class DeadlineUrlEnumEntity(BaseEnumEntity): super(DeadlineUrlEnumEntity, self).set_override_state(*args, **kwargs) self.enum_items, self.valid_keys = self._get_enum_values() - new_value = [] - for key in self._current_value: - if key in self.valid_keys: - new_value.append(key) - self._current_value = new_value + if self.multiselection: + new_value = [] + for key in self._current_value: + if key in self.valid_keys: + new_value.append(key) + self._current_value = new_value + + else: + if not self.valid_keys: + self._current_value = "" + + elif 
self._current_value not in self.valid_keys: + self._current_value = tuple(self.valid_keys)[0] + + +class AnatomyTemplatesEnumEntity(BaseEnumEntity): + schema_types = ["anatomy-templates-enum"] + + def _item_initalization(self): + self.multiselection = False + + self.enum_items = [] + self.valid_keys = set() + + enum_default = self.schema_data.get("default") or "work" + + self.value_on_not_set = enum_default + self.valid_value_types = (STRING_TYPE,) + + # GUI attribute + self.placeholder = self.schema_data.get("placeholder") + + def _get_enum_values(self): + templates_entity = self.get_entity_from_path( + "project_anatomy/templates" + ) + + valid_keys = set() + enum_items_list = [] + + others_entity = None + for key, entity in templates_entity.items(): + # Skip defaults key + if key == "defaults": + continue + + if key == "others": + others_entity = entity + continue + + label = key + if hasattr(entity, "label"): + label = entity.label or label + + enum_items_list.append({key: label}) + valid_keys.add(key) + + if others_entity is not None: + get_child_label_func = getattr( + others_entity, "get_child_label", None + ) + for key, child_entity in others_entity.items(): + label = key + if callable(get_child_label_func): + label = get_child_label_func(child_entity) or label + + enum_items_list.append({key: label}) + valid_keys.add(key) + + return enum_items_list, valid_keys + + def set_override_state(self, *args, **kwargs): + super(AnatomyTemplatesEnumEntity, self).set_override_state( + *args, **kwargs + ) + + self.enum_items, self.valid_keys = self._get_enum_values() + if self._current_value not in self.valid_keys: + self._current_value = self.value_on_not_set diff --git a/openpype/settings/entities/input_entities.py b/openpype/settings/entities/input_entities.py index 336d1f5c1e..ebc70b840d 100644 --- a/openpype/settings/entities/input_entities.py +++ b/openpype/settings/entities/input_entities.py @@ -49,6 +49,10 @@ class EndpointEntity(ItemEntity): 
super(EndpointEntity, self).schema_validations() + def collect_dynamic_schema_entities(self, collector): + if self.is_dynamic_schema_node: + collector.add_entity(self) + @abstractmethod def _settings_value(self): pass @@ -121,7 +125,11 @@ class InputEntity(EndpointEntity): def schema_validations(self): # Input entity must have file parent. - if not self.file_item: + if ( + not self.is_dynamic_schema_node + and not self.is_in_dynamic_schema_node + and self.file_item is None + ): raise EntitySchemaError(self, "Missing parent file entity.") super(InputEntity, self).schema_validations() @@ -369,6 +377,9 @@ class NumberEntity(InputEntity): self.valid_value_types = valid_value_types self.value_on_not_set = value_on_not_set + # UI specific attributes + self.show_slider = self.schema_data.get("show_slider", False) + def _convert_to_valid_type(self, value): if isinstance(value, str): new_value = None diff --git a/openpype/settings/entities/item_entities.py b/openpype/settings/entities/item_entities.py index ac6b3e76dd..c7c9c3097e 100644 --- a/openpype/settings/entities/item_entities.py +++ b/openpype/settings/entities/item_entities.py @@ -115,6 +115,9 @@ class PathEntity(ItemEntity): def set(self, value): self.child_obj.set(value) + def collect_dynamic_schema_entities(self, *args, **kwargs): + self.child_obj.collect_dynamic_schema_entities(*args, **kwargs) + def settings_value(self): if self._override_state is OverrideState.NOT_DEFINED: return NOT_SET @@ -236,7 +239,12 @@ class ListStrictEntity(ItemEntity): def schema_validations(self): # List entity must have file parent. - if not self.file_item and not self.is_file: + if ( + not self.is_dynamic_schema_node + and not self.is_in_dynamic_schema_node + and not self.is_file + and self.file_item is None + ): raise EntitySchemaError( self, "Missing file entity in hierarchy." 
) @@ -279,6 +287,10 @@ class ListStrictEntity(ItemEntity): for idx, item in enumerate(new_value): self.children[idx].set(item) + def collect_dynamic_schema_entities(self, collector): + if self.is_dynamic_schema_node: + collector.add_entity(self) + def settings_value(self): if self._override_state is OverrideState.NOT_DEFINED: return NOT_SET diff --git a/openpype/settings/entities/lib.py b/openpype/settings/entities/lib.py index 01f61d8bdf..f207322dee 100644 --- a/openpype/settings/entities/lib.py +++ b/openpype/settings/entities/lib.py @@ -3,6 +3,7 @@ import re import json import copy import inspect +import collections import contextlib from .exceptions import ( @@ -10,6 +11,12 @@ from .exceptions import ( SchemaDuplicatedEnvGroupKeys ) +from openpype.settings.constants import ( + SYSTEM_SETTINGS_KEY, + PROJECT_SETTINGS_KEY, + SCHEMA_KEY_SYSTEM_SETTINGS, + SCHEMA_KEY_PROJECT_SETTINGS +) try: STRING_TYPE = basestring except Exception: @@ -24,6 +31,10 @@ TEMPLATE_METADATA_KEYS = ( DEFAULT_VALUES_KEY, ) +SCHEMA_EXTEND_TYPES = ( + "schema", "template", "schema_template", "dynamic_schema" +) + template_key_pattern = re.compile(r"(\{.*?[^{0]*\})") @@ -102,8 +113,8 @@ class OverrideState: class SchemasHub: - def __init__(self, schema_subfolder, reset=True): - self._schema_subfolder = schema_subfolder + def __init__(self, schema_type, reset=True): + self._schema_type = schema_type self._loaded_types = {} self._gui_types = tuple() @@ -112,25 +123,56 @@ class SchemasHub: self._loaded_templates = {} self._loaded_schemas = {} + # Attributes for modules settings + self._dynamic_schemas_defs_by_id = {} + self._dynamic_schemas_by_id = {} + # Store validating and validated dynamic template or schemas self._validating_dynamic = set() self._validated_dynamic = set() - # It doesn't make sence to reload types on each reset as they can't be - # changed - self._load_types() - # Trigger reset if reset: self.reset() + @property + def schema_type(self): + return self._schema_type + def 
reset(self): + self._load_modules_settings_defs() + self._load_types() self._load_schemas() + def _load_modules_settings_defs(self): + from openpype.modules import get_module_settings_defs + + module_settings_defs = get_module_settings_defs() + for module_settings_def_cls in module_settings_defs: + module_settings_def = module_settings_def_cls() + def_id = module_settings_def.id + self._dynamic_schemas_defs_by_id[def_id] = module_settings_def + @property def gui_types(self): return self._gui_types + def resolve_dynamic_schema(self, dynamic_key): + output = [] + for def_id, def_keys in self._dynamic_schemas_by_id.items(): + if dynamic_key in def_keys: + def_schema = def_keys[dynamic_key] + if not def_schema: + continue + + if isinstance(def_schema, dict): + def_schema = [def_schema] + + for item in def_schema: + item["_dynamic_schema_id"] = def_id + output.extend(def_schema) + return output + def get_template_name(self, item_def, default=None): """Get template name from passed item definition. @@ -260,7 +302,7 @@ class SchemasHub: list: Resolved schema data. 
""" schema_type = schema_data["type"] - if schema_type not in ("schema", "template", "schema_template"): + if schema_type not in SCHEMA_EXTEND_TYPES: return [schema_data] if schema_type == "schema": @@ -268,6 +310,9 @@ class SchemasHub: self.get_schema(schema_data["name"]) ) + if schema_type == "dynamic_schema": + return self.resolve_dynamic_schema(schema_data["name"]) + template_name = schema_data["name"] template_def = self.get_template(template_name) @@ -368,14 +413,16 @@ class SchemasHub: self._crashed_on_load = {} self._loaded_templates = {} self._loaded_schemas = {} + self._dynamic_schemas_by_id = {} dirpath = os.path.join( os.path.dirname(os.path.abspath(__file__)), "schemas", - self._schema_subfolder + self.schema_type ) loaded_schemas = {} loaded_templates = {} + dynamic_schemas_by_id = {} for root, _, filenames in os.walk(dirpath): for filename in filenames: basename, ext = os.path.splitext(filename) @@ -425,8 +472,34 @@ class SchemasHub: ) loaded_schemas[basename] = schema_data + defs_iter = self._dynamic_schemas_defs_by_id.items() + for def_id, module_settings_def in defs_iter: + dynamic_schemas_by_id[def_id] = ( + module_settings_def.get_dynamic_schemas(self.schema_type) + ) + module_schemas = module_settings_def.get_settings_schemas( + self.schema_type + ) + for key, schema_data in module_schemas.items(): + if isinstance(schema_data, list): + if key in loaded_templates: + raise KeyError( + "Duplicated template key \"{}\"".format(key) + ) + loaded_templates[key] = schema_data + else: + if key in loaded_schemas: + raise KeyError( + "Duplicated schema key \"{}\"".format(key) + ) + loaded_schemas[key] = schema_data + self._loaded_templates = loaded_templates self._loaded_schemas = loaded_schemas + self._dynamic_schemas_by_id = dynamic_schemas_by_id + + def get_dynamic_modules_settings_defs(self, schema_def_id): + return self._dynamic_schemas_defs_by_id.get(schema_def_id) def _fill_template(self, child_data, template_def): """Fill template based on schema 
definition and template definition. @@ -660,3 +733,38 @@ class SchemasHub: if found_idx is not None: metadata_item = template_def.pop(found_idx) return metadata_item + + +class DynamicSchemaValueCollector: + # Map schema hub type to store keys + schema_hub_type_map = { + SCHEMA_KEY_SYSTEM_SETTINGS: SYSTEM_SETTINGS_KEY, + SCHEMA_KEY_PROJECT_SETTINGS: PROJECT_SETTINGS_KEY + } + + def __init__(self, schema_hub): + self._schema_hub = schema_hub + self._dynamic_entities = [] + + def add_entity(self, entity): + self._dynamic_entities.append(entity) + + def create_hierarchy(self): + output = collections.defaultdict(dict) + for entity in self._dynamic_entities: + output[entity.dynamic_schema_id][entity.path] = ( + entity.settings_value() + ) + return output + + def save_values(self): + hierarchy = self.create_hierarchy() + + for schema_def_id, schema_def_value in hierarchy.items(): + schema_def = self._schema_hub.get_dynamic_modules_settings_defs( + schema_def_id + ) + top_key = self.schema_hub_type_map.get( + self._schema_hub.schema_type + ) + schema_def.save_defaults(top_key, schema_def_value) diff --git a/openpype/settings/entities/root_entities.py b/openpype/settings/entities/root_entities.py index 4a06d2d591..05d20ee60b 100644 --- a/openpype/settings/entities/root_entities.py +++ b/openpype/settings/entities/root_entities.py @@ -9,8 +9,11 @@ from .base_entity import BaseItemEntity from .lib import ( NOT_SET, WRAPPER_TYPES, + SCHEMA_KEY_SYSTEM_SETTINGS, + SCHEMA_KEY_PROJECT_SETTINGS, OverrideState, - SchemasHub + SchemasHub, + DynamicSchemaValueCollector ) from .exceptions import ( SchemaError, @@ -28,6 +31,7 @@ from openpype.settings.lib import ( DEFAULTS_DIR, get_default_settings, + reset_default_settings, get_studio_system_settings_overrides, save_studio_settings, @@ -265,6 +269,16 @@ class RootEntity(BaseItemEntity): output[key] = child_obj.value return output + def collect_dynamic_schema_entities(self): + output = DynamicSchemaValueCollector(self.schema_hub) + if 
self._override_state is not OverrideState.DEFAULTS: + return output + + for child_obj in self.non_gui_children.values(): + child_obj.collect_dynamic_schema_entities(output) + + return output + def settings_value(self): """Value for current override state with metadata. @@ -276,6 +290,8 @@ class RootEntity(BaseItemEntity): if self._override_state is not OverrideState.DEFAULTS: output = {} for key, child_obj in self.non_gui_children.items(): + if child_obj.is_dynamic_schema_node: + continue value = child_obj.settings_value() if value is not NOT_SET: output[key] = value @@ -374,6 +390,7 @@ class RootEntity(BaseItemEntity): if self._override_state is OverrideState.DEFAULTS: self._save_default_values() + reset_default_settings() elif self._override_state is OverrideState.STUDIO: self._save_studio_values() @@ -421,6 +438,9 @@ class RootEntity(BaseItemEntity): with open(output_path, "w") as file_stream: json.dump(value, file_stream, indent=4) + dynamic_values_item = self.collect_dynamic_schema_entities() + dynamic_values_item.save_values() + @abstractmethod def _save_studio_values(self): """Save studio override values.""" @@ -476,7 +496,7 @@ class SystemSettings(RootEntity): ): if schema_hub is None: # Load system schemas - schema_hub = SchemasHub("system_schema") + schema_hub = SchemasHub(SCHEMA_KEY_SYSTEM_SETTINGS) super(SystemSettings, self).__init__(schema_hub, reset) @@ -607,7 +627,7 @@ class ProjectSettings(RootEntity): if schema_hub is None: # Load system schemas - schema_hub = SchemasHub("projects_schema") + schema_hub = SchemasHub(SCHEMA_KEY_PROJECT_SETTINGS) super(ProjectSettings, self).__init__(schema_hub, reset) diff --git a/openpype/settings/entities/schemas/README.md b/openpype/settings/entities/schemas/README.md index 2034d4e463..9b53e89dd7 100644 --- a/openpype/settings/entities/schemas/README.md +++ b/openpype/settings/entities/schemas/README.md @@ -112,6 +112,22 @@ ``` - It is possible to define default values for unfilled fields to do so one of items in 
list must be dictionary with key `"__default_values__"` and value as dictionary with default key: values (as in example above). +### dynamic_schema +- dynamic templates that can be defined by class of `ModuleSettingsDef` +- example: +``` +{ + "type": "dynamic_schema", + "name": "project_settings/global" +} +``` +- all valid `ModuleSettingsDef` classes where calling of `get_settings_schemas` + will return dictionary where is key "project_settings/global" with schemas + will extend and replace this item +- works almost the same way as templates + - one item can be replaced by multiple items (or by 0 items) +- goal is to dynamically loaded settings of OpenPype addons without having + their schemas or default values in main repository ## Basic Dictionary inputs - these inputs wraps another inputs into {key: value} relation @@ -300,6 +316,7 @@ How output of the schema could look like on save: - key `"decimal"` defines how many decimal places will be used, 0 is for integer input (Default: `0`) - key `"minimum"` as minimum allowed number to enter (Default: `-99999`) - key `"maxium"` as maximum allowed number to enter (Default: `99999`) +- for UI it is possible to show slider to enable this option set `show_slider` to `true` ``` { "type": "number", @@ -311,6 +328,18 @@ How output of the schema could look like on save: } ``` +``` +{ + "type": "number", + "key": "ratio", + "label": "Ratio" + "decimal": 3, + "minimum": 0, + "maximum": 1, + "show_slider": true +} +``` + ### text - simple text input - key `"multiline"` allows to enter multiple lines of text (Default: `False`) @@ -380,6 +409,20 @@ How output of the schema could look like on save: } ``` +### anatomy-templates-enum +- enumeration of all available anatomy template keys +- have only single selection mode +- it is possible to define default value `default` + - `"work"` is used if default value is not specified +``` +{ + "key": "host", + "label": "Host name", + "type": "anatomy-templates-enum", + "default": "publish" 
+} +``` + ### hosts-enum - enumeration of available hosts - multiselection can be allowed with setting key `"multiselection"` to `True` (Default: `False`) diff --git a/openpype/settings/entities/schemas/projects_schema/schema_main.json b/openpype/settings/entities/schemas/projects_schema/schema_main.json index 4a8a9d496e..c9eca5dedd 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_main.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_main.json @@ -118,9 +118,17 @@ "type": "schema", "name": "schema_project_standalonepublisher" }, + { + "type": "schema", + "name": "schema_project_webpublisher" + }, { "type": "schema", "name": "schema_project_unreal" + }, + { + "type": "dynamic_schema", + "name": "project_settings/global" } ] } diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_houdini.json b/openpype/settings/entities/schemas/projects_schema/schema_project_houdini.json index c6de257a61..cad99dde22 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_houdini.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_houdini.json @@ -5,6 +5,10 @@ "label": "Houdini", "is_file": true, "children": [ + { + "type": "schema", + "name": "schema_houdini_create" + }, { "type": "dict", "collapsible": true, diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_webpublisher.json b/openpype/settings/entities/schemas/projects_schema/schema_project_webpublisher.json new file mode 100644 index 0000000000..91337da2b2 --- /dev/null +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_webpublisher.json @@ -0,0 +1,69 @@ +{ + "type": "dict", + "collapsible": true, + "key": "webpublisher", + "label": "Web Publisher", + "is_file": true, + "children": [ + { + "type": "dict", + "collapsible": true, + "key": "publish", + "label": "Publish plugins", + "children": [ + { + "type": "dict", + "collapsible": true, + "key": 
"CollectPublishedFiles", + "label": "Collect Published Files", + "children": [ + { + "type": "dict-modifiable", + "collapsible": true, + "key": "task_type_to_family", + "label": "Task type to family mapping", + "collapsible_key": true, + "object_type": { + "type": "dict-modifiable", + "collapsible": false, + "key": "task_type", + "collapsible_key": false, + "object_type": { + "type": "dict", + "children": [ + { + "type": "boolean", + "key": "is_sequence", + "label": "Is Sequence" + }, + { + "type": "list", + "key": "extensions", + "label": "Extensions", + "object_type": "text" + }, + { + "type": "list", + "key": "families", + "label": "Families", + "object_type": "text" + }, + { + "type": "schema", + "name": "schema_representation_tags" + }, + { + "type": "text", + "key": "subset_template_name", + "label": "Subset template name" + } + ] + } + } + } + ] + } + ] + } + ] +} \ No newline at end of file diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_publish.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_publish.json index d265988534..4b91072eb6 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_publish.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_publish.json @@ -496,6 +496,12 @@ "type": "list", "object_type": "text" }, + { + "type": "hosts-enum", + "key": "hosts", + "label": "Hosts", + "multiselection": true + }, { "key": "tasks", "label": "Task names", diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_tools.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_tools.json index 9e39eeb39e..245560f115 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_tools.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_tools.json @@ -65,6 +65,37 @@ "key": "Workfiles", "label": "Workfiles", "children": [ + { + 
"type": "list", + "key": "workfile_template_profiles", + "label": "Workfile template profiles", + "use_label_wrap": true, + "object_type": { + "type": "dict", + "children": [ + { + "key": "task_types", + "label": "Task types", + "type": "task-types-enum" + }, + { + "type": "hosts-enum", + "key": "hosts", + "label": "Hosts", + "multiselection": true + }, + { + "type": "splitter" + }, + { + "key": "workfile_template", + "label": "Workfile template", + "type": "anatomy-templates-enum", + "multiselection": false + } + ] + } + }, { "type": "list", "key": "last_workfile_on_startup", diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_houdini_create.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_houdini_create.json new file mode 100644 index 0000000000..72b8032d4b --- /dev/null +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_houdini_create.json @@ -0,0 +1,54 @@ +{ + "type": "dict", + "collapsible": true, + "key": "create", + "label": "Creator plugins", + "children": [ + { + "type": "schema_template", + "name": "template_create_plugin", + "template_data": [ + { + "key": "CreateAlembicCamera", + "label": "Create Alembic Camera" + }, + { + "key": "CreateCompositeSequence", + "label": "Create Composite (Image Sequence)" + }, + { + "key": "CreatePointCache", + "label": "Create Point Cache" + }, + { + "key": "CreateRedshiftROP", + "label": "Create Redshift ROP" + }, + { + "key": "CreateRemotePublish", + "label": "Create Remote Publish" + }, + { + "key": "CreateVDBCache", + "label": "Create VDB Cache" + }, + { + "key": "CreateUSD", + "label": "Create USD" + }, + { + "key": "CreateUSDModel", + "label": "Create USD Model" + }, + { + "key": "USDCreateShadingWorkspace", + "label": "Create USD Shading Workspace" + }, + { + "key": "CreateUSDRender", + "label": "Create USD Render" + } + ] + } + ] +} \ No newline at end of file diff --git 
a/openpype/settings/entities/schemas/projects_schema/schemas/schema_representation_tags.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_representation_tags.json index b65de747e5..7607e1a8c1 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_representation_tags.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_representation_tags.json @@ -8,7 +8,10 @@ "burnin": "Add burnins" }, { - "ftrackreview": "Add to Ftrack" + "review": "Create review" + }, + { + "ftrackreview": "Add review to Ftrack" }, { "delete": "Delete output" diff --git a/openpype/settings/entities/schemas/system_schema/example_schema.json b/openpype/settings/entities/schemas/system_schema/example_schema.json index f633d5cb1a..af6a2d49f4 100644 --- a/openpype/settings/entities/schemas/system_schema/example_schema.json +++ b/openpype/settings/entities/schemas/system_schema/example_schema.json @@ -183,6 +183,15 @@ "minimum": -10, "maximum": -5 }, + { + "type": "number", + "key": "number_with_slider", + "label": "Number with slider", + "decimal": 2, + "minimum": 0.0, + "maximum": 1.0, + "show_slider": true + }, { "type": "text", "key": "singleline_text", diff --git a/openpype/settings/entities/schemas/system_schema/schema_modules.json b/openpype/settings/entities/schemas/system_schema/schema_modules.json index 75c08b2cd9..31d8e04731 100644 --- a/openpype/settings/entities/schemas/system_schema/schema_modules.json +++ b/openpype/settings/entities/schemas/system_schema/schema_modules.json @@ -5,6 +5,18 @@ "collapsible": true, "is_file": true, "children": [ + { + "type": "path", + "key": "addon_paths", + "label": "OpenPype AddOn Paths", + "use_label_wrap": true, + "multiplatform": true, + "multipath": true, + "require_restart": true + }, + { + "type": "separator" + }, { "type": "dict", "key": "avalon", @@ -60,6 +72,11 @@ "decimal": 2, "key": "message_time", "label": "When dialog will show" + }, + { + "type": "boolean", + "key": 
"disregard_publishing", + "label": "Disregard Publishing" } ] }, diff --git a/openpype/settings/lib.py b/openpype/settings/lib.py index 5c2c0dcd94..60ed54bd4a 100644 --- a/openpype/settings/lib.py +++ b/openpype/settings/lib.py @@ -114,7 +114,8 @@ def save_studio_settings(data): SaveWarningExc: If any module raises the exception. """ # Notify Pype modules - from openpype.modules import ModulesManager, ISettingsChangeListener + from openpype.modules import ModulesManager + from openpype_interfaces import ISettingsChangeListener old_data = get_system_settings() default_values = get_default_settings()[SYSTEM_SETTINGS_KEY] @@ -161,7 +162,8 @@ def save_project_settings(project_name, overrides): SaveWarningExc: If any module raises the exception. """ # Notify Pype modules - from openpype.modules import ModulesManager, ISettingsChangeListener + from openpype.modules import ModulesManager + from openpype_interfaces import ISettingsChangeListener default_values = get_default_settings()[PROJECT_SETTINGS_KEY] if project_name: @@ -222,7 +224,8 @@ def save_project_anatomy(project_name, anatomy_data): SaveWarningExc: If any module raises the exception. 
""" # Notify Pype modules - from openpype.modules import ModulesManager, ISettingsChangeListener + from openpype.modules import ModulesManager + from openpype_interfaces import ISettingsChangeListener default_values = get_default_settings()[PROJECT_ANATOMY_KEY] if project_name: @@ -326,6 +329,45 @@ def reset_default_settings(): _DEFAULT_SETTINGS = None +def _get_default_settings(): + from openpype.modules import get_module_settings_defs + + defaults = load_openpype_default_settings() + + module_settings_defs = get_module_settings_defs() + for module_settings_def_cls in module_settings_defs: + module_settings_def = module_settings_def_cls() + system_defaults = module_settings_def.get_defaults( + SYSTEM_SETTINGS_KEY + ) or {} + for path, value in system_defaults.items(): + if not path: + continue + + subdict = defaults["system_settings"] + path_items = list(path.split("/")) + last_key = path_items.pop(-1) + for key in path_items: + subdict = subdict[key] + subdict[last_key] = value + + project_defaults = module_settings_def.get_defaults( + PROJECT_SETTINGS_KEY + ) or {} + for path, value in project_defaults.items(): + if not path: + continue + + subdict = defaults + path_items = list(path.split("/")) + last_key = path_items.pop(-1) + for key in path_items: + subdict = subdict[key] + subdict[last_key] = value + + return defaults + + def get_default_settings(): """Get default settings. @@ -335,12 +377,10 @@ def get_default_settings(): Returns: dict: Loaded default settings. 
""" - # TODO add cacher - return load_openpype_default_settings() - # global _DEFAULT_SETTINGS - # if _DEFAULT_SETTINGS is None: - # _DEFAULT_SETTINGS = load_jsons_from_dir(DEFAULTS_DIR) - # return copy.deepcopy(_DEFAULT_SETTINGS) + global _DEFAULT_SETTINGS + if _DEFAULT_SETTINGS is None: + _DEFAULT_SETTINGS = _get_default_settings() + return copy.deepcopy(_DEFAULT_SETTINGS) def load_json_file(fpath): @@ -377,8 +417,8 @@ def load_jsons_from_dir(path, *args, **kwargs): "data1": "CONTENT OF FILE" }, "folder2": { - "data1": { - "subfolder1": "CONTENT OF FILE" + "subfolder1": { + "data2": "CONTENT OF FILE" } } } diff --git a/openpype/tools/launcher/constants.py b/openpype/tools/launcher/constants.py index e6dbbb6e19..7f394cb5ac 100644 --- a/openpype/tools/launcher/constants.py +++ b/openpype/tools/launcher/constants.py @@ -8,5 +8,5 @@ ACTION_ID_ROLE = QtCore.Qt.UserRole + 3 ANIMATION_START_ROLE = QtCore.Qt.UserRole + 4 ANIMATION_STATE_ROLE = QtCore.Qt.UserRole + 5 - -ANIMATION_LEN = 10 +# Animation length in seconds +ANIMATION_LEN = 7 diff --git a/openpype/tools/launcher/lib.py b/openpype/tools/launcher/lib.py index 65d40cd0df..d6374f49d2 100644 --- a/openpype/tools/launcher/lib.py +++ b/openpype/tools/launcher/lib.py @@ -44,9 +44,12 @@ class ProjectHandler(QtCore.QObject): # Signal emmited when project has changed project_changed = QtCore.Signal(str) + projects_refreshed = QtCore.Signal() + timer_timeout = QtCore.Signal() def __init__(self, dbcon, model): super(ProjectHandler, self).__init__() + self._active = False # Store project model for usage self.model = model # Store dbcon @@ -54,6 +57,28 @@ class ProjectHandler(QtCore.QObject): self.current_project = dbcon.Session.get("AVALON_PROJECT") + refresh_timer = QtCore.QTimer() + refresh_timer.setInterval(self.refresh_interval) + refresh_timer.timeout.connect(self._on_timeout) + + self.refresh_timer = refresh_timer + + def _on_timeout(self): + if self._active: + self.timer_timeout.emit() + self.refresh_model() + + def 
set_active(self, active): + self._active = active + + def start_timer(self, trigger=False): + self.refresh_timer.start() + if trigger: + self._on_timeout() + + def stop_timer(self): + self.refresh_timer.stop() + def set_project(self, project_name): # Change current project of this handler self.current_project = project_name @@ -66,6 +91,7 @@ class ProjectHandler(QtCore.QObject): def refresh_model(self): self.model.refresh() + self.projects_refreshed.emit() def get_action_icon(action): diff --git a/openpype/tools/launcher/models.py b/openpype/tools/launcher/models.py index 846a07e081..4988829c11 100644 --- a/openpype/tools/launcher/models.py +++ b/openpype/tools/launcher/models.py @@ -122,7 +122,6 @@ class ActionModel(QtGui.QStandardItemModel): self.application_manager = ApplicationManager() - self._groups = {} self.default_icon = qtawesome.icon("fa.cube", color="white") # Cache of available actions self._registered_actions = list() @@ -138,14 +137,18 @@ class ActionModel(QtGui.QStandardItemModel): actions.extend(app_actions) self._registered_actions = actions - self.items_by_id.clear() + + self.filter_actions() def get_application_actions(self): actions = [] if not self.dbcon.Session.get("AVALON_PROJECT"): return actions - project_doc = self.dbcon.find_one({"type": "project"}) + project_doc = self.dbcon.find_one( + {"type": "project"}, + {"config.apps": True} + ) if not project_doc: return actions @@ -182,16 +185,12 @@ class ActionModel(QtGui.QStandardItemModel): return icon def filter_actions(self): + self.items_by_id.clear() # Validate actions based on compatibility self.clear() - self.items_by_id.clear() - self._groups.clear() - actions = self.filter_compatible_actions(self._registered_actions) - self.beginResetModel() - single_actions = [] varianted_actions = collections.defaultdict(list) grouped_actions = collections.defaultdict(list) @@ -274,12 +273,17 @@ class ActionModel(QtGui.QStandardItemModel): items_by_order[order].append(item) + self.beginResetModel() 
+ + items = [] for order in sorted(items_by_order.keys()): for item in items_by_order[order]: item_id = str(uuid.uuid4()) item.setData(item_id, ACTION_ID_ROLE) self.items_by_id[item_id] = item - self.appendRow(item) + items.append(item) + + self.invisibleRootItem().appendRows(items) self.endResetModel() diff --git a/openpype/tools/launcher/widgets.py b/openpype/tools/launcher/widgets.py index 048210115c..35c7d98be1 100644 --- a/openpype/tools/launcher/widgets.py +++ b/openpype/tools/launcher/widgets.py @@ -40,16 +40,11 @@ class ProjectBar(QtWidgets.QWidget): QtWidgets.QSizePolicy.Maximum ) - refresh_timer = QtCore.QTimer() - refresh_timer.setInterval(project_handler.refresh_interval) - self.project_handler = project_handler self.project_delegate = project_delegate self.project_combobox = project_combobox - self.refresh_timer = refresh_timer # Signals - refresh_timer.timeout.connect(self._on_refresh_timeout) self.project_combobox.currentIndexChanged.connect(self.on_index_change) project_handler.project_changed.connect(self._on_project_change) @@ -58,20 +53,6 @@ class ProjectBar(QtWidgets.QWidget): if project_name: self.set_project(project_name) - def showEvent(self, event): - if not self.refresh_timer.isActive(): - self.refresh_timer.start() - super(ProjectBar, self).showEvent(event) - - def _on_refresh_timeout(self): - if not self.isVisible(): - # Stop timer if widget is not visible - self.refresh_timer.stop() - - elif self.isActiveWindow(): - # Refresh projects if window is active - self.project_handler.refresh_model() - def _on_project_change(self, project_name): if self.get_current_project() == project_name: return @@ -103,9 +84,10 @@ class ActionBar(QtWidgets.QWidget): action_clicked = QtCore.Signal(object) - def __init__(self, dbcon, parent=None): + def __init__(self, project_handler, dbcon, parent=None): super(ActionBar, self).__init__(parent) + self.project_handler = project_handler self.dbcon = dbcon layout = QtWidgets.QHBoxLayout(self) @@ -152,17 +134,25 
@@ class ActionBar(QtWidgets.QWidget): self.set_row_height(1) + project_handler.projects_refreshed.connect(self._on_projects_refresh) view.clicked.connect(self.on_clicked) def discover_actions(self): + if self._animation_timer.isActive(): + self._animation_timer.stop() self.model.discover() def filter_actions(self): + if self._animation_timer.isActive(): + self._animation_timer.stop() self.model.filter_actions() def set_row_height(self, rows): self.setMinimumHeight(rows * 75) + def _on_projects_refresh(self): + self.discover_actions() + def _on_animation(self): time_now = time.time() for action_id in tuple(self._animated_items): @@ -182,6 +172,8 @@ class ActionBar(QtWidgets.QWidget): self.update() def _start_animation(self, index): + # Offset refresh timout + self.project_handler.start_timer() action_id = index.data(ACTION_ID_ROLE) item = self.model.items_by_id.get(action_id) if item: @@ -202,6 +194,9 @@ class ActionBar(QtWidgets.QWidget): self.action_clicked.emit(action) return + # Offset refresh timout + self.project_handler.start_timer() + actions = index.data(ACTION_ROLE) menu = QtWidgets.QMenu(self) diff --git a/openpype/tools/launcher/window.py b/openpype/tools/launcher/window.py index 979aab42cf..bd37a9b89c 100644 --- a/openpype/tools/launcher/window.py +++ b/openpype/tools/launcher/window.py @@ -103,14 +103,9 @@ class ProjectsPanel(QtWidgets.QWidget): layout.addWidget(view) - refresh_timer = QtCore.QTimer() - refresh_timer.setInterval(project_handler.refresh_interval) - - refresh_timer.timeout.connect(self._on_refresh_timeout) view.clicked.connect(self.on_clicked) self.view = view - self.refresh_timer = refresh_timer self.project_handler = project_handler def on_clicked(self, index): @@ -118,21 +113,6 @@ class ProjectsPanel(QtWidgets.QWidget): project_name = index.data(QtCore.Qt.DisplayRole) self.project_handler.set_project(project_name) - def showEvent(self, event): - self.project_handler.refresh_model() - if not self.refresh_timer.isActive(): - 
self.refresh_timer.start() - super(ProjectsPanel, self).showEvent(event) - - def _on_refresh_timeout(self): - if not self.isVisible(): - # Stop timer if widget is not visible - self.refresh_timer.stop() - - elif self.isActiveWindow(): - # Refresh projects if window is active - self.project_handler.refresh_model() - class AssetsPanel(QtWidgets.QWidget): """Assets page""" @@ -268,8 +248,6 @@ class AssetsPanel(QtWidgets.QWidget): class LauncherWindow(QtWidgets.QDialog): """Launcher interface""" - # Refresh actions each 10000msecs - actions_refresh_timeout = 10000 def __init__(self, parent=None): super(LauncherWindow, self).__init__(parent) @@ -304,7 +282,7 @@ class LauncherWindow(QtWidgets.QDialog): page_slider.addWidget(asset_panel) # actions - actions_bar = ActionBar(self.dbcon, self) + actions_bar = ActionBar(project_handler, self.dbcon, self) # statusbar statusbar = QtWidgets.QWidget() @@ -342,10 +320,6 @@ class LauncherWindow(QtWidgets.QDialog): layout.setSpacing(0) layout.setContentsMargins(0, 0, 0, 0) - actions_refresh_timer = QtCore.QTimer() - actions_refresh_timer.setInterval(self.actions_refresh_timeout) - - self.actions_refresh_timer = actions_refresh_timer self.project_handler = project_handler self.message_label = message_label @@ -357,22 +331,31 @@ class LauncherWindow(QtWidgets.QDialog): self._page = 0 # signals - actions_refresh_timer.timeout.connect(self._on_action_timer) actions_bar.action_clicked.connect(self.on_action_clicked) action_history.trigger_history.connect(self.on_history_action) project_handler.project_changed.connect(self.on_project_change) + project_handler.timer_timeout.connect(self._on_refresh_timeout) asset_panel.back_clicked.connect(self.on_back_clicked) asset_panel.session_changed.connect(self.on_session_changed) self.resize(520, 740) def showEvent(self, event): - if not self.actions_refresh_timer.isActive(): - self.actions_refresh_timer.start() - self.discover_actions() + self.project_handler.set_active(True) + 
self.project_handler.start_timer(True) super(LauncherWindow, self).showEvent(event) + def _on_refresh_timeout(self): + # Stop timer if widget is not visible + if not self.isVisible(): + self.project_handler.stop_timer() + + def changeEvent(self, event): + if event.type() == QtCore.QEvent.ActivationChange: + self.project_handler.set_active(self.isActiveWindow()) + super(LauncherWindow, self).changeEvent(event) + def set_page(self, page): current = self.page_slider.currentIndex() if current == page and self._page == page: @@ -392,20 +375,10 @@ class LauncherWindow(QtWidgets.QDialog): def discover_actions(self): self.actions_bar.discover_actions() - self.filter_actions() def filter_actions(self): self.actions_bar.filter_actions() - def _on_action_timer(self): - if not self.isVisible(): - # Stop timer if widget is not visible - self.actions_refresh_timer.stop() - - elif self.isActiveWindow(): - # Refresh projects if window is active - self.discover_actions() - def on_project_change(self, project_name): # Update the Action plug-ins available for the current project self.set_page(1) diff --git a/openpype/tools/settings/settings/breadcrumbs_widget.py b/openpype/tools/settings/settings/breadcrumbs_widget.py index b625a7bb07..d25cbdc8cb 100644 --- a/openpype/tools/settings/settings/breadcrumbs_widget.py +++ b/openpype/tools/settings/settings/breadcrumbs_widget.py @@ -325,7 +325,9 @@ class BreadcrumbsButton(QtWidgets.QToolButton): self.setSizePolicy(size_policy) menu.triggered.connect(self._on_menu_click) - self.clicked.connect(self._on_click) + # Don't allow to go to root with mouse click + if path: + self.clicked.connect(self._on_click) self._path = path self._path_prefix = path_prefix diff --git a/openpype/tools/settings/settings/categories.py b/openpype/tools/settings/settings/categories.py index d1babd7fdb..c420a8cdc5 100644 --- a/openpype/tools/settings/settings/categories.py +++ b/openpype/tools/settings/settings/categories.py @@ -203,6 +203,7 @@ class 
SettingsCategoryWidget(QtWidgets.QWidget): refresh_btn.setIcon(refresh_icon) footer_layout = QtWidgets.QHBoxLayout() + footer_layout.setContentsMargins(5, 5, 5, 5) if self.user_role == "developer": self._add_developer_ui(footer_layout) diff --git a/openpype/tools/settings/settings/item_widgets.py b/openpype/tools/settings/settings/item_widgets.py index d29fa6f42b..736ba77652 100644 --- a/openpype/tools/settings/settings/item_widgets.py +++ b/openpype/tools/settings/settings/item_widgets.py @@ -21,6 +21,7 @@ from .base import ( BaseWidget, InputWidget ) +from openpype.widgets.sliders import NiceSlider from openpype.tools.settings import CHILD_OFFSET @@ -48,6 +49,10 @@ class DictImmutableKeysWidget(BaseWidget): self._ui_item_base() label = self.entity.label + # Set stretch of second column to 1 + if isinstance(self.content_layout, QtWidgets.QGridLayout): + self.content_layout.setColumnStretch(1, 1) + self._direct_children_widgets = [] self._parent_widget_by_entity_id = {} self._added_wrapper_ids = set() @@ -89,6 +94,25 @@ class DictImmutableKeysWidget(BaseWidget): self._prepare_entity_layouts(child["children"], wrapper) + def set_focus(self, scroll_to=False): + """Set focus of a widget. + + Args: + scroll_to(bool): Also scroll to widget in category widget. 
+ """ + if self.body_widget: + if scroll_to: + self.scroll_to(self.body_widget.top_part) + self.body_widget.top_part.setFocus() + + else: + if scroll_to: + if not self.input_fields: + self.scroll_to(self) + else: + self.scroll_to(self.input_fields[0]) + self.setFocus() + def _ui_item_base(self): self.setObjectName("DictInvisible") @@ -312,8 +336,12 @@ class BoolWidget(InputWidget): self.setFocusProxy(self.input_field) + self.input_field.focused_in.connect(self._on_input_focus) self.input_field.stateChanged.connect(self._on_value_change) + def _on_input_focus(self): + self.focused_in() + def _on_entity_change(self): if self.entity.value != self.input_field.isChecked(): self.set_entity_value() @@ -377,6 +405,8 @@ class TextWidget(InputWidget): class NumberWidget(InputWidget): + _slider_widget = None + def _add_inputs_to_layout(self): kwargs = { "minimum": self.entity.minimum, @@ -384,14 +414,39 @@ class NumberWidget(InputWidget): "decimal": self.entity.decimal } self.input_field = NumberSpinBox(self.content_widget, **kwargs) + input_field_stretch = 1 + + slider_multiplier = 1 + if self.entity.show_slider: + # Slider can't handle float numbers so all decimals are converted + # to integer range. 
+ slider_multiplier = 10 ** self.entity.decimal + slider_widget = NiceSlider(QtCore.Qt.Horizontal, self) + slider_widget.setRange( + int(self.entity.minimum * slider_multiplier), + int(self.entity.maximum * slider_multiplier) + ) + + self.content_layout.addWidget(slider_widget, 1) + + slider_widget.valueChanged.connect(self._on_slider_change) + + self._slider_widget = slider_widget + + input_field_stretch = 0 + + self._slider_multiplier = slider_multiplier self.setFocusProxy(self.input_field) - self.content_layout.addWidget(self.input_field, 1) + self.content_layout.addWidget(self.input_field, input_field_stretch) self.input_field.valueChanged.connect(self._on_value_change) self.input_field.focused_in.connect(self._on_input_focus) + self._ignore_slider_change = False + self._ignore_input_change = False + def _on_input_focus(self): self.focused_in() @@ -402,10 +457,25 @@ class NumberWidget(InputWidget): def set_entity_value(self): self.input_field.setValue(self.entity.value) + def _on_slider_change(self, new_value): + if self._ignore_slider_change: + return + + self._ignore_input_change = True + self.input_field.setValue(new_value / self._slider_multiplier) + self._ignore_input_change = False + def _on_value_change(self): if self.ignore_input_changes: return - self.entity.set(self.input_field.value()) + + value = self.input_field.value() + if self._slider_widget is not None and not self._ignore_input_change: + self._ignore_slider_change = True + self._slider_widget.setValue(value * self._slider_multiplier) + self._ignore_slider_change = False + + self.entity.set(value) class RawJsonInput(SettingsPlainTextEdit): diff --git a/openpype/tools/settings/settings/style/style.css b/openpype/tools/settings/settings/style/style.css index 250c15063f..d9d85a481e 100644 --- a/openpype/tools/settings/settings/style/style.css +++ b/openpype/tools/settings/settings/style/style.css @@ -114,6 +114,30 @@ QPushButton[btn-type="expand-toggle"] { background: #21252B; } +/* SLider */ 
+QSlider::groove { + border: 1px solid #464b54; + border-radius: 0.3em; +} +QSlider::groove:horizontal { + height: 8px; +} +QSlider::groove:vertical { + width: 8px; +} +QSlider::handle { + width: 10px; + height: 10px; + + border-radius: 5px; +} +QSlider::handle:horizontal { + margin: -2px 0; +} +QSlider::handle:vertical { + margin: 0 -2px; +} + #GroupWidget { border-bottom: 1px solid #21252B; } diff --git a/openpype/tools/settings/settings/widgets.py b/openpype/tools/settings/settings/widgets.py index 34b222dd8e..b821c3bb2c 100644 --- a/openpype/tools/settings/settings/widgets.py +++ b/openpype/tools/settings/settings/widgets.py @@ -221,6 +221,8 @@ class ExpandingWidget(QtWidgets.QWidget): self.main_layout.setSpacing(0) self.main_layout.addWidget(top_part) + self.top_part = top_part + def hide_toolbox(self, hide_content=False): self.button_toggle.setArrowType(QtCore.Qt.NoArrow) self.toolbox_hidden = True @@ -459,6 +461,7 @@ class NiceCheckbox(QtWidgets.QFrame): stateChanged = QtCore.Signal(int) checked_bg_color = QtGui.QColor(69, 128, 86) unchecked_bg_color = QtGui.QColor(170, 80, 80) + focused_in = QtCore.Signal() def set_bg_color(self, color): self._bg_color = color @@ -583,6 +586,10 @@ class NiceCheckbox(QtWidgets.QFrame): self._on_checkstate_change() + def mousePressEvent(self, event): + self.focused_in.emit() + super(NiceCheckbox, self).mousePressEvent(event) + def mouseReleaseEvent(self, event): if event.button() == QtCore.Qt.LeftButton: self.setCheckState() diff --git a/openpype/tools/tray/pype_tray.py b/openpype/tools/tray/pype_tray.py index 794312f389..ed66f1a80f 100644 --- a/openpype/tools/tray/pype_tray.py +++ b/openpype/tools/tray/pype_tray.py @@ -15,11 +15,7 @@ from openpype.api import ( get_system_settings ) from openpype.lib import get_pype_execute_args -from openpype.modules import ( - TrayModulesManager, - ITrayAction, - ITrayService -) +from openpype.modules import TrayModulesManager from openpype import style from .pype_info_widget import 
PypeInfoWidget @@ -80,6 +76,10 @@ class TrayManager: def initialize_modules(self): """Add modules to tray.""" + from openpype_interfaces import ( + ITrayAction, + ITrayService + ) self.modules_manager.initialize(self, self.tray_widget.menu) diff --git a/openpype/tools/tray_app/app.py b/openpype/tools/tray_app/app.py index 339e6343f8..03f8321464 100644 --- a/openpype/tools/tray_app/app.py +++ b/openpype/tools/tray_app/app.py @@ -9,7 +9,7 @@ import itertools from datetime import datetime from avalon import style -from openpype.modules.webserver import host_console_listener +from openpype_modules.webserver import host_console_listener from Qt import QtWidgets, QtCore diff --git a/openpype/tools/workfiles/app.py b/openpype/tools/workfiles/app.py index 42f0e422ae..3d2633f8dc 100644 --- a/openpype/tools/workfiles/app.py +++ b/openpype/tools/workfiles/app.py @@ -12,10 +12,15 @@ from avalon import style, io, api, pipeline from avalon.tools import lib as tools_lib from avalon.tools.widgets import AssetWidget -from avalon.tools.models import TasksModel from avalon.tools.delegates import PrettyTimeDelegate -from .model import FilesModel +from .model import ( + TASK_NAME_ROLE, + TASK_TYPE_ROLE, + FilesModel, + TasksModel, + TasksProxyModel +) from .view import FilesView from openpype.lib import ( @@ -23,7 +28,8 @@ from openpype.lib import ( get_workdir, get_workfile_doc, create_workfile_doc, - save_workfile_data_to_doc + save_workfile_data_to_doc, + get_workfile_template_key ) log = logging.getLogger(__name__) @@ -55,9 +61,13 @@ class NameWindow(QtWidgets.QDialog): # Set work file data for template formatting asset_name = session["AVALON_ASSET"] - project_doc = io.find_one({ - "type": "project" - }) + project_doc = io.find_one( + {"type": "project"}, + { + "name": True, + "data.code": True + } + ) self.data = { "project": { "name": project_doc["name"], @@ -126,10 +136,14 @@ class NameWindow(QtWidgets.QDialog): # for "{version". 
if "{version" in self.template: inputs_layout.addRow("Version:", version_widget) + else: + version_widget.setVisible(False) # Add subversion only if template containt `{comment}` if "{comment}" in self.template: inputs_layout.addRow("Subversion:", subversion_input) + else: + subversion_input.setVisible(False) inputs_layout.addRow("Extension:", ext_combo) inputs_layout.addRow("Preview:", preview_label) @@ -305,48 +319,46 @@ class TasksWidget(QtWidgets.QWidget): task_changed = QtCore.Signal() - def __init__(self, parent=None): + def __init__(self, dbcon=None, parent=None): super(TasksWidget, self).__init__(parent) - self.setContentsMargins(0, 0, 0, 0) - view = QtWidgets.QTreeView() - view.setIndentation(0) - model = TasksModel(io) - view.setModel(model) + tasks_view = QtWidgets.QTreeView(self) + tasks_view.setIndentation(0) + tasks_view.setSortingEnabled(True) + if dbcon is None: + dbcon = io + + tasks_model = TasksModel(dbcon) + tasks_proxy = TasksProxyModel() + tasks_proxy.setSourceModel(tasks_model) + tasks_view.setModel(tasks_proxy) layout = QtWidgets.QVBoxLayout(self) layout.setContentsMargins(0, 0, 0, 0) - layout.addWidget(view) + layout.addWidget(tasks_view) - # Hide the default tasks "count" as we don't need that data here. - view.setColumnHidden(1, True) + selection_model = tasks_view.selectionModel() + selection_model.currentChanged.connect(self.task_changed) - selection = view.selectionModel() - selection.currentChanged.connect(self.task_changed) - - self.models = { - "tasks": model - } - - self.widgets = { - "view": view, - } + self._tasks_model = tasks_model + self._tasks_proxy = tasks_proxy + self._tasks_view = tasks_view self._last_selected_task = None - def set_asset(self, asset): - if asset is None: - # Asset deselected + def set_asset(self, asset_doc): + # Asset deselected + if asset_doc is None: return # Try and preserve the last selected task and reselect it # after switching assets. 
If there's no currently selected # asset keep whatever the "last selected" was prior to it. - current = self.get_current_task() + current = self.get_current_task_name() if current: self._last_selected_task = current - self.models["tasks"].set_assets(asset_docs=[asset]) + self._tasks_model.set_asset(asset_doc) if self._last_selected_task: self.select_task(self._last_selected_task) @@ -354,7 +366,7 @@ class TasksWidget(QtWidgets.QWidget): # Force a task changed emit. self.task_changed.emit() - def select_task(self, task): + def select_task(self, task_name): """Select a task by name. If the task does not exist in the current model then selection is only @@ -366,39 +378,40 @@ class TasksWidget(QtWidgets.QWidget): """ # Clear selection - view = self.widgets["view"] - model = view.model() - selection_model = view.selectionModel() + selection_model = self._tasks_view.selectionModel() selection_model.clearSelection() # Select the task mode = selection_model.Select | selection_model.Rows - for row in range(model.rowCount(QtCore.QModelIndex())): - index = model.index(row, 0, QtCore.QModelIndex()) - name = index.data(QtCore.Qt.DisplayRole) - if name == task: + for row in range(self._tasks_model.rowCount()): + index = self._tasks_model.index(row, 0) + name = index.data(TASK_NAME_ROLE) + if name == task_name: selection_model.select(index, mode) # Set the currently active index - view.setCurrentIndex(index) + self._tasks_view.setCurrentIndex(index) + break - def get_current_task(self): + def get_current_task_name(self): """Return name of task at current index (selected) Returns: str: Name of the current task. 
""" - view = self.widgets["view"] - index = view.currentIndex() - index = index.sibling(index.row(), 0) # ensure column zero for name + index = self._tasks_view.currentIndex() + selection_model = self._tasks_view.selectionModel() + if index.isValid() and selection_model.isSelected(index): + return index.data(TASK_NAME_ROLE) + return None - selection = view.selectionModel() - if selection.isSelected(index): - # Ignore when the current task is not selected as the "No task" - # placeholder might be the current index even though it's - # disallowed to be selected. So we only return if it is selected. - return index.data(QtCore.Qt.DisplayRole) + def get_current_task_type(self): + index = self._tasks_view.currentIndex() + selection_model = self._tasks_view.selectionModel() + if index.isValid() and selection_model.isSelected(index): + return index.data(TASK_TYPE_ROLE) + return None class FilesWidget(QtWidgets.QWidget): @@ -411,12 +424,12 @@ class FilesWidget(QtWidgets.QWidget): # Setup self._asset = None - self._task = None + self._task_name = None + self._task_type = None # Pype's anatomy object for current project self.anatomy = Anatomy(io.Session["AVALON_PROJECT"]) # Template key used to get work template from anatomy templates - # TODO change template key based on task self.template_key = "work" # This is not root but workfile directory @@ -506,14 +519,15 @@ class FilesWidget(QtWidgets.QWidget): self.btn_browse = btn_browse self.btn_save = btn_save - def set_asset_task(self, asset, task): + def set_asset_task(self, asset, task_name, task_type): self._asset = asset - self._task = task + self._task_name = task_name + self._task_type = task_type # Define a custom session so we can query the work root # for a "Work area" that is not our current Session. # This way we can browse it even before we enter it. 
- if self._asset and self._task: + if self._asset and self._task_name and self._task_type: session = self._get_session() self.root = self.host.work_root(session) self.files_model.set_root(self.root) @@ -533,10 +547,16 @@ class FilesWidget(QtWidgets.QWidget): """Return a modified session for the current asset and task""" session = api.Session.copy() + self.template_key = get_workfile_template_key( + self._task_type, + session["AVALON_APP"], + project_name=session["AVALON_PROJECT"] + ) changes = pipeline.compute_session_changes( session, asset=self._asset, - task=self._task + task=self._task_name, + template_key=self.template_key ) session.update(changes) @@ -549,14 +569,19 @@ class FilesWidget(QtWidgets.QWidget): changes = pipeline.compute_session_changes( session, asset=self._asset, - task=self._task + task=self._task_name, + template_key=self.template_key ) if not changes: # Return early if we're already in the right Session context # to avoid any unwanted Task Changed callbacks to be triggered. 
return - api.update_current_task(asset=self._asset, task=self._task) + api.update_current_task( + asset=self._asset, + task=self._task_name, + template_key=self.template_key + ) def open_file(self, filepath): host = self.host @@ -606,7 +631,7 @@ class FilesWidget(QtWidgets.QWidget): result = messagebox.exec_() if result == messagebox.Yes: return True - elif result == messagebox.No: + if result == messagebox.No: return False return None @@ -700,7 +725,7 @@ class FilesWidget(QtWidgets.QWidget): self._enter_session() # Make sure we are in the right session self.host.save_file(file_path) - self.set_asset_task(self._asset, self._task) + self.set_asset_task(self._asset, self._task_name, self._task_type) pipeline.emit("after.workfile.save", [file_path]) @@ -727,7 +752,8 @@ class FilesWidget(QtWidgets.QWidget): changes = pipeline.compute_session_changes( session, asset=self._asset, - task=self._task + task=self._task_name, + template_key=self.template_key ) session.update(changes) @@ -750,7 +776,7 @@ class FilesWidget(QtWidgets.QWidget): # Force a full to the asset as opposed to just self.refresh() so # that it will actually check again whether the Work directory exists - self.set_asset_task(self._asset, self._task) + self.set_asset_task(self._asset, self._task_name, self._task_type) def refresh(self): """Refresh listed files for current selection in the interface""" @@ -927,7 +953,7 @@ class Window(QtWidgets.QMainWindow): assets_widget = AssetWidget(io, parent=home_body_widget) assets_widget.set_current_asset_btn_visibility(True) - tasks_widget = TasksWidget(home_body_widget) + tasks_widget = TasksWidget(io, home_body_widget) files_widget = FilesWidget(home_body_widget) side_panel = SidePanelWidget(home_body_widget) @@ -999,7 +1025,7 @@ class Window(QtWidgets.QMainWindow): if asset_docs: asset_doc = asset_docs[0] - task_name = self.tasks_widget.get_current_task() + task_name = self.tasks_widget.get_current_task_name() workfile_doc = None if asset_doc and task_name and 
filepath: @@ -1026,7 +1052,7 @@ class Window(QtWidgets.QMainWindow): def _get_current_workfile_doc(self, filepath=None): if filepath is None: filepath = self.files_widget._get_selected_filepath() - task_name = self.tasks_widget.get_current_task() + task_name = self.tasks_widget.get_current_task_name() asset_docs = self.assets_widget.get_selected_assets() if not task_name or not asset_docs or not filepath: return @@ -1046,7 +1072,7 @@ class Window(QtWidgets.QMainWindow): workdir, filename = os.path.split(filepath) asset_docs = self.assets_widget.get_selected_assets() asset_doc = asset_docs[0] - task_name = self.tasks_widget.get_current_task() + task_name = self.tasks_widget.get_current_task_name() create_workfile_doc(asset_doc, task_name, filename, workdir, io) def set_context(self, context): @@ -1065,7 +1091,6 @@ class Window(QtWidgets.QMainWindow): # Select the asset self.assets_widget.select_assets([asset], expand=True) - # Force a refresh on Tasks? self.tasks_widget.set_asset(asset_document) if "task" in context: @@ -1095,12 +1120,13 @@ class Window(QtWidgets.QMainWindow): asset = self.assets_widget.get_selected_assets() or None if asset is not None: asset = asset[0] - task = self.tasks_widget.get_current_task() + task_name = self.tasks_widget.get_current_task_name() + task_type = self.tasks_widget.get_current_task_type() self.tasks_widget.setEnabled(bool(asset)) - self.files_widget.setEnabled(all([bool(task), bool(asset)])) - self.files_widget.set_asset_task(asset, task) + self.files_widget.setEnabled(all([bool(task_name), bool(asset)])) + self.files_widget.set_asset_task(asset, task_name, task_type) self.files_widget.refresh() diff --git a/openpype/tools/workfiles/model.py b/openpype/tools/workfiles/model.py index 368988fd4e..92fbf76b95 100644 --- a/openpype/tools/workfiles/model.py +++ b/openpype/tools/workfiles/model.py @@ -1,7 +1,7 @@ import os import logging -from Qt import QtCore +from Qt import QtCore, QtGui from avalon import style from avalon.vendor 
import qtawesome @@ -9,6 +9,10 @@ from avalon.tools.models import TreeModel, Item log = logging.getLogger(__name__) +TASK_NAME_ROLE = QtCore.Qt.UserRole + 1 +TASK_TYPE_ROLE = QtCore.Qt.UserRole + 2 +TASK_ORDER_ROLE = QtCore.Qt.UserRole + 3 + class FilesModel(TreeModel): """Model listing files with specified extensions in a root folder""" @@ -151,3 +155,142 @@ class FilesModel(TreeModel): return "Date modified" return super(FilesModel, self).headerData(section, orientation, role) + + +class TasksProxyModel(QtCore.QSortFilterProxyModel): + def lessThan(self, x_index, y_index): + x_order = x_index.data(TASK_ORDER_ROLE) + y_order = y_index.data(TASK_ORDER_ROLE) + if x_order is not None and y_order is not None: + if x_order < y_order: + return True + if x_order > y_order: + return False + + elif x_order is None and y_order is not None: + return True + + elif y_order is None and x_order is not None: + return False + + x_name = x_index.data(QtCore.Qt.DisplayRole) + y_name = y_index.data(QtCore.Qt.DisplayRole) + if x_name == y_name: + return True + + if x_name == tuple(sorted((x_name, y_name)))[0]: + return False + return True + + +class TasksModel(QtGui.QStandardItemModel): + """A model listing the tasks combined for a list of assets""" + def __init__(self, dbcon, parent=None): + super(TasksModel, self).__init__(parent=parent) + self.dbcon = dbcon + self._default_icon = qtawesome.icon( + "fa.male", + color=style.colors.default + ) + self._no_tasks_icon = qtawesome.icon( + "fa.exclamation-circle", + color=style.colors.mid + ) + self._cached_icons = {} + self._project_task_types = {} + + self._refresh_task_types() + + def _refresh_task_types(self): + # Get the project configured icons from database + project = self.dbcon.find_one( + {"type": "project"}, + {"config.tasks"} + ) + tasks = project["config"].get("tasks") or {} + self._project_task_types = tasks + + def _try_get_awesome_icon(self, icon_name): + icon = None + if icon_name: + try: + icon = qtawesome.icon( + 
"fa.{}".format(icon_name), + color=style.colors.default + ) + + except Exception: + pass + return icon + + def headerData(self, section, orientation, role): + # Show nice labels in the header + if ( + role == QtCore.Qt.DisplayRole + and orientation == QtCore.Qt.Horizontal + ): + if section == 0: + return "Tasks" + + return super(TasksModel, self).headerData(section, orientation, role) + + def _get_icon(self, task_icon, task_type_icon): + if task_icon in self._cached_icons: + return self._cached_icons[task_icon] + + icon = self._try_get_awesome_icon(task_icon) + if icon is not None: + self._cached_icons[task_icon] = icon + return icon + + if task_type_icon in self._cached_icons: + icon = self._cached_icons[task_type_icon] + self._cached_icons[task_icon] = icon + return icon + + icon = self._try_get_awesome_icon(task_type_icon) + if icon is None: + icon = self._default_icon + + self._cached_icons[task_icon] = icon + self._cached_icons[task_type_icon] = icon + + return icon + + def set_asset(self, asset_doc): + """Set assets to track by their database id + + Arguments: + asset_doc (dict): Asset document from MongoDB. 
+ """ + self.clear() + + if not asset_doc: + return + + asset_tasks = asset_doc.get("data", {}).get("tasks") or {} + items = [] + for task_name, task_info in asset_tasks.items(): + task_icon = task_info.get("icon") + task_type = task_info.get("type") + task_order = task_info.get("order") + task_type_info = self._project_task_types.get(task_type) or {} + task_type_icon = task_type_info.get("icon") + icon = self._get_icon(task_icon, task_type_icon) + + label = "{} ({})".format(task_name, task_type or "type N/A") + item = QtGui.QStandardItem(label) + item.setData(task_name, TASK_NAME_ROLE) + item.setData(task_type, TASK_TYPE_ROLE) + item.setData(task_order, TASK_ORDER_ROLE) + item.setData(icon, QtCore.Qt.DecorationRole) + item.setFlags(QtCore.Qt.ItemIsEnabled | QtCore.Qt.ItemIsSelectable) + items.append(item) + + if not items: + item = QtGui.QStandardItem("No task") + item.setData(self._no_tasks_icon, QtCore.Qt.DecorationRole) + item.setFlags(QtCore.Qt.NoItemFlags) + items.append(item) + + self.invisibleRootItem().appendRows(items) diff --git a/openpype/version.py b/openpype/version.py index e804077e54..17bd0ff892 100644 --- a/openpype/version.py +++ b/openpype/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring Pype version.""" -__version__ = "3.3.1-nightly.1" +__version__ = "3.4.0-nightly.4" diff --git a/openpype/widgets/sliders.py b/openpype/widgets/sliders.py new file mode 100644 index 0000000000..32ade58af5 --- /dev/null +++ b/openpype/widgets/sliders.py @@ -0,0 +1,139 @@ +from Qt import QtWidgets, QtCore, QtGui + + +class NiceSlider(QtWidgets.QSlider): + def __init__(self, *args, **kwargs): + super(NiceSlider, self).__init__(*args, **kwargs) + self._mouse_clicked = False + self._handle_size = 0 + + self._bg_brush = QtGui.QBrush(QtGui.QColor("#21252B")) + self._fill_brush = QtGui.QBrush(QtGui.QColor("#5cadd6")) + + def mousePressEvent(self, event): + self._mouse_clicked = True + if event.button() == QtCore.Qt.LeftButton: + 
self._set_value_to_pos(event.pos()) + return event.accept() + return super(NiceSlider, self).mousePressEvent(event) + + def mouseMoveEvent(self, event): + if self._mouse_clicked: + self._set_value_to_pos(event.pos()) + + super(NiceSlider, self).mouseMoveEvent(event) + + def mouseReleaseEvent(self, event): + self._mouse_clicked = True + super(NiceSlider, self).mouseReleaseEvent(event) + + def _set_value_to_pos(self, pos): + if self.orientation() == QtCore.Qt.Horizontal: + self._set_value_to_pos_x(pos.x()) + else: + self._set_value_to_pos_y(pos.y()) + + def _set_value_to_pos_x(self, pos_x): + _range = self.maximum() - self.minimum() + handle_size = self._handle_size + half_handle = handle_size / 2 + pos_x -= half_handle + width = self.width() - handle_size + value = ((_range * pos_x) / width) + self.minimum() + self.setValue(value) + + def _set_value_to_pos_y(self, pos_y): + _range = self.maximum() - self.minimum() + handle_size = self._handle_size + half_handle = handle_size / 2 + pos_y = self.height() - pos_y - half_handle + height = self.height() - handle_size + value = (_range * pos_y / height) + self.minimum() + self.setValue(value) + + def paintEvent(self, event): + painter = QtGui.QPainter(self) + opt = QtWidgets.QStyleOptionSlider() + self.initStyleOption(opt) + + painter.fillRect(event.rect(), QtCore.Qt.transparent) + + painter.setRenderHint(QtGui.QPainter.HighQualityAntialiasing) + + horizontal = self.orientation() == QtCore.Qt.Horizontal + + rect = self.style().subControlRect( + QtWidgets.QStyle.CC_Slider, + opt, + QtWidgets.QStyle.SC_SliderGroove, + self + ) + + _range = self.maximum() - self.minimum() + _offset = self.value() - self.minimum() + if horizontal: + _handle_half = rect.height() / 2 + _handle_size = _handle_half * 2 + width = rect.width() - _handle_size + pos_x = ((width / _range) * _offset) + pos_y = rect.center().y() - _handle_half + 1 + else: + _handle_half = rect.width() / 2 + _handle_size = _handle_half * 2 + height = rect.height() - 
_handle_size + pos_x = rect.center().x() - _handle_half + 1 + pos_y = height - ((height / _range) * _offset) + + handle_rect = QtCore.QRect( + pos_x, pos_y, _handle_size, _handle_size + ) + + self._handle_size = _handle_size + _offset = 2 + _size = _handle_size - _offset + if horizontal: + if rect.height() > _size: + new_rect = QtCore.QRect(0, 0, rect.width(), _size) + center_point = QtCore.QPoint( + rect.center().x(), handle_rect.center().y() + ) + new_rect.moveCenter(center_point) + rect = new_rect + + ratio = rect.height() / 2 + fill_rect = QtCore.QRect( + rect.x(), + rect.y(), + handle_rect.right() - rect.x(), + rect.height() + ) + + else: + if rect.width() > _size: + new_rect = QtCore.QRect(0, 0, _size, rect.height()) + center_point = QtCore.QPoint( + handle_rect.center().x(), rect.center().y() + ) + new_rect.moveCenter(center_point) + rect = new_rect + + ratio = rect.width() / 2 + fill_rect = QtCore.QRect( + rect.x(), + handle_rect.y(), + rect.width(), + rect.height() - handle_rect.y(), + ) + + painter.save() + painter.setPen(QtCore.Qt.NoPen) + painter.setBrush(self._bg_brush) + painter.drawRoundedRect(rect, ratio, ratio) + + painter.setBrush(self._fill_brush) + painter.drawRoundedRect(fill_rect, ratio, ratio) + + painter.setPen(QtCore.Qt.NoPen) + painter.setBrush(self._fill_brush) + painter.drawEllipse(handle_rect) + painter.restore() diff --git a/poetry.lock b/poetry.lock index e193325469..6dae442c9d 100644 --- a/poetry.lock +++ b/poetry.lock @@ -80,7 +80,7 @@ python-dateutil = ">=2.7.0" [[package]] name = "astroid" -version = "2.5.6" +version = "2.7.3" description = "An abstract syntax tree for Python with inference support." 
category = "dev" optional = false @@ -89,6 +89,7 @@ python-versions = "~=3.6" [package.dependencies] lazy-object-proxy = ">=1.4.0" typed-ast = {version = ">=1.4.0,<1.5", markers = "implementation_name == \"cpython\" and python_version < \"3.8\""} +typing-extensions = {version = ">=3.7.4", markers = "python_version < \"3.8\""} wrapt = ">=1.11,<1.13" [[package]] @@ -146,11 +147,11 @@ pytz = ">=2015.7" [[package]] name = "blessed" -version = "1.18.0" +version = "1.18.1" description = "Easy, practical library for making terminal apps, by providing an elegant, well-documented interface to Colors, Keyboard input, and screen Positioning capabilities." category = "main" optional = false -python-versions = "*" +python-versions = ">=2.7" [package.dependencies] jinxed = {version = ">=0.5.4", markers = "platform_system == \"Windows\""} @@ -175,7 +176,7 @@ python-versions = "*" [[package]] name = "cffi" -version = "1.14.5" +version = "1.14.6" description = "Foreign Function Interface for Python calling C code." category = "main" optional = false @@ -192,6 +193,17 @@ category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +[[package]] +name = "charset-normalizer" +version = "2.0.4" +description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." +category = "main" +optional = false +python-versions = ">=3.5.0" + +[package.extras] +unicode_backport = ["unicodedata2"] + [[package]] name = "click" version = "7.1.2" @@ -253,7 +265,7 @@ toml = ["toml"] [[package]] name = "cryptography" -version = "3.4.7" +version = "3.4.8" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." 
category = "main" optional = false @@ -272,15 +284,20 @@ test = ["pytest (>=6.0)", "pytest-cov", "pytest-subtests", "pytest-xdist", "pret [[package]] name = "cx-freeze" -version = "6.6" +version = "6.7" description = "Create standalone executables from Python scripts" category = "dev" optional = false python-versions = ">=3.6" [package.dependencies] -cx-Logging = {version = ">=3.0", markers = "sys_platform == \"win32\""} -importlib-metadata = ">=3.1.1" +cx-logging = {version = ">=3.0", markers = "sys_platform == \"win32\""} +importlib-metadata = ">=4.3.1" + +[package.source] +type = "legacy" +url = "https://distribute.openpype.io/wheels" +reference = "openpype" [[package]] name = "cx-logging" @@ -386,19 +403,19 @@ smmap = ">=3.0.1,<5" [[package]] name = "gitpython" -version = "3.1.17" +version = "3.1.20" description = "Python Git Library" category = "dev" optional = false -python-versions = ">=3.5" +python-versions = ">=3.6" [package.dependencies] gitdb = ">=4.0.1,<5" -typing-extensions = {version = ">=3.7.4.0", markers = "python_version < \"3.8\""} +typing-extensions = {version = ">=3.7.4.3", markers = "python_version < \"3.10\""} [[package]] name = "google-api-core" -version = "1.30.0" +version = "1.31.2" description = "Google API client core library" category = "main" optional = false @@ -436,7 +453,7 @@ uritemplate = ">=3.0.0,<4dev" [[package]] name = "google-auth" -version = "1.31.0" +version = "1.35.0" description = "Google Authentication Library" category = "main" optional = false @@ -493,11 +510,11 @@ pyparsing = ">=2.4.2,<3" [[package]] name = "idna" -version = "2.10" +version = "3.2" description = "Internationalized Domain Names in Applications (IDNA)" category = "main" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +python-versions = ">=3.5" [[package]] name = "imagesize" @@ -509,7 +526,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" [[package]] name = "importlib-metadata" -version = "4.5.0" +version = 
"4.8.1" description = "Read metadata from Python packages" category = "main" optional = false @@ -521,7 +538,8 @@ zipp = ">=0.5" [package.extras] docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)"] -testing = ["pytest (>=4.6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "packaging", "pep517", "pyfakefs", "flufl.flake8", "pytest-black (>=0.3.7)", "pytest-mypy", "importlib-resources (>=1.3)"] +perf = ["ipython"] +testing = ["pytest (>=4.6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "packaging", "pep517", "pyfakefs", "flufl.flake8", "pytest-perf (>=0.9.2)", "pytest-black (>=0.3.7)", "pytest-mypy", "importlib-resources (>=1.3)"] [[package]] name = "iniconfig" @@ -533,16 +551,17 @@ python-versions = "*" [[package]] name = "isort" -version = "5.8.0" +version = "5.9.3" description = "A Python utility / library to sort Python imports." category = "dev" optional = false -python-versions = ">=3.6,<4.0" +python-versions = ">=3.6.1,<4.0" [package.extras] pipfile_deprecated_finder = ["pipreqs", "requirementslib"] requirements_deprecated_finder = ["pipreqs", "pip-api"] colors = ["colorama (>=0.4.3,<0.5.0)"] +plugins = ["setuptools"] [[package]] name = "jedi" @@ -560,14 +579,15 @@ testing = ["colorama", "docopt", "pytest (>=3.1.0)"] [[package]] name = "jeepney" -version = "0.6.0" +version = "0.7.1" description = "Low-level, pure Python DBus protocol wrapper." 
category = "main" optional = false python-versions = ">=3.6" [package.extras] -test = ["pytest", "pytest-trio", "pytest-asyncio", "testpath", "trio"] +test = ["pytest", "pytest-trio", "pytest-asyncio", "testpath", "trio", "async-timeout"] +trio = ["trio", "async-generator"] [[package]] name = "jinja2" @@ -695,11 +715,11 @@ reference = "openpype" [[package]] name = "packaging" -version = "20.9" +version = "21.0" description = "Core utilities for Python packages" category = "main" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +python-versions = ">=3.6" [package.dependencies] pyparsing = ">=2.0.2" @@ -718,7 +738,7 @@ testing = ["docopt", "pytest (<6.0.0)"] [[package]] name = "pathlib2" -version = "2.3.5" +version = "2.3.6" description = "Object-oriented filesystem paths" category = "main" optional = false @@ -729,25 +749,38 @@ six = "*" [[package]] name = "pillow" -version = "8.2.0" +version = "8.3.2" description = "Python Imaging Library (Fork)" category = "main" optional = false python-versions = ">=3.6" +[[package]] +name = "platformdirs" +version = "2.3.0" +description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." 
+category = "dev" +optional = false +python-versions = ">=3.6" + +[package.extras] +docs = ["Sphinx (>=4)", "furo (>=2021.7.5b38)", "proselint (>=0.10.2)", "sphinx-autodoc-typehints (>=1.12)"] +test = ["appdirs (==1.4.4)", "pytest (>=6)", "pytest-cov (>=2.7)", "pytest-mock (>=3.6)"] + [[package]] name = "pluggy" -version = "0.13.1" +version = "1.0.0" description = "plugin and hook calling mechanisms for python" category = "dev" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +python-versions = ">=3.6" [package.dependencies] importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""} [package.extras] dev = ["pre-commit", "tox"] +testing = ["pytest", "pytest-benchmark"] [[package]] name = "prefixed" @@ -849,7 +882,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" [[package]] name = "pygments" -version = "2.9.0" +version = "2.10.0" description = "Pygments is a syntax highlighting package written in Python." category = "dev" optional = false @@ -857,22 +890,23 @@ python-versions = ">=3.5" [[package]] name = "pylint" -version = "2.8.3" +version = "2.10.2" description = "python code static checker" category = "dev" optional = false python-versions = "~=3.6" [package.dependencies] -astroid = "2.5.6" +astroid = ">=2.7.2,<2.8" colorama = {version = "*", markers = "sys_platform == \"win32\""} isort = ">=4.2.5,<6" mccabe = ">=0.6,<0.7" +platformdirs = ">=2.2.0" toml = ">=0.7.1" [[package]] name = "pymongo" -version = "3.11.4" +version = "3.12.0" description = "Python driver for MongoDB " category = "main" optional = false @@ -880,9 +914,9 @@ python-versions = "*" [package.extras] aws = ["pymongo-auth-aws (<2.0.0)"] -encryption = ["pymongocrypt (<2.0.0)"] +encryption = ["pymongocrypt (>=1.1.0,<2.0.0)"] gssapi = ["pykerberos"] -ocsp = ["pyopenssl (>=17.2.0)", "requests (<3.0.0)", "service-identity (>=18.1.0)"] +ocsp = ["pyopenssl (>=17.2.0)", "requests (<3.0.0)", "service-identity (>=18.1.0)", "certifi"] 
snappy = ["python-snappy"] srv = ["dnspython (>=1.16.0,<1.17.0)"] tls = ["ipaddress"] @@ -942,16 +976,44 @@ optional = false python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" [[package]] -name = "pyrsistent" -version = "0.17.3" -description = "Persistent/Functional/Immutable data structures" +name = "pyqt5" +version = "5.15.4" +description = "Python bindings for the Qt cross platform application toolkit" +category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +PyQt5-Qt5 = ">=5.15" +PyQt5-sip = ">=12.8,<13" + +[[package]] +name = "pyqt5-qt5" +version = "5.15.2" +description = "The subset of a Qt installation needed by PyQt5." +category = "main" +optional = false +python-versions = "*" + +[[package]] +name = "pyqt5-sip" +version = "12.9.0" +description = "The sip module support for PyQt5" category = "main" optional = false python-versions = ">=3.5" +[[package]] +name = "pyrsistent" +version = "0.18.0" +description = "Persistent/Functional/Immutable data structures" +category = "main" +optional = false +python-versions = ">=3.6" + [[package]] name = "pytest" -version = "6.2.4" +version = "6.2.5" description = "pytest: simple powerful testing with Python" category = "dev" optional = false @@ -964,7 +1026,7 @@ colorama = {version = "*", markers = "sys_platform == \"win32\""} importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""} iniconfig = "*" packaging = "*" -pluggy = ">=0.12,<1.0.0a1" +pluggy = ">=0.12,<2.0" py = ">=1.8.2" toml = "*" @@ -989,21 +1051,21 @@ testing = ["fields", "hunter", "process-tests", "six", "pytest-xdist", "virtuale [[package]] name = "pytest-print" -version = "0.2.1" +version = "0.3.0" description = "pytest-print adds the printer fixture you can use to print messages to the user (directly to the pytest runner, not stdout)" category = "dev" optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" +python-versions = ">=3.6" [package.dependencies] -pytest = 
">=3.0.0" +pytest = ">=6" [package.extras] -test = ["coverage (>=5)", "pytest (>=4)"] +test = ["coverage (>=5)"] [[package]] name = "python-dateutil" -version = "2.8.1" +version = "2.8.2" description = "Extensions to the standard Python datetime module" category = "main" optional = false @@ -1014,7 +1076,7 @@ six = ">=1.5" [[package]] name = "python-xlib" -version = "0.30" +version = "0.31" description = "Python X Library" category = "main" optional = false @@ -1057,7 +1119,7 @@ python-versions = "*" [[package]] name = "qt.py" -version = "1.3.3" +version = "1.3.6" description = "Python 2 & 3 compatibility wrapper around all Qt bindings - PySide, PySide2, PyQt4 and PyQt5." category = "main" optional = false @@ -1078,21 +1140,21 @@ sphinx = ">=1.3.1" [[package]] name = "requests" -version = "2.25.1" +version = "2.26.0" description = "Python HTTP for Humans." category = "main" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" [package.dependencies] certifi = ">=2017.4.17" -chardet = ">=3.0.2,<5" -idna = ">=2.5,<3" +charset-normalizer = {version = ">=2.0.0,<2.1.0", markers = "python_version >= \"3\""} +idna = {version = ">=2.5,<4", markers = "python_version >= \"3\""} urllib3 = ">=1.21.1,<1.27" [package.extras] -security = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)"] socks = ["PySocks (>=1.5.6,!=1.5.7)", "win-inet-pton"] +use_chardet_on_py3 = ["chardet (>=3.0.2,<5)"] [[package]] name = "rsa" @@ -1135,15 +1197,15 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" [[package]] name = "slack-sdk" -version = "3.6.0" +version = "3.10.1" description = "The Slack API Platform SDK for Python" category = "main" optional = false python-versions = ">=3.6.0" [package.extras] -optional = ["aiodns (>1.0)", "aiohttp (>=3.7.3,<4)", "boto3 (<=2)", "SQLAlchemy (>=1,<2)", "websockets (>=9.1,<10)", "websocket-client (>=0.57,<1)"] -testing = ["pytest (>=5.4,<6)", 
"pytest-asyncio (<1)", "Flask-Sockets (>=0.2,<1)", "pytest-cov (>=2,<3)", "codecov (>=2,<3)", "flake8 (>=3,<4)", "black (==21.5b1)", "psutil (>=5,<6)", "databases (>=0.3)"] +optional = ["aiodns (>1.0)", "aiohttp (>=3.7.3,<4)", "boto3 (<=2)", "SQLAlchemy (>=1,<2)", "websockets (>=9.1,<10)", "websocket-client (>=1,<2)"] +testing = ["pytest (>=5.4,<6)", "pytest-asyncio (<1)", "Flask-Sockets (>=0.2,<1)", "Flask (>=1,<2)", "Werkzeug (<2)", "pytest-cov (>=2,<3)", "codecov (>=2,<3)", "flake8 (>=3,<4)", "black (==21.7b0)", "psutil (>=5,<6)", "databases (>=0.3)", "boto3 (<=2)", "moto (<2)"] [[package]] name = "smmap" @@ -1171,7 +1233,7 @@ python-versions = "*" [[package]] name = "sphinx" -version = "4.0.2" +version = "4.1.2" description = "Python documentation generator" category = "dev" optional = false @@ -1190,14 +1252,14 @@ requests = ">=2.5.0" snowballstemmer = ">=1.1" sphinxcontrib-applehelp = "*" sphinxcontrib-devhelp = "*" -sphinxcontrib-htmlhelp = "*" +sphinxcontrib-htmlhelp = ">=2.0.0" sphinxcontrib-jsmath = "*" sphinxcontrib-qthelp = "*" -sphinxcontrib-serializinghtml = "*" +sphinxcontrib-serializinghtml = ">=1.1.5" [package.extras] docs = ["sphinxcontrib-websupport"] -lint = ["flake8 (>=3.5.0)", "isort", "mypy (>=0.800)", "docutils-stubs"] +lint = ["flake8 (>=3.5.0)", "isort", "mypy (>=0.900)", "docutils-stubs", "types-typed-ast", "types-pkg-resources", "types-requests"] test = ["pytest", "pytest-cov", "html5lib", "cython", "typed-ast"] [[package]] @@ -1339,7 +1401,7 @@ python-versions = "*" [[package]] name = "typing-extensions" -version = "3.10.0.0" +version = "3.10.0.2" description = "Backported and Experimental Type Hints for Python 3.5+" category = "main" optional = false @@ -1355,7 +1417,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" [[package]] name = "urllib3" -version = "1.26.5" +version = "1.26.6" description = "HTTP library with thread-safe connection pooling, file post, and more." 
category = "main" optional = false @@ -1425,7 +1487,7 @@ typing-extensions = {version = ">=3.7.4", markers = "python_version < \"3.8\""} [[package]] name = "zipp" -version = "3.4.1" +version = "3.5.0" description = "Backport of pathlib-compatible object wrapper for zip files" category = "main" optional = false @@ -1433,12 +1495,12 @@ python-versions = ">=3.6" [package.extras] docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)"] -testing = ["pytest (>=4.6)", "pytest-checkdocs (>=1.2.3)", "pytest-flake8", "pytest-cov", "pytest-enabler", "jaraco.itertools", "func-timeout", "pytest-black (>=0.3.7)", "pytest-mypy"] +testing = ["pytest (>=4.6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "jaraco.itertools", "func-timeout", "pytest-black (>=0.3.7)", "pytest-mypy"] [metadata] lock-version = "1.1" python-versions = "3.7.*" -content-hash = "70c5951f20ded8f10757ea030f7a99a49c1ea6773ad944f922533b692a3c5166" +content-hash = "ca2a0258a784674ff489a07d0dc8dd2a22373ee39add02cb4676898b8a6993a1" [metadata.files] acre = [] @@ -1502,8 +1564,8 @@ arrow = [ {file = "arrow-0.17.0.tar.gz", hash = "sha256:ff08d10cda1d36c68657d6ad20d74fbea493d980f8b2d45344e00d6ed2bf6ed4"}, ] astroid = [ - {file = "astroid-2.5.6-py3-none-any.whl", hash = "sha256:4db03ab5fc3340cf619dbc25e42c2cc3755154ce6009469766d7143d1fc2ee4e"}, - {file = "astroid-2.5.6.tar.gz", hash = "sha256:8a398dfce302c13f14bab13e2b14fe385d32b73f4e4853b9bdfb64598baa1975"}, + {file = "astroid-2.7.3-py3-none-any.whl", hash = "sha256:dc1e8b28427d6bbef6b8842b18765ab58f558c42bb80540bd7648c98412af25e"}, + {file = "astroid-2.7.3.tar.gz", hash = "sha256:3b680ce0419b8a771aba6190139a3998d14b413852506d99aff8dc2bf65ee67c"}, ] async-timeout = [ {file = "async-timeout-3.0.1.tar.gz", hash = "sha256:0c3c816a028d47f659d6ff5c745cb2acf1f966da1fe5c19c77a70282b25f4c5f"}, @@ -1526,8 +1588,8 @@ babel = [ {file = "Babel-2.9.1.tar.gz", hash = 
"sha256:bc0c176f9f6a994582230df350aa6e05ba2ebe4b3ac317eab29d9be5d2768da0"}, ] blessed = [ - {file = "blessed-1.18.0-py2.py3-none-any.whl", hash = "sha256:5b5e2f0563d5a668c282f3f5946f7b1abb70c85829461900e607e74d7725106e"}, - {file = "blessed-1.18.0.tar.gz", hash = "sha256:1312879f971330a1b7f2c6341f2ae7e2cbac244bfc9d0ecfbbecd4b0293bc755"}, + {file = "blessed-1.18.1-py2.py3-none-any.whl", hash = "sha256:dd7c0d33db9a2e7f597b446996484d0ed46e1586239db064fb5025008937dcae"}, + {file = "blessed-1.18.1.tar.gz", hash = "sha256:8b09936def6bc06583db99b65636b980075733e13550cb6af262ce724a55da23"}, ] cachetools = [ {file = "cachetools-4.2.2-py3-none-any.whl", hash = "sha256:2cc0b89715337ab6dbba85b5b50effe2b0c74e035d83ee8ed637cf52f12ae001"}, @@ -1538,60 +1600,60 @@ certifi = [ {file = "certifi-2021.5.30.tar.gz", hash = "sha256:2bbf76fd432960138b3ef6dda3dde0544f27cbf8546c458e60baf371917ba9ee"}, ] cffi = [ - {file = "cffi-1.14.5-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:bb89f306e5da99f4d922728ddcd6f7fcebb3241fc40edebcb7284d7514741991"}, - {file = "cffi-1.14.5-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:34eff4b97f3d982fb93e2831e6750127d1355a923ebaeeb565407b3d2f8d41a1"}, - {file = "cffi-1.14.5-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:99cd03ae7988a93dd00bcd9d0b75e1f6c426063d6f03d2f90b89e29b25b82dfa"}, - {file = "cffi-1.14.5-cp27-cp27m-win32.whl", hash = "sha256:65fa59693c62cf06e45ddbb822165394a288edce9e276647f0046e1ec26920f3"}, - {file = "cffi-1.14.5-cp27-cp27m-win_amd64.whl", hash = "sha256:51182f8927c5af975fece87b1b369f722c570fe169f9880764b1ee3bca8347b5"}, - {file = "cffi-1.14.5-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:43e0b9d9e2c9e5d152946b9c5fe062c151614b262fda2e7b201204de0b99e482"}, - {file = "cffi-1.14.5-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:cbde590d4faaa07c72bf979734738f328d239913ba3e043b1e98fe9a39f8b2b6"}, - {file = "cffi-1.14.5-cp35-cp35m-macosx_10_9_x86_64.whl", hash = 
"sha256:5de7970188bb46b7bf9858eb6890aad302577a5f6f75091fd7cdd3ef13ef3045"}, - {file = "cffi-1.14.5-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:a465da611f6fa124963b91bf432d960a555563efe4ed1cc403ba5077b15370aa"}, - {file = "cffi-1.14.5-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:d42b11d692e11b6634f7613ad8df5d6d5f8875f5d48939520d351007b3c13406"}, - {file = "cffi-1.14.5-cp35-cp35m-win32.whl", hash = "sha256:72d8d3ef52c208ee1c7b2e341f7d71c6fd3157138abf1a95166e6165dd5d4369"}, - {file = "cffi-1.14.5-cp35-cp35m-win_amd64.whl", hash = "sha256:29314480e958fd8aab22e4a58b355b629c59bf5f2ac2492b61e3dc06d8c7a315"}, - {file = "cffi-1.14.5-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:3d3dd4c9e559eb172ecf00a2a7517e97d1e96de2a5e610bd9b68cea3925b4892"}, - {file = "cffi-1.14.5-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:48e1c69bbacfc3d932221851b39d49e81567a4d4aac3b21258d9c24578280058"}, - {file = "cffi-1.14.5-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:69e395c24fc60aad6bb4fa7e583698ea6cc684648e1ffb7fe85e3c1ca131a7d5"}, - {file = "cffi-1.14.5-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:9e93e79c2551ff263400e1e4be085a1210e12073a31c2011dbbda14bda0c6132"}, - {file = "cffi-1.14.5-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:24ec4ff2c5c0c8f9c6b87d5bb53555bf267e1e6f70e52e5a9740d32861d36b6f"}, - {file = "cffi-1.14.5-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3c3f39fa737542161d8b0d680df2ec249334cd70a8f420f71c9304bd83c3cbed"}, - {file = "cffi-1.14.5-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:681d07b0d1e3c462dd15585ef5e33cb021321588bebd910124ef4f4fb71aef55"}, - {file = "cffi-1.14.5-cp36-cp36m-win32.whl", hash = "sha256:58e3f59d583d413809d60779492342801d6e82fefb89c86a38e040c16883be53"}, - {file = "cffi-1.14.5-cp36-cp36m-win_amd64.whl", hash = "sha256:005a36f41773e148deac64b08f233873a4d0c18b053d37da83f6af4d9087b813"}, - {file = 
"cffi-1.14.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:2894f2df484ff56d717bead0a5c2abb6b9d2bf26d6960c4604d5c48bbc30ee73"}, - {file = "cffi-1.14.5-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:0857f0ae312d855239a55c81ef453ee8fd24136eaba8e87a2eceba644c0d4c06"}, - {file = "cffi-1.14.5-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:cd2868886d547469123fadc46eac7ea5253ea7fcb139f12e1dfc2bbd406427d1"}, - {file = "cffi-1.14.5-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:35f27e6eb43380fa080dccf676dece30bef72e4a67617ffda586641cd4508d49"}, - {file = "cffi-1.14.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:06d7cd1abac2ffd92e65c0609661866709b4b2d82dd15f611e602b9b188b0b69"}, - {file = "cffi-1.14.5-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0f861a89e0043afec2a51fd177a567005847973be86f709bbb044d7f42fc4e05"}, - {file = "cffi-1.14.5-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cc5a8e069b9ebfa22e26d0e6b97d6f9781302fe7f4f2b8776c3e1daea35f1adc"}, - {file = "cffi-1.14.5-cp37-cp37m-win32.whl", hash = "sha256:9ff227395193126d82e60319a673a037d5de84633f11279e336f9c0f189ecc62"}, - {file = "cffi-1.14.5-cp37-cp37m-win_amd64.whl", hash = "sha256:9cf8022fb8d07a97c178b02327b284521c7708d7c71a9c9c355c178ac4bbd3d4"}, - {file = "cffi-1.14.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8b198cec6c72df5289c05b05b8b0969819783f9418e0409865dac47288d2a053"}, - {file = "cffi-1.14.5-cp38-cp38-manylinux1_i686.whl", hash = "sha256:ad17025d226ee5beec591b52800c11680fca3df50b8b29fe51d882576e039ee0"}, - {file = "cffi-1.14.5-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:6c97d7350133666fbb5cf4abdc1178c812cb205dc6f41d174a7b0f18fb93337e"}, - {file = "cffi-1.14.5-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:8ae6299f6c68de06f136f1f9e69458eae58f1dacf10af5c17353eae03aa0d827"}, - {file = "cffi-1.14.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:04c468b622ed31d408fea2346bec5bbffba2cc44226302a0de1ade9f5ea3d373"}, - {file = "cffi-1.14.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:06db6321b7a68b2bd6df96d08a5adadc1fa0e8f419226e25b2a5fbf6ccc7350f"}, - {file = "cffi-1.14.5-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:293e7ea41280cb28c6fcaaa0b1aa1f533b8ce060b9e701d78511e1e6c4a1de76"}, - {file = "cffi-1.14.5-cp38-cp38-win32.whl", hash = "sha256:b85eb46a81787c50650f2392b9b4ef23e1f126313b9e0e9013b35c15e4288e2e"}, - {file = "cffi-1.14.5-cp38-cp38-win_amd64.whl", hash = "sha256:1f436816fc868b098b0d63b8920de7d208c90a67212546d02f84fe78a9c26396"}, - {file = "cffi-1.14.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1071534bbbf8cbb31b498d5d9db0f274f2f7a865adca4ae429e147ba40f73dea"}, - {file = "cffi-1.14.5-cp39-cp39-manylinux1_i686.whl", hash = "sha256:9de2e279153a443c656f2defd67769e6d1e4163952b3c622dcea5b08a6405322"}, - {file = "cffi-1.14.5-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:6e4714cc64f474e4d6e37cfff31a814b509a35cb17de4fb1999907575684479c"}, - {file = "cffi-1.14.5-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:158d0d15119b4b7ff6b926536763dc0714313aa59e320ddf787502c70c4d4bee"}, - {file = "cffi-1.14.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1bf1ac1984eaa7675ca8d5745a8cb87ef7abecb5592178406e55858d411eadc0"}, - {file = "cffi-1.14.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:df5052c5d867c1ea0b311fb7c3cd28b19df469c056f7fdcfe88c7473aa63e333"}, - {file = "cffi-1.14.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:24a570cd11895b60829e941f2613a4f79df1a27344cbbb82164ef2e0116f09c7"}, - {file = "cffi-1.14.5-cp39-cp39-win32.whl", hash = "sha256:afb29c1ba2e5a3736f1c301d9d0abe3ec8b86957d04ddfa9d7a6a42b9367e396"}, - {file = "cffi-1.14.5-cp39-cp39-win_amd64.whl", hash = "sha256:f2d45f97ab6bb54753eab54fffe75aaf3de4ff2341c9daee1987ee1837636f1d"}, - {file = 
"cffi-1.14.5.tar.gz", hash = "sha256:fd78e5fee591709f32ef6edb9a015b4aa1a5022598e36227500c8f4e02328d9c"}, + {file = "cffi-1.14.6-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:22b9c3c320171c108e903d61a3723b51e37aaa8c81255b5e7ce102775bd01e2c"}, + {file = "cffi-1.14.6-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:f0c5d1acbfca6ebdd6b1e3eded8d261affb6ddcf2186205518f1428b8569bb99"}, + {file = "cffi-1.14.6-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:99f27fefe34c37ba9875f224a8f36e31d744d8083e00f520f133cab79ad5e819"}, + {file = "cffi-1.14.6-cp27-cp27m-win32.whl", hash = "sha256:55af55e32ae468e9946f741a5d51f9896da6b9bf0bbdd326843fec05c730eb20"}, + {file = "cffi-1.14.6-cp27-cp27m-win_amd64.whl", hash = "sha256:7bcac9a2b4fdbed2c16fa5681356d7121ecabf041f18d97ed5b8e0dd38a80224"}, + {file = "cffi-1.14.6-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:ed38b924ce794e505647f7c331b22a693bee1538fdf46b0222c4717b42f744e7"}, + {file = "cffi-1.14.6-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:e22dcb48709fc51a7b58a927391b23ab37eb3737a98ac4338e2448bef8559b33"}, + {file = "cffi-1.14.6-cp35-cp35m-macosx_10_9_x86_64.whl", hash = "sha256:aedb15f0a5a5949ecb129a82b72b19df97bbbca024081ed2ef88bd5c0a610534"}, + {file = "cffi-1.14.6-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:48916e459c54c4a70e52745639f1db524542140433599e13911b2f329834276a"}, + {file = "cffi-1.14.6-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:f627688813d0a4140153ff532537fbe4afea5a3dffce1f9deb7f91f848a832b5"}, + {file = "cffi-1.14.6-cp35-cp35m-win32.whl", hash = "sha256:f0010c6f9d1a4011e429109fda55a225921e3206e7f62a0c22a35344bfd13cca"}, + {file = "cffi-1.14.6-cp35-cp35m-win_amd64.whl", hash = "sha256:57e555a9feb4a8460415f1aac331a2dc833b1115284f7ded7278b54afc5bd218"}, + {file = "cffi-1.14.6-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:e8c6a99be100371dbb046880e7a282152aa5d6127ae01783e37662ef73850d8f"}, + {file = "cffi-1.14.6-cp36-cp36m-manylinux1_i686.whl", hash = 
"sha256:19ca0dbdeda3b2615421d54bef8985f72af6e0c47082a8d26122adac81a95872"}, + {file = "cffi-1.14.6-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:d950695ae4381ecd856bcaf2b1e866720e4ab9a1498cba61c602e56630ca7195"}, + {file = "cffi-1.14.6-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e9dc245e3ac69c92ee4c167fbdd7428ec1956d4e754223124991ef29eb57a09d"}, + {file = "cffi-1.14.6-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a8661b2ce9694ca01c529bfa204dbb144b275a31685a075ce123f12331be790b"}, + {file = "cffi-1.14.6-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b315d709717a99f4b27b59b021e6207c64620790ca3e0bde636a6c7f14618abb"}, + {file = "cffi-1.14.6-cp36-cp36m-win32.whl", hash = "sha256:80b06212075346b5546b0417b9f2bf467fea3bfe7352f781ffc05a8ab24ba14a"}, + {file = "cffi-1.14.6-cp36-cp36m-win_amd64.whl", hash = "sha256:a9da7010cec5a12193d1af9872a00888f396aba3dc79186604a09ea3ee7c029e"}, + {file = "cffi-1.14.6-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:4373612d59c404baeb7cbd788a18b2b2a8331abcc84c3ba40051fcd18b17a4d5"}, + {file = "cffi-1.14.6-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:f10afb1004f102c7868ebfe91c28f4a712227fe4cb24974350ace1f90e1febbf"}, + {file = "cffi-1.14.6-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:fd4305f86f53dfd8cd3522269ed7fc34856a8ee3709a5e28b2836b2db9d4cd69"}, + {file = "cffi-1.14.6-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6d6169cb3c6c2ad50db5b868db6491a790300ade1ed5d1da29289d73bbe40b56"}, + {file = "cffi-1.14.6-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5d4b68e216fc65e9fe4f524c177b54964af043dde734807586cf5435af84045c"}, + {file = "cffi-1.14.6-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:33791e8a2dc2953f28b8d8d300dde42dd929ac28f974c4b4c6272cb2955cb762"}, + {file = "cffi-1.14.6-cp37-cp37m-win32.whl", hash = 
"sha256:0c0591bee64e438883b0c92a7bed78f6290d40bf02e54c5bf0978eaf36061771"}, + {file = "cffi-1.14.6-cp37-cp37m-win_amd64.whl", hash = "sha256:8eb687582ed7cd8c4bdbff3df6c0da443eb89c3c72e6e5dcdd9c81729712791a"}, + {file = "cffi-1.14.6-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ba6f2b3f452e150945d58f4badd92310449876c4c954836cfb1803bdd7b422f0"}, + {file = "cffi-1.14.6-cp38-cp38-manylinux1_i686.whl", hash = "sha256:64fda793737bc4037521d4899be780534b9aea552eb673b9833b01f945904c2e"}, + {file = "cffi-1.14.6-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:9f3e33c28cd39d1b655ed1ba7247133b6f7fc16fa16887b120c0c670e35ce346"}, + {file = "cffi-1.14.6-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:26bb2549b72708c833f5abe62b756176022a7b9a7f689b571e74c8478ead51dc"}, + {file = "cffi-1.14.6-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:eb687a11f0a7a1839719edd80f41e459cc5366857ecbed383ff376c4e3cc6afd"}, + {file = "cffi-1.14.6-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d2ad4d668a5c0645d281dcd17aff2be3212bc109b33814bbb15c4939f44181cc"}, + {file = "cffi-1.14.6-cp38-cp38-win32.whl", hash = "sha256:487d63e1454627c8e47dd230025780e91869cfba4c753a74fda196a1f6ad6548"}, + {file = "cffi-1.14.6-cp38-cp38-win_amd64.whl", hash = "sha256:c33d18eb6e6bc36f09d793c0dc58b0211fccc6ae5149b808da4a62660678b156"}, + {file = "cffi-1.14.6-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:06c54a68935738d206570b20da5ef2b6b6d92b38ef3ec45c5422c0ebaf338d4d"}, + {file = "cffi-1.14.6-cp39-cp39-manylinux1_i686.whl", hash = "sha256:f174135f5609428cc6e1b9090f9268f5c8935fddb1b25ccb8255a2d50de6789e"}, + {file = "cffi-1.14.6-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:f3ebe6e73c319340830a9b2825d32eb6d8475c1dac020b4f0aa774ee3b898d1c"}, + {file = "cffi-1.14.6-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3c8d896becff2fa653dc4438b54a5a25a971d1f4110b32bd3068db3722c80202"}, + {file = 
"cffi-1.14.6-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4922cd707b25e623b902c86188aca466d3620892db76c0bdd7b99a3d5e61d35f"}, + {file = "cffi-1.14.6-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c9e005e9bd57bc987764c32a1bee4364c44fdc11a3cc20a40b93b444984f2b87"}, + {file = "cffi-1.14.6-cp39-cp39-win32.whl", hash = "sha256:eb9e2a346c5238a30a746893f23a9535e700f8192a68c07c0258e7ece6ff3728"}, + {file = "cffi-1.14.6-cp39-cp39-win_amd64.whl", hash = "sha256:818014c754cd3dba7229c0f5884396264d51ffb87ec86e927ef0be140bfdb0d2"}, + {file = "cffi-1.14.6.tar.gz", hash = "sha256:c9a875ce9d7fe32887784274dd533c57909b7b1dcadcc128a2ac21331a9765dd"}, ] chardet = [ {file = "chardet-4.0.0-py2.py3-none-any.whl", hash = "sha256:f864054d66fd9118f2e67044ac8981a54775ec5b67aed0441892edb553d21da5"}, {file = "chardet-4.0.0.tar.gz", hash = "sha256:0d6f53a15db4120f2b08c94f11e7d93d2c911ee118b6b30a04ec3ee8310179fa"}, ] +charset-normalizer = [ + {file = "charset-normalizer-2.0.4.tar.gz", hash = "sha256:f23667ebe1084be45f6ae0538e4a5a865206544097e4e8bbcacf42cd02a348f3"}, + {file = "charset_normalizer-2.0.4-py3-none-any.whl", hash = "sha256:0c8911edd15d19223366a194a513099a302055a962bca2cec0f54b8b63175d8b"}, +] click = [ {file = "click-7.1.2-py2.py3-none-any.whl", hash = "sha256:dacca89f4bfadd5de3d7489b7c8a566eee0d3676333fbb50030263894c38c0dc"}, {file = "click-7.1.2.tar.gz", hash = "sha256:d2b5255c7c6349bc1bd1e59e08cd12acbbd63ce649f2588755783aa94dfb6b1a"}, @@ -1667,30 +1729,25 @@ coverage = [ {file = "coverage-5.5.tar.gz", hash = "sha256:ebe78fe9a0e874362175b02371bdfbee64d8edc42a044253ddf4ee7d3c15212c"}, ] cryptography = [ - {file = "cryptography-3.4.7-cp36-abi3-macosx_10_10_x86_64.whl", hash = "sha256:3d8427734c781ea5f1b41d6589c293089704d4759e34597dce91014ac125aad1"}, - {file = "cryptography-3.4.7-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:8e56e16617872b0957d1c9742a3f94b43533447fd78321514abbe7db216aa250"}, - {file = 
"cryptography-3.4.7-cp36-abi3-manylinux2010_x86_64.whl", hash = "sha256:37340614f8a5d2fb9aeea67fd159bfe4f5f4ed535b1090ce8ec428b2f15a11f2"}, - {file = "cryptography-3.4.7-cp36-abi3-manylinux2014_aarch64.whl", hash = "sha256:240f5c21aef0b73f40bb9f78d2caff73186700bf1bc6b94285699aff98cc16c6"}, - {file = "cryptography-3.4.7-cp36-abi3-manylinux2014_x86_64.whl", hash = "sha256:1e056c28420c072c5e3cb36e2b23ee55e260cb04eee08f702e0edfec3fb51959"}, - {file = "cryptography-3.4.7-cp36-abi3-win32.whl", hash = "sha256:0f1212a66329c80d68aeeb39b8a16d54ef57071bf22ff4e521657b27372e327d"}, - {file = "cryptography-3.4.7-cp36-abi3-win_amd64.whl", hash = "sha256:de4e5f7f68220d92b7637fc99847475b59154b7a1b3868fb7385337af54ac9ca"}, - {file = "cryptography-3.4.7-pp36-pypy36_pp73-manylinux2010_x86_64.whl", hash = "sha256:26965837447f9c82f1855e0bc8bc4fb910240b6e0d16a664bb722df3b5b06873"}, - {file = "cryptography-3.4.7-pp36-pypy36_pp73-manylinux2014_x86_64.whl", hash = "sha256:eb8cc2afe8b05acbd84a43905832ec78e7b3873fb124ca190f574dca7389a87d"}, - {file = "cryptography-3.4.7-pp37-pypy37_pp73-manylinux2010_x86_64.whl", hash = "sha256:7ec5d3b029f5fa2b179325908b9cd93db28ab7b85bb6c1db56b10e0b54235177"}, - {file = "cryptography-3.4.7-pp37-pypy37_pp73-manylinux2014_x86_64.whl", hash = "sha256:ee77aa129f481be46f8d92a1a7db57269a2f23052d5f2433b4621bb457081cc9"}, - {file = "cryptography-3.4.7.tar.gz", hash = "sha256:3d10de8116d25649631977cb37da6cbdd2d6fa0e0281d014a5b7d337255ca713"}, -] -cx-freeze = [ - {file = "cx_Freeze-6.6-cp36-cp36m-win32.whl", hash = "sha256:b3d3a6bcd1a07c50b4e1c907f14842642156110e63a99cd5c73b8a24751e9b97"}, - {file = "cx_Freeze-6.6-cp36-cp36m-win_amd64.whl", hash = "sha256:1935266ec644ea4f7e584985f44cefc0622a449a09980d990833a1a2afcadac8"}, - {file = "cx_Freeze-6.6-cp37-cp37m-win32.whl", hash = "sha256:1eac2b0f254319cc641ce25bd83337effd7936092562fde701f3ffb40e0274ec"}, - {file = "cx_Freeze-6.6-cp37-cp37m-win_amd64.whl", hash = 
"sha256:2bc46ef6d510811b6002f34a3ae4cbfdea44e18644febd2a404d3ee8e48a9fc4"}, - {file = "cx_Freeze-6.6-cp38-cp38-win32.whl", hash = "sha256:46eb50ebc46f7ae236d16c6a52671ab0f7bb479bea668da19f4b6de3cc413e9e"}, - {file = "cx_Freeze-6.6-cp38-cp38-win_amd64.whl", hash = "sha256:8c3b00476ce385bb58595bffce55aed031e5a6e16ab6e14d8bee9d1d569e46c3"}, - {file = "cx_Freeze-6.6-cp39-cp39-win32.whl", hash = "sha256:6e9340cbcf52d4836980ecc83ddba4f7704ff6654dd41168c146b74f512977ce"}, - {file = "cx_Freeze-6.6-cp39-cp39-win_amd64.whl", hash = "sha256:2fcf1c8b77ae5c06f45be3a9aff79e1dd808c0d624e97561f840dec5ea9b214a"}, - {file = "cx_Freeze-6.6.tar.gz", hash = "sha256:c4af8ad3f7e7d71e291c1dec5d0fb26bbe92df834b098ed35434c901fbd6762f"}, + {file = "cryptography-3.4.8-cp36-abi3-macosx_10_10_x86_64.whl", hash = "sha256:a00cf305f07b26c351d8d4e1af84ad7501eca8a342dedf24a7acb0e7b7406e14"}, + {file = "cryptography-3.4.8-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:f44d141b8c4ea5eb4dbc9b3ad992d45580c1d22bf5e24363f2fbf50c2d7ae8a7"}, + {file = "cryptography-3.4.8-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:0a7dcbcd3f1913f664aca35d47c1331fce738d44ec34b7be8b9d332151b0b01e"}, + {file = "cryptography-3.4.8-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:34dae04a0dce5730d8eb7894eab617d8a70d0c97da76b905de9efb7128ad7085"}, + {file = "cryptography-3.4.8-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1eb7bb0df6f6f583dd8e054689def236255161ebbcf62b226454ab9ec663746b"}, + {file = "cryptography-3.4.8-cp36-abi3-manylinux_2_24_x86_64.whl", hash = "sha256:9965c46c674ba8cc572bc09a03f4c649292ee73e1b683adb1ce81e82e9a6a0fb"}, + {file = "cryptography-3.4.8-cp36-abi3-win32.whl", hash = "sha256:21ca464b3a4b8d8e86ba0ee5045e103a1fcfac3b39319727bc0fc58c09c6aff7"}, + {file = "cryptography-3.4.8-cp36-abi3-win_amd64.whl", hash = "sha256:3520667fda779eb788ea00080124875be18f2d8f0848ec00733c0ec3bb8219fc"}, + {file = 
"cryptography-3.4.8-pp36-pypy36_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:d2a6e5ef66503da51d2110edf6c403dc6b494cc0082f85db12f54e9c5d4c3ec5"}, + {file = "cryptography-3.4.8-pp36-pypy36_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a305600e7a6b7b855cd798e00278161b681ad6e9b7eca94c721d5f588ab212af"}, + {file = "cryptography-3.4.8-pp36-pypy36_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:3fa3a7ccf96e826affdf1a0a9432be74dc73423125c8f96a909e3835a5ef194a"}, + {file = "cryptography-3.4.8-pp37-pypy37_pp73-macosx_10_10_x86_64.whl", hash = "sha256:d9ec0e67a14f9d1d48dd87a2531009a9b251c02ea42851c060b25c782516ff06"}, + {file = "cryptography-3.4.8-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:5b0fbfae7ff7febdb74b574055c7466da334a5371f253732d7e2e7525d570498"}, + {file = "cryptography-3.4.8-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94fff993ee9bc1b2440d3b7243d488c6a3d9724cc2b09cdb297f6a886d040ef7"}, + {file = "cryptography-3.4.8-pp37-pypy37_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:8695456444f277af73a4877db9fc979849cd3ee74c198d04fc0776ebc3db52b9"}, + {file = "cryptography-3.4.8-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:cd65b60cfe004790c795cc35f272e41a3df4631e2fb6b35aa7ac6ef2859d554e"}, + {file = "cryptography-3.4.8.tar.gz", hash = "sha256:94cc5ed4ceaefcbe5bf38c8fba6a21fc1d365bb8fb826ea1688e3370b2e24a1c"}, ] +cx-freeze = [] cx-logging = [ {file = "cx_Logging-3.0-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:9fcd297e5c51470521c47eff0f86ba844aeca6be97e13c3e2114ebdf03fa3c96"}, {file = "cx_Logging-3.0-cp36-cp36m-win32.whl", hash = "sha256:0df4be47c5022cc54316949e283403214568ef599817ced0c0972183d6d4fabb"}, @@ -1737,20 +1794,20 @@ gitdb = [ {file = "gitdb-4.0.7.tar.gz", hash = "sha256:96bf5c08b157a666fec41129e6d327235284cca4c81e92109260f353ba138005"}, ] gitpython = [ - {file = "GitPython-3.1.17-py3-none-any.whl", hash = 
"sha256:29fe82050709760081f588dd50ce83504feddbebdc4da6956d02351552b1c135"}, - {file = "GitPython-3.1.17.tar.gz", hash = "sha256:ee24bdc93dce357630764db659edaf6b8d664d4ff5447ccfeedd2dc5c253f41e"}, + {file = "GitPython-3.1.20-py3-none-any.whl", hash = "sha256:b1e1c269deab1b08ce65403cf14e10d2ef1f6c89e33ea7c5e5bb0222ea593b8a"}, + {file = "GitPython-3.1.20.tar.gz", hash = "sha256:df0e072a200703a65387b0cfdf0466e3bab729c0458cf6b7349d0e9877636519"}, ] google-api-core = [ - {file = "google-api-core-1.30.0.tar.gz", hash = "sha256:0724d354d394b3d763bc10dfee05807813c5210f0bd9b8e2ddf6b6925603411c"}, - {file = "google_api_core-1.30.0-py2.py3-none-any.whl", hash = "sha256:92cd9e9f366e84bfcf2524e34d2dc244906c645e731962617ba620da1620a1e0"}, + {file = "google-api-core-1.31.2.tar.gz", hash = "sha256:8500aded318fdb235130bf183c726a05a9cb7c4b09c266bd5119b86cdb8a4d10"}, + {file = "google_api_core-1.31.2-py2.py3-none-any.whl", hash = "sha256:384459a0dc98c1c8cd90b28dc5800b8705e0275a673a7144a513ae80fc77950b"}, ] google-api-python-client = [ {file = "google-api-python-client-1.12.8.tar.gz", hash = "sha256:f3b9684442eec2cfe9f9bb48e796ef919456b82142c7528c5fd527e5224f08bb"}, {file = "google_api_python_client-1.12.8-py2.py3-none-any.whl", hash = "sha256:3c4c4ca46b5c21196bec7ee93453443e477d82cbfa79234d1ce0645f81170eaf"}, ] google-auth = [ - {file = "google-auth-1.31.0.tar.gz", hash = "sha256:154f7889c5d679a6f626f36adb12afbd4dbb0a9a04ec575d989d6ba79c4fd65e"}, - {file = "google_auth-1.31.0-py2.py3-none-any.whl", hash = "sha256:6d47c79b5d09fbc7e8355fd9594cc4cf65fdde5d401c63951eaac4baa1ba2ae1"}, + {file = "google-auth-1.35.0.tar.gz", hash = "sha256:b7033be9028c188ee30200b204ea00ed82ea1162e8ac1df4aa6ded19a191d88e"}, + {file = "google_auth-1.35.0-py2.py3-none-any.whl", hash = "sha256:997516b42ecb5b63e8d80f5632c1a61dddf41d2a4c2748057837e06e00014258"}, ] google-auth-httplib2 = [ {file = "google-auth-httplib2-0.1.0.tar.gz", hash = "sha256:a07c39fd632becacd3f07718dfd6021bf396978f03ad3ce4321d060015cc30ac"}, 
@@ -1765,32 +1822,32 @@ httplib2 = [ {file = "httplib2-0.19.1.tar.gz", hash = "sha256:0b12617eeca7433d4c396a100eaecfa4b08ee99aa881e6df6e257a7aad5d533d"}, ] idna = [ - {file = "idna-2.10-py2.py3-none-any.whl", hash = "sha256:b97d804b1e9b523befed77c48dacec60e6dcb0b5391d57af6a65a312a90648c0"}, - {file = "idna-2.10.tar.gz", hash = "sha256:b307872f855b18632ce0c21c5e45be78c0ea7ae4c15c828c20788b26921eb3f6"}, + {file = "idna-3.2-py3-none-any.whl", hash = "sha256:14475042e284991034cb48e06f6851428fb14c4dc953acd9be9a5e95c7b6dd7a"}, + {file = "idna-3.2.tar.gz", hash = "sha256:467fbad99067910785144ce333826c71fb0e63a425657295239737f7ecd125f3"}, ] imagesize = [ {file = "imagesize-1.2.0-py2.py3-none-any.whl", hash = "sha256:6965f19a6a2039c7d48bca7dba2473069ff854c36ae6f19d2cde309d998228a1"}, {file = "imagesize-1.2.0.tar.gz", hash = "sha256:b1f6b5a4eab1f73479a50fb79fcf729514a900c341d8503d62a62dbc4127a2b1"}, ] importlib-metadata = [ - {file = "importlib_metadata-4.5.0-py3-none-any.whl", hash = "sha256:833b26fb89d5de469b24a390e9df088d4e52e4ba33b01dc5e0e4f41b81a16c00"}, - {file = "importlib_metadata-4.5.0.tar.gz", hash = "sha256:b142cc1dd1342f31ff04bb7d022492b09920cb64fed867cd3ea6f80fe3ebd139"}, + {file = "importlib_metadata-4.8.1-py3-none-any.whl", hash = "sha256:b618b6d2d5ffa2f16add5697cf57a46c76a56229b0ed1c438322e4e95645bd15"}, + {file = "importlib_metadata-4.8.1.tar.gz", hash = "sha256:f284b3e11256ad1e5d03ab86bb2ccd6f5339688ff17a4d797a0fe7df326f23b1"}, ] iniconfig = [ {file = "iniconfig-1.1.1-py2.py3-none-any.whl", hash = "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3"}, {file = "iniconfig-1.1.1.tar.gz", hash = "sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32"}, ] isort = [ - {file = "isort-5.8.0-py3-none-any.whl", hash = "sha256:2bb1680aad211e3c9944dbce1d4ba09a989f04e238296c87fe2139faa26d655d"}, - {file = "isort-5.8.0.tar.gz", hash = "sha256:0a943902919f65c5684ac4e0154b1ad4fac6dcaa5d9f3426b732f1c8b5419be6"}, + {file = 
"isort-5.9.3-py3-none-any.whl", hash = "sha256:e17d6e2b81095c9db0a03a8025a957f334d6ea30b26f9ec70805411e5c7c81f2"}, + {file = "isort-5.9.3.tar.gz", hash = "sha256:9c2ea1e62d871267b78307fe511c0838ba0da28698c5732d54e2790bf3ba9899"}, ] jedi = [ {file = "jedi-0.13.3-py2.py3-none-any.whl", hash = "sha256:2c6bcd9545c7d6440951b12b44d373479bf18123a401a52025cf98563fbd826c"}, {file = "jedi-0.13.3.tar.gz", hash = "sha256:2bb0603e3506f708e792c7f4ad8fc2a7a9d9c2d292a358fbbd58da531695595b"}, ] jeepney = [ - {file = "jeepney-0.6.0-py3-none-any.whl", hash = "sha256:aec56c0eb1691a841795111e184e13cad504f7703b9a64f63020816afa79a8ae"}, - {file = "jeepney-0.6.0.tar.gz", hash = "sha256:7d59b6622675ca9e993a6bd38de845051d315f8b0c72cca3aef733a20b648657"}, + {file = "jeepney-0.7.1-py3-none-any.whl", hash = "sha256:1b5a0ea5c0e7b166b2f5895b91a08c14de8915afda4407fb5022a195224958ac"}, + {file = "jeepney-0.7.1.tar.gz", hash = "sha256:fa9e232dfa0c498bd0b8a3a73b8d8a31978304dcef0515adc859d4e096f96f4f"}, ] jinja2 = [ {file = "Jinja2-2.11.3-py2.py3-none-any.whl", hash = "sha256:03e47ad063331dd6a3f04a43eddca8a966a26ba0c5b7207a9a9e4e08f1b29419"}, @@ -1836,12 +1893,22 @@ log4mongo = [ {file = "log4mongo-1.7.0.tar.gz", hash = "sha256:dc374617206162a0b14167fbb5feac01dbef587539a235dadba6200362984a68"}, ] markupsafe = [ + {file = "MarkupSafe-2.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d8446c54dc28c01e5a2dbac5a25f071f6653e6e40f3a8818e8b45d790fe6ef53"}, + {file = "MarkupSafe-2.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:36bc903cbb393720fad60fc28c10de6acf10dc6cc883f3e24ee4012371399a38"}, + {file = "MarkupSafe-2.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2d7d807855b419fc2ed3e631034685db6079889a1f01d5d9dac950f764da3dad"}, + {file = "MarkupSafe-2.0.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:add36cb2dbb8b736611303cd3bfcee00afd96471b09cda130da3581cbdc56a6d"}, + {file = 
"MarkupSafe-2.0.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:168cd0a3642de83558a5153c8bd34f175a9a6e7f6dc6384b9655d2697312a646"}, + {file = "MarkupSafe-2.0.1-cp310-cp310-win32.whl", hash = "sha256:99df47edb6bda1249d3e80fdabb1dab8c08ef3975f69aed437cb69d0a5de1e28"}, + {file = "MarkupSafe-2.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:e0f138900af21926a02425cf736db95be9f4af72ba1bb21453432a07f6082134"}, {file = "MarkupSafe-2.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:f9081981fe268bd86831e5c75f7de206ef275defcb82bc70740ae6dc507aee51"}, {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:0955295dd5eec6cb6cc2fe1698f4c6d84af2e92de33fbcac4111913cd100a6ff"}, {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:0446679737af14f45767963a1a9ef7620189912317d095f2d9ffa183a4d25d2b"}, {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:f826e31d18b516f653fe296d967d700fddad5901ae07c622bb3705955e1faa94"}, {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:fa130dd50c57d53368c9d59395cb5526eda596d3ffe36666cd81a44d56e48872"}, {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:905fec760bd2fa1388bb5b489ee8ee5f7291d692638ea5f67982d968366bef9f"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bf5d821ffabf0ef3533c39c518f3357b171a1651c1ff6827325e4489b0e46c3c"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:0d4b31cc67ab36e3392bbf3862cfbadac3db12bdd8b02a2731f509ed5b829724"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:baa1a4e8f868845af802979fcdbf0bb11f94f1cb7ced4c4b8a351bb60d108145"}, {file = "MarkupSafe-2.0.1-cp36-cp36m-win32.whl", hash = 
"sha256:6c4ca60fa24e85fe25b912b01e62cb969d69a23a5d5867682dd3e80b5b02581d"}, {file = "MarkupSafe-2.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:b2f4bf27480f5e5e8ce285a8c8fd176c0b03e93dcc6646477d4630e83440c6a9"}, {file = "MarkupSafe-2.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0717a7390a68be14b8c793ba258e075c6f4ca819f15edfc2a3a027c823718567"}, @@ -1850,14 +1917,21 @@ markupsafe = [ {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:d7f9850398e85aba693bb640262d3611788b1f29a79f0c93c565694658f4071f"}, {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:6a7fae0dd14cf60ad5ff42baa2e95727c3d81ded453457771d02b7d2b3f9c0c2"}, {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:b7f2d075102dc8c794cbde1947378051c4e5180d52d276987b8d28a3bd58c17d"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e9936f0b261d4df76ad22f8fee3ae83b60d7c3e871292cd42f40b81b70afae85"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:2a7d351cbd8cfeb19ca00de495e224dea7e7d919659c2841bbb7f420ad03e2d6"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:60bf42e36abfaf9aff1f50f52644b336d4f0a3fd6d8a60ca0d054ac9f713a864"}, {file = "MarkupSafe-2.0.1-cp37-cp37m-win32.whl", hash = "sha256:a30e67a65b53ea0a5e62fe23682cfe22712e01f453b95233b25502f7c61cb415"}, {file = "MarkupSafe-2.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:611d1ad9a4288cf3e3c16014564df047fe08410e628f89805e475368bd304914"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5bb28c636d87e840583ee3adeb78172efc47c8b26127267f54a9c0ec251d41a9"}, {file = "MarkupSafe-2.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:be98f628055368795d818ebf93da628541e10b75b41c559fdf36d104c5787066"}, {file = 
"MarkupSafe-2.0.1-cp38-cp38-manylinux1_i686.whl", hash = "sha256:1d609f577dc6e1aa17d746f8bd3c31aa4d258f4070d61b2aa5c4166c1539de35"}, {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7d91275b0245b1da4d4cfa07e0faedd5b0812efc15b702576d103293e252af1b"}, {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:01a9b8ea66f1658938f65b93a85ebe8bc016e6769611be228d797c9d998dd298"}, {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:47ab1e7b91c098ab893b828deafa1203de86d0bc6ab587b160f78fe6c4011f75"}, {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:97383d78eb34da7e1fa37dd273c20ad4320929af65d156e35a5e2d89566d9dfb"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6fcf051089389abe060c9cd7caa212c707e58153afa2c649f00346ce6d260f1b"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:5855f8438a7d1d458206a2466bf82b0f104a3724bf96a1c781ab731e4201731a"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:3dd007d54ee88b46be476e293f48c85048603f5f516008bee124ddd891398ed6"}, {file = "MarkupSafe-2.0.1-cp38-cp38-win32.whl", hash = "sha256:023cb26ec21ece8dc3907c0e8320058b2e0cb3c55cf9564da612bc325bed5e64"}, {file = "MarkupSafe-2.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:984d76483eb32f1bcb536dc27e4ad56bba4baa70be32fa87152832cdd9db0833"}, {file = "MarkupSafe-2.0.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:2ef54abee730b502252bcdf31b10dacb0a416229b72c18b19e24a4509f273d26"}, @@ -1867,6 +1941,9 @@ markupsafe = [ {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:4efca8f86c54b22348a5467704e3fec767b2db12fc39c6d963168ab1d3fc9135"}, {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux2010_x86_64.whl", hash = 
"sha256:ab3ef638ace319fa26553db0624c4699e31a28bb2a835c5faca8f8acf6a5a902"}, {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:f8ba0e8349a38d3001fae7eadded3f6606f0da5d748ee53cc1dab1d6527b9509"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c47adbc92fc1bb2b3274c4b3a43ae0e4573d9fbff4f54cd484555edbf030baf1"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:37205cac2a79194e3750b0af2a5720d95f786a55ce7df90c3af697bfa100eaac"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:1f2ade76b9903f39aa442b4aadd2177decb66525062db244b35d71d0ee8599b6"}, {file = "MarkupSafe-2.0.1-cp39-cp39-win32.whl", hash = "sha256:10f82115e21dc0dfec9ab5c0223652f7197feb168c940f3ef61563fc2d6beb74"}, {file = "MarkupSafe-2.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:693ce3f9e70a6cf7d2fb9e6c9d8b204b6b39897a2c4a1aa65728d5ac97dcc1d8"}, {file = "MarkupSafe-2.0.1.tar.gz", hash = "sha256:594c67807fb16238b30c44bdf74f36c02cdf22d1c8cda91ef8a0ed8dabf5620a"}, @@ -1916,55 +1993,79 @@ multidict = [ ] opentimelineio = [] packaging = [ - {file = "packaging-20.9-py2.py3-none-any.whl", hash = "sha256:67714da7f7bc052e064859c05c595155bd1ee9f69f76557e21f051443c20947a"}, - {file = "packaging-20.9.tar.gz", hash = "sha256:5b327ac1320dc863dca72f4514ecc086f31186744b84a230374cc1fd776feae5"}, + {file = "packaging-21.0-py3-none-any.whl", hash = "sha256:c86254f9220d55e31cc94d69bade760f0847da8000def4dfe1c6b872fd14ff14"}, + {file = "packaging-21.0.tar.gz", hash = "sha256:7dc96269f53a4ccec5c0670940a4281106dd0bb343f47b7471f779df49c2fbe7"}, ] parso = [ {file = "parso-0.8.2-py2.py3-none-any.whl", hash = "sha256:a8c4922db71e4fdb90e0d0bc6e50f9b273d3397925e5e60a717e719201778d22"}, {file = "parso-0.8.2.tar.gz", hash = 
"sha256:12b83492c6239ce32ff5eed6d3639d6a536170723c6f3f1506869f1ace413398"}, ] pathlib2 = [ - {file = "pathlib2-2.3.5-py2.py3-none-any.whl", hash = "sha256:0ec8205a157c80d7acc301c0b18fbd5d44fe655968f5d947b6ecef5290fc35db"}, - {file = "pathlib2-2.3.5.tar.gz", hash = "sha256:6cd9a47b597b37cc57de1c05e56fb1a1c9cc9fab04fe78c29acd090418529868"}, + {file = "pathlib2-2.3.6-py2.py3-none-any.whl", hash = "sha256:3a130b266b3a36134dcc79c17b3c7ac9634f083825ca6ea9d8f557ee6195c9c8"}, + {file = "pathlib2-2.3.6.tar.gz", hash = "sha256:7d8bcb5555003cdf4a8d2872c538faa3a0f5d20630cb360e518ca3b981795e5f"}, ] pillow = [ - {file = "Pillow-8.2.0-cp36-cp36m-macosx_10_10_x86_64.whl", hash = "sha256:dc38f57d8f20f06dd7c3161c59ca2c86893632623f33a42d592f097b00f720a9"}, - {file = "Pillow-8.2.0-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:a013cbe25d20c2e0c4e85a9daf438f85121a4d0344ddc76e33fd7e3965d9af4b"}, - {file = "Pillow-8.2.0-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:8bb1e155a74e1bfbacd84555ea62fa21c58e0b4e7e6b20e4447b8d07990ac78b"}, - {file = "Pillow-8.2.0-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:c5236606e8570542ed424849f7852a0ff0bce2c4c8d0ba05cc202a5a9c97dee9"}, - {file = "Pillow-8.2.0-cp36-cp36m-win32.whl", hash = "sha256:12e5e7471f9b637762453da74e390e56cc43e486a88289995c1f4c1dc0bfe727"}, - {file = "Pillow-8.2.0-cp36-cp36m-win_amd64.whl", hash = "sha256:5afe6b237a0b81bd54b53f835a153770802f164c5570bab5e005aad693dab87f"}, - {file = "Pillow-8.2.0-cp37-cp37m-macosx_10_10_x86_64.whl", hash = "sha256:cb7a09e173903541fa888ba010c345893cd9fc1b5891aaf060f6ca77b6a3722d"}, - {file = "Pillow-8.2.0-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:0d19d70ee7c2ba97631bae1e7d4725cdb2ecf238178096e8c82ee481e189168a"}, - {file = "Pillow-8.2.0-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:083781abd261bdabf090ad07bb69f8f5599943ddb539d64497ed021b2a67e5a9"}, - {file = "Pillow-8.2.0-cp37-cp37m-manylinux2014_aarch64.whl", hash = 
"sha256:c6b39294464b03457f9064e98c124e09008b35a62e3189d3513e5148611c9388"}, - {file = "Pillow-8.2.0-cp37-cp37m-win32.whl", hash = "sha256:01425106e4e8cee195a411f729cff2a7d61813b0b11737c12bd5991f5f14bcd5"}, - {file = "Pillow-8.2.0-cp37-cp37m-win_amd64.whl", hash = "sha256:3b570f84a6161cf8865c4e08adf629441f56e32f180f7aa4ccbd2e0a5a02cba2"}, - {file = "Pillow-8.2.0-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:031a6c88c77d08aab84fecc05c3cde8414cd6f8406f4d2b16fed1e97634cc8a4"}, - {file = "Pillow-8.2.0-cp38-cp38-manylinux1_i686.whl", hash = "sha256:66cc56579fd91f517290ab02c51e3a80f581aba45fd924fcdee01fa06e635812"}, - {file = "Pillow-8.2.0-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:6c32cc3145928c4305d142ebec682419a6c0a8ce9e33db900027ddca1ec39178"}, - {file = "Pillow-8.2.0-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:624b977355cde8b065f6d51b98497d6cd5fbdd4f36405f7a8790e3376125e2bb"}, - {file = "Pillow-8.2.0-cp38-cp38-win32.whl", hash = "sha256:5cbf3e3b1014dddc45496e8cf38b9f099c95a326275885199f427825c6522232"}, - {file = "Pillow-8.2.0-cp38-cp38-win_amd64.whl", hash = "sha256:463822e2f0d81459e113372a168f2ff59723e78528f91f0bd25680ac185cf797"}, - {file = "Pillow-8.2.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:95d5ef984eff897850f3a83883363da64aae1000e79cb3c321915468e8c6add5"}, - {file = "Pillow-8.2.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b91c36492a4bbb1ee855b7d16fe51379e5f96b85692dc8210831fbb24c43e484"}, - {file = "Pillow-8.2.0-cp39-cp39-manylinux1_i686.whl", hash = "sha256:d68cb92c408261f806b15923834203f024110a2e2872ecb0bd2a110f89d3c602"}, - {file = "Pillow-8.2.0-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:f217c3954ce5fd88303fc0c317af55d5e0204106d86dea17eb8205700d47dec2"}, - {file = "Pillow-8.2.0-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:5b70110acb39f3aff6b74cf09bb4169b167e2660dabc304c1e25b6555fa781ef"}, - {file = "Pillow-8.2.0-cp39-cp39-win32.whl", hash = 
"sha256:a7d5e9fad90eff8f6f6106d3b98b553a88b6f976e51fce287192a5d2d5363713"}, - {file = "Pillow-8.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:238c197fc275b475e87c1453b05b467d2d02c2915fdfdd4af126145ff2e4610c"}, - {file = "Pillow-8.2.0-pp36-pypy36_pp73-macosx_10_10_x86_64.whl", hash = "sha256:0e04d61f0064b545b989126197930807c86bcbd4534d39168f4aa5fda39bb8f9"}, - {file = "Pillow-8.2.0-pp36-pypy36_pp73-manylinux2010_i686.whl", hash = "sha256:63728564c1410d99e6d1ae8e3b810fe012bc440952168af0a2877e8ff5ab96b9"}, - {file = "Pillow-8.2.0-pp36-pypy36_pp73-manylinux2010_x86_64.whl", hash = "sha256:c03c07ed32c5324939b19e36ae5f75c660c81461e312a41aea30acdd46f93a7c"}, - {file = "Pillow-8.2.0-pp37-pypy37_pp73-macosx_10_10_x86_64.whl", hash = "sha256:4d98abdd6b1e3bf1a1cbb14c3895226816e666749ac040c4e2554231068c639b"}, - {file = "Pillow-8.2.0-pp37-pypy37_pp73-manylinux2010_i686.whl", hash = "sha256:aac00e4bc94d1b7813fe882c28990c1bc2f9d0e1aa765a5f2b516e8a6a16a9e4"}, - {file = "Pillow-8.2.0-pp37-pypy37_pp73-manylinux2010_x86_64.whl", hash = "sha256:22fd0f42ad15dfdde6c581347eaa4adb9a6fc4b865f90b23378aa7914895e120"}, - {file = "Pillow-8.2.0-pp37-pypy37_pp73-win32.whl", hash = "sha256:e98eca29a05913e82177b3ba3d198b1728e164869c613d76d0de4bde6768a50e"}, - {file = "Pillow-8.2.0.tar.gz", hash = "sha256:a787ab10d7bb5494e5f76536ac460741788f1fbce851068d73a87ca7c35fc3e1"}, + {file = "Pillow-8.3.2-cp310-cp310-macosx_10_10_universal2.whl", hash = "sha256:c691b26283c3a31594683217d746f1dad59a7ae1d4cfc24626d7a064a11197d4"}, + {file = "Pillow-8.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f514c2717012859ccb349c97862568fdc0479aad85b0270d6b5a6509dbc142e2"}, + {file = "Pillow-8.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:be25cb93442c6d2f8702c599b51184bd3ccd83adebd08886b682173e09ef0c3f"}, + {file = "Pillow-8.3.2-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d675a876b295afa114ca8bf42d7f86b5fb1298e1b6bb9a24405a3f6c8338811c"}, + {file = 
"Pillow-8.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:59697568a0455764a094585b2551fd76bfd6b959c9f92d4bdec9d0e14616303a"}, + {file = "Pillow-8.3.2-cp310-cp310-win32.whl", hash = "sha256:2d5e9dc0bf1b5d9048a94c48d0813b6c96fccfa4ccf276d9c36308840f40c228"}, + {file = "Pillow-8.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:11c27e74bab423eb3c9232d97553111cc0be81b74b47165f07ebfdd29d825875"}, + {file = "Pillow-8.3.2-cp36-cp36m-macosx_10_10_x86_64.whl", hash = "sha256:11eb7f98165d56042545c9e6db3ce394ed8b45089a67124298f0473b29cb60b2"}, + {file = "Pillow-8.3.2-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2f23b2d3079522fdf3c09de6517f625f7a964f916c956527bed805ac043799b8"}, + {file = "Pillow-8.3.2-cp36-cp36m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:19ec4cfe4b961edc249b0e04b5618666c23a83bc35842dea2bfd5dfa0157f81b"}, + {file = "Pillow-8.3.2-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e5a31c07cea5edbaeb4bdba6f2b87db7d3dc0f446f379d907e51cc70ea375629"}, + {file = "Pillow-8.3.2-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:15ccb81a6ffc57ea0137f9f3ac2737ffa1d11f786244d719639df17476d399a7"}, + {file = "Pillow-8.3.2-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:8f284dc1695caf71a74f24993b7c7473d77bc760be45f776a2c2f4e04c170550"}, + {file = "Pillow-8.3.2-cp36-cp36m-win32.whl", hash = "sha256:4abc247b31a98f29e5224f2d31ef15f86a71f79c7f4d2ac345a5d551d6393073"}, + {file = "Pillow-8.3.2-cp36-cp36m-win_amd64.whl", hash = "sha256:a048dad5ed6ad1fad338c02c609b862dfaa921fcd065d747194a6805f91f2196"}, + {file = "Pillow-8.3.2-cp37-cp37m-macosx_10_10_x86_64.whl", hash = "sha256:06d1adaa284696785375fa80a6a8eb309be722cf4ef8949518beb34487a3df71"}, + {file = "Pillow-8.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bd24054aaf21e70a51e2a2a5ed1183560d3a69e6f9594a4bfe360a46f94eba83"}, + {file = 
"Pillow-8.3.2-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:27a330bf7014ee034046db43ccbb05c766aa9e70b8d6c5260bfc38d73103b0ba"}, + {file = "Pillow-8.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:13654b521fb98abdecec105ea3fb5ba863d1548c9b58831dd5105bb3873569f1"}, + {file = "Pillow-8.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a1bd983c565f92779be456ece2479840ec39d386007cd4ae83382646293d681b"}, + {file = "Pillow-8.3.2-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:4326ea1e2722f3dc00ed77c36d3b5354b8fb7399fb59230249ea6d59cbed90da"}, + {file = "Pillow-8.3.2-cp37-cp37m-win32.whl", hash = "sha256:085a90a99404b859a4b6c3daa42afde17cb3ad3115e44a75f0d7b4a32f06a6c9"}, + {file = "Pillow-8.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:18a07a683805d32826c09acfce44a90bf474e6a66ce482b1c7fcd3757d588df3"}, + {file = "Pillow-8.3.2-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:4e59e99fd680e2b8b11bbd463f3c9450ab799305d5f2bafb74fefba6ac058616"}, + {file = "Pillow-8.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4d89a2e9219a526401015153c0e9dd48319ea6ab9fe3b066a20aa9aee23d9fd3"}, + {file = "Pillow-8.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:56fd98c8294f57636084f4b076b75f86c57b2a63a8410c0cd172bc93695ee979"}, + {file = "Pillow-8.3.2-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2b11c9d310a3522b0fd3c35667914271f570576a0e387701f370eb39d45f08a4"}, + {file = "Pillow-8.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0412516dcc9de9b0a1e0ae25a280015809de8270f134cc2c1e32c4eeb397cf30"}, + {file = "Pillow-8.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:bcb04ff12e79b28be6c9988f275e7ab69f01cc2ba319fb3114f87817bb7c74b6"}, + {file = "Pillow-8.3.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = 
"sha256:0b9911ec70731711c3b6ebcde26caea620cbdd9dcb73c67b0730c8817f24711b"}, + {file = "Pillow-8.3.2-cp38-cp38-win32.whl", hash = "sha256:ce2e5e04bb86da6187f96d7bab3f93a7877830981b37f0287dd6479e27a10341"}, + {file = "Pillow-8.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:35d27687f027ad25a8d0ef45dd5208ef044c588003cdcedf05afb00dbc5c2deb"}, + {file = "Pillow-8.3.2-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:04835e68ef12904bc3e1fd002b33eea0779320d4346082bd5b24bec12ad9c3e9"}, + {file = "Pillow-8.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:10e00f7336780ca7d3653cf3ac26f068fa11b5a96894ea29a64d3dc4b810d630"}, + {file = "Pillow-8.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2cde7a4d3687f21cffdf5bb171172070bb95e02af448c4c8b2f223d783214056"}, + {file = "Pillow-8.3.2-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1c3ff00110835bdda2b1e2b07f4a2548a39744bb7de5946dc8e95517c4fb2ca6"}, + {file = "Pillow-8.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:35d409030bf3bd05fa66fb5fdedc39c521b397f61ad04309c90444e893d05f7d"}, + {file = "Pillow-8.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6bff50ba9891be0a004ef48828e012babaaf7da204d81ab9be37480b9020a82b"}, + {file = "Pillow-8.3.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:7dbfbc0020aa1d9bc1b0b8bcf255a7d73f4ad0336f8fd2533fcc54a4ccfb9441"}, + {file = "Pillow-8.3.2-cp39-cp39-win32.whl", hash = "sha256:963ebdc5365d748185fdb06daf2ac758116deecb2277ec5ae98139f93844bc09"}, + {file = "Pillow-8.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:cc9d0dec711c914ed500f1d0d3822868760954dce98dfb0b7382a854aee55d19"}, + {file = "Pillow-8.3.2-pp36-pypy36_pp73-macosx_10_10_x86_64.whl", hash = "sha256:2c661542c6f71dfd9dc82d9d29a8386287e82813b0375b3a02983feac69ef864"}, + {file = "Pillow-8.3.2-pp36-pypy36_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = 
"sha256:548794f99ff52a73a156771a0402f5e1c35285bd981046a502d7e4793e8facaa"}, + {file = "Pillow-8.3.2-pp36-pypy36_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:8b68f565a4175e12e68ca900af8910e8fe48aaa48fd3ca853494f384e11c8bcd"}, + {file = "Pillow-8.3.2-pp36-pypy36_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:838eb85de6d9307c19c655c726f8d13b8b646f144ca6b3771fa62b711ebf7624"}, + {file = "Pillow-8.3.2-pp36-pypy36_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:feb5db446e96bfecfec078b943cc07744cc759893cef045aa8b8b6d6aaa8274e"}, + {file = "Pillow-8.3.2-pp37-pypy37_pp73-macosx_10_10_x86_64.whl", hash = "sha256:fc0db32f7223b094964e71729c0361f93db43664dd1ec86d3df217853cedda87"}, + {file = "Pillow-8.3.2-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:fd4fd83aa912d7b89b4b4a1580d30e2a4242f3936882a3f433586e5ab97ed0d5"}, + {file = "Pillow-8.3.2-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:d0c8ebbfd439c37624db98f3877d9ed12c137cadd99dde2d2eae0dab0bbfc355"}, + {file = "Pillow-8.3.2-pp37-pypy37_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6cb3dd7f23b044b0737317f892d399f9e2f0b3a02b22b2c692851fb8120d82c6"}, + {file = "Pillow-8.3.2-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a66566f8a22561fc1a88dc87606c69b84fa9ce724f99522cf922c801ec68f5c1"}, + {file = "Pillow-8.3.2-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:ce651ca46d0202c302a535d3047c55a0131a720cf554a578fc1b8a2aff0e7d96"}, + {file = "Pillow-8.3.2.tar.gz", hash = "sha256:dde3f3ed8d00c72631bc19cbfff8ad3b6215062a5eed402381ad365f82f0c18c"}, +] +platformdirs = [ + {file = "platformdirs-2.3.0-py3-none-any.whl", hash = "sha256:8003ac87717ae2c7ee1ea5a84a1a61e87f3fbd16eb5aadba194ea30a9019f648"}, + {file = "platformdirs-2.3.0.tar.gz", hash = "sha256:15b056538719b1c94bdaccb29e5f81879c7f7f0f4a153f46086d155dffcd4f0f"}, ] pluggy = [ - {file = 
"pluggy-0.13.1-py2.py3-none-any.whl", hash = "sha256:966c145cd83c96502c3c3868f50408687b38434af77734af1e9ca461a4081d2d"}, - {file = "pluggy-0.13.1.tar.gz", hash = "sha256:15b2acde666561e1298d71b523007ed7364de07029219b604cf808bfa1c765b0"}, + {file = "pluggy-1.0.0-py2.py3-none-any.whl", hash = "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3"}, + {file = "pluggy-1.0.0.tar.gz", hash = "sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159"}, ] prefixed = [ {file = "prefixed-0.3.2-py2.py3-none-any.whl", hash = "sha256:5e107306462d63f2f03c529dbf11b0026fdfec621a9a008ca639d71de22995c3"}, @@ -1989,9 +2090,13 @@ protobuf = [ {file = "protobuf-3.17.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2ae692bb6d1992afb6b74348e7bb648a75bb0d3565a3f5eea5bec8f62bd06d87"}, {file = "protobuf-3.17.3-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:99938f2a2d7ca6563c0ade0c5ca8982264c484fdecf418bd68e880a7ab5730b1"}, {file = "protobuf-3.17.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:6902a1e4b7a319ec611a7345ff81b6b004b36b0d2196ce7a748b3493da3d226d"}, + {file = "protobuf-3.17.3-cp38-cp38-win32.whl", hash = "sha256:59e5cf6b737c3a376932fbfb869043415f7c16a0cf176ab30a5bbc419cd709c1"}, + {file = "protobuf-3.17.3-cp38-cp38-win_amd64.whl", hash = "sha256:ebcb546f10069b56dc2e3da35e003a02076aaa377caf8530fe9789570984a8d2"}, {file = "protobuf-3.17.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4ffbd23640bb7403574f7aff8368e2aeb2ec9a5c6306580be48ac59a6bac8bde"}, {file = "protobuf-3.17.3-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:26010f693b675ff5a1d0e1bdb17689b8b716a18709113288fead438703d45539"}, {file = "protobuf-3.17.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:e76d9686e088fece2450dbc7ee905f9be904e427341d289acbe9ad00b78ebd47"}, + {file = "protobuf-3.17.3-cp39-cp39-win32.whl", hash = "sha256:a38bac25f51c93e4be4092c88b2568b9f407c27217d3dd23c7a57fa522a17554"}, + {file = 
"protobuf-3.17.3-cp39-cp39-win_amd64.whl", hash = "sha256:85d6303e4adade2827e43c2b54114d9a6ea547b671cb63fafd5011dc47d0e13d"}, {file = "protobuf-3.17.3-py2.py3-none-any.whl", hash = "sha256:2bfb815216a9cd9faec52b16fd2bfa68437a44b67c56bee59bc3926522ecb04e"}, {file = "protobuf-3.17.3.tar.gz", hash = "sha256:72804ea5eaa9c22a090d2803813e280fb273b62d5ae497aaf3553d141c4fdd7b"}, ] @@ -2054,78 +2159,112 @@ pyflakes = [ {file = "pyflakes-2.3.1.tar.gz", hash = "sha256:f5bc8ecabc05bb9d291eb5203d6810b49040f6ff446a756326104746cc00c1db"}, ] pygments = [ - {file = "Pygments-2.9.0-py3-none-any.whl", hash = "sha256:d66e804411278594d764fc69ec36ec13d9ae9147193a1740cd34d272ca383b8e"}, - {file = "Pygments-2.9.0.tar.gz", hash = "sha256:a18f47b506a429f6f4b9df81bb02beab9ca21d0a5fee38ed15aef65f0545519f"}, + {file = "Pygments-2.10.0-py3-none-any.whl", hash = "sha256:b8e67fe6af78f492b3c4b3e2970c0624cbf08beb1e493b2c99b9fa1b67a20380"}, + {file = "Pygments-2.10.0.tar.gz", hash = "sha256:f398865f7eb6874156579fdf36bc840a03cab64d1cde9e93d68f46a425ec52c6"}, ] pylint = [ - {file = "pylint-2.8.3-py3-none-any.whl", hash = "sha256:792b38ff30903884e4a9eab814ee3523731abd3c463f3ba48d7b627e87013484"}, - {file = "pylint-2.8.3.tar.gz", hash = "sha256:0a049c5d47b629d9070c3932d13bff482b12119b6a241a93bc460b0be16953c8"}, + {file = "pylint-2.10.2-py3-none-any.whl", hash = "sha256:e178e96b6ba171f8ef51fbce9ca30931e6acbea4a155074d80cc081596c9e852"}, + {file = "pylint-2.10.2.tar.gz", hash = "sha256:6758cce3ddbab60c52b57dcc07f0c5d779e5daf0cf50f6faacbef1d3ea62d2a1"}, ] pymongo = [ - {file = "pymongo-3.11.4-cp27-cp27m-macosx_10_14_intel.whl", hash = "sha256:b7efc7e7049ef366777cfd35437c18a4166bb50a5606a1c840ee3b9624b54fc9"}, - {file = "pymongo-3.11.4-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:517ba47ca04a55b1f50ee8df9fd97f6c37df5537d118fb2718952b8623860466"}, - {file = "pymongo-3.11.4-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:225c61e08fe517aede7912937939e09adf086c8e6f7e40d4c85ad678c2c2aea3"}, - {file = 
"pymongo-3.11.4-cp27-cp27m-win32.whl", hash = "sha256:e4e9db78b71db2b1684ee4ecc3e32c4600f18cdf76e6b9ae03e338e52ee4b168"}, - {file = "pymongo-3.11.4-cp27-cp27m-win_amd64.whl", hash = "sha256:8e0004b0393d72d76de94b4792a006cb960c1c65c7659930fbf9a81ce4341982"}, - {file = "pymongo-3.11.4-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:fedf0dee7a412ca6d1d6d92c158fe9cbaa8ea0cae90d268f9ccc0744de7a97d0"}, - {file = "pymongo-3.11.4-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:f947b359cc4769af8b49be7e37af01f05fcf15b401da2528021148e4a54426d1"}, - {file = "pymongo-3.11.4-cp34-cp34m-macosx_10_6_intel.whl", hash = "sha256:3a3498a8326111221560e930f198b495ea6926937e249f475052ffc6893a6680"}, - {file = "pymongo-3.11.4-cp34-cp34m-manylinux1_i686.whl", hash = "sha256:9a4f6e0b01df820ba9ed0b4e618ca83a1c089e48d4f268d0e00dcd49893d4549"}, - {file = "pymongo-3.11.4-cp34-cp34m-manylinux1_x86_64.whl", hash = "sha256:d65bac5f6724d9ea6f0b5a0f0e4952fbbf209adcf6b5583b54c54bd2fcd74dc0"}, - {file = "pymongo-3.11.4-cp34-cp34m-win32.whl", hash = "sha256:15b083d1b789b230e5ac284442d9ecb113c93f3785a6824f748befaab803b812"}, - {file = "pymongo-3.11.4-cp34-cp34m-win_amd64.whl", hash = "sha256:f08665d3cc5abc2f770f472a9b5f720a9b3ab0b8b3bb97c7c1487515e5653d39"}, - {file = "pymongo-3.11.4-cp35-cp35m-macosx_10_6_intel.whl", hash = "sha256:977b1d4f868986b4ba5d03c317fde4d3b66e687d74473130cd598e3103db34fa"}, - {file = "pymongo-3.11.4-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:510cd3bfabb63a07405b7b79fae63127e34c118b7531a2cbbafc7a24fd878594"}, - {file = "pymongo-3.11.4-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:071552b065e809d24c5653fcc14968cfd6fde4e279408640d5ac58e3353a3c5f"}, - {file = "pymongo-3.11.4-cp35-cp35m-manylinux2014_aarch64.whl", hash = "sha256:f4ba58157e8ae33ee86fadf9062c506e535afd904f07f9be32731f4410a23b7f"}, - {file = "pymongo-3.11.4-cp35-cp35m-manylinux2014_i686.whl", hash = "sha256:b413117210fa6d92664c3d860571e8e8727c3e8f2ff197276c5d0cb365abd3ad"}, - {file = 
"pymongo-3.11.4-cp35-cp35m-manylinux2014_ppc64le.whl", hash = "sha256:08b8723248730599c9803ae4c97b8f3f76c55219104303c88cb962a31e3bb5ee"}, - {file = "pymongo-3.11.4-cp35-cp35m-manylinux2014_s390x.whl", hash = "sha256:8a41fdc751dc4707a4fafb111c442411816a7c225ebb5cadb57599534b5d5372"}, - {file = "pymongo-3.11.4-cp35-cp35m-manylinux2014_x86_64.whl", hash = "sha256:f664ed7613b8b18f0ce5696b146776266a038c19c5cd6efffa08ecc189b01b73"}, - {file = "pymongo-3.11.4-cp35-cp35m-win32.whl", hash = "sha256:5c36428cc4f7fae56354db7f46677fd21222fc3cb1e8829549b851172033e043"}, - {file = "pymongo-3.11.4-cp35-cp35m-win_amd64.whl", hash = "sha256:d0a70151d7de8a3194cdc906bcc1a42e14594787c64b0c1c9c975e5a2af3e251"}, - {file = "pymongo-3.11.4-cp36-cp36m-macosx_10_6_intel.whl", hash = "sha256:9b9298964389c180a063a9e8bac8a80ed42de11d04166b20249bfa0a489e0e0f"}, - {file = "pymongo-3.11.4-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:b2f41261b648cf5dee425f37ff14f4ad151c2f24b827052b402637158fd056ef"}, - {file = "pymongo-3.11.4-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:e02beaab433fd1104b2804f909e694cfbdb6578020740a9051597adc1cd4e19f"}, - {file = "pymongo-3.11.4-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:8898f6699f740ca93a0879ed07d8e6db02d68af889d0ebb3d13ab017e6b1af1e"}, - {file = "pymongo-3.11.4-cp36-cp36m-manylinux2014_i686.whl", hash = "sha256:62c29bc36a6d9be68fe7b5aaf1e120b4aa66a958d1e146601fcd583eb12cae7b"}, - {file = "pymongo-3.11.4-cp36-cp36m-manylinux2014_ppc64le.whl", hash = "sha256:424799c71ff435094e5fb823c40eebb4500f0e048133311e9c026467e8ccebac"}, - {file = "pymongo-3.11.4-cp36-cp36m-manylinux2014_s390x.whl", hash = "sha256:3551912f5c34d8dd7c32c6bb00ae04192af47f7b9f653608f107d19c1a21a194"}, - {file = "pymongo-3.11.4-cp36-cp36m-manylinux2014_x86_64.whl", hash = "sha256:5db59223ed1e634d842a053325f85f908359c6dac9c8ddce8ef145061fae7df8"}, - {file = "pymongo-3.11.4-cp36-cp36m-win32.whl", hash = "sha256:fea5cb1c63efe1399f0812532c7cf65458d38fd011be350bc5021dfcac39fba8"}, - 
{file = "pymongo-3.11.4-cp36-cp36m-win_amd64.whl", hash = "sha256:d4e62417e89b717a7bcd8576ac3108cd063225942cc91c5b37ff5465fdccd386"}, - {file = "pymongo-3.11.4-cp37-cp37m-macosx_10_6_intel.whl", hash = "sha256:4c7e8c8e1e1918dcf6a652ac4b9d87164587c26fd2ce5dd81e73a5ab3b3d492f"}, - {file = "pymongo-3.11.4-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:38a7b5140a48fc91681cdb5cb95b7cd64640b43d19259fdd707fa9d5a715f2b2"}, - {file = "pymongo-3.11.4-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:aff3656af2add93f290731a6b8930b23b35c0c09569150130a58192b3ec6fc61"}, - {file = "pymongo-3.11.4-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:03be7ad107d252bb7325d4af6309fdd2c025d08854d35f0e7abc8bf048f4245e"}, - {file = "pymongo-3.11.4-cp37-cp37m-manylinux2014_i686.whl", hash = "sha256:6060794aac9f7b0644b299f46a9c6cbc0bc470bd01572f4134df140afd41ded6"}, - {file = "pymongo-3.11.4-cp37-cp37m-manylinux2014_ppc64le.whl", hash = "sha256:73326b211e7410c8bd6a74500b1e3f392f39cf10862e243d00937e924f112c01"}, - {file = "pymongo-3.11.4-cp37-cp37m-manylinux2014_s390x.whl", hash = "sha256:20d75ea11527331a2980ab04762a9d960bcfea9475c54bbeab777af880de61cd"}, - {file = "pymongo-3.11.4-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:3135dd574ef1286189f3f04a36c8b7a256376914f8cbbce66b94f13125ded858"}, - {file = "pymongo-3.11.4-cp37-cp37m-win32.whl", hash = "sha256:7c97554ea521f898753d9773891d0347ebfaddcc1dee2ad94850b163171bf1f1"}, - {file = "pymongo-3.11.4-cp37-cp37m-win_amd64.whl", hash = "sha256:a08c8b322b671857c81f4c30cd3c8df2895fd3c0e9358714f39e0ef8fb327702"}, - {file = "pymongo-3.11.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f3d851af3852f16ad4adc7ee054fd9c90a7a5063de94d815b7f6a88477b9f4c6"}, - {file = "pymongo-3.11.4-cp38-cp38-manylinux1_i686.whl", hash = "sha256:3bfc7689a1bacb9bcd2f2d5185d99507aa29f667a58dd8adaa43b5a348139e46"}, - {file = "pymongo-3.11.4-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:b8f94acd52e530a38f25e4d5bf7ddfdd4bea9193e718f58419def0d4406b58d3"}, - 
{file = "pymongo-3.11.4-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:e4b631688dfbdd61b5610e20b64b99d25771c6d52d9da73349342d2a0f11c46a"}, - {file = "pymongo-3.11.4-cp38-cp38-manylinux2014_i686.whl", hash = "sha256:474e21d0e07cd09679e357d1dac76e570dab86665e79a9d3354b10a279ac6fb3"}, - {file = "pymongo-3.11.4-cp38-cp38-manylinux2014_ppc64le.whl", hash = "sha256:421d13523d11c57f57f257152bc4a6bb463aadf7a3918e9c96fefdd6be8dbfb8"}, - {file = "pymongo-3.11.4-cp38-cp38-manylinux2014_s390x.whl", hash = "sha256:0cabfc297f4cf921f15bc789a8fbfd7115eb9f813d3f47a74b609894bc66ab0d"}, - {file = "pymongo-3.11.4-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:fe4189846448df013cd9df11bba38ddf78043f8c290a9f06430732a7a8601cce"}, - {file = "pymongo-3.11.4-cp38-cp38-win32.whl", hash = "sha256:eb4d176394c37a76e8b0afe54b12d58614a67a60a7f8c0dd3a5afbb013c01092"}, - {file = "pymongo-3.11.4-cp38-cp38-win_amd64.whl", hash = "sha256:fffff7bfb6799a763d3742c59c6ee7ffadda21abed557637bc44ed1080876484"}, - {file = "pymongo-3.11.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:13acf6164ead81c9fc2afa0e1ea6d6134352973ce2bb35496834fee057063c04"}, - {file = "pymongo-3.11.4-cp39-cp39-manylinux1_i686.whl", hash = "sha256:d360e5d5dd3d55bf5d1776964625018d85b937d1032bae1926dd52253decd0db"}, - {file = "pymongo-3.11.4-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:0aaf4d44f1f819360f9432df538d54bbf850f18152f34e20337c01b828479171"}, - {file = "pymongo-3.11.4-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:08bda7b2c522ff9f1e554570da16298271ebb0c56ab9699446aacba249008988"}, - {file = "pymongo-3.11.4-cp39-cp39-manylinux2014_i686.whl", hash = "sha256:1a994a42f49dab5b6287e499be7d3d2751776486229980d8857ad53b8333d469"}, - {file = "pymongo-3.11.4-cp39-cp39-manylinux2014_ppc64le.whl", hash = "sha256:161fcd3281c42f644aa8dec7753cca2af03ce654e17d76da4f0dab34a12480ca"}, - {file = "pymongo-3.11.4-cp39-cp39-manylinux2014_s390x.whl", hash = 
"sha256:78f07961f4f214ea8e80be63cffd5cc158eb06cd922ffbf6c7155b11728f28f9"}, - {file = "pymongo-3.11.4-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:ad31f184dcd3271de26ab1f9c51574afb99e1b0e484ab1da3641256b723e4994"}, - {file = "pymongo-3.11.4-cp39-cp39-win32.whl", hash = "sha256:5e606846c049ed40940524057bfdf1105af6066688c0e6a1a3ce2038589bae70"}, - {file = "pymongo-3.11.4-cp39-cp39-win_amd64.whl", hash = "sha256:3491c7de09e44eded16824cb58cf9b5cc1dc6f066a0bb7aa69929d02aa53b828"}, - {file = "pymongo-3.11.4-py2.7-macosx-10.14-intel.egg", hash = "sha256:506a6dab4c7ffdcacdf0b8e70bd20eb2e77fa994519547c9d88d676400fcad58"}, - {file = "pymongo-3.11.4.tar.gz", hash = "sha256:539d4cb1b16b57026999c53e5aab857fe706e70ae5310cc8c232479923f932e6"}, + {file = "pymongo-3.12.0-cp27-cp27m-macosx_10_14_intel.whl", hash = "sha256:072ba7cb65c8aa4d5c5659bf6722ee85781c9d7816dc00679b8b6f3dff1ddafc"}, + {file = "pymongo-3.12.0-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:d6e11ffd43184d529d6752d6dcb62b994f903038a17ea2168ef1910c96324d26"}, + {file = "pymongo-3.12.0-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:7412a36798966624dc4c57d64aa43c2d1100b348abd98daaac8e99e57d87e1d7"}, + {file = "pymongo-3.12.0-cp27-cp27m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e8a82e35d52ad6f867e88096a1a2b9bdc7ec4d5e65c7b4976a248bf2d1a32a93"}, + {file = "pymongo-3.12.0-cp27-cp27m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:dcd3d0009fbb6e454d729f8b22d0063bd9171c31a55e0f0271119bd4f2700023"}, + {file = "pymongo-3.12.0-cp27-cp27m-win32.whl", hash = "sha256:1bc6fe7279ff40c6818db002bf5284aa03ec181ea1b1ceaeee33c289d412afa7"}, + {file = "pymongo-3.12.0-cp27-cp27m-win_amd64.whl", hash = "sha256:e2b7670c0c8c6b501464150dd49dd0d6be6cb7f049e064124911cec5514fa19e"}, + {file = "pymongo-3.12.0-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:316c1b8723afa9870567cd6dff35d440b2afeda53aa13da6c5ab85f98ed6f5ca"}, + {file = "pymongo-3.12.0-cp27-cp27mu-manylinux1_x86_64.whl", hash = 
"sha256:255a35bf29185f44b412e31a927d9dcedda7c2c380127ecc4fbf2f61b72fa978"}, + {file = "pymongo-3.12.0-cp27-cp27mu-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ffbae429ba9e42d0582d3ac63fdb410338892468a2107d8ff68228ec9a39a0ed"}, + {file = "pymongo-3.12.0-cp27-cp27mu-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:c188db6cf9e14dbbb42f5254292be96f05374a35e7dfa087cc2140f0ff4f10f6"}, + {file = "pymongo-3.12.0-cp34-cp34m-macosx_10_6_intel.whl", hash = "sha256:6fb3f85870ae26896bb44e67db94045f2ebf00c5d41e6b66cdcbb5afd644fc18"}, + {file = "pymongo-3.12.0-cp34-cp34m-manylinux1_i686.whl", hash = "sha256:aaa038eafb7186a4abbb311fcf20724be9363645882bbce540bef4797e812a7a"}, + {file = "pymongo-3.12.0-cp34-cp34m-manylinux1_x86_64.whl", hash = "sha256:7d98ce3c42921bb91566121b658e0d9d59a9082a9bd6f473190607ff25ab637f"}, + {file = "pymongo-3.12.0-cp34-cp34m-win32.whl", hash = "sha256:b0a0cf39f589e52d801fdef418305562bc030cdf8929217463c8433c65fd5c2f"}, + {file = "pymongo-3.12.0-cp34-cp34m-win_amd64.whl", hash = "sha256:ceae3ab9e11a27aaab42878f1d203600dfd24f0e43678b47298219a0f10c0d30"}, + {file = "pymongo-3.12.0-cp35-cp35m-macosx_10_6_intel.whl", hash = "sha256:5e574664f1468872cd40f74e4811e22b1aa4de9399d6bcfdf1ee6ea94c017fcf"}, + {file = "pymongo-3.12.0-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:73b400fdc22de84bae0dbf1a22613928a41612ec0a3d6ed47caf7ad4d3d0f2ff"}, + {file = "pymongo-3.12.0-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:cbf8672edeb7b7128c4a939274801f0e32bbf5159987815e3d1eace625264a46"}, + {file = "pymongo-3.12.0-cp35-cp35m-manylinux2014_aarch64.whl", hash = "sha256:a634a4730ce0b0934ed75e45beba730968e12b4dafbb22f69b3b2f616d9e644e"}, + {file = "pymongo-3.12.0-cp35-cp35m-manylinux2014_i686.whl", hash = "sha256:c55782a55f4a013a78ac5b6ee4b8731a192dea7ab09f1b6b3044c96d5128edd4"}, + {file = "pymongo-3.12.0-cp35-cp35m-manylinux2014_ppc64le.whl", hash = "sha256:11f9e0cfc84ade088a38df2708d0b958bb76360181df1b2e1e1a41beaa57952b"}, + {file = 
"pymongo-3.12.0-cp35-cp35m-manylinux2014_s390x.whl", hash = "sha256:186104a94d39b8412f8e3de385acd990a628346a4402d4f3a288a82b8660bd22"}, + {file = "pymongo-3.12.0-cp35-cp35m-manylinux2014_x86_64.whl", hash = "sha256:70761fd3c576b027eec882b43ee0a8e5b22ff9c20cdf4d0400e104bc29e53e34"}, + {file = "pymongo-3.12.0-cp35-cp35m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:333bfad77aa9cd11711febfb75eed0bb537a1d022e1c252714dad38993590240"}, + {file = "pymongo-3.12.0-cp35-cp35m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:fa8957e9a1b202cb45e6b839c241cd986c897be1e722b81d2f32e9c6aeee80b0"}, + {file = "pymongo-3.12.0-cp35-cp35m-win32.whl", hash = "sha256:4ba0def4abef058c0e5101e05e3d5266e6fffb9795bbf8be0fe912a7361a0209"}, + {file = "pymongo-3.12.0-cp35-cp35m-win_amd64.whl", hash = "sha256:a0e5dff6701fa615f165306e642709e1c1550d5b237c5a7a6ea299886828bd50"}, + {file = "pymongo-3.12.0-cp36-cp36m-macosx_10_6_intel.whl", hash = "sha256:b542d56ed1b8d5cf3bb36326f814bd2fbe8812dfd2582b80a15689ea433c0e35"}, + {file = "pymongo-3.12.0-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:a325600c83e61e3c9cebc0c2b1c8c4140fa887f789085075e8f44c8ff2547eb9"}, + {file = "pymongo-3.12.0-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:48d5bc80ab0af6b60c4163c5617f5cd23f2f880d7600940870ea5055816af024"}, + {file = "pymongo-3.12.0-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:c5cab230e7cabdae9ff23c12271231283efefb944c1b79bed79a91beb65ba547"}, + {file = "pymongo-3.12.0-cp36-cp36m-manylinux2014_i686.whl", hash = "sha256:d73e10772152605f6648ba4410318594f1043bbfe36d2fadee7c4b8912eff7c5"}, + {file = "pymongo-3.12.0-cp36-cp36m-manylinux2014_ppc64le.whl", hash = "sha256:b1c4874331ab960429caca81acb9d2932170d66d6d6f87e65dc4507a85aca152"}, + {file = "pymongo-3.12.0-cp36-cp36m-manylinux2014_s390x.whl", hash = "sha256:a3566acfbcde46911c52810374ecc0354fdb841284a3efef6ff7105bc007e9a8"}, + {file = "pymongo-3.12.0-cp36-cp36m-manylinux2014_x86_64.whl", hash = 
"sha256:b3b5b3cbc3fdf4fcfa292529df2a85b5d9c7053913a739d3069af1e12e12219f"}, + {file = "pymongo-3.12.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fd3854148005c808c485c754a184c71116372263709958b42aefbef2e5dd373a"}, + {file = "pymongo-3.12.0-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f55c1ddcc1f6050b07d468ce594f55dbf6107b459e16f735d26818d7be1e9538"}, + {file = "pymongo-3.12.0-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ced944dcdd561476deef7cb7bfd4987c69fffbfeff6d02ca4d5d4fd592d559b7"}, + {file = "pymongo-3.12.0-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78ecb8d42f50d393af912bfb1fb1dcc9aabe9967973efb49ee577e8f1cea494c"}, + {file = "pymongo-3.12.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1970cfe2aec1bf74b40cf30c130ad10cd968941694630386db33e1d044c22a2e"}, + {file = "pymongo-3.12.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b8bf42d3b32f586f4c9e37541769993783a534ad35531ce8a4379f6fa664fba9"}, + {file = "pymongo-3.12.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:bc9ac81e73573516070d24ce15da91281922811f385645df32bd3c8a45ab4684"}, + {file = "pymongo-3.12.0-cp36-cp36m-win32.whl", hash = "sha256:d04ca462cb99077e6c059e97c072957caf2918e6e4191e3161c01c439e0193de"}, + {file = "pymongo-3.12.0-cp36-cp36m-win_amd64.whl", hash = "sha256:f2acf9bbcd514e901f82c4ca6926bbd2ae61716728f110b4343eb0a69612d018"}, + {file = "pymongo-3.12.0-cp37-cp37m-macosx_10_6_intel.whl", hash = "sha256:b754240daafecd9d5fce426b0fbaaed03f4ebb130745c8a4ae9231fffb8d75e5"}, + {file = "pymongo-3.12.0-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:af586e85144023686fb0af09c8cdf672484ea182f352e7ceead3d832de381e1b"}, + {file = "pymongo-3.12.0-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:fe5872ce6f9627deac8314bdffd3862624227c3de4c17ef0cc78bbf0402999eb"}, + {file = 
"pymongo-3.12.0-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:f6977a520bd96e097c8a37a8cbb9faa1ea99d21bf84190195056e25f688af73d"}, + {file = "pymongo-3.12.0-cp37-cp37m-manylinux2014_i686.whl", hash = "sha256:2dbfbbded947a83a3dffc2bd1ec4750c17e40904692186e2c55a3ad314ca0222"}, + {file = "pymongo-3.12.0-cp37-cp37m-manylinux2014_ppc64le.whl", hash = "sha256:a752ecd1a26000a6d67be7c9a2e93801994a8b3f866ac95b672fbc00225ca91a"}, + {file = "pymongo-3.12.0-cp37-cp37m-manylinux2014_s390x.whl", hash = "sha256:1bab889ae7640eba739f67fcbf8eff252dddc60d4495e6ddd3a87cd9a95fdb52"}, + {file = "pymongo-3.12.0-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:f94c7d22fb36b184734dded7345a04ec5f95130421c775b8b0c65044ef073f34"}, + {file = "pymongo-3.12.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec5ca7c0007ce268048bbe0ffc6846ed1616cf3d8628b136e81d5e64ff3f52a2"}, + {file = "pymongo-3.12.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7c72d08acdf573455b2b9d2b75b8237654841d63a48bc2327dc102c6ee89b75a"}, + {file = "pymongo-3.12.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b6ea08758b6673610b3c5bdf47189286cf9c58b1077558706a2f6f8744922527"}, + {file = "pymongo-3.12.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:46d5ec90276f71af3a29917b30f2aec2315a2759b5f8d45b3b63a07ca8a070a3"}, + {file = "pymongo-3.12.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:625befa3bc9b40746a749115cc6a15bf20b9bd7597ca55d646205b479a2c99c7"}, + {file = "pymongo-3.12.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d1131562ddc2ea8a446f66c2648d7dabec2b3816fc818528eb978a75a6d23b2e"}, + {file = "pymongo-3.12.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:eee42a1cc06565f6b21caa1f504ec15e07de7ebfd520ab57f8cb3308bc118e22"}, + {file = "pymongo-3.12.0-cp37-cp37m-win32.whl", hash = 
"sha256:94d38eba4d1b5eb3e6bfece0651b855a35c44f32fd91f512ab4ba41b8c0d3e66"}, + {file = "pymongo-3.12.0-cp37-cp37m-win_amd64.whl", hash = "sha256:e018a4921657c2d3f89c720b7b90b9182e277178a04a7e9542cc79d7d787ca51"}, + {file = "pymongo-3.12.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:7c6a9948916a7bbcc6d3a9f6fb75db1acb5546078023bfb3db6efabcd5a67527"}, + {file = "pymongo-3.12.0-cp38-cp38-manylinux1_i686.whl", hash = "sha256:e9faf8d4712d5ea301d74abfcf6dafe4b7f4af7936e91f283b0ad7bf69ed3e3a"}, + {file = "pymongo-3.12.0-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:cc2894fe91f31a513860238ede69fe47fada21f9e7ddfe73f7f9fef93a971e41"}, + {file = "pymongo-3.12.0-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:053b4ebf91c7395d1fcd2ce6a9edff0024575b7b2de6781554a4114448a8adc9"}, + {file = "pymongo-3.12.0-cp38-cp38-manylinux2014_i686.whl", hash = "sha256:39dafa2eaf577d1969f289dc9a44501859a1897eb45bd589e93ce843fc610800"}, + {file = "pymongo-3.12.0-cp38-cp38-manylinux2014_ppc64le.whl", hash = "sha256:246ec420e4c8744fceb4e259f906211b9c198e1f345e6158dcd7cbad3737e11e"}, + {file = "pymongo-3.12.0-cp38-cp38-manylinux2014_s390x.whl", hash = "sha256:208debdcf76ed39ebf24f38509f50dc1c100e31e8653817fedb8e1f867850a13"}, + {file = "pymongo-3.12.0-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:18290649759f9db660972442aa606f845c368db9b08c4c73770f6da14113569b"}, + {file = "pymongo-3.12.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:657ad80de8ec9ed656f28844efc801a0802961e8c6a85038d97ff6f555ef4919"}, + {file = "pymongo-3.12.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b772bab31cbd9cb911e41e1a611ebc9497f9a32a7348e2747c38210f75c00f41"}, + {file = "pymongo-3.12.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2399a85b54f68008e483b2871f4a458b4c980469c7fe921595ede073e4844f1e"}, + {file = "pymongo-3.12.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:6e66780f14c2efaf989cd3ac613b03ee6a8e3a0ba7b96c0bb14adca71a427e55"}, + {file = "pymongo-3.12.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:02dc0b0f48ed3cd06c13b7e31b066bf91e00dac5f8147b0a0a45f9009bfab857"}, + {file = "pymongo-3.12.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:070a4ef689c9438a999ec3830e69b208ff0d12251846e064d947f97d819d1d05"}, + {file = "pymongo-3.12.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:db93608a246da44d728842b8fa9e45aa9782db76955f634a707739a8d53ff544"}, + {file = "pymongo-3.12.0-cp38-cp38-win32.whl", hash = "sha256:5af390fa9faf56c93252dab09ea57cd020c9123aa921b63a0ed51832fdb492e7"}, + {file = "pymongo-3.12.0-cp38-cp38-win_amd64.whl", hash = "sha256:a2239556ff7241584ce57be1facf25081669bb457a9e5cbe68cce4aae6567aa1"}, + {file = "pymongo-3.12.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:cda9e628b1315beec8341e8c04aac9a0b910650b05e0751e42e399d5694aeacb"}, + {file = "pymongo-3.12.0-cp39-cp39-manylinux1_i686.whl", hash = "sha256:845a8b83798b2fb11b09928413cb32692866bfbc28830a433d9fa4c8c3720dd0"}, + {file = "pymongo-3.12.0-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:da8288bc4a7807c6715416deed1c57d94d5e03e93537889e002bf985be503f1a"}, + {file = "pymongo-3.12.0-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:a9ba2a63777027b06b116e1ea8248e66fd1bedc2c644f93124b81a91ddbf6d88"}, + {file = "pymongo-3.12.0-cp39-cp39-manylinux2014_i686.whl", hash = "sha256:9a13661681d17e43009bb3e85e837aa1ec5feeea1e3654682a01b8821940f8b3"}, + {file = "pymongo-3.12.0-cp39-cp39-manylinux2014_ppc64le.whl", hash = "sha256:6b89dc51206e4971c5568c797991eaaef5dc2a6118d67165858ad11752dba055"}, + {file = "pymongo-3.12.0-cp39-cp39-manylinux2014_s390x.whl", hash = "sha256:701e08457183da70ed96b35a6b43e6ba1df0b47c837b063cde39a1fbe1aeda81"}, + {file = "pymongo-3.12.0-cp39-cp39-manylinux2014_x86_64.whl", hash = 
"sha256:e7a33322e08021c37e89cae8ff06327503e8a1719e97c69f32c31cbf6c30d72c"}, + {file = "pymongo-3.12.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dd1f49f949a658c4e8f81ed73f9aad25fcc7d4f62f767f591e749e30038c4e1d"}, + {file = "pymongo-3.12.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a6d055f01b83b1a4df8bb0c61983d3bdffa913764488910af3620e5c2450bf83"}, + {file = "pymongo-3.12.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dd6ff2192f34bd622883c745a56f492b1c9ccd44e14953e8051c33024a2947d5"}, + {file = "pymongo-3.12.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:19d4bd0fc29aa405bb1781456c9cfff9fceabb68543741eb17234952dbc2bbb0"}, + {file = "pymongo-3.12.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:24f8aeec4d6b894a6128844e50ff423dd02462ee83addf503c598ee3a80ddf3d"}, + {file = "pymongo-3.12.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0b6055e0ef451ff73c93d0348d122a0750dddf323b9361de5835dac2f6cf7fc1"}, + {file = "pymongo-3.12.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:6261bee7c5abadeac7497f8f1c43e521da78dd13b0a2439f526a7b0fc3788824"}, + {file = "pymongo-3.12.0-cp39-cp39-win32.whl", hash = "sha256:2e92aa32300a0b5e4175caec7769f482b292769807024a86d674b3f19b8e3755"}, + {file = "pymongo-3.12.0-cp39-cp39-win_amd64.whl", hash = "sha256:3ce83f17f641a62a4dfb0ba1b8a3c1ced7c842f511b5450d90c030c7828e3693"}, + {file = "pymongo-3.12.0-py2.7-macosx-10.14-intel.egg", hash = "sha256:d1740776b70367277323fafb76bcf09753a5cc9824f5d705bac22a34ff3668ea"}, + {file = "pymongo-3.12.0.tar.gz", hash = "sha256:b88d1742159bc93a078733f9789f563cef26f5e370eba810476a71aa98e5fbc2"}, ] pynput = [ {file = "pynput-1.7.3-py2.py3-none-any.whl", hash = "sha256:fea5777454f896bd79d35393088cd29a089f3b2da166f0848a922b1d5a807d4f"}, @@ -2160,28 +2299,80 @@ pyparsing = [ {file = 
"pyparsing-2.4.7-py2.py3-none-any.whl", hash = "sha256:ef9d7589ef3c200abe66653d3f1ab1033c3c419ae9b9bdb1240a85b024efc88b"}, {file = "pyparsing-2.4.7.tar.gz", hash = "sha256:c203ec8783bf771a155b207279b9bccb8dea02d8f0c9e5f8ead507bc3246ecc1"}, ] +pyqt5 = [ + {file = "PyQt5-5.15.4-cp36.cp37.cp38.cp39-abi3-macosx_10_13_intel.whl", hash = "sha256:8c0848ba790a895801d5bfd171da31cad3e551dbcc4e59677a3b622de2ceca98"}, + {file = "PyQt5-5.15.4-cp36.cp37.cp38.cp39-abi3-manylinux2014_x86_64.whl", hash = "sha256:883a549382fc22d29a0568f3ef20b38c8e7ab633a59498ac4eb63a3bf36d3fd3"}, + {file = "PyQt5-5.15.4-cp36.cp37.cp38.cp39-none-win32.whl", hash = "sha256:a88526a271e846e44779bb9ad7a738c6d3c4a9d01e15a128ecfc6dd4696393b7"}, + {file = "PyQt5-5.15.4-cp36.cp37.cp38.cp39-none-win_amd64.whl", hash = "sha256:213bebd51821ed89b4d5b35bb10dbe67564228b3568f463a351a08e8b1677025"}, + {file = "PyQt5-5.15.4.tar.gz", hash = "sha256:2a69597e0dd11caabe75fae133feca66387819fc9bc050f547e5551bce97e5be"}, +] +pyqt5-qt5 = [ + {file = "PyQt5_Qt5-5.15.2-py3-none-macosx_10_13_intel.whl", hash = "sha256:76980cd3d7ae87e3c7a33bfebfaee84448fd650bad6840471d6cae199b56e154"}, + {file = "PyQt5_Qt5-5.15.2-py3-none-manylinux2014_x86_64.whl", hash = "sha256:1988f364ec8caf87a6ee5d5a3a5210d57539988bf8e84714c7d60972692e2f4a"}, + {file = "PyQt5_Qt5-5.15.2-py3-none-win32.whl", hash = "sha256:9cc7a768b1921f4b982ebc00a318ccb38578e44e45316c7a4a850e953e1dd327"}, + {file = "PyQt5_Qt5-5.15.2-py3-none-win_amd64.whl", hash = "sha256:750b78e4dba6bdf1607febedc08738e318ea09e9b10aea9ff0d73073f11f6962"}, +] +pyqt5-sip = [ + {file = "PyQt5_sip-12.9.0-cp36-cp36m-macosx_10_6_intel.whl", hash = "sha256:d85002238b5180bce4b245c13d6face848faa1a7a9e5c6e292025004f2fd619a"}, + {file = "PyQt5_sip-12.9.0-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:83c3220b1ca36eb8623ba2eb3766637b19eb0ce9f42336ad8253656d32750c0a"}, + {file = "PyQt5_sip-12.9.0-cp36-cp36m-win32.whl", hash = "sha256:d8b2bdff7bbf45bc975c113a03b14fd669dc0c73e1327f02706666a7dd51a197"}, 
+ {file = "PyQt5_sip-12.9.0-cp36-cp36m-win_amd64.whl", hash = "sha256:69a3ad4259172e2b1aa9060de211efac39ddd734a517b1924d9c6c0cc4f55f96"}, + {file = "PyQt5_sip-12.9.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:42274a501ab4806d2c31659170db14c282b8313d2255458064666d9e70d96206"}, + {file = "PyQt5_sip-12.9.0-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:6a8701892a01a5a2a4720872361197cc80fdd5f49c8482d488ddf38c9c84f055"}, + {file = "PyQt5_sip-12.9.0-cp37-cp37m-win32.whl", hash = "sha256:ac57d796c78117eb39edd1d1d1aea90354651efac9d3590aac67fa4983f99f1f"}, + {file = "PyQt5_sip-12.9.0-cp37-cp37m-win_amd64.whl", hash = "sha256:4347bd81d30c8e3181e553b3734f91658cfbdd8f1a19f254777f906870974e6d"}, + {file = "PyQt5_sip-12.9.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c446971c360a0a1030282a69375a08c78e8a61d568bfd6dab3dcc5cf8817f644"}, + {file = "PyQt5_sip-12.9.0-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:fc43f2d7c438517ee33e929e8ae77132749c15909afab6aeece5fcf4147ffdb5"}, + {file = "PyQt5_sip-12.9.0-cp38-cp38-win32.whl", hash = "sha256:055581c6fed44ba4302b70eeb82e979ff70400037358908f251cd85cbb3dbd93"}, + {file = "PyQt5_sip-12.9.0-cp38-cp38-win_amd64.whl", hash = "sha256:c5216403d4d8d857ec4a61f631d3945e44fa248aa2415e9ee9369ab7c8a4d0c7"}, + {file = "PyQt5_sip-12.9.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a25b9843c7da6a1608f310879c38e6434331aab1dc2fe6cb65c14f1ecf33780e"}, + {file = "PyQt5_sip-12.9.0-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:dd05c768c2b55ffe56a9d49ce6cc77cdf3d53dbfad935258a9e347cbfd9a5850"}, + {file = "PyQt5_sip-12.9.0-cp39-cp39-win32.whl", hash = "sha256:4f8e05fe01d54275877c59018d8e82dcdd0bc5696053a8b830eecea3ce806121"}, + {file = "PyQt5_sip-12.9.0-cp39-cp39-win_amd64.whl", hash = "sha256:b09f4cd36a4831229fb77c424d89635fa937d97765ec90685e2f257e56a2685a"}, + {file = "PyQt5_sip-12.9.0.tar.gz", hash = "sha256:d3e4489d7c2b0ece9d203ae66e573939f7f60d4d29e089c9f11daa17cfeaae32"}, +] pyrsistent = [ - {file = 
"pyrsistent-0.17.3.tar.gz", hash = "sha256:2e636185d9eb976a18a8a8e96efce62f2905fea90041958d8cc2a189756ebf3e"}, + {file = "pyrsistent-0.18.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:f4c8cabb46ff8e5d61f56a037974228e978f26bfefce4f61a4b1ac0ba7a2ab72"}, + {file = "pyrsistent-0.18.0-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:da6e5e818d18459fa46fac0a4a4e543507fe1110e808101277c5a2b5bab0cd2d"}, + {file = "pyrsistent-0.18.0-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:5e4395bbf841693eaebaa5bb5c8f5cdbb1d139e07c975c682ec4e4f8126e03d2"}, + {file = "pyrsistent-0.18.0-cp36-cp36m-win32.whl", hash = "sha256:527be2bfa8dc80f6f8ddd65242ba476a6c4fb4e3aedbf281dfbac1b1ed4165b1"}, + {file = "pyrsistent-0.18.0-cp36-cp36m-win_amd64.whl", hash = "sha256:2aaf19dc8ce517a8653746d98e962ef480ff34b6bc563fc067be6401ffb457c7"}, + {file = "pyrsistent-0.18.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:58a70d93fb79dc585b21f9d72487b929a6fe58da0754fa4cb9f279bb92369396"}, + {file = "pyrsistent-0.18.0-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:4916c10896721e472ee12c95cdc2891ce5890898d2f9907b1b4ae0f53588b710"}, + {file = "pyrsistent-0.18.0-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:73ff61b1411e3fb0ba144b8f08d6749749775fe89688093e1efef9839d2dcc35"}, + {file = "pyrsistent-0.18.0-cp37-cp37m-win32.whl", hash = "sha256:b29b869cf58412ca5738d23691e96d8aff535e17390128a1a52717c9a109da4f"}, + {file = "pyrsistent-0.18.0-cp37-cp37m-win_amd64.whl", hash = "sha256:097b96f129dd36a8c9e33594e7ebb151b1515eb52cceb08474c10a5479e799f2"}, + {file = "pyrsistent-0.18.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:772e94c2c6864f2cd2ffbe58bb3bdefbe2a32afa0acb1a77e472aac831f83427"}, + {file = "pyrsistent-0.18.0-cp38-cp38-manylinux1_i686.whl", hash = "sha256:c1a9ff320fa699337e05edcaae79ef8c2880b52720bc031b219e5b5008ebbdef"}, + {file = "pyrsistent-0.18.0-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:cd3caef37a415fd0dae6148a1b6957a8c5f275a62cca02e18474608cb263640c"}, + {file = 
"pyrsistent-0.18.0-cp38-cp38-win32.whl", hash = "sha256:e79d94ca58fcafef6395f6352383fa1a76922268fa02caa2272fff501c2fdc78"}, + {file = "pyrsistent-0.18.0-cp38-cp38-win_amd64.whl", hash = "sha256:a0c772d791c38bbc77be659af29bb14c38ced151433592e326361610250c605b"}, + {file = "pyrsistent-0.18.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d5ec194c9c573aafaceebf05fc400656722793dac57f254cd4741f3c27ae57b4"}, + {file = "pyrsistent-0.18.0-cp39-cp39-manylinux1_i686.whl", hash = "sha256:6b5eed00e597b5b5773b4ca30bd48a5774ef1e96f2a45d105db5b4ebb4bca680"}, + {file = "pyrsistent-0.18.0-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:48578680353f41dca1ca3dc48629fb77dfc745128b56fc01096b2530c13fd426"}, + {file = "pyrsistent-0.18.0-cp39-cp39-win32.whl", hash = "sha256:f3ef98d7b76da5eb19c37fda834d50262ff9167c65658d1d8f974d2e4d90676b"}, + {file = "pyrsistent-0.18.0-cp39-cp39-win_amd64.whl", hash = "sha256:404e1f1d254d314d55adb8d87f4f465c8693d6f902f67eb6ef5b4526dc58e6ea"}, + {file = "pyrsistent-0.18.0.tar.gz", hash = "sha256:773c781216f8c2900b42a7b638d5b517bb134ae1acbebe4d1e8f1f41ea60eb4b"}, ] pytest = [ - {file = "pytest-6.2.4-py3-none-any.whl", hash = "sha256:91ef2131a9bd6be8f76f1f08eac5c5317221d6ad1e143ae03894b862e8976890"}, - {file = "pytest-6.2.4.tar.gz", hash = "sha256:50bcad0a0b9c5a72c8e4e7c9855a3ad496ca6a881a3641b4260605450772c54b"}, + {file = "pytest-6.2.5-py3-none-any.whl", hash = "sha256:7310f8d27bc79ced999e760ca304d69f6ba6c6649c0b60fb0e04a4a77cacc134"}, + {file = "pytest-6.2.5.tar.gz", hash = "sha256:131b36680866a76e6781d13f101efb86cf674ebb9762eb70d3082b6f29889e89"}, ] pytest-cov = [ {file = "pytest-cov-2.12.1.tar.gz", hash = "sha256:261ceeb8c227b726249b376b8526b600f38667ee314f910353fa318caa01f4d7"}, {file = "pytest_cov-2.12.1-py2.py3-none-any.whl", hash = "sha256:261bb9e47e65bd099c89c3edf92972865210c36813f80ede5277dceb77a4a62a"}, ] pytest-print = [ - {file = "pytest_print-0.2.1-py2.py3-none-any.whl", hash = 
"sha256:2cfcdeee8b398457d3e3488f1fde5f8303b404c30187be5fcb4c7818df5f4529"}, - {file = "pytest_print-0.2.1.tar.gz", hash = "sha256:8f61e5bb2d031ee88d19a5a7695a0c863caee7b1478f1a82d080c2128b76ad83"}, + {file = "pytest_print-0.3.0-py2.py3-none-any.whl", hash = "sha256:53fb0f71d371f137ac2e7171d92f204eb45055580e8c7920df619d9b2ee45359"}, + {file = "pytest_print-0.3.0.tar.gz", hash = "sha256:769f1b1b0943b2941dbeeaac6985766e76b341130ed538f88c23ebcd7087b90d"}, ] python-dateutil = [ - {file = "python-dateutil-2.8.1.tar.gz", hash = "sha256:73ebfe9dbf22e832286dafa60473e4cd239f8592f699aa5adaf10050e6e1823c"}, - {file = "python_dateutil-2.8.1-py2.py3-none-any.whl", hash = "sha256:75bb3f31ea686f1197762692a9ee6a7550b59fc6ca3a1f4b5d7e32fb98e2da2a"}, + {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, + {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, ] python-xlib = [ - {file = "python-xlib-0.30.tar.gz", hash = "sha256:74131418faf9e7b83178c71d9d80297fbbd678abe99ae9258f5a20cd027acb5f"}, - {file = "python_xlib-0.30-py2.py3-none-any.whl", hash = "sha256:c4c92cd47e07588b2cbc7d52de18407b2902c3812d7cdec39cd2177b060828e2"}, + {file = "python-xlib-0.31.tar.gz", hash = "sha256:74d83a081f532bc07f6d7afcd6416ec38403d68f68b9b9dc9e1f28fbf2d799e9"}, + {file = "python_xlib-0.31-py2.py3-none-any.whl", hash = "sha256:1ec6ce0de73d9e6592ead666779a5732b384e5b8fb1f1886bd0a81cafa477759"}, ] python3-xlib = [ {file = "python3-xlib-0.15.tar.gz", hash = "sha256:dc4245f3ae4aa5949c1d112ee4723901ade37a96721ba9645f2bfa56e5b383f8"}, @@ -2207,16 +2398,16 @@ pywin32-ctypes = [ {file = "pywin32_ctypes-0.2.0-py2.py3-none-any.whl", hash = "sha256:9dc2d991b3479cc2df15930958b674a48a227d5361d413827a4cfd0b5876fc98"}, ] "qt.py" = [ - {file = "Qt.py-1.3.3-py2.py3-none-any.whl", hash = "sha256:9e3f5417187c98d246918a9b27a9e1f8055e089bdb2b063a2739986bc19a3d2e"}, - {file 
= "Qt.py-1.3.3.tar.gz", hash = "sha256:601606127f70be9adc82c248d209d696cccbd1df242c24d3fb1a9e399f3ecaf1"}, + {file = "Qt.py-1.3.6-py2.py3-none-any.whl", hash = "sha256:7edf6048d07a6924707506b5ba34a6e05d66dde9a3f4e3a62f9996ccab0b91c7"}, + {file = "Qt.py-1.3.6.tar.gz", hash = "sha256:0d78656a2f814602eee304521c7bf5da0cec414818b3833712c77524294c404a"}, ] recommonmark = [ {file = "recommonmark-0.7.1-py2.py3-none-any.whl", hash = "sha256:1b1db69af0231efce3fa21b94ff627ea33dee7079a01dd0a7f8482c3da148b3f"}, {file = "recommonmark-0.7.1.tar.gz", hash = "sha256:bdb4db649f2222dcd8d2d844f0006b958d627f732415d399791ee436a3686d67"}, ] requests = [ - {file = "requests-2.25.1-py2.py3-none-any.whl", hash = "sha256:c210084e36a42ae6b9219e00e48287def368a26d03a048ddad7bfee44f75871e"}, - {file = "requests-2.25.1.tar.gz", hash = "sha256:27973dd4a904a4f13b263a19c866c13b92a39ed1c964655f025f3f8d3d75b804"}, + {file = "requests-2.26.0-py2.py3-none-any.whl", hash = "sha256:6c1246513ecd5ecd4528a0906f910e8f0f9c6b8ec72030dc9fd154dc1a6efd24"}, + {file = "requests-2.26.0.tar.gz", hash = "sha256:b8aa58f8cf793ffd8782d3d8cb19e66ef36f7aba4353eec859e74678b01b07a7"}, ] rsa = [ {file = "rsa-4.7.2-py3-none-any.whl", hash = "sha256:78f9a9bf4e7be0c5ded4583326e7461e3a3c5aae24073648b4bdfa797d78c9d2"}, @@ -2235,8 +2426,8 @@ six = [ {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, ] slack-sdk = [ - {file = "slack_sdk-3.6.0-py2.py3-none-any.whl", hash = "sha256:e1b257923a1ef88b8620dd3abff94dc5b3eee16ef37975d101ba9e60123ac3af"}, - {file = "slack_sdk-3.6.0.tar.gz", hash = "sha256:195f044e02a2844579a7a26818ce323e85dde8de224730c859644918d793399e"}, + {file = "slack_sdk-3.10.1-py2.py3-none-any.whl", hash = "sha256:f17b71a578e94204d9033bffded634475f4ca0a6274c6c7a4fd8a9cb0ac7cd8b"}, + {file = "slack_sdk-3.10.1.tar.gz", hash = "sha256:2b4dde7728eb4ff5a581025d204578ccff25a5d8f0fe11ae175e3ce6e074434f"}, ] smmap = [ {file = "smmap-4.0.0-py2.py3-none-any.whl", hash 
= "sha256:a9a7479e4c572e2e775c404dcd3080c8dc49f39918c2cf74913d30c4c478e3c2"}, @@ -2251,8 +2442,8 @@ speedcopy = [ {file = "speedcopy-2.1.0.tar.gz", hash = "sha256:8bb1a6c735900b83901a7be84ba2175ed3887c13c6786f97dea48f2ea7d504c2"}, ] sphinx = [ - {file = "Sphinx-4.0.2-py3-none-any.whl", hash = "sha256:d1cb10bee9c4231f1700ec2e24a91be3f3a3aba066ea4ca9f3bbe47e59d5a1d4"}, - {file = "Sphinx-4.0.2.tar.gz", hash = "sha256:b5c2ae4120bf00c799ba9b3699bc895816d272d120080fbc967292f29b52b48c"}, + {file = "Sphinx-4.1.2-py3-none-any.whl", hash = "sha256:46d52c6cee13fec44744b8c01ed692c18a640f6910a725cbb938bc36e8d64544"}, + {file = "Sphinx-4.1.2.tar.gz", hash = "sha256:3092d929cd807926d846018f2ace47ba2f3b671b309c7a89cd3306e80c826b13"}, ] sphinx-qt-documentation = [ {file = "sphinx_qt_documentation-0.3-py3-none-any.whl", hash = "sha256:bee247cb9e4fc03fc496d07adfdb943100e1103320c3e5e820e0cfa7c790d9b6"}, @@ -2330,17 +2521,17 @@ typed-ast = [ {file = "typed_ast-1.4.3.tar.gz", hash = "sha256:fb1bbeac803adea29cedd70781399c99138358c26d05fcbd23c13016b7f5ec65"}, ] typing-extensions = [ - {file = "typing_extensions-3.10.0.0-py2-none-any.whl", hash = "sha256:0ac0f89795dd19de6b97debb0c6af1c70987fd80a2d62d1958f7e56fcc31b497"}, - {file = "typing_extensions-3.10.0.0-py3-none-any.whl", hash = "sha256:779383f6086d90c99ae41cf0ff39aac8a7937a9283ce0a414e5dd782f4c94a84"}, - {file = "typing_extensions-3.10.0.0.tar.gz", hash = "sha256:50b6f157849174217d0656f99dc82fe932884fb250826c18350e159ec6cdf342"}, + {file = "typing_extensions-3.10.0.2-py2-none-any.whl", hash = "sha256:d8226d10bc02a29bcc81df19a26e56a9647f8b0a6d4a83924139f4a8b01f17b7"}, + {file = "typing_extensions-3.10.0.2-py3-none-any.whl", hash = "sha256:f1d25edafde516b146ecd0613dabcc61409817af4766fbbcfb8d1ad4ec441a34"}, + {file = "typing_extensions-3.10.0.2.tar.gz", hash = "sha256:49f75d16ff11f1cd258e1b988ccff82a3ca5570217d7ad8c5f48205dd99a677e"}, ] uritemplate = [ {file = "uritemplate-3.0.1-py2.py3-none-any.whl", hash = 
"sha256:07620c3f3f8eed1f12600845892b0e036a2420acf513c53f7de0abd911a5894f"}, {file = "uritemplate-3.0.1.tar.gz", hash = "sha256:5af8ad10cec94f215e3f48112de2022e1d5a37ed427fbd88652fa908f2ab7cae"}, ] urllib3 = [ - {file = "urllib3-1.26.5-py2.py3-none-any.whl", hash = "sha256:753a0374df26658f99d826cfe40394a686d05985786d946fbe4165b5148f5a7c"}, - {file = "urllib3-1.26.5.tar.gz", hash = "sha256:a7acd0977125325f516bda9735fa7142b909a8d01e8b2e4c8108d0984e6e0098"}, + {file = "urllib3-1.26.6-py2.py3-none-any.whl", hash = "sha256:39fb8672126159acb139a7718dd10806104dec1e2f0f6c88aab05d17df10c8d4"}, + {file = "urllib3-1.26.6.tar.gz", hash = "sha256:f57b4c16c62fa2760b7e3d97c35b255512fb6b59a259730f36ba32ce9f8e342f"}, ] wcwidth = [ {file = "wcwidth-0.2.5-py2.py3-none-any.whl", hash = "sha256:beb4802a9cebb9144e99086eff703a642a13d6a0052920003a230f3294bbe784"}, @@ -2397,6 +2588,6 @@ yarl = [ {file = "yarl-1.6.3.tar.gz", hash = "sha256:8a9066529240171b68893d60dca86a763eae2139dd42f42106b03cf4b426bf10"}, ] zipp = [ - {file = "zipp-3.4.1-py3-none-any.whl", hash = "sha256:51cb66cc54621609dd593d1787f286ee42a5c0adbb4b29abea5a63edc3e03098"}, - {file = "zipp-3.4.1.tar.gz", hash = "sha256:3607921face881ba3e026887d8150cca609d517579abe052ac81fc5aeffdbd76"}, + {file = "zipp-3.5.0-py3-none-any.whl", hash = "sha256:957cfda87797e389580cb8b9e3870841ca991e2125350677b2ca83a0e99390a3"}, + {file = "zipp-3.5.0.tar.gz", hash = "sha256:f5812b1e007e48cff63449a5e9f4e7ebea716b4111f9c4f9a645f91d579bf0c4"}, ] diff --git a/pyproject.toml b/pyproject.toml index 1e797130db..9cbf4e5383 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -67,7 +67,7 @@ slack-sdk = "^3.6.0" flake8 = "^3.7" autopep8 = "^1.4" coverage = "*" -cx_freeze = "^6.6" +cx_freeze = { version = "6.7", source = "openpype" } GitPython = "^3.1.17" jedi = "^0.13" Jinja2 = "^2.11" diff --git a/repos/avalon-core b/repos/avalon-core index 82d5b8137e..f48fce09c0 160000 --- a/repos/avalon-core +++ b/repos/avalon-core @@ -1 +1 @@ -Subproject commit 
82d5b8137eea3b49d4781a4af51d7f375bb9f628 +Subproject commit f48fce09c0986c1fd7f6731de33907be46b436c5 diff --git a/start.py b/start.py index 2b475e07ac..c740f68b61 100644 --- a/start.py +++ b/start.py @@ -183,6 +183,12 @@ else: ssl_cert_file = certifi.where() os.environ["SSL_CERT_FILE"] = ssl_cert_file +if "--headless" in sys.argv: + os.environ["OPENPYPE_HEADLESS_MODE"] = "1" + sys.argv.remove("--headless") +else: + if os.getenv("OPENPYPE_HEADLESS_MODE") != "1": + os.environ.pop("OPENPYPE_HEADLESS_MODE", None) import igniter # noqa: E402 from igniter import BootstrapRepos # noqa: E402 @@ -347,7 +353,7 @@ def _process_arguments() -> tuple: # check for `--use-version=3.0.0` argument and `--use-staging` use_version = None use_staging = False - print_versions = False + commands = [] for arg in sys.argv: if arg == "--use-version": _print("!!! Please use option --use-version like:") @@ -370,17 +376,38 @@ def _process_arguments() -> tuple: " proper version string.")) sys.exit(1) + if arg == "--validate-version": + _print("!!! Please use option --validate-version like:") + _print(" --validate-version=3.0.0") + sys.exit(1) + + if arg.startswith("--validate-version="): + m = re.search( + r"--validate-version=(?P\d+\.\d+\.\d+(?:\S*)?)", arg) + if m and m.group('version'): + use_version = m.group('version') + sys.argv.remove(arg) + commands.append("validate") + else: + _print("!!! Requested version isn't in correct format.") + _print((" Use --list-versions to find out" + " proper version string.")) + sys.exit(1) + if "--use-staging" in sys.argv: use_staging = True sys.argv.remove("--use-staging") if "--list-versions" in sys.argv: - print_versions = True + commands.append("print_versions") sys.argv.remove("--list-versions") # handle igniter # this is helper to run igniter before anything else if "igniter" in sys.argv: + if os.getenv("OPENPYPE_HEADLESS_MODE") == "1": + _print("!!! 
Cannot open Igniter dialog in headless mode.") + sys.exit(1) import igniter return_code = igniter.open_dialog() @@ -393,7 +420,7 @@ def _process_arguments() -> tuple: sys.argv.pop(idx) sys.argv.insert(idx, "tray") - return use_version, use_staging, print_versions + return use_version, use_staging, commands def _determine_mongodb() -> str: @@ -428,6 +455,11 @@ def _determine_mongodb() -> str: if not openpype_mongo: _print("*** No DB connection string specified.") + if os.getenv("OPENPYPE_HEADLESS_MODE") == "1": + _print("!!! Cannot open Igniter dialog in headless mode.") + _print( + "!!! Please use `OPENPYPE_MONGO` to specify server address.") + sys.exit(1) _print("--- launching setup UI ...") result = igniter.open_dialog() @@ -531,6 +563,9 @@ def _find_frozen_openpype(use_version: str = None, except IndexError: # no OpenPype version found, run Igniter and ask for them. _print('*** No OpenPype versions found.') + if os.getenv("OPENPYPE_HEADLESS_MODE") == "1": + _print("!!! Cannot open Igniter dialog in headless mode.") + sys.exit(1) _print("--- launching setup UI ...") import igniter return_code = igniter.open_dialog() @@ -594,8 +629,16 @@ def _find_frozen_openpype(use_version: str = None, if not is_inside: # install latest version to user data dir - version_path = bootstrap.install_version( - openpype_version, force=True) + if os.getenv("OPENPYPE_HEADLESS_MODE", "0") != "1": + import igniter + version_path = igniter.open_update_window(openpype_version) + else: + version_path = bootstrap.install_version( + openpype_version, force=True) + + openpype_version.path = version_path + _initialize_environment(openpype_version) + return openpype_version.path if openpype_version.path.is_file(): _print(">>> Extracting zip file ...") @@ -742,7 +785,7 @@ def boot(): # Process arguments # ------------------------------------------------------------------------ - use_version, use_staging, print_versions = _process_arguments() + use_version, use_staging, commands = 
_process_arguments() if os.getenv("OPENPYPE_VERSION"): if use_version: @@ -770,13 +813,47 @@ def boot(): # Get openpype path from database and set it to environment so openpype can # find its versions there and bootstrap them. openpype_path = get_openpype_path_from_db(openpype_mongo) + + if getattr(sys, 'frozen', False): + local_version = bootstrap.get_version(Path(OPENPYPE_ROOT)) + else: + local_version = bootstrap.get_local_live_version() + + if "validate" in commands: + _print(f">>> Validating version [ {use_version} ]") + openpype_versions = bootstrap.find_openpype(include_zips=True, + staging=True) + openpype_versions += bootstrap.find_openpype(include_zips=True, + staging=False) + v: OpenPypeVersion + found = [v for v in openpype_versions if str(v) == use_version] + if not found: + _print(f"!!! Version [ {use_version} ] not found.") + list_versions(openpype_versions, local_version) + sys.exit(1) + + # print result + result = bootstrap.validate_openpype_version( + bootstrap.get_version_path_from_list( + use_version, openpype_versions)) + + _print("{}{}".format( + ">>> " if result[0] else "!!! ", + bootstrap.validate_openpype_version( + bootstrap.get_version_path_from_list( + use_version, openpype_versions) + )[1]) + ) + sys.exit(1) + + if not openpype_path: _print("*** Cannot get OpenPype path from database.") if not os.getenv("OPENPYPE_PATH") and openpype_path: os.environ["OPENPYPE_PATH"] = openpype_path - if print_versions: + if "print_versions" in commands: if not use_staging: _print("--- This will list only non-staging versions detected.") _print(" To see staging versions, use --use-staging argument.") @@ -807,6 +884,13 @@ def boot(): # no version to run _print(f"!!! {e}") sys.exit(1) + # validate version + _print(f">>> Validating version [ {str(version_path)} ]") + result = bootstrap.validate_openpype_version(version_path) + if not result[0]: + _print(f"!!! 
Invalid version: {result[1]}") + sys.exit(1) + _print(f"--- version is valid") else: version_path = _bootstrap_from_code(use_version, use_staging) diff --git a/tools/build.sh b/tools/build.sh index 4f352b0157..bc79f03db7 100755 --- a/tools/build.sh +++ b/tools/build.sh @@ -90,6 +90,7 @@ done ############################################################################### detect_python () { echo -e "${BIGreen}>>>${RST} Using python \c" + command -v python >/dev/null 2>&1 || { echo -e "${BIRed}- NOT FOUND${RST} ${BIYellow}You need Python 3.7 installed to continue.${RST}"; return 1; } local version_command version_command="import sys;print('{0}.{1}'.format(sys.version_info[0], sys.version_info[1]))" local python_version @@ -173,7 +174,7 @@ main () { else echo -e "${BIYellow}NOT FOUND${RST}" echo -e "${BIYellow}***${RST} We need to install Poetry and virtual env ..." - . "$openpype_root/tools/create_env.sh" || { echo -e "${BIRed}!!!${RST} Poetry installation failed"; return; } + . "$openpype_root/tools/create_env.sh" || { echo -e "${BIRed}!!!${RST} Poetry installation failed"; return 1; } fi if [ "$disable_submodule_update" == 1 ]; then @@ -184,9 +185,9 @@ if [ "$disable_submodule_update" == 1 ]; then fi echo -e "${BIGreen}>>>${RST} Building ..." 
if [[ "$OSTYPE" == "linux-gnu"* ]]; then - "$POETRY_HOME/bin/poetry" run python "$openpype_root/setup.py" build > "$openpype_root/build/build.log" || { echo -e "${BIRed}!!!${RST} Build failed, see the build log."; return; } + "$POETRY_HOME/bin/poetry" run python "$openpype_root/setup.py" build &> "$openpype_root/build/build.log" || { echo -e "${BIRed}!!!${RST} Build failed, see the build log."; return 1; } elif [[ "$OSTYPE" == "darwin"* ]]; then - "$POETRY_HOME/bin/poetry" run python "$openpype_root/setup.py" bdist_mac > "$openpype_root/build/build.log" || { echo -e "${BIRed}!!!${RST} Build failed, see the build log."; return; } + "$POETRY_HOME/bin/poetry" run python "$openpype_root/setup.py" bdist_mac &> "$openpype_root/build/build.log" || { echo -e "${BIRed}!!!${RST} Build failed, see the build log."; return 1; } fi "$POETRY_HOME/bin/poetry" run python "$openpype_root/tools/build_dependencies.py" @@ -210,5 +211,6 @@ if [ "$disable_submodule_update" == 1 ]; then echo -e "${BIWhite}$openpype_root/build${RST} directory." } -main -exit $? +return_code=0 +main || return_code=$? +exit $return_code diff --git a/tools/build_dependencies.py b/tools/build_dependencies.py index 3898450471..e5a430e220 100644 --- a/tools/build_dependencies.py +++ b/tools/build_dependencies.py @@ -135,6 +135,16 @@ progress_bar.close() # iterate over frozen libs and create list to delete libs_dir = build_dir / "lib" +# On Windows "python3.dll" is needed for PyQt5 from the build. 
+if platform.system().lower() == "windows": + src = Path(libs_dir / "PyQt5" / "python3.dll") + dst = Path(deps_dir / "PyQt5" / "python3.dll") + if src.exists(): + shutil.copyfile(src, dst) + else: + _print("Could not find {}".format(src), 1) + sys.exit(1) + to_delete = [] # _print("Finding duplicates ...") deps_items = list(deps_dir.iterdir()) diff --git a/tools/ci_tools.py b/tools/ci_tools.py index 436551c243..3c1aaae991 100644 --- a/tools/ci_tools.py +++ b/tools/ci_tools.py @@ -36,7 +36,7 @@ def get_log_since_tag(version): def release_type(log): regex_minor = ["feature/", "(feat)"] - regex_patch = ["bugfix/", "fix/", "(fix)"] + regex_patch = ["bugfix/", "fix/", "(fix)", "enhancement/"] for reg in regex_minor: if re.search(reg, log): return "minor" diff --git a/tools/create_env.sh b/tools/create_env.sh index ed4f43e5f6..4ed6412c43 100755 --- a/tools/create_env.sh +++ b/tools/create_env.sh @@ -88,6 +88,7 @@ done ############################################################################### detect_python () { echo -e "${BIGreen}>>>${RST} Using python \c" + command -v python >/dev/null 2>&1 || { echo -e "${BIRed}- NOT FOUND${RST} ${BIYellow}You need Python 3.7 installed to continue.${RST}"; return 1; } local version_command="import sys;print('{0}.{1}'.format(sys.version_info[0], sys.version_info[1]))" local python_version="$(python <<< ${version_command})" oIFS="$IFS" @@ -166,7 +167,7 @@ main () { echo -e "${BIGreen}OK${RST}" else echo -e "${BIYellow}NOT FOUND${RST}" - install_poetry || { echo -e "${BIRed}!!!${RST} Poetry installation failed"; return; } + install_poetry || { echo -e "${BIRed}!!!${RST} Poetry installation failed"; return 1; } fi if [ -f "$openpype_root/poetry.lock" ]; then @@ -175,7 +176,11 @@ main () { echo -e "${BIGreen}>>>${RST} Installing dependencies ..." 
fi - "$POETRY_HOME/bin/poetry" install --no-root $poetry_verbosity --ansi || { echo -e "${BIRed}!!!${RST} Poetry environment installation failed"; return; } + "$POETRY_HOME/bin/poetry" install --no-root $poetry_verbosity || { echo -e "${BIRed}!!!${RST} Poetry environment installation failed"; return 1; } + if [ $? -ne 0 ] ; then + echo -e "${BIRed}!!!${RST} Virtual environment creation failed." + return 1 + fi echo -e "${BIGreen}>>>${RST} Cleaning cache files ..." clean_pyc @@ -184,10 +189,11 @@ main () { # cx_freeze will crash on missing __pychache__ on these but # reinstalling them solves the problem. echo -e "${BIGreen}>>>${RST} Fixing pycache bug ..." - "$POETRY_HOME/bin/poetry" run python -m pip install --force-reinstall pip - "$POETRY_HOME/bin/poetry" run pip install --force-reinstall setuptools - "$POETRY_HOME/bin/poetry" run pip install --force-reinstall wheel - "$POETRY_HOME/bin/poetry" run python -m pip install --force-reinstall pip + "$POETRY_HOME/bin/poetry" run pip install --disable-pip-version-check --force-reinstall setuptools + "$POETRY_HOME/bin/poetry" run pip install --disable-pip-version-check --force-reinstall wheel + "$POETRY_HOME/bin/poetry" run python -m pip install --disable-pip-version-check --force-reinstall pip } -main -3 +return_code=0 +main || return_code=$? +exit $return_code diff --git a/tools/docker_build.sh b/tools/docker_build.sh index a6df2a099e..04c26424eb 100755 --- a/tools/docker_build.sh +++ b/tools/docker_build.sh @@ -20,6 +20,41 @@ realpath () { echo $(cd $(dirname "$1"); pwd)/$(basename "$1") } +create_container () { + if [ ! -f "$openpype_root/build/docker-image.id" ]; then + echo -e "${BIRed}!!!${RST} Docker command failed, cannot find image id." + exit 1 + fi + local id=$(<"$openpype_root/build/docker-image.id") + echo -e "${BIYellow}---${RST} Creating container from $id ..." + cid="$(docker create $id bash)" + if [ $? -ne 0 ] ; then + echo -e "${BIRed}!!!${RST} Cannot create container." 
+ exit 1 + fi +} + +retrieve_build_log () { + create_container + echo -e "${BIYellow}***${RST} Copying build log to ${BIWhite}$openpype_root/build/build.log${RST}" + docker cp "$cid:/opt/openpype/build/build.log" "$openpype_root/build" +} + +openpype_root=$(realpath $(dirname $(dirname "${BASH_SOURCE[0]}"))) + + +if [ -z $1 ]; then + dockerfile="Dockerfile" +else + dockerfile="Dockerfile.$1" + if [ ! -f "$openpype_root/$dockerfile" ]; then + echo -e "${BIRed}!!!${RST} Dockerfile for specified platform ${BIWhite}$1${RST} doesn't exist." + exit 1 + else + echo -e "${BIGreen}>>>${RST} Using Dockerfile for ${BIWhite}$1${RST} ..." + fi +fi + # Main main () { openpype_root=$(realpath $(dirname $(dirname "${BASH_SOURCE[0]}"))) @@ -28,36 +63,35 @@ main () { echo -e "${BIYellow}---${RST} Cleaning build directory ..." rm -rf "$openpype_root/build" && mkdir "$openpype_root/build" > /dev/null - version_command="import os;exec(open(os.path.join('$openpype_root', 'openpype', 'version.py')).read());print(__version__);" - openpype_version="$(python3 <<< ${version_command})" + local version_command="import os;exec(open(os.path.join('$openpype_root', 'openpype', 'version.py')).read());print(__version__);" + local openpype_version="$(python3 <<< ${version_command})" echo -e "${BIGreen}>>>${RST} Running docker build ..." - docker build --pull -t pypeclub/openpype:$openpype_version . + # docker build --pull --no-cache -t pypeclub/openpype:$openpype_version . + docker build --pull --iidfile $openpype_root/build/docker-image.id -t pypeclub/openpype:$openpype_version -f $dockerfile . if [ $? -ne 0 ] ; then + echo $? echo -e "${BIRed}!!!${RST} Docker build failed." + retrieve_build_log return 1 fi echo -e "${BIGreen}>>>${RST} Copying build from container ..." - echo -e "${BIYellow}---${RST} Creating container from pypeclub/openpype:$openpype_version ..." - id="$(docker create -ti pypeclub/openpype:$openpype_version bash)" - if [ $? 
-ne 0 ] ; then - echo -e "${BIRed}!!!${RST} Cannot create just built container." - return 1 - fi + create_container echo -e "${BIYellow}---${RST} Copying ..." - docker cp "$id:/opt/openpype/build/exe.linux-x86_64-3.7" "$openpype_root/build" + docker cp "$cid:/opt/openpype/build/exe.linux-x86_64-3.7" "$openpype_root/build" + docker cp "$cid:/opt/openpype/build/build.log" "$openpype_root/build" if [ $? -ne 0 ] ; then echo -e "${BIRed}!!!${RST} Copying failed." return 1 fi echo -e "${BIGreen}>>>${RST} Fixing user ownership ..." - username="$(logname)" + local username="$(logname)" chown -R $username ./build echo -e "${BIGreen}>>>${RST} All done, you can delete container:" - echo -e "${BIYellow}$id${RST}" + echo -e "${BIYellow}$cid${RST}" } return_code=0 diff --git a/website/docs/admin_openpype_commands.md b/website/docs/admin_openpype_commands.md index 1a91e2e7fe..d6ccc883b0 100644 --- a/website/docs/admin_openpype_commands.md +++ b/website/docs/admin_openpype_commands.md @@ -18,11 +18,14 @@ Running OpenPype without any commands will default to `tray`. ```shell openpype_console --use-version=3.0.0-foo+bar ``` +`--headless` - to run OpenPype in headless mode (without using graphical UI) `--use-staging` - to use staging versions of OpenPype. `--list-versions [--use-staging]` - to list available versions. +`--validate-version` to validate integrity of given version + For more information [see here](admin_use#run-openpype). ## Commands diff --git a/website/docs/admin_use.md b/website/docs/admin_use.md index 4ad08a0174..178241ad19 100644 --- a/website/docs/admin_use.md +++ b/website/docs/admin_use.md @@ -56,6 +56,19 @@ openpype_console --list-versions You can add `--use-staging` to list staging versions. ::: +If you want to validate integrity of some available version, you can use: + +```shell +openpype_console --validate-version=3.3.0 +``` + +This will go through the version and validate file content against sha 256 hashes +stored in `checksums` file. 
+ +:::tip Headless mode +Add `--headless` to run OpenPype without graphical UI (useful on server or on automated tasks, etc.) +::: + ### Details When you run OpenPype from executable, few check are made: diff --git a/website/docs/admin_webserver_for_webpublisher.md b/website/docs/admin_webserver_for_webpublisher.md new file mode 100644 index 0000000000..6e72ccaf32 --- /dev/null +++ b/website/docs/admin_webserver_for_webpublisher.md @@ -0,0 +1,83 @@ +--- +id: admin_webserver_for_webpublisher +title: Webserver for webpublisher +sidebar_label: Webserver for webpublisher +--- + +import Tabs from '@theme/Tabs'; +import TabItem from '@theme/TabItem'; + +Running Openpype webserver is needed as a backend part for Web publishing. +Any OS supported by Openpype could be used as a host server. + +Webpublishing consists of two sides, Front end (FE) and Openpype backend. This documenation is only targeted on OP side. + +It is expected that FE and OP will live on two separate servers, FE publicly available, OP safely in customer network. + +# Requirements for servers +- OP server allows access to its `8079` port for FE. (It is recommended to whitelist only FE IP.) +- have shared folder for published resources (images, workfiles etc) on both servers + +# Prepare Ftrack +Current webpublish process expects authentication via Slack. It is expected that customer has users created on a Ftrack +with same email addresses as on Slack. As some customer might have usernames different from emails, conversion from email to username is needed. + +For this "pype.club" user needs to be present on Ftrack, creation of this user should be standard part of Ftrack preparation for Openpype. +Next create API key on Ftrack, store this information temporarily as you won't have access to this key after creation. + + +# Prepare Openpype + +Deploy OP build distribution (Openpype Igniter) on an OS of your choice. 
+ +##Run webserver as a Linux service: + +(This expects that OP Igniter is deployed to `opt/openpype` and log should be stored in `/tmp/openpype.log`) + +- create file `sudo vi /opt/openpype/webpublisher_webserver.sh` + +- paste content +```sh +#!/usr/bin/env bash +export OPENPYPE_DEBUG=3 +export FTRACK_BOT_API_USER=YOUR_API_USER +export FTRACK_BOT_API_KEY=YOUR_API_KEY +export PYTHONDONTWRITEBYTECODE=1 +export OPENPYPE_MONGO=YOUR_MONGODB_CONNECTION + +pushd /opt/openpype +./openpype_console webpublisherwebserver --upload_dir YOUR_SHARED_FOLDER_ON_HOST --executable /opt/openpype/openpype_console --host YOUR_HOST_IP --port YOUR_HOST_PORT > /tmp/openpype.log 2>&1 +``` + +1. create service file `sudo vi /etc/systemd/system/openpye-webserver.service` + +2. paste content +```sh +[Unit] +Description=Run OpenPype Ftrack Webserver Service +After=network.target + +[Service] +Type=idle +ExecStart=/opt/openpype/webpublisher_webserver.sh +Restart=on-failure +RestartSec=10s +StandardOutput=append:/tmp/openpype.log +StandardError=append:/tmp/openpype.log + +[Install] +WantedBy=multi-user.target +``` + +5. change file permission: + `sudo chmod 0755 /etc/systemd/system/openpype-webserver.service` + +6. enable service: + `sudo systemctl enable openpype-webserver` + +7. start service: + `sudo systemctl start openpype-webserver` + +8. Check `/tmp/openpype.log` if OP got started + +(Note: service could be restarted by `service openpype-webserver restart` - this will result in purge of current log file!) 
\ No newline at end of file diff --git a/website/docs/artist_hosts_houdini.md b/website/docs/artist_hosts_houdini.md new file mode 100644 index 0000000000..d2aadf05cb --- /dev/null +++ b/website/docs/artist_hosts_houdini.md @@ -0,0 +1,78 @@ +--- +id: artist_hosts_houdini +title: Houdini +sidebar_label: Houdini +--- + +## OpenPype global tools + +- [Work Files](artist_tools.md#workfiles) +- [Create](artist_tools.md#creator) +- [Load](artist_tools.md#loader) +- [Manage (Inventory)](artist_tools.md#inventory) +- [Publish](artist_tools.md#publisher) +- [Library Loader](artist_tools.md#library-loader) + +## Publishing Alembic Cameras +You can publish baked camera in Alembic format. Select your camera and go **OpenPype -> Create** and select **Camera (abc)**. +This will create Alembic ROP in **out** with path and frame range already set. This node will have a name you've +assigned in the **Creator** menu. For example if you name the subset `Default`, output Alembic Driver will be named +`cameraDefault`. After that, you can **OpenPype -> Publish** and after some validations your camera will be published +to `abc` file. + +## Publishing Composites - Image Sequences +You can publish image sequence directly from Houdini. You can use any `cop` network you have and publish image +sequence generated from it. For example I've created simple **cop** graph to generate some noise: +![Noise COP](assets/houdini_imagesequence_cop.png) + +If I want to publish it, I'll select node I like - in this case `radialblur1` and go **OpenPype -> Create** and +select **Composite (Image Sequence)**. This will create `/out/imagesequenceNoise` Composite ROP (I've named my subset +*Noise*) with frame range set. When you hit **Publish** it will render image sequence from selected node. 
+ +## Publishing Point Caches (alembic) +Publishing point caches in alembic format is pretty straightforward, but it is by default enforcing better compatibility +with other DCCs, so it needs data do be exported prepared in certain way. You need to add `path` attribute so objects +in alembic are better structured. When using alembic round trip in Houdini (loading alembics, modifying then and +then publishing modifications), `path` is automatically resolved by alembic nodes. + +In this example, I've created this node graph on **sop** level, and I want to publish it as point cache. + +![Pointcache setup](assets/houdini_pointcache_path.png) + +*Note: `connectivity` will add index for each primitive and `primitivewrangle1` will add `path` attribute, so it will +be for each primitive (`sphere1` and `sphere2`) as Maya is expecting - `strange_GRP/strange0_GEO/strange0_GEOShape`. How +you handle `path` attribute is up to you, this is just an example.* + +Now select the `output0` node and go **OpenPype -> Create** and select **Point Cache**. It will create +Alembic ROP `/out/pointcacheStrange` + + +## Redshift +:::note Work in progress +This part of documentation is still work in progress. +::: + +## USD (experimental support) +### Publishing USD +You can publish your Solaris Stage as USD file. +![Solaris USD](assets/houdini_usd_stage.png) + +This is very simple test stage. I've selected `output` **lop** node and went to **OpenPype -> Create** where I've +selected **USD**. This created `/out/usdDefault` USD ROP node. + +### Publishing USD render + +USD Render works in similar manner as USD file, except it will create **USD Render** ROP node in out and will publish +images produced by it. If you have selected node in Solaris Stage it will by added as **lop path** to ROP. + +## Publishing VDB + +Publishing VDB files works as with other data types. In this example I've created simple PyroFX explosion from +sphere. 
In `pyro_import` I've converted the volume to VDB: + +![VDB Setup](assets/houdini_vdb_setup.png) + +I've selected `vdb1` and went **OpenPype -> Create** and selected **VDB Cache**. This will create +geometry ROP in `/out` and sets its paths to output vdb files. During the publishing process +whole dops are cooked. + diff --git a/website/docs/assets/houdini_imagesequence_cop.png b/website/docs/assets/houdini_imagesequence_cop.png new file mode 100644 index 0000000000..54ed5977b9 Binary files /dev/null and b/website/docs/assets/houdini_imagesequence_cop.png differ diff --git a/website/docs/assets/houdini_pointcache_path.png b/website/docs/assets/houdini_pointcache_path.png new file mode 100644 index 0000000000..3687a9c0dd Binary files /dev/null and b/website/docs/assets/houdini_pointcache_path.png differ diff --git a/website/docs/assets/houdini_usd_stage.png b/website/docs/assets/houdini_usd_stage.png new file mode 100644 index 0000000000..cba9428604 Binary files /dev/null and b/website/docs/assets/houdini_usd_stage.png differ diff --git a/website/docs/assets/houdini_vdb_setup.png b/website/docs/assets/houdini_vdb_setup.png new file mode 100644 index 0000000000..e27e0b6c36 Binary files /dev/null and b/website/docs/assets/houdini_vdb_setup.png differ diff --git a/website/docs/dev_build.md b/website/docs/dev_build.md index b3e0c24fc2..f71118eba6 100644 --- a/website/docs/dev_build.md +++ b/website/docs/dev_build.md @@ -84,6 +84,13 @@ You can use Docker to build OpenPype. Just run: ```shell $ sudo ./tools/docker_build.sh ``` + +This will by default use Debian as base image. If you need to make Centos 7 compatible build, please run: + +```sh +sudo ./tools/docker_build.sh centos7 +``` + and you should have built OpenPype in `build` directory. It is using **Centos 7** as a base image. 
@@ -323,14 +330,18 @@ Same as: poetry run python ./tools/create_zip.py ``` -### docker_build.sh +### docker_build.sh *[variant]* Script to build OpenPype on [Docker](https://www.docker.com/) enabled systems - usually Linux and Windows with [Docker Desktop](https://docs.docker.com/docker-for-windows/install/) and [Windows Subsystem for Linux](https://docs.microsoft.com/en-us/windows/wsl/about) (WSL) installed. It must be run with administrative privileges - `sudo ./docker_build.sh`. -It will use **Centos 7** base image to build OpenPype. You'll see your build in `./build` folder. +It will use latest **Debian** base image to build OpenPype. If you need to build OpenPype for +older systems like Centos 7, use `centos7` as argument. This will use another Dockerfile to build +OpenPype with **Centos 7** as base image. + +You'll see your build in `./build` folder. ### fetch_thirdparty_libs This script will download necessary tools for OpenPype defined in `pyproject.toml` like FFMpeg, diff --git a/website/sidebars.js b/website/sidebars.js index 488814a385..3a4b933b9a 100644 --- a/website/sidebars.js +++ b/website/sidebars.js @@ -22,6 +22,7 @@ module.exports = { "artist_hosts_maya", "artist_hosts_blender", "artist_hosts_harmony", + "artist_hosts_houdini", "artist_hosts_aftereffects", "artist_hosts_resolve", "artist_hosts_photoshop", diff --git a/website/src/pages/index.js b/website/src/pages/index.js index 6a233ddb66..00cf002aec 100644 --- a/website/src/pages/index.js +++ b/website/src/pages/index.js @@ -120,7 +120,12 @@ const studios = [ title: "Bad Clay", image: "/img/badClay_logo.png", infoLink: "https://www.bad-clay.com/", - } + }, + { + title: "Moonrock Animation Studio", + image: "/img/moonrock_logo.png", + infoLink: "https://www.moonrock.eu/", + } ]; function Service({imageUrl, title, description}) { diff --git a/website/static/img/moonrock_logo.png b/website/static/img/moonrock_logo.png new file mode 100644 index 0000000000..249db7c247 Binary files /dev/null and 
b/website/static/img/moonrock_logo.png differ diff --git a/website/yarn.lock b/website/yarn.lock index a63bf37731..b4c12edeb6 100644 --- a/website/yarn.lock +++ b/website/yarn.lock @@ -6168,9 +6168,9 @@ path-key@^3.0.0, path-key@^3.1.0: integrity sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q== path-parse@^1.0.6: - version "1.0.6" - resolved "https://registry.yarnpkg.com/path-parse/-/path-parse-1.0.6.tgz#d62dbb5679405d72c4737ec58600e9ddcf06d24c" - integrity sha512-GSmOT2EbHrINBf9SR7CDELwlJ8AENk3Qn7OikK4nFYAu3Ote2+JYNVvkpAEQm3/TLNEJFD/xZJjzyxg3KBWOzw== + version "1.0.7" + resolved "https://registry.yarnpkg.com/path-parse/-/path-parse-1.0.7.tgz#fbc114b60ca42b30d9daf5858e4bd68bbedb6735" + integrity sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw== path-to-regexp@0.1.7: version "0.1.7" @@ -8341,9 +8341,9 @@ url-parse-lax@^3.0.0: prepend-http "^2.0.0" url-parse@^1.4.3, url-parse@^1.4.7: - version "1.5.1" - resolved "https://registry.yarnpkg.com/url-parse/-/url-parse-1.5.1.tgz#d5fa9890af8a5e1f274a2c98376510f6425f6e3b" - integrity sha512-HOfCOUJt7iSYzEx/UqgtwKRMC6EU91NFhsCHMv9oM03VJcVo2Qrp8T8kI9D7amFf1cu+/3CEhgb3rF9zL7k85Q== + version "1.5.3" + resolved "https://registry.yarnpkg.com/url-parse/-/url-parse-1.5.3.tgz#71c1303d38fb6639ade183c2992c8cc0686df862" + integrity sha512-IIORyIQD9rvj0A4CLWsHkBBJuNqWpFQe224b6j9t/ABmquIS0qDU2pY6kl6AuOrL5OkCXHMCFNe1jBcuAggjvQ== dependencies: querystringify "^2.1.1" requires-port "^1.0.0"