From d5fccbbf7d41723b96201ba1340ae83f155588ab Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Wed, 20 Jul 2022 19:02:55 +0200 Subject: [PATCH 001/480] :package: update to python 3.9 and update dependencies --- poetry.lock | 732 +++++++++++++++++++++++-------------------------- pyproject.toml | 2 +- 2 files changed, 339 insertions(+), 395 deletions(-) diff --git a/poetry.lock b/poetry.lock index 7221e191ff..c4f061743e 100644 --- a/poetry.lock +++ b/poetry.lock @@ -24,12 +24,10 @@ python-versions = ">=3.6" [package.dependencies] aiosignal = ">=1.1.2" async-timeout = ">=4.0.0a3,<5.0" -asynctest = {version = "0.13.0", markers = "python_version < \"3.8\""} attrs = ">=17.3.0" charset-normalizer = ">=2.0,<3.0" frozenlist = ">=1.1.1" multidict = ">=4.5,<7.0" -typing-extensions = {version = ">=3.7.4", markers = "python_version < \"3.8\""} yarl = ">=1.0,<2.0" [package.extras] @@ -107,7 +105,7 @@ python-dateutil = ">=2.7.0" [[package]] name = "astroid" -version = "2.11.5" +version = "2.11.7" description = "An abstract syntax tree for Python with inference support." category = "dev" optional = false @@ -115,7 +113,6 @@ python-versions = ">=3.6.2" [package.dependencies] lazy-object-proxy = ">=1.4.0" -typed-ast = {version = ">=1.4.0,<2.0", markers = "implementation_name == \"cpython\" and python_version < \"3.8\""} typing-extensions = {version = ">=3.10", markers = "python_version < \"3.10\""} wrapt = ">=1.11,<2" @@ -127,20 +124,9 @@ category = "main" optional = false python-versions = ">=3.6" -[package.dependencies] -typing-extensions = {version = ">=3.6.5", markers = "python_version < \"3.8\""} - -[[package]] -name = "asynctest" -version = "0.13.0" -description = "Enhance the standard unittest package with features for testing asyncio libraries" -category = "main" -optional = false -python-versions = ">=3.5" - [[package]] name = "atomicwrites" -version = "1.4.0" +version = "1.4.1" description = "Atomic file writes." category = "dev" optional = false @@ -174,11 +160,11 @@ toml = "*" [[package]] name = "babel" -version = "2.9.1" +version = "2.10.3" description = "Internationalization utilities" category = "dev" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +python-versions = ">=3.6" [package.dependencies] pytz = ">=2015.7" @@ -221,7 +207,7 @@ python-versions = "~=3.7" [[package]] name = "certifi" -version = "2022.5.18.1" +version = "2022.6.15" description = "Python package for providing Mozilla's CA Bundle." category = "main" optional = false @@ -229,7 +215,7 @@ python-versions = ">=3.6" [[package]] name = "cffi" -version = "1.15.0" +version = "1.15.1" description = "Foreign Function Interface for Python calling C code." category = "main" optional = false @@ -272,7 +258,7 @@ test = ["pytest-runner (>=2.7,<3)", "pytest (>=2.3.5,<5)", "pytest-cov (>=2,<3)" [[package]] name = "colorama" -version = "0.4.4" +version = "0.4.5" description = "Cross-platform colored terminal text." category = "dev" optional = false @@ -299,7 +285,7 @@ python-versions = "*" [[package]] name = "coverage" -version = "6.4.1" +version = "6.4.2" description = "Code coverage measurement for Python" category = "dev" optional = false @@ -313,7 +299,7 @@ toml = ["tomli"] [[package]] name = "cryptography" -version = "37.0.2" +version = "37.0.4" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." 
category = "main" optional = false @@ -401,7 +387,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" [[package]] name = "dropbox" -version = "11.31.0" +version = "11.33.0" description = "Official Dropbox API Client" category = "main" optional = false @@ -426,7 +412,7 @@ prefixed = ">=0.3.2" [[package]] name = "evdev" -version = "1.5.0" +version = "1.6.0" description = "Bindings to the Linux input handling subsystem" category = "main" optional = false @@ -441,7 +427,6 @@ optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" [package.dependencies] -importlib-metadata = {version = "*", markers = "python_version < \"3.8\""} mccabe = ">=0.6.0,<0.7.0" pycodestyle = ">=2.7.0,<2.8.0" pyflakes = ">=2.3.0,<2.4.0" @@ -482,7 +467,7 @@ python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" [[package]] name = "gazu" -version = "0.8.28" +version = "0.8.30" description = "Gazu is a client for Zou, the API to store the data of your CG production." category = "main" optional = false @@ -518,11 +503,10 @@ python-versions = ">=3.7" [package.dependencies] gitdb = ">=4.0.1,<5" -typing-extensions = {version = ">=3.7.4.3", markers = "python_version < \"3.8\""} [[package]] name = "google-api-core" -version = "2.8.1" +version = "2.8.2" description = "Google API client core library" category = "main" optional = false @@ -531,13 +515,11 @@ python-versions = ">=3.6" [package.dependencies] google-auth = ">=1.25.0,<3.0dev" googleapis-common-protos = ">=1.56.2,<2.0dev" -protobuf = ">=3.15.0,<4.0.0dev" +protobuf = ">=3.15.0,<5.0.0dev" requests = ">=2.18.0,<3.0.0dev" [package.extras] grpc = ["grpcio (>=1.33.2,<2.0dev)", "grpcio-status (>=1.33.2,<2.0dev)"] -grpcgcp = ["grpcio-gcp (>=0.2.2,<1.0dev)"] -grpcio-gcp = ["grpcio-gcp (>=0.2.2,<1.0dev)"] [[package]] name = "google-api-python-client" @@ -557,7 +539,7 @@ uritemplate = ">=3.0.0,<4dev" [[package]] name = "google-auth" -version = "2.7.0" +version = "2.9.1" description = "Google Authentication Library" category = "main" optional = false @@ -590,14 +572,14 @@ six = "*" [[package]] name = "googleapis-common-protos" -version = "1.56.2" +version = "1.56.4" description = "Common protobufs used in Google APIs" category = "main" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" [package.dependencies] -protobuf = ">=3.15.0,<4.0.0dev" +protobuf = ">=3.15.0,<5.0.0dev" [package.extras] grpc = ["grpcio (>=1.0.0,<2.0.0dev)"] @@ -623,7 +605,7 @@ python-versions = ">=3.5" [[package]] name = "imagesize" -version = "1.3.0" +version = "1.4.1" description = "Getting image size from png/jpeg/jpeg2000/gif file" category = "dev" optional = false @@ -631,20 +613,19 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" [[package]] name = "importlib-metadata" -version = "4.11.4" +version = "4.12.0" description = "Read metadata from Python packages" category = "main" optional = false python-versions = ">=3.7" [package.dependencies] -typing-extensions = {version = ">=3.6.4", markers = "python_version < \"3.8\""} zipp = ">=0.5" [package.extras] docs = ["sphinx", "jaraco.packaging (>=9)", "rst.linker (>=1.9)"] perf = ["ipython"] -testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "packaging", "pyfakefs", "flufl.flake8", "pytest-perf (>=0.9.2)", "pytest-black (>=0.3.7)", "pytest-mypy (>=0.9.1)", "importlib-resources (>=1.3)"] +testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.3)", "packaging", 
"pyfakefs", "flufl.flake8", "pytest-perf (>=0.9.2)", "pytest-black (>=0.3.7)", "pytest-mypy (>=0.9.1)", "importlib-resources (>=1.3)"] [[package]] name = "iniconfig" @@ -684,14 +665,14 @@ testing = ["colorama", "docopt", "pytest (>=3.1.0)"] [[package]] name = "jeepney" -version = "0.7.1" +version = "0.8.0" description = "Low-level, pure Python DBus protocol wrapper." category = "main" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" [package.extras] -test = ["pytest", "pytest-trio", "pytest-asyncio", "testpath", "trio", "async-timeout"] +test = ["pytest", "pytest-trio", "pytest-asyncio (>=0.17)", "testpath", "trio", "async-timeout"] trio = ["trio", "async-generator"] [[package]] @@ -769,11 +750,11 @@ pymongo = "*" [[package]] name = "markupsafe" -version = "2.0.1" +version = "2.1.1" description = "Safely add untrusted strings to HTML/XML markup." category = "dev" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" [[package]] name = "mccabe" @@ -867,14 +848,14 @@ six = "*" [[package]] name = "pillow" -version = "9.1.1" +version = "9.2.0" description = "Python Imaging Library (Fork)" category = "main" optional = false python-versions = ">=3.7" [package.extras] -docs = ["olefile", "sphinx (>=2.4)", "sphinx-copybutton", "sphinx-issues (>=3.0.1)", "sphinx-removed-in", "sphinx-rtd-theme (>=1.0)", "sphinxext-opengraph"] +docs = ["furo", "olefile", "sphinx (>=2.4)", "sphinx-copybutton", "sphinx-issues (>=3.0.1)", "sphinx-removed-in", "sphinxext-opengraph"] tests = ["check-manifest", "coverage", "defusedxml", "markdown2", "olefile", "packaging", "pyroma", "pytest", "pytest-cov", "pytest-timeout"] [[package]] @@ -897,9 +878,6 @@ category = "dev" optional = false python-versions = ">=3.6" -[package.dependencies] -importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""} - [package.extras] dev = ["pre-commit", "tox"] testing = ["pytest", "pytest-benchmark"] @@ -922,11 +900,11 @@ python-versions = "*" [[package]] name = "protobuf" -version = "3.19.4" -description = "Protocol Buffers" +version = "4.21.2" +description = "" category = "main" optional = false -python-versions = ">=3.5" +python-versions = ">=3.7" [[package]] name = "py" @@ -1017,24 +995,26 @@ python-versions = ">=3.6" [[package]] name = "pylint" -version = "2.13.9" +version = "2.14.5" description = "python code static checker" category = "dev" optional = false -python-versions = ">=3.6.2" +python-versions = ">=3.7.2" [package.dependencies] -astroid = ">=2.11.5,<=2.12.0-dev0" -colorama = {version = "*", markers = "sys_platform == \"win32\""} +astroid = ">=2.11.6,<=2.12.0-dev0" +colorama = {version = ">=0.4.5", markers = "sys_platform == \"win32\""} dill = ">=0.2" isort = ">=4.2.5,<6" mccabe = ">=0.6,<0.8" platformdirs = ">=2.2.0" tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} +tomlkit = ">=0.10.1" typing-extensions = {version = ">=3.10.0", markers = "python_version < \"3.10\""} [package.extras] -testutil = ["gitpython (>3)"] +spelling = ["pyenchant (>=3.2,<4.0)"] +testutils = ["gitpython (>3)"] [[package]] name = "pymongo" @@ -1159,7 +1139,6 @@ python-versions = ">=3.6" atomicwrites = {version = ">=1.0", markers = "sys_platform == \"win32\""} attrs = ">=19.2.0" colorama = {version = "*", markers = "sys_platform == \"win32\""} -importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""} iniconfig = "*" packaging = "*" pluggy = ">=0.12,<2.0" @@ -1346,7 +1325,7 @@ use_chardet_on_py3 = ["chardet (>=3.0.2,<5)"] [[package]] name = "rsa" 
-version = "4.8" +version = "4.9" description = "Pure-Python RSA implementation" category = "main" optional = false @@ -1385,7 +1364,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" [[package]] name = "slack-sdk" -version = "3.17.0" +version = "3.18.0" description = "The Slack API Platform SDK for Python" category = "main" optional = false @@ -1421,7 +1400,7 @@ python-versions = "*" [[package]] name = "sphinx" -version = "5.0.1" +version = "5.0.2" description = "Python documentation generator" category = "dev" optional = false @@ -1606,20 +1585,20 @@ optional = false python-versions = ">=3.7" [[package]] -name = "typed-ast" -version = "1.5.4" -description = "a fork of Python 2 and 3 ast modules with type comment support" +name = "tomlkit" +version = "0.11.1" +description = "Style preserving TOML library" category = "dev" optional = false -python-versions = ">=3.6" +python-versions = ">=3.6,<4.0" [[package]] name = "typing-extensions" -version = "4.0.1" -description = "Backported and Experimental Type Hints for Python 3.6+" -category = "main" +version = "4.3.0" +description = "Backported and Experimental Type Hints for Python 3.7+" +category = "dev" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" [[package]] name = "uritemplate" @@ -1631,11 +1610,11 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" [[package]] name = "urllib3" -version = "1.26.9" +version = "1.26.10" description = "HTTP library with thread-safe connection pooling, file post, and more." category = "main" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4" +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, <4" [package.extras] brotli = ["brotlicffi (>=0.8.0)", "brotli (>=1.0.9)", "brotlipy (>=0.6.0)"] @@ -1697,24 +1676,23 @@ python-versions = ">=3.6" [package.dependencies] idna = ">=2.0" multidict = ">=4.0" -typing-extensions = {version = ">=3.7.4", markers = "python_version < \"3.8\""} [[package]] name = "zipp" -version = "3.8.0" +version = "3.8.1" description = "Backport of pathlib-compatible object wrapper for zip files" category = "main" optional = false python-versions = ">=3.7" [package.extras] -docs = ["sphinx", "jaraco.packaging (>=9)", "rst.linker (>=1.9)"] -testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "jaraco.itertools", "func-timeout", "pytest-black (>=0.3.7)", "pytest-mypy (>=0.9.1)"] +docs = ["sphinx", "jaraco.packaging (>=9)", "rst.linker (>=1.9)", "jaraco.tidelift (>=1.4)"] +testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.3)", "jaraco.itertools", "func-timeout", "pytest-black (>=0.3.7)", "pytest-mypy (>=0.9.1)"] [metadata] lock-version = "1.1" -python-versions = "3.7.*" -content-hash = "bd8e0a03668c380c6e76c8cd6c71020692f4ea9f32de7a4f09433564faa9dad0" +python-versions = "3.9.*" +content-hash = "fc91ecccdb9048af4335a1dc4ea7837ff5ef80a53f2417323bc295c7e4505438" [metadata.files] acre = [] @@ -1821,20 +1799,15 @@ arrow = [ {file = "arrow-0.17.0.tar.gz", hash = "sha256:ff08d10cda1d36c68657d6ad20d74fbea493d980f8b2d45344e00d6ed2bf6ed4"}, ] astroid = [ - {file = "astroid-2.11.5-py3-none-any.whl", hash = "sha256:14ffbb4f6aa2cf474a0834014005487f7ecd8924996083ab411e7fa0b508ce0b"}, - {file = "astroid-2.11.5.tar.gz", hash = "sha256:f4e4ec5294c4b07ac38bab9ca5ddd3914d4bf46f9006eb5c0ae755755061044e"}, + {file = "astroid-2.11.7-py3-none-any.whl", hash = 
"sha256:86b0a340a512c65abf4368b80252754cda17c02cdbbd3f587dddf98112233e7b"}, + {file = "astroid-2.11.7.tar.gz", hash = "sha256:bb24615c77f4837c707669d16907331374ae8a964650a66999da3f5ca68dc946"}, ] async-timeout = [ {file = "async-timeout-4.0.2.tar.gz", hash = "sha256:2163e1640ddb52b7a8c80d0a67a08587e5d245cc9c553a74a847056bc2976b15"}, {file = "async_timeout-4.0.2-py3-none-any.whl", hash = "sha256:8ca1e4fcf50d07413d66d1a5e416e42cfdf5851c981d679a09851a6853383b3c"}, ] -asynctest = [ - {file = "asynctest-0.13.0-py3-none-any.whl", hash = "sha256:5da6118a7e6d6b54d83a8f7197769d046922a44d2a99c21382f0a6e4fadae676"}, - {file = "asynctest-0.13.0.tar.gz", hash = "sha256:c27862842d15d83e6a34eb0b2866c323880eb3a75e4485b079ea11748fd77fac"}, -] atomicwrites = [ - {file = "atomicwrites-1.4.0-py2.py3-none-any.whl", hash = "sha256:6d1784dea7c0c8d4a5172b6c620f40b6e4cbfdf96d783691f2e1302a7b88e197"}, - {file = "atomicwrites-1.4.0.tar.gz", hash = "sha256:ae70396ad1a434f9c7046fd2dd196fc04b12f9e91ffb859164193be8b6168a7a"}, + {file = "atomicwrites-1.4.1.tar.gz", hash = "sha256:81b2c9071a49367a7f770170e5eec8cb66567cfbbc8c73d20ce5ca4a8d71cf11"}, ] attrs = [ {file = "attrs-21.4.0-py2.py3-none-any.whl", hash = "sha256:2d27e3784d7a565d36ab851fe94887c5eccd6a463168875832a1be79c82828b4"}, @@ -1845,8 +1818,8 @@ autopep8 = [ {file = "autopep8-1.5.7.tar.gz", hash = "sha256:276ced7e9e3cb22e5d7c14748384a5cf5d9002257c0ed50c0e075b68011bb6d0"}, ] babel = [ - {file = "Babel-2.9.1-py2.py3-none-any.whl", hash = "sha256:ab49e12b91d937cd11f0b67cb259a57ab4ad2b59ac7a3b41d6c06c0ac5b0def9"}, - {file = "Babel-2.9.1.tar.gz", hash = "sha256:bc0c176f9f6a994582230df350aa6e05ba2ebe4b3ac317eab29d9be5d2768da0"}, + {file = "Babel-2.10.3-py3-none-any.whl", hash = "sha256:ff56f4892c1c4bf0d814575ea23471c230d544203c7748e8c68f0089478d48eb"}, + {file = "Babel-2.10.3.tar.gz", hash = "sha256:7614553711ee97490f732126dc077f8d0ae084ebc6a96e23db1482afabdb2c51"}, ] bcrypt = [ {file = "bcrypt-3.2.2-cp36-abi3-macosx_10_10_universal2.whl", hash = "sha256:7180d98a96f00b1050e93f5b0f556e658605dd9f524d0b0e68ae7944673f525e"}, @@ -1870,60 +1843,74 @@ cachetools = [ {file = "cachetools-5.2.0.tar.gz", hash = "sha256:6a94c6402995a99c3970cc7e4884bb60b4a8639938157eeed436098bf9831757"}, ] certifi = [ - {file = "certifi-2022.5.18.1-py3-none-any.whl", hash = "sha256:f1d53542ee8cbedbe2118b5686372fb33c297fcd6379b050cca0ef13a597382a"}, - {file = "certifi-2022.5.18.1.tar.gz", hash = "sha256:9c5705e395cd70084351dd8ad5c41e65655e08ce46f2ec9cf6c2c08390f71eb7"}, + {file = "certifi-2022.6.15-py3-none-any.whl", hash = "sha256:fe86415d55e84719d75f8b69414f6438ac3547d2078ab91b67e779ef69378412"}, + {file = "certifi-2022.6.15.tar.gz", hash = "sha256:84c85a9078b11105f04f3036a9482ae10e4621616db313fe045dd24743a0820d"}, ] cffi = [ - {file = "cffi-1.15.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:c2502a1a03b6312837279c8c1bd3ebedf6c12c4228ddbad40912d671ccc8a962"}, - {file = "cffi-1.15.0-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:23cfe892bd5dd8941608f93348c0737e369e51c100d03718f108bf1add7bd6d0"}, - {file = "cffi-1.15.0-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:41d45de54cd277a7878919867c0f08b0cf817605e4eb94093e7516505d3c8d14"}, - {file = "cffi-1.15.0-cp27-cp27m-win32.whl", hash = "sha256:4a306fa632e8f0928956a41fa8e1d6243c71e7eb59ffbd165fc0b41e316b2474"}, - {file = "cffi-1.15.0-cp27-cp27m-win_amd64.whl", hash = "sha256:e7022a66d9b55e93e1a845d8c9eba2a1bebd4966cd8bfc25d9cd07d515b33fa6"}, - {file = "cffi-1.15.0-cp27-cp27mu-manylinux1_i686.whl", hash = 
"sha256:14cd121ea63ecdae71efa69c15c5543a4b5fbcd0bbe2aad864baca0063cecf27"}, - {file = "cffi-1.15.0-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:d4d692a89c5cf08a8557fdeb329b82e7bf609aadfaed6c0d79f5a449a3c7c023"}, - {file = "cffi-1.15.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0104fb5ae2391d46a4cb082abdd5c69ea4eab79d8d44eaaf79f1b1fd806ee4c2"}, - {file = "cffi-1.15.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:91ec59c33514b7c7559a6acda53bbfe1b283949c34fe7440bcf917f96ac0723e"}, - {file = "cffi-1.15.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:f5c7150ad32ba43a07c4479f40241756145a1f03b43480e058cfd862bf5041c7"}, - {file = "cffi-1.15.0-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:00c878c90cb53ccfaae6b8bc18ad05d2036553e6d9d1d9dbcf323bbe83854ca3"}, - {file = "cffi-1.15.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:abb9a20a72ac4e0fdb50dae135ba5e77880518e742077ced47eb1499e29a443c"}, - {file = "cffi-1.15.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a5263e363c27b653a90078143adb3d076c1a748ec9ecc78ea2fb916f9b861962"}, - {file = "cffi-1.15.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f54a64f8b0c8ff0b64d18aa76675262e1700f3995182267998c31ae974fbc382"}, - {file = "cffi-1.15.0-cp310-cp310-win32.whl", hash = "sha256:c21c9e3896c23007803a875460fb786118f0cdd4434359577ea25eb556e34c55"}, - {file = "cffi-1.15.0-cp310-cp310-win_amd64.whl", hash = "sha256:5e069f72d497312b24fcc02073d70cb989045d1c91cbd53979366077959933e0"}, - {file = "cffi-1.15.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:64d4ec9f448dfe041705426000cc13e34e6e5bb13736e9fd62e34a0b0c41566e"}, - {file = "cffi-1.15.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2756c88cbb94231c7a147402476be2c4df2f6078099a6f4a480d239a8817ae39"}, - {file = "cffi-1.15.0-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3b96a311ac60a3f6be21d2572e46ce67f09abcf4d09344c49274eb9e0bf345fc"}, - {file = "cffi-1.15.0-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:75e4024375654472cc27e91cbe9eaa08567f7fbdf822638be2814ce059f58032"}, - {file = "cffi-1.15.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:59888172256cac5629e60e72e86598027aca6bf01fa2465bdb676d37636573e8"}, - {file = "cffi-1.15.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:27c219baf94952ae9d50ec19651a687b826792055353d07648a5695413e0c605"}, - {file = "cffi-1.15.0-cp36-cp36m-win32.whl", hash = "sha256:4958391dbd6249d7ad855b9ca88fae690783a6be9e86df65865058ed81fc860e"}, - {file = "cffi-1.15.0-cp36-cp36m-win_amd64.whl", hash = "sha256:f6f824dc3bce0edab5f427efcfb1d63ee75b6fcb7282900ccaf925be84efb0fc"}, - {file = "cffi-1.15.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:06c48159c1abed75c2e721b1715c379fa3200c7784271b3c46df01383b593636"}, - {file = "cffi-1.15.0-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:c2051981a968d7de9dd2d7b87bcb9c939c74a34626a6e2f8181455dd49ed69e4"}, - {file = "cffi-1.15.0-cp37-cp37m-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:fd8a250edc26254fe5b33be00402e6d287f562b6a5b2152dec302fa15bb3e997"}, - {file = "cffi-1.15.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:91d77d2a782be4274da750752bb1650a97bfd8f291022b379bb8e01c66b4e96b"}, - {file = "cffi-1.15.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:45db3a33139e9c8f7c09234b5784a5e33d31fd6907800b316decad50af323ff2"}, - {file = "cffi-1.15.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:263cc3d821c4ab2213cbe8cd8b355a7f72a8324577dc865ef98487c1aeee2bc7"}, - {file = "cffi-1.15.0-cp37-cp37m-win32.whl", hash = "sha256:17771976e82e9f94976180f76468546834d22a7cc404b17c22df2a2c81db0c66"}, - {file = "cffi-1.15.0-cp37-cp37m-win_amd64.whl", hash = "sha256:3415c89f9204ee60cd09b235810be700e993e343a408693e80ce7f6a40108029"}, - {file = "cffi-1.15.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:4238e6dab5d6a8ba812de994bbb0a79bddbdf80994e4ce802b6f6f3142fcc880"}, - {file = "cffi-1.15.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:0808014eb713677ec1292301ea4c81ad277b6cdf2fdd90fd540af98c0b101d20"}, - {file = "cffi-1.15.0-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:57e9ac9ccc3101fac9d6014fba037473e4358ef4e89f8e181f8951a2c0162024"}, - {file = "cffi-1.15.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b6c2ea03845c9f501ed1313e78de148cd3f6cad741a75d43a29b43da27f2e1e"}, - {file = "cffi-1.15.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:10dffb601ccfb65262a27233ac273d552ddc4d8ae1bf93b21c94b8511bffe728"}, - {file = "cffi-1.15.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:786902fb9ba7433aae840e0ed609f45c7bcd4e225ebb9c753aa39725bb3e6ad6"}, - {file = "cffi-1.15.0-cp38-cp38-win32.whl", hash = "sha256:da5db4e883f1ce37f55c667e5c0de439df76ac4cb55964655906306918e7363c"}, - {file = "cffi-1.15.0-cp38-cp38-win_amd64.whl", hash = "sha256:181dee03b1170ff1969489acf1c26533710231c58f95534e3edac87fff06c443"}, - {file = "cffi-1.15.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:45e8636704eacc432a206ac7345a5d3d2c62d95a507ec70d62f23cd91770482a"}, - {file = "cffi-1.15.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:31fb708d9d7c3f49a60f04cf5b119aeefe5644daba1cd2a0fe389b674fd1de37"}, - {file = "cffi-1.15.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:6dc2737a3674b3e344847c8686cf29e500584ccad76204efea14f451d4cc669a"}, - {file = "cffi-1.15.0-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:74fdfdbfdc48d3f47148976f49fab3251e550a8720bebc99bf1483f5bfb5db3e"}, - {file = "cffi-1.15.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ffaa5c925128e29efbde7301d8ecaf35c8c60ffbcd6a1ffd3a552177c8e5e796"}, - {file = "cffi-1.15.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3f7d084648d77af029acb79a0ff49a0ad7e9d09057a9bf46596dac9514dc07df"}, - {file = "cffi-1.15.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ef1f279350da2c586a69d32fc8733092fd32cc8ac95139a00377841f59a3f8d8"}, - {file = "cffi-1.15.0-cp39-cp39-win32.whl", hash = "sha256:2a23af14f408d53d5e6cd4e3d9a24ff9e05906ad574822a10563efcef137979a"}, - {file = "cffi-1.15.0-cp39-cp39-win_amd64.whl", hash = "sha256:3773c4d81e6e818df2efbc7dd77325ca0dcb688116050fb2b3011218eda36139"}, - {file = "cffi-1.15.0.tar.gz", hash = "sha256:920f0d66a896c2d99f0adbb391f990a84091179542c205fa53ce5787aff87954"}, + {file = "cffi-1.15.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:a66d3508133af6e8548451b25058d5812812ec3798c886bf38ed24a98216fab2"}, + {file = "cffi-1.15.1-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:470c103ae716238bbe698d67ad020e1db9d9dba34fa5a899b5e21577e6d52ed2"}, + {file = "cffi-1.15.1-cp27-cp27m-manylinux1_x86_64.whl", hash = 
"sha256:9ad5db27f9cabae298d151c85cf2bad1d359a1b9c686a275df03385758e2f914"}, + {file = "cffi-1.15.1-cp27-cp27m-win32.whl", hash = "sha256:b3bbeb01c2b273cca1e1e0c5df57f12dce9a4dd331b4fa1635b8bec26350bde3"}, + {file = "cffi-1.15.1-cp27-cp27m-win_amd64.whl", hash = "sha256:e00b098126fd45523dd056d2efba6c5a63b71ffe9f2bbe1a4fe1716e1d0c331e"}, + {file = "cffi-1.15.1-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:d61f4695e6c866a23a21acab0509af1cdfd2c013cf256bbf5b6b5e2695827162"}, + {file = "cffi-1.15.1-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:ed9cb427ba5504c1dc15ede7d516b84757c3e3d7868ccc85121d9310d27eed0b"}, + {file = "cffi-1.15.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:39d39875251ca8f612b6f33e6b1195af86d1b3e60086068be9cc053aa4376e21"}, + {file = "cffi-1.15.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:285d29981935eb726a4399badae8f0ffdff4f5050eaa6d0cfc3f64b857b77185"}, + {file = "cffi-1.15.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3eb6971dcff08619f8d91607cfc726518b6fa2a9eba42856be181c6d0d9515fd"}, + {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:21157295583fe8943475029ed5abdcf71eb3911894724e360acff1d61c1d54bc"}, + {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5635bd9cb9731e6d4a1132a498dd34f764034a8ce60cef4f5319c0541159392f"}, + {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2012c72d854c2d03e45d06ae57f40d78e5770d252f195b93f581acf3ba44496e"}, + {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd86c085fae2efd48ac91dd7ccffcfc0571387fe1193d33b6394db7ef31fe2a4"}, + {file = "cffi-1.15.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:fa6693661a4c91757f4412306191b6dc88c1703f780c8234035eac011922bc01"}, + {file = "cffi-1.15.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:59c0b02d0a6c384d453fece7566d1c7e6b7bae4fc5874ef2ef46d56776d61c9e"}, + {file = "cffi-1.15.1-cp310-cp310-win32.whl", hash = "sha256:cba9d6b9a7d64d4bd46167096fc9d2f835e25d7e4c121fb2ddfc6528fb0413b2"}, + {file = "cffi-1.15.1-cp310-cp310-win_amd64.whl", hash = "sha256:ce4bcc037df4fc5e3d184794f27bdaab018943698f4ca31630bc7f84a7b69c6d"}, + {file = "cffi-1.15.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3d08afd128ddaa624a48cf2b859afef385b720bb4b43df214f85616922e6a5ac"}, + {file = "cffi-1.15.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3799aecf2e17cf585d977b780ce79ff0dc9b78d799fc694221ce814c2c19db83"}, + {file = "cffi-1.15.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a591fe9e525846e4d154205572a029f653ada1a78b93697f3b5a8f1f2bc055b9"}, + {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3548db281cd7d2561c9ad9984681c95f7b0e38881201e157833a2342c30d5e8c"}, + {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:91fc98adde3d7881af9b59ed0294046f3806221863722ba7d8d120c575314325"}, + {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94411f22c3985acaec6f83c6df553f2dbe17b698cc7f8ae751ff2237d96b9e3c"}, + {file = "cffi-1.15.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:03425bdae262c76aad70202debd780501fabeaca237cdfddc008987c0e0f59ef"}, + {file = "cffi-1.15.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = 
"sha256:cc4d65aeeaa04136a12677d3dd0b1c0c94dc43abac5860ab33cceb42b801c1e8"}, + {file = "cffi-1.15.1-cp311-cp311-win32.whl", hash = "sha256:a0f100c8912c114ff53e1202d0078b425bee3649ae34d7b070e9697f93c5d52d"}, + {file = "cffi-1.15.1-cp311-cp311-win_amd64.whl", hash = "sha256:04ed324bda3cda42b9b695d51bb7d54b680b9719cfab04227cdd1e04e5de3104"}, + {file = "cffi-1.15.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50a74364d85fd319352182ef59c5c790484a336f6db772c1a9231f1c3ed0cbd7"}, + {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e263d77ee3dd201c3a142934a086a4450861778baaeeb45db4591ef65550b0a6"}, + {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cec7d9412a9102bdc577382c3929b337320c4c4c4849f2c5cdd14d7368c5562d"}, + {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4289fc34b2f5316fbb762d75362931e351941fa95fa18789191b33fc4cf9504a"}, + {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:173379135477dc8cac4bc58f45db08ab45d228b3363adb7af79436135d028405"}, + {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:6975a3fac6bc83c4a65c9f9fcab9e47019a11d3d2cf7f3c0d03431bf145a941e"}, + {file = "cffi-1.15.1-cp36-cp36m-win32.whl", hash = "sha256:2470043b93ff09bf8fb1d46d1cb756ce6132c54826661a32d4e4d132e1977adf"}, + {file = "cffi-1.15.1-cp36-cp36m-win_amd64.whl", hash = "sha256:30d78fbc8ebf9c92c9b7823ee18eb92f2e6ef79b45ac84db507f52fbe3ec4497"}, + {file = "cffi-1.15.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:198caafb44239b60e252492445da556afafc7d1e3ab7a1fb3f0584ef6d742375"}, + {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5ef34d190326c3b1f822a5b7a45f6c4535e2f47ed06fec77d3d799c450b2651e"}, + {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8102eaf27e1e448db915d08afa8b41d6c7ca7a04b7d73af6514df10a3e74bd82"}, + {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5df2768244d19ab7f60546d0c7c63ce1581f7af8b5de3eb3004b9b6fc8a9f84b"}, + {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a8c4917bd7ad33e8eb21e9a5bbba979b49d9a97acb3a803092cbc1133e20343c"}, + {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e2642fe3142e4cc4af0799748233ad6da94c62a8bec3a6648bf8ee68b1c7426"}, + {file = "cffi-1.15.1-cp37-cp37m-win32.whl", hash = "sha256:e229a521186c75c8ad9490854fd8bbdd9a0c9aa3a524326b55be83b54d4e0ad9"}, + {file = "cffi-1.15.1-cp37-cp37m-win_amd64.whl", hash = "sha256:a0b71b1b8fbf2b96e41c4d990244165e2c9be83d54962a9a1d118fd8657d2045"}, + {file = "cffi-1.15.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:320dab6e7cb2eacdf0e658569d2575c4dad258c0fcc794f46215e1e39f90f2c3"}, + {file = "cffi-1.15.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e74c6b51a9ed6589199c787bf5f9875612ca4a8a0785fb2d4a84429badaf22a"}, + {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5c84c68147988265e60416b57fc83425a78058853509c1b0629c180094904a5"}, + {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3b926aa83d1edb5aa5b427b4053dc420ec295a08e40911296b9eb1b6170f6cca"}, + {file = 
"cffi-1.15.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:87c450779d0914f2861b8526e035c5e6da0a3199d8f1add1a665e1cbc6fc6d02"}, + {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f2c9f67e9821cad2e5f480bc8d83b8742896f1242dba247911072d4fa94c192"}, + {file = "cffi-1.15.1-cp38-cp38-win32.whl", hash = "sha256:8b7ee99e510d7b66cdb6c593f21c043c248537a32e0bedf02e01e9553a172314"}, + {file = "cffi-1.15.1-cp38-cp38-win_amd64.whl", hash = "sha256:00a9ed42e88df81ffae7a8ab6d9356b371399b91dbdf0c3cb1e84c03a13aceb5"}, + {file = "cffi-1.15.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:54a2db7b78338edd780e7ef7f9f6c442500fb0d41a5a4ea24fff1c929d5af585"}, + {file = "cffi-1.15.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:fcd131dd944808b5bdb38e6f5b53013c5aa4f334c5cad0c72742f6eba4b73db0"}, + {file = "cffi-1.15.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7473e861101c9e72452f9bf8acb984947aa1661a7704553a9f6e4baa5ba64415"}, + {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c9a799e985904922a4d207a94eae35c78ebae90e128f0c4e521ce339396be9d"}, + {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3bcde07039e586f91b45c88f8583ea7cf7a0770df3a1649627bf598332cb6984"}, + {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:33ab79603146aace82c2427da5ca6e58f2b3f2fb5da893ceac0c42218a40be35"}, + {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d598b938678ebf3c67377cdd45e09d431369c3b1a5b331058c338e201f12b27"}, + {file = "cffi-1.15.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:db0fbb9c62743ce59a9ff687eb5f4afbe77e5e8403d6697f7446e5f609976f76"}, + {file = "cffi-1.15.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:98d85c6a2bef81588d9227dde12db8a7f47f639f4a17c9ae08e773aa9c697bf3"}, + {file = "cffi-1.15.1-cp39-cp39-win32.whl", hash = "sha256:40f4774f5a9d4f5e344f31a32b5096977b5d48560c5592e2f3d2c4374bd543ee"}, + {file = "cffi-1.15.1-cp39-cp39-win_amd64.whl", hash = "sha256:70df4e3b545a17496c9b3f41f5115e69a4f2e77e94e1d2a8e1070bc0c38c8a3c"}, + {file = "cffi-1.15.1.tar.gz", hash = "sha256:d400bfb9a37b1351253cb402671cea7e89bdecc294e8016a707f6d1d8ac934f9"}, ] charset-normalizer = [ {file = "charset-normalizer-2.0.12.tar.gz", hash = "sha256:2857e29ff0d34db842cd7ca3230549d1a697f96ee6d3fb071cfa6c7393832597"}, @@ -1938,8 +1925,8 @@ clique = [ {file = "clique-1.6.1.tar.gz", hash = "sha256:90165c1cf162d4dd1baef83ceaa1afc886b453e379094fa5b60ea470d1733e66"}, ] colorama = [ - {file = "colorama-0.4.4-py2.py3-none-any.whl", hash = "sha256:9f47eda37229f68eee03b24b9748937c7dc3868f906e8ba69fbcbdd3bc5dc3e2"}, - {file = "colorama-0.4.4.tar.gz", hash = "sha256:5941b2b48a20143d2267e95b1c2a7603ce057ee39fd88e7329b0c292aa16869b"}, + {file = "colorama-0.4.5-py2.py3-none-any.whl", hash = "sha256:854bf444933e37f5824ae7bfc1e98d5bce2ebe4160d46b5edf346a89358e99da"}, + {file = "colorama-0.4.5.tar.gz", hash = "sha256:e6c6b4334fc50988a639d9b98aa429a0b57da6e17b9a44f0451f930b6967b7a4"}, ] commonmark = [ {file = "commonmark-0.9.1-py2.py3-none-any.whl", hash = "sha256:da2f38c92590f83de410ba1a3cbceafbc74fee9def35f9251ba9a971d6d66fd9"}, @@ -1950,71 +1937,71 @@ coolname = [ {file = "coolname-1.1.0.tar.gz", hash = "sha256:410fe6ea9999bf96f2856ef0c726d5f38782bbefb7bb1aca0e91e0dc98ed09e3"}, ] coverage = [ - {file = 
"coverage-6.4.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f1d5aa2703e1dab4ae6cf416eb0095304f49d004c39e9db1d86f57924f43006b"}, - {file = "coverage-6.4.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4ce1b258493cbf8aec43e9b50d89982346b98e9ffdfaae8ae5793bc112fb0068"}, - {file = "coverage-6.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:83c4e737f60c6936460c5be330d296dd5b48b3963f48634c53b3f7deb0f34ec4"}, - {file = "coverage-6.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:84e65ef149028516c6d64461b95a8dbcfce95cfd5b9eb634320596173332ea84"}, - {file = "coverage-6.4.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f69718750eaae75efe506406c490d6fc5a6161d047206cc63ce25527e8a3adad"}, - {file = "coverage-6.4.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e57816f8ffe46b1df8f12e1b348f06d164fd5219beba7d9433ba79608ef011cc"}, - {file = "coverage-6.4.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:01c5615d13f3dd3aa8543afc069e5319cfa0c7d712f6e04b920431e5c564a749"}, - {file = "coverage-6.4.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:75ab269400706fab15981fd4bd5080c56bd5cc07c3bccb86aab5e1d5a88dc8f4"}, - {file = "coverage-6.4.1-cp310-cp310-win32.whl", hash = "sha256:a7f3049243783df2e6cc6deafc49ea123522b59f464831476d3d1448e30d72df"}, - {file = "coverage-6.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:ee2ddcac99b2d2aec413e36d7a429ae9ebcadf912946b13ffa88e7d4c9b712d6"}, - {file = "coverage-6.4.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:fb73e0011b8793c053bfa85e53129ba5f0250fdc0392c1591fd35d915ec75c46"}, - {file = "coverage-6.4.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:106c16dfe494de3193ec55cac9640dd039b66e196e4641fa8ac396181578b982"}, - {file = "coverage-6.4.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:87f4f3df85aa39da00fd3ec4b5abeb7407e82b68c7c5ad181308b0e2526da5d4"}, - {file = "coverage-6.4.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:961e2fb0680b4f5ad63234e0bf55dfb90d302740ae9c7ed0120677a94a1590cb"}, - {file = "coverage-6.4.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:cec3a0f75c8f1031825e19cd86ee787e87cf03e4fd2865c79c057092e69e3a3b"}, - {file = "coverage-6.4.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:129cd05ba6f0d08a766d942a9ed4b29283aff7b2cccf5b7ce279d50796860bb3"}, - {file = "coverage-6.4.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:bf5601c33213d3cb19d17a796f8a14a9eaa5e87629a53979a5981e3e3ae166f6"}, - {file = "coverage-6.4.1-cp37-cp37m-win32.whl", hash = "sha256:269eaa2c20a13a5bf17558d4dc91a8d078c4fa1872f25303dddcbba3a813085e"}, - {file = "coverage-6.4.1-cp37-cp37m-win_amd64.whl", hash = "sha256:f02cbbf8119db68455b9d763f2f8737bb7db7e43720afa07d8eb1604e5c5ae28"}, - {file = "coverage-6.4.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ffa9297c3a453fba4717d06df579af42ab9a28022444cae7fa605af4df612d54"}, - {file = "coverage-6.4.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:145f296d00441ca703a659e8f3eb48ae39fb083baba2d7ce4482fb2723e050d9"}, - {file = "coverage-6.4.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d67d44996140af8b84284e5e7d398e589574b376fb4de8ccd28d82ad8e3bea13"}, - {file = 
"coverage-6.4.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2bd9a6fc18aab8d2e18f89b7ff91c0f34ff4d5e0ba0b33e989b3cd4194c81fd9"}, - {file = "coverage-6.4.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3384f2a3652cef289e38100f2d037956194a837221edd520a7ee5b42d00cc605"}, - {file = "coverage-6.4.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:9b3e07152b4563722be523e8cd0b209e0d1a373022cfbde395ebb6575bf6790d"}, - {file = "coverage-6.4.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1480ff858b4113db2718848d7b2d1b75bc79895a9c22e76a221b9d8d62496428"}, - {file = "coverage-6.4.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:865d69ae811a392f4d06bde506d531f6a28a00af36f5c8649684a9e5e4a85c83"}, - {file = "coverage-6.4.1-cp38-cp38-win32.whl", hash = "sha256:664a47ce62fe4bef9e2d2c430306e1428ecea207ffd68649e3b942fa8ea83b0b"}, - {file = "coverage-6.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:26dff09fb0d82693ba9e6231248641d60ba606150d02ed45110f9ec26404ed1c"}, - {file = "coverage-6.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d9c80df769f5ec05ad21ea34be7458d1dc51ff1fb4b2219e77fe24edf462d6df"}, - {file = "coverage-6.4.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:39ee53946bf009788108b4dd2894bf1349b4e0ca18c2016ffa7d26ce46b8f10d"}, - {file = "coverage-6.4.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f5b66caa62922531059bc5ac04f836860412f7f88d38a476eda0a6f11d4724f4"}, - {file = "coverage-6.4.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fd180ed867e289964404051a958f7cccabdeed423f91a899829264bb7974d3d3"}, - {file = "coverage-6.4.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:84631e81dd053e8a0d4967cedab6db94345f1c36107c71698f746cb2636c63e3"}, - {file = "coverage-6.4.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:8c08da0bd238f2970230c2a0d28ff0e99961598cb2e810245d7fc5afcf1254e8"}, - {file = "coverage-6.4.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:d42c549a8f41dc103a8004b9f0c433e2086add8a719da00e246e17cbe4056f72"}, - {file = "coverage-6.4.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:309ce4a522ed5fca432af4ebe0f32b21d6d7ccbb0f5fcc99290e71feba67c264"}, - {file = "coverage-6.4.1-cp39-cp39-win32.whl", hash = "sha256:fdb6f7bd51c2d1714cea40718f6149ad9be6a2ee7d93b19e9f00934c0f2a74d9"}, - {file = "coverage-6.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:342d4aefd1c3e7f620a13f4fe563154d808b69cccef415415aece4c786665397"}, - {file = "coverage-6.4.1-pp36.pp37.pp38-none-any.whl", hash = "sha256:4803e7ccf93230accb928f3a68f00ffa80a88213af98ed338a57ad021ef06815"}, - {file = "coverage-6.4.1.tar.gz", hash = "sha256:4321f075095a096e70aff1d002030ee612b65a205a0a0f5b815280d5dc58100c"}, + {file = "coverage-6.4.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a9032f9b7d38bdf882ac9f66ebde3afb8145f0d4c24b2e600bc4c6304aafb87e"}, + {file = "coverage-6.4.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e0524adb49c716ca763dbc1d27bedce36b14f33e6b8af6dba56886476b42957c"}, + {file = "coverage-6.4.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d4548be38a1c810d79e097a38107b6bf2ff42151900e47d49635be69943763d8"}, + {file = "coverage-6.4.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:f23876b018dfa5d3e98e96f5644b109090f16a4acb22064e0f06933663005d39"}, + {file = "coverage-6.4.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6fe75dcfcb889b6800f072f2af5a331342d63d0c1b3d2bf0f7b4f6c353e8c9c0"}, + {file = "coverage-6.4.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:2f8553878a24b00d5ab04b7a92a2af50409247ca5c4b7a2bf4eabe94ed20d3ee"}, + {file = "coverage-6.4.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:d774d9e97007b018a651eadc1b3970ed20237395527e22cbeb743d8e73e0563d"}, + {file = "coverage-6.4.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d56f105592188ce7a797b2bd94b4a8cb2e36d5d9b0d8a1d2060ff2a71e6b9bbc"}, + {file = "coverage-6.4.2-cp310-cp310-win32.whl", hash = "sha256:d230d333b0be8042ac34808ad722eabba30036232e7a6fb3e317c49f61c93386"}, + {file = "coverage-6.4.2-cp310-cp310-win_amd64.whl", hash = "sha256:5ef42e1db047ca42827a85e34abe973971c635f83aed49611b7f3ab49d0130f0"}, + {file = "coverage-6.4.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:25b7ec944f114f70803d6529394b64f8749e93cbfac0fe6c5ea1b7e6c14e8a46"}, + {file = "coverage-6.4.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7bb00521ab4f99fdce2d5c05a91bddc0280f0afaee0e0a00425e28e209d4af07"}, + {file = "coverage-6.4.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2dff52b3e7f76ada36f82124703f4953186d9029d00d6287f17c68a75e2e6039"}, + {file = "coverage-6.4.2-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:147605e1702d996279bb3cc3b164f408698850011210d133a2cb96a73a2f7996"}, + {file = "coverage-6.4.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:422fa44070b42fef9fb8dabd5af03861708cdd6deb69463adc2130b7bf81332f"}, + {file = "coverage-6.4.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:8af6c26ba8df6338e57bedbf916d76bdae6308e57fc8f14397f03b5da8622b4e"}, + {file = "coverage-6.4.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:5336e0352c0b12c7e72727d50ff02557005f79a0b8dcad9219c7c4940a930083"}, + {file = "coverage-6.4.2-cp37-cp37m-win32.whl", hash = "sha256:0f211df2cba951ffcae210ee00e54921ab42e2b64e0bf2c0befc977377fb09b7"}, + {file = "coverage-6.4.2-cp37-cp37m-win_amd64.whl", hash = "sha256:a13772c19619118903d65a91f1d5fea84be494d12fd406d06c849b00d31bf120"}, + {file = "coverage-6.4.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f7bd0ffbcd03dc39490a1f40b2669cc414fae0c4e16b77bb26806a4d0b7d1452"}, + {file = "coverage-6.4.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:0895ea6e6f7f9939166cc835df8fa4599e2d9b759b02d1521b574e13b859ac32"}, + {file = "coverage-6.4.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d4e7ced84a11c10160c0697a6cc0b214a5d7ab21dfec1cd46e89fbf77cc66fae"}, + {file = "coverage-6.4.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:80db4a47a199c4563d4a25919ff29c97c87569130375beca3483b41ad5f698e8"}, + {file = "coverage-6.4.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3def6791adf580d66f025223078dc84c64696a26f174131059ce8e91452584e1"}, + {file = "coverage-6.4.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:4f89d8e03c8a3757aae65570d14033e8edf192ee9298303db15955cadcff0c63"}, + {file = "coverage-6.4.2-cp38-cp38-musllinux_1_1_i686.whl", hash = 
"sha256:6d0b48aff8e9720bdec315d67723f0babd936a7211dc5df453ddf76f89c59933"}, + {file = "coverage-6.4.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2b20286c2b726f94e766e86a3fddb7b7e37af5d0c635bdfa7e4399bc523563de"}, + {file = "coverage-6.4.2-cp38-cp38-win32.whl", hash = "sha256:d714af0bdba67739598849c9f18efdcc5a0412f4993914a0ec5ce0f1e864d783"}, + {file = "coverage-6.4.2-cp38-cp38-win_amd64.whl", hash = "sha256:5f65e5d3ff2d895dab76b1faca4586b970a99b5d4b24e9aafffc0ce94a6022d6"}, + {file = "coverage-6.4.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a697977157adc052284a7160569b36a8bbec09db3c3220642e6323b47cec090f"}, + {file = "coverage-6.4.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c77943ef768276b61c96a3eb854eba55633c7a3fddf0a79f82805f232326d33f"}, + {file = "coverage-6.4.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:54d8d0e073a7f238f0666d3c7c0d37469b2aa43311e4024c925ee14f5d5a1cbe"}, + {file = "coverage-6.4.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f22325010d8824594820d6ce84fa830838f581a7fd86a9235f0d2ed6deb61e29"}, + {file = "coverage-6.4.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:24b04d305ea172ccb21bee5bacd559383cba2c6fcdef85b7701cf2de4188aa55"}, + {file = "coverage-6.4.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:866ebf42b4c5dbafd64455b0a1cd5aa7b4837a894809413b930026c91e18090b"}, + {file = "coverage-6.4.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:e36750fbbc422c1c46c9d13b937ab437138b998fe74a635ec88989afb57a3978"}, + {file = "coverage-6.4.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:79419370d6a637cb18553ecb25228893966bd7935a9120fa454e7076f13b627c"}, + {file = "coverage-6.4.2-cp39-cp39-win32.whl", hash = "sha256:b5e28db9199dd3833cc8a07fa6cf429a01227b5d429facb56eccd765050c26cd"}, + {file = "coverage-6.4.2-cp39-cp39-win_amd64.whl", hash = "sha256:edfdabe7aa4f97ed2b9dd5dde52d2bb29cb466993bb9d612ddd10d0085a683cf"}, + {file = "coverage-6.4.2-pp36.pp37.pp38-none-any.whl", hash = "sha256:e2618cb2cf5a7cc8d698306e42ebcacd02fb7ef8cfc18485c59394152c70be97"}, + {file = "coverage-6.4.2.tar.gz", hash = "sha256:6c3ccfe89c36f3e5b9837b9ee507472310164f352c9fe332120b764c9d60adbe"}, ] cryptography = [ - {file = "cryptography-37.0.2-cp36-abi3-macosx_10_10_universal2.whl", hash = "sha256:ef15c2df7656763b4ff20a9bc4381d8352e6640cfeb95c2972c38ef508e75181"}, - {file = "cryptography-37.0.2-cp36-abi3-macosx_10_10_x86_64.whl", hash = "sha256:3c81599befb4d4f3d7648ed3217e00d21a9341a9a688ecdd615ff72ffbed7336"}, - {file = "cryptography-37.0.2-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:2bd1096476aaac820426239ab534b636c77d71af66c547b9ddcd76eb9c79e004"}, - {file = "cryptography-37.0.2-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:31fe38d14d2e5f787e0aecef831457da6cec68e0bb09a35835b0b44ae8b988fe"}, - {file = "cryptography-37.0.2-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:093cb351031656d3ee2f4fa1be579a8c69c754cf874206be1d4cf3b542042804"}, - {file = "cryptography-37.0.2-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:59b281eab51e1b6b6afa525af2bd93c16d49358404f814fe2c2410058623928c"}, - {file = "cryptography-37.0.2-cp36-abi3-manylinux_2_24_x86_64.whl", hash = "sha256:0cc20f655157d4cfc7bada909dc5cc228211b075ba8407c46467f63597c78178"}, - {file = 
"cryptography-37.0.2-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:f8ec91983e638a9bcd75b39f1396e5c0dc2330cbd9ce4accefe68717e6779e0a"}, - {file = "cryptography-37.0.2-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:46f4c544f6557a2fefa7ac8ac7d1b17bf9b647bd20b16decc8fbcab7117fbc15"}, - {file = "cryptography-37.0.2-cp36-abi3-win32.whl", hash = "sha256:731c8abd27693323b348518ed0e0705713a36d79fdbd969ad968fbef0979a7e0"}, - {file = "cryptography-37.0.2-cp36-abi3-win_amd64.whl", hash = "sha256:471e0d70201c069f74c837983189949aa0d24bb2d751b57e26e3761f2f782b8d"}, - {file = "cryptography-37.0.2-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a68254dd88021f24a68b613d8c51d5c5e74d735878b9e32cc0adf19d1f10aaf9"}, - {file = "cryptography-37.0.2-pp37-pypy37_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:a7d5137e556cc0ea418dca6186deabe9129cee318618eb1ffecbd35bee55ddc1"}, - {file = "cryptography-37.0.2-pp38-pypy38_pp73-macosx_10_10_x86_64.whl", hash = "sha256:aeaba7b5e756ea52c8861c133c596afe93dd716cbcacae23b80bc238202dc023"}, - {file = "cryptography-37.0.2-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95e590dd70642eb2079d280420a888190aa040ad20f19ec8c6e097e38aa29e06"}, - {file = "cryptography-37.0.2-pp38-pypy38_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:1b9362d34363f2c71b7853f6251219298124aa4cc2075ae2932e64c91a3e2717"}, - {file = "cryptography-37.0.2-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:e53258e69874a306fcecb88b7534d61820db8a98655662a3dd2ec7f1afd9132f"}, - {file = "cryptography-37.0.2-pp39-pypy39_pp73-macosx_10_10_x86_64.whl", hash = "sha256:1f3bfbd611db5cb58ca82f3deb35e83af34bb8cf06043fa61500157d50a70982"}, - {file = "cryptography-37.0.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:419c57d7b63f5ec38b1199a9521d77d7d1754eb97827bbb773162073ccd8c8d4"}, - {file = "cryptography-37.0.2-pp39-pypy39_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:dc26bb134452081859aa21d4990474ddb7e863aa39e60d1592800a8865a702de"}, - {file = "cryptography-37.0.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:3b8398b3d0efc420e777c40c16764d6870bcef2eb383df9c6dbb9ffe12c64452"}, - {file = "cryptography-37.0.2.tar.gz", hash = "sha256:f224ad253cc9cea7568f49077007d2263efa57396a2f2f78114066fd54b5c68e"}, + {file = "cryptography-37.0.4-cp36-abi3-macosx_10_10_universal2.whl", hash = "sha256:549153378611c0cca1042f20fd9c5030d37a72f634c9326e225c9f666d472884"}, + {file = "cryptography-37.0.4-cp36-abi3-macosx_10_10_x86_64.whl", hash = "sha256:a958c52505c8adf0d3822703078580d2c0456dd1d27fabfb6f76fe63d2971cd6"}, + {file = "cryptography-37.0.4-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f721d1885ecae9078c3f6bbe8a88bc0786b6e749bf32ccec1ef2b18929a05046"}, + {file = "cryptography-37.0.4-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:3d41b965b3380f10e4611dbae366f6dc3cefc7c9ac4e8842a806b9672ae9add5"}, + {file = "cryptography-37.0.4-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:80f49023dd13ba35f7c34072fa17f604d2f19bf0989f292cedf7ab5770b87a0b"}, + {file = "cryptography-37.0.4-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2dcb0b3b63afb6df7fd94ec6fbddac81b5492513f7b0436210d390c14d46ee8"}, + {file = "cryptography-37.0.4-cp36-abi3-manylinux_2_24_x86_64.whl", hash = "sha256:b7f8dd0d4c1f21759695c05a5ec8536c12f31611541f8904083f3dc582604280"}, + {file = 
"cryptography-37.0.4-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:30788e070800fec9bbcf9faa71ea6d8068f5136f60029759fd8c3efec3c9dcb3"}, + {file = "cryptography-37.0.4-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:190f82f3e87033821828f60787cfa42bff98404483577b591429ed99bed39d59"}, + {file = "cryptography-37.0.4-cp36-abi3-win32.whl", hash = "sha256:b62439d7cd1222f3da897e9a9fe53bbf5c104fff4d60893ad1355d4c14a24157"}, + {file = "cryptography-37.0.4-cp36-abi3-win_amd64.whl", hash = "sha256:f7a6de3e98771e183645181b3627e2563dcde3ce94a9e42a3f427d2255190327"}, + {file = "cryptography-37.0.4-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6bc95ed67b6741b2607298f9ea4932ff157e570ef456ef7ff0ef4884a134cc4b"}, + {file = "cryptography-37.0.4-pp37-pypy37_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:f8c0a6e9e1dd3eb0414ba320f85da6b0dcbd543126e30fcc546e7372a7fbf3b9"}, + {file = "cryptography-37.0.4-pp38-pypy38_pp73-macosx_10_10_x86_64.whl", hash = "sha256:e007f052ed10cc316df59bc90fbb7ff7950d7e2919c9757fd42a2b8ecf8a5f67"}, + {file = "cryptography-37.0.4-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7bc997818309f56c0038a33b8da5c0bfbb3f1f067f315f9abd6fc07ad359398d"}, + {file = "cryptography-37.0.4-pp38-pypy38_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:d204833f3c8a33bbe11eda63a54b1aad7aa7456ed769a982f21ec599ba5fa282"}, + {file = "cryptography-37.0.4-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:75976c217f10d48a8b5a8de3d70c454c249e4b91851f6838a4e48b8f41eb71aa"}, + {file = "cryptography-37.0.4-pp39-pypy39_pp73-macosx_10_10_x86_64.whl", hash = "sha256:7099a8d55cd49b737ffc99c17de504f2257e3787e02abe6d1a6d136574873441"}, + {file = "cryptography-37.0.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2be53f9f5505673eeda5f2736bea736c40f051a739bfae2f92d18aed1eb54596"}, + {file = "cryptography-37.0.4-pp39-pypy39_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:91ce48d35f4e3d3f1d83e29ef4a9267246e6a3be51864a5b7d2247d5086fa99a"}, + {file = "cryptography-37.0.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:4c590ec31550a724ef893c50f9a97a0c14e9c851c85621c5650d699a7b88f7ab"}, + {file = "cryptography-37.0.4.tar.gz", hash = "sha256:63f9c17c0e2474ccbebc9302ce2f07b55b3b3fcb211ded18a42d5764f5c10a82"}, ] cx-freeze = [ {file = "cx_Freeze-6.9-cp310-cp310-win32.whl", hash = "sha256:776d4fb68a4831691acbd3c374362b9b48ce2e568514a73c3d4cb14d5dcf1470"}, @@ -2061,16 +2048,16 @@ docutils = [ {file = "docutils-0.17.1.tar.gz", hash = "sha256:686577d2e4c32380bb50cbb22f575ed742d58168cee37e99117a854bcd88f125"}, ] dropbox = [ - {file = "dropbox-11.31.0-py2-none-any.whl", hash = "sha256:393a99dfe30d42fd73c265b9b7d24bb21c9a961739cd097c3541e709eb2a209c"}, - {file = "dropbox-11.31.0-py3-none-any.whl", hash = "sha256:5f924102fd6464def81573320c6aa4ea9cd3368e1b1c13d838403dd4c9ffc919"}, - {file = "dropbox-11.31.0.tar.gz", hash = "sha256:f483d65b702775b9abf7b9328f702c68c6397fc01770477c6ddbfb1d858a5bcf"}, + {file = "dropbox-11.33.0-py2-none-any.whl", hash = "sha256:3ee9024631b80f18938556d5e27cbdede26d6dc0b73aeaa90fc075ce96c950b1"}, + {file = "dropbox-11.33.0-py3-none-any.whl", hash = "sha256:1a0cbc22b0d1dae96e18b37e3520e5c289de7eb1303935db40e4dbfc9bb9e59b"}, + {file = "dropbox-11.33.0.tar.gz", hash = "sha256:7c638b521169a460de38b9eaeb204fe918874f72d6c3eed005d064b6f37da9c1"}, ] enlighten = [ {file = "enlighten-1.10.2-py2.py3-none-any.whl", hash = "sha256:b237fe562b320bf9f1d4bb76d0c98e0daf914372a76ab87c35cd02f57aa9d8c1"}, {file = 
"enlighten-1.10.2.tar.gz", hash = "sha256:7a5b83cd0f4d095e59d80c648ebb5f7ffca0cd8bcf7ae6639828ee1ad000632a"}, ] evdev = [ - {file = "evdev-1.5.0.tar.gz", hash = "sha256:5b33b174f7c84576e7dd6071e438bf5ad227da95efd4356a39fe4c8355412fe6"}, + {file = "evdev-1.6.0.tar.gz", hash = "sha256:ecfa01b5c84f7e8c6ced3367ac95288f43cd84efbfd7dd7d0cdbfc0d18c87a6a"}, ] flake8 = [ {file = "flake8-3.9.2-py2.py3-none-any.whl", hash = "sha256:bf8fd333346d844f616e8d47905ef3a3384edae6b4e9beb0c5101e25e3110907"}, @@ -2145,7 +2132,7 @@ future = [ {file = "future-0.18.2.tar.gz", hash = "sha256:b1bead90b70cf6ec3f0710ae53a525360fa360d306a86583adc6bf83a4db537d"}, ] gazu = [ - {file = "gazu-0.8.28-py2.py3-none-any.whl", hash = "sha256:ec4f7c2688a2b37ee8a77737e4e30565ad362428c3ade9046136a998c043e51c"}, + {file = "gazu-0.8.30-py2.py3-none-any.whl", hash = "sha256:d692927a11314151bc33e7d67edee634053f70a3b09e4500dfc6626bfea18753"}, ] gitdb = [ {file = "gitdb-4.0.9-py3-none-any.whl", hash = "sha256:8033ad4e853066ba6ca92050b9df2f89301b8fc8bf7e9324d412a63f8bf1a8fd"}, @@ -2156,24 +2143,24 @@ gitpython = [ {file = "GitPython-3.1.27.tar.gz", hash = "sha256:1c885ce809e8ba2d88a29befeb385fcea06338d3640712b59ca623c220bb5704"}, ] google-api-core = [ - {file = "google-api-core-2.8.1.tar.gz", hash = "sha256:958024c6aa3460b08f35741231076a4dd9a4c819a6a39d44da9627febe8b28f0"}, - {file = "google_api_core-2.8.1-py3-none-any.whl", hash = "sha256:ce1daa49644b50398093d2a9ad886501aa845e2602af70c3001b9f402a9d7359"}, + {file = "google-api-core-2.8.2.tar.gz", hash = "sha256:06f7244c640322b508b125903bb5701bebabce8832f85aba9335ec00b3d02edc"}, + {file = "google_api_core-2.8.2-py3-none-any.whl", hash = "sha256:93c6a91ccac79079ac6bbf8b74ee75db970cc899278b97d53bc012f35908cf50"}, ] google-api-python-client = [ {file = "google-api-python-client-1.12.11.tar.gz", hash = "sha256:1b4bd42a46321e13c0542a9e4d96fa05d73626f07b39f83a73a947d70ca706a9"}, {file = "google_api_python_client-1.12.11-py2.py3-none-any.whl", hash = "sha256:7e0a1a265c8d3088ee1987778c72683fcb376e32bada8d7767162bd9c503fd9b"}, ] google-auth = [ - {file = "google-auth-2.7.0.tar.gz", hash = "sha256:8a954960f852d5f19e6af14dd8e75c20159609e85d8db37e4013cc8c3824a7e1"}, - {file = "google_auth-2.7.0-py2.py3-none-any.whl", hash = "sha256:df549a1433108801b11bdcc0e312eaf0d5f0500db42f0523e4d65c78722e8475"}, + {file = "google-auth-2.9.1.tar.gz", hash = "sha256:14292fa3429f2bb1e99862554cde1ee730d6840ebae067814d3d15d8549c0888"}, + {file = "google_auth-2.9.1-py2.py3-none-any.whl", hash = "sha256:5a7eed0cb0e3a83989fad0b59fe1329dfc8c479543039cd6fd1e01e9adf39475"}, ] google-auth-httplib2 = [ {file = "google-auth-httplib2-0.1.0.tar.gz", hash = "sha256:a07c39fd632becacd3f07718dfd6021bf396978f03ad3ce4321d060015cc30ac"}, {file = "google_auth_httplib2-0.1.0-py2.py3-none-any.whl", hash = "sha256:31e49c36c6b5643b57e82617cb3e021e3e1d2df9da63af67252c02fa9c1f4a10"}, ] googleapis-common-protos = [ - {file = "googleapis-common-protos-1.56.2.tar.gz", hash = "sha256:b09b56f5463070c2153753ef123f07d2e49235e89148e9b2459ec8ed2f68d7d3"}, - {file = "googleapis_common_protos-1.56.2-py2.py3-none-any.whl", hash = "sha256:023eaea9d8c1cceccd9587c6af6c20f33eeeb05d4148670f2b0322dc1511700c"}, + {file = "googleapis-common-protos-1.56.4.tar.gz", hash = "sha256:c25873c47279387cfdcbdafa36149887901d36202cb645a0e4f29686bf6e4417"}, + {file = "googleapis_common_protos-1.56.4-py2.py3-none-any.whl", hash = "sha256:8eb2cbc91b69feaf23e32452a7ae60e791e09967d81d4fcc7fc388182d1bd394"}, ] httplib2 = [ {file = "httplib2-0.20.4-py3-none-any.whl", hash = 
"sha256:8b6a905cb1c79eefd03f8669fd993c36dc341f7c558f056cb5a33b5c2f458543"}, @@ -2184,12 +2171,12 @@ idna = [ {file = "idna-3.3.tar.gz", hash = "sha256:9d643ff0a55b762d5cdb124b8eaa99c66322e2157b69160bc32796e824360e6d"}, ] imagesize = [ - {file = "imagesize-1.3.0-py2.py3-none-any.whl", hash = "sha256:1db2f82529e53c3e929e8926a1fa9235aa82d0bd0c580359c67ec31b2fddaa8c"}, - {file = "imagesize-1.3.0.tar.gz", hash = "sha256:cd1750d452385ca327479d45b64d9c7729ecf0b3969a58148298c77092261f9d"}, + {file = "imagesize-1.4.1-py2.py3-none-any.whl", hash = "sha256:0d8d18d08f840c19d0ee7ca1fd82490fdc3729b7ac93f49870406ddde8ef8d8b"}, + {file = "imagesize-1.4.1.tar.gz", hash = "sha256:69150444affb9cb0d5cc5a92b3676f0b2fb7cd9ae39e947a5e11a36b4497cd4a"}, ] importlib-metadata = [ - {file = "importlib_metadata-4.11.4-py3-none-any.whl", hash = "sha256:c58c8eb8a762858f49e18436ff552e83914778e50e9d2f1660535ffb364552ec"}, - {file = "importlib_metadata-4.11.4.tar.gz", hash = "sha256:5d26852efe48c0a32b0509ffbc583fda1a2266545a78d104a6f4aff3db17d700"}, + {file = "importlib_metadata-4.12.0-py3-none-any.whl", hash = "sha256:7401a975809ea1fdc658c3aa4f78cc2195a0e019c5cbc4c06122884e9ae80c23"}, + {file = "importlib_metadata-4.12.0.tar.gz", hash = "sha256:637245b8bab2b6502fcbc752cc4b7a6f6243bb02b31c5c26156ad103d3d45670"}, ] iniconfig = [ {file = "iniconfig-1.1.1-py2.py3-none-any.whl", hash = "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3"}, @@ -2204,8 +2191,8 @@ jedi = [ {file = "jedi-0.13.3.tar.gz", hash = "sha256:2bb0603e3506f708e792c7f4ad8fc2a7a9d9c2d292a358fbbd58da531695595b"}, ] jeepney = [ - {file = "jeepney-0.7.1-py3-none-any.whl", hash = "sha256:1b5a0ea5c0e7b166b2f5895b91a08c14de8915afda4407fb5022a195224958ac"}, - {file = "jeepney-0.7.1.tar.gz", hash = "sha256:fa9e232dfa0c498bd0b8a3a73b8d8a31978304dcef0515adc859d4e096f96f4f"}, + {file = "jeepney-0.8.0-py3-none-any.whl", hash = "sha256:c0a454ad016ca575060802ee4d590dd912e35c122fa04e70306de3d076cce755"}, + {file = "jeepney-0.8.0.tar.gz", hash = "sha256:5efe48d255973902f6badc3ce55e2aa6c5c3b3bc642059ef3a91247bcfcc5806"}, ] jinja2 = [ {file = "Jinja2-2.11.3-py2.py3-none-any.whl", hash = "sha256:03e47ad063331dd6a3f04a43eddca8a966a26ba0c5b7207a9a9e4e08f1b29419"}, @@ -2266,75 +2253,46 @@ log4mongo = [ {file = "log4mongo-1.7.0.tar.gz", hash = "sha256:dc374617206162a0b14167fbb5feac01dbef587539a235dadba6200362984a68"}, ] markupsafe = [ - {file = "MarkupSafe-2.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d8446c54dc28c01e5a2dbac5a25f071f6653e6e40f3a8818e8b45d790fe6ef53"}, - {file = "MarkupSafe-2.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:36bc903cbb393720fad60fc28c10de6acf10dc6cc883f3e24ee4012371399a38"}, - {file = "MarkupSafe-2.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2d7d807855b419fc2ed3e631034685db6079889a1f01d5d9dac950f764da3dad"}, - {file = "MarkupSafe-2.0.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:add36cb2dbb8b736611303cd3bfcee00afd96471b09cda130da3581cbdc56a6d"}, - {file = "MarkupSafe-2.0.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:168cd0a3642de83558a5153c8bd34f175a9a6e7f6dc6384b9655d2697312a646"}, - {file = "MarkupSafe-2.0.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:4dc8f9fb58f7364b63fd9f85013b780ef83c11857ae79f2feda41e270468dd9b"}, - {file = "MarkupSafe-2.0.1-cp310-cp310-musllinux_1_1_i686.whl", hash = 
"sha256:20dca64a3ef2d6e4d5d615a3fd418ad3bde77a47ec8a23d984a12b5b4c74491a"}, - {file = "MarkupSafe-2.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:cdfba22ea2f0029c9261a4bd07e830a8da012291fbe44dc794e488b6c9bb353a"}, - {file = "MarkupSafe-2.0.1-cp310-cp310-win32.whl", hash = "sha256:99df47edb6bda1249d3e80fdabb1dab8c08ef3975f69aed437cb69d0a5de1e28"}, - {file = "MarkupSafe-2.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:e0f138900af21926a02425cf736db95be9f4af72ba1bb21453432a07f6082134"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:f9081981fe268bd86831e5c75f7de206ef275defcb82bc70740ae6dc507aee51"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:0955295dd5eec6cb6cc2fe1698f4c6d84af2e92de33fbcac4111913cd100a6ff"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:0446679737af14f45767963a1a9ef7620189912317d095f2d9ffa183a4d25d2b"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:f826e31d18b516f653fe296d967d700fddad5901ae07c622bb3705955e1faa94"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:fa130dd50c57d53368c9d59395cb5526eda596d3ffe36666cd81a44d56e48872"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:905fec760bd2fa1388bb5b489ee8ee5f7291d692638ea5f67982d968366bef9f"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bf5d821ffabf0ef3533c39c518f3357b171a1651c1ff6827325e4489b0e46c3c"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:0d4b31cc67ab36e3392bbf3862cfbadac3db12bdd8b02a2731f509ed5b829724"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:baa1a4e8f868845af802979fcdbf0bb11f94f1cb7ced4c4b8a351bb60d108145"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:deb993cacb280823246a026e3b2d81c493c53de6acfd5e6bfe31ab3402bb37dd"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:63f3268ba69ace99cab4e3e3b5840b03340efed0948ab8f78d2fd87ee5442a4f"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:8d206346619592c6200148b01a2142798c989edcb9c896f9ac9722a99d4e77e6"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-win32.whl", hash = "sha256:6c4ca60fa24e85fe25b912b01e62cb969d69a23a5d5867682dd3e80b5b02581d"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:b2f4bf27480f5e5e8ce285a8c8fd176c0b03e93dcc6646477d4630e83440c6a9"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0717a7390a68be14b8c793ba258e075c6f4ca819f15edfc2a3a027c823718567"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:6557b31b5e2c9ddf0de32a691f2312a32f77cd7681d8af66c2692efdbef84c18"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:49e3ceeabbfb9d66c3aef5af3a60cc43b85c33df25ce03d0031a608b0a8b2e3f"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:d7f9850398e85aba693bb640262d3611788b1f29a79f0c93c565694658f4071f"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:6a7fae0dd14cf60ad5ff42baa2e95727c3d81ded453457771d02b7d2b3f9c0c2"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux2014_aarch64.whl", hash = 
"sha256:b7f2d075102dc8c794cbde1947378051c4e5180d52d276987b8d28a3bd58c17d"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e9936f0b261d4df76ad22f8fee3ae83b60d7c3e871292cd42f40b81b70afae85"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:2a7d351cbd8cfeb19ca00de495e224dea7e7d919659c2841bbb7f420ad03e2d6"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:60bf42e36abfaf9aff1f50f52644b336d4f0a3fd6d8a60ca0d054ac9f713a864"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d6c7ebd4e944c85e2c3421e612a7057a2f48d478d79e61800d81468a8d842207"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:f0567c4dc99f264f49fe27da5f735f414c4e7e7dd850cfd8e69f0862d7c74ea9"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:89c687013cb1cd489a0f0ac24febe8c7a666e6e221b783e53ac50ebf68e45d86"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-win32.whl", hash = "sha256:a30e67a65b53ea0a5e62fe23682cfe22712e01f453b95233b25502f7c61cb415"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:611d1ad9a4288cf3e3c16014564df047fe08410e628f89805e475368bd304914"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5bb28c636d87e840583ee3adeb78172efc47c8b26127267f54a9c0ec251d41a9"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:be98f628055368795d818ebf93da628541e10b75b41c559fdf36d104c5787066"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux1_i686.whl", hash = "sha256:1d609f577dc6e1aa17d746f8bd3c31aa4d258f4070d61b2aa5c4166c1539de35"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7d91275b0245b1da4d4cfa07e0faedd5b0812efc15b702576d103293e252af1b"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:01a9b8ea66f1658938f65b93a85ebe8bc016e6769611be228d797c9d998dd298"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:47ab1e7b91c098ab893b828deafa1203de86d0bc6ab587b160f78fe6c4011f75"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:97383d78eb34da7e1fa37dd273c20ad4320929af65d156e35a5e2d89566d9dfb"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6fcf051089389abe060c9cd7caa212c707e58153afa2c649f00346ce6d260f1b"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:5855f8438a7d1d458206a2466bf82b0f104a3724bf96a1c781ab731e4201731a"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:3dd007d54ee88b46be476e293f48c85048603f5f516008bee124ddd891398ed6"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:aca6377c0cb8a8253e493c6b451565ac77e98c2951c45f913e0b52facdcff83f"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:04635854b943835a6ea959e948d19dcd311762c5c0c6e1f0e16ee57022669194"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6300b8454aa6930a24b9618fbb54b5a68135092bc666f7b06901f897fa5c2fee"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-win32.whl", hash = 
"sha256:023cb26ec21ece8dc3907c0e8320058b2e0cb3c55cf9564da612bc325bed5e64"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:984d76483eb32f1bcb536dc27e4ad56bba4baa70be32fa87152832cdd9db0833"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:2ef54abee730b502252bcdf31b10dacb0a416229b72c18b19e24a4509f273d26"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3c112550557578c26af18a1ccc9e090bfe03832ae994343cfdacd287db6a6ae7"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux1_i686.whl", hash = "sha256:53edb4da6925ad13c07b6d26c2a852bd81e364f95301c66e930ab2aef5b5ddd8"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:f5653a225f31e113b152e56f154ccbe59eeb1c7487b39b9d9f9cdb58e6c79dc5"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:4efca8f86c54b22348a5467704e3fec767b2db12fc39c6d963168ab1d3fc9135"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:ab3ef638ace319fa26553db0624c4699e31a28bb2a835c5faca8f8acf6a5a902"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:f8ba0e8349a38d3001fae7eadded3f6606f0da5d748ee53cc1dab1d6527b9509"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c47adbc92fc1bb2b3274c4b3a43ae0e4573d9fbff4f54cd484555edbf030baf1"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:37205cac2a79194e3750b0af2a5720d95f786a55ce7df90c3af697bfa100eaac"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:1f2ade76b9903f39aa442b4aadd2177decb66525062db244b35d71d0ee8599b6"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:4296f2b1ce8c86a6aea78613c34bb1a672ea0e3de9c6ba08a960efe0b0a09047"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f02365d4e99430a12647f09b6cc8bab61a6564363f313126f775eb4f6ef798e"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5b6d930f030f8ed98e3e6c98ffa0652bdb82601e7a016ec2ab5d7ff23baa78d1"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-win32.whl", hash = "sha256:10f82115e21dc0dfec9ab5c0223652f7197feb168c940f3ef61563fc2d6beb74"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:693ce3f9e70a6cf7d2fb9e6c9d8b204b6b39897a2c4a1aa65728d5ac97dcc1d8"}, - {file = "MarkupSafe-2.0.1.tar.gz", hash = "sha256:594c67807fb16238b30c44bdf74f36c02cdf22d1c8cda91ef8a0ed8dabf5620a"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:86b1f75c4e7c2ac2ccdaec2b9022845dbb81880ca318bb7a0a01fbf7813e3812"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f121a1420d4e173a5d96e47e9a0c0dcff965afdf1626d28de1460815f7c4ee7a"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a49907dd8420c5685cfa064a1335b6754b74541bbb3706c259c02ed65b644b3e"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10c1bfff05d95783da83491be968e8fe789263689c02724e0c691933c52994f5"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b7bd98b796e2b6553da7225aeb61f447f80a1ca64f41d83612e6139ca5213aa4"}, + {file = 
"MarkupSafe-2.1.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b09bf97215625a311f669476f44b8b318b075847b49316d3e28c08e41a7a573f"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:694deca8d702d5db21ec83983ce0bb4b26a578e71fbdbd4fdcd387daa90e4d5e"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:efc1913fd2ca4f334418481c7e595c00aad186563bbc1ec76067848c7ca0a933"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-win32.whl", hash = "sha256:4a33dea2b688b3190ee12bd7cfa29d39c9ed176bda40bfa11099a3ce5d3a7ac6"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:dda30ba7e87fbbb7eab1ec9f58678558fd9a6b8b853530e176eabd064da81417"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:671cd1187ed5e62818414afe79ed29da836dde67166a9fac6d435873c44fdd02"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3799351e2336dc91ea70b034983ee71cf2f9533cdff7c14c90ea126bfd95d65a"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e72591e9ecd94d7feb70c1cbd7be7b3ebea3f548870aa91e2732960fa4d57a37"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6fbf47b5d3728c6aea2abb0589b5d30459e369baa772e0f37a0320185e87c980"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d5ee4f386140395a2c818d149221149c54849dfcfcb9f1debfe07a8b8bd63f9a"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:bcb3ed405ed3222f9904899563d6fc492ff75cce56cba05e32eff40e6acbeaa3"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:e1c0b87e09fa55a220f058d1d49d3fb8df88fbfab58558f1198e08c1e1de842a"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-win32.whl", hash = "sha256:8dc1c72a69aa7e082593c4a203dcf94ddb74bb5c8a731e4e1eb68d031e8498ff"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-win_amd64.whl", hash = "sha256:97a68e6ada378df82bc9f16b800ab77cbf4b2fada0081794318520138c088e4a"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:e8c843bbcda3a2f1e3c2ab25913c80a3c5376cd00c6e8c4a86a89a28c8dc5452"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0212a68688482dc52b2d45013df70d169f542b7394fc744c02a57374a4207003"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e576a51ad59e4bfaac456023a78f6b5e6e7651dcd383bcc3e18d06f9b55d6d1"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b9fe39a2ccc108a4accc2676e77da025ce383c108593d65cc909add5c3bd601"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:96e37a3dc86e80bf81758c152fe66dbf60ed5eca3d26305edf01892257049925"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6d0072fea50feec76a4c418096652f2c3238eaa014b2f94aeb1d56a66b41403f"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:089cf3dbf0cd6c100f02945abeb18484bd1ee57a079aefd52cffd17fba910b88"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6a074d34ee7a5ce3effbc526b7083ec9731bb3cbf921bbe1d3005d4d2bdb3a63"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-win32.whl", hash = "sha256:421be9fbf0ffe9ffd7a378aafebbf6f4602d564d34be190fc19a193232fd12b1"}, + {file = 
"MarkupSafe-2.1.1-cp38-cp38-win_amd64.whl", hash = "sha256:fc7b548b17d238737688817ab67deebb30e8073c95749d55538ed473130ec0c7"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e04e26803c9c3851c931eac40c695602c6295b8d432cbe78609649ad9bd2da8a"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b87db4360013327109564f0e591bd2a3b318547bcef31b468a92ee504d07ae4f"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:99a2a507ed3ac881b975a2976d59f38c19386d128e7a9a18b7df6fff1fd4c1d6"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:56442863ed2b06d19c37f94d999035e15ee982988920e12a5b4ba29b62ad1f77"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3ce11ee3f23f79dbd06fb3d63e2f6af7b12db1d46932fe7bd8afa259a5996603"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:33b74d289bd2f5e527beadcaa3f401e0df0a89927c1559c8566c066fa4248ab7"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:43093fb83d8343aac0b1baa75516da6092f58f41200907ef92448ecab8825135"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8e3dcf21f367459434c18e71b2a9532d96547aef8a871872a5bd69a715c15f96"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-win32.whl", hash = "sha256:d4306c36ca495956b6d568d276ac11fdd9c30a36f1b6eb928070dc5360b22e1c"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:46d00d6cfecdde84d40e572d63735ef81423ad31184100411e6e3388d405e247"}, + {file = "MarkupSafe-2.1.1.tar.gz", hash = "sha256:7f91197cc9e48f989d12e4e6fbc46495c446636dfc81b9ccf50bb0ec74b91d4b"}, ] mccabe = [ {file = "mccabe-0.6.1-py2.py3-none-any.whl", hash = "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42"}, @@ -2419,44 +2377,64 @@ pathlib2 = [ {file = "pathlib2-2.3.7.post1.tar.gz", hash = "sha256:9fe0edad898b83c0c3e199c842b27ed216645d2e177757b2dd67384d4113c641"}, ] pillow = [ - {file = "Pillow-9.1.1-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:42dfefbef90eb67c10c45a73a9bc1599d4dac920f7dfcbf4ec6b80cb620757fe"}, - {file = "Pillow-9.1.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ffde4c6fabb52891d81606411cbfaf77756e3b561b566efd270b3ed3791fde4e"}, - {file = "Pillow-9.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9c857532c719fb30fafabd2371ce9b7031812ff3889d75273827633bca0c4602"}, - {file = "Pillow-9.1.1-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:59789a7d06c742e9d13b883d5e3569188c16acb02eeed2510fd3bfdbc1bd1530"}, - {file = "Pillow-9.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4d45dbe4b21a9679c3e8b3f7f4f42a45a7d3ddff8a4a16109dff0e1da30a35b2"}, - {file = "Pillow-9.1.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e9ed59d1b6ee837f4515b9584f3d26cf0388b742a11ecdae0d9237a94505d03a"}, - {file = "Pillow-9.1.1-cp310-cp310-win32.whl", hash = "sha256:b3fe2ff1e1715d4475d7e2c3e8dabd7c025f4410f79513b4ff2de3d51ce0fa9c"}, - {file = "Pillow-9.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:5b650dbbc0969a4e226d98a0b440c2f07a850896aed9266b6fedc0f7e7834108"}, - {file = "Pillow-9.1.1-cp37-cp37m-macosx_10_10_x86_64.whl", hash = "sha256:0b4d5ad2cd3a1f0d1df882d926b37dbb2ab6c823ae21d041b46910c8f8cd844b"}, - {file = "Pillow-9.1.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:9370d6744d379f2de5d7fa95cdbd3a4d92f0b0ef29609b4b1687f16bc197063d"}, - {file = "Pillow-9.1.1-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b761727ed7d593e49671d1827044b942dd2f4caae6e51bab144d4accf8244a84"}, - {file = "Pillow-9.1.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8a66fe50386162df2da701b3722781cbe90ce043e7d53c1fd6bd801bca6b48d4"}, - {file = "Pillow-9.1.1-cp37-cp37m-win32.whl", hash = "sha256:2b291cab8a888658d72b575a03e340509b6b050b62db1f5539dd5cd18fd50578"}, - {file = "Pillow-9.1.1-cp37-cp37m-win_amd64.whl", hash = "sha256:1d4331aeb12f6b3791911a6da82de72257a99ad99726ed6b63f481c0184b6fb9"}, - {file = "Pillow-9.1.1-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:8844217cdf66eabe39567118f229e275f0727e9195635a15e0e4b9227458daaf"}, - {file = "Pillow-9.1.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b6617221ff08fbd3b7a811950b5c3f9367f6e941b86259843eab77c8e3d2b56b"}, - {file = "Pillow-9.1.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:20d514c989fa28e73a5adbddd7a171afa5824710d0ab06d4e1234195d2a2e546"}, - {file = "Pillow-9.1.1-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:088df396b047477dd1bbc7de6e22f58400dae2f21310d9e2ec2933b2ef7dfa4f"}, - {file = "Pillow-9.1.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:53c27bd452e0f1bc4bfed07ceb235663a1df7c74df08e37fd6b03eb89454946a"}, - {file = "Pillow-9.1.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:3f6c1716c473ebd1649663bf3b42702d0d53e27af8b64642be0dd3598c761fb1"}, - {file = "Pillow-9.1.1-cp38-cp38-win32.whl", hash = "sha256:c67db410508b9de9c4694c57ed754b65a460e4812126e87f5052ecf23a011a54"}, - {file = "Pillow-9.1.1-cp38-cp38-win_amd64.whl", hash = "sha256:f054b020c4d7e9786ae0404278ea318768eb123403b18453e28e47cdb7a0a4bf"}, - {file = "Pillow-9.1.1-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:c17770a62a71718a74b7548098a74cd6880be16bcfff5f937f900ead90ca8e92"}, - {file = "Pillow-9.1.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f3f6a6034140e9e17e9abc175fc7a266a6e63652028e157750bd98e804a8ed9a"}, - {file = "Pillow-9.1.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f372d0f08eff1475ef426344efe42493f71f377ec52237bf153c5713de987251"}, - {file = "Pillow-9.1.1-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:09e67ef6e430f90caa093528bd758b0616f8165e57ed8d8ce014ae32df6a831d"}, - {file = "Pillow-9.1.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:66daa16952d5bf0c9d5389c5e9df562922a59bd16d77e2a276e575d32e38afd1"}, - {file = "Pillow-9.1.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d78ca526a559fb84faaaf84da2dd4addef5edb109db8b81677c0bb1aad342601"}, - {file = "Pillow-9.1.1-cp39-cp39-win32.whl", hash = "sha256:55e74faf8359ddda43fee01bffbc5bd99d96ea508d8a08c527099e84eb708f45"}, - {file = "Pillow-9.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:7c150dbbb4a94ea4825d1e5f2c5501af7141ea95825fadd7829f9b11c97aaf6c"}, - {file = "Pillow-9.1.1-pp37-pypy37_pp73-macosx_10_10_x86_64.whl", hash = "sha256:769a7f131a2f43752455cc72f9f7a093c3ff3856bf976c5fb53a59d0ccc704f6"}, - {file = "Pillow-9.1.1-pp37-pypy37_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:488f3383cf5159907d48d32957ac6f9ea85ccdcc296c14eca1a4e396ecc32098"}, - {file = "Pillow-9.1.1-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b525a356680022b0af53385944026d3486fc8c013638cf9900eb87c866afb4c"}, - {file = 
"Pillow-9.1.1-pp38-pypy38_pp73-macosx_10_10_x86_64.whl", hash = "sha256:6e760cf01259a1c0a50f3c845f9cad1af30577fd8b670339b1659c6d0e7a41dd"}, - {file = "Pillow-9.1.1-pp38-pypy38_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a4165205a13b16a29e1ac57efeee6be2dfd5b5408122d59ef2145bc3239fa340"}, - {file = "Pillow-9.1.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:937a54e5694684f74dcbf6e24cc453bfc5b33940216ddd8f4cd8f0f79167f765"}, - {file = "Pillow-9.1.1-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:baf3be0b9446a4083cc0c5bb9f9c964034be5374b5bc09757be89f5d2fa247b8"}, - {file = "Pillow-9.1.1.tar.gz", hash = "sha256:7502539939b53d7565f3d11d87c78e7ec900d3c72945d4ee0e2f250d598309a0"}, + {file = "Pillow-9.2.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:a9c9bc489f8ab30906d7a85afac4b4944a572a7432e00698a7239f44a44e6efb"}, + {file = "Pillow-9.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:510cef4a3f401c246cfd8227b300828715dd055463cdca6176c2e4036df8bd4f"}, + {file = "Pillow-9.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7888310f6214f19ab2b6df90f3f06afa3df7ef7355fc025e78a3044737fab1f5"}, + {file = "Pillow-9.2.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:831e648102c82f152e14c1a0938689dbb22480c548c8d4b8b248b3e50967b88c"}, + {file = "Pillow-9.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1cc1d2451e8a3b4bfdb9caf745b58e6c7a77d2e469159b0d527a4554d73694d1"}, + {file = "Pillow-9.2.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:136659638f61a251e8ed3b331fc6ccd124590eeff539de57c5f80ef3a9594e58"}, + {file = "Pillow-9.2.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:6e8c66f70fb539301e064f6478d7453e820d8a2c631da948a23384865cd95544"}, + {file = "Pillow-9.2.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:37ff6b522a26d0538b753f0b4e8e164fdada12db6c6f00f62145d732d8a3152e"}, + {file = "Pillow-9.2.0-cp310-cp310-win32.whl", hash = "sha256:c79698d4cd9318d9481d89a77e2d3fcaeff5486be641e60a4b49f3d2ecca4e28"}, + {file = "Pillow-9.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:254164c57bab4b459f14c64e93df11eff5ded575192c294a0c49270f22c5d93d"}, + {file = "Pillow-9.2.0-cp311-cp311-macosx_10_10_universal2.whl", hash = "sha256:408673ed75594933714482501fe97e055a42996087eeca7e5d06e33218d05aa8"}, + {file = "Pillow-9.2.0-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:727dd1389bc5cb9827cbd1f9d40d2c2a1a0c9b32dd2261db522d22a604a6eec9"}, + {file = "Pillow-9.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50dff9cc21826d2977ef2d2a205504034e3a4563ca6f5db739b0d1026658e004"}, + {file = "Pillow-9.2.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cb6259196a589123d755380b65127ddc60f4c64b21fc3bb46ce3a6ea663659b0"}, + {file = "Pillow-9.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b0554af24df2bf96618dac71ddada02420f946be943b181108cac55a7a2dcd4"}, + {file = "Pillow-9.2.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:15928f824870535c85dbf949c09d6ae7d3d6ac2d6efec80f3227f73eefba741c"}, + {file = "Pillow-9.2.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:bdd0de2d64688ecae88dd8935012c4a72681e5df632af903a1dca8c5e7aa871a"}, + {file = "Pillow-9.2.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d5b87da55a08acb586bad5c3aa3b86505f559b84f39035b233d5bf844b0834b1"}, + {file = "Pillow-9.2.0-cp311-cp311-win32.whl", hash = 
"sha256:b6d5e92df2b77665e07ddb2e4dbd6d644b78e4c0d2e9272a852627cdba0d75cf"}, + {file = "Pillow-9.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:6bf088c1ce160f50ea40764f825ec9b72ed9da25346216b91361eef8ad1b8f8c"}, + {file = "Pillow-9.2.0-cp37-cp37m-macosx_10_10_x86_64.whl", hash = "sha256:2c58b24e3a63efd22554c676d81b0e57f80e0a7d3a5874a7e14ce90ec40d3069"}, + {file = "Pillow-9.2.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eef7592281f7c174d3d6cbfbb7ee5984a671fcd77e3fc78e973d492e9bf0eb3f"}, + {file = "Pillow-9.2.0-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dcd7b9c7139dc8258d164b55696ecd16c04607f1cc33ba7af86613881ffe4ac8"}, + {file = "Pillow-9.2.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a138441e95562b3c078746a22f8fca8ff1c22c014f856278bdbdd89ca36cff1b"}, + {file = "Pillow-9.2.0-cp37-cp37m-manylinux_2_28_aarch64.whl", hash = "sha256:93689632949aff41199090eff5474f3990b6823404e45d66a5d44304e9cdc467"}, + {file = "Pillow-9.2.0-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:f3fac744f9b540148fa7715a435d2283b71f68bfb6d4aae24482a890aed18b59"}, + {file = "Pillow-9.2.0-cp37-cp37m-win32.whl", hash = "sha256:fa768eff5f9f958270b081bb33581b4b569faabf8774726b283edb06617101dc"}, + {file = "Pillow-9.2.0-cp37-cp37m-win_amd64.whl", hash = "sha256:69bd1a15d7ba3694631e00df8de65a8cb031911ca11f44929c97fe05eb9b6c1d"}, + {file = "Pillow-9.2.0-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:030e3460861488e249731c3e7ab59b07c7853838ff3b8e16aac9561bb345da14"}, + {file = "Pillow-9.2.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:74a04183e6e64930b667d321524e3c5361094bb4af9083db5c301db64cd341f3"}, + {file = "Pillow-9.2.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2d33a11f601213dcd5718109c09a52c2a1c893e7461f0be2d6febc2879ec2402"}, + {file = "Pillow-9.2.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1fd6f5e3c0e4697fa7eb45b6e93996299f3feee73a3175fa451f49a74d092b9f"}, + {file = "Pillow-9.2.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a647c0d4478b995c5e54615a2e5360ccedd2f85e70ab57fbe817ca613d5e63b8"}, + {file = "Pillow-9.2.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:4134d3f1ba5f15027ff5c04296f13328fecd46921424084516bdb1b2548e66ff"}, + {file = "Pillow-9.2.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:bc431b065722a5ad1dfb4df354fb9333b7a582a5ee39a90e6ffff688d72f27a1"}, + {file = "Pillow-9.2.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:1536ad017a9f789430fb6b8be8bf99d2f214c76502becc196c6f2d9a75b01b76"}, + {file = "Pillow-9.2.0-cp38-cp38-win32.whl", hash = "sha256:2ad0d4df0f5ef2247e27fc790d5c9b5a0af8ade9ba340db4a73bb1a4a3e5fb4f"}, + {file = "Pillow-9.2.0-cp38-cp38-win_amd64.whl", hash = "sha256:ec52c351b35ca269cb1f8069d610fc45c5bd38c3e91f9ab4cbbf0aebc136d9c8"}, + {file = "Pillow-9.2.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:0ed2c4ef2451de908c90436d6e8092e13a43992f1860275b4d8082667fbb2ffc"}, + {file = "Pillow-9.2.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4ad2f835e0ad81d1689f1b7e3fbac7b01bb8777d5a985c8962bedee0cc6d43da"}, + {file = "Pillow-9.2.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ea98f633d45f7e815db648fd7ff0f19e328302ac36427343e4432c84432e7ff4"}, + {file = "Pillow-9.2.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7761afe0126d046974a01e030ae7529ed0ca6a196de3ec6937c11df0df1bc91c"}, + {file = 
"Pillow-9.2.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9a54614049a18a2d6fe156e68e188da02a046a4a93cf24f373bffd977e943421"}, + {file = "Pillow-9.2.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:5aed7dde98403cd91d86a1115c78d8145c83078e864c1de1064f52e6feb61b20"}, + {file = "Pillow-9.2.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:13b725463f32df1bfeacbf3dd197fb358ae8ebcd8c5548faa75126ea425ccb60"}, + {file = "Pillow-9.2.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:808add66ea764ed97d44dda1ac4f2cfec4c1867d9efb16a33d158be79f32b8a4"}, + {file = "Pillow-9.2.0-cp39-cp39-win32.whl", hash = "sha256:337a74fd2f291c607d220c793a8135273c4c2ab001b03e601c36766005f36885"}, + {file = "Pillow-9.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:fac2d65901fb0fdf20363fbd345c01958a742f2dc62a8dd4495af66e3ff502a4"}, + {file = "Pillow-9.2.0-pp37-pypy37_pp73-macosx_10_10_x86_64.whl", hash = "sha256:ad2277b185ebce47a63f4dc6302e30f05762b688f8dc3de55dbae4651872cdf3"}, + {file = "Pillow-9.2.0-pp37-pypy37_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7c7b502bc34f6e32ba022b4a209638f9e097d7a9098104ae420eb8186217ebbb"}, + {file = "Pillow-9.2.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d1f14f5f691f55e1b47f824ca4fdcb4b19b4323fe43cc7bb105988cad7496be"}, + {file = "Pillow-9.2.0-pp37-pypy37_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:dfe4c1fedfde4e2fbc009d5ad420647f7730d719786388b7de0999bf32c0d9fd"}, + {file = "Pillow-9.2.0-pp38-pypy38_pp73-macosx_10_10_x86_64.whl", hash = "sha256:f07f1f00e22b231dd3d9b9208692042e29792d6bd4f6639415d2f23158a80013"}, + {file = "Pillow-9.2.0-pp38-pypy38_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1802f34298f5ba11d55e5bb09c31997dc0c6aed919658dfdf0198a2fe75d5490"}, + {file = "Pillow-9.2.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17d4cafe22f050b46d983b71c707162d63d796a1235cdf8b9d7a112e97b15bac"}, + {file = "Pillow-9.2.0-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:96b5e6874431df16aee0c1ba237574cb6dff1dcb173798faa6a9d8b399a05d0e"}, + {file = "Pillow-9.2.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:0030fdbd926fb85844b8b92e2f9449ba89607231d3dd597a21ae72dc7fe26927"}, + {file = "Pillow-9.2.0.tar.gz", hash = "sha256:75e636fd3e0fb872693f23ccb8a5ff2cd578801251f3a4f6854c6a5d437d3c04"}, ] platformdirs = [ {file = "platformdirs-2.5.2-py3-none-any.whl", hash = "sha256:027d8e83a2d7de06bbac4e5ef7e023c02b863d7ea5d079477e722bb41ab25788"}, @@ -2475,32 +2453,20 @@ prefixed = [ {file = "prefixed-0.3.2.tar.gz", hash = "sha256:ca48277ba5fa8346dd4b760847da930c7b84416387c39e93affef086add2c029"}, ] protobuf = [ - {file = "protobuf-3.19.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f51d5a9f137f7a2cec2d326a74b6e3fc79d635d69ffe1b036d39fc7d75430d37"}, - {file = "protobuf-3.19.4-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:09297b7972da685ce269ec52af761743714996b4381c085205914c41fcab59fb"}, - {file = "protobuf-3.19.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:072fbc78d705d3edc7ccac58a62c4c8e0cec856987da7df8aca86e647be4e35c"}, - {file = "protobuf-3.19.4-cp310-cp310-win32.whl", hash = "sha256:7bb03bc2873a2842e5ebb4801f5c7ff1bfbdf426f85d0172f7644fcda0671ae0"}, - {file = "protobuf-3.19.4-cp310-cp310-win_amd64.whl", hash = "sha256:f358aa33e03b7a84e0d91270a4d4d8f5df6921abe99a377828839e8ed0c04e07"}, - {file = "protobuf-3.19.4-cp36-cp36m-macosx_10_9_x86_64.whl", hash = 
"sha256:1c91ef4110fdd2c590effb5dca8fdbdcb3bf563eece99287019c4204f53d81a4"}, - {file = "protobuf-3.19.4-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c438268eebb8cf039552897d78f402d734a404f1360592fef55297285f7f953f"}, - {file = "protobuf-3.19.4-cp36-cp36m-win32.whl", hash = "sha256:835a9c949dc193953c319603b2961c5c8f4327957fe23d914ca80d982665e8ee"}, - {file = "protobuf-3.19.4-cp36-cp36m-win_amd64.whl", hash = "sha256:4276cdec4447bd5015453e41bdc0c0c1234eda08420b7c9a18b8d647add51e4b"}, - {file = "protobuf-3.19.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:6cbc312be5e71869d9d5ea25147cdf652a6781cf4d906497ca7690b7b9b5df13"}, - {file = "protobuf-3.19.4-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:54a1473077f3b616779ce31f477351a45b4fef8c9fd7892d6d87e287a38df368"}, - {file = "protobuf-3.19.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:435bb78b37fc386f9275a7035fe4fb1364484e38980d0dd91bc834a02c5ec909"}, - {file = "protobuf-3.19.4-cp37-cp37m-win32.whl", hash = "sha256:16f519de1313f1b7139ad70772e7db515b1420d208cb16c6d7858ea989fc64a9"}, - {file = "protobuf-3.19.4-cp37-cp37m-win_amd64.whl", hash = "sha256:cdc076c03381f5c1d9bb1abdcc5503d9ca8b53cf0a9d31a9f6754ec9e6c8af0f"}, - {file = "protobuf-3.19.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:69da7d39e39942bd52848438462674c463e23963a1fdaa84d88df7fbd7e749b2"}, - {file = "protobuf-3.19.4-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:48ed3877fa43e22bcacc852ca76d4775741f9709dd9575881a373bd3e85e54b2"}, - {file = "protobuf-3.19.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bd95d1dfb9c4f4563e6093a9aa19d9c186bf98fa54da5252531cc0d3a07977e7"}, - {file = "protobuf-3.19.4-cp38-cp38-win32.whl", hash = "sha256:b38057450a0c566cbd04890a40edf916db890f2818e8682221611d78dc32ae26"}, - {file = "protobuf-3.19.4-cp38-cp38-win_amd64.whl", hash = "sha256:7ca7da9c339ca8890d66958f5462beabd611eca6c958691a8fe6eccbd1eb0c6e"}, - {file = "protobuf-3.19.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:36cecbabbda242915529b8ff364f2263cd4de7c46bbe361418b5ed859677ba58"}, - {file = "protobuf-3.19.4-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:c1068287025f8ea025103e37d62ffd63fec8e9e636246b89c341aeda8a67c934"}, - {file = "protobuf-3.19.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:96bd766831596d6014ca88d86dc8fe0fb2e428c0b02432fd9db3943202bf8c5e"}, - {file = "protobuf-3.19.4-cp39-cp39-win32.whl", hash = "sha256:84123274d982b9e248a143dadd1b9815049f4477dc783bf84efe6250eb4b836a"}, - {file = "protobuf-3.19.4-cp39-cp39-win_amd64.whl", hash = "sha256:3112b58aac3bac9c8be2b60a9daf6b558ca3f7681c130dcdd788ade7c9ffbdca"}, - {file = "protobuf-3.19.4-py2.py3-none-any.whl", hash = "sha256:8961c3a78ebfcd000920c9060a262f082f29838682b1f7201889300c1fbe0616"}, - {file = "protobuf-3.19.4.tar.gz", hash = "sha256:9df0c10adf3e83015ced42a9a7bd64e13d06c4cf45c340d2c63020ea04499d0a"}, + {file = "protobuf-4.21.2-cp310-abi3-win32.whl", hash = "sha256:d622dc75e289e8b3031dd8b4e87df508f11a6b3d86a49fb50256af7ce030d35b"}, + {file = "protobuf-4.21.2-cp310-abi3-win_amd64.whl", hash = "sha256:4758b9c22ad0486639a68cea58d38571f233019a73212d78476ec648f68a49a3"}, + {file = "protobuf-4.21.2-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:e3d3df3292ab4bae85213b9ebef566b5aedb45f97425a92fac5b2e431d31e71c"}, + {file = "protobuf-4.21.2-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:29eaf8e9db33bc3bae14576ad61370aa2b64ea5d6e6cd705042692e5e0404b10"}, + {file = 
"protobuf-4.21.2-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:ef0768a609a02b2b412fa0f59f1242f1597e9bb15188d043f3fde09115ca6c69"}, + {file = "protobuf-4.21.2-cp37-cp37m-win32.whl", hash = "sha256:5f8c7488e74024fa12b46aab4258f707d7d6e94c8d322d7c45cc13770f66ab59"}, + {file = "protobuf-4.21.2-cp37-cp37m-win_amd64.whl", hash = "sha256:57a593e40257ab4f164fe6e171651b1386c98f8ec5f5a8643642889c50d4f3c4"}, + {file = "protobuf-4.21.2-cp38-cp38-win32.whl", hash = "sha256:b82ac05b0651a4d2b9d56f5aeef3d711f5858eb4b71c13d77553739e5930a74a"}, + {file = "protobuf-4.21.2-cp38-cp38-win_amd64.whl", hash = "sha256:f2f43ae8dff452aee3026b59ea0a09245ab2529a55a0984992e76bcf848610e1"}, + {file = "protobuf-4.21.2-cp39-cp39-win32.whl", hash = "sha256:7b2dcca25d88ec77358eed3d031c8260b5bf3023fff03a31c9584591c5910833"}, + {file = "protobuf-4.21.2-cp39-cp39-win_amd64.whl", hash = "sha256:095fda15fe04a79c9f0edab09b424be46dd057b15986d235b84c8cea91659df7"}, + {file = "protobuf-4.21.2-py2.py3-none-any.whl", hash = "sha256:9b42afb67e19010cdda057e439574ccd944902ea14b0d52ba0bfba2aad50858d"}, + {file = "protobuf-4.21.2-py3-none-any.whl", hash = "sha256:853708afc3a7eed4df28a8d4bd4812f829f8d736c104dd8d584ccff27969e311"}, + {file = "protobuf-4.21.2.tar.gz", hash = "sha256:863f65e137d9de4a76cac39ae731a19bea1c30997f512ecf0dc9348112313401"}, ] py = [ {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"}, @@ -2565,8 +2531,8 @@ pygments = [ {file = "Pygments-2.12.0.tar.gz", hash = "sha256:5eb116118f9612ff1ee89ac96437bb6b49e8f04d8a13b514ba26f620208e26eb"}, ] pylint = [ - {file = "pylint-2.13.9-py3-none-any.whl", hash = "sha256:705c620d388035bdd9ff8b44c5bcdd235bfb49d276d488dd2c8ff1736aa42526"}, - {file = "pylint-2.13.9.tar.gz", hash = "sha256:095567c96e19e6f57b5b907e67d265ff535e588fe26b12b5ebe1fc5645b2c731"}, + {file = "pylint-2.14.5-py3-none-any.whl", hash = "sha256:fabe30000de7d07636d2e82c9a518ad5ad7908590fe135ace169b44839c15f90"}, + {file = "pylint-2.14.5.tar.gz", hash = "sha256:487ce2192eee48211269a0e976421f334cf94de1806ca9d0a99449adcdf0285e"}, ] pymongo = [ {file = "pymongo-3.12.3-cp27-cp27m-macosx_10_14_intel.whl", hash = "sha256:c164eda0be9048f83c24b9b2656900041e069ddf72de81c17d874d0c32f6079f"}, @@ -2809,8 +2775,8 @@ requests = [ {file = "requests-2.27.1.tar.gz", hash = "sha256:68d7c56fd5a8999887728ef304a6d12edc7be74f1cfa47714fc8b414525c9a61"}, ] rsa = [ - {file = "rsa-4.8-py3-none-any.whl", hash = "sha256:95c5d300c4e879ee69708c428ba566c59478fd653cc3a22243eeb8ed846950bb"}, - {file = "rsa-4.8.tar.gz", hash = "sha256:5c6bd9dc7a543b7fe4304a631f8a8a3b674e2bbfc49c2ae96200cdbe55df6b17"}, + {file = "rsa-4.9-py3-none-any.whl", hash = "sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7"}, + {file = "rsa-4.9.tar.gz", hash = "sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21"}, ] secretstorage = [ {file = "SecretStorage-3.3.2-py3-none-any.whl", hash = "sha256:755dc845b6ad76dcbcbc07ea3da75ae54bb1ea529eb72d15f83d26499a5df319"}, @@ -2825,8 +2791,8 @@ six = [ {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, ] slack-sdk = [ - {file = "slack_sdk-3.17.0-py2.py3-none-any.whl", hash = "sha256:0816efc43d1d2db8286e8dbcbb2e86fd0f71c206c01c521c2cb054ecb40f9ced"}, - {file = "slack_sdk-3.17.0.tar.gz", hash = "sha256:860cd0e50c454b955f14321c8c5486a47cc1e0e84116acdb009107f836752feb"}, + {file = "slack_sdk-3.18.0-py2.py3-none-any.whl", hash = 
"sha256:1bca5f1c006aef174b0e4ccc38cb8f4b1c9d595150d64dcb764f65c443e105cf"}, + {file = "slack_sdk-3.18.0.tar.gz", hash = "sha256:429b77c123993d2aaded2917909f4a40bb2079c1d5d6d0f86c4bb2b40900433b"}, ] smmap = [ {file = "smmap-5.0.0-py3-none-any.whl", hash = "sha256:2aba19d6a040e78d8b09de5c57e96207b09ed71d8e55ce0959eeee6c8e190d94"}, @@ -2841,8 +2807,8 @@ speedcopy = [ {file = "speedcopy-2.1.4.tar.gz", hash = "sha256:eff007a97e49ec1934df4fa8074f4bd1cf4a3b14c5499d914988785cff0c199a"}, ] sphinx = [ - {file = "Sphinx-5.0.1-py3-none-any.whl", hash = "sha256:36aa2a3c2f6d5230be94585bc5d74badd5f9ed8f3388b8eedc1726fe45b1ad30"}, - {file = "Sphinx-5.0.1.tar.gz", hash = "sha256:f4da1187785a5bc7312cc271b0e867a93946c319d106363e102936a3d9857306"}, + {file = "Sphinx-5.0.2-py3-none-any.whl", hash = "sha256:d3e57663eed1d7c5c50895d191fdeda0b54ded6f44d5621b50709466c338d1e8"}, + {file = "Sphinx-5.0.2.tar.gz", hash = "sha256:b18e978ea7565720f26019c702cd85c84376e948370f1cd43d60265010e1c7b0"}, ] sphinx-qt-documentation = [ {file = "sphinx_qt_documentation-0.4-py3-none-any.whl", hash = "sha256:fa131093f75cd1bd48699cd132e18e4d46ba9eaadc070e6026867cea75ecdb7b"}, @@ -2896,43 +2862,21 @@ tomli = [ {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, ] -typed-ast = [ - {file = "typed_ast-1.5.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:669dd0c4167f6f2cd9f57041e03c3c2ebf9063d0757dc89f79ba1daa2bfca9d4"}, - {file = "typed_ast-1.5.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:211260621ab1cd7324e0798d6be953d00b74e0428382991adfddb352252f1d62"}, - {file = "typed_ast-1.5.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:267e3f78697a6c00c689c03db4876dd1efdfea2f251a5ad6555e82a26847b4ac"}, - {file = "typed_ast-1.5.4-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:c542eeda69212fa10a7ada75e668876fdec5f856cd3d06829e6aa64ad17c8dfe"}, - {file = "typed_ast-1.5.4-cp310-cp310-win_amd64.whl", hash = "sha256:a9916d2bb8865f973824fb47436fa45e1ebf2efd920f2b9f99342cb7fab93f72"}, - {file = "typed_ast-1.5.4-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:79b1e0869db7c830ba6a981d58711c88b6677506e648496b1f64ac7d15633aec"}, - {file = "typed_ast-1.5.4-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a94d55d142c9265f4ea46fab70977a1944ecae359ae867397757d836ea5a3f47"}, - {file = "typed_ast-1.5.4-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:183afdf0ec5b1b211724dfef3d2cad2d767cbefac291f24d69b00546c1837fb6"}, - {file = "typed_ast-1.5.4-cp36-cp36m-win_amd64.whl", hash = "sha256:639c5f0b21776605dd6c9dbe592d5228f021404dafd377e2b7ac046b0349b1a1"}, - {file = "typed_ast-1.5.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:cf4afcfac006ece570e32d6fa90ab74a17245b83dfd6655a6f68568098345ff6"}, - {file = "typed_ast-1.5.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ed855bbe3eb3715fca349c80174cfcfd699c2f9de574d40527b8429acae23a66"}, - {file = "typed_ast-1.5.4-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:6778e1b2f81dfc7bc58e4b259363b83d2e509a65198e85d5700dfae4c6c8ff1c"}, - {file = "typed_ast-1.5.4-cp37-cp37m-win_amd64.whl", hash = 
"sha256:0261195c2062caf107831e92a76764c81227dae162c4f75192c0d489faf751a2"}, - {file = "typed_ast-1.5.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2efae9db7a8c05ad5547d522e7dbe62c83d838d3906a3716d1478b6c1d61388d"}, - {file = "typed_ast-1.5.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7d5d014b7daa8b0bf2eaef684295acae12b036d79f54178b92a2b6a56f92278f"}, - {file = "typed_ast-1.5.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:370788a63915e82fd6f212865a596a0fefcbb7d408bbbb13dea723d971ed8bdc"}, - {file = "typed_ast-1.5.4-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:4e964b4ff86550a7a7d56345c7864b18f403f5bd7380edf44a3c1fb4ee7ac6c6"}, - {file = "typed_ast-1.5.4-cp38-cp38-win_amd64.whl", hash = "sha256:683407d92dc953c8a7347119596f0b0e6c55eb98ebebd9b23437501b28dcbb8e"}, - {file = "typed_ast-1.5.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4879da6c9b73443f97e731b617184a596ac1235fe91f98d279a7af36c796da35"}, - {file = "typed_ast-1.5.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3e123d878ba170397916557d31c8f589951e353cc95fb7f24f6bb69adc1a8a97"}, - {file = "typed_ast-1.5.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ebd9d7f80ccf7a82ac5f88c521115cc55d84e35bf8b446fcd7836eb6b98929a3"}, - {file = "typed_ast-1.5.4-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:98f80dee3c03455e92796b58b98ff6ca0b2a6f652120c263efdba4d6c5e58f72"}, - {file = "typed_ast-1.5.4-cp39-cp39-win_amd64.whl", hash = "sha256:0fdbcf2fef0ca421a3f5912555804296f0b0960f0418c440f5d6d3abb549f3e1"}, - {file = "typed_ast-1.5.4.tar.gz", hash = "sha256:39e21ceb7388e4bb37f4c679d72707ed46c2fbf2a5609b8b8ebc4b067d977df2"}, +tomlkit = [ + {file = "tomlkit-0.11.1-py3-none-any.whl", hash = "sha256:1c5bebdf19d5051e2e1de6cf70adfc5948d47221f097fcff7a3ffc91e953eaf5"}, + {file = "tomlkit-0.11.1.tar.gz", hash = "sha256:61901f81ff4017951119cd0d1ed9b7af31c821d6845c8c477587bbdcd5e5854e"}, ] typing-extensions = [ - {file = "typing_extensions-4.0.1-py3-none-any.whl", hash = "sha256:7f001e5ac290a0c0401508864c7ec868be4e701886d5b573a9528ed3973d9d3b"}, - {file = "typing_extensions-4.0.1.tar.gz", hash = "sha256:4ca091dea149f945ec56afb48dae714f21e8692ef22a395223bcd328961b6a0e"}, + {file = "typing_extensions-4.3.0-py3-none-any.whl", hash = "sha256:25642c956049920a5aa49edcdd6ab1e06d7e5d467fc00e0506c44ac86fbfca02"}, + {file = "typing_extensions-4.3.0.tar.gz", hash = "sha256:e6d2677a32f47fc7eb2795db1dd15c1f34eff616bcaf2cfb5e997f854fa1c4a6"}, ] uritemplate = [ {file = "uritemplate-3.0.1-py2.py3-none-any.whl", hash = "sha256:07620c3f3f8eed1f12600845892b0e036a2420acf513c53f7de0abd911a5894f"}, {file = "uritemplate-3.0.1.tar.gz", hash = "sha256:5af8ad10cec94f215e3f48112de2022e1d5a37ed427fbd88652fa908f2ab7cae"}, ] urllib3 = [ - {file = "urllib3-1.26.9-py2.py3-none-any.whl", hash = "sha256:44ece4d53fb1706f667c9bd1c648f5469a2ec925fcf3a776667042d645472c14"}, - {file = "urllib3-1.26.9.tar.gz", hash = "sha256:aabaf16477806a5e1dd19aa41f8c2b7950dd3c746362d7e3223dbe6de6ac448e"}, + {file = "urllib3-1.26.10-py2.py3-none-any.whl", hash = "sha256:8298d6d56d39be0e3bc13c1c97d133f9b45d797169a0e11cdd0e0489d786f7ec"}, + {file = "urllib3-1.26.10.tar.gz", hash = "sha256:879ba4d1e89654d9769ce13121e0f94310ea32e8d2f8cf587b77c08bbcdb30d6"}, ] wcwidth = [ {file = "wcwidth-0.2.5-py2.py3-none-any.whl", hash = "sha256:beb4802a9cebb9144e99086eff703a642a13d6a0052920003a230f3294bbe784"}, @@ -3087,6 
+3031,6 @@ yarl = [ {file = "yarl-1.7.2.tar.gz", hash = "sha256:45399b46d60c253327a460e99856752009fcee5f5d3c80b2f7c0cae1c38d56dd"}, ] zipp = [ - {file = "zipp-3.8.0-py3-none-any.whl", hash = "sha256:c4f6e5bbf48e74f7a38e7cc5b0480ff42b0ae5178957d564d18932525d5cf099"}, - {file = "zipp-3.8.0.tar.gz", hash = "sha256:56bf8aadb83c24db6c4b577e13de374ccfb67da2078beba1d037c17980bf43ad"}, + {file = "zipp-3.8.1-py3-none-any.whl", hash = "sha256:47c40d7fe183a6f21403a199b3e4192cca5774656965b0a4988ad2f8feb5f009"}, + {file = "zipp-3.8.1.tar.gz", hash = "sha256:05b45f1ee8f807d0cc928485ca40a07cb491cf092ff587c0df9cb1fd154848d2"}, ] diff --git a/pyproject.toml b/pyproject.toml index 9552242694..186da55688 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -28,7 +28,7 @@ packages = [ openpype = 'start:boot' [tool.poetry.dependencies] -python = "3.7.*" +python = "3.9.*" aiohttp = "^3.7" aiohttp_json_rpc = "*" # TVPaint server acre = { git = "https://github.com/pypeclub/acre.git" } From 165088e721cf21fdc80da5f8e0f32b7f1cf680a6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Thu, 4 Aug 2022 14:48:53 +0200 Subject: [PATCH 002/480] :bug: fix py3.9 deprecated method name --- openpype/modules/ftrack/tray/ftrack_tray.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/modules/ftrack/tray/ftrack_tray.py b/openpype/modules/ftrack/tray/ftrack_tray.py index 2919ae22fb..e4ccc59c7a 100644 --- a/openpype/modules/ftrack/tray/ftrack_tray.py +++ b/openpype/modules/ftrack/tray/ftrack_tray.py @@ -204,7 +204,7 @@ class FtrackTrayWrapper: failed_count = 0 # If thread failed test Ftrack and Mongo connection - elif not self.thread_socket_server.isAlive(): + elif not self.thread_socket_server.is_alive(): self.thread_socket_server.join() self.thread_socket_server = None ftrack_accessible = False From a6415e2e70197b2bf729fcd364bf6f067771f18b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Thu, 4 Aug 2022 15:09:51 +0200 Subject: [PATCH 003/480] :arrow_up: bump version in python checks --- tools/build.sh | 8 ++++---- tools/build_win_installer.ps1 | 8 ++++---- tools/create_env.ps1 | 8 ++++---- tools/create_env.sh | 8 ++++---- 4 files changed, 16 insertions(+), 16 deletions(-) diff --git a/tools/build.sh b/tools/build.sh index 79fb748cd5..b5aa22dfc0 100755 --- a/tools/build.sh +++ b/tools/build.sh @@ -90,7 +90,7 @@ done ############################################################################### detect_python () { echo -e "${BIGreen}>>>${RST} Using python \c" - command -v python >/dev/null 2>&1 || { echo -e "${BIRed}- NOT FOUND${RST} ${BIYellow}You need Python 3.7 installed to continue.${RST}"; return 1; } + command -v python >/dev/null 2>&1 || { echo -e "${BIRed}- NOT FOUND${RST} ${BIYellow}You need Python 3.9 installed to continue.${RST}"; return 1; } local version_command version_command="import sys;print('{0}.{1}'.format(sys.version_info[0], sys.version_info[1]))" local python_version @@ -99,9 +99,9 @@ detect_python () { IFS=. 
set -- $python_version IFS="$oIFS" - if [ "$1" -ge "3" ] && [ "$2" -ge "6" ] ; then - if [ "$2" -gt "7" ] ; then - echo -e "${BIWhite}[${RST} ${BIRed}$1.$2 ${BIWhite}]${RST} - ${BIRed}FAILED${RST} ${BIYellow}Version is new and unsupported, use${RST} ${BIPurple}3.7.x${RST}"; return 1; + if [ "$1" -ge "3" ] && [ "$2" -ge "9" ] ; then + if [ "$2" -gt "9" ] ; then + echo -e "${BIWhite}[${RST} ${BIRed}$1.$2 ${BIWhite}]${RST} - ${BIRed}FAILED${RST} ${BIYellow}Version is new and unsupported, use${RST} ${BIPurple}3.9.x${RST}"; return 1; else echo -e "${BIWhite}[${RST} ${BIGreen}$1.$2${RST} ${BIWhite}]${RST}" fi diff --git a/tools/build_win_installer.ps1 b/tools/build_win_installer.ps1 index b9d1ca2d3f..6549e57117 100644 --- a/tools/build_win_installer.ps1 +++ b/tools/build_win_installer.ps1 @@ -131,15 +131,15 @@ if(-not $m) { Set-Location -Path $current_dir Exit-WithCode 1 } -# We are supporting python 3.7 only -if (($matches[1] -lt 3) -or ($matches[2] -lt 7)) { +# We are supporting python 3.9 +if (($matches[1] -lt 3) -or ($matches[2] -lt 9)) { Write-Host "FAILED Version [ $p ] is old and unsupported" -ForegroundColor red Set-Location -Path $current_dir Exit-WithCode 1 -} elseif (($matches[1] -eq 3) -and ($matches[2] -gt 7)) { +} elseif (($matches[1] -eq 3) -and ($matches[2] -gt 9)) { Write-Host "WARNING Version [ $p ] is unsupported, use at your own risk." -ForegroundColor yellow Write-Host "*** " -NoNewline -ForegroundColor yellow - Write-Host "OpenPype supports only Python 3.7" -ForegroundColor white + Write-Host "OpenPype supports only Python 3.9" -ForegroundColor white } else { Write-Host "OK [ $p ]" -ForegroundColor green } diff --git a/tools/create_env.ps1 b/tools/create_env.ps1 index 3f956e5c6a..ea38ae4745 100644 --- a/tools/create_env.ps1 +++ b/tools/create_env.ps1 @@ -99,14 +99,14 @@ print('{0}.{1}'.format(sys.version_info[0], sys.version_info[1])) Set-Location -Path $current_dir Exit-WithCode 1 } - # We are supporting python 3.7 only - if (($matches[1] -lt 3) -or ($matches[2] -lt 7)) { + # We are supporting python 3.9 only + if (($matches[1] -lt 3) -or ($matches[2] -lt 9)) { Write-Color -Text "FAILED ", "Version ", "[", $p ,"]", "is old and unsupported" -Color Red, Yellow, Cyan, White, Cyan, Yellow Set-Location -Path $current_dir Exit-WithCode 1 - } elseif (($matches[1] -eq 3) -and ($matches[2] -gt 7)) { + } elseif (($matches[1] -eq 3) -and ($matches[2] -gt 9)) { Write-Color -Text "WARNING Version ", "[", $p, "]", " is unsupported, use at your own risk." 
-Color Yellow, Cyan, White, Cyan, Yellow - Write-Color -Text "*** ", "OpenPype supports only Python 3.7" -Color Yellow, White + Write-Color -Text "*** ", "OpenPype supports only Python 3.9" -Color Yellow, White } else { Write-Color "OK ", "[", $p, "]" -Color Green, Cyan, White, Cyan } diff --git a/tools/create_env.sh b/tools/create_env.sh index fba3942b87..ae27370227 100755 --- a/tools/create_env.sh +++ b/tools/create_env.sh @@ -88,16 +88,16 @@ done ############################################################################### detect_python () { echo -e "${BIGreen}>>>${RST} Using python \c" - command -v python >/dev/null 2>&1 || { echo -e "${BIRed}- NOT FOUND${RST} ${BIYellow}You need Python 3.7 installed to continue.${RST}"; return 1; } + command -v python >/dev/null 2>&1 || { echo -e "${BIRed}- NOT FOUND${RST} ${BIYellow}You need Python 3.9 installed to continue.${RST}"; return 1; } local version_command="import sys;print('{0}.{1}'.format(sys.version_info[0], sys.version_info[1]))" local python_version="$(python <<< ${version_command})" oIFS="$IFS" IFS=. set -- $python_version IFS="$oIFS" - if [ "$1" -ge "3" ] && [ "$2" -ge "6" ] ; then - if [ "$2" -gt "7" ] ; then - echo -e "${BIWhite}[${RST} ${BIRed}$1.$2 ${BIWhite}]${RST} - ${BIRed}FAILED${RST} ${BIYellow}Version is new and unsupported, use${RST} ${BIPurple}3.7.x${RST}"; return 1; + if [ "$1" -ge "3" ] && [ "$2" -ge "9" ] ; then + if [ "$2" -gt "9" ] ; then + echo -e "${BIWhite}[${RST} ${BIRed}$1.$2 ${BIWhite}]${RST} - ${BIRed}FAILED${RST} ${BIYellow}Version is new and unsupported, use${RST} ${BIPurple}3.9.x${RST}"; return 1; else echo -e "${BIWhite}[${RST} ${BIGreen}$1.$2${RST} ${BIWhite}]${RST}" fi From bb638050732640c984372b8ed190f3c80a887ae4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Thu, 4 Aug 2022 17:49:33 +0200 Subject: [PATCH 004/480] :memo: update documentation --- website/docs/admin_distribute.md | 8 ++++++-- website/docs/dev_build.md | 22 +++++++++++----------- website/docs/dev_requirements.md | 8 ++++---- 3 files changed, 21 insertions(+), 17 deletions(-) diff --git a/website/docs/admin_distribute.md b/website/docs/admin_distribute.md index 2cccce0fbd..50843351d9 100644 --- a/website/docs/admin_distribute.md +++ b/website/docs/admin_distribute.md @@ -17,10 +17,12 @@ Distribution consists of two parts It is self contained (frozen) software that also includes all of the OpenPype codebase with the version from the time of the build. - Igniter package is around 500MB and preparing an updated version requires you to re-build pype. That would be + Igniter package is around 1Gb and preparing an updated version requires you to re-build pype. That would be inconvenient for regular and quick distribution of production updates and fixes. So you can distribute those independently, without requiring you artists to re-install every time. + You can have multiple versions installed at the same time. + ### 2. OpenPype Codebase When you upgrade your studio pype deployment to a new version or make any local code changes, you can distribute @@ -77,4 +79,6 @@ For example OpenPype will consider the versions in this order: `3.8.0-nightly` < See https://semver.org/ for more details. -For studios customizing the source code of OpenPype, a practical approach could be to build by adding a name and a number after the PATCH and not to deploy 3.8.0 from original OpenPype repository. For example, your builds will be: `3.8.0-yourstudio.1` < `3.8.0-yourstudio.2` < `3.8.1-yourstudio.1`. 
\ No newline at end of file +For studios customizing the source code of OpenPype, a practical approach could be to build by adding a name and a number after the PATCH and not to deploy 3.8.0 from original OpenPype repository. For example, your builds will be: `3.8.0-yourstudio.1` < `3.8.0-yourstudio.2` < `3.8.1-yourstudio.1`. + +Versions of Igniter and those coming in zips are compatible if they match major and minor version - `3.13.4` is compatible with `3.13.1` but not with `3.12.2` or `3.14.0`. diff --git a/website/docs/dev_build.md b/website/docs/dev_build.md index 4e80f6e19d..42c0bd4dc4 100644 --- a/website/docs/dev_build.md +++ b/website/docs/dev_build.md @@ -11,7 +11,7 @@ import TabItem from '@theme/TabItem'; To build Pype you currently need (on all platforms): -- **[Python 3.7](https://www.python.org/downloads/)** as we are following [vfx platform](https://vfxplatform.com). +- **[Python 3.9](https://www.python.org/downloads/)** as we are following [vfx platform CY2022](https://vfxplatform.com). - **[git](https://git-scm.com/downloads)** We use [CX_Freeze](https://cx-freeze.readthedocs.io/en/latest) to freeze the code and all dependencies and @@ -114,8 +114,8 @@ To build OpenPype on Linux you will need: - **[curl](https://curl.se)** on systems that doesn't have one preinstalled. - **bzip2**, **readline**, **sqlite3** and other libraries. -Because some Linux distros come with newer Python version pre-installed, you might -need to install **3.7** version and make use of it explicitly. +Because some Linux distros come with older Python version pre-installed, you might +need to install **3.9** version and make use of it explicitly. Your best bet is probably using [pyenv](https://github.com/pyenv/pyenv). You can use your package manager to install **git** and other packages to your build @@ -136,16 +136,16 @@ $ eval "$(pyenv virtualenv-init -)" # reload shell $ exec $SHELL -# install Python 3.7.10 +# install Python 3.9.6 # python will be downloaded and build so please make sure # you have all necessary requirements installed (see below). -$ pyenv install -v 3.7.10 +$ pyenv install -v 3.9.6 # change path to pype 3 $ cd /path/to/pype-3 # set local python version -$ pyenv local 3.7.10 +$ pyenv local 3.9.6 ``` :::note Install build requirements for **Ubuntu** @@ -220,19 +220,19 @@ $ exec "$SHELL" $ PATH=$(pyenv root)/shims:$PATH ``` -4) Pull in required Python version 3.7.x +4) Pull in required Python version 3.9.x ```shell # install Python build dependences $ brew install openssl readline sqlite3 xz zlib -# replace with up-to-date 3.7.x version -$ pyenv install 3.7.9 +# replace with up-to-date 3.9.x version +$ pyenv install 3.9.6 ``` 5) Set local Python version ```shell # switch to Pype source directory -$ pyenv local 3.7.9 +$ pyenv local 3.9.6 ``` 6) Install `create-dmg` @@ -256,7 +256,7 @@ to `pyproject.toml` to `[tool.poetry.dependencies]` section. 
```toml title="/pyproject.toml" [tool.poetry.dependencies] -python = "3.7.*" +python = "3.9.*" aiohttp = "^3.7" aiohttp_json_rpc = "*" # TVPaint server acre = { git = "https://github.com/pypeclub/acre.git" } diff --git a/website/docs/dev_requirements.md b/website/docs/dev_requirements.md index eb4b132297..640b27d69d 100644 --- a/website/docs/dev_requirements.md +++ b/website/docs/dev_requirements.md @@ -14,7 +14,7 @@ The main things you will need to run and build pype are: - **Terminal** in your OS - PowerShell 5.0+ (Windows) - Bash (Linux) -- [**Python 3.7.9**](#python) or higher +- [**Python 3.9.x**](#python) - [**MongoDB**](#database) @@ -55,13 +55,13 @@ To run mongoDB on server, use your server distribution tools to set it up (on Li ## Python -**Python 3.7.8** is the recommended version to use (as per [VFX platform CY2021](https://vfxplatform.com/)). +**Python 3.9.x** is the recommended version to use (as per [VFX platform CY2022](https://vfxplatform.com/)). -If you're planning to run openPYPE on workstations from built executables (highly recommended), you will only need python for building and development, however, if you'd like to run from source centrally, every user will need python installed. +If you're planning to run openPYPE on workstations from built executables (highly recommended), you will only need python for building and development, however, if you'd like to run from source centrally, every user will need python installed. ## Hardware -openPYPE should be installed on all workstations that need to use it, the same as any other application. +openPYPE should be installed on all workstations that need to use it, the same as any other application. There are no specific requirements for the hardware. If the workstation can run the major DCCs, it most probably can run openPYPE. 
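The major/minor compatibility rule added to `admin_distribute.md` above can be expressed in a few lines of Python. This is only a sketch of the rule as documented; the helper names below are illustrative and are not part of the OpenPype codebase.

```python
# Sketch of the documented rule: Igniter and zip versions are compatible
# when they share major and minor numbers. Helper names are illustrative only.

def _major_minor(version):
    """Return (major, minor) from strings like '3.13.4' or '3.8.0-yourstudio.1'."""
    core = version.split("-", 1)[0]        # drop any '-yourstudio.N' style suffix
    major, minor = core.split(".")[:2]
    return int(major), int(minor)

def zip_matches_igniter(igniter_version, zip_version):
    """Compatible when both versions share the same major.minor series."""
    return _major_minor(igniter_version) == _major_minor(zip_version)

assert zip_matches_igniter("3.13.1", "3.13.4")        # same 3.13 series
assert not zip_matches_igniter("3.13.1", "3.12.2")    # older minor
assert not zip_matches_igniter("3.13.1", "3.14.0")    # newer minor
```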
From b1853e5b914746523a87dcf637da2e93fc379db6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Thu, 4 Aug 2022 18:03:17 +0200 Subject: [PATCH 005/480] :arrow_up: update python in dockerfiles --- Dockerfile | 2 +- Dockerfile.centos7 | 12 ++++++------ 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/Dockerfile b/Dockerfile index 7232223c3c..46dd9e5c0a 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,6 +1,6 @@ # Build Pype docker image FROM ubuntu:focal AS builder -ARG OPENPYPE_PYTHON_VERSION=3.7.12 +ARG OPENPYPE_PYTHON_VERSION=3.9.12 ARG BUILD_DATE ARG VERSION diff --git a/Dockerfile.centos7 b/Dockerfile.centos7 index be3db58b62..5eb2f478ea 100644 --- a/Dockerfile.centos7 +++ b/Dockerfile.centos7 @@ -1,6 +1,6 @@ # Build Pype docker image FROM centos:7 AS builder -ARG OPENPYPE_PYTHON_VERSION=3.7.12 +ARG OPENPYPE_PYTHON_VERSION=3.9.12 LABEL org.opencontainers.image.name="pypeclub/openpype" LABEL org.opencontainers.image.title="OpenPype Docker Image" @@ -96,11 +96,11 @@ RUN source $HOME/.bashrc \ RUN source $HOME/.bashrc \ && bash ./tools/build.sh -RUN cp /usr/lib64/libffi* ./build/exe.linux-x86_64-3.7/lib \ - && cp /usr/lib64/libssl* ./build/exe.linux-x86_64-3.7/lib \ - && cp /usr/lib64/libcrypto* ./build/exe.linux-x86_64-3.7/lib \ - && cp /root/.pyenv/versions/${OPENPYPE_PYTHON_VERSION}/lib/libpython* ./build/exe.linux-x86_64-3.7/lib \ - && cp /usr/lib64/libxcb* ./build/exe.linux-x86_64-3.7/vendor/python/PySide2/Qt/lib +RUN cp /usr/lib64/libffi* ./build/exe.linux-x86_64-3.9/lib \ + && cp /usr/lib64/libssl* ./build/exe.linux-x86_64-3.9/lib \ + && cp /usr/lib64/libcrypto* ./build/exe.linux-x86_64-3.9/lib \ + && cp /root/.pyenv/versions/${OPENPYPE_PYTHON_VERSION}/lib/libpython* ./build/exe.linux-x86_64-3.9/lib \ + && cp /usr/lib64/libxcb* ./build/exe.linux-x86_64-3.9/vendor/python/PySide2/Qt/lib RUN cd /opt/openpype \ rm -rf ./vendor/bin From 0d70b50b3fd5cd2832dce4ff88315bebb9baa083 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Thu, 4 Aug 2022 19:36:46 +0200 Subject: [PATCH 006/480] :arrow_up: update zipping python --- tools/create_zip.sh | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/tools/create_zip.sh b/tools/create_zip.sh index 46393f78b1..316d456338 100755 --- a/tools/create_zip.sh +++ b/tools/create_zip.sh @@ -78,9 +78,9 @@ detect_python () { IFS=. 
set -- $python_version IFS="$oIFS" - if [ "$1" -ge "3" ] && [ "$2" -ge "6" ] ; then - if [ "$2" -gt "7" ] ; then - echo -e "${BIWhite}[${RST} ${BIRed}$1.$2 ${BIWhite}]${RST} - ${BIRed}FAILED${RST} ${BIYellow}Version is new and unsupported, use${RST} ${BIPurple}3.7.x${RST}"; return 1; + if [ "$1" -ge "3" ] && [ "$2" -ge "9" ] ; then + if [ "$2" -gt "9" ] ; then + echo -e "${BIWhite}[${RST} ${BIRed}$1.$2 ${BIWhite}]${RST} - ${BIRed}FAILED${RST} ${BIYellow}Version is new and unsupported, use${RST} ${BIPurple}3.9.x${RST}"; return 1; else echo -e "${BIWhite}[${RST} ${BIGreen}$1.$2${RST} ${BIWhite}]${RST}" fi From 832b7bd05293ff5be1fa3192df864e9143e7ecd5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= <33513211+antirotor@users.noreply.github.com> Date: Fri, 12 Aug 2022 12:58:40 +0200 Subject: [PATCH 007/480] :memo: update python versions --- README.md | 30 +++++++++++++++--------------- 1 file changed, 15 insertions(+), 15 deletions(-) diff --git a/README.md b/README.md index b8c04f8b49..8a1e88b1f3 100644 --- a/README.md +++ b/README.md @@ -5,7 +5,7 @@ OpenPype ==== -[![documentation](https://github.com/pypeclub/pype/actions/workflows/documentation.yml/badge.svg)](https://github.com/pypeclub/pype/actions/workflows/documentation.yml) ![GitHub VFX Platform](https://img.shields.io/badge/vfx%20platform-2021-lightgrey?labelColor=303846) +[![documentation](https://github.com/pypeclub/pype/actions/workflows/documentation.yml/badge.svg)](https://github.com/pypeclub/pype/actions/workflows/documentation.yml) ![GitHub VFX Platform](https://img.shields.io/badge/vfx%20platform-2022-lightgrey?labelColor=303846) @@ -31,7 +31,7 @@ The main things you will need to run and build OpenPype are: - **Terminal** in your OS - PowerShell 5.0+ (Windows) - Bash (Linux) -- [**Python 3.7.8**](#python) or higher +- [**Python 3.9.6**](#python) or higher - [**MongoDB**](#database) (needed only for local development) @@ -50,13 +50,13 @@ For more details on requirements visit [requirements documentation](https://open Building OpenPype ------------- -To build OpenPype you currently need [Python 3.7](https://www.python.org/downloads/) as we are following +To build OpenPype you currently need [Python 3.9](https://www.python.org/downloads/) as we are following [vfx platform](https://vfxplatform.com). Because of some Linux distros comes with newer Python version -already, you need to install **3.7** version and make use of it. You can use perhaps [pyenv](https://github.com/pyenv/pyenv) for this on Linux. +already, you need to install **3.9** version and make use of it. You can use perhaps [pyenv](https://github.com/pyenv/pyenv) for this on Linux. ### Windows -You will need [Python 3.7](https://www.python.org/downloads/) and [git](https://git-scm.com/downloads). +You will need [Python 3.9](https://www.python.org/downloads/) and [git](https://git-scm.com/downloads). More tools might be needed for installing dependencies (for example for **OpenTimelineIO**) - mostly development tools like [CMake](https://cmake.org/) and [Visual Studio](https://visualstudio.microsoft.com/cs/downloads/) @@ -82,7 +82,7 @@ OpenPype is build using [CX_Freeze](https://cx-freeze.readthedocs.io/en/latest) ### macOS -You will need [Python 3.7](https://www.python.org/downloads/) and [git](https://git-scm.com/downloads). You'll need also other tools to build +You will need [Python 3.9](https://www.python.org/downloads/) and [git](https://git-scm.com/downloads). 
You'll need also other tools to build some OpenPype dependencies like [CMake](https://cmake.org/) and **XCode Command Line Tools** (or some other build system). Easy way of installing everything necessary is to use [Homebrew](https://brew.sh): @@ -106,19 +106,19 @@ exec "$SHELL" PATH=$(pyenv root)/shims:$PATH ``` -4) Pull in required Python version 3.7.x +4) Pull in required Python version 3.9.x ```sh # install Python build dependences brew install openssl readline sqlite3 xz zlib -# replace with up-to-date 3.7.x version -pyenv install 3.7.9 +# replace with up-to-date 3.9.x version +pyenv install 3.9.6 ``` 5) Set local Python version ```sh # switch to OpenPype source directory -pyenv local 3.7.9 +pyenv local 3.9.6 ``` #### To build OpenPype: @@ -145,7 +145,7 @@ sudo ./tools/docker_build.sh centos7 If all is successful, you'll find built OpenPype in `./build/` folder. #### Manual build -You will need [Python 3.7](https://www.python.org/downloads/) and [git](https://git-scm.com/downloads). You'll also need [curl](https://curl.se) on systems that doesn't have one preinstalled. +You will need [Python 3.9](https://www.python.org/downloads/) and [git](https://git-scm.com/downloads). You'll also need [curl](https://curl.se) on systems that doesn't have one preinstalled. To build Python related stuff, you need Python header files installed (`python3-dev` on Ubuntu for example). @@ -222,14 +222,14 @@ eval "$(pyenv virtualenv-init -)" # reload shell exec $SHELL -# install Python 3.7.9 -pyenv install -v 3.7.9 +# install Python 3.9.x +pyenv install -v 3.9.6 # change path to OpenPype 3 cd /path/to/openpype-3 # set local python version -pyenv local 3.7.9 +pyenv local 3.9.6 ``` @@ -337,4 +337,4 @@ Thanks goes to these wonderful people ([emoji key](https://allcontributors.org/d -This project follows the [all-contributors](https://github.com/all-contributors/all-contributors) specification. Contributions of any kind welcome! \ No newline at end of file +This project follows the [all-contributors](https://github.com/all-contributors/all-contributors) specification. Contributions of any kind welcome! 
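The updated `tools/create_env.sh` and `tools/create_zip.sh` hunks above gate the build on Python 3.9.x. A rough Python rendering of that gate is shown below, for illustration only and not code from the repository, assuming the same accept/reject behaviour as the bash variant (exactly 3.9 passes, older or newer fails).

```python
# Rough rendering of the version gate from the updated shell scripts:
# exactly 3.9.x passes, older or newer interpreters are rejected.
# Illustration only -- not part of the OpenPype repository.
import sys

REQUIRED = (3, 9)  # OpenPype now targets Python 3.9 (VFX Reference Platform CY2022)

def detect_python():
    major, minor = sys.version_info[:2]
    if (major, minor) == REQUIRED:
        print("OK [ {}.{} ]".format(major, minor))
        return True
    age = "old" if (major, minor) < REQUIRED else "new"
    print("FAILED: {}.{} is {} and unsupported, use {}.{}.x".format(
        major, minor, age, REQUIRED[0], REQUIRED[1]))
    return False

if __name__ == "__main__":
    sys.exit(0 if detect_python() else 1)
```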
From 7accb372cd65df831233d379b861a3da84e7f0d9 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 15 Aug 2022 14:00:11 +0200 Subject: [PATCH 008/480] remove tray publisher from experimental tools --- openpype/modules/traypublish_action.py | 15 ++------------- openpype/tools/experimental_tools/tools_def.py | 5 ----- 2 files changed, 2 insertions(+), 18 deletions(-) diff --git a/openpype/modules/traypublish_action.py b/openpype/modules/traypublish_action.py index 39163b8eb8..1038bac47b 100644 --- a/openpype/modules/traypublish_action.py +++ b/openpype/modules/traypublish_action.py @@ -2,7 +2,7 @@ import os from openpype.lib import get_openpype_execute_args from openpype.lib.execute import run_detached_process from openpype.modules import OpenPypeModule -from openpype_interfaces import ITrayAction +from openpype.modules.interfaces import ITrayAction class TrayPublishAction(OpenPypeModule, ITrayAction): @@ -21,20 +21,9 @@ class TrayPublishAction(OpenPypeModule, ITrayAction): "publish" ) ] - self._experimental_tools = None def tray_init(self): - from openpype.tools.experimental_tools import ExperimentalTools - - self._experimental_tools = ExperimentalTools() - - def tray_menu(self, *args, **kwargs): - super(TrayPublishAction, self).tray_menu(*args, **kwargs) - traypublisher = self._experimental_tools.get("traypublisher") - visible = False - if traypublisher and traypublisher.enabled: - visible = True - self._action_item.setVisible(visible) + return def on_action_trigger(self): self.run_traypublisher() diff --git a/openpype/tools/experimental_tools/tools_def.py b/openpype/tools/experimental_tools/tools_def.py index fa2971dc1d..38a49e8ab5 100644 --- a/openpype/tools/experimental_tools/tools_def.py +++ b/openpype/tools/experimental_tools/tools_def.py @@ -89,11 +89,6 @@ class ExperimentalTools: "New publisher", "Combined creation and publishing into one tool.", self._show_publisher - ), - ExperimentalTool( - "traypublisher", - "New Standalone Publisher", - "Standalone publisher using new publisher. Requires restart" ) ] From 8e6dcbc048be40e038e3406fd3b3ae22e1631c9a Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 15 Aug 2022 14:00:20 +0200 Subject: [PATCH 009/480] change label of tray publisher action --- openpype/modules/traypublish_action.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/modules/traypublish_action.py b/openpype/modules/traypublish_action.py index 1038bac47b..29aea07210 100644 --- a/openpype/modules/traypublish_action.py +++ b/openpype/modules/traypublish_action.py @@ -6,7 +6,7 @@ from openpype.modules.interfaces import ITrayAction class TrayPublishAction(OpenPypeModule, ITrayAction): - label = "New Publish (beta)" + label = "Tray Publish" name = "traypublish_tool" def initialize(self, modules_settings): From d3db2bbea538981a3f711c12cc722cccd8bdc147 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Mon, 29 Aug 2022 13:58:01 +0200 Subject: [PATCH 010/480] :bug: fixed poetry lock --- poetry.lock | 728 +++++++++++++++++++++++++++++++++++++--------------- 1 file changed, 523 insertions(+), 205 deletions(-) diff --git a/poetry.lock b/poetry.lock index c68ffdf1ea..1af92e4de2 100644 --- a/poetry.lock +++ b/poetry.lock @@ -112,17 +112,16 @@ python-dateutil = ">=2.7.0" [[package]] name = "astroid" -version = "2.11.7" +version = "2.12.5" description = "An abstract syntax tree for Python with inference support." 
category = "dev" optional = false -python-versions = ">=3.6.2" +python-versions = ">=3.7.2" [package.dependencies] lazy-object-proxy = ">=1.4.0" -typed-ast = {version = ">=1.4.0,<2.0", markers = "implementation_name == \"cpython\" and python_version < \"3.8\""} typing-extensions = {version = ">=3.10", markers = "python_version < \"3.10\""} -wrapt = ">=1.11,<2" +wrapt = {version = ">=1.11,<2", markers = "python_version < \"3.11\""} [[package]] name = "async-timeout" @@ -179,15 +178,12 @@ pytz = ">=2015.7" [[package]] name = "bcrypt" -version = "3.2.2" +version = "4.0.0" description = "Modern password hashing for your software and your servers" category = "main" optional = false python-versions = ">=3.6" -[package.dependencies] -cffi = ">=1.1" - [package.extras] tests = ["pytest (>=3.2.1,!=3.3.0)"] typecheck = ["mypy"] @@ -293,7 +289,7 @@ python-versions = "*" [[package]] name = "coverage" -version = "6.4.3" +version = "6.4.4" description = "Code coverage measurement for Python" category = "dev" optional = false @@ -387,7 +383,7 @@ wmi = ["wmi (>=1.5.1,<2.0.0)"] [[package]] name = "docutils" -version = "0.17.1" +version = "0.18.1" description = "Docutils -- Python Documentation Utilities" category = "dev" optional = false @@ -548,7 +544,7 @@ uritemplate = ">=3.0.0,<4dev" [[package]] name = "google-auth" -version = "2.10.0" +version = "2.11.0" description = "Google Authentication Library" category = "main" optional = false @@ -681,8 +677,8 @@ optional = false python-versions = ">=3.7" [package.extras] -trio = ["async-generator", "trio"] -test = ["async-timeout", "trio", "testpath", "pytest-asyncio (>=0.17)", "pytest-trio", "pytest"] +test = ["pytest", "pytest-trio", "pytest-asyncio (>=0.17)", "testpath", "trio", "async-timeout"] +trio = ["trio", "async-generator"] [[package]] name = "jinja2" @@ -759,11 +755,11 @@ pymongo = "*" [[package]] name = "markupsafe" -version = "2.1.1" +version = "2.0.1" description = "Safely add untrusted strings to HTML/XML markup." category = "dev" optional = false -python-versions = ">=3.7" +python-versions = ">=3.6" [[package]] name = "mccabe" @@ -996,22 +992,25 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" [[package]] name = "pygments" -version = "2.12.0" +version = "2.13.0" description = "Pygments is a syntax highlighting package written in Python." 
category = "dev" optional = false python-versions = ">=3.6" +[package.extras] +plugins = ["importlib-metadata"] + [[package]] name = "pylint" -version = "2.14.5" +version = "2.15.0" description = "python code static checker" category = "dev" optional = false python-versions = ">=3.7.2" [package.dependencies] -astroid = ">=2.11.6,<=2.12.0-dev0" +astroid = ">=2.12.4,<=2.14.0-dev0" colorama = {version = ">=0.4.5", markers = "sys_platform == \"win32\""} dill = ">=0.2" isort = ">=4.2.5,<6" @@ -1251,7 +1250,7 @@ python-versions = "*" [[package]] name = "pytz" -version = "2022.2" +version = "2022.2.1" description = "World timezone definitions, modern and historical" category = "dev" optional = false @@ -1345,7 +1344,7 @@ pyasn1 = ">=0.1.3" [[package]] name = "secretstorage" -version = "3.3.2" +version = "3.3.3" description = "Python bindings to FreeDesktop.org Secret Service API" category = "main" optional = false @@ -1424,7 +1423,7 @@ python-versions = "*" [[package]] name = "sphinx" -version = "5.0.2" +version = "5.0.1" description = "Python documentation generator" category = "dev" optional = false @@ -1467,21 +1466,20 @@ docutils = "*" sphinx = "*" [package.extras] -test = ["pytest-cov", "pytest (>=3.0.0)"] -lint = ["pylint", "flake8", "black"] dev = ["pre-commit"] +lint = ["black", "flake8", "pylint"] +test = ["pytest (>=3.0.0)", "pytest-cov"] [[package]] name = "sphinx-rtd-theme" -version = "1.0.0" +version = "0.5.1" description = "Read the Docs theme for Sphinx" category = "dev" optional = false -python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*" +python-versions = "*" [package.dependencies] -docutils = "<0.18" -sphinx = ">=1.6" +sphinx = "*" [package.extras] dev = ["transifex-client", "sphinxcontrib-httpdomain", "bump2version"] @@ -1610,7 +1608,7 @@ python-versions = ">=3.7" [[package]] name = "tomlkit" -version = "0.11.1" +version = "0.11.4" description = "Style preserving TOML library" category = "dev" optional = false @@ -1620,7 +1618,7 @@ python-versions = ">=3.6,<4.0" name = "typing-extensions" version = "4.3.0" description = "Backported and Experimental Type Hints for Python 3.7+" -category = "main" +category = "dev" optional = false python-versions = ">=3.7" @@ -1634,7 +1632,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" [[package]] name = "urllib3" -version = "1.26.11" +version = "1.26.12" description = "HTTP library with thread-safe connection pooling, file post, and more." 
category = "main" optional = false @@ -1642,7 +1640,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, [package.extras] brotli = ["brotlicffi (>=0.8.0)", "brotli (>=1.0.9)", "brotlipy (>=0.6.0)"] -secure = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "certifi", "ipaddress"] +secure = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "certifi", "urllib3-secure-extra", "ipaddress"] socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] [[package]] @@ -1716,7 +1714,7 @@ testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest- [metadata] lock-version = "1.1" python-versions = "3.9.*" -content-hash = "fc91ecccdb9048af4335a1dc4ea7837ff5ef80a53f2417323bc295c7e4505438" +content-hash = "878a639bb6dad7820350e47d09f31fe35758dd5c7161369bc8f1f929aefa92d3" [metadata.files] acre = [] @@ -1798,7 +1796,10 @@ aiohttp-json-rpc = [ {file = "aiohttp-json-rpc-0.13.3.tar.gz", hash = "sha256:6237a104478c22c6ef96c7227a01d6832597b414e4b79a52d85593356a169e99"}, {file = "aiohttp_json_rpc-0.13.3-py3-none-any.whl", hash = "sha256:4fbd197aced61bd2df7ae3237ead7d3e08833c2ccf48b8581e1828c95ebee680"}, ] -aiohttp-middlewares = [] +aiohttp-middlewares = [ + {file = "aiohttp-middlewares-2.1.0.tar.gz", hash = "sha256:5863970d944dc63faedc96ef324a7fe2bcefefebe29acc90cd641236322d00c3"}, + {file = "aiohttp_middlewares-2.1.0-py3-none-any.whl", hash = "sha256:c83d48702e6a8669981976f39a60e83d059dc01d7b1ee651aec5d4cb807ff784"}, +] aiosignal = [ {file = "aiosignal-1.2.0-py3-none-any.whl", hash = "sha256:26e62109036cd181df6e6ad646f91f0dcfd05fe16d0cb924138ff2ab75d64e3a"}, {file = "aiosignal-1.2.0.tar.gz", hash = "sha256:78ed67db6c7b7ced4f98e495e572106d5c432a93e1ddd1bf475e1dc05f5b7df2"}, @@ -1817,8 +1818,8 @@ arrow = [ {file = "arrow-0.17.0.tar.gz", hash = "sha256:ff08d10cda1d36c68657d6ad20d74fbea493d980f8b2d45344e00d6ed2bf6ed4"}, ] astroid = [ - {file = "astroid-2.11.7-py3-none-any.whl", hash = "sha256:86b0a340a512c65abf4368b80252754cda17c02cdbbd3f587dddf98112233e7b"}, - {file = "astroid-2.11.7.tar.gz", hash = "sha256:bb24615c77f4837c707669d16907331374ae8a964650a66999da3f5ca68dc946"}, + {file = "astroid-2.12.5-py3-none-any.whl", hash = "sha256:d612609242996c4365aeb0345e61edba34363eaaba55f1c0addf6a98f073bef6"}, + {file = "astroid-2.12.5.tar.gz", hash = "sha256:396c88d0a58d7f8daadf730b2ce90838bf338c6752558db719ec6f99c18ec20e"}, ] async-timeout = [ {file = "async-timeout-4.0.2.tar.gz", hash = "sha256:2163e1640ddb52b7a8c80d0a67a08587e5d245cc9c553a74a847056bc2976b15"}, @@ -1828,8 +1829,8 @@ atomicwrites = [ {file = "atomicwrites-1.4.1.tar.gz", hash = "sha256:81b2c9071a49367a7f770170e5eec8cb66567cfbbc8c73d20ce5ca4a8d71cf11"}, ] attrs = [ - {file = "attrs-21.4.0-py2.py3-none-any.whl", hash = "sha256:2d27e3784d7a565d36ab851fe94887c5eccd6a463168875832a1be79c82828b4"}, - {file = "attrs-21.4.0.tar.gz", hash = "sha256:626ba8234211db98e869df76230a137c4c40a12d72445c45d5f5b716f076e2fd"}, + {file = "attrs-22.1.0-py2.py3-none-any.whl", hash = "sha256:86efa402f67bf2df34f51a335487cf46b1ec130d02b8d39fd248abfd30da551c"}, + {file = "attrs-22.1.0.tar.gz", hash = "sha256:29adc2665447e5191d0e7c568fde78b21f9672d344281d0c6e1ab085429b22b6"}, ] autopep8 = [ {file = "autopep8-1.5.7-py2.py3-none-any.whl", hash = "sha256:aa213493c30dcdac99537249ee65b24af0b2c29f2e83cd8b3f68760441ed0db9"}, @@ -1840,17 +1841,18 @@ babel = [ {file = "Babel-2.10.3.tar.gz", hash = "sha256:7614553711ee97490f732126dc077f8d0ae084ebc6a96e23db1482afabdb2c51"}, ] bcrypt = [ - {file = 
"bcrypt-3.2.2-cp36-abi3-macosx_10_10_universal2.whl", hash = "sha256:7180d98a96f00b1050e93f5b0f556e658605dd9f524d0b0e68ae7944673f525e"}, - {file = "bcrypt-3.2.2-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:61bae49580dce88095d669226d5076d0b9d927754cedbdf76c6c9f5099ad6f26"}, - {file = "bcrypt-3.2.2-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88273d806ab3a50d06bc6a2fc7c87d737dd669b76ad955f449c43095389bc8fb"}, - {file = "bcrypt-3.2.2-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:6d2cb9d969bfca5bc08e45864137276e4c3d3d7de2b162171def3d188bf9d34a"}, - {file = "bcrypt-3.2.2-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2b02d6bfc6336d1094276f3f588aa1225a598e27f8e3388f4db9948cb707b521"}, - {file = "bcrypt-3.2.2-cp36-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:a2c46100e315c3a5b90fdc53e429c006c5f962529bc27e1dfd656292c20ccc40"}, - {file = "bcrypt-3.2.2-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:7d9ba2e41e330d2af4af6b1b6ec9e6128e91343d0b4afb9282e54e5508f31baa"}, - {file = "bcrypt-3.2.2-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:cd43303d6b8a165c29ec6756afd169faba9396a9472cdff753fe9f19b96ce2fa"}, - {file = "bcrypt-3.2.2-cp36-abi3-win32.whl", hash = "sha256:4e029cef560967fb0cf4a802bcf4d562d3d6b4b1bf81de5ec1abbe0f1adb027e"}, - {file = "bcrypt-3.2.2-cp36-abi3-win_amd64.whl", hash = "sha256:7ff2069240c6bbe49109fe84ca80508773a904f5a8cb960e02a977f7f519b129"}, - {file = "bcrypt-3.2.2.tar.gz", hash = "sha256:433c410c2177057705da2a9f2cd01dd157493b2a7ac14c8593a16b3dab6b6bfb"}, + {file = "bcrypt-4.0.0-cp36-abi3-macosx_10_10_universal2.whl", hash = "sha256:845b1daf4df2dd94d2fdbc9454953ca9dd0e12970a0bfc9f3dcc6faea3fa96e4"}, + {file = "bcrypt-4.0.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:8780e69f9deec9d60f947b169507d2c9816e4f11548f1f7ebee2af38b9b22ae4"}, + {file = "bcrypt-4.0.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c3334446fac200499e8bc04a530ce3cf0b3d7151e0e4ac5c0dddd3d95e97843"}, + {file = "bcrypt-4.0.0-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bfb67f6a6c72dfb0a02f3df51550aa1862708e55128b22543e2b42c74f3620d7"}, + {file = "bcrypt-4.0.0-cp36-abi3-manylinux_2_24_x86_64.whl", hash = "sha256:7c7dd6c1f05bf89e65261d97ac3a6520f34c2acb369afb57e3ea4449be6ff8fd"}, + {file = "bcrypt-4.0.0-cp36-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:594780b364fb45f2634c46ec8d3e61c1c0f1811c4f2da60e8eb15594ecbf93ed"}, + {file = "bcrypt-4.0.0-cp36-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:2d0dd19aad87e4ab882ef1d12df505f4c52b28b69666ce83c528f42c07379227"}, + {file = "bcrypt-4.0.0-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:bf413f2a9b0a2950fc750998899013f2e718d20fa4a58b85ca50b6df5ed1bbf9"}, + {file = "bcrypt-4.0.0-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:ede0f506554571c8eda80db22b83c139303ec6b595b8f60c4c8157bdd0bdee36"}, + {file = "bcrypt-4.0.0-cp36-abi3-win32.whl", hash = "sha256:dc6ec3dc19b1c193b2f7cf279d3e32e7caf447532fbcb7af0906fe4398900c33"}, + {file = "bcrypt-4.0.0-cp36-abi3-win_amd64.whl", hash = "sha256:0b0f0c7141622a31e9734b7f649451147c04ebb5122327ac0bd23744df84be90"}, + {file = "bcrypt-4.0.0.tar.gz", hash = "sha256:c59c170fc9225faad04dde1ba61d85b413946e8ce2e5f5f5ff30dfd67283f319"}, ] blessed = [ {file = 
"blessed-1.19.1-py2.py3-none-any.whl", hash = "sha256:63b8554ae2e0e7f43749b6715c734cc8f3883010a809bf16790102563e6cf25b"}, @@ -1955,47 +1957,56 @@ coolname = [ {file = "coolname-1.1.0.tar.gz", hash = "sha256:410fe6ea9999bf96f2856ef0c726d5f38782bbefb7bb1aca0e91e0dc98ed09e3"}, ] coverage = [ - {file = "coverage-6.4.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a9032f9b7d38bdf882ac9f66ebde3afb8145f0d4c24b2e600bc4c6304aafb87e"}, - {file = "coverage-6.4.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e0524adb49c716ca763dbc1d27bedce36b14f33e6b8af6dba56886476b42957c"}, - {file = "coverage-6.4.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d4548be38a1c810d79e097a38107b6bf2ff42151900e47d49635be69943763d8"}, - {file = "coverage-6.4.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f23876b018dfa5d3e98e96f5644b109090f16a4acb22064e0f06933663005d39"}, - {file = "coverage-6.4.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6fe75dcfcb889b6800f072f2af5a331342d63d0c1b3d2bf0f7b4f6c353e8c9c0"}, - {file = "coverage-6.4.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:2f8553878a24b00d5ab04b7a92a2af50409247ca5c4b7a2bf4eabe94ed20d3ee"}, - {file = "coverage-6.4.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:d774d9e97007b018a651eadc1b3970ed20237395527e22cbeb743d8e73e0563d"}, - {file = "coverage-6.4.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d56f105592188ce7a797b2bd94b4a8cb2e36d5d9b0d8a1d2060ff2a71e6b9bbc"}, - {file = "coverage-6.4.2-cp310-cp310-win32.whl", hash = "sha256:d230d333b0be8042ac34808ad722eabba30036232e7a6fb3e317c49f61c93386"}, - {file = "coverage-6.4.2-cp310-cp310-win_amd64.whl", hash = "sha256:5ef42e1db047ca42827a85e34abe973971c635f83aed49611b7f3ab49d0130f0"}, - {file = "coverage-6.4.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:25b7ec944f114f70803d6529394b64f8749e93cbfac0fe6c5ea1b7e6c14e8a46"}, - {file = "coverage-6.4.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7bb00521ab4f99fdce2d5c05a91bddc0280f0afaee0e0a00425e28e209d4af07"}, - {file = "coverage-6.4.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2dff52b3e7f76ada36f82124703f4953186d9029d00d6287f17c68a75e2e6039"}, - {file = "coverage-6.4.2-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:147605e1702d996279bb3cc3b164f408698850011210d133a2cb96a73a2f7996"}, - {file = "coverage-6.4.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:422fa44070b42fef9fb8dabd5af03861708cdd6deb69463adc2130b7bf81332f"}, - {file = "coverage-6.4.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:8af6c26ba8df6338e57bedbf916d76bdae6308e57fc8f14397f03b5da8622b4e"}, - {file = "coverage-6.4.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:5336e0352c0b12c7e72727d50ff02557005f79a0b8dcad9219c7c4940a930083"}, - {file = "coverage-6.4.2-cp37-cp37m-win32.whl", hash = "sha256:0f211df2cba951ffcae210ee00e54921ab42e2b64e0bf2c0befc977377fb09b7"}, - {file = "coverage-6.4.2-cp37-cp37m-win_amd64.whl", hash = "sha256:a13772c19619118903d65a91f1d5fea84be494d12fd406d06c849b00d31bf120"}, - {file = "coverage-6.4.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f7bd0ffbcd03dc39490a1f40b2669cc414fae0c4e16b77bb26806a4d0b7d1452"}, - {file = "coverage-6.4.2-cp38-cp38-macosx_11_0_arm64.whl", hash = 
"sha256:0895ea6e6f7f9939166cc835df8fa4599e2d9b759b02d1521b574e13b859ac32"}, - {file = "coverage-6.4.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d4e7ced84a11c10160c0697a6cc0b214a5d7ab21dfec1cd46e89fbf77cc66fae"}, - {file = "coverage-6.4.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:80db4a47a199c4563d4a25919ff29c97c87569130375beca3483b41ad5f698e8"}, - {file = "coverage-6.4.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3def6791adf580d66f025223078dc84c64696a26f174131059ce8e91452584e1"}, - {file = "coverage-6.4.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:4f89d8e03c8a3757aae65570d14033e8edf192ee9298303db15955cadcff0c63"}, - {file = "coverage-6.4.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:6d0b48aff8e9720bdec315d67723f0babd936a7211dc5df453ddf76f89c59933"}, - {file = "coverage-6.4.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2b20286c2b726f94e766e86a3fddb7b7e37af5d0c635bdfa7e4399bc523563de"}, - {file = "coverage-6.4.2-cp38-cp38-win32.whl", hash = "sha256:d714af0bdba67739598849c9f18efdcc5a0412f4993914a0ec5ce0f1e864d783"}, - {file = "coverage-6.4.2-cp38-cp38-win_amd64.whl", hash = "sha256:5f65e5d3ff2d895dab76b1faca4586b970a99b5d4b24e9aafffc0ce94a6022d6"}, - {file = "coverage-6.4.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a697977157adc052284a7160569b36a8bbec09db3c3220642e6323b47cec090f"}, - {file = "coverage-6.4.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c77943ef768276b61c96a3eb854eba55633c7a3fddf0a79f82805f232326d33f"}, - {file = "coverage-6.4.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:54d8d0e073a7f238f0666d3c7c0d37469b2aa43311e4024c925ee14f5d5a1cbe"}, - {file = "coverage-6.4.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f22325010d8824594820d6ce84fa830838f581a7fd86a9235f0d2ed6deb61e29"}, - {file = "coverage-6.4.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:24b04d305ea172ccb21bee5bacd559383cba2c6fcdef85b7701cf2de4188aa55"}, - {file = "coverage-6.4.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:866ebf42b4c5dbafd64455b0a1cd5aa7b4837a894809413b930026c91e18090b"}, - {file = "coverage-6.4.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:e36750fbbc422c1c46c9d13b937ab437138b998fe74a635ec88989afb57a3978"}, - {file = "coverage-6.4.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:79419370d6a637cb18553ecb25228893966bd7935a9120fa454e7076f13b627c"}, - {file = "coverage-6.4.2-cp39-cp39-win32.whl", hash = "sha256:b5e28db9199dd3833cc8a07fa6cf429a01227b5d429facb56eccd765050c26cd"}, - {file = "coverage-6.4.2-cp39-cp39-win_amd64.whl", hash = "sha256:edfdabe7aa4f97ed2b9dd5dde52d2bb29cb466993bb9d612ddd10d0085a683cf"}, - {file = "coverage-6.4.2-pp36.pp37.pp38-none-any.whl", hash = "sha256:e2618cb2cf5a7cc8d698306e42ebcacd02fb7ef8cfc18485c59394152c70be97"}, - {file = "coverage-6.4.2.tar.gz", hash = "sha256:6c3ccfe89c36f3e5b9837b9ee507472310164f352c9fe332120b764c9d60adbe"}, + {file = "coverage-6.4.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e7b4da9bafad21ea45a714d3ea6f3e1679099e420c8741c74905b92ee9bfa7cc"}, + {file = "coverage-6.4.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fde17bc42e0716c94bf19d92e4c9f5a00c5feb401f5bc01101fdf2a8b7cacf60"}, + {file = 
"coverage-6.4.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cdbb0d89923c80dbd435b9cf8bba0ff55585a3cdb28cbec65f376c041472c60d"}, + {file = "coverage-6.4.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:67f9346aeebea54e845d29b487eb38ec95f2ecf3558a3cffb26ee3f0dcc3e760"}, + {file = "coverage-6.4.4-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42c499c14efd858b98c4e03595bf914089b98400d30789511577aa44607a1b74"}, + {file = "coverage-6.4.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:c35cca192ba700979d20ac43024a82b9b32a60da2f983bec6c0f5b84aead635c"}, + {file = "coverage-6.4.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:9cc4f107009bca5a81caef2fca843dbec4215c05e917a59dec0c8db5cff1d2aa"}, + {file = "coverage-6.4.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:5f444627b3664b80d078c05fe6a850dd711beeb90d26731f11d492dcbadb6973"}, + {file = "coverage-6.4.4-cp310-cp310-win32.whl", hash = "sha256:66e6df3ac4659a435677d8cd40e8eb1ac7219345d27c41145991ee9bf4b806a0"}, + {file = "coverage-6.4.4-cp310-cp310-win_amd64.whl", hash = "sha256:35ef1f8d8a7a275aa7410d2f2c60fa6443f4a64fae9be671ec0696a68525b875"}, + {file = "coverage-6.4.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c1328d0c2f194ffda30a45f11058c02410e679456276bfa0bbe0b0ee87225fac"}, + {file = "coverage-6.4.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:61b993f3998ee384935ee423c3d40894e93277f12482f6e777642a0141f55782"}, + {file = "coverage-6.4.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d5dd4b8e9cd0deb60e6fcc7b0647cbc1da6c33b9e786f9c79721fd303994832f"}, + {file = "coverage-6.4.4-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7026f5afe0d1a933685d8f2169d7c2d2e624f6255fb584ca99ccca8c0e966fd7"}, + {file = "coverage-6.4.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:9c7b9b498eb0c0d48b4c2abc0e10c2d78912203f972e0e63e3c9dc21f15abdaa"}, + {file = "coverage-6.4.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:ee2b2fb6eb4ace35805f434e0f6409444e1466a47f620d1d5763a22600f0f892"}, + {file = "coverage-6.4.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:ab066f5ab67059d1f1000b5e1aa8bbd75b6ed1fc0014559aea41a9eb66fc2ce0"}, + {file = "coverage-6.4.4-cp311-cp311-win32.whl", hash = "sha256:9d6e1f3185cbfd3d91ac77ea065d85d5215d3dfa45b191d14ddfcd952fa53796"}, + {file = "coverage-6.4.4-cp311-cp311-win_amd64.whl", hash = "sha256:e3d3c4cc38b2882f9a15bafd30aec079582b819bec1b8afdbde8f7797008108a"}, + {file = "coverage-6.4.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a095aa0a996ea08b10580908e88fbaf81ecf798e923bbe64fb98d1807db3d68a"}, + {file = "coverage-6.4.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ef6f44409ab02e202b31a05dd6666797f9de2aa2b4b3534e9d450e42dea5e817"}, + {file = "coverage-6.4.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4b7101938584d67e6f45f0015b60e24a95bf8dea19836b1709a80342e01b472f"}, + {file = "coverage-6.4.4-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:14a32ec68d721c3d714d9b105c7acf8e0f8a4f4734c811eda75ff3718570b5e3"}, + {file = "coverage-6.4.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = 
"sha256:6a864733b22d3081749450466ac80698fe39c91cb6849b2ef8752fd7482011f3"}, + {file = "coverage-6.4.4-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:08002f9251f51afdcc5e3adf5d5d66bb490ae893d9e21359b085f0e03390a820"}, + {file = "coverage-6.4.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:a3b2752de32c455f2521a51bd3ffb53c5b3ae92736afde67ce83477f5c1dd928"}, + {file = "coverage-6.4.4-cp37-cp37m-win32.whl", hash = "sha256:f855b39e4f75abd0dfbcf74a82e84ae3fc260d523fcb3532786bcbbcb158322c"}, + {file = "coverage-6.4.4-cp37-cp37m-win_amd64.whl", hash = "sha256:ee6ae6bbcac0786807295e9687169fba80cb0617852b2fa118a99667e8e6815d"}, + {file = "coverage-6.4.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:564cd0f5b5470094df06fab676c6d77547abfdcb09b6c29c8a97c41ad03b103c"}, + {file = "coverage-6.4.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:cbbb0e4cd8ddcd5ef47641cfac97d8473ab6b132dd9a46bacb18872828031685"}, + {file = "coverage-6.4.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6113e4df2fa73b80f77663445be6d567913fb3b82a86ceb64e44ae0e4b695de1"}, + {file = "coverage-6.4.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8d032bfc562a52318ae05047a6eb801ff31ccee172dc0d2504614e911d8fa83e"}, + {file = "coverage-6.4.4-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e431e305a1f3126477abe9a184624a85308da8edf8486a863601d58419d26ffa"}, + {file = "coverage-6.4.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:cf2afe83a53f77aec067033199797832617890e15bed42f4a1a93ea24794ae3e"}, + {file = "coverage-6.4.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:783bc7c4ee524039ca13b6d9b4186a67f8e63d91342c713e88c1865a38d0892a"}, + {file = "coverage-6.4.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:ff934ced84054b9018665ca3967fc48e1ac99e811f6cc99ea65978e1d384454b"}, + {file = "coverage-6.4.4-cp38-cp38-win32.whl", hash = "sha256:e1fabd473566fce2cf18ea41171d92814e4ef1495e04471786cbc943b89a3781"}, + {file = "coverage-6.4.4-cp38-cp38-win_amd64.whl", hash = "sha256:4179502f210ebed3ccfe2f78bf8e2d59e50b297b598b100d6c6e3341053066a2"}, + {file = "coverage-6.4.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:98c0b9e9b572893cdb0a00e66cf961a238f8d870d4e1dc8e679eb8bdc2eb1b86"}, + {file = "coverage-6.4.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:fc600f6ec19b273da1d85817eda339fb46ce9eef3e89f220055d8696e0a06908"}, + {file = "coverage-6.4.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7a98d6bf6d4ca5c07a600c7b4e0c5350cd483c85c736c522b786be90ea5bac4f"}, + {file = "coverage-6.4.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:01778769097dbd705a24e221f42be885c544bb91251747a8a3efdec6eb4788f2"}, + {file = "coverage-6.4.4-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dfa0b97eb904255e2ab24166071b27408f1f69c8fbda58e9c0972804851e0558"}, + {file = "coverage-6.4.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:fcbe3d9a53e013f8ab88734d7e517eb2cd06b7e689bedf22c0eb68db5e4a0a19"}, + {file = "coverage-6.4.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:15e38d853ee224e92ccc9a851457fb1e1f12d7a5df5ae44544ce7863691c7a0d"}, + {file = "coverage-6.4.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:6913dddee2deff8ab2512639c5168c3e80b3ebb0f818fed22048ee46f735351a"}, + {file = "coverage-6.4.4-cp39-cp39-win32.whl", hash 
= "sha256:354df19fefd03b9a13132fa6643527ef7905712109d9c1c1903f2133d3a4e145"}, + {file = "coverage-6.4.4-cp39-cp39-win_amd64.whl", hash = "sha256:1238b08f3576201ebf41f7c20bf59baa0d05da941b123c6656e42cdb668e9827"}, + {file = "coverage-6.4.4-pp36.pp37.pp38-none-any.whl", hash = "sha256:f67cf9f406cf0d2f08a3515ce2db5b82625a7257f88aad87904674def6ddaec1"}, + {file = "coverage-6.4.4.tar.gz", hash = "sha256:e16c45b726acb780e1e6f88b286d3c10b3914ab03438f32117c4aa52d7f30d58"}, ] cryptography = [ {file = "cryptography-37.0.4-cp36-abi3-macosx_10_10_universal2.whl", hash = "sha256:549153378611c0cca1042f20fd9c5030d37a72f634c9326e225c9f666d472884"}, @@ -2049,17 +2060,27 @@ cx-logging = [ {file = "cx_Logging-3.0-cp39-cp39-win_amd64.whl", hash = "sha256:302e9c4f65a936c288a4fa59a90e7e142d9ef994aa29676731acafdcccdbb3f5"}, {file = "cx_Logging-3.0.tar.gz", hash = "sha256:ba8a7465facf7b98d8f494030fb481a2e8aeee29dc191e10383bb54ed42bdb34"}, ] -deprecated = [] -dill = [] +deprecated = [ + {file = "Deprecated-1.2.13-py2.py3-none-any.whl", hash = "sha256:64756e3e14c8c5eea9795d93c524551432a0be75629f8f29e67ab8caf076c76d"}, + {file = "Deprecated-1.2.13.tar.gz", hash = "sha256:43ac5335da90c31c24ba028af536a91d41d53f9e6901ddb021bcc572ce44e38d"}, +] +dill = [ + {file = "dill-0.3.5.1-py2.py3-none-any.whl", hash = "sha256:33501d03270bbe410c72639b350e941882a8b0fd55357580fbc873fba0c59302"}, + {file = "dill-0.3.5.1.tar.gz", hash = "sha256:d75e41f3eff1eee599d738e76ba8f4ad98ea229db8b085318aa2b3333a208c86"}, +] dnspython = [ {file = "dnspython-2.2.1-py3-none-any.whl", hash = "sha256:a851e51367fb93e9e1361732c1d60dab63eff98712e503ea7d92e6eccb109b4f"}, {file = "dnspython-2.2.1.tar.gz", hash = "sha256:0f7569a4a6ff151958b64304071d370daa3243d15941a7beedf0c9fe5105603e"}, ] docutils = [ - {file = "docutils-0.17.1-py2.py3-none-any.whl", hash = "sha256:cf316c8370a737a022b72b56874f6602acf974a37a9fba42ec2876387549fc61"}, - {file = "docutils-0.17.1.tar.gz", hash = "sha256:686577d2e4c32380bb50cbb22f575ed742d58168cee37e99117a854bcd88f125"}, + {file = "docutils-0.18.1-py2.py3-none-any.whl", hash = "sha256:23010f129180089fbcd3bc08cfefccb3b890b0050e1ca00c867036e9d161b98c"}, + {file = "docutils-0.18.1.tar.gz", hash = "sha256:679987caf361a7539d76e584cbeddc311e3aee937877c87346f31debc63e9d06"}, +] +dropbox = [ + {file = "dropbox-11.33.0-py2-none-any.whl", hash = "sha256:3ee9024631b80f18938556d5e27cbdede26d6dc0b73aeaa90fc075ce96c950b1"}, + {file = "dropbox-11.33.0-py3-none-any.whl", hash = "sha256:1a0cbc22b0d1dae96e18b37e3520e5c289de7eb1303935db40e4dbfc9bb9e59b"}, + {file = "dropbox-11.33.0.tar.gz", hash = "sha256:7c638b521169a460de38b9eaeb204fe918874f72d6c3eed005d064b6f37da9c1"}, ] -dropbox = [] enlighten = [ {file = "enlighten-1.10.2-py2.py3-none-any.whl", hash = "sha256:b237fe562b320bf9f1d4bb76d0c98e0daf914372a76ab87c35cd02f57aa9d8c1"}, {file = "enlighten-1.10.2.tar.gz", hash = "sha256:7a5b83cd0f4d095e59d80c648ebb5f7ffca0cd8bcf7ae6639828ee1ad000632a"}, @@ -2071,12 +2092,77 @@ flake8 = [ {file = "flake8-3.9.2-py2.py3-none-any.whl", hash = "sha256:bf8fd333346d844f616e8d47905ef3a3384edae6b4e9beb0c5101e25e3110907"}, {file = "flake8-3.9.2.tar.gz", hash = "sha256:07528381786f2a6237b061f6e96610a4167b226cb926e2aa2b6b1d78057c576b"}, ] -frozenlist = [] -ftrack-python-api = [] +frozenlist = [ + {file = "frozenlist-1.3.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:5f271c93f001748fc26ddea409241312a75e13466b06c94798d1a341cf0e6989"}, + {file = "frozenlist-1.3.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = 
"sha256:9c6ef8014b842f01f5d2b55315f1af5cbfde284eb184075c189fd657c2fd8204"}, + {file = "frozenlist-1.3.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:219a9676e2eae91cb5cc695a78b4cb43d8123e4160441d2b6ce8d2c70c60e2f3"}, + {file = "frozenlist-1.3.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b47d64cdd973aede3dd71a9364742c542587db214e63b7529fbb487ed67cddd9"}, + {file = "frozenlist-1.3.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2af6f7a4e93f5d08ee3f9152bce41a6015b5cf87546cb63872cc19b45476e98a"}, + {file = "frozenlist-1.3.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a718b427ff781c4f4e975525edb092ee2cdef6a9e7bc49e15063b088961806f8"}, + {file = "frozenlist-1.3.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c56c299602c70bc1bb5d1e75f7d8c007ca40c9d7aebaf6e4ba52925d88ef826d"}, + {file = "frozenlist-1.3.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:717470bfafbb9d9be624da7780c4296aa7935294bd43a075139c3d55659038ca"}, + {file = "frozenlist-1.3.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:31b44f1feb3630146cffe56344704b730c33e042ffc78d21f2125a6a91168131"}, + {file = "frozenlist-1.3.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:c3b31180b82c519b8926e629bf9f19952c743e089c41380ddca5db556817b221"}, + {file = "frozenlist-1.3.1-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:d82bed73544e91fb081ab93e3725e45dd8515c675c0e9926b4e1f420a93a6ab9"}, + {file = "frozenlist-1.3.1-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:49459f193324fbd6413e8e03bd65789e5198a9fa3095e03f3620dee2f2dabff2"}, + {file = "frozenlist-1.3.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:94e680aeedc7fd3b892b6fa8395b7b7cc4b344046c065ed4e7a1e390084e8cb5"}, + {file = "frozenlist-1.3.1-cp310-cp310-win32.whl", hash = "sha256:fabb953ab913dadc1ff9dcc3a7a7d3dc6a92efab3a0373989b8063347f8705be"}, + {file = "frozenlist-1.3.1-cp310-cp310-win_amd64.whl", hash = "sha256:eee0c5ecb58296580fc495ac99b003f64f82a74f9576a244d04978a7e97166db"}, + {file = "frozenlist-1.3.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0bc75692fb3770cf2b5856a6c2c9de967ca744863c5e89595df64e252e4b3944"}, + {file = "frozenlist-1.3.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:086ca1ac0a40e722d6833d4ce74f5bf1aba2c77cbfdc0cd83722ffea6da52a04"}, + {file = "frozenlist-1.3.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1b51eb355e7f813bcda00276b0114c4172872dc5fb30e3fea059b9367c18fbcb"}, + {file = "frozenlist-1.3.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:74140933d45271c1a1283f708c35187f94e1256079b3c43f0c2267f9db5845ff"}, + {file = "frozenlist-1.3.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ee4c5120ddf7d4dd1eaf079af3af7102b56d919fa13ad55600a4e0ebe532779b"}, + {file = "frozenlist-1.3.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:97d9e00f3ac7c18e685320601f91468ec06c58acc185d18bb8e511f196c8d4b2"}, + {file = "frozenlist-1.3.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:6e19add867cebfb249b4e7beac382d33215d6d54476bb6be46b01f8cafb4878b"}, + {file = "frozenlist-1.3.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:a027f8f723d07c3f21963caa7d585dcc9b089335565dabe9c814b5f70c52705a"}, + 
{file = "frozenlist-1.3.1-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:61d7857950a3139bce035ad0b0945f839532987dfb4c06cfe160254f4d19df03"}, + {file = "frozenlist-1.3.1-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:53b2b45052e7149ee8b96067793db8ecc1ae1111f2f96fe1f88ea5ad5fd92d10"}, + {file = "frozenlist-1.3.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:bbb1a71b1784e68870800b1bc9f3313918edc63dbb8f29fbd2e767ce5821696c"}, + {file = "frozenlist-1.3.1-cp37-cp37m-win32.whl", hash = "sha256:ab6fa8c7871877810e1b4e9392c187a60611fbf0226a9e0b11b7b92f5ac72792"}, + {file = "frozenlist-1.3.1-cp37-cp37m-win_amd64.whl", hash = "sha256:f89139662cc4e65a4813f4babb9ca9544e42bddb823d2ec434e18dad582543bc"}, + {file = "frozenlist-1.3.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:4c0c99e31491a1d92cde8648f2e7ccad0e9abb181f6ac3ddb9fc48b63301808e"}, + {file = "frozenlist-1.3.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:61e8cb51fba9f1f33887e22488bad1e28dd8325b72425f04517a4d285a04c519"}, + {file = "frozenlist-1.3.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:cc2f3e368ee5242a2cbe28323a866656006382872c40869b49b265add546703f"}, + {file = "frozenlist-1.3.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:58fb94a01414cddcdc6839807db77ae8057d02ddafc94a42faee6004e46c9ba8"}, + {file = "frozenlist-1.3.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:022178b277cb9277d7d3b3f2762d294f15e85cd2534047e68a118c2bb0058f3e"}, + {file = "frozenlist-1.3.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:572ce381e9fe027ad5e055f143763637dcbac2542cfe27f1d688846baeef5170"}, + {file = "frozenlist-1.3.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:19127f8dcbc157ccb14c30e6f00392f372ddb64a6ffa7106b26ff2196477ee9f"}, + {file = "frozenlist-1.3.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42719a8bd3792744c9b523674b752091a7962d0d2d117f0b417a3eba97d1164b"}, + {file = "frozenlist-1.3.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2743bb63095ef306041c8f8ea22bd6e4d91adabf41887b1ad7886c4c1eb43d5f"}, + {file = "frozenlist-1.3.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:fa47319a10e0a076709644a0efbcaab9e91902c8bd8ef74c6adb19d320f69b83"}, + {file = "frozenlist-1.3.1-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:52137f0aea43e1993264a5180c467a08a3e372ca9d378244c2d86133f948b26b"}, + {file = "frozenlist-1.3.1-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:f5abc8b4d0c5b556ed8cd41490b606fe99293175a82b98e652c3f2711b452988"}, + {file = "frozenlist-1.3.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:1e1cf7bc8cbbe6ce3881863671bac258b7d6bfc3706c600008925fb799a256e2"}, + {file = "frozenlist-1.3.1-cp38-cp38-win32.whl", hash = "sha256:0dde791b9b97f189874d654c55c24bf7b6782343e14909c84beebd28b7217845"}, + {file = "frozenlist-1.3.1-cp38-cp38-win_amd64.whl", hash = "sha256:9494122bf39da6422b0972c4579e248867b6b1b50c9b05df7e04a3f30b9a413d"}, + {file = "frozenlist-1.3.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:31bf9539284f39ff9398deabf5561c2b0da5bb475590b4e13dd8b268d7a3c5c1"}, + {file = "frozenlist-1.3.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e0c8c803f2f8db7217898d11657cb6042b9b0553a997c4a0601f48a691480fab"}, + {file = "frozenlist-1.3.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:da5ba7b59d954f1f214d352308d1d86994d713b13edd4b24a556bcc43d2ddbc3"}, + {file = 
"frozenlist-1.3.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:74e6b2b456f21fc93ce1aff2b9728049f1464428ee2c9752a4b4f61e98c4db96"}, + {file = "frozenlist-1.3.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:526d5f20e954d103b1d47232e3839f3453c02077b74203e43407b962ab131e7b"}, + {file = "frozenlist-1.3.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b499c6abe62a7a8d023e2c4b2834fce78a6115856ae95522f2f974139814538c"}, + {file = "frozenlist-1.3.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ab386503f53bbbc64d1ad4b6865bf001414930841a870fc97f1546d4d133f141"}, + {file = "frozenlist-1.3.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5f63c308f82a7954bf8263a6e6de0adc67c48a8b484fab18ff87f349af356efd"}, + {file = "frozenlist-1.3.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:12607804084d2244a7bd4685c9d0dca5df17a6a926d4f1967aa7978b1028f89f"}, + {file = "frozenlist-1.3.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:da1cdfa96425cbe51f8afa43e392366ed0b36ce398f08b60de6b97e3ed4affef"}, + {file = "frozenlist-1.3.1-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:f810e764617b0748b49a731ffaa525d9bb36ff38332411704c2400125af859a6"}, + {file = "frozenlist-1.3.1-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:35c3d79b81908579beb1fb4e7fcd802b7b4921f1b66055af2578ff7734711cfa"}, + {file = "frozenlist-1.3.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c92deb5d9acce226a501b77307b3b60b264ca21862bd7d3e0c1f3594022f01bc"}, + {file = "frozenlist-1.3.1-cp39-cp39-win32.whl", hash = "sha256:5e77a8bd41e54b05e4fb2708dc6ce28ee70325f8c6f50f3df86a44ecb1d7a19b"}, + {file = "frozenlist-1.3.1-cp39-cp39-win_amd64.whl", hash = "sha256:625d8472c67f2d96f9a4302a947f92a7adbc1e20bedb6aff8dbc8ff039ca6189"}, + {file = "frozenlist-1.3.1.tar.gz", hash = "sha256:3a735e4211a04ccfa3f4833547acdf5d2f863bfeb01cfd3edaffbc251f15cec8"}, +] +ftrack-python-api = [ + {file = "ftrack-python-api-2.3.3.tar.gz", hash = "sha256:358f37e5b1c5635eab107c19e27a0c890d512877f78af35b1ac416e90c037295"}, + {file = "ftrack_python_api-2.3.3-py2.py3-none-any.whl", hash = "sha256:82834c4d5def5557a2ea547a7e6f6ba84d3129e8f90457d8bbd85b287a2c39f6"}, +] future = [ {file = "future-0.18.2.tar.gz", hash = "sha256:b1bead90b70cf6ec3f0710ae53a525360fa360d306a86583adc6bf83a4db537d"}, ] -gazu = [] +gazu = [ + {file = "gazu-0.8.30-py2.py3-none-any.whl", hash = "sha256:d692927a11314151bc33e7d67edee634053f70a3b09e4500dfc6626bfea18753"}, +] gitdb = [ {file = "gitdb-4.0.9-py3-none-any.whl", hash = "sha256:8033ad4e853066ba6ca92050b9df2f89301b8fc8bf7e9324d412a63f8bf1a8fd"}, {file = "gitdb-4.0.9.tar.gz", hash = "sha256:bac2fd45c0a1c9cf619e63a90d62bdc63892ef92387424b855792a6cabe789aa"}, @@ -2094,14 +2180,17 @@ google-api-python-client = [ {file = "google_api_python_client-1.12.11-py2.py3-none-any.whl", hash = "sha256:7e0a1a265c8d3088ee1987778c72683fcb376e32bada8d7767162bd9c503fd9b"}, ] google-auth = [ - {file = "google-auth-2.9.1.tar.gz", hash = "sha256:14292fa3429f2bb1e99862554cde1ee730d6840ebae067814d3d15d8549c0888"}, - {file = "google_auth-2.9.1-py2.py3-none-any.whl", hash = "sha256:5a7eed0cb0e3a83989fad0b59fe1329dfc8c479543039cd6fd1e01e9adf39475"}, + {file = "google-auth-2.11.0.tar.gz", hash = "sha256:ed65ecf9f681832298e29328e1ef0a3676e3732b2e56f41532d45f70a22de0fb"}, + {file = "google_auth-2.11.0-py2.py3-none-any.whl", hash = 
"sha256:be62acaae38d0049c21ca90f27a23847245c9f161ff54ede13af2cb6afecbac9"}, ] google-auth-httplib2 = [ {file = "google-auth-httplib2-0.1.0.tar.gz", hash = "sha256:a07c39fd632becacd3f07718dfd6021bf396978f03ad3ce4321d060015cc30ac"}, {file = "google_auth_httplib2-0.1.0-py2.py3-none-any.whl", hash = "sha256:31e49c36c6b5643b57e82617cb3e021e3e1d2df9da63af67252c02fa9c1f4a10"}, ] -googleapis-common-protos = [] +googleapis-common-protos = [ + {file = "googleapis-common-protos-1.56.4.tar.gz", hash = "sha256:c25873c47279387cfdcbdafa36149887901d36202cb645a0e4f29686bf6e4417"}, + {file = "googleapis_common_protos-1.56.4-py2.py3-none-any.whl", hash = "sha256:8eb2cbc91b69feaf23e32452a7ae60e791e09967d81d4fcc7fc388182d1bd394"}, +] httplib2 = [ {file = "httplib2-0.20.4-py3-none-any.whl", hash = "sha256:8b6a905cb1c79eefd03f8669fd993c36dc341f7c558f056cb5a33b5c2f458543"}, {file = "httplib2-0.20.4.tar.gz", hash = "sha256:58a98e45b4b1a48273073f905d2961666ecf0fbac4250ea5b47aef259eb5c585"}, @@ -2118,9 +2207,6 @@ importlib-metadata = [ {file = "importlib_metadata-4.12.0-py3-none-any.whl", hash = "sha256:7401a975809ea1fdc658c3aa4f78cc2195a0e019c5cbc4c06122884e9ae80c23"}, {file = "importlib_metadata-4.12.0.tar.gz", hash = "sha256:637245b8bab2b6502fcbc752cc4b7a6f6243bb02b31c5c26156ad103d3d45670"}, ] -idna = [] -imagesize = [] -importlib-metadata = [] iniconfig = [ {file = "iniconfig-1.1.1-py2.py3-none-any.whl", hash = "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3"}, {file = "iniconfig-1.1.1.tar.gz", hash = "sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32"}, @@ -2133,12 +2219,18 @@ jedi = [ {file = "jedi-0.13.3-py2.py3-none-any.whl", hash = "sha256:2c6bcd9545c7d6440951b12b44d373479bf18123a401a52025cf98563fbd826c"}, {file = "jedi-0.13.3.tar.gz", hash = "sha256:2bb0603e3506f708e792c7f4ad8fc2a7a9d9c2d292a358fbbd58da531695595b"}, ] -jeepney = [] +jeepney = [ + {file = "jeepney-0.8.0-py3-none-any.whl", hash = "sha256:c0a454ad016ca575060802ee4d590dd912e35c122fa04e70306de3d076cce755"}, + {file = "jeepney-0.8.0.tar.gz", hash = "sha256:5efe48d255973902f6badc3ce55e2aa6c5c3b3bc642059ef3a91247bcfcc5806"}, +] jinja2 = [ {file = "Jinja2-2.11.3-py2.py3-none-any.whl", hash = "sha256:03e47ad063331dd6a3f04a43eddca8a966a26ba0c5b7207a9a9e4e08f1b29419"}, {file = "Jinja2-2.11.3.tar.gz", hash = "sha256:a6d58433de0ae800347cab1fa3043cebbabe8baa9d29e668f1c768cb87a333c6"}, ] -jinxed = [] +jinxed = [ + {file = "jinxed-1.2.0-py2.py3-none-any.whl", hash = "sha256:cfc2b2e4e3b4326954d546ba6d6b9a7a796ddcb0aef8d03161d005177eb0d48b"}, + {file = "jinxed-1.2.0.tar.gz", hash = "sha256:032acda92d5c57cd216033cbbd53de731e6ed50deb63eb4781336ca55f72cda5"}, +] jsonschema = [ {file = "jsonschema-2.6.0-py2.py3-none-any.whl", hash = "sha256:000e68abd33c972a5248544925a0cae7d1125f9bf6c58280d37546b946769a08"}, {file = "jsonschema-2.6.0.tar.gz", hash = "sha256:6ff5f3180870836cae40f06fa10419f557208175f13ad7bc26caa77beb1f6e02"}, @@ -2190,12 +2282,28 @@ log4mongo = [ {file = "log4mongo-1.7.0.tar.gz", hash = "sha256:dc374617206162a0b14167fbb5feac01dbef587539a235dadba6200362984a68"}, ] markupsafe = [ + {file = "MarkupSafe-2.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d8446c54dc28c01e5a2dbac5a25f071f6653e6e40f3a8818e8b45d790fe6ef53"}, + {file = "MarkupSafe-2.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:36bc903cbb393720fad60fc28c10de6acf10dc6cc883f3e24ee4012371399a38"}, + {file = "MarkupSafe-2.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:2d7d807855b419fc2ed3e631034685db6079889a1f01d5d9dac950f764da3dad"}, + {file = "MarkupSafe-2.0.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:add36cb2dbb8b736611303cd3bfcee00afd96471b09cda130da3581cbdc56a6d"}, + {file = "MarkupSafe-2.0.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:168cd0a3642de83558a5153c8bd34f175a9a6e7f6dc6384b9655d2697312a646"}, + {file = "MarkupSafe-2.0.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:4dc8f9fb58f7364b63fd9f85013b780ef83c11857ae79f2feda41e270468dd9b"}, + {file = "MarkupSafe-2.0.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:20dca64a3ef2d6e4d5d615a3fd418ad3bde77a47ec8a23d984a12b5b4c74491a"}, + {file = "MarkupSafe-2.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:cdfba22ea2f0029c9261a4bd07e830a8da012291fbe44dc794e488b6c9bb353a"}, + {file = "MarkupSafe-2.0.1-cp310-cp310-win32.whl", hash = "sha256:99df47edb6bda1249d3e80fdabb1dab8c08ef3975f69aed437cb69d0a5de1e28"}, + {file = "MarkupSafe-2.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:e0f138900af21926a02425cf736db95be9f4af72ba1bb21453432a07f6082134"}, {file = "MarkupSafe-2.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:f9081981fe268bd86831e5c75f7de206ef275defcb82bc70740ae6dc507aee51"}, {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:0955295dd5eec6cb6cc2fe1698f4c6d84af2e92de33fbcac4111913cd100a6ff"}, {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:0446679737af14f45767963a1a9ef7620189912317d095f2d9ffa183a4d25d2b"}, {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:f826e31d18b516f653fe296d967d700fddad5901ae07c622bb3705955e1faa94"}, {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:fa130dd50c57d53368c9d59395cb5526eda596d3ffe36666cd81a44d56e48872"}, {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:905fec760bd2fa1388bb5b489ee8ee5f7291d692638ea5f67982d968366bef9f"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bf5d821ffabf0ef3533c39c518f3357b171a1651c1ff6827325e4489b0e46c3c"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:0d4b31cc67ab36e3392bbf3862cfbadac3db12bdd8b02a2731f509ed5b829724"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:baa1a4e8f868845af802979fcdbf0bb11f94f1cb7ced4c4b8a351bb60d108145"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:deb993cacb280823246a026e3b2d81c493c53de6acfd5e6bfe31ab3402bb37dd"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:63f3268ba69ace99cab4e3e3b5840b03340efed0948ab8f78d2fd87ee5442a4f"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:8d206346619592c6200148b01a2142798c989edcb9c896f9ac9722a99d4e77e6"}, {file = "MarkupSafe-2.0.1-cp36-cp36m-win32.whl", hash = "sha256:6c4ca60fa24e85fe25b912b01e62cb969d69a23a5d5867682dd3e80b5b02581d"}, {file = "MarkupSafe-2.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:b2f4bf27480f5e5e8ce285a8c8fd176c0b03e93dcc6646477d4630e83440c6a9"}, {file = "MarkupSafe-2.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0717a7390a68be14b8c793ba258e075c6f4ca819f15edfc2a3a027c823718567"}, @@ -2204,14 +2312,27 @@ 
markupsafe = [ {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:d7f9850398e85aba693bb640262d3611788b1f29a79f0c93c565694658f4071f"}, {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:6a7fae0dd14cf60ad5ff42baa2e95727c3d81ded453457771d02b7d2b3f9c0c2"}, {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:b7f2d075102dc8c794cbde1947378051c4e5180d52d276987b8d28a3bd58c17d"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e9936f0b261d4df76ad22f8fee3ae83b60d7c3e871292cd42f40b81b70afae85"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:2a7d351cbd8cfeb19ca00de495e224dea7e7d919659c2841bbb7f420ad03e2d6"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:60bf42e36abfaf9aff1f50f52644b336d4f0a3fd6d8a60ca0d054ac9f713a864"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d6c7ebd4e944c85e2c3421e612a7057a2f48d478d79e61800d81468a8d842207"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:f0567c4dc99f264f49fe27da5f735f414c4e7e7dd850cfd8e69f0862d7c74ea9"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:89c687013cb1cd489a0f0ac24febe8c7a666e6e221b783e53ac50ebf68e45d86"}, {file = "MarkupSafe-2.0.1-cp37-cp37m-win32.whl", hash = "sha256:a30e67a65b53ea0a5e62fe23682cfe22712e01f453b95233b25502f7c61cb415"}, {file = "MarkupSafe-2.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:611d1ad9a4288cf3e3c16014564df047fe08410e628f89805e475368bd304914"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5bb28c636d87e840583ee3adeb78172efc47c8b26127267f54a9c0ec251d41a9"}, {file = "MarkupSafe-2.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:be98f628055368795d818ebf93da628541e10b75b41c559fdf36d104c5787066"}, {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux1_i686.whl", hash = "sha256:1d609f577dc6e1aa17d746f8bd3c31aa4d258f4070d61b2aa5c4166c1539de35"}, {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7d91275b0245b1da4d4cfa07e0faedd5b0812efc15b702576d103293e252af1b"}, {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:01a9b8ea66f1658938f65b93a85ebe8bc016e6769611be228d797c9d998dd298"}, {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:47ab1e7b91c098ab893b828deafa1203de86d0bc6ab587b160f78fe6c4011f75"}, {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:97383d78eb34da7e1fa37dd273c20ad4320929af65d156e35a5e2d89566d9dfb"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6fcf051089389abe060c9cd7caa212c707e58153afa2c649f00346ce6d260f1b"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:5855f8438a7d1d458206a2466bf82b0f104a3724bf96a1c781ab731e4201731a"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:3dd007d54ee88b46be476e293f48c85048603f5f516008bee124ddd891398ed6"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:aca6377c0cb8a8253e493c6b451565ac77e98c2951c45f913e0b52facdcff83f"}, + {file = 
"MarkupSafe-2.0.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:04635854b943835a6ea959e948d19dcd311762c5c0c6e1f0e16ee57022669194"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6300b8454aa6930a24b9618fbb54b5a68135092bc666f7b06901f897fa5c2fee"}, {file = "MarkupSafe-2.0.1-cp38-cp38-win32.whl", hash = "sha256:023cb26ec21ece8dc3907c0e8320058b2e0cb3c55cf9564da612bc325bed5e64"}, {file = "MarkupSafe-2.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:984d76483eb32f1bcb536dc27e4ad56bba4baa70be32fa87152832cdd9db0833"}, {file = "MarkupSafe-2.0.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:2ef54abee730b502252bcdf31b10dacb0a416229b72c18b19e24a4509f273d26"}, @@ -2221,6 +2342,12 @@ markupsafe = [ {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:4efca8f86c54b22348a5467704e3fec767b2db12fc39c6d963168ab1d3fc9135"}, {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:ab3ef638ace319fa26553db0624c4699e31a28bb2a835c5faca8f8acf6a5a902"}, {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:f8ba0e8349a38d3001fae7eadded3f6606f0da5d748ee53cc1dab1d6527b9509"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c47adbc92fc1bb2b3274c4b3a43ae0e4573d9fbff4f54cd484555edbf030baf1"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:37205cac2a79194e3750b0af2a5720d95f786a55ce7df90c3af697bfa100eaac"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:1f2ade76b9903f39aa442b4aadd2177decb66525062db244b35d71d0ee8599b6"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:4296f2b1ce8c86a6aea78613c34bb1a672ea0e3de9c6ba08a960efe0b0a09047"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f02365d4e99430a12647f09b6cc8bab61a6564363f313126f775eb4f6ef798e"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5b6d930f030f8ed98e3e6c98ffa0652bdb82601e7a016ec2ab5d7ff23baa78d1"}, {file = "MarkupSafe-2.0.1-cp39-cp39-win32.whl", hash = "sha256:10f82115e21dc0dfec9ab5c0223652f7197feb168c940f3ef61563fc2d6beb74"}, {file = "MarkupSafe-2.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:693ce3f9e70a6cf7d2fb9e6c9d8b204b6b39897a2c4a1aa65728d5ac97dcc1d8"}, {file = "MarkupSafe-2.0.1.tar.gz", hash = "sha256:594c67807fb16238b30c44bdf74f36c02cdf22d1c8cda91ef8a0ed8dabf5620a"}, @@ -2295,7 +2422,10 @@ packaging = [ {file = "packaging-21.3-py3-none-any.whl", hash = "sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522"}, {file = "packaging-21.3.tar.gz", hash = "sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb"}, ] -paramiko = [] +paramiko = [ + {file = "paramiko-2.11.0-py2.py3-none-any.whl", hash = "sha256:655f25dc8baf763277b933dfcea101d636581df8d6b9774d1fb653426b72c270"}, + {file = "paramiko-2.11.0.tar.gz", hash = "sha256:003e6bee7c034c21fbb051bf83dc0a9ee4106204dd3c53054c71452cc4ec3938"}, +] parso = [ {file = "parso-0.8.3-py2.py3-none-any.whl", hash = "sha256:c001d4636cd3aecdaf33cbb40aebb59b094be2a74c556778ef5576c175e19e75"}, {file = "parso-0.8.3.tar.gz", hash = "sha256:8c07be290bb59f03588915921e29e8a50002acaf2cdc5fa0e0114f91709fafa0"}, @@ -2304,8 +2434,70 @@ pathlib2 = [ {file = "pathlib2-2.3.7.post1-py2.py3-none-any.whl", hash = 
"sha256:5266a0fd000452f1b3467d782f079a4343c63aaa119221fbdc4e39577489ca5b"}, {file = "pathlib2-2.3.7.post1.tar.gz", hash = "sha256:9fe0edad898b83c0c3e199c842b27ed216645d2e177757b2dd67384d4113c641"}, ] -pillow = [] -platformdirs = [] +pillow = [ + {file = "Pillow-9.2.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:a9c9bc489f8ab30906d7a85afac4b4944a572a7432e00698a7239f44a44e6efb"}, + {file = "Pillow-9.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:510cef4a3f401c246cfd8227b300828715dd055463cdca6176c2e4036df8bd4f"}, + {file = "Pillow-9.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7888310f6214f19ab2b6df90f3f06afa3df7ef7355fc025e78a3044737fab1f5"}, + {file = "Pillow-9.2.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:831e648102c82f152e14c1a0938689dbb22480c548c8d4b8b248b3e50967b88c"}, + {file = "Pillow-9.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1cc1d2451e8a3b4bfdb9caf745b58e6c7a77d2e469159b0d527a4554d73694d1"}, + {file = "Pillow-9.2.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:136659638f61a251e8ed3b331fc6ccd124590eeff539de57c5f80ef3a9594e58"}, + {file = "Pillow-9.2.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:6e8c66f70fb539301e064f6478d7453e820d8a2c631da948a23384865cd95544"}, + {file = "Pillow-9.2.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:37ff6b522a26d0538b753f0b4e8e164fdada12db6c6f00f62145d732d8a3152e"}, + {file = "Pillow-9.2.0-cp310-cp310-win32.whl", hash = "sha256:c79698d4cd9318d9481d89a77e2d3fcaeff5486be641e60a4b49f3d2ecca4e28"}, + {file = "Pillow-9.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:254164c57bab4b459f14c64e93df11eff5ded575192c294a0c49270f22c5d93d"}, + {file = "Pillow-9.2.0-cp311-cp311-macosx_10_10_universal2.whl", hash = "sha256:408673ed75594933714482501fe97e055a42996087eeca7e5d06e33218d05aa8"}, + {file = "Pillow-9.2.0-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:727dd1389bc5cb9827cbd1f9d40d2c2a1a0c9b32dd2261db522d22a604a6eec9"}, + {file = "Pillow-9.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50dff9cc21826d2977ef2d2a205504034e3a4563ca6f5db739b0d1026658e004"}, + {file = "Pillow-9.2.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cb6259196a589123d755380b65127ddc60f4c64b21fc3bb46ce3a6ea663659b0"}, + {file = "Pillow-9.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b0554af24df2bf96618dac71ddada02420f946be943b181108cac55a7a2dcd4"}, + {file = "Pillow-9.2.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:15928f824870535c85dbf949c09d6ae7d3d6ac2d6efec80f3227f73eefba741c"}, + {file = "Pillow-9.2.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:bdd0de2d64688ecae88dd8935012c4a72681e5df632af903a1dca8c5e7aa871a"}, + {file = "Pillow-9.2.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d5b87da55a08acb586bad5c3aa3b86505f559b84f39035b233d5bf844b0834b1"}, + {file = "Pillow-9.2.0-cp311-cp311-win32.whl", hash = "sha256:b6d5e92df2b77665e07ddb2e4dbd6d644b78e4c0d2e9272a852627cdba0d75cf"}, + {file = "Pillow-9.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:6bf088c1ce160f50ea40764f825ec9b72ed9da25346216b91361eef8ad1b8f8c"}, + {file = "Pillow-9.2.0-cp37-cp37m-macosx_10_10_x86_64.whl", hash = "sha256:2c58b24e3a63efd22554c676d81b0e57f80e0a7d3a5874a7e14ce90ec40d3069"}, + {file = "Pillow-9.2.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:eef7592281f7c174d3d6cbfbb7ee5984a671fcd77e3fc78e973d492e9bf0eb3f"}, + {file = "Pillow-9.2.0-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dcd7b9c7139dc8258d164b55696ecd16c04607f1cc33ba7af86613881ffe4ac8"}, + {file = "Pillow-9.2.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a138441e95562b3c078746a22f8fca8ff1c22c014f856278bdbdd89ca36cff1b"}, + {file = "Pillow-9.2.0-cp37-cp37m-manylinux_2_28_aarch64.whl", hash = "sha256:93689632949aff41199090eff5474f3990b6823404e45d66a5d44304e9cdc467"}, + {file = "Pillow-9.2.0-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:f3fac744f9b540148fa7715a435d2283b71f68bfb6d4aae24482a890aed18b59"}, + {file = "Pillow-9.2.0-cp37-cp37m-win32.whl", hash = "sha256:fa768eff5f9f958270b081bb33581b4b569faabf8774726b283edb06617101dc"}, + {file = "Pillow-9.2.0-cp37-cp37m-win_amd64.whl", hash = "sha256:69bd1a15d7ba3694631e00df8de65a8cb031911ca11f44929c97fe05eb9b6c1d"}, + {file = "Pillow-9.2.0-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:030e3460861488e249731c3e7ab59b07c7853838ff3b8e16aac9561bb345da14"}, + {file = "Pillow-9.2.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:74a04183e6e64930b667d321524e3c5361094bb4af9083db5c301db64cd341f3"}, + {file = "Pillow-9.2.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2d33a11f601213dcd5718109c09a52c2a1c893e7461f0be2d6febc2879ec2402"}, + {file = "Pillow-9.2.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1fd6f5e3c0e4697fa7eb45b6e93996299f3feee73a3175fa451f49a74d092b9f"}, + {file = "Pillow-9.2.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a647c0d4478b995c5e54615a2e5360ccedd2f85e70ab57fbe817ca613d5e63b8"}, + {file = "Pillow-9.2.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:4134d3f1ba5f15027ff5c04296f13328fecd46921424084516bdb1b2548e66ff"}, + {file = "Pillow-9.2.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:bc431b065722a5ad1dfb4df354fb9333b7a582a5ee39a90e6ffff688d72f27a1"}, + {file = "Pillow-9.2.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:1536ad017a9f789430fb6b8be8bf99d2f214c76502becc196c6f2d9a75b01b76"}, + {file = "Pillow-9.2.0-cp38-cp38-win32.whl", hash = "sha256:2ad0d4df0f5ef2247e27fc790d5c9b5a0af8ade9ba340db4a73bb1a4a3e5fb4f"}, + {file = "Pillow-9.2.0-cp38-cp38-win_amd64.whl", hash = "sha256:ec52c351b35ca269cb1f8069d610fc45c5bd38c3e91f9ab4cbbf0aebc136d9c8"}, + {file = "Pillow-9.2.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:0ed2c4ef2451de908c90436d6e8092e13a43992f1860275b4d8082667fbb2ffc"}, + {file = "Pillow-9.2.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4ad2f835e0ad81d1689f1b7e3fbac7b01bb8777d5a985c8962bedee0cc6d43da"}, + {file = "Pillow-9.2.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ea98f633d45f7e815db648fd7ff0f19e328302ac36427343e4432c84432e7ff4"}, + {file = "Pillow-9.2.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7761afe0126d046974a01e030ae7529ed0ca6a196de3ec6937c11df0df1bc91c"}, + {file = "Pillow-9.2.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9a54614049a18a2d6fe156e68e188da02a046a4a93cf24f373bffd977e943421"}, + {file = "Pillow-9.2.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:5aed7dde98403cd91d86a1115c78d8145c83078e864c1de1064f52e6feb61b20"}, + {file = "Pillow-9.2.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:13b725463f32df1bfeacbf3dd197fb358ae8ebcd8c5548faa75126ea425ccb60"}, + {file = 
"Pillow-9.2.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:808add66ea764ed97d44dda1ac4f2cfec4c1867d9efb16a33d158be79f32b8a4"}, + {file = "Pillow-9.2.0-cp39-cp39-win32.whl", hash = "sha256:337a74fd2f291c607d220c793a8135273c4c2ab001b03e601c36766005f36885"}, + {file = "Pillow-9.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:fac2d65901fb0fdf20363fbd345c01958a742f2dc62a8dd4495af66e3ff502a4"}, + {file = "Pillow-9.2.0-pp37-pypy37_pp73-macosx_10_10_x86_64.whl", hash = "sha256:ad2277b185ebce47a63f4dc6302e30f05762b688f8dc3de55dbae4651872cdf3"}, + {file = "Pillow-9.2.0-pp37-pypy37_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7c7b502bc34f6e32ba022b4a209638f9e097d7a9098104ae420eb8186217ebbb"}, + {file = "Pillow-9.2.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d1f14f5f691f55e1b47f824ca4fdcb4b19b4323fe43cc7bb105988cad7496be"}, + {file = "Pillow-9.2.0-pp37-pypy37_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:dfe4c1fedfde4e2fbc009d5ad420647f7730d719786388b7de0999bf32c0d9fd"}, + {file = "Pillow-9.2.0-pp38-pypy38_pp73-macosx_10_10_x86_64.whl", hash = "sha256:f07f1f00e22b231dd3d9b9208692042e29792d6bd4f6639415d2f23158a80013"}, + {file = "Pillow-9.2.0-pp38-pypy38_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1802f34298f5ba11d55e5bb09c31997dc0c6aed919658dfdf0198a2fe75d5490"}, + {file = "Pillow-9.2.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17d4cafe22f050b46d983b71c707162d63d796a1235cdf8b9d7a112e97b15bac"}, + {file = "Pillow-9.2.0-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:96b5e6874431df16aee0c1ba237574cb6dff1dcb173798faa6a9d8b399a05d0e"}, + {file = "Pillow-9.2.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:0030fdbd926fb85844b8b92e2f9449ba89607231d3dd597a21ae72dc7fe26927"}, + {file = "Pillow-9.2.0.tar.gz", hash = "sha256:75e636fd3e0fb872693f23ccb8a5ff2cd578801251f3a4f6854c6a5d437d3c04"}, +] +platformdirs = [ + {file = "platformdirs-2.5.2-py3-none-any.whl", hash = "sha256:027d8e83a2d7de06bbac4e5ef7e023c02b863d7ea5d079477e722bb41ab25788"}, + {file = "platformdirs-2.5.2.tar.gz", hash = "sha256:58c8abb07dcb441e6ee4b11d8df0ac856038f944ab98b7be6b27b2a3c7feef19"}, +] pluggy = [ {file = "pluggy-1.0.0-py2.py3-none-any.whl", hash = "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3"}, {file = "pluggy-1.0.0.tar.gz", hash = "sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159"}, @@ -2318,7 +2510,22 @@ prefixed = [ {file = "prefixed-0.3.2-py2.py3-none-any.whl", hash = "sha256:5e107306462d63f2f03c529dbf11b0026fdfec621a9a008ca639d71de22995c3"}, {file = "prefixed-0.3.2.tar.gz", hash = "sha256:ca48277ba5fa8346dd4b760847da930c7b84416387c39e93affef086add2c029"}, ] -protobuf = [] +protobuf = [ + {file = "protobuf-4.21.5-cp310-abi3-win32.whl", hash = "sha256:5310cbe761e87f0c1decce019d23f2101521d4dfff46034f8a12a53546036ec7"}, + {file = "protobuf-4.21.5-cp310-abi3-win_amd64.whl", hash = "sha256:e5c5a2886ae48d22a9d32fbb9b6636a089af3cd26b706750258ce1ca96cc0116"}, + {file = "protobuf-4.21.5-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:ee04f5823ed98bb9a8c3b1dc503c49515e0172650875c3f76e225b223793a1f2"}, + {file = "protobuf-4.21.5-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:b04484d6f42f48c57dd2737a72692f4c6987529cdd148fb5b8e5f616862a2e37"}, + {file = "protobuf-4.21.5-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:5e0b272217aad8971763960238c1a1e6a65d50ef7824e23300da97569a251c55"}, + {file = "protobuf-4.21.5-cp37-cp37m-win32.whl", hash = 
"sha256:5eb0724615e90075f1d763983e708e1cef08e66b1891d8b8b6c33bc3b2f1a02b"}, + {file = "protobuf-4.21.5-cp37-cp37m-win_amd64.whl", hash = "sha256:011c0f267e85f5d73750b6c25f0155d5db1e9443cd3590ab669a6221dd8fcdb0"}, + {file = "protobuf-4.21.5-cp38-cp38-win32.whl", hash = "sha256:7b6f22463e2d1053d03058b7b4ceca6e4ed4c14f8c286c32824df751137bf8e7"}, + {file = "protobuf-4.21.5-cp38-cp38-win_amd64.whl", hash = "sha256:b52e7a522911a40445a5f588bd5b5e584291bfc5545e09b7060685e4b2ff814f"}, + {file = "protobuf-4.21.5-cp39-cp39-win32.whl", hash = "sha256:a7faa62b183d6a928e3daffd06af843b4287d16ef6e40f331575ecd236a7974d"}, + {file = "protobuf-4.21.5-cp39-cp39-win_amd64.whl", hash = "sha256:5e0ce02418ef03d7657a420ae8fd6fec4995ac713a3cb09164e95f694dbcf085"}, + {file = "protobuf-4.21.5-py2.py3-none-any.whl", hash = "sha256:bf711b451212dc5b0fa45ae7dada07d8e71a4b0ff0bc8e4783ee145f47ac4f82"}, + {file = "protobuf-4.21.5-py3-none-any.whl", hash = "sha256:3ec6f5b37935406bb9df9b277e79f8ed81d697146e07ef2ba8a5a272fb24b2c9"}, + {file = "protobuf-4.21.5.tar.gz", hash = "sha256:eb1106e87e095628e96884a877a51cdb90087106ee693925ec0a300468a9be3a"}, +] py = [ {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"}, {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"}, @@ -2377,8 +2584,14 @@ pyflakes = [ {file = "pyflakes-2.3.1-py2.py3-none-any.whl", hash = "sha256:7893783d01b8a89811dd72d7dfd4d84ff098e5eed95cfa8905b22bbffe52efc3"}, {file = "pyflakes-2.3.1.tar.gz", hash = "sha256:f5bc8ecabc05bb9d291eb5203d6810b49040f6ff446a756326104746cc00c1db"}, ] -pygments = [] -pylint = [] +pygments = [ + {file = "Pygments-2.13.0-py3-none-any.whl", hash = "sha256:f643f331ab57ba3c9d89212ee4a2dabc6e94f117cf4eefde99a0574720d14c42"}, + {file = "Pygments-2.13.0.tar.gz", hash = "sha256:56a8508ae95f98e2b9bdf93a6be5ae3f7d8af858b43e02c5a2ff083726be40c1"}, +] +pylint = [ + {file = "pylint-2.15.0-py3-none-any.whl", hash = "sha256:4b124affc198b7f7c9b5f9ab690d85db48282a025ef9333f51d2d7281b92a6c3"}, + {file = "pylint-2.15.0.tar.gz", hash = "sha256:4f3f7e869646b0bd63b3dfb79f3c0f28fc3d2d923ea220d52620fd625aed92b0"}, +] pymongo = [ {file = "pymongo-3.12.3-cp27-cp27m-macosx_10_14_intel.whl", hash = "sha256:c164eda0be9048f83c24b9b2656900041e069ddf72de81c17d874d0c32f6079f"}, {file = "pymongo-3.12.3-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:a055d29f1302892a9389a382bed10a3f77708bcf3e49bfb76f7712fa5f391cc6"}, @@ -2505,10 +2718,42 @@ pynput = [ {file = "pynput-1.7.6-py3.9.egg", hash = "sha256:264429fbe676e98e9050ad26a7017453bdd08768adb25cafb918347cf9f1eb4a"}, {file = "pynput-1.7.6.tar.gz", hash = "sha256:3a5726546da54116b687785d38b1db56997ce1d28e53e8d22fc656d8b92e533c"}, ] -pyobjc-core = [] -pyobjc-framework-applicationservices = [] -pyobjc-framework-cocoa = [] -pyobjc-framework-quartz = [] +pyobjc-core = [ + {file = "pyobjc-core-8.5.tar.gz", hash = "sha256:704c275439856c0d1287469f0d589a7d808d48b754a93d9ce5415d4eaf06d576"}, + {file = "pyobjc_core-8.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0c234143b48334443f5adcf26e668945a6d47bc1fa6223e80918c6c735a029d9"}, + {file = "pyobjc_core-8.5-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:1486ee533f0d76f666804ce89723ada4db56bfde55e56151ba512d3f849857f8"}, + {file = "pyobjc_core-8.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:412de06dfa728301c04b3e46fd7453320a8ae8b862e85236e547cd797a73b490"}, + {file = "pyobjc_core-8.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = 
"sha256:0b3e09cccb1be574a82cc9f929ae27fc4283eccc75496cb5d51534caa6bb83a3"}, + {file = "pyobjc_core-8.5-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:eeafe21f879666ab7f57efcc6b007c9f5f8733d367b7e380c925203ed83f000d"}, + {file = "pyobjc_core-8.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c0071686976d7ea8c14690950e504a13cb22b4ebb2bc7b5ec47c1c1c0f6eff41"}, +] +pyobjc-framework-applicationservices = [ + {file = "pyobjc-framework-ApplicationServices-8.5.tar.gz", hash = "sha256:fa3015ef8e3add90af3447d7fdcc7f8dd083cc2a1d58f99a569480a2df10d2b1"}, + {file = "pyobjc_framework_ApplicationServices-8.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:436b16ebe448a829a8312e10208eec81a2adcae1fff674dbcc3262e1bd76e0ca"}, + {file = "pyobjc_framework_ApplicationServices-8.5-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:408958d14aa7fcf46f2163754c211078bc63be1368934d86188202914dce077d"}, + {file = "pyobjc_framework_ApplicationServices-8.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:1d6cd4ce192859a22e208da4d7177a1c3ceb1ef2f64c339fd881102b1210cadd"}, + {file = "pyobjc_framework_ApplicationServices-8.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0251d092adb1d2d116fd9f147ceef0e53b158a46c21245131c40b9d7b786d0db"}, + {file = "pyobjc_framework_ApplicationServices-8.5-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:9742e69fe6d4545d0e02b0ad0a7a2432bc9944569ee07d6e90ffa5ef614df9f7"}, + {file = "pyobjc_framework_ApplicationServices-8.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:16f5677c14ea903c6aaca1dd121521825c39e816cae696d6ae32c0b287252ab2"}, +] +pyobjc-framework-cocoa = [ + {file = "pyobjc-framework-Cocoa-8.5.tar.gz", hash = "sha256:569bd3a020f64b536fb2d1c085b37553e50558c9f907e08b73ffc16ae68e1861"}, + {file = "pyobjc_framework_Cocoa-8.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7a7c160416696bf6035dfcdf0e603aaa52858d6afcddfcc5ab41733619ac2529"}, + {file = "pyobjc_framework_Cocoa-8.5-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:6ceba444282030be8596b812260e8d28b671254a51052ad778d32da6e17db847"}, + {file = "pyobjc_framework_Cocoa-8.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:f46b2b161b8dd40c7b9e00bc69636c3e6480b2704a69aee22ee0154befbe163a"}, + {file = "pyobjc_framework_Cocoa-8.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b31d425aee8698cbf62b187338f5ca59427fa4dca2153a73866f7cb410713119"}, + {file = "pyobjc_framework_Cocoa-8.5-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:898359ac1f76eedec8aa156847682378a8950824421c40edb89391286e607dc4"}, + {file = "pyobjc_framework_Cocoa-8.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:baa2947f76b119a3360973d74d57d6dada87ac527bab9a88f31596af392f123c"}, +] +pyobjc-framework-quartz = [ + {file = "pyobjc-framework-Quartz-8.5.tar.gz", hash = "sha256:d2bc5467a792ddc04814f12a1e9c2fcaf699a1c3ad3d4264cfdce6b9c7b10624"}, + {file = "pyobjc_framework_Quartz-8.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e9f0fb663f7872c9de94169031ac42b91ad01bd4cad49a9f1a0164be8f028426"}, + {file = "pyobjc_framework_Quartz-8.5-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:567eec91287cfe9a1b6433717192c585935de8f3daa28d82ce72fdd6c7ac00f6"}, + {file = "pyobjc_framework_Quartz-8.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:9f910ab41a712ffc7a8c3e3716a2d6f39ea4419004b26a2fd2d2f740ff5c262c"}, + {file = "pyobjc_framework_Quartz-8.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:29d07066781628278bf0e5278abcfc96ef6724c66c5629a0b4c214d319a82e55"}, + {file = 
"pyobjc_framework_Quartz-8.5-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:72abcde1a3d72be11f2c881c9b9872044c8f2de86d2047b67fe771713638b107"}, + {file = "pyobjc_framework_Quartz-8.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:8809b9a2df2f461697bdb45b6d1b5a4f881f88f09450e3990858e64e3e26c530"}, +] pyparsing = [ {file = "pyparsing-2.4.7-py2.py3-none-any.whl", hash = "sha256:ef9d7589ef3c200abe66653d3f1ab1033c3c419ae9b9bdb1240a85b024efc88b"}, {file = "pyparsing-2.4.7.tar.gz", hash = "sha256:c203ec8783bf771a155b207279b9bccb8dea02d8f0c9e5f8ead507bc3246ecc1"}, @@ -2532,8 +2777,14 @@ python-dateutil = [ {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, ] -python-engineio = [] -python-socketio = [] +python-engineio = [ + {file = "python-engineio-3.14.2.tar.gz", hash = "sha256:eab4553f2804c1ce97054c8b22cf0d5a9ab23128075248b97e1a5b2f29553085"}, + {file = "python_engineio-3.14.2-py2.py3-none-any.whl", hash = "sha256:5a9e6086d192463b04a1428ff1f85b6ba631bbb19d453b144ffc04f530542b84"}, +] +python-socketio = [ + {file = "python-socketio-4.6.1.tar.gz", hash = "sha256:cd1f5aa492c1eb2be77838e837a495f117e17f686029ebc03d62c09e33f4fa10"}, + {file = "python_socketio-4.6.1-py2.py3-none-any.whl", hash = "sha256:5a21da53fdbdc6bb6c8071f40e13d100e0b279ad997681c2492478e06f370523"}, +] python-xlib = [ {file = "python-xlib-0.31.tar.gz", hash = "sha256:74d83a081f532bc07f6d7afcd6416ec38403d68f68b9b9dc9e1f28fbf2d799e9"}, {file = "python_xlib-0.31-py2.py3-none-any.whl", hash = "sha256:1ec6ce0de73d9e6592ead666779a5732b384e5b8fb1f1886bd0a81cafa477759"}, @@ -2541,7 +2792,10 @@ python-xlib = [ python3-xlib = [ {file = "python3-xlib-0.15.tar.gz", hash = "sha256:dc4245f3ae4aa5949c1d112ee4723901ade37a96721ba9645f2bfa56e5b383f8"}, ] -pytz = [] +pytz = [ + {file = "pytz-2022.2.1-py2.py3-none-any.whl", hash = "sha256:220f481bdafa09c3955dfbdddb7b57780e9a94f5127e35456a48589b9e0c0197"}, + {file = "pytz-2022.2.1.tar.gz", hash = "sha256:cea221417204f2d1a2aa03ddae3e867921971d0d76f14d87abb4414415bbdcf5"}, +] pywin32 = [ {file = "pywin32-301-cp35-cp35m-win32.whl", hash = "sha256:93367c96e3a76dfe5003d8291ae16454ca7d84bb24d721e0b74a07610b7be4a7"}, {file = "pywin32-301-cp35-cp35m-win_amd64.whl", hash = "sha256:9635df6998a70282bd36e7ac2a5cef9ead1627b0a63b17c731312c7a0daebb72"}, @@ -2558,7 +2812,10 @@ pywin32-ctypes = [ {file = "pywin32-ctypes-0.2.0.tar.gz", hash = "sha256:24ffc3b341d457d48e8922352130cf2644024a4ff09762a2261fd34c36ee5942"}, {file = "pywin32_ctypes-0.2.0-py2.py3-none-any.whl", hash = "sha256:9dc2d991b3479cc2df15930958b674a48a227d5361d413827a4cfd0b5876fc98"}, ] -"qt.py" = [] +"qt.py" = [ + {file = "Qt.py-1.3.7-py2.py3-none-any.whl", hash = "sha256:150099d1c6f64c9621a2c9d79d45102ec781c30ee30ee69fc082c6e9be7324fe"}, + {file = "Qt.py-1.3.7.tar.gz", hash = "sha256:803c7bdf4d6230f9a466be19d55934a173eabb61406d21cb91e80c2a3f773b1f"}, +] qtawesome = [ {file = "QtAwesome-0.7.3-py2.py3-none-any.whl", hash = "sha256:ddf4530b4af71cec13b24b88a4cdb56ec85b1e44c43c42d0698804c7137b09b0"}, {file = "QtAwesome-0.7.3.tar.gz", hash = "sha256:b98b9038d19190e83ab26d91c4d8fc3a36591ee2bc7f5016d4438b8240d097bd"}, @@ -2571,9 +2828,18 @@ recommonmark = [ {file = "recommonmark-0.7.1-py2.py3-none-any.whl", hash = "sha256:1b1db69af0231efce3fa21b94ff627ea33dee7079a01dd0a7f8482c3da148b3f"}, {file = "recommonmark-0.7.1.tar.gz", hash = 
"sha256:bdb4db649f2222dcd8d2d844f0006b958d627f732415d399791ee436a3686d67"}, ] -requests = [] -rsa = [] -secretstorage = [] +requests = [ + {file = "requests-2.27.1-py2.py3-none-any.whl", hash = "sha256:f22fa1e554c9ddfd16e6e41ac79759e17be9e492b3587efa038054674760e72d"}, + {file = "requests-2.27.1.tar.gz", hash = "sha256:68d7c56fd5a8999887728ef304a6d12edc7be74f1cfa47714fc8b414525c9a61"}, +] +rsa = [ + {file = "rsa-4.9-py3-none-any.whl", hash = "sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7"}, + {file = "rsa-4.9.tar.gz", hash = "sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21"}, +] +secretstorage = [ + {file = "SecretStorage-3.3.3-py3-none-any.whl", hash = "sha256:f356e6628222568e3af06f2eba8df495efa13b3b63081dafd4f7d9a7b7bc9f99"}, + {file = "SecretStorage-3.3.3.tar.gz", hash = "sha256:2403533ef369eca6d2ba81718576c5e0f564d5cca1b58f73a8b23e7d4eeebd77"}, +] semver = [ {file = "semver-2.13.0-py2.py3-none-any.whl", hash = "sha256:ced8b23dceb22134307c1b8abfa523da14198793d9787ac838e70e29e77458d4"}, {file = "semver-2.13.0.tar.gz", hash = "sha256:fa0fe2722ee1c3f57eac478820c3a5ae2f624af8264cbdf9000c980ff7f75e3f"}, @@ -2584,8 +2850,8 @@ six = [ {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, ] slack-sdk = [ - {file = "slack_sdk-3.18.0-py2.py3-none-any.whl", hash = "sha256:1bca5f1c006aef174b0e4ccc38cb8f4b1c9d595150d64dcb764f65c443e105cf"}, - {file = "slack_sdk-3.18.0.tar.gz", hash = "sha256:429b77c123993d2aaded2917909f4a40bb2079c1d5d6d0f86c4bb2b40900433b"}, + {file = "slack_sdk-3.18.1-py2.py3-none-any.whl", hash = "sha256:63ce5e6253a31873d6c921c9feaa842a93a2f56e6e009cb7daf406f4bc4df798"}, + {file = "slack_sdk-3.18.1.tar.gz", hash = "sha256:a25d3d2bf0bf605d54d764d4a463fe7c0659ee24c13d75653e2bec247bd5998b"}, ] smmap = [ {file = "smmap-5.0.0-py3-none-any.whl", hash = "sha256:2aba19d6a040e78d8b09de5c57e96207b09ed71d8e55ce0959eeee6c8e190d94"}, @@ -2600,16 +2866,16 @@ speedcopy = [ {file = "speedcopy-2.1.4.tar.gz", hash = "sha256:eff007a97e49ec1934df4fa8074f4bd1cf4a3b14c5499d914988785cff0c199a"}, ] sphinx = [ - {file = "Sphinx-5.0.2-py3-none-any.whl", hash = "sha256:d3e57663eed1d7c5c50895d191fdeda0b54ded6f44d5621b50709466c338d1e8"}, - {file = "Sphinx-5.0.2.tar.gz", hash = "sha256:b18e978ea7565720f26019c702cd85c84376e948370f1cd43d60265010e1c7b0"}, + {file = "Sphinx-5.0.1-py3-none-any.whl", hash = "sha256:36aa2a3c2f6d5230be94585bc5d74badd5f9ed8f3388b8eedc1726fe45b1ad30"}, + {file = "Sphinx-5.0.1.tar.gz", hash = "sha256:f4da1187785a5bc7312cc271b0e867a93946c319d106363e102936a3d9857306"}, ] sphinx-qt-documentation = [ {file = "sphinx_qt_documentation-0.4-py3-none-any.whl", hash = "sha256:fa131093f75cd1bd48699cd132e18e4d46ba9eaadc070e6026867cea75ecdb7b"}, {file = "sphinx_qt_documentation-0.4.tar.gz", hash = "sha256:f43ba17baa93e353fb94045027fb67f9d935ed158ce8662de93f08b88eec6774"}, ] sphinx-rtd-theme = [ - {file = "sphinx_rtd_theme-1.0.0-py2.py3-none-any.whl", hash = "sha256:4d35a56f4508cfee4c4fb604373ede6feae2a306731d533f409ef5c3496fdbd8"}, - {file = "sphinx_rtd_theme-1.0.0.tar.gz", hash = "sha256:eec6d497e4c2195fa0e8b2016b337532b8a699a68bcb22a512870e16925c6a5c"}, + {file = "sphinx_rtd_theme-0.5.1-py2.py3-none-any.whl", hash = "sha256:fa6bebd5ab9a73da8e102509a86f3fcc36dec04a0b52ea80e5a033b2aba00113"}, + {file = "sphinx_rtd_theme-0.5.1.tar.gz", hash = "sha256:eda689eda0c7301a80cf122dad28b1861e5605cbf455558f3775e1e8200e83a5"}, ] sphinxcontrib-applehelp = [ {file = "sphinxcontrib-applehelp-1.0.2.tar.gz", 
hash = "sha256:a072735ec80e7675e3f432fcae8610ecf509c5f1869d17e2eecff44389cdbc58"}, @@ -2656,8 +2922,8 @@ tomli = [ {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, ] tomlkit = [ - {file = "tomlkit-0.11.1-py3-none-any.whl", hash = "sha256:1c5bebdf19d5051e2e1de6cf70adfc5948d47221f097fcff7a3ffc91e953eaf5"}, - {file = "tomlkit-0.11.1.tar.gz", hash = "sha256:61901f81ff4017951119cd0d1ed9b7af31c821d6845c8c477587bbdcd5e5854e"}, + {file = "tomlkit-0.11.4-py3-none-any.whl", hash = "sha256:25d4e2e446c453be6360c67ddfb88838cfc42026322770ba13d1fbd403a93a5c"}, + {file = "tomlkit-0.11.4.tar.gz", hash = "sha256:3235a9010fae54323e727c3ac06fb720752fe6635b3426e379daec60fbd44a83"}, ] typing-extensions = [ {file = "typing_extensions-4.3.0-py3-none-any.whl", hash = "sha256:25642c956049920a5aa49edcdd6ab1e06d7e5d467fc00e0506c44ac86fbfca02"}, @@ -2668,8 +2934,8 @@ uritemplate = [ {file = "uritemplate-3.0.1.tar.gz", hash = "sha256:5af8ad10cec94f215e3f48112de2022e1d5a37ed427fbd88652fa908f2ab7cae"}, ] urllib3 = [ - {file = "urllib3-1.26.10-py2.py3-none-any.whl", hash = "sha256:8298d6d56d39be0e3bc13c1c97d133f9b45d797169a0e11cdd0e0489d786f7ec"}, - {file = "urllib3-1.26.10.tar.gz", hash = "sha256:879ba4d1e89654d9769ce13121e0f94310ea32e8d2f8cf587b77c08bbcdb30d6"}, + {file = "urllib3-1.26.12-py2.py3-none-any.whl", hash = "sha256:b930dd878d5a8afb066a637fbb35144fe7901e3b209d1cd4f524bd0e9deee997"}, + {file = "urllib3-1.26.12.tar.gz", hash = "sha256:3fa96cf423e6987997fc326ae8df396db2a8b7c667747d47ddd8ecba91f4a74e"}, ] wcwidth = [ {file = "wcwidth-0.2.5-py2.py3-none-any.whl", hash = "sha256:beb4802a9cebb9144e99086eff703a642a13d6a0052920003a230f3294bbe784"}, @@ -2679,84 +2945,136 @@ websocket-client = [ {file = "websocket-client-0.59.0.tar.gz", hash = "sha256:d376bd60eace9d437ab6d7ee16f4ab4e821c9dae591e1b783c58ebd8aaf80c5c"}, {file = "websocket_client-0.59.0-py2.py3-none-any.whl", hash = "sha256:2e50d26ca593f70aba7b13a489435ef88b8fc3b5c5643c1ce8808ff9b40f0b32"}, ] -wrapt = [] +wrapt = [ + {file = "wrapt-1.14.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:1b376b3f4896e7930f1f772ac4b064ac12598d1c38d04907e696cc4d794b43d3"}, + {file = "wrapt-1.14.1-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:903500616422a40a98a5a3c4ff4ed9d0066f3b4c951fa286018ecdf0750194ef"}, + {file = "wrapt-1.14.1-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:5a9a0d155deafd9448baff28c08e150d9b24ff010e899311ddd63c45c2445e28"}, + {file = "wrapt-1.14.1-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:ddaea91abf8b0d13443f6dac52e89051a5063c7d014710dcb4d4abb2ff811a59"}, + {file = "wrapt-1.14.1-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:36f582d0c6bc99d5f39cd3ac2a9062e57f3cf606ade29a0a0d6b323462f4dd87"}, + {file = "wrapt-1.14.1-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:7ef58fb89674095bfc57c4069e95d7a31cfdc0939e2a579882ac7d55aadfd2a1"}, + {file = "wrapt-1.14.1-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:e2f83e18fe2f4c9e7db597e988f72712c0c3676d337d8b101f6758107c42425b"}, + {file = "wrapt-1.14.1-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:ee2b1b1769f6707a8a445162ea16dddf74285c3964f605877a20e38545c3c462"}, + {file = "wrapt-1.14.1-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:833b58d5d0b7e5b9832869f039203389ac7cbf01765639c7309fd50ef619e0b1"}, + {file = "wrapt-1.14.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:80bb5c256f1415f747011dc3604b59bc1f91c6e7150bd7db03b19170ee06b320"}, + {file = "wrapt-1.14.1-cp310-cp310-macosx_11_0_arm64.whl", hash = 
"sha256:07f7a7d0f388028b2df1d916e94bbb40624c59b48ecc6cbc232546706fac74c2"}, + {file = "wrapt-1.14.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:02b41b633c6261feff8ddd8d11c711df6842aba629fdd3da10249a53211a72c4"}, + {file = "wrapt-1.14.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2fe803deacd09a233e4762a1adcea5db5d31e6be577a43352936179d14d90069"}, + {file = "wrapt-1.14.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:257fd78c513e0fb5cdbe058c27a0624c9884e735bbd131935fd49e9fe719d310"}, + {file = "wrapt-1.14.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:4fcc4649dc762cddacd193e6b55bc02edca674067f5f98166d7713b193932b7f"}, + {file = "wrapt-1.14.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:11871514607b15cfeb87c547a49bca19fde402f32e2b1c24a632506c0a756656"}, + {file = "wrapt-1.14.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8ad85f7f4e20964db4daadcab70b47ab05c7c1cf2a7c1e51087bfaa83831854c"}, + {file = "wrapt-1.14.1-cp310-cp310-win32.whl", hash = "sha256:a9a52172be0b5aae932bef82a79ec0a0ce87288c7d132946d645eba03f0ad8a8"}, + {file = "wrapt-1.14.1-cp310-cp310-win_amd64.whl", hash = "sha256:6d323e1554b3d22cfc03cd3243b5bb815a51f5249fdcbb86fda4bf62bab9e164"}, + {file = "wrapt-1.14.1-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:43ca3bbbe97af00f49efb06e352eae40434ca9d915906f77def219b88e85d907"}, + {file = "wrapt-1.14.1-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:6b1a564e6cb69922c7fe3a678b9f9a3c54e72b469875aa8018f18b4d1dd1adf3"}, + {file = "wrapt-1.14.1-cp35-cp35m-manylinux2010_i686.whl", hash = "sha256:00b6d4ea20a906c0ca56d84f93065b398ab74b927a7a3dbd470f6fc503f95dc3"}, + {file = "wrapt-1.14.1-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:a85d2b46be66a71bedde836d9e41859879cc54a2a04fad1191eb50c2066f6e9d"}, + {file = "wrapt-1.14.1-cp35-cp35m-win32.whl", hash = "sha256:dbcda74c67263139358f4d188ae5faae95c30929281bc6866d00573783c422b7"}, + {file = "wrapt-1.14.1-cp35-cp35m-win_amd64.whl", hash = "sha256:b21bb4c09ffabfa0e85e3a6b623e19b80e7acd709b9f91452b8297ace2a8ab00"}, + {file = "wrapt-1.14.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:9e0fd32e0148dd5dea6af5fee42beb949098564cc23211a88d799e434255a1f4"}, + {file = "wrapt-1.14.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9736af4641846491aedb3c3f56b9bc5568d92b0692303b5a305301a95dfd38b1"}, + {file = "wrapt-1.14.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5b02d65b9ccf0ef6c34cba6cf5bf2aab1bb2f49c6090bafeecc9cd81ad4ea1c1"}, + {file = "wrapt-1.14.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21ac0156c4b089b330b7666db40feee30a5d52634cc4560e1905d6529a3897ff"}, + {file = "wrapt-1.14.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:9f3e6f9e05148ff90002b884fbc2a86bd303ae847e472f44ecc06c2cd2fcdb2d"}, + {file = "wrapt-1.14.1-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:6e743de5e9c3d1b7185870f480587b75b1cb604832e380d64f9504a0535912d1"}, + {file = "wrapt-1.14.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:d79d7d5dc8a32b7093e81e97dad755127ff77bcc899e845f41bf71747af0c569"}, + {file = "wrapt-1.14.1-cp36-cp36m-win32.whl", hash = "sha256:81b19725065dcb43df02b37e03278c011a09e49757287dca60c5aecdd5a0b8ed"}, + {file = "wrapt-1.14.1-cp36-cp36m-win_amd64.whl", hash = 
"sha256:b014c23646a467558be7da3d6b9fa409b2c567d2110599b7cf9a0c5992b3b471"}, + {file = "wrapt-1.14.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:88bd7b6bd70a5b6803c1abf6bca012f7ed963e58c68d76ee20b9d751c74a3248"}, + {file = "wrapt-1.14.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b5901a312f4d14c59918c221323068fad0540e34324925c8475263841dbdfe68"}, + {file = "wrapt-1.14.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d77c85fedff92cf788face9bfa3ebaa364448ebb1d765302e9af11bf449ca36d"}, + {file = "wrapt-1.14.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d649d616e5c6a678b26d15ece345354f7c2286acd6db868e65fcc5ff7c24a77"}, + {file = "wrapt-1.14.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7d2872609603cb35ca513d7404a94d6d608fc13211563571117046c9d2bcc3d7"}, + {file = "wrapt-1.14.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:ee6acae74a2b91865910eef5e7de37dc6895ad96fa23603d1d27ea69df545015"}, + {file = "wrapt-1.14.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:2b39d38039a1fdad98c87279b48bc5dce2c0ca0d73483b12cb72aa9609278e8a"}, + {file = "wrapt-1.14.1-cp37-cp37m-win32.whl", hash = "sha256:60db23fa423575eeb65ea430cee741acb7c26a1365d103f7b0f6ec412b893853"}, + {file = "wrapt-1.14.1-cp37-cp37m-win_amd64.whl", hash = "sha256:709fe01086a55cf79d20f741f39325018f4df051ef39fe921b1ebe780a66184c"}, + {file = "wrapt-1.14.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8c0ce1e99116d5ab21355d8ebe53d9460366704ea38ae4d9f6933188f327b456"}, + {file = "wrapt-1.14.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:e3fb1677c720409d5f671e39bac6c9e0e422584e5f518bfd50aa4cbbea02433f"}, + {file = "wrapt-1.14.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:642c2e7a804fcf18c222e1060df25fc210b9c58db7c91416fb055897fc27e8cc"}, + {file = "wrapt-1.14.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7b7c050ae976e286906dd3f26009e117eb000fb2cf3533398c5ad9ccc86867b1"}, + {file = "wrapt-1.14.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ef3f72c9666bba2bab70d2a8b79f2c6d2c1a42a7f7e2b0ec83bb2f9e383950af"}, + {file = "wrapt-1.14.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:01c205616a89d09827986bc4e859bcabd64f5a0662a7fe95e0d359424e0e071b"}, + {file = "wrapt-1.14.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:5a0f54ce2c092aaf439813735584b9537cad479575a09892b8352fea5e988dc0"}, + {file = "wrapt-1.14.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2cf71233a0ed05ccdabe209c606fe0bac7379fdcf687f39b944420d2a09fdb57"}, + {file = "wrapt-1.14.1-cp38-cp38-win32.whl", hash = "sha256:aa31fdcc33fef9eb2552cbcbfee7773d5a6792c137b359e82879c101e98584c5"}, + {file = "wrapt-1.14.1-cp38-cp38-win_amd64.whl", hash = "sha256:d1967f46ea8f2db647c786e78d8cc7e4313dbd1b0aca360592d8027b8508e24d"}, + {file = "wrapt-1.14.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3232822c7d98d23895ccc443bbdf57c7412c5a65996c30442ebe6ed3df335383"}, + {file = "wrapt-1.14.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:988635d122aaf2bdcef9e795435662bcd65b02f4f4c1ae37fbee7401c440b3a7"}, + {file = "wrapt-1.14.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9cca3c2cdadb362116235fdbd411735de4328c61425b0aa9f872fd76d02c4e86"}, + {file = 
"wrapt-1.14.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d52a25136894c63de15a35bc0bdc5adb4b0e173b9c0d07a2be9d3ca64a332735"}, + {file = "wrapt-1.14.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40e7bc81c9e2b2734ea4bc1aceb8a8f0ceaac7c5299bc5d69e37c44d9081d43b"}, + {file = "wrapt-1.14.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b9b7a708dd92306328117d8c4b62e2194d00c365f18eff11a9b53c6f923b01e3"}, + {file = "wrapt-1.14.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:6a9a25751acb379b466ff6be78a315e2b439d4c94c1e99cb7266d40a537995d3"}, + {file = "wrapt-1.14.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:34aa51c45f28ba7f12accd624225e2b1e5a3a45206aa191f6f9aac931d9d56fe"}, + {file = "wrapt-1.14.1-cp39-cp39-win32.whl", hash = "sha256:dee0ce50c6a2dd9056c20db781e9c1cfd33e77d2d569f5d1d9321c641bb903d5"}, + {file = "wrapt-1.14.1-cp39-cp39-win_amd64.whl", hash = "sha256:dee60e1de1898bde3b238f18340eec6148986da0455d8ba7848d50470a7a32fb"}, + {file = "wrapt-1.14.1.tar.gz", hash = "sha256:380a85cf89e0e69b7cfbe2ea9f765f004ff419f34194018a6827ac0e3edfed4d"}, +] wsrpc-aiohttp = [ {file = "wsrpc-aiohttp-3.2.0.tar.gz", hash = "sha256:f467abc51bcdc760fc5aeb7041abdeef46eeca3928dc43dd6e7fa7a533563818"}, {file = "wsrpc_aiohttp-3.2.0-py3-none-any.whl", hash = "sha256:fa9b0bf5cb056898cb5c9f64cbc5eacb8a5dd18ab1b7f0cd4a2208b4a7fde282"}, ] yarl = [ - {file = "yarl-1.7.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f2a8508f7350512434e41065684076f640ecce176d262a7d54f0da41d99c5a95"}, - {file = "yarl-1.7.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:da6df107b9ccfe52d3a48165e48d72db0eca3e3029b5b8cb4fe6ee3cb870ba8b"}, - {file = "yarl-1.7.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a1d0894f238763717bdcfea74558c94e3bc34aeacd3351d769460c1a586a8b05"}, - {file = "yarl-1.7.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dfe4b95b7e00c6635a72e2d00b478e8a28bfb122dc76349a06e20792eb53a523"}, - {file = "yarl-1.7.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c145ab54702334c42237a6c6c4cc08703b6aa9b94e2f227ceb3d477d20c36c63"}, - {file = "yarl-1.7.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1ca56f002eaf7998b5fcf73b2421790da9d2586331805f38acd9997743114e98"}, - {file = "yarl-1.7.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:1d3d5ad8ea96bd6d643d80c7b8d5977b4e2fb1bab6c9da7322616fd26203d125"}, - {file = "yarl-1.7.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:167ab7f64e409e9bdd99333fe8c67b5574a1f0495dcfd905bc7454e766729b9e"}, - {file = "yarl-1.7.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:95a1873b6c0dd1c437fb3bb4a4aaa699a48c218ac7ca1e74b0bee0ab16c7d60d"}, - {file = "yarl-1.7.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6152224d0a1eb254f97df3997d79dadd8bb2c1a02ef283dbb34b97d4f8492d23"}, - {file = "yarl-1.7.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:5bb7d54b8f61ba6eee541fba4b83d22b8a046b4ef4d8eb7f15a7e35db2e1e245"}, - {file = "yarl-1.7.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:9c1f083e7e71b2dd01f7cd7434a5f88c15213194df38bc29b388ccdf1492b739"}, - {file = "yarl-1.7.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f44477ae29025d8ea87ec308539f95963ffdc31a82f42ca9deecf2d505242e72"}, - {file = 
"yarl-1.7.2-cp310-cp310-win32.whl", hash = "sha256:cff3ba513db55cc6a35076f32c4cdc27032bd075c9faef31fec749e64b45d26c"}, - {file = "yarl-1.7.2-cp310-cp310-win_amd64.whl", hash = "sha256:c9c6d927e098c2d360695f2e9d38870b2e92e0919be07dbe339aefa32a090265"}, - {file = "yarl-1.7.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:9b4c77d92d56a4c5027572752aa35082e40c561eec776048330d2907aead891d"}, - {file = "yarl-1.7.2-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c01a89a44bb672c38f42b49cdb0ad667b116d731b3f4c896f72302ff77d71656"}, - {file = "yarl-1.7.2-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c19324a1c5399b602f3b6e7db9478e5b1adf5cf58901996fc973fe4fccd73eed"}, - {file = "yarl-1.7.2-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3abddf0b8e41445426d29f955b24aeecc83fa1072be1be4e0d194134a7d9baee"}, - {file = "yarl-1.7.2-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:6a1a9fe17621af43e9b9fcea8bd088ba682c8192d744b386ee3c47b56eaabb2c"}, - {file = "yarl-1.7.2-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:8b0915ee85150963a9504c10de4e4729ae700af11df0dc5550e6587ed7891e92"}, - {file = "yarl-1.7.2-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:29e0656d5497733dcddc21797da5a2ab990c0cb9719f1f969e58a4abac66234d"}, - {file = "yarl-1.7.2-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:bf19725fec28452474d9887a128e98dd67eee7b7d52e932e6949c532d820dc3b"}, - {file = "yarl-1.7.2-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:d6f3d62e16c10e88d2168ba2d065aa374e3c538998ed04996cd373ff2036d64c"}, - {file = "yarl-1.7.2-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:ac10bbac36cd89eac19f4e51c032ba6b412b3892b685076f4acd2de18ca990aa"}, - {file = "yarl-1.7.2-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:aa32aaa97d8b2ed4e54dc65d241a0da1c627454950f7d7b1f95b13985afd6c5d"}, - {file = "yarl-1.7.2-cp36-cp36m-win32.whl", hash = "sha256:87f6e082bce21464857ba58b569370e7b547d239ca22248be68ea5d6b51464a1"}, - {file = "yarl-1.7.2-cp36-cp36m-win_amd64.whl", hash = "sha256:ac35ccde589ab6a1870a484ed136d49a26bcd06b6a1c6397b1967ca13ceb3913"}, - {file = "yarl-1.7.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a467a431a0817a292121c13cbe637348b546e6ef47ca14a790aa2fa8cc93df63"}, - {file = "yarl-1.7.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ab0c3274d0a846840bf6c27d2c60ba771a12e4d7586bf550eefc2df0b56b3b4"}, - {file = "yarl-1.7.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d260d4dc495c05d6600264a197d9d6f7fc9347f21d2594926202fd08cf89a8ba"}, - {file = "yarl-1.7.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fc4dd8b01a8112809e6b636b00f487846956402834a7fd59d46d4f4267181c41"}, - {file = "yarl-1.7.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:c1164a2eac148d85bbdd23e07dfcc930f2e633220f3eb3c3e2a25f6148c2819e"}, - {file = "yarl-1.7.2-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:67e94028817defe5e705079b10a8438b8cb56e7115fa01640e9c0bb3edf67332"}, - {file = "yarl-1.7.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:89ccbf58e6a0ab89d487c92a490cb5660d06c3a47ca08872859672f9c511fc52"}, - {file = "yarl-1.7.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = 
"sha256:8cce6f9fa3df25f55521fbb5c7e4a736683148bcc0c75b21863789e5185f9185"}, - {file = "yarl-1.7.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:211fcd65c58bf250fb994b53bc45a442ddc9f441f6fec53e65de8cba48ded986"}, - {file = "yarl-1.7.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c10ea1e80a697cf7d80d1ed414b5cb8f1eec07d618f54637067ae3c0334133c4"}, - {file = "yarl-1.7.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:52690eb521d690ab041c3919666bea13ab9fbff80d615ec16fa81a297131276b"}, - {file = "yarl-1.7.2-cp37-cp37m-win32.whl", hash = "sha256:695ba021a9e04418507fa930d5f0704edbce47076bdcfeeaba1c83683e5649d1"}, - {file = "yarl-1.7.2-cp37-cp37m-win_amd64.whl", hash = "sha256:c17965ff3706beedafd458c452bf15bac693ecd146a60a06a214614dc097a271"}, - {file = "yarl-1.7.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:fce78593346c014d0d986b7ebc80d782b7f5e19843ca798ed62f8e3ba8728576"}, - {file = "yarl-1.7.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c2a1ac41a6aa980db03d098a5531f13985edcb451bcd9d00670b03129922cd0d"}, - {file = "yarl-1.7.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:39d5493c5ecd75c8093fa7700a2fb5c94fe28c839c8e40144b7ab7ccba6938c8"}, - {file = "yarl-1.7.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1eb6480ef366d75b54c68164094a6a560c247370a68c02dddb11f20c4c6d3c9d"}, - {file = "yarl-1.7.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5ba63585a89c9885f18331a55d25fe81dc2d82b71311ff8bd378fc8004202ff6"}, - {file = "yarl-1.7.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e39378894ee6ae9f555ae2de332d513a5763276a9265f8e7cbaeb1b1ee74623a"}, - {file = "yarl-1.7.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:c0910c6b6c31359d2f6184828888c983d54d09d581a4a23547a35f1d0b9484b1"}, - {file = "yarl-1.7.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:6feca8b6bfb9eef6ee057628e71e1734caf520a907b6ec0d62839e8293e945c0"}, - {file = "yarl-1.7.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:8300401dc88cad23f5b4e4c1226f44a5aa696436a4026e456fe0e5d2f7f486e6"}, - {file = "yarl-1.7.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:788713c2896f426a4e166b11f4ec538b5736294ebf7d5f654ae445fd44270832"}, - {file = "yarl-1.7.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:fd547ec596d90c8676e369dd8a581a21227fe9b4ad37d0dc7feb4ccf544c2d59"}, - {file = "yarl-1.7.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:737e401cd0c493f7e3dd4db72aca11cfe069531c9761b8ea474926936b3c57c8"}, - {file = "yarl-1.7.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:baf81561f2972fb895e7844882898bda1eef4b07b5b385bcd308d2098f1a767b"}, - {file = "yarl-1.7.2-cp38-cp38-win32.whl", hash = "sha256:ede3b46cdb719c794427dcce9d8beb4abe8b9aa1e97526cc20de9bd6583ad1ef"}, - {file = "yarl-1.7.2-cp38-cp38-win_amd64.whl", hash = "sha256:cc8b7a7254c0fc3187d43d6cb54b5032d2365efd1df0cd1749c0c4df5f0ad45f"}, - {file = "yarl-1.7.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:580c1f15500e137a8c37053e4cbf6058944d4c114701fa59944607505c2fe3a0"}, - {file = "yarl-1.7.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3ec1d9a0d7780416e657f1e405ba35ec1ba453a4f1511eb8b9fbab81cb8b3ce1"}, - {file = "yarl-1.7.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3bf8cfe8856708ede6a73907bf0501f2dc4e104085e070a41f5d88e7faf237f3"}, - {file = "yarl-1.7.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", 
hash = "sha256:1be4bbb3d27a4e9aa5f3df2ab61e3701ce8fcbd3e9846dbce7c033a7e8136746"}, - {file = "yarl-1.7.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:534b047277a9a19d858cde163aba93f3e1677d5acd92f7d10ace419d478540de"}, - {file = "yarl-1.7.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c6ddcd80d79c96eb19c354d9dca95291589c5954099836b7c8d29278a7ec0bda"}, - {file = "yarl-1.7.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:9bfcd43c65fbb339dc7086b5315750efa42a34eefad0256ba114cd8ad3896f4b"}, - {file = "yarl-1.7.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f64394bd7ceef1237cc604b5a89bf748c95982a84bcd3c4bbeb40f685c810794"}, - {file = "yarl-1.7.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:044daf3012e43d4b3538562da94a88fb12a6490652dbc29fb19adfa02cf72eac"}, - {file = "yarl-1.7.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:368bcf400247318382cc150aaa632582d0780b28ee6053cd80268c7e72796dec"}, - {file = "yarl-1.7.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:bab827163113177aee910adb1f48ff7af31ee0289f434f7e22d10baf624a6dfe"}, - {file = "yarl-1.7.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:0cba38120db72123db7c58322fa69e3c0efa933040ffb586c3a87c063ec7cae8"}, - {file = "yarl-1.7.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:59218fef177296451b23214c91ea3aba7858b4ae3306dde120224cfe0f7a6ee8"}, - {file = "yarl-1.7.2-cp39-cp39-win32.whl", hash = "sha256:1edc172dcca3f11b38a9d5c7505c83c1913c0addc99cd28e993efeaafdfaa18d"}, - {file = "yarl-1.7.2-cp39-cp39-win_amd64.whl", hash = "sha256:797c2c412b04403d2da075fb93c123df35239cd7b4cc4e0cd9e5839b73f52c58"}, - {file = "yarl-1.7.2.tar.gz", hash = "sha256:45399b46d60c253327a460e99856752009fcee5f5d3c80b2f7c0cae1c38d56dd"}, + {file = "yarl-1.8.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:abc06b97407868ef38f3d172762f4069323de52f2b70d133d096a48d72215d28"}, + {file = "yarl-1.8.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:07b21e274de4c637f3e3b7104694e53260b5fc10d51fb3ec5fed1da8e0f754e3"}, + {file = "yarl-1.8.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9de955d98e02fab288c7718662afb33aab64212ecb368c5dc866d9a57bf48880"}, + {file = "yarl-1.8.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ec362167e2c9fd178f82f252b6d97669d7245695dc057ee182118042026da40"}, + {file = "yarl-1.8.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:20df6ff4089bc86e4a66e3b1380460f864df3dd9dccaf88d6b3385d24405893b"}, + {file = "yarl-1.8.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5999c4662631cb798496535afbd837a102859568adc67d75d2045e31ec3ac497"}, + {file = "yarl-1.8.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ed19b74e81b10b592084a5ad1e70f845f0aacb57577018d31de064e71ffa267a"}, + {file = "yarl-1.8.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e4808f996ca39a6463f45182e2af2fae55e2560be586d447ce8016f389f626f"}, + {file = "yarl-1.8.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:2d800b9c2eaf0684c08be5f50e52bfa2aa920e7163c2ea43f4f431e829b4f0fd"}, + {file = "yarl-1.8.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6628d750041550c5d9da50bb40b5cf28a2e63b9388bac10fedd4f19236ef4957"}, + {file = "yarl-1.8.1-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = 
"sha256:f5af52738e225fcc526ae64071b7e5342abe03f42e0e8918227b38c9aa711e28"}, + {file = "yarl-1.8.1-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:76577f13333b4fe345c3704811ac7509b31499132ff0181f25ee26619de2c843"}, + {file = "yarl-1.8.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0c03f456522d1ec815893d85fccb5def01ffaa74c1b16ff30f8aaa03eb21e453"}, + {file = "yarl-1.8.1-cp310-cp310-win32.whl", hash = "sha256:ea30a42dc94d42f2ba4d0f7c0ffb4f4f9baa1b23045910c0c32df9c9902cb272"}, + {file = "yarl-1.8.1-cp310-cp310-win_amd64.whl", hash = "sha256:9130ddf1ae9978abe63808b6b60a897e41fccb834408cde79522feb37fb72fb0"}, + {file = "yarl-1.8.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0ab5a138211c1c366404d912824bdcf5545ccba5b3ff52c42c4af4cbdc2c5035"}, + {file = "yarl-1.8.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0fb2cb4204ddb456a8e32381f9a90000429489a25f64e817e6ff94879d432fc"}, + {file = "yarl-1.8.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:85cba594433915d5c9a0d14b24cfba0339f57a2fff203a5d4fd070e593307d0b"}, + {file = "yarl-1.8.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1ca7e596c55bd675432b11320b4eacc62310c2145d6801a1f8e9ad160685a231"}, + {file = "yarl-1.8.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d0f77539733e0ec2475ddcd4e26777d08996f8cd55d2aef82ec4d3896687abda"}, + {file = "yarl-1.8.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:29e256649f42771829974e742061c3501cc50cf16e63f91ed8d1bf98242e5507"}, + {file = "yarl-1.8.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7fce6cbc6c170ede0221cc8c91b285f7f3c8b9fe28283b51885ff621bbe0f8ee"}, + {file = "yarl-1.8.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:59ddd85a1214862ce7c7c66457f05543b6a275b70a65de366030d56159a979f0"}, + {file = "yarl-1.8.1-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:12768232751689c1a89b0376a96a32bc7633c08da45ad985d0c49ede691f5c0d"}, + {file = "yarl-1.8.1-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:b19255dde4b4f4c32e012038f2c169bb72e7f081552bea4641cab4d88bc409dd"}, + {file = "yarl-1.8.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:6c8148e0b52bf9535c40c48faebb00cb294ee577ca069d21bd5c48d302a83780"}, + {file = "yarl-1.8.1-cp37-cp37m-win32.whl", hash = "sha256:de839c3a1826a909fdbfe05f6fe2167c4ab033f1133757b5936efe2f84904c07"}, + {file = "yarl-1.8.1-cp37-cp37m-win_amd64.whl", hash = "sha256:dd032e8422a52e5a4860e062eb84ac94ea08861d334a4bcaf142a63ce8ad4802"}, + {file = "yarl-1.8.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:19cd801d6f983918a3f3a39f3a45b553c015c5aac92ccd1fac619bd74beece4a"}, + {file = "yarl-1.8.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6347f1a58e658b97b0a0d1ff7658a03cb79bdbda0331603bed24dd7054a6dea1"}, + {file = "yarl-1.8.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7c0da7e44d0c9108d8b98469338705e07f4bb7dab96dbd8fa4e91b337db42548"}, + {file = "yarl-1.8.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5587bba41399854703212b87071c6d8638fa6e61656385875f8c6dff92b2e461"}, + {file = "yarl-1.8.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:31a9a04ecccd6b03e2b0e12e82131f1488dea5555a13a4d32f064e22a6003cfe"}, + {file = "yarl-1.8.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:205904cffd69ae972a1707a1bd3ea7cded594b1d773a0ce66714edf17833cdae"}, + {file = 
"yarl-1.8.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ea513a25976d21733bff523e0ca836ef1679630ef4ad22d46987d04b372d57fc"}, + {file = "yarl-1.8.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d0b51530877d3ad7a8d47b2fff0c8df3b8f3b8deddf057379ba50b13df2a5eae"}, + {file = "yarl-1.8.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:d2b8f245dad9e331540c350285910b20dd913dc86d4ee410c11d48523c4fd546"}, + {file = "yarl-1.8.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:ab2a60d57ca88e1d4ca34a10e9fb4ab2ac5ad315543351de3a612bbb0560bead"}, + {file = "yarl-1.8.1-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:449c957ffc6bc2309e1fbe67ab7d2c1efca89d3f4912baeb8ead207bb3cc1cd4"}, + {file = "yarl-1.8.1-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:a165442348c211b5dea67c0206fc61366212d7082ba8118c8c5c1c853ea4d82e"}, + {file = "yarl-1.8.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b3ded839a5c5608eec8b6f9ae9a62cb22cd037ea97c627f38ae0841a48f09eae"}, + {file = "yarl-1.8.1-cp38-cp38-win32.whl", hash = "sha256:c1445a0c562ed561d06d8cbc5c8916c6008a31c60bc3655cdd2de1d3bf5174a0"}, + {file = "yarl-1.8.1-cp38-cp38-win_amd64.whl", hash = "sha256:56c11efb0a89700987d05597b08a1efcd78d74c52febe530126785e1b1a285f4"}, + {file = "yarl-1.8.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e80ed5a9939ceb6fda42811542f31c8602be336b1fb977bccb012e83da7e4936"}, + {file = "yarl-1.8.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6afb336e23a793cd3b6476c30f030a0d4c7539cd81649683b5e0c1b0ab0bf350"}, + {file = "yarl-1.8.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4c322cbaa4ed78a8aac89b2174a6df398faf50e5fc12c4c191c40c59d5e28357"}, + {file = "yarl-1.8.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fae37373155f5ef9b403ab48af5136ae9851151f7aacd9926251ab26b953118b"}, + {file = "yarl-1.8.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5395da939ffa959974577eff2cbfc24b004a2fb6c346918f39966a5786874e54"}, + {file = "yarl-1.8.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:076eede537ab978b605f41db79a56cad2e7efeea2aa6e0fa8f05a26c24a034fb"}, + {file = "yarl-1.8.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d1a50e461615747dd93c099f297c1994d472b0f4d2db8a64e55b1edf704ec1c"}, + {file = "yarl-1.8.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7de89c8456525650ffa2bb56a3eee6af891e98f498babd43ae307bd42dca98f6"}, + {file = "yarl-1.8.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:4a88510731cd8d4befaba5fbd734a7dd914de5ab8132a5b3dde0bbd6c9476c64"}, + {file = "yarl-1.8.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:2d93a049d29df172f48bcb09acf9226318e712ce67374f893b460b42cc1380ae"}, + {file = "yarl-1.8.1-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:21ac44b763e0eec15746a3d440f5e09ad2ecc8b5f6dcd3ea8cb4773d6d4703e3"}, + {file = "yarl-1.8.1-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:d0272228fabe78ce00a3365ffffd6f643f57a91043e119c289aaba202f4095b0"}, + {file = "yarl-1.8.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:99449cd5366fe4608e7226c6cae80873296dfa0cde45d9b498fefa1de315a09e"}, + {file = "yarl-1.8.1-cp39-cp39-win32.whl", hash = "sha256:8b0af1cf36b93cee99a31a545fe91d08223e64390c5ecc5e94c39511832a4bb6"}, + {file = "yarl-1.8.1-cp39-cp39-win_amd64.whl", hash = 
"sha256:de49d77e968de6626ba7ef4472323f9d2e5a56c1d85b7c0e2a190b2173d3b9be"}, + {file = "yarl-1.8.1.tar.gz", hash = "sha256:af887845b8c2e060eb5605ff72b6f2dd2aab7a761379373fd89d314f4752abbf"}, ] zipp = [ {file = "zipp-3.8.1-py3-none-any.whl", hash = "sha256:47c40d7fe183a6f21403a199b3e4192cca5774656965b0a4988ad2f8feb5f009"}, From a316a55ec106ace2718e08302d73f9c5041e594e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Mon, 12 Sep 2022 15:15:31 +0200 Subject: [PATCH 011/480] :heavy_plus_sign: update cx_freeze and change otio version --- poetry.lock | 245 ++++++++++++++++++++++++++++++++++--------------- pyproject.toml | 6 +- 2 files changed, 174 insertions(+), 77 deletions(-) diff --git a/poetry.lock b/poetry.lock index 1af92e4de2..638dabdb7b 100644 --- a/poetry.lock +++ b/poetry.lock @@ -121,7 +121,10 @@ python-versions = ">=3.7.2" [package.dependencies] lazy-object-proxy = ">=1.4.0" typing-extensions = {version = ">=3.10", markers = "python_version < \"3.10\""} -wrapt = {version = ">=1.11,<2", markers = "python_version < \"3.11\""} +wrapt = [ + {version = ">=1.11,<2", markers = "python_version < \"3.11\""}, + {version = ">=1.14,<2", markers = "python_version >= \"3.11\""}, +] [[package]] name = "async-timeout" @@ -148,10 +151,10 @@ optional = false python-versions = ">=3.5" [package.extras] -dev = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "mypy (>=0.900,!=0.940)", "pytest-mypy-plugins", "zope.interface", "furo", "sphinx", "sphinx-notfound-page", "pre-commit", "cloudpickle"] -docs = ["furo", "sphinx", "zope.interface", "sphinx-notfound-page"] -tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "mypy (>=0.900,!=0.940)", "pytest-mypy-plugins", "zope.interface", "cloudpickle"] -tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "mypy (>=0.900,!=0.940)", "pytest-mypy-plugins", "cloudpickle"] +dev = ["cloudpickle", "coverage[toml] (>=5.0.2)", "furo", "hypothesis", "mypy (>=0.900,!=0.940)", "pre-commit", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "sphinx", "sphinx-notfound-page", "zope.interface"] +docs = ["furo", "sphinx", "sphinx-notfound-page", "zope.interface"] +tests = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy (>=0.900,!=0.940)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "zope.interface"] +tests_no_zope = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy (>=0.900,!=0.940)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins"] [[package]] name = "autopep8" @@ -256,9 +259,9 @@ optional = false python-versions = ">=2.7, <4.0" [package.extras] -dev = ["sphinx (>=2,<4)", "sphinx-rtd-theme (>=0.1.6,<1)", "lowdown (>=0.2.0,<1)", "pytest-runner (>=2.7,<3)", "pytest (>=2.3.5,<5)", "pytest-cov (>=2,<3)"] -doc = ["sphinx (>=2,<4)", "sphinx-rtd-theme (>=0.1.6,<1)", "lowdown (>=0.2.0,<1)"] -test = ["pytest-runner (>=2.7,<3)", "pytest (>=2.3.5,<5)", "pytest-cov (>=2,<3)"] +dev = ["lowdown (>=0.2.0,<1)", "pytest (>=2.3.5,<5)", "pytest-cov (>=2,<3)", "pytest-runner (>=2.7,<3)", "sphinx (>=2,<4)", "sphinx-rtd-theme (>=0.1.6,<1)"] +doc = ["lowdown (>=0.2.0,<1)", "sphinx (>=2,<4)", "sphinx-rtd-theme (>=0.1.6,<1)"] +test = ["pytest (>=2.3.5,<5)", "pytest-cov (>=2,<3)", "pytest-runner (>=2.7,<3)"] [[package]] name = "colorama" @@ -314,23 +317,31 @@ cffi = ">=1.12" [package.extras] docs = ["sphinx (>=1.6.5,!=1.8.0,!=3.1.0,!=3.1.1)", "sphinx-rtd-theme"] -docstest = ["pyenchant (>=1.6.11)", "twine (>=1.12.0)", "sphinxcontrib-spelling 
(>=4.0.1)"] +docstest = ["pyenchant (>=1.6.11)", "sphinxcontrib-spelling (>=4.0.1)", "twine (>=1.12.0)"] pep8test = ["black", "flake8", "flake8-import-order", "pep8-naming"] sdist = ["setuptools_rust (>=0.11.4)"] ssh = ["bcrypt (>=3.1.5)"] -test = ["pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-subtests", "pytest-xdist", "pretend", "iso8601", "pytz", "hypothesis (>=1.11.4,!=3.79.2)"] +test = ["hypothesis (>=1.11.4,!=3.79.2)", "iso8601", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-subtests", "pytest-xdist", "pytz"] [[package]] name = "cx-freeze" -version = "6.9" +version = "6.11.1" description = "Create standalone executables from Python scripts" category = "dev" optional = false python-versions = ">=3.6" [package.dependencies] -cx-logging = {version = ">=3.0", markers = "sys_platform == \"win32\""} -importlib-metadata = ">=4.3.1" +cx-logging = {version = ">=3.0", markers = "sys_platform == \"win32\" and python_version < \"3.10\""} +importlib-metadata = {version = ">=4.8.3", markers = "python_version < \"3.10\""} +lief = {version = ">=0.11.5", markers = "sys_platform == \"win32\" and python_version <= \"3.10\""} +packaging = ">=21.0" +patchelf = {version = ">=0.12", markers = "sys_platform == \"linux\""} + +[package.extras] +dev = ["bump2version (>=1.0.1)", "cibuildwheel (==2.6.1)", "pre-commit (>=2.17.0)", "pylint (>=2.13.0)"] +doc = ["sphinx (>=5.0.1,<6.0.0)", "sphinx-rtd-theme (==1.0.0)"] +test = ["nose (==1.3.7)", "pygments (>=2.11.2)", "pytest (>=7.0.1)", "pytest-cov (==3.0.0)", "pytest-mock (>=3.6.1)", "pytest-timeout (>=1.4.2)"] [[package]] name = "cx-logging" @@ -352,7 +363,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" wrapt = ">=1.10,<2" [package.extras] -dev = ["tox", "bump2version (<1)", "sphinx (<2)", "importlib-metadata (<3)", "importlib-resources (<4)", "configparser (<5)", "sphinxcontrib-websupport (<2)", "zipp (<2)", "PyTest (<5)", "PyTest-Cov (<2.6)", "pytest", "pytest-cov"] +dev = ["PyTest (<5)", "PyTest-Cov (<2.6)", "bump2version (<1)", "configparser (<5)", "importlib-metadata (<3)", "importlib-resources (<4)", "pytest", "pytest-cov", "sphinx (<2)", "sphinxcontrib-websupport (<2)", "tox", "zipp (<2)"] [[package]] name = "dill" @@ -374,8 +385,8 @@ optional = false python-versions = ">=3.6,<4.0" [package.extras] -dnssec = ["cryptography (>=2.6,<37.0)"] curio = ["curio (>=1.2,<2.0)", "sniffio (>=1.1,<2.0)"] +dnssec = ["cryptography (>=2.6,<37.0)"] doh = ["h2 (>=4.1.0)", "httpx (>=0.21.1)", "requests (>=2.23.0,<3.0.0)", "requests-toolbelt (>=0.9.1,<0.10.0)"] idna = ["idna (>=2.1,<4.0)"] trio = ["trio (>=0.14,<0.20)"] @@ -485,7 +496,7 @@ requests = ">=2.25.1,<=2.27.1" [package.extras] dev = ["wheel"] -test = ["pytest-cov (==2.12.1)", "requests-mock (==1.9.3)", "pytest (==4.6.11)", "pytest (==6.1.2)", "pytest (==6.2.5)", "black (==21.12b0)", "pre-commit (==2.17.0)"] +test = ["black (==21.12b0)", "pre-commit (==2.17.0)", "pytest (==4.6.11)", "pytest (==6.1.2)", "pytest (==6.2.5)", "pytest-cov (==2.12.1)", "requests-mock (==1.9.3)"] [[package]] name = "gitdb" @@ -557,7 +568,7 @@ rsa = {version = ">=3.1.4,<5", markers = "python_version >= \"3.6\""} six = ">=1.9.0" [package.extras] -aiohttp = ["requests (>=2.20.0,<3.0.0dev)", "aiohttp (>=3.6.2,<4.0.0dev)"] +aiohttp = ["aiohttp (>=3.6.2,<4.0.0dev)", "requests (>=2.20.0,<3.0.0dev)"] enterprise_cert = ["cryptography (==36.0.2)", "pyopenssl (==22.0.0)"] pyopenssl = ["pyopenssl (>=20.0.0)"] reauth = ["pyu2f (>=0.1.5)"] @@ -628,9 +639,9 @@ python-versions = ">=3.7" zipp = 
">=0.5" [package.extras] -docs = ["sphinx", "jaraco.packaging (>=9)", "rst.linker (>=1.9)"] +docs = ["jaraco.packaging (>=9)", "rst.linker (>=1.9)", "sphinx"] perf = ["ipython"] -testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.3)", "packaging", "pyfakefs", "flufl.flake8", "pytest-perf (>=0.9.2)", "pytest-black (>=0.3.7)", "pytest-mypy (>=0.9.1)", "importlib-resources (>=1.3)"] +testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)"] [[package]] name = "iniconfig" @@ -649,10 +660,10 @@ optional = false python-versions = ">=3.6.1,<4.0" [package.extras] -pipfile_deprecated_finder = ["pipreqs", "requirementslib"] -requirements_deprecated_finder = ["pipreqs", "pip-api"] colors = ["colorama (>=0.4.3,<0.5.0)"] +pipfile_deprecated_finder = ["pipreqs", "requirementslib"] plugins = ["setuptools"] +requirements_deprecated_finder = ["pip-api", "pipreqs"] [[package]] name = "jedi" @@ -677,8 +688,8 @@ optional = false python-versions = ">=3.7" [package.extras] -test = ["pytest", "pytest-trio", "pytest-asyncio (>=0.17)", "testpath", "trio", "async-timeout"] -trio = ["trio", "async-generator"] +test = ["async-timeout", "pytest", "pytest-asyncio (>=0.17)", "pytest-trio", "testpath", "trio"] +trio = ["async-generator", "trio"] [[package]] name = "jinja2" @@ -731,8 +742,8 @@ pywin32-ctypes = {version = "<0.1.0 || >0.1.0,<0.1.1 || >0.1.1", markers = "sys_ SecretStorage = {version = ">=3.2", markers = "sys_platform == \"linux\""} [package.extras] -docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)"] -testing = ["pytest (>=4.6)", "pytest-checkdocs (>=1.2.3)", "pytest-flake8", "pytest-cov", "pytest-enabler", "pytest-black (>=0.3.7)", "pytest-mypy"] +docs = ["jaraco.packaging (>=8.2)", "rst.linker (>=1.9)", "sphinx"] +testing = ["pytest (>=4.6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=1.2.3)", "pytest-cov", "pytest-enabler", "pytest-flake8", "pytest-mypy"] [[package]] name = "lazy-object-proxy" @@ -742,6 +753,14 @@ category = "dev" optional = false python-versions = ">=3.6" +[[package]] +name = "lief" +version = "0.12.1" +description = "Library to instrument executable formats" +category = "dev" +optional = false +python-versions = ">=3.6" + [[package]] name = "log4mongo" version = "1.7.0" @@ -779,24 +798,19 @@ python-versions = ">=3.7" [[package]] name = "opentimelineio" -version = "0.14.0.dev1" +version = "0.14.1" description = "Editorial interchange format and API" category = "main" optional = false -python-versions = "*" +python-versions = ">2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, !=3.6.*, !=3.9.0" [package.dependencies] -pyaaf2 = "1.4.0" +pyaaf2 = ">=1.4.0,<1.5.0" [package.extras] -dev = ["check-manifest", "flake8 (>=3.5)", "coverage (>=4.5)", "urllib3 (>=1.24.3)"] +dev = ["check-manifest", "coverage (>=4.5)", "flake8 (>=3.5)", "urllib3 (>=1.24.3)"] view = ["PySide2 (>=5.11,<6.0)"] -[package.source] -type = "legacy" -url = "https://distribute.openpype.io/wheels" -reference = "openpype" - [[package]] name = "packaging" version = "21.3" @@ -823,9 +837,9 @@ pynacl = ">=1.0.1" six = "*" [package.extras] -all = ["pyasn1 (>=0.1.7)", "pynacl (>=1.0.1)", "bcrypt (>=3.1.3)", "invoke (>=1.3)", "gssapi (>=1.4.1)", "pywin32 (>=2.1.8)"] -ed25519 = ["pynacl (>=1.0.1)", "bcrypt (>=3.1.3)"] -gssapi = ["pyasn1 (>=0.1.7)", 
"gssapi (>=1.4.1)", "pywin32 (>=2.1.8)"] +all = ["bcrypt (>=3.1.3)", "gssapi (>=1.4.1)", "invoke (>=1.3)", "pyasn1 (>=0.1.7)", "pynacl (>=1.0.1)", "pywin32 (>=2.1.8)"] +ed25519 = ["bcrypt (>=3.1.3)", "pynacl (>=1.0.1)"] +gssapi = ["gssapi (>=1.4.1)", "pyasn1 (>=0.1.7)", "pywin32 (>=2.1.8)"] invoke = ["invoke (>=1.3)"] [[package]] @@ -840,6 +854,17 @@ python-versions = ">=3.6" qa = ["flake8 (==3.8.3)", "mypy (==0.782)"] testing = ["docopt", "pytest (<6.0.0)"] +[[package]] +name = "patchelf" +version = "0.15.0.0" +description = "A small utility to modify the dynamic linker and RPATH of ELF executables." +category = "dev" +optional = false +python-versions = "*" + +[package.extras] +test = ["importlib-metadata", "pytest"] + [[package]] name = "pathlib2" version = "2.3.7.post1" @@ -872,8 +897,8 @@ optional = false python-versions = ">=3.7" [package.extras] -docs = ["furo (>=2021.7.5b38)", "proselint (>=0.10.2)", "sphinx-autodoc-typehints (>=1.12)", "sphinx (>=4)"] -test = ["appdirs (==1.4.4)", "pytest-cov (>=2.7)", "pytest-mock (>=3.6)", "pytest (>=6)"] +docs = ["furo (>=2021.7.5b38)", "proselint (>=0.10.2)", "sphinx (>=4)", "sphinx-autodoc-typehints (>=1.12)"] +test = ["appdirs (==1.4.4)", "pytest (>=6)", "pytest-cov (>=2.7)", "pytest-mock (>=3.6)"] [[package]] name = "pluggy" @@ -1036,7 +1061,7 @@ python-versions = "*" aws = ["pymongo-auth-aws (<2.0.0)"] encryption = ["pymongocrypt (>=1.1.0,<2.0.0)"] gssapi = ["pykerberos"] -ocsp = ["pyopenssl (>=17.2.0)", "requests (<3.0.0)", "service-identity (>=18.1.0)", "certifi"] +ocsp = ["certifi", "pyopenssl (>=17.2.0)", "requests (<3.0.0)", "service-identity (>=18.1.0)"] snappy = ["python-snappy"] srv = ["dnspython (>=1.16.0,<1.17.0)"] tls = ["ipaddress"] @@ -1055,7 +1080,7 @@ cffi = ">=1.4.1" [package.extras] docs = ["sphinx (>=1.6.5)", "sphinx-rtd-theme"] -tests = ["pytest (>=3.2.1,!=3.3.0)", "hypothesis (>=3.27.0)"] +tests = ["hypothesis (>=3.27.0)", "pytest (>=3.2.1,!=3.3.0)"] [[package]] name = "pynput" @@ -1169,7 +1194,7 @@ coverage = {version = ">=5.2.1", extras = ["toml"]} pytest = ">=4.6" [package.extras] -testing = ["fields", "hunter", "process-tests", "six", "pytest-xdist", "virtualenv"] +testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtualenv"] [[package]] name = "pytest-print" @@ -1394,8 +1419,8 @@ optional = false python-versions = ">=3.6.0" [package.extras] -optional = ["aiodns (>1.0)", "aiohttp (>=3.7.3,<4)", "boto3 (<=2)", "SQLAlchemy (>=1,<2)", "websockets (>=10,<11)", "websocket-client (>=1,<2)"] -testing = ["pytest (>=6.2.5,<7)", "pytest-asyncio (<1)", "Flask-Sockets (>=0.2,<1)", "Flask (>=1,<2)", "Werkzeug (<2)", "itsdangerous (==1.1.0)", "Jinja2 (==3.0.3)", "pytest-cov (>=2,<3)", "codecov (>=2,<3)", "flake8 (>=4,<5)", "black (==22.3.0)", "click (==8.0.4)", "psutil (>=5,<6)", "databases (>=0.5)", "boto3 (<=2)", "moto (>=3,<4)"] +optional = ["SQLAlchemy (>=1,<2)", "aiodns (>1.0)", "aiohttp (>=3.7.3,<4)", "boto3 (<=2)", "websocket-client (>=1,<2)", "websockets (>=10,<11)"] +testing = ["Flask (>=1,<2)", "Flask-Sockets (>=0.2,<1)", "Jinja2 (==3.0.3)", "Werkzeug (<2)", "black (==22.3.0)", "boto3 (<=2)", "click (==8.0.4)", "codecov (>=2,<3)", "databases (>=0.5)", "flake8 (>=4,<5)", "itsdangerous (==1.1.0)", "moto (>=3,<4)", "psutil (>=5,<6)", "pytest (>=6.2.5,<7)", "pytest-asyncio (<1)", "pytest-cov (>=2,<3)"] [[package]] name = "smmap" @@ -1450,8 +1475,8 @@ sphinxcontrib-serializinghtml = ">=1.1.5" [package.extras] docs = ["sphinxcontrib-websupport"] -lint = ["flake8 (>=3.5.0)", "isort", "mypy (>=0.950)", 
"docutils-stubs", "types-typed-ast", "types-requests"] -test = ["pytest (>=4.6)", "html5lib", "cython", "typed-ast"] +lint = ["docutils-stubs", "flake8 (>=3.5.0)", "isort", "mypy (>=0.950)", "types-requests", "types-typed-ast"] +test = ["cython", "html5lib", "pytest (>=4.6)", "typed-ast"] [[package]] name = "sphinx-qt-documentation" @@ -1482,7 +1507,7 @@ python-versions = "*" sphinx = "*" [package.extras] -dev = ["transifex-client", "sphinxcontrib-httpdomain", "bump2version"] +dev = ["bump2version", "sphinxcontrib-httpdomain", "transifex-client"] [[package]] name = "sphinxcontrib-applehelp" @@ -1493,7 +1518,7 @@ optional = false python-versions = ">=3.5" [package.extras] -lint = ["flake8", "mypy", "docutils-stubs"] +lint = ["docutils-stubs", "flake8", "mypy"] test = ["pytest"] [[package]] @@ -1505,7 +1530,7 @@ optional = false python-versions = ">=3.5" [package.extras] -lint = ["flake8", "mypy", "docutils-stubs"] +lint = ["docutils-stubs", "flake8", "mypy"] test = ["pytest"] [[package]] @@ -1517,8 +1542,8 @@ optional = false python-versions = ">=3.6" [package.extras] -lint = ["flake8", "mypy", "docutils-stubs"] -test = ["pytest", "html5lib"] +lint = ["docutils-stubs", "flake8", "mypy"] +test = ["html5lib", "pytest"] [[package]] name = "sphinxcontrib-jsmath" @@ -1529,7 +1554,7 @@ optional = false python-versions = ">=3.5" [package.extras] -test = ["pytest", "flake8", "mypy"] +test = ["flake8", "mypy", "pytest"] [[package]] name = "sphinxcontrib-qthelp" @@ -1540,7 +1565,7 @@ optional = false python-versions = ">=3.5" [package.extras] -lint = ["flake8", "mypy", "docutils-stubs"] +lint = ["docutils-stubs", "flake8", "mypy"] test = ["pytest"] [[package]] @@ -1552,7 +1577,7 @@ optional = false python-versions = ">=3.5" [package.extras] -lint = ["flake8", "mypy", "docutils-stubs"] +lint = ["docutils-stubs", "flake8", "mypy"] test = ["pytest"] [[package]] @@ -1568,7 +1593,7 @@ sphinxcontrib-serializinghtml = "*" [package.extras] lint = ["flake8"] -test = ["pytest", "sqlalchemy", "whoosh", "sphinx"] +test = ["pytest", "sphinx", "sqlalchemy", "whoosh"] [[package]] name = "stone" @@ -1639,8 +1664,8 @@ optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, <4" [package.extras] -brotli = ["brotlicffi (>=0.8.0)", "brotli (>=1.0.9)", "brotlipy (>=0.6.0)"] -secure = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "certifi", "urllib3-secure-extra", "ipaddress"] +brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"] +secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"] socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] [[package]] @@ -1684,7 +1709,7 @@ yarl = "*" [package.extras] develop = ["async-timeout", "coverage (!=4.3)", "coveralls", "pytest", "pytest-aiohttp", "pytest-cov", "sphinx", "sphinxcontrib-plantuml", "tox (>=2.4)"] -testing = ["async-timeout", "pytest", "pytest-aiohttp", "pytest-cov", "coverage (!=4.3)", "coveralls"] +testing = ["async-timeout", "coverage (!=4.3)", "coveralls", "pytest", "pytest-aiohttp", "pytest-cov"] ujson = ["ujson"] [[package]] @@ -1708,13 +1733,13 @@ optional = false python-versions = ">=3.7" [package.extras] -docs = ["sphinx", "jaraco.packaging (>=9)", "rst.linker (>=1.9)", "jaraco.tidelift (>=1.4)"] -testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.3)", "jaraco.itertools", "func-timeout", "pytest-black (>=0.3.7)", "pytest-mypy (>=0.9.1)"] +docs = ["jaraco.packaging 
(>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx"] +testing = ["func-timeout", "jaraco.itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)"] [metadata] lock-version = "1.1" -python-versions = "3.9.*" -content-hash = "878a639bb6dad7820350e47d09f31fe35758dd5c7161369bc8f1f929aefa92d3" +python-versions = "^3.9.1" +content-hash = "80cee7a98d806c905e8d220451a556c08e582d8994048bf1f619971d0eaa0c0d" [metadata.files] acre = [] @@ -2033,17 +2058,31 @@ cryptography = [ {file = "cryptography-37.0.4.tar.gz", hash = "sha256:63f9c17c0e2474ccbebc9302ce2f07b55b3b3fcb211ded18a42d5764f5c10a82"}, ] cx-freeze = [ - {file = "cx_Freeze-6.9-cp310-cp310-win32.whl", hash = "sha256:776d4fb68a4831691acbd3c374362b9b48ce2e568514a73c3d4cb14d5dcf1470"}, - {file = "cx_Freeze-6.9-cp310-cp310-win_amd64.whl", hash = "sha256:243f36d35a034a409cd6247d8cb5d1fbfd7374e3e668e813d0811f64d6bd5ed3"}, - {file = "cx_Freeze-6.9-cp36-cp36m-win32.whl", hash = "sha256:ffc855eabc735b693e2d604d71dce6d52d78a6ba1070c55d51e786dd68ed232c"}, - {file = "cx_Freeze-6.9-cp36-cp36m-win_amd64.whl", hash = "sha256:fe4e32a0c75b2b54491882926bf3ba12f8a3d589822a68a8be7c09f1dcca5546"}, - {file = "cx_Freeze-6.9-cp37-cp37m-win32.whl", hash = "sha256:99c292e7a31cb343efc0cf47f82220a44a4a3b8776651624cd8ee03c23104940"}, - {file = "cx_Freeze-6.9-cp37-cp37m-win_amd64.whl", hash = "sha256:738ab22f3a3f6bc220b16dccf2aa0603c3cd271b2a7a9d9480dab82311308b23"}, - {file = "cx_Freeze-6.9-cp38-cp38-win32.whl", hash = "sha256:c1c75df572858e623d0aa39771cd984c0abd8aacb43b2aca2d12d0bc95f25566"}, - {file = "cx_Freeze-6.9-cp38-cp38-win_amd64.whl", hash = "sha256:0788c895c47fdcf375151ce78ff42336c01aca7bc43daecb8f8f8356cdc42b43"}, - {file = "cx_Freeze-6.9-cp39-cp39-win32.whl", hash = "sha256:a31f5ddbc80b29e297370d868791470b0e3e9062db45038c23293a76ed039018"}, - {file = "cx_Freeze-6.9-cp39-cp39-win_amd64.whl", hash = "sha256:30708f603076713c0a839cdfb34f4126d68e9d61afb3d9a59daa9cf252033872"}, - {file = "cx_Freeze-6.9.tar.gz", hash = "sha256:673aa3199af2ef87fc03a43a30e5d78b27ced2cedde925da89c55b5657da267b"}, + {file = "cx_Freeze-6.11.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e372b9e72ac0e2207ee65a9d404e2669da1134dc37f5ace9a2a779099d3aa868"}, + {file = "cx_Freeze-6.11.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:dd293382e1ad270dddf5a2707db5dbb8600a1e0b0c9b0da7af9d61326eb1b325"}, + {file = "cx_Freeze-6.11.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:feec2f36bce042da6a0d92690bc592b0dcec29218adc2278535cd13b28ec3485"}, + {file = "cx_Freeze-6.11.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d5aafcc6337856d5921b20f41acdcc8d0fe770388f3a072eb25163f8825f6c5d"}, + {file = "cx_Freeze-6.11.1-cp310-cp310-win32.whl", hash = "sha256:b99cc0b6d6c1ba51bd9fe11dbfae9aabcf089ba779ea86d83d280e2e40f484e7"}, + {file = "cx_Freeze-6.11.1-cp310-cp310-win_amd64.whl", hash = "sha256:f0dfe6acf25eb096faba7d4b4b001bcd0f818e372ea1f05d900665b0ad82b0b9"}, + {file = "cx_Freeze-6.11.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:3a68e70dcb27b0720b131a35c5fdd096012fe00119a8e51d935f3fb3cd251c39"}, + {file = "cx_Freeze-6.11.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f7bde925042d8843af9b6242a1bf3865dbbae088f3183a89a575124ec2e14a4"}, + {file = "cx_Freeze-6.11.1-cp36-cp36m-win32.whl", hash = "sha256:7698fb82b6f84b3426774b5f3bee770601f26b612306319664a02f1ec5160861"}, + {file = 
"cx_Freeze-6.11.1-cp36-cp36m-win_amd64.whl", hash = "sha256:9848c975401b21a98aa896baabfed067c3e981afd5b5b0a8a5eabe5c9f23d3c5"}, + {file = "cx_Freeze-6.11.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:87dcf5ceb78dc6af910c45238128fda2394b7c430d3fa469e87e1efdeeb5d4cc"}, + {file = "cx_Freeze-6.11.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fb74d8cc1f8c658986acc19ea6875b985a979421f9bb9c310b43cd2ff5d90c44"}, + {file = "cx_Freeze-6.11.1-cp37-cp37m-win32.whl", hash = "sha256:971c0a8356ef0ee09a3097f9c9d5b52cde6d08d1ef80e997eb4a6e22fe0eff2f"}, + {file = "cx_Freeze-6.11.1-cp37-cp37m-win_amd64.whl", hash = "sha256:7c1cb44379b2093cbdde77e302a376f29aa61999c73be6e8a559463db84b85c4"}, + {file = "cx_Freeze-6.11.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:bc90d6dbde66e8ddfe6b26f63fb2ea7d6d0e4568205f40660a63b8b200dcabcf"}, + {file = "cx_Freeze-6.11.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1f56f618a23d86bdcfff22b29ec993117effd32a401060013105517301c0bf32"}, + {file = "cx_Freeze-6.11.1-cp38-cp38-win32.whl", hash = "sha256:4edfb5d65afb11eb9f0326d40d15445366481585705b3096f2cd090e30a36247"}, + {file = "cx_Freeze-6.11.1-cp38-cp38-win_amd64.whl", hash = "sha256:cfb5a8032bf424c04814c9426425fa1db4cf8c280da948969eead9f616c0fd92"}, + {file = "cx_Freeze-6.11.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:0a3e32980269cfabc2e814978bfdf4382fe3cbc9ac64f9f1bdb1cd2ddf3a40d0"}, + {file = "cx_Freeze-6.11.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:798bb7ca037c3c885efd3eda6756c84c7927c712b730b22a7f256440faa36d38"}, + {file = "cx_Freeze-6.11.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5aa1759098ca4853200a79138b626a9caa2ccf829d662b28c82ec7e71ea97cde"}, + {file = "cx_Freeze-6.11.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7962680ae04ee3afda1012026b5394a534e2526b68681d591158b7d8bc733bcf"}, + {file = "cx_Freeze-6.11.1-cp39-cp39-win32.whl", hash = "sha256:da4f82fe27e71571c0ab9d700b5e6c6c631ae39133d9b6d7157939f1e9f37312"}, + {file = "cx_Freeze-6.11.1-cp39-cp39-win_amd64.whl", hash = "sha256:aaf399b6ed5d54b7271980ae354605620bedcd52d722f57ad527bd989c56a875"}, + {file = "cx_Freeze-6.11.1.tar.gz", hash = "sha256:8f3a30c9e3394f290655e346d3b460910656b30ac6347a87499bb5ad365c6e7c"}, ] cx-logging = [ {file = "cx_Logging-3.0-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:9fcd297e5c51470521c47eff0f86ba844aeca6be97e13c3e2114ebdf03fa3c96"}, @@ -2278,6 +2317,32 @@ lazy-object-proxy = [ {file = "lazy_object_proxy-1.7.1-cp39-cp39-win_amd64.whl", hash = "sha256:677ea950bef409b47e51e733283544ac3d660b709cfce7b187f5ace137960d61"}, {file = "lazy_object_proxy-1.7.1-pp37.pp38-none-any.whl", hash = "sha256:d66906d5785da8e0be7360912e99c9188b70f52c422f9fc18223347235691a84"}, ] +lief = [ + {file = "lief-0.12.1-cp310-cp310-macosx_10_14_x86_64.whl", hash = "sha256:4fbbc9d520de87ac22210c62d22a9b088e5460f9a028741311e6f68ef8877ddd"}, + {file = "lief-0.12.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:443e4494df448ea1a021976258c7a6aca27d81b0612783fa3a84fab196fb9fcb"}, + {file = "lief-0.12.1-cp310-cp310-win32.whl", hash = "sha256:1c4019dddf03a5185462fb5ea04327cee08d40f46777b02f0773c7dc294552ea"}, + {file = "lief-0.12.1-cp310-cp310-win_amd64.whl", hash = "sha256:d7e09968f99ddf1e3983d3bcc16c62d1b6635a345fee8d8139f82b31bad457d6"}, + {file = "lief-0.12.1-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:9fa6269ec4fa3f874b807fbba3c48a46af30df2497723f6966080e3eb630cb26"}, + {file = 
"lief-0.12.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a78b05cac5fa491e01e1819573bbbbcaea0a4229f4aa3a2edb231b5695ddaf2d"}, + {file = "lief-0.12.1-cp36-cp36m-win32.whl", hash = "sha256:f1292bff96579c18e01e20b7a14043052379fe6e9a476c1d6d88aca43e5f9ac7"}, + {file = "lief-0.12.1-cp36-cp36m-win_amd64.whl", hash = "sha256:dab63876113bd573d64ce043f50153f6e2810e5e78256397aa0fe1fedf82ab84"}, + {file = "lief-0.12.1-cp37-cp37m-macosx_10_14_x86_64.whl", hash = "sha256:5771f5226b62c885a7aa30c1b98040d39229a1dab889d03155e5538e57d0054b"}, + {file = "lief-0.12.1-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:8ec307a762505076a6d31566225a231c44ec7063c0e7d751ac4654c674454c47"}, + {file = "lief-0.12.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a755f6088d3b2041e4402adf917ac87e5ad9d1c5278973f48a29a5631fe393eb"}, + {file = "lief-0.12.1-cp37-cp37m-win32.whl", hash = "sha256:5d746f7eb6d3bf35a0230c7184aaaf434cb1ea89d7e7c8e8fe14a49cf2bb17a0"}, + {file = "lief-0.12.1-cp37-cp37m-win_amd64.whl", hash = "sha256:2d3ab7212da696bcbe5ca9dd78ceaa32dfb8a0e85e18001793b4441ef4624561"}, + {file = "lief-0.12.1-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:4360b0acd525ba77777cc38f0e5128c90c93cc4e91ab566ef3aa45b7f8a8c57e"}, + {file = "lief-0.12.1-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:5e82e466d36cbabb28cc1a787b554d2feae5ab55c39cab58ef64fb6513bad92a"}, + {file = "lief-0.12.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:efa0022a3bf70ef46335639e61b946cc2d9cf012d60e263c215e3e64b1ce38b4"}, + {file = "lief-0.12.1-cp38-cp38-win32.whl", hash = "sha256:d29f91d9f64f67d3ada5b7e0e48ab084d825fb4601d32d9fecdd2bdf23cdad23"}, + {file = "lief-0.12.1-cp38-cp38-win_amd64.whl", hash = "sha256:7dea6b3f17d362f93165379c46dadb012c73b1f751c8ceac256e5f43842cd86d"}, + {file = "lief-0.12.1-cp39-cp39-macosx_10_14_x86_64.whl", hash = "sha256:44012da4c32c670a97bb8a055a4ff16168cfaa757d03986f319aa3329a43e343"}, + {file = "lief-0.12.1-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:e1d23997b0a71d34e766ff183be07854c6f698fd3d6aa44bf30b6b7f4f77ef55"}, + {file = "lief-0.12.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b845eca79c772041efb38b50cfaf951e24bc047ec462450b7e54e75b7e2bee0d"}, + {file = "lief-0.12.1-cp39-cp39-win32.whl", hash = "sha256:0df84ac2df20b14db12e69442d39b0e8cd89428ba3b131995e0570bcd3725460"}, + {file = "lief-0.12.1-cp39-cp39-win_amd64.whl", hash = "sha256:960a2da9f28c8d5dba753bb9ab77e26b3c6ff9b9658918be95650ceb8ee91e68"}, + {file = "lief-0.12.1.zip", hash = "sha256:4ff4ccfae2e1ee4ccba2b5556027dbb56282b8a973c5835c5b597e8b7b664416"}, +] log4mongo = [ {file = "log4mongo-1.7.0.tar.gz", hash = "sha256:dc374617206162a0b14167fbb5feac01dbef587539a235dadba6200362984a68"}, ] @@ -2417,7 +2482,31 @@ multidict = [ {file = "multidict-6.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:4bae31803d708f6f15fd98be6a6ac0b6958fcf68fda3c77a048a4f9073704aae"}, {file = "multidict-6.0.2.tar.gz", hash = "sha256:5ff3bd75f38e4c43f1f470f2df7a4d430b821c4ce22be384e1459cb57d6bb013"}, ] -opentimelineio = [] +opentimelineio = [ + {file = "OpenTimelineIO-0.14.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:d5466742d1de323e922965e64ca7099f6dd756774d5f8b404a11d6ec6e7c5fe0"}, + {file = "OpenTimelineIO-0.14.1-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:3f5187eb0cd8f607bfcc5c1d58ce878734975a0a6a91360a2605ad831198ed89"}, + {file = "OpenTimelineIO-0.14.1-cp27-cp27m-manylinux2010_x86_64.whl", hash = 
"sha256:a2b64bf817d3065f7302c748bcc1d5938971e157c42e67fcb4e5e3612358813b"}, + {file = "OpenTimelineIO-0.14.1-cp27-cp27m-win32.whl", hash = "sha256:4cde33ea83ba041332bae55474fc155219871396b82031dd54d3e857973805b6"}, + {file = "OpenTimelineIO-0.14.1-cp27-cp27m-win_amd64.whl", hash = "sha256:d5dc153867c688ad4f39cbac78eda069cfe4f17376d9444d202f8073efa6cbd4"}, + {file = "OpenTimelineIO-0.14.1-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:e07390dd1e0f82e5a5880ef2d498cbcbf482b4e5bfb4b9026342578a2fad358d"}, + {file = "OpenTimelineIO-0.14.1-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:4c1c522df397536c7620d44e32302165a9ef9bbbf0de83a5a0621f0a75047cc9"}, + {file = "OpenTimelineIO-0.14.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:e368a1d64366e3fdf1eadd10077a135833fdc893ff65f8dc43a91254cb7ee6fa"}, + {file = "OpenTimelineIO-0.14.1-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:cf2cd94d11d0ae0fc78418cc0d17f2fe3bf85598b9b109f98b2301272a87bff5"}, + {file = "OpenTimelineIO-0.14.1-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:7af41f43ef72fbf3c0ae2e47cabd7715eb348726c9e5e430ab36ce2357181cf4"}, + {file = "OpenTimelineIO-0.14.1-cp37-cp37m-win32.whl", hash = "sha256:55dbb859d16535ba5dab8a66a78aef8db55f030d771b6e5b91e94241b6db65bd"}, + {file = "OpenTimelineIO-0.14.1-cp37-cp37m-win_amd64.whl", hash = "sha256:08eaef8fbc423c25e94e189eb788c92c16916ae74d16ebcab34ba889e980c6ad"}, + {file = "OpenTimelineIO-0.14.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:10b34a6997d6d6edb9b8a1c93718a1e90e8202d930559cdce2ad369e0473327f"}, + {file = "OpenTimelineIO-0.14.1-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:c6b44986da8c7a64f8f549795279f0af05ec875a425d11600585dab0b3269ec2"}, + {file = "OpenTimelineIO-0.14.1-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:45e1774d9f7215190a7c1e5b70dfc237f4a03b79b0539902d9ec8074707450f9"}, + {file = "OpenTimelineIO-0.14.1-cp38-cp38-win32.whl", hash = "sha256:1ee0e72320309b8dedf0e2f40fc2b8d3dd2c854db0aba28a84a038d7177a1208"}, + {file = "OpenTimelineIO-0.14.1-cp38-cp38-win_amd64.whl", hash = "sha256:bd58e9fdc765623e160ab3ec32e9199bcb3906a6f3c06cca7564fbb7c18d2d28"}, + {file = "OpenTimelineIO-0.14.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f8d6e15f793577de59cc01e49600898ab12dbdc260dbcba83936c00965f0090a"}, + {file = "OpenTimelineIO-0.14.1-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:50644c5e43076a3717b77645657545d0be19376ecb4c6f2e4103670052d726d4"}, + {file = "OpenTimelineIO-0.14.1-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:a44f77fb5dbfd60d992ac2acc6782a7b0a26452db3a069425b8bd73b2f3bb336"}, + {file = "OpenTimelineIO-0.14.1-cp39-cp39-win32.whl", hash = "sha256:63fb0d1258f490bcebf6325067db64a0f0dc405b8b905ee2bb625f04d04a8082"}, + {file = "OpenTimelineIO-0.14.1-cp39-cp39-win_amd64.whl", hash = "sha256:8a303b2f3dfba542f588b227575f1967f7a9da854b34f620504e1ecb8d551f5f"}, + {file = "OpenTimelineIO-0.14.1.tar.gz", hash = "sha256:0b9adc0fd303b978af120259d6b1d23e0623800615b4a3e2eb9f9fb2c70d5d13"}, +] packaging = [ {file = "packaging-21.3-py3-none-any.whl", hash = "sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522"}, {file = "packaging-21.3.tar.gz", hash = "sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb"}, @@ -2430,6 +2519,14 @@ parso = [ {file = "parso-0.8.3-py2.py3-none-any.whl", hash = "sha256:c001d4636cd3aecdaf33cbb40aebb59b094be2a74c556778ef5576c175e19e75"}, {file = "parso-0.8.3.tar.gz", hash = "sha256:8c07be290bb59f03588915921e29e8a50002acaf2cdc5fa0e0114f91709fafa0"}, ] +patchelf = [ + 
{file = "patchelf-0.15.0.0-py2.py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.musllinux_1_1_aarch64.whl", hash = "sha256:08e5e30a9415a8628de47726fbf15bfcd89be35df51c8a0a12372aebd0c5b4f6"}, + {file = "patchelf-0.15.0.0-py2.py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.musllinux_1_1_ppc64le.whl", hash = "sha256:4ce9d08119816bc4316c8ecc5f33da42384934fc0fc9cfbdded53a4930705466"}, + {file = "patchelf-0.15.0.0-py2.py3-none-manylinux_2_17_s390x.manylinux2014_s390x.musllinux_1_1_s390x.whl", hash = "sha256:ae19b0f91aabc9af2608a4ca0395533f1df9122e6abc11ef2c8db6e4db0f98c2"}, + {file = "patchelf-0.15.0.0-py2.py3-none-manylinux_2_5_i686.manylinux1_i686.musllinux_1_1_i686.whl", hash = "sha256:f3f87aee44d1d1b2209e38c4227b0316bb03538df68d20b3d96205aa87868d95"}, + {file = "patchelf-0.15.0.0-py2.py3-none-manylinux_2_5_x86_64.manylinux1_x86_64.musllinux_1_1_x86_64.whl", hash = "sha256:52e48c08110f2988a9761a5a383f7ae35b1e8e06a140e320d18386d3510697ed"}, + {file = "patchelf-0.15.0.0.tar.gz", hash = "sha256:0f8dcf0df0ba919ce37e8aef67a08bde5326897098451df94ab3a5eedc9e08d9"}, +] pathlib2 = [ {file = "pathlib2-2.3.7.post1-py2.py3-none-any.whl", hash = "sha256:5266a0fd000452f1b3467d782f079a4343c63aaa119221fbdc4e39577489ca5b"}, {file = "pathlib2-2.3.7.post1.tar.gz", hash = "sha256:9fe0edad898b83c0c3e199c842b27ed216645d2e177757b2dd67384d4113c641"}, diff --git a/pyproject.toml b/pyproject.toml index dd79dfb93f..a16fa6edc8 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -28,11 +28,11 @@ packages = [ openpype = 'start:boot' [tool.poetry.dependencies] -python = "3.9.*" +python = "^3.9.1" aiohttp = "^3.7" aiohttp_json_rpc = "*" # TVPaint server acre = { git = "https://github.com/pypeclub/acre.git" } -opentimelineio = { version = "0.14.0.dev1", source = "openpype" } +opentimelineio = "^0.14" appdirs = { git = "https://github.com/ActiveState/appdirs.git", branch = "master" } blessed = "^1.17" # openpype terminal formatting coolname = "*" @@ -75,7 +75,7 @@ aiohttp-middlewares = "^2.0.0" flake8 = "^3.7" autopep8 = "^1.4" coverage = "*" -cx_freeze = "~6.9" +cx_freeze = "6.11.1" GitPython = "^3.1.17" jedi = "^0.13" Jinja2 = "^2.11" From 41417efeebb63c74d08dbccbdfb5d6ba14e2004f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Mon, 12 Sep 2022 18:00:19 +0200 Subject: [PATCH 012/480] :recycle: remove hacks --- tools/create_env.sh | 2 -- 1 file changed, 2 deletions(-) diff --git a/tools/create_env.sh b/tools/create_env.sh index 6fc40f54e2..22cc852089 100755 --- a/tools/create_env.sh +++ b/tools/create_env.sh @@ -192,8 +192,6 @@ main () { echo -e "${BIGreen}>>>${RST} Post-venv creation fixes ..." 
local openpype_index=$("$POETRY_HOME/bin/poetry" run python "$openpype_root/tools/parse_pyproject.py" tool.poetry.source.0.url) echo -e "${BIGreen}- ${RST} Using index: ${BIWhite}$openpype_index${RST}" - "$POETRY_HOME/bin/poetry" run pip install setuptools==49.6.0 - "$POETRY_HOME/bin/poetry" run pip install --disable-pip-version-check --force-reinstall wheel "$POETRY_HOME/bin/poetry" run python -m pip install --disable-pip-version-check --force-reinstall pip } From 3dc17918d417d74d9d1f60a37380735fd03fd1e2 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Mon, 12 Sep 2022 18:10:41 +0200 Subject: [PATCH 013/480] :memo: update python version specs --- README.md | 7 ++++--- website/docs/dev_requirements.md | 1 + 2 files changed, 5 insertions(+), 3 deletions(-) diff --git a/README.md b/README.md index b8d33e65b3..2c2594abd1 100644 --- a/README.md +++ b/README.md @@ -53,10 +53,11 @@ Building OpenPype To build OpenPype you currently need [Python 3.9](https://www.python.org/downloads/) as we are following [vfx platform](https://vfxplatform.com). Because of some Linux distros comes with newer Python version already, you need to install **3.9** version and make use of it. You can use perhaps [pyenv](https://github.com/pyenv/pyenv) for this on Linux. +**Note**: We do not support 3.9.0 because of [this bug](https://github.com/python/cpython/pull/22670). Please, use higher versions of 3.9.x. ### Windows -You will need [Python 3.9](https://www.python.org/downloads/) and [git](https://git-scm.com/downloads). +You will need [Python >= 3.9.1](https://www.python.org/downloads/) and [git](https://git-scm.com/downloads). More tools might be needed for installing dependencies (for example for **OpenTimelineIO**) - mostly development tools like [CMake](https://cmake.org/) and [Visual Studio](https://visualstudio.microsoft.com/cs/downloads/) @@ -82,7 +83,7 @@ OpenPype is build using [CX_Freeze](https://cx-freeze.readthedocs.io/en/latest) ### macOS -You will need [Python 3.9](https://www.python.org/downloads/) and [git](https://git-scm.com/downloads). You'll need also other tools to build +You will need [Python >= 3.9](https://www.python.org/downloads/) and [git](https://git-scm.com/downloads). You'll need also other tools to build some OpenPype dependencies like [CMake](https://cmake.org/) and **XCode Command Line Tools** (or some other build system). Easy way of installing everything necessary is to use [Homebrew](https://brew.sh): @@ -145,7 +146,7 @@ sudo ./tools/docker_build.sh centos7 If all is successful, you'll find built OpenPype in `./build/` folder. #### Manual build -You will need [Python 3.9](https://www.python.org/downloads/) and [git](https://git-scm.com/downloads). You'll also need [curl](https://curl.se) on systems that doesn't have one preinstalled. +You will need [Python >= 3.9](https://www.python.org/downloads/) and [git](https://git-scm.com/downloads). You'll also need [curl](https://curl.se) on systems that doesn't have one preinstalled. To build Python related stuff, you need Python header files installed (`python3-dev` on Ubuntu for example). diff --git a/website/docs/dev_requirements.md b/website/docs/dev_requirements.md index 640b27d69d..46acb4d8e7 100644 --- a/website/docs/dev_requirements.md +++ b/website/docs/dev_requirements.md @@ -56,6 +56,7 @@ To run mongoDB on server, use your server distribution tools to set it up (on Li ## Python **Python 3.9.x** is the recommended version to use (as per [VFX platform CY2022](https://vfxplatform.com/)). 
+**Note**: We do not support 3.9.0 because of [this bug](https://github.com/python/cpython/pull/22670). Please, use higher versions of 3.9.x. If you're planning to run openPYPE on workstations from built executables (highly recommended), you will only need python for building and development, however, if you'd like to run from source centrally, every user will need python installed. From 70948555e00c8a4677e82b24e623a3a524c0d5f2 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Mon, 12 Sep 2022 21:33:57 +0200 Subject: [PATCH 014/480] nuke: adding pipeline abstraction for new publisher --- openpype/hosts/nuke/api/__init__.py | 8 ++++ openpype/hosts/nuke/api/pipeline.py | 63 ++++++++++++++++++++++++++++- 2 files changed, 70 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/nuke/api/__init__.py b/openpype/hosts/nuke/api/__init__.py index 962f31c177..42b669fec3 100644 --- a/openpype/hosts/nuke/api/__init__.py +++ b/openpype/hosts/nuke/api/__init__.py @@ -18,6 +18,10 @@ from .pipeline import ( ls, + list_instances, + remove_instance, + select_instance, + containerise, parse_container, update_container, @@ -51,6 +55,10 @@ __all__ = ( "ls", + "list_instances", + "remove_instance", + "select_instance", + "containerise", "parse_container", "update_container", diff --git a/openpype/hosts/nuke/api/pipeline.py b/openpype/hosts/nuke/api/pipeline.py index bac42128cc..50e7dd9495 100644 --- a/openpype/hosts/nuke/api/pipeline.py +++ b/openpype/hosts/nuke/api/pipeline.py @@ -39,6 +39,7 @@ from .lib import ( check_inventory_versions, set_avalon_knob_data, read_avalon_data, + get_avalon_knob_data ) from .lib_template_builder import ( create_placeholder, update_placeholder @@ -431,7 +432,6 @@ def ls(): """ all_nodes = nuke.allNodes(recurseGroups=False) - # TODO: add readgeo, readcamera, readimage nodes = [n for n in all_nodes] for n in nodes: @@ -439,3 +439,64 @@ def ls(): container = parse_container(n) if container: yield container + + +def list_instances(): + """List all created instances to publish from current workfile. + + For SubsetManager + + Returns: + (list) of dictionaries matching instances format + """ + instances = [] + for node in nuke.allNodes(): + + if node.Class() in ["Viewer", "Dot"]: + continue + + try: + if node["disable"].value(): + continue + except Exception as E: + log.warning(E) + + # get data from avalon knob + avalon_knob_data = get_avalon_knob_data( + node, ["avalon:", "ak:"]) + + if not avalon_knob_data: + continue + + if avalon_knob_data["id"] != "pyblish.avalon.instance": + continue + + # add node name + avalon_knob_data["name"] = node.name() + + instances.append(avalon_knob_data) + + return instances + + +def remove_instance(instance): + """Remove instance from current workfile metadata. 
+ + For SubsetManager + + Args: + instance (dict): instance representation from subsetmanager model + """ + node = nuke.toNode(instance["name"]) + nuke.delete(node) + + +def select_instance(instance): + """ + Select instance in Node View + + Args: + instance (dict): instance representation from subsetmanager model + """ + node = nuke.toNode(instance["name"]) + node["selected"].setValue(True) From 8e3eab2f6007f666317f22af2894662568cb7f5f Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Mon, 12 Sep 2022 21:34:13 +0200 Subject: [PATCH 015/480] nuke: adding workfile creator --- .../nuke/plugins/create/workfile_creator.py | 77 +++++++++++++++++++ 1 file changed, 77 insertions(+) create mode 100644 openpype/hosts/nuke/plugins/create/workfile_creator.py diff --git a/openpype/hosts/nuke/plugins/create/workfile_creator.py b/openpype/hosts/nuke/plugins/create/workfile_creator.py new file mode 100644 index 0000000000..9ceaf376c1 --- /dev/null +++ b/openpype/hosts/nuke/plugins/create/workfile_creator.py @@ -0,0 +1,77 @@ +import openpype.hosts.nuke.api as api +from openpype.client import get_asset_by_name +from openpype.pipeline import ( + AutoCreator, + CreatedInstance, + legacy_io, +) + + +class WorkfileCreator(AutoCreator): + identifier = "workfile" + family = "workfile" + + default_variant = "Main" + + def get_instance_attr_defs(self): + return [] + + def collect_instances(self): + for instance_data in api.list_instances(): + creator_id = instance_data.get("creator_identifier") + if creator_id == self.identifier: + subset_name = instance_data["subset"] + instance = CreatedInstance( + self.family, subset_name, instance_data, self + ) + self._add_instance_to_context(instance) + + def update_instances(self, update_list): + # nothing to change on workfiles + pass + + def create(self, options=None): + existing_instance = None + for instance in self.create_context.instances: + if instance.family == self.family: + existing_instance = instance + break + + project_name = legacy_io.Session["AVALON_PROJECT"] + asset_name = legacy_io.Session["AVALON_ASSET"] + task_name = legacy_io.Session["AVALON_TASK"] + host_name = legacy_io.Session["AVALON_APP"] + + if existing_instance is None: + asset_doc = get_asset_by_name(project_name, asset_name) + subset_name = self.get_subset_name( + self.default_variant, task_name, asset_doc, + project_name, host_name + ) + data = { + "asset": asset_name, + "task": task_name, + "variant": self.default_variant + } + data.update(self.get_dynamic_data( + self.default_variant, task_name, asset_doc, + project_name, host_name + )) + + new_instance = CreatedInstance( + self.family, subset_name, data, self + ) + self._add_instance_to_context(new_instance) + + elif ( + existing_instance["asset"] != asset_name + or existing_instance["task"] != task_name + ): + asset_doc = get_asset_by_name(project_name, asset_name) + subset_name = self.get_subset_name( + self.default_variant, task_name, asset_doc, + project_name, host_name + ) + existing_instance["asset"] = asset_name + existing_instance["task"] = task_name + existing_instance["subset"] = subset_name From a7caced36e5f5f6184a95f9c0ce7fc30e1666107 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 13 Sep 2022 12:18:50 +0200 Subject: [PATCH 016/480] nuke: refactor host to be NukeHost --- openpype/hosts/nuke/api/__init__.py | 9 +- openpype/hosts/nuke/api/lib.py | 26 +++++ openpype/hosts/nuke/api/pipeline.py | 165 ++++++++++++++++++---------- openpype/hosts/nuke/startup/menu.py | 65 +---------- 4 files changed, 138 insertions(+), 127 
deletions(-) diff --git a/openpype/hosts/nuke/api/__init__.py b/openpype/hosts/nuke/api/__init__.py index 42b669fec3..38f51a309e 100644 --- a/openpype/hosts/nuke/api/__init__.py +++ b/openpype/hosts/nuke/api/__init__.py @@ -6,15 +6,12 @@ from .workio import ( current_file, work_root, ) - from .command import ( viewer_update_and_undo_stop ) - from .plugin import OpenPypeCreator from .pipeline import ( - install, - uninstall, + NukeHost, ls, @@ -33,7 +30,6 @@ from .lib import ( duplicate_node, convert_knob_value_to_correct_type ) - from .utils import ( colorspace_exists_on_node, get_colorspace_list @@ -50,8 +46,7 @@ __all__ = ( "viewer_update_and_undo_stop", "OpenPypeCreator", - "install", - "uninstall", + "NukeHost", "ls", diff --git a/openpype/hosts/nuke/api/lib.py b/openpype/hosts/nuke/api/lib.py index e55fdbfcb2..764e904295 100644 --- a/openpype/hosts/nuke/api/lib.py +++ b/openpype/hosts/nuke/api/lib.py @@ -2605,6 +2605,32 @@ def recreate_instance(origin_node, avalon_data=None): return new_node +def add_scripts_menu(): + try: + from scriptsmenu import launchfornuke + except ImportError: + log.warning( + "Skipping studio.menu install, because " + "'scriptsmenu' module seems unavailable." + ) + return + + # load configuration of custom menu + project_settings = get_project_settings(os.getenv("AVALON_PROJECT")) + config = project_settings["nuke"]["scriptsmenu"]["definition"] + _menu = project_settings["nuke"]["scriptsmenu"]["name"] + + if not config: + log.warning("Skipping studio menu, no definition found.") + return + + # run the launcher for Maya menu + studio_menu = launchfornuke.main(title=_menu.title()) + + # apply configuration + studio_menu.build_from_configuration(studio_menu, config) + + def add_scripts_gizmo(): # load configuration of custom menu diff --git a/openpype/hosts/nuke/api/pipeline.py b/openpype/hosts/nuke/api/pipeline.py index 50e7dd9495..2301149872 100644 --- a/openpype/hosts/nuke/api/pipeline.py +++ b/openpype/hosts/nuke/api/pipeline.py @@ -1,12 +1,18 @@ +import nuke + import os import importlib from collections import OrderedDict -import nuke - import pyblish.api import openpype +from openpype.host import ( + HostBase, + IWorkfileHost, + ILoadHost, + INewPublisher +) from openpype.api import ( Logger, get_current_project_settings @@ -39,11 +45,23 @@ from .lib import ( check_inventory_versions, set_avalon_knob_data, read_avalon_data, - get_avalon_knob_data + get_avalon_knob_data, + on_script_load, + dirmap_file_name_filter, + add_scripts_menu, + add_scripts_gizmo ) from .lib_template_builder import ( create_placeholder, update_placeholder ) +from .workio import ( + open_file, + save_file, + file_extensions, + has_unsaved_changes, + work_root, + current_file +) log = Logger.get_logger(__name__) @@ -62,6 +80,92 @@ if os.getenv("PYBLISH_GUI", None): pyblish.api.register_gui(os.getenv("PYBLISH_GUI", None)) +class NukeHost( + HostBase, IWorkfileHost, ILoadHost, INewPublisher +): + name = "nuke" + + def open_workfile(self, filepath): + return open_file(filepath) + + def save_workfile(self, filepath=None): + return save_file(filepath) + + def work_root(self, session): + return work_root(session) + + def get_current_workfile(self): + return current_file() + + def workfile_has_unsaved_changes(self): + return has_unsaved_changes() + + def get_workfile_extensions(self): + return file_extensions() + + def get_containers(self): + return ls() + + def install(self): + ''' Installing all requarements for Nuke host + ''' + + pyblish.api.register_host("nuke") + + 
self.log.info("Registering Nuke plug-ins..") + pyblish.api.register_plugin_path(PUBLISH_PATH) + register_loader_plugin_path(LOAD_PATH) + register_creator_plugin_path(CREATE_PATH) + register_inventory_action_path(INVENTORY_PATH) + + # Register Avalon event for workfiles loading. + register_event_callback("workio.open_file", check_inventory_versions) + register_event_callback("taskChanged", change_context_label) + + pyblish.api.register_callback( + "instanceToggled", on_pyblish_instance_toggled) + + _install_menu() + + # add script menu + add_scripts_menu() + add_scripts_gizmo() + + add_nuke_callbacks() + + launch_workfiles_app() + + def get_context_data(self): + pass + + def update_context_data(self, data, changes): + pass + + +def add_nuke_callbacks(): + + workfile_settings = WorkfileSettings() + # Set context settings. + nuke.addOnCreate( + workfile_settings.set_context_settings, nodeClass="Root") + nuke.addOnCreate(workfile_settings.set_favorites, nodeClass="Root") + nuke.addOnCreate(process_workfile_builder, nodeClass="Root") + + # fix ffmpeg settings on script + nuke.addOnScriptLoad(on_script_load) + + # set checker for last versions on loaded containers + nuke.addOnScriptLoad(check_inventory_versions) + nuke.addOnScriptSave(check_inventory_versions) + + # # set apply all workfile settings on script load and save + nuke.addOnScriptLoad(WorkfileSettings().set_context_settings) + + nuke.addFilenameFilter(dirmap_file_name_filter) + + log.info('Automatic syncing of write file knob to script version') + + def reload_config(): """Attempt to reload pipeline at run-time. @@ -88,52 +192,6 @@ def reload_config(): reload(module) -def install(): - ''' Installing all requarements for Nuke host - ''' - - pyblish.api.register_host("nuke") - - log.info("Registering Nuke plug-ins..") - pyblish.api.register_plugin_path(PUBLISH_PATH) - register_loader_plugin_path(LOAD_PATH) - register_creator_plugin_path(CREATE_PATH) - register_inventory_action_path(INVENTORY_PATH) - - # Register Avalon event for workfiles loading. - register_event_callback("workio.open_file", check_inventory_versions) - register_event_callback("taskChanged", change_context_label) - - pyblish.api.register_callback( - "instanceToggled", on_pyblish_instance_toggled) - workfile_settings = WorkfileSettings() - - # Set context settings. 
- nuke.addOnCreate(workfile_settings.set_context_settings, nodeClass="Root") - nuke.addOnCreate(workfile_settings.set_favorites, nodeClass="Root") - nuke.addOnCreate(process_workfile_builder, nodeClass="Root") - - _install_menu() - launch_workfiles_app() - - -def uninstall(): - '''Uninstalling host's integration - ''' - log.info("Deregistering Nuke plug-ins..") - pyblish.deregister_host("nuke") - pyblish.api.deregister_plugin_path(PUBLISH_PATH) - deregister_loader_plugin_path(LOAD_PATH) - deregister_creator_plugin_path(CREATE_PATH) - deregister_inventory_action_path(INVENTORY_PATH) - - pyblish.api.deregister_callback( - "instanceToggled", on_pyblish_instance_toggled) - - reload_config() - _uninstall_menu() - - def _show_workfiles(): # Make sure parent is not set # - this makes Workfiles tool as separated window which @@ -244,15 +302,6 @@ def _install_menu(): add_shortcuts_from_presets() -def _uninstall_menu(): - menubar = nuke.menu("Nuke") - menu = menubar.findItem(MENU_LABEL) - - for item in menu.items(): - log.info("Removing menu item: {}".format(item.name())) - menu.removeItem(item.name()) - - def change_context_label(): menubar = nuke.menu("Nuke") menu = menubar.findItem(MENU_LABEL) diff --git a/openpype/hosts/nuke/startup/menu.py b/openpype/hosts/nuke/startup/menu.py index 1461d41385..613d508387 100644 --- a/openpype/hosts/nuke/startup/menu.py +++ b/openpype/hosts/nuke/startup/menu.py @@ -1,64 +1,5 @@ -import nuke -import os - -from openpype.api import Logger from openpype.pipeline import install_host -from openpype.hosts.nuke import api -from openpype.hosts.nuke.api.lib import ( - on_script_load, - check_inventory_versions, - WorkfileSettings, - dirmap_file_name_filter, - add_scripts_gizmo -) -from openpype.settings import get_project_settings +from openpype.hosts.nuke.api import NukeHost -log = Logger.get_logger(__name__) - - -install_host(api) - -# fix ffmpeg settings on script -nuke.addOnScriptLoad(on_script_load) - -# set checker for last versions on loaded containers -nuke.addOnScriptLoad(check_inventory_versions) -nuke.addOnScriptSave(check_inventory_versions) - -# # set apply all workfile settings on script load and save -nuke.addOnScriptLoad(WorkfileSettings().set_context_settings) - -nuke.addFilenameFilter(dirmap_file_name_filter) - -log.info('Automatic syncing of write file knob to script version') - - -def add_scripts_menu(): - try: - from scriptsmenu import launchfornuke - except ImportError: - log.warning( - "Skipping studio.menu install, because " - "'scriptsmenu' module seems unavailable." 
- ) - return - - # load configuration of custom menu - project_settings = get_project_settings(os.getenv("AVALON_PROJECT")) - config = project_settings["nuke"]["scriptsmenu"]["definition"] - _menu = project_settings["nuke"]["scriptsmenu"]["name"] - - if not config: - log.warning("Skipping studio menu, no definition found.") - return - - # run the launcher for Maya menu - studio_menu = launchfornuke.main(title=_menu.title()) - - # apply configuration - studio_menu.build_from_configuration(studio_menu, config) - - -add_scripts_menu() - -add_scripts_gizmo() +host = NukeHost() +install_host(host) From 501da09b79fc8bcf54b9abda92714cb5d5eabdd3 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 13 Sep 2022 12:23:30 +0200 Subject: [PATCH 017/480] nuke: change log info and removing unused imports --- openpype/hosts/nuke/api/pipeline.py | 10 ++++------ 1 file changed, 4 insertions(+), 6 deletions(-) diff --git a/openpype/hosts/nuke/api/pipeline.py b/openpype/hosts/nuke/api/pipeline.py index 2301149872..26372d0047 100644 --- a/openpype/hosts/nuke/api/pipeline.py +++ b/openpype/hosts/nuke/api/pipeline.py @@ -22,9 +22,6 @@ from openpype.pipeline import ( register_loader_plugin_path, register_creator_plugin_path, register_inventory_action_path, - deregister_loader_plugin_path, - deregister_creator_plugin_path, - deregister_inventory_action_path, AVALON_CONTAINER_ID, ) from openpype.pipeline.workfile import BuildWorkfile @@ -138,12 +135,13 @@ class NukeHost( def get_context_data(self): pass - def update_context_data(self, data, changes): + def update_context_data(self, data): pass def add_nuke_callbacks(): - + """ Adding all available nuke callbacks + """ workfile_settings = WorkfileSettings() # Set context settings. nuke.addOnCreate( @@ -163,7 +161,7 @@ def add_nuke_callbacks(): nuke.addFilenameFilter(dirmap_file_name_filter) - log.info('Automatic syncing of write file knob to script version') + log.info("Added Nuke callbacks ...") def reload_config(): From 8e1342fd086591f713588fbdc6b4380399d0cfdf Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 13 Sep 2022 15:00:35 +0200 Subject: [PATCH 018/480] nuke: menu change to new publisher --- openpype/hosts/nuke/api/pipeline.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/nuke/api/pipeline.py b/openpype/hosts/nuke/api/pipeline.py index 26372d0047..cfc8fd52b2 100644 --- a/openpype/hosts/nuke/api/pipeline.py +++ b/openpype/hosts/nuke/api/pipeline.py @@ -231,7 +231,7 @@ def _install_menu(): ) menu.addCommand( "Publish...", - lambda: host_tools.show_publish(parent=main_window) + lambda: host_tools.show_publisher(parent=main_window) ) menu.addCommand( "Manage...", From 7a42beb2db9187e9180b75f2cc5a45d8b7081fa9 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 13 Sep 2022 15:01:05 +0200 Subject: [PATCH 019/480] nuke: add update/get context data --- openpype/hosts/nuke/api/pipeline.py | 21 ++++++++++++++++++--- 1 file changed, 18 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/nuke/api/pipeline.py b/openpype/hosts/nuke/api/pipeline.py index cfc8fd52b2..373fd7134b 100644 --- a/openpype/hosts/nuke/api/pipeline.py +++ b/openpype/hosts/nuke/api/pipeline.py @@ -1,6 +1,7 @@ import nuke import os +import json import importlib from collections import OrderedDict @@ -34,6 +35,8 @@ from openpype.tools.utils import host_tools from .command import viewer_update_and_undo_stop from .lib import ( Context, + imprint, + Knobby, get_main_window, add_publish_knob, WorkfileSettings, @@ -133,10 +136,22 @@ class NukeHost( 
launch_workfiles_app() def get_context_data(self): - pass + root_node = nuke.root() + context_value = root_node["publish_context"].getValue() + return json.loads(context_value) - def update_context_data(self, data): - pass + def update_context_data(self, data, changes): + root_node = nuke.root() + imprint( + root_node, + { + "publish_context": Knobby( + "String_Knob", + json.dumps(data), + flags=[nuke.INVISIBLE] + ) + } + ) def add_nuke_callbacks(): From 9c050d47cba9ca85fb75fdf35f678701a9c77fdf Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 13 Sep 2022 22:44:08 +0200 Subject: [PATCH 020/480] nuke: implementing write and read create data functions --- openpype/hosts/nuke/api/lib.py | 100 +++++++++++++--------------- openpype/hosts/nuke/api/pipeline.py | 21 ++---- 2 files changed, 52 insertions(+), 69 deletions(-) diff --git a/openpype/hosts/nuke/api/lib.py b/openpype/hosts/nuke/api/lib.py index 764e904295..90c19707d9 100644 --- a/openpype/hosts/nuke/api/lib.py +++ b/openpype/hosts/nuke/api/lib.py @@ -1,14 +1,13 @@ import os from pprint import pformat import re +import json import six import platform import tempfile import contextlib from collections import OrderedDict -import clique - import nuke from Qt import QtCore, QtWidgets @@ -64,6 +63,7 @@ EXCLUDED_KNOB_TYPE_ON_READ = ( 26, # Text Knob (But for backward compatibility, still be read # if value is not an empty string.) ) +JSON_PREFIX = "JSON:::" class Context: @@ -95,8 +95,38 @@ def get_main_window(): return Context.main_window +def write_create_data(node, knobname, data): + knob_value = JSON_PREFIX + json.dumps(data) + if knobname in node.knobs(): + knob = node[knobname] + knob.setValue(knob_value) + return + + knob = nuke.String_Knob(knobname) + knob.setValue(knob_value) + knob.setFlag(nuke.INVISIBLE) + node.addKnob(knob) + + +def read_create_data(node, knobname): + if knobname not in node.knobs(): + log.debug("get knob: {}".format(node[knobname])) + return + + rawdata = node[knobname].getValue() + if ( + isinstance(rawdata, six.string_types) + and rawdata.startswith(JSON_PREFIX) + ): + try: + return json.loads(rawdata[len(JSON_PREFIX):]) + except json.JSONDecodeError: + return + + class Knobby(object): - """For creating knob which it's type isn't mapped in `create_knobs` + """[DEPRICATED] For creating knob which it's type isn't + mapped in `create_knobs` Args: type (string): Nuke knob type name @@ -121,9 +151,15 @@ class Knobby(object): knob.setFlag(flag) return knob + @staticmethod + def nice_naming(key): + """Convert camelCase name into UI Display Name""" + words = re.findall('[A-Z][^A-Z]*', key[0].upper() + key[1:]) + return " ".join(words) + def create_knobs(data, tab=None): - """Create knobs by data + """[DEPRICATED] Create knobs by data Depending on the type of each dict value and creates the correct Knob. @@ -217,7 +253,7 @@ def create_knobs(data, tab=None): def imprint(node, data, tab=None): - """Store attributes with value on node + """[DEPRICATED] Store attributes with value on node Parse user data into Node knobs. Use `collections.OrderedDict` to ensure knob order. 
@@ -273,7 +309,7 @@ def imprint(node, data, tab=None): def add_publish_knob(node): - """Add Publish knob to node + """[DEPRICATED] Add Publish knob to node Arguments: node (nuke.Node): nuke node to be processed @@ -291,7 +327,7 @@ def add_publish_knob(node): def set_avalon_knob_data(node, data=None, prefix="avalon:"): - """ Sets data into nodes's avalon knob + """[DEPRICATED] Sets data into nodes's avalon knob Arguments: node (nuke.Node): Nuke node to imprint with data, @@ -353,7 +389,7 @@ def set_avalon_knob_data(node, data=None, prefix="avalon:"): def get_avalon_knob_data(node, prefix="avalon:"): - """ Gets a data from nodes's avalon knob + """[DEPRICATED] Gets a data from nodes's avalon knob Arguments: node (obj): Nuke node to search for data, @@ -393,7 +429,7 @@ def get_avalon_knob_data(node, prefix="avalon:"): def fix_data_for_node_create(data): - """Fixing data to be used for nuke knobs + """[DEPRICATED] Fixing data to be used for nuke knobs """ for k, v in data.items(): if isinstance(v, six.text_type): @@ -404,7 +440,7 @@ def fix_data_for_node_create(data): def add_write_node_legacy(name, **kwarg): - """Adding nuke write node + """[DEPRICATED] Adding nuke write node Arguments: name (str): nuke node name kwarg (attrs): data for nuke knobs @@ -567,7 +603,7 @@ def get_nuke_imageio_settings(): def get_created_node_imageio_setting_legacy(nodeclass, creator, subset): - ''' Get preset data for dataflow (fileType, compression, bitDepth) + '''[DEPRICATED] Get preset data for dataflow (fileType, compression, bitDepth) ''' assert any([creator, nodeclass]), nuke.message( @@ -2811,48 +2847,6 @@ def dirmap_file_name_filter(file_name): return file_name -# ------------------------------------ -# This function seems to be deprecated -# ------------------------------------ -def ls_img_sequence(path): - """Listing all available coherent image sequence from path - - Arguments: - path (str): A nuke's node object - - Returns: - data (dict): with nuke formated path and frameranges - """ - file = os.path.basename(path) - dirpath = os.path.dirname(path) - base, ext = os.path.splitext(file) - name, padding = os.path.splitext(base) - - # populate list of files - files = [ - f for f in os.listdir(dirpath) - if name in f - if ext in f - ] - - # create collection from list of files - collections, reminder = clique.assemble(files) - - if len(collections) > 0: - head = collections[0].format("{head}") - padding = collections[0].format("{padding}") % 1 - padding = "#" * len(padding) - tail = collections[0].format("{tail}") - file = head + padding + tail - - return { - "path": os.path.join(dirpath, file).replace("\\", "/"), - "frames": collections[0].format("[{ranges}]") - } - - return False - - def get_group_io_nodes(nodes): """Get the input and the output of a group of nodes.""" diff --git a/openpype/hosts/nuke/api/pipeline.py b/openpype/hosts/nuke/api/pipeline.py index 373fd7134b..392573e1f6 100644 --- a/openpype/hosts/nuke/api/pipeline.py +++ b/openpype/hosts/nuke/api/pipeline.py @@ -1,7 +1,6 @@ import nuke import os -import json import importlib from collections import OrderedDict @@ -35,8 +34,6 @@ from openpype.tools.utils import host_tools from .command import viewer_update_and_undo_stop from .lib import ( Context, - imprint, - Knobby, get_main_window, add_publish_knob, WorkfileSettings, @@ -49,7 +46,9 @@ from .lib import ( on_script_load, dirmap_file_name_filter, add_scripts_menu, - add_scripts_gizmo + add_scripts_gizmo, + read_create_data, + write_create_data ) from .lib_template_builder import ( 
create_placeholder, update_placeholder @@ -137,21 +136,11 @@ class NukeHost( def get_context_data(self): root_node = nuke.root() - context_value = root_node["publish_context"].getValue() - return json.loads(context_value) + return read_create_data(root_node, "publish_context") def update_context_data(self, data, changes): root_node = nuke.root() - imprint( - root_node, - { - "publish_context": Knobby( - "String_Knob", - json.dumps(data), - flags=[nuke.INVISIBLE] - ) - } - ) + write_create_data(root_node, "publish_context", data) def add_nuke_callbacks(): From 69c964d806c993ab4d5b5e72591bdb137bfbb098 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 15 Sep 2022 10:23:24 +0200 Subject: [PATCH 021/480] nuke: new creator wip --- openpype/hosts/nuke/api/lib.py | 55 +++++++++---- openpype/hosts/nuke/api/pipeline.py | 20 +++-- openpype/hosts/nuke/api/plugin.py | 121 +++++++++++++++++++++++++++- 3 files changed, 171 insertions(+), 25 deletions(-) diff --git a/openpype/hosts/nuke/api/lib.py b/openpype/hosts/nuke/api/lib.py index 90c19707d9..9959ed79ea 100644 --- a/openpype/hosts/nuke/api/lib.py +++ b/openpype/hosts/nuke/api/lib.py @@ -64,6 +64,8 @@ EXCLUDED_KNOB_TYPE_ON_READ = ( # if value is not an empty string.) ) JSON_PREFIX = "JSON:::" +ROOT_DATA_KNOB = "publish_context" +INSTANCE_DATA_KNOB = "publish_instance" class Context: @@ -95,22 +97,32 @@ def get_main_window(): return Context.main_window -def write_create_data(node, knobname, data): - knob_value = JSON_PREFIX + json.dumps(data) +def write_node_data(node, knobname, data): + # if exists then update data if knobname in node.knobs(): - knob = node[knobname] - knob.setValue(knob_value) + log.debug("Updating knobname `{}` on node `{}`".format( + knobname, node.name() + )) + update_node_data(node, knobname, data) return + log.debug("Creating knobname `{}` on node `{}`".format( + knobname, node.name() + )) + # else create new + knob_value = JSON_PREFIX + json.dumps(data) knob = nuke.String_Knob(knobname) knob.setValue(knob_value) knob.setFlag(nuke.INVISIBLE) node.addKnob(knob) -def read_create_data(node, knobname): +def read_node_data(node, knobname): + if knobname not in node.knobs(): - log.debug("get knob: {}".format(node[knobname])) + log.warnig("Knobname `{}` does not exist on node `{}`".format( + knobname, node.name() + )) return rawdata = node[knobname].getValue() @@ -124,6 +136,14 @@ def read_create_data(node, knobname): return +def update_node_data(node, knobname, data): + knob = node[knobname] + node_data = read_node_data(node, knobname) + node_data.update(data) + knob_value = JSON_PREFIX + json.dumps(node_data) + knob.setValue(knob_value) + + class Knobby(object): """[DEPRICATED] For creating knob which it's type isn't mapped in `create_knobs` @@ -2168,11 +2188,14 @@ class WorkfileSettings(object): node['frame_range_lock'].setValue(True) # adding handle_start/end to root avalon knob - if not set_avalon_knob_data(self._root_node, { - "handleStart": int(handle_start), - "handleEnd": int(handle_end) - }): - log.warning("Cannot set Avalon knob to Root node!") + write_node_data( + self._root_node, + INSTANCE_DATA_KNOB, + { + "handleStart": int(handle_start), + "handleEnd": int(handle_end) + } + ) def reset_resolution(self): """Set resolution to project resolution.""" @@ -2295,7 +2318,6 @@ def get_write_node_template_attr(node): subset=avalon_knob_data["subset"] ) - # collecting correct data correct_data = OrderedDict() @@ -2347,10 +2369,11 @@ def get_dependent_nodes(nodes): def find_free_space_to_paste_nodes( - nodes, - 
group=nuke.root(), - direction="right", - offset=300): + nodes, + group=nuke.root(), + direction="right", + offset=300 +): """ For getting coordinates in DAG (node graph) for placing new nodes diff --git a/openpype/hosts/nuke/api/pipeline.py b/openpype/hosts/nuke/api/pipeline.py index 392573e1f6..a18d3b54e7 100644 --- a/openpype/hosts/nuke/api/pipeline.py +++ b/openpype/hosts/nuke/api/pipeline.py @@ -34,6 +34,7 @@ from openpype.tools.utils import host_tools from .command import viewer_update_and_undo_stop from .lib import ( Context, + ROOT_DATA_KNOB, get_main_window, add_publish_knob, WorkfileSettings, @@ -47,8 +48,8 @@ from .lib import ( dirmap_file_name_filter, add_scripts_menu, add_scripts_gizmo, - read_create_data, - write_create_data + read_node_data, + write_node_data ) from .lib_template_builder import ( create_placeholder, update_placeholder @@ -136,11 +137,11 @@ class NukeHost( def get_context_data(self): root_node = nuke.root() - return read_create_data(root_node, "publish_context") + return read_node_data(root_node, ROOT_DATA_KNOB) def update_context_data(self, data, changes): root_node = nuke.root() - write_create_data(root_node, "publish_context", data) + write_node_data(root_node, ROOT_DATA_KNOB, data) def add_nuke_callbacks(): @@ -492,7 +493,7 @@ def ls(): yield container -def list_instances(): +def list_instances(creator_id=None): """List all created instances to publish from current workfile. For SubsetManager @@ -514,7 +515,7 @@ def list_instances(): # get data from avalon knob avalon_knob_data = get_avalon_knob_data( - node, ["avalon:", "ak:"]) + node) if not avalon_knob_data: continue @@ -522,8 +523,13 @@ def list_instances(): if avalon_knob_data["id"] != "pyblish.avalon.instance": continue + if creator_id and avalon_knob_data["identifier"] != creator_id: + continue + # add node name - avalon_knob_data["name"] = node.name() + avalon_knob_data.update({ + "instance_node": node + }) instances.append(avalon_knob_data) diff --git a/openpype/hosts/nuke/api/plugin.py b/openpype/hosts/nuke/api/plugin.py index 37ce03dc55..302d6c5cd7 100644 --- a/openpype/hosts/nuke/api/plugin.py +++ b/openpype/hosts/nuke/api/plugin.py @@ -1,15 +1,23 @@ +import nuke + import os +import sys +import six import random import string from collections import OrderedDict from abc import abstractmethod - -import nuke +from abc import ( + ABCMeta +) from openpype.api import get_current_project_settings from openpype.pipeline import ( LegacyCreator, LoaderPlugin, + CreatorError, + Creator as NewCreator, + CreatedInstance ) from .lib import ( Knobby, @@ -21,6 +29,115 @@ from .lib import ( set_node_knobs_from_settings, get_view_process_node ) +from .pipeline import ( + list_instances +) + + +class OpenPypeCreatorError(CreatorError): + pass + + +@six.add_metaclass(ABCMeta) +class NukeCreator(NewCreator): + selected_nodes = [] + + def _create_instance_node( + self, + node_name, + knobs=None, + parent=None, + node_type="NoOp" + ): + """Create node representing instance. + + Arguments: + node_name (str): Name of the new node. + knobs (OrderedDict): knobs name and values + parent (str): Name of the parent node. + node_type (str, optional): Type of the node. + + Returns: + nuke.Node: Newly created instance node. 
+ + """ + node_knobs = knobs or {} + + # set parent node + parent_node = nuke.root() + if parent: + parent_node = nuke.toNode(parent) + + with parent_node: + created_node = nuke.createNode(node_type) + created_node["name"].setValue(node_name) + + for key, values in node_knobs.items(): + if key in created_node.knobs(): + created_node["key"].setValue(values) + + return created_node + + # def create(self, subset_name, instance_data, pre_create_data): + # try: + # if pre_create_data.get("use_selection"): + # self.selected_nodes = nuke.selectedNodes() + + # # Get the node type and remove it from the data, not needed + # _node_type = instance_data.pop("node_type", None) + # if _node_type is None: + # _node_type = "NoOp" + + # instance_node = self._create_instance_node( + # subset_name, node_type=_node_type, pre_create_data) + + # instance_data["instance_node"] = instance_node.path() + + # instance = CreatedInstance( + # self.family, + # subset_name, + # instance_data, + # self) + # self._add_instance_to_context(instance) + # imprint(instance_node, instance.data_to_store()) + # return instance + + # except hou.Error as er: + # six.reraise( + # OpenPypeCreatorError, + # OpenPypeCreatorError("Creator error: {}".format(er)), + # sys.exc_info()[2]) + + # def collect_instances(self): + # for instance_data in list_instances(creator_id=self.identifier): + # created_instance = CreatedInstance.from_existing( + # instance_data, self + # ) + # self._add_instance_to_context(created_instance) + + # def update_instances(self, update_list): + # for created_inst, _changes in update_list: + # instance_node = created_inst.pop("instance_node", None) + # current_data = created_inst.data + + # imprint( + # instance_node, + # { + # key: value[1] for key, value in _changes.items() + # if current_data.get(key) != value[1] + # }, + # update=True + # ) + + # def remove_instances(self, instances): + # for instance in instances: + # remove_instance(instance) + # self._remove_instance_from_context(instance) + + # def get_pre_create_attr_defs(self): + # return [ + # BoolDef("use_selection", label="Use selection") + # ] class OpenPypeCreator(LegacyCreator): From 100fa0ff6e3c6ce372bd6149ad32496f9f3afb3c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Fri, 23 Sep 2022 15:10:18 +0200 Subject: [PATCH 022/480] :bug: remove OCIO submodule --- vendor/configs/OpenColorIO-Configs | 1 - 1 file changed, 1 deletion(-) delete mode 160000 vendor/configs/OpenColorIO-Configs diff --git a/vendor/configs/OpenColorIO-Configs b/vendor/configs/OpenColorIO-Configs deleted file mode 160000 index 0bb079c08b..0000000000 --- a/vendor/configs/OpenColorIO-Configs +++ /dev/null @@ -1 +0,0 @@ -Subproject commit 0bb079c08be410030669cbf5f19ff869b88af953 From 1e98372f6ae5072ef698684b714617a8acfe96dc Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Fri, 23 Sep 2022 15:13:17 +0200 Subject: [PATCH 023/480] :bug: create version directory if it doesn't exist --- igniter/bootstrap_repos.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/igniter/bootstrap_repos.py b/igniter/bootstrap_repos.py index ccc9d4ac52..c6772a3930 100644 --- a/igniter/bootstrap_repos.py +++ b/igniter/bootstrap_repos.py @@ -804,6 +804,8 @@ class BootstrapRepos: """ version = OpenPypeVersion.version_in_str(zip_file.name) destination_dir = self.data_dir / f"{version.major}.{version.minor}" + if not destination_dir.exists(): + destination_dir.mkdir(parents=True) destination = destination_dir / zip_file.name if destination.exists(): From 
b33120508e6ea771da356459e293db33f65ef021 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Fri, 23 Sep 2022 15:22:38 +0200 Subject: [PATCH 024/480] Removed submodule vendor/configs/OpenColorIO-Configs --- vendor/configs/OpenColorIO-Configs | 1 - 1 file changed, 1 deletion(-) delete mode 160000 vendor/configs/OpenColorIO-Configs diff --git a/vendor/configs/OpenColorIO-Configs b/vendor/configs/OpenColorIO-Configs deleted file mode 160000 index 0bb079c08b..0000000000 --- a/vendor/configs/OpenColorIO-Configs +++ /dev/null @@ -1 +0,0 @@ -Subproject commit 0bb079c08be410030669cbf5f19ff869b88af953 From b42c0a12358b0228d4d73ad218d45d82548e67fa Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Fri, 23 Sep 2022 16:07:14 +0200 Subject: [PATCH 025/480] :wrench: add version to shortcut name --- inno_setup.iss | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/inno_setup.iss b/inno_setup.iss index fa050ef1d6..3adde52a8b 100644 --- a/inno_setup.iss +++ b/inno_setup.iss @@ -48,8 +48,8 @@ Source: "build\{#build}\*"; DestDir: "{app}"; Flags: ignoreversion recursesubdir ; NOTE: Don't use "Flags: ignoreversion" on any shared system files [Icons] -Name: "{autoprograms}\{#MyAppName}"; Filename: "{app}\openpype_gui.exe" -Name: "{autodesktop}\{#MyAppName}"; Filename: "{app}\openpype_gui.exe"; Tasks: desktopicon +Name: "{autoprograms}\{#MyAppName} {#AppVer}"; Filename: "{app}\openpype_gui.exe" +Name: "{autodesktop}\{#MyAppName} {#AppVer}"; Filename: "{app}\openpype_gui.exe"; Tasks: desktopicon [Run] Filename: "{app}\openpype_gui.exe"; Description: "{cm:LaunchProgram,OpenPype}"; Flags: nowait postinstall skipifsilent From f8c789a15299fd739f370385e0f2d39d91b7ff35 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Fri, 23 Sep 2022 18:39:26 +0200 Subject: [PATCH 026/480] :recycle: remove unused conditions --- igniter/install_thread.py | 227 +++++++++++++++++--------------------- 1 file changed, 101 insertions(+), 126 deletions(-) diff --git a/igniter/install_thread.py b/igniter/install_thread.py index 0cccf664e7..7b8f9c2fca 100644 --- a/igniter/install_thread.py +++ b/igniter/install_thread.py @@ -33,7 +33,6 @@ class InstallThread(QThread): def __init__(self, parent=None,): self._mongo = None - self._path = None self._result = None QThread.__init__(self, parent) @@ -62,143 +61,119 @@ class InstallThread(QThread): progress_callback=self.set_progress, message=self.message) local_version = OpenPypeVersion.get_installed_version_str() - # if user did enter nothing, we install OpenPype from local version. - # zip content of `repos`, copy it to user data dir and append - # version to it. - if not self._path: - # user did not entered url - if not self._mongo: - # it not set in environment - if not os.getenv("OPENPYPE_MONGO"): - # try to get it from settings registry - try: - self._mongo = bs.secure_registry.get_item( - "openPypeMongo") - except ValueError: - self.message.emit( - "!!! 
We need MongoDB URL to proceed.", True) - self._set_result(-1) - return - else: - self._mongo = os.getenv("OPENPYPE_MONGO") - else: - self.message.emit("Saving mongo connection string ...", False) - bs.secure_registry.set_item("openPypeMongo", self._mongo) - - os.environ["OPENPYPE_MONGO"] = self._mongo - - self.message.emit( - f"Detecting installed OpenPype versions in {bs.data_dir}", - False) - detected = bs.find_openpype(include_zips=True) - - if detected: - if not OpenPypeVersion.get_installed_version().is_compatible( - detected[-1]): - self.message.emit(( - f"Latest detected version {detected[-1]} " - "is not compatible with the currently running " - f"{local_version}" - ), True) - self.message.emit(( - "Filtering detected versions to compatible ones..." - ), False) - - detected = [ - version for version in detected - if version.is_compatible( - OpenPypeVersion.get_installed_version()) - ] - - if OpenPypeVersion( - version=local_version, path=Path()) < detected[-1]: - self.message.emit(( - f"Latest installed version {detected[-1]} is newer " - f"then currently running {local_version}" - ), False) - self.message.emit("Skipping OpenPype install ...", False) - if detected[-1].path.suffix.lower() == ".zip": - bs.extract_openpype(detected[-1]) - self._set_result(0) - return - - if OpenPypeVersion(version=local_version).get_main_version() == detected[-1].get_main_version(): # noqa - self.message.emit(( - f"Latest installed version is the same as " - f"currently running {local_version}" - ), False) - self.message.emit("Skipping OpenPype install ...", False) - self._set_result(0) - return - - self.message.emit(( - "All installed versions are older then " - f"currently running one {local_version}" - ), False) - else: - if getattr(sys, 'frozen', False): - self.message.emit("None detected.", True) - self.message.emit(("We will use OpenPype coming with " - "installer."), False) - openpype_version = bs.create_version_from_frozen_code() - if not openpype_version: - self.message.emit( - f"!!! Install failed - {openpype_version}", True) - self._set_result(-1) - return - self.message.emit(f"Using: {openpype_version}", False) - bs.install_version(openpype_version) - self.message.emit(f"Installed as {openpype_version}", False) - self.progress.emit(100) - self._set_result(1) - return - else: - self.message.emit("None detected.", False) - - self.message.emit( - f"We will use local OpenPype version {local_version}", False) - - local_openpype = bs.create_version_from_live_code() - if not local_openpype: - self.message.emit( - f"!!! Install failed - {local_openpype}", True) - self._set_result(-1) - return + # user did not entered url + if self._mongo: + self.message.emit("Saving mongo connection string ...", False) + bs.secure_registry.set_item("openPypeMongo", self._mongo) + elif os.getenv("OPENPYPE_MONGO"): + self._mongo = os.getenv("OPENPYPE_MONGO") + else: + # try to get it from settings registry try: - bs.install_version(local_openpype) - except (OpenPypeVersionExists, - OpenPypeVersionInvalid, - OpenPypeVersionIOError) as e: - self.message.emit(f"Installed failed: ", True) - self.message.emit(str(e), True) + self._mongo = bs.secure_registry.get_item( + "openPypeMongo") + except ValueError: + self.message.emit( + "!!! 
We need MongoDB URL to proceed.", True) self._set_result(-1) return + os.environ["OPENPYPE_MONGO"] = self._mongo - self.message.emit(f"Installed as {local_openpype}", False) + self.message.emit( + f"Detecting installed OpenPype versions in {bs.data_dir}", + False) + detected = bs.find_openpype(include_zips=True) + if not detected and getattr(sys, 'frozen', False): + self.message.emit("None detected.", True) + self.message.emit(("We will use OpenPype coming with " + "installer."), False) + openpype_version = bs.create_version_from_frozen_code() + if not openpype_version: + self.message.emit( + f"!!! Install failed - {openpype_version}", True) + self._set_result(-1) + return + self.message.emit(f"Using: {openpype_version}", False) + bs.install_version(openpype_version) + self.message.emit(f"Installed as {openpype_version}", False) self.progress.emit(100) self._set_result(1) return - else: - # if we have mongo connection string, validate it, set it to - # user settings and get OPENPYPE_PATH from there. - if self._mongo: - if not validate_mongo_connection(self._mongo): - self.message.emit( - f"!!! invalid mongo url {self._mongo}", True) - self._set_result(-1) - return - bs.secure_registry.set_item("openPypeMongo", self._mongo) - os.environ["OPENPYPE_MONGO"] = self._mongo - self.message.emit(f"processing {self._path}", True) - repo_file = bs.process_entered_location(self._path) + if detected and not OpenPypeVersion.get_installed_version().is_compatible( + detected[-1]): + self.message.emit(( + f"Latest detected version {detected[-1]} " + "is not compatible with the currently running " + f"{local_version}" + ), True) + self.message.emit(( + "Filtering detected versions to compatible ones..." + ), False) - if not repo_file: - self.message.emit("!!! Cannot install", True) - self._set_result(-1) + # filter results to get only compatible versions + detected = [ + version for version in detected + if version.is_compatible( + OpenPypeVersion.get_installed_version()) + ] + + if detected: + if OpenPypeVersion( + version=local_version, path=Path()) < detected[-1]: + self.message.emit(( + f"Latest installed version {detected[-1]} is newer " + f"then currently running {local_version}" + ), False) + self.message.emit("Skipping OpenPype install ...", False) + if detected[-1].path.suffix.lower() == ".zip": + bs.extract_openpype(detected[-1]) + self._set_result(0) return + if OpenPypeVersion(version=local_version).get_main_version() == detected[ + -1].get_main_version(): # noqa + self.message.emit(( + f"Latest installed version is the same as " + f"currently running {local_version}" + ), False) + self.message.emit("Skipping OpenPype install ...", False) + self._set_result(0) + return + + self.message.emit(( + "All installed versions are older then " + f"currently running one {local_version}" + ), False) + + self.message.emit("None detected.", False) + + self.message.emit( + f"We will use local OpenPype version {local_version}", False) + + local_openpype = bs.create_version_from_live_code() + if not local_openpype: + self.message.emit( + f"!!! 
Install failed - {local_openpype}", True) + self._set_result(-1) + return + + try: + bs.install_version(local_openpype) + except (OpenPypeVersionExists, + OpenPypeVersionInvalid, + OpenPypeVersionIOError) as e: + self.message.emit(f"Installed failed: ", True) + self.message.emit(str(e), True) + self._set_result(-1) + return + + self.message.emit(f"Installed as {local_openpype}", False) + self.progress.emit(100) + self._set_result(1) + return + self.progress.emit(100) self._set_result(1) return From 47a4dc5d9c9586fcd7d207351589a57b2320dbc7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Mon, 26 Sep 2022 12:27:05 +0200 Subject: [PATCH 027/480] :rotating_light: fix hound :dog: --- igniter/install_thread.py | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/igniter/install_thread.py b/igniter/install_thread.py index 7b8f9c2fca..3c9abece65 100644 --- a/igniter/install_thread.py +++ b/igniter/install_thread.py @@ -101,8 +101,7 @@ class InstallThread(QThread): self._set_result(1) return - if detected and not OpenPypeVersion.get_installed_version().is_compatible( - detected[-1]): + if detected and not OpenPypeVersion.get_installed_version().is_compatible(detected[-1]): # noqa: E501 self.message.emit(( f"Latest detected version {detected[-1]} " "is not compatible with the currently running " @@ -132,8 +131,7 @@ class InstallThread(QThread): self._set_result(0) return - if OpenPypeVersion(version=local_version).get_main_version() == detected[ - -1].get_main_version(): # noqa + if OpenPypeVersion(version=local_version).get_main_version() == detected[-1].get_main_version(): # noqa: E501 self.message.emit(( f"Latest installed version is the same as " f"currently running {local_version}" From 13f9789dd2308921449536d9a1aa77e2afeb5184 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 29 Sep 2022 13:00:39 +0200 Subject: [PATCH 028/480] nuke: renaming and fixing node data setter and getter --- openpype/hosts/nuke/api/lib.py | 22 ++++++++++++++-------- openpype/hosts/nuke/api/pipeline.py | 9 +++++---- 2 files changed, 19 insertions(+), 12 deletions(-) diff --git a/openpype/hosts/nuke/api/lib.py b/openpype/hosts/nuke/api/lib.py index 9959ed79ea..d61d8c161e 100644 --- a/openpype/hosts/nuke/api/lib.py +++ b/openpype/hosts/nuke/api/lib.py @@ -97,7 +97,8 @@ def get_main_window(): return Context.main_window -def write_node_data(node, knobname, data): +def set_node_data(node, knobname, data): + # TODO: doc string # if exists then update data if knobname in node.knobs(): log.debug("Updating knobname `{}` on node `{}`".format( @@ -107,8 +108,8 @@ def write_node_data(node, knobname, data): return log.debug("Creating knobname `{}` on node `{}`".format( - knobname, node.name() - )) + knobname, node.name() + )) # else create new knob_value = JSON_PREFIX + json.dumps(data) knob = nuke.String_Knob(knobname) @@ -117,8 +118,8 @@ def write_node_data(node, knobname, data): node.addKnob(knob) -def read_node_data(node, knobname): - +def get_node_data(node, knobname): + # TODO: doc string if knobname not in node.knobs(): log.warnig("Knobname `{}` does not exist on node `{}`".format( knobname, node.name() @@ -137,8 +138,9 @@ def read_node_data(node, knobname): def update_node_data(node, knobname, data): + # TODO: doc string knob = node[knobname] - node_data = read_node_data(node, knobname) + node_data = get_node_data(node, knobname) or {} node_data.update(data) knob_value = JSON_PREFIX + json.dumps(node_data) knob.setValue(knob_value) @@ -2179,7 +2181,8 @@ class 
WorkfileSettings(object): range = '{0}-{1}'.format( int(data["frameStart"]), - int(data["frameEnd"])) + int(data["frameEnd"]) + ) for node in nuke.allNodes(filter="Viewer"): node['frame_range'].setValue(range) @@ -2188,7 +2191,10 @@ class WorkfileSettings(object): node['frame_range_lock'].setValue(True) # adding handle_start/end to root avalon knob - write_node_data( + log.debug("self._root_node: {}".format(self._root_node)) + log.debug("self._root_node: {}".format(self._root_node)) + + set_node_data( self._root_node, INSTANCE_DATA_KNOB, { diff --git a/openpype/hosts/nuke/api/pipeline.py b/openpype/hosts/nuke/api/pipeline.py index ddc93d7256..3a8c235ee4 100644 --- a/openpype/hosts/nuke/api/pipeline.py +++ b/openpype/hosts/nuke/api/pipeline.py @@ -44,8 +44,9 @@ from .lib import ( dirmap_file_name_filter, add_scripts_menu, add_scripts_gizmo, - read_node_data, - write_node_data + get_node_data, + set_node_data, + update_node_data ) from .workfile_template_builder import ( NukePlaceholderLoadPlugin, @@ -137,11 +138,11 @@ class NukeHost( def get_context_data(self): root_node = nuke.root() - return read_node_data(root_node, ROOT_DATA_KNOB) + return get_node_data(root_node, ROOT_DATA_KNOB) def update_context_data(self, data, changes): root_node = nuke.root() - write_node_data(root_node, ROOT_DATA_KNOB, data) + set_node_data(root_node, ROOT_DATA_KNOB, data) def add_nuke_callbacks(): From 615b7780918f8659c6e559817bacaa7b549e661b Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 29 Sep 2022 13:07:02 +0200 Subject: [PATCH 029/480] Nuke: adding docstrings --- openpype/hosts/nuke/api/lib.py | 29 ++++++++++++++++++++++++++--- 1 file changed, 26 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/nuke/api/lib.py b/openpype/hosts/nuke/api/lib.py index d61d8c161e..d9d3752d10 100644 --- a/openpype/hosts/nuke/api/lib.py +++ b/openpype/hosts/nuke/api/lib.py @@ -98,7 +98,16 @@ def get_main_window(): def set_node_data(node, knobname, data): - # TODO: doc string + """Write data to node invisible knob + + Will create new in case it doesnt exists + or update the one already created. + + Args: + node (nuke.Node): node object + knobname (str): knob name + data (dict): data to be stored in knob + """ # if exists then update data if knobname in node.knobs(): log.debug("Updating knobname `{}` on node `{}`".format( @@ -119,7 +128,15 @@ def set_node_data(node, knobname, data): def get_node_data(node, knobname): - # TODO: doc string + """Read data from node. + + Args: + node (nuke.Node): node object + knobname (str): knob name + + Returns: + dict: data stored in knob + """ if knobname not in node.knobs(): log.warnig("Knobname `{}` does not exist on node `{}`".format( knobname, node.name() @@ -138,7 +155,13 @@ def get_node_data(node, knobname): def update_node_data(node, knobname, data): - # TODO: doc string + """Update already present data. 
+ + Args: + node (nuke.Node): node object + knobname (str): knob name + data (dict): data to update knob value + """ knob = node[knobname] node_data = get_node_data(node, knobname) or {} node_data.update(data) From 6a657f9d1a8a9df5ed485261633275a5939599ba Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 29 Sep 2022 21:32:39 +0200 Subject: [PATCH 030/480] nuke: updating api --- openpype/hosts/nuke/api/__init__.py | 20 ++++++++++++++++++-- 1 file changed, 18 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/nuke/api/__init__.py b/openpype/hosts/nuke/api/__init__.py index 0cbae94bc5..002e4e3dc6 100644 --- a/openpype/hosts/nuke/api/__init__.py +++ b/openpype/hosts/nuke/api/__init__.py @@ -9,7 +9,11 @@ from .workio import ( from .command import ( viewer_update_and_undo_stop ) -from .plugin import OpenPypeCreator +from .plugin import ( + NukeCreator, + NukeCreatorError, + OpenPypeCreator +) from .pipeline import ( NukeHost, @@ -26,11 +30,16 @@ from .pipeline import ( get_workfile_build_placeholder_plugins, ) from .lib import ( + INSTANCE_DATA_KNOB, + ROOT_DATA_KNOB, maintained_selection, reset_selection, get_view_process_node, duplicate_node, - convert_knob_value_to_correct_type + convert_knob_value_to_correct_type, + get_node_data, + set_node_data, + update_node_data ) from .utils import ( colorspace_exists_on_node, @@ -47,6 +56,8 @@ __all__ = ( "viewer_update_and_undo_stop", + "NukeCreator", + "NukeCreatorError", "OpenPypeCreator", "NukeHost", @@ -62,11 +73,16 @@ __all__ = ( "get_workfile_build_placeholder_plugins", + "INSTANCE_DATA_KNOB", + "ROOT_DATA_KNOB", "maintained_selection", "reset_selection", "get_view_process_node", "duplicate_node", "convert_knob_value_to_correct_type", + "get_node_data", + "set_node_data", + "update_node_data", "colorspace_exists_on_node", "get_colorspace_list" From 4a890a452d162110b93bca99a38129f12443d1e6 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 29 Sep 2022 21:36:54 +0200 Subject: [PATCH 031/480] nuke: removing create from menu --- openpype/hosts/nuke/api/pipeline.py | 10 +++------- openpype/settings/defaults/project_settings/nuke.json | 1 - .../schemas/projects_schema/schema_project_nuke.json | 5 ----- 3 files changed, 3 insertions(+), 13 deletions(-) diff --git a/openpype/hosts/nuke/api/pipeline.py b/openpype/hosts/nuke/api/pipeline.py index 3a8c235ee4..5abe182c5e 100644 --- a/openpype/hosts/nuke/api/pipeline.py +++ b/openpype/hosts/nuke/api/pipeline.py @@ -231,8 +231,8 @@ def _install_menu(): menu.addSeparator() menu.addCommand( - "Create...", - lambda: host_tools.show_creator(parent=main_window) + "Publish...", + lambda: host_tools.show_publisher(parent=main_window) ) menu.addCommand( "Load...", @@ -241,14 +241,11 @@ def _install_menu(): use_context=True ) ) - menu.addCommand( - "Publish...", - lambda: host_tools.show_publisher(parent=main_window) - ) menu.addCommand( "Manage...", lambda: host_tools.show_scene_inventory(parent=main_window) ) + menu.addSeparator() menu.addCommand( "Library...", lambda: host_tools.show_library_loader( @@ -344,7 +341,6 @@ def add_shortcuts_from_presets(): if nuke_presets.get("menu"): menu_label_mapping = { "manage": "Manage...", - "create": "Create...", "load": "Load...", "build_workfile": "Build Workfile", "publish": "Publish..." 
diff --git a/openpype/settings/defaults/project_settings/nuke.json b/openpype/settings/defaults/project_settings/nuke.json index c3eda2cbb4..7b3d07d441 100644 --- a/openpype/settings/defaults/project_settings/nuke.json +++ b/openpype/settings/defaults/project_settings/nuke.json @@ -1,7 +1,6 @@ { "general": { "menu": { - "create": "ctrl+shift+alt+c", "publish": "ctrl+alt+p", "load": "ctrl+alt+l", "manage": "ctrl+alt+m", diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_nuke.json b/openpype/settings/entities/schemas/projects_schema/schema_project_nuke.json index 7cf82b9e69..c21df7d44d 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_nuke.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_nuke.json @@ -17,11 +17,6 @@ "key": "menu", "label": "OpenPype Menu shortcuts", "children": [ - { - "type": "text", - "key": "create", - "label": "Create..." - }, { "type": "text", "key": "publish", From 1ce3fa421a61722815653f34f585f9589b20c9e8 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 29 Sep 2022 21:37:59 +0200 Subject: [PATCH 032/480] nuke: adding abstract NukeCreator class --- openpype/hosts/nuke/api/plugin.py | 133 ++++++++++++++++-------------- 1 file changed, 73 insertions(+), 60 deletions(-) diff --git a/openpype/hosts/nuke/api/plugin.py b/openpype/hosts/nuke/api/plugin.py index 302d6c5cd7..5ac144b2c7 100644 --- a/openpype/hosts/nuke/api/plugin.py +++ b/openpype/hosts/nuke/api/plugin.py @@ -12,6 +12,7 @@ from abc import ( ) from openpype.api import get_current_project_settings +from openpype.lib import BoolDef from openpype.pipeline import ( LegacyCreator, LoaderPlugin, @@ -20,6 +21,7 @@ from openpype.pipeline import ( CreatedInstance ) from .lib import ( + INSTANCE_DATA_KNOB, Knobby, check_subsetname_exists, maintained_selection, @@ -27,14 +29,16 @@ from .lib import ( add_publish_knob, get_nuke_imageio_settings, set_node_knobs_from_settings, - get_view_process_node + get_view_process_node, + set_node_data ) from .pipeline import ( - list_instances + list_instances, + remove_instance ) -class OpenPypeCreatorError(CreatorError): +class NukeCreatorError(CreatorError): pass @@ -47,7 +51,7 @@ class NukeCreator(NewCreator): node_name, knobs=None, parent=None, - node_type="NoOp" + node_type=None ): """Create node representing instance. @@ -61,6 +65,8 @@ class NukeCreator(NewCreator): nuke.Node: Newly created instance node. 
""" + node_type = node_type or "NoOp" + node_knobs = knobs or {} # set parent node @@ -68,76 +74,83 @@ class NukeCreator(NewCreator): if parent: parent_node = nuke.toNode(parent) - with parent_node: - created_node = nuke.createNode(node_type) - created_node["name"].setValue(node_name) + try: + with parent_node: + created_node = nuke.createNode(node_type) + created_node["name"].setValue(node_name) - for key, values in node_knobs.items(): - if key in created_node.knobs(): - created_node["key"].setValue(values) + for key, values in node_knobs.items(): + if key in created_node.knobs(): + created_node["key"].setValue(values) + except Exception as _err: + raise NukeCreatorError("Creating have failed: {}".format(_err)) return created_node - # def create(self, subset_name, instance_data, pre_create_data): - # try: - # if pre_create_data.get("use_selection"): - # self.selected_nodes = nuke.selectedNodes() + def create(self, subset_name, instance_data, pre_create_data): + try: + if pre_create_data.get("use_selection"): + self.selected_nodes = nuke.selectedNodes() + else: + self.selected_nodes = None - # # Get the node type and remove it from the data, not needed - # _node_type = instance_data.pop("node_type", None) - # if _node_type is None: - # _node_type = "NoOp" + instance_node = self._create_instance_node( + subset_name, + node_type=instance_data.pop("node_type", None) + ) - # instance_node = self._create_instance_node( - # subset_name, node_type=_node_type, pre_create_data) + instance = CreatedInstance( + self.family, + subset_name, + instance_data, + self) - # instance_data["instance_node"] = instance_node.path() + instance.transient_data["node"] = instance_node - # instance = CreatedInstance( - # self.family, - # subset_name, - # instance_data, - # self) - # self._add_instance_to_context(instance) - # imprint(instance_node, instance.data_to_store()) - # return instance + self._add_instance_to_context(instance) - # except hou.Error as er: - # six.reraise( - # OpenPypeCreatorError, - # OpenPypeCreatorError("Creator error: {}".format(er)), - # sys.exc_info()[2]) + set_node_data( + instance_node, INSTANCE_DATA_KNOB, instance.data_to_store()) - # def collect_instances(self): - # for instance_data in list_instances(creator_id=self.identifier): - # created_instance = CreatedInstance.from_existing( - # instance_data, self - # ) - # self._add_instance_to_context(created_instance) + return instance - # def update_instances(self, update_list): - # for created_inst, _changes in update_list: - # instance_node = created_inst.pop("instance_node", None) - # current_data = created_inst.data + except Exception as er: + six.reraise( + NukeCreatorError, + NukeCreatorError("Creator error: {}".format(er)), + sys.exc_info()[2]) - # imprint( - # instance_node, - # { - # key: value[1] for key, value in _changes.items() - # if current_data.get(key) != value[1] - # }, - # update=True - # ) + def collect_instances(self): + for (node, data) in list_instances(creator_id=self.identifier): + created_instance = CreatedInstance.from_existing( + data, self + ) + created_instance.transient_data["node"] = node + self._add_instance_to_context(created_instance) - # def remove_instances(self, instances): - # for instance in instances: - # remove_instance(instance) - # self._remove_instance_from_context(instance) + def update_instances(self, update_list): + for created_inst, _changes in update_list: + instance_node = created_inst.transient_data["node"] + current_data = created_inst.data - # def get_pre_create_attr_defs(self): - # 
return [ - # BoolDef("use_selection", label="Use selection") - # ] + set_node_data( + instance_node, + INSTANCE_DATA_KNOB, + { + key: value[1] for key, value in _changes.items() + if current_data.get(key) != value[1] + } + ) + + def remove_instances(self, instances): + for instance in instances: + remove_instance(instance) + self._remove_instance_from_context(instance) + + def get_pre_create_attr_defs(self): + return [ + BoolDef("use_selection", label="Use selection") + ] class OpenPypeCreator(LegacyCreator): From 02ac5c19e93970e5c86411eaa5b6c28db72cd6ff Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 29 Sep 2022 21:39:01 +0200 Subject: [PATCH 033/480] nuke: improving workfile creator --- .../nuke/plugins/create/workfile_creator.py | 42 +++++++++++++++---- 1 file changed, 34 insertions(+), 8 deletions(-) diff --git a/openpype/hosts/nuke/plugins/create/workfile_creator.py b/openpype/hosts/nuke/plugins/create/workfile_creator.py index 9ceaf376c1..f2227737b4 100644 --- a/openpype/hosts/nuke/plugins/create/workfile_creator.py +++ b/openpype/hosts/nuke/plugins/create/workfile_creator.py @@ -5,6 +5,7 @@ from openpype.pipeline import ( CreatedInstance, legacy_io, ) +import nuke class WorkfileCreator(AutoCreator): @@ -17,14 +18,37 @@ class WorkfileCreator(AutoCreator): return [] def collect_instances(self): - for instance_data in api.list_instances(): - creator_id = instance_data.get("creator_identifier") - if creator_id == self.identifier: - subset_name = instance_data["subset"] - instance = CreatedInstance( - self.family, subset_name, instance_data, self - ) - self._add_instance_to_context(instance) + root_node = nuke.root() + instance_data = api.get_node_data( + root_node, api.INSTANCE_DATA_KNOB + ) + self.log.debug("__ instance_data: {}".format(instance_data)) + + project_name = legacy_io.Session["AVALON_PROJECT"] + asset_name = legacy_io.Session["AVALON_ASSET"] + task_name = legacy_io.Session["AVALON_TASK"] + host_name = legacy_io.Session["AVALON_APP"] + + asset_doc = get_asset_by_name(project_name, asset_name) + subset_name = self.get_subset_name( + self.default_variant, task_name, asset_doc, + project_name, host_name + ) + self.log.debug("__ subset_name: {}".format(subset_name)) + instance_data.update({ + "asset": asset_name, + "task": task_name, + "variant": self.default_variant + }) + instance_data.update(self.get_dynamic_data( + self.default_variant, task_name, asset_doc, + project_name, host_name + )) + + instance = CreatedInstance( + self.family, subset_name, instance_data, self + ) + self._add_instance_to_context(instance) def update_instances(self, update_list): # nothing to change on workfiles @@ -42,6 +66,8 @@ class WorkfileCreator(AutoCreator): task_name = legacy_io.Session["AVALON_TASK"] host_name = legacy_io.Session["AVALON_APP"] + self.log.debug("__ existing_instance: {}".format(existing_instance)) + if existing_instance is None: asset_doc = get_asset_by_name(project_name, asset_name) subset_name = self.get_subset_name( From 7b30692171bd377e6718f59e0d65dae2e8bb98f1 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 29 Sep 2022 21:44:21 +0200 Subject: [PATCH 034/480] nuke: improving list/remove/select instance functions --- openpype/hosts/nuke/api/pipeline.py | 36 ++++++++++++++--------------- 1 file changed, 18 insertions(+), 18 deletions(-) diff --git a/openpype/hosts/nuke/api/pipeline.py b/openpype/hosts/nuke/api/pipeline.py index 5abe182c5e..ecf38812cc 100644 --- a/openpype/hosts/nuke/api/pipeline.py +++ b/openpype/hosts/nuke/api/pipeline.py @@ -31,6 +31,7 @@ from 
.command import viewer_update_and_undo_stop from .lib import ( Context, ROOT_DATA_KNOB, + INSTANCE_DATA_KNOB, get_main_window, add_publish_knob, WorkfileSettings, @@ -504,8 +505,8 @@ def list_instances(creator_id=None): Returns: (list) of dictionaries matching instances format """ - instances = [] - for node in nuke.allNodes(): + listed_instances = [] + for node in nuke.allNodes(recurseGroups=True): if node.Class() in ["Viewer", "Dot"]: continue @@ -513,30 +514,29 @@ def list_instances(creator_id=None): try: if node["disable"].value(): continue - except Exception as E: - log.warning(E) + except Exception as _exc: + log.debug("Node {} have no disable knob - {}".format( + node.fullName(), _exc)) # get data from avalon knob - avalon_knob_data = get_avalon_knob_data( - node) + instance_data = get_node_data( + node, INSTANCE_DATA_KNOB) - if not avalon_knob_data: + if not instance_data: continue - if avalon_knob_data["id"] != "pyblish.avalon.instance": + if instance_data["id"] != "pyblish.avalon.instance": continue - if creator_id and avalon_knob_data["identifier"] != creator_id: + log.debug("_ creator_id: {}".format(creator_id)) + if creator_id and instance_data["creator_identifier"] != creator_id: continue - # add node name - avalon_knob_data.update({ - "instance_node": node - }) + listed_instances.append((node, instance_data)) instances.append(avalon_knob_data) - return instances + return listed_instances def remove_instance(instance): @@ -547,8 +547,8 @@ def remove_instance(instance): Args: instance (dict): instance representation from subsetmanager model """ - node = nuke.toNode(instance["name"]) - nuke.delete(node) + instance_node = instance.transient_data["node"] + nuke.delete(instance_node) def select_instance(instance): @@ -558,5 +558,5 @@ def select_instance(instance): Args: instance (dict): instance representation from subsetmanager model """ - node = nuke.toNode(instance["name"]) - node["selected"].setValue(True) + instance_node = instance.transient_data["node"] + instance_node["selected"].setValue(True) From e84eb355b6c978cb978487a74fc11c4f83bc1caf Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 29 Sep 2022 21:46:25 +0200 Subject: [PATCH 035/480] nuke: remove residual code --- openpype/hosts/nuke/api/pipeline.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/openpype/hosts/nuke/api/pipeline.py b/openpype/hosts/nuke/api/pipeline.py index ecf38812cc..cbe8b547c6 100644 --- a/openpype/hosts/nuke/api/pipeline.py +++ b/openpype/hosts/nuke/api/pipeline.py @@ -534,8 +534,6 @@ def list_instances(creator_id=None): listed_instances.append((node, instance_data)) - instances.append(avalon_knob_data) - return listed_instances From 9e609984e1b2b9225f49e7b2e3b60ac0be13dcfb Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 29 Sep 2022 21:46:53 +0200 Subject: [PATCH 036/480] nuke: no need to log --- openpype/hosts/nuke/api/lib.py | 7 ------- 1 file changed, 7 deletions(-) diff --git a/openpype/hosts/nuke/api/lib.py b/openpype/hosts/nuke/api/lib.py index d9d3752d10..211bab2d80 100644 --- a/openpype/hosts/nuke/api/lib.py +++ b/openpype/hosts/nuke/api/lib.py @@ -138,9 +138,6 @@ def get_node_data(node, knobname): dict: data stored in knob """ if knobname not in node.knobs(): - log.warnig("Knobname `{}` does not exist on node `{}`".format( - knobname, node.name() - )) return rawdata = node[knobname].getValue() @@ -2213,10 +2210,6 @@ class WorkfileSettings(object): node['frame_range'].setValue(range) node['frame_range_lock'].setValue(True) - # adding handle_start/end to root avalon knob - 
log.debug("self._root_node: {}".format(self._root_node)) - log.debug("self._root_node: {}".format(self._root_node)) - set_node_data( self._root_node, INSTANCE_DATA_KNOB, From 541b9d41f2460e4da3a24f4188ec3309546a992a Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 29 Sep 2022 22:25:02 +0200 Subject: [PATCH 037/480] nuke: workfile creator cleaning --- .../nuke/plugins/create/workfile_creator.py | 50 ++----------------- 1 file changed, 4 insertions(+), 46 deletions(-) diff --git a/openpype/hosts/nuke/plugins/create/workfile_creator.py b/openpype/hosts/nuke/plugins/create/workfile_creator.py index f2227737b4..ae54d6bcb2 100644 --- a/openpype/hosts/nuke/plugins/create/workfile_creator.py +++ b/openpype/hosts/nuke/plugins/create/workfile_creator.py @@ -48,6 +48,7 @@ class WorkfileCreator(AutoCreator): instance = CreatedInstance( self.family, subset_name, instance_data, self ) + instance.transient_data["node"] = root_node self._add_instance_to_context(instance) def update_instances(self, update_list): @@ -55,49 +56,6 @@ class WorkfileCreator(AutoCreator): pass def create(self, options=None): - existing_instance = None - for instance in self.create_context.instances: - if instance.family == self.family: - existing_instance = instance - break - - project_name = legacy_io.Session["AVALON_PROJECT"] - asset_name = legacy_io.Session["AVALON_ASSET"] - task_name = legacy_io.Session["AVALON_TASK"] - host_name = legacy_io.Session["AVALON_APP"] - - self.log.debug("__ existing_instance: {}".format(existing_instance)) - - if existing_instance is None: - asset_doc = get_asset_by_name(project_name, asset_name) - subset_name = self.get_subset_name( - self.default_variant, task_name, asset_doc, - project_name, host_name - ) - data = { - "asset": asset_name, - "task": task_name, - "variant": self.default_variant - } - data.update(self.get_dynamic_data( - self.default_variant, task_name, asset_doc, - project_name, host_name - )) - - new_instance = CreatedInstance( - self.family, subset_name, data, self - ) - self._add_instance_to_context(new_instance) - - elif ( - existing_instance["asset"] != asset_name - or existing_instance["task"] != task_name - ): - asset_doc = get_asset_by_name(project_name, asset_name) - subset_name = self.get_subset_name( - self.default_variant, task_name, asset_doc, - project_name, host_name - ) - existing_instance["asset"] = asset_name - existing_instance["task"] = task_name - existing_instance["subset"] = subset_name + # no need to create if it is created + # in `collect_instances` + pass From 5cde873d504e71970868301d33ffe4ffae4daaa5 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 29 Sep 2022 22:25:10 +0200 Subject: [PATCH 038/480] fixing updating --- openpype/hosts/nuke/api/plugin.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/nuke/api/plugin.py b/openpype/hosts/nuke/api/plugin.py index 5ac144b2c7..af29fa2dd1 100644 --- a/openpype/hosts/nuke/api/plugin.py +++ b/openpype/hosts/nuke/api/plugin.py @@ -138,7 +138,7 @@ class NukeCreator(NewCreator): INSTANCE_DATA_KNOB, { key: value[1] for key, value in _changes.items() - if current_data.get(key) != value[1] + if current_data.get(key) != value[0] } ) From 495ea784148687c3e9a6437b9a949d0dfd17f71b Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Mon, 3 Oct 2022 12:31:54 +0200 Subject: [PATCH 039/480] nuke: refactory Backdrop Creator to new publisher --- openpype/hosts/nuke/api/__init__.py | 2 + openpype/hosts/nuke/api/plugin.py | 81 ++++++++++++++---- .../nuke/plugins/create/create_backdrop.py | 
85 +++++++++---------- 3 files changed, 109 insertions(+), 59 deletions(-) diff --git a/openpype/hosts/nuke/api/__init__.py b/openpype/hosts/nuke/api/__init__.py index 002e4e3dc6..8b18d4bc3b 100644 --- a/openpype/hosts/nuke/api/__init__.py +++ b/openpype/hosts/nuke/api/__init__.py @@ -34,6 +34,7 @@ from .lib import ( ROOT_DATA_KNOB, maintained_selection, reset_selection, + select_nodes, get_view_process_node, duplicate_node, convert_knob_value_to_correct_type, @@ -77,6 +78,7 @@ __all__ = ( "ROOT_DATA_KNOB", "maintained_selection", "reset_selection", + "select_nodes", "get_view_process_node", "duplicate_node", "convert_knob_value_to_correct_type", diff --git a/openpype/hosts/nuke/api/plugin.py b/openpype/hosts/nuke/api/plugin.py index af29fa2dd1..7cb8bc8b84 100644 --- a/openpype/hosts/nuke/api/plugin.py +++ b/openpype/hosts/nuke/api/plugin.py @@ -7,8 +7,10 @@ import random import string from collections import OrderedDict from abc import abstractmethod -from abc import ( - ABCMeta + +from openpype.client import ( + get_asset_by_name, + get_subsets, ) from openpype.api import get_current_project_settings @@ -18,7 +20,8 @@ from openpype.pipeline import ( LoaderPlugin, CreatorError, Creator as NewCreator, - CreatedInstance + CreatedInstance, + legacy_io ) from .lib import ( INSTANCE_DATA_KNOB, @@ -42,11 +45,39 @@ class NukeCreatorError(CreatorError): pass -@six.add_metaclass(ABCMeta) class NukeCreator(NewCreator): selected_nodes = [] - def _create_instance_node( + def add_info_knob(self, node): + if "OP_info" in node.knobs().keys(): + return + + # add info text + info_knob = nuke.Text_Knob("OP_info", "") + info_knob.setValue(""" +

+Do not erase manually !
+This node is maintained by OpenPype Publisher.
+We recomand to remove it from the Publisher gui.
+ """) + node.addKnob(info_knob) + + def check_existing_subset(self, subset_name, instance_data): + """Check if existing subset name versions already exists.""" + # Get all subsets of the current asset + project_name = legacy_io.active_project() + asset_doc = get_asset_by_name( + project_name, instance_data["asset"], fields=["_id"] + ) + subset_docs = get_subsets( + project_name, asset_ids=[asset_doc["_id"]], fields=["name"] + ) + existing_subset_names_low = { + subset_doc["name"].lower() + for subset_doc in subset_docs + } + return subset_name.lower() in existing_subset_names_low + + def create_instance_node( self, node_name, knobs=None, @@ -57,9 +88,9 @@ class NukeCreator(NewCreator): Arguments: node_name (str): Name of the new node. - knobs (OrderedDict): knobs name and values + knobs (OrderedDict): node knobs name and values parent (str): Name of the parent node. - node_type (str, optional): Type of the node. + node_type (str, optional): Nuke node Class. Returns: nuke.Node: Newly created instance node. @@ -79,6 +110,8 @@ class NukeCreator(NewCreator): created_node = nuke.createNode(node_type) created_node["name"].setValue(node_name) + self.add_info_knob(created_node) + for key, values in node_knobs.items(): if key in created_node.knobs(): created_node["key"].setValue(values) @@ -87,23 +120,39 @@ class NukeCreator(NewCreator): return created_node - def create(self, subset_name, instance_data, pre_create_data): - try: - if pre_create_data.get("use_selection"): - self.selected_nodes = nuke.selectedNodes() - else: - self.selected_nodes = None + def set_selected_nodes(self, pre_create_data): + if pre_create_data.get("use_selection"): + self.selected_nodes = nuke.selectedNodes() + if self.selected_nodes == []: + raise NukeCreatorError("Creator error: No active selection") + else: + self.selected_nodes = [] - instance_node = self._create_instance_node( + self.log.debug("Selection is: {}".format(self.selected_nodes)) + + def create(self, subset_name, instance_data, pre_create_data): + + # make sure selected nodes are added + self.set_selected_nodes(pre_create_data) + + # make sure subset name is unique + if self.check_existing_subset(subset_name, instance_data): + raise NukeCreatorError( + ("subset {} is already published with different HDA" + "definition.").format(subset_name)) + + try: + + instance_node = self.create_instance_node( subset_name, node_type=instance_data.pop("node_type", None) ) - instance = CreatedInstance( self.family, subset_name, instance_data, - self) + self + ) instance.transient_data["node"] = instance_node diff --git a/openpype/hosts/nuke/plugins/create/create_backdrop.py b/openpype/hosts/nuke/plugins/create/create_backdrop.py index 0c11b3f274..e622fd12be 100644 --- a/openpype/hosts/nuke/plugins/create/create_backdrop.py +++ b/openpype/hosts/nuke/plugins/create/create_backdrop.py @@ -1,56 +1,55 @@ -import nuke -from openpype.hosts.nuke.api import plugin -from openpype.hosts.nuke.api.lib import ( - select_nodes, - set_avalon_knob_data +from nukescripts import autoBackdrop + +from openpype.hosts.nuke.api import ( + NukeCreator, + NukeCreatorError, + maintained_selection, + select_nodes + ) -class CreateBackdrop(plugin.OpenPypeCreator): +class CreateBackdrop(NukeCreator): """Add Publishable Backdrop""" - name = "nukenodes" + identifier = "create_backdrop" label = "Create Backdrop" family = "nukenodes" icon = "file-archive-o" - defaults = ["Main"] + maintain_selection = True - def __init__(self, *args, **kwargs): - super(CreateBackdrop, self).__init__(*args, **kwargs) - 
self.nodes = nuke.selectedNodes() - self.node_color = "0xdfea5dff" - return + # plugin attributes + node_color = "0xdfea5dff" - def process(self): - from nukescripts import autoBackdrop - nodes = list() - if (self.options or {}).get("useSelection"): - nodes = self.nodes + def create_instance_node( + self, + node_name, + knobs=None, + parent=None, + node_type=None + ): + with maintained_selection(): + if len(self.selected_nodes) >= 1: + select_nodes(self.selected_nodes) - if len(nodes) >= 1: - select_nodes(nodes) - bckd_node = autoBackdrop() - bckd_node["name"].setValue("{}_BDN".format(self.name)) - bckd_node["tile_color"].setValue(int(self.node_color, 16)) - bckd_node["note_font_size"].setValue(24) - bckd_node["label"].setValue("[{}]".format(self.name)) - # add avalon knobs - instance = set_avalon_knob_data(bckd_node, self.data) + created_node = autoBackdrop() + created_node["name"].setValue(node_name) + created_node["tile_color"].setValue(int(self.node_color, 16)) + created_node["note_font_size"].setValue(24) + created_node["label"].setValue("[{}]".format(node_name)) - return instance - else: - msg = str("Please select nodes you " - "wish to add to a container") - self.log.error(msg) - nuke.message(msg) - return - else: - bckd_node = autoBackdrop() - bckd_node["name"].setValue("{}_BDN".format(self.name)) - bckd_node["tile_color"].setValue(int(self.node_color, 16)) - bckd_node["note_font_size"].setValue(24) - bckd_node["label"].setValue("[{}]".format(self.name)) - # add avalon knobs - instance = set_avalon_knob_data(bckd_node, self.data) + return created_node - return instance + def create(self, subset_name, instance_data, pre_create_data): + if self.check_existing_subset(subset_name, instance_data): + raise NukeCreatorError( + ("subset {} is already published with different HDA" + "definition.").format(subset_name)) + + instance = super(CreateBackdrop, self).create( + subset_name, + instance_data, + pre_create_data + ) + + return instance From 72dd122809fc29dddadabebf9692d60ab6043cab Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Mon, 3 Oct 2022 12:58:38 +0200 Subject: [PATCH 040/480] nuke: refactory publishing backdrop to new publisher --- .../nuke/plugins/publish/collect_backdrop.py | 18 +++++++++---- .../nuke/plugins/publish/extract_backdrop.py | 26 +++++++++---------- .../nuke/plugins/publish/validate_backdrop.py | 7 ++--- 3 files changed, 30 insertions(+), 21 deletions(-) diff --git a/openpype/hosts/nuke/plugins/publish/collect_backdrop.py b/openpype/hosts/nuke/plugins/publish/collect_backdrop.py index 4efbb88b8c..688b1054ab 100644 --- a/openpype/hosts/nuke/plugins/publish/collect_backdrop.py +++ b/openpype/hosts/nuke/plugins/publish/collect_backdrop.py @@ -1,3 +1,4 @@ +from pprint import pformat import pyblish.api from openpype.hosts.nuke.api import lib as pnlib import nuke @@ -14,8 +15,14 @@ class CollectBackdrops(pyblish.api.InstancePlugin): families = ["nukenodes"] def process(self, instance): + self.log.debug(pformat(instance.data)) - bckn = instance[0] + # new publisher way + if instance.data.get("transientData"): + bckn = instance.data["transientData"]["node"] + else: + # or backward compatible + bckn = instance[0] # define size of the backdrop left = bckn.xpos() @@ -23,6 +30,7 @@ class CollectBackdrops(pyblish.api.InstancePlugin): right = left + bckn['bdwidth'].value() bottom = top + bckn['bdheight'].value() + instance.data["transientData"]["childNodes"] = [] # iterate all nodes for node in nuke.allNodes(): @@ -37,13 +45,13 @@ class 
CollectBackdrops(pyblish.api.InstancePlugin): and (node.ypos() + node.screenHeight() < bottom): # add contained nodes to instance's node list - instance.append(node) + instance.data["transientData"]["childNodes"].append(node) # get all connections from outside of backdrop - nodes = instance[1:] + nodes = instance.data["transientData"]["childNodes"] connections_in, connections_out = pnlib.get_dependent_nodes(nodes) - instance.data["nodeConnectionsIn"] = connections_in - instance.data["nodeConnectionsOut"] = connections_out + instance.data["transientData"]["nodeConnectionsIn"] = connections_in + instance.data["transientData"]["nodeConnectionsOut"] = connections_out # make label nicer instance.data["label"] = "{0} ({1} nodes)".format( diff --git a/openpype/hosts/nuke/plugins/publish/extract_backdrop.py b/openpype/hosts/nuke/plugins/publish/extract_backdrop.py index d1e5c4cc5a..e62c715b9c 100644 --- a/openpype/hosts/nuke/plugins/publish/extract_backdrop.py +++ b/openpype/hosts/nuke/plugins/publish/extract_backdrop.py @@ -26,8 +26,14 @@ class ExtractBackdropNode(publish.Extractor): families = ["nukenodes"] def process(self, instance): - tmp_nodes = list() - nodes = instance[1:] + tmp_nodes = [] + child_nodes = instance.data["transientData"]["childNodes"] + # all connections outside of backdrop + connections_in = instance.data["transientData"]["nodeConnectionsIn"] + connections_out = instance.data["transientData"]["nodeConnectionsOut"] + self.log.debug("_ connections_in: `{}`".format(connections_in)) + self.log.debug("_ connections_out: `{}`".format(connections_out)) + # Define extract output file path stagingdir = self.staging_dir(instance) filename = "{0}.nk".format(instance.name) @@ -35,20 +41,14 @@ class ExtractBackdropNode(publish.Extractor): # maintain selection with maintained_selection(): - # all connections outside of backdrop - connections_in = instance.data["nodeConnectionsIn"] - connections_out = instance.data["nodeConnectionsOut"] - self.log.debug("_ connections_in: `{}`".format(connections_in)) - self.log.debug("_ connections_out: `{}`".format(connections_out)) - - # create input nodes and name them as passing node (*_INP) + # create input child_nodes and name them as passing node (*_INP) for n, inputs in connections_in.items(): for i, input in inputs: inpn = nuke.createNode("Input") inpn["name"].setValue("{}_{}_INP".format(n.name(), i)) n.setInput(i, inpn) inpn.setXYpos(input.xpos(), input.ypos()) - nodes.append(inpn) + child_nodes.append(inpn) tmp_nodes.append(inpn) reset_selection() @@ -63,13 +63,13 @@ class ExtractBackdropNode(publish.Extractor): if d.name() in n.name()), 0), opn) opn.setInput(0, n) opn.autoplace() - nodes.append(opn) + child_nodes.append(opn) tmp_nodes.append(opn) reset_selection() - # select nodes to copy + # select child_nodes to copy reset_selection() - select_nodes(nodes) + select_nodes(child_nodes) # create tmp nk file # save file to the path nuke.nodeCopy(path) diff --git a/openpype/hosts/nuke/plugins/publish/validate_backdrop.py b/openpype/hosts/nuke/plugins/publish/validate_backdrop.py index 17dc79dc56..7ce3ce0e3f 100644 --- a/openpype/hosts/nuke/plugins/publish/validate_backdrop.py +++ b/openpype/hosts/nuke/plugins/publish/validate_backdrop.py @@ -60,7 +60,8 @@ class ValidateBackdrop(pyblish.api.InstancePlugin): actions = [SelectCenterInNodeGraph] def process(self, instance): - connections_out = instance.data["nodeConnectionsOut"] + child_nodes = instance.data["transientData"]["childNodes"] + connections_out = 
instance.data["transientData"]["nodeConnectionsOut"] msg_multiple_outputs = ( "Only one outcoming connection from " @@ -78,10 +79,10 @@ class ValidateBackdrop(pyblish.api.InstancePlugin): self.log.debug( "Amount of nodes on instance: {}".format( - len(instance)) + len(child_nodes)) ) - if len(instance) == 1: + if child_nodes == []: raise PublishXmlValidationError( self, msg_no_nodes, From 4df13d790503620e456a8b40713282b14271f95f Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Mon, 3 Oct 2022 12:58:58 +0200 Subject: [PATCH 041/480] nuke: adding info knob to backdrop node creator --- openpype/hosts/nuke/plugins/create/create_backdrop.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/openpype/hosts/nuke/plugins/create/create_backdrop.py b/openpype/hosts/nuke/plugins/create/create_backdrop.py index e622fd12be..40eaab08cf 100644 --- a/openpype/hosts/nuke/plugins/create/create_backdrop.py +++ b/openpype/hosts/nuke/plugins/create/create_backdrop.py @@ -38,6 +38,8 @@ class CreateBackdrop(NukeCreator): created_node["note_font_size"].setValue(24) created_node["label"].setValue("[{}]".format(node_name)) + self.add_info_knob(created_node) + return created_node def create(self, subset_name, instance_data, pre_create_data): From e9c8144d6edbd6452a50089cf41bb478f147f917 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Mon, 3 Oct 2022 20:42:41 +0200 Subject: [PATCH 042/480] nuke: adding apply settings to NukeCreator --- openpype/hosts/nuke/api/plugin.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/openpype/hosts/nuke/api/plugin.py b/openpype/hosts/nuke/api/plugin.py index 7cb8bc8b84..4b85334c03 100644 --- a/openpype/hosts/nuke/api/plugin.py +++ b/openpype/hosts/nuke/api/plugin.py @@ -201,6 +201,10 @@ class NukeCreator(NewCreator): BoolDef("use_selection", label="Use selection") ] + def apply_settings(self, project_settings, system_settings): + """Method called on initialization of plugin to apply settings.""" + self.creators_settings = project_settings["nuke"]["create"] + class OpenPypeCreator(LegacyCreator): """Pype Nuke Creator class wrapper""" From cab1a09d5fa857fd7b5471600709e3acca513668 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Mon, 3 Oct 2022 20:57:05 +0200 Subject: [PATCH 043/480] nuke: adding also active key check in validator --- openpype/hosts/nuke/plugins/publish/validate_knobs.py | 11 ++++++++++- 1 file changed, 10 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/nuke/plugins/publish/validate_knobs.py b/openpype/hosts/nuke/plugins/publish/validate_knobs.py index d44f27791a..1d853f72b5 100644 --- a/openpype/hosts/nuke/plugins/publish/validate_knobs.py +++ b/openpype/hosts/nuke/plugins/publish/validate_knobs.py @@ -62,7 +62,16 @@ class ValidateKnobs(pyblish.api.ContextPlugin): for instance in context: # Filter publisable instances. - if not instance.data["publish"]: + if ( + instance.data.get("publish") + and instance.data["publish"] is False + ): + continue + + if ( + instance.data.get("active") + and instance.data["active"] is False + ): continue # Filter families. 
From 86e740c43be228f946835890ab9caaebdb23c542 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Mon, 3 Oct 2022 20:57:30 +0200 Subject: [PATCH 044/480] nuke: removing print --- openpype/hosts/nuke/plugins/publish/extract_backdrop.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/openpype/hosts/nuke/plugins/publish/extract_backdrop.py b/openpype/hosts/nuke/plugins/publish/extract_backdrop.py index e62c715b9c..5166fa4b2c 100644 --- a/openpype/hosts/nuke/plugins/publish/extract_backdrop.py +++ b/openpype/hosts/nuke/plugins/publish/extract_backdrop.py @@ -104,6 +104,3 @@ class ExtractBackdropNode(publish.Extractor): self.log.info("Extracted instance '{}' to: {}".format( instance.name, path)) - - self.log.info("Data {}".format( - instance.data)) From bcac2bd143bcd07517cef7604e7190f0d6c986c4 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 5 Oct 2022 17:13:26 +0200 Subject: [PATCH 045/480] nuke: settings for write attributes --- .../defaults/project_settings/nuke.json | 26 +++++---- .../projects_schema/schema_project_nuke.json | 58 +++++-------------- .../schemas/template_nuke_write_attrs.json | 19 ++++++ 3 files changed, 49 insertions(+), 54 deletions(-) create mode 100644 openpype/settings/entities/schemas/projects_schema/schemas/template_nuke_write_attrs.json diff --git a/openpype/settings/defaults/project_settings/nuke.json b/openpype/settings/defaults/project_settings/nuke.json index 7b3d07d441..cdb410b378 100644 --- a/openpype/settings/defaults/project_settings/nuke.json +++ b/openpype/settings/defaults/project_settings/nuke.json @@ -56,12 +56,15 @@ ], "create": { "CreateWriteRender": { - "fpath_template": "{work}/renders/nuke/{subset}/{subset}.{frame}.{ext}", - "defaults": [ + "temp_rendering_path_template": "{work}/renders/nuke/{subset}/{subset}.{frame}.{ext}", + "default_variants": [ "Main", "Mask" ], - "knobs": [], + "instance_attributes": [ + "reviewable", + "farm_rendering" + ], "prenodes": { "Reformat01": { "nodeclass": "Reformat", @@ -82,27 +85,28 @@ } }, "CreateWritePrerender": { - "fpath_template": "{work}/renders/nuke/{subset}/{subset}.{frame}.{ext}", - "use_range_limit": true, - "defaults": [ + "temp_rendering_path_template": "{work}/renders/nuke/{subset}/{subset}.{frame}.{ext}", + "default_variants": [ "Key01", "Bg01", "Fg01", "Branch01", "Part01" ], - "reviewable": false, - "knobs": [], + "instance_attributes": [ + "farm_rendering", + "use_range_limit" + ], "prenodes": {} }, "CreateWriteStill": { - "fpath_template": "{work}/renders/nuke/{subset}/{subset}.{ext}", - "defaults": [ + "temp_rendering_path_template": "{work}/renders/nuke/{subset}/{subset}.{ext}", + "default_variants": [ "ImageFrame", "MPFrame", "LayoutFrame" ], - "knobs": [], + "instance_attributes": [], "prenodes": { "FrameHold01": { "nodeclass": "FrameHold", diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_nuke.json b/openpype/settings/entities/schemas/projects_schema/schema_project_nuke.json index c21df7d44d..520985154e 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_nuke.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_nuke.json @@ -97,26 +97,20 @@ "children": [ { "type": "text", - "key": "fpath_template", - "label": "Path template" + "key": "temp_rendering_path_template", + "label": "Temporary rendering path template" }, { "type": "list", - "key": "defaults", - "label": "Subset name defaults", + "key": "default_variants", + "label": "Default variants", "object_type": { "type": "text" } }, { "type": 
"schema_template", - "name": "template_nuke_knob_inputs", - "template_data": [ - { - "label": "Node knobs", - "key": "knobs" - } - ] + "name": "template_nuke_write_attrs" }, { "key": "prenodes", @@ -160,36 +154,20 @@ "children": [ { "type": "text", - "key": "fpath_template", - "label": "Path template" - }, - { - "type": "boolean", - "key": "use_range_limit", - "label": "Use Frame range limit by default" + "key": "temp_rendering_path_template", + "label": "Temporary rendering path template" }, { "type": "list", - "key": "defaults", - "label": "Subset name defaults", + "key": "default_variants", + "label": "Default variants", "object_type": { "type": "text" } }, - { - "type": "boolean", - "key": "reviewable", - "label": "Add reviewable toggle" - }, { "type": "schema_template", - "name": "template_nuke_knob_inputs", - "template_data": [ - { - "label": "Node knobs", - "key": "knobs" - } - ] + "name": "template_nuke_write_attrs" }, { "key": "prenodes", @@ -233,26 +211,20 @@ "children": [ { "type": "text", - "key": "fpath_template", - "label": "Path template" + "key": "temp_rendering_path_template", + "label": "Temporary rendering path template" }, { "type": "list", - "key": "defaults", - "label": "Subset name defaults", + "key": "default_variants", + "label": "Default variants", "object_type": { "type": "text" } }, { "type": "schema_template", - "name": "template_nuke_knob_inputs", - "template_data": [ - { - "label": "Node knobs", - "key": "knobs" - } - ] + "name": "template_nuke_write_attrs" }, { "key": "prenodes", diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/template_nuke_write_attrs.json b/openpype/settings/entities/schemas/projects_schema/schemas/template_nuke_write_attrs.json new file mode 100644 index 0000000000..8be48e669d --- /dev/null +++ b/openpype/settings/entities/schemas/projects_schema/schemas/template_nuke_write_attrs.json @@ -0,0 +1,19 @@ +[ + { + "key": "instance_attributes", + "label": "Instance attributes", + "type": "enum", + "multiselection": true, + "enum_items": [ + { + "reviewable": "Reviewable" + }, + { + "farm_rendering": "Farm rendering" + }, + { + "use_range_limit": "Use range limit" + } + ] + } +] From ca6d8bd7123616eb2fdb458761ba4556a95480ef Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 5 Oct 2022 17:17:30 +0200 Subject: [PATCH 046/480] Nuke adding deprecation warning and adding it to deprecated functions --- openpype/hosts/nuke/api/lib.py | 99 ++++++++++++++++++++----------- openpype/hosts/nuke/api/plugin.py | 6 +- 2 files changed, 67 insertions(+), 38 deletions(-) diff --git a/openpype/hosts/nuke/api/lib.py b/openpype/hosts/nuke/api/lib.py index 211bab2d80..68f0ca6e35 100644 --- a/openpype/hosts/nuke/api/lib.py +++ b/openpype/hosts/nuke/api/lib.py @@ -3,6 +3,8 @@ from pprint import pformat import re import json import six +import functools +import warnings import platform import tempfile import contextlib @@ -68,6 +70,51 @@ ROOT_DATA_KNOB = "publish_context" INSTANCE_DATA_KNOB = "publish_instance" +class DeprecatedWarning(DeprecationWarning): + pass + + +def deprecated(new_destination): + """Mark functions as deprecated. + + It will result in a warning being emitted when the function is used. + """ + + func = None + if callable(new_destination): + func = new_destination + new_destination = None + + def _decorator(decorated_func): + if new_destination is None: + warning_message = ( + " Please check content of deprecated function to figure out" + " possible replacement." 
+ ) + else: + warning_message = " Please replace your usage with '{}'.".format( + new_destination + ) + + @functools.wraps(decorated_func) + def wrapper(*args, **kwargs): + warnings.simplefilter("always", DeprecatedWarning) + warnings.warn( + ( + "Call to deprecated function '{}'" + "\nFunction was moved or removed.{}" + ).format(decorated_func.__name__, warning_message), + category=DeprecatedWarning, + stacklevel=4 + ) + return decorated_func(*args, **kwargs) + return wrapper + + if func is None: + return _decorator + return _decorator(func) + + class Context: main_window = None context_label = None @@ -1164,8 +1211,6 @@ def create_write_node( data, input=None, prenodes=None, - review=True, - farm=True, linked_knobs=None, **kwargs ): @@ -1207,35 +1252,26 @@ def create_write_node( ''' prenodes = prenodes or {} - # group node knob overrides - knob_overrides = data.pop("knobs", []) - # filtering variables plugin_name = data["creator"] subset = data["subset"] # get knob settings for write node imageio_writes = get_imageio_node_setting( - node_class=data["nodeclass"], + node_class="Write", plugin_name=plugin_name, subset=subset ) for knob in imageio_writes["knobs"]: if knob["name"] == "file_type": - representation = knob["value"] + ext = knob["value"] - try: - data.update({ - "imageio_writes": imageio_writes, - "representation": representation, - }) - anatomy_filled = format_anatomy(data) - - except Exception as e: - msg = "problem with resolving anatomy template: {}".format(e) - log.error(msg) - nuke.message(msg) + data.update({ + "imageio_writes": imageio_writes, + "ext": ext + }) + anatomy_filled = format_anatomy(data) # build file path to workfiles fdir = str(anatomy_filled["work"]["folder"]).replace("\\", "/") @@ -1244,7 +1280,7 @@ def create_write_node( version=data["version"], subset=data["subset"], frame=data["frame"], - ext=representation + ext=ext ) # create directory @@ -1298,14 +1334,6 @@ def create_write_node( # connect to previous node now_node.setInput(0, prev_node) - # imprinting group node - set_avalon_knob_data(GN, data["avalon"]) - add_publish_knob(GN) - add_rendering_knobs(GN, farm) - - if review: - add_review_knob(GN) - # add divider GN.addKnob(nuke.Text_Knob('', 'Rendering')) @@ -1351,12 +1379,6 @@ def create_write_node( # adding write to read button add_button_clear_rendered(GN, os.path.dirname(fpath)) - # Deadline tab. 
- add_deadline_tab(GN) - - # open the our Tab as default - GN[_NODE_TAB_NAME].setFlag(0) - # set tile color tile_color = next( iter( @@ -1367,12 +1389,10 @@ def create_write_node( GN["tile_color"].setValue( color_gui_to_int(tile_color)) - # finally add knob overrides - set_node_knobs_from_settings(GN, knob_overrides, **kwargs) - return GN +@deprecated("openpype.hosts.nuke.api.lib.create_write_node") def create_write_node_legacy( name, data, input=None, prenodes=None, review=True, linked_knobs=None, farm=True @@ -1725,6 +1745,7 @@ def color_gui_to_int(color_gui): return int(hex_value, 16) +@deprecated def add_rendering_knobs(node, farm=True): ''' Adds additional rendering knobs to given node @@ -1745,6 +1766,7 @@ def add_rendering_knobs(node, farm=True): return node +@deprecated def add_review_knob(node): ''' Adds additional review knob to given node @@ -1761,7 +1783,9 @@ def add_review_knob(node): return node +@deprecated def add_deadline_tab(node): + # TODO: remove this as it is only linked to legacy create node.addKnob(nuke.Tab_Knob("Deadline")) knob = nuke.Int_Knob("deadlinePriority", "Priority") @@ -1787,7 +1811,10 @@ def add_deadline_tab(node): node.addKnob(knob) +@deprecated def get_deadline_knob_names(): + # TODO: remove this as it is only linked to legacy + # validate_write_deadline_tab return [ "Deadline", "deadlineChunkSize", diff --git a/openpype/hosts/nuke/api/plugin.py b/openpype/hosts/nuke/api/plugin.py index 4b85334c03..cb062bc647 100644 --- a/openpype/hosts/nuke/api/plugin.py +++ b/openpype/hosts/nuke/api/plugin.py @@ -33,7 +33,8 @@ from .lib import ( get_nuke_imageio_settings, set_node_knobs_from_settings, get_view_process_node, - set_node_data + set_node_data, + deprecated ) from .pipeline import ( list_instances, @@ -138,7 +139,7 @@ class NukeCreator(NewCreator): # make sure subset name is unique if self.check_existing_subset(subset_name, instance_data): raise NukeCreatorError( - ("subset {} is already published with different HDA" + ("subset {} is already published" "definition.").format(subset_name)) try: @@ -736,6 +737,7 @@ class ExporterReviewMov(ExporterReview): return self.data +@deprecated("openpype.hosts.nuke.api.plugin.NukeWriteCreator") class AbstractWriteRender(OpenPypeCreator): """Abstract creator to gather similar implementation for Write creators""" name = "" From 57863b5151287f36d30159520f3f48181dc870f1 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 5 Oct 2022 17:18:26 +0200 Subject: [PATCH 047/480] nuke: fix format anatomy func --- openpype/hosts/nuke/api/lib.py | 29 ++++++++--------------------- 1 file changed, 8 insertions(+), 21 deletions(-) diff --git a/openpype/hosts/nuke/api/lib.py b/openpype/hosts/nuke/api/lib.py index 68f0ca6e35..8bc905dfe8 100644 --- a/openpype/hosts/nuke/api/lib.py +++ b/openpype/hosts/nuke/api/lib.py @@ -1082,27 +1082,14 @@ def format_anatomy(data): Return: path (str) ''' - # TODO: perhaps should be nonPublic - anatomy = Anatomy() log.debug("__ anatomy.templates: {}".format(anatomy.templates)) - try: - # TODO: bck compatibility with old anatomy template - padding = int( - anatomy.templates["render"].get( - "frame_padding", - anatomy.templates["render"].get("padding") - ) + padding = int( + anatomy.templates["render"].get( + "frame_padding" ) - except KeyError as e: - msg = ("`padding` key is not in `render` " - "or `frame_padding` on is not available in " - "Anatomy template. 
Please, add it there and restart " - "the pipeline (padding: \"4\"): `{}`").format(e) - - log.error(msg) - nuke.message(msg) + ) version = data.get("version", None) if not version: @@ -1110,16 +1097,16 @@ def format_anatomy(data): data["version"] = get_version_from_path(file) project_name = anatomy.project_name - asset_name = data["avalon"]["asset"] - task_name = os.environ["AVALON_TASK"] + asset_name = data["asset"] + task_name = data["task"] host_name = os.environ["AVALON_APP"] context_data = get_template_data_with_names( project_name, asset_name, task_name, host_name ) data.update(context_data) data.update({ - "subset": data["avalon"]["subset"], - "family": data["avalon"]["family"], + "subset": data["subset"], + "family": data["family"], "frame": "#" * padding, }) return anatomy.format(data) From 8d0758887ec84f7a0a14ab6d42dfe98c0f7e3e09 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 5 Oct 2022 17:19:02 +0200 Subject: [PATCH 048/480] nuke: unparent publisher --- openpype/hosts/nuke/api/pipeline.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/nuke/api/pipeline.py b/openpype/hosts/nuke/api/pipeline.py index cbe8b547c6..e8fba8ae34 100644 --- a/openpype/hosts/nuke/api/pipeline.py +++ b/openpype/hosts/nuke/api/pipeline.py @@ -233,7 +233,7 @@ def _install_menu(): menu.addSeparator() menu.addCommand( "Publish...", - lambda: host_tools.show_publisher(parent=main_window) + host_tools.show_publisher ) menu.addCommand( "Load...", From 694d1026e8706a3a3f4d343ec27f8c300b4c39eb Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 5 Oct 2022 17:19:23 +0200 Subject: [PATCH 049/480] nuke: add NukeWriteCreator --- openpype/hosts/nuke/api/plugin.py | 128 +++++++++++++++++++++++++++++- 1 file changed, 125 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/nuke/api/plugin.py b/openpype/hosts/nuke/api/plugin.py index cb062bc647..091ef81d0f 100644 --- a/openpype/hosts/nuke/api/plugin.py +++ b/openpype/hosts/nuke/api/plugin.py @@ -1,3 +1,4 @@ +from pprint import pformat import nuke import os @@ -14,7 +15,10 @@ from openpype.client import ( ) from openpype.api import get_current_project_settings -from openpype.lib import BoolDef +from openpype.lib import ( + BoolDef, + EnumDef +) from openpype.pipeline import ( LegacyCreator, LoaderPlugin, @@ -202,9 +206,127 @@ class NukeCreator(NewCreator): BoolDef("use_selection", label="Use selection") ] - def apply_settings(self, project_settings, system_settings): + def get_creator_settings(self, project_settings, settings_key=None): + if not settings_key: + settings_key = self.__class__.__name__ + return project_settings["nuke"]["create"][settings_key] + + +class NukeWriteCreator(NukeCreator): + """Add Publishable Write node""" + + identifier = "create_write" + label = "Create Write" + family = "write" + icon = "sign-out" + + def set_selected_nodes(self, pre_create_data): + if pre_create_data.get("use_selection"): + selected_nodes = nuke.selectedNodes() + if selected_nodes == []: + raise NukeCreatorError("Creator error: No active selection") + elif len(selected_nodes) > 1: + NukeCreatorError("Creator error: Select only one camera node") + self.selected_node = selected_nodes[0] + else: + self.selected_node = None + + def get_pre_create_attr_defs(self): + attr_defs = [ + BoolDef("use_selection", label="Use selection"), + self._get_render_target_enum() + ] + return attr_defs + + def get_instance_attr_defs(self): + attr_defs = [ + self._get_render_target_enum(), + self._get_reviewable_bool() + ] + return attr_defs + + def 
_get_render_target_enum(self): + rendering_targets = { + "local": "Local machine rendering", + "frames": "Use existing frames" + } + if ("farm_rendering" in self.instance_attributes): + rendering_targets["farm"] = "Farm rendering" + + return EnumDef( + "render_target", + items=rendering_targets, + label="Render target" + ) + + def _get_reviewable_bool(self): + return BoolDef( + "review", + default=("reviewable" in self.instance_attributes), + label="Review" + ) + + def create(self, subset_name, instance_data, pre_create_data): + # make sure selected nodes are added + self.set_selected_nodes(pre_create_data) + + # make sure subset name is unique + if self.check_existing_subset(subset_name, instance_data): + raise NukeCreatorError( + ("subset {} is already published" + "definition.").format(subset_name)) + + instance_node = self.create_instance_node( + subset_name, + instance_data + ) + + try: + instance = CreatedInstance( + self.family, + subset_name, + instance_data, + self + ) + + instance.transient_data["node"] = instance_node + + self._add_instance_to_context(instance) + + set_node_data( + instance_node, INSTANCE_DATA_KNOB, instance.data_to_store()) + + return instance + + except Exception as er: + six.reraise( + NukeCreatorError, + NukeCreatorError("Creator error: {}".format(er)), + sys.exc_info()[2]) + + return instance + + def apply_settings( + self, + project_settings, + system_settings, + # anatomy_settings + ): """Method called on initialization of plugin to apply settings.""" - self.creators_settings = project_settings["nuke"]["create"] + + # plugin settings + plugin_settings = self.get_creator_settings(project_settings) + + self.log.debug("___________________") + self.log.debug(pformat(plugin_settings)) + + # individual attributes + self.instance_attributes = plugin_settings[ + "instance_attributes"] + self.prenodes = plugin_settings["prenodes"] + self.default_variants = plugin_settings["default_variants"] + self.temp_rendering_path_template = plugin_settings[ + "temp_rendering_path_template"] class OpenPypeCreator(LegacyCreator): From 36672bb82704fbd2e2fdf698d1368d92dba7b3d6 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 5 Oct 2022 17:19:42 +0200 Subject: [PATCH 050/480] Nuke: add camera creator --- .../nuke/plugins/create/create_camera.py | 106 +++++++++++------- 1 file changed, 63 insertions(+), 43 deletions(-) diff --git a/openpype/hosts/nuke/plugins/create/create_camera.py b/openpype/hosts/nuke/plugins/create/create_camera.py index 3b13c80dc4..0b538d0088 100644 --- a/openpype/hosts/nuke/plugins/create/create_camera.py +++ b/openpype/hosts/nuke/plugins/create/create_camera.py @@ -1,55 +1,75 @@ import nuke -from openpype.hosts.nuke.api import plugin -from openpype.hosts.nuke.api.lib import ( - set_avalon_knob_data +from openpype.hosts.nuke.api import ( + NukeCreator, + NukeCreatorError, + maintained_selection ) -class CreateCamera(plugin.OpenPypeCreator): - """Add Publishable Backdrop""" +class CreateCamera(NukeCreator): + """Add Publishable Camera""" - name = "camera" + identifier = "create_camera" label = "Create 3d Camera" family = "camera" icon = "camera" - defaults = ["Main"] - def __init__(self, *args, **kwargs): - super(CreateCamera, self).__init__(*args, **kwargs) - self.nodes = nuke.selectedNodes() - self.node_color = "0xff9100ff" - return + # plugin attributes + node_color = "0xff9100ff" - def process(self): - nodes = list() - if (self.options or {}).get("useSelection"): - nodes = self.nodes - - if len(nodes) >= 1: - # loop selected nodes - for n in nodes: - 
data = self.data.copy() - if len(nodes) > 1: - # rename subset name only if more - # then one node are selected - subset = self.family + n["name"].value().capitalize() - data["subset"] = subset - - # change node color - n["tile_color"].setValue(int(self.node_color, 16)) - # add avalon knobs - set_avalon_knob_data(n, data) - return True + def create_instance_node( + self, + node_name, + knobs=None, + parent=None, + node_type=None + ): + with maintained_selection(): + if self.selected_nodes: + created_node = self.selected_nodes[0] else: - msg = str("Please select nodes you " - "wish to add to a container") - self.log.error(msg) - nuke.message(msg) - return + created_node = nuke.createNode("Camera2") + + created_node["tile_color"].setValue( + int(self.node_color, 16)) + + created_node["name"].setValue(node_name) + + self.add_info_knob(created_node) + + return created_node + + def create(self, subset_name, instance_data, pre_create_data): + if self.check_existing_subset(subset_name, instance_data): + raise NukeCreatorError( + ("subset {} is already published with different HDA" + "definition.").format(subset_name)) + + instance = super(CreateCamera, self).create( + subset_name, + instance_data, + pre_create_data + ) + + return instance + + def set_selected_nodes(self, pre_create_data): + if pre_create_data.get("use_selection"): + self.selected_nodes = nuke.selectedNodes() + if self.selected_nodes == []: + raise NukeCreatorError("Creator error: No active selection") + elif len(self.selected_nodes) > 1: + NukeCreatorError("Creator error: Select only one camera node") else: - # if selected is off then create one node - camera_node = nuke.createNode("Camera2") - camera_node["tile_color"].setValue(int(self.node_color, 16)) - # add avalon knobs - instance = set_avalon_knob_data(camera_node, self.data) - return instance + self.selected_nodes = [] + + self.log.debug("Selection is: {}".format(self.selected_nodes)) + + def apply_settings(self, project_settings, system_settings): + """Method called on initialization of plugin to apply settings.""" + + # only selected keys ideally + # settings = self.get_creator_settings(project_settings) + + # self.key = settings["key"] + pass \ No newline at end of file From 5bafa21ea84da086f078999e4ee67a9f4d181a20 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 5 Oct 2022 17:20:19 +0200 Subject: [PATCH 051/480] nuke: refactor create write render to new publisher --- .../plugins/create/create_write_render.py | 139 +++++++++--------- 1 file changed, 71 insertions(+), 68 deletions(-) diff --git a/openpype/hosts/nuke/plugins/create/create_write_render.py b/openpype/hosts/nuke/plugins/create/create_write_render.py index 23846c0332..3d7f5b8e37 100644 --- a/openpype/hosts/nuke/plugins/create/create_write_render.py +++ b/openpype/hosts/nuke/plugins/create/create_write_render.py @@ -1,86 +1,89 @@ +from pprint import pformat import nuke +from openpype.lib import ( + BoolDef, + NumberDef, + UISeparatorDef, + UILabelDef +) from openpype.hosts.nuke.api import plugin from openpype.hosts.nuke.api.lib import ( - create_write_node, create_write_node_legacy) + create_write_node) -class CreateWriteRender(plugin.AbstractWriteRender): - # change this to template preset - name = "WriteRender" +class CreateWriteRender(plugin.NukeWriteCreator): + identifier = "create_write_render" label = "Create Write Render" - hosts = ["nuke"] - n_class = "Write" family = "render" icon = "sign-out" - # settings - fpath_template = "{work}/render/nuke/{subset}/{subset}.{frame}.{ext}" - defaults = 
["Main", "Mask"] - prenodes = { - "Reformat01": { - "nodeclass": "Reformat", - "dependent": None, - "knobs": [ - { - "type": "text", - "name": "resize", - "value": "none" - }, - { - "type": "bool", - "name": "black_outside", - "value": True - } - ] - } - } + def get_pre_create_attr_defs(self): + attr_defs = [ + BoolDef("use_selection", label="Use selection"), + self._get_render_target_enum() + ] + return attr_defs - def __init__(self, *args, **kwargs): - super(CreateWriteRender, self).__init__(*args, **kwargs) + def get_instance_attr_defs(self): + attr_defs = [ + self._get_render_target_enum(), + self._get_reviewable_bool() + ] + if "farm_rendering" in self.instance_attributes: + attr_defs.extend([ + UISeparatorDef(), + UILabelDef("Farm rendering attributes"), + BoolDef("suspended_publish", label="Suspended publishing"), + NumberDef( + "farm_priority", + label="Priority", + minimum=1, + maximum=99, + default=50 + ), + NumberDef( + "farm_chunk", + label="Chunk size", + minimum=1, + maximum=99, + default=10 + ), + NumberDef( + "farm_concurency", + label="Concurent tasks", + minimum=1, + maximum=10, + default=1 + ) + ]) + return attr_defs - def _create_write_node(self, selected_node, inputs, outputs, write_data): + def create_instance_node(self, subset_name, instance_data): # add fpath_template - write_data["fpath_template"] = self.fpath_template + write_data = { + "creator": self.__class__.__name__, + "subset": subset_name, + "fpath_template": self.temp_rendering_path_template + } + + write_data.update(instance_data) - # add reformat node to cut off all outside of format bounding box # get width and height - try: - width, height = (selected_node.width(), selected_node.height()) - except AttributeError: + if self.selected_node: + width, height = ( + self.selected_node.width(), self.selected_node.height()) + else: actual_format = nuke.root().knob('format').value() width, height = (actual_format.width(), actual_format.height()) - if not self.is_legacy(): - return create_write_node( - self.data["subset"], - write_data, - input=selected_node, - prenodes=self.prenodes, - **{ - "width": width, - "height": height - } - ) - else: - _prenodes = [ - { - "name": "Reformat01", - "class": "Reformat", - "knobs": [ - ("resize", 0), - ("black_outside", 1), - ], - "dependent": None - } - ] - - return create_write_node_legacy( - self.data["subset"], - write_data, - input=selected_node, - prenodes=_prenodes - ) - - def _modify_write_node(self, write_node): - return write_node + return create_write_node( + subset_name, + write_data, + input=self.selected_node, + prenodes=self.prenodes, + **{ + "width": width, + "height": height + } + ) From 3a02562b7aa8d401216af3fefab189ab78cad510 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 5 Oct 2022 21:08:13 +0200 Subject: [PATCH 052/480] Nuke: adding missing api functions --- openpype/hosts/nuke/api/__init__.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/nuke/api/__init__.py b/openpype/hosts/nuke/api/__init__.py index 8b18d4bc3b..47a62aa3e0 100644 --- a/openpype/hosts/nuke/api/__init__.py +++ b/openpype/hosts/nuke/api/__init__.py @@ -11,6 +11,7 @@ from .command import ( ) from .plugin import ( NukeCreator, + NukeWriteCreator, NukeCreatorError, OpenPypeCreator ) @@ -40,7 +41,8 @@ from .lib import ( convert_knob_value_to_correct_type, get_node_data, set_node_data, - update_node_data + update_node_data, + create_write_node ) from .utils import ( colorspace_exists_on_node, @@ -58,6 +60,7 @@ __all__ = ( 
"viewer_update_and_undo_stop", "NukeCreator", + "NukeWriteCreator", "NukeCreatorError", "OpenPypeCreator", "NukeHost", @@ -85,6 +88,7 @@ __all__ = ( "get_node_data", "set_node_data", "update_node_data", + "create_write_node", "colorspace_exists_on_node", "get_colorspace_list" From 7635528199e89bf8a87b76d8ca24f6d55d6ee2a8 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 5 Oct 2022 21:08:38 +0200 Subject: [PATCH 053/480] nuke: cosmetic changes --- openpype/hosts/nuke/api/lib.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/openpype/hosts/nuke/api/lib.py b/openpype/hosts/nuke/api/lib.py index 8bc905dfe8..a9e12c0f4d 100644 --- a/openpype/hosts/nuke/api/lib.py +++ b/openpype/hosts/nuke/api/lib.py @@ -1366,6 +1366,8 @@ def create_write_node( # adding write to read button add_button_clear_rendered(GN, os.path.dirname(fpath)) + GN.addKnob(nuke.Text_Knob('', '')) + # set tile color tile_color = next( iter( From 96b6f87a922a5ab1ca32233a276ea7ea174d9f5e Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 5 Oct 2022 21:09:06 +0200 Subject: [PATCH 054/480] nuke: make the info text less aggressive --- openpype/hosts/nuke/api/plugin.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/openpype/hosts/nuke/api/plugin.py b/openpype/hosts/nuke/api/plugin.py index 091ef81d0f..32cdaf9c86 100644 --- a/openpype/hosts/nuke/api/plugin.py +++ b/openpype/hosts/nuke/api/plugin.py @@ -60,9 +60,10 @@ class NukeCreator(NewCreator): # add info text info_knob = nuke.Text_Knob("OP_info", "") info_knob.setValue(""" -

-Do not erase manually !
+This node is maintained by OpenPype Publisher.
-We recomand to remove it from the Publisher gui.
+To remove it use Publisher gui.
+
""") node.addKnob(info_knob) @@ -302,9 +303,8 @@ class NukeWriteCreator(NukeCreator): six.reraise( NukeCreatorError, NukeCreatorError("Creator error: {}".format(er)), - sys.exc_info()[2]) - - return instance + sys.exc_info()[2] + ) def apply_settings( self, From d7037a7b354e49fde1135e6ad100fae14f768bcb Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 5 Oct 2022 21:09:33 +0200 Subject: [PATCH 055/480] nuke: add create function into write render creator --- .../plugins/create/create_write_render.py | 59 +++++++++++++++++-- 1 file changed, 53 insertions(+), 6 deletions(-) diff --git a/openpype/hosts/nuke/plugins/create/create_write_render.py b/openpype/hosts/nuke/plugins/create/create_write_render.py index 3d7f5b8e37..85aaedb208 100644 --- a/openpype/hosts/nuke/plugins/create/create_write_render.py +++ b/openpype/hosts/nuke/plugins/create/create_write_render.py @@ -1,18 +1,20 @@ -from pprint import pformat import nuke +import sys +import six +from openpype.pipeline import ( + CreatedInstance +) from openpype.lib import ( BoolDef, NumberDef, UISeparatorDef, UILabelDef ) -from openpype.hosts.nuke.api import plugin -from openpype.hosts.nuke.api.lib import ( - create_write_node) +from openpype.hosts.nuke import api as napi -class CreateWriteRender(plugin.NukeWriteCreator): +class CreateWriteRender(napi.NukeWriteCreator): identifier = "create_write_render" label = "Create Write Render" family = "render" @@ -77,7 +79,7 @@ class CreateWriteRender(plugin.NukeWriteCreator): actual_format = nuke.root().knob('format').value() width, height = (actual_format.width(), actual_format.height()) - return create_write_node( + created_node = napi.create_write_node( subset_name, write_data, input=self.selected_node, @@ -87,3 +89,48 @@ class CreateWriteRender(plugin.NukeWriteCreator): "height": height } ) + self.add_info_knob(created_node) + + return created_node + + def create(self, subset_name, instance_data, pre_create_data): + # make sure selected nodes are added + self.set_selected_nodes(pre_create_data) + + # make sure subset name is unique + if self.check_existing_subset(subset_name, instance_data): + raise napi.NukeCreatorError( + ("subset {} is already published" + "definition.").format(subset_name)) + + instance_node = self.create_instance_node( + subset_name, + instance_data + ) + + try: + instance = CreatedInstance( + self.family, + subset_name, + instance_data, + self + ) + + instance.transient_data["node"] = instance_node + + self._add_instance_to_context(instance) + + napi.set_node_data( + instance_node, + napi.INSTANCE_DATA_KNOB, + instance.data_to_store() + ) + + return instance + + except Exception as er: + six.reraise( + napi.NukeCreatorError, + napi.NukeCreatorError("Creator error: {}".format(er)), + sys.exc_info()[2] + ) From 0e0df359d69c7aea92834a93d014fe9e072b3f9e Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 5 Oct 2022 21:32:30 +0200 Subject: [PATCH 056/480] nuke: adding relinking function --- openpype/hosts/nuke/api/plugin.py | 20 +++++++++++++++++++ .../plugins/create/create_write_render.py | 2 ++ 2 files changed, 22 insertions(+) diff --git a/openpype/hosts/nuke/api/plugin.py b/openpype/hosts/nuke/api/plugin.py index 32cdaf9c86..340ccc7abb 100644 --- a/openpype/hosts/nuke/api/plugin.py +++ b/openpype/hosts/nuke/api/plugin.py @@ -221,6 +221,26 @@ class NukeWriteCreator(NukeCreator): family = "write" icon = "sign-out" + def integrate_links(self, node, outputs=True): + # skip if no selection + if not self.selected_node: + return + + # collect dependencies + input_nodes = 
[self.selected_node] + dependent_nodes = self.selected_node.dependent() if outputs else [] + + # relinking to collected connections + for i, input in enumerate(input_nodes): + node.setInput(i, input) + + # make it nicer in graph + node.autoplace() + + # relink also dependent nodes + for dep_nodes in dependent_nodes: + dep_nodes.setInput(0, node) + def set_selected_nodes(self, pre_create_data): if pre_create_data.get("use_selection"): selected_nodes = nuke.selectedNodes() diff --git a/openpype/hosts/nuke/plugins/create/create_write_render.py b/openpype/hosts/nuke/plugins/create/create_write_render.py index 85aaedb208..560b9e0c36 100644 --- a/openpype/hosts/nuke/plugins/create/create_write_render.py +++ b/openpype/hosts/nuke/plugins/create/create_write_render.py @@ -91,6 +91,8 @@ class CreateWriteRender(napi.NukeWriteCreator): ) self.add_info_knob(created_node) + self.integrate_links(created_node, outputs=False) + return created_node def create(self, subset_name, instance_data, pre_create_data): From 4aedaa975e8513b9b6aee9e70e23105c15b97bd9 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 6 Oct 2022 16:42:39 +0200 Subject: [PATCH 057/480] Nuke refactory write still creator --- openpype/hosts/nuke/api/lib.py | 1 - .../nuke/plugins/create/create_write_still.py | 193 +++++++++++------- 2 files changed, 115 insertions(+), 79 deletions(-) diff --git a/openpype/hosts/nuke/api/lib.py b/openpype/hosts/nuke/api/lib.py index a9e12c0f4d..b09a164ac1 100644 --- a/openpype/hosts/nuke/api/lib.py +++ b/openpype/hosts/nuke/api/lib.py @@ -1680,7 +1680,6 @@ def set_node_knobs_from_settings(node, knob_settings, **kwargs): _knob_value = template.format( **kwargs ) - log.debug("__ knob_value0: {}".format(_knob_value)) except KeyError as msg: log.warning("__ msg: {}".format(msg)) raise KeyError(msg) diff --git a/openpype/hosts/nuke/plugins/create/create_write_still.py b/openpype/hosts/nuke/plugins/create/create_write_still.py index bb08e8c2c6..7627bc3d9f 100644 --- a/openpype/hosts/nuke/plugins/create/create_write_still.py +++ b/openpype/hosts/nuke/plugins/create/create_write_still.py @@ -1,96 +1,133 @@ import nuke +import sys +import six -from openpype.hosts.nuke.api import plugin -from openpype.hosts.nuke.api.lib import ( - create_write_node, - create_write_node_legacy, - get_created_node_imageio_setting_legacy +from openpype.pipeline import ( + CreatedInstance ) - -# HACK: just to disable still image on projects which -# are not having anatomy imageio preset for CreateWriteStill -# TODO: remove this code as soon as it will be obsolete -imageio_writes = get_created_node_imageio_setting_legacy( - "Write", - "CreateWriteStill", - "stillMain" +from openpype.lib import ( + BoolDef, + NumberDef, + UISeparatorDef, + EnumDef ) -print(imageio_writes["knobs"]) +from openpype.hosts.nuke import api as napi -class CreateWriteStill(plugin.AbstractWriteRender): - # change this to template preset - name = "WriteStillFrame" - label = "Create Write Still Image" - hosts = ["nuke"] - n_class = "Write" +class CreateWriteStill(napi.NukeWriteCreator): + identifier = "create_write_still" + label = "Create Write Still Frame" family = "still" - icon = "image" + icon = "sign-out" - # settings - fpath_template = "{work}/render/nuke/{subset}/{subset}.{ext}" - defaults = [ - "ImageFrame", - "MPFrame", - "LayoutFrame" - ] - prenodes = { - "FrameHold01": { - "nodeclass": "FrameHold", - "dependent": None, - "knobs": [ - { - "type": "formatable", - "name": "first_frame", - "template": "{frame}", - "to_type": "number" - } - ] + def 
get_pre_create_attr_defs(self): + attr_defs = [ + BoolDef("use_selection", label="Use selection"), + self._get_render_target_enum(), + UISeparatorDef(), + self._get_frame_source_number() + ] + return attr_defs + + def _get_render_target_enum(self): + rendering_targets = { + "local": "Local machine rendering", + "frames": "Use existing frames" } - } - def __init__(self, *args, **kwargs): - super(CreateWriteStill, self).__init__(*args, **kwargs) + return EnumDef( + "render_target", + items=rendering_targets, + label="Render target" + ) - def _create_write_node(self, selected_node, inputs, outputs, write_data): + def _get_frame_source_number(self): + return NumberDef( + "active_frame", + label="Active frame", + default=nuke.frame() + ) + + def get_instance_attr_defs(self): + attr_defs = [ + self._get_render_target_enum(), + self._get_reviewable_bool() + ] + return attr_defs + + def create_instance_node(self, subset_name, instance_data): # add fpath_template - write_data["fpath_template"] = self.fpath_template + write_data = { + "creator": self.__class__.__name__, + "subset": subset_name, + "fpath_template": self.temp_rendering_path_template + } - if not self.is_legacy(): - return create_write_node( - self.name, - write_data, - input=selected_node, - review=False, - prenodes=self.prenodes, - farm=False, - linked_knobs=["channels", "___", "first", "last", "use_limit"], - **{ - "frame": nuke.frame() - } - ) - else: - _prenodes = [ - { - "name": "FrameHold01", - "class": "FrameHold", - "knobs": [ - ("first_frame", nuke.frame()) - ], - "dependent": None - } - ] - return create_write_node_legacy( - self.name, - write_data, - input=selected_node, - review=False, - prenodes=_prenodes, - farm=False, - linked_knobs=["channels", "___", "first", "last", "use_limit"] + write_data.update(instance_data) + + created_node = napi.create_write_node( + subset_name, + write_data, + input=self.selected_node, + prenodes=self.prenodes, + linked_knobs=["channels", "___", "first", "last", "use_limit"], + **{ + "frame": nuke.frame() + } + ) + self.add_info_knob(created_node) + + self._add_frame_range_limit(created_node) + + self.integrate_links(created_node, outputs=False) + + return created_node + + def create(self, subset_name, instance_data, pre_create_data): + subset_name = subset_name.format(**pre_create_data) + + # make sure selected nodes are added + self.set_selected_nodes(pre_create_data) + + # make sure subset name is unique + if self.check_existing_subset(subset_name, instance_data): + raise napi.NukeCreatorError( + ("subset {} is already published" + "definition.").format(subset_name)) + + instance_node = self.create_instance_node( + subset_name, + instance_data + ) + + try: + instance = CreatedInstance( + self.family, + subset_name, + instance_data, + self ) - def _modify_write_node(self, write_node): + instance.transient_data["node"] = instance_node + + self._add_instance_to_context(instance) + + napi.set_node_data( + instance_node, + napi.INSTANCE_DATA_KNOB, + instance.data_to_store() + ) + + return instance + + except Exception as er: + six.reraise( + napi.NukeCreatorError, + napi.NukeCreatorError("Creator error: {}".format(er)), + sys.exc_info()[2] + ) + + def _add_frame_range_limit(self, write_node): write_node.begin() for n in nuke.allNodes(): # get write node From d8df51e49f589ccfa951fa92f511aa178c2a859b Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 6 Oct 2022 21:21:17 +0200 Subject: [PATCH 058/480] nuke: adding expression value knob to settings template --- openpype/hosts/nuke/api/lib.py | 
7 +++++++ .../schemas/template_nuke_knob_inputs.json | 16 ++++++++++++++++ 2 files changed, 23 insertions(+) diff --git a/openpype/hosts/nuke/api/lib.py b/openpype/hosts/nuke/api/lib.py index b09a164ac1..4478633198 100644 --- a/openpype/hosts/nuke/api/lib.py +++ b/openpype/hosts/nuke/api/lib.py @@ -1672,6 +1672,13 @@ def set_node_knobs_from_settings(node, knob_settings, **kwargs): if knob_name not in node.knobs(): continue + if knob_type == "expression": + knob_expression = knob["expression"] + node[knob_name].setExpression( + knob_expression + ) + continue + # first deal with formatable knob settings if knob_type == "formatable": template = knob["template"] diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/template_nuke_knob_inputs.json b/openpype/settings/entities/schemas/projects_schema/schemas/template_nuke_knob_inputs.json index 52a14e0636..c9dee8681a 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/template_nuke_knob_inputs.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/template_nuke_knob_inputs.json @@ -28,6 +28,22 @@ } ] }, + { + "key": "expression", + "label": "Expression", + "children": [ + { + "type": "text", + "key": "name", + "label": "Name" + }, + { + "type": "text", + "key": "expression", + "label": "Expression" + } + ] + }, { "key": "formatable", "label": "Formate from template", From 4b42d55ce1acac083aa95ee7babe2467fe83b32e Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 6 Oct 2022 21:22:05 +0200 Subject: [PATCH 059/480] nuke: improving still write creator settings --- openpype/settings/defaults/project_settings/nuke.json | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/openpype/settings/defaults/project_settings/nuke.json b/openpype/settings/defaults/project_settings/nuke.json index cdb410b378..783b42348c 100644 --- a/openpype/settings/defaults/project_settings/nuke.json +++ b/openpype/settings/defaults/project_settings/nuke.json @@ -106,17 +106,18 @@ "MPFrame", "LayoutFrame" ], - "instance_attributes": [], + "instance_attributes": [ + "use_range_limit" + ], "prenodes": { "FrameHold01": { "nodeclass": "FrameHold", "dependent": "", "knobs": [ { - "type": "formatable", + "type": "expression", "name": "first_frame", - "template": "{frame}", - "to_type": "number" + "expression": "parent.first" } ] } From 63c46e10112392daaccea1f3fd0cde65305729b9 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 6 Oct 2022 21:22:43 +0200 Subject: [PATCH 060/480] nuke: adding optional use frame range to plugin --- openpype/hosts/nuke/plugins/create/create_write_still.py | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/nuke/plugins/create/create_write_still.py b/openpype/hosts/nuke/plugins/create/create_write_still.py index 7627bc3d9f..a254df4b62 100644 --- a/openpype/hosts/nuke/plugins/create/create_write_still.py +++ b/openpype/hosts/nuke/plugins/create/create_write_still.py @@ -56,13 +56,16 @@ class CreateWriteStill(napi.NukeWriteCreator): return attr_defs def create_instance_node(self, subset_name, instance_data): + linked_knobs_ = [] + if "use_range_limit" in self.instance_attributes: + linked_knobs_ = ["channels", "___", "first", "last", "use_limit"] + # add fpath_template write_data = { "creator": self.__class__.__name__, "subset": subset_name, "fpath_template": self.temp_rendering_path_template } - write_data.update(instance_data) created_node = napi.create_write_node( @@ -70,7 +73,7 @@ class CreateWriteStill(napi.NukeWriteCreator): write_data, 
input=self.selected_node,
         prenodes=self.prenodes,
-        linked_knobs=["channels", "___", "first", "last", "use_limit"],
+        linked_knobs=linked_knobs_,
         **{
             "frame": nuke.frame()
         }
@@ -137,6 +140,6 @@ class CreateWriteStill(napi.NukeWriteCreator):
         w_node["use_limit"].setValue(True)
         w_node["first"].setValue(nuke.frame())
-        w_node["last"].setValue(nuke.frame())
+        w_node["last"].setExpression("first")

         return write_node

From 5946ea708c63bc317419e3f3ea8212f558091caf Mon Sep 17 00:00:00 2001
From: Jakub Jezek
Date: Thu, 6 Oct 2022 21:24:01 +0200
Subject: [PATCH 061/480] nuke clear printing

---
 openpype/hosts/nuke/api/plugin.py | 3 ---
 1 file changed, 3 deletions(-)

diff --git a/openpype/hosts/nuke/api/plugin.py b/openpype/hosts/nuke/api/plugin.py
index 340ccc7abb..585841ece8 100644
--- a/openpype/hosts/nuke/api/plugin.py
+++ b/openpype/hosts/nuke/api/plugin.py
@@ -337,9 +337,6 @@ class NukeWriteCreator(NukeCreator):
         # plugin settings
         plugin_settings = self.get_creator_settings(project_settings)

-        self.log.debug("___________________")
-        self.log.debug(pformat(plugin_settings))
-
         # individual attributes
         self.instance_attributes = plugin_settings[
             "instance_attributes"]

From 82e18450ce55abf20d3967d53896c95cbd333b84 Mon Sep 17 00:00:00 2001
From: Jakub Jezek
Date: Thu, 6 Oct 2022 21:50:49 +0200
Subject: [PATCH 062/480] nuke: refactor prerender write creator

---
 .../plugins/create/create_write_prerender.py | 180 ++++++++++++++----
 .../nuke/plugins/create/create_write_still.py | 3 +
 2 files changed, 141 insertions(+), 42 deletions(-)

diff --git a/openpype/hosts/nuke/plugins/create/create_write_prerender.py b/openpype/hosts/nuke/plugins/create/create_write_prerender.py
index fec97167fb..976b3261d9 100644
--- a/openpype/hosts/nuke/plugins/create/create_write_prerender.py
+++ b/openpype/hosts/nuke/plugins/create/create_write_prerender.py
@@ -1,56 +1,153 @@
 import nuke
+import sys
+import six

-from openpype.hosts.nuke.api import plugin
-from openpype.hosts.nuke.api.lib import (
-    create_write_node, create_write_node_legacy)
+from openpype.pipeline import (
+    CreatedInstance
 )
+from openpype.lib import (
+    BoolDef,
+    NumberDef,
+    UISeparatorDef,
+    UILabelDef
 )
+from openpype.hosts.nuke import api as napi

-class CreateWritePrerender(plugin.AbstractWriteRender):
-    # change this to template preset
-    name = "WritePrerender"
+class CreateWritePrerender(napi.NukeWriteCreator):
+    identifier = "create_write_prerender"
     label = "Create Write Prerender"
-    hosts = ["nuke"]
-    n_class = "Write"
     family = "prerender"
     icon = "sign-out"

-    # settings
-    fpath_template = "{work}/render/nuke/{subset}/{subset}.{frame}.{ext}"
-    defaults = ["Key01", "Bg01", "Fg01", "Branch01", "Part01"]
-    reviewable = False
-    use_range_limit = True
+    def get_pre_create_attr_defs(self):
+        attr_defs = [
+            BoolDef("use_selection", label="Use selection"),
+            self._get_render_target_enum()
+        ]
+        return attr_defs

-    def __init__(self, *args, **kwargs):
-        super(CreateWritePrerender, self).__init__(*args, **kwargs)
+    def get_instance_attr_defs(self):
+        attr_defs = [
+            self._get_render_target_enum(),
+            self._get_reviewable_bool()
+        ]
+        if "farm_rendering" in self.instance_attributes:
+            attr_defs.extend([
+                UISeparatorDef(),
+                UILabelDef("Farm rendering attributes"),
+                BoolDef("suspended_publish", label="Suspended publishing"),
+                NumberDef(
+                    "farm_priority",
+                    label="Priority",
+                    minimum=1,
+                    maximum=99,
+                    default=50
+                ),
+                NumberDef(
+                    "farm_chunk",
+                    label="Chunk size",
+                    minimum=1,
+                    maximum=99,
+                    default=10
+                ),
+                NumberDef(
+                    "farm_concurency",
label="Concurrent tasks",
+                    minimum=1,
+                    maximum=10,
+                    default=1
+                )
+            ])
+        return attr_defs
+
+    def create_instance_node(self, subset_name, instance_data):
+        linked_knobs_ = []
+        if "use_range_limit" in self.instance_attributes:
+            linked_knobs_ = ["channels", "___", "first", "last", "use_limit"]

-    def _create_write_node(self, selected_node, inputs, outputs, write_data):
         # add fpath_template
-        write_data["fpath_template"] = self.fpath_template
-        write_data["use_range_limit"] = self.use_range_limit
-        write_data["frame_range"] = (
-            nuke.root()["first_frame"].value(),
-            nuke.root()["last_frame"].value()
+        write_data = {
+            "creator": self.__class__.__name__,
+            "subset": subset_name,
+            "fpath_template": self.temp_rendering_path_template
+        }
+
+        write_data.update(instance_data)
+
+        # get width and height
+        if self.selected_node:
+            width, height = (
+                self.selected_node.width(), self.selected_node.height())
+        else:
+            actual_format = nuke.root().knob('format').value()
+            width, height = (actual_format.width(), actual_format.height())
+
+        created_node = napi.create_write_node(
+            subset_name,
+            write_data,
+            input=self.selected_node,
+            prenodes=self.prenodes,
+            linked_knobs=linked_knobs_,
+            **{
+                "width": width,
+                "height": height
+            }
+        )
+        self.add_info_knob(created_node)
+
+        self._add_frame_range_limit(created_node)
+
+        self.integrate_links(created_node, outputs=False)
+
+        return created_node
+
+    def create(self, subset_name, instance_data, pre_create_data):
+        # make sure selected nodes are added
+        self.set_selected_nodes(pre_create_data)
+
+        # make sure subset name is unique
+        if self.check_existing_subset(subset_name, instance_data):
+            raise napi.NukeCreatorError(
+                ("subset {} is already "
+                 "published.").format(subset_name))
+
+        instance_node = self.create_instance_node(
+            subset_name,
+            instance_data
         )

-        if not self.is_legacy():
-            return create_write_node(
-                self.data["subset"],
-                write_data,
-                input=selected_node,
-                review=self.reviewable,
-                linked_knobs=["channels", "___", "first", "last", "use_limit"]
-            )
-        else:
-            return create_write_node_legacy(
-                self.data["subset"],
-                write_data,
-                input=selected_node,
-                review=self.reviewable,
-                linked_knobs=["channels", "___", "first", "last", "use_limit"]
+        try:
+            instance = CreatedInstance(
+                self.family,
+                subset_name,
+                instance_data,
+                self
             )

-    def _modify_write_node(self, write_node):
-        # open group node
+            instance.transient_data["node"] = instance_node
+
+            self._add_instance_to_context(instance)
+
+            napi.set_node_data(
+                instance_node,
+                napi.INSTANCE_DATA_KNOB,
+                instance.data_to_store()
+            )
+
+            return instance
+
+        except Exception as er:
+            six.reraise(
+                napi.NukeCreatorError,
+                napi.NukeCreatorError("Creator error: {}".format(er)),
+                sys.exc_info()[2]
+            )
+
+    def _add_frame_range_limit(self, write_node):
+        if "use_range_limit" not in self.instance_attributes:
+            return
+
         write_node.begin()
         for n in nuke.allNodes():
             # get write node
@@ -58,9 +155,8 @@ class CreateWritePrerender(plugin.AbstractWriteRender):
                 w_node = n
         write_node.end()

-        if self.use_range_limit:
-            w_node["use_limit"].setValue(True)
-            w_node["first"].setValue(nuke.root()["first_frame"].value())
-            w_node["last"].setValue(nuke.root()["last_frame"].value())
+        w_node["use_limit"].setValue(True)
+        w_node["first"].setValue(nuke.root()["first_frame"].value())
+        w_node["last"].setValue(nuke.root()["last_frame"].value())

         return write_node

diff --git a/openpype/hosts/nuke/plugins/create/create_write_still.py b/openpype/hosts/nuke/plugins/create/create_write_still.py
index
a254df4b62..e77c51e9be 100644 --- a/openpype/hosts/nuke/plugins/create/create_write_still.py +++ b/openpype/hosts/nuke/plugins/create/create_write_still.py @@ -131,6 +131,9 @@ class CreateWriteStill(napi.NukeWriteCreator): ) def _add_frame_range_limit(self, write_node): + if "use_range_limit" not in self.instance_attributes: + return + write_node.begin() for n in nuke.allNodes(): # get write node From 42e993c0bb3c3a2e7c2d1746264e8c837c533ce4 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 7 Oct 2022 14:08:00 +0200 Subject: [PATCH 063/480] nuke: adding function for passing attributes from pre create to instance --- openpype/hosts/nuke/api/plugin.py | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/openpype/hosts/nuke/api/plugin.py b/openpype/hosts/nuke/api/plugin.py index 585841ece8..a509630fb7 100644 --- a/openpype/hosts/nuke/api/plugin.py +++ b/openpype/hosts/nuke/api/plugin.py @@ -53,6 +53,19 @@ class NukeCreatorError(CreatorError): class NukeCreator(NewCreator): selected_nodes = [] + def pass_pre_attributes_to_instance( + self, + instance_data, + pre_create_data, + keys=None + ): + if not keys: + keys = pre_create_data.keys() + + creator_attrs = instance_data["creator_attributes"] = {} + for pass_key in keys: + creator_attrs[pass_key] = pre_create_data[pass_key] + def add_info_knob(self, node): if "OP_info" in node.knobs().keys(): return From a68f74a37b6b06f94ce754ca36abf0201744abda Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 7 Oct 2022 14:08:24 +0200 Subject: [PATCH 064/480] nuke: updating write creators --- .../plugins/create/create_write_prerender.py | 17 +++++++++-- .../plugins/create/create_write_render.py | 14 ++++++++- .../nuke/plugins/create/create_write_still.py | 29 +++++++++++++++---- 3 files changed, 51 insertions(+), 9 deletions(-) diff --git a/openpype/hosts/nuke/plugins/create/create_write_prerender.py b/openpype/hosts/nuke/plugins/create/create_write_prerender.py index 976b3261d9..1ce30379ad 100644 --- a/openpype/hosts/nuke/plugins/create/create_write_prerender.py +++ b/openpype/hosts/nuke/plugins/create/create_write_prerender.py @@ -22,7 +22,11 @@ class CreateWritePrerender(napi.NukeWriteCreator): def get_pre_create_attr_defs(self): attr_defs = [ - BoolDef("use_selection", label="Use selection"), + BoolDef( + "use_selection", + default=True, + label="Use selection" + ), self._get_render_target_enum() ] return attr_defs @@ -98,11 +102,20 @@ class CreateWritePrerender(napi.NukeWriteCreator): self._add_frame_range_limit(created_node) - self.integrate_links(created_node, outputs=False) + self.integrate_links(created_node, outputs=True) return created_node def create(self, subset_name, instance_data, pre_create_data): + # pass values from precreate to instance + self.pass_pre_attributes_to_instance( + instance_data, + pre_create_data, + [ + "render_target" + ] + ) + # make sure selected nodes are added self.set_selected_nodes(pre_create_data) diff --git a/openpype/hosts/nuke/plugins/create/create_write_render.py b/openpype/hosts/nuke/plugins/create/create_write_render.py index 560b9e0c36..89696f55e0 100644 --- a/openpype/hosts/nuke/plugins/create/create_write_render.py +++ b/openpype/hosts/nuke/plugins/create/create_write_render.py @@ -22,7 +22,11 @@ class CreateWriteRender(napi.NukeWriteCreator): def get_pre_create_attr_defs(self): attr_defs = [ - BoolDef("use_selection", label="Use selection"), + BoolDef( + "use_selection", + default=True, + label="Use selection" + ), self._get_render_target_enum() ] return attr_defs @@ -96,6 +100,14 @@ class 
CreateWriteRender(napi.NukeWriteCreator): return created_node def create(self, subset_name, instance_data, pre_create_data): + # pass values from precreate to instance + self.pass_pre_attributes_to_instance( + instance_data, + pre_create_data, + [ + "render_target" + ] + ) # make sure selected nodes are added self.set_selected_nodes(pre_create_data) diff --git a/openpype/hosts/nuke/plugins/create/create_write_still.py b/openpype/hosts/nuke/plugins/create/create_write_still.py index e77c51e9be..b3a096a5ba 100644 --- a/openpype/hosts/nuke/plugins/create/create_write_still.py +++ b/openpype/hosts/nuke/plugins/create/create_write_still.py @@ -22,7 +22,11 @@ class CreateWriteStill(napi.NukeWriteCreator): def get_pre_create_attr_defs(self): attr_defs = [ - BoolDef("use_selection", label="Use selection"), + BoolDef( + "use_selection", + default=True, + label="Use selection" + ), self._get_render_target_enum(), UISeparatorDef(), self._get_frame_source_number() @@ -80,15 +84,25 @@ class CreateWriteStill(napi.NukeWriteCreator): ) self.add_info_knob(created_node) - self._add_frame_range_limit(created_node) + self._add_frame_range_limit(created_node, instance_data) - self.integrate_links(created_node, outputs=False) + self.integrate_links(created_node, outputs=True) return created_node def create(self, subset_name, instance_data, pre_create_data): subset_name = subset_name.format(**pre_create_data) + # pass values from precreate to instance + self.pass_pre_attributes_to_instance( + instance_data, + pre_create_data, + [ + "active_frame", + "render_target" + ] + ) + # make sure selected nodes are added self.set_selected_nodes(pre_create_data) @@ -100,7 +114,7 @@ class CreateWriteStill(napi.NukeWriteCreator): instance_node = self.create_instance_node( subset_name, - instance_data + instance_data, ) try: @@ -130,10 +144,13 @@ class CreateWriteStill(napi.NukeWriteCreator): sys.exc_info()[2] ) - def _add_frame_range_limit(self, write_node): + def _add_frame_range_limit(self, write_node, instance_data): if "use_range_limit" not in self.instance_attributes: return + active_frame = ( + instance_data["creator_attributes"].get("active_frame")) + write_node.begin() for n in nuke.allNodes(): # get write node @@ -142,7 +159,7 @@ class CreateWriteStill(napi.NukeWriteCreator): write_node.end() w_node["use_limit"].setValue(True) - w_node["first"].setValue(nuke.frame()) + w_node["first"].setValue(active_frame or nuke.frame()) w_node["last"].setExpression("first") return write_node From 519642b6c5d7dba2456b4dbb0112c0262a2b5f06 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 7 Oct 2022 15:21:21 +0200 Subject: [PATCH 065/480] nuke: two way compatibility for getting node from instance --- openpype/hosts/nuke/api/__init__.py | 4 +++- openpype/hosts/nuke/api/plugin.py | 10 +++++++++- 2 files changed, 12 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/nuke/api/__init__.py b/openpype/hosts/nuke/api/__init__.py index 47a62aa3e0..156b3f0de0 100644 --- a/openpype/hosts/nuke/api/__init__.py +++ b/openpype/hosts/nuke/api/__init__.py @@ -13,7 +13,8 @@ from .plugin import ( NukeCreator, NukeWriteCreator, NukeCreatorError, - OpenPypeCreator + OpenPypeCreator, + get_instance_node ) from .pipeline import ( NukeHost, @@ -64,6 +65,7 @@ __all__ = ( "NukeCreatorError", "OpenPypeCreator", "NukeHost", + "get_instance_node", "ls", diff --git a/openpype/hosts/nuke/api/plugin.py b/openpype/hosts/nuke/api/plugin.py index a509630fb7..193c2e8c2d 100644 --- a/openpype/hosts/nuke/api/plugin.py +++ b/openpype/hosts/nuke/api/plugin.py 
@@ -1,4 +1,3 @@ -from pprint import pformat import nuke import os @@ -407,6 +406,15 @@ class OpenPypeCreator(LegacyCreator): return instance +def get_instance_node(instance): + # new publisher way + if instance.data.get("transientData"): + return instance.data["transientData"]["node"] + else: + # or backward compatible + return instance[0] + + def get_review_presets_config(): settings = get_current_project_settings() review_profiles = ( From 61068436c418513cf0399123a2ea93d9e7c9b195 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 7 Oct 2022 15:22:04 +0200 Subject: [PATCH 066/480] nuke: two way compatibility for publish plugins getting instance node and child nodes --- .../nuke/plugins/publish/collect_backdrop.py | 9 ++------ .../nuke/plugins/publish/collect_gizmo.py | 5 ++--- .../nuke/plugins/publish/collect_model.py | 7 +++--- .../nuke/plugins/publish/collect_reads.py | 9 ++++---- .../plugins/publish/collect_slate_node.py | 3 ++- .../nuke/plugins/publish/extract_gizmo.py | 6 +++-- .../nuke/plugins/publish/extract_model.py | 6 +++-- .../plugins/publish/extract_render_local.py | 6 ++++- .../nuke/plugins/publish/extract_thumbnail.py | 12 ++++------ .../nuke/plugins/publish/precollect_writes.py | 7 +++++- .../plugins/publish/validate_asset_name.py | 9 ++++---- .../nuke/plugins/publish/validate_backdrop.py | 10 ++++----- .../nuke/plugins/publish/validate_gizmo.py | 8 +++---- .../publish/validate_output_resolution.py | 20 ++++++++++++----- .../plugins/publish/validate_read_legacy.py | 22 +++++++++---------- .../publish/validate_rendered_frames.py | 7 ++++-- .../plugins/publish/validate_write_legacy.py | 6 ++--- .../plugins/publish/validate_write_nodes.py | 19 ++++++++++++---- 18 files changed, 99 insertions(+), 72 deletions(-) diff --git a/openpype/hosts/nuke/plugins/publish/collect_backdrop.py b/openpype/hosts/nuke/plugins/publish/collect_backdrop.py index 688b1054ab..dcabedf231 100644 --- a/openpype/hosts/nuke/plugins/publish/collect_backdrop.py +++ b/openpype/hosts/nuke/plugins/publish/collect_backdrop.py @@ -1,6 +1,7 @@ from pprint import pformat import pyblish.api from openpype.hosts.nuke.api import lib as pnlib +from openpype.hosts.nuke import api as napi import nuke @@ -17,12 +18,7 @@ class CollectBackdrops(pyblish.api.InstancePlugin): def process(self, instance): self.log.debug(pformat(instance.data)) - # new publisher way - if instance.data.get("transientData"): - bckn = instance.data["transientData"]["node"] - else: - # or backward compatible - bckn = instance[0] + bckn = napi.get_instance_node(instance) # define size of the backdrop left = bckn.xpos() @@ -91,5 +87,4 @@ class CollectBackdrops(pyblish.api.InstancePlugin): "frameStart": first_frame, "frameEnd": last_frame }) - self.log.info("Backdrop content collected: `{}`".format(instance[:])) self.log.info("Backdrop instance collected: `{}`".format(instance)) diff --git a/openpype/hosts/nuke/plugins/publish/collect_gizmo.py b/openpype/hosts/nuke/plugins/publish/collect_gizmo.py index 3db26096ae..755c196f8e 100644 --- a/openpype/hosts/nuke/plugins/publish/collect_gizmo.py +++ b/openpype/hosts/nuke/plugins/publish/collect_gizmo.py @@ -1,4 +1,5 @@ import pyblish.api +from openpype.hosts.nuke import api as napi import nuke @@ -13,8 +14,7 @@ class CollectGizmo(pyblish.api.InstancePlugin): families = ["gizmo"] def process(self, instance): - - grpn = instance[0] + grpn = napi.get_instance_node(instance) # add family to familiess instance.data["families"].insert(0, instance.data["family"]) @@ -46,5 +46,4 @@ class 
CollectGizmo(pyblish.api.InstancePlugin): "frameStart": first_frame, "frameEnd": last_frame }) - self.log.info("Gizmo content collected: `{}`".format(instance[:])) self.log.info("Gizmo instance collected: `{}`".format(instance)) diff --git a/openpype/hosts/nuke/plugins/publish/collect_model.py b/openpype/hosts/nuke/plugins/publish/collect_model.py index 5fca240553..253121ffd4 100644 --- a/openpype/hosts/nuke/plugins/publish/collect_model.py +++ b/openpype/hosts/nuke/plugins/publish/collect_model.py @@ -1,6 +1,6 @@ import pyblish.api import nuke - +from openpype.hosts.nuke import api as napi @pyblish.api.log class CollectModel(pyblish.api.InstancePlugin): @@ -14,12 +14,12 @@ class CollectModel(pyblish.api.InstancePlugin): def process(self, instance): - grpn = instance[0] + geo_node = napi.get_instance_node(instance) # add family to familiess instance.data["families"].insert(0, instance.data["family"]) # make label nicer - instance.data["label"] = grpn.name() + instance.data["label"] = geo_node.name() # Get frame range handle_start = instance.context.data["handleStart"] @@ -45,5 +45,4 @@ class CollectModel(pyblish.api.InstancePlugin): "frameStart": first_frame, "frameEnd": last_frame }) - self.log.info("Model content collected: `{}`".format(instance[:])) self.log.info("Model instance collected: `{}`".format(instance)) diff --git a/openpype/hosts/nuke/plugins/publish/collect_reads.py b/openpype/hosts/nuke/plugins/publish/collect_reads.py index b79d9646d5..7390ec6884 100644 --- a/openpype/hosts/nuke/plugins/publish/collect_reads.py +++ b/openpype/hosts/nuke/plugins/publish/collect_reads.py @@ -5,6 +5,7 @@ import pyblish.api from openpype.client import get_asset_by_name from openpype.pipeline import legacy_io +from openpype.hosts.nuke import api as napi @pyblish.api.log @@ -17,6 +18,8 @@ class CollectNukeReads(pyblish.api.InstancePlugin): families = ["source"] def process(self, instance): + node = napi.get_instance_node(instance) + project_name = legacy_io.active_project() asset_name = legacy_io.Session["AVALON_ASSET"] asset_doc = get_asset_by_name(project_name, asset_name) @@ -25,7 +28,6 @@ class CollectNukeReads(pyblish.api.InstancePlugin): self.log.debug("checking instance: {}".format(instance)) - node = instance[0] if node.Class() != "Read": return @@ -99,10 +101,7 @@ class CollectNukeReads(pyblish.api.InstancePlugin): } instance.data["representations"].append(representation) - transfer = False - if "publish" in node.knobs(): - transfer = node["publish"] - + transfer = node["publish"] if "publish" in node.knobs() else False instance.data['transfer'] = transfer # Add version data to instance diff --git a/openpype/hosts/nuke/plugins/publish/collect_slate_node.py b/openpype/hosts/nuke/plugins/publish/collect_slate_node.py index bfe32d8fd1..0a98bcf973 100644 --- a/openpype/hosts/nuke/plugins/publish/collect_slate_node.py +++ b/openpype/hosts/nuke/plugins/publish/collect_slate_node.py @@ -1,4 +1,5 @@ import pyblish.api +from openpype.hosts.nuke import api as napi import nuke @@ -11,7 +12,7 @@ class CollectSlate(pyblish.api.InstancePlugin): families = ["render", "render.local", "render.farm"] def process(self, instance): - node = instance[0] + node = napi.get_instance_node(instance) slate = next((n for n in nuke.allNodes() if "slate" in n.name().lower() diff --git a/openpype/hosts/nuke/plugins/publish/extract_gizmo.py b/openpype/hosts/nuke/plugins/publish/extract_gizmo.py index 3047ad6724..a3df497893 100644 --- a/openpype/hosts/nuke/plugins/publish/extract_gizmo.py +++ 
b/openpype/hosts/nuke/plugins/publish/extract_gizmo.py @@ -4,6 +4,7 @@ import nuke import pyblish.api from openpype.pipeline import publish +from openpype.hosts.nuke import api as napi from openpype.hosts.nuke.api import utils as pnutils from openpype.hosts.nuke.api.lib import ( maintained_selection, @@ -24,8 +25,9 @@ class ExtractGizmo(publish.Extractor): families = ["gizmo"] def process(self, instance): - tmp_nodes = list() - orig_grpn = instance[0] + tmp_nodes = [] + orig_grpn = napi.get_instance_node(instance) + # Define extract output file path stagingdir = self.staging_dir(instance) filename = "{0}.nk".format(instance.name) diff --git a/openpype/hosts/nuke/plugins/publish/extract_model.py b/openpype/hosts/nuke/plugins/publish/extract_model.py index d82cb3110b..5ca617f147 100644 --- a/openpype/hosts/nuke/plugins/publish/extract_model.py +++ b/openpype/hosts/nuke/plugins/publish/extract_model.py @@ -3,6 +3,7 @@ from pprint import pformat import nuke import pyblish.api +from openpype.hosts.nuke import api as napi from openpype.pipeline import publish from openpype.hosts.nuke.api.lib import ( maintained_selection, @@ -36,8 +37,9 @@ class ExtractModel(publish.Extractor): self.log.info("instance.data: `{}`".format( pformat(instance.data))) - rm_nodes = list() - model_node = instance[0] + rm_nodes = [] + model_node = napi.get_instance_node(instance) + self.log.info("Crating additional nodes") subset = instance.data["subset"] staging_dir = self.staging_dir(instance) diff --git a/openpype/hosts/nuke/plugins/publish/extract_render_local.py b/openpype/hosts/nuke/plugins/publish/extract_render_local.py index 843d588786..811b2d4ffb 100644 --- a/openpype/hosts/nuke/plugins/publish/extract_render_local.py +++ b/openpype/hosts/nuke/plugins/publish/extract_render_local.py @@ -23,9 +23,13 @@ class NukeRenderLocal(publish.Extractor): def process(self, instance): families = instance.data["families"] + child_nodes = ( + instance.data.get("transientData", {}).get("childNodes") + or instance + ) node = None - for x in instance: + for x in child_nodes: if x.Class() == "Write": node = x diff --git a/openpype/hosts/nuke/plugins/publish/extract_thumbnail.py b/openpype/hosts/nuke/plugins/publish/extract_thumbnail.py index 19eae9638b..dfb1b38284 100644 --- a/openpype/hosts/nuke/plugins/publish/extract_thumbnail.py +++ b/openpype/hosts/nuke/plugins/publish/extract_thumbnail.py @@ -4,11 +4,7 @@ import nuke import pyblish.api from openpype.pipeline import publish -from openpype.hosts.nuke.api import ( - maintained_selection, - get_view_process_node -) - +from openpype.hosts.nuke import api as napi if sys.version_info[0] >= 3: unicode = str @@ -38,7 +34,7 @@ class ExtractThumbnail(publish.Extractor): if "render.farm" in instance.data["families"]: return - with maintained_selection(): + with napi.maintained_selection(): self.log.debug("instance: {}".format(instance)) self.log.debug("instance.data[families]: {}".format( instance.data["families"])) @@ -69,7 +65,7 @@ class ExtractThumbnail(publish.Extractor): bake_viewer_input_process_node = kwargs[ "bake_viewer_input_process"] - node = instance[0] # group node + node = napi.get_instance_node(instance) # group node self.log.info("Creating staging dir...") if "representations" not in instance.data: @@ -144,7 +140,7 @@ class ExtractThumbnail(publish.Extractor): if bake_viewer_process: if bake_viewer_input_process_node: # get input process and connect it to baking - ipn = get_view_process_node() + ipn = napi.get_view_process_node() if ipn is not None: ipn.setInput(0, 
previous_node) previous_node = ipn diff --git a/openpype/hosts/nuke/plugins/publish/precollect_writes.py b/openpype/hosts/nuke/plugins/publish/precollect_writes.py index 17c4bc30cf..10eeb91644 100644 --- a/openpype/hosts/nuke/plugins/publish/precollect_writes.py +++ b/openpype/hosts/nuke/plugins/publish/precollect_writes.py @@ -27,8 +27,13 @@ class CollectNukeWrites(pyblish.api.InstancePlugin): _families_test = [instance.data["family"]] + instance.data["families"] self.log.debug("_families_test: {}".format(_families_test)) + child_nodes = ( + instance.data.get("transientData", {}).get("childNodes") + or instance + ) + node = None - for x in instance: + for x in child_nodes: if x.Class() == "Write": node = x diff --git a/openpype/hosts/nuke/plugins/publish/validate_asset_name.py b/openpype/hosts/nuke/plugins/publish/validate_asset_name.py index 52731140ff..0d64da074e 100644 --- a/openpype/hosts/nuke/plugins/publish/validate_asset_name.py +++ b/openpype/hosts/nuke/plugins/publish/validate_asset_name.py @@ -6,7 +6,7 @@ import nuke import pyblish.api import openpype.hosts.nuke.api.lib as nlib -import openpype.hosts.nuke.api as nuke_api +from openpype.hosts.nuke import api as napi from openpype.pipeline.publish import ( ValidateContentsOrder, PublishXmlValidationError, @@ -85,8 +85,8 @@ class RepairSelectInvalidInstances(pyblish.api.Action): context_asset = context.data["assetEntity"]["name"] for instance in instances: - origin_node = instance[0] - nuke_api.lib.recreate_instance( + origin_node = napi.get_instance_node(instance) + napi.lib.recreate_instance( origin_node, avalon_data={"asset": context_asset} ) @@ -112,6 +112,7 @@ class ValidateCorrectAssetName(pyblish.api.InstancePlugin): def process(self, instance): asset = instance.data.get("asset") context_asset = instance.context.data["assetEntity"]["name"] + node = napi.get_instance_node(instance) msg = ( "Instance `{}` has wrong shot/asset name:\n" @@ -123,7 +124,7 @@ class ValidateCorrectAssetName(pyblish.api.InstancePlugin): if asset != context_asset: raise PublishXmlValidationError( self, msg, formatting_data={ - "node_name": instance[0]["name"].value(), + "node_name": node.name(), "wrong_name": asset, "correct_name": context_asset } diff --git a/openpype/hosts/nuke/plugins/publish/validate_backdrop.py b/openpype/hosts/nuke/plugins/publish/validate_backdrop.py index 7ce3ce0e3f..8ee8e631b8 100644 --- a/openpype/hosts/nuke/plugins/publish/validate_backdrop.py +++ b/openpype/hosts/nuke/plugins/publish/validate_backdrop.py @@ -1,6 +1,6 @@ import nuke import pyblish -from openpype.hosts.nuke.api.lib import maintained_selection +from openpype.hosts.nuke import api as napi from openpype.pipeline import PublishXmlValidationError @@ -25,14 +25,14 @@ class SelectCenterInNodeGraph(pyblish.api.Action): # Apply pyblish.logic to get the instances for the plug-in instances = pyblish.api.instances_by_plugin(failed, plugin) - all_xC = list() - all_yC = list() + all_xC = [] + all_yC = [] # maintain selection - with maintained_selection(): + with napi.maintained_selection(): # collect all failed nodes xpos and ypos for instance in instances: - bdn = instance[0] + bdn = napi.get_instance_node(instance) xC = bdn.xpos() + bdn.screenWidth() / 2 yC = bdn.ypos() + bdn.screenHeight() / 2 diff --git a/openpype/hosts/nuke/plugins/publish/validate_gizmo.py b/openpype/hosts/nuke/plugins/publish/validate_gizmo.py index 2321bd1fd4..05a4085483 100644 --- a/openpype/hosts/nuke/plugins/publish/validate_gizmo.py +++ b/openpype/hosts/nuke/plugins/publish/validate_gizmo.py 
@@ -1,6 +1,6 @@ import pyblish from openpype.pipeline import PublishXmlValidationError -from openpype.hosts.nuke.api import maintained_selection +from openpype.hosts.nuke import api as napi import nuke @@ -26,10 +26,10 @@ class OpenFailedGroupNode(pyblish.api.Action): instances = pyblish.api.instances_by_plugin(failed, plugin) # maintain selection - with maintained_selection(): + with napi.maintained_selection(): # collect all failed nodes xpos and ypos for instance in instances: - grpn = instance[0] + grpn = napi.get_instance_node(instance) nuke.showDag(grpn) @@ -45,7 +45,7 @@ class ValidateGizmo(pyblish.api.InstancePlugin): actions = [OpenFailedGroupNode] def process(self, instance): - grpn = instance[0] + grpn = napi.get_instance_node(instance) with grpn: connections_out = nuke.allNodes('Output') diff --git a/openpype/hosts/nuke/plugins/publish/validate_output_resolution.py b/openpype/hosts/nuke/plugins/publish/validate_output_resolution.py index 1e59880f90..b7095e9ed7 100644 --- a/openpype/hosts/nuke/plugins/publish/validate_output_resolution.py +++ b/openpype/hosts/nuke/plugins/publish/validate_output_resolution.py @@ -1,6 +1,6 @@ import pyblish.api -from openpype.hosts.nuke.api import maintained_selection +from openpype.hosts.nuke import api as napi from openpype.pipeline import PublishXmlValidationError from openpype.pipeline.publish import RepairAction import nuke @@ -30,8 +30,13 @@ class ValidateOutputResolution(pyblish.api.InstancePlugin): @classmethod def get_reformat(cls, instance): + child_nodes = ( + instance.data.get("transientData", {}).get("childNodes") + or instance + ) + reformat = None - for inode in instance: + for inode in child_nodes: if inode.Class() != "Reformat": continue reformat = inode @@ -64,21 +69,26 @@ class ValidateOutputResolution(pyblish.api.InstancePlugin): @classmethod def repair(cls, instance): + child_nodes = ( + instance.data.get("transientData", {}).get("childNodes") + or instance + ) + invalid = cls.get_invalid(instance) - grp_node = instance[0] + grp_node = napi.get_instance_node(instance) if cls.missing_msg == invalid: # make sure we are inside of the group node with grp_node: # find input node and select it _input = None - for inode in instance: + for inode in child_nodes: if inode.Class() != "Input": continue _input = inode # add reformat node under it - with maintained_selection(): + with napi.maintained_selection(): _input['selected'].setValue(True) _rfn = nuke.createNode("Reformat", "name Reformat01") _rfn["resize"].setValue(0) diff --git a/openpype/hosts/nuke/plugins/publish/validate_read_legacy.py b/openpype/hosts/nuke/plugins/publish/validate_read_legacy.py index 2bf1ff81f8..ffd5cee710 100644 --- a/openpype/hosts/nuke/plugins/publish/validate_read_legacy.py +++ b/openpype/hosts/nuke/plugins/publish/validate_read_legacy.py @@ -5,7 +5,7 @@ import nuke import toml import pyblish.api from bson.objectid import ObjectId - +from openpype.hosts.nuke import api as napi from openpype.pipeline import ( discover_loader_plugins, load_container, @@ -31,22 +31,22 @@ class RepairReadLegacyAction(pyblish.api.Action): instances = pyblish.api.instances_by_plugin(failed, plugin) for instance in instances: - - data = toml.loads(instance[0]["avalon"].value()) - data["name"] = instance[0].name() - data["xpos"] = instance[0].xpos() - data["ypos"] = instance[0].ypos() + node = napi.get_instance_node(instance) + data = toml.loads(node["avalon"].value()) + data["name"] = node.name() + data["xpos"] = node.xpos() + data["ypos"] = node.ypos() data["extension"] = 
os.path.splitext( - instance[0]["file"].value() + node["file"].value() )[1][1:] data["connections"] = [] - for d in instance[0].dependent(): + for d in node.dependent(): for i in range(d.inputs()): - if d.input(i) == instance[0]: + if d.input(i) == node: data["connections"].append([i, d]) - nuke.delete(instance[0]) + nuke.delete(node) loader_name = "LoadSequence" if data["extension"] == "mov": @@ -72,7 +72,7 @@ class RepairReadLegacyAction(pyblish.api.Action): class ValidateReadLegacy(pyblish.api.InstancePlugin): - """Validate legacy read instance[0]s.""" + """Validate legacy read nodes.""" order = pyblish.api.ValidatorOrder optional = True diff --git a/openpype/hosts/nuke/plugins/publish/validate_rendered_frames.py b/openpype/hosts/nuke/plugins/publish/validate_rendered_frames.py index 237ff423e5..c1e511f534 100644 --- a/openpype/hosts/nuke/plugins/publish/validate_rendered_frames.py +++ b/openpype/hosts/nuke/plugins/publish/validate_rendered_frames.py @@ -1,6 +1,7 @@ import os import pyblish.api import clique +from openpype.hosts.nuke import api as napi from openpype.pipeline import PublishXmlValidationError @@ -23,6 +24,7 @@ class RepairActionBase(pyblish.api.Action): def repair_knob(self, instances, state): for instance in instances: + node = napi.get_instance_node(instance) files_remove = [os.path.join(instance.data["outputDir"], f) for r in instance.data.get("representations", []) for f in r.get("files", []) @@ -31,7 +33,7 @@ class RepairActionBase(pyblish.api.Action): for f in files_remove: os.remove(f) self.log.debug("removing file: {}".format(f)) - instance[0]["render"].setValue(state) + node["render"].setValue(state) self.log.info("Rendering toggled to `{}`".format(state)) @@ -62,9 +64,10 @@ class ValidateRenderedFrames(pyblish.api.InstancePlugin): actions = [RepairCollectionActionToLocal, RepairCollectionActionToFarm] def process(self, instance): + node = napi.get_instance_node(instance) f_data = { - "node_name": instance[0]["name"].value() + "node_name": node.name() } for repre in instance.data["representations"]: diff --git a/openpype/hosts/nuke/plugins/publish/validate_write_legacy.py b/openpype/hosts/nuke/plugins/publish/validate_write_legacy.py index 699526ef57..e9bcb08b00 100644 --- a/openpype/hosts/nuke/plugins/publish/validate_write_legacy.py +++ b/openpype/hosts/nuke/plugins/publish/validate_write_legacy.py @@ -7,7 +7,7 @@ import pyblish.api from openpype.pipeline import discover_creator_plugins from openpype.pipeline.publish import RepairAction from openpype.hosts.nuke.api.lib import get_avalon_knob_data - +from openpype.hosts.nuke import api as napi class ValidateWriteLegacy(pyblish.api.InstancePlugin): """Validate legacy write nodes.""" @@ -20,7 +20,7 @@ class ValidateWriteLegacy(pyblish.api.InstancePlugin): actions = [RepairAction] def process(self, instance): - node = instance[0] + node = napi.get_instance_node(instance) msg = "Clean up legacy write node \"{}\"".format(instance) if node.Class() not in ["Group", "Write"]: @@ -48,7 +48,7 @@ class ValidateWriteLegacy(pyblish.api.InstancePlugin): @classmethod def repair(cls, instance): - node = instance[0] + node = napi.get_instance_node(instance) if "Write" in node.Class(): data = toml.loads(node["avalon"].value()) diff --git a/openpype/hosts/nuke/plugins/publish/validate_write_nodes.py b/openpype/hosts/nuke/plugins/publish/validate_write_nodes.py index 26a563b13b..490ab2e5ae 100644 --- a/openpype/hosts/nuke/plugins/publish/validate_write_nodes.py +++ b/openpype/hosts/nuke/plugins/publish/validate_write_nodes.py @@ 
-5,6 +5,7 @@ from openpype.hosts.nuke.api.lib import ( set_node_knobs_from_settings, color_gui_to_int ) +from openpype.hosts.nuke import api as napi from openpype.pipeline import PublishXmlValidationError @@ -18,10 +19,15 @@ class RepairNukeWriteNodeAction(pyblish.api.Action): instances = get_errored_instances_from_context(context) for instance in instances: - write_group_node = instance[0] + child_nodes = ( + instance.data.get("transientData", {}).get("childNodes") + or instance + ) + + write_group_node = napi.get_instance_node(instance) # get write node from inside of group write_node = None - for x in instance: + for x in child_nodes: if x.Class() == "Write": write_node = x @@ -47,11 +53,16 @@ class ValidateNukeWriteNode(pyblish.api.InstancePlugin): hosts = ["nuke"] def process(self, instance): - write_group_node = instance[0] + child_nodes = ( + instance.data.get("transientData", {}).get("childNodes") + or instance + ) + + write_group_node = napi.get_instance_node(instance) # get write node from inside of group write_node = None - for x in instance: + for x in child_nodes: if x.Class() == "Write": write_node = x From f8cba8d477853ff024899084d9798f6c110d87af Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 7 Oct 2022 15:39:35 +0200 Subject: [PATCH 067/480] nuke: renaming precollect to collect --- .../publish/{precollect_instances.py => collect_instances.py} | 4 ++-- .../publish/{precollect_workfile.py => collect_workfile.py} | 2 +- .../publish/{precollect_writes.py => collect_writes.py} | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) rename openpype/hosts/nuke/plugins/publish/{precollect_instances.py => collect_instances.py} (98%) rename openpype/hosts/nuke/plugins/publish/{precollect_workfile.py => collect_workfile.py} (98%) rename openpype/hosts/nuke/plugins/publish/{precollect_writes.py => collect_writes.py} (99%) diff --git a/openpype/hosts/nuke/plugins/publish/precollect_instances.py b/openpype/hosts/nuke/plugins/publish/collect_instances.py similarity index 98% rename from openpype/hosts/nuke/plugins/publish/precollect_instances.py rename to openpype/hosts/nuke/plugins/publish/collect_instances.py index b396056eb9..54ad0229dd 100644 --- a/openpype/hosts/nuke/plugins/publish/precollect_instances.py +++ b/openpype/hosts/nuke/plugins/publish/collect_instances.py @@ -8,11 +8,11 @@ from openpype.hosts.nuke.api.lib import ( @pyblish.api.log -class PreCollectNukeInstances(pyblish.api.ContextPlugin): +class CollectNukeInstances(pyblish.api.ContextPlugin): """Collect all nodes with Avalon knob.""" order = pyblish.api.CollectorOrder - 0.49 - label = "Pre-collect Instances" + label = "Collect Instances" hosts = ["nuke", "nukeassist"] # presets diff --git a/openpype/hosts/nuke/plugins/publish/precollect_workfile.py b/openpype/hosts/nuke/plugins/publish/collect_workfile.py similarity index 98% rename from openpype/hosts/nuke/plugins/publish/precollect_workfile.py rename to openpype/hosts/nuke/plugins/publish/collect_workfile.py index 822f405a6f..8b6ab5d072 100644 --- a/openpype/hosts/nuke/plugins/publish/precollect_workfile.py +++ b/openpype/hosts/nuke/plugins/publish/collect_workfile.py @@ -15,7 +15,7 @@ class CollectWorkfile(pyblish.api.ContextPlugin): """Collect current script for publish.""" order = pyblish.api.CollectorOrder - 0.50 - label = "Pre-collect Workfile" + label = "Collect Workfile" hosts = ['nuke'] def process(self, context): # sourcery skip: avoid-builtin-shadow diff --git a/openpype/hosts/nuke/plugins/publish/precollect_writes.py 
b/openpype/hosts/nuke/plugins/publish/collect_writes.py similarity index 99% rename from openpype/hosts/nuke/plugins/publish/precollect_writes.py rename to openpype/hosts/nuke/plugins/publish/collect_writes.py index 10eeb91644..8d7ae5d0df 100644 --- a/openpype/hosts/nuke/plugins/publish/precollect_writes.py +++ b/openpype/hosts/nuke/plugins/publish/collect_writes.py @@ -19,7 +19,7 @@ class CollectNukeWrites(pyblish.api.InstancePlugin): """Collect all write nodes.""" order = pyblish.api.CollectorOrder - 0.48 - label = "Pre-collect Writes" + label = "Collect Writes" hosts = ["nuke", "nukeassist"] families = ["write"] From 173a531f5f9d3b16c0779176093eb5b3488bef25 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Sat, 8 Oct 2022 21:51:15 +0200 Subject: [PATCH 068/480] nuke: updating settings --- openpype/settings/defaults/project_anatomy/imageio.json | 2 +- openpype/settings/defaults/project_settings/nuke.json | 6 +++--- .../schemas/projects_schema/schema_project_nuke.json | 4 ++-- .../projects_schema/schemas/schema_nuke_publish.json | 4 ++-- 4 files changed, 8 insertions(+), 8 deletions(-) diff --git a/openpype/settings/defaults/project_anatomy/imageio.json b/openpype/settings/defaults/project_anatomy/imageio.json index f0be8f95f4..caa2a8a206 100644 --- a/openpype/settings/defaults/project_anatomy/imageio.json +++ b/openpype/settings/defaults/project_anatomy/imageio.json @@ -156,7 +156,7 @@ }, { "plugins": [ - "CreateWriteStill" + "CreateWriteImage" ], "nukeNodeClass": "Write", "knobs": [ diff --git a/openpype/settings/defaults/project_settings/nuke.json b/openpype/settings/defaults/project_settings/nuke.json index 783b42348c..8e4dd163ab 100644 --- a/openpype/settings/defaults/project_settings/nuke.json +++ b/openpype/settings/defaults/project_settings/nuke.json @@ -99,10 +99,10 @@ ], "prenodes": {} }, - "CreateWriteStill": { + "CreateWriteImage": { "temp_rendering_path_template": "{work}/renders/nuke/{subset}/{subset}.{ext}", "default_variants": [ - "ImageFrame", + "StillFrame", "MPFrame", "LayoutFrame" ], @@ -125,7 +125,7 @@ } }, "publish": { - "PreCollectNukeInstances": { + "CollectInstanceData": { "sync_workfile_version_on_families": [ "nukenodes", "camera", diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_nuke.json b/openpype/settings/entities/schemas/projects_schema/schema_project_nuke.json index 520985154e..00ba3d5f03 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_nuke.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_nuke.json @@ -205,8 +205,8 @@ { "type": "dict", "collapsible": true, - "key": "CreateWriteStill", - "label": "CreateWriteStill", + "key": "CreateWriteImage", + "label": "CreateWriteImage", "is_group": true, "children": [ { diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_nuke_publish.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_nuke_publish.json index e5827a92c4..aa0c785e7a 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_nuke_publish.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_nuke_publish.json @@ -11,8 +11,8 @@ { "type": "dict", "collapsible": true, - "key": "PreCollectNukeInstances", - "label": "PreCollectNukeInstances", + "key": "CollectInstanceData", + "label": "CollectInstanceData", "is_group": true, "children": [ { From 5ba40a60526fb1d221b6af6a323da3ebc8501eaa Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Sat, 8 Oct 2022 21:52:16 +0200 Subject: [PATCH 
069/480] Nuke: rename still to image and change family --- .../{create_write_still.py => create_write_image.py} | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) rename openpype/hosts/nuke/plugins/create/{create_write_still.py => create_write_image.py} (96%) diff --git a/openpype/hosts/nuke/plugins/create/create_write_still.py b/openpype/hosts/nuke/plugins/create/create_write_image.py similarity index 96% rename from openpype/hosts/nuke/plugins/create/create_write_still.py rename to openpype/hosts/nuke/plugins/create/create_write_image.py index b3a096a5ba..1bb114903f 100644 --- a/openpype/hosts/nuke/plugins/create/create_write_still.py +++ b/openpype/hosts/nuke/plugins/create/create_write_image.py @@ -14,10 +14,10 @@ from openpype.lib import ( from openpype.hosts.nuke import api as napi -class CreateWriteStill(napi.NukeWriteCreator): - identifier = "create_write_still" - label = "Create Write Still Frame" - family = "still" +class CreateWriteImage(napi.NukeWriteCreator): + identifier = "create_write_image" + label = "Create Write Image" + family = "image" icon = "sign-out" def get_pre_create_attr_defs(self): From f8455915ac5dcb6f422c517b68438e5371ad4b4a Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Sat, 8 Oct 2022 21:52:52 +0200 Subject: [PATCH 070/480] Nuke: refactor instance collector --- .../plugins/publish/collect_instance_data.py | 45 +++++ .../nuke/plugins/publish/collect_instances.py | 158 ------------------ 2 files changed, 45 insertions(+), 158 deletions(-) create mode 100644 openpype/hosts/nuke/plugins/publish/collect_instance_data.py delete mode 100644 openpype/hosts/nuke/plugins/publish/collect_instances.py diff --git a/openpype/hosts/nuke/plugins/publish/collect_instance_data.py b/openpype/hosts/nuke/plugins/publish/collect_instance_data.py new file mode 100644 index 0000000000..9614ebdea6 --- /dev/null +++ b/openpype/hosts/nuke/plugins/publish/collect_instance_data.py @@ -0,0 +1,45 @@ +import nuke +import pyblish.api + + +@pyblish.api.log +class CollectInstanceData(pyblish.api.InstancePlugin): + """Collect all nodes with Avalon knob.""" + + order = pyblish.api.CollectorOrder - 0.49 + label = "Collect Instance Data" + hosts = ["nuke", "nukeassist"] + + # presets + sync_workfile_version_on_families = [] + + def process(self, instance): + family = instance.data["family"] + + # Get format + root = nuke.root() + format_ = root['format'].value() + resolution_width = format_.width() + resolution_height = format_.height() + pixel_aspect = format_.pixelAspect() + + # sync workfile version + if family in self.sync_workfile_version_on_families: + self.log.debug( + "Syncing version with workfile for '{}'".format( + family + ) + ) + # get version to instance for integration + instance.data['version'] = instance.context.data['version'] + + instance.data.update({ + "step": 1, + "fps": root['fps'].value(), + "resolutionWidth": resolution_width, + "resolutionHeight": resolution_height, + "pixelAspect": pixel_aspect + + }) + self.log.debug("Collected instance: {}".format( + instance.data)) diff --git a/openpype/hosts/nuke/plugins/publish/collect_instances.py b/openpype/hosts/nuke/plugins/publish/collect_instances.py deleted file mode 100644 index 54ad0229dd..0000000000 --- a/openpype/hosts/nuke/plugins/publish/collect_instances.py +++ /dev/null @@ -1,158 +0,0 @@ -import nuke -import pyblish.api - -from openpype.hosts.nuke.api.lib import ( - add_publish_knob, - get_avalon_knob_data -) - - -@pyblish.api.log -class CollectNukeInstances(pyblish.api.ContextPlugin): - """Collect all 
nodes with Avalon knob.""" - - order = pyblish.api.CollectorOrder - 0.49 - label = "Collect Instances" - hosts = ["nuke", "nukeassist"] - - # presets - sync_workfile_version_on_families = [] - - def process(self, context): - instances = [] - - root = nuke.root() - - self.log.debug("nuke.allNodes(): {}".format(nuke.allNodes())) - for node in nuke.allNodes(): - - if node.Class() in ["Viewer", "Dot"]: - continue - - try: - if node["disable"].value(): - continue - except Exception as E: - self.log.warning(E) - - # get data from avalon knob - avalon_knob_data = get_avalon_knob_data( - node, ["avalon:", "ak:"]) - - self.log.debug("avalon_knob_data: {}".format(avalon_knob_data)) - - if not avalon_knob_data: - continue - - if avalon_knob_data["id"] != "pyblish.avalon.instance": - continue - - # establish families - family = avalon_knob_data["family"] - families_ak = avalon_knob_data.get("families", []) - families = [] - - # except disabled nodes but exclude backdrops in test - if ("nukenodes" not in family) and (node["disable"].value()): - continue - - subset = avalon_knob_data.get( - "subset", None) or node["name"].value() - - # Create instance - instance = context.create_instance(subset) - instance.append(node) - - suspend_publish = False - if "suspend_publish" in node.knobs(): - suspend_publish = node["suspend_publish"].value() - instance.data["suspend_publish"] = suspend_publish - - # get review knob value - review = False - if "review" in node.knobs(): - review = node["review"].value() - - if review: - families.append("review") - - # Add all nodes in group instances. - if node.Class() == "Group": - # only alter families for render family - if families_ak and "write" in families_ak.lower(): - target = node["render"].value() - if target == "Use existing frames": - # Local rendering - self.log.info("flagged for no render") - families.append(families_ak.lower()) - elif target == "Local": - # Local rendering - self.log.info("flagged for local render") - families.append("{}.local".format(family)) - family = families_ak.lower() - elif target == "On farm": - # Farm rendering - self.log.info("flagged for farm render") - instance.data["transfer"] = False - instance.data["farm"] = True - families.append("{}.farm".format(family)) - family = families_ak.lower() - - node.begin() - for i in nuke.allNodes(): - instance.append(i) - node.end() - - if not families and families_ak and family not in [ - "render", "prerender"]: - families.append(families_ak.lower()) - - self.log.debug("__ family: `{}`".format(family)) - self.log.debug("__ families: `{}`".format(families)) - - # Get format - format_ = root['format'].value() - resolution_width = format_.width() - resolution_height = format_.height() - pixel_aspect = format_.pixelAspect() - - # get publish knob value - if "publish" not in node.knobs(): - add_publish_knob(node) - - # sync workfile version - _families_test = [family] + families - self.log.debug("__ _families_test: `{}`".format(_families_test)) - for family_test in _families_test: - if family_test in self.sync_workfile_version_on_families: - self.log.debug( - "Syncing version with workfile for '{}'".format( - family_test - ) - ) - # get version to instance for integration - instance.data['version'] = instance.context.data['version'] - - instance.data.update({ - "subset": subset, - "asset": avalon_knob_data["asset"], - "label": node.name(), - "name": node.name(), - "subset": subset, - "family": family, - "families": families, - "avalonKnob": avalon_knob_data, - "step": 1, - "publish": 
node.knob('publish').value(), - "fps": nuke.root()['fps'].value(), - "resolutionWidth": resolution_width, - "resolutionHeight": resolution_height, - "pixelAspect": pixel_aspect, - "review": review, - "representations": [] - - }) - self.log.info("collected instance: {}".format(instance.data)) - instances.append(instance) - - self.log.debug("context: {}".format(context)) From 0e3a8ffb82008b895813caf8ac60b19f8b5c5cd9 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Sat, 8 Oct 2022 21:53:12 +0200 Subject: [PATCH 071/480] nuke: refactor collect writes --- .../nuke/plugins/publish/collect_writes.py | 224 ++++++++---------- 1 file changed, 102 insertions(+), 122 deletions(-) diff --git a/openpype/hosts/nuke/plugins/publish/collect_writes.py b/openpype/hosts/nuke/plugins/publish/collect_writes.py index 8d7ae5d0df..32d837f400 100644 --- a/openpype/hosts/nuke/plugins/publish/collect_writes.py +++ b/openpype/hosts/nuke/plugins/publish/collect_writes.py @@ -1,17 +1,8 @@ import os -import re from pprint import pformat import nuke import pyblish.api - -from openpype.client import ( - get_last_version_by_subset_name, - get_representations, -) -from openpype.pipeline import ( - legacy_io, - get_representation_path, -) +from openpype.hosts.nuke import api as napi @pyblish.api.log @@ -21,35 +12,49 @@ class CollectNukeWrites(pyblish.api.InstancePlugin): order = pyblish.api.CollectorOrder - 0.48 label = "Collect Writes" hosts = ["nuke", "nukeassist"] - families = ["write"] + families = ["render", "prerender", "image"] def process(self, instance): - _families_test = [instance.data["family"]] + instance.data["families"] - self.log.debug("_families_test: {}".format(_families_test)) + self.log.debug(pformat(instance.data)) + instance.data.update(instance.data["creator_attributes"]) - child_nodes = ( - instance.data.get("transientData", {}).get("childNodes") - or instance + group_node = napi.get_instance_node(instance) + render_target = instance.data["render_target"] + family = instance.data["family"] + families = instance.data["families"] + + # add targeted family to families + instance.data["families"].append( + "{}.{}".format(family, render_target) ) + # add additional keys to farm targeted + if render_target == "farm": + # Farm rendering + self.log.info("flagged for farm render") + instance.data["transfer"] = False + instance.data["farm"] = True - node = None + child_nodes = napi.get_instance_group_node_childs(instance) + instance.data["transientData"]["childNodes"] = child_nodes + + write_node = None for x in child_nodes: if x.Class() == "Write": - node = x + write_node = x - if node is None: + if write_node is None: + self.log.warning( + "Created node '{}' is missing write node!".format( + group_node.name() + ) + ) return - instance.data["writeNode"] = node + instance.data["writeNode"] = write_node self.log.debug("checking instance: {}".format(instance)) # Determine defined file type - ext = node["file_type"].value() - - # Determine output type - output_type = "img" - if ext == "mov": - output_type = "mov" + ext = write_node["file_type"].value() # Get frame range handle_start = instance.context.data["handleStart"] @@ -58,105 +63,85 @@ class CollectNukeWrites(pyblish.api.InstancePlugin): last_frame = int(nuke.root()["last_frame"].getValue()) frame_length = int(last_frame - first_frame + 1) - if node["use_limit"].getValue(): - first_frame = int(node["first"].getValue()) - last_frame = int(node["last"].getValue()) + if write_node["use_limit"].getValue(): + first_frame = int(write_node["first"].getValue()) + 
last_frame = int(write_node["last"].getValue()) - # Prepare expected output paths by evaluating each frame of write node - # - paths are first collected to set to avoid duplicated paths, then - # sorted and converted to list - node_file = node["file"] - expected_paths = list(sorted({ - node_file.evaluate(frame) - for frame in range(first_frame, last_frame + 1) - })) - expected_filenames = [ - os.path.basename(filepath) - for filepath in expected_paths - ] - path = nuke.filename(node) - output_dir = os.path.dirname(path) + write_file_path = nuke.filename(write_node) + output_dir = os.path.dirname(write_file_path) self.log.debug('output dir: {}'.format(output_dir)) - # create label - name = node.name() - # Include start and end render frame in label - label = "{0} ({1}-{2})".format( - name, - int(first_frame), - int(last_frame) - ) - - if [fm for fm in _families_test - if fm in ["render", "prerender", "still"]]: - if "representations" not in instance.data: - instance.data["representations"] = list() + if render_target == "frame": representation = { 'name': ext, 'ext': ext, "stagingDir": output_dir, - "tags": list() + "tags": [] } - try: - collected_frames = [ - filename - for filename in os.listdir(output_dir) - if filename in expected_filenames - ] - if collected_frames: - collected_frames_len = len(collected_frames) - frame_start_str = "%0{}d".format( - len(str(last_frame))) % first_frame - representation['frameStart'] = frame_start_str + # get file path knob + node_file_knob = write_node["file"] + # list file paths based on input frames + expected_paths = list(sorted({ + node_file_knob.evaluate(frame) + for frame in range(first_frame, last_frame + 1) + })) - # in case slate is expected and not yet rendered - self.log.debug("_ frame_length: {}".format(frame_length)) - self.log.debug( - "_ collected_frames_len: {}".format( - collected_frames_len)) - # this will only run if slate frame is not already - # rendered from previews publishes - if "slate" in _families_test \ - and (frame_length == collected_frames_len) \ - and ("prerender" not in _families_test): - frame_slate_str = "%0{}d".format( - len(str(last_frame))) % (first_frame - 1) - slate_frame = collected_frames[0].replace( - frame_start_str, frame_slate_str) - collected_frames.insert(0, slate_frame) + # convert only to base names + expected_filenames = [ + os.path.basename(filepath) + for filepath in expected_paths + ] + + # make sure files are existing at folder + collected_frames = [ + filename + for filename in os.listdir(output_dir) + if filename in expected_filenames + ] + + if collected_frames: + collected_frames_len = len(collected_frames) + frame_start_str = "%0{}d".format( + len(str(last_frame))) % first_frame + representation['frameStart'] = frame_start_str + + # in case slate is expected and not yet rendered + self.log.debug("_ frame_length: {}".format(frame_length)) + self.log.debug("_ collected_frames_len: {}".format( + collected_frames_len)) + + # this will only run if slate frame is not already + # rendered from previews publishes + if ( + "slate" in families + and frame_length == collected_frames_len + and family == "render" + ): + frame_slate_str = ( + "{{:0{}d}}".format(len(str(last_frame))) + ).format(first_frame - 1) + + slate_frame = collected_frames[0].replace( + frame_start_str, frame_slate_str) + collected_frames.insert(0, slate_frame) if collected_frames_len == 1: representation['files'] = collected_frames.pop() - if "still" in _families_test: - instance.data['family'] = 'image' - 
instance.data["families"].remove('still') else: representation['files'] = collected_frames - instance.data["representations"].append(representation) - except Exception: - instance.data["representations"].append(representation) - self.log.debug("couldn't collect frames: {}".format(label)) - # Add version data to instance - colorspace = node["colorspace"].value() - - # remove default part of the string - if "default (" in colorspace: - colorspace = re.sub(r"default.\(|\)", "", colorspace) - self.log.debug("colorspace: `{}`".format(colorspace)) + instance.data["representations"].append(representation) + # get colorspace and add to version data + colorspace = napi.get_colorspace_from_node(write_node) version_data = { - "families": [ - _f.replace(".local", "").replace(".farm", "") - for _f in _families_test if "write" != _f - ], "colorspace": colorspace } - group_node = [x for x in instance if x.Class() == "Group"][0] + # get deadline related attributes dl_chunk_size = 1 if "deadlineChunkSize" in group_node.knobs(): dl_chunk_size = group_node["deadlineChunkSize"].value() @@ -171,27 +156,16 @@ class CollectNukeWrites(pyblish.api.InstancePlugin): instance.data.update({ "versionData": version_data, - "path": path, + "path": write_file_path, "outputDir": output_dir, "ext": ext, - "label": label, - "outputType": output_type, "colorspace": colorspace, "deadlineChunkSize": dl_chunk_size, "deadlinePriority": dl_priority, "deadlineConcurrentTasks": dl_concurrent_tasks }) - if self.is_prerender(_families_test): - instance.data.update({ - "handleStart": 0, - "handleEnd": 0, - "frameStart": first_frame, - "frameEnd": last_frame, - "frameStartHandle": first_frame, - "frameEndHandle": last_frame, - }) - else: + if family == "render": instance.data.update({ "handleStart": handle_start, "handleEnd": handle_end, @@ -200,13 +174,19 @@ class CollectNukeWrites(pyblish.api.InstancePlugin): "frameStartHandle": first_frame, "frameEndHandle": last_frame, }) + else: + instance.data.update({ + "handleStart": 0, + "handleEnd": 0, + "frameStart": first_frame, + "frameEnd": last_frame, + "frameStartHandle": first_frame, + "frameEndHandle": last_frame, + }) - # make sure rendered sequence on farm will - # be used for exctract review - if not instance.data["review"]: - instance.data["useSequenceForReview"] = False + # make sure rendered sequence on farm will + # be used for exctract review + if not instance.data["review"]: + instance.data["useSequenceForReview"] = False self.log.debug("instance.data: {}".format(pformat(instance.data))) - - def is_prerender(self, families): - return next((f for f in families if "prerender" in f), None) From ef27fc4cf21ffd99c0ffdd949f8d045caa86de88 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Sat, 8 Oct 2022 21:53:34 +0200 Subject: [PATCH 072/480] Nuke: refactor collect slates --- openpype/hosts/nuke/plugins/publish/collect_slate_node.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/nuke/plugins/publish/collect_slate_node.py b/openpype/hosts/nuke/plugins/publish/collect_slate_node.py index 0a98bcf973..e3f24a233e 100644 --- a/openpype/hosts/nuke/plugins/publish/collect_slate_node.py +++ b/openpype/hosts/nuke/plugins/publish/collect_slate_node.py @@ -9,7 +9,7 @@ class CollectSlate(pyblish.api.InstancePlugin): order = pyblish.api.CollectorOrder + 0.09 label = "Collect Slate Node" hosts = ["nuke"] - families = ["render", "render.local", "render.farm"] + families = ["render"] def process(self, instance): node = napi.get_instance_node(instance) From 
ce0d2b8fe16a03be90a917cc666f4dcc5158fb70 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Sat, 8 Oct 2022 21:54:14 +0200 Subject: [PATCH 073/480] nuke: adding supporting functions to plugin.py --- openpype/hosts/nuke/api/__init__.py | 6 ++++- openpype/hosts/nuke/api/plugin.py | 37 ++++++++++++++++++++++++++++- 2 files changed, 41 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/nuke/api/__init__.py b/openpype/hosts/nuke/api/__init__.py index 156b3f0de0..8aaaff43b3 100644 --- a/openpype/hosts/nuke/api/__init__.py +++ b/openpype/hosts/nuke/api/__init__.py @@ -14,7 +14,9 @@ from .plugin import ( NukeWriteCreator, NukeCreatorError, OpenPypeCreator, - get_instance_node + get_instance_node, + get_instance_group_node_childs, + get_colorspace_from_node ) from .pipeline import ( NukeHost, @@ -66,6 +68,8 @@ __all__ = ( "OpenPypeCreator", "NukeHost", "get_instance_node", + "get_instance_group_node_childs", + "get_colorspace_from_node", "ls", diff --git a/openpype/hosts/nuke/api/plugin.py b/openpype/hosts/nuke/api/plugin.py index 193c2e8c2d..45f16b2943 100644 --- a/openpype/hosts/nuke/api/plugin.py +++ b/openpype/hosts/nuke/api/plugin.py @@ -1,5 +1,5 @@ import nuke - +import re import os import sys import six @@ -415,6 +415,41 @@ def get_instance_node(instance): return instance[0] +def get_instance_group_node_childs(instance): + """Return list of instance group node children + + Args: + instance (pyblish.Instance): pyblish instance + + Returns: + list: [nuke.Node] + """ + node = get_instance_node(instance) + + if node.Class() != "Group": + return + + # collect child nodes + child_nodes = [] + # iterate all nodes + for node in nuke.allNodes(group=node): + # add contained nodes to instance's node list + child_nodes.append(node) + + return child_nodes + + +def get_colorspace_from_node(node): + # Add version data to instance + colorspace = node["colorspace"].value() + + # remove default part of the string + if "default (" in colorspace: + colorspace = re.sub(r"default.\(|\)", "", colorspace) + + return colorspace + + def get_review_presets_config(): settings = get_current_project_settings() review_profiles = ( From 0bce4fc67ad3a93b2014a94834f3c293c51bf9f9 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Sat, 8 Oct 2022 21:55:38 +0200 Subject: [PATCH 074/480] nuke: adding deprecated decorator --- openpype/hosts/nuke/api/lib.py | 1 + 1 file changed, 1 insertion(+) diff --git a/openpype/hosts/nuke/api/lib.py b/openpype/hosts/nuke/api/lib.py index 4478633198..df0b303878 100644 --- a/openpype/hosts/nuke/api/lib.py +++ b/openpype/hosts/nuke/api/lib.py @@ -691,6 +691,7 @@ def get_nuke_imageio_settings(): return get_anatomy_settings(Context.project_name)["imageio"]["nuke"] +@deprecated("openpype.hosts.nuke.api.lib.get_nuke_imageio_settings") def get_created_node_imageio_setting_legacy(nodeclass, creator, subset): '''[DEPRICATED] Get preset data for dataflow (fileType, compression, bitDepth) ''' From a45666c4c45325bd13973397824d2ce5fc698a3a Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Mon, 10 Oct 2022 17:37:32 +0200 Subject: [PATCH 075/480] Nuke: plugin better update instances --- openpype/hosts/nuke/api/plugin.py | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/openpype/hosts/nuke/api/plugin.py b/openpype/hosts/nuke/api/plugin.py index 45f16b2943..748c52eb1f 100644 --- a/openpype/hosts/nuke/api/plugin.py +++ b/openpype/hosts/nuke/api/plugin.py @@ -198,15 +198,11 @@ class NukeCreator(NewCreator): def update_instances(self, update_list): for created_inst, _changes in update_list: 
instance_node = created_inst.transient_data["node"] - current_data = created_inst.data set_node_data( instance_node, INSTANCE_DATA_KNOB, - { - key: value[1] for key, value in _changes.items() - if current_data.get(key) != value[0] - } + created_inst.data_to_store() ) def remove_instances(self, instances): From c975429799163f7f2fbbeb52c6654c1f604eb000 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Mon, 10 Oct 2022 17:38:04 +0200 Subject: [PATCH 076/480] Nuke: update write validator --- .../nuke/plugins/publish/validate_write_nodes.py | 15 ++++++++++++--- 1 file changed, 12 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/nuke/plugins/publish/validate_write_nodes.py b/openpype/hosts/nuke/plugins/publish/validate_write_nodes.py index 490ab2e5ae..ab1671ad0e 100644 --- a/openpype/hosts/nuke/plugins/publish/validate_write_nodes.py +++ b/openpype/hosts/nuke/plugins/publish/validate_write_nodes.py @@ -6,7 +6,10 @@ from openpype.hosts.nuke.api.lib import ( color_gui_to_int ) from openpype.hosts.nuke import api as napi -from openpype.pipeline import PublishXmlValidationError +from openpype.pipeline.publish import ( + PublishXmlValidationError, + OptionalPyblishPluginMixin, +) @pyblish.api.log @@ -38,7 +41,10 @@ class RepairNukeWriteNodeAction(pyblish.api.Action): self.log.info("Node attributes were fixed") -class ValidateNukeWriteNode(pyblish.api.InstancePlugin): +class ValidateNukeWriteNode( + OptionalPyblishPluginMixin, + pyblish.api.InstancePlugin +): """ Validate Write node's knobs. Compare knobs on write node inside the render group @@ -48,11 +54,14 @@ class ValidateNukeWriteNode(pyblish.api.InstancePlugin): order = pyblish.api.ValidatorOrder optional = True families = ["render"] - label = "Write Node" + label = "Validate write node" actions = [RepairNukeWriteNodeAction] hosts = ["nuke"] def process(self, instance): + if not self.is_active(instance.data): + return + child_nodes = ( instance.data.get("transientData", {}).get("childNodes") or instance From 0271e3b60fe018bf78f9fad25fe2c81afab26a33 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Mon, 10 Oct 2022 20:58:10 +0200 Subject: [PATCH 077/480] Nuke: improving error message. --- openpype/hosts/nuke/plugins/create/create_backdrop.py | 4 ++-- openpype/hosts/nuke/plugins/create/create_camera.py | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/openpype/hosts/nuke/plugins/create/create_backdrop.py b/openpype/hosts/nuke/plugins/create/create_backdrop.py index 40eaab08cf..d82456392a 100644 --- a/openpype/hosts/nuke/plugins/create/create_backdrop.py +++ b/openpype/hosts/nuke/plugins/create/create_backdrop.py @@ -45,8 +45,8 @@ class CreateBackdrop(NukeCreator): def create(self, subset_name, instance_data, pre_create_data): if self.check_existing_subset(subset_name, instance_data): raise NukeCreatorError( - ("subset {} is already published with different HDA" - "definition.").format(subset_name)) + ("Subset name '{}' is already used. 
" + "Please specify different Variant.").format(subset_name)) instance = super(CreateBackdrop, self).create( subset_name, diff --git a/openpype/hosts/nuke/plugins/create/create_camera.py b/openpype/hosts/nuke/plugins/create/create_camera.py index 0b538d0088..ea2fd4b971 100644 --- a/openpype/hosts/nuke/plugins/create/create_camera.py +++ b/openpype/hosts/nuke/plugins/create/create_camera.py @@ -42,8 +42,8 @@ class CreateCamera(NukeCreator): def create(self, subset_name, instance_data, pre_create_data): if self.check_existing_subset(subset_name, instance_data): raise NukeCreatorError( - ("subset {} is already published with different HDA" - "definition.").format(subset_name)) + ("Subset name '{}' is already used. " + "Please specify different Variant.").format(subset_name)) instance = super(CreateCamera, self).create( subset_name, From 0a29bcfcc5f1724080993dfad76c47c1965b9f01 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Mon, 10 Oct 2022 20:58:26 +0200 Subject: [PATCH 078/480] Nuke: refactory model creator --- .../hosts/nuke/plugins/create/create_model.py | 130 ++++++++---------- 1 file changed, 55 insertions(+), 75 deletions(-) diff --git a/openpype/hosts/nuke/plugins/create/create_model.py b/openpype/hosts/nuke/plugins/create/create_model.py index 15a4e3ab8a..bfddcb64c3 100644 --- a/openpype/hosts/nuke/plugins/create/create_model.py +++ b/openpype/hosts/nuke/plugins/create/create_model.py @@ -1,87 +1,67 @@ import nuke -from openpype.hosts.nuke.api import plugin -from openpype.hosts.nuke.api.lib import ( - set_avalon_knob_data +from openpype.hosts.nuke.api import ( + NukeCreator, + NukeCreatorError, + maintained_selection ) -class CreateModel(plugin.OpenPypeCreator): - """Add Publishable Model Geometry""" +class CreateModel(NukeCreator): + """Add Publishable Camera""" - name = "model" + identifier = "create_model" label = "Create 3d Model" family = "model" icon = "cube" - defaults = ["Main"] + default_variants = ["Main"] - def __init__(self, *args, **kwargs): - super(CreateModel, self).__init__(*args, **kwargs) - self.nodes = nuke.selectedNodes() - self.node_color = "0xff3200ff" - return + # plugin attributes + node_color = "0xff3200ff" - def process(self): - nodes = list() - if (self.options or {}).get("useSelection"): - nodes = self.nodes - for n in nodes: - n['selected'].setValue(0) - end_nodes = list() - - # get the latest nodes in tree for selecion - for n in nodes: - x = n - end = 0 - while end == 0: - try: - x = x.dependent()[0] - except: - end_node = x - end = 1 - end_nodes.append(end_node) - - # set end_nodes - end_nodes = list(set(end_nodes)) - - # check if nodes is 3d nodes - for n in end_nodes: - n['selected'].setValue(1) - sn = nuke.createNode("Scene") - if not sn.input(0): - end_nodes.remove(n) - nuke.delete(sn) - - # loop over end nodes - for n in end_nodes: - n['selected'].setValue(1) - - self.nodes = nuke.selectedNodes() - nodes = self.nodes - if len(nodes) >= 1: - # loop selected nodes - for n in nodes: - data = self.data.copy() - if len(nodes) > 1: - # rename subset name only if more - # then one node are selected - subset = self.family + n["name"].value().capitalize() - data["subset"] = subset - - # change node color - n["tile_color"].setValue(int(self.node_color, 16)) - # add avalon knobs - set_avalon_knob_data(n, data) - return True + def create_instance_node( + self, + node_name, + knobs=None, + parent=None, + node_type=None + ): + with maintained_selection(): + if self.selected_nodes: + created_node = self.selected_nodes[0] else: - msg = str("Please select nodes 
you " - "wish to add to a container") - self.log.error(msg) - nuke.message(msg) - return + created_node = nuke.createNode("Scene") + + created_node["tile_color"].setValue( + int(self.node_color, 16)) + + created_node["name"].setValue(node_name) + + self.add_info_knob(created_node) + + return created_node + + def create(self, subset_name, instance_data, pre_create_data): + if self.check_existing_subset(subset_name, instance_data): + raise NukeCreatorError( + ("Subset name '{}' is already used. " + "Please specify different Variant.").format(subset_name)) + + instance = super(CreateModel, self).create( + subset_name, + instance_data, + pre_create_data + ) + + return instance + + def set_selected_nodes(self, pre_create_data): + if pre_create_data.get("use_selection"): + self.selected_nodes = nuke.selectedNodes() + if self.selected_nodes == []: + raise NukeCreatorError("Creator error: No active selection") + elif len(self.selected_nodes) > 1: + NukeCreatorError("Creator error: Select only one camera node") else: - # if selected is off then create one node - model_node = nuke.createNode("WriteGeo") - model_node["tile_color"].setValue(int(self.node_color, 16)) - # add avalon knobs - instance = set_avalon_knob_data(model_node, self.data) - return instance + self.selected_nodes = [] + + self.log.debug("Selection is: {}".format(self.selected_nodes)) From 3361c9a66d4921741ec836a5728a24c6ea6cb5f3 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 11 Oct 2022 17:30:53 +0200 Subject: [PATCH 079/480] nuke: convert_to_valid_instaces function convert old instances to new publisher --- openpype/hosts/nuke/api/plugin.py | 139 +++++++++++++++++++++++++++++- 1 file changed, 137 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/nuke/api/plugin.py b/openpype/hosts/nuke/api/plugin.py index 748c52eb1f..8ef6c60296 100644 --- a/openpype/hosts/nuke/api/plugin.py +++ b/openpype/hosts/nuke/api/plugin.py @@ -1,3 +1,4 @@ +from pprint import pformat import nuke import re import os @@ -37,6 +38,7 @@ from .lib import ( set_node_knobs_from_settings, get_view_process_node, set_node_data, + get_node_data, deprecated ) from .pipeline import ( @@ -189,6 +191,10 @@ class NukeCreator(NewCreator): def collect_instances(self): for (node, data) in list_instances(creator_id=self.identifier): + self.log.debug("_" * 50) + self.log.debug("_ self.identifier: `{}`".format(self.identifier)) + self.log.debug("_ data: `{}`".format(pformat(data))) + created_instance = CreatedInstance.from_existing( data, self ) @@ -337,8 +343,7 @@ class NukeWriteCreator(NukeCreator): def apply_settings( self, project_settings, - system_settings, - # anatomy_settings + system_settings ): """Method called on initialization of plugin to apply settings.""" @@ -1080,3 +1085,133 @@ class AbstractWriteRender(OpenPypeCreator): node (nuke.Node): group node with data as Knobs """ pass + + +def convert_to_valid_instaces(): + """ Check and convert to latest publisher instances + + Also save as new minor version of workfile. 
+ """ + def family_to_identifier(family): + mapping = { + "render": "create_write_render", + "prerender": "create_write_prerender", + "still": "create_write_image", + "model": "create_model", + "camera": "create_camera", + "nukenodes": "create_backdrop", + "gizmo": "create_gizmo", + "source": "create_source" + + } + return mapping[family] + + from openpype.hosts.nuke.api.lib import ( + get_avalon_knob_data + ) + from openpype.hosts.nuke.api import workio + + task_name = legacy_io.Session["AVALON_TASK"] + + # save into new workfile + current_file = workio.current_file() + new_workfile = current_file[:-3] + "_publisherConvert" + current_file[-3:] + + path = new_workfile.replace("\\", "/") + nuke.scriptSaveAs(new_workfile, overwrite=1) + nuke.Root()["name"].setValue(path) + nuke.Root()["project_directory"].setValue(os.path.dirname(path)) + nuke.Root().setModified(False) + + # loop all nodes and convert + for node in nuke.allNodes(recurseGroups=True): + transfer_data = { + "creator_attributes": {} + } + creator_attr = transfer_data["creator_attributes"] + + if node.Class() in ["Viewer", "Dot"]: + continue + + if get_node_data(node, INSTANCE_DATA_KNOB): + continue + + # get data from avalon knob + avalon_knob_data = get_avalon_knob_data( + node, ["avalon:", "ak:"]) + + if not avalon_knob_data: + continue + + if avalon_knob_data["id"] != "pyblish.avalon.instance": + continue + + transfer_data.update({ + k: v for k, v in avalon_knob_data.items() + if k not in ["families", "creator"] + }) + + transfer_data["task"] = task_name + + family = avalon_knob_data["family"] + # establish families + families_ak = avalon_knob_data.get("families", []) + + if "suspend_publish" in node.knobs(): + creator_attr["suspended_publish"] = ( + node["suspend_publish"].value()) + + # get review knob value + if "review" in node.knobs(): + creator_attr["review"] = ( + node["review"].value()) + + if "publish" in node.knobs(): + transfer_data["active"] = ( + node["publish"].value()) + + # add idetifier + transfer_data["creator_identifier"] = family_to_identifier(family) + + # Add all nodes in group instances. 
+ if node.Class() == "Group": + # only alter families for render family + if families_ak and "write" in families_ak.lower(): + target = node["render"].value() + if target == "Use existing frames": + creator_attr["render_target"] = "frames" + elif target == "Local": + # Local rendering + creator_attr["render_target"] = "local" + elif target == "On farm": + # Farm rendering + creator_attr["render_target"] = "farm" + + if "deadlinePriority" in node.knobs(): + transfer_data["farm_priority"] = ( + node["deadlinePriority"].value()) + if "deadlineChunkSize" in node.knobs(): + creator_attr["farm_chunk"] = ( + node["deadlineChunkSize"].value()) + if "deadlineConcurrentTasks" in node.knobs(): + creator_attr["farm_concurency"] = ( + node["deadlineConcurrentTasks"].value()) + + remove_knobs = [ + "review", "publish", "render", "suspend_publish", "warn", "divd", + "OpenpypeDataGroup", "OpenpypeDataGroup_End", "deadlinePriority", + "deadlineChunkSize", "deadlineConcurrentTasks", "Deadline" + ] + + # remove all old knobs + for knob in node.allKnobs(): + if knob.name() in remove_knobs: + node.removeKnob(knob) + elif "avalon" in knob.name(): + node.removeKnob(knob) + + # add new instance knob with transfer data + set_node_data( + node, INSTANCE_DATA_KNOB, transfer_data) + + nuke.scriptSave() From ed6cadb22b331f985348e5ed22168cea5ebb24d5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Wed, 12 Oct 2022 18:25:14 +0200 Subject: [PATCH 080/480] :construction: remove staging from version logic --- igniter/bootstrap_repos.py | 219 +++++-------------------------------- igniter/tools.py | 11 +- 2 files changed, 30 insertions(+), 200 deletions(-) diff --git a/igniter/bootstrap_repos.py b/igniter/bootstrap_repos.py index ccc9d4ac52..bbb3dd506c 100644 --- a/igniter/bootstrap_repos.py +++ b/igniter/bootstrap_repos.py @@ -61,7 +61,6 @@ class OpenPypeVersion(semver.VersionInfo): path (str): path to OpenPype """ - staging = False path = None _VERSION_REGEX = re.compile(r"(?P0|[1-9]\d*)\.(?P0|[1-9]\d*)\.(?P0|[1-9]\d*)(?:-(?P(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\+(?P[0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?") # noqa: E501 _installed_version = None @@ -82,12 +81,10 @@ class OpenPypeVersion(semver.VersionInfo): build (str): an optional build string version (str): if set, it will be parsed and will override parameters like `major`, `minor` and so on. - staging (bool): set to True if version is staging. path (Path): path to version location. 
""" self.path = None - self.staging = False if "version" in kwargs.keys(): if not kwargs.get("version"): @@ -112,29 +109,8 @@ class OpenPypeVersion(semver.VersionInfo): if "path" in kwargs.keys(): kwargs.pop("path") - if kwargs.get("staging"): - self.staging = kwargs.get("staging", False) - kwargs.pop("staging") - - if "staging" in kwargs.keys(): - kwargs.pop("staging") - - if self.staging: - if kwargs.get("build"): - if "staging" not in kwargs.get("build"): - kwargs["build"] = f"{kwargs.get('build')}-staging" - else: - kwargs["build"] = "staging" - - if kwargs.get("build") and "staging" in kwargs.get("build", ""): - self.staging = True - super().__init__(*args, **kwargs) - def __eq__(self, other): - result = super().__eq__(other) - return bool(result and self.staging == other.staging) - def __repr__(self): return f"<{self.__class__.__name__}: {str(self)} - path={self.path}>" @@ -149,43 +125,11 @@ class OpenPypeVersion(semver.VersionInfo): return True if self.finalize_version() == other.finalize_version() and \ - self.prerelease == other.prerelease and \ - self.is_staging() and not other.is_staging(): + self.prerelease == other.prerelease: return True return result - def set_staging(self) -> OpenPypeVersion: - """Set version as staging and return it. - - This will preserve current one. - - Returns: - OpenPypeVersion: Set as staging. - - """ - if self.staging: - return self - return self.replace(parts={"build": f"{self.build}-staging"}) - - def set_production(self) -> OpenPypeVersion: - """Set version as production and return it. - - This will preserve current one. - - Returns: - OpenPypeVersion: Set as production. - - """ - if not self.staging: - return self - return self.replace( - parts={"build": self.build.replace("-staging", "")}) - - def is_staging(self) -> bool: - """Test if current version is staging one.""" - return self.staging - def get_main_version(self) -> str: """Return main version component. @@ -215,8 +159,6 @@ class OpenPypeVersion(semver.VersionInfo): if not m: return None version = OpenPypeVersion.parse(string[m.start():m.end()]) - if "staging" in string[m.start():m.end()]: - version.staging = True return version @classmethod @@ -226,8 +168,6 @@ class OpenPypeVersion(semver.VersionInfo): openpype_version = cls(major=v.major, minor=v.minor, patch=v.patch, prerelease=v.prerelease, build=v.build) - if v.build and "staging" in v.build: - openpype_version.staging = True return openpype_version def __hash__(self): @@ -379,80 +319,28 @@ class OpenPypeVersion(semver.VersionInfo): return False @classmethod - def get_local_versions( - cls, production: bool = None, - staging: bool = None - ) -> List: + def get_local_versions(cls) -> List: """Get all versions available on this machine. - Arguments give ability to specify if filtering is needed. If both - arguments are set to None all found versions are returned. - - Args: - production (bool): Return production versions. - staging (bool): Return staging versions. - Returns: list: of compatible versions available on the machine. 
""" - # Return all local versions if arguments are set to None - if production is None and staging is None: - production = True - staging = True - - elif production is None and not staging: - production = True - - elif staging is None and not production: - staging = True - - # Just return empty output if both are disabled - if not production and not staging: - return [] - # DEPRECATED: backwards compatible way to look for versions in root dir_to_search = Path(user_data_dir("openpype", "pypeclub")) versions = OpenPypeVersion.get_versions_from_directory(dir_to_search) - filtered_versions = [] - for version in versions: - if version.is_staging(): - if staging: - filtered_versions.append(version) - elif production: - filtered_versions.append(version) - return list(sorted(set(filtered_versions))) + return list(sorted(set(versions))) @classmethod - def get_remote_versions( - cls, production: bool = None, - staging: bool = None - ) -> List: + def get_remote_versions(cls) -> List: """Get all versions available in OpenPype Path. - Arguments give ability to specify if filtering is needed. If both - arguments are set to None all found versions are returned. - - Args: - production (bool): Return production versions. - staging (bool): Return staging versions. + Returns: + list of OpenPypeVersions: Versions found in OpenPype path. """ # Return all local versions if arguments are set to None - if production is None and staging is None: - production = True - staging = True - - elif production is None and not staging: - production = True - - elif staging is None and not production: - staging = True - - # Just return empty output if both are disabled - if not production and not staging: - return [] dir_to_search = None if cls.openpype_path_is_accessible(): @@ -473,14 +361,7 @@ class OpenPypeVersion(semver.VersionInfo): versions = cls.get_versions_from_directory(dir_to_search) - filtered_versions = [] - for version in versions: - if version.is_staging(): - if staging: - filtered_versions.append(version) - elif production: - filtered_versions.append(version) - return list(sorted(set(filtered_versions))) + return list(sorted(set(versions))) @staticmethod def get_versions_from_directory( @@ -559,7 +440,6 @@ class OpenPypeVersion(semver.VersionInfo): @staticmethod def get_latest_version( - staging: bool = False, local: bool = None, remote: bool = None ) -> Union[OpenPypeVersion, None]: @@ -577,7 +457,6 @@ class OpenPypeVersion(semver.VersionInfo): 'False' in that case only build version can be used. Args: - staging (bool, optional): List staging versions if True. local (bool, optional): List local versions if True. remote (bool, optional): List remote versions if True. 
@@ -596,28 +475,15 @@ class OpenPypeVersion(semver.VersionInfo): remote = True installed_version = OpenPypeVersion.get_installed_version() - local_versions = [] - remote_versions = [] - if local: - local_versions = OpenPypeVersion.get_local_versions( - staging=staging - ) - if remote: - remote_versions = OpenPypeVersion.get_remote_versions( - staging=staging - ) - all_versions = local_versions + remote_versions - if not staging: - all_versions.append(installed_version) - - if not all_versions: - return None + local_versions = OpenPypeVersion.get_local_versions() if local else [] + remote_versions = OpenPypeVersion.get_remote_versions() if remote else [] # noqa: E501 + all_versions = local_versions + remote_versions + installed_version all_versions.sort() return all_versions[-1] @classmethod - def get_expected_studio_version(cls, staging=False, global_settings=None): + def get_expected_studio_version(cls, global_settings=None): """Expected OpenPype version that should be used at the moment. If version is not defined in settings the latest found version is @@ -626,13 +492,12 @@ class OpenPypeVersion(semver.VersionInfo): Using precached global settings is needed for usage inside OpenPype. Args: - staging (bool): Staging version or production version. global_settings (dict): Optional precached global settings. Returns: OpenPypeVersion: Version that should be used. """ - result = get_expected_studio_version_str(staging, global_settings) + result = get_expected_studio_version_str(global_settings) if not result: return None return OpenPypeVersion(version=result) @@ -1121,14 +986,12 @@ class BootstrapRepos: @staticmethod def find_openpype_version( - version: Union[str, OpenPypeVersion], - staging: bool + version: Union[str, OpenPypeVersion] ) -> Union[OpenPypeVersion, None]: """Find location of specified OpenPype version. Args: version (Union[str, OpenPypeVersion): Version to find. - staging (bool): Filter staging versions. Returns: requested OpenPypeVersion. @@ -1141,9 +1004,7 @@ class BootstrapRepos: if installed_version == version: return installed_version - local_versions = OpenPypeVersion.get_local_versions( - staging=staging, production=not staging - ) + local_versions = OpenPypeVersion.get_local_versions() zip_version = None for local_version in local_versions: if local_version == version: @@ -1155,37 +1016,25 @@ class BootstrapRepos: if zip_version is not None: return zip_version - remote_versions = OpenPypeVersion.get_remote_versions( - staging=staging, production=not staging - ) - for remote_version in remote_versions: - if remote_version == version: - return remote_version - return None + remote_versions = OpenPypeVersion.get_remote_versions() + return next( + ( + remote_version for remote_version in remote_versions + if remote_version == version + ), None) @staticmethod - def find_latest_openpype_version( - staging: bool - ) -> Union[OpenPypeVersion, None]: + def find_latest_openpype_version() -> Union[OpenPypeVersion, None]: """Find the latest available OpenPype version in all location. - Args: - staging (bool): True to look for staging versions. - Returns: Latest OpenPype version on None if nothing was found. 
""" installed_version = OpenPypeVersion.get_installed_version() - local_versions = OpenPypeVersion.get_local_versions( - staging=staging - ) - remote_versions = OpenPypeVersion.get_remote_versions( - staging=staging - ) - all_versions = local_versions + remote_versions - if not staging: - all_versions.append(installed_version) + local_versions = OpenPypeVersion.get_local_versions() + remote_versions = OpenPypeVersion.get_remote_versions() + all_versions = local_versions + remote_versions + installed_version if not all_versions: return None @@ -1205,7 +1054,6 @@ class BootstrapRepos: def find_openpype( self, openpype_path: Union[Path, str] = None, - staging: bool = False, include_zips: bool = False ) -> Union[List[OpenPypeVersion], None]: """Get ordered dict of detected OpenPype version. @@ -1219,8 +1067,6 @@ class BootstrapRepos: Args: openpype_path (Path or str, optional): Try to find OpenPype on the given path or url. - staging (bool, optional): Filter only staging version, skip them - otherwise. include_zips (bool, optional): If set True it will try to find OpenPype in zip files in given directory. @@ -1268,7 +1114,7 @@ class BootstrapRepos: for dir_to_search in dirs_to_search: try: openpype_versions += self.get_openpype_versions( - dir_to_search, staging) + dir_to_search) except ValueError: # location is invalid, skip it pass @@ -1633,15 +1479,11 @@ class BootstrapRepos: return False return True - def get_openpype_versions( - self, - openpype_dir: Path, - staging: bool = False) -> list: + def get_openpype_versions(self, openpype_dir: Path) -> list: """Get all detected OpenPype versions in directory. Args: openpype_dir (Path): Directory to scan. - staging (bool, optional): Find staging versions if True. Returns: list of OpenPypeVersion @@ -1659,8 +1501,7 @@ class BootstrapRepos: for item in openpype_dir.iterdir(): # if the item is directory with major.minor version, dive deeper if item.is_dir() and re.match(r"^\d+\.\d+$", item.name): - _versions = self.get_openpype_versions( - item, staging=staging) + _versions = self.get_openpype_versions(item) if _versions: openpype_versions += _versions @@ -1683,11 +1524,7 @@ class BootstrapRepos: continue detected_version.path = item - if staging and detected_version.is_staging(): - openpype_versions.append(detected_version) - - if not staging and not detected_version.is_staging(): - openpype_versions.append(detected_version) + openpype_versions.append(detected_version) return sorted(openpype_versions) diff --git a/igniter/tools.py b/igniter/tools.py index a9d592acf0..5c2c64a14b 100644 --- a/igniter/tools.py +++ b/igniter/tools.py @@ -184,11 +184,7 @@ def get_openpype_path_from_settings(settings: dict) -> Union[str, None]: if paths and isinstance(paths, str): paths = [paths] - # Loop over paths and return only existing - for path in paths: - if os.path.exists(path): - return path - return None + return next((path for path in paths if os.path.exists(path)), None) def get_expected_studio_version_str( @@ -206,10 +202,7 @@ def get_expected_studio_version_str( mongo_url = os.environ.get("OPENPYPE_MONGO") if global_settings is None: global_settings = get_openpype_global_settings(mongo_url) - if staging: - key = "staging_version" - else: - key = "production_version" + key = "staging_version" if staging else "production_version" return global_settings.get(key) or "" From 84b3bc3db2bd2a0f31af991b98e3c3a022e8e4e4 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Thu, 13 Oct 2022 11:34:57 +0200 Subject: [PATCH 081/480] :recycle: remove staging logic --- 
igniter/bootstrap_repos.py | 18 ++---- openpype/cli.py | 17 +++-- openpype/lib/openpype_version.py | 8 +++ start.py | 104 +++++++++++-------------------- 4 files changed, 54 insertions(+), 93 deletions(-) diff --git a/igniter/bootstrap_repos.py b/igniter/bootstrap_repos.py index bbb3dd506c..6d583469ef 100644 --- a/igniter/bootstrap_repos.py +++ b/igniter/bootstrap_repos.py @@ -57,7 +57,6 @@ class OpenPypeVersion(semver.VersionInfo): """Class for storing information about OpenPype version. Attributes: - staging (bool): True if it is staging version path (str): path to OpenPype """ @@ -161,15 +160,6 @@ class OpenPypeVersion(semver.VersionInfo): version = OpenPypeVersion.parse(string[m.start():m.end()]) return version - @classmethod - def parse(cls, version): - """Extends parse to handle ta handle staging variant.""" - v = super().parse(version) - openpype_version = cls(major=v.major, minor=v.minor, - patch=v.patch, prerelease=v.prerelease, - build=v.build) - return openpype_version - def __hash__(self): return hash(self.path) if self.path else hash(str(self)) @@ -448,7 +438,6 @@ class OpenPypeVersion(semver.VersionInfo): The version does not contain information about path and source. This is utility version to get the latest version from all found. - Build version is not listed if staging is enabled. Arguments 'local' and 'remote' define if local and remote repository versions are used. All versions are used if both are not set (or set @@ -483,7 +472,7 @@ class OpenPypeVersion(semver.VersionInfo): return all_versions[-1] @classmethod - def get_expected_studio_version(cls, global_settings=None): + def get_expected_studio_version(cls, staging=False, global_settings=None): """Expected OpenPype version that should be used at the moment. If version is not defined in settings the latest found version is @@ -492,12 +481,13 @@ class OpenPypeVersion(semver.VersionInfo): Using precached global settings is needed for usage inside OpenPype. Args: + staging (bool): Staging version or production version. global_settings (dict): Optional precached global settings. Returns: OpenPypeVersion: Version that should be used. """ - result = get_expected_studio_version_str(global_settings) + result = get_expected_studio_version_str(staging, global_settings) if not result: return None return OpenPypeVersion(version=result) @@ -567,7 +557,7 @@ class BootstrapRepos: """Get path for specific version in list of OpenPype versions. Args: - version (str): Version string to look for (1.2.4+staging) + version (str): Version string to look for (1.2.4-nightly.1+test) version_list (list of OpenPypeVersion): list of version to search. Returns: diff --git a/openpype/cli.py b/openpype/cli.py index 398d1a94c0..e3eacad8aa 100644 --- a/openpype/cli.py +++ b/openpype/cli.py @@ -16,14 +16,13 @@ from .pype_commands import PypeCommands @click.option("--use-staging", is_flag=True, expose_value=False, help="use staging variants") @click.option("--list-versions", is_flag=True, expose_value=False, - help=("list all detected versions. 
Use With `--use-staging " - "to list staging versions.")) + help="list all detected versions.") @click.option("--validate-version", expose_value=False, help="validate given version integrity") @click.option("--debug", is_flag=True, expose_value=False, - help=("Enable debug")) + help="Enable debug") @click.option("--verbose", expose_value=False, - help=("Change OpenPype log level (debug - critical or 0-50)")) + help="Change OpenPype log level (debug - critical or 0-50)") def main(ctx): """Pype is main command serving as entry point to pipeline system. @@ -416,20 +415,18 @@ def unpack_project(zipfile, root): @main.command() def interactive(): - """Interative (Python like) console. + """Interactive (Python like) console. - Helpfull command not only for development to directly work with python + Helpful command not only for development to directly work with python interpreter. Warning: - Executable 'openpype_gui' on windows won't work. + Executable 'openpype_gui' on Windows won't work. """ from openpype.version import __version__ - banner = "OpenPype {}\nPython {} on {}".format( - __version__, sys.version, sys.platform - ) + banner = f"OpenPype {__version__}\nPython {sys.version} on {sys.platform}" code.interact(banner) diff --git a/openpype/lib/openpype_version.py b/openpype/lib/openpype_version.py index d547d34755..8c74f96da6 100644 --- a/openpype/lib/openpype_version.py +++ b/openpype/lib/openpype_version.py @@ -11,6 +11,7 @@ repository or locally available. import os import sys +import warnings import openpype.version @@ -60,9 +61,16 @@ def is_running_from_build(): def is_running_staging(): """Currently used OpenPype is staging version. + Deprecated: + Since 3.15 + Returns: bool: True if openpype version containt 'staging'. """ + warnings.warn( + "Staging version logic set by version string is deprecated.", + DeprecationWarning + ) if "staging" in get_openpype_version(): return True return False diff --git a/start.py b/start.py index d1198a85e4..e59de93236 100644 --- a/start.py +++ b/start.py @@ -516,8 +516,6 @@ def _process_arguments() -> tuple: if m and m.group('version'): use_version = m.group('version') _print(f">>> Requested version [ {use_version} ]") - if "+staging" in use_version: - use_staging = True break if use_version is None: @@ -682,8 +680,7 @@ def _find_frozen_openpype(use_version: str = None, Path: Path to version to be used. Raises: - RuntimeError: If no OpenPype version are found or no staging version - (if requested). + RuntimeError: If no OpenPype version are found. 
""" # Collect OpenPype versions @@ -698,13 +695,10 @@ def _find_frozen_openpype(use_version: str = None, if use_version.lower() == "latest": # Version says to use latest version _print(">>> Finding latest version defined by use version") - openpype_version = bootstrap.find_latest_openpype_version( - use_staging) + openpype_version = bootstrap.find_latest_openpype_version() else: _print(f">>> Finding specified version \"{use_version}\"") - openpype_version = bootstrap.find_openpype_version( - use_version, use_staging - ) + openpype_version = bootstrap.find_openpype_version(use_version) if openpype_version is None: raise OpenPypeVersionNotFound( @@ -714,8 +708,7 @@ def _find_frozen_openpype(use_version: str = None, elif studio_version is not None: # Studio has defined a version to use _print(f">>> Finding studio version \"{studio_version}\"") - openpype_version = bootstrap.find_openpype_version( - studio_version, use_staging) + openpype_version = bootstrap.find_openpype_version(studio_version) if openpype_version is None: raise OpenPypeVersionNotFound(( "Requested OpenPype version " @@ -728,20 +721,15 @@ def _find_frozen_openpype(use_version: str = None, _print(( ">>> Finding latest version " f"with [ {installed_version} ]")) - openpype_version = bootstrap.find_latest_openpype_version( - use_staging) + openpype_version = bootstrap.find_latest_openpype_version() if openpype_version is None: - if use_staging: - reason = "Didn't find any staging versions." - else: - reason = "Didn't find any versions." - raise OpenPypeVersionNotFound(reason) + raise OpenPypeVersionNotFound("Didn't find any versions.") # get local frozen version and add it to detected version so if it is # newer it will be used instead. if installed_version == openpype_version: - version_path = _bootstrap_from_code(use_version, use_staging) + version_path = _bootstrap_from_code(use_version) openpype_version = OpenPypeVersion( version=BootstrapRepos.get_version(version_path), path=version_path) @@ -805,8 +793,8 @@ def _find_frozen_openpype(use_version: str = None, return openpype_version.path -def _bootstrap_from_code(use_version, use_staging): - """Bootstrap live code (or the one coming with frozen OpenPype. +def _bootstrap_from_code(use_version): + """Bootstrap live code (or the one coming with frozen OpenPype). Args: use_version: (str): specific version to use. 
@@ -829,33 +817,25 @@ def _bootstrap_from_code(use_version, use_staging): local_version = bootstrap.get_version(Path(_openpype_root)) switch_str = f" - will switch to {use_version}" if use_version and use_version != local_version else "" # noqa _print(f" - booting version: {local_version}{switch_str}") - assert local_version + if not local_version: + raise OpenPypeVersionNotFound( + f"Cannot find version at {_openpype_root}") else: # get current version of OpenPype local_version = OpenPypeVersion.get_installed_version_str() # All cases when should be used different version than build - if (use_version and use_version != local_version) or use_staging: + if use_version and use_version != local_version: if use_version: # Explicit version should be used - version_to_use = bootstrap.find_openpype_version( - use_version, use_staging - ) + version_to_use = bootstrap.find_openpype_version(use_version) if version_to_use is None: raise OpenPypeVersionIncompatible( f"Requested version \"{use_version}\" was not found.") else: - # Staging version should be used - version_to_use = bootstrap.find_latest_openpype_version( - use_staging - ) + version_to_use = bootstrap.find_latest_openpype_version() if version_to_use is None: - if use_staging: - reason = "Didn't find any staging versions." - else: - # This reason is backup for possible bug in code - reason = "Didn't find any versions." - raise OpenPypeVersionNotFound(reason) + raise OpenPypeVersionNotFound("Didn't find any versions.") # Start extraction of version if needed if version_to_use.path.is_file(): @@ -913,10 +893,7 @@ def _bootstrap_from_code(use_version, use_staging): def _boot_validate_versions(use_version, local_version): _print(f">>> Validating version [ {use_version} ]") - openpype_versions = bootstrap.find_openpype(include_zips=True, - staging=True) - openpype_versions += bootstrap.find_openpype(include_zips=True, - staging=False) + openpype_versions = bootstrap.find_openpype(include_zips=True) v: OpenPypeVersion found = [v for v in openpype_versions if str(v) == use_version] if not found: @@ -932,14 +909,7 @@ def _boot_validate_versions(use_version, local_version): _print(f'{">>> " if valid else "!!! 
"}{message}') -def _boot_print_versions(use_staging, local_version, openpype_root): - if not use_staging: - _print("--- This will list only non-staging versions detected.") - _print(" To see staging versions, use --use-staging argument.") - else: - _print("--- This will list only staging versions detected.") - _print(" To see other version, omit --use-staging argument.") - +def _boot_print_versions(openpype_root): if getattr(sys, 'frozen', False): local_version = bootstrap.get_version(Path(openpype_root)) else: @@ -947,16 +917,12 @@ def _boot_print_versions(use_staging, local_version, openpype_root): compatible_with = OpenPypeVersion(version=local_version) if "--all" in sys.argv: - compatible_with = None _print("--- Showing all version (even those not compatible).") else: _print(("--- Showing only compatible versions " f"with [ {compatible_with.major}.{compatible_with.minor} ]")) - openpype_versions = bootstrap.find_openpype( - include_zips=True, - staging=use_staging, - ) + openpype_versions = bootstrap.find_openpype(include_zips=True) openpype_versions = [ version for version in openpype_versions if version.is_compatible( @@ -966,12 +932,11 @@ def _boot_print_versions(use_staging, local_version, openpype_root): list_versions(openpype_versions, local_version) -def _boot_handle_missing_version(local_version, use_staging, message): +def _boot_handle_missing_version(local_version, message): _print(message) if os.environ.get("OPENPYPE_HEADLESS_MODE") == "1": openpype_versions = bootstrap.find_openpype( - include_zips=True, staging=use_staging - ) + include_zips=True) list_versions(openpype_versions, local_version) else: igniter.show_message_dialog("Version not found", message) @@ -1005,7 +970,6 @@ def boot(): "is overridden by command line argument.")) else: _print(">>> version set by environment variable") - use_staging = "staging" in os.getenv("OPENPYPE_VERSION") use_version = os.getenv("OPENPYPE_VERSION") # ------------------------------------------------------------------------ @@ -1059,7 +1023,7 @@ def boot(): os.environ["OPENPYPE_PATH"] = openpype_path if "print_versions" in commands: - _boot_print_versions(use_staging, local_version, OPENPYPE_ROOT) + _boot_print_versions(OPENPYPE_ROOT) sys.exit(1) # ------------------------------------------------------------------------ @@ -1072,7 +1036,7 @@ def boot(): try: version_path = _find_frozen_openpype(use_version, use_staging) except OpenPypeVersionNotFound as exc: - _boot_handle_missing_version(local_version, use_staging, str(exc)) + _boot_handle_missing_version(local_version, str(exc)) sys.exit(1) except RuntimeError as e: @@ -1088,10 +1052,10 @@ def boot(): _print("--- version is valid") else: try: - version_path = _bootstrap_from_code(use_version, use_staging) + version_path = _bootstrap_from_code(use_version) except OpenPypeVersionNotFound as exc: - _boot_handle_missing_version(local_version, use_staging, str(exc)) + _boot_handle_missing_version(local_version, str(exc)) sys.exit(1) # set this to point either to `python` from venv in case of live code @@ -1172,10 +1136,10 @@ def get_info(use_staging=None) -> list: inf.append(("OpenPype variant", "staging")) else: inf.append(("OpenPype variant", "production")) - inf.append( - ("Running OpenPype from", os.environ.get('OPENPYPE_REPOS_ROOT')) + inf.extend([ + ("Running OpenPype from", os.environ.get('OPENPYPE_REPOS_ROOT')), + ("Using mongodb", components["host"])] ) - inf.append(("Using mongodb", components["host"])) if os.environ.get("FTRACK_SERVER"): inf.append(("Using FTrack at", @@ 
-1194,11 +1158,13 @@ def get_info(use_staging=None) -> list: mongo_components = get_default_components() if mongo_components["host"]: - inf.append(("Logging to MongoDB", mongo_components["host"])) - inf.append((" - port", mongo_components["port"] or "")) - inf.append((" - database", Logger.log_database_name)) - inf.append((" - collection", Logger.log_collection_name)) - inf.append((" - user", mongo_components["username"] or "")) + inf.extend([ + ("Logging to MongoDB", mongo_components["host"]), + (" - port", mongo_components["port"] or ""), + (" - database", Logger.log_database_name), + (" - collection", Logger.log_collection_name), + (" - user", mongo_components["username"] or "") + ]) if mongo_components["auth_db"]: inf.append((" - auth source", mongo_components["auth_db"])) From 59e23f21d02613d97217bf188f613796759c22b5 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Thu, 13 Oct 2022 11:35:38 +0200 Subject: [PATCH 082/480] :memo: change staging logic in documentation --- website/docs/admin_distribute.md | 10 +++------- website/docs/admin_openpype_commands.md | 2 +- website/docs/admin_use.md | 5 +---- 3 files changed, 5 insertions(+), 12 deletions(-) diff --git a/website/docs/admin_distribute.md b/website/docs/admin_distribute.md index 2cccce0fbd..169b97e63d 100644 --- a/website/docs/admin_distribute.md +++ b/website/docs/admin_distribute.md @@ -52,14 +52,10 @@ The default locations are: ### Staging vs. Production -You can have version of OpenPype with experimental features you want to try somewhere but you -don't want to disrupt your production. You can tag version as **staging** simply by appending `+staging` -to its name. +You can have version of OpenPype with experimental features you want to try somewhere, but you +don't want to disrupt your production. You can set such version in th Settings. -So if you have OpenPype version like `OpenPype-v3.0.0.zip` just name it `OpenPype-v3.0.0+staging.zip`. -When both these versions are present, production one will always take precedence over staging. - -You can run OpenPype with `--use-staging` argument to add use staging versions. +You can run OpenPype with `--use-staging` argument to use staging version specified in the Settings. :::note Running staging version is identified by orange **P** icon in system tray. diff --git a/website/docs/admin_openpype_commands.md b/website/docs/admin_openpype_commands.md index 85f661d51e..131b6c0a51 100644 --- a/website/docs/admin_openpype_commands.md +++ b/website/docs/admin_openpype_commands.md @@ -22,7 +22,7 @@ openpype_console --use-version=3.0.0-foo+bar `--use-staging` - to use staging versions of OpenPype. -`--list-versions [--use-staging]` - to list available versions. +`--list-versions` - to list available versions. `--validate-version` - to validate integrity of given version diff --git a/website/docs/admin_use.md b/website/docs/admin_use.md index 1c4ae9e01c..c92ac3a77a 100644 --- a/website/docs/admin_use.md +++ b/website/docs/admin_use.md @@ -43,8 +43,7 @@ You can use following command line arguments: openpype_console --use-version=3.0.1 ``` -`--use-staging` - to specify you prefer staging version. In that case it will be used -(if found) instead of production one. +`--use-staging` - to specify you prefer staging version. In that case it will be used instead of production one. 
:::tip List available versions To list all available versions, use: @@ -52,8 +51,6 @@ To list all available versions, use: ```shell openpype_console --list-versions ``` - -You can add `--use-staging` to list staging versions. ::: If you want to validate integrity of some available version, you can use: From 7bd7c0391c12a55236122d216f1f123e8f571021 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Thu, 13 Oct 2022 11:59:10 +0200 Subject: [PATCH 083/480] :white_check_mark: fix tests --- tests/unit/igniter/test_bootstrap_repos.py | 30 ++++++++++++---------- 1 file changed, 16 insertions(+), 14 deletions(-) diff --git a/tests/unit/igniter/test_bootstrap_repos.py b/tests/unit/igniter/test_bootstrap_repos.py index 10278c4928..8b32bbe03c 100644 --- a/tests/unit/igniter/test_bootstrap_repos.py +++ b/tests/unit/igniter/test_bootstrap_repos.py @@ -33,11 +33,11 @@ def test_openpype_version(printer): assert str(v2) == "1.2.3-x" assert v1 > v2 - v3 = OpenPypeVersion(1, 2, 3, staging=True) - assert str(v3) == "1.2.3+staging" + v3 = OpenPypeVersion(1, 2, 3) + assert str(v3) == "1.2.3" - v4 = OpenPypeVersion(1, 2, 3, staging="True", prerelease="rc.1") - assert str(v4) == "1.2.3-rc.1+staging" + v4 = OpenPypeVersion(1, 2, 3, prerelease="rc.1") + assert str(v4) == "1.2.3-rc.1" assert v3 > v4 assert v1 > v4 assert v4 < OpenPypeVersion(1, 2, 3, prerelease="rc.1") @@ -73,7 +73,7 @@ def test_openpype_version(printer): OpenPypeVersion(4, 8, 10), OpenPypeVersion(4, 8, 20), OpenPypeVersion(4, 8, 9), - OpenPypeVersion(1, 2, 3, staging=True), + OpenPypeVersion(1, 2, 3), OpenPypeVersion(1, 2, 3, build="foo") ] res = sorted(sort_versions) @@ -104,27 +104,26 @@ def test_openpype_version(printer): with pytest.raises(ValueError): _ = OpenPypeVersion(version="booobaa") - v11 = OpenPypeVersion(version="4.6.7-foo+staging") + v11 = OpenPypeVersion(version="4.6.7-foo") assert v11.major == 4 assert v11.minor == 6 assert v11.patch == 7 - assert v11.staging is True assert v11.prerelease == "foo" def test_get_main_version(): - ver = OpenPypeVersion(1, 2, 3, staging=True, prerelease="foo") + ver = OpenPypeVersion(1, 2, 3, prerelease="foo") assert ver.get_main_version() == "1.2.3" def test_get_version_path_from_list(): versions = [ OpenPypeVersion(1, 2, 3, path=Path('/foo/bar')), - OpenPypeVersion(3, 4, 5, staging=True, path=Path("/bar/baz")), + OpenPypeVersion(3, 4, 5, path=Path("/bar/baz")), OpenPypeVersion(6, 7, 8, prerelease="x", path=Path("boo/goo")) ] path = BootstrapRepos.get_version_path_from_list( - "3.4.5+staging", versions) + "3.4.5", versions) assert path == Path("/bar/baz") @@ -362,12 +361,15 @@ def test_find_openpype(fix_bootstrap, tmp_path_factory, monkeypatch, printer): result = fix_bootstrap.find_openpype(include_zips=True) # we should have results as file were created assert result is not None, "no OpenPype version found" - # latest item in `result` should be latest version found. + # latest item in `result` should be the latest version found. + # this will be `7.2.10-foo+staging` even with *staging* in since we've + # dropped the logic to handle staging separately and in alphabetical + # sorting it is after `strange`. 
expected_path = Path( d_path / "{}{}{}".format( - test_versions_2[3].prefix, - test_versions_2[3].version, - test_versions_2[3].suffix + test_versions_2[4].prefix, + test_versions_2[4].version, + test_versions_2[4].suffix ) ) assert result, "nothing found" From 9330d92fdc8520f7f032fcec803a6c0848b37460 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 13 Oct 2022 15:26:30 +0200 Subject: [PATCH 084/480] fix 'get_remote_versions' call in version entity --- openpype/settings/entities/__init__.py | 8 ++------ openpype/settings/entities/op_version_entity.py | 17 +++-------------- .../schemas/system_schema/schema_general.json | 4 ++-- 3 files changed, 7 insertions(+), 22 deletions(-) diff --git a/openpype/settings/entities/__init__.py b/openpype/settings/entities/__init__.py index b2cb2204f4..5e3a76094e 100644 --- a/openpype/settings/entities/__init__.py +++ b/openpype/settings/entities/__init__.py @@ -123,10 +123,7 @@ from .dict_conditional import ( ) from .anatomy_entities import AnatomyEntity -from .op_version_entity import ( - ProductionVersionsInputEntity, - StagingVersionsInputEntity -) +from .op_version_entity import VersionsInputEntity __all__ = ( "DefaultsNotDefined", @@ -188,6 +185,5 @@ __all__ = ( "AnatomyEntity", - "ProductionVersionsInputEntity", - "StagingVersionsInputEntity" + "VersionsInputEntity", ) diff --git a/openpype/settings/entities/op_version_entity.py b/openpype/settings/entities/op_version_entity.py index 782d65a446..f79048222e 100644 --- a/openpype/settings/entities/op_version_entity.py +++ b/openpype/settings/entities/op_version_entity.py @@ -66,24 +66,13 @@ class OpenPypeVersionInput(TextEntity): return super(OpenPypeVersionInput, self).convert_to_valid_type(value) -class ProductionVersionsInputEntity(OpenPypeVersionInput): +class VersionsInputEntity(OpenPypeVersionInput): """Entity meant only for global settings to define production version.""" - schema_types = ["production-versions-text"] + schema_types = ["versions-text"] def _get_openpype_versions(self): - versions = get_remote_versions(staging=False, production=True) + versions = get_remote_versions() if versions is None: return [] versions.append(get_installed_version()) return sorted(versions) - - -class StagingVersionsInputEntity(OpenPypeVersionInput): - """Entity meant only for global settings to define staging version.""" - schema_types = ["staging-versions-text"] - - def _get_openpype_versions(self): - versions = get_remote_versions(staging=True, production=False) - if versions is None: - return [] - return sorted(versions) diff --git a/openpype/settings/entities/schemas/system_schema/schema_general.json b/openpype/settings/entities/schemas/system_schema/schema_general.json index 5b6d8d5d62..d6c22fe54c 100644 --- a/openpype/settings/entities/schemas/system_schema/schema_general.json +++ b/openpype/settings/entities/schemas/system_schema/schema_general.json @@ -146,12 +146,12 @@ "label": "Define explicit OpenPype version that should be used. Keep empty to use latest available version." 
}, { - "type": "production-versions-text", + "type": "versions-text", "key": "production_version", "label": "Production version" }, { - "type": "staging-versions-text", + "type": "versions-text", "key": "staging_version", "label": "Staging version" }, From e185ba57cd2b9adf8d1ffa182ccc57149c570b1d Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 13 Oct 2022 15:43:30 +0200 Subject: [PATCH 085/480] nuke: fixing missing instance data --- openpype/hosts/nuke/plugins/create/workfile_creator.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/nuke/plugins/create/workfile_creator.py b/openpype/hosts/nuke/plugins/create/workfile_creator.py index ae54d6bcb2..ae05cee99f 100644 --- a/openpype/hosts/nuke/plugins/create/workfile_creator.py +++ b/openpype/hosts/nuke/plugins/create/workfile_creator.py @@ -42,7 +42,7 @@ class WorkfileCreator(AutoCreator): }) instance_data.update(self.get_dynamic_data( self.default_variant, task_name, asset_doc, - project_name, host_name + project_name, host_name, instance_data )) instance = CreatedInstance( From 8180a02b1873170dd7bf02364496dd69082a5d95 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 13 Oct 2022 15:44:47 +0200 Subject: [PATCH 086/480] nuke: better creator names --- openpype/hosts/nuke/plugins/create/create_backdrop.py | 2 +- openpype/hosts/nuke/plugins/create/create_camera.py | 2 +- openpype/hosts/nuke/plugins/create/create_model.py | 2 +- openpype/hosts/nuke/plugins/create/create_read.py | 2 +- openpype/hosts/nuke/plugins/create/create_write_image.py | 2 +- openpype/hosts/nuke/plugins/create/create_write_prerender.py | 2 +- openpype/hosts/nuke/plugins/create/create_write_render.py | 2 +- 7 files changed, 7 insertions(+), 7 deletions(-) diff --git a/openpype/hosts/nuke/plugins/create/create_backdrop.py b/openpype/hosts/nuke/plugins/create/create_backdrop.py index d82456392a..ebc66e95a7 100644 --- a/openpype/hosts/nuke/plugins/create/create_backdrop.py +++ b/openpype/hosts/nuke/plugins/create/create_backdrop.py @@ -13,7 +13,7 @@ class CreateBackdrop(NukeCreator): """Add Publishable Backdrop""" identifier = "create_backdrop" - label = "Create Backdrop" + label = "Nukenodes (backdrop)" family = "nukenodes" icon = "file-archive-o" maintain_selection = True diff --git a/openpype/hosts/nuke/plugins/create/create_camera.py b/openpype/hosts/nuke/plugins/create/create_camera.py index ea2fd4b971..2a48b8a72b 100644 --- a/openpype/hosts/nuke/plugins/create/create_camera.py +++ b/openpype/hosts/nuke/plugins/create/create_camera.py @@ -10,7 +10,7 @@ class CreateCamera(NukeCreator): """Add Publishable Camera""" identifier = "create_camera" - label = "Create 3d Camera" + label = "Camera (3d)" family = "camera" icon = "camera" diff --git a/openpype/hosts/nuke/plugins/create/create_model.py b/openpype/hosts/nuke/plugins/create/create_model.py index bfddcb64c3..a1a96bf412 100644 --- a/openpype/hosts/nuke/plugins/create/create_model.py +++ b/openpype/hosts/nuke/plugins/create/create_model.py @@ -10,7 +10,7 @@ class CreateModel(NukeCreator): """Add Publishable Camera""" identifier = "create_model" - label = "Create 3d Model" + label = "Model (3d)" family = "model" icon = "cube" default_variants = ["Main"] diff --git a/openpype/hosts/nuke/plugins/create/create_read.py b/openpype/hosts/nuke/plugins/create/create_read.py index 87a9dff0f8..d4f63e6b9f 100644 --- a/openpype/hosts/nuke/plugins/create/create_read.py +++ b/openpype/hosts/nuke/plugins/create/create_read.py @@ -11,7 +11,7 @@ from openpype.hosts.nuke.api.lib import ( class 
CrateRead(plugin.OpenPypeCreator): # change this to template preset name = "ReadCopy" - label = "Create Read Copy" + label = "Source (read)" hosts = ["nuke"] family = "source" families = family diff --git a/openpype/hosts/nuke/plugins/create/create_write_image.py b/openpype/hosts/nuke/plugins/create/create_write_image.py index 1bb114903f..e9ff33a32b 100644 --- a/openpype/hosts/nuke/plugins/create/create_write_image.py +++ b/openpype/hosts/nuke/plugins/create/create_write_image.py @@ -16,7 +16,7 @@ from openpype.hosts.nuke import api as napi class CreateWriteImage(napi.NukeWriteCreator): identifier = "create_write_image" - label = "Create Write Image" + label = "Image (write)" family = "image" icon = "sign-out" diff --git a/openpype/hosts/nuke/plugins/create/create_write_prerender.py b/openpype/hosts/nuke/plugins/create/create_write_prerender.py index 1ce30379ad..b7d0f4ade9 100644 --- a/openpype/hosts/nuke/plugins/create/create_write_prerender.py +++ b/openpype/hosts/nuke/plugins/create/create_write_prerender.py @@ -16,7 +16,7 @@ from openpype.hosts.nuke import api as napi class CreateWritePrerender(napi.NukeWriteCreator): identifier = "create_write_prerender" - label = "Create Write Prerender" + label = "Prerender (write)" family = "prerender" icon = "sign-out" diff --git a/openpype/hosts/nuke/plugins/create/create_write_render.py b/openpype/hosts/nuke/plugins/create/create_write_render.py index 89696f55e0..3d96e8c13b 100644 --- a/openpype/hosts/nuke/plugins/create/create_write_render.py +++ b/openpype/hosts/nuke/plugins/create/create_write_render.py @@ -16,7 +16,7 @@ from openpype.hosts.nuke import api as napi class CreateWriteRender(napi.NukeWriteCreator): identifier = "create_write_render" - label = "Create Write Render" + label = "Render (write)" family = "render" icon = "sign-out" From 8521a2eac42a6f3c081e08e3351383b197de07c1 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 13 Oct 2022 15:45:38 +0200 Subject: [PATCH 087/480] nuke: missing try to catch ValueError --- openpype/hosts/nuke/api/pipeline.py | 8 ++++++-- openpype/hosts/nuke/api/plugin.py | 13 ++++++++----- 2 files changed, 14 insertions(+), 7 deletions(-) diff --git a/openpype/hosts/nuke/api/pipeline.py b/openpype/hosts/nuke/api/pipeline.py index c93a4f090f..258b09f7a8 100644 --- a/openpype/hosts/nuke/api/pipeline.py +++ b/openpype/hosts/nuke/api/pipeline.py @@ -37,7 +37,6 @@ from .lib import ( check_inventory_versions, set_avalon_knob_data, read_avalon_data, - get_avalon_knob_data, on_script_load, dirmap_file_name_filter, add_scripts_menu, @@ -61,6 +60,7 @@ from .workio import ( work_root, current_file ) +from openpype.hosts.nuke.api import plugin log = Logger.get_logger(__name__) @@ -297,7 +297,11 @@ def _install_menu(): "Experimental tools...", lambda: host_tools.show_experimental_tools_dialog(parent=main_window) ) - + menu.addSeparator() + menu.addCommand( + "Convert old publishing instances", + plugin.convert_to_valid_instaces + ) # add reload pipeline only in debug mode if bool(os.getenv("NUKE_DEBUG")): menu.addSeparator() diff --git a/openpype/hosts/nuke/api/plugin.py b/openpype/hosts/nuke/api/plugin.py index c9460295fb..96d9b48ba6 100644 --- a/openpype/hosts/nuke/api/plugin.py +++ b/openpype/hosts/nuke/api/plugin.py @@ -1202,14 +1202,17 @@ def convert_to_valid_instaces(): "OpenpypeDataGroup", "OpenpypeDataGroup_End", "deadlinePriority", "deadlineChunkSize", "deadlineConcurrentTasks", "Deadline" ] + print(node.name()) # remove all old knobs for knob in node.allKnobs(): - if knob.name() in remove_knobs: - 
node.removeKnob(knob) - elif "avalon" in knob.name(): - node.removeKnob(knob) - + try: + if knob.name() in remove_knobs: + node.removeKnob(knob) + elif "avalon" in knob.name(): + node.removeKnob(knob) + except ValueError: + pass # add new instance knob with transfer data set_node_data( node, INSTANCE_DATA_KNOB, transfer_data) From 8965b5712bbe2a3639493f970374f5662a229697 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 13 Oct 2022 16:24:52 +0200 Subject: [PATCH 088/480] nuke: removing unused code --- openpype/hosts/nuke/plugins/create/create_camera.py | 9 --------- 1 file changed, 9 deletions(-) diff --git a/openpype/hosts/nuke/plugins/create/create_camera.py b/openpype/hosts/nuke/plugins/create/create_camera.py index 2a48b8a72b..c559aa2756 100644 --- a/openpype/hosts/nuke/plugins/create/create_camera.py +++ b/openpype/hosts/nuke/plugins/create/create_camera.py @@ -64,12 +64,3 @@ class CreateCamera(NukeCreator): self.selected_nodes = [] self.log.debug("Selection is: {}".format(self.selected_nodes)) - - def apply_settings(self, project_settings, system_settings): - """Method called on initialization of plugin to apply settings.""" - - # only selected keys ideally - # settings = self.get_creator_settings(project_settings) - - # self.key = settings["key"] - pass \ No newline at end of file From 1407de25a5651909446070aa999aa869adfef82b Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 13 Oct 2022 16:25:11 +0200 Subject: [PATCH 089/480] nuke: refactor create read to create source --- .../hosts/nuke/plugins/create/create_read.py | 57 ------------ .../nuke/plugins/create/create_source.py | 93 +++++++++++++++++++ 2 files changed, 93 insertions(+), 57 deletions(-) delete mode 100644 openpype/hosts/nuke/plugins/create/create_read.py create mode 100644 openpype/hosts/nuke/plugins/create/create_source.py diff --git a/openpype/hosts/nuke/plugins/create/create_read.py b/openpype/hosts/nuke/plugins/create/create_read.py deleted file mode 100644 index d4f63e6b9f..0000000000 --- a/openpype/hosts/nuke/plugins/create/create_read.py +++ /dev/null @@ -1,57 +0,0 @@ -from collections import OrderedDict - -import nuke - -from openpype.hosts.nuke.api import plugin -from openpype.hosts.nuke.api.lib import ( - set_avalon_knob_data -) - - -class CrateRead(plugin.OpenPypeCreator): - # change this to template preset - name = "ReadCopy" - label = "Source (read)" - hosts = ["nuke"] - family = "source" - families = family - icon = "film" - defaults = ["Effect", "Backplate", "Fire", "Smoke"] - - def __init__(self, *args, **kwargs): - super(CrateRead, self).__init__(*args, **kwargs) - self.nodes = nuke.selectedNodes() - data = OrderedDict() - data['family'] = self.family - data['families'] = self.families - - for k, v in self.data.items(): - if k not in data.keys(): - data.update({k: v}) - - self.data = data - - def process(self): - self.name = self.data["subset"] - nodes = self.nodes - - if not nodes or len(nodes) == 0: - msg = "Please select Read node" - self.log.error(msg) - nuke.message(msg) - else: - count_reads = 0 - for node in nodes: - if node.Class() != 'Read': - continue - avalon_data = self.data - avalon_data['subset'] = "{}".format(self.name) - set_avalon_knob_data(node, avalon_data) - node['tile_color'].setValue(16744935) - count_reads += 1 - - if count_reads < 1: - msg = "Please select Read node" - self.log.error(msg) - nuke.message(msg) - return diff --git a/openpype/hosts/nuke/plugins/create/create_source.py b/openpype/hosts/nuke/plugins/create/create_source.py new file mode 100644 index 
0000000000..48f9f86219 --- /dev/null +++ b/openpype/hosts/nuke/plugins/create/create_source.py @@ -0,0 +1,93 @@ +import nuke +import six +import sys +from openpype.hosts.nuke.api import ( + INSTANCE_DATA_KNOB, + NukeCreator, + NukeCreatorError, + set_node_data +) +from openpype.pipeline import ( + CreatedInstance +) + + +class CreateSource(NukeCreator): + """Add Publishable Read with source""" + + identifier = "create_source" + label = "Source (read)" + family = "source" + icon = "film" + default_variants = ["Effect", "Backplate", "Fire", "Smoke"] + + # plugin attributes + node_color = "0xff9100ff" + + def create_instance_node( + self, + node_name, + read_node + ): + read_node["tile_color"].setValue( + int(self.node_color, 16)) + read_node["name"].setValue(node_name) + self.add_info_knob(read_node) + return read_node + + def create(self, subset_name, instance_data, pre_create_data): + + # make sure selected nodes are added + self.set_selected_nodes(pre_create_data) + + try: + for read_node in self.selected_nodes: + if read_node.Class() != 'Read': + continue + + node_name = read_node.name() + _subset_name = subset_name + node_name + + # make sure subset name is unique + if self.check_existing_subset(_subset_name, instance_data): + raise NukeCreatorError( + ("subset {} is already published" + "definition.").format(_subset_name)) + + instance_node = self.create_instance_node( + _subset_name, + read_node + ) + instance = CreatedInstance( + self.family, + _subset_name, + instance_data, + self + ) + + instance.transient_data["node"] = instance_node + + self._add_instance_to_context(instance) + + set_node_data( + instance_node, + INSTANCE_DATA_KNOB, + instance.data_to_store() + ) + + except Exception as er: + six.reraise( + NukeCreatorError, + NukeCreatorError("Creator error: {}".format(er)), + sys.exc_info()[2]) + + def set_selected_nodes(self, pre_create_data): + if pre_create_data.get("use_selection"): + self.selected_nodes = nuke.selectedNodes() + if self.selected_nodes == []: + raise NukeCreatorError("Creator error: No active selection") + else: + NukeCreatorError( + "Creator error: only supprted with active selection") + + self.log.debug("Selection is: {}".format(self.selected_nodes)) From d683c842f33344ddd12d6520381c764b61d673f3 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Thu, 13 Oct 2022 16:53:26 +0200 Subject: [PATCH 090/480] :bug: fix code signing and keyring access on macos --- tools/build.sh | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/tools/build.sh b/tools/build.sh index 4f0e4bdcf0..4de5092227 100755 --- a/tools/build.sh +++ b/tools/build.sh @@ -192,8 +192,15 @@ if [ "$disable_submodule_update" == 1 ]; then "$POETRY_HOME/bin/poetry" run python "$openpype_root/tools/build_dependencies.py" if [[ "$OSTYPE" == "darwin"* ]]; then + # fix cx_Freeze libs issue + echo -e "${BIGreen}>>>${RST} Fixing libs ..." + mv "$openpype_root/build/OpenPype $openpype_version.app/Contents/MacOS/dependencies/cx_Freeze" "$openpype_root/build/OpenPype $openpype_version.app/Contents/MacOS/lib/" + # fix code signing issue - codesign --remove-signature "$openpype_root/build/OpenPype $openpype_version.app/Contents/MacOS/lib/Python" + echo -e "${BIGreen}>>>${RST} Fixing code signatures ..." 
+ codesign --remove-signature "$openpype_root/build/OpenPype $openpype_version.app/Contents/MacOS/openpype_console" + codesign --remove-signature "$openpype_root/build/OpenPype $openpype_version.app/Contents/MacOS/openpype_gui" + # codesign --remove-signature "$openpype_root/build/OpenPype $openpype_version.app/Contents/MacOS/lib/Python" if command -v create-dmg > /dev/null 2>&1; then create-dmg \ --volname "OpenPype $openpype_version Installer" \ From 814a55874c57cd7907d7e79ef89fb8d678166340 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Thu, 13 Oct 2022 17:27:59 +0200 Subject: [PATCH 091/480] :arrow_up: re-specify python version and update lock --- poetry.lock | 84 +++++++++++++++++++++++++++----------------------- pyproject.toml | 4 +-- 2 files changed, 48 insertions(+), 40 deletions(-) diff --git a/poetry.lock b/poetry.lock index 638dabdb7b..44c142699e 100644 --- a/poetry.lock +++ b/poetry.lock @@ -121,10 +121,7 @@ python-versions = ">=3.7.2" [package.dependencies] lazy-object-proxy = ">=1.4.0" typing-extensions = {version = ">=3.10", markers = "python_version < \"3.10\""} -wrapt = [ - {version = ">=1.11,<2", markers = "python_version < \"3.11\""}, - {version = ">=1.14,<2", markers = "python_version >= \"3.11\""}, -] +wrapt = {version = ">=1.11,<2", markers = "python_version < \"3.11\""} [[package]] name = "async-timeout" @@ -325,7 +322,7 @@ test = ["hypothesis (>=1.11.4,!=3.79.2)", "iso8601", "pretend", "pytest (>=6.2.0 [[package]] name = "cx-freeze" -version = "6.11.1" +version = "6.12.0" description = "Create standalone executables from Python scripts" category = "dev" optional = false @@ -334,14 +331,14 @@ python-versions = ">=3.6" [package.dependencies] cx-logging = {version = ">=3.0", markers = "sys_platform == \"win32\" and python_version < \"3.10\""} importlib-metadata = {version = ">=4.8.3", markers = "python_version < \"3.10\""} -lief = {version = ">=0.11.5", markers = "sys_platform == \"win32\" and python_version <= \"3.10\""} +lief = {version = ">=0.11.5", markers = "sys_platform == \"win32\""} packaging = ">=21.0" patchelf = {version = ">=0.12", markers = "sys_platform == \"linux\""} [package.extras] -dev = ["bump2version (>=1.0.1)", "cibuildwheel (==2.6.1)", "pre-commit (>=2.17.0)", "pylint (>=2.13.0)"] -doc = ["sphinx (>=5.0.1,<6.0.0)", "sphinx-rtd-theme (==1.0.0)"] -test = ["nose (==1.3.7)", "pygments (>=2.11.2)", "pytest (>=7.0.1)", "pytest-cov (==3.0.0)", "pytest-mock (>=3.6.1)", "pytest-timeout (>=1.4.2)"] +dev = ["bump2version (>=1.0.1)", "cibuildwheel (>=2.8.1)", "pre-commit (>=2.17.0)", "pylint (>=2.13.0)", "wheel (>=0.36.2)"] +doc = ["sphinx (>=5.0.1,<5.2.0)", "sphinx-rtd-theme (==1.0.0)"] +test = ["nose (==1.3.7)", "pygments (>=2.11.2)", "pytest (>=7.0.1)", "pytest-cov (>=3.0.0)", "pytest-mock (>=3.6.1)", "pytest-timeout (>=1.4.2)"] [[package]] name = "cx-logging" @@ -1738,8 +1735,8 @@ testing = ["func-timeout", "jaraco.itertools", "pytest (>=6)", "pytest-black (>= [metadata] lock-version = "1.1" -python-versions = "^3.9.1" -content-hash = "80cee7a98d806c905e8d220451a556c08e582d8994048bf1f619971d0eaa0c0d" +python-versions = ">=3.9.1,<3.10" +content-hash = "ecf46d6e998954aaf8fee958d6030e36ebe3f0d329b7edaeb38eaf247a60cc14" [metadata.files] acre = [] @@ -2058,31 +2055,41 @@ cryptography = [ {file = "cryptography-37.0.4.tar.gz", hash = "sha256:63f9c17c0e2474ccbebc9302ce2f07b55b3b3fcb211ded18a42d5764f5c10a82"}, ] cx-freeze = [ - {file = "cx_Freeze-6.11.1-cp310-cp310-macosx_10_9_universal2.whl", hash = 
"sha256:e372b9e72ac0e2207ee65a9d404e2669da1134dc37f5ace9a2a779099d3aa868"}, - {file = "cx_Freeze-6.11.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:dd293382e1ad270dddf5a2707db5dbb8600a1e0b0c9b0da7af9d61326eb1b325"}, - {file = "cx_Freeze-6.11.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:feec2f36bce042da6a0d92690bc592b0dcec29218adc2278535cd13b28ec3485"}, - {file = "cx_Freeze-6.11.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d5aafcc6337856d5921b20f41acdcc8d0fe770388f3a072eb25163f8825f6c5d"}, - {file = "cx_Freeze-6.11.1-cp310-cp310-win32.whl", hash = "sha256:b99cc0b6d6c1ba51bd9fe11dbfae9aabcf089ba779ea86d83d280e2e40f484e7"}, - {file = "cx_Freeze-6.11.1-cp310-cp310-win_amd64.whl", hash = "sha256:f0dfe6acf25eb096faba7d4b4b001bcd0f818e372ea1f05d900665b0ad82b0b9"}, - {file = "cx_Freeze-6.11.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:3a68e70dcb27b0720b131a35c5fdd096012fe00119a8e51d935f3fb3cd251c39"}, - {file = "cx_Freeze-6.11.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f7bde925042d8843af9b6242a1bf3865dbbae088f3183a89a575124ec2e14a4"}, - {file = "cx_Freeze-6.11.1-cp36-cp36m-win32.whl", hash = "sha256:7698fb82b6f84b3426774b5f3bee770601f26b612306319664a02f1ec5160861"}, - {file = "cx_Freeze-6.11.1-cp36-cp36m-win_amd64.whl", hash = "sha256:9848c975401b21a98aa896baabfed067c3e981afd5b5b0a8a5eabe5c9f23d3c5"}, - {file = "cx_Freeze-6.11.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:87dcf5ceb78dc6af910c45238128fda2394b7c430d3fa469e87e1efdeeb5d4cc"}, - {file = "cx_Freeze-6.11.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fb74d8cc1f8c658986acc19ea6875b985a979421f9bb9c310b43cd2ff5d90c44"}, - {file = "cx_Freeze-6.11.1-cp37-cp37m-win32.whl", hash = "sha256:971c0a8356ef0ee09a3097f9c9d5b52cde6d08d1ef80e997eb4a6e22fe0eff2f"}, - {file = "cx_Freeze-6.11.1-cp37-cp37m-win_amd64.whl", hash = "sha256:7c1cb44379b2093cbdde77e302a376f29aa61999c73be6e8a559463db84b85c4"}, - {file = "cx_Freeze-6.11.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:bc90d6dbde66e8ddfe6b26f63fb2ea7d6d0e4568205f40660a63b8b200dcabcf"}, - {file = "cx_Freeze-6.11.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1f56f618a23d86bdcfff22b29ec993117effd32a401060013105517301c0bf32"}, - {file = "cx_Freeze-6.11.1-cp38-cp38-win32.whl", hash = "sha256:4edfb5d65afb11eb9f0326d40d15445366481585705b3096f2cd090e30a36247"}, - {file = "cx_Freeze-6.11.1-cp38-cp38-win_amd64.whl", hash = "sha256:cfb5a8032bf424c04814c9426425fa1db4cf8c280da948969eead9f616c0fd92"}, - {file = "cx_Freeze-6.11.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:0a3e32980269cfabc2e814978bfdf4382fe3cbc9ac64f9f1bdb1cd2ddf3a40d0"}, - {file = "cx_Freeze-6.11.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:798bb7ca037c3c885efd3eda6756c84c7927c712b730b22a7f256440faa36d38"}, - {file = "cx_Freeze-6.11.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5aa1759098ca4853200a79138b626a9caa2ccf829d662b28c82ec7e71ea97cde"}, - {file = "cx_Freeze-6.11.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7962680ae04ee3afda1012026b5394a534e2526b68681d591158b7d8bc733bcf"}, - {file = "cx_Freeze-6.11.1-cp39-cp39-win32.whl", hash = "sha256:da4f82fe27e71571c0ab9d700b5e6c6c631ae39133d9b6d7157939f1e9f37312"}, - {file = "cx_Freeze-6.11.1-cp39-cp39-win_amd64.whl", hash = "sha256:aaf399b6ed5d54b7271980ae354605620bedcd52d722f57ad527bd989c56a875"}, - {file = "cx_Freeze-6.11.1.tar.gz", hash = 
"sha256:8f3a30c9e3394f290655e346d3b460910656b30ac6347a87499bb5ad365c6e7c"}, + {file = "cx_Freeze-6.12.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:8b1d64fad066dca4b70b9cf3e80026f380efc7b44ccea86d0a8991600f412dfd"}, + {file = "cx_Freeze-6.12.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a8518162462fec973b9c89bf8d2f34452f74386e00b533ff655994608a8a56c6"}, + {file = "cx_Freeze-6.12.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c8b0ed69d6e3733171c7bf78cb8544445cc36df7d96a20a40831d067b407a086"}, + {file = "cx_Freeze-6.12.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2ad47aa57a89f0d86a6a08395c9eadce2780d1104002bf4c733ba7ac760a388c"}, + {file = "cx_Freeze-6.12.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6ab810343e921d2bdd6e313c3df8234b276a3838f8f0d1d2ec6c500a47b6a016"}, + {file = "cx_Freeze-6.12.0-cp310-cp310-win32.whl", hash = "sha256:a7c77ef922d2f3020409ef7e71d44fb90aa1df62be3a4f1a0210ea4e84fc2037"}, + {file = "cx_Freeze-6.12.0-cp310-cp310-win_amd64.whl", hash = "sha256:c56c29c95c81149d9265883bf48ff72b2788dccbf651f1443784a059e433c6b4"}, + {file = "cx_Freeze-6.12.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:79fa96b39c51d4df912aa6d918f0a19d19b94f38bfff9ea199cdf33e373d7b8b"}, + {file = "cx_Freeze-6.12.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0b6a8f361bae56daa4bf0170bc1eeb368a3515e23773e46b3578af07924633ab"}, + {file = "cx_Freeze-6.12.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2ed10f5d81c2964928b52bb4ec5f4d473177d14b181d248b157a88f364ad2b24"}, + {file = "cx_Freeze-6.12.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:159f9fdfc86adc57c6f2a3a522275c195d2021c9890c54c007dccb7cb9db8383"}, + {file = "cx_Freeze-6.12.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6424852ab6b3fa71330bd65d5e86defd73b8811cbc6d9996e54e3e83ce46af16"}, + {file = "cx_Freeze-6.12.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:3e1a61c4ce43914198ded561ef487fc6963f7d7c662f380814758e6f55b94f7c"}, + {file = "cx_Freeze-6.12.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e152398c5dbb1030245febb5c10537063c514ea7513e1fe6048a4ee07d678f5c"}, + {file = "cx_Freeze-6.12.0-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ee93c21515a7e13fb55b34eb5e35eb5c904cb74e66771317b61d799028bdd5ad"}, + {file = "cx_Freeze-6.12.0-cp36-cp36m-win32.whl", hash = "sha256:890fee78b114083fecb82802d5dbbad6b37c3a9396aae1e4c959d213678fbc7d"}, + {file = "cx_Freeze-6.12.0-cp36-cp36m-win_amd64.whl", hash = "sha256:2220ab28abd29ae25da2f86e79e85b37f30fa81dcb31e8da550cfb312be31b9f"}, + {file = "cx_Freeze-6.12.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c92cda2706764bc17d8b8162bf6c2df1026891ae3f48e52e067ec3fb227513a9"}, + {file = "cx_Freeze-6.12.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:072af5bb5543882578d586c0d2baaded7da3298673bd5072318b0011111a41b8"}, + {file = "cx_Freeze-6.12.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:918cab0c6a0eec7db285e3bddbafcdb4dbb16ec60c998b4898da4edf4f663670"}, + {file = "cx_Freeze-6.12.0-cp37-cp37m-win32.whl", hash = "sha256:36d7be0e1b1fe0812c9cc626867989e2fc7cb6fcf4002eb385156d1ca84d4bd6"}, + {file = "cx_Freeze-6.12.0-cp37-cp37m-win_amd64.whl", hash = "sha256:8f82f11307bbf1332eb7aa2b91b93ff14519dc88e122134935e95e85f7df9f0d"}, + {file = "cx_Freeze-6.12.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = 
"sha256:4299bb469c0a94d72313d5b6259c5f9917a4389d400a156eff6269159168d986"}, + {file = "cx_Freeze-6.12.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:228c482687b1bd1451bb64dba219fedf10a1fa8f0aa0ada92059a29af1dfcf4f"}, + {file = "cx_Freeze-6.12.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ffabcc6fde64ea851ccc6c0acda29de5e6ad4c35ca6949b22ba3e89285274ee6"}, + {file = "cx_Freeze-6.12.0-cp38-cp38-win32.whl", hash = "sha256:80b791e40184f8d3a26a9b8eae81a98778cadf23136f4de27c0b585eb9c5662a"}, + {file = "cx_Freeze-6.12.0-cp38-cp38-win_amd64.whl", hash = "sha256:104c9a7d641e3f21faea8a64805bf24bbb79b599018e93add927372fa88532fd"}, + {file = "cx_Freeze-6.12.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:3be232b1dfcd498e497c44fa18e74d4a8ce7e877a22d765ddfa89b2b5bbef1db"}, + {file = "cx_Freeze-6.12.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:358cd51f2978370aebaf00452ec997153cf1dd830513554d186d1d2f591a6c8b"}, + {file = "cx_Freeze-6.12.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:596fb0ac82199c9d6120f34fdff5331fd6fab4cfbb10ed1605e2149f15ec6748"}, + {file = "cx_Freeze-6.12.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a6360a195c3f48d4617bff0bfbf23201d238727e8bec7cf34d4d9ab571877a0d"}, + {file = "cx_Freeze-6.12.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:43d99446adae63480e60aeb4c7275e7034a2eb4269c6e8db18e4e917220d0d29"}, + {file = "cx_Freeze-6.12.0-cp39-cp39-win32.whl", hash = "sha256:fba3fd32c46a0dec08674ad280fc94f1d55f14881396cb205a6a36ec0d69babe"}, + {file = "cx_Freeze-6.12.0-cp39-cp39-win_amd64.whl", hash = "sha256:2eef6954b5152153440dadf6bc29d1233f1c4af9ba1c09f00b48b268af5d9fc5"}, + {file = "cx_Freeze-6.12.0.tar.gz", hash = "sha256:4caf5258192337e1d0c63376d79cc875a6af17690b676461d09e2ef018f67096"}, ] cx-logging = [ {file = "cx_Logging-3.0-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:9fcd297e5c51470521c47eff0f86ba844aeca6be97e13c3e2114ebdf03fa3c96"}, @@ -2542,8 +2549,8 @@ pillow = [ {file = "Pillow-9.2.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:37ff6b522a26d0538b753f0b4e8e164fdada12db6c6f00f62145d732d8a3152e"}, {file = "Pillow-9.2.0-cp310-cp310-win32.whl", hash = "sha256:c79698d4cd9318d9481d89a77e2d3fcaeff5486be641e60a4b49f3d2ecca4e28"}, {file = "Pillow-9.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:254164c57bab4b459f14c64e93df11eff5ded575192c294a0c49270f22c5d93d"}, - {file = "Pillow-9.2.0-cp311-cp311-macosx_10_10_universal2.whl", hash = "sha256:408673ed75594933714482501fe97e055a42996087eeca7e5d06e33218d05aa8"}, - {file = "Pillow-9.2.0-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:727dd1389bc5cb9827cbd1f9d40d2c2a1a0c9b32dd2261db522d22a604a6eec9"}, + {file = "Pillow-9.2.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:adabc0bce035467fb537ef3e5e74f2847c8af217ee0be0455d4fec8adc0462fc"}, + {file = "Pillow-9.2.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:336b9036127eab855beec9662ac3ea13a4544a523ae273cbf108b228ecac8437"}, {file = "Pillow-9.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50dff9cc21826d2977ef2d2a205504034e3a4563ca6f5db739b0d1026658e004"}, {file = "Pillow-9.2.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cb6259196a589123d755380b65127ddc60f4c64b21fc3bb46ce3a6ea663659b0"}, {file = "Pillow-9.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b0554af24df2bf96618dac71ddada02420f946be943b181108cac55a7a2dcd4"}, @@ -2701,6 
+2708,7 @@ pymongo = [ {file = "pymongo-3.12.3-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:71c0db2c313ea8a80825fb61b7826b8015874aec29ee6364ade5cb774fe4511b"}, {file = "pymongo-3.12.3-cp27-cp27mu-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5b779e87300635b8075e8d5cfd4fdf7f46078cd7610c381d956bca5556bb8f97"}, {file = "pymongo-3.12.3-cp27-cp27mu-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:351a2efe1c9566c348ad0076f4bf541f4905a0ebe2d271f112f60852575f3c16"}, + {file = "pymongo-3.12.3-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:858af7c2ab98f21ed06b642578b769ecfcabe4754648b033168a91536f7beef9"}, {file = "pymongo-3.12.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0a02313e71b7c370c43056f6b16c45effbb2d29a44d24403a3d5ba6ed322fa3f"}, {file = "pymongo-3.12.3-cp310-cp310-manylinux1_i686.whl", hash = "sha256:d3082e5c4d7b388792124f5e805b469109e58f1ab1eb1fbd8b998e8ab766ffb7"}, {file = "pymongo-3.12.3-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:514e78d20d8382d5b97f32b20c83d1d0452c302c9a135f0a9022236eb9940fda"}, diff --git a/pyproject.toml b/pyproject.toml index a16fa6edc8..979aef6e43 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -28,7 +28,7 @@ packages = [ openpype = 'start:boot' [tool.poetry.dependencies] -python = "^3.9.1" +python = ">=3.9.1,<3.10" aiohttp = "^3.7" aiohttp_json_rpc = "*" # TVPaint server acre = { git = "https://github.com/pypeclub/acre.git" } @@ -75,7 +75,7 @@ aiohttp-middlewares = "^2.0.0" flake8 = "^3.7" autopep8 = "^1.4" coverage = "*" -cx_freeze = "6.11.1" +cx_freeze = "6.12.0" GitPython = "^3.1.17" jedi = "^0.13" Jinja2 = "^2.11" From e3864ead41f68d9900bd30293d09049c43c0832e Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 14 Oct 2022 15:21:25 +0200 Subject: [PATCH 092/480] OP-3426 - DL support for Maya automatic tests WIP --- tests/integration/hosts/maya/lib.py | 16 ++++- .../maya/test_deadline_publish_in_maya.py | 70 +++++++++++++++++++ .../hosts/maya/test_publish_in_maya.py | 4 +- tests/lib/testing_classes.py | 54 +++++++++++++- 4 files changed, 139 insertions(+), 5 deletions(-) create mode 100644 tests/integration/hosts/maya/test_deadline_publish_in_maya.py diff --git a/tests/integration/hosts/maya/lib.py b/tests/integration/hosts/maya/lib.py index f3a438c065..6610fac118 100644 --- a/tests/integration/hosts/maya/lib.py +++ b/tests/integration/hosts/maya/lib.py @@ -2,10 +2,14 @@ import os import pytest import shutil -from tests.lib.testing_classes import HostFixtures +from tests.lib.testing_classes import ( + HostFixtures, + PublishTest, + DeadlinePublishTest +) -class MayaTestClass(HostFixtures): +class MayaHostFixtures(HostFixtures): @pytest.fixture(scope="module") def last_workfile_path(self, download_test_data, output_folder_url): """Get last_workfile_path from source data. 
@@ -39,3 +43,11 @@ class MayaTestClass(HostFixtures): "{}{}{}".format(startup_path, os.pathsep, original_pythonpath)) + + +class MayaLocalPublishTestClass(MayaHostFixtures, PublishTest): + """Testing class for local publishes.""" + + +class MayaDeadlinePublishTestClass(MayaHostFixtures, DeadlinePublishTest): + """Testing class for Deadline publishes.""" diff --git a/tests/integration/hosts/maya/test_deadline_publish_in_maya.py b/tests/integration/hosts/maya/test_deadline_publish_in_maya.py new file mode 100644 index 0000000000..f14310a16c --- /dev/null +++ b/tests/integration/hosts/maya/test_deadline_publish_in_maya.py @@ -0,0 +1,70 @@ +from tests.integration.hosts.maya.lib import MayaDeadlinePublishTestClass + + +class TestDeadlinePublishInMaya(MayaDeadlinePublishTestClass): + """Basic test case for publishing in Maya + + Shouldnt be running standalone only via 'runtests' pype command! (??) + + Uses generic TestCase to prepare fixtures for test data, testing DBs, + env vars. + + Always pulls and uses test data from GDrive! + + Opens Maya, runs publish on prepared workile. + + Then checks content of DB (if subset, version, representations were + created. + Checks tmp folder if all expected files were published. + + How to run: + (in cmd with activated {OPENPYPE_ROOT}/.venv) + {OPENPYPE_ROOT}/.venv/Scripts/python.exe {OPENPYPE_ROOT}/start.py runtests ../tests/integration/hosts/maya # noqa: E501 + + """ + PERSIST = True + + TEST_FILES = [ + ("1BTSIIULJTuDc8VvXseuiJV_fL6-Bu7FP", "test_maya_publish.zip", "") + ] + + APP = "maya" + # keep empty to locate latest installed variant or explicit + APP_VARIANT = "" + + TIMEOUT = 120 # publish timeout + + def test_db_asserts(self, dbcon, publish_finished): + """Host and input data dependent expected results in DB.""" + print("test_db_asserts") + assert 5 == dbcon.count_documents({"type": "version"}), \ + "Not expected no of versions" + + assert 0 == dbcon.count_documents({"type": "version", + "name": {"$ne": 1}}), \ + "Only versions with 1 expected" + + assert 1 == dbcon.count_documents({"type": "subset", + "name": "modelMain"}), \ + "modelMain subset must be present" + + assert 1 == dbcon.count_documents({"type": "subset", + "name": "workfileTest_task"}), \ + "workfileTest_task subset must be present" + + assert 11 == dbcon.count_documents({"type": "representation"}), \ + "Not expected no of representations" + + assert 2 == dbcon.count_documents({"type": "representation", + "context.subset": "modelMain", + "context.ext": "abc"}), \ + "Not expected no of representations with ext 'abc'" + + assert 2 == dbcon.count_documents({"type": "representation", + "context.subset": "modelMain", + "context.ext": "ma"}), \ + "Not expected no of representations with ext 'abc'" + + +if __name__ == "__main__": + test_case = TestDeadlinePublishInMaya() diff --git a/tests/integration/hosts/maya/test_publish_in_maya.py b/tests/integration/hosts/maya/test_publish_in_maya.py index 68b0564428..39b3f12263 100644 --- a/tests/integration/hosts/maya/test_publish_in_maya.py +++ b/tests/integration/hosts/maya/test_publish_in_maya.py @@ -1,7 +1,7 @@ -from tests.integration.hosts.maya.lib import MayaTestClass +from tests.integration.hosts.maya.lib import MayaLocalPublishTestClass -class TestPublishInMaya(MayaTestClass): +class TestPublishInMaya(MayaLocalPublishTestClass): """Basic test case for publishing in Maya Shouldnt be running standalone only via 'runtests' pype command! (??) 
diff --git a/tests/lib/testing_classes.py b/tests/lib/testing_classes.py index 78a9f81095..9d7dfa923e 100644 --- a/tests/lib/testing_classes.py +++ b/tests/lib/testing_classes.py @@ -8,9 +8,11 @@ import tempfile import shutil import glob import platform +import requests from tests.lib.db_handler import DBHandler from common.openpype_common.distribution.file_handler import RemoteFileHandler +from openpype.modules import ModulesManager class BaseTest: @@ -333,7 +335,57 @@ class PublishTest(ModuleUnitTest): "\n".join(sorted(not_matched))) -class HostFixtures(PublishTest): +class DeadlinePublishTest(PublishTest): + @pytest.fixture(scope="module") + def publish_finished(self, dbcon, launched_app, download_test_data, + timeout): + """Dummy fixture waiting for publish to finish""" + import time + time_start = time.time() + timeout = timeout or self.TIMEOUT + timeout = float(timeout) + while launched_app.poll() is None: + time.sleep(0.5) + if time.time() - time_start > timeout: + launched_app.terminate() + raise ValueError("Timeout reached") + + deadline_job_id = os.environ.get("DEADLINE_PUBLISH_JOB_ID") + if not deadline_job_id: + raise ValueError("DEADLINE_PUBLISH_JOB_ID empty, cannot find job") + + modules_manager = ModulesManager() + deadline_module = modules_manager.modules_by_name("deadline") + deadline_url = deadline_module.deadline_urls["default"] + + if not deadline_url: + raise ValueError("Must have default deadline url.") + + url = "{}/api/jobs?JobId={}".format(deadline_url, deadline_job_id) + date_finished = None + + time_start = time.time() + while not date_finished: + time.sleep(0.5) + if time.time() - time_start > timeout: + raise ValueError("Timeout for DL finish reached") + + response = requests.get(url, timeout=10) + if not response.ok: + msg = "Couldn't connect to {}".format(deadline_url) + raise RuntimeError(msg) + + if not response.json(): + raise ValueError("Couldn't find {}".format(deadline_job_id)) + + date_finished = response.json()[0]["DateComp"] + + # some clean exit test possible? + print("Publish finished") + yield True + + +class HostFixtures(): """Host specific fixtures. Should be implemented once per host.""" @pytest.fixture(scope="module") def last_workfile_path(self, download_test_data, output_folder_url): From 5ee97fa8e2497058ef407cad82fdfa38faee3bec Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 14 Oct 2022 15:24:48 +0200 Subject: [PATCH 093/480] OP-3426 - DL support for AE automatic tests WIP --- tests/integration/hosts/aftereffects/lib.py | 16 +++- .../test_deadline_publish_in_aftereffects.py | 79 +++++++++++++++++++ .../test_publish_in_aftereffects.py | 4 +- 3 files changed, 95 insertions(+), 4 deletions(-) create mode 100644 tests/integration/hosts/aftereffects/test_deadline_publish_in_aftereffects.py diff --git a/tests/integration/hosts/aftereffects/lib.py b/tests/integration/hosts/aftereffects/lib.py index 9fffc6073d..ca637edba6 100644 --- a/tests/integration/hosts/aftereffects/lib.py +++ b/tests/integration/hosts/aftereffects/lib.py @@ -2,10 +2,14 @@ import os import pytest import shutil -from tests.lib.testing_classes import HostFixtures +from tests.lib.testing_classes import ( + HostFixtures, + PublishTest, + DeadlinePublishTest +) -class AfterEffectsTestClass(HostFixtures): +class AEHostFixtures(HostFixtures): @pytest.fixture(scope="module") def last_workfile_path(self, download_test_data, output_folder_url): """Get last_workfile_path from source data. 
@@ -32,3 +36,11 @@ class AfterEffectsTestClass(HostFixtures): def startup_scripts(self, monkeypatch_session, download_test_data): """Points Maya to userSetup file from input data""" pass + + +class AELocalPublishTestClass(AEHostFixtures, PublishTest): + """Testing class for local publishes.""" + + +class AEDeadlinePublishTestClass(AEHostFixtures, DeadlinePublishTest): + """Testing class for Deadline publishes.""" diff --git a/tests/integration/hosts/aftereffects/test_deadline_publish_in_aftereffects.py b/tests/integration/hosts/aftereffects/test_deadline_publish_in_aftereffects.py new file mode 100644 index 0000000000..5d2a7d7ebd --- /dev/null +++ b/tests/integration/hosts/aftereffects/test_deadline_publish_in_aftereffects.py @@ -0,0 +1,79 @@ +import logging + +from tests.lib.assert_classes import DBAssert +from tests.integration.hosts.aftereffects.lib import AEDeadlinePublishTestClass + +log = logging.getLogger("test_publish_in_aftereffects") + + +class TestDeadlinePublishInAfterEffects(AEDeadlinePublishTestClass): + """Basic test case for DL publishing in AfterEffects + + Uses generic TestCase to prepare fixtures for test data, testing DBs, + env vars. + + Opens AfterEffects, run DL publish on prepared workile. + + Test zip file sets 3 required env vars: + - HEADLESS_PUBLISH - this triggers publish immediately app is open + - IS_TEST - this differentiate between regular webpublish + - PYBLISH_TARGETS + + Waits for publish job on DL is finished. + + Then checks content of DB (if subset, version, representations were + created. + Checks tmp folder if all expected files were published. + + """ + PERSIST = False + + TEST_FILES = [ + ("1c8261CmHwyMgS-g7S4xL5epAp0jCBmhf", + "test_aftereffects_publish.zip", + "") + ] + + APP = "aftereffects" + APP_VARIANT = "" + + APP_NAME = "{}/{}".format(APP, APP_VARIANT) + + TIMEOUT = 120 # publish timeout + + def test_db_asserts(self, dbcon, publish_finished): + """Host and input data dependent expected results in DB.""" + print("test_db_asserts") + failures = [] + + failures.append(DBAssert.count_of_types(dbcon, "version", 2)) + + failures.append( + DBAssert.count_of_types(dbcon, "version", 0, name={"$ne": 1})) + + failures.append( + DBAssert.count_of_types(dbcon, "subset", 1, + name="imageMainBackgroundcopy")) + + failures.append( + DBAssert.count_of_types(dbcon, "subset", 1, + name="workfileTest_task")) + + failures.append( + DBAssert.count_of_types(dbcon, "subset", 1, + name="reviewTesttask")) + + failures.append( + DBAssert.count_of_types(dbcon, "representation", 4)) + + additional_args = {"context.subset": "renderTestTaskDefault", + "context.ext": "png"} + failures.append( + DBAssert.count_of_types(dbcon, "representation", 1, + additional_args=additional_args)) + + assert not any(failures) + + +if __name__ == "__main__": + test_case = TestDeadlinePublishInAfterEffects() diff --git a/tests/integration/hosts/aftereffects/test_publish_in_aftereffects.py b/tests/integration/hosts/aftereffects/test_publish_in_aftereffects.py index 4925cbd2d7..191e86ebaf 100644 --- a/tests/integration/hosts/aftereffects/test_publish_in_aftereffects.py +++ b/tests/integration/hosts/aftereffects/test_publish_in_aftereffects.py @@ -1,12 +1,12 @@ import logging from tests.lib.assert_classes import DBAssert -from tests.integration.hosts.aftereffects.lib import AfterEffectsTestClass +from tests.integration.hosts.aftereffects.lib import AELocalPublishTestClass log = logging.getLogger("test_publish_in_aftereffects") -class TestPublishInAfterEffects(AfterEffectsTestClass): +class 
TestPublishInAfterEffects(AELocalPublishTestClass): """Basic test case for publishing in AfterEffects Uses generic TestCase to prepare fixtures for test data, testing DBs, From acde5a81fca7909820a48841f6339d8f9fdda9bf Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 14 Oct 2022 15:54:55 +0200 Subject: [PATCH 094/480] nuke: remove obsolete validators --- .../plugins/publish/validate_read_legacy.py | 87 -------------- .../publish/validate_write_deadline_tab.py | 53 --------- .../plugins/publish/validate_write_legacy.py | 108 ------------------ 3 files changed, 248 deletions(-) delete mode 100644 openpype/hosts/nuke/plugins/publish/validate_read_legacy.py delete mode 100644 openpype/hosts/nuke/plugins/publish/validate_write_deadline_tab.py delete mode 100644 openpype/hosts/nuke/plugins/publish/validate_write_legacy.py diff --git a/openpype/hosts/nuke/plugins/publish/validate_read_legacy.py b/openpype/hosts/nuke/plugins/publish/validate_read_legacy.py deleted file mode 100644 index ffd5cee710..0000000000 --- a/openpype/hosts/nuke/plugins/publish/validate_read_legacy.py +++ /dev/null @@ -1,87 +0,0 @@ -import os - -import nuke - -import toml -import pyblish.api -from bson.objectid import ObjectId -from openpype.hosts.nuke import api as napi -from openpype.pipeline import ( - discover_loader_plugins, - load_container, -) - - -class RepairReadLegacyAction(pyblish.api.Action): - - label = "Repair" - icon = "wrench" - on = "failed" - - def process(self, context, plugin): - - # Get the errored instances - failed = [] - for result in context.data["results"]: - if (result["error"] is not None and result["instance"] is not None - and result["instance"] not in failed): - failed.append(result["instance"]) - - # Apply pyblish.logic to get the instances for the plug-in - instances = pyblish.api.instances_by_plugin(failed, plugin) - - for instance in instances: - node = napi.get_instance_node(instance) - data = toml.loads(node["avalon"].value()) - data["name"] = node.name() - data["xpos"] = node.xpos() - data["ypos"] = node.ypos() - data["extension"] = os.path.splitext( - node["file"].value() - )[1][1:] - - data["connections"] = [] - for d in node.dependent(): - for i in range(d.inputs()): - if d.input(i) == node: - data["connections"].append([i, d]) - - nuke.delete(node) - - loader_name = "LoadSequence" - if data["extension"] == "mov": - loader_name = "LoadMov" - - loader_plugin = None - for Loader in discover_loader_plugins(): - if Loader.__name__ != loader_name: - continue - - loader_plugin = Loader - - load_container( - Loader=loader_plugin, - representation=ObjectId(data["representation"]) - ) - - node = nuke.toNode(data["name"]) - for connection in data["connections"]: - connection[1].setInput(connection[0], node) - - node.setXYpos(data["xpos"], data["ypos"]) - - -class ValidateReadLegacy(pyblish.api.InstancePlugin): - """Validate legacy read nodes.""" - - order = pyblish.api.ValidatorOrder - optional = True - families = ["read.legacy"] - label = "Read Legacy" - hosts = ["nuke"] - actions = [RepairReadLegacyAction] - - def process(self, instance): - - msg = "Clean up legacy read node \"{}\"".format(instance) - assert False, msg diff --git a/openpype/hosts/nuke/plugins/publish/validate_write_deadline_tab.py b/openpype/hosts/nuke/plugins/publish/validate_write_deadline_tab.py deleted file mode 100644 index 907577a97d..0000000000 --- a/openpype/hosts/nuke/plugins/publish/validate_write_deadline_tab.py +++ /dev/null @@ -1,53 +0,0 @@ -import pyblish.api -import openpype.hosts.nuke.lib - - -class 
RepairNukeWriteDeadlineTab(pyblish.api.Action): - - label = "Repair" - icon = "wrench" - on = "failed" - - def process(self, context, plugin): - - # Get the errored instances - failed = [] - for result in context.data["results"]: - if (result["error"] is not None and result["instance"] is not None - and result["instance"] not in failed): - failed.append(result["instance"]) - - # Apply pyblish.logic to get the instances for the plug-in - instances = pyblish.api.instances_by_plugin(failed, plugin) - - for instance in instances: - group_node = [x for x in instance if x.Class() == "Group"][0] - - # Remove existing knobs. - knob_names = openpype.hosts.nuke.lib.get_deadline_knob_names() - for name, knob in group_node.knobs().items(): - if name in knob_names: - group_node.removeKnob(knob) - - openpype.hosts.nuke.lib.add_deadline_tab(group_node) - - -class ValidateNukeWriteDeadlineTab(pyblish.api.InstancePlugin): - """Ensure Deadline tab is present and current.""" - - order = pyblish.api.ValidatorOrder - label = "Deadline Tab" - hosts = ["nuke"] - optional = True - families = ["render"] - actions = [RepairNukeWriteDeadlineTab] - - def process(self, instance): - group_node = [x for x in instance if x.Class() == "Group"][0] - - knob_names = openpype.hosts.nuke.lib.get_deadline_knob_names() - missing_knobs = [] - for name in knob_names: - if name not in group_node.knobs().keys(): - missing_knobs.append(name) - assert not missing_knobs, "Missing knobs: {}".format(missing_knobs) diff --git a/openpype/hosts/nuke/plugins/publish/validate_write_legacy.py b/openpype/hosts/nuke/plugins/publish/validate_write_legacy.py deleted file mode 100644 index e9bcb08b00..0000000000 --- a/openpype/hosts/nuke/plugins/publish/validate_write_legacy.py +++ /dev/null @@ -1,108 +0,0 @@ -import toml - -import nuke - -import pyblish.api - -from openpype.pipeline import discover_creator_plugins -from openpype.pipeline.publish import RepairAction -from openpype.hosts.nuke.api.lib import get_avalon_knob_data -from openpype.hosts.nuke import api as napi - -class ValidateWriteLegacy(pyblish.api.InstancePlugin): - """Validate legacy write nodes.""" - - order = pyblish.api.ValidatorOrder - optional = True - families = ["write"] - label = "Validate Write Legacy" - hosts = ["nuke"] - actions = [RepairAction] - - def process(self, instance): - node = napi.get_instance_node(instance) - msg = "Clean up legacy write node \"{}\"".format(instance) - - if node.Class() not in ["Group", "Write"]: - return - - # test avalon knobs - family_knobs = ["ak:family", "avalon:family"] - family_test = [k for k in node.knobs().keys() if k in family_knobs] - self.log.debug("_ family_test: {}".format(family_test)) - - # test if render in family test knob - # and only one item should be available - assert len(family_test) == 1, msg + " > More avalon attributes" - assert "render" in node[family_test[0]].value() \ - or "still" in node[family_test[0]].value(), msg + \ - " > Not correct family" - # test if `file` knob in node, this way old - # non-group-node write could be detected - assert "file" not in node.knobs(), msg + \ - " > file knob should not be present" - - # check if write node is having old render targeting - assert "render_farm" not in node.knobs(), msg + \ - " > old way of setting render target" - - @classmethod - def repair(cls, instance): - node = napi.get_instance_node(instance) - - if "Write" in node.Class(): - data = toml.loads(node["avalon"].value()) - else: - data = get_avalon_knob_data(node) - - # collect reusable data - data["XYpos"] = 
(node.xpos(), node.ypos()) - data["input"] = node.input(0) - data["publish"] = node["publish"].value() - data["render"] = node["render"].value() - data["render_farm"] = node["render_farm"].value() - data["review"] = node["review"].value() - data["use_limit"] = node["use_limit"].value() - data["first"] = node["first"].value() - data["last"] = node["last"].value() - - family = data["family"] - cls.log.debug("_ orig node family: {}".format(family)) - - # define what family of write node should be recreated - if family == "render": - Create_name = "CreateWriteRender" - elif family == "prerender": - Create_name = "CreateWritePrerender" - elif family == "still": - Create_name = "CreateWriteStill" - - # get appropriate plugin class - creator_plugin = None - for Creator in discover_creator_plugins(): - if Creator.__name__ != Create_name: - continue - - creator_plugin = Creator - - # delete the legaci write node - nuke.delete(node) - - # create write node with creator - new_node_name = data["subset"] - creator_plugin(new_node_name, data["asset"]).process() - - node = nuke.toNode(new_node_name) - node.setXYpos(*data["XYpos"]) - node.setInput(0, data["input"]) - node["publish"].setValue(data["publish"]) - node["review"].setValue(data["review"]) - node["use_limit"].setValue(data["use_limit"]) - node["first"].setValue(data["first"]) - node["last"].setValue(data["last"]) - - # recreate render targets - if data["render"]: - node["render"].setValue("Local") - if data["render_farm"]: - node["render"].setValue("On farm") From 50dbbcdb184bdc2d96697294578ec8e73de57a91 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 14 Oct 2022 16:00:00 +0200 Subject: [PATCH 095/480] nuke: no need for abstraction of getting instance node --- openpype/hosts/nuke/api/__init__.py | 2 -- openpype/hosts/nuke/api/plugin.py | 11 +---------- 2 files changed, 1 insertion(+), 12 deletions(-) diff --git a/openpype/hosts/nuke/api/__init__.py b/openpype/hosts/nuke/api/__init__.py index 8aaaff43b3..3b00ca9f6f 100644 --- a/openpype/hosts/nuke/api/__init__.py +++ b/openpype/hosts/nuke/api/__init__.py @@ -14,7 +14,6 @@ from .plugin import ( NukeWriteCreator, NukeCreatorError, OpenPypeCreator, - get_instance_node, get_instance_group_node_childs, get_colorspace_from_node ) @@ -67,7 +66,6 @@ __all__ = ( "NukeCreatorError", "OpenPypeCreator", "NukeHost", - "get_instance_node", "get_instance_group_node_childs", "get_colorspace_from_node", diff --git a/openpype/hosts/nuke/api/plugin.py b/openpype/hosts/nuke/api/plugin.py index 96d9b48ba6..7e3c131104 100644 --- a/openpype/hosts/nuke/api/plugin.py +++ b/openpype/hosts/nuke/api/plugin.py @@ -407,15 +407,6 @@ class OpenPypeCreator(LegacyCreator): return instance -def get_instance_node(instance): - # new publisher way - if instance.data.get("transientData"): - return instance.data["transientData"]["node"] - else: - # or backward compatible - return instance[0] - - def get_instance_group_node_childs(instance): """Return list of instance group node children @@ -425,7 +416,7 @@ def get_instance_group_node_childs(instance): Returns: list: [nuke.Node] """ - node = get_instance_node(instance) + node = instance.data["transientData"]["node"] if node.Class() != "Group": return From fcf84d1ba3c8373730f6dc68bb5b25cc9ebcb3cd Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 14 Oct 2022 16:00:58 +0200 Subject: [PATCH 096/480] nuke: get instance node directly form instance --- openpype/hosts/nuke/plugins/publish/collect_backdrop.py | 2 +- openpype/hosts/nuke/plugins/publish/collect_gizmo.py | 2 +- 
openpype/hosts/nuke/plugins/publish/collect_model.py | 2 +- openpype/hosts/nuke/plugins/publish/collect_reads.py | 2 +- openpype/hosts/nuke/plugins/publish/collect_slate_node.py | 2 +- openpype/hosts/nuke/plugins/publish/collect_writes.py | 2 +- openpype/hosts/nuke/plugins/publish/extract_gizmo.py | 2 +- openpype/hosts/nuke/plugins/publish/extract_model.py | 2 +- openpype/hosts/nuke/plugins/publish/extract_thumbnail.py | 2 +- openpype/hosts/nuke/plugins/publish/validate_asset_name.py | 4 ++-- openpype/hosts/nuke/plugins/publish/validate_backdrop.py | 2 +- openpype/hosts/nuke/plugins/publish/validate_gizmo.py | 4 ++-- .../hosts/nuke/plugins/publish/validate_output_resolution.py | 2 +- .../hosts/nuke/plugins/publish/validate_rendered_frames.py | 4 ++-- openpype/hosts/nuke/plugins/publish/validate_write_nodes.py | 4 ++-- 15 files changed, 19 insertions(+), 19 deletions(-) diff --git a/openpype/hosts/nuke/plugins/publish/collect_backdrop.py b/openpype/hosts/nuke/plugins/publish/collect_backdrop.py index dcabedf231..2d375cfb62 100644 --- a/openpype/hosts/nuke/plugins/publish/collect_backdrop.py +++ b/openpype/hosts/nuke/plugins/publish/collect_backdrop.py @@ -18,7 +18,7 @@ class CollectBackdrops(pyblish.api.InstancePlugin): def process(self, instance): self.log.debug(pformat(instance.data)) - bckn = napi.get_instance_node(instance) + bckn = instance.data["transientData"]["node"] # define size of the backdrop left = bckn.xpos() diff --git a/openpype/hosts/nuke/plugins/publish/collect_gizmo.py b/openpype/hosts/nuke/plugins/publish/collect_gizmo.py index 755c196f8e..5979cd123d 100644 --- a/openpype/hosts/nuke/plugins/publish/collect_gizmo.py +++ b/openpype/hosts/nuke/plugins/publish/collect_gizmo.py @@ -14,7 +14,7 @@ class CollectGizmo(pyblish.api.InstancePlugin): families = ["gizmo"] def process(self, instance): - grpn = napi.get_instance_node(instance) + grpn = instance.data["transientData"]["node"] # add family to familiess instance.data["families"].insert(0, instance.data["family"]) diff --git a/openpype/hosts/nuke/plugins/publish/collect_model.py b/openpype/hosts/nuke/plugins/publish/collect_model.py index 253121ffd4..ad3e99dfad 100644 --- a/openpype/hosts/nuke/plugins/publish/collect_model.py +++ b/openpype/hosts/nuke/plugins/publish/collect_model.py @@ -14,7 +14,7 @@ class CollectModel(pyblish.api.InstancePlugin): def process(self, instance): - geo_node = napi.get_instance_node(instance) + geo_node = instance.data["transientData"]["node"] # add family to familiess instance.data["families"].insert(0, instance.data["family"]) diff --git a/openpype/hosts/nuke/plugins/publish/collect_reads.py b/openpype/hosts/nuke/plugins/publish/collect_reads.py index 7390ec6884..3d14b2f231 100644 --- a/openpype/hosts/nuke/plugins/publish/collect_reads.py +++ b/openpype/hosts/nuke/plugins/publish/collect_reads.py @@ -18,7 +18,7 @@ class CollectNukeReads(pyblish.api.InstancePlugin): families = ["source"] def process(self, instance): - node = napi.get_instance_node(instance) + node = instance.data["transientData"]["node"] project_name = legacy_io.active_project() asset_name = legacy_io.Session["AVALON_ASSET"] diff --git a/openpype/hosts/nuke/plugins/publish/collect_slate_node.py b/openpype/hosts/nuke/plugins/publish/collect_slate_node.py index e3f24a233e..1469292664 100644 --- a/openpype/hosts/nuke/plugins/publish/collect_slate_node.py +++ b/openpype/hosts/nuke/plugins/publish/collect_slate_node.py @@ -12,7 +12,7 @@ class CollectSlate(pyblish.api.InstancePlugin): families = ["render"] def process(self, 
instance): - node = napi.get_instance_node(instance) + node = instance.data["transientData"]["node"] slate = next((n for n in nuke.allNodes() if "slate" in n.name().lower() diff --git a/openpype/hosts/nuke/plugins/publish/collect_writes.py b/openpype/hosts/nuke/plugins/publish/collect_writes.py index 32d837f400..8122776ccc 100644 --- a/openpype/hosts/nuke/plugins/publish/collect_writes.py +++ b/openpype/hosts/nuke/plugins/publish/collect_writes.py @@ -18,7 +18,7 @@ class CollectNukeWrites(pyblish.api.InstancePlugin): self.log.debug(pformat(instance.data)) instance.data.update(instance.data["creator_attributes"]) - group_node = napi.get_instance_node(instance) + group_node = instance.data["transientData"]["node"] render_target = instance.data["render_target"] family = instance.data["family"] families = instance.data["families"] diff --git a/openpype/hosts/nuke/plugins/publish/extract_gizmo.py b/openpype/hosts/nuke/plugins/publish/extract_gizmo.py index a3df497893..7ea9435571 100644 --- a/openpype/hosts/nuke/plugins/publish/extract_gizmo.py +++ b/openpype/hosts/nuke/plugins/publish/extract_gizmo.py @@ -26,7 +26,7 @@ class ExtractGizmo(publish.Extractor): def process(self, instance): tmp_nodes = [] - orig_grpn = napi.get_instance_node(instance) + orig_grpn = instance.data["transientData"]["node"] # Define extract output file path stagingdir = self.staging_dir(instance) diff --git a/openpype/hosts/nuke/plugins/publish/extract_model.py b/openpype/hosts/nuke/plugins/publish/extract_model.py index 5ca617f147..f44d699e1b 100644 --- a/openpype/hosts/nuke/plugins/publish/extract_model.py +++ b/openpype/hosts/nuke/plugins/publish/extract_model.py @@ -38,7 +38,7 @@ class ExtractModel(publish.Extractor): pformat(instance.data))) rm_nodes = [] - model_node = napi.get_instance_node(instance) + model_node = instance.data["transientData"]["node"] self.log.info("Crating additional nodes") subset = instance.data["subset"] diff --git a/openpype/hosts/nuke/plugins/publish/extract_thumbnail.py b/openpype/hosts/nuke/plugins/publish/extract_thumbnail.py index dfb1b38284..a1a0e241c0 100644 --- a/openpype/hosts/nuke/plugins/publish/extract_thumbnail.py +++ b/openpype/hosts/nuke/plugins/publish/extract_thumbnail.py @@ -65,7 +65,7 @@ class ExtractThumbnail(publish.Extractor): bake_viewer_input_process_node = kwargs[ "bake_viewer_input_process"] - node = napi.get_instance_node(instance) # group node + node = instance.data["transientData"]["node"] # group node self.log.info("Creating staging dir...") if "representations" not in instance.data: diff --git a/openpype/hosts/nuke/plugins/publish/validate_asset_name.py b/openpype/hosts/nuke/plugins/publish/validate_asset_name.py index 0d64da074e..230717cf18 100644 --- a/openpype/hosts/nuke/plugins/publish/validate_asset_name.py +++ b/openpype/hosts/nuke/plugins/publish/validate_asset_name.py @@ -85,7 +85,7 @@ class RepairSelectInvalidInstances(pyblish.api.Action): context_asset = context.data["assetEntity"]["name"] for instance in instances: - origin_node = napi.get_instance_node(instance) + origin_node = instance.data["transientData"]["node"] napi.lib.recreate_instance( origin_node, avalon_data={"asset": context_asset} ) @@ -112,7 +112,7 @@ class ValidateCorrectAssetName(pyblish.api.InstancePlugin): def process(self, instance): asset = instance.data.get("asset") context_asset = instance.context.data["assetEntity"]["name"] - node = napi.get_instance_node(instance) + node = instance.data["transientData"]["node"] msg = ( "Instance `{}` has wrong shot/asset name:\n" diff --git 
a/openpype/hosts/nuke/plugins/publish/validate_backdrop.py b/openpype/hosts/nuke/plugins/publish/validate_backdrop.py index 8ee8e631b8..e4df3430f9 100644 --- a/openpype/hosts/nuke/plugins/publish/validate_backdrop.py +++ b/openpype/hosts/nuke/plugins/publish/validate_backdrop.py @@ -32,7 +32,7 @@ class SelectCenterInNodeGraph(pyblish.api.Action): with napi.maintained_selection(): # collect all failed nodes xpos and ypos for instance in instances: - bdn = napi.get_instance_node(instance) + bdn = instance.data["transientData"]["node"] xC = bdn.xpos() + bdn.screenWidth() / 2 yC = bdn.ypos() + bdn.screenHeight() / 2 diff --git a/openpype/hosts/nuke/plugins/publish/validate_gizmo.py b/openpype/hosts/nuke/plugins/publish/validate_gizmo.py index 05a4085483..1fc3726a04 100644 --- a/openpype/hosts/nuke/plugins/publish/validate_gizmo.py +++ b/openpype/hosts/nuke/plugins/publish/validate_gizmo.py @@ -29,7 +29,7 @@ class OpenFailedGroupNode(pyblish.api.Action): with napi.maintained_selection(): # collect all failed nodes xpos and ypos for instance in instances: - grpn = napi.get_instance_node(instance) + grpn = instance.data["transientData"]["node"] nuke.showDag(grpn) @@ -45,7 +45,7 @@ class ValidateGizmo(pyblish.api.InstancePlugin): actions = [OpenFailedGroupNode] def process(self, instance): - grpn = napi.get_instance_node(instance) + grpn = instance.data["transientData"]["node"] with grpn: connections_out = nuke.allNodes('Output') diff --git a/openpype/hosts/nuke/plugins/publish/validate_output_resolution.py b/openpype/hosts/nuke/plugins/publish/validate_output_resolution.py index b7095e9ed7..c581e682a7 100644 --- a/openpype/hosts/nuke/plugins/publish/validate_output_resolution.py +++ b/openpype/hosts/nuke/plugins/publish/validate_output_resolution.py @@ -75,7 +75,7 @@ class ValidateOutputResolution(pyblish.api.InstancePlugin): ) invalid = cls.get_invalid(instance) - grp_node = napi.get_instance_node(instance) + grp_node = instance.data["transientData"]["node"] if cls.missing_msg == invalid: # make sure we are inside of the group node diff --git a/openpype/hosts/nuke/plugins/publish/validate_rendered_frames.py b/openpype/hosts/nuke/plugins/publish/validate_rendered_frames.py index c1e511f534..89136124b8 100644 --- a/openpype/hosts/nuke/plugins/publish/validate_rendered_frames.py +++ b/openpype/hosts/nuke/plugins/publish/validate_rendered_frames.py @@ -24,7 +24,7 @@ class RepairActionBase(pyblish.api.Action): def repair_knob(self, instances, state): for instance in instances: - node = napi.get_instance_node(instance) + node = instance.data["transientData"]["node"] files_remove = [os.path.join(instance.data["outputDir"], f) for r in instance.data.get("representations", []) for f in r.get("files", []) @@ -64,7 +64,7 @@ class ValidateRenderedFrames(pyblish.api.InstancePlugin): actions = [RepairCollectionActionToLocal, RepairCollectionActionToFarm] def process(self, instance): - node = napi.get_instance_node(instance) + node = instance.data["transientData"]["node"] f_data = { "node_name": node.name() diff --git a/openpype/hosts/nuke/plugins/publish/validate_write_nodes.py b/openpype/hosts/nuke/plugins/publish/validate_write_nodes.py index cfd0fb3871..fc5e054fa8 100644 --- a/openpype/hosts/nuke/plugins/publish/validate_write_nodes.py +++ b/openpype/hosts/nuke/plugins/publish/validate_write_nodes.py @@ -27,7 +27,7 @@ class RepairNukeWriteNodeAction(pyblish.api.Action): or instance ) - write_group_node = napi.get_instance_node(instance) + write_group_node = instance.data["transientData"]["node"] # get 
write node from inside of group write_node = None for x in child_nodes: @@ -67,7 +67,7 @@ class ValidateNukeWriteNode( or instance ) - write_group_node = napi.get_instance_node(instance) + write_group_node = instance.data["transientData"]["node"] # get write node from inside of group write_node = None From c6ca7243b84a141add78af617514044c02198d07 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 14 Oct 2022 16:32:56 +0200 Subject: [PATCH 097/480] nuke: fixing supporting function for resolving imageio nodes --- openpype/hosts/nuke/api/lib.py | 34 +++++++++++++++++++++------------- 1 file changed, 21 insertions(+), 13 deletions(-) diff --git a/openpype/hosts/nuke/api/lib.py b/openpype/hosts/nuke/api/lib.py index 157a2eb321..54fce77c79 100644 --- a/openpype/hosts/nuke/api/lib.py +++ b/openpype/hosts/nuke/api/lib.py @@ -2362,26 +2362,34 @@ def get_write_node_template_attr(node): ''' Gets all defined data from presets ''' + # TODO: remove this backward compatibility for old settings + # TASK: add identifiers to settings and rename settings key + plugin_names_mapping = { + "create_write_image": "CreateWriteImage", + "create_write_prerender": "CreateWritePrerender", + "create_write_render": "CreateWriteRender" + } # get avalon data from node - avalon_knob_data = read_avalon_data(node) + node_data = get_node_data(node, INSTANCE_DATA_KNOB) + identifier = node_data["creator_identifier"] # get template data nuke_imageio_writes = get_imageio_node_setting( - node_class=avalon_knob_data["families"], - plugin_name=avalon_knob_data["creator"], - subset=avalon_knob_data["subset"] + node_class="Write", + plugin_name=plugin_names_mapping[identifier], + subset=node_data["subset"] ) - # collecting correct data - correct_data = OrderedDict() + # collecting solved data + return_data = OrderedDict() - # adding imageio knob presets - for k, v in nuke_imageio_writes.items(): - if k in ["_id", "_previous"]: - continue - correct_data[k] = v + for knob in nuke_imageio_writes["knobs"]: + knob_type = knob["type"] + knob_value = knob["value"] + new_knob_value = convert_knob_value_to_correct_type( + knob_type, knob_value) + return_data[knob["name"]] = new_knob_value - # fix badly encoded data - return fix_data_for_node_create(correct_data) + return return_data def get_dependent_nodes(nodes): From 83d60936bc597a8f6a7f174e15b72f6b4812e728 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 14 Oct 2022 16:33:25 +0200 Subject: [PATCH 098/480] Nuke: fixing validator --- .../nuke/plugins/publish/validate_write_nodes.py | 11 ++--------- 1 file changed, 2 insertions(+), 9 deletions(-) diff --git a/openpype/hosts/nuke/plugins/publish/validate_write_nodes.py b/openpype/hosts/nuke/plugins/publish/validate_write_nodes.py index fc5e054fa8..ec2fba7ea7 100644 --- a/openpype/hosts/nuke/plugins/publish/validate_write_nodes.py +++ b/openpype/hosts/nuke/plugins/publish/validate_write_nodes.py @@ -5,7 +5,7 @@ from openpype.hosts.nuke.api.lib import ( set_node_knobs_from_settings, color_gui_to_int ) -from openpype.hosts.nuke import api as napi + from openpype.pipeline.publish import ( PublishXmlValidationError, OptionalPyblishPluginMixin, @@ -80,19 +80,12 @@ class ValidateNukeWriteNode( correct_data = get_write_node_template_attr(write_group_node) - if correct_data: - check_knobs = correct_data["knobs"] - else: - return - check = [] self.log.debug("__ write_node: {}".format( write_node )) - for knob_data in check_knobs: - key = knob_data["name"] - value = knob_data["value"] + for key, value in correct_data.items(): node_value = 
write_node[key].value() # fix type differences From be51b3d977be2032a9386df32a41b26accb2faae Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 14 Oct 2022 17:13:49 +0200 Subject: [PATCH 099/480] OP-3426 - fix not dropping settings collection --- tests/lib/db_handler.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/tests/lib/db_handler.py b/tests/lib/db_handler.py index b181055012..108ebf560d 100644 --- a/tests/lib/db_handler.py +++ b/tests/lib/db_handler.py @@ -118,9 +118,8 @@ class DBHandler: "Run with overwrite=True") else: if collection: - coll = self.client[db_name_out].get(collection) - if coll: - coll.drop() + if collection in self.client[db_name_out].list_collection_names(): + self.client[db_name_out][collection].drop() else: self.teardown(db_name_out) From d5e62039a760b6fa4bd200276f6adc0b3e4304b1 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 14 Oct 2022 19:26:21 +0200 Subject: [PATCH 100/480] OP-3426 - wait for DL publish job --- tests/lib/testing_classes.py | 22 ++++++++++++++++------ 1 file changed, 16 insertions(+), 6 deletions(-) diff --git a/tests/lib/testing_classes.py b/tests/lib/testing_classes.py index 9d7dfa923e..e6f747c6a4 100644 --- a/tests/lib/testing_classes.py +++ b/tests/lib/testing_classes.py @@ -350,12 +350,22 @@ class DeadlinePublishTest(PublishTest): launched_app.terminate() raise ValueError("Timeout reached") - deadline_job_id = os.environ.get("DEADLINE_PUBLISH_JOB_ID") - if not deadline_job_id: - raise ValueError("DEADLINE_PUBLISH_JOB_ID empty, cannot find job") + metadata_json = glob.glob(os.path.join(download_test_data, + "**/*_metadata.json"), + recursive=True) + if not metadata_json: + raise RuntimeError("No metadata file found. No job id.") - modules_manager = ModulesManager() - deadline_module = modules_manager.modules_by_name("deadline") + if len(metadata_json) > 1: + raise RuntimeError("Too many metadata files found.") + + with open(metadata_json[0]) as fp: + job_info = json.load(fp) + + deadline_job_id = job_info["job"]["_id"] + + manager = ModulesManager() + deadline_module = manager.modules_by_name["deadline"] deadline_url = deadline_module.deadline_urls["default"] if not deadline_url: @@ -378,7 +388,7 @@ class DeadlinePublishTest(PublishTest): if not response.json(): raise ValueError("Couldn't find {}".format(deadline_job_id)) - date_finished = response.json()[0]["DateComp"] + date_finished = response.json()[0]["MainEnd"] # some clean exit test possible? 
print("Publish finished") From 6324b0339c04cec093653e847340cccc93a46800 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 14 Oct 2022 20:27:16 +0200 Subject: [PATCH 101/480] Nuke: using transient data for nodes --- openpype/hosts/nuke/plugins/publish/collect_backdrop.py | 2 +- openpype/hosts/nuke/plugins/publish/extract_camera.py | 5 +++-- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/nuke/plugins/publish/collect_backdrop.py b/openpype/hosts/nuke/plugins/publish/collect_backdrop.py index 2d375cfb62..b8317137da 100644 --- a/openpype/hosts/nuke/plugins/publish/collect_backdrop.py +++ b/openpype/hosts/nuke/plugins/publish/collect_backdrop.py @@ -51,7 +51,7 @@ class CollectBackdrops(pyblish.api.InstancePlugin): # make label nicer instance.data["label"] = "{0} ({1} nodes)".format( - bckn.name(), len(instance) - 1) + bckn.name(), len(instance.data["transientData"]["childNodes"])) instance.data["families"].append(instance.data["family"]) diff --git a/openpype/hosts/nuke/plugins/publish/extract_camera.py b/openpype/hosts/nuke/plugins/publish/extract_camera.py index b751bfab03..4286f71e83 100644 --- a/openpype/hosts/nuke/plugins/publish/extract_camera.py +++ b/openpype/hosts/nuke/plugins/publish/extract_camera.py @@ -28,6 +28,7 @@ class ExtractCamera(publish.Extractor): ] def process(self, instance): + camera_node = instance.data["transientData"]["node"] handle_start = instance.context.data["handleStart"] handle_end = instance.context.data["handleEnd"] first_frame = int(nuke.root()["first_frame"].getValue()) @@ -38,7 +39,7 @@ class ExtractCamera(publish.Extractor): self.log.info("instance.data: `{}`".format( pformat(instance.data))) - rm_nodes = list() + rm_nodes = [] self.log.info("Crating additional nodes") subset = instance.data["subset"] staging_dir = self.staging_dir(instance) @@ -58,7 +59,7 @@ class ExtractCamera(publish.Extractor): with maintained_selection(): # bake camera with axeses onto word coordinate XYZ rm_n = bakeCameraWithAxeses( - nuke.toNode(instance.data["name"]), output_range) + camera_node, output_range) rm_nodes.append(rm_n) # create scene node From 76ca1e83c09759703f43471c56530428e6b9bd6a Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 14 Oct 2022 20:27:32 +0200 Subject: [PATCH 102/480] nuke: removing unused import --- openpype/hosts/nuke/plugins/publish/collect_backdrop.py | 1 - openpype/hosts/nuke/plugins/publish/collect_gizmo.py | 1 - openpype/hosts/nuke/plugins/publish/collect_model.py | 2 +- openpype/hosts/nuke/plugins/publish/collect_reads.py | 1 - openpype/hosts/nuke/plugins/publish/collect_slate_node.py | 1 - openpype/hosts/nuke/plugins/publish/extract_gizmo.py | 1 - openpype/hosts/nuke/plugins/publish/extract_model.py | 1 - openpype/hosts/nuke/plugins/publish/validate_rendered_frames.py | 1 - 8 files changed, 1 insertion(+), 8 deletions(-) diff --git a/openpype/hosts/nuke/plugins/publish/collect_backdrop.py b/openpype/hosts/nuke/plugins/publish/collect_backdrop.py index b8317137da..1545bb602d 100644 --- a/openpype/hosts/nuke/plugins/publish/collect_backdrop.py +++ b/openpype/hosts/nuke/plugins/publish/collect_backdrop.py @@ -1,7 +1,6 @@ from pprint import pformat import pyblish.api from openpype.hosts.nuke.api import lib as pnlib -from openpype.hosts.nuke import api as napi import nuke diff --git a/openpype/hosts/nuke/plugins/publish/collect_gizmo.py b/openpype/hosts/nuke/plugins/publish/collect_gizmo.py index 5979cd123d..2cd996971a 100644 --- a/openpype/hosts/nuke/plugins/publish/collect_gizmo.py +++ 
b/openpype/hosts/nuke/plugins/publish/collect_gizmo.py @@ -1,5 +1,4 @@ import pyblish.api -from openpype.hosts.nuke import api as napi import nuke diff --git a/openpype/hosts/nuke/plugins/publish/collect_model.py b/openpype/hosts/nuke/plugins/publish/collect_model.py index ad3e99dfad..d24382b757 100644 --- a/openpype/hosts/nuke/plugins/publish/collect_model.py +++ b/openpype/hosts/nuke/plugins/publish/collect_model.py @@ -1,6 +1,6 @@ import pyblish.api import nuke -from openpype.hosts.nuke import api as napi + @pyblish.api.log class CollectModel(pyblish.api.InstancePlugin): diff --git a/openpype/hosts/nuke/plugins/publish/collect_reads.py b/openpype/hosts/nuke/plugins/publish/collect_reads.py index 3d14b2f231..9fad8dddf8 100644 --- a/openpype/hosts/nuke/plugins/publish/collect_reads.py +++ b/openpype/hosts/nuke/plugins/publish/collect_reads.py @@ -5,7 +5,6 @@ import pyblish.api from openpype.client import get_asset_by_name from openpype.pipeline import legacy_io -from openpype.hosts.nuke import api as napi @pyblish.api.log diff --git a/openpype/hosts/nuke/plugins/publish/collect_slate_node.py b/openpype/hosts/nuke/plugins/publish/collect_slate_node.py index 1469292664..72b0614d2b 100644 --- a/openpype/hosts/nuke/plugins/publish/collect_slate_node.py +++ b/openpype/hosts/nuke/plugins/publish/collect_slate_node.py @@ -1,5 +1,4 @@ import pyblish.api -from openpype.hosts.nuke import api as napi import nuke diff --git a/openpype/hosts/nuke/plugins/publish/extract_gizmo.py b/openpype/hosts/nuke/plugins/publish/extract_gizmo.py index 7ea9435571..4ca80e9995 100644 --- a/openpype/hosts/nuke/plugins/publish/extract_gizmo.py +++ b/openpype/hosts/nuke/plugins/publish/extract_gizmo.py @@ -4,7 +4,6 @@ import nuke import pyblish.api from openpype.pipeline import publish -from openpype.hosts.nuke import api as napi from openpype.hosts.nuke.api import utils as pnutils from openpype.hosts.nuke.api.lib import ( maintained_selection, diff --git a/openpype/hosts/nuke/plugins/publish/extract_model.py b/openpype/hosts/nuke/plugins/publish/extract_model.py index f44d699e1b..814d404137 100644 --- a/openpype/hosts/nuke/plugins/publish/extract_model.py +++ b/openpype/hosts/nuke/plugins/publish/extract_model.py @@ -3,7 +3,6 @@ from pprint import pformat import nuke import pyblish.api -from openpype.hosts.nuke import api as napi from openpype.pipeline import publish from openpype.hosts.nuke.api.lib import ( maintained_selection, diff --git a/openpype/hosts/nuke/plugins/publish/validate_rendered_frames.py b/openpype/hosts/nuke/plugins/publish/validate_rendered_frames.py index 89136124b8..e0eb254ba1 100644 --- a/openpype/hosts/nuke/plugins/publish/validate_rendered_frames.py +++ b/openpype/hosts/nuke/plugins/publish/validate_rendered_frames.py @@ -1,7 +1,6 @@ import os import pyblish.api import clique -from openpype.hosts.nuke import api as napi from openpype.pipeline import PublishXmlValidationError From 2a0f4da03f7a2ec4b1a980e8f1522911ef25815b Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 14 Oct 2022 20:36:44 +0200 Subject: [PATCH 103/480] nuke: removing decoration which is not needed --- openpype/hosts/nuke/plugins/publish/collect_backdrop.py | 1 - openpype/hosts/nuke/plugins/publish/collect_gizmo.py | 1 - openpype/hosts/nuke/plugins/publish/collect_instance_data.py | 1 - openpype/hosts/nuke/plugins/publish/collect_model.py | 1 - openpype/hosts/nuke/plugins/publish/collect_reads.py | 2 -- openpype/hosts/nuke/plugins/publish/collect_writes.py | 1 - openpype/hosts/nuke/plugins/publish/validate_backdrop.py | 1 - 
openpype/hosts/nuke/plugins/publish/validate_gizmo.py | 1 - openpype/hosts/nuke/plugins/publish/validate_proxy_mode.py | 1 - openpype/hosts/nuke/plugins/publish/validate_rendered_frames.py | 1 - .../hosts/nuke/plugins/publish/validate_script_attributes.py | 1 - openpype/hosts/nuke/plugins/publish/validate_write_nodes.py | 1 - 12 files changed, 13 deletions(-) diff --git a/openpype/hosts/nuke/plugins/publish/collect_backdrop.py b/openpype/hosts/nuke/plugins/publish/collect_backdrop.py index 1545bb602d..8eaefa6854 100644 --- a/openpype/hosts/nuke/plugins/publish/collect_backdrop.py +++ b/openpype/hosts/nuke/plugins/publish/collect_backdrop.py @@ -4,7 +4,6 @@ from openpype.hosts.nuke.api import lib as pnlib import nuke -@pyblish.api.log class CollectBackdrops(pyblish.api.InstancePlugin): """Collect Backdrop node instance and its content """ diff --git a/openpype/hosts/nuke/plugins/publish/collect_gizmo.py b/openpype/hosts/nuke/plugins/publish/collect_gizmo.py index 2cd996971a..c194f70f7e 100644 --- a/openpype/hosts/nuke/plugins/publish/collect_gizmo.py +++ b/openpype/hosts/nuke/plugins/publish/collect_gizmo.py @@ -2,7 +2,6 @@ import pyblish.api import nuke -@pyblish.api.log class CollectGizmo(pyblish.api.InstancePlugin): """Collect Gizmo (group) node instance and its content """ diff --git a/openpype/hosts/nuke/plugins/publish/collect_instance_data.py b/openpype/hosts/nuke/plugins/publish/collect_instance_data.py index 9614ebdea6..3908aef4bc 100644 --- a/openpype/hosts/nuke/plugins/publish/collect_instance_data.py +++ b/openpype/hosts/nuke/plugins/publish/collect_instance_data.py @@ -2,7 +2,6 @@ import nuke import pyblish.api -@pyblish.api.log class CollectInstanceData(pyblish.api.InstancePlugin): """Collect all nodes with Avalon knob.""" diff --git a/openpype/hosts/nuke/plugins/publish/collect_model.py b/openpype/hosts/nuke/plugins/publish/collect_model.py index d24382b757..9da056052b 100644 --- a/openpype/hosts/nuke/plugins/publish/collect_model.py +++ b/openpype/hosts/nuke/plugins/publish/collect_model.py @@ -2,7 +2,6 @@ import pyblish.api import nuke -@pyblish.api.log class CollectModel(pyblish.api.InstancePlugin): """Collect Model node instance and its content """ diff --git a/openpype/hosts/nuke/plugins/publish/collect_reads.py b/openpype/hosts/nuke/plugins/publish/collect_reads.py index 9fad8dddf8..a1144fbcc3 100644 --- a/openpype/hosts/nuke/plugins/publish/collect_reads.py +++ b/openpype/hosts/nuke/plugins/publish/collect_reads.py @@ -2,12 +2,10 @@ import os import re import nuke import pyblish.api - from openpype.client import get_asset_by_name from openpype.pipeline import legacy_io -@pyblish.api.log class CollectNukeReads(pyblish.api.InstancePlugin): """Collect all read nodes.""" diff --git a/openpype/hosts/nuke/plugins/publish/collect_writes.py b/openpype/hosts/nuke/plugins/publish/collect_writes.py index 8122776ccc..e18fbbdf6f 100644 --- a/openpype/hosts/nuke/plugins/publish/collect_writes.py +++ b/openpype/hosts/nuke/plugins/publish/collect_writes.py @@ -5,7 +5,6 @@ import pyblish.api from openpype.hosts.nuke import api as napi -@pyblish.api.log class CollectNukeWrites(pyblish.api.InstancePlugin): """Collect all write nodes.""" diff --git a/openpype/hosts/nuke/plugins/publish/validate_backdrop.py b/openpype/hosts/nuke/plugins/publish/validate_backdrop.py index e4df3430f9..208d4a2498 100644 --- a/openpype/hosts/nuke/plugins/publish/validate_backdrop.py +++ b/openpype/hosts/nuke/plugins/publish/validate_backdrop.py @@ -46,7 +46,6 @@ class 
SelectCenterInNodeGraph(pyblish.api.Action): nuke.zoom(2, [min(all_xC), min(all_yC)]) -@pyblish.api.log class ValidateBackdrop(pyblish.api.InstancePlugin): """ Validate amount of nodes on backdrop node in case user forgoten to add nodes above the publishing backdrop node. diff --git a/openpype/hosts/nuke/plugins/publish/validate_gizmo.py b/openpype/hosts/nuke/plugins/publish/validate_gizmo.py index 1fc3726a04..57d7d5fc3b 100644 --- a/openpype/hosts/nuke/plugins/publish/validate_gizmo.py +++ b/openpype/hosts/nuke/plugins/publish/validate_gizmo.py @@ -33,7 +33,6 @@ class OpenFailedGroupNode(pyblish.api.Action): nuke.showDag(grpn) -@pyblish.api.log class ValidateGizmo(pyblish.api.InstancePlugin): """Validate amount of output nodes in gizmo (group) node""" diff --git a/openpype/hosts/nuke/plugins/publish/validate_proxy_mode.py b/openpype/hosts/nuke/plugins/publish/validate_proxy_mode.py index dac240ad19..c26a03f31a 100644 --- a/openpype/hosts/nuke/plugins/publish/validate_proxy_mode.py +++ b/openpype/hosts/nuke/plugins/publish/validate_proxy_mode.py @@ -17,7 +17,6 @@ class FixProxyMode(pyblish.api.Action): rootNode["proxy"].setValue(False) -@pyblish.api.log class ValidateProxyMode(pyblish.api.ContextPlugin): """Validate active proxy mode""" diff --git a/openpype/hosts/nuke/plugins/publish/validate_rendered_frames.py b/openpype/hosts/nuke/plugins/publish/validate_rendered_frames.py index e0eb254ba1..1c22c5b9d0 100644 --- a/openpype/hosts/nuke/plugins/publish/validate_rendered_frames.py +++ b/openpype/hosts/nuke/plugins/publish/validate_rendered_frames.py @@ -4,7 +4,6 @@ import clique from openpype.pipeline import PublishXmlValidationError -@pyblish.api.log class RepairActionBase(pyblish.api.Action): on = "failed" icon = "wrench" diff --git a/openpype/hosts/nuke/plugins/publish/validate_script_attributes.py b/openpype/hosts/nuke/plugins/publish/validate_script_attributes.py index f0632f8080..3ebc34f195 100644 --- a/openpype/hosts/nuke/plugins/publish/validate_script_attributes.py +++ b/openpype/hosts/nuke/plugins/publish/validate_script_attributes.py @@ -10,7 +10,6 @@ from openpype.hosts.nuke.api.lib import ( import nuke -@pyblish.api.log class ValidateScriptAttributes(pyblish.api.InstancePlugin): """ Validates file output. 
""" diff --git a/openpype/hosts/nuke/plugins/publish/validate_write_nodes.py b/openpype/hosts/nuke/plugins/publish/validate_write_nodes.py index ec2fba7ea7..c0b81f64ea 100644 --- a/openpype/hosts/nuke/plugins/publish/validate_write_nodes.py +++ b/openpype/hosts/nuke/plugins/publish/validate_write_nodes.py @@ -12,7 +12,6 @@ from openpype.pipeline.publish import ( ) -@pyblish.api.log class RepairNukeWriteNodeAction(pyblish.api.Action): label = "Repair" on = "failed" From 2fff046532f6a023b8be16ca3de405e4ea03d309 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 14 Oct 2022 20:39:12 +0200 Subject: [PATCH 104/480] nuke: better logic --- .../hosts/nuke/plugins/publish/validate_gizmo.py | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/openpype/hosts/nuke/plugins/publish/validate_gizmo.py b/openpype/hosts/nuke/plugins/publish/validate_gizmo.py index 57d7d5fc3b..8e844247ee 100644 --- a/openpype/hosts/nuke/plugins/publish/validate_gizmo.py +++ b/openpype/hosts/nuke/plugins/publish/validate_gizmo.py @@ -48,22 +48,22 @@ class ValidateGizmo(pyblish.api.InstancePlugin): with grpn: connections_out = nuke.allNodes('Output') - msg_multiple_outputs = ( - "Only one outcoming connection from " - "\"{}\" is allowed").format(instance.data["name"]) - if len(connections_out) > 1: + msg_multiple_outputs = ( + "Only one outcoming connection from " + "\"{}\" is allowed").format(instance.data["name"]) + raise PublishXmlValidationError( self, msg_multiple_outputs, "multiple_outputs", {"node_name": grpn["name"].value()} ) connections_in = nuke.allNodes('Input') - msg_missing_inputs = ( - "At least one Input node has to be inside Group: " - "\"{}\"").format(instance.data["name"]) - if len(connections_in) == 0: + msg_missing_inputs = ( + "At least one Input node has to be inside Group: " + "\"{}\"").format(instance.data["name"]) + raise PublishXmlValidationError( self, msg_missing_inputs, "no_inputs", {"node_name": grpn["name"].value()} From b6107fe5611345a379440932d1020a199e69ed8a Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 14 Oct 2022 21:05:27 +0200 Subject: [PATCH 105/480] nuke: refactor workfile collector --- .../nuke/plugins/publish/collect_workfile.py | 41 +++++-------------- 1 file changed, 10 insertions(+), 31 deletions(-) diff --git a/openpype/hosts/nuke/plugins/publish/collect_workfile.py b/openpype/hosts/nuke/plugins/publish/collect_workfile.py index 8b6ab5d072..7f4849be67 100644 --- a/openpype/hosts/nuke/plugins/publish/collect_workfile.py +++ b/openpype/hosts/nuke/plugins/publish/collect_workfile.py @@ -3,22 +3,19 @@ import os import nuke import pyblish.api -import openpype.api as pype -from openpype.hosts.nuke.api.lib import ( - add_publish_knob, - get_avalon_knob_data -) +import openpype.api as api from openpype.pipeline import KnownPublishError -class CollectWorkfile(pyblish.api.ContextPlugin): +class CollectWorkfile(pyblish.api.InstancePlugin): """Collect current script for publish.""" - order = pyblish.api.CollectorOrder - 0.50 + order = pyblish.api.CollectorOrder - 0.499 label = "Collect Workfile" hosts = ['nuke'] + families = ["workfile"] - def process(self, context): # sourcery skip: avoid-builtin-shadow + def process(self, instance): # sourcery skip: avoid-builtin-shadow root = nuke.root() current_file = os.path.normpath(nuke.root().name()) @@ -29,23 +26,16 @@ class CollectWorkfile(pyblish.api.ContextPlugin): "Use workfile tool to manage the name correctly." 
) - knob_data = get_avalon_knob_data(root) - - add_publish_knob(root) - - family = "workfile" - task = os.getenv("AVALON_TASK", None) # creating instances per write node staging_dir = os.path.dirname(current_file) base_name = os.path.basename(current_file) - subset = family + task.capitalize() # Get frame range first_frame = int(root["first_frame"].getValue()) last_frame = int(root["last_frame"].getValue()) - handle_start = int(knob_data.get("handleStart", 0)) - handle_end = int(knob_data.get("handleEnd", 0)) + handle_start = instance.data["handleStart"] + handle_end = instance.data["handleEnd"] # Get format format = root['format'].value() @@ -53,33 +43,27 @@ class CollectWorkfile(pyblish.api.ContextPlugin): resolution_height = format.height() pixel_aspect = format.pixelAspect() - # Create instance - instance = context.create_instance(subset) - instance.add(root) - script_data = { - "asset": os.getenv("AVALON_ASSET", None), "frameStart": first_frame + handle_start, "frameEnd": last_frame - handle_end, "resolutionWidth": resolution_width, "resolutionHeight": resolution_height, "pixelAspect": pixel_aspect, - # backward compatibility + # backward compatibility handles "handles": handle_start, - "handleStart": handle_start, "handleEnd": handle_end, "step": 1, "fps": root['fps'].value(), "currentFile": current_file, - "version": int(pype.get_version_from_path(current_file)), + "version": int(api.get_version_from_path(current_file)), "host": pyblish.api.current_host(), "hostVersion": nuke.NUKE_VERSION_STRING } - context.data.update(script_data) + instance.context.data.update(script_data) # creating representation representation = { @@ -91,12 +75,7 @@ class CollectWorkfile(pyblish.api.ContextPlugin): # creating instance data instance.data.update({ - "subset": subset, - "label": base_name, "name": base_name, - "publish": root.knob('publish').value(), - "family": family, - "families": [family], "representations": [representation] }) From cdd11eae8f2acdc4c8172eb03ef106a2a42b0f40 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 14 Oct 2022 21:34:44 +0200 Subject: [PATCH 106/480] Deadline: nuke submitter gets node with new way --- .../modules/deadline/plugins/publish/submit_nuke_deadline.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_nuke_deadline.py b/openpype/modules/deadline/plugins/publish/submit_nuke_deadline.py index b09d2935ab..083c2bcd78 100644 --- a/openpype/modules/deadline/plugins/publish/submit_nuke_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_nuke_deadline.py @@ -40,7 +40,7 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin): instance.data["toBeRenderedOn"] = "deadline" families = instance.data["families"] - node = instance[0] + node = instance.data["transientData"]["node"] context = instance.context # get default deadline webservice url from deadline module From f8b4183b49b7b5d83af574f1ad78702a9ed7d5a2 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 14 Oct 2022 21:35:01 +0200 Subject: [PATCH 107/480] nuke: fixing review and frames conditions --- .../nuke/plugins/publish/collect_writes.py | 47 +++++++++---------- 1 file changed, 21 insertions(+), 26 deletions(-) diff --git a/openpype/hosts/nuke/plugins/publish/collect_writes.py b/openpype/hosts/nuke/plugins/publish/collect_writes.py index e18fbbdf6f..3054e5a30c 100644 --- a/openpype/hosts/nuke/plugins/publish/collect_writes.py +++ b/openpype/hosts/nuke/plugins/publish/collect_writes.py @@ -15,7 +15,8 @@ class 
CollectNukeWrites(pyblish.api.InstancePlugin): def process(self, instance): self.log.debug(pformat(instance.data)) - instance.data.update(instance.data["creator_attributes"]) + creator_attributes = instance.data["creator_attributes"] + instance.data.update(creator_attributes) group_node = instance.data["transientData"]["node"] render_target = instance.data["render_target"] @@ -26,12 +27,8 @@ class CollectNukeWrites(pyblish.api.InstancePlugin): instance.data["families"].append( "{}.{}".format(family, render_target) ) - # add additional keys to farm targeted - if render_target == "farm": - # Farm rendering - self.log.info("flagged for farm render") - instance.data["transfer"] = False - instance.data["farm"] = True + if instance.data.get("review"): + instance.data["families"].append("review") child_nodes = napi.get_instance_group_node_childs(instance) instance.data["transientData"]["childNodes"] = child_nodes @@ -71,8 +68,7 @@ class CollectNukeWrites(pyblish.api.InstancePlugin): self.log.debug('output dir: {}'.format(output_dir)) - if render_target == "frame": - + if render_target == "frames": representation = { 'name': ext, 'ext': ext, @@ -133,6 +129,21 @@ class CollectNukeWrites(pyblish.api.InstancePlugin): representation['files'] = collected_frames instance.data["representations"].append(representation) + self.log.info("Publishing rendered frames ...") + + elif render_target == "farm": + farm_priority = creator_attributes.get("farm_priority") + farm_chunk = creator_attributes.get("farm_chunk") + farm_concurency = creator_attributes.get("farm_concurency") + instance.data.update({ + "deadlineChunkSize": farm_chunk or 1, + "deadlinePriority": farm_priority or 50, + "deadlineConcurrentTasks": farm_concurency or 0 + }) + # Farm rendering + instance.data["transfer"] = False + instance.data["farm"] = True + self.log.info("Farm rendering ON ...") # get colorspace and add to version data colorspace = napi.get_colorspace_from_node(write_node) @@ -140,28 +151,12 @@ class CollectNukeWrites(pyblish.api.InstancePlugin): "colorspace": colorspace } - # get deadline related attributes - dl_chunk_size = 1 - if "deadlineChunkSize" in group_node.knobs(): - dl_chunk_size = group_node["deadlineChunkSize"].value() - - dl_priority = 50 - if "deadlinePriority" in group_node.knobs(): - dl_priority = group_node["deadlinePriority"].value() - - dl_concurrent_tasks = 0 - if "deadlineConcurrentTasks" in group_node.knobs(): - dl_concurrent_tasks = group_node["deadlineConcurrentTasks"].value() - instance.data.update({ "versionData": version_data, "path": write_file_path, "outputDir": output_dir, "ext": ext, - "colorspace": colorspace, - "deadlineChunkSize": dl_chunk_size, - "deadlinePriority": dl_priority, - "deadlineConcurrentTasks": dl_concurrent_tasks + "colorspace": colorspace }) if family == "render": From abe5e24309e882eac5c51b080dc9c4069741de18 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Mon, 17 Oct 2022 13:49:20 +0200 Subject: [PATCH 108/480] OP-3426 - fix check if job is finished --- tests/lib/testing_classes.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/tests/lib/testing_classes.py b/tests/lib/testing_classes.py index e6f747c6a4..8102ad732d 100644 --- a/tests/lib/testing_classes.py +++ b/tests/lib/testing_classes.py @@ -372,10 +372,10 @@ class DeadlinePublishTest(PublishTest): raise ValueError("Must have default deadline url.") url = "{}/api/jobs?JobId={}".format(deadline_url, deadline_job_id) - date_finished = None + valid_date_finished = None time_start = time.time() - while 
not date_finished: + while not valid_date_finished: time.sleep(0.5) if time.time() - time_start > timeout: raise ValueError("Timeout for DL finish reached") @@ -388,7 +388,8 @@ class DeadlinePublishTest(PublishTest): if not response.json(): raise ValueError("Couldn't find {}".format(deadline_job_id)) - date_finished = response.json()[0]["MainEnd"] + # '0001-...' returned until job is finished + valid_date_finished = response.json()[0]["DateComp"][:4] != "0001" # some clean exit test possible? print("Publish finished") From 646dcd92ae3ce91332321cf9706f1345496e5421 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Mon, 17 Oct 2022 14:02:39 +0200 Subject: [PATCH 109/480] OP-3426 - fix proper zip id for deadline test --- .../hosts/aftereffects/test_deadline_publish_in_aftereffects.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/integration/hosts/aftereffects/test_deadline_publish_in_aftereffects.py b/tests/integration/hosts/aftereffects/test_deadline_publish_in_aftereffects.py index 5d2a7d7ebd..103ce6fe5b 100644 --- a/tests/integration/hosts/aftereffects/test_deadline_publish_in_aftereffects.py +++ b/tests/integration/hosts/aftereffects/test_deadline_publish_in_aftereffects.py @@ -29,7 +29,7 @@ class TestDeadlinePublishInAfterEffects(AEDeadlinePublishTestClass): PERSIST = False TEST_FILES = [ - ("1c8261CmHwyMgS-g7S4xL5epAp0jCBmhf", + ("1xhd2ij2ixyjCyTjZgcJEPAIiBHLU1FEY", "test_aftereffects_publish.zip", "") ] From a45218d48a86220f190f8786f17e9264b80e8a95 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Mon, 17 Oct 2022 14:03:57 +0200 Subject: [PATCH 110/480] OP-3426 - fix proper id for publish job It should wait for publish job, not rendering jobs. --- .../deadline/plugins/publish/submit_publish_job.py | 12 +++++++++++- tests/lib/testing_classes.py | 2 +- 2 files changed, 12 insertions(+), 2 deletions(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_publish_job.py b/openpype/modules/deadline/plugins/publish/submit_publish_job.py index aba505b3c6..0e418ccf77 100644 --- a/openpype/modules/deadline/plugins/publish/submit_publish_job.py +++ b/openpype/modules/deadline/plugins/publish/submit_publish_job.py @@ -212,6 +212,8 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin): more universal code. Muster post job is sent directly by Muster submitter, so this type of code isn't necessary for it. + Returns: + (str): deadline_publish_job_id """ data = instance.data.copy() subset = data["subset"] @@ -331,6 +333,10 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin): if not response.ok: raise Exception(response.text) + deadline_publish_job_id = response.json()["_id"] + + return deadline_publish_job_id + def _copy_extend_frames(self, instance, representation): """Copy existing frames from latest version. 
@@ -984,7 +990,8 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin): self.deadline_url = instance.data.get("deadlineUrl") assert self.deadline_url, "Requires Deadline Webservice URL" - self._submit_deadline_post_job(instance, render_job, instances) + deadline_publish_job_id = \ + self._submit_deadline_post_job(instance, render_job, instances) # publish job file publish_job = { @@ -1002,6 +1009,9 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin): "instances": instances } + if deadline_publish_job_id: + publish_job["deadline_publish_job_id"] = deadline_publish_job_id + # add audio to metadata file if available audio_file = context.data.get("audioFile") if audio_file and os.path.isfile(audio_file): diff --git a/tests/lib/testing_classes.py b/tests/lib/testing_classes.py index 8102ad732d..53eeae10cb 100644 --- a/tests/lib/testing_classes.py +++ b/tests/lib/testing_classes.py @@ -362,7 +362,7 @@ class DeadlinePublishTest(PublishTest): with open(metadata_json[0]) as fp: job_info = json.load(fp) - deadline_job_id = job_info["job"]["_id"] + deadline_job_id = job_info["deadline_publish_job_id"] manager = ModulesManager() deadline_module = manager.modules_by_name["deadline"] From 6f5c91c785d3678ca76b25b613a6bd9369315a45 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Mon, 17 Oct 2022 14:11:54 +0200 Subject: [PATCH 111/480] OP-3426 - refactor - Nuke to new testing classes --- tests/integration/hosts/nuke/lib.py | 18 +++++++++++++++--- .../hosts/nuke/test_publish_in_nuke.py | 9 +++++---- 2 files changed, 20 insertions(+), 7 deletions(-) diff --git a/tests/integration/hosts/nuke/lib.py b/tests/integration/hosts/nuke/lib.py index d3c3d7ba81..e3c610199b 100644 --- a/tests/integration/hosts/nuke/lib.py +++ b/tests/integration/hosts/nuke/lib.py @@ -2,10 +2,14 @@ import os import pytest import shutil -from tests.lib.testing_classes import HostFixtures +from tests.lib.testing_classes import ( + HostFixtures, + PublishTest, + DeadlinePublishTest +) -class NukeTestClass(HostFixtures): +class NukeHostFixtures(HostFixtures): @pytest.fixture(scope="module") def last_workfile_path(self, download_test_data, output_folder_url): """Get last_workfile_path from source data. 
@@ -41,4 +45,12 @@ class NukeTestClass(HostFixtures): monkeypatch_session.setenv("NUKE_PATH", "{}{}{}".format(startup_path, os.pathsep, - original_nuke_path)) \ No newline at end of file + original_nuke_path)) + + +class NukeLocalPublishTestClass(NukeHostFixtures, PublishTest): + """Testing class for local publishes.""" + + +class NukeDeadlinePublishTestClass(NukeHostFixtures, DeadlinePublishTest): + """Testing class for Deadline publishes.""" diff --git a/tests/integration/hosts/nuke/test_publish_in_nuke.py b/tests/integration/hosts/nuke/test_publish_in_nuke.py index 884160e0b5..88defdc37a 100644 --- a/tests/integration/hosts/nuke/test_publish_in_nuke.py +++ b/tests/integration/hosts/nuke/test_publish_in_nuke.py @@ -1,12 +1,12 @@ import logging from tests.lib.assert_classes import DBAssert -from tests.integration.hosts.nuke.lib import NukeTestClass +from tests.integration.hosts.nuke.lib import NukeLocalPublishTestClass log = logging.getLogger("test_publish_in_nuke") -class TestPublishInNuke(NukeTestClass): +class TestPublishInNuke(NukeLocalPublishTestClass): """Basic test case for publishing in Nuke Uses generic TestCase to prepare fixtures for test data, testing DBs, @@ -20,7 +20,8 @@ class TestPublishInNuke(NukeTestClass): How to run: (in cmd with activated {OPENPYPE_ROOT}/.venv) - {OPENPYPE_ROOT}/.venv/Scripts/python.exe {OPENPYPE_ROOT}/start.py runtests ../tests/integration/hosts/nuke # noqa: E501 + {OPENPYPE_ROOT}/.venv/Scripts/python.exe {OPENPYPE_ROOT}/start.py + runtests ../tests/integration/hosts/nuke # noqa: E501 To check log/errors from launched app's publish process keep PERSIST to True and check `test_openpype.logs` collection. @@ -37,7 +38,7 @@ class TestPublishInNuke(NukeTestClass): # could be overwritten by command line arguments # keep empty to locate latest installed variant or explicit APP_VARIANT = "" - PERSIST = True # True - keep test_db, test_openpype, outputted test files + PERSIST = False # True - keep test_db, test_openpype, outputted test files TEST_DATA_FOLDER = None def test_db_asserts(self, dbcon, publish_finished): From fdaa7774a48746ff6ec2bc967fe512b9e3bb2994 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Mon, 17 Oct 2022 14:22:52 +0200 Subject: [PATCH 112/480] OP-3426 - added new Nuke deadline testing class --- .../nuke/test_deadine_publish_in_nuke.py | 76 +++++++++++++++++++ 1 file changed, 76 insertions(+) create mode 100644 tests/integration/hosts/nuke/test_deadine_publish_in_nuke.py diff --git a/tests/integration/hosts/nuke/test_deadine_publish_in_nuke.py b/tests/integration/hosts/nuke/test_deadine_publish_in_nuke.py new file mode 100644 index 0000000000..2ab886a257 --- /dev/null +++ b/tests/integration/hosts/nuke/test_deadine_publish_in_nuke.py @@ -0,0 +1,76 @@ +import logging + +from tests.lib.assert_classes import DBAssert +from tests.integration.hosts.nuke.lib import NukeDeadlinePublishTestClass + +log = logging.getLogger("test_publish_in_nuke") + + +class TestDeadlinePublishInNuke(NukeDeadlinePublishTestClass): + """Basic test case for publishing in Nuke + + Uses generic TestCase to prepare fixtures for test data, testing DBs, + env vars. + + Opens Nuke, run publish on prepared workile. + + Then checks content of DB (if subset, version, representations were + created. + Checks tmp folder if all expected files were published. 
+ + How to run: + (in cmd with activated {OPENPYPE_ROOT}/.venv) + {OPENPYPE_ROOT}/.venv/Scripts/python.exe {OPENPYPE_ROOT}/start.py + runtests ../tests/integration/hosts/nuke # noqa: E501 + + To check log/errors from launched app's publish process keep PERSIST + to True and check `test_openpype.logs` collection. + """ + # https://drive.google.com/file/d/1SUurHj2aiQ21ZIMJfGVBI2KjR8kIjBGI/view?usp=sharing # noqa: E501 + TEST_FILES = [ + ("1SeWprClKhWMv2xVC9AcnekIJFExxnp_b", + "test_nuke_deadlinepublish.zip", "") + ] + + APP = "nuke" + + TIMEOUT = 120 # publish timeout + + # could be overwritten by command line arguments + # keep empty to locate latest installed variant or explicit + APP_VARIANT = "" + PERSIST = False # True - keep test_db, test_openpype, outputted test files + TEST_DATA_FOLDER = None + + def test_db_asserts(self, dbcon, publish_finished): + """Host and input data dependent expected results in DB.""" + print("test_db_asserts") + failures = [] + + failures.append(DBAssert.count_of_types(dbcon, "version", 2)) + + failures.append( + DBAssert.count_of_types(dbcon, "version", 0, name={"$ne": 1})) + + failures.append( + DBAssert.count_of_types(dbcon, "subset", 1, + name="renderCompositingInNukeMain")) + + failures.append( + DBAssert.count_of_types(dbcon, "subset", 1, + name="workfileTest_task")) + + failures.append( + DBAssert.count_of_types(dbcon, "representation", 4)) + + additional_args = {"context.subset": "renderCompositingInNukeMain", + "context.ext": "exr"} + failures.append( + DBAssert.count_of_types(dbcon, "representation", 1, + additional_args=additional_args)) + + assert not any(failures) + + +if __name__ == "__main__": + test_case = TestDeadlinePublishInNuke() From 9159b67051fb5d69d6b389a428d0883204a83418 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Mon, 17 Oct 2022 15:45:06 +0200 Subject: [PATCH 113/480] OP-3426 - fix source zip file for Nuke --- tests/integration/hosts/nuke/test_deadine_publish_in_nuke.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/integration/hosts/nuke/test_deadine_publish_in_nuke.py b/tests/integration/hosts/nuke/test_deadine_publish_in_nuke.py index 2ab886a257..ddeae4bfa0 100644 --- a/tests/integration/hosts/nuke/test_deadine_publish_in_nuke.py +++ b/tests/integration/hosts/nuke/test_deadine_publish_in_nuke.py @@ -29,7 +29,7 @@ class TestDeadlinePublishInNuke(NukeDeadlinePublishTestClass): # https://drive.google.com/file/d/1SUurHj2aiQ21ZIMJfGVBI2KjR8kIjBGI/view?usp=sharing # noqa: E501 TEST_FILES = [ ("1SeWprClKhWMv2xVC9AcnekIJFExxnp_b", - "test_nuke_deadlinepublish.zip", "") + "test_nuke_deadline_publish.zip", "") ] APP = "nuke" From 9923e56be596232ed50c8d0349f39e6763f4fa62 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Mon, 17 Oct 2022 18:46:58 +0200 Subject: [PATCH 114/480] :bug: use short version --- tools/build.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tools/build.sh b/tools/build.sh index 4de5092227..8e65ba9a99 100755 --- a/tools/build.sh +++ b/tools/build.sh @@ -152,7 +152,7 @@ main () { openpype_root=$(dirname $(dirname "$(realpath ${BASH_SOURCE[0]})")) pushd "$openpype_root" > /dev/null || return > /dev/null - version_command="import os;exec(open(os.path.join('$openpype_root', 'openpype', 'version.py')).read());print(__version__);" + version_command="import os;import re;exec(open(os.path.join('$openpype_root', 'openpype', 'version.py')).read());print(re.search(r'(\d+\.\d+.\d+).*', version['__version__'])[1]);" openpype_version="$(python <<< 
${version_command})" _inside_openpype_tool="1" From 8b29406c062140df22995f66912dd0b7e62bf230 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 18 Oct 2022 12:02:33 +0200 Subject: [PATCH 115/480] OP-3426 - cleaned up test_projece DB dump Now contains only project and single asset document --- tests/resources/test_data.zip | Bin 7350 -> 4979 bytes 1 file changed, 0 insertions(+), 0 deletions(-) diff --git a/tests/resources/test_data.zip b/tests/resources/test_data.zip index 0faab86b37d5c7d1224e8a92cca766ed80536718..b37a30ed7a73b96d2c6398207657607b63fac0bc 100644 GIT binary patch delta 2196 zcmZXVc{JN;7sr2s)DonXSdv&G$1arCR!S{Fg|W1#u~iz>P8SVh8e2k9EkFA{C{v7T zm5Q~fJuix(sGU}&V=sz!hR%8adEa}UyPWfU&%O6~{`u5tZ_89e*+6Ykc+EQwHOOg8 zH(NVyAKrW>6ae@I0YC+WLq$OmK_r!QJ1}7Sno}T}fY5u+hhG*_MyBT2ic3ab@NM+Z zzUn`D-Y3W0A64rejiPyc7Mn5})9GjWk#0Dy;*J0gl_My{pb1z=mH^^}W3FPcGy80k z!2_QsH!n<;<0b9(JI{SoJ3h0y7YlSbtGkinP20q;6@fF(2VozI1D`EZEo7Ifg#kn9 z*=2QhMntWaZW5xSHu!LWf%HhGRqJ!>myFgzqsHQI--3OdgYn;b zxI~qL-`+%J;tSXK?^;r*rQDi(03tE9B_y<(Q^4`B+$#0Zgtq&b{>P`V2OlXU+s%zT z+*|6RqXhQ!E}ISG7-dCs>o_xjREoS{gH`|DrM z&(|I@J8;a>IrH5t)44RZ?aLU)?YjD6zKR13XHq+nosGxgib+0|E9Oc*UYxMYuA8)RKpSD+x8YUt%r2O9J|;1Gg|3443XhoV}*=W|&y>@0aNzav5`- zkwZ7M;vt<`>lk@^&!BW+{Mejfd$&kgle(B5&LNOk>nMD4p`ZlsYp6W=SZNQBl@Pk< z>XrQH0TB|+G^EmP^kX12|I?IPPTi_16bmdiYhNmE@*BDE596VF)C<%pU8rQnIhv!? zBz=1>@vN}w)5|}*X;Y!MG^RsrSS{rA^Gz`-=wFAWQpf-U-}Z`O&dT;2Er(j{%d0IR z12=YQl8K4*yBF%?y8Ex!Y+}S_``(#Vwz`lz_k`m$spg%GhrL5ybhr60Pd{nOSmpWA zOiNen4{g1DR|b^pD_MQ`s?-h2T)`9_N2mb_;vN8#0>}Qqh>R?HN4YsMksn}eNzZw zsP7YZsayCtS!F`XRia}&wQz3OrL><`g_CkHRk#d~{t~B?{eUV#CW2#-|1DS1>&|K0JK2tSXOlQc`;Iy6-Wk2KAub7+=f!a_0N$-GroP zW%keJN6rMf2~1U@?08dSZ4(5hQ;ju{%{Qz?NZQfnblqHv2FmvBS^;a>p?J8`TZG@sP#0F zmW(xE)-LJ$YKfWrzPR2kjR=*lUP2VNGA_JqzKIkK$Jwj}LFfd~YX?@+>3V1-8>0 zkt;n>bMLRA^3o-1)!vOk>K{=%+!0nOP?yNNB>$bU7u2{Sr$7(e9&usXI(N=s!@g~f?vm@L zynk-+vgy(j(ud_89(>EyNi1&}vMh7*!p{pDncDSFXgiaJhVx+N)R)6D?kTv^oex$P zvPrGP#T28qiW`6HvqJjrXUayljY==7G1~=xG0%(?xj5ZS#vO$fJt|wLzqr0U`vr8- z-yLe7ro=6cv@kMW3q};j>TGa2k{jXd!)1TlLA3V7c3*n$8~6+z-N!~0s@IKVFw%BB zss#SjA?Hu?EO_#@QGL{`!jMLcs z;awAdg)(rVZWgtw-%y;FQ%N@uS(k3K1gBXO-fM|3>2xQvj^-zC#Rtp=xpv=-l^wWl zXj&85IS>E@-~vtnNE!hregy!a;Mlc}{f@`&JD@Q1%rOV?)EDTI(|vsJDQqL~$XO2m zwwKq>$21ZbhnI7*njjLGX7A_*p{7C}eyj?8?6#S;R#6CL$V+&#QGd|d2_C>(`xf%> zLP$@j6*do^Qm;ziYtAi=Jm*=d^*qv9rG#(_{V~gLOV#T2ujY4g#KrYJwk*O9=aMDFP~> z5R(6g_T!iRYm&!F?_c3D>A?gP{D=5|zu`p82Aqf;cz$#h5tKR}f`N;k-25>3aa7<1 zv1kIs{Uo8mXtV}I5e7D*4Ir*C@H^TVk_rR$kYb=aMh?;s0~0V}{BQscv~mDIKL1IR Fe*i~{;(Gu9 delta 4380 zcmZ{n1yB@Tx4@UB(`8vYrDJKNyPFjdK|*S21ZfahT0oXsKxp0S6 zkl?ynXTSyLFYI6VdqY2{ceEg~1bKrh9!wz(C^3rI@?<7Mf`!cQA-ADu$Ey%2WzCBI z;IotMmmcQ`KdTn2?ol)kRfMrv0%G)4^iu=o;v@=1e4tkX$g8EbU1_?4C+r92QB;;0 zwKD+*A`Rhw)tqo(&7eiEI8oRI0i>^L&&FC%wZ(T-YEL~OjyzY#Rt2w;NYiX*mHE~kfXq{-ljKJT8q44)s2?j~SAdXi;h&cAHAn@G4 zZ!3i5r+wY!0cE}BnKuKeKG+qbAtEoY$VlH9P$v8IvT`>M@DjX_QRb!Uj6q&ao_hag zsFl@2odbVuUG5=uUbts;R5$#UhcOp*vw)nh%KM&d`P z3DJA>_C%5QTr$JzWU~qUUlRD|UG9&X5h!&13}K1GGz{@=kl-;&bmD z%9NLhIYvshw$QGcaz?iVf#tT^(Li`NOtTtDd_5NuW}uJ~$iQ3^SC#Zgbk=`{GuU06 zu!mVCV3hR^vtr->TOyrrd%TJ<_`^os#F^x?an6n&m<~1$1ONmO0Bqv74XcA$87lz* z!1bRY{7=2Q!+mV*ZG3D5T>ca;JZX$9LI|Yx8@{e@OOysP-S0;#Sd;A92_Z{36iKzO z{mob2)OBs6yE1Vt6r3G3%BYh&j~^<@kup)xT+-XMH*`}-GUQTo;v_R^X1*4K)5;xc z;kp7D^M9ioawAMY%GRVX^JvkS@O|Kqy)KB(p85*;N%IKw9}!n+JV+vf_?n@YZ#E+`02J`|4Q}f>px9+ zED8V!{<#<}Dw&)T@wX2TpIEvvJf`fH=@4@#`rL@r)*(7Fh|dj@6Wzb8k@#ppQ5lD= zZwuy98jXQ>di(pYFJ$iCN%}Dqcc#dng)LIEGX(&c!l(za71qf?EXe>YaD{O=NU$tP z78d7y)Fmn-nzs^=Yf3YdUW8u$W7s|p-Chl2*C54{tEhq6k{kaPt zed^5}m_tuZQu41$H(UEJ4+ML9m9J?yR 
z^S9e|jEIHfE($h*OE=LI(xt3o^^OfxCJ=pyw0hvvqjGe__I7otdG|qCld}AvgN{{$ zN(Y@3V6s_-AOFBxwahpt%UHA9*_^*vZ%f^}c`Jea}}!Gedy`^6#_Z1?5eXKDELIq37qp{1;Mye4#69NDaEZl`j} zq1(YoHr}7?PVPKZ&{CG2v+o~qlv+aTfUtdov0N;zbmZqck@I52mAF0+>@PYMi=Pa5 z!&QSQETi{FbIEiC!B#(1kd?(dr>5^x6NoU? zZ874a0v-OEyFd$AyU>d(WtE(PaWW6QJP-erm%~H-*li)d&TRYLJqbv@=}V2)-DT>r zxo}9Ot({c$0xGe3gD`>s1n%o@avFgzlMRK;m`o1@{=0(JH%d!&vY{YKzY&ME>+6dO z*8=ZT=*AjwW))**gl(<>v`Ek=LX2f}VNzZ&bbJ6`M+rF94YE`_IRMZ?nvX5<;9i6A zzlN(o+k-SPzG9sig~d;3D(qI5^<;^|8f@4mEp&B(T~im6wU&@rUTkI5Fh#aO^78}u zg>$%vgyj<9yFxhiN|*@L7;L>+j0X)>c3^m9V$P2nT1}-gY`|sWq3lwRDBB(K^;^;* z;pPIr8rF|r)eETA)+WC-8ky*vp6WyD5impsWm8&b{IpxH7W8ak(rowb!-cG5IeuII zlE=urV`jY7=nyo}YBJNr@#q_}h66WI^Nz$jR;%|rk|tQt^E!_n@J$DD(3C-=-Rp0G zaRcOU_C+TEUt@KnJgsaI`m37kdT?u*GV7(2MpYb7R3(Wt?%;aV0^9qt9Bb0>X6ESU zH$iI*gI8xz-xdpcD ztkuXk*8ZMh&UsGYiEvK1#;SDcGq;g_R{Ppw!|Ozj$}Mpj*Ao(u-cM3mvrXTGZBkQ$ zdy5Y(ICJ78D(UG-`&L#HkeVoDG16BELZw1K%r$pk+lJB)5hHXP`B_(F{_ufCP-G#A z4pmRmXb4=!OQtW?Ie;;+tHhU-g3zjNkXvpoO4&OS0&Ym_ zdg5zL3R&&K-e|ja;xX%YzqL%xW$mx+P*#@!&(KI!^(Q~aCwp=vJZf$yqo?H=lcYoO zOcC?X*=*fM@zuH#E^+sFaw1b#(`GH6b@*BD>Mfa3ys#}ax8?#7?{c+uCq>PqCMEZa(vr4)4A8&npRkRIsT>IA4@1Uc={N#uA^0dIvD;6|7+OZx&oAaG#V-VWGoL2N>x=$o& z;g_sKUGgY~`k@~j`RoXFV8p4HAR(pCp0&Hv1LTQ*Jkq^&kPWw1E5pZw6q`FJ(cPThfZ`$&jbs_^WOysKNh zomj`^QcP~`9c@*OQhjisq61yyJPy=PqdKjWZ^p%p$Dqupgr`Z`TZ7*q{iBN>2X5<1 zcwn6pa7&6o4Oa&zw18ebFyXt-Ma&V3`Pceme~)dPS&MPSj8BZ}8QBv@YE+1{Lu~t- zd_}6G3jX`Vc8pt%G%NS@Gkw(UgZ=;0vZLOB};X~^qB9{E=A2($xh&YVl zrlvQJ3?|*V54*b?$IsF?i2Zav4Wri>;9VO6IO^rKEo3|eOyb!p#8Hld;Q zwJSF!Z{E_GJM;=?%f4Vd@m)Tzfr1wv1bHfC!6^RIlM?fmGjc}1M^e`@dy-^CoYs{c zr3){rn1z&>EI4EYZoICCvN?tgUXw-OgbpA2zElW{XN-=={_PJ-AwkqCg$a*i6~Dov zsXe0Vy`BqY1=t-a0g9y`R?NDXW@I>|W!kK*XFqQZkir(=^lL5MS>)@$<%Xqo5kOv) zm97db8tV0#;w!2~s|?!Xsp|gnzYpZt2H7OO1U>iDj17CR4tmJ<+A(eVg7Hx~_oj{T zy20~=>!5@6ALcfKf!PS^$DM7_-%y5+=U!7hp2N03f9iYHR^#WLy=laJ&`wvL*xqVCkp!Uak; z0aM$ckC_UBFqhSL>3e12%CxkG(uU}asHsxtwzb@yCsGoIi(hlN*vhx5NTi{}yE_p* zzvNjDAIi0O)!*sBK5WxS# zTz|OhuV4XK{^(w8RQNrblw(5hzk~k=g`u|Y@dDd%{^Nl11QMcTK?1-_+95d|8b^7X_A8o|9N8@a!Mcu{GUZ#U@HacE4do* y`0qrIQUr?w+aRa_uoCb9g6aUP0$C|hSQKhNvp?HVFp_}*V1N%U005=_o8n*3r4_3H From 7c904861fd36a623547b28d99ab15319476fd59b Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 18 Oct 2022 12:35:59 +0200 Subject: [PATCH 116/480] OP-3426 - test class for Maya and Deadline --- .../maya/test_deadline_publish_in_maya.py | 51 ++++++++++++++----- 1 file changed, 37 insertions(+), 14 deletions(-) diff --git a/tests/integration/hosts/maya/test_deadline_publish_in_maya.py b/tests/integration/hosts/maya/test_deadline_publish_in_maya.py index f14310a16c..5621a03833 100644 --- a/tests/integration/hosts/maya/test_deadline_publish_in_maya.py +++ b/tests/integration/hosts/maya/test_deadline_publish_in_maya.py @@ -4,15 +4,13 @@ from tests.integration.hosts.maya.lib import MayaDeadlinePublishTestClass class TestDeadlinePublishInMaya(MayaDeadlinePublishTestClass): """Basic test case for publishing in Maya - Shouldnt be running standalone only via 'runtests' pype command! (??) - - Uses generic TestCase to prepare fixtures for test data, testing DBs, - env vars. Always pulls and uses test data from GDrive! Opens Maya, runs publish on prepared workile. + Sends file to be rendered on Deadline. + Then checks content of DB (if subset, version, representations were created. Checks tmp folder if all expected files were published. 
@@ -22,10 +20,11 @@ class TestDeadlinePublishInMaya(MayaDeadlinePublishTestClass): {OPENPYPE_ROOT}/.venv/Scripts/python.exe {OPENPYPE_ROOT}/start.py runtests ../tests/integration/hosts/maya # noqa: E501 """ - PERSIST = True + PERSIST = False TEST_FILES = [ - ("1BTSIIULJTuDc8VvXseuiJV_fL6-Bu7FP", "test_maya_publish.zip", "") + ("1dDY7CbdFXfRksGVoiuwjhnPoTRCCf5ea", + "test_maya_deadline_publish.zip", "") ] APP = "maya" @@ -37,7 +36,7 @@ class TestDeadlinePublishInMaya(MayaDeadlinePublishTestClass): def test_db_asserts(self, dbcon, publish_finished): """Host and input data dependent expected results in DB.""" print("test_db_asserts") - assert 5 == dbcon.count_documents({"type": "version"}), \ + assert 3 == dbcon.count_documents({"type": "version"}), \ "Not expected no of versions" assert 0 == dbcon.count_documents({"type": "version", @@ -48,22 +47,46 @@ class TestDeadlinePublishInMaya(MayaDeadlinePublishTestClass): "name": "modelMain"}), \ "modelMain subset must be present" - assert 1 == dbcon.count_documents({"type": "subset", - "name": "workfileTest_task"}), \ - "workfileTest_task subset must be present" + assert 1 == dbcon.count_documents({ + "type": "subset", "name": "renderTestTaskMain_beauty"}), \ + "renderTestTaskMain_beauty subset must be present" - assert 11 == dbcon.count_documents({"type": "representation"}), \ + assert 1 == dbcon.count_documents({"type": "subset", + "name": "workfileTesttask"}), \ + "workfileTesttask subset must be present" + + assert 6 == dbcon.count_documents({"type": "representation"}), \ "Not expected no of representations" - assert 2 == dbcon.count_documents({"type": "representation", + assert 1 == dbcon.count_documents({"type": "representation", "context.subset": "modelMain", "context.ext": "abc"}), \ "Not expected no of representations with ext 'abc'" - assert 2 == dbcon.count_documents({"type": "representation", + assert 1 == dbcon.count_documents({"type": "representation", "context.subset": "modelMain", "context.ext": "ma"}), \ - "Not expected no of representations with ext 'abc'" + "Not expected no of representations with ext 'ma'" + + assert 1 == dbcon.count_documents({"type": "representation", + "context.subset": "workfileTesttask", # noqa + "context.ext": "mb"}), \ + "Not expected no of representations with ext 'mb'" + + assert 1 == dbcon.count_documents({"type": "representation", + "context.subset": "renderTestTaskMain_beauty", # noqa + "context.ext": "exr"}), \ + "Not expected no of representations with ext 'exr'" + + assert 1 == dbcon.count_documents({"type": "representation", + "context.subset": "renderTestTaskMain_beauty", # noqa + "context.ext": "jpg"}), \ + "Not expected no of representations with ext 'jpg'" + + assert 1 == dbcon.count_documents({"type": "representation", + "context.subset": "renderTestTaskMain_beauty", # noqa + "context.ext": "h264_exr"}), \ + "Not expected no of representations with ext 'h264_exr'" if __name__ == "__main__": From 2436fabd7f695dc55461f349258ea662eccf3bbd Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 18 Oct 2022 12:54:00 +0200 Subject: [PATCH 117/480] OP-3426 - update db asserts for simple Maya test --- tests/integration/hosts/maya/test_publish_in_maya.py | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/tests/integration/hosts/maya/test_publish_in_maya.py b/tests/integration/hosts/maya/test_publish_in_maya.py index 39b3f12263..a5a3e35b26 100644 --- a/tests/integration/hosts/maya/test_publish_in_maya.py +++ b/tests/integration/hosts/maya/test_publish_in_maya.py @@ -37,7 +37,7 
@@ class TestPublishInMaya(MayaLocalPublishTestClass): def test_db_asserts(self, dbcon, publish_finished): """Host and input data dependent expected results in DB.""" print("test_db_asserts") - assert 5 == dbcon.count_documents({"type": "version"}), \ + assert 2 == dbcon.count_documents({"type": "version"}), \ "Not expected no of versions" assert 0 == dbcon.count_documents({"type": "version", @@ -52,7 +52,7 @@ class TestPublishInMaya(MayaLocalPublishTestClass): "name": "workfileTest_task"}), \ "workfileTest_task subset must be present" - assert 11 == dbcon.count_documents({"type": "representation"}), \ + assert 5 == dbcon.count_documents({"type": "representation"}), \ "Not expected no of representations" assert 2 == dbcon.count_documents({"type": "representation", @@ -63,7 +63,12 @@ class TestPublishInMaya(MayaLocalPublishTestClass): assert 2 == dbcon.count_documents({"type": "representation", "context.subset": "modelMain", "context.ext": "ma"}), \ - "Not expected no of representations with ext 'abc'" + "Not expected no of representations with ext 'ma'" + + assert 1 == dbcon.count_documents({"type": "representation", + "context.subset": "workfileTest_task", # noqa + "context.ext": "mb"}), \ + "Not expected no of representations with ext 'mb'" if __name__ == "__main__": From eb772dd05af4bc29a947ec2e7249311aa871f285 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 18 Oct 2022 12:58:03 +0200 Subject: [PATCH 118/480] OP-3426 - update db asserts for PS Removed integrate of thumbnails by default, eg less representations. --- tests/integration/hosts/photoshop/test_publish_in_photoshop.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/integration/hosts/photoshop/test_publish_in_photoshop.py b/tests/integration/hosts/photoshop/test_publish_in_photoshop.py index 5387bbe51e..d4ab3e7734 100644 --- a/tests/integration/hosts/photoshop/test_publish_in_photoshop.py +++ b/tests/integration/hosts/photoshop/test_publish_in_photoshop.py @@ -72,7 +72,7 @@ class TestPublishInPhotoshop(PhotoshopTestClass): name="workfileTest_task")) failures.append( - DBAssert.count_of_types(dbcon, "representation", 8)) + DBAssert.count_of_types(dbcon, "representation", 6)) additional_args = {"context.subset": "imageMainForeground", "context.ext": "png"} From c27d8a26786fe393f6fe83f518497469c4e39ecf Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 18 Oct 2022 13:05:51 +0200 Subject: [PATCH 119/480] OP-3426 - refactor - changed style of db asserts for Maya --- .../maya/test_deadline_publish_in_maya.py | 85 ++++++++++--------- .../hosts/maya/test_publish_in_maya.py | 52 ++++++------ 2 files changed, 72 insertions(+), 65 deletions(-) diff --git a/tests/integration/hosts/maya/test_deadline_publish_in_maya.py b/tests/integration/hosts/maya/test_deadline_publish_in_maya.py index 5621a03833..7364945581 100644 --- a/tests/integration/hosts/maya/test_deadline_publish_in_maya.py +++ b/tests/integration/hosts/maya/test_deadline_publish_in_maya.py @@ -1,3 +1,4 @@ +from tests.lib.assert_classes import DBAssert from tests.integration.hosts.maya.lib import MayaDeadlinePublishTestClass @@ -36,57 +37,61 @@ class TestDeadlinePublishInMaya(MayaDeadlinePublishTestClass): def test_db_asserts(self, dbcon, publish_finished): """Host and input data dependent expected results in DB.""" print("test_db_asserts") - assert 3 == dbcon.count_documents({"type": "version"}), \ - "Not expected no of versions" + failures = [] + failures.append(DBAssert.count_of_types(dbcon, "version", 3)) - assert 0 == 
dbcon.count_documents({"type": "version", - "name": {"$ne": 1}}), \ - "Only versions with 1 expected" + failures.append( + DBAssert.count_of_types(dbcon, "version", 0, name={"$ne": 1})) - assert 1 == dbcon.count_documents({"type": "subset", - "name": "modelMain"}), \ - "modelMain subset must be present" + failures.append( + DBAssert.count_of_types(dbcon, "subset", 1, + name="modelMain")) - assert 1 == dbcon.count_documents({ - "type": "subset", "name": "renderTestTaskMain_beauty"}), \ - "renderTestTaskMain_beauty subset must be present" + failures.append( + DBAssert.count_of_types(dbcon, "subset", 1, + name="renderTestTaskMain_beauty")) - assert 1 == dbcon.count_documents({"type": "subset", - "name": "workfileTesttask"}), \ - "workfileTesttask subset must be present" + failures.append( + DBAssert.count_of_types(dbcon, "subset", 1, + name="workfileTesttask")) - assert 6 == dbcon.count_documents({"type": "representation"}), \ - "Not expected no of representations" + failures.append(DBAssert.count_of_types(dbcon, "representation", 6)) - assert 1 == dbcon.count_documents({"type": "representation", - "context.subset": "modelMain", - "context.ext": "abc"}), \ - "Not expected no of representations with ext 'abc'" + additional_args = {"context.subset": "modelMain", + "context.ext": "abc"} + failures.append( + DBAssert.count_of_types(dbcon, "representation", 1, + additional_args=additional_args)) - assert 1 == dbcon.count_documents({"type": "representation", - "context.subset": "modelMain", - "context.ext": "ma"}), \ - "Not expected no of representations with ext 'ma'" + additional_args = {"context.subset": "modelMain", + "context.ext": "ma"} + failures.append( + DBAssert.count_of_types(dbcon, "representation", 1, + additional_args=additional_args)) - assert 1 == dbcon.count_documents({"type": "representation", - "context.subset": "workfileTesttask", # noqa - "context.ext": "mb"}), \ - "Not expected no of representations with ext 'mb'" + additional_args = {"context.subset": "modelMain", + "context.ext": "mb"} + failures.append( + DBAssert.count_of_types(dbcon, "representation", 1, + additional_args=additional_args)) - assert 1 == dbcon.count_documents({"type": "representation", - "context.subset": "renderTestTaskMain_beauty", # noqa - "context.ext": "exr"}), \ - "Not expected no of representations with ext 'exr'" + additional_args = {"context.subset": "renderTestTaskMain_beauty", + "context.ext": "exr"} + failures.append( + DBAssert.count_of_types(dbcon, "representation", 1, + additional_args=additional_args)) - assert 1 == dbcon.count_documents({"type": "representation", - "context.subset": "renderTestTaskMain_beauty", # noqa - "context.ext": "jpg"}), \ - "Not expected no of representations with ext 'jpg'" + additional_args = {"context.subset": "renderTestTaskMain_beauty", + "context.ext": "jpg"} + failures.append( + DBAssert.count_of_types(dbcon, "representation", 1, + additional_args=additional_args)) - assert 1 == dbcon.count_documents({"type": "representation", - "context.subset": "renderTestTaskMain_beauty", # noqa - "context.ext": "h264_exr"}), \ - "Not expected no of representations with ext 'h264_exr'" + additional_args = {"context.subset": "renderTestTaskMain_beauty", + "context.ext": "h264_exr"} + failures.append( + DBAssert.count_of_types(dbcon, "representation", 1, + additional_args=additional_args)) if __name__ == "__main__": diff --git a/tests/integration/hosts/maya/test_publish_in_maya.py b/tests/integration/hosts/maya/test_publish_in_maya.py index a5a3e35b26..bff7ccb2f7 100644 --- 
a/tests/integration/hosts/maya/test_publish_in_maya.py +++ b/tests/integration/hosts/maya/test_publish_in_maya.py @@ -1,3 +1,4 @@ +from tests.lib.assert_classes import DBAssert from tests.integration.hosts.maya.lib import MayaLocalPublishTestClass @@ -37,38 +38,39 @@ class TestPublishInMaya(MayaLocalPublishTestClass): def test_db_asserts(self, dbcon, publish_finished): """Host and input data dependent expected results in DB.""" print("test_db_asserts") - assert 2 == dbcon.count_documents({"type": "version"}), \ - "Not expected no of versions" + failures = [] + failures.append(DBAssert.count_of_types(dbcon, "version", 2)) - assert 0 == dbcon.count_documents({"type": "version", - "name": {"$ne": 1}}), \ - "Only versions with 1 expected" + failures.append( + DBAssert.count_of_types(dbcon, "version", 0, name={"$ne": 1})) - assert 1 == dbcon.count_documents({"type": "subset", - "name": "modelMain"}), \ - "modelMain subset must be present" + failures.append( + DBAssert.count_of_types(dbcon, "subset", 1, + name="modelMain")) - assert 1 == dbcon.count_documents({"type": "subset", - "name": "workfileTest_task"}), \ - "workfileTest_task subset must be present" + failures.append( + DBAssert.count_of_types(dbcon, "subset", 1, + name="workfileTest_task")) - assert 5 == dbcon.count_documents({"type": "representation"}), \ - "Not expected no of representations" + failures.append(DBAssert.count_of_types(dbcon, "representation", 5)) - assert 2 == dbcon.count_documents({"type": "representation", - "context.subset": "modelMain", - "context.ext": "abc"}), \ - "Not expected no of representations with ext 'abc'" + additional_args = {"context.subset": "modelMain", + "context.ext": "abc"} + failures.append( + DBAssert.count_of_types(dbcon, "representation", 2, + additional_args=additional_args)) - assert 2 == dbcon.count_documents({"type": "representation", - "context.subset": "modelMain", - "context.ext": "ma"}), \ - "Not expected no of representations with ext 'ma'" + additional_args = {"context.subset": "modelMain", + "context.ext": "ma"} + failures.append( + DBAssert.count_of_types(dbcon, "representation", 2, + additional_args=additional_args)) - assert 1 == dbcon.count_documents({"type": "representation", - "context.subset": "workfileTest_task", # noqa - "context.ext": "mb"}), \ - "Not expected no of representations with ext 'mb'" + additional_args = {"context.subset": "workfileTest_task", + "context.ext": "mb"} + failures.append( + DBAssert.count_of_types(dbcon, "representation", 1, + additional_args=additional_args)) if __name__ == "__main__": From 0056be47b1c367a2fa9b99a94b2b1443a92e27a5 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 18 Oct 2022 13:23:22 +0200 Subject: [PATCH 120/480] OP-3426 - fix - wrong testing class used --- .../aftereffects/test_publish_in_aftereffects_multiframe.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/integration/hosts/aftereffects/test_publish_in_aftereffects_multiframe.py b/tests/integration/hosts/aftereffects/test_publish_in_aftereffects_multiframe.py index c882e0f9b2..dd61e72c6f 100644 --- a/tests/integration/hosts/aftereffects/test_publish_in_aftereffects_multiframe.py +++ b/tests/integration/hosts/aftereffects/test_publish_in_aftereffects_multiframe.py @@ -1,12 +1,12 @@ import logging from tests.lib.assert_classes import DBAssert -from tests.integration.hosts.aftereffects.lib import AfterEffectsTestClass +from tests.integration.hosts.aftereffects.lib import AELocalPublishTestClass log = 
logging.getLogger("test_publish_in_aftereffects") -class TestPublishInAfterEffects(AfterEffectsTestClass): +class TestPublishInAfterEffects(AELocalPublishTestClass): """Basic test case for publishing in AfterEffects Should publish 5 frames From 6528efe9c68916b21eddcfcaefd4f48aa3fe7c56 Mon Sep 17 00:00:00 2001 From: Milan Kolar Date: Tue, 18 Oct 2022 13:31:23 +0200 Subject: [PATCH 121/480] :bug: init globals --- tools/build.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tools/build.sh b/tools/build.sh index 8e65ba9a99..e32ac3f428 100755 --- a/tools/build.sh +++ b/tools/build.sh @@ -152,7 +152,7 @@ main () { openpype_root=$(dirname $(dirname "$(realpath ${BASH_SOURCE[0]})")) pushd "$openpype_root" > /dev/null || return > /dev/null - version_command="import os;import re;exec(open(os.path.join('$openpype_root', 'openpype', 'version.py')).read());print(re.search(r'(\d+\.\d+.\d+).*', version['__version__'])[1]);" + version_command="import os;import re;version={};exec(open(os.path.join('$openpype_root', 'openpype', 'version.py')).read(), version);print(re.search(r'(\d+\.\d+.\d+).*', version['__version__'])[1]);" openpype_version="$(python <<< ${version_command})" _inside_openpype_tool="1" From 01c99375eabe6a2b969bf7913b9726bd6b45f6b3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Tue, 18 Oct 2022 16:37:18 +0200 Subject: [PATCH 122/480] :sparkles: better error handling on linux/mac --- tools/build.sh | 19 +++++++++++-------- 1 file changed, 11 insertions(+), 8 deletions(-) diff --git a/tools/build.sh b/tools/build.sh index e32ac3f428..15fe2674af 100755 --- a/tools/build.sh +++ b/tools/build.sh @@ -181,7 +181,7 @@ if [ "$disable_submodule_update" == 1 ]; then echo -e "${BIYellow}***${RST} Not updating submodules ..." else echo -e "${BIGreen}>>>${RST} Making sure submodules are up-to-date ..." - git submodule update --init --recursive + git submodule update --init --recursive || { echo -e "${BIRed}!!!${RST} Poetry installation failed"; return 1; } fi echo -e "${BIGreen}>>>${RST} Building ..." if [[ "$OSTYPE" == "linux-gnu"* ]]; then @@ -189,26 +189,29 @@ if [ "$disable_submodule_update" == 1 ]; then elif [[ "$OSTYPE" == "darwin"* ]]; then "$POETRY_HOME/bin/poetry" run python "$openpype_root/setup.py" bdist_mac &> "$openpype_root/build/build.log" || { echo -e "${BIRed}------------------------------------------${RST}"; cat "$openpype_root/build/build.log"; echo -e "${BIRed}------------------------------------------${RST}"; echo -e "${BIRed}!!!${RST} Build failed, see the build log."; return 1; } fi - "$POETRY_HOME/bin/poetry" run python "$openpype_root/tools/build_dependencies.py" + "$POETRY_HOME/bin/poetry" run python "$openpype_root/tools/build_dependencies.py" || { echo -e "${BIRed}!!!>${RST} ${BIYellow}Failed to process dependencies${RST}"; return 1; } if [[ "$OSTYPE" == "darwin"* ]]; then # fix cx_Freeze libs issue echo -e "${BIGreen}>>>${RST} Fixing libs ..." - mv "$openpype_root/build/OpenPype $openpype_version.app/Contents/MacOS/dependencies/cx_Freeze" "$openpype_root/build/OpenPype $openpype_version.app/Contents/MacOS/lib/" + mv "$openpype_root/build/OpenPype $openpype_version.app/Contents/MacOS/dependencies/cx_Freeze" "$openpype_root/build/OpenPype $openpype_version.app/Contents/MacOS/lib/" || { echo -e "${BIRed}!!!>${RST} ${BIYellow}Can't move cx_Freeze libs${RST}"; return 1; } + # fix code signing issue - echo -e "${BIGreen}>>>${RST} Fixing code signatures ..." 
- codesign --remove-signature "$openpype_root/build/OpenPype $openpype_version.app/Contents/MacOS/openpype_console" - codesign --remove-signature "$openpype_root/build/OpenPype $openpype_version.app/Contents/MacOS/openpype_gui" - # codesign --remove-signature "$openpype_root/build/OpenPype $openpype_version.app/Contents/MacOS/lib/Python" + echo -e "${BIGreen}>>>${RST} Fixing code signatures ...\c" + codesign --remove-signature "$openpype_root/build/OpenPype $openpype_version.app/Contents/MacOS/openpype_console" || { echo -e "${BIRed}FAILED{$RST}"; return 1 } + codesign --remove-signature "$openpype_root/build/OpenPype $openpype_version.app/Contents/MacOS/openpype_gui" || { echo -e "${BIRed}FAILED{$RST}"; return 1 } + echo -e "${BIGreen}DONE${RST}" if command -v create-dmg > /dev/null 2>&1; then + echo -e "${BIGreen}>>>${RST} Creating dmg image ...\c" create-dmg \ --volname "OpenPype $openpype_version Installer" \ --window-pos 200 120 \ --window-size 600 300 \ --app-drop-link 100 50 \ "$openpype_root/build/OpenPype-Installer-$openpype_version.dmg" \ - "$openpype_root/build/OpenPype $openpype_version.app" + "$openpype_root/build/OpenPype $openpype_version.app" || { echo -e "${BIRed}FAILED{$RST}"; return 1 } + echo -e "${BIGreen}DONE${RST}" else echo -e "${BIYellow}!!!${RST} ${BIWhite}create-dmg${RST} command is not available." fi From 74fae962e4fd4ccaffb3343bafc404c4ccdeae17 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Tue, 18 Oct 2022 16:59:54 +0200 Subject: [PATCH 123/480] :bug: fix syntax --- tools/build.sh | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/tools/build.sh b/tools/build.sh index 15fe2674af..753a9c55b8 100755 --- a/tools/build.sh +++ b/tools/build.sh @@ -199,8 +199,8 @@ if [ "$disable_submodule_update" == 1 ]; then # fix code signing issue echo -e "${BIGreen}>>>${RST} Fixing code signatures ...\c" - codesign --remove-signature "$openpype_root/build/OpenPype $openpype_version.app/Contents/MacOS/openpype_console" || { echo -e "${BIRed}FAILED{$RST}"; return 1 } - codesign --remove-signature "$openpype_root/build/OpenPype $openpype_version.app/Contents/MacOS/openpype_gui" || { echo -e "${BIRed}FAILED{$RST}"; return 1 } + codesign --remove-signature "$openpype_root/build/OpenPype $openpype_version.app/Contents/MacOS/openpype_console" || { echo -e "${BIRed}FAILED${RST}"; return 1; } + codesign --remove-signature "$openpype_root/build/OpenPype $openpype_version.app/Contents/MacOS/openpype_gui" || { echo -e "${BIRed}FAILED${RST}"; return 1; } echo -e "${BIGreen}DONE${RST}" if command -v create-dmg > /dev/null 2>&1; then echo -e "${BIGreen}>>>${RST} Creating dmg image ...\c" @@ -210,8 +210,10 @@ if [ "$disable_submodule_update" == 1 ]; then --window-size 600 300 \ --app-drop-link 100 50 \ "$openpype_root/build/OpenPype-Installer-$openpype_version.dmg" \ - "$openpype_root/build/OpenPype $openpype_version.app" || { echo -e "${BIRed}FAILED{$RST}"; return 1 } - echo -e "${BIGreen}DONE${RST}" + "$openpype_root/build/OpenPype $openpype_version.app" + + test $? -eq 0 || { echo -e "${BIRed}FAILED${RST}"; return 1; } + echo -e "${BIGreen}DONE${RST}" else echo -e "${BIYellow}!!!${RST} ${BIWhite}create-dmg${RST} command is not available." 
fi From b98e03726df930ed2b4043a547cb49e222c16300 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 19 Oct 2022 12:27:24 +0200 Subject: [PATCH 124/480] OP-4180 - renamed DL testing class for Nuke Updated test scene name --- tests/integration/hosts/nuke/lib.py | 2 +- ...ublish_in_nuke.py => test_deadline_publish_in_nuke.py} | 8 ++++++++ tests/integration/hosts/nuke/test_publish_in_nuke.py | 8 ++++++++ 3 files changed, 17 insertions(+), 1 deletion(-) rename tests/integration/hosts/nuke/{test_deadine_publish_in_nuke.py => test_deadline_publish_in_nuke.py} (90%) diff --git a/tests/integration/hosts/nuke/lib.py b/tests/integration/hosts/nuke/lib.py index e3c610199b..ed2af38d1c 100644 --- a/tests/integration/hosts/nuke/lib.py +++ b/tests/integration/hosts/nuke/lib.py @@ -15,7 +15,7 @@ class NukeHostFixtures(HostFixtures): """Get last_workfile_path from source data. """ - source_file_name = "test_project_test_asset_CompositingInNuke_v001.nk" + source_file_name = "test_project_test_asset_TestTask_v001.nk" src_path = os.path.join(download_test_data, "input", "workfile", diff --git a/tests/integration/hosts/nuke/test_deadine_publish_in_nuke.py b/tests/integration/hosts/nuke/test_deadline_publish_in_nuke.py similarity index 90% rename from tests/integration/hosts/nuke/test_deadine_publish_in_nuke.py rename to tests/integration/hosts/nuke/test_deadline_publish_in_nuke.py index ddeae4bfa0..98b463b9ae 100644 --- a/tests/integration/hosts/nuke/test_deadine_publish_in_nuke.py +++ b/tests/integration/hosts/nuke/test_deadline_publish_in_nuke.py @@ -12,6 +12,14 @@ class TestDeadlinePublishInNuke(NukeDeadlinePublishTestClass): Uses generic TestCase to prepare fixtures for test data, testing DBs, env vars. + !!! + It expects modified path in WriteNode, + use '[python {nuke.script_directory()}]' instead of regular root + dir (eg. instead of `c:/projects`). + Access file path by selecting WriteNode group, CTRL+Enter, update file + input + !!! + Opens Nuke, run publish on prepared workile. Then checks content of DB (if subset, version, representations were diff --git a/tests/integration/hosts/nuke/test_publish_in_nuke.py b/tests/integration/hosts/nuke/test_publish_in_nuke.py index 88defdc37a..a3947a30ae 100644 --- a/tests/integration/hosts/nuke/test_publish_in_nuke.py +++ b/tests/integration/hosts/nuke/test_publish_in_nuke.py @@ -12,6 +12,14 @@ class TestPublishInNuke(NukeLocalPublishTestClass): Uses generic TestCase to prepare fixtures for test data, testing DBs, env vars. + !!! + It expects modified path in WriteNode, + use '[python {nuke.script_directory()}]' instead of regular root + dir (eg. instead of `c:/projects/test_project/test_asset/test_task`). + Access file path by selecting WriteNode group, CTRL+Enter, update file + input + !!! + Opens Nuke, run publish on prepared workile. 
Then checks content of DB (if subset, version, representations were From 29ca3f85566fb321a4bbc531d2a64fda4b2dc989 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 19 Oct 2022 12:29:20 +0200 Subject: [PATCH 125/480] OP-4180 - change output folder It should be in `output` subfolder, not in root --- tests/integration/hosts/aftereffects/lib.py | 1 + tests/integration/hosts/maya/lib.py | 1 + tests/lib/testing_classes.py | 3 ++- 3 files changed, 4 insertions(+), 1 deletion(-) diff --git a/tests/integration/hosts/aftereffects/lib.py b/tests/integration/hosts/aftereffects/lib.py index ca637edba6..0f7513c7d3 100644 --- a/tests/integration/hosts/aftereffects/lib.py +++ b/tests/integration/hosts/aftereffects/lib.py @@ -21,6 +21,7 @@ class AEHostFixtures(HostFixtures): "workfile", "test_project_test_asset_TestTask_v001.aep") dest_folder = os.path.join(download_test_data, + "output", self.PROJECT, self.ASSET, "work", diff --git a/tests/integration/hosts/maya/lib.py b/tests/integration/hosts/maya/lib.py index 6610fac118..a0c5214181 100644 --- a/tests/integration/hosts/maya/lib.py +++ b/tests/integration/hosts/maya/lib.py @@ -21,6 +21,7 @@ class MayaHostFixtures(HostFixtures): "workfile", "test_project_test_asset_TestTask_v001.mb") dest_folder = os.path.join(output_folder_url, + "output", self.PROJECT, self.ASSET, "work", diff --git a/tests/lib/testing_classes.py b/tests/lib/testing_classes.py index 53eeae10cb..f0899e3e18 100644 --- a/tests/lib/testing_classes.py +++ b/tests/lib/testing_classes.py @@ -317,7 +317,7 @@ class PublishTest(ModuleUnitTest): Compares only presence, not size nor content! """ - published_dir_base = output_folder_url + published_dir_base = os.path.join(output_folder_url, "output") expected_dir_base = os.path.join(download_test_data, "expected") @@ -351,6 +351,7 @@ class DeadlinePublishTest(PublishTest): raise ValueError("Timeout reached") metadata_json = glob.glob(os.path.join(download_test_data, + "output", "**/*_metadata.json"), recursive=True) if not metadata_json: From 76de4e4bbba887b3f8fa96a950d51a26cadb4d45 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 19 Oct 2022 13:39:57 +0200 Subject: [PATCH 126/480] OP-3426 - added support for automatic tests into GlobalJobPreLoad Jobs sent to DL must propagate flag IS_TEST to note that job is an automatic tests, it should use different DBs from regular jobs. 
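A minimal sketch of how the flag is expected to travel with the job (illustrative helper, not the exact submitter code; Deadline reads EnvironmentKeyValue0..N entries from the JobInfo block, which is the pattern the submitters below use):

import os

def build_job_environment(keys):
    """Collect selected environment variables as Deadline JobInfo entries."""
    # Keep only the variables that are actually set in the submitting process.
    environment = {key: os.environ[key] for key in keys if os.environ.get(key)}
    return {
        "EnvironmentKeyValue{}".format(index): "{}={}".format(key, value)
        for index, (key, value) in enumerate(environment.items())
    }

# "IS_TEST" is appended to the usual context variables so GlobalJobPreLoad
# can detect an automatic test run on the worker and switch databases.
job_info_env = build_job_environment(
    ["AVALON_PROJECT", "AVALON_ASSET", "AVALON_TASK", "OPENPYPE_VERSION", "IS_TEST"])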
--- igniter/tools.py | 3 ++- .../publish/submit_aftereffects_deadline.py | 3 ++- .../plugins/publish/submit_harmony_deadline.py | 3 ++- .../plugins/publish/submit_maya_deadline.py | 3 ++- .../plugins/publish/submit_publish_job.py | 5 ++++- .../custom/plugins/GlobalJobPreLoad.py | 3 +++ start.py | 16 ++++++++++++++-- .../hosts/maya/test_deadline_publish_in_maya.py | 2 +- 8 files changed, 30 insertions(+), 8 deletions(-) diff --git a/igniter/tools.py b/igniter/tools.py index a9d592acf0..bd812edb92 100644 --- a/igniter/tools.py +++ b/igniter/tools.py @@ -153,7 +153,8 @@ def get_openpype_global_settings(url: str) -> dict: # Create mongo connection client = MongoClient(url, **kwargs) # Access settings collection - col = client["openpype"]["settings"] + openpype_db = os.environ.get("OPENPYPE_DATABASE_NAME") or "openpype" + col = client[openpype_db]["settings"] # Query global settings global_settings = col.find_one({"type": "global_settings"}) or {} # Close Mongo connection diff --git a/openpype/modules/deadline/plugins/publish/submit_aftereffects_deadline.py b/openpype/modules/deadline/plugins/publish/submit_aftereffects_deadline.py index 0c1ffa6bd7..bdf492bdcb 100644 --- a/openpype/modules/deadline/plugins/publish/submit_aftereffects_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_aftereffects_deadline.py @@ -83,7 +83,8 @@ class AfterEffectsSubmitDeadline( "AVALON_APP_NAME", "OPENPYPE_DEV", "OPENPYPE_LOG_NO_COLORS", - "OPENPYPE_VERSION" + "OPENPYPE_VERSION", + "IS_TEST" ] # Add mongo url if it's enabled if self._instance.context.data.get("deadlinePassMongoUrl"): diff --git a/openpype/modules/deadline/plugins/publish/submit_harmony_deadline.py b/openpype/modules/deadline/plugins/publish/submit_harmony_deadline.py index 6327143623..b90c41d67b 100644 --- a/openpype/modules/deadline/plugins/publish/submit_harmony_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_harmony_deadline.py @@ -275,7 +275,8 @@ class HarmonySubmitDeadline( "AVALON_APP_NAME", "OPENPYPE_DEV", "OPENPYPE_LOG_NO_COLORS", - "OPENPYPE_VERSION" + "OPENPYPE_VERSION", + "IS_TEST" ] # Add mongo url if it's enabled if self._instance.context.data.get("deadlinePassMongoUrl"): diff --git a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py index c669f9a814..315c9461c2 100644 --- a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py @@ -158,7 +158,8 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): "AVALON_TASK", "AVALON_APP_NAME", "OPENPYPE_DEV", - "OPENPYPE_VERSION" + "OPENPYPE_VERSION", + "IS_TEST" ] # Add mongo url if it's enabled if self._instance.context.data.get("deadlinePassMongoUrl"): diff --git a/openpype/modules/deadline/plugins/publish/submit_publish_job.py b/openpype/modules/deadline/plugins/publish/submit_publish_job.py index 0e418ccf77..e67b1cd00e 100644 --- a/openpype/modules/deadline/plugins/publish/submit_publish_job.py +++ b/openpype/modules/deadline/plugins/publish/submit_publish_job.py @@ -142,7 +142,9 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin): "OPENPYPE_RENDER_JOB", "OPENPYPE_PUBLISH_JOB", "OPENPYPE_MONGO", - "OPENPYPE_VERSION" + "OPENPYPE_VERSION", + + "IS_TEST" ] # custom deadline attributes @@ -247,6 +249,7 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin): environment["OPENPYPE_USERNAME"] = instance.context.data["user"] 
environment["OPENPYPE_PUBLISH_JOB"] = "1" environment["OPENPYPE_RENDER_JOB"] = "0" + environment["IS_TEST"] = os.environ.get("IS_TEST") # Add mongo url if it's enabled if instance.context.data.get("deadlinePassMongoUrl"): mongo_url = os.environ.get("OPENPYPE_MONGO") diff --git a/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py b/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py index 61b95cf06d..745a91ba81 100644 --- a/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py +++ b/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py @@ -176,6 +176,9 @@ def inject_openpype_environment(deadlinePlugin): add_args['app'] = job.GetJobEnvironmentKeyValue('AVALON_APP_NAME') add_args["envgroup"] = "farm" + if job.GetJobEnvironmentKeyValue('IS_TEST'): + add_args["automatic_tests"] = "true" + if all(add_args.values()): for key, value in add_args.items(): args.append("--{}".format(key)) diff --git a/start.py b/start.py index d1198a85e4..1d14a7def8 100644 --- a/start.py +++ b/start.py @@ -486,6 +486,7 @@ def _process_arguments() -> tuple: use_version = None use_staging = False commands = [] + automatic_tests = False # OpenPype version specification through arguments use_version_arg = "--use-version" @@ -570,7 +571,11 @@ def _process_arguments() -> tuple: sys.argv.pop(idx) sys.argv.insert(idx, "tray") - return use_version, use_staging, commands + if "--automatic_tests" in sys.argv: + sys.argv.remove("--automatic_tests") + automatic_tests = True + + return use_version, use_staging, commands, automatic_tests def _determine_mongodb() -> str: @@ -997,7 +1002,7 @@ def boot(): # Process arguments # ------------------------------------------------------------------------ - use_version, use_staging, commands = _process_arguments() + use_version, use_staging, commands, automatic_tests = _process_arguments() if os.getenv("OPENPYPE_VERSION"): if use_version: @@ -1024,6 +1029,13 @@ def boot(): os.environ["OPENPYPE_DATABASE_NAME"] = \ os.environ.get("OPENPYPE_DATABASE_NAME") or "openpype" + if automatic_tests: + # change source DBs to predefined ones set for automatic testing + os.environ["IS_TEST"] = "1" + os.environ["OPENPYPE_DATABASE_NAME"] += "_tests" + avalon_db = os.environ.get("AVALON_DB") or "avalon" + os.environ["AVALON_DB"] = avalon_db + "_tests" + global_settings = get_openpype_global_settings(openpype_mongo) _print(">>> run disk mapping command ...") diff --git a/tests/integration/hosts/maya/test_deadline_publish_in_maya.py b/tests/integration/hosts/maya/test_deadline_publish_in_maya.py index 7364945581..16ff48d7fd 100644 --- a/tests/integration/hosts/maya/test_deadline_publish_in_maya.py +++ b/tests/integration/hosts/maya/test_deadline_publish_in_maya.py @@ -21,7 +21,7 @@ class TestDeadlinePublishInMaya(MayaDeadlinePublishTestClass): {OPENPYPE_ROOT}/.venv/Scripts/python.exe {OPENPYPE_ROOT}/start.py runtests ../tests/integration/hosts/maya # noqa: E501 """ - PERSIST = False + PERSIST = True TEST_FILES = [ ("1dDY7CbdFXfRksGVoiuwjhnPoTRCCf5ea", From 5a885ac2a7cb777e6e6f5836c31c1bdfaa04b3bb Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 19 Oct 2022 13:50:41 +0200 Subject: [PATCH 127/480] OP-3426 - changed source DBs It might be better to enhance testing DBs with _tests suffix and control it with single argument (or env var). 
--- tests/lib/testing_classes.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/lib/testing_classes.py b/tests/lib/testing_classes.py index f0899e3e18..946ad67197 100644 --- a/tests/lib/testing_classes.py +++ b/tests/lib/testing_classes.py @@ -38,9 +38,9 @@ class ModuleUnitTest(BaseTest): PERSIST = False # True to not purge temporary folder nor test DB TEST_OPENPYPE_MONGO = "mongodb://localhost:27017" - TEST_DB_NAME = "test_db" + TEST_DB_NAME = "avalon_tests" TEST_PROJECT_NAME = "test_project" - TEST_OPENPYPE_NAME = "test_openpype" + TEST_OPENPYPE_NAME = "openpype_tests" TEST_FILES = [] From fde0a02621fb98e89183ef6275f70dd3f57b1b96 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 19 Oct 2022 14:44:36 +0200 Subject: [PATCH 128/480] OP-3426 - add automatic_tests argument to cli for extractenvironments --- openpype/cli.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/openpype/cli.py b/openpype/cli.py index 398d1a94c0..5f0fd1de7b 100644 --- a/openpype/cli.py +++ b/openpype/cli.py @@ -127,7 +127,11 @@ def webpublisherwebserver(executable, upload_dir, host=None, port=None): @click.option( "--envgroup", help="Environment group (e.g. \"farm\")", default=None ) -def extractenvironments(output_json_path, project, asset, task, app, envgroup): +@click.option( + "--automatic_tests", help="Is this automatic test", default=None +) +def extractenvironments(output_json_path, project, asset, task, app, envgroup, + automatic_tests): """Extract environment variables for entered context to a json file. Entered output filepath will be created if does not exists. From 8f3b6e5c0e98f822f7c7b53e3361ae245ff89f2b Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 19 Oct 2022 15:15:03 +0200 Subject: [PATCH 129/480] OP-3426 - don't remove from sys.args Cli should have automatic_tests argument even if it is not pushed through, just for better documentation of command with --help --- start.py | 1 - 1 file changed, 1 deletion(-) diff --git a/start.py b/start.py index 1d14a7def8..5242a50f14 100644 --- a/start.py +++ b/start.py @@ -572,7 +572,6 @@ def _process_arguments() -> tuple: sys.argv.insert(idx, "tray") if "--automatic_tests" in sys.argv: - sys.argv.remove("--automatic_tests") automatic_tests = True return use_version, use_staging, commands, automatic_tests From f501c63369dfa584dbd25aca09845bd71d312dc2 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 19 Oct 2022 15:42:34 +0200 Subject: [PATCH 130/480] OP-3426 - propagate automatic_tests for publish process on DL --- openpype/cli.py | 5 ++++- openpype/hosts/nuke/api/plugin.py | 2 ++ .../deadline/plugins/publish/submit_publish_job.py | 3 +++ tests/lib/db_handler.py | 8 ++++---- 4 files changed, 13 insertions(+), 5 deletions(-) diff --git a/openpype/cli.py b/openpype/cli.py index 5f0fd1de7b..add9d9d36e 100644 --- a/openpype/cli.py +++ b/openpype/cli.py @@ -152,7 +152,10 @@ def extractenvironments(output_json_path, project, asset, task, app, envgroup, multiple=True) @click.option("-g", "--gui", is_flag=True, help="Show Publish UI", default=False) -def publish(paths, targets, gui): +@click.option( + "--automatic_tests", help="Is this automatic test", default=None +) +def publish(paths, targets, gui, automatic_tests): """Start CLI publishing. Publish collects json from paths provided as an argument. 
diff --git a/openpype/hosts/nuke/api/plugin.py b/openpype/hosts/nuke/api/plugin.py index 91bb90ff99..a7b0efabee 100644 --- a/openpype/hosts/nuke/api/plugin.py +++ b/openpype/hosts/nuke/api/plugin.py @@ -409,6 +409,8 @@ class ExporterReviewMov(ExporterReview): # create nk path path = os.path.splitext(self.path)[0] + ".nk" # save file to the path + if not os.path.exists(os.path.dirname(path)): + os.makedirs(os.path.dirname(path)) shutil.copyfile(self.instance.context.data["currentFile"], path) self.log.info("Nodes exported...") diff --git a/openpype/modules/deadline/plugins/publish/submit_publish_job.py b/openpype/modules/deadline/plugins/publish/submit_publish_job.py index e67b1cd00e..ab3cd09179 100644 --- a/openpype/modules/deadline/plugins/publish/submit_publish_job.py +++ b/openpype/modules/deadline/plugins/publish/submit_publish_job.py @@ -266,6 +266,9 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin): "--targets", "farm" ] + if os.environ.get("IS_TEST"): + args["automatic_tests"] = "1" + # Generate the payload for Deadline submission payload = { "JobInfo": { diff --git a/tests/lib/db_handler.py b/tests/lib/db_handler.py index 108ebf560d..ae460e310b 100644 --- a/tests/lib/db_handler.py +++ b/tests/lib/db_handler.py @@ -228,12 +228,12 @@ class DBHandler: return query # Examples -# handler = DBHandler(uri="mongodb://localhost:27017") +handler = DBHandler(uri="mongodb://localhost:27017") # # -# backup_dir = "c:\\projects\\test_nuke_publish\\input\\dumps" +backup_dir = "c:\\projects\\test_zips\\test_maya_publish\\input\\dumps" # # # -# handler.backup_to_dump("avalon", backup_dir, True, collection="test_project") -# handler.setup_from_dump("test_db", backup_dir, True, db_name_out="avalon", collection="test_project") +#handler.backup_to_dump("avalon", backup_dir, True, collection="test_project") +handler.setup_from_dump("test_db", backup_dir, True, db_name_out="avalon", collection="test_project") # handler.setup_from_sql_file("test_db", "c:\\projects\\sql\\item.sql", # collection="test_project", # drop=False, mode="upsert") From b996acaf9c82d23bfdc5d31fd499af7254a9485d Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 19 Oct 2022 15:44:50 +0200 Subject: [PATCH 131/480] OP-3426 - disable unwanted commands Enabling only for test preparation. 
Updated test_db to currently used avalon_tests --- tests/lib/db_handler.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/tests/lib/db_handler.py b/tests/lib/db_handler.py index ae460e310b..b80a296b60 100644 --- a/tests/lib/db_handler.py +++ b/tests/lib/db_handler.py @@ -228,15 +228,15 @@ class DBHandler: return query # Examples -handler = DBHandler(uri="mongodb://localhost:27017") +# handler = DBHandler(uri="mongodb://localhost:27017") # # -backup_dir = "c:\\projects\\test_zips\\test_maya_publish\\input\\dumps" +# backup_dir = "c:\\projects\\test_zips\\test_maya_publish\\input\\dumps" # # # -#handler.backup_to_dump("avalon", backup_dir, True, collection="test_project") -handler.setup_from_dump("test_db", backup_dir, True, db_name_out="avalon", collection="test_project") -# handler.setup_from_sql_file("test_db", "c:\\projects\\sql\\item.sql", +# handler.backup_to_dump("avalon", backup_dir, True, collection="test_project") +# handler.setup_from_dump("avalon_tests", backup_dir, True, db_name_out="avalon", collection="test_project") +# handler.setup_from_sql_file("avalon_tests", "c:\\projects\\sql\\item.sql", # collection="test_project", # drop=False, mode="upsert") -# handler.setup_from_sql("test_db", "c:\\projects\\sql", +# handler.setup_from_sql("avalon_tests", "c:\\projects\\sql", # collection="test_project", # drop=False, mode="upsert") From e9dfcf64bb8dc646fea107c18f5eb3bcef634a63 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 19 Oct 2022 16:07:57 +0200 Subject: [PATCH 132/480] OP-3426 - added date string to batch name It would be better to have separate batches for automatic tests than single batch with a lot of runs. --- .../plugins/publish/submit_aftereffects_deadline.py | 7 +++++-- .../plugins/publish/submit_harmony_deadline.py | 6 +++++- .../plugins/publish/submit_houdini_remote_publish.py | 3 +++ .../plugins/publish/submit_houdini_render_deadline.py | 4 ++++ .../deadline/plugins/publish/submit_maya_deadline.py | 3 +++ .../publish/submit_maya_remote_publish_deadline.py | 3 +++ .../deadline/plugins/publish/submit_nuke_deadline.py | 10 +++++++--- 7 files changed, 30 insertions(+), 6 deletions(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_aftereffects_deadline.py b/openpype/modules/deadline/plugins/publish/submit_aftereffects_deadline.py index bdf492bdcb..c6b94c2ce8 100644 --- a/openpype/modules/deadline/plugins/publish/submit_aftereffects_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_aftereffects_deadline.py @@ -2,6 +2,7 @@ import os import attr import getpass import pyblish.api +from datetime import datetime from openpype.lib import ( env_value_to_bool, @@ -48,9 +49,11 @@ class AfterEffectsSubmitDeadline( context = self._instance.context + batch_name = os.path.basename(self._instance.data["source"]) + if os.environ.get("IS_TEST"): + batch_name += datetime.now().strftime("%d%m%Y%H%M%S") dln_job_info.Name = self._instance.data["name"] - dln_job_info.BatchName = os.path.basename(self._instance. 
- data["source"]) + dln_job_info.BatchName = batch_name dln_job_info.Plugin = "AfterEffects" dln_job_info.UserName = context.data.get( "deadlineUser", getpass.getuser()) diff --git a/openpype/modules/deadline/plugins/publish/submit_harmony_deadline.py b/openpype/modules/deadline/plugins/publish/submit_harmony_deadline.py index b90c41d67b..f98649cb1a 100644 --- a/openpype/modules/deadline/plugins/publish/submit_harmony_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_harmony_deadline.py @@ -5,6 +5,7 @@ from pathlib import Path from collections import OrderedDict from zipfile import ZipFile, is_zipfile import re +from datetime import datetime import attr import pyblish.api @@ -261,7 +262,10 @@ class HarmonySubmitDeadline( job_info.Pool = self._instance.data.get("primaryPool") job_info.SecondaryPool = self._instance.data.get("secondaryPool") job_info.ChunkSize = self.chunk_size - job_info.BatchName = os.path.basename(self._instance.data["source"]) + batch_name = os.path.basename(self._instance.data["source"]) + if os.environ.get("IS_TEST"): + batch_name += datetime.now().strftime("%d%m%Y%H%M%S") + job_info.BatchName = batch_name job_info.Department = self.department job_info.Group = self.group diff --git a/openpype/modules/deadline/plugins/publish/submit_houdini_remote_publish.py b/openpype/modules/deadline/plugins/publish/submit_houdini_remote_publish.py index 95856137e2..dbd8645817 100644 --- a/openpype/modules/deadline/plugins/publish/submit_houdini_remote_publish.py +++ b/openpype/modules/deadline/plugins/publish/submit_houdini_remote_publish.py @@ -1,5 +1,6 @@ import os import json +from datetime import datetime import requests import hou @@ -60,6 +61,8 @@ class HoudiniSubmitPublishDeadline(pyblish.api.ContextPlugin): job_name = "{scene} [PUBLISH]".format(scene=scenename) batch_name = "{code} - {scene}".format(code=code, scene=scenename) + if os.environ.get("IS_TEST"): + batch_name += datetime.now().strftime("%d%m%Y%H%M%S") deadline_user = "roy" # todo: get deadline user dynamically # Get only major.minor version of Houdini, ignore patch version diff --git a/openpype/modules/deadline/plugins/publish/submit_houdini_render_deadline.py b/openpype/modules/deadline/plugins/publish/submit_houdini_render_deadline.py index beda753723..59472fdd54 100644 --- a/openpype/modules/deadline/plugins/publish/submit_houdini_render_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_houdini_render_deadline.py @@ -1,6 +1,7 @@ import os import json import getpass +from datetime import datetime import requests import pyblish.api @@ -45,6 +46,9 @@ class HoudiniSubmitRenderDeadline(pyblish.api.InstancePlugin): if code: batch_name = "{0} - {1}".format(code, batch_name) + if os.environ.get("IS_TEST"): + batch_name += datetime.now().strftime("%d%m%Y%H%M%S") + # Output driver to render driver = instance[0] diff --git a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py index 315c9461c2..ca3c765c65 100644 --- a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py @@ -118,6 +118,9 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): src_filepath = context.data["currentFile"] src_filename = os.path.basename(src_filepath) + if os.environ.get("IS_TEST"): + src_filename += datetime.now().strftime("%d%m%Y%H%M%S") + job_info.Name = "%s - %s" % (src_filename, instance.name) job_info.BatchName = 
src_filename job_info.Plugin = instance.data.get("mayaRenderPlugin", "MayaBatch") diff --git a/openpype/modules/deadline/plugins/publish/submit_maya_remote_publish_deadline.py b/openpype/modules/deadline/plugins/publish/submit_maya_remote_publish_deadline.py index 38ae5d2f7f..1023966af8 100644 --- a/openpype/modules/deadline/plugins/publish/submit_maya_remote_publish_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_maya_remote_publish_deadline.py @@ -1,5 +1,6 @@ import os import requests +from datetime import datetime from maya import cmds @@ -57,6 +58,8 @@ class MayaSubmitRemotePublishDeadline(pyblish.api.InstancePlugin): job_name = "{scene} [PUBLISH]".format(scene=scenename) batch_name = "{code} - {scene}".format(code=project_name, scene=scenename) + if os.environ.get("IS_TEST"): + batch_name += datetime.now().strftime("%d%m%Y%H%M%S") # Generate the payload for Deadline submission payload = { diff --git a/openpype/modules/deadline/plugins/publish/submit_nuke_deadline.py b/openpype/modules/deadline/plugins/publish/submit_nuke_deadline.py index b09d2935ab..44abf12ef4 100644 --- a/openpype/modules/deadline/plugins/publish/submit_nuke_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_nuke_deadline.py @@ -2,6 +2,7 @@ import os import re import json import getpass +from datetime import datetime import requests import pyblish.api @@ -141,8 +142,11 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin): responce_data=None ): render_dir = os.path.normpath(os.path.dirname(render_path)) - script_name = os.path.basename(script_path) - jobname = "%s - %s" % (script_name, instance.name) + batch_name = os.path.basename(script_path) + jobname = "%s - %s" % (batch_name, instance.name) + if os.environ.get("IS_TEST"): + batch_name += datetime.now().strftime("%d%m%Y%H%M%S") + output_filename_0 = self.preview_fname(render_path) @@ -176,7 +180,7 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin): payload = { "JobInfo": { # Top-level group name - "BatchName": script_name, + "BatchName": batch_name, # Asset dependency to wait for at least the scene file to sync. 
# "AssetDependency0": script_path, From 63c053f6b8fe2a59e02ce916945e52e7427f9606 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= <33513211+antirotor@users.noreply.github.com> Date: Wed, 19 Oct 2022 16:28:32 +0200 Subject: [PATCH 133/480] :green_heart: update python version --- .github/workflows/test_build.yml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/workflows/test_build.yml b/.github/workflows/test_build.yml index ac7279117a..0e6c242bd6 100644 --- a/.github/workflows/test_build.yml +++ b/.github/workflows/test_build.yml @@ -18,7 +18,7 @@ jobs: runs-on: windows-latest strategy: matrix: - python-version: [3.7] + python-version: [3.9] steps: - name: 🚛 Checkout Code @@ -45,7 +45,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python-version: [3.7] + python-version: [3.9] steps: - name: 🚛 Checkout Code @@ -70,7 +70,7 @@ jobs: # runs-on: macos-latest # strategy: # matrix: - # python-version: [3.7] + # python-version: [3.9] # steps: # - name: 🚛 Checkout Code @@ -87,4 +87,4 @@ jobs: # - name: 🔨 Build # run: | - # ./tools/build.sh \ No newline at end of file + # ./tools/build.sh From d6bbc95786092a81ca438a3489cf758bc82d7107 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 19 Oct 2022 16:21:34 +0200 Subject: [PATCH 134/480] OP-3426 - fix - wrong format of args --- openpype/modules/deadline/plugins/publish/submit_publish_job.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_publish_job.py b/openpype/modules/deadline/plugins/publish/submit_publish_job.py index ab3cd09179..32ad22f6b0 100644 --- a/openpype/modules/deadline/plugins/publish/submit_publish_job.py +++ b/openpype/modules/deadline/plugins/publish/submit_publish_job.py @@ -267,7 +267,7 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin): ] if os.environ.get("IS_TEST"): - args["automatic_tests"] = "1" + args.extend(["--automatic_tests", "1"]) # Generate the payload for Deadline submission payload = { From ecbd54fa834ddf6d4be844471edfae64d71b2cd9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= <33513211+antirotor@users.noreply.github.com> Date: Wed, 19 Oct 2022 16:30:14 +0200 Subject: [PATCH 135/480] :green_heart: update python --- .github/workflows/release.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 85864b4442..ad4139eb6f 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -19,7 +19,7 @@ jobs: - name: Set up Python uses: actions/setup-python@v2 with: - python-version: 3.7 + python-version: 3.9 - name: Install Python requirements run: pip install gitpython semver PyGithub @@ -121,4 +121,4 @@ jobs: github_token: ${{ secrets.ADMIN_TOKEN }} source_ref: 'main' target_branch: 'develop' - commit_message_template: '[Automated] Merged release {source_ref} into {target_branch}' \ No newline at end of file + commit_message_template: '[Automated] Merged release {source_ref} into {target_branch}' From 24ef0efd4b368d99ee8b700090e8d8c6e2b2eafa Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= <33513211+antirotor@users.noreply.github.com> Date: Wed, 19 Oct 2022 16:31:00 +0200 Subject: [PATCH 136/480] :green_heart: update python --- .github/workflows/prerelease.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/prerelease.yml b/.github/workflows/prerelease.yml index bf39f8f956..3c5101b11f 100644 --- 
a/.github/workflows/prerelease.yml +++ b/.github/workflows/prerelease.yml @@ -17,7 +17,7 @@ jobs: - name: Set up Python uses: actions/setup-python@v2 with: - python-version: 3.7 + python-version: 3.9 - name: Install Python requirements run: pip install gitpython semver PyGithub @@ -92,4 +92,4 @@ jobs: github_token: ${{ secrets.ADMIN_TOKEN }} source_ref: 'main' target_branch: 'develop' - commit_message_template: '[Automated] Merged {source_ref} into {target_branch}' \ No newline at end of file + commit_message_template: '[Automated] Merged {source_ref} into {target_branch}' From f47403403b8a3301a5a28974c5c8b20b7b2b6f97 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 19 Oct 2022 16:37:54 +0200 Subject: [PATCH 137/480] OP-3426 - update CleanUp to skip for automatic tests Tests work with temp folders, so CleanUp might delete some required files (as metadata.json). Let automatic process cleanup itself. --- openpype/plugins/publish/cleanup.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/openpype/plugins/publish/cleanup.py b/openpype/plugins/publish/cleanup.py index f29e6ccd4e..34480dd199 100644 --- a/openpype/plugins/publish/cleanup.py +++ b/openpype/plugins/publish/cleanup.py @@ -44,6 +44,9 @@ class CleanUp(pyblish.api.InstancePlugin): def process(self, instance): """Plugin entry point.""" + if os.environ.get("IS_TEST"): + # let automatic test process clean up temporary data + return # Get the errored instances failed = [] for result in instance.context.data["results"]: From 462e141ab00ec65454af1d932130b6c70e7c31aa Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 19 Oct 2022 17:41:36 +0200 Subject: [PATCH 138/480] removing old code --- openpype/hosts/nuke/plugins/publish/validate_knobs.py | 11 +---------- 1 file changed, 1 insertion(+), 10 deletions(-) diff --git a/openpype/hosts/nuke/plugins/publish/validate_knobs.py b/openpype/hosts/nuke/plugins/publish/validate_knobs.py index 1d853f72b5..ee8a824d8f 100644 --- a/openpype/hosts/nuke/plugins/publish/validate_knobs.py +++ b/openpype/hosts/nuke/plugins/publish/validate_knobs.py @@ -62,16 +62,7 @@ class ValidateKnobs(pyblish.api.ContextPlugin): for instance in context: # Filter publisable instances. - if ( - instance.data.get("publish") - and instance.data["publish"] is False - ): - continue - - if ( - instance.data.get("active") - and instance.data["active"] is False - ): + if not instance.data.get("active"): continue # Filter families. From a7aff0e50cf46471497968646425d0fac28830ff Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 19 Oct 2022 17:46:01 +0200 Subject: [PATCH 139/480] nuke: removing old logic code --- openpype/hosts/nuke/plugins/publish/validate_knobs.py | 6 ------ 1 file changed, 6 deletions(-) diff --git a/openpype/hosts/nuke/plugins/publish/validate_knobs.py b/openpype/hosts/nuke/plugins/publish/validate_knobs.py index ee8a824d8f..db21cdc7c5 100644 --- a/openpype/hosts/nuke/plugins/publish/validate_knobs.py +++ b/openpype/hosts/nuke/plugins/publish/validate_knobs.py @@ -61,17 +61,11 @@ class ValidateKnobs(pyblish.api.ContextPlugin): invalid_knobs = [] for instance in context: - # Filter publisable instances. - if not instance.data.get("active"): - continue # Filter families. families = [instance.data["family"]] families += instance.data.get("families", []) - if not families: - continue - # Get all knobs to validate. 
knobs = {} for family in families: From 71c2458d6a1c582aeee991842390bc12ffa21ebc Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 20 Oct 2022 10:36:38 +0200 Subject: [PATCH 140/480] nuke: fix tab --- openpype/hosts/nuke/api/lib.py | 38 +++++++++++++++++----------------- 1 file changed, 19 insertions(+), 19 deletions(-) diff --git a/openpype/hosts/nuke/api/lib.py b/openpype/hosts/nuke/api/lib.py index 54fce77c79..fd9c0de9c0 100644 --- a/openpype/hosts/nuke/api/lib.py +++ b/openpype/hosts/nuke/api/lib.py @@ -2726,29 +2726,29 @@ def recreate_instance(origin_node, avalon_data=None): def add_scripts_menu(): - try: - from scriptsmenu import launchfornuke - except ImportError: - log.warning( - "Skipping studio.menu install, because " - "'scriptsmenu' module seems unavailable." - ) - return + try: + from scriptsmenu import launchfornuke + except ImportError: + log.warning( + "Skipping studio.menu install, because " + "'scriptsmenu' module seems unavailable." + ) + return - # load configuration of custom menu - project_settings = get_project_settings(os.getenv("AVALON_PROJECT")) - config = project_settings["nuke"]["scriptsmenu"]["definition"] - _menu = project_settings["nuke"]["scriptsmenu"]["name"] + # load configuration of custom menu + project_settings = get_project_settings(os.getenv("AVALON_PROJECT")) + config = project_settings["nuke"]["scriptsmenu"]["definition"] + _menu = project_settings["nuke"]["scriptsmenu"]["name"] - if not config: - log.warning("Skipping studio menu, no definition found.") - return + if not config: + log.warning("Skipping studio menu, no definition found.") + return - # run the launcher for Maya menu - studio_menu = launchfornuke.main(title=_menu.title()) + # run the launcher for Maya menu + studio_menu = launchfornuke.main(title=_menu.title()) - # apply configuration - studio_menu.build_from_configuration(studio_menu, config) + # apply configuration + studio_menu.build_from_configuration(studio_menu, config) def add_scripts_gizmo(): From bb3105cefb1bc10e6e0841c59303e3f703e47460 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 20 Oct 2022 16:25:47 +0200 Subject: [PATCH 141/480] Nuke: make fallback in case node is erased manually --- openpype/hosts/nuke/api/plugin.py | 44 ++++++++++++++++++++----------- 1 file changed, 29 insertions(+), 15 deletions(-) diff --git a/openpype/hosts/nuke/api/plugin.py b/openpype/hosts/nuke/api/plugin.py index 7e3c131104..88da6c5eb2 100644 --- a/openpype/hosts/nuke/api/plugin.py +++ b/openpype/hosts/nuke/api/plugin.py @@ -205,6 +205,13 @@ class NukeCreator(NewCreator): for created_inst, _changes in update_list: instance_node = created_inst.transient_data["node"] + # in case node is not existing anymore (user erased it manually) + try: + instance_node.fullName() + except ValueError: + self.remove_instances([created_inst]) + continue + set_node_data( instance_node, INSTANCE_DATA_KNOB, @@ -1114,6 +1121,8 @@ def convert_to_valid_instaces(): nuke.Root()["project_directory"].setValue(os.path.dirname(path)) nuke.Root().setModified(False) + _remove_old_knobs(nuke.Root()) + # loop all nodes and convert for node in nuke.allNodes(recurseGroups=True): transfer_data = { @@ -1188,24 +1197,29 @@ def convert_to_valid_instaces(): creator_attr["farm_concurency"] = ( node["deadlineConcurrentTasks"].value()) - remove_knobs = [ - "review", "publish", "render", "suspend_publish", "warn", "divd", - "OpenpypeDataGroup", "OpenpypeDataGroup_End", "deadlinePriority", - "deadlineChunkSize", "deadlineConcurrentTasks", "Deadline" - ] - print(node.name()) + 
_remove_old_knobs(node) - # remove all old knobs - for knob in node.allKnobs(): - try: - if knob.name() in remove_knobs: - node.removeKnob(knob) - elif "avalon" in knob.name(): - node.removeKnob(knob) - except ValueError: - pass # add new instance knob with transfer data set_node_data( node, INSTANCE_DATA_KNOB, transfer_data) nuke.scriptSave() + + +def _remove_old_knobs(node): + remove_knobs = [ + "review", "publish", "render", "suspend_publish", "warn", "divd", + "OpenpypeDataGroup", "OpenpypeDataGroup_End", "deadlinePriority", + "deadlineChunkSize", "deadlineConcurrentTasks", "Deadline" + ] + print(node.name()) + + # remove all old knobs + for knob in node.allKnobs(): + try: + if knob.name() in remove_knobs: + node.removeKnob(knob) + elif "avalon" in knob.name(): + node.removeKnob(knob) + except ValueError: + pass From d8bda695ce183950c320fcf4f04ae2fc983a8298 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 20 Oct 2022 16:30:27 +0200 Subject: [PATCH 142/480] Nuke: Creator depricated --- openpype/hosts/nuke/api/pipeline.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/openpype/hosts/nuke/api/pipeline.py b/openpype/hosts/nuke/api/pipeline.py index 258b09f7a8..700db9a8ed 100644 --- a/openpype/hosts/nuke/api/pipeline.py +++ b/openpype/hosts/nuke/api/pipeline.py @@ -228,6 +228,11 @@ def _install_menu(): ) menu.addSeparator() + menu.addCommand( + "Create... [depricated]", + lambda: nuke.message( + "New position is in \"Publish...\" and \"Create\" tab") + ) menu.addCommand( "Publish...", host_tools.show_publisher From 88a1e11faee664d7740a28f417e3d590fa49104e Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 20 Oct 2022 16:52:32 +0200 Subject: [PATCH 143/480] nuke: shortcut could be set in project overrides --- openpype/hosts/nuke/api/pipeline.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/nuke/api/pipeline.py b/openpype/hosts/nuke/api/pipeline.py index 700db9a8ed..7376166029 100644 --- a/openpype/hosts/nuke/api/pipeline.py +++ b/openpype/hosts/nuke/api/pipeline.py @@ -365,7 +365,7 @@ def add_shortcuts_from_presets(): item_label = menu_label_mapping[command_name] menuitem = menu.findItem(item_label) menuitem.setShortcut(shortcut_str) - except AttributeError as e: + except (AttributeError, KeyError) as e: log.error(e) From dc9a355965afb1b0149df8b1cd6c05241150187b Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 20 Oct 2022 17:49:15 +0200 Subject: [PATCH 144/480] Nuke: adding fallbacks if settings have overrides --- openpype/hosts/nuke/api/plugin.py | 13 ++++++++----- .../hosts/nuke/plugins/create/create_write_image.py | 11 +++++++++++ .../nuke/plugins/create/create_write_prerender.py | 13 +++++++++++++ .../nuke/plugins/create/create_write_render.py | 10 ++++++++++ 4 files changed, 42 insertions(+), 5 deletions(-) diff --git a/openpype/hosts/nuke/api/plugin.py b/openpype/hosts/nuke/api/plugin.py index 88da6c5eb2..b8fc54e443 100644 --- a/openpype/hosts/nuke/api/plugin.py +++ b/openpype/hosts/nuke/api/plugin.py @@ -358,12 +358,15 @@ class NukeWriteCreator(NukeCreator): plugin_settings = self.get_creator_settings(project_settings) # individual attributes - self.instance_attributes = plugin_settings[ - "instance_attributes"] + self.instance_attributes = plugin_settings.get( + "instance_attributes") or self.instance_attributes self.prenodes = plugin_settings["prenodes"] - self.default_variants = plugin_settings["default_variants"] - self.temp_rendering_path_template = plugin_settings[ - "temp_rendering_path_template"] + self.default_variants = 
plugin_settings.get( + "default_variants") or self.default_variants + self.temp_rendering_path_template = ( + plugin_settings.get("temp_rendering_path_template") + or self.temp_rendering_path_template + ) class OpenPypeCreator(LegacyCreator): diff --git a/openpype/hosts/nuke/plugins/create/create_write_image.py b/openpype/hosts/nuke/plugins/create/create_write_image.py index e9ff33a32b..cf70063abc 100644 --- a/openpype/hosts/nuke/plugins/create/create_write_image.py +++ b/openpype/hosts/nuke/plugins/create/create_write_image.py @@ -20,6 +20,17 @@ class CreateWriteImage(napi.NukeWriteCreator): family = "image" icon = "sign-out" + instance_attributes = [ + "use_range_limit" + ] + default_variants = [ + "StillFrame", + "MPFrame", + "LayoutFrame" + ] + temp_rendering_path_template = ( + "{work}/renders/nuke/{subset}/{subset}.{frame}.{ext}") + def get_pre_create_attr_defs(self): attr_defs = [ BoolDef( diff --git a/openpype/hosts/nuke/plugins/create/create_write_prerender.py b/openpype/hosts/nuke/plugins/create/create_write_prerender.py index b7d0f4ade9..daa2319c4b 100644 --- a/openpype/hosts/nuke/plugins/create/create_write_prerender.py +++ b/openpype/hosts/nuke/plugins/create/create_write_prerender.py @@ -20,6 +20,19 @@ class CreateWritePrerender(napi.NukeWriteCreator): family = "prerender" icon = "sign-out" + instance_attributes = [ + "use_range_limit" + ] + default_variants = [ + "Key01", + "Bg01", + "Fg01", + "Branch01", + "Part01" + ] + temp_rendering_path_template = ( + "{work}/renders/nuke/{subset}/{subset}.{frame}.{ext}") + def get_pre_create_attr_defs(self): attr_defs = [ BoolDef( diff --git a/openpype/hosts/nuke/plugins/create/create_write_render.py b/openpype/hosts/nuke/plugins/create/create_write_render.py index 3d96e8c13b..85133458d1 100644 --- a/openpype/hosts/nuke/plugins/create/create_write_render.py +++ b/openpype/hosts/nuke/plugins/create/create_write_render.py @@ -20,6 +20,16 @@ class CreateWriteRender(napi.NukeWriteCreator): family = "render" icon = "sign-out" + instance_attributes = [ + "reviewable" + ] + default_variants = [ + "Main", + "Mask" + ] + temp_rendering_path_template = ( + "{work}/renders/nuke/{subset}/{subset}.{frame}.{ext}") + def get_pre_create_attr_defs(self): attr_defs = [ BoolDef( From 7e6c355fea5aa00b74dfc50b166504998c20f443 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 20 Oct 2022 18:10:48 +0200 Subject: [PATCH 145/480] OP-3426 - purge temp only if not error --- tests/lib/testing_classes.py | 17 ++++++++++++++--- 1 file changed, 14 insertions(+), 3 deletions(-) diff --git a/tests/lib/testing_classes.py b/tests/lib/testing_classes.py index 946ad67197..8b5050a6bb 100644 --- a/tests/lib/testing_classes.py +++ b/tests/lib/testing_classes.py @@ -50,6 +50,8 @@ class ModuleUnitTest(BaseTest): TEST_DATA_FOLDER = None + failed = False + @pytest.fixture(scope='session') def monkeypatch_session(self): """Monkeypatch couldn't be used with module or session fixtures.""" @@ -80,7 +82,7 @@ class ModuleUnitTest(BaseTest): print("Temporary folder created:: {}".format(tmpdir)) yield tmpdir - persist = persist or self.PERSIST + persist = persist or self.PERSIST or self.failed if not persist: print("Removing {}".format(tmpdir)) shutil.rmtree(tmpdir) @@ -143,7 +145,8 @@ class ModuleUnitTest(BaseTest): yield db_handler - if not self.PERSIST: + persist = self.PERSIST or self.failed + if not persist: db_handler.teardown(self.TEST_DB_NAME) db_handler.teardown(self.TEST_OPENPYPE_NAME) @@ -304,6 +307,7 @@ class PublishTest(ModuleUnitTest): while launched_app.poll() is None: 
time.sleep(0.5) if time.time() - time_start > timeout: + self.failed = True launched_app.terminate() raise ValueError("Timeout reached") @@ -317,7 +321,7 @@ class PublishTest(ModuleUnitTest): Compares only presence, not size nor content! """ - published_dir_base = os.path.join(output_folder_url, "output") + published_dir_base = output_folder_url expected_dir_base = os.path.join(download_test_data, "expected") @@ -347,6 +351,7 @@ class DeadlinePublishTest(PublishTest): while launched_app.poll() is None: time.sleep(0.5) if time.time() - time_start > timeout: + self.failed = True launched_app.terminate() raise ValueError("Timeout reached") @@ -355,9 +360,11 @@ class DeadlinePublishTest(PublishTest): "**/*_metadata.json"), recursive=True) if not metadata_json: + self.failed = True raise RuntimeError("No metadata file found. No job id.") if len(metadata_json) > 1: + self.failed = True raise RuntimeError("Too many metadata files found.") with open(metadata_json[0]) as fp: @@ -370,6 +377,7 @@ class DeadlinePublishTest(PublishTest): deadline_url = deadline_module.deadline_urls["default"] if not deadline_url: + self.failed = True raise ValueError("Must have default deadline url.") url = "{}/api/jobs?JobId={}".format(deadline_url, deadline_job_id) @@ -379,14 +387,17 @@ class DeadlinePublishTest(PublishTest): while not valid_date_finished: time.sleep(0.5) if time.time() - time_start > timeout: + self.failed = True raise ValueError("Timeout for DL finish reached") response = requests.get(url, timeout=10) if not response.ok: + self.failed = True msg = "Couldn't connect to {}".format(deadline_url) raise RuntimeError(msg) if not response.json(): + self.failed = True raise ValueError("Couldn't find {}".format(deadline_job_id)) # '0001-...' returned until job is finished From 5d9aaecea0e48938c3f3735d5b46c76ec932e8f3 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 21 Oct 2022 15:58:25 +0200 Subject: [PATCH 146/480] OP-3426 - fix output folder output_folder_url should be used, contains already 'output' subfolder --- tests/integration/hosts/aftereffects/lib.py | 3 +-- tests/integration/hosts/maya/lib.py | 1 - tests/integration/hosts/nuke/lib.py | 15 ++++++++++++--- 3 files changed, 13 insertions(+), 6 deletions(-) diff --git a/tests/integration/hosts/aftereffects/lib.py b/tests/integration/hosts/aftereffects/lib.py index 0f7513c7d3..541103cb2e 100644 --- a/tests/integration/hosts/aftereffects/lib.py +++ b/tests/integration/hosts/aftereffects/lib.py @@ -20,8 +20,7 @@ class AEHostFixtures(HostFixtures): "input", "workfile", "test_project_test_asset_TestTask_v001.aep") - dest_folder = os.path.join(download_test_data, - "output", + dest_folder = os.path.join(output_folder_url, self.PROJECT, self.ASSET, "work", diff --git a/tests/integration/hosts/maya/lib.py b/tests/integration/hosts/maya/lib.py index a0c5214181..6610fac118 100644 --- a/tests/integration/hosts/maya/lib.py +++ b/tests/integration/hosts/maya/lib.py @@ -21,7 +21,6 @@ class MayaHostFixtures(HostFixtures): "workfile", "test_project_test_asset_TestTask_v001.mb") dest_folder = os.path.join(output_folder_url, - "output", self.PROJECT, self.ASSET, "work", diff --git a/tests/integration/hosts/nuke/lib.py b/tests/integration/hosts/nuke/lib.py index ed2af38d1c..564a5d3b88 100644 --- a/tests/integration/hosts/nuke/lib.py +++ b/tests/integration/hosts/nuke/lib.py @@ -1,6 +1,6 @@ import os import pytest -import shutil +import re from tests.lib.testing_classes import ( HostFixtures, @@ -15,7 +15,7 @@ class NukeHostFixtures(HostFixtures): """Get 
last_workfile_path from source data. """ - source_file_name = "test_project_test_asset_TestTask_v001.nk" + source_file_name = "test_project_test_asset_test_task_v001.nk" src_path = os.path.join(download_test_data, "input", "workfile", @@ -31,7 +31,16 @@ class NukeHostFixtures(HostFixtures): dest_path = os.path.join(dest_folder, source_file_name) - shutil.copy(src_path, dest_path) + # rewrite old root with temporary file + # TODO - using only C:/projects seems wrong - but where to get root ? + replace_pattern = re.compile(re.escape("C:/projects"), re.IGNORECASE) + with open(src_path, "r") as fp: + updated = fp.read() + updated = replace_pattern.sub(output_folder_url.replace("\\", '/'), + updated) + + with open(dest_path, "w") as fp: + fp.write(updated) yield dest_path From e38ae9d91911a86d8f2683d35cc92b6a02834631 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 21 Oct 2022 16:45:14 +0200 Subject: [PATCH 147/480] OP-3426 - refactor - rename workfile --- tests/integration/hosts/maya/lib.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/integration/hosts/maya/lib.py b/tests/integration/hosts/maya/lib.py index 6610fac118..1411ccca5f 100644 --- a/tests/integration/hosts/maya/lib.py +++ b/tests/integration/hosts/maya/lib.py @@ -19,7 +19,7 @@ class MayaHostFixtures(HostFixtures): src_path = os.path.join(download_test_data, "input", "workfile", - "test_project_test_asset_TestTask_v001.mb") + "test_project_test_asset_test_task_v001.mb") dest_folder = os.path.join(output_folder_url, self.PROJECT, self.ASSET, @@ -27,7 +27,7 @@ class MayaHostFixtures(HostFixtures): self.TASK) os.makedirs(dest_folder) dest_path = os.path.join(dest_folder, - "test_project_test_asset_TestTask_v001.mb") + "test_project_test_asset_test_task_v001.mb") shutil.copy(src_path, dest_path) yield dest_path From 127a0dcb1fc8816149f6b30e7e0e169cd9d1f92f Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 21 Oct 2022 16:46:11 +0200 Subject: [PATCH 148/480] OP-3426 - refactor - use class variable --- tests/lib/testing_classes.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/lib/testing_classes.py b/tests/lib/testing_classes.py index 8b5050a6bb..e4f816f4d0 100644 --- a/tests/lib/testing_classes.py +++ b/tests/lib/testing_classes.py @@ -139,7 +139,7 @@ class ModuleUnitTest(BaseTest): overwrite=True, db_name_out=self.TEST_DB_NAME) - db_handler.setup_from_dump("openpype", backup_dir, + db_handler.setup_from_dump(self.TEST_OPENPYPE_NAME, backup_dir, overwrite=True, db_name_out=self.TEST_OPENPYPE_NAME) From f703e53651bd0ceb845563a01601e48bf1d14f6d Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Mon, 24 Oct 2022 10:48:14 +0200 Subject: [PATCH 149/480] OP-3426 - fix - wrong flag used in pype_commands --- openpype/pype_commands.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/pype_commands.py b/openpype/pype_commands.py index d08a812c61..932fdc9be4 100644 --- a/openpype/pype_commands.py +++ b/openpype/pype_commands.py @@ -299,7 +299,7 @@ class PypeCommands: if pyargs: args.extend(["--pyargs", pyargs]) - if persist: + if test_data_folder: args.extend(["--test_data_folder", test_data_folder]) if persist: From d35ea96bd08e099adae7a52da5f0585e3d8a0760 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Mon, 24 Oct 2022 11:52:08 +0200 Subject: [PATCH 150/480] OP-3426 - added filtering of published folders from comparing Some files or folders are dynamically created and cannot be part of comparing of published and expected folder structure. 
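Illustrative aside, not part of the patch series: the exclusion described above amounts to keeping a published path only when none of the configured skip regexes match it, which is the semantics the later PATCH 156 fix settles on. A minimal sketch with hypothetical names:

    import re

    def filter_published_paths(paths, skip_patterns):
        # Keep a path only when no skip pattern matches it; dynamically
        # created folders (for example renderer log folders) are dropped
        # before the published tree is compared against the expected one.
        return {
            path for path in paths
            if not any(re.search(pattern, path) for pattern in skip_patterns)
        }

    # filter_published_paths({"renders/beauty.exr", "Logs/job_001.log"}, ["Logs"])
    # -> {"renders/beauty.exr"}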
(Example is Logs in AE from DL) --- tests/integration/hosts/aftereffects/lib.py | 5 +++++ tests/integration/hosts/maya/lib.py | 4 ++++ tests/integration/hosts/nuke/lib.py | 3 +++ tests/integration/hosts/photoshop/lib.py | 4 ++++ tests/lib/testing_classes.py | 23 +++++++++++++++++---- 5 files changed, 35 insertions(+), 4 deletions(-) diff --git a/tests/integration/hosts/aftereffects/lib.py b/tests/integration/hosts/aftereffects/lib.py index 541103cb2e..3fb1fa18ce 100644 --- a/tests/integration/hosts/aftereffects/lib.py +++ b/tests/integration/hosts/aftereffects/lib.py @@ -37,6 +37,11 @@ class AEHostFixtures(HostFixtures): """Points Maya to userSetup file from input data""" pass + @pytest.fixture(scope="module") + def skip_compare_folders(self): + # skip folder that contain "Logs", these come only from Deadline + return ["Logs"] + class AELocalPublishTestClass(AEHostFixtures, PublishTest): """Testing class for local publishes.""" diff --git a/tests/integration/hosts/maya/lib.py b/tests/integration/hosts/maya/lib.py index 1411ccca5f..e7480e25fa 100644 --- a/tests/integration/hosts/maya/lib.py +++ b/tests/integration/hosts/maya/lib.py @@ -44,6 +44,10 @@ class MayaHostFixtures(HostFixtures): os.pathsep, original_pythonpath)) + @pytest.fixture(scope="module") + def skip_compare_folders(self): + yield [] + class MayaLocalPublishTestClass(MayaHostFixtures, PublishTest): """Testing class for local publishes.""" diff --git a/tests/integration/hosts/nuke/lib.py b/tests/integration/hosts/nuke/lib.py index 564a5d3b88..8e97cd6fe4 100644 --- a/tests/integration/hosts/nuke/lib.py +++ b/tests/integration/hosts/nuke/lib.py @@ -56,6 +56,9 @@ class NukeHostFixtures(HostFixtures): os.pathsep, original_nuke_path)) + @pytest.fixture(scope="module") + def skip_compare_folders(self): + yield [] class NukeLocalPublishTestClass(NukeHostFixtures, PublishTest): """Testing class for local publishes.""" diff --git a/tests/integration/hosts/photoshop/lib.py b/tests/integration/hosts/photoshop/lib.py index 16ef2d3ae6..23ae79434e 100644 --- a/tests/integration/hosts/photoshop/lib.py +++ b/tests/integration/hosts/photoshop/lib.py @@ -32,3 +32,7 @@ class PhotoshopTestClass(HostFixtures): def startup_scripts(self, monkeypatch_session, download_test_data): """Points Maya to userSetup file from input data""" pass + + @pytest.fixture(scope="module") + def skip_compare_folders(self): + yield [] \ No newline at end of file diff --git a/tests/lib/testing_classes.py b/tests/lib/testing_classes.py index e4f816f4d0..8aaeb4304b 100644 --- a/tests/lib/testing_classes.py +++ b/tests/lib/testing_classes.py @@ -9,6 +9,7 @@ import shutil import glob import platform import requests +import re from tests.lib.db_handler import DBHandler from common.openpype_common.distribution.file_handler import RemoteFileHandler @@ -316,7 +317,8 @@ class PublishTest(ModuleUnitTest): yield True def test_folder_structure_same(self, dbcon, publish_finished, - download_test_data, output_folder_url): + download_test_data, output_folder_url, + skip_compare_folders): """Check if expected and published subfolders contain same files. Compares only presence, not size nor content! 
@@ -334,9 +336,17 @@ class PublishTest(ModuleUnitTest): glob.glob(expected_dir_base + "\\**", recursive=True) if f != expected_dir_base and os.path.exists(f)) - not_matched = expected.symmetric_difference(published) - assert not not_matched, "Missing {} files".format( - "\n".join(sorted(not_matched))) + filtered_published = set() + for pub_path in published: + for val in skip_compare_folders: + if not re.search(val, pub_path): + filtered_published.add(pub_path) + + not_matched = expected.symmetric_difference(filtered_published) + if not_matched: + self.failed = True + raise AssertionError("Missing {} files".format( + "\n".join(sorted(not_matched)))) class DeadlinePublishTest(PublishTest): @@ -419,3 +429,8 @@ class HostFixtures(): def startup_scripts(self, monkeypatch_session, download_test_data): """"Adds init scripts (like userSetup) to expected location""" raise NotImplementedError + + @pytest.fixture(scope="module") + def skip_compare_folders(self): + """Use list of regexs to filter out published folders from comparing""" + raise NotImplementedError From e94c524eceb8fb0975645ba6cccc2d2ef2049b44 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Mon, 24 Oct 2022 15:10:26 +0200 Subject: [PATCH 151/480] OP-3426 - fix - counts of expected items in DB Format of subset names was changed, default settings contain also hero version etc. --- .../maya/test_deadline_publish_in_maya.py | 20 ++++++++++--------- 1 file changed, 11 insertions(+), 9 deletions(-) diff --git a/tests/integration/hosts/maya/test_deadline_publish_in_maya.py b/tests/integration/hosts/maya/test_deadline_publish_in_maya.py index 16ff48d7fd..1c23129b56 100644 --- a/tests/integration/hosts/maya/test_deadline_publish_in_maya.py +++ b/tests/integration/hosts/maya/test_deadline_publish_in_maya.py @@ -49,45 +49,47 @@ class TestDeadlinePublishInMaya(MayaDeadlinePublishTestClass): failures.append( DBAssert.count_of_types(dbcon, "subset", 1, - name="renderTestTaskMain_beauty")) + name="renderTest_taskMain_beauty")) failures.append( DBAssert.count_of_types(dbcon, "subset", 1, - name="workfileTesttask")) + name="workfileTest_task")) - failures.append(DBAssert.count_of_types(dbcon, "representation", 6)) + failures.append(DBAssert.count_of_types(dbcon, "representation", 8)) + # hero included additional_args = {"context.subset": "modelMain", "context.ext": "abc"} failures.append( - DBAssert.count_of_types(dbcon, "representation", 1, + DBAssert.count_of_types(dbcon, "representation", 2, additional_args=additional_args)) + # hero included additional_args = {"context.subset": "modelMain", "context.ext": "ma"} failures.append( - DBAssert.count_of_types(dbcon, "representation", 1, + DBAssert.count_of_types(dbcon, "representation", 2, additional_args=additional_args)) additional_args = {"context.subset": "modelMain", "context.ext": "mb"} failures.append( - DBAssert.count_of_types(dbcon, "representation", 1, + DBAssert.count_of_types(dbcon, "representation", 0, additional_args=additional_args)) - additional_args = {"context.subset": "renderTestTaskMain_beauty", + additional_args = {"context.subset": "renderTest_taskMain_beauty", "context.ext": "exr"} failures.append( DBAssert.count_of_types(dbcon, "representation", 1, additional_args=additional_args)) - additional_args = {"context.subset": "renderTestTaskMain_beauty", + additional_args = {"context.subset": "renderTest_taskMain_beauty", "context.ext": "jpg"} failures.append( DBAssert.count_of_types(dbcon, "representation", 1, additional_args=additional_args)) - additional_args = {"context.subset": 
"renderTestTaskMain_beauty", + additional_args = {"context.subset": "renderTest_taskMain_beauty", "context.ext": "h264_exr"} failures.append( DBAssert.count_of_types(dbcon, "representation", 1, From 1f18639c4e866e7032caa23e096fbaa63ee265cb Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Mon, 24 Oct 2022 16:43:40 +0200 Subject: [PATCH 152/480] OP-3426 - fix - counts of expected items in DB Format of subset names was changed, default settings contain also hero version etc. --- tests/integration/hosts/nuke/test_publish_in_nuke.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/integration/hosts/nuke/test_publish_in_nuke.py b/tests/integration/hosts/nuke/test_publish_in_nuke.py index a3947a30ae..8fff54a4be 100644 --- a/tests/integration/hosts/nuke/test_publish_in_nuke.py +++ b/tests/integration/hosts/nuke/test_publish_in_nuke.py @@ -61,7 +61,7 @@ class TestPublishInNuke(NukeLocalPublishTestClass): failures.append( DBAssert.count_of_types(dbcon, "subset", 1, - name="renderCompositingInNukeMain")) + name="renderTest_taskMain")) failures.append( DBAssert.count_of_types(dbcon, "subset", 1, @@ -70,7 +70,7 @@ class TestPublishInNuke(NukeLocalPublishTestClass): failures.append( DBAssert.count_of_types(dbcon, "representation", 4)) - additional_args = {"context.subset": "renderCompositingInNukeMain", + additional_args = {"context.subset": "renderTest_taskMain", "context.ext": "exr"} failures.append( DBAssert.count_of_types(dbcon, "representation", 1, From 22a86f89f73b351599b87cc6e310c8f4799a894d Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Mon, 24 Oct 2022 18:22:34 +0200 Subject: [PATCH 153/480] OP-3426 - updated PS testing class --- tests/integration/hosts/photoshop/lib.py | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/tests/integration/hosts/photoshop/lib.py b/tests/integration/hosts/photoshop/lib.py index 23ae79434e..9d51a11c06 100644 --- a/tests/integration/hosts/photoshop/lib.py +++ b/tests/integration/hosts/photoshop/lib.py @@ -2,10 +2,13 @@ import os import pytest import shutil -from tests.lib.testing_classes import HostFixtures +from tests.lib.testing_classes import ( + HostFixtures, + PublishTest +) -class PhotoshopTestClass(HostFixtures): +class PhotoshopTestClass(HostFixtures, PublishTest): @pytest.fixture(scope="module") def last_workfile_path(self, download_test_data, output_folder_url): """Get last_workfile_path from source data. 
@@ -35,4 +38,4 @@ class PhotoshopTestClass(HostFixtures): @pytest.fixture(scope="module") def skip_compare_folders(self): - yield [] \ No newline at end of file + yield [] From f73330565e9773032473d378e54425eed76ca09c Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 25 Oct 2022 11:28:14 +0200 Subject: [PATCH 154/480] OP-3426 - filter out Auto-Save folder --- tests/integration/hosts/aftereffects/lib.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/integration/hosts/aftereffects/lib.py b/tests/integration/hosts/aftereffects/lib.py index 3fb1fa18ce..c47121a035 100644 --- a/tests/integration/hosts/aftereffects/lib.py +++ b/tests/integration/hosts/aftereffects/lib.py @@ -40,7 +40,7 @@ class AEHostFixtures(HostFixtures): @pytest.fixture(scope="module") def skip_compare_folders(self): # skip folder that contain "Logs", these come only from Deadline - return ["Logs"] + return ["Logs", "Auto-Save"] class AELocalPublishTestClass(AEHostFixtures, PublishTest): From 1bf4fca218b1cd7969f41a2d7dc994d9f8f08467 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 25 Oct 2022 11:34:56 +0200 Subject: [PATCH 155/480] don't store settings to ordered list of staging and production (unused anyway) --- openpype/settings/handlers.py | 35 ++--------------------------------- 1 file changed, 2 insertions(+), 33 deletions(-) diff --git a/openpype/settings/handlers.py b/openpype/settings/handlers.py index def8c16ea7..24544e24e2 100644 --- a/openpype/settings/handlers.py +++ b/openpype/settings/handlers.py @@ -577,8 +577,6 @@ class MongoSettingsHandler(SettingsHandler): key_suffix = "_versioned" _version_order_key = "versions_order" _all_versions_keys = "all_versions" - _production_versions_key = "production_versions" - _staging_versions_key = "staging_versions" def __init__(self): # Get mongo connection @@ -997,10 +995,7 @@ class MongoSettingsHandler(SettingsHandler): return self._version_order_checked = True - from openpype.lib.openpype_version import ( - get_OpenPypeVersion, - is_running_staging - ) + from openpype.lib.openpype_version import get_OpenPypeVersion OpenPypeVersion = get_OpenPypeVersion() # Skip if 'OpenPypeVersion' is not available @@ -1012,25 +1007,11 @@ class MongoSettingsHandler(SettingsHandler): if not doc: doc = {"type": self._version_order_key} - if self._production_versions_key not in doc: - doc[self._production_versions_key] = [] - - if self._staging_versions_key not in doc: - doc[self._staging_versions_key] = [] - if self._all_versions_keys not in doc: doc[self._all_versions_keys] = [] - if is_running_staging(): - versions_key = self._staging_versions_key - else: - versions_key = self._production_versions_key - # Skip if current version is already available - if ( - self._current_version in doc[self._all_versions_keys] - and self._current_version in doc[versions_key] - ): + if self._current_version in doc[self._all_versions_keys]: return if self._current_version not in doc[self._all_versions_keys]: @@ -1047,18 +1028,6 @@ class MongoSettingsHandler(SettingsHandler): str(version) for version in sorted(all_objected_versions) ] - if self._current_version not in doc[versions_key]: - objected_versions = [ - OpenPypeVersion(version=self._current_version) - ] - for version_str in doc[versions_key]: - objected_versions.append(OpenPypeVersion(version=version_str)) - - # Update versions list and push changes to Mongo - doc[versions_key] = [ - str(version) for version in sorted(objected_versions) - ] - self.collection.replace_one( {"type": self._version_order_key}, doc, From 
dab37abea925054efea5c90322db2868796eea70 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 25 Oct 2022 11:30:59 +0200 Subject: [PATCH 156/480] OP-3426 - updated filtering logic --- tests/lib/db_handler.py | 10 ++++++---- tests/lib/testing_classes.py | 7 +++++-- 2 files changed, 11 insertions(+), 6 deletions(-) diff --git a/tests/lib/db_handler.py b/tests/lib/db_handler.py index b80a296b60..16c4650ef6 100644 --- a/tests/lib/db_handler.py +++ b/tests/lib/db_handler.py @@ -228,12 +228,14 @@ class DBHandler: return query # Examples -# handler = DBHandler(uri="mongodb://localhost:27017") +handler = DBHandler(uri="mongodb://localhost:27017") # # -# backup_dir = "c:\\projects\\test_zips\\test_maya_publish\\input\\dumps" +backup_dir = "c:\\projects\\test_zips\\test_nuke_deadline_publish\\input\\dumps" # # # -# handler.backup_to_dump("avalon", backup_dir, True, collection="test_project") -# handler.setup_from_dump("avalon_tests", backup_dir, True, db_name_out="avalon", collection="test_project") +handler.backup_to_dump("avalon_tests", backup_dir, True, collection="test_project") +#handler.backup_to_dump("openpype_tests", backup_dir, True, collection="settings") + +# handler.setup_from_dump("avalon_tests", backup_dir, True, db_name_out="avalon_tests", collection="test_project") # handler.setup_from_sql_file("avalon_tests", "c:\\projects\\sql\\item.sql", # collection="test_project", # drop=False, mode="upsert") diff --git a/tests/lib/testing_classes.py b/tests/lib/testing_classes.py index 8aaeb4304b..ca50376584 100644 --- a/tests/lib/testing_classes.py +++ b/tests/lib/testing_classes.py @@ -338,9 +338,12 @@ class PublishTest(ModuleUnitTest): filtered_published = set() for pub_path in published: - for val in skip_compare_folders: - if not re.search(val, pub_path): + if skip_compare_folders: + if not any([re.search(val, pub_path) + for val in skip_compare_folders]): filtered_published.add(pub_path) + else: + filtered_published.add(pub_path) not_matched = expected.symmetric_difference(filtered_published) if not_matched: From 2341f08492f09c12c0011a63e1a68df895c3ba4a Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 25 Oct 2022 16:45:40 +0200 Subject: [PATCH 157/480] store '--use-staging' to 'OPENPYPE_USE_STAGING' env variable --- start.py | 13 ++++++------- 1 file changed, 6 insertions(+), 7 deletions(-) diff --git a/start.py b/start.py index e59de93236..2fca2a763d 100644 --- a/start.py +++ b/start.py @@ -242,6 +242,9 @@ if "--debug" in sys.argv: sys.argv.remove("--debug") os.environ["OPENPYPE_DEBUG"] = "1" +if "--use-staging" in sys.argv: + sys.argv.remove("--use-staging") + os.environ["OPENPYPE_USE_STAGING"] = "1" import igniter # noqa: E402 from igniter import BootstrapRepos # noqa: E402 @@ -484,7 +487,6 @@ def _process_arguments() -> tuple: """ # check for `--use-version=3.0.0` argument and `--use-staging` use_version = None - use_staging = False commands = [] # OpenPype version specification through arguments @@ -542,10 +544,6 @@ def _process_arguments() -> tuple: " proper version string.")) sys.exit(1) - if "--use-staging" in sys.argv: - use_staging = True - sys.argv.remove("--use-staging") - if "--list-versions" in sys.argv: commands.append("print_versions") sys.argv.remove("--list-versions") @@ -568,7 +566,7 @@ def _process_arguments() -> tuple: sys.argv.pop(idx) sys.argv.insert(idx, "tray") - return use_version, use_staging, commands + return use_version, commands def _determine_mongodb() -> str: @@ -962,7 +960,8 @@ def boot(): # Process arguments # 
------------------------------------------------------------------------ - use_version, use_staging, commands = _process_arguments() + use_version, commands = _process_arguments() + use_staging = os.environ.get("OPENPYPE_USE_STAGING") == "1" if os.getenv("OPENPYPE_VERSION"): if use_version: From 0e9164f7ff458606f4adb56845507819a42d7c57 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 25 Oct 2022 16:46:03 +0200 Subject: [PATCH 158/480] settings have function to receive global settings --- openpype/settings/__init__.py | 4 ++- openpype/settings/handlers.py | 64 +++++++++++++++++++++++++++-------- openpype/settings/lib.py | 11 ++++++ 3 files changed, 63 insertions(+), 16 deletions(-) diff --git a/openpype/settings/__init__.py b/openpype/settings/__init__.py index ca7157812d..22d734ae58 100644 --- a/openpype/settings/__init__.py +++ b/openpype/settings/__init__.py @@ -18,11 +18,12 @@ from .exceptions import ( ) from .lib import ( get_general_environments, + get_global_settings, get_system_settings, get_project_settings, get_current_project_settings, get_anatomy_settings, - get_local_settings + get_local_settings, ) from .entities import ( SystemSettings, @@ -49,6 +50,7 @@ __all__ = ( "SaveWarningExc", "get_general_environments", + "get_global_settings", "get_system_settings", "get_project_settings", "get_current_project_settings", diff --git a/openpype/settings/handlers.py b/openpype/settings/handlers.py index 24544e24e2..373029d9df 100644 --- a/openpype/settings/handlers.py +++ b/openpype/settings/handlers.py @@ -181,7 +181,16 @@ class SettingsStateInfo: @six.add_metaclass(ABCMeta) -class SettingsHandler: +class SettingsHandler(object): + global_keys = { + "openpype_path", + "admin_password", + "log_to_server", + "disk_mapping", + "production_version", + "staging_version" + } + @abstractmethod def save_studio_settings(self, data): """Save studio overrides of system settings. @@ -328,6 +337,19 @@ class SettingsHandler: """ pass + @abstractmethod + def get_global_settings(self): + """Studio global settings available across versions. + + Output must contain all keys from 'global_keys'. If value is not set + the output value should be 'None'. + + Returns: + Dict[str, Any]: Global settings same across versions. 
+ """ + + pass + # Clear methods - per version # - clearing may be helpfull when a version settings were created for # testing purposes @@ -566,14 +588,6 @@ class CacheValues: class MongoSettingsHandler(SettingsHandler): """Settings handler that use mongo for storing and loading of settings.""" - global_general_keys = ( - "openpype_path", - "admin_password", - "log_to_server", - "disk_mapping", - "production_version", - "staging_version" - ) key_suffix = "_versioned" _version_order_key = "versions_order" _all_versions_keys = "all_versions" @@ -603,6 +617,7 @@ class MongoSettingsHandler(SettingsHandler): self.collection = settings_collection[database_name][collection_name] + self.global_settings_cache = CacheValues() self.system_settings_cache = CacheValues() self.project_settings_cache = collections.defaultdict(CacheValues) self.project_anatomy_cache = collections.defaultdict(CacheValues) @@ -636,6 +651,23 @@ class MongoSettingsHandler(SettingsHandler): self._prepare_project_settings_keys() return self._attribute_keys + def get_global_settings_doc(self): + if self.global_settings_cache.is_outdated: + global_settings_doc = self.collection.find_one({ + "type": GLOBAL_SETTINGS_KEY + }) or {} + self.global_settings_cache.update_data(global_settings_doc, None) + return self.global_settings_cache.data_copy() + + def get_global_settings(self): + global_settings_doc = self.get_global_settings_doc() + global_settings = global_settings_doc.get("data", {}) + return { + key: global_settings[key] + for key in self.global_keys + if key in global_settings + } + def _extract_global_settings(self, data): """Extract global settings data from system settings overrides. @@ -652,7 +684,7 @@ class MongoSettingsHandler(SettingsHandler): general_data = data["general"] # Add predefined keys to global settings if are set - for key in self.global_general_keys: + for key in self.global_keys: if key not in general_data: continue # Pop key from values @@ -696,7 +728,7 @@ class MongoSettingsHandler(SettingsHandler): # Check if data contain any key from predefined keys any_key_found = False if globals_data: - for key in self.global_general_keys: + for key in self.global_keys: if key in globals_data: any_key_found = True break @@ -723,7 +755,7 @@ class MongoSettingsHandler(SettingsHandler): system_settings_data["general"] = system_general overridden_keys = system_general.get(M_OVERRIDDEN_KEY) or [] - for key in self.global_general_keys: + for key in self.global_keys: if key not in globals_data: continue @@ -765,6 +797,10 @@ class MongoSettingsHandler(SettingsHandler): global_settings = self._extract_global_settings( system_settings_data ) + self.global_settings_cache.update_data( + global_settings, + None + ) system_settings_doc = self.collection.find_one( { @@ -1267,9 +1303,7 @@ class MongoSettingsHandler(SettingsHandler): def get_studio_system_settings_overrides(self, return_version): """Studio overrides of system settings.""" if self.system_settings_cache.is_outdated: - globals_document = self.collection.find_one({ - "type": GLOBAL_SETTINGS_KEY - }) + globals_document = self.get_global_settings_doc() document, version = self._get_system_settings_overrides_doc() last_saved_info = SettingsStateInfo.from_document( diff --git a/openpype/settings/lib.py b/openpype/settings/lib.py index 5eaddf6e6e..d6f31b2593 100644 --- a/openpype/settings/lib.py +++ b/openpype/settings/lib.py @@ -1043,6 +1043,17 @@ def get_current_project_settings(): return get_project_settings(project_name) +@require_handler +def get_global_settings(): + 
default_settings = load_openpype_default_settings() + default_values = default_settings["system_settings"]["general"] + studio_values = _SETTINGS_HANDLER.get_global_settings() + return { + key: studio_values.get(key, default_values.get(key)) + for key in _SETTINGS_HANDLER.global_keys + } + + def get_general_environments(): """Get general environments. From 5958229ac87d7a86c1379a355fa4d12a04f1e6f4 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 25 Oct 2022 16:46:32 +0200 Subject: [PATCH 159/480] added function to check if staging is enabled --- openpype/lib/openpype_version.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/openpype/lib/openpype_version.py b/openpype/lib/openpype_version.py index 8c74f96da6..e287d20e2d 100644 --- a/openpype/lib/openpype_version.py +++ b/openpype/lib/openpype_version.py @@ -58,6 +58,10 @@ def is_running_from_build(): return True +def is_staging_enabled(): + return os.environ.get("OPENPYPE_USE_STAGING") == "1" + + def is_running_staging(): """Currently used OpenPype is staging version. From 929db9fd13c283cecb5ed21e12669206c093d2d4 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 25 Oct 2022 16:46:53 +0200 Subject: [PATCH 160/480] fixed 'get_latest_version' --- igniter/bootstrap_repos.py | 2 +- openpype/lib/openpype_version.py | 6 ++---- 2 files changed, 3 insertions(+), 5 deletions(-) diff --git a/igniter/bootstrap_repos.py b/igniter/bootstrap_repos.py index fd411272c4..2854a047ae 100644 --- a/igniter/bootstrap_repos.py +++ b/igniter/bootstrap_repos.py @@ -466,7 +466,7 @@ class OpenPypeVersion(semver.VersionInfo): installed_version = OpenPypeVersion.get_installed_version() local_versions = OpenPypeVersion.get_local_versions() if local else [] remote_versions = OpenPypeVersion.get_remote_versions() if remote else [] # noqa: E501 - all_versions = local_versions + remote_versions + installed_version + all_versions = local_versions + remote_versions + [installed_version] all_versions.sort() return all_versions[-1] diff --git a/openpype/lib/openpype_version.py b/openpype/lib/openpype_version.py index e287d20e2d..4c927ed191 100644 --- a/openpype/lib/openpype_version.py +++ b/openpype/lib/openpype_version.py @@ -143,13 +143,11 @@ def get_remote_versions(*args, **kwargs): return None -def get_latest_version(staging=None, local=None, remote=None): +def get_latest_version(local=None, remote=None): """Get latest version from repository path.""" - if staging is None: - staging = is_running_staging() + if op_version_control_available(): return get_OpenPypeVersion().get_latest_version( - staging=staging, local=local, remote=remote ) From 1cd4cbcc09079c07c50fbe802e9b9947401a4a77 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 25 Oct 2022 16:47:03 +0200 Subject: [PATCH 161/480] reimplemented 'is_running_staging' --- openpype/lib/openpype_version.py | 59 ++++++++++++++++++++++++++------ 1 file changed, 49 insertions(+), 10 deletions(-) diff --git a/openpype/lib/openpype_version.py b/openpype/lib/openpype_version.py index 4c927ed191..586fc56eb7 100644 --- a/openpype/lib/openpype_version.py +++ b/openpype/lib/openpype_version.py @@ -11,7 +11,6 @@ repository or locally available. import os import sys -import warnings import openpype.version @@ -65,19 +64,59 @@ def is_staging_enabled(): def is_running_staging(): """Currently used OpenPype is staging version. - Deprecated: - Since 3.15 + This function is not 100% proper check of staging version. It is possible + to have enabled to use staging version but be in different one. 
+ + The function is based on 4 factors: + - env 'OPENPYPE_IS_STAGING' is set + - current production version + - current staging version + - use staging is enabled + + First checks for 'OPENPYPE_IS_STAGING' environment which can be set to '1'. + The value should be set only when a process without access to + OpenPypeVersion is launched (e.g. in DCCs). If current version is same + as production version it is expected that it is not staging, and it + doesn't matter what would 'is_staging_enabled' return. If current version + is same as staging version it is expected we're in staging. In all other + cases 'is_staging_enabled' is used as source of outpu value. + + The function is used to decide which icon is used. To check e.g. updates + the output should be combined with other functions from this file. Returns: - bool: True if openpype version containt 'staging'. + bool: Using staging version or not. """ - warnings.warn( - "Staging version logic set by version string is deprecated.", - DeprecationWarning - ) - if "staging" in get_openpype_version(): + + if os.environ.get("OPENPYPE_IS_STAGING") == "1": return True - return False + + if not op_version_control_available(): + return False + + from openpype.settings import get_global_settings + + global_settings = get_global_settings() + production_version = global_settings["production_version"] + latest_version = None + if not production_version or production_version == "latest": + latest_version = get_latest_version(local=False, remote=True) + production_version = latest_version + + current_version = get_openpype_version() + if current_version == production_version: + return False + + staging_version = global_settings["staging_version"] + if not staging_version or staging_version == "latest": + if latest_version is None: + latest_version = get_latest_version(local=False, remote=True) + staging_version = latest_version + + if current_version == production_version: + return True + + return is_staging_enabled() # ---------------------------------------- From 8f617d5d6e6410639fb147acbfd2c71bc564b1f3 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 25 Oct 2022 16:47:56 +0200 Subject: [PATCH 162/480] use 'run_detached_process' from lib to start new tray --- openpype/tools/tray/pype_tray.py | 12 +++--------- 1 file changed, 3 insertions(+), 9 deletions(-) diff --git a/openpype/tools/tray/pype_tray.py b/openpype/tools/tray/pype_tray.py index 3842a4e216..8817990f07 100644 --- a/openpype/tools/tray/pype_tray.py +++ b/openpype/tools/tray/pype_tray.py @@ -2,7 +2,6 @@ import collections import os import sys import atexit -import subprocess import platform @@ -11,8 +10,9 @@ from Qt import QtCore, QtGui, QtWidgets import openpype.version from openpype import resources, style from openpype.lib import ( - get_openpype_execute_args, Logger, + get_openpype_execute_args, + run_detached_process, ) from openpype.lib.openpype_version import ( op_version_control_available, @@ -590,14 +590,8 @@ class TrayManager: kwargs["env"].pop("OPENPYPE_VERSION", None) args.extend(additional_args) - if platform.system().lower() == "windows": - flags = ( - subprocess.CREATE_NEW_PROCESS_GROUP - | subprocess.DETACHED_PROCESS - ) - kwargs["creationflags"] = flags - subprocess.Popen(args, **kwargs) + run_detached_process(args, **kwargs) self.exit() def exit(self): From ebbcbdde1221fedbbb315e6a5c3e1e17a4cf10de Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 25 Oct 2022 16:48:58 +0200 Subject: [PATCH 163/480] simplified args for subprocess --- openpype/tools/tray/pype_tray.py | 
10 ++++------ 1 file changed, 4 insertions(+), 6 deletions(-) diff --git a/openpype/tools/tray/pype_tray.py b/openpype/tools/tray/pype_tray.py index 8817990f07..a49471a944 100644 --- a/openpype/tools/tray/pype_tray.py +++ b/openpype/tools/tray/pype_tray.py @@ -562,9 +562,7 @@ class TrayManager: logic will decide which version will be used. """ args = get_openpype_execute_args() - kwargs = { - "env": dict(os.environ.items()) - } + envs = dict(os.environ.items()) # Create a copy of sys.argv additional_args = list(sys.argv) @@ -577,7 +575,7 @@ class TrayManager: expected_version = get_expected_version() if expected_version is not None: reset_version = False - kwargs["env"]["OPENPYPE_VERSION"] = str(expected_version) + envs["OPENPYPE_VERSION"] = str(expected_version) else: # Trigger reset of version if expected version was not found reset_version = True @@ -587,11 +585,11 @@ class TrayManager: # Add staging flag if was running from staging if is_running_staging(): args.append("--use-staging") - kwargs["env"].pop("OPENPYPE_VERSION", None) + envs.pop("OPENPYPE_VERSION", None) args.extend(additional_args) - run_detached_process(args, **kwargs) + run_detached_process(args, env=envs) self.exit() def exit(self): From 0b68cbae5c064353119c74e32eee5db08e6c3d28 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 25 Oct 2022 16:49:13 +0200 Subject: [PATCH 164/480] added cleanup of additional arguments --- openpype/tools/tray/pype_tray.py | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/openpype/tools/tray/pype_tray.py b/openpype/tools/tray/pype_tray.py index a49471a944..3a457d2951 100644 --- a/openpype/tools/tray/pype_tray.py +++ b/openpype/tools/tray/pype_tray.py @@ -571,7 +571,9 @@ class TrayManager: if args[-1] == additional_args[0]: additional_args.pop(0) + cleanup_additional_args = False if use_expected_version: + cleanup_additional_args = True expected_version = get_expected_version() if expected_version is not None: reset_version = False @@ -585,8 +587,17 @@ class TrayManager: # Add staging flag if was running from staging if is_running_staging(): args.append("--use-staging") + cleanup_additional_args = True envs.pop("OPENPYPE_VERSION", None) + if cleanup_additional_args: + _additional_args = [] + for arg in additional_args: + if arg == "--use-staging" or arg.startswith("--use-version"): + continue + _additional_args.append(arg) + additional_args = _additional_args + args.extend(additional_args) run_detached_process(args, env=envs) From d2127b510c83866a78b8c42f889ad5ebeb7ea042 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 25 Oct 2022 16:49:30 +0200 Subject: [PATCH 165/480] skip '--use-staging' argument as env is already set --- openpype/tools/tray/pype_tray.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/openpype/tools/tray/pype_tray.py b/openpype/tools/tray/pype_tray.py index 3a457d2951..4a7d3e84b0 100644 --- a/openpype/tools/tray/pype_tray.py +++ b/openpype/tools/tray/pype_tray.py @@ -584,9 +584,6 @@ class TrayManager: # Pop OPENPYPE_VERSION if reset_version: - # Add staging flag if was running from staging - if is_running_staging(): - args.append("--use-staging") cleanup_additional_args = True envs.pop("OPENPYPE_VERSION", None) From 2ed190cdfd324166f21487bab1147488091a03a9 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 25 Oct 2022 18:51:09 +0200 Subject: [PATCH 166/480] OP-3426 - remove unwanted uncommenting --- tests/lib/db_handler.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/tests/lib/db_handler.py b/tests/lib/db_handler.py 
index 16c4650ef6..b5085aab3d 100644 --- a/tests/lib/db_handler.py +++ b/tests/lib/db_handler.py @@ -228,11 +228,11 @@ class DBHandler: return query # Examples -handler = DBHandler(uri="mongodb://localhost:27017") +# handler = DBHandler(uri="mongodb://localhost:27017") # # -backup_dir = "c:\\projects\\test_zips\\test_nuke_deadline_publish\\input\\dumps" +# backup_dir = "c:\\projects\\test_zips\\test_nuke_deadline_publish\\input\\dumps" # # # -handler.backup_to_dump("avalon_tests", backup_dir, True, collection="test_project") +# handler.backup_to_dump("avalon_tests", backup_dir, True, collection="test_project") #handler.backup_to_dump("openpype_tests", backup_dir, True, collection="settings") # handler.setup_from_dump("avalon_tests", backup_dir, True, db_name_out="avalon_tests", collection="test_project") From c68f5f45d83b7ac1dc384ac9d1f7398145103257 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 25 Oct 2022 18:52:52 +0200 Subject: [PATCH 167/480] OP-3426 - Hound --- tests/lib/db_handler.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/tests/lib/db_handler.py b/tests/lib/db_handler.py index b5085aab3d..ef3c2b52f6 100644 --- a/tests/lib/db_handler.py +++ b/tests/lib/db_handler.py @@ -230,12 +230,12 @@ class DBHandler: # Examples # handler = DBHandler(uri="mongodb://localhost:27017") # # -# backup_dir = "c:\\projects\\test_zips\\test_nuke_deadline_publish\\input\\dumps" +# backup_dir = "c:\\projects\\test_zips\\test_nuke_deadline_publish\\input\\dumps" # noqa # # # -# handler.backup_to_dump("avalon_tests", backup_dir, True, collection="test_project") -#handler.backup_to_dump("openpype_tests", backup_dir, True, collection="settings") +# handler.backup_to_dump("avalon_tests", backup_dir, True, collection="test_project") # noqa +#handler.backup_to_dump("openpype_tests", backup_dir, True, collection="settings") # noqa -# handler.setup_from_dump("avalon_tests", backup_dir, True, db_name_out="avalon_tests", collection="test_project") +# handler.setup_from_dump("avalon_tests", backup_dir, True, db_name_out="avalon_tests", collection="test_project") # noqa # handler.setup_from_sql_file("avalon_tests", "c:\\projects\\sql\\item.sql", # collection="test_project", # drop=False, mode="upsert") From ec0855ce7466bbcb3f66605d848f512a2b847fe4 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 25 Oct 2022 18:54:13 +0200 Subject: [PATCH 168/480] one more fix of list comprehention --- igniter/bootstrap_repos.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/igniter/bootstrap_repos.py b/igniter/bootstrap_repos.py index 2854a047ae..49de83cebd 100644 --- a/igniter/bootstrap_repos.py +++ b/igniter/bootstrap_repos.py @@ -1026,7 +1026,7 @@ class BootstrapRepos: installed_version = OpenPypeVersion.get_installed_version() local_versions = OpenPypeVersion.get_local_versions() remote_versions = OpenPypeVersion.get_remote_versions() - all_versions = local_versions + remote_versions + installed_version + all_versions = local_versions + remote_versions + [installed_version] if not all_versions: return None From 0f8df529d0b7e51a28fccdf2afb1335105a0b46c Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 25 Oct 2022 18:54:19 +0200 Subject: [PATCH 169/480] removed unused import --- openpype/tools/tray/pype_tray.py | 1 - 1 file changed, 1 deletion(-) diff --git a/openpype/tools/tray/pype_tray.py b/openpype/tools/tray/pype_tray.py index 4a7d3e84b0..ada2e42121 100644 --- a/openpype/tools/tray/pype_tray.py +++ b/openpype/tools/tray/pype_tray.py @@ -21,7 +21,6 @@ from 
openpype.lib.openpype_version import ( is_current_version_studio_latest, is_current_version_higher_than_expected, is_running_from_build, - is_running_staging, get_openpype_version, ) from openpype.modules import TrayModulesManager From 46d06b01b0e9f7c4781381a880f42271000c7063 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 26 Oct 2022 10:45:32 +0200 Subject: [PATCH 170/480] fix args in get expected version --- openpype/lib/openpype_version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/lib/openpype_version.py b/openpype/lib/openpype_version.py index 586fc56eb7..23a4cd3443 100644 --- a/openpype/lib/openpype_version.py +++ b/openpype/lib/openpype_version.py @@ -207,7 +207,7 @@ def get_expected_version(staging=None): if expected_version is None: # Look for latest if expected version is not set in settings expected_version = get_latest_version( - staging=staging, + local=False, remote=True ) return expected_version From 12522aa5902e0dfeb459ba4fea0a872a868e72bf Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 26 Oct 2022 11:00:57 +0200 Subject: [PATCH 171/480] fix how the expected studio version is checked --- openpype/lib/openpype_version.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/lib/openpype_version.py b/openpype/lib/openpype_version.py index 23a4cd3443..e052002468 100644 --- a/openpype/lib/openpype_version.py +++ b/openpype/lib/openpype_version.py @@ -195,9 +195,9 @@ def get_latest_version(local=None, remote=None): def get_expected_studio_version(staging=None): """Expected production or staging version in studio.""" - if staging is None: - staging = is_running_staging() if op_version_control_available(): + if staging is None: + staging = is_staging_enabled() return get_OpenPypeVersion().get_expected_studio_version(staging) return None From 6ed91d4634aea05d7ed6d33631482335d9691772 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 26 Oct 2022 11:38:49 +0200 Subject: [PATCH 172/480] missing dot --- openpype/hosts/nuke/api/lib.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/nuke/api/lib.py b/openpype/hosts/nuke/api/lib.py index 974afffc72..7237b8f398 100644 --- a/openpype/hosts/nuke/api/lib.py +++ b/openpype/hosts/nuke/api/lib.py @@ -692,7 +692,7 @@ def get_nuke_imageio_settings(): Context.project_name)["nuke"]["imageio"] # backward compatibility for project started before 3.10 - # those are still having `__legacy__` knob types + # those are still having `__legacy__` knob types. 
if not project_imageio["enabled"]: return get_anatomy_settings(Context.project_name)["imageio"]["nuke"] From ff3325ddc67cfb981ccf7f9e9383415733ede6cd Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 26 Oct 2022 12:16:10 +0200 Subject: [PATCH 173/480] added option to receive staging and production icon explicitly --- openpype/resources/__init__.py | 14 ++++++++++---- 1 file changed, 10 insertions(+), 4 deletions(-) diff --git a/openpype/resources/__init__.py b/openpype/resources/__init__.py index 49eee21002..0d7778e546 100644 --- a/openpype/resources/__init__.py +++ b/openpype/resources/__init__.py @@ -39,15 +39,21 @@ def get_liberation_font_path(bold=False, italic=False): return font_path +def get_openpype_production_icon_filepath(): + return get_resource("icons", "openpype_icon.png") + + +def get_openpype_staging_icon_filepath(): + return get_resource("icons", "openpype_icon_staging.png") + + def get_openpype_icon_filepath(staging=None): if staging is None: staging = is_running_staging() if staging: - icon_file_name = "openpype_icon_staging.png" - else: - icon_file_name = "openpype_icon.png" - return get_resource("icons", icon_file_name) + return get_openpype_staging_icon_filepath() + return get_openpype_production_icon_filepath() def get_openpype_splash_filepath(staging=None): From c59d5f814c390d44a9553971e412102077a5805a Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 26 Oct 2022 12:16:30 +0200 Subject: [PATCH 174/480] show a dialog in tray when staging and production versions are same --- openpype/tools/tray/pype_tray.py | 68 ++++++++++++++++++++++++++++++++ 1 file changed, 68 insertions(+) diff --git a/openpype/tools/tray/pype_tray.py b/openpype/tools/tray/pype_tray.py index ada2e42121..01749aba61 100644 --- a/openpype/tools/tray/pype_tray.py +++ b/openpype/tools/tray/pype_tray.py @@ -22,6 +22,8 @@ from openpype.lib.openpype_version import ( is_current_version_higher_than_expected, is_running_from_build, get_openpype_version, + is_running_staging, + is_staging_enabled, ) from openpype.modules import TrayModulesManager from openpype.settings import ( @@ -201,6 +203,68 @@ class VersionUpdateDialog(QtWidgets.QDialog): self.accept() +class ProductionStagingDialog(QtWidgets.QDialog): + """Tell user that he has enabled staging but is in production version. + + This is showed only when staging is enabled with '--use-staging' and it's + version is the same as production's version. + """ + + def __init__(self, parent=None): + super(ProductionStagingDialog, self).__init__(parent) + + icon = QtGui.QIcon(resources.get_openpype_icon_filepath()) + self.setWindowIcon(icon) + self.setWindowTitle("Production and Staging versions are the same") + self.setWindowFlags( + self.windowFlags() + | QtCore.Qt.WindowStaysOnTopHint + ) + + top_widget = QtWidgets.QWidget(self) + + staging_pixmap = QtGui.QPixmap( + resources.get_openpype_staging_icon_filepath() + ) + staging_icon_label = PixmapLabel(staging_pixmap, top_widget) + message = ( + "Because production and staging versions are the same" + " your changes and work will affect both." 
+ ) + content_label = QtWidgets.QLabel(message, self) + content_label.setWordWrap(True) + + top_layout = QtWidgets.QHBoxLayout(top_widget) + top_layout.setContentsMargins(0, 0, 0, 0) + top_layout.setSpacing(10) + top_layout.addWidget( + staging_icon_label, 0, + QtCore.Qt.AlignTop | QtCore.Qt.AlignHCenter + ) + top_layout.addWidget(content_label, 1) + + footer_widget = QtWidgets.QWidget(self) + ok_btn = QtWidgets.QPushButton("I understand", footer_widget) + + footer_layout = QtWidgets.QHBoxLayout(footer_widget) + footer_layout.setContentsMargins(0, 0, 0, 0) + footer_layout.addStretch(1) + footer_layout.addWidget(ok_btn) + + main_layout = QtWidgets.QVBoxLayout(self) + main_layout.addWidget(top_widget, 0) + main_layout.addStretch(1) + main_layout.addWidget(footer_widget, 0) + + self.setStyleSheet(style.load_stylesheet()) + self.resize(400, 140) + + ok_btn.clicked.connect(self._on_ok_clicked) + + def _on_ok_clicked(self): + self.close() + + class BuildVersionDialog(QtWidgets.QDialog): """Build/Installation version is too low for current OpenPype version. @@ -461,6 +525,10 @@ class TrayManager: dialog = BuildVersionDialog() dialog.exec_() + elif is_staging_enabled() and not is_running_staging(): + dialog = ProductionStagingDialog() + dialog.exec_() + def _validate_settings_defaults(self): valid = True try: From 96719a39b4719dd6d23b638547825bca88fadc7a Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 26 Oct 2022 12:21:37 +0200 Subject: [PATCH 175/480] set 'OPENPYPE_IS_STAGING' on app launch --- openpype/lib/applications.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/openpype/lib/applications.py b/openpype/lib/applications.py index 990dc7495a..317a17796e 100644 --- a/openpype/lib/applications.py +++ b/openpype/lib/applications.py @@ -1368,6 +1368,7 @@ def get_app_environments_for_context( from openpype.modules import ModulesManager from openpype.pipeline import AvalonMongoDB, Anatomy + from openpype.lib.openpype_version import is_running_staging # Avalon database connection dbcon = AvalonMongoDB() @@ -1404,6 +1405,8 @@ def get_app_environments_for_context( "env": env }) data["env"].update(anatomy.root_environments()) + if is_running_staging(): + data["env"]["OPENPYPE_IS_STAGING"] = "1" prepare_app_environments(data, env_group, modules_manager) prepare_context_environments(data, env_group, modules_manager) From c63c9d5fb7a8fe96424882e52abb8fee6fd5371d Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 26 Oct 2022 12:22:32 +0200 Subject: [PATCH 176/480] integrate creator legacy convertor plugin --- openpype/hosts/nuke/api/pipeline.py | 4 -- openpype/hosts/nuke/api/plugin.py | 15 ++++-- .../nuke/plugins/create/convert_legacy.py | 49 +++++++++++++++++++ 3 files changed, 60 insertions(+), 8 deletions(-) create mode 100644 openpype/hosts/nuke/plugins/create/convert_legacy.py diff --git a/openpype/hosts/nuke/api/pipeline.py b/openpype/hosts/nuke/api/pipeline.py index f682247143..2622a45772 100644 --- a/openpype/hosts/nuke/api/pipeline.py +++ b/openpype/hosts/nuke/api/pipeline.py @@ -302,10 +302,6 @@ def _install_menu(): lambda: host_tools.show_experimental_tools_dialog(parent=main_window) ) menu.addSeparator() - menu.addCommand( - "Convert old publishing instances", - plugin.convert_to_valid_instaces - ) # add reload pipeline only in debug mode if bool(os.getenv("NUKE_DEBUG")): menu.addSeparator() diff --git a/openpype/hosts/nuke/api/plugin.py b/openpype/hosts/nuke/api/plugin.py index a2560317e6..1ef5b47bd7 100644 --- a/openpype/hosts/nuke/api/plugin.py +++ 
b/openpype/hosts/nuke/api/plugin.py @@ -32,6 +32,7 @@ from .lib import ( Knobby, check_subsetname_exists, maintained_selection, + get_avalon_knob_data, set_avalon_knob_data, add_publish_knob, get_nuke_imageio_settings, @@ -1135,16 +1136,22 @@ def convert_to_valid_instaces(): } return mapping[family] - from openpype.hosts.nuke.api.lib import ( - get_avalon_knob_data - ) from openpype.hosts.nuke.api import workio task_name = legacy_io.Session["AVALON_TASK"] # save into new workfile current_file = workio.current_file() - new_workfile = current_file[:-3] + "_publisherConvert" + current_file[-3:] + + # add file suffex if not + if "_publisherConvert" not in current_file: + new_workfile = ( + current_file[:-3] + + "_publisherConvert" + + current_file[-3:] + ) + else: + new_workfile = current_file path = new_workfile.replace("\\", "/") nuke.scriptSaveAs(new_workfile, overwrite=1) diff --git a/openpype/hosts/nuke/plugins/create/convert_legacy.py b/openpype/hosts/nuke/plugins/create/convert_legacy.py new file mode 100644 index 0000000000..08bf66309e --- /dev/null +++ b/openpype/hosts/nuke/plugins/create/convert_legacy.py @@ -0,0 +1,49 @@ +from openpype.pipeline.create.creator_plugins import SubsetConvertorPlugin +from openpype.hosts.nuke.api.lib import ( + INSTANCE_DATA_KNOB, + get_node_data, + get_avalon_knob_data +) +from openpype.hosts.nuke.api.plugin import convert_to_valid_instaces + +import nuke + + +class LegacyConverted(SubsetConvertorPlugin): + identifier = "legacy.converter" + + def find_instances(self): + + legacy_found = False + # search for first available legacy item + for node in nuke.allNodes(recurseGroups=True): + + if node.Class() in ["Viewer", "Dot"]: + continue + + if get_node_data(node, INSTANCE_DATA_KNOB): + continue + + # get data from avalon knob + avalon_knob_data = get_avalon_knob_data( + node, ["avalon:", "ak:"]) + + if not avalon_knob_data: + continue + + if avalon_knob_data["id"] != "pyblish.avalon.instance": + continue + + # catch and break + legacy_found = True + break + + if legacy_found: + # if not item do not add legacy instance convertor + self.add_convertor_item("Convert legacy instances") + + def convert(self): + # loop all instances and convert them + convert_to_valid_instaces() + # remove legacy item if all is fine + self.remove_convertor_item() From b6c6fad10e029552c48ae61c4e90c487b2b27d76 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 26 Oct 2022 12:31:53 +0200 Subject: [PATCH 177/480] remove unused imports --- openpype/hosts/nuke/api/pipeline.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/openpype/hosts/nuke/api/pipeline.py b/openpype/hosts/nuke/api/pipeline.py index 2622a45772..eaab98f292 100644 --- a/openpype/hosts/nuke/api/pipeline.py +++ b/openpype/hosts/nuke/api/pipeline.py @@ -42,8 +42,7 @@ from .lib import ( add_scripts_menu, add_scripts_gizmo, get_node_data, - set_node_data, - update_node_data + set_node_data ) from .workfile_template_builder import ( NukePlaceholderLoadPlugin, @@ -60,7 +59,6 @@ from .workio import ( work_root, current_file ) -from openpype.hosts.nuke.api import plugin log = Logger.get_logger(__name__) From 8a2c10e3d1ed558054e204672b219d28c327e0da Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 27 Oct 2022 11:20:21 +0200 Subject: [PATCH 178/480] Nuke: rewrite collect workfile so we are able to disable its publishing --- .../plugins/publish/collect_context_data.py | 69 +++++++++++++++++++ .../nuke/plugins/publish/collect_workfile.py | 51 +------------- 2 files changed, 72 insertions(+), 48 
deletions(-) create mode 100644 openpype/hosts/nuke/plugins/publish/collect_context_data.py diff --git a/openpype/hosts/nuke/plugins/publish/collect_context_data.py b/openpype/hosts/nuke/plugins/publish/collect_context_data.py new file mode 100644 index 0000000000..5a1cdcf49e --- /dev/null +++ b/openpype/hosts/nuke/plugins/publish/collect_context_data.py @@ -0,0 +1,69 @@ +import os +import nuke +import pyblish.api +import openpype.api as api +import openpype.hosts.nuke.api as napi +from openpype.pipeline import KnownPublishError + + +class CollectContextData(pyblish.api.ContextPlugin): + """Collect current context publish.""" + + order = pyblish.api.CollectorOrder - 0.499 + label = "Collect context data" + hosts = ['nuke'] + + def process(self, context): # sourcery skip: avoid-builtin-shadow + root_node = nuke.root() + + current_file = os.path.normpath(root_node.name()) + + if current_file.lower() == "root": + raise KnownPublishError( + "Workfile is not correct file name. \n" + "Use workfile tool to manage the name correctly." + ) + + # Get frame range + first_frame = int(root_node["first_frame"].getValue()) + last_frame = int(root_node["last_frame"].getValue()) + + # get instance data from root + root_instance_context = napi.get_node_data( + root_node, napi.INSTANCE_DATA_KNOB + ) + + handle_start = root_instance_context["handleStart"] + handle_end = root_instance_context["handleEnd"] + + # Get format + format = root_node['format'].value() + resolution_width = format.width() + resolution_height = format.height() + pixel_aspect = format.pixelAspect() + + script_data = { + "frameStart": first_frame + handle_start, + "frameEnd": last_frame - handle_end, + "resolutionWidth": resolution_width, + "resolutionHeight": resolution_height, + "pixelAspect": pixel_aspect, + + # backward compatibility handles + "handles": handle_start, + "handleStart": handle_start, + "handleEnd": handle_end, + "step": 1, + "fps": root_node['fps'].value(), + + "currentFile": current_file, + "version": int(api.get_version_from_path(current_file)), + + "host": pyblish.api.current_host(), + "hostVersion": nuke.NUKE_VERSION_STRING + } + + context.data["scriptData"] = script_data + context.data.update(script_data) + + self.log.info('Context from Nuke script collected') diff --git a/openpype/hosts/nuke/plugins/publish/collect_workfile.py b/openpype/hosts/nuke/plugins/publish/collect_workfile.py index 7f4849be67..54c77b1e4e 100644 --- a/openpype/hosts/nuke/plugins/publish/collect_workfile.py +++ b/openpype/hosts/nuke/plugins/publish/collect_workfile.py @@ -1,70 +1,25 @@ import os - import nuke - import pyblish.api -import openpype.api as api -from openpype.pipeline import KnownPublishError class CollectWorkfile(pyblish.api.InstancePlugin): """Collect current script for publish.""" - order = pyblish.api.CollectorOrder - 0.499 + order = pyblish.api.CollectorOrder - 0.498 label = "Collect Workfile" hosts = ['nuke'] families = ["workfile"] def process(self, instance): # sourcery skip: avoid-builtin-shadow - root = nuke.root() + script_data = instance.context.data["scriptData"] current_file = os.path.normpath(nuke.root().name()) - if current_file.lower() == "root": - raise KnownPublishError( - "Workfile is not correct file name. \n" - "Use workfile tool to manage the name correctly." 
- ) - # creating instances per write node staging_dir = os.path.dirname(current_file) base_name = os.path.basename(current_file) - # Get frame range - first_frame = int(root["first_frame"].getValue()) - last_frame = int(root["last_frame"].getValue()) - - handle_start = instance.data["handleStart"] - handle_end = instance.data["handleEnd"] - - # Get format - format = root['format'].value() - resolution_width = format.width() - resolution_height = format.height() - pixel_aspect = format.pixelAspect() - - script_data = { - "frameStart": first_frame + handle_start, - "frameEnd": last_frame - handle_end, - "resolutionWidth": resolution_width, - "resolutionHeight": resolution_height, - "pixelAspect": pixel_aspect, - - # backward compatibility handles - "handles": handle_start, - "handleStart": handle_start, - "handleEnd": handle_end, - "step": 1, - "fps": root['fps'].value(), - - "currentFile": current_file, - "version": int(api.get_version_from_path(current_file)), - - "host": pyblish.api.current_host(), - "hostVersion": nuke.NUKE_VERSION_STRING - } - instance.context.data.update(script_data) - # creating representation representation = { 'name': 'nk', @@ -82,4 +37,4 @@ class CollectWorkfile(pyblish.api.InstancePlugin): # adding basic script data instance.data.update(script_data) - self.log.info('Publishing script version') + self.log.info("Collect script version") From fc87bf19e8e79ec30cfd3fa058f5a2bb3020a848 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 27 Oct 2022 12:00:52 +0200 Subject: [PATCH 179/480] nuke: fixing extractor for transient data --- .../hosts/nuke/plugins/publish/extract_ouput_node.py | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/nuke/plugins/publish/extract_ouput_node.py b/openpype/hosts/nuke/plugins/publish/extract_ouput_node.py index eb9bc0b429..6067e6cf6b 100644 --- a/openpype/hosts/nuke/plugins/publish/extract_ouput_node.py +++ b/openpype/hosts/nuke/plugins/publish/extract_ouput_node.py @@ -16,9 +16,12 @@ class CreateOutputNode(pyblish.api.ContextPlugin): def process(self, context): # capture selection state with maintained_selection(): - active_node = [node for inst in context - for node in inst - if "ak:family" in node.knobs()] + + active_node = [ + inst.data.get("transientData", {}).get("node") + for inst in context + if inst.data.get("transientData", {}).get("node") + ] if active_node: self.log.info(active_node) From 125fafa79006792db0ba61c7c13a04e320ac4853 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 27 Oct 2022 15:10:47 +0200 Subject: [PATCH 180/480] Nuke: depricated function in lib --- openpype/hosts/nuke/api/lib.py | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/nuke/api/lib.py b/openpype/hosts/nuke/api/lib.py index 7237b8f398..869843e711 100644 --- a/openpype/hosts/nuke/api/lib.py +++ b/openpype/hosts/nuke/api/lib.py @@ -477,7 +477,7 @@ def set_avalon_knob_data(node, data=None, prefix="avalon:"): return node -def get_avalon_knob_data(node, prefix="avalon:"): +def get_avalon_knob_data(node, prefix="avalon:", create=True): """[DEPRICATED] Gets a data from nodes's avalon knob Arguments: @@ -506,8 +506,11 @@ def get_avalon_knob_data(node, prefix="avalon:"): except NameError as e: # if it doesn't then create it log.debug("Creating avalon knob: `{}`".format(e)) - node = set_avalon_knob_data(node) - return get_avalon_knob_data(node) + if create: + node = set_avalon_knob_data(node) + return get_avalon_knob_data(node) + else: + return {} # get data from filtered knobs 
data.update({k.replace(p, ''): node[k].value() @@ -2660,6 +2663,7 @@ def process_workfile_builder(): open_file(last_workfile_path) +@deprecated def recreate_instance(origin_node, avalon_data=None): """Recreate input instance to different data From 95bf35b6fab3b0f7f9aedd7147739aec1020dc86 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 27 Oct 2022 15:11:25 +0200 Subject: [PATCH 181/480] nuke: improving list instances --- openpype/hosts/nuke/api/pipeline.py | 8 ++--- openpype/hosts/nuke/api/plugin.py | 30 +++++++++---------- .../nuke/plugins/create/convert_legacy.py | 2 +- 3 files changed, 19 insertions(+), 21 deletions(-) diff --git a/openpype/hosts/nuke/api/pipeline.py b/openpype/hosts/nuke/api/pipeline.py index eaab98f292..0f49419903 100644 --- a/openpype/hosts/nuke/api/pipeline.py +++ b/openpype/hosts/nuke/api/pipeline.py @@ -490,7 +490,6 @@ def ls(): nodes = [n for n in all_nodes] for n in nodes: - log.debug("name: `{}`".format(n.name())) container = parse_container(n) if container: yield container @@ -513,9 +512,9 @@ def list_instances(creator_id=None): try: if node["disable"].value(): continue - except Exception as _exc: - log.debug("Node {} have no disable knob - {}".format( - node.fullName(), _exc)) + except NameError: + # pass if disable knob doesn't exist + pass # get data from avalon knob instance_data = get_node_data( @@ -527,7 +526,6 @@ def list_instances(creator_id=None): if instance_data["id"] != "pyblish.avalon.instance": continue - log.debug("_ creator_id: {}".format(creator_id)) if creator_id and instance_data["creator_identifier"] != creator_id: continue diff --git a/openpype/hosts/nuke/api/plugin.py b/openpype/hosts/nuke/api/plugin.py index 1ef5b47bd7..1f102a2de5 100644 --- a/openpype/hosts/nuke/api/plugin.py +++ b/openpype/hosts/nuke/api/plugin.py @@ -1,4 +1,3 @@ -from pprint import pformat import nuke import re import os @@ -6,7 +5,7 @@ import sys import six import random import string -from collections import OrderedDict +from collections import OrderedDict, defaultdict from abc import abstractmethod from openpype.client import ( @@ -49,6 +48,18 @@ from .pipeline import ( ) +def _collect_and_cache_nodes(creator): + key = "openpype.nuke.nodes" + if key not in creator.collection_shared_data: + instances_by_identifier = defaultdict(list) + for item in list_instances(): + _, instance_data = item + identifier = instance_data["creator_identifier"] + instances_by_identifier[identifier].append(item) + creator.collection_shared_data[key] = instances_by_identifier + return creator.collection_shared_data[key] + + class NukeCreatorError(CreatorError): pass @@ -150,7 +161,6 @@ class NukeCreator(NewCreator): else: self.selected_nodes = [] - self.log.debug("Selection is: {}".format(self.selected_nodes)) def create(self, subset_name, instance_data, pre_create_data): @@ -192,11 +202,8 @@ class NukeCreator(NewCreator): sys.exc_info()[2]) def collect_instances(self): - for (node, data) in list_instances(creator_id=self.identifier): - self.log.debug("_" * 50) - self.log.debug("_ self.identifier: `{}`".format(self.identifier)) - self.log.debug("_ data: `{}`".format(pformat(data))) - + cached_instances = _collect_and_cache_nodes(self) + for (node, data) in cached_instances[self.identifier]: created_instance = CreatedInstance.from_existing( data, self ) @@ -555,7 +562,6 @@ class ExporterReview(object): def get_file_info(self): if self.collection: - self.log.debug("Collection: `{}`".format(self.collection)) # get path self.fname = os.path.basename(self.collection.format( 
"{head}{padding}{tail}")) @@ -690,7 +696,6 @@ class ExporterReviewLut(ExporterReview): # connect self._temp_nodes.append(cms_node) self.previous_node = cms_node - self.log.debug("CMSTestPattern... `{}`".format(self._temp_nodes)) if bake_viewer_process: # Node View Process @@ -723,8 +728,6 @@ class ExporterReviewLut(ExporterReview): # connect gen_lut_node.setInput(0, self.previous_node) self._temp_nodes.append(gen_lut_node) - self.log.debug("GenerateLUT... `{}`".format(self._temp_nodes)) - # ---------- end nodes creation # Export lut file @@ -738,8 +741,6 @@ class ExporterReviewLut(ExporterReview): # ---------- generate representation data self.get_representation_data() - self.log.debug("Representation... `{}`".format(self.data)) - # ---------- Clean up self.clean_nodes() @@ -990,7 +991,6 @@ class AbstractWriteRender(OpenPypeCreator): self.data = data self.nodes = nuke.selectedNodes() - self.log.debug("_ self.data: '{}'".format(self.data)) def process(self): diff --git a/openpype/hosts/nuke/plugins/create/convert_legacy.py b/openpype/hosts/nuke/plugins/create/convert_legacy.py index 08bf66309e..d7341c625f 100644 --- a/openpype/hosts/nuke/plugins/create/convert_legacy.py +++ b/openpype/hosts/nuke/plugins/create/convert_legacy.py @@ -26,7 +26,7 @@ class LegacyConverted(SubsetConvertorPlugin): # get data from avalon knob avalon_knob_data = get_avalon_knob_data( - node, ["avalon:", "ak:"]) + node, ["avalon:", "ak:"], create=False) if not avalon_knob_data: continue From a16d7c95b76eadde20f40bfb2c436aa539af1f8f Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 27 Oct 2022 15:12:03 +0200 Subject: [PATCH 182/480] removing logging --- openpype/hosts/nuke/plugins/create/create_camera.py | 2 -- openpype/hosts/nuke/plugins/create/create_model.py | 2 -- openpype/hosts/nuke/plugins/create/create_source.py | 2 -- openpype/hosts/nuke/plugins/create/workfile_creator.py | 2 -- 4 files changed, 8 deletions(-) diff --git a/openpype/hosts/nuke/plugins/create/create_camera.py b/openpype/hosts/nuke/plugins/create/create_camera.py index c559aa2756..ac1a0c7db7 100644 --- a/openpype/hosts/nuke/plugins/create/create_camera.py +++ b/openpype/hosts/nuke/plugins/create/create_camera.py @@ -62,5 +62,3 @@ class CreateCamera(NukeCreator): NukeCreatorError("Creator error: Select only one camera node") else: self.selected_nodes = [] - - self.log.debug("Selection is: {}".format(self.selected_nodes)) diff --git a/openpype/hosts/nuke/plugins/create/create_model.py b/openpype/hosts/nuke/plugins/create/create_model.py index a1a96bf412..22b28cb7ce 100644 --- a/openpype/hosts/nuke/plugins/create/create_model.py +++ b/openpype/hosts/nuke/plugins/create/create_model.py @@ -63,5 +63,3 @@ class CreateModel(NukeCreator): NukeCreatorError("Creator error: Select only one camera node") else: self.selected_nodes = [] - - self.log.debug("Selection is: {}".format(self.selected_nodes)) diff --git a/openpype/hosts/nuke/plugins/create/create_source.py b/openpype/hosts/nuke/plugins/create/create_source.py index 48f9f86219..35fe42c16b 100644 --- a/openpype/hosts/nuke/plugins/create/create_source.py +++ b/openpype/hosts/nuke/plugins/create/create_source.py @@ -89,5 +89,3 @@ class CreateSource(NukeCreator): else: NukeCreatorError( "Creator error: only supprted with active selection") - - self.log.debug("Selection is: {}".format(self.selected_nodes)) diff --git a/openpype/hosts/nuke/plugins/create/workfile_creator.py b/openpype/hosts/nuke/plugins/create/workfile_creator.py index ae05cee99f..31068c1828 100644 --- 
a/openpype/hosts/nuke/plugins/create/workfile_creator.py +++ b/openpype/hosts/nuke/plugins/create/workfile_creator.py @@ -22,7 +22,6 @@ class WorkfileCreator(AutoCreator): instance_data = api.get_node_data( root_node, api.INSTANCE_DATA_KNOB ) - self.log.debug("__ instance_data: {}".format(instance_data)) project_name = legacy_io.Session["AVALON_PROJECT"] asset_name = legacy_io.Session["AVALON_ASSET"] @@ -34,7 +33,6 @@ class WorkfileCreator(AutoCreator): self.default_variant, task_name, asset_doc, project_name, host_name ) - self.log.debug("__ subset_name: {}".format(subset_name)) instance_data.update({ "asset": asset_name, "task": task_name, From b400a604f032de1c8ae32db00b4536a4b083dbac Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 27 Oct 2022 15:12:13 +0200 Subject: [PATCH 183/480] typo --- .../hosts/nuke/plugins/publish/help/validate_asset_name.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/nuke/plugins/publish/help/validate_asset_name.xml b/openpype/hosts/nuke/plugins/publish/help/validate_asset_name.xml index 1097909a5f..0422917e9c 100644 --- a/openpype/hosts/nuke/plugins/publish/help/validate_asset_name.xml +++ b/openpype/hosts/nuke/plugins/publish/help/validate_asset_name.xml @@ -1,7 +1,7 @@ - Shot/Asset mame + Shot/Asset name ## Invalid Shot/Asset name in subset From ebb07f928435146e08ad9014920afd740a0bb13e Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 27 Oct 2022 15:12:31 +0200 Subject: [PATCH 184/480] nuke: fixing validators --- .../plugins/publish/validate_asset_name.py | 19 +++---- .../publish/validate_script_attributes.py | 54 ++++--------------- 2 files changed, 20 insertions(+), 53 deletions(-) diff --git a/openpype/hosts/nuke/plugins/publish/validate_asset_name.py b/openpype/hosts/nuke/plugins/publish/validate_asset_name.py index 230717cf18..f6822bee45 100644 --- a/openpype/hosts/nuke/plugins/publish/validate_asset_name.py +++ b/openpype/hosts/nuke/plugins/publish/validate_asset_name.py @@ -2,11 +2,10 @@ """Validate if instance asset is the same as context asset.""" from __future__ import absolute_import -import nuke import pyblish.api import openpype.hosts.nuke.api.lib as nlib -from openpype.hosts.nuke import api as napi + from openpype.pipeline.publish import ( ValidateContentsOrder, PublishXmlValidationError, @@ -51,9 +50,10 @@ class SelectInvalidInstances(pyblish.api.Action): self.deselect() def select(self, instances): - nlib.select_nodes( - [nuke.toNode(str(x)) for x in instances] - ) + for inst in instances: + if inst.data.get("transientData", {}).get("node"): + select_node = inst.data["transientData"]["node"] + select_node["selected"].setValue(True) def deselect(self): nlib.reset_selection() @@ -82,13 +82,14 @@ class RepairSelectInvalidInstances(pyblish.api.Action): # Apply pyblish.logic to get the instances for the plug-in instances = pyblish.api.instances_by_plugin(failed, plugin) + self.log.debug(instances) context_asset = context.data["assetEntity"]["name"] for instance in instances: - origin_node = instance.data["transientData"]["node"] - napi.lib.recreate_instance( - origin_node, avalon_data={"asset": context_asset} - ) + node = instance.data["transientData"]["node"] + node_data = nlib.get_node_data(node, nlib.INSTANCE_DATA_KNOB) + node_data["asset"] = context_asset + nlib.set_node_data(node, nlib.INSTANCE_DATA_KNOB, node_data) class ValidateCorrectAssetName(pyblish.api.InstancePlugin): diff --git a/openpype/hosts/nuke/plugins/publish/validate_script_attributes.py 
b/openpype/hosts/nuke/plugins/publish/validate_script_attributes.py index 3ebc34f195..f932ef2c40 100644 --- a/openpype/hosts/nuke/plugins/publish/validate_script_attributes.py +++ b/openpype/hosts/nuke/plugins/publish/validate_script_attributes.py @@ -1,13 +1,10 @@ -from pprint import pformat +from copy import deepcopy import pyblish.api - from openpype.pipeline import PublishXmlValidationError from openpype.pipeline.publish import RepairAction from openpype.hosts.nuke.api.lib import ( - get_avalon_knob_data, WorkfileSettings ) -import nuke class ValidateScriptAttributes(pyblish.api.InstancePlugin): @@ -21,14 +18,9 @@ class ValidateScriptAttributes(pyblish.api.InstancePlugin): actions = [RepairAction] def process(self, instance): - root = nuke.root() - knob_data = get_avalon_knob_data(root) + script_data = deepcopy(instance.context.data["scriptData"]) + asset = instance.data["assetEntity"] - # get asset data frame values - frame_start = asset["data"]["frameStart"] - frame_end = asset["data"]["frameEnd"] - handle_start = asset["data"]["handleStart"] - handle_end = asset["data"]["handleEnd"] # These attributes will be checked attributes = [ @@ -47,37 +39,11 @@ class ValidateScriptAttributes(pyblish.api.InstancePlugin): for attr in attributes if attr in asset["data"] } - # fix float to max 4 digints (only for evaluating) - fps_data = float("{0:.4f}".format( - asset_attributes["fps"])) # fix frame values to include handles - asset_attributes.update({ - "frameStart": frame_start - handle_start, - "frameEnd": frame_end + handle_end, - "fps": fps_data - }) - - self.log.debug(pformat( - asset_attributes - )) - - # Get format - _format = root["format"].value() - - # Get values from nukescript - script_attributes = { - "handleStart": int(knob_data["handleStart"]), - "handleEnd": int(knob_data["handleEnd"]), - "fps": float("{0:.4f}".format(root['fps'].value())), - "frameStart": int(root["first_frame"].getValue()), - "frameEnd": int(root["last_frame"].getValue()), - "resolutionWidth": _format.width(), - "resolutionHeight": _format.height(), - "pixelAspect": _format.pixelAspect() - } - self.log.debug(pformat( - script_attributes - )) + asset_attributes["fps"] = float("{0:.4f}".format( + asset_attributes["fps"])) + script_data["fps"] = float("{0:.4f}".format( + script_data["fps"])) # Compare asset's values Nukescript X Database not_matching = [] @@ -86,14 +52,14 @@ class ValidateScriptAttributes(pyblish.api.InstancePlugin): "Asset vs Script attribute \"{}\": {}, {}".format( attr, asset_attributes[attr], - script_attributes[attr] + script_data[attr] ) ) - if asset_attributes[attr] != script_attributes[attr]: + if asset_attributes[attr] != script_data[attr]: not_matching.append({ "name": attr, "expected": asset_attributes[attr], - "actual": script_attributes[attr] + "actual": script_data[attr] }) # Raise error if not matching From 9192005a8417f4eeedd8df1af54c2814c9df9c20 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 27 Oct 2022 15:40:05 +0200 Subject: [PATCH 185/480] OP-3426 - increase timeout for Nuke It seems that it takes longer on testing PC, this should be safe. 
--- tests/integration/hosts/nuke/test_deadline_publish_in_nuke.py | 2 +- tests/integration/hosts/nuke/test_publish_in_nuke.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/integration/hosts/nuke/test_deadline_publish_in_nuke.py b/tests/integration/hosts/nuke/test_deadline_publish_in_nuke.py index 98b463b9ae..27c76a2274 100644 --- a/tests/integration/hosts/nuke/test_deadline_publish_in_nuke.py +++ b/tests/integration/hosts/nuke/test_deadline_publish_in_nuke.py @@ -42,7 +42,7 @@ class TestDeadlinePublishInNuke(NukeDeadlinePublishTestClass): APP = "nuke" - TIMEOUT = 120 # publish timeout + TIMEOUT = 180 # publish timeout # could be overwritten by command line arguments # keep empty to locate latest installed variant or explicit diff --git a/tests/integration/hosts/nuke/test_publish_in_nuke.py b/tests/integration/hosts/nuke/test_publish_in_nuke.py index 8fff54a4be..f6e5ad6921 100644 --- a/tests/integration/hosts/nuke/test_publish_in_nuke.py +++ b/tests/integration/hosts/nuke/test_publish_in_nuke.py @@ -41,7 +41,7 @@ class TestPublishInNuke(NukeLocalPublishTestClass): APP = "nuke" - TIMEOUT = 120 # publish timeout + TIMEOUT = 180 # publish timeout # could be overwritten by command line arguments # keep empty to locate latest installed variant or explicit From 381ea92477969b341647daf6a2ee86390177bf02 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 27 Oct 2022 15:57:41 +0200 Subject: [PATCH 186/480] OP-3426 - fix subset name in Nuke --- tests/integration/hosts/nuke/test_deadline_publish_in_nuke.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/integration/hosts/nuke/test_deadline_publish_in_nuke.py b/tests/integration/hosts/nuke/test_deadline_publish_in_nuke.py index 27c76a2274..bac3b6898e 100644 --- a/tests/integration/hosts/nuke/test_deadline_publish_in_nuke.py +++ b/tests/integration/hosts/nuke/test_deadline_publish_in_nuke.py @@ -62,7 +62,7 @@ class TestDeadlinePublishInNuke(NukeDeadlinePublishTestClass): failures.append( DBAssert.count_of_types(dbcon, "subset", 1, - name="renderCompositingInNukeMain")) + name="renderTest_taskMain")) failures.append( DBAssert.count_of_types(dbcon, "subset", 1, @@ -71,7 +71,7 @@ class TestDeadlinePublishInNuke(NukeDeadlinePublishTestClass): failures.append( DBAssert.count_of_types(dbcon, "representation", 4)) - additional_args = {"context.subset": "renderCompositingInNukeMain", + additional_args = {"context.subset": "renderTest_taskMain", "context.ext": "exr"} failures.append( DBAssert.count_of_types(dbcon, "representation", 1, From e14b0b284227db0acb3a8e89a540676ee1e39d39 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Mon, 31 Oct 2022 11:14:35 +0100 Subject: [PATCH 187/480] OP-3908 - whitelist all DCC but Adobe Going further Adobe DCC should switch to new publisher by default, eg. Publisher is not experimental for them anymore. 
--- openpype/tools/experimental_tools/tools_def.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/openpype/tools/experimental_tools/tools_def.py b/openpype/tools/experimental_tools/tools_def.py index d3a1caa60e..3275e7b87d 100644 --- a/openpype/tools/experimental_tools/tools_def.py +++ b/openpype/tools/experimental_tools/tools_def.py @@ -88,7 +88,10 @@ class ExperimentalTools: "publisher", "New publisher", "Combined creation and publishing into one tool.", - self._show_publisher + self._show_publisher, + hosts_filter=["blender", "maya", "nuke", "celaction", "flame", + "fusion", "harmony", "hiero", "resolve", + "tvpaint", "unreal"] ), ExperimentalTool( "traypublisher", From c43180aeca8c8a62c4f558e2ec4000dd364997a8 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Mon, 31 Oct 2022 11:16:14 +0100 Subject: [PATCH 188/480] OP-3908 - switch Photoshop to New Publisher --- openpype/hosts/photoshop/api/extension.zxp | Bin 54111 -> 54103 bytes .../hosts/photoshop/api/extension/index.html | 11 ----------- openpype/hosts/photoshop/api/launch_logic.py | 5 +---- 3 files changed, 1 insertion(+), 15 deletions(-) diff --git a/openpype/hosts/photoshop/api/extension.zxp b/openpype/hosts/photoshop/api/extension.zxp index a25ec96e7df2348e558277faedf4f74561a7f316..b6bdb9f85d6857bde552c39eaf46f580284eefba 100644 GIT binary patch delta 3436 zcmaKu2|QG5AIHZq!dS|_WQ)j>eJf)!mTcKcmSn`0E$bjlM<_*1Oofo0tJ@GG`#Q39 z8M~3PRZ7OvM3zvm;dbM`_kBO-bGGOE{LcBG-}9W$^P4QBCKggNn;B8lu!BG#I?x>h zPpc$m6r(eZl(RkZOjQgG3+7Ext=GE8ILtRuraab_r4S`ayzJ!$Nn-5uK=mBuj^rfre5!rp6?Q zcYo=>D%`~erjZA}3^#0g6TGK%wD`ZuMT>}o%3#Y%soiSifUtwrQ~`?|yX5x=p$Cb~ zL&WhtCj;t2c>ZA|!;b$T(5{g*^|1$qDfJEz<^bDE)}t={Io)6o$dnmFh4*2!%er*= z=U`yM@yDS==E_Z@;qSp2f<2h0_@jWlB0PKpUwT$FhrdOjQN=USX+(ngXtdNjb8O}S z{l-fuolA8ih*LsAd(pCzSXg7in2XOy%__v2IdBz1;k9c!Q1y6Lo?o41<* zTfdkoXQ78`v!fVu1}}8g(A8P_yWc4CnRIFLAdIhyrt*nEyylP(O(y?bySND8WZFJQ z4&Rdkrz@;aaO8_JrrpL3wy!SDY2gcdh>UGStFocU^MO;M?fDVCB5dXUmnAMIin@*! z_&ueVPpA_piP0!7HCwYv1_+$f%=YLw4k3^cM$E0V1GqKD2|O+EPXWn|r>A%iQ@+ z`*iPUw~r+TW>z_x;-a@I1{N264)I$VJ{fU;g;0~UB-D5aFYv9MdoX>bv-^12QXg4K zCZrc^11?&QRLneTI?cK+?RUJa@Wj145C)OqZy|=NHKt~t^^SxITv1Xy2Iz-(*Y-EV z5yiMaJMQt>NJy&S&_dH4mUSi#$`)UW)(ut~%1JA@35k@-oCcgoqh*C=i5m*1zV^_r zK*`60_geVj%YobG+^=m}<9_JBwN3lSV?65T_x(0Tr27jndlLW`hI|+a7yPhF|=#uC2!M>Y{u_v~-+it{iU$ z7<#BKVX{xQW63_2RNGjE3UwI;=P=wkIaYVu{I_=M5!JC=j}13Z4JW^4%q6S<*9U44 zRs=PcDb^F zb-ABh)_zm;?i-*7FQ<@!n`uAMCE)nY$?? 
zRq$Bd0D!H^`>QO?r`pAm)}@!HwF*zf9XoN8k!=e}UQ73h0sx#}1JIoil|}M#dr;w0B9eSEgNPor(EFR;9wr6lLtPpkXd z0p~PwwLiu+Q`N`r2M|2fY2WI_$|MA&(l8?K6bapOYGNUqcD=&&xQSFk(Jj=`sc-sM zg6V!cGWB9((DmupuT~0b=4r_vRo*T@0ijuV_aY!vY6EZC?w)!6byalh&CXn#e4It< z>}jnnlGbQX*KZZ8)_|VBnaNREor0=9Y=&jXKA|`~^^p;!$E1<@kP72d2DCgZa`Bg&C=-Jw3SA9t%J&P$$ z10{a$fjmo0fmotmL0dwPE5a^~EE{_~H?BJm&_x@Ljc)~mNZ zv?R*O_^>=l=0lF1U?paLhzs-gBL&i{hgTl5mLn}LWyyToRNNj|^Z$@!6O4#BM_n2b z3WO&06D>ZU)dnP%t=*PIS;i2#J_2yW>cs5{k<&KuA~R|CxXQdyabpDxtzOPrfx{ts zx>ww`8krs?;6lVhOOG#UEM*G91oC=|m`qxR%2mFTqeLDDCZ@ASpDY6q&nmcHb#vxagQJcO9M<8c!?f)19fRKtQx)NI)ChTBo+B}O)&-%5(5fF+}6 z!7XN|37;j*%fiavdS%b@mC<@f)XpQ)`T{FnaHv}nnbxXCFFhbjAEK$PG{Ywt**+q) z6&VDc)Wc-Ly3TbS$`An;DRuJ7Ab)gapkxX`z$}3iMtlP z@n3Ir#$^gjV2hDb=G9= zxt4*wxj|R*isbvKpnkpL!NRRaB3kP;9Q?`-pS{o3oa>ILT%Of!q9<#=FJ5%K7-Kt1 zURK_09QvNs?b*7)>N5YrNvw!H9WGX>_lk&A2_I0#4?$tq;T^1Gm=VzuV0c`?A|&o; ztJG;`-&|hJU-7K|IA^dm8B$7!K@dF?;4is{ku|B)crCQ{g7h>*xN`Oest}rGE$%@+ zg181J`fYL$%RIz6-e!B|6u%QX=iZ{Yt})JcO;Ps#y{Bx*lTSDWRp`i*x06zwNp^|b zPg<@=)Hv9ee&k;N!kMdq_EbozLrzB1@BPy@}Xm6R@4a;ji z3hb1yya)Eu^=`V;AgHrbV)B21y9CXD2U7&ZzXRpo)Bg^ld*S=QURApZc(_+?U%9_B zKZd6dwh!!6xcB`J1-9?hKE=Kd{MM(s@8$b|ZokStu+J-WzXIFeR_(R#S~b+KE$~mP z{t<(f2c%>vF?hfKlpL{}rvEz7gO2=lb$I8jk3kRc@K8o`{|Nlb9xVeLRBtfE0dd)1 zefDai^r8P=50}_^3T3DJ>$y}Qp`E)a%*jD+-aQS3cBcWvwzCyMK%hrGl)L``E6%z( delta 3461 zcmZ{m2{csgAIGm@7<-t;QkEhkdx$b3p)6V6AL`%4_*I)2a0KzUQ8M?z#7KzW08Azvp@G^BpLp9xtS3G0>r=fdT*k0p7br zAro2rmZyMs7s4z?p3uQlX zg)YbiLTw-Y7p{gOf{Fa5AHoVvMlLvo*vmxzPS#~6XeeYrA?$Zzd#ad`LfF)=|4#0z z!x+9he5&XF9}a!ezd6*>zs7|AR@F}*{rZuh?@BA=e<>-YFCjyj_Lp9PHmnk(xRJ&L z|L&$5vviO`?xzI(P9#%RkI}S|8{%6Rz`d^@puWcC`~=~cVSw+1(=2G!^lWi z^bH6A%2{z#O`g9w!i6*)V^mA@MJl8q=W9TQj1>}oJs+`79+}=V~-J+KC04l%i0oLqv zXz3uj5<{qkHd7m`-GjJ4oCr3#oha?#nsryc3S+{_F7MIkB+{6dF^HMdvv~zlbt$+FH3}Lc-6>MAwy+xd_&RIMkhXCWPf@$pC z2<;ANDBmMM+^3d4`b)50r(VfNI8%TyBMF|zig$IvLn@Hq`PUf`p z?B3`!I@)uNPk)Nxi}>S|bVj?^eXCH(Kmy1-ydv=V9qo9$7dRw!P`gN^u#E&RoF28} z*T;Mm{!GuXD`H2Wjtn}FWaqEAB3GtwFt@#SVyg90R8m)`pyrp=z7j*v$^+ZS{dpTr zPHu*c<-kbtPtg7^(XSnZ{?OF-m4@9j4eyv9(Jk< z7xjXVmbg|w)#6PzHf4VrrKt%q$UGqM(uNO{xvNCW7Jj7Yny@DR`4gF5-Q$Jsn|C(V)qGq1Uqa`-EC)VU=_N3k3v_k&96kS( zo7HsOC1zob&>Hmo{WN^MgfDHH8Yt$M>-dU2Fv63i1A*cKcC$fj2J02x zwV-lVxwH>%m&bPDi+Oi9ax3|0%zh0@Yy0RtSHIE%GbM4*>&n=l>LaaC52f45`OVi+pr(PW5#MCvs zYrvZKdvjpmE105Zp_S9YAz`99s3$niz$rynxl_7EQ%U0yk!WgC?)^OB&aj!zzPsB; zZg-Q=|7%8YzF4cn^Kogcn9uUA-5vboEEISp$uYho(sokO33HuoGFrlxw@ zOEZTvi@Q}u!v0oZzM7~IHr7WllNN)Q>T9I*FQfJ_=m}zQ^F?T^04AM==K5p2K&#EO zxK+YxOH3E1U@ER^*ZxVa#FhZRW%SWE+{qQ1@SF&nS&dtlYCc^{X;f+xpgp;|!W&e# zM!$4H@~-)^*t=J$XY}_xc9arEluIH7PQyOh(xHn|!KWeaG3M1DwBeSJ1Fs}vx@O3o9CYJeSW#AKU-^NikLn}5-hBA9XcA|{34@#pJ1(>mk`R+m=+;Jq!L+PG$+NxANM*+mmw%vBzJjY$=4CH*ei3|d57;mKAr)9gu7 zu?x%D!t41=xSN8AbP%cm#H68v8tr+T^H?sxbXAf za*gnFc^(Ytryg~8Kes76F-L<4zN!NrBYtmn;dImq32tIZBAreb8*%9qqIZC_D4N7# zjNJk*sNDe(`L$=j)w5m?ZX|N%?*h8Az|}*sH_8P+hP=A&j0&f6of$R~b@EwU{Gt$> zB$r)}&wsG}?7g!IAYxP$E;$IlTIT#hV>u;kDX}(D!5+$$kq0(WkF}S0Oj6HtABWND zdWSIE(K0J$wRtVB=ydz{^alAlIoWB8Vtgulqjzf_;5j&G?pUnwz&;-mvbsj=e_!%t zG%o32TM>s+zG72mHx6lGHI$dNl&jereuQt4K}yb6@pzR*jmqmsrlb+&exFxM%v8D> zfzduX8C=y^@bm4i?)XG7PiVHDL&$u9ny7HbE!gEe4&H-4>a}vCLz`J5#{NicO^m}k z9+)d(m_IhDkEhPlM_iBj5lRnkYvxzia8z~6ApL=RZ~I(lfp1*-C3iSiof%aDpF(D3 zmU3F@6rV|u{AF&)sd`qHS<~&lcJX4swxZBzs`FG!rFi+M<8cHh-KbTRAy$%QVnM1F zi^bpAb<=&e+3-ebeOg@P)gU?(kHMR8hl+uh<<{{sD)a6z$1qv#^~Fhe+Zdt;(WG%-En>-T5*{x8a6yjQ6s!6GQ_TKqAGWxDb2tz`h;Z-^^ z;m->_WpvIzrmM5MqD)_mxWpbO2HUXq>ZMgN*7NhR*wdNNsyQLc>18}K`B4$si!5Sc zESHoq03kM#`}9!tS*7dmCs$nA@+V6QKbs|E=o>&9j}8|QBPw&*O)j}zNPlC-8sjzt 
z(J^bSdYop+&_0M!9q;cS!tAgwG^kkda?Tz`$kJ_hqRzg#bPuHK$IX&^Q$*DCUU`a0 zX2y>BLh0*(Br2vcciT>SMy)KV$dtx7>b-}ShT*8y7fsJIVy2FPH`ui9F%MrW$M}5; zqxllD^8EO~VbLy8t`dK~#iCL01B+mKGxS5wc*4+< z%3VcyIX7NGh=)6;DRzBit~?&;Z0zBH9HIA#<*W&gqOD<6*5(~(8AygqI}Yde8;n&P z2u#0&by!BKm*x;7oad3P}5LXrdR2d%HiTDZJ)_+YW9=02dElOsF{x9lQV zj(N#$Yz7rrIRdczBj!u8HGuDAN(!mpiBTuA4ZkH)qD3uVg>xuPH1f zqCfjitL~OdJUz38%x2aKmt)}+KU;EIElXzUsraigZt7g<++*zr?@d>U(afouHy?oU zw;g#C-zhLTRVKIV+*%EnhGX&Tl&L}aNji>}A$K*e1u_a3Xp`a95Ks@~i zpwzNPRM=rbm7s*cX1WJu)0Wi{JOWvqz2ZpI!M!B$gnF?Q`*6OBptI5|0+L z1pst4j8rAG^v+8es?z}Z--h*)5g4E&59t10{|q0r03dV#05}0Q$_$_a1ioFTaRd?^ r`b)F#GYq{vva_84;Nj}wcs0<=@n60I{`O7);P`gn!2pogMZWt#K=UWJ diff --git a/openpype/hosts/photoshop/api/extension/index.html b/openpype/hosts/photoshop/api/extension/index.html index 501e753c0b..f2cb30e313 100644 --- a/openpype/hosts/photoshop/api/extension/index.html +++ b/openpype/hosts/photoshop/api/extension/index.html @@ -32,17 +32,6 @@ }); - - - - - From 27aa43b4c811cae9f6c75544371a9a855b787ed5 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Mon, 31 Oct 2022 11:25:55 +0100 Subject: [PATCH 191/480] OP-3908 - updated extension menu Updated extension version number --- openpype/hosts/aftereffects/api/extension.zxp | Bin 101003 -> 100981 bytes .../api/extension/CSXS/manifest.xml | 2 +- 2 files changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/aftereffects/api/extension.zxp b/openpype/hosts/aftereffects/api/extension.zxp index 0ed799991e9c791565a92d3899f3db67b9fea910..1b46143eafee6c748e9ff23f62d16ea003a7acd3 100644 GIT binary patch delta 3652 zcmZ8k1z1$u7M=q`h%`vc5YmkjUg?nTlx|QO1aXK%&d?155`$NeRwSeZ6e&pw0ZBmt z0m&hxAISB2-#h19-}(1hd&S=0S!e$%CJlEt6*mzH4gA~E_Ek(j@C&4vV`cmTRR>sl z#Ft2$j121%*;B|7L8(kF4N^*(d>Nh34KIw9h;VGQ)v%$I5C{Yxa-rrP4Y-~n%^ZOL zdrHcC<`lrQ%7{f4`!Y;6Th66RNp@ZGOSGjrNS0!!8ufc*{Lgz0bFP07+`XTSnE2d( zLT=;985RWMPm*F7;!bP&uSC|2W}PPmNrPZNK<%28nGjkqImO}W+0i$24gDBBGd*Pq zo~hLI^rJ&N^u{qOd-3P|U>1s)Y%$lbrGC>6Ia{*0b5sW(v^wVPk)43uIF7ld?-VIZSa{Z zjzqHzmvt-$CSc;qLGz5@gjD(?CL%H475{~%sqmM@;C!*H$>0?WC#_8x{}?oFQJnW; z{B+fP7gf5!YKe9^VdG*u86`&r6VH%NS?8oq2HS{7W~}B)mD5X`Z3n(j!r%5{bCM|n z=5ZygS=Dh0)dHds5+lqtowZqND01ezoiKUq6>h>1x;0VpT2%+!cr7yu*`YS%-B_BT ztDa}1AU0iyuI_HhLKwdG^P&3hTh%;MWFnzwZ<`X^nzxR~&Y*n-J=F2^@x#2NW=eQ$ z_1g!nUhgZPCqlFg6KU>OjWd5(ZwQ(@4#DtNM98U=3rl;DFZ%`g#y$g;Zci2D`MFps zvJiLsO>UJZ=}NTB-heYTH6L0SOibL@G1if-2Km(nc&t5M>55wi7*H1Ivv(v17m8>$ z9+x}T`#!5^oGSWh;mKxy`-mOS&umToq=ei>aqn}7;MW$27C18TV)f9%buOvf8)ipe zhVh5pJ$zuWkG*_4i_mdXojwVM>m3Wu_$0~hg|B{ad+$`$ij|i4KzbyXB!Fh8#6Gxr z$pW^H4(?gXHEJw3R*7dQY%#4Pv%_AYfPOwU5q9z+=<-BN#MT&ct*!FGYa+6*8i)9m zFc-%)7^12K_IM1LpKgv9!Frz8(X4nM#f;lj(rXrK?QOf<NXo76#uunR?`-EN(p5;FI^Po z5den;{8OMrrOFxctwrpgW)28zB_lf`e5KHC(f z-)U#zv^3%6>}J`Mp|X^rM@3*w7VSt6-ZLKcq+e(;=@qsZeNP(Z29WjWB@|J!_{whWPe7z+Jdp5le9r5dvL;*^ z+r49Ic3JQEEh5RuJ=lLDf^a=)c~GSqLK;9loSWNi$nK0$WS0W<4&~n!`HnOqmc*9V zDa}3daNNg0-m*z?n$o``EU)R*I7rcbkQ`9M%2dC4CM zvk2r{;tP%}whixrV69l;2W$II0{Mn)wmkys+-otpAUBFqZ_Vq69&Y=*f85c`*y@I1 zQ;RLjf{*#0oZ9RtQD+vnHsPzKWuplZB}d7Jnd<$YIv%w>ijDRH_mh(CqtAv01n!&I zlIN_yH}6K=$(5oF6a8}9Dj4IdFH26VLtx(vWU=sEjlq~3t@{wkT%Y6rtCECjkRUSk z3tXWh8H}G&=%;#Oa;GCeT)FQgM-f?X68%>uA$fZ5cPbopn8s2_I%+)M^2j()reMaX zvbf=)y@+?Bv6z#dRNZd%$k_g)1;a>RJfBR?1~EH0F!j!iBC||4T;>(V)8FsO>!4Rv z&nmSY2lgMWJiL0W#$I@c;1nws+Q?MSa7|G#0R7{@Y(or3rC2*Ag!1`i$6gQu^QXRoKv%`u6hF#@gbjr&`eUmSI_m!*LZm+ zCONp4-B&kULwh6*x;E|kqC?`XlHPp|RU8f(7cEpChlBqizT*i7qxSSHd)PhD{dU5e zQV?e?QRZb}J-ljg-*EGtJSdJS9a!`vo*eue)_jUHd1M90OC5@Ko4c1;(UJX?%LQdZ zl~2JVZxlCHY;%WVkIzMP{g`9e-~F*PM(SYmV}w}Vg713xfQh3Ag~0l8NkrwS76W^5 
zD0c_mYn}dbH|l_zrqEGmx^LPYv}FmO>p-NI?a{`wfq~({E{=wWkZ-M4^xOL6`clKU zpO9uxrxJxQm3x!vr+cx%hjVX^Pa)v*$y4H1K|*Yz3&v=kH(|C$r%_&l^j zXOUQs*?deHo5-sZq+Eu!4hQjk4pTeZT_1)&6_!S4jVyaGdq$}2LWN(3_T<-1Ot=<-l?p7l4uiT=3+1s9yVCYEz~Sh ze(WpM;4I3t*ZsGwMNqxakCmUQIF*IAo?+2^B&8N5BC1O2aGwf?HJY7Z<~I*I@|7pc z+HI0t_`O``#-OLHEjkYO9*6F_QZj%}H}U-PzO-9k&8HB3dZkRWW9F6w5&=#@7-ktw(z=HGdyFgC88#qFia&kgb?+oI$7X!rM;9161? zBRy>sg?AD%NwR{AZcImYkNI&4C#vJ}nb=j~+$6}l&tMW$U`9Nj)E zv2qVc8tav)x>2-PRpTt3s?6_b1n#)CHiaqv=)=#J@H$#fs=fbdyv(@)EZ0Iwns*RV z@L&0)DNrS&+FS--3grO%#+}3Yi*@w zcfDs*`r^zdIBypqkPA+5<@Ajd*Ezlre26o|^}eh9!^fzDV8EL2;_&}1wGJUbBk-#M zL9vAaf;1QHg|-@i#R&P|-UH;cF7i%j5m5BiKfE?5s0=Ng*p_Gt-1~7|CLjneXK57*SXaWhS zzajx$KpYhp2`B^QsL@D3_!mvO8U@gE659Zf2i|@HdJ0fT{<(hxeH;kh1v&b6@)xX0 z^L+MsoCi?=3*{gEpPfSGMFDg`52`T=pcS|zPk&bbU;2axfv{6SAk>h{H31N&^O%+> y>}Y_4@`6xZM*Wvpsc3-iW=k;e&(3?;|4*Wqad2?Yoj|Ui-;4MV$m0;;eDojUh=yeV delta 3730 zcmZu!2UrtX*UmtwQlv-`=^`K{(m^7j8Hz-ZszBfa&C-jsh?Fsr-XXwHR8WC{2uKm6 zNL5jmUSvTdC?bZAQkLouvi`eYnR)Kad*{99o;&Ax&b{whOQ(;|piha0=Rb6Iu8i*m zegYDfChI4t-lVZ$J49w295jar$EC{(5B1NqB$bK;37O_RzHex!ohi>jj+&!G~+$nR;L$qQ%zyW1N0Bj^b!Daq`H4B#~Jl>r-8ZJB!nvIY? z5com!wBPRk*#4aNRA5v_RDk;dgPjJ2g5ToKTbdVEFzPu=$DMsSVWN3uPV;Ccc_ObA@|>EVAIP- zH>&jBO*=v>NiL5SDVfrvMYB*NZrlJ909I}3lCvXKI&dkAS^JH6?rq@Yz0)G- zN0cluYG!bTFrBj>gdUj?iF)7Q$!A~Gv2fy?YsE=Oi(Ph9)`OX8`5IGfSo>&*MB~C% z>!$cjd~4=a1*6vieMSY-nQp**QZJWl65i2-{EsJcv3p7GD>OH3(=HSMkwy*n@eG1vk;Kp=Zr(doq zI`DzFb8sx+;gizP{D)D#KcO!Tp%#)5tr_mx(vZTOKEjTicK- zRI$Y=W;fi26LJ1-EF<;LMQXD=qldQUKjPfH?zl9}limm)hp^KE z2xdBj`dDBXPHPgF2?;m@LHti1{I8ATfPUl*5E*~bb;?(P``qyEz)VYM3=iCtK3MRt z_V)&Jz_UgU?=>;NV5fLsFkYAkv=j>*hca&fSn${QgZD_*75`ilo~R}4^p6xjc`Mb% z%o24gPhF8paqah_u5amIh!Ixj7&P;^G)XyeiUTj-o2~9Ok^3|Pz87Mq2A7gpU0Xg( zgegoI=S9(EE_~M;42%Xv4JS5C&#hRhC=ee?#pNHlHh(nfHVPHW*)hY=!w&kP=qEZ+ zO+ECwl?jg^&N;{`n_kNV5e``wvNRMhqXy2D=hR$?bCmmyA2H_?(^j24MN9_JnaKzh zDZ(Z8*9@O0vX@-`ELszO_oA~IgS*A3oX^rKITs3RQF2)SU75yfW7*9krjI@EhXSo# zhM-jA`Utc26y#;8a(?^%Du_B_pmnjkaE0+(F1qz-dMJlN+%9sqvxHiw(SmW_US@2* znM15c_Q^tVeR)c7h+0y2j=q%on8BPNZ}Ul7Ir2q*{|w^wb}GVm!fVrZ>nqbl-Tc#S z#O}<~>5rwrN!!PL9Js<>q~JIB(21bHE7(bA>swYVT$PA6d;g*`5NT28`bh$!tuIAL zEBa5>2Z073Gv$PIoTpf&<=2DXZVMR7$zSN5-y)%Hjo1~C=X+nK`+TKC=qIFGJQX`% z>N{4|xZP;+R`bp9cU3~rAKVg^7feL)r+zOCy(hFiv{NJYxzI;ooDjNKv>Hvkv`~y^ z?e!VNGGl$iXIh?DgZb1!*MvB>(2Vn`+4h$+g90)~G(h)_@KzzUw~^La>J=p9F`jc> z{Wlv6G@>%rYF*X@UFI5EH#qU&%(WeKkw4*8E%HKg;$@X({xYRXXQcp%YI!}n_Vs5S z44%+RQFnhu7_}K99-CiULXx`HWi&JR2d8B#q53~%?UW@zM?aIw(#nfM*W#C~7DKQV zEp}^rNk*SX1ILcYh1sbI2^@1&eQ}TM=r($SHDprJ*FwtcYs?ZKZgX!@T;liiANTVd z2Wz^nouza(-butG!xZTQTRS41GAYHxlOmUxthzSW6CTUS$^~_H4Ot0)WyB9~AZ$;{ zINo9M*4CGyf~^=;-}|Q%uXaa2AO0|LCBR-PqnI=%;xv}NXqHFiau+H&ABoutj8O@( zoYmtu?;4~?_3M~Z@_TwdcoiEvPE1(YiHY`#~%o&vf%GsL9F%) zHl%guyQx)gHx_u`bRl=eTrZI=e&*SXWIQ^`ZzF#9Db?gOJgWVgA2*A=1|lf^>@qb~ z&(#rLW6*kCO}w=BY67Cc){0rF%NEzMLC*e^7wR0+pnpzb?JI58dS4%cuvcx`GV**A z`Awb&91f!=E>17VB{rMV3HyCb?%v69iLZfCvK|OG??u*Zh*B2@vTcQU33QPvUrt=S zUT5i)ZYmp7K0a9-L$O>w+wZ=(LoV3Rn%?c+<|ghcGy`&V{*uV~J8eD0Rlc`^e-!V& za1s-da&B<3+k8(AdNcY?#`SjX`E@I19sgTXppaJLbW5@p%7Vt-B*1NnbmX?z8;kSZ zaV5P25tH*$s_S%a*qCsyH#QIIDE!2A)nP8^^DDOR@zRwVol*Au^MZ|Rrb-OrE8G~i z_hW{72%gt6C5^L3EyFkXh^oR9!g?)l^5SZm5|h+3XJ-6!H4RQ|eDC+ut(Ky=Ar#eU}1BDvg(^4Mo#P$gXSLIerChGw55a$#=0+wPC^{_GH3zhxQdUr^@=bGK^L@w~ z(z7};KudHE#!NO_q}1*S5to0E_A;BR?{yCc;Ely3KOXm=dS#uhK#@(<@Mg1bPHhx3 zeldARbyK)<;;j#le&IVI-gv#HT$9v64;_vGy~&wxmz&CyS!Bo~xg-d-zODT=Q>{WT}ZsaRJHWv%;7oj-wGqLju-f)y<%1$3-4LWv9TS=a%U4!7^BTBK`T%MO-9=!u=${w*giOq!TeQrt|PA?kk zeP6|nHoqGJp0nWg%Z-$HD6hU@dV=hetLMwLC@3}bFXPG2gzv?)K0|T$%LE!cz>B=e 
z<>pbbP=xc$qhW{dZo99;WO8TdFb@WKBz6nZ)c*8*u3USCrFVqip z)Ls+4-rmAd`K4g6yTLUp<3e;BB{!@w##B9xx}(i&oYyTc@a47fH4x`F&uMmRMOWPc z$jwcmMXQNlWf3KEZ;-W_q_6pg;m~ejbPQ`ZRAuH0hvm*yJ(EW=oGckHXU$pdPn_(S zkVHLMPf4Sp#+79+nh0eTddYaO1F?~kZG?DNbI zyZn1pS$-Y$Df>r#of!dW1b)^g5c_T54BtWJVRiwa5rX}DSpwPI z2GnWyS%^If5Ui`X4SWLrqnv>9A^{m_mjEzB!;!#AU>4ep1ayEKkY*Hsrack`Be6yC zLwBP94cfXg7^$I52-?{10O62AG=Kp%Aut-SqV+}s&~+rh1x1yTmbWmK#couqkWyXfBlS4+`b+%n#Kb#%y~;4IirhcgoQpFR - From bc945e9c41dee9946e94c276c2517d0e0b72af48 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Mon, 31 Oct 2022 12:01:38 +0100 Subject: [PATCH 192/480] OP-3908 - fix - wrong regex --- .../photoshop/plugins/publish/collect_extension_version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/photoshop/plugins/publish/collect_extension_version.py b/openpype/hosts/photoshop/plugins/publish/collect_extension_version.py index 64c99b4fc1..dc0678c9af 100644 --- a/openpype/hosts/photoshop/plugins/publish/collect_extension_version.py +++ b/openpype/hosts/photoshop/plugins/publish/collect_extension_version.py @@ -43,7 +43,7 @@ class CollectExtensionVersion(pyblish.api.ContextPlugin): with open(manifest_url) as fp: content = fp.read() - found = re.findall(r'(ExtensionBundleVersion=")([0-10\.]+)(")', + found = re.findall(r'(ExtensionBundleVersion=")([0-9\.]+)(")', content) if found: expected_version = found[0][1] From 7809f833c38146cc081e223557fc962e5c81fccd Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Mon, 31 Oct 2022 18:31:17 +0100 Subject: [PATCH 193/480] OP-3908 - remove SubsetManager rout Functionality already in Publisher. Moved list_instances and remove_instances directly into PhotoshopHost (needed for Creators). --- openpype/hosts/photoshop/api/__init__.py | 35 +-- openpype/hosts/photoshop/api/launch_logic.py | 3 - openpype/hosts/photoshop/api/pipeline.py | 256 ++++++++++-------- .../photoshop/plugins/create/create_image.py | 5 +- .../plugins/create/workfile_creator.py | 3 +- .../plugins/publish/collect_instances.py | 2 +- .../publish/validate_unique_subsets.py | 2 +- 7 files changed, 159 insertions(+), 147 deletions(-) diff --git a/openpype/hosts/photoshop/api/__init__.py b/openpype/hosts/photoshop/api/__init__.py index 94152b5706..c5a12cba06 100644 --- a/openpype/hosts/photoshop/api/__init__.py +++ b/openpype/hosts/photoshop/api/__init__.py @@ -7,28 +7,15 @@ Anything that isn't defined here is INTERNAL and unreliable for external use. 
from .launch_logic import stub from .pipeline import ( + PhotoshopHost, ls, - list_instances, - remove_instance, - install, - uninstall, - containerise, - get_context_data, - update_context_data, - get_context_title + containerise ) from .plugin import ( PhotoshopLoader, get_unique_layer_name ) -from .workio import ( - file_extensions, - has_unsaved_changes, - save_file, - open_file, - current_file, - work_root, -) + from .lib import ( maintained_selection, @@ -40,28 +27,14 @@ __all__ = [ "stub", # pipeline + "PhotoshopHost", "ls", - "list_instances", - "remove_instance", - "install", - "uninstall", "containerise", - "get_context_data", - "update_context_data", - "get_context_title", # Plugin "PhotoshopLoader", "get_unique_layer_name", - # workfiles - "file_extensions", - "has_unsaved_changes", - "save_file", - "open_file", - "current_file", - "work_root", - # lib "maintained_selection", "maintained_visibility", diff --git a/openpype/hosts/photoshop/api/launch_logic.py b/openpype/hosts/photoshop/api/launch_logic.py index 669d31c804..97a2dd94f6 100644 --- a/openpype/hosts/photoshop/api/launch_logic.py +++ b/openpype/hosts/photoshop/api/launch_logic.py @@ -346,9 +346,6 @@ class PhotoshopRoute(WebSocketRoute): async def sceneinventory_route(self): self._tool_route("sceneinventory") - async def subsetmanager_route(self): - self._tool_route("subsetmanager") - async def experimental_tools_route(self): self._tool_route("experimental_tools") diff --git a/openpype/hosts/photoshop/api/pipeline.py b/openpype/hosts/photoshop/api/pipeline.py index 9f6fc0983c..dbf7426e95 100644 --- a/openpype/hosts/photoshop/api/pipeline.py +++ b/openpype/hosts/photoshop/api/pipeline.py @@ -12,6 +12,13 @@ from openpype.pipeline import ( deregister_creator_plugin_path, AVALON_CONTAINER_ID, ) + +from openpype.host import ( + HostBase, + IWorkfileHost, + ILoadHost, +) + from openpype.pipeline.load import any_outdated_containers from openpype.hosts.photoshop import PHOTOSHOP_HOST_DIR @@ -26,6 +33,147 @@ CREATE_PATH = os.path.join(PLUGINS_DIR, "create") INVENTORY_PATH = os.path.join(PLUGINS_DIR, "inventory") +class PhotoshopHost(HostBase, IWorkfileHost, ILoadHost): + name = "photoshop" + + def __init__(self): + super(PhotoshopHost, self).__init__() + + def install(self): + """Install Photoshop-specific functionality of avalon-core. + + This function is called automatically on calling `api.install(photoshop)`. 
+ """ + log.info("Installing OpenPype Photoshop...") + pyblish.api.register_host("photoshop") + + pyblish.api.register_plugin_path(PUBLISH_PATH) + register_loader_plugin_path(LOAD_PATH) + register_creator_plugin_path(CREATE_PATH) + log.info(PUBLISH_PATH) + + pyblish.api.register_callback( + "instanceToggled", on_pyblish_instance_toggled + ) + + register_event_callback("application.launched", on_application_launch) + + def current_file(self): + try: + full_name = lib.stub().get_active_document_full_name() + if full_name and full_name != "null": + return os.path.normpath(full_name).replace("\\", "/") + except Exception: + pass + + return None + + def work_root(self, session): + return os.path.normpath(session["AVALON_WORKDIR"]).replace("\\", "/") + + def open_workfile(self, filepath): + lib.stub().open(filepath) + + return True + + def save_workfile(self, filepath=None): + _, ext = os.path.splitext(filepath) + lib.stub().saveAs(filepath, ext[1:], True) + + def get_current_workfile(self): + return self.current_file() + + def workfile_has_unsaved_changes(self): + if self.current_file(): + return not lib.stub().is_saved() + + return False + + def get_workfile_extensions(self): + return [".psd", ".psb"] + + def get_containers(self): + return ls() + + def get_context_data(self): + """Get stored values for context (validation enable/disable etc)""" + meta = _get_stub().get_layers_metadata() + for item in meta: + if item.get("id") == "publish_context": + item.pop("id") + return item + + return {} + + def update_context_data(self, data, changes): + """Store value needed for context""" + item = data + item["id"] = "publish_context" + _get_stub().imprint(item["id"], item) + + def get_context_title(self): + """Returns title for Creator window""" + + project_name = legacy_io.Session["AVALON_PROJECT"] + asset_name = legacy_io.Session["AVALON_ASSET"] + task_name = legacy_io.Session["AVALON_TASK"] + return "{}/{}/{}".format(project_name, asset_name, task_name) + + def list_instances(self): + """List all created instances to publish from current workfile. + + Pulls from File > File Info + + Returns: + (list) of dictionaries matching instances format + """ + stub = _get_stub() + + if not stub: + return [] + + instances = [] + layers_meta = stub.get_layers_metadata() + if layers_meta: + for instance in layers_meta: + if instance.get("id") == "pyblish.avalon.instance": + instances.append(instance) + + return instances + + def remove_instance(self, instance): + """Remove instance from current workfile metadata. + + Updates metadata of current file in File > File Info and removes + icon highlight on group layer. 
+ + Args: + instance (dict): instance representation from subsetmanager model + """ + stub = _get_stub() + + if not stub: + return + + inst_id = instance.get("instance_id") or instance.get("uuid") # legacy + if not inst_id: + log.warning("No instance identifier for {}".format(instance)) + return + + stub.remove_instance(inst_id) + + if instance.get("members"): + item = stub.get_layer(instance["members"][0]) + if item: + stub.rename_layer(item.id, + item.name.replace(stub.PUBLISH_ICON, '')) + + def uninstall(self): + pyblish.api.deregister_plugin_path(PUBLISH_PATH) + deregister_loader_plugin_path(LOAD_PATH) + deregister_creator_plugin_path(CREATE_PATH) + + def check_inventory(): if not any_outdated_containers(): return @@ -52,32 +200,6 @@ def on_pyblish_instance_toggled(instance, old_value, new_value): instance[0].Visible = new_value -def install(): - """Install Photoshop-specific functionality of avalon-core. - - This function is called automatically on calling `api.install(photoshop)`. - """ - log.info("Installing OpenPype Photoshop...") - pyblish.api.register_host("photoshop") - - pyblish.api.register_plugin_path(PUBLISH_PATH) - register_loader_plugin_path(LOAD_PATH) - register_creator_plugin_path(CREATE_PATH) - log.info(PUBLISH_PATH) - - pyblish.api.register_callback( - "instanceToggled", on_pyblish_instance_toggled - ) - - register_event_callback("application.launched", on_application_launch) - - -def uninstall(): - pyblish.api.deregister_plugin_path(PUBLISH_PATH) - deregister_loader_plugin_path(LOAD_PATH) - deregister_creator_plugin_path(CREATE_PATH) - - def ls(): """Yields containers from active Photoshop document @@ -117,61 +239,6 @@ def ls(): yield data -def list_instances(): - """List all created instances to publish from current workfile. - - Pulls from File > File Info - - For SubsetManager - - Returns: - (list) of dictionaries matching instances format - """ - stub = _get_stub() - - if not stub: - return [] - - instances = [] - layers_meta = stub.get_layers_metadata() - if layers_meta: - for instance in layers_meta: - if instance.get("id") == "pyblish.avalon.instance": - instances.append(instance) - - return instances - - -def remove_instance(instance): - """Remove instance from current workfile metadata. - - Updates metadata of current file in File > File Info and removes - icon highlight on group layer. 
- - For SubsetManager - - Args: - instance (dict): instance representation from subsetmanager model - """ - stub = _get_stub() - - if not stub: - return - - inst_id = instance.get("instance_id") or instance.get("uuid") # legacy - if not inst_id: - log.warning("No instance identifier for {}".format(instance)) - return - - stub.remove_instance(inst_id) - - if instance.get("members"): - item = stub.get_layer(instance["members"][0]) - if item: - stub.rename_layer(item.id, - item.name.replace(stub.PUBLISH_ICON, '')) - - def _get_stub(): """Handle pulling stub from PS to run operations on host @@ -224,30 +291,3 @@ def containerise( stub.imprint(layer.id, data) return layer - - -def get_context_data(): - """Get stored values for context (validation enable/disable etc)""" - meta = _get_stub().get_layers_metadata() - for item in meta: - if item.get("id") == "publish_context": - item.pop("id") - return item - - return {} - - -def update_context_data(data, changes): - """Store value needed for context""" - item = data - item["id"] = "publish_context" - _get_stub().imprint(item["id"], item) - - -def get_context_title(): - """Returns title for Creator window""" - - project_name = legacy_io.Session["AVALON_PROJECT"] - asset_name = legacy_io.Session["AVALON_ASSET"] - task_name = legacy_io.Session["AVALON_TASK"] - return "{}/{}/{}".format(project_name, asset_name, task_name) diff --git a/openpype/hosts/photoshop/plugins/create/create_image.py b/openpype/hosts/photoshop/plugins/create/create_image.py index 2cfbfa8778..ad6c657d9c 100644 --- a/openpype/hosts/photoshop/plugins/create/create_image.py +++ b/openpype/hosts/photoshop/plugins/create/create_image.py @@ -9,6 +9,7 @@ from openpype.pipeline import ( ) from openpype.lib import prepare_template_data from openpype.pipeline.create import SUBSET_NAME_ALLOWED_SYMBOLS +from openpype.hosts.photoshop.api import PhotoshopHost class ImageCreator(Creator): @@ -19,7 +20,7 @@ class ImageCreator(Creator): description = "Image creator" def collect_instances(self): - for instance_data in api.list_instances(): + for instance_data in PhotoshopHost().list_instances(): # legacy instances have family=='image' creator_id = (instance_data.get("creator_identifier") or instance_data.get("family")) @@ -121,7 +122,7 @@ class ImageCreator(Creator): def remove_instances(self, instances): for instance in instances: - api.remove_instance(instance) + PhotoshopHost().remove_instance(instance) self._remove_instance_from_context(instance) def get_default_variants(self): diff --git a/openpype/hosts/photoshop/plugins/create/workfile_creator.py b/openpype/hosts/photoshop/plugins/create/workfile_creator.py index e79d16d154..829aff3aea 100644 --- a/openpype/hosts/photoshop/plugins/create/workfile_creator.py +++ b/openpype/hosts/photoshop/plugins/create/workfile_creator.py @@ -5,6 +5,7 @@ from openpype.pipeline import ( CreatedInstance, legacy_io ) +from openpype.hosts.photoshop.api import PhotoshopHost class PSWorkfileCreator(AutoCreator): @@ -17,7 +18,7 @@ class PSWorkfileCreator(AutoCreator): return [] def collect_instances(self): - for instance_data in api.list_instances(): + for instance_data in PhotoshopHost().list_instances(): creator_id = instance_data.get("creator_identifier") if creator_id == self.identifier: subset_name = instance_data["subset"] diff --git a/openpype/hosts/photoshop/plugins/publish/collect_instances.py b/openpype/hosts/photoshop/plugins/publish/collect_instances.py index b466ec8687..5bf12379b1 100644 --- 
a/openpype/hosts/photoshop/plugins/publish/collect_instances.py +++ b/openpype/hosts/photoshop/plugins/publish/collect_instances.py @@ -82,7 +82,7 @@ class CollectInstances(pyblish.api.ContextPlugin): if len(instance_names) != len(set(instance_names)): self.log.warning("Duplicate instances found. " + - "Remove unwanted via SubsetManager") + "Remove unwanted via Publisher") if len(instance_names) == 0 and self.flatten_subset_template: project_name = context.data["projectEntity"]["name"] diff --git a/openpype/hosts/photoshop/plugins/publish/validate_unique_subsets.py b/openpype/hosts/photoshop/plugins/publish/validate_unique_subsets.py index 78e84729ce..09859ca7c6 100644 --- a/openpype/hosts/photoshop/plugins/publish/validate_unique_subsets.py +++ b/openpype/hosts/photoshop/plugins/publish/validate_unique_subsets.py @@ -29,7 +29,7 @@ class ValidateSubsetUniqueness(pyblish.api.ContextPlugin): for item, count in collections.Counter(subset_names).items() if count > 1] msg = ("Instance subset names {} are not unique. ".format(non_unique) + - "Remove duplicates via SubsetManager.") + "Remove duplicates via Publisher.") formatting_data = { "non_unique": ",".join(non_unique) } From e500cae3997f8b8d5dd716ac052c0f51d304f2a1 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Mon, 31 Oct 2022 18:31:55 +0100 Subject: [PATCH 194/480] OP-3908 - modified host registration Use PhotoshopHost class instead to standardize. --- openpype/hosts/photoshop/api/lib.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/photoshop/api/lib.py b/openpype/hosts/photoshop/api/lib.py index 221b4314e6..a16e2c43ca 100644 --- a/openpype/hosts/photoshop/api/lib.py +++ b/openpype/hosts/photoshop/api/lib.py @@ -20,9 +20,11 @@ def safe_excepthook(*args): def main(*subprocess_args): - from openpype.hosts.photoshop import api + from openpype.hosts.photoshop.api import PhotoshopHost + + host = PhotoshopHost() + install_host(host) - install_host(api) sys.excepthook = safe_excepthook # coloring in StdOutBroker From d9b33889b7ed8e09fbf9c384b35eb2f87336f007 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Mon, 31 Oct 2022 18:32:47 +0100 Subject: [PATCH 195/480] OP-3908 - remove SubsetManager menu item --- openpype/hosts/photoshop/api/extension.zxp | Bin 54088 -> 54077 bytes .../hosts/photoshop/api/extension/index.html | 1 - 2 files changed, 1 deletion(-) diff --git a/openpype/hosts/photoshop/api/extension.zxp b/openpype/hosts/photoshop/api/extension.zxp index bcbd694a00363233b5d0ee0985f3058d3de225af..f34d958ae19fae8c592ee565a82a025bf77b733b 100644 GIT binary patch delta 2862 zcmZuzc{mhY`<@wQvQF8?Izu7bAWO(rme+19W0&0sWhwjCB%-8{Wol5;*mq`>ZS0YR zEM*C=CEFNlg-{=Uzuxb=zU#Zrb*|?==Q+>0&mZTx?z=mmrYWB$Ii4m5;#`$*4tN|> zD}Witu|^NHOLqda7>h(sz`CqA_yk;4Qeip)cx_$=5TA4EYg8!LVzHCkQJtGEGuYMC zkcO5W007Vdj*_}2cTfF)vSz@9j^?;Bb;3ODlHRK7X#s!HHQgPFMnHSD(uVqu z_3Ah5NSS&>gmOGm;G7(sDtpZ{eVDl zmHzI=cZp{2l-OEu#NIga(p}qpC4VNVL|sy_fylgIzd-WL7Skx!CWY38vtTF9;pu2?4|lN)Zd<(r$z(Px6N?fQmSYJnnjZuu zO*h;BE>_>k%oLE*VXzr#M`n|8{S8^y91>;_ug+T2T46St81?kIWLe7mc!!!R80kwC zoaES@(4FQqb|10PY`m#I)RK(4+a3`Tm7b(|;d+E#hh>4sGY@)9C7S|vVj>*X-_WzI z5e!~MX+bCPKAX9!+hMaJ?m9eh#m||2N^HXy#FPuZfAedEUV3hL5ia+*-oASws~*Va z()BXyV^IJED+q|>b*NACpSr}|Ij9GJKU|g^qA&q5K{;UAUKMIaqx6`(@+L1gLX`{C zEXpQ7an2MPjVV_k2IW4N@jvSmHHN(1l?xt{6zG(6T zZU(`TEq?>Wx_;9Q5f&9jd3-D#EBw~9r}K-K!<4RqWL{1&@r+TEoxjkv~QB2OV-$S`%>E_q@& zx@?Hu%#h0v4CLVj#d5i)NFoH2-6~_j16<4OPe0Z+O!mklG+yuD?{+PEw6nL+#ordz zyPA*>cQ>PTy*Jk?Q|Vvnu*-Z{eMr9frkoPNTc8^!nXF@rW+B^95HXp$!570Vp6&pM 
z2tAHPG3Dv`A1%bvLxx+H+pW}a@H&X_C>7>;`5s5{b9^*ST@kN@zW z`R~S|JNocCb1osSsdJst*gyH>NP21?klPE3TQA$XOpwJ{f#?cX@r2?_4Bckh-=LSY zXnHi#oPMXwzc-SN@c!%`d`mdWj$&J4-7J5*BDK&zvzB3{YjbJ;Cn+f4or~G!FK>+l z4o=?(8ru3ONDYFmcD(seh-qcvhlg9j>DSZ0jA_s?V^jI>0kF~@$(72| zt)C8*-&iPLnBP<5T;Tla)H`QIH`zPRtiC?`OYBla)t+ldunVuM4jK6nCM37TptwzL z-a@)XBcnamw}(Q*l$D!uma^)t5M)6=YxUN{?ymuqDB*+A>c!>evKNaf6S`h26&`Vq zsRgp)FF@+*189AR{rdM$o_eiZc8bCpMUKtF z7|RMR$fZYcm=Q$O+V7OLUVu>gR{Op`4aR)GlJ{zr{n~>oL88ow_*2LaqB@f@Zy%GG zwfU}cTIqwdhUgRwMhAUzUdhmpHA@O;=rheo4gz7#CG;?0z3n~??{bfjEHkBV~w7; zB9mv7T<%-}|NA)2XO-n@A}CnKd_(OQUOX=s-2`v9I$<_Xq7U5F-kuleE+V1lh4m&~ zyQKn&D8FVm`(v+00(ihpc3A&QkXDv{))n{S)IZ;-0_yGN8A|8#C; z$CYPwOG`F3Ep=aw6}pUSwoO&eVkMwuA35gs8xDVJb3Y33VmNp2dILU5ce7}vR>fGO zmrTrAxxkHO|9dzGo;}(_p%S!nq>av(Iip4-NP;9r91#h+Dg%h%gEny6iKw)_X}sj+ zO@IAZ z+MPbHHJgHau}}OJx#o-Yz{+8Y;v<|M$NI!;OJ$o)8sTYC2q$OoxQ_?j*V12%z8;d$ zbo9M)&PaOIdkR)-z}7Vos}vjjxz)&Roe>Na)e=pHWS0t?pr??Rp{L)|K^qv>w+Xwf z;mrEGpn}cBniRGo%8y`|Fflml>-wY4sG-LNvG)jud}20h!o z_D=8`&B*Xq2KkY?=@{9U%lEr1RfY)oef!mlp3n>$P`ZNX%-g(bd>>}&3&G6)V!3I4 zm<+z`!9wY-tv*)U{3)qMXA2v&eH*YFxk0+2xxnYH1}ss-RlPLF3n?vq5*O%OedY=i z{Y!1uGh18rN6 zZNBh7lnxC#&m^u7nH1+5ajDCinQcqdnyYVP)bt%-qZo0jmlzXg1@CT|f4;fR(RK4& zooie22qxW#Ku>rCsO0SAUO$?}we*-}}1tC49TN z-S_p9kf_nb7f!v_}>N& zF(|=$Y~cQ{;2%A-ALP@&&esgKf;Mr+do2uJLe7GS`sW9I*4_(vll0D#aL z0Du#~a`I3hK;S5Lq_zx-u>QN|ACkR6KDoEO0DzB|4<<0oAM-yrz(+R%0BlF201N=! I`Eb1Y7Yc$Y6aWAK delta 2851 zcmZWrc{CJi8y{oMZZIKRWeHJYY$M5%kR>~1U$TwtOPC2U$TAw)xy_Za6GI{-W6R#i zG7PdumfT@XQhjmnckeyl_s4sl-+79L=#s*V1Gi zTii_`mFWa%vy@AofSoJe^e5o13Y7H(_2LPrM+AdUfK*nu_({m_>!Op3C#}x( z?1grYmKEWVF&#hvfP)=P(|I3}^sk0n+4J|uE_3$IXHZ4__&p07B!m{@<+cWy(b1g95u{>d`>A#OoCMw6bl43f6^Z8(u6X5?#5xSx1Yo$NFAd)clIZXEWA=9Jc0jFXZBO>@~GIQArnG!+d|X%ZXLZL=Bln z8Sxsjdg>w=jeTzs4%DxlHB^3bW$`8mYcGATd1&e4=-je(viC~iYY`H;erY(zU(!-w zi&-_;r>R>jVwK!w(pR9!;8&wu7$9BdY!+<%d3`=yD6kdft>z&Z(u5jP4WX6jOw(2^ zHeX}?aRWeAY6+HFXbrlOT#})dm2~Iqj_*shqJkEL#EskWz(N!s!QeH*hFz^J)1n$* z|JdIGD;V@%YZ|hhBaKI(at&SPF?g@LUmWOa=!^*u6@)~L7eLf=0pfMo3#cSj=JVql z{szj;Bc@*@xu9%{dHh^rYRk>rnf|&E*A9}t;k(W3Y)IGv<$JpyH z0P8w;Kl0hAOvxQ3z=`mAmAjMsZK-4D@!*@qRpm^MRUzr`hD<^skrp`zfCe~9dKeM{ z@7s#=QuUVUH`;y##8RF$7Krg~R;5n{)6r59%uAqcMRgc{$tyib0_HcL2$xObwA>!yIeILrzsJl&nuZh zzOJ*vuBxpq!+0I>U2iZu*NyU_=H2G9!C}J$&Z-vfb>vVQunvLoPM9bc?^I4sz#Jde z!&?pWR= z$TywVjbVm`|WeF7I`(KIZDBg;5oidHSu56m#fm zYURYa+>e^0S{hQPN{U1?q62LeUTG*C_3re|JRR}B78n6*t9>MLC$#X#%wcmH>8ieq zvFyDqeYXd}3fmG5+JYVuFFClnLA<4sw?p3#l0NuI+D#)&E+xeTJ*)?X`3!~d$lkvZ z3UO}mzPNF5)rHy#9XjV)fy+WG19CNICho-XEC(mqoZ+an+MULK!TXt)VU#=+!W9;& z-(A+S#FprJEa_CFfoecxG}F~rOk44rxCf;u%ou0-rHmgbPKlw3*6bs-nEU#fi>*D8 z(H{5;{bIy|h2jEQI(d-VS-;ogW{W0cjn}^VR!*I^b6q^jSuWQMxuBiDAG^wD1^Ju? 
z2`g~T1J3J{hCI=`$p&k{)ar;VVFvjrGC%A}-RfmU3!<#{VQ0&%(F5P?U74qt+Mx9s z^pW@{zZ=`hgA_waOMaIDqDyJuu5F{2S=(~XDxw2pvSpdS6TGeGc&~l;fr1NYe2(vq12kH!lI|ESDg>WXT!ns9I0D}lxUDuq{97DLs+(iR@n+r zg76vd$vcQF7w%6;V_{2{$k+#E8Lczy=g6I1%W%;2CB37u+PYoNVxj5%xcc`PwkLVu zHZ#gh@*`}NLafhO;#Q+BHV@7*NSL9>@G~YW;p6$CA}^7pUf-0VP#tjo=2wsB89tX@ z0mPNr1O=KQJ2^Tzak*RRlgPVa$v3Z^LolaLlyx$^9kv&e!VxJ(`fg4u7P$P|fmJO- z#@1Y)fv{&VVpITUN0-}6hPn*ve0d#B(Ujh5C!+?1#`F6{CU@8|SLL7E3$niEC+bP| zP=8n5u&Yb1w=<`1D~`3Ik+mk-W%yFLXr|KHr6uz9pN&|@Zw@}f5kcU+Xz<|>f;?84 z(*86yOyJNww^r@WbKNOcQ1rpxsrm!sJ<6wBm+tg8#fZA*Q{nh=Yky1931-CY(SW4{q_CRCr#2ALjifBs#VmV~uv~d;K z663MQ=oyV-PttRM3)df#o1q6?h?NaOB1tnX-U!(xj5eRO5J1kHTfeT`luzRM*zkkJ zFI1fUskv^E2>g_Kyi(}GSnOE5m<^UeMfxeU#M@?6?*R)?$d{a@ZVUz=si;4c=L_Xk z=kfX~zNhv?&Y<9J(A@R3=gunm&0B(1bVX2;A}@iu;n*kdrTo5~-m|wCLL6OsI~R{1 z4@fpiq+NT$IER4@pk!LgjZbGMNa4F!?gjgI8_XN>oK!3R;~pq^N~J>lJO4|F!xMW{(aj)5I(-}5qmC(2{)3>Q2ldNyc3B2%yMGWyj~ z@gTwbn|R+;Qn#;zOsd9NrD_2iQ4x)G?|?;Q`{jJ+@~9#Y!Xuz08A01u&M_dszYPXt z9mIhd6O~6aPa8^UMz+){~H*y7ZEcZ+eX@J6jzg6J=hz2kRgM*BY z()_QO<0i;}j>X)c8GrMkDM64%9StM!^B=eQce#J~@HYZ4?Pxauz0fTnLuHu2 v^Z>?x68^5XO%POgKL`N8ykPFZ5&rJxI{z0>e=-_3fE!Rs4*)zKIv(^tY4BPz diff --git a/openpype/hosts/photoshop/api/extension/index.html b/openpype/hosts/photoshop/api/extension/index.html index 80018afb78..0b6cc2e182 100644 --- a/openpype/hosts/photoshop/api/extension/index.html +++ b/openpype/hosts/photoshop/api/extension/index.html @@ -101,7 +101,6 @@ - From 54828e5d7d1d8ab35b6393e5e9ffca74060aa085 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 1 Nov 2022 11:44:38 +0100 Subject: [PATCH 196/480] OP-3908 - fix renaming, keep layer name If subset was created with layer name, keep it when renaming. --- .../hosts/photoshop/plugins/create/create_image.py | 13 ++++++++++--- 1 file changed, 10 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/photoshop/plugins/create/create_image.py b/openpype/hosts/photoshop/plugins/create/create_image.py index ad6c657d9c..67f08a311b 100644 --- a/openpype/hosts/photoshop/plugins/create/create_image.py +++ b/openpype/hosts/photoshop/plugins/create/create_image.py @@ -98,6 +98,7 @@ class ImageCreator(Creator): data.update({"subset": subset_name}) data.update({"members": [str(group.id)]}) + data.update({"layer_name": layer_name}) data.update({"long_name": "_".join(layer_names_in_hierarchy)}) new_instance = CreatedInstance(self.family, subset_name, data, @@ -164,6 +165,12 @@ class ImageCreator(Creator): def _clean_highlights(self, stub, item): return item.replace(stub.PUBLISH_ICON, '').replace(stub.LOADED_ICON, '') - @classmethod - def get_dynamic_data(cls, *args, **kwargs): - return {"layer": "{layer}"} + + def get_dynamic_data(self, variant, task_name, asset_doc, + project_name, host_name, instance): + if not instance: + return {"layer": "{layer}"} + else: + layer_name = instance.get("layer_name") + if layer_name: + return {"layer": layer_name} From 352d5c80b47b57b75625969d6722649a0990f4da Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 1 Nov 2022 12:13:41 +0100 Subject: [PATCH 197/480] OP-3908 - fix - pass None as instance argument get_dynamic_data requires instance as positional argument --- openpype/hosts/photoshop/plugins/create/workfile_creator.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/photoshop/plugins/create/workfile_creator.py b/openpype/hosts/photoshop/plugins/create/workfile_creator.py index 829aff3aea..5ad9fbaa88 100644 --- a/openpype/hosts/photoshop/plugins/create/workfile_creator.py +++ 
b/openpype/hosts/photoshop/plugins/create/workfile_creator.py @@ -55,7 +55,7 @@ class PSWorkfileCreator(AutoCreator): } data.update(self.get_dynamic_data( self.default_variant, task_name, asset_doc, - project_name, host_name + project_name, host_name, None )) new_instance = CreatedInstance( From 0af6d3f205461a99fc7f6e9fb144e03efd61a9b6 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 1 Nov 2022 12:18:48 +0100 Subject: [PATCH 198/480] OP-3908 - remove workio file Ingested into PhotoshopHost --- openpype/hosts/photoshop/api/workio.py | 49 -------------------------- 1 file changed, 49 deletions(-) delete mode 100644 openpype/hosts/photoshop/api/workio.py diff --git a/openpype/hosts/photoshop/api/workio.py b/openpype/hosts/photoshop/api/workio.py deleted file mode 100644 index 35b44d6070..0000000000 --- a/openpype/hosts/photoshop/api/workio.py +++ /dev/null @@ -1,49 +0,0 @@ -"""Host API required Work Files tool""" -import os - -from . import lib - - -def _active_document(): - document_name = lib.stub().get_active_document_name() - if not document_name: - return None - - return document_name - - -def file_extensions(): - return [".psd", ".psb"] - - -def has_unsaved_changes(): - if _active_document(): - return not lib.stub().is_saved() - - return False - - -def save_file(filepath): - _, ext = os.path.splitext(filepath) - lib.stub().saveAs(filepath, ext[1:], True) - - -def open_file(filepath): - lib.stub().open(filepath) - - return True - - -def current_file(): - try: - full_name = lib.stub().get_active_document_full_name() - if full_name and full_name != "null": - return os.path.normpath(full_name).replace("\\", "/") - except Exception: - pass - - return None - - -def work_root(session): - return os.path.normpath(session["AVALON_WORKDIR"]).replace("\\", "/") From c2aa33cf2bf9eb75c69f4c6fb1aa1bf3777d8493 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 1 Nov 2022 12:20:59 +0100 Subject: [PATCH 199/480] OP-3908 - remove subset manager menu item Obsolete, replaced by Publisher --- openpype/hosts/photoshop/api/extension.zxp | Bin 54077 -> 54041 bytes .../hosts/photoshop/api/extension/index.html | 12 ------------ 2 files changed, 12 deletions(-) diff --git a/openpype/hosts/photoshop/api/extension.zxp b/openpype/hosts/photoshop/api/extension.zxp index f34d958ae19fae8c592ee565a82a025bf77b733b..c2fba57495f90fbd234ca29be8c5e67e09171a79 100644 GIT binary patch delta 2752 zcmZ8jc{mhm7axo*LuHM*#&P}pl&xVf$VA!3Zj7xAO%Y-wOJXcp zQ=*K>KCZE3jcRPg7x#Y8_dWN0pZ9rw=XcI|&-=%DpWkUJX3H&RON(P;08mwUDHgU_ z0E$q_l67ELo3L(i9Rfq1(o=_E?d(nVL*S^X13CnGu-qeuN$M8ahr|a9F1ABr43T+A zpsg+_9D*0PAmKxBI;&ImFl4=vbVy`SQS5wo+)y@m(1{ZntN_4K5P_x5A2$g=x&MBM zyD=}j+e+4yKTYLW_f-0pgrBHaevP*HUolE6?>@6}c?&%Np1+b3Xgb~y{CvWtY^P=B zq<2;gi!a&0Aa9q>=-b=+*;L<5XUwep_?7yR_erpPY^4jkO{yCDLcQ>bMt-hf;&f|u zeT^=(>r8%HKlLu$=ot^a3Ri4(|3@F^b9(xf-GGs$rlzo(;x~jrNRDBV^x&O^>+kX~ zW|+3wW}Ekx-3E#~X&Z6|$Zoj0X-!^F$-7KAU+B)>1k)~gWEs+yTWi+V0P*|+`WZ1b ztGk2%6{vvGMy^gAQ;pF{myT!mFsrJ5U2!t8f4QCa2pH>4LaOBP;;4h8{{kW`X<8wR^G2G9<*sxibjH8Sf5t3F)J@safuSv$OuGHDf^+%AlRP$+Ftxn;z zgc})1ipM;aw3k2p_$g1(DOC}#h8UkoE$&xI+7N+wt`-+n#nA&hT; z2AxmlnM`aYwvH$Wjz8(Pss}ckIGXyLFk_%o2IQNxNds3_6X`^HeJ}i5DJ=#+)8q71 z*S@G@f9s<7*7ehZKFZ%6M`M|jq%G&$$^uhg=7Lw1lltrPP?kbA)$62lnY*5T!5wgG zJRqbQMACb?oAd^8y?lRrb=SWQ+qa6Rh%?}Vh~#y;V5r*X%1g2OBe*dwF3G5tz@4Da ztls0bLeq)WD!M52H%O^x>XB~(0L8dE^7vpxK+A=&IK54Wh>xoR%4K^laC0``5x~KU zc0&;^bcRbDH0~%m2XljVEhnln#XaLZRU5Z|NSHSF$^>3ACTR5#zf>%iE-q$wO+Ax)v)pjH zY4Nu9Hl?M`Z%5GMQ@gMIa3}81lBI@SzNY?nfV_4U&}&V#s{!4eQ-1!Mo71*c`5k;X 
zs>G7NO9fUbdwbg|@kRCd#6Q5WdrrpKlVO;gMhzg>#@Vv=?;(All^zLJZxBX9n;Ljb zp)q_h%F^RDPm|%EKzK`AHt~*KjQWO~*|&(w?`OLkRVgu>rHWTM%?oz z+1gU7>W!y1OONYE1pXd96gfQH!8eAI%_5Tfc{p50{d6>|PM#LWM1&@MzX5cwai>y4 z?(j}Hv}*beGnb&VL<}SKDUQs97^O(xU^6<-NhDj3I_KrDz>{4;_hB48vX`ZV*IYB; z&v^9-nI*R3MG$W*8)Wg=M_P}hCotW_o^=E&sLCgW?eDm6#*tyl|H=f(XhUtjzdb44 zo%7KsBa~zwG|@K_?We8vC2@;-PCC6qVU(_&&PO5ZD|ATe%RnV?yB*GIIaiBu-!Y5e zN*cE)zCW@w>T}Z;Z5hcQwFyKxoupVJr$3v_a7&FsDbGGHP zg?qtvAD}Gs7uGmP4F)%9Lrc&HmSzUZ3vcSkIP;(LAvo3B@1paht5ZVKH$ZFJc0e9) zcW!=7z4Wr<4FPp;jVK#5)8-rPGsw{n^5S>B_dd^tv4UI!N3PoqhK)mF|4f=%UOs^= zL=1m{IV}5z01U|TUKE+p25G}!UfWpcm`H+8adSiz5XV7rjY3b%n6}Vw`u@PgcIs)o z`xM0Q{B|`U)5rvjW-}2c`WzANwHZJ>XRDO()B-vNtKYKsRdQONF<8wlc_i63cmA*Y z(YH13%(!uI5r#yNW|)fv>vOg~rsH&`VMHo8uG|QM&L_(T1a^*|_@K z$ii1>t75my5qF1JKIVij%+BjlbPF1FYJ;Twa68wfySckuCNc1W+wD5A@INqM2MOyQw2 z?zL{%%%|H4eO1xAibuL+WUfq`lal@QQ5E2|0_)2D@$) z#rSGBF38Y;;?)XNaf(2y`t;GeaabEeoaJ2Dubh>=Z>8=|=U%Et!*?PRFed$eqrSjp z@fDq@<$gg$^~ZfzM{`&MiI4yv^q zUq~Iq*yB2H`<%FDJA@$cWV;rOEyWjW#ZZQoY{|2>BZ44e@-n8V=aEUXZ~|D?`x z0L;Qi@Es5pJ;?ss`){c$7!YD3cLE60onjCm;K}c*HrDI_&I9H9_jPLnU?{m40Pywp gy&i-OxNZgezl6Ulupc}dAn1a+9Q)Vfri8~lo z)G4UK1;9^%NLr`h>5=XF7pIA54bEUFO^Tz1;(}rY2?PMNGA9Gud~s3lfP8*vmCdgvbjuGHJ#`X<)wl@wJ24nLSgUHQ`{PtlJ_B2I@K zq%X9{Kfsqbx5G4!vreLQCT|1#Y&_=GGWWuKC6OgkRl@ShOV^6dS@4Ff=C~U@ihVv_ z)5BFHh2K@{Krxw)N=2dh1!Y)*3Kxe#3A4?1zlzlMGBWsNv>B|&+EH0e_<{P&8}_kt z$k!LFXf3f@jf}c_oYE|%zC0t%<&5;j@{TfWj+jm}8rvb<0~k;8chv;L?zVVhoZ_<- zPkfKSo6t<~WX4gCiFl*`USzm~+FN?q22uZYgeG*F;JuxrvKu-t>+i(X?^n#I5#1`}`R$kpq7f)`;>FrQ<*xHN{|RaAtyaRz#ogQ+3@$dboe7 z8G%DCUPUeTcId7ir=1OJ>@VGUip#C&h9VMo!TI1(>ZEPr)aZlK5jIl;P6IHIn+Fud z>6Rpph!lxWD<6NPceTpVJ4A-Yng@N|eT1g6Im?3B;l+4Be($U!hktX?oOC9DgM(_8Llu zdwp^Xx+552OR*`oYL>lMo?PIUQNys-wY_rqgB%ySr=b**CL4jc71%BNN$g{_v6>@wJMQogem;UpVObP~Yc9 zIl#rU$@eb^Z?korTYq!mr^uD?$^(~fwCaSga>ChobCBRcXp;?*{JFg@FJ+`)nmwxB4Br8hTYl8kEk{u9 zD9I9o45v4~$9VF7j2kS~Az;I7lmVWY@zP-I}{MmSbzehxj7-IOS# z+J3*Z^)iIgzuy1tc@Xy7wcOY1Y&RZX3lwIKBb-5f5Z0cSdiRvftofJHV?;uY$Q!e3 zmjJigrJOiBdKmGb$fxERklZ#!*;V?CAcaP!`V>1c*^n2Eb?BY63Z8`TH>FikRIOL^ zG2{-k^Xp<4pxLU7AsN^E%D7e-{>gJap{YM)Ay6g9^~;3h2+eAzuPjqhnM*9<%nMH} zyhXy#<89B_EXEl7^Q`ozP=qWPUOuo`eEjmjZ;Jht_l13Msd}ZMfp;)dU@}wZAa`!qWwm=ACvoZd;5s`FLIvap~FpNs6U*f$sO_mH!@Nj*OdBaF!v8k`pZCh8pwmL>b5R%Tm3+q0K6=acX+4@p1o zZusEbaWdG#m~h9ZQQk3H5?Q9xh6GwP($NV#>FrMUrQ|20kGnWD74xu+<1W3*LwU;$ zVCx2mRRV^1VL5hBdkhOjw>)TqWR(aSV`flSp=W#Pp!E!!yTpCgFlN1dQ2usYbrP(Q z@;%6TLPtiTJWFiAB6Xa2PyHqv

;MWu=4LG^hw}^$X0L!_0SYyyw3`GdB8#L3XTm zHd6Z4>Z2|TbAD>KOpl_HcrUMktQ(Zgw( zJttRd@8AM=?*aDjZjoru5G?!4HvkNtQ)*Bg*(Me?AeRHZlj zY}CY-p4AhK9Y8(>f2q!Vd$B|gD*Ru!*-inuAcf8-G2F+1^L9=*$*0D#dS=IY{lXl+T17A=7ITiU`L z3^;S5`tOSWF+d0aAaD)<-~h0kz5)o~`yD%ylZJ(0|J3|*OYbm`%)34Sz}wRs8xZP; X{Xdt0e@g-Yu-`Ku3;^8!aI*Us%-kC* diff --git a/openpype/hosts/photoshop/api/extension/index.html b/openpype/hosts/photoshop/api/extension/index.html index 0b6cc2e182..0c0db3a223 100644 --- a/openpype/hosts/photoshop/api/extension/index.html +++ b/openpype/hosts/photoshop/api/extension/index.html @@ -64,17 +64,6 @@ }); }); - - - -