diff --git a/.all-contributorsrc b/.all-contributorsrc
new file mode 100644
index 0000000000..a3b85cae68
--- /dev/null
+++ b/.all-contributorsrc
@@ -0,0 +1,315 @@
+{
+ "projectName": "OpenPype",
+ "projectOwner": "pypeclub",
+ "repoType": "github",
+ "repoHost": "https://github.com",
+ "files": [
+ "README.md"
+ ],
+ "imageSize": 100,
+ "commit": true,
+ "commitConvention": "none",
+ "contributors": [
+ {
+ "login": "mkolar",
+ "name": "Milan Kolar",
+ "avatar_url": "https://avatars.githubusercontent.com/u/3333008?v=4",
+ "profile": "http://pype.club/",
+ "contributions": [
+ "code",
+ "doc",
+ "infra",
+ "business",
+ "content",
+ "fundingFinding",
+ "maintenance",
+ "projectManagement",
+ "review",
+ "mentoring",
+ "question"
+ ]
+ },
+ {
+ "login": "jakubjezek001",
+ "name": "Jakub Ježek",
+ "avatar_url": "https://avatars.githubusercontent.com/u/40640033?v=4",
+ "profile": "https://www.linkedin.com/in/jakubjezek79",
+ "contributions": [
+ "code",
+ "doc",
+ "infra",
+ "content",
+ "review",
+ "maintenance",
+ "mentoring",
+ "projectManagement",
+ "question"
+ ]
+ },
+ {
+ "login": "antirotor",
+ "name": "Ondřej Samohel",
+ "avatar_url": "https://avatars.githubusercontent.com/u/33513211?v=4",
+ "profile": "https://github.com/antirotor",
+ "contributions": [
+ "code",
+ "doc",
+ "infra",
+ "content",
+ "review",
+ "maintenance",
+ "mentoring",
+ "projectManagement",
+ "question"
+ ]
+ },
+ {
+ "login": "iLLiCiTiT",
+ "name": "Jakub Trllo",
+ "avatar_url": "https://avatars.githubusercontent.com/u/43494761?v=4",
+ "profile": "https://github.com/iLLiCiTiT",
+ "contributions": [
+ "code",
+ "doc",
+ "infra",
+ "review",
+ "maintenance",
+ "question"
+ ]
+ },
+ {
+ "login": "kalisp",
+ "name": "Petr Kalis",
+ "avatar_url": "https://avatars.githubusercontent.com/u/4457962?v=4",
+ "profile": "https://github.com/kalisp",
+ "contributions": [
+ "code",
+ "doc",
+ "infra",
+ "review",
+ "maintenance",
+ "question"
+ ]
+ },
+ {
+ "login": "64qam",
+ "name": "64qam",
+ "avatar_url": "https://avatars.githubusercontent.com/u/26925793?v=4",
+ "profile": "https://github.com/64qam",
+ "contributions": [
+ "code",
+ "review",
+ "doc",
+ "infra",
+ "projectManagement",
+ "maintenance",
+ "content",
+ "userTesting"
+ ]
+ },
+ {
+ "login": "BigRoy",
+ "name": "Roy Nieterau",
+ "avatar_url": "https://avatars.githubusercontent.com/u/2439881?v=4",
+ "profile": "http://www.colorbleed.nl/",
+ "contributions": [
+ "code",
+ "doc",
+ "review",
+ "mentoring",
+ "question"
+ ]
+ },
+ {
+ "login": "tokejepsen",
+ "name": "Toke Jepsen",
+ "avatar_url": "https://avatars.githubusercontent.com/u/1860085?v=4",
+ "profile": "https://github.com/tokejepsen",
+ "contributions": [
+ "code",
+ "doc",
+ "review",
+ "mentoring",
+ "question"
+ ]
+ },
+ {
+ "login": "jrsndl",
+ "name": "Jiri Sindelar",
+ "avatar_url": "https://avatars.githubusercontent.com/u/45896205?v=4",
+ "profile": "https://github.com/jrsndl",
+ "contributions": [
+ "code",
+ "review",
+ "doc",
+ "content",
+ "tutorial",
+ "userTesting"
+ ]
+ },
+ {
+ "login": "simonebarbieri",
+ "name": "Simone Barbieri",
+ "avatar_url": "https://avatars.githubusercontent.com/u/1087869?v=4",
+ "profile": "https://barbierisimone.com/",
+ "contributions": [
+ "code",
+ "doc"
+ ]
+ },
+ {
+ "login": "karimmozilla",
+ "name": "karimmozilla",
+ "avatar_url": "https://avatars.githubusercontent.com/u/82811760?v=4",
+ "profile": "http://karimmozilla.xyz/",
+ "contributions": [
+ "code"
+ ]
+ },
+ {
+ "login": "Allan-I",
+ "name": "Allan I. A.",
+ "avatar_url": "https://avatars.githubusercontent.com/u/76656700?v=4",
+ "profile": "https://github.com/Allan-I",
+ "contributions": [
+ "code"
+ ]
+ },
+ {
+ "login": "m-u-r-p-h-y",
+ "name": "murphy",
+ "avatar_url": "https://avatars.githubusercontent.com/u/352795?v=4",
+ "profile": "https://www.linkedin.com/in/mmuurrpphhyy/",
+ "contributions": [
+ "code",
+ "review",
+ "userTesting",
+ "doc",
+ "projectManagement"
+ ]
+ },
+ {
+ "login": "aardschok",
+ "name": "Wijnand Koreman",
+ "avatar_url": "https://avatars.githubusercontent.com/u/26920875?v=4",
+ "profile": "https://github.com/aardschok",
+ "contributions": [
+ "code"
+ ]
+ },
+ {
+ "login": "zhoub",
+ "name": "Bo Zhou",
+ "avatar_url": "https://avatars.githubusercontent.com/u/1798206?v=4",
+ "profile": "http://jedimaster.cnblogs.com/",
+ "contributions": [
+ "code"
+ ]
+ },
+ {
+ "login": "ClementHector",
+ "name": "Clément Hector",
+ "avatar_url": "https://avatars.githubusercontent.com/u/7068597?v=4",
+ "profile": "https://www.linkedin.com/in/clementhector/",
+ "contributions": [
+ "code",
+ "review"
+ ]
+ },
+ {
+ "login": "davidlatwe",
+ "name": "David Lai",
+ "avatar_url": "https://avatars.githubusercontent.com/u/3357009?v=4",
+ "profile": "https://twitter.com/davidlatwe",
+ "contributions": [
+ "code",
+ "review"
+ ]
+ },
+ {
+ "login": "2-REC",
+      "name": "Derek",
+ "avatar_url": "https://avatars.githubusercontent.com/u/42170307?v=4",
+ "profile": "https://github.com/2-REC",
+ "contributions": [
+ "code",
+ "doc"
+ ]
+ },
+ {
+ "login": "gabormarinov",
+ "name": "Gábor Marinov",
+ "avatar_url": "https://avatars.githubusercontent.com/u/8620515?v=4",
+ "profile": "https://github.com/gabormarinov",
+ "contributions": [
+ "code",
+ "doc"
+ ]
+ },
+ {
+ "login": "icyvapor",
+ "name": "icyvapor",
+ "avatar_url": "https://avatars.githubusercontent.com/u/1195278?v=4",
+ "profile": "https://github.com/icyvapor",
+ "contributions": [
+ "code",
+ "doc"
+ ]
+ },
+ {
+ "login": "jlorrain",
+ "name": "Jérôme LORRAIN",
+ "avatar_url": "https://avatars.githubusercontent.com/u/7955673?v=4",
+ "profile": "https://github.com/jlorrain",
+ "contributions": [
+ "code"
+ ]
+ },
+ {
+ "login": "dmo-j-cube",
+ "name": "David Morris-Oliveros",
+ "avatar_url": "https://avatars.githubusercontent.com/u/89823400?v=4",
+ "profile": "https://github.com/dmo-j-cube",
+ "contributions": [
+ "code"
+ ]
+ },
+ {
+ "login": "BenoitConnan",
+ "name": "BenoitConnan",
+ "avatar_url": "https://avatars.githubusercontent.com/u/82808268?v=4",
+ "profile": "https://github.com/BenoitConnan",
+ "contributions": [
+ "code"
+ ]
+ },
+ {
+ "login": "Malthaldar",
+ "name": "Malthaldar",
+ "avatar_url": "https://avatars.githubusercontent.com/u/33671694?v=4",
+ "profile": "https://github.com/Malthaldar",
+ "contributions": [
+ "code"
+ ]
+ },
+ {
+ "login": "svenneve",
+ "name": "Sven Neve",
+ "avatar_url": "https://avatars.githubusercontent.com/u/2472863?v=4",
+ "profile": "http://www.svenneve.com/",
+ "contributions": [
+ "code"
+ ]
+ },
+ {
+ "login": "zafrs",
+ "name": "zafrs",
+ "avatar_url": "https://avatars.githubusercontent.com/u/26890002?v=4",
+ "profile": "https://github.com/zafrs",
+ "contributions": [
+ "code"
+ ]
+ }
+ ],
+ "contributorsPerLine": 7
+}
\ No newline at end of file
diff --git a/.gitignore b/.gitignore
index fa3fae1ad2..28cfb4b1e9 100644
--- a/.gitignore
+++ b/.gitignore
@@ -70,6 +70,8 @@ coverage.xml
##################
node_modules
package-lock.json
+package.json
+yarn.lock
openpype/premiere/ppro/js/debug.log
diff --git a/.gitmodules b/.gitmodules
index 9920ceaad6..e69de29bb2 100644
--- a/.gitmodules
+++ b/.gitmodules
@@ -1,3 +0,0 @@
-[submodule "repos/avalon-core"]
- path = repos/avalon-core
- url = https://github.com/pypeclub/avalon-core.git
\ No newline at end of file
diff --git a/CHANGELOG.md b/CHANGELOG.md
index a48e9ee806..e2ff9f919c 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,25 +1,39 @@
# Changelog
-## [3.10.0-nightly.1](https://github.com/pypeclub/OpenPype/tree/HEAD)
+## [3.10.0-nightly.2](https://github.com/pypeclub/OpenPype/tree/HEAD)
[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.9.4...HEAD)
### 📖 Documentation
+- Docs: add all-contributors config and initial list [\#3094](https://github.com/pypeclub/OpenPype/pull/3094)
- Nuke docs with videos [\#3052](https://github.com/pypeclub/OpenPype/pull/3052)
**🚀 Enhancements**
+- Standalone publisher: add support for bgeo and vdb [\#3080](https://github.com/pypeclub/OpenPype/pull/3080)
- Update collect\_render.py [\#3055](https://github.com/pypeclub/OpenPype/pull/3055)
+- SiteSync: Added compute\_resource\_sync\_sites to sync\_server\_module [\#2983](https://github.com/pypeclub/OpenPype/pull/2983)
**🐛 Bug fixes**
+- RoyalRender Control Submission - AVALON\_APP\_NAME default [\#3091](https://github.com/pypeclub/OpenPype/pull/3091)
+- Ftrack: Update Create Folders action [\#3089](https://github.com/pypeclub/OpenPype/pull/3089)
+- Project Manager: Avoid unnecessary updates of asset documents [\#3083](https://github.com/pypeclub/OpenPype/pull/3083)
+- Standalone publisher: Fix plugins install [\#3077](https://github.com/pypeclub/OpenPype/pull/3077)
+- General: Extract review sequence is not converted with same names [\#3076](https://github.com/pypeclub/OpenPype/pull/3076)
+- Webpublisher: Use variant value [\#3068](https://github.com/pypeclub/OpenPype/pull/3068)
- Nuke: Add aov matching even for remainder and prerender [\#3060](https://github.com/pypeclub/OpenPype/pull/3060)
**🔀 Refactored code**
- General: Move host install [\#3009](https://github.com/pypeclub/OpenPype/pull/3009)
+**Merged pull requests:**
+
+- Nuke: added suspend\_publish knob [\#3078](https://github.com/pypeclub/OpenPype/pull/3078)
+- Bump async from 2.6.3 to 2.6.4 in /website [\#3065](https://github.com/pypeclub/OpenPype/pull/3065)
+
## [3.9.4](https://github.com/pypeclub/OpenPype/tree/3.9.4) (2022-04-15)
[Full Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.9.4-nightly.2...3.9.4)
@@ -53,6 +67,7 @@
- LibraryLoader: Use current project for asset query in families filter [\#3042](https://github.com/pypeclub/OpenPype/pull/3042)
- SiteSync: Providers ignore that site is disabled [\#3041](https://github.com/pypeclub/OpenPype/pull/3041)
- Unreal: Creator import fixes [\#3040](https://github.com/pypeclub/OpenPype/pull/3040)
+- Settings UI: Version column can be extended so version are visible [\#3032](https://github.com/pypeclub/OpenPype/pull/3032)
- SiteSync: fix transitive alternate sites, fix dropdown in Local Settings [\#3018](https://github.com/pypeclub/OpenPype/pull/3018)
**Merged pull requests:**
@@ -72,7 +87,6 @@
- Ftrack: Add description integrator [\#3027](https://github.com/pypeclub/OpenPype/pull/3027)
- Publishing textures for Unreal [\#2988](https://github.com/pypeclub/OpenPype/pull/2988)
-- Maya to Unreal: Static and Skeletal Meshes [\#2978](https://github.com/pypeclub/OpenPype/pull/2978)
**🚀 Enhancements**
@@ -80,14 +94,11 @@
- Console Interpreter: Changed how console splitter size are reused on show [\#3016](https://github.com/pypeclub/OpenPype/pull/3016)
- Deadline: Use more suitable name for sequence review logic [\#3015](https://github.com/pypeclub/OpenPype/pull/3015)
- General: default workfile subset name for workfile [\#3011](https://github.com/pypeclub/OpenPype/pull/3011)
-- Nuke: add concurrency attr to deadline job [\#3005](https://github.com/pypeclub/OpenPype/pull/3005)
- Deadline: priority configurable in Maya jobs [\#2995](https://github.com/pypeclub/OpenPype/pull/2995)
-- Workfiles tool: Save as published workfiles [\#2937](https://github.com/pypeclub/OpenPype/pull/2937)
**🐛 Bug fixes**
- Deadline: Fixed default value of use sequence for review [\#3033](https://github.com/pypeclub/OpenPype/pull/3033)
-- Settings UI: Version column can be extended so version are visible [\#3032](https://github.com/pypeclub/OpenPype/pull/3032)
- General: Fix validate asset docs plug-in filename and class name [\#3029](https://github.com/pypeclub/OpenPype/pull/3029)
- General: Fix import after movements [\#3028](https://github.com/pypeclub/OpenPype/pull/3028)
- Harmony: Added creating subset name for workfile from template [\#3024](https://github.com/pypeclub/OpenPype/pull/3024)
@@ -99,10 +110,6 @@
- Nuke: fixing unicode type detection in effect loaders [\#3002](https://github.com/pypeclub/OpenPype/pull/3002)
- Nuke: removing redundant Ftrack asset when farm publishing [\#2996](https://github.com/pypeclub/OpenPype/pull/2996)
-**🔀 Refactored code**
-
-- General: Move plugins register and discover [\#2935](https://github.com/pypeclub/OpenPype/pull/2935)
-
**Merged pull requests:**
- Maya: Allow to select invalid camera contents if no cameras found [\#3030](https://github.com/pypeclub/OpenPype/pull/3030)
@@ -120,19 +127,17 @@
**🆕 New features**
- nuke: bypass baking [\#2992](https://github.com/pypeclub/OpenPype/pull/2992)
+- Maya to Unreal: Static and Skeletal Meshes [\#2978](https://github.com/pypeclub/OpenPype/pull/2978)
**🚀 Enhancements**
+- Nuke: add concurrency attr to deadline job [\#3005](https://github.com/pypeclub/OpenPype/pull/3005)
- Photoshop: create image without instance [\#3001](https://github.com/pypeclub/OpenPype/pull/3001)
- TVPaint: Render scene family [\#3000](https://github.com/pypeclub/OpenPype/pull/3000)
- Nuke: ReviewDataMov Read RAW attribute [\#2985](https://github.com/pypeclub/OpenPype/pull/2985)
-- SiteSync: Added compute\_resource\_sync\_sites to sync\_server\_module [\#2983](https://github.com/pypeclub/OpenPype/pull/2983)
- General: `METADATA\_KEYS` constant as `frozenset` for optimal immutable lookup [\#2980](https://github.com/pypeclub/OpenPype/pull/2980)
- General: Tools with host filters [\#2975](https://github.com/pypeclub/OpenPype/pull/2975)
- Hero versions: Use custom templates [\#2967](https://github.com/pypeclub/OpenPype/pull/2967)
-- Slack: Added configurable maximum file size of review upload to Slack [\#2945](https://github.com/pypeclub/OpenPype/pull/2945)
-- NewPublisher: Prepared implementation of optional pyblish plugin [\#2943](https://github.com/pypeclub/OpenPype/pull/2943)
-- TVPaint: Extractor to convert PNG into EXR [\#2942](https://github.com/pypeclub/OpenPype/pull/2942)
**🐛 Bug fixes**
@@ -148,14 +153,6 @@
- General: OIIO conversion for ffmeg can handle sequences [\#2958](https://github.com/pypeclub/OpenPype/pull/2958)
- Settings: Conditional dictionary avoid invalid logs [\#2956](https://github.com/pypeclub/OpenPype/pull/2956)
- General: Smaller fixes and typos [\#2950](https://github.com/pypeclub/OpenPype/pull/2950)
-- LogViewer: Don't refresh on initialization [\#2949](https://github.com/pypeclub/OpenPype/pull/2949)
-- nuke: python3 compatibility issue with `iteritems` [\#2948](https://github.com/pypeclub/OpenPype/pull/2948)
-- General: anatomy data with correct task short key [\#2947](https://github.com/pypeclub/OpenPype/pull/2947)
-- SceneInventory: Fix imports in UI [\#2944](https://github.com/pypeclub/OpenPype/pull/2944)
-- Slack: add generic exception [\#2941](https://github.com/pypeclub/OpenPype/pull/2941)
-- General: Python specific vendor paths on env injection [\#2939](https://github.com/pypeclub/OpenPype/pull/2939)
-- General: More fail safe delete old versions [\#2936](https://github.com/pypeclub/OpenPype/pull/2936)
-- Settings UI: Collapsed of collapsible wrapper works as expected [\#2934](https://github.com/pypeclub/OpenPype/pull/2934)
**Merged pull requests:**
diff --git a/README.md b/README.md
index 0e450fc48d..b6966adbc4 100644
--- a/README.md
+++ b/README.md
@@ -1,4 +1,7 @@
+
+[](#contributors-)
+
OpenPype
====
@@ -283,3 +286,54 @@ Running tests
To run tests, execute `.\tools\run_tests(.ps1|.sh)`.
**Note that it needs existing virtual environment.**
+
+## Contributors ✨
+
+Thanks goes to these wonderful people ([emoji key](https://allcontributors.org/docs/en/emoji-key)):
+
+
+
+
+
+
+
+
+
+
+
+This project follows the [all-contributors](https://github.com/all-contributors/all-contributors) specification. Contributions of any kind welcome!
\ No newline at end of file
diff --git a/igniter/bootstrap_repos.py b/igniter/bootstrap_repos.py
index ad49f868d5..08333885c0 100644
--- a/igniter/bootstrap_repos.py
+++ b/igniter/bootstrap_repos.py
@@ -627,8 +627,6 @@ class BootstrapRepos:
Attributes:
data_dir (Path): local OpenPype installation directory.
- live_repo_dir (Path): path to repos directory if running live,
- otherwise `None`.
registry (OpenPypeSettingsRegistry): OpenPype registry object.
zip_filter (list): List of files to exclude from zip
openpype_filter (list): list of top level directories to
@@ -654,7 +652,7 @@ class BootstrapRepos:
self.registry = OpenPypeSettingsRegistry()
self.zip_filter = [".pyc", "__pycache__"]
self.openpype_filter = [
- "openpype", "repos", "schema", "LICENSE"
+ "openpype", "schema", "LICENSE"
]
self._message = message
@@ -667,11 +665,6 @@ class BootstrapRepos:
progress_callback = empty_progress
self._progress_callback = progress_callback
- if getattr(sys, "frozen", False):
- self.live_repo_dir = Path(sys.executable).parent / "repos"
- else:
- self.live_repo_dir = Path(Path(__file__).parent / ".." / "repos")
-
@staticmethod
def get_version_path_from_list(
version: str, version_list: list) -> Union[Path, None]:
@@ -736,11 +729,12 @@ class BootstrapRepos:
# if repo dir is not set, we detect local "live" OpenPype repository
# version and use it as a source. Otherwise repo_dir is user
# entered location.
- if not repo_dir:
- version = OpenPypeVersion.get_installed_version_str()
- repo_dir = self.live_repo_dir
- else:
+ if repo_dir:
version = self.get_version(repo_dir)
+ else:
+ installed_version = OpenPypeVersion.get_installed_version()
+ version = str(installed_version)
+ repo_dir = installed_version.path
if not version:
self._print("OpenPype not found.", LOG_ERROR)
@@ -756,7 +750,7 @@ class BootstrapRepos:
Path(temp_dir) / f"openpype-v{version}.zip"
self._print(f"creating zip: {temp_zip}")
- self._create_openpype_zip(temp_zip, repo_dir.parent)
+ self._create_openpype_zip(temp_zip, repo_dir)
if not os.path.exists(temp_zip):
self._print("make archive failed.", LOG_ERROR)
return None
@@ -1057,27 +1051,11 @@ class BootstrapRepos:
if not archive.is_file() and not archive.exists():
raise ValueError("Archive is not file.")
- with ZipFile(archive, "r") as zip_file:
- name_list = zip_file.namelist()
-
- roots = []
- paths = []
- for item in name_list:
- if not item.startswith("repos/"):
- continue
-
- root = item.split("/")[1]
-
- if root not in roots:
- roots.append(root)
- paths.append(
- f"{archive}{os.path.sep}repos{os.path.sep}{root}")
- sys.path.insert(0, paths[-1])
-
- sys.path.insert(0, f"{archive}")
+ archive_path = str(archive)
+ sys.path.insert(0, archive_path)
pythonpath = os.getenv("PYTHONPATH", "")
python_paths = pythonpath.split(os.pathsep)
- python_paths += paths
+ python_paths.insert(0, archive_path)
os.environ["PYTHONPATH"] = os.pathsep.join(python_paths)
@@ -1094,24 +1072,8 @@ class BootstrapRepos:
directory (Path): path to directory.
"""
+
sys.path.insert(0, directory.as_posix())
- directory /= "repos"
- if not directory.exists() and not directory.is_dir():
- raise ValueError("directory is invalid")
-
- roots = []
- for item in directory.iterdir():
- if item.is_dir():
- root = item.as_posix()
- if root not in roots:
- roots.append(root)
- sys.path.insert(0, root)
-
- pythonpath = os.getenv("PYTHONPATH", "")
- paths = pythonpath.split(os.pathsep)
- paths += roots
-
- os.environ["PYTHONPATH"] = os.pathsep.join(paths)
@staticmethod
def find_openpype_version(version, staging):
@@ -1437,6 +1399,7 @@ class BootstrapRepos:
# create destination parent directories even if they don't exist.
destination.mkdir(parents=True)
+ remove_source_file = False
# version is directory
if openpype_version.path.is_dir():
# create zip inside temporary directory.
@@ -1470,6 +1433,8 @@ class BootstrapRepos:
self._progress_callback(35)
openpype_version.path = self._copy_zip(
openpype_version.path, destination)
+ # Mark zip to be deleted when done
+ remove_source_file = True
# extract zip there
self._print("extracting zip to destination ...")
@@ -1478,6 +1443,10 @@ class BootstrapRepos:
zip_ref.extractall(destination)
self._progress_callback(100)
+ # Remove zip file copied to local app data
+ if remove_source_file:
+ os.remove(openpype_version.path)
+
return destination
def _copy_zip(self, source: Path, destination: Path) -> Path:
diff --git a/openpype/cli.py b/openpype/cli.py
index cbeb7fef9b..2aa4a46929 100644
--- a/openpype/cli.py
+++ b/openpype/cli.py
@@ -20,6 +20,10 @@ from .pype_commands import PypeCommands
"to list staging versions."))
@click.option("--validate-version", expose_value=False,
help="validate given version integrity")
+@click.option("--debug", is_flag=True, expose_value=False,
+ help=("Enable debug"))
+@click.option("--verbose", expose_value=False,
+ help=("Change OpenPype log level (debug - critical or 0-50)"))
def main(ctx):
"""Pype is main command serving as entry point to pipeline system.
@@ -49,18 +53,13 @@ def traypublisher():
@main.command()
-@click.option("-d", "--debug",
- is_flag=True, help=("Run pype tray in debug mode"))
-def tray(debug=False):
+def tray():
"""Launch pype tray.
Default action of pype command is to launch tray widget to control basic
aspects of pype. See documentation for more information.
-
- Running pype with `--debug` will result in lot of information useful for
- debugging to be shown in console.
"""
- PypeCommands().launch_tray(debug)
+ PypeCommands().launch_tray()
@PypeCommands.add_modules
@@ -75,7 +74,6 @@ def module(ctx):
@main.command()
-@click.option("-d", "--debug", is_flag=True, help="Print debug messages")
@click.option("--ftrack-url", envvar="FTRACK_SERVER",
help="Ftrack server url")
@click.option("--ftrack-user", envvar="FTRACK_API_USER",
@@ -88,8 +86,7 @@ def module(ctx):
help="Clockify API key.")
@click.option("--clockify-workspace", envvar="CLOCKIFY_WORKSPACE",
help="Clockify workspace")
-def eventserver(debug,
- ftrack_url,
+def eventserver(ftrack_url,
ftrack_user,
ftrack_api_key,
legacy,
@@ -100,8 +97,6 @@ def eventserver(debug,
This should be ideally used by system service (such us systemd or upstart
on linux and window service).
"""
- if debug:
- os.environ["OPENPYPE_DEBUG"] = "1"
PypeCommands().launch_eventservercli(
ftrack_url,
@@ -114,12 +109,11 @@ def eventserver(debug,
@main.command()
-@click.option("-d", "--debug", is_flag=True, help="Print debug messages")
@click.option("-h", "--host", help="Host", default=None)
@click.option("-p", "--port", help="Port", default=None)
@click.option("-e", "--executable", help="Executable")
@click.option("-u", "--upload_dir", help="Upload dir")
-def webpublisherwebserver(debug, executable, upload_dir, host=None, port=None):
+def webpublisherwebserver(executable, upload_dir, host=None, port=None):
"""Starts webserver for communication with Webpublish FR via command line
OP must be congigured on a machine, eg. OPENPYPE_MONGO filled AND
@@ -127,8 +121,6 @@ def webpublisherwebserver(debug, executable, upload_dir, host=None, port=None):
Expect "pype.club" user created on Ftrack.
"""
- if debug:
- os.environ["OPENPYPE_DEBUG"] = "1"
PypeCommands().launch_webpublisher_webservercli(
upload_dir=upload_dir,
@@ -164,38 +156,34 @@ def extractenvironments(output_json_path, project, asset, task, app, envgroup):
@main.command()
@click.argument("paths", nargs=-1)
-@click.option("-d", "--debug", is_flag=True, help="Print debug messages")
@click.option("-t", "--targets", help="Targets module", default=None,
multiple=True)
@click.option("-g", "--gui", is_flag=True,
help="Show Publish UI", default=False)
-def publish(debug, paths, targets, gui):
+def publish(paths, targets, gui):
"""Start CLI publishing.
Publish collects json from paths provided as an argument.
More than one path is allowed.
"""
- if debug:
- os.environ["OPENPYPE_DEBUG"] = "1"
+
PypeCommands.publish(list(paths), targets, gui)
@main.command()
@click.argument("path")
-@click.option("-d", "--debug", is_flag=True, help="Print debug messages")
@click.option("-h", "--host", help="Host")
@click.option("-u", "--user", help="User email address")
@click.option("-p", "--project", help="Project")
@click.option("-t", "--targets", help="Targets", default=None,
multiple=True)
-def remotepublishfromapp(debug, project, path, host, user=None, targets=None):
+def remotepublishfromapp(project, path, host, user=None, targets=None):
"""Start CLI publishing.
Publish collects json from paths provided as an argument.
More than one path is allowed.
"""
- if debug:
- os.environ["OPENPYPE_DEBUG"] = "1"
+
PypeCommands.remotepublishfromapp(
project, path, host, user, targets=targets
)
@@ -203,24 +191,21 @@ def remotepublishfromapp(debug, project, path, host, user=None, targets=None):
@main.command()
@click.argument("path")
-@click.option("-d", "--debug", is_flag=True, help="Print debug messages")
@click.option("-u", "--user", help="User email address")
@click.option("-p", "--project", help="Project")
@click.option("-t", "--targets", help="Targets", default=None,
multiple=True)
-def remotepublish(debug, project, path, user=None, targets=None):
+def remotepublish(project, path, user=None, targets=None):
"""Start CLI publishing.
Publish collects json from paths provided as an argument.
More than one path is allowed.
"""
- if debug:
- os.environ["OPENPYPE_DEBUG"] = "1"
+
PypeCommands.remotepublish(project, path, user, targets=targets)
@main.command()
-@click.option("-d", "--debug", is_flag=True, help="Print debug messages")
@click.option("-p", "--project", required=True,
help="name of project asset is under")
@click.option("-a", "--asset", required=True,
@@ -228,7 +213,7 @@ def remotepublish(debug, project, path, user=None, targets=None):
@click.option("--path", required=True,
help="path where textures are found",
type=click.Path(exists=True))
-def texturecopy(debug, project, asset, path):
+def texturecopy(project, asset, path):
"""Copy specified textures to provided asset path.
It validates if project and asset exists. Then it will use speedcopy to
@@ -239,8 +224,7 @@ def texturecopy(debug, project, asset, path):
Result will be copied without directory structure so it will be flat then.
Nothing is written to database.
"""
- if debug:
- os.environ["OPENPYPE_DEBUG"] = "1"
+
PypeCommands().texture_copy(project, asset, path)
@@ -389,11 +373,9 @@ def runtests(folder, mark, pyargs, test_data_folder, persist, app_variant,
@main.command()
-@click.option("-d", "--debug",
- is_flag=True, help=("Run process in debug mode"))
@click.option("-a", "--active_site", required=True,
help="Name of active stie")
-def syncserver(debug, active_site):
+def syncserver(active_site):
"""Run sync site server in background.
Some Site Sync use cases need to expose site to another one.
@@ -408,8 +390,7 @@ def syncserver(debug, active_site):
Settings (configured by starting OP Tray with env
var OPENPYPE_LOCAL_ID set to 'active_site'.
"""
- if debug:
- os.environ["OPENPYPE_DEBUG"] = "1"
+
PypeCommands().syncserver(active_site)
diff --git a/openpype/hooks/pre_global_host_data.py b/openpype/hooks/pre_global_host_data.py
index 4c85a511ed..ea5e290d6f 100644
--- a/openpype/hooks/pre_global_host_data.py
+++ b/openpype/hooks/pre_global_host_data.py
@@ -5,8 +5,7 @@ from openpype.lib import (
prepare_app_environments,
prepare_context_environments
)
-
-import avalon.api
+from openpype.pipeline import AvalonMongoDB
class GlobalHostDataHook(PreLaunchHook):
@@ -64,7 +63,7 @@ class GlobalHostDataHook(PreLaunchHook):
self.data["anatomy"] = Anatomy(project_name)
# Mongo connection
- dbcon = avalon.api.AvalonMongoDB()
+ dbcon = AvalonMongoDB()
dbcon.Session["AVALON_PROJECT"] = project_name
dbcon.install()
diff --git a/openpype/hosts/aftereffects/api/__init__.py b/openpype/hosts/aftereffects/api/__init__.py
index cea1bdc023..2ad1255d27 100644
--- a/openpype/hosts/aftereffects/api/__init__.py
+++ b/openpype/hosts/aftereffects/api/__init__.py
@@ -16,7 +16,10 @@ from .pipeline import (
uninstall,
list_instances,
remove_instance,
- containerise
+ containerise,
+ get_context_data,
+ update_context_data,
+ get_context_title
)
from .workio import (
@@ -51,6 +54,9 @@ __all__ = [
"list_instances",
"remove_instance",
"containerise",
+ "get_context_data",
+ "update_context_data",
+ "get_context_title",
"file_extensions",
"has_unsaved_changes",
diff --git a/openpype/hosts/aftereffects/api/extension.zxp b/openpype/hosts/aftereffects/api/extension.zxp
index 389d74505d..0ed799991e 100644
Binary files a/openpype/hosts/aftereffects/api/extension.zxp and b/openpype/hosts/aftereffects/api/extension.zxp differ
diff --git a/openpype/hosts/aftereffects/api/extension/CSXS/manifest.xml b/openpype/hosts/aftereffects/api/extension/CSXS/manifest.xml
index 668cb3fc24..a39f5781bb 100644
--- a/openpype/hosts/aftereffects/api/extension/CSXS/manifest.xml
+++ b/openpype/hosts/aftereffects/api/extension/CSXS/manifest.xml
@@ -1,5 +1,5 @@
-
diff --git a/openpype/hosts/aftereffects/api/extension/jsx/hostscript.jsx b/openpype/hosts/aftereffects/api/extension/jsx/hostscript.jsx
index 8f82c9709d..91df433908 100644
--- a/openpype/hosts/aftereffects/api/extension/jsx/hostscript.jsx
+++ b/openpype/hosts/aftereffects/api/extension/jsx/hostscript.jsx
@@ -417,7 +417,9 @@ function getRenderInfo(){
var file_url = item.file.toString();
return JSON.stringify({
- "file_name": file_url
+ "file_name": file_url,
+ "width": render_item.comp.width,
+ "height": render_item.comp.height
})
}
diff --git a/openpype/hosts/aftereffects/api/launch_logic.py b/openpype/hosts/aftereffects/api/launch_logic.py
index c549268978..30a3e1f1c3 100644
--- a/openpype/hosts/aftereffects/api/launch_logic.py
+++ b/openpype/hosts/aftereffects/api/launch_logic.py
@@ -12,9 +12,8 @@ from wsrpc_aiohttp import (
from Qt import QtCore
+from openpype.pipeline import legacy_io
from openpype.tools.utils import host_tools
-
-from avalon import api
from openpype.tools.adobe_webserver.app import WebServerTool
from .ws_stub import AfterEffectsServerStub
@@ -271,13 +270,13 @@ class AfterEffectsRoute(WebSocketRoute):
log.info("Setting context change")
log.info("project {} asset {} ".format(project, asset))
if project:
- api.Session["AVALON_PROJECT"] = project
+ legacy_io.Session["AVALON_PROJECT"] = project
os.environ["AVALON_PROJECT"] = project
if asset:
- api.Session["AVALON_ASSET"] = asset
+ legacy_io.Session["AVALON_ASSET"] = asset
os.environ["AVALON_ASSET"] = asset
if task:
- api.Session["AVALON_TASK"] = task
+ legacy_io.Session["AVALON_TASK"] = task
os.environ["AVALON_TASK"] = task
async def read(self):
diff --git a/openpype/hosts/aftereffects/api/pipeline.py b/openpype/hosts/aftereffects/api/pipeline.py
index 3ed2de0e9d..a428a1470d 100644
--- a/openpype/hosts/aftereffects/api/pipeline.py
+++ b/openpype/hosts/aftereffects/api/pipeline.py
@@ -2,10 +2,8 @@ import os
import sys
from Qt import QtWidgets
-from bson.objectid import ObjectId
import pyblish.api
-from avalon import io
from openpype import lib
from openpype.api import Logger
@@ -15,7 +13,7 @@ from openpype.pipeline import (
deregister_loader_plugin_path,
deregister_creator_plugin_path,
AVALON_CONTAINER_ID,
- registered_host,
+ legacy_io,
)
import openpype.hosts.aftereffects
from openpype.lib import register_event_callback
@@ -34,24 +32,6 @@ LOAD_PATH = os.path.join(PLUGINS_DIR, "load")
CREATE_PATH = os.path.join(PLUGINS_DIR, "create")
-def check_inventory():
- if not lib.any_outdated():
- return
-
- # Warn about outdated containers.
- print("Starting new QApplication..")
- app = QtWidgets.QApplication(sys.argv)
- message_box = QtWidgets.QMessageBox()
- message_box.setIcon(QtWidgets.QMessageBox.Warning)
- msg = "There are outdated containers in the scene."
- message_box.setText(msg)
- message_box.exec_()
-
-
-def application_launch():
- check_inventory()
-
-
def install():
print("Installing Pype config...")
@@ -75,6 +55,11 @@ def uninstall():
deregister_creator_plugin_path(CREATE_PATH)
+def application_launch():
+ """Triggered after start of app"""
+ check_inventory()
+
+
def on_pyblish_instance_toggled(instance, old_value, new_value):
"""Toggle layer visibility on instance toggles."""
instance[0].Visible = new_value
@@ -109,65 +94,6 @@ def get_asset_settings():
}
-def containerise(name,
- namespace,
- comp,
- context,
- loader=None,
- suffix="_CON"):
- """
- Containerisation enables a tracking of version, author and origin
- for loaded assets.
-
- Creates dictionary payloads that gets saved into file metadata. Each
- container contains of who loaded (loader) and members (single or multiple
- in case of background).
-
- Arguments:
- name (str): Name of resulting assembly
- namespace (str): Namespace under which to host container
- comp (Comp): Composition to containerise
- context (dict): Asset information
- loader (str, optional): Name of loader used to produce this container.
- suffix (str, optional): Suffix of container, defaults to `_CON`.
-
- Returns:
- container (str): Name of container assembly
- """
- data = {
- "schema": "openpype:container-2.0",
- "id": AVALON_CONTAINER_ID,
- "name": name,
- "namespace": namespace,
- "loader": str(loader),
- "representation": str(context["representation"]["_id"]),
- "members": comp.members or [comp.id]
- }
-
- stub = get_stub()
- stub.imprint(comp, data)
-
- return comp
-
-
-def _get_stub():
- """
- Handle pulling stub from PS to run operations on host
- Returns:
- (AEServerStub) or None
- """
- try:
- stub = get_stub() # only after Photoshop is up
- except lib.ConnectionNotEstablishedYet:
- print("Not connected yet, ignoring")
- return
-
- if not stub.get_active_document_name():
- return
-
- return stub
-
-
def ls():
"""Yields containers from active AfterEffects document.
@@ -208,6 +134,66 @@ def ls():
yield data
+def check_inventory():
+ """Checks whether loaded containers are of the highest version"""
+ if not lib.any_outdated():
+ return
+
+ # Warn about outdated containers.
+ _app = QtWidgets.QApplication.instance()
+ if not _app:
+ print("Starting new QApplication..")
+ _app = QtWidgets.QApplication([])
+
+ message_box = QtWidgets.QMessageBox()
+ message_box.setIcon(QtWidgets.QMessageBox.Warning)
+ msg = "There are outdated containers in the scene."
+ message_box.setText(msg)
+ message_box.exec_()
+
+
+def containerise(name,
+ namespace,
+ comp,
+ context,
+ loader=None,
+ suffix="_CON"):
+ """
+ Containerisation enables tracking of version, author and origin
+ for loaded assets.
+
+ Creates dictionary payloads that gets saved into file metadata. Each
+ container contains of who loaded (loader) and members (single or multiple
+ in case of background).
+
+ Arguments:
+ name (str): Name of resulting assembly
+ namespace (str): Namespace under which to host container
+ comp (AEItem): Composition to containerise
+ context (dict): Asset information
+ loader (str, optional): Name of loader used to produce this container.
+ suffix (str, optional): Suffix of container, defaults to `_CON`.
+
+ Returns:
+ container (str): Name of container assembly
+ """
+ data = {
+ "schema": "openpype:container-2.0",
+ "id": AVALON_CONTAINER_ID,
+ "name": name,
+ "namespace": namespace,
+ "loader": str(loader),
+ "representation": str(context["representation"]["_id"]),
+ "members": comp.members or [comp.id]
+ }
+
+ stub = get_stub()
+ stub.imprint(comp.id, data)
+
+ return comp
+
+
+# created instances section
def list_instances():
"""
List all created instances from current workfile which
@@ -228,16 +214,8 @@ def list_instances():
layers_meta = stub.get_metadata()
for instance in layers_meta:
- if instance.get("schema") and \
- "container" in instance.get("schema"):
- continue
-
- uuid_val = instance.get("uuid")
- if uuid_val:
- instance['uuid'] = uuid_val
- else:
- instance['uuid'] = instance.get("members")[0] # legacy
- instances.append(instance)
+ if instance.get("id") == "pyblish.avalon.instance":
+ instances.append(instance)
return instances
@@ -258,8 +236,59 @@ def remove_instance(instance):
if not stub:
return
- stub.remove_instance(instance.get("uuid"))
- item = stub.get_item(instance.get("uuid"))
- if item:
- stub.rename_item(item.id,
- item.name.replace(stub.PUBLISH_ICON, ''))
+ inst_id = instance.get("instance_id") or instance.get("uuid") # legacy
+ if not inst_id:
+ log.warning("No instance identifier for {}".format(instance))
+ return
+
+ stub.remove_instance(inst_id)
+
+ if instance.get("members"):
+ item = stub.get_item(instance["members"][0])
+ if item:
+ stub.rename_item(item.id,
+ item.name.replace(stub.PUBLISH_ICON, ''))
+
+
+# new publisher section
+def get_context_data():
+ meta = _get_stub().get_metadata()
+ for item in meta:
+ if item.get("id") == "publish_context":
+ item.pop("id")
+ return item
+
+ return {}
+
+
+def update_context_data(data, changes):
+ item = data
+ item["id"] = "publish_context"
+ _get_stub().imprint(item["id"], item)
+
+
+def get_context_title():
+ """Returns title for Creator window"""
+
+ project_name = legacy_io.Session["AVALON_PROJECT"]
+ asset_name = legacy_io.Session["AVALON_ASSET"]
+ task_name = legacy_io.Session["AVALON_TASK"]
+ return "{}/{}/{}".format(project_name, asset_name, task_name)
+
+
+def _get_stub():
+ """
+ Handle pulling stub from AE to run operations on host
+ Returns:
+ (AEServerStub) or None
+ """
+ try:
+ stub = get_stub() # only after After Effects is up
+ except lib.ConnectionNotEstablishedYet:
+ print("Not connected yet, ignoring")
+ return
+
+ if not stub.get_active_document_name():
+ return
+
+ return stub
diff --git a/openpype/hosts/aftereffects/api/workio.py b/openpype/hosts/aftereffects/api/workio.py
index 70815bda6b..d6c732285a 100644
--- a/openpype/hosts/aftereffects/api/workio.py
+++ b/openpype/hosts/aftereffects/api/workio.py
@@ -51,4 +51,4 @@ def _active_document():
print("Nothing opened")
pass
- return document_name
\ No newline at end of file
+ return document_name
diff --git a/openpype/hosts/aftereffects/api/ws_stub.py b/openpype/hosts/aftereffects/api/ws_stub.py
index b0893310c1..8719a8f46e 100644
--- a/openpype/hosts/aftereffects/api/ws_stub.py
+++ b/openpype/hosts/aftereffects/api/ws_stub.py
@@ -28,6 +28,9 @@ class AEItem(object):
workAreaDuration = attr.ib(default=None)
frameRate = attr.ib(default=None)
file_name = attr.ib(default=None)
+ instance_id = attr.ib(default=None) # New Publisher
+ width = attr.ib(default=None)
+ height = attr.ib(default=None)
class AfterEffectsServerStub():
@@ -110,11 +113,11 @@ class AfterEffectsServerStub():
self.log.debug("Couldn't find layer metadata")
- def imprint(self, item, data, all_items=None, items_meta=None):
+ def imprint(self, item_id, data, all_items=None, items_meta=None):
"""
Save item metadata to Label field of metadata of active document
Args:
- item (AEItem):
+ item_id (int|str): id of FootageItem or instance_id for workfiles
data(string): json representation for single layer
all_items (list of item): for performance, could be
injected for usage in loop, if not, single call will be
@@ -132,8 +135,9 @@ class AfterEffectsServerStub():
is_new = True
for item_meta in items_meta:
- if item_meta.get('members') \
- and str(item.id) == str(item_meta.get('members')[0]):
+ if ((item_meta.get('members') and
+ str(item_id) == str(item_meta.get('members')[0])) or
+ item_meta.get("instance_id") == item_id):
is_new = False
if data:
item_meta.update(data)
@@ -153,10 +157,12 @@ class AfterEffectsServerStub():
item_ids = [int(item.id) for item in all_items]
cleaned_data = []
for meta in result_meta:
- # for creation of instance OR loaded container
- if 'instance' in meta.get('id') or \
- int(meta.get('members')[0]) in item_ids:
- cleaned_data.append(meta)
+ # do not add instance with nonexistent item id
+ if meta.get("members"):
+ if int(meta["members"][0]) not in item_ids:
+ continue
+
+ cleaned_data.append(meta)
payload = json.dumps(cleaned_data, indent=4)
@@ -167,7 +173,7 @@ class AfterEffectsServerStub():
def get_active_document_full_name(self):
"""
- Returns just a name of active document via ws call
+ Returns absolute path of active document via ws call
Returns(string): file name
"""
res = self.websocketserver.call(self.client.call(
@@ -314,15 +320,13 @@ class AfterEffectsServerStub():
Keep matching item in file though.
Args:
- instance_id(string): instance uuid
+ instance_id(string): instance id
"""
cleaned_data = []
for instance in self.get_metadata():
- uuid_val = instance.get("uuid")
- if not uuid_val:
- uuid_val = instance.get("members")[0] # legacy
- if uuid_val != instance_id:
+ inst_id = instance.get("instance_id") or instance.get("uuid")
+ if inst_id != instance_id:
cleaned_data.append(instance)
payload = json.dumps(cleaned_data, indent=4)
@@ -357,7 +361,7 @@ class AfterEffectsServerStub():
item_id (int):
Returns:
- (namedtuple)
+ (AEItem)
"""
res = self.websocketserver.call(self.client.call
@@ -418,7 +422,7 @@ class AfterEffectsServerStub():
""" Get render queue info for render purposes
Returns:
- (namedtuple): with 'file_name' field
+ (AEItem): with 'file_name' field
"""
res = self.websocketserver.call(self.client.call
('AfterEffects.get_render_info'))
@@ -606,7 +610,10 @@ class AfterEffectsServerStub():
d.get('workAreaStart'),
d.get('workAreaDuration'),
d.get('frameRate'),
- d.get('file_name'))
+ d.get('file_name'),
+ d.get("instance_id"),
+ d.get("width"),
+ d.get("height"))
ret.append(item)
return ret
diff --git a/openpype/hosts/aftereffects/plugins/create/create_local_render.py b/openpype/hosts/aftereffects/plugins/create/create_legacy_local_render.py
similarity index 64%
rename from openpype/hosts/aftereffects/plugins/create/create_local_render.py
rename to openpype/hosts/aftereffects/plugins/create/create_legacy_local_render.py
index 9d2cdcd7be..04413acbcf 100644
--- a/openpype/hosts/aftereffects/plugins/create/create_local_render.py
+++ b/openpype/hosts/aftereffects/plugins/create/create_legacy_local_render.py
@@ -1,7 +1,7 @@
-from openpype.hosts.aftereffects.plugins.create import create_render
+from openpype.hosts.aftereffects.plugins.create import create_legacy_render
-class CreateLocalRender(create_render.CreateRender):
+class CreateLocalRender(create_legacy_render.CreateRender):
""" Creator to render locally.
Created only after default render on farm. So family 'render.local' is
diff --git a/openpype/hosts/aftereffects/plugins/create/create_legacy_render.py b/openpype/hosts/aftereffects/plugins/create/create_legacy_render.py
new file mode 100644
index 0000000000..e4fbb47a33
--- /dev/null
+++ b/openpype/hosts/aftereffects/plugins/create/create_legacy_render.py
@@ -0,0 +1,62 @@
+from openpype.pipeline import create
+from openpype.pipeline import CreatorError
+from openpype.hosts.aftereffects.api import (
+ get_stub,
+ list_instances
+)
+
+
+class CreateRender(create.LegacyCreator):
+ """Render folder for publish.
+
+ Creates subsets in format 'familyTaskSubsetname',
+ eg 'renderCompositingMain'.
+
+ Create only single instance from composition at a time.
+ """
+
+ name = "renderDefault"
+ label = "Render on Farm"
+ family = "render"
+ defaults = ["Main"]
+
+ def process(self):
+ stub = get_stub() # only after After Effects is up
+ items = []
+ if (self.options or {}).get("useSelection"):
+ items = stub.get_selected_items(
+ comps=True, folders=False, footages=False
+ )
+ if len(items) > 1:
+ raise CreatorError(
+ "Please select only single composition at time."
+ )
+
+ if not items:
+ raise CreatorError((
+ "Nothing to create. Select composition "
+ "if 'useSelection' or create at least "
+ "one composition."
+ ))
+
+ existing_subsets = [
+ instance['subset'].lower()
+ for instance in list_instances()
+ ]
+
+ item = items.pop()
+ if self.name.lower() in existing_subsets:
+ txt = "Instance with name \"{}\" already exists.".format(self.name)
+ raise CreatorError(txt)
+
+ self.data["members"] = [item.id]
+ self.data["uuid"] = item.id # for SubsetManager
+ self.data["subset"] = (
+ self.data["subset"]
+ .replace(stub.PUBLISH_ICON, '')
+ .replace(stub.LOADED_ICON, '')
+ )
+
+ stub.imprint(item, self.data)
+ stub.set_label_color(item.id, 14) # Cyan options 0 - 16
+ stub.rename_item(item.id, stub.PUBLISH_ICON + self.data["subset"])
diff --git a/openpype/hosts/aftereffects/plugins/create/create_render.py b/openpype/hosts/aftereffects/plugins/create/create_render.py
index 831085a5f1..215c148f37 100644
--- a/openpype/hosts/aftereffects/plugins/create/create_render.py
+++ b/openpype/hosts/aftereffects/plugins/create/create_render.py
@@ -1,38 +1,70 @@
+from openpype import resources
+from openpype.lib import BoolDef, UISeparatorDef
+from openpype.hosts.aftereffects import api
from openpype.pipeline import (
+ Creator,
+ CreatedInstance,
CreatorError,
- LegacyCreator
-)
-from openpype.hosts.aftereffects.api import (
- get_stub,
- list_instances
+ legacy_io,
)
-class CreateRender(LegacyCreator):
- """Render folder for publish.
-
- Creates subsets in format 'familyTaskSubsetname',
- eg 'renderCompositingMain'.
-
- Create only single instance from composition at a time.
- """
-
- name = "renderDefault"
- label = "Render on Farm"
+class RenderCreator(Creator):
+ identifier = "render"
+ label = "Render"
family = "render"
- defaults = ["Main"]
+ description = "Render creator"
- def process(self):
- stub = get_stub() # only after After Effects is up
- if (self.options or {}).get("useSelection"):
+ create_allow_context_change = True
+
+ def __init__(
+ self, create_context, system_settings, project_settings, headless=False
+ ):
+ super(RenderCreator, self).__init__(create_context, system_settings,
+ project_settings, headless)
+ self._default_variants = (project_settings["aftereffects"]
+ ["create"]
+ ["RenderCreator"]
+ ["defaults"])
+
+ def get_icon(self):
+ return resources.get_openpype_splash_filepath()
+
+ def collect_instances(self):
+ for instance_data in api.list_instances():
+ # legacy instances have family=='render' or 'renderLocal', use them
+ creator_id = (instance_data.get("creator_identifier") or
+ instance_data.get("family", '').replace("Local", ''))
+ if creator_id == self.identifier:
+ instance_data = self._handle_legacy(instance_data)
+ instance = CreatedInstance.from_existing(
+ instance_data, self
+ )
+ self._add_instance_to_context(instance)
+
+ def update_instances(self, update_list):
+ for created_inst, _changes in update_list:
+ api.get_stub().imprint(created_inst.get("instance_id"),
+ created_inst.data_to_store())
+
+ def remove_instances(self, instances):
+ for instance in instances:
+ api.remove_instance(instance)
+ self._remove_instance_from_context(instance)
+
+ def create(self, subset_name, data, pre_create_data):
+ stub = api.get_stub() # only after After Effects is up
+ if pre_create_data.get("use_selection"):
items = stub.get_selected_items(
comps=True, folders=False, footages=False
)
+ else:
+ items = stub.get_items(comps=True, folders=False, footages=False)
+
if len(items) > 1:
raise CreatorError(
"Please select only single composition at time."
)
-
if not items:
raise CreatorError((
"Nothing to create. Select composition "
@@ -40,24 +72,54 @@ class CreateRender(LegacyCreator):
"one composition."
))
- existing_subsets = [
- instance['subset'].lower()
- for instance in list_instances()
+ for inst in self.create_context.instances:
+ if subset_name == inst.subset_name:
+ raise CreatorError("{} already exists".format(
+ inst.subset_name))
+
+ data["members"] = [items[0].id]
+ new_instance = CreatedInstance(self.family, subset_name, data, self)
+ if "farm" in pre_create_data:
+ use_farm = pre_create_data["farm"]
+ new_instance.creator_attributes["farm"] = use_farm
+
+ api.get_stub().imprint(new_instance.id,
+ new_instance.data_to_store())
+ self._add_instance_to_context(new_instance)
+
+ def get_default_variants(self):
+ return self._default_variants
+
+ def get_instance_attr_defs(self):
+ return [BoolDef("farm", label="Render on farm")]
+
+ def get_pre_create_attr_defs(self):
+ output = [
+ BoolDef("use_selection", default=True, label="Use selection"),
+ UISeparatorDef(),
+ BoolDef("farm", label="Render on farm")
]
+ return output
- item = items.pop()
- if self.name.lower() in existing_subsets:
- txt = "Instance with name \"{}\" already exists.".format(self.name)
- raise CreatorError(txt)
+ def get_detail_description(self):
+ return """Creator for Render instances"""
- self.data["members"] = [item.id]
- self.data["uuid"] = item.id # for SubsetManager
- self.data["subset"] = (
- self.data["subset"]
- .replace(stub.PUBLISH_ICON, '')
- .replace(stub.LOADED_ICON, '')
- )
+ def _handle_legacy(self, instance_data):
+ """Converts old instances to new format."""
+ if not instance_data.get("members"):
+ instance_data["members"] = [instance_data.get("uuid")]
- stub.imprint(item, self.data)
- stub.set_label_color(item.id, 14) # Cyan options 0 - 16
- stub.rename_item(item.id, stub.PUBLISH_ICON + self.data["subset"])
+ if instance_data.get("uuid"):
+ # uuid not needed, replaced with unique instance_id
+ api.get_stub().remove_instance(instance_data.get("uuid"))
+ instance_data.pop("uuid")
+
+ if not instance_data.get("task"):
+ instance_data["task"] = legacy_io.Session.get("AVALON_TASK")
+
+ if not instance_data.get("creator_attributes"):
+ is_old_farm = instance_data["family"] != "renderLocal"
+ instance_data["creator_attributes"] = {"farm": is_old_farm}
+ instance_data["family"] = self.family
+
+ return instance_data
diff --git a/openpype/hosts/aftereffects/plugins/create/workfile_creator.py b/openpype/hosts/aftereffects/plugins/create/workfile_creator.py
new file mode 100644
index 0000000000..88e55e21b5
--- /dev/null
+++ b/openpype/hosts/aftereffects/plugins/create/workfile_creator.py
@@ -0,0 +1,80 @@
+import openpype.hosts.aftereffects.api as api
+from openpype.pipeline import (
+ AutoCreator,
+ CreatedInstance,
+ legacy_io,
+)
+
+
+class AEWorkfileCreator(AutoCreator):
+ identifier = "workfile"
+ family = "workfile"
+
+ def get_instance_attr_defs(self):
+ return []
+
+ def collect_instances(self):
+ for instance_data in api.list_instances():
+ creator_id = instance_data.get("creator_identifier")
+ if creator_id == self.identifier:
+ subset_name = instance_data["subset"]
+ instance = CreatedInstance(
+ self.family, subset_name, instance_data, self
+ )
+ self._add_instance_to_context(instance)
+
+ def update_instances(self, update_list):
+ # nothing to change on workfiles
+ pass
+
+ def create(self, options=None):
+ existing_instance = None
+ for instance in self.create_context.instances:
+ if instance.family == self.family:
+ existing_instance = instance
+ break
+
+ variant = ''
+ project_name = legacy_io.Session["AVALON_PROJECT"]
+ asset_name = legacy_io.Session["AVALON_ASSET"]
+ task_name = legacy_io.Session["AVALON_TASK"]
+ host_name = legacy_io.Session["AVALON_APP"]
+
+ if existing_instance is None:
+ asset_doc = legacy_io.find_one({
+ "type": "asset",
+ "name": asset_name
+ })
+ subset_name = self.get_subset_name(
+ variant, task_name, asset_doc, project_name, host_name
+ )
+ data = {
+ "asset": asset_name,
+ "task": task_name,
+ "variant": variant
+ }
+ data.update(self.get_dynamic_data(
+ variant, task_name, asset_doc, project_name, host_name
+ ))
+
+ new_instance = CreatedInstance(
+ self.family, subset_name, data, self
+ )
+ self._add_instance_to_context(new_instance)
+
+ api.get_stub().imprint(new_instance.get("instance_id"),
+ new_instance.data_to_store())
+
+ elif (
+ existing_instance["asset"] != asset_name
+ or existing_instance["task"] != task_name
+ ):
+ asset_doc = legacy_io.find_one({
+ "type": "asset",
+ "name": asset_name
+ })
+ subset_name = self.get_subset_name(
+ variant, task_name, asset_doc, project_name, host_name
+ )
+ existing_instance["asset"] = asset_name
+ existing_instance["task"] = task_name
diff --git a/openpype/hosts/aftereffects/plugins/load/load_background.py b/openpype/hosts/aftereffects/plugins/load/load_background.py
index be43cae44e..d346df504a 100644
--- a/openpype/hosts/aftereffects/plugins/load/load_background.py
+++ b/openpype/hosts/aftereffects/plugins/load/load_background.py
@@ -90,7 +90,7 @@ class BackgroundLoader(AfterEffectsLoader):
container["namespace"] = comp_name
container["members"] = comp.members
- stub.imprint(comp, container)
+ stub.imprint(comp.id, container)
def remove(self, container):
"""
@@ -99,10 +99,9 @@ class BackgroundLoader(AfterEffectsLoader):
Args:
container (dict): container to be removed - used to get layer_id
"""
- print("!!!! container:: {}".format(container))
stub = self.get_stub()
layer = container.pop("layer")
- stub.imprint(layer, {})
+ stub.imprint(layer.id, {})
stub.delete_item(layer.id)
def switch(self, container, representation):
diff --git a/openpype/hosts/aftereffects/plugins/load/load_file.py b/openpype/hosts/aftereffects/plugins/load/load_file.py
index 9eb9e80a2c..6ab69c6bfa 100644
--- a/openpype/hosts/aftereffects/plugins/load/load_file.py
+++ b/openpype/hosts/aftereffects/plugins/load/load_file.py
@@ -96,9 +96,9 @@ class FileLoader(AfterEffectsLoader):
# with aftereffects.maintained_selection(): # TODO
stub.replace_item(layer.id, path, stub.LOADED_ICON + layer_name)
stub.imprint(
- layer, {"representation": str(representation["_id"]),
- "name": context["subset"],
- "namespace": layer_name}
+ layer.id, {"representation": str(representation["_id"]),
+ "name": context["subset"],
+ "namespace": layer_name}
)
def remove(self, container):
@@ -109,7 +109,7 @@ class FileLoader(AfterEffectsLoader):
"""
stub = self.get_stub()
layer = container.pop("layer")
- stub.imprint(layer, {})
+ stub.imprint(layer.id, {})
stub.delete_item(layer.id)
def switch(self, container, representation):
diff --git a/openpype/hosts/aftereffects/plugins/publish/collect_audio.py b/openpype/hosts/aftereffects/plugins/publish/collect_audio.py
index 80679725e6..8647ba498b 100644
--- a/openpype/hosts/aftereffects/plugins/publish/collect_audio.py
+++ b/openpype/hosts/aftereffects/plugins/publish/collect_audio.py
@@ -17,12 +17,11 @@ class CollectAudio(pyblish.api.ContextPlugin):
def process(self, context):
for instance in context:
- if instance.data["family"] == 'render.farm':
+ if 'render.farm' in instance.data.get("families", []):
comp_id = instance.data["comp_id"]
if not comp_id:
self.log.debug("No comp_id filled in instance")
- # @iLLiCiTiT QUESTION Should return or continue?
- return
+ continue
context.data["audioFile"] = os.path.normpath(
get_stub().get_audio_url(comp_id)
).replace("\\", "/")
diff --git a/openpype/hosts/aftereffects/plugins/publish/collect_render.py b/openpype/hosts/aftereffects/plugins/publish/collect_render.py
index 3e44acd7e9..fa23bf92b0 100644
--- a/openpype/hosts/aftereffects/plugins/publish/collect_render.py
+++ b/openpype/hosts/aftereffects/plugins/publish/collect_render.py
@@ -21,135 +21,129 @@ class AERenderInstance(RenderInstance):
projectEntity = attr.ib(default=None)
stagingDir = attr.ib(default=None)
app_version = attr.ib(default=None)
+ publish_attributes = attr.ib(default=None)
+ file_name = attr.ib(default=None)
class CollectAERender(abstract_collect_render.AbstractCollectRender):
- order = pyblish.api.CollectorOrder + 0.400
+ order = pyblish.api.CollectorOrder + 0.405
label = "Collect After Effects Render Layers"
hosts = ["aftereffects"]
- # internal
- family_remapping = {
- "render": ("render.farm", "farm"), # (family, label)
- "renderLocal": ("render", "local")
- }
padding_width = 6
rendered_extension = 'png'
- stub = get_stub()
+ _stub = None
+
+ @classmethod
+ def get_stub(cls):
+ if not cls._stub:
+ cls._stub = get_stub()
+ return cls._stub
def get_instances(self, context):
instances = []
+ instances_to_remove = []
- app_version = self.stub.get_app_version()
+ app_version = CollectAERender.get_stub().get_app_version()
app_version = app_version[0:4]
current_file = context.data["currentFile"]
version = context.data["version"]
- asset_entity = context.data["assetEntity"]
+
project_entity = context.data["projectEntity"]
- compositions = self.stub.get_items(True)
+ compositions = CollectAERender.get_stub().get_items(True)
compositions_by_id = {item.id: item for item in compositions}
- for inst in self.stub.get_metadata():
- schema = inst.get('schema')
- # loaded asset container skip it
- if schema and 'container' in schema:
+ for inst in context:
+ if not inst.data.get("active", True):
continue
- if not inst["members"]:
- raise ValueError("Couldn't find id, unable to publish. " +
- "Please recreate instance.")
- item_id = inst["members"][0]
+ family = inst.data["family"]
+ if family not in ["render", "renderLocal"]: # legacy
+ continue
- work_area_info = self.stub.get_work_area(int(item_id))
+ item_id = inst.data["members"][0]
+
+ work_area_info = CollectAERender.get_stub().get_work_area(
+ int(item_id))
if not work_area_info:
self.log.warning("Orphaned instance, deleting metadata")
- self.stub.remove_instance(int(item_id))
+ inst_id = inst.get("instance_id") or item_id
+ CollectAERender.get_stub().remove_instance(inst_id)
continue
- frameStart = work_area_info.workAreaStart
-
- frameEnd = round(work_area_info.workAreaStart +
- float(work_area_info.workAreaDuration) *
- float(work_area_info.frameRate)) - 1
+ frame_start = work_area_info.workAreaStart
+ frame_end = round(work_area_info.workAreaStart +
+ float(work_area_info.workAreaDuration) *
+ float(work_area_info.frameRate)) - 1
fps = work_area_info.frameRate
# TODO add resolution when supported by extension
- if inst["family"] in self.family_remapping.keys() \
- and inst["active"]:
- remapped_family = self.family_remapping[inst["family"]]
- instance = AERenderInstance(
- family=remapped_family[0],
- families=[remapped_family[0]],
- version=version,
- time="",
- source=current_file,
- label="{} - {}".format(inst["subset"], remapped_family[1]),
- subset=inst["subset"],
- asset=context.data["assetEntity"]["name"],
- attachTo=False,
- setMembers='',
- publish=True,
- renderer='aerender',
- name=inst["subset"],
- resolutionWidth=asset_entity["data"].get(
- "resolutionWidth",
- project_entity["data"]["resolutionWidth"]),
- resolutionHeight=asset_entity["data"].get(
- "resolutionHeight",
- project_entity["data"]["resolutionHeight"]),
- pixelAspect=1,
- tileRendering=False,
- tilesX=0,
- tilesY=0,
- frameStart=frameStart,
- frameEnd=frameEnd,
- frameStep=1,
- toBeRenderedOn='deadline',
- fps=fps,
- app_version=app_version
- )
+ task_name = inst.data.get("task") # legacy
- comp = compositions_by_id.get(int(item_id))
- if not comp:
- raise ValueError("There is no composition for item {}".
- format(item_id))
- instance.comp_name = comp.name
- instance.comp_id = item_id
- instance._anatomy = context.data["anatomy"]
- instance.anatomyData = context.data["anatomyData"]
+ render_q = CollectAERender.get_stub().get_render_info()
+ if not render_q:
+ raise ValueError("No file extension set in Render Queue")
- instance.outputDir = self._get_output_dir(instance)
- instance.context = context
+ subset_name = inst.data["subset"]
+ instance = AERenderInstance(
+ family=family,
+ families=inst.data.get("families", []),
+ version=version,
+ time="",
+ source=current_file,
+ label="{} - {}".format(subset_name, family),
+ subset=subset_name,
+ asset=inst.data["asset"],
+ task=task_name,
+ attachTo=False,
+ setMembers='',
+ publish=True,
+ renderer='aerender',
+ name=subset_name,
+ resolutionWidth=render_q.width,
+ resolutionHeight=render_q.height,
+ pixelAspect=1,
+ tileRendering=False,
+ tilesX=0,
+ tilesY=0,
+ frameStart=frame_start,
+ frameEnd=frame_end,
+ frameStep=1,
+ toBeRenderedOn='deadline',
+ fps=fps,
+ app_version=app_version,
+ publish_attributes=inst.data.get("publish_attributes"),
+ file_name=render_q.file_name
+ )
- settings = get_project_settings(os.getenv("AVALON_PROJECT"))
- reviewable_subset_filter = \
- (settings["deadline"]
- ["publish"]
- ["ProcessSubmittedJobOnFarm"]
- ["aov_filter"])
+ comp = compositions_by_id.get(int(item_id))
+ if not comp:
+ raise ValueError("There is no composition for item {}".
+ format(item_id))
+ instance.outputDir = self._get_output_dir(instance)
+ instance.comp_name = comp.name
+ instance.comp_id = item_id
- if inst["family"] == "renderLocal":
- # for local renders
- instance.anatomyData["version"] = instance.version
- instance.anatomyData["subset"] = instance.subset
- instance.stagingDir = tempfile.mkdtemp()
- instance.projectEntity = project_entity
+ is_local = "renderLocal" in inst.data["family"] # legacy
+ if inst.data.get("creator_attributes"):
+ is_local = not inst.data["creator_attributes"].get("farm")
+ if is_local:
+ # for local renders
+ instance = self._update_for_local(instance, project_entity)
+ else:
+ fam = "render.farm"
+ if fam not in instance.families:
+ instance.families.append(fam)
- if self.hosts[0] in reviewable_subset_filter.keys():
- for aov_pattern in \
- reviewable_subset_filter[self.hosts[0]]:
- if re.match(aov_pattern, instance.subset):
- instance.families.append("review")
- instance.review = True
- break
-
- self.log.info("New instance:: {}".format(instance))
- instances.append(instance)
+ instances.append(instance)
+ instances_to_remove.append(inst)
+ for instance in instances_to_remove:
+ context.remove(instance)
return instances
def get_expected_files(self, render_instance):
@@ -168,15 +162,11 @@ class CollectAERender(abstract_collect_render.AbstractCollectRender):
start = render_instance.frameStart
end = render_instance.frameEnd
- # pull file name from Render Queue Output module
- render_q = self.stub.get_render_info()
- if not render_q:
- raise ValueError("No file extension set in Render Queue")
- _, ext = os.path.splitext(os.path.basename(render_q.file_name))
+ _, ext = os.path.splitext(os.path.basename(render_instance.file_name))
base_dir = self._get_output_dir(render_instance)
expected_files = []
- if "#" not in render_q.file_name: # single frame (mov)W
+ if "#" not in render_instance.file_name: # single frame (mov)W
path = os.path.join(base_dir, "{}_{}_{}.{}".format(
render_instance.asset,
render_instance.subset,
@@ -216,3 +206,24 @@ class CollectAERender(abstract_collect_render.AbstractCollectRender):
# for submit_publish_job
return base_dir
+
+ def _update_for_local(self, instance, project_entity):
+ """Update old saved instances to current publishing format"""
+ instance.stagingDir = tempfile.mkdtemp()
+ instance.projectEntity = project_entity
+ fam = "render.local"
+ if fam not in instance.families:
+ instance.families.append(fam)
+
+ settings = get_project_settings(os.getenv("AVALON_PROJECT"))
+ reviewable_subset_filter = (settings["deadline"]
+ ["publish"]
+ ["ProcessSubmittedJobOnFarm"]
+ ["aov_filter"].get(self.hosts[0]))
+ for aov_pattern in reviewable_subset_filter:
+ if re.match(aov_pattern, instance.subset):
+ instance.families.append("review")
+ instance.review = True
+ break
+
+ return instance
diff --git a/openpype/hosts/aftereffects/plugins/publish/collect_workfile.py b/openpype/hosts/aftereffects/plugins/publish/collect_workfile.py
index cb5a2bad4f..9cb6900b0a 100644
--- a/openpype/hosts/aftereffects/plugins/publish/collect_workfile.py
+++ b/openpype/hosts/aftereffects/plugins/publish/collect_workfile.py
@@ -1,7 +1,8 @@
import os
-from avalon import api
+
import pyblish.api
from openpype.lib import get_subset_name_with_asset_doc
+from openpype.pipeline import legacy_io
class CollectWorkfile(pyblish.api.ContextPlugin):
@@ -11,16 +12,45 @@ class CollectWorkfile(pyblish.api.ContextPlugin):
order = pyblish.api.CollectorOrder + 0.1
def process(self, context):
- task = api.Session["AVALON_TASK"]
+ existing_instance = None
+ for instance in context:
+ if instance.data["family"] == "workfile":
+ self.log.debug("Workfile instance found, won't create new")
+ existing_instance = instance
+ break
+
current_file = context.data["currentFile"]
staging_dir = os.path.dirname(current_file)
scene_file = os.path.basename(current_file)
+ if existing_instance is None: # old publish
+ instance = self._get_new_instance(context, scene_file)
+ else:
+ instance = existing_instance
+
+ # creating representation
+ representation = {
+ 'name': 'aep',
+ 'ext': 'aep',
+ 'files': scene_file,
+ "stagingDir": staging_dir,
+ }
+
+ if not instance.data.get("representations"):
+ instance.data["representations"] = []
+ instance.data["representations"].append(representation)
+
+ instance.data["publish"] = instance.data["active"] # for DL
+
+ def _get_new_instance(self, context, scene_file):
+ task = legacy_io.Session["AVALON_TASK"]
version = context.data["version"]
asset_entity = context.data["assetEntity"]
project_entity = context.data["projectEntity"]
- shared_instance_data = {
+ instance_data = {
+ "active": True,
"asset": asset_entity["name"],
+ "task": task,
"frameStart": asset_entity["data"]["frameStart"],
"frameEnd": asset_entity["data"]["frameEnd"],
"handleStart": asset_entity["data"]["handleStart"],
@@ -59,20 +89,6 @@ class CollectWorkfile(pyblish.api.ContextPlugin):
"representations": list()
})
- # adding basic script data
- instance.data.update(shared_instance_data)
+ instance.data.update(instance_data)
- # creating representation
- representation = {
- 'name': 'aep',
- 'ext': 'aep',
- 'files': scene_file,
- "stagingDir": staging_dir,
- }
-
- instance.data["representations"].append(representation)
-
- self.log.info('Publishing After Effects workfile')
-
- for i in context:
- self.log.debug(f"{i.data['families']}")
+ return instance
diff --git a/openpype/hosts/aftereffects/plugins/publish/extract_local_render.py b/openpype/hosts/aftereffects/plugins/publish/extract_local_render.py
index b738068a7b..7323a0b125 100644
--- a/openpype/hosts/aftereffects/plugins/publish/extract_local_render.py
+++ b/openpype/hosts/aftereffects/plugins/publish/extract_local_render.py
@@ -12,7 +12,7 @@ class ExtractLocalRender(openpype.api.Extractor):
order = openpype.api.Extractor.order - 0.47
label = "Extract Local Render"
hosts = ["aftereffects"]
- families = ["render"]
+ families = ["renderLocal", "render.local"]
def process(self, instance):
stub = get_stub()
diff --git a/openpype/hosts/aftereffects/plugins/publish/extract_save_scene.py b/openpype/hosts/aftereffects/plugins/publish/extract_save_scene.py
index e20598b311..eb2977309f 100644
--- a/openpype/hosts/aftereffects/plugins/publish/extract_save_scene.py
+++ b/openpype/hosts/aftereffects/plugins/publish/extract_save_scene.py
@@ -1,15 +1,16 @@
+import pyblish.api
+
import openpype.api
from openpype.hosts.aftereffects.api import get_stub
-class ExtractSaveScene(openpype.api.Extractor):
+class ExtractSaveScene(pyblish.api.ContextPlugin):
"""Save scene before extraction."""
order = openpype.api.Extractor.order - 0.48
label = "Extract Save Scene"
hosts = ["aftereffects"]
- families = ["workfile"]
- def process(self, instance):
+ def process(self, context):
stub = get_stub()
stub.save()
diff --git a/openpype/hosts/aftereffects/plugins/publish/help/validate_scene_settings.xml b/openpype/hosts/aftereffects/plugins/publish/help/validate_scene_settings.xml
index 36fa90456e..0591020ed3 100644
--- a/openpype/hosts/aftereffects/plugins/publish/help/validate_scene_settings.xml
+++ b/openpype/hosts/aftereffects/plugins/publish/help/validate_scene_settings.xml
@@ -12,6 +12,8 @@ One of the settings in a scene doesn't match to asset settings in database.
### How to repair?
Change values for {invalid_keys_str} in the scene OR change them in the asset database if they are wrong there.
+
+ In the scene, right-click the published composition > `Composition Settings`.
### __Detailed Info__ (optional)
diff --git a/openpype/hosts/aftereffects/plugins/publish/pre_collect_render.py b/openpype/hosts/aftereffects/plugins/publish/pre_collect_render.py
new file mode 100644
index 0000000000..03ec184524
--- /dev/null
+++ b/openpype/hosts/aftereffects/plugins/publish/pre_collect_render.py
@@ -0,0 +1,54 @@
+import json
+import pyblish.api
+from openpype.hosts.aftereffects.api import list_instances
+
+
+class PreCollectRender(pyblish.api.ContextPlugin):
+ """
+ Checks if render instance is of old type, adds to families to both
+ existing collectors work same way.
+
+ Could be removed in the future when no one uses old publish.
+ """
+
+ label = "PreCollect Render"
+ order = pyblish.api.CollectorOrder + 0.400
+ hosts = ["aftereffects"]
+
+ family_remapping = {
+ "render": ("render.farm", "farm"), # (family, label)
+ "renderLocal": ("render.local", "local")
+ }
+
+ def process(self, context):
+ if context.data.get("newPublishing"):
+ self.log.debug("Not applicable for New Publisher, skip")
+ return
+
+ for inst in list_instances():
+ if inst.get("creator_attributes"):
+ raise ValueError("Instance created in New publisher, "
+ "cannot be published in Pyblish.\n"
+ "Please publish in New Publisher "
+ "or recreate instances with legacy Creators")
+
+ if inst["family"] not in self.family_remapping.keys():
+ continue
+
+ if not inst["members"]:
+ raise ValueError("Couldn't find id, unable to publish. " +
+ "Please recreate instance.")
+
+ instance = context.create_instance(inst["subset"])
+ inst["families"] = [self.family_remapping[inst["family"]][0]]
+ instance.data.update(inst)
+
+ self._debug_log(instance)
+
+ def _debug_log(self, instance):
+ def _default_json(value):
+ return str(value)
+
+ self.log.info(
+ json.dumps(instance.data, indent=4, default=_default_json)
+ )
diff --git a/openpype/hosts/aftereffects/plugins/publish/validate_instance_asset.py b/openpype/hosts/aftereffects/plugins/publish/validate_instance_asset.py
index 37cecfbcc4..7a9356f020 100644
--- a/openpype/hosts/aftereffects/plugins/publish/validate_instance_asset.py
+++ b/openpype/hosts/aftereffects/plugins/publish/validate_instance_asset.py
@@ -1,7 +1,10 @@
-from avalon import api
import pyblish.api
+
import openpype.api
-from openpype.pipeline import PublishXmlValidationError
+from openpype.pipeline import (
+ PublishXmlValidationError,
+ legacy_io,
+)
from openpype.hosts.aftereffects.api import get_stub
@@ -27,8 +30,8 @@ class ValidateInstanceAssetRepair(pyblish.api.Action):
for instance in instances:
data = stub.read(instance[0])
- data["asset"] = api.Session["AVALON_ASSET"]
- stub.imprint(instance[0], data)
+ data["asset"] = legacy_io.Session["AVALON_ASSET"]
+ stub.imprint(instance[0].instance_id, data)
class ValidateInstanceAsset(pyblish.api.InstancePlugin):
@@ -51,7 +54,7 @@ class ValidateInstanceAsset(pyblish.api.InstancePlugin):
def process(self, instance):
instance_asset = instance.data["asset"]
- current_asset = api.Session["AVALON_ASSET"]
+ current_asset = legacy_io.Session["AVALON_ASSET"]
msg = (
f"Instance asset {instance_asset} is not the same "
f"as current context {current_asset}."
diff --git a/openpype/hosts/aftereffects/plugins/publish/validate_scene_settings.py b/openpype/hosts/aftereffects/plugins/publish/validate_scene_settings.py
index 273ccd295e..14e224fdc2 100644
--- a/openpype/hosts/aftereffects/plugins/publish/validate_scene_settings.py
+++ b/openpype/hosts/aftereffects/plugins/publish/validate_scene_settings.py
@@ -5,11 +5,15 @@ import re
import pyblish.api
-from openpype.pipeline import PublishXmlValidationError
+from openpype.pipeline import (
+ PublishXmlValidationError,
+ OptionalPyblishPluginMixin
+)
from openpype.hosts.aftereffects.api import get_asset_settings
-class ValidateSceneSettings(pyblish.api.InstancePlugin):
+class ValidateSceneSettings(OptionalPyblishPluginMixin,
+ pyblish.api.InstancePlugin):
"""
Ensures that Composition Settings (right mouse on comp) are same as
in FTrack on task.
@@ -59,15 +63,20 @@ class ValidateSceneSettings(pyblish.api.InstancePlugin):
def process(self, instance):
"""Plugin entry point."""
+ # Skip the instance if is not active by data on the instance
+ if not self.is_active(instance.data):
+ return
+
expected_settings = get_asset_settings()
self.log.info("config from DB::{}".format(expected_settings))
- if any(re.search(pattern, os.getenv('AVALON_TASK'))
+ task_name = instance.data["anatomyData"]["task"]["name"]
+ if any(re.search(pattern, task_name)
for pattern in self.skip_resolution_check):
expected_settings.pop("resolutionWidth")
expected_settings.pop("resolutionHeight")
- if any(re.search(pattern, os.getenv('AVALON_TASK'))
+ if any(re.search(pattern, task_name)
for pattern in self.skip_timelines_check):
expected_settings.pop('fps', None)
expected_settings.pop('frameStart', None)
@@ -87,10 +96,14 @@ class ValidateSceneSettings(pyblish.api.InstancePlugin):
duration = instance.data.get("frameEndHandle") - \
instance.data.get("frameStartHandle") + 1
- self.log.debug("filtered config::{}".format(expected_settings))
+ self.log.debug("validated items::{}".format(expected_settings))
current_settings = {
"fps": fps,
+ "frameStart": instance.data.get("frameStart"),
+ "frameEnd": instance.data.get("frameEnd"),
+ "handleStart": instance.data.get("handleStart"),
+ "handleEnd": instance.data.get("handleEnd"),
"frameStartHandle": instance.data.get("frameStartHandle"),
"frameEndHandle": instance.data.get("frameEndHandle"),
"resolutionWidth": instance.data.get("resolutionWidth"),
@@ -103,24 +116,22 @@ class ValidateSceneSettings(pyblish.api.InstancePlugin):
invalid_keys = set()
for key, value in expected_settings.items():
if value != current_settings[key]:
- invalid_settings.append(
- "{} expected: {} found: {}".format(key, value,
- current_settings[key])
- )
+ msg = "'{}' expected: '{}' found: '{}'".format(
+ key, value, current_settings[key])
+
+ if key == "duration" and expected_settings.get("handleStart"):
+ msg += "Handles included in calculation. Remove " \
+ "handles in DB or extend frame range in " \
+ "Composition Setting."
+
+ invalid_settings.append(msg)
invalid_keys.add(key)
- if ((expected_settings.get("handleStart")
- or expected_settings.get("handleEnd"))
- and invalid_settings):
- msg = "Handles included in calculation. Remove handles in DB " +\
- "or extend frame range in Composition Setting."
- invalid_settings[-1]["reason"] = msg
-
- msg = "Found invalid settings:\n{}".format(
- "\n".join(invalid_settings)
- )
-
if invalid_settings:
+ msg = "Found invalid settings:\n{}".format(
+ "\n".join(invalid_settings)
+ )
+
invalid_keys_str = ",".join(invalid_keys)
            break_str = "\n"
            invalid_setting_str = "Found invalid settings:\n{}".\
diff --git a/openpype/hosts/blender/api/ops.py b/openpype/hosts/blender/api/ops.py
index 29d6d356c8..c1b5add518 100644
--- a/openpype/hosts/blender/api/ops.py
+++ b/openpype/hosts/blender/api/ops.py
@@ -15,9 +15,9 @@ from Qt import QtWidgets, QtCore
import bpy
import bpy.utils.previews
-import avalon.api
-from openpype.tools.utils import host_tools
from openpype import style
+from openpype.pipeline import legacy_io
+from openpype.tools.utils import host_tools
from .workio import OpenFileCacher
@@ -279,7 +279,7 @@ class LaunchLoader(LaunchQtApp):
def before_window_show(self):
self._window.set_context(
- {"asset": avalon.api.Session["AVALON_ASSET"]},
+ {"asset": legacy_io.Session["AVALON_ASSET"]},
refresh=True
)
@@ -327,8 +327,8 @@ class LaunchWorkFiles(LaunchQtApp):
def execute(self, context):
result = super().execute(context)
self._window.set_context({
- "asset": avalon.api.Session["AVALON_ASSET"],
- "task": avalon.api.Session["AVALON_TASK"]
+ "asset": legacy_io.Session["AVALON_ASSET"],
+ "task": legacy_io.Session["AVALON_TASK"]
})
return result
@@ -358,8 +358,8 @@ class TOPBAR_MT_avalon(bpy.types.Menu):
else:
pyblish_menu_icon_id = 0
- asset = avalon.api.Session['AVALON_ASSET']
- task = avalon.api.Session['AVALON_TASK']
+ asset = legacy_io.Session['AVALON_ASSET']
+ task = legacy_io.Session['AVALON_TASK']
context_label = f"{asset}, {task}"
context_label_item = layout.row()
context_label_item.operator(
diff --git a/openpype/hosts/blender/api/pipeline.py b/openpype/hosts/blender/api/pipeline.py
index 0ea579970e..5b81764644 100644
--- a/openpype/hosts/blender/api/pipeline.py
+++ b/openpype/hosts/blender/api/pipeline.py
@@ -1,6 +1,5 @@
import os
import sys
-import importlib
import traceback
from typing import Callable, Dict, Iterator, List, Optional
@@ -10,16 +9,15 @@ from . import lib
from . import ops
import pyblish.api
-import avalon.api
-from avalon import io, schema
from openpype.pipeline import (
+ schema,
+ legacy_io,
register_loader_plugin_path,
register_creator_plugin_path,
deregister_loader_plugin_path,
deregister_creator_plugin_path,
AVALON_CONTAINER_ID,
- uninstall_host,
)
from openpype.api import Logger
from openpype.lib import (
@@ -85,8 +83,8 @@ def uninstall():
def set_start_end_frames():
- asset_name = io.Session["AVALON_ASSET"]
- asset_doc = io.find_one({
+ asset_name = legacy_io.Session["AVALON_ASSET"]
+ asset_doc = legacy_io.find_one({
"type": "asset",
"name": asset_name
})
@@ -190,7 +188,7 @@ def _on_task_changed():
# `directory` attribute, so it opens in that directory (does it?).
# https://docs.blender.org/api/blender2.8/bpy.types.Operator.html#calling-a-file-selector
# https://docs.blender.org/api/blender2.8/bpy.types.WindowManager.html#bpy.types.WindowManager.fileselect_add
- workdir = avalon.api.Session["AVALON_WORKDIR"]
+ workdir = legacy_io.Session["AVALON_WORKDIR"]
log.debug("New working directory: %s", workdir)
@@ -201,26 +199,6 @@ def _register_events():
log.info("Installed event callback for 'taskChanged'...")
-def reload_pipeline(*args):
- """Attempt to reload pipeline at run-time.
-
- Warning:
- This is primarily for development and debugging purposes and not well
- tested.
-
- """
-
- uninstall_host()
-
- for module in (
- "avalon.io",
- "avalon.pipeline",
- "avalon.api",
- ):
- module = importlib.import_module(module)
- importlib.reload(module)
-
-
def _discover_gui() -> Optional[Callable]:
"""Return the most desirable of the currently registered GUIs"""
diff --git a/openpype/hosts/blender/plugins/create/create_action.py b/openpype/hosts/blender/plugins/create/create_action.py
index 5f66f5da6e..54b3a501a7 100644
--- a/openpype/hosts/blender/plugins/create/create_action.py
+++ b/openpype/hosts/blender/plugins/create/create_action.py
@@ -2,7 +2,7 @@
import bpy
-from avalon import api
+from openpype.pipeline import legacy_io
import openpype.hosts.blender.api.plugin
from openpype.hosts.blender.api import lib
@@ -22,7 +22,7 @@ class CreateAction(openpype.hosts.blender.api.plugin.Creator):
name = openpype.hosts.blender.api.plugin.asset_name(asset, subset)
collection = bpy.data.collections.new(name=name)
bpy.context.scene.collection.children.link(collection)
- self.data['task'] = api.Session.get('AVALON_TASK')
+ self.data['task'] = legacy_io.Session.get('AVALON_TASK')
lib.imprint(collection, self.data)
if (self.options or {}).get("useSelection"):
diff --git a/openpype/hosts/blender/plugins/create/create_animation.py b/openpype/hosts/blender/plugins/create/create_animation.py
index b88010ae90..a0e9e5e399 100644
--- a/openpype/hosts/blender/plugins/create/create_animation.py
+++ b/openpype/hosts/blender/plugins/create/create_animation.py
@@ -2,7 +2,7 @@
import bpy
-from avalon import api
+from openpype.pipeline import legacy_io
from openpype.hosts.blender.api import plugin, lib, ops
from openpype.hosts.blender.api.pipeline import AVALON_INSTANCES
@@ -37,7 +37,7 @@ class CreateAnimation(plugin.Creator):
# asset_group.empty_display_type = 'SINGLE_ARROW'
asset_group = bpy.data.collections.new(name=name)
instances.children.link(asset_group)
- self.data['task'] = api.Session.get('AVALON_TASK')
+ self.data['task'] = legacy_io.Session.get('AVALON_TASK')
lib.imprint(asset_group, self.data)
if (self.options or {}).get("useSelection"):
diff --git a/openpype/hosts/blender/plugins/create/create_camera.py b/openpype/hosts/blender/plugins/create/create_camera.py
index cc796d464d..1a3c008069 100644
--- a/openpype/hosts/blender/plugins/create/create_camera.py
+++ b/openpype/hosts/blender/plugins/create/create_camera.py
@@ -2,7 +2,7 @@
import bpy
-from avalon import api
+from openpype.pipeline import legacy_io
from openpype.hosts.blender.api import plugin, lib, ops
from openpype.hosts.blender.api.pipeline import AVALON_INSTANCES
@@ -40,7 +40,7 @@ class CreateCamera(plugin.Creator):
asset_group = bpy.data.objects.new(name=name, object_data=None)
asset_group.empty_display_type = 'SINGLE_ARROW'
instances.objects.link(asset_group)
- self.data['task'] = api.Session.get('AVALON_TASK')
+ self.data['task'] = legacy_io.Session.get('AVALON_TASK')
print(f"self.data: {self.data}")
lib.imprint(asset_group, self.data)
diff --git a/openpype/hosts/blender/plugins/create/create_layout.py b/openpype/hosts/blender/plugins/create/create_layout.py
index f62cbc52ba..5949a4b86e 100644
--- a/openpype/hosts/blender/plugins/create/create_layout.py
+++ b/openpype/hosts/blender/plugins/create/create_layout.py
@@ -2,7 +2,7 @@
import bpy
-from avalon import api
+from openpype.pipeline import legacy_io
from openpype.hosts.blender.api import plugin, lib, ops
from openpype.hosts.blender.api.pipeline import AVALON_INSTANCES
@@ -34,7 +34,7 @@ class CreateLayout(plugin.Creator):
asset_group = bpy.data.objects.new(name=name, object_data=None)
asset_group.empty_display_type = 'SINGLE_ARROW'
instances.objects.link(asset_group)
- self.data['task'] = api.Session.get('AVALON_TASK')
+ self.data['task'] = legacy_io.Session.get('AVALON_TASK')
lib.imprint(asset_group, self.data)
# Add selected objects to instance
diff --git a/openpype/hosts/blender/plugins/create/create_model.py b/openpype/hosts/blender/plugins/create/create_model.py
index 75c90f9bb1..fedc708943 100644
--- a/openpype/hosts/blender/plugins/create/create_model.py
+++ b/openpype/hosts/blender/plugins/create/create_model.py
@@ -2,7 +2,7 @@
import bpy
-from avalon import api
+from openpype.pipeline import legacy_io
from openpype.hosts.blender.api import plugin, lib, ops
from openpype.hosts.blender.api.pipeline import AVALON_INSTANCES
@@ -34,7 +34,7 @@ class CreateModel(plugin.Creator):
asset_group = bpy.data.objects.new(name=name, object_data=None)
asset_group.empty_display_type = 'SINGLE_ARROW'
instances.objects.link(asset_group)
- self.data['task'] = api.Session.get('AVALON_TASK')
+ self.data['task'] = legacy_io.Session.get('AVALON_TASK')
lib.imprint(asset_group, self.data)
# Add selected objects to instance
diff --git a/openpype/hosts/blender/plugins/create/create_pointcache.py b/openpype/hosts/blender/plugins/create/create_pointcache.py
index bf5a84048f..38707fd3b1 100644
--- a/openpype/hosts/blender/plugins/create/create_pointcache.py
+++ b/openpype/hosts/blender/plugins/create/create_pointcache.py
@@ -2,7 +2,7 @@
import bpy
-from avalon import api
+from openpype.pipeline import legacy_io
import openpype.hosts.blender.api.plugin
from openpype.hosts.blender.api import lib
@@ -22,7 +22,7 @@ class CreatePointcache(openpype.hosts.blender.api.plugin.Creator):
name = openpype.hosts.blender.api.plugin.asset_name(asset, subset)
collection = bpy.data.collections.new(name=name)
bpy.context.scene.collection.children.link(collection)
- self.data['task'] = api.Session.get('AVALON_TASK')
+ self.data['task'] = legacy_io.Session.get('AVALON_TASK')
lib.imprint(collection, self.data)
if (self.options or {}).get("useSelection"):
diff --git a/openpype/hosts/blender/plugins/create/create_rig.py b/openpype/hosts/blender/plugins/create/create_rig.py
index 65f5061924..0abd306c6b 100644
--- a/openpype/hosts/blender/plugins/create/create_rig.py
+++ b/openpype/hosts/blender/plugins/create/create_rig.py
@@ -2,7 +2,7 @@
import bpy
-from avalon import api
+from openpype.pipeline import legacy_io
from openpype.hosts.blender.api import plugin, lib, ops
from openpype.hosts.blender.api.pipeline import AVALON_INSTANCES
@@ -34,7 +34,7 @@ class CreateRig(plugin.Creator):
asset_group = bpy.data.objects.new(name=name, object_data=None)
asset_group.empty_display_type = 'SINGLE_ARROW'
instances.objects.link(asset_group)
- self.data['task'] = api.Session.get('AVALON_TASK')
+ self.data['task'] = legacy_io.Session.get('AVALON_TASK')
lib.imprint(asset_group, self.data)
# Add selected objects to instance
diff --git a/openpype/hosts/blender/plugins/publish/extract_layout.py b/openpype/hosts/blender/plugins/publish/extract_layout.py
index b78a193d81..8ecc78a2c6 100644
--- a/openpype/hosts/blender/plugins/publish/extract_layout.py
+++ b/openpype/hosts/blender/plugins/publish/extract_layout.py
@@ -7,7 +7,7 @@ import bpy
import bpy_extras
import bpy_extras.anim_utils
-from avalon import io
+from openpype.pipeline import legacy_io
from openpype.hosts.blender.api import plugin
from openpype.hosts.blender.api.pipeline import AVALON_PROPERTY
import openpype.api
@@ -139,7 +139,7 @@ class ExtractLayout(openpype.api.Extractor):
self.log.debug("Parent: {}".format(parent))
# Get blend reference
- blend = io.find_one(
+ blend = legacy_io.find_one(
{
"type": "representation",
"parent": ObjectId(parent),
@@ -150,7 +150,7 @@ class ExtractLayout(openpype.api.Extractor):
if blend:
blend_id = blend["_id"]
# Get fbx reference
- fbx = io.find_one(
+ fbx = legacy_io.find_one(
{
"type": "representation",
"parent": ObjectId(parent),
@@ -161,7 +161,7 @@ class ExtractLayout(openpype.api.Extractor):
if fbx:
fbx_id = fbx["_id"]
# Get abc reference
- abc = io.find_one(
+ abc = legacy_io.find_one(
{
"type": "representation",
"parent": ObjectId(parent),
diff --git a/openpype/hosts/blender/plugins/publish/integrate_animation.py b/openpype/hosts/blender/plugins/publish/integrate_animation.py
index 90e94a4aac..d9a85bc79b 100644
--- a/openpype/hosts/blender/plugins/publish/integrate_animation.py
+++ b/openpype/hosts/blender/plugins/publish/integrate_animation.py
@@ -1,6 +1,5 @@
import json
-from avalon import io
import pyblish.api
diff --git a/openpype/hosts/celaction/plugins/publish/collect_audio.py b/openpype/hosts/celaction/plugins/publish/collect_audio.py
index 80c1c37d7e..8acda5fc7c 100644
--- a/openpype/hosts/celaction/plugins/publish/collect_audio.py
+++ b/openpype/hosts/celaction/plugins/publish/collect_audio.py
@@ -1,10 +1,10 @@
import os
import collections
+from pprint import pformat
import pyblish.api
-from avalon import io
-from pprint import pformat
+from openpype.pipeline import legacy_io
class AppendCelactionAudio(pyblish.api.ContextPlugin):
@@ -60,7 +60,7 @@ class AppendCelactionAudio(pyblish.api.ContextPlugin):
"""
# Query all subsets for asset
- subset_docs = io.find({
+ subset_docs = legacy_io.find({
"type": "subset",
"parent": asset_doc["_id"]
})
@@ -93,7 +93,7 @@ class AppendCelactionAudio(pyblish.api.ContextPlugin):
}}
]
last_versions_by_subset_id = dict()
- for doc in io.aggregate(pipeline):
+ for doc in legacy_io.aggregate(pipeline):
doc["parent"] = doc["_id"]
doc["_id"] = doc.pop("_version_id")
last_versions_by_subset_id[doc["parent"]] = doc
@@ -102,7 +102,7 @@ class AppendCelactionAudio(pyblish.api.ContextPlugin):
for version_doc in last_versions_by_subset_id.values():
version_docs_by_id[version_doc["_id"]] = version_doc
- repre_docs = io.find({
+ repre_docs = legacy_io.find({
"type": "representation",
"parent": {"$in": list(version_docs_by_id.keys())},
"name": {"$in": representations}
diff --git a/openpype/hosts/celaction/plugins/publish/collect_celaction_instances.py b/openpype/hosts/celaction/plugins/publish/collect_celaction_instances.py
index f393e471c4..1d2d9da1af 100644
--- a/openpype/hosts/celaction/plugins/publish/collect_celaction_instances.py
+++ b/openpype/hosts/celaction/plugins/publish/collect_celaction_instances.py
@@ -1,6 +1,6 @@
import os
-from avalon import api
import pyblish.api
+from openpype.pipeline import legacy_io
class CollectCelactionInstances(pyblish.api.ContextPlugin):
@@ -10,7 +10,7 @@ class CollectCelactionInstances(pyblish.api.ContextPlugin):
order = pyblish.api.CollectorOrder + 0.1
def process(self, context):
- task = api.Session["AVALON_TASK"]
+ task = legacy_io.Session["AVALON_TASK"]
current_file = context.data["currentFile"]
staging_dir = os.path.dirname(current_file)
scene_file = os.path.basename(current_file)
diff --git a/openpype/hosts/flame/plugins/publish/collect_timeline_otio.py b/openpype/hosts/flame/plugins/publish/collect_timeline_otio.py
index c6aeae7730..f2ae1f62a9 100644
--- a/openpype/hosts/flame/plugins/publish/collect_timeline_otio.py
+++ b/openpype/hosts/flame/plugins/publish/collect_timeline_otio.py
@@ -1,6 +1,7 @@
import pyblish.api
-import avalon.api as avalon
+
import openpype.lib as oplib
+from openpype.pipeline import legacy_io
import openpype.hosts.flame.api as opfapi
from openpype.hosts.flame.otio import flame_export
@@ -18,7 +19,7 @@ class CollecTimelineOTIO(pyblish.api.ContextPlugin):
# main
asset_doc = context.data["assetEntity"]
- task_name = avalon.Session["AVALON_TASK"]
+ task_name = legacy_io.Session["AVALON_TASK"]
project = opfapi.get_current_project()
sequence = opfapi.get_current_sequence(opfapi.CTX.selection)
diff --git a/openpype/hosts/fusion/api/lib.py b/openpype/hosts/fusion/api/lib.py
index f7a2360bfa..29f3a3a3eb 100644
--- a/openpype/hosts/fusion/api/lib.py
+++ b/openpype/hosts/fusion/api/lib.py
@@ -6,8 +6,10 @@ import contextlib
from bson.objectid import ObjectId
from Qt import QtGui
-from avalon import io
-from openpype.pipeline import switch_container
+from openpype.pipeline import (
+ switch_container,
+ legacy_io,
+)
from .pipeline import get_current_comp, comp_lock_and_undo_chunk
self = sys.modules[__name__]
@@ -94,8 +96,10 @@ def switch_item(container,
# so we can use the original name from those.
if any(not x for x in [asset_name, subset_name, representation_name]):
_id = ObjectId(container["representation"])
- representation = io.find_one({"type": "representation", "_id": _id})
- version, subset, asset, project = io.parenthood(representation)
+ representation = legacy_io.find_one({
+ "type": "representation", "_id": _id
+ })
+ version, subset, asset, project = legacy_io.parenthood(representation)
if asset_name is None:
asset_name = asset["name"]
@@ -107,14 +111,14 @@ def switch_item(container,
representation_name = representation["name"]
# Find the new one
- asset = io.find_one({
+ asset = legacy_io.find_one({
"name": asset_name,
"type": "asset"
})
assert asset, ("Could not find asset in the database with the name "
"'%s'" % asset_name)
- subset = io.find_one({
+ subset = legacy_io.find_one({
"name": subset_name,
"type": "subset",
"parent": asset["_id"]
@@ -122,7 +126,7 @@ def switch_item(container,
assert subset, ("Could not find subset in the database with the name "
"'%s'" % subset_name)
- version = io.find_one(
+ version = legacy_io.find_one(
{
"type": "version",
"parent": subset["_id"]
@@ -134,7 +138,7 @@ def switch_item(container,
asset_name, subset_name
)
- representation = io.find_one({
+ representation = legacy_io.find_one({
"name": representation_name,
"type": "representation",
"parent": version["_id"]}
diff --git a/openpype/hosts/fusion/plugins/load/load_sequence.py b/openpype/hosts/fusion/plugins/load/load_sequence.py
index 075820de35..b860abd88b 100644
--- a/openpype/hosts/fusion/plugins/load/load_sequence.py
+++ b/openpype/hosts/fusion/plugins/load/load_sequence.py
@@ -1,10 +1,9 @@
import os
import contextlib
-from avalon import io
-
from openpype.pipeline import (
load,
+ legacy_io,
get_representation_path,
)
from openpype.hosts.fusion.api import (
@@ -212,8 +211,10 @@ class FusionLoadSequence(load.LoaderPlugin):
path = self._get_first_image(root)
# Get start frame from version data
- version = io.find_one({"type": "version",
- "_id": representation["parent"]})
+ version = legacy_io.find_one({
+ "type": "version",
+ "_id": representation["parent"]
+ })
start = version["data"].get("frameStart")
if start is None:
self.log.warning("Missing start frame for updated version"
diff --git a/openpype/hosts/fusion/plugins/publish/submit_deadline.py b/openpype/hosts/fusion/plugins/publish/submit_deadline.py
index 9da99dd9e2..8570c759bc 100644
--- a/openpype/hosts/fusion/plugins/publish/submit_deadline.py
+++ b/openpype/hosts/fusion/plugins/publish/submit_deadline.py
@@ -4,10 +4,10 @@ import getpass
import requests
-from avalon import api
-
import pyblish.api
+from openpype.pipeline import legacy_io
+
class FusionSubmitDeadline(pyblish.api.InstancePlugin):
"""Submit current Comp to Deadline
@@ -133,7 +133,7 @@ class FusionSubmitDeadline(pyblish.api.InstancePlugin):
"FUSION9_MasterPrefs"
]
environment = dict({key: os.environ[key] for key in keys
- if key in os.environ}, **api.Session)
+ if key in os.environ}, **legacy_io.Session)
payload["JobInfo"].update({
"EnvironmentKeyValue%d" % index: "{key}={value}".format(
@@ -146,7 +146,7 @@ class FusionSubmitDeadline(pyblish.api.InstancePlugin):
self.log.info(json.dumps(payload, indent=4, sort_keys=True))
# E.g. http://192.168.0.1:8082/api/jobs
- url = "{}/api/jobs".format(DEADLINE_REST_URL)
+ url = "{}/api/jobs".format(deadline_url)
response = requests.post(url, json=payload)
if not response.ok:
raise Exception(response.text)
diff --git a/openpype/hosts/fusion/scripts/fusion_switch_shot.py b/openpype/hosts/fusion/scripts/fusion_switch_shot.py
index ca8e5c9e37..704f420796 100644
--- a/openpype/hosts/fusion/scripts/fusion_switch_shot.py
+++ b/openpype/hosts/fusion/scripts/fusion_switch_shot.py
@@ -4,10 +4,8 @@ import sys
import logging
# Pipeline imports
-import avalon.api
-from avalon import io
-
from openpype.pipeline import (
+ legacy_io,
install_host,
registered_host,
)
@@ -167,7 +165,7 @@ def update_frame_range(comp, representations):
"""
version_ids = [r["parent"] for r in representations]
- versions = io.find({"type": "version", "_id": {"$in": version_ids}})
+ versions = legacy_io.find({"type": "version", "_id": {"$in": version_ids}})
versions = list(versions)
versions = [v for v in versions
@@ -205,12 +203,11 @@ def switch(asset_name, filepath=None, new=True):
# Assert asset name exists
# It is better to do this here then to wait till switch_shot does it
- asset = io.find_one({"type": "asset", "name": asset_name})
+ asset = legacy_io.find_one({"type": "asset", "name": asset_name})
assert asset, "Could not find '%s' in the database" % asset_name
# Get current project
- self._project = io.find_one({"type": "project",
- "name": avalon.api.Session["AVALON_PROJECT"]})
+ self._project = legacy_io.find_one({"type": "project"})
# Go to comp
if not filepath:
@@ -241,7 +238,7 @@ def switch(asset_name, filepath=None, new=True):
current_comp.Print(message)
# Build the session to switch to
- switch_to_session = avalon.api.Session.copy()
+ switch_to_session = legacy_io.Session.copy()
switch_to_session["AVALON_ASSET"] = asset['name']
if new:
diff --git a/openpype/hosts/fusion/utility_scripts/switch_ui.py b/openpype/hosts/fusion/utility_scripts/switch_ui.py
index 37306c7a2a..70eb3d0a19 100644
--- a/openpype/hosts/fusion/utility_scripts/switch_ui.py
+++ b/openpype/hosts/fusion/utility_scripts/switch_ui.py
@@ -5,11 +5,13 @@ import logging
from Qt import QtWidgets, QtCore
-from avalon import io
import qtawesome as qta
from openpype import style
-from openpype.pipeline import install_host
+from openpype.pipeline import (
+ install_host,
+ legacy_io,
+)
from openpype.hosts.fusion import api
from openpype.lib.avalon_context import get_workdir_from_session
@@ -164,7 +166,7 @@ class App(QtWidgets.QWidget):
return items
def collect_assets(self):
- return list(io.find({"type": "asset"}, {"name": True}))
+ return list(legacy_io.find({"type": "asset"}, {"name": True}))
def populate_comp_box(self, files):
"""Ensure we display the filename only but the path is stored as well
diff --git a/openpype/hosts/harmony/api/README.md b/openpype/hosts/harmony/api/README.md
index e8d354e1e6..dd45eb14dd 100644
--- a/openpype/hosts/harmony/api/README.md
+++ b/openpype/hosts/harmony/api/README.md
@@ -419,7 +419,6 @@ class ExtractImage(pyblish.api.InstancePlugin):
```python
import os
-from avalon import api, io
import openpype.hosts.harmony.api as harmony
signature = str(uuid4()).replace("-", "_")
@@ -611,7 +610,7 @@ class ImageSequenceLoader(load.LoaderPlugin):
def update(self, container, representation):
node = container.pop("node")
- version = io.find_one({"_id": representation["parent"]})
+ version = legacy_io.find_one({"_id": representation["parent"]})
files = []
for f in version["data"]["files"]:
files.append(
diff --git a/openpype/hosts/harmony/api/pipeline.py b/openpype/hosts/harmony/api/pipeline.py
index 88f11dd16f..b953d0e984 100644
--- a/openpype/hosts/harmony/api/pipeline.py
+++ b/openpype/hosts/harmony/api/pipeline.py
@@ -5,11 +5,10 @@ import logging
from bson.objectid import ObjectId
import pyblish.api
-from avalon import io
-
from openpype import lib
from openpype.lib import register_event_callback
from openpype.pipeline import (
+ legacy_io,
register_loader_plugin_path,
register_creator_plugin_path,
deregister_loader_plugin_path,
@@ -111,7 +110,7 @@ def check_inventory():
outdated_containers = []
for container in ls():
representation = container['representation']
- representation_doc = io.find_one(
+ representation_doc = legacy_io.find_one(
{
"_id": ObjectId(representation),
"type": "representation"
diff --git a/openpype/hosts/harmony/plugins/publish/collect_farm_render.py b/openpype/hosts/harmony/plugins/publish/collect_farm_render.py
index 35b123f97d..f5bf051243 100644
--- a/openpype/hosts/harmony/plugins/publish/collect_farm_render.py
+++ b/openpype/hosts/harmony/plugins/publish/collect_farm_render.py
@@ -3,13 +3,13 @@
from pathlib import Path
import attr
-from avalon import api
-from openpype.lib import get_formatted_current_time
-import openpype.lib.abstract_collect_render
-import openpype.hosts.harmony.api as harmony
-from openpype.lib.abstract_collect_render import RenderInstance
import openpype.lib
+import openpype.lib.abstract_collect_render
+from openpype.lib.abstract_collect_render import RenderInstance
+from openpype.lib import get_formatted_current_time
+from openpype.pipeline import legacy_io
+import openpype.hosts.harmony.api as harmony
@attr.s
@@ -143,7 +143,7 @@ class CollectFarmRender(openpype.lib.abstract_collect_render.
source=context.data["currentFile"],
label=node.split("/")[1],
subset=subset_name,
- asset=api.Session["AVALON_ASSET"],
+ asset=legacy_io.Session["AVALON_ASSET"],
attachTo=False,
setMembers=[node],
publish=info[4],
diff --git a/openpype/hosts/hiero/api/lib.py b/openpype/hosts/hiero/api/lib.py
index df3b24ff2c..0e64ddcaf5 100644
--- a/openpype/hosts/hiero/api/lib.py
+++ b/openpype/hosts/hiero/api/lib.py
@@ -12,8 +12,7 @@ import hiero
from Qt import QtWidgets
from bson.objectid import ObjectId
-import avalon.api as avalon
-import avalon.io
+from openpype.pipeline import legacy_io
from openpype.api import (Logger, Anatomy, get_anatomy_settings)
from . import tags
@@ -38,8 +37,6 @@ self.pype_tag_name = "openpypeData"
self.default_sequence_name = "openpypeSequence"
self.default_bin_name = "openpypeBin"
-AVALON_CONFIG = os.getenv("AVALON_CONFIG", "pype")
-
def flatten(_list):
for item in _list:
@@ -49,6 +46,7 @@ def flatten(_list):
else:
yield item
+
def get_current_project(remove_untitled=False):
projects = flatten(hiero.core.projects())
if not remove_untitled:
@@ -384,7 +382,7 @@ def get_publish_attribute(tag):
def sync_avalon_data_to_workfile():
# import session to get project dir
- project_name = avalon.Session["AVALON_PROJECT"]
+ project_name = legacy_io.Session["AVALON_PROJECT"]
anatomy = Anatomy(project_name)
work_template = anatomy.templates["work"]["path"]
@@ -409,7 +407,7 @@ def sync_avalon_data_to_workfile():
project.setProjectRoot(active_project_root)
# get project data from avalon db
- project_doc = avalon.io.find_one({"type": "project"})
+ project_doc = legacy_io.find_one({"type": "project"})
project_data = project_doc["data"]
log.debug("project_data: {}".format(project_data))
@@ -995,7 +993,6 @@ def check_inventory_versions():
it to red.
"""
from . import parse_container
- from avalon import io
# presets
clip_color_last = "green"
@@ -1007,19 +1004,19 @@ def check_inventory_versions():
if container:
# get representation from io
- representation = io.find_one({
+ representation = legacy_io.find_one({
"type": "representation",
"_id": ObjectId(container["representation"])
})
# Get start frame from version data
- version = io.find_one({
+ version = legacy_io.find_one({
"type": "version",
"_id": representation["parent"]
})
# get all versions in list
- versions = io.find({
+ versions = legacy_io.find({
"type": "version",
"parent": version["parent"]
}).distinct('name')
diff --git a/openpype/hosts/hiero/api/menu.py b/openpype/hosts/hiero/api/menu.py
index de20b86f30..e262abec00 100644
--- a/openpype/hosts/hiero/api/menu.py
+++ b/openpype/hosts/hiero/api/menu.py
@@ -1,14 +1,16 @@
import os
import sys
+
import hiero.core
-from openpype.api import Logger
-from openpype.tools.utils import host_tools
-from avalon.api import Session
from hiero.ui import findMenuAction
+from openpype.api import Logger
+from openpype.pipeline import legacy_io
+from openpype.tools.utils import host_tools
+
from . import tags
-log = Logger().get_logger(__name__)
+log = Logger.get_logger(__name__)
self = sys.modules[__name__]
self._change_context_menu = None
@@ -24,8 +26,10 @@ def update_menu_task_label():
log.warning("Can't find menuItem: {}".format(object_name))
return
- label = "{}, {}".format(Session["AVALON_ASSET"],
- Session["AVALON_TASK"])
+ label = "{}, {}".format(
+ legacy_io.Session["AVALON_ASSET"],
+ legacy_io.Session["AVALON_TASK"]
+ )
menu = found_menu.menu()
self._change_context_menu = label
@@ -51,7 +55,8 @@ def menu_install():
menu_name = os.environ['AVALON_LABEL']
context_label = "{0}, {1}".format(
- Session["AVALON_ASSET"], Session["AVALON_TASK"]
+ legacy_io.Session["AVALON_ASSET"],
+ legacy_io.Session["AVALON_TASK"]
)
self._change_context_menu = context_label
diff --git a/openpype/hosts/hiero/api/pipeline.py b/openpype/hosts/hiero/api/pipeline.py
index 616ff53fd8..8025ebff05 100644
--- a/openpype/hosts/hiero/api/pipeline.py
+++ b/openpype/hosts/hiero/api/pipeline.py
@@ -5,10 +5,10 @@ import os
import contextlib
from collections import OrderedDict
-from avalon import schema
from pyblish import api as pyblish
from openpype.api import Logger
from openpype.pipeline import (
+ schema,
register_creator_plugin_path,
register_loader_plugin_path,
deregister_creator_plugin_path,
@@ -20,8 +20,6 @@ from . import lib, menu, events
log = Logger().get_logger(__name__)
-AVALON_CONFIG = os.getenv("AVALON_CONFIG", "pype")
-
# plugin paths
API_DIR = os.path.dirname(os.path.abspath(__file__))
HOST_DIR = os.path.dirname(API_DIR)
@@ -247,15 +245,10 @@ def reload_config():
import importlib
for module in (
- "avalon",
- "avalon.lib",
- "avalon.pipeline",
- "pyblish",
- "pypeapp",
- "{}.api".format(AVALON_CONFIG),
- "{}.hosts.hiero.lib".format(AVALON_CONFIG),
- "{}.hosts.hiero.menu".format(AVALON_CONFIG),
- "{}.hosts.hiero.tags".format(AVALON_CONFIG)
+ "openpype.api",
+ "openpype.hosts.hiero.lib",
+ "openpype.hosts.hiero.menu",
+ "openpype.hosts.hiero.tags"
):
log.info("Reloading module: {}...".format(module))
try:
diff --git a/openpype/hosts/hiero/api/tags.py b/openpype/hosts/hiero/api/tags.py
index fe5c0d5257..e15e3119a6 100644
--- a/openpype/hosts/hiero/api/tags.py
+++ b/openpype/hosts/hiero/api/tags.py
@@ -3,9 +3,9 @@ import os
import hiero
from openpype.api import Logger
-from avalon import io
+from openpype.pipeline import legacy_io
-log = Logger().get_logger(__name__)
+log = Logger.get_logger(__name__)
def tag_data():
@@ -141,7 +141,7 @@ def add_tags_to_workfile():
nks_pres_tags = tag_data()
# Get project task types.
- tasks = io.find_one({"type": "project"})["config"]["tasks"]
+ tasks = legacy_io.find_one({"type": "project"})["config"]["tasks"]
nks_pres_tags["[Tasks]"] = {}
log.debug("__ tasks: {}".format(tasks))
for task_type in tasks.keys():
@@ -159,7 +159,7 @@ def add_tags_to_workfile():
# asset builds and shots.
if int(os.getenv("TAG_ASSETBUILD_STARTUP", 0)) == 1:
nks_pres_tags["[AssetBuilds]"] = {}
- for asset in io.find({"type": "asset"}):
+ for asset in legacy_io.find({"type": "asset"}):
if asset["data"]["entityType"] == "AssetBuild":
nks_pres_tags["[AssetBuilds]"][asset["name"]] = {
"editable": "1",
diff --git a/openpype/hosts/hiero/plugins/load/load_clip.py b/openpype/hosts/hiero/plugins/load/load_clip.py
index d3908695a2..da4326c8c1 100644
--- a/openpype/hosts/hiero/plugins/load/load_clip.py
+++ b/openpype/hosts/hiero/plugins/load/load_clip.py
@@ -1,5 +1,7 @@
-from avalon import io
-from openpype.pipeline import get_representation_path
+from openpype.pipeline import (
+ legacy_io,
+ get_representation_path,
+)
import openpype.hosts.hiero.api as phiero
# from openpype.hosts.hiero.api import plugin, lib
# reload(lib)
@@ -105,7 +107,7 @@ class LoadClip(phiero.SequenceLoader):
namespace = container['namespace']
track_item = phiero.get_track_items(
track_item_name=namespace)
- version = io.find_one({
+ version = legacy_io.find_one({
"type": "version",
"_id": representation["parent"]
})
@@ -174,7 +176,7 @@ class LoadClip(phiero.SequenceLoader):
# define version name
version_name = version.get("name", None)
# get all versions in list
- versions = io.find({
+ versions = legacy_io.find({
"type": "version",
"parent": version["parent"]
}).distinct('name')
diff --git a/openpype/hosts/hiero/plugins/publish/precollect_workfile.py b/openpype/hosts/hiero/plugins/publish/precollect_workfile.py
index d48d6949bd..29c0397f79 100644
--- a/openpype/hosts/hiero/plugins/publish/precollect_workfile.py
+++ b/openpype/hosts/hiero/plugins/publish/precollect_workfile.py
@@ -1,12 +1,15 @@
import os
-import pyblish.api
-import hiero.ui
-from openpype.hosts.hiero import api as phiero
-from avalon import api as avalon
-from pprint import pformat
-from openpype.hosts.hiero.api.otio import hiero_export
-from Qt.QtGui import QPixmap
import tempfile
+from pprint import pformat
+
+import pyblish.api
+from Qt.QtGui import QPixmap
+
+import hiero.ui
+
+from openpype.pipeline import legacy_io
+from openpype.hosts.hiero import api as phiero
+from openpype.hosts.hiero.api.otio import hiero_export
class PrecollectWorkfile(pyblish.api.ContextPlugin):
@@ -17,7 +20,7 @@ class PrecollectWorkfile(pyblish.api.ContextPlugin):
def process(self, context):
- asset = avalon.Session["AVALON_ASSET"]
+ asset = legacy_io.Session["AVALON_ASSET"]
subset = "workfile"
project = phiero.get_current_project()
active_timeline = hiero.ui.activeSequence()
diff --git a/openpype/hosts/hiero/plugins/publish_old_workflow/collect_assetbuilds.py b/openpype/hosts/hiero/plugins/publish_old_workflow/collect_assetbuilds.py
index a90856c6fd..10baf25803 100644
--- a/openpype/hosts/hiero/plugins/publish_old_workflow/collect_assetbuilds.py
+++ b/openpype/hosts/hiero/plugins/publish_old_workflow/collect_assetbuilds.py
@@ -1,5 +1,5 @@
from pyblish import api
-from avalon import io
+from openpype.pipeline import legacy_io
class CollectAssetBuilds(api.ContextPlugin):
@@ -18,7 +18,7 @@ class CollectAssetBuilds(api.ContextPlugin):
def process(self, context):
asset_builds = {}
- for asset in io.find({"type": "asset"}):
+ for asset in legacy_io.find({"type": "asset"}):
if asset["data"]["entityType"] == "AssetBuild":
self.log.debug("Found \"{}\" in database.".format(asset))
asset_builds[asset["name"]] = asset
diff --git a/openpype/hosts/hiero/plugins/publish_old_workflow/precollect_workfile.py b/openpype/hosts/hiero/plugins/publish_old_workflow/precollect_workfile.py
index ef7d07421b..693e151f6f 100644
--- a/openpype/hosts/hiero/plugins/publish_old_workflow/precollect_workfile.py
+++ b/openpype/hosts/hiero/plugins/publish_old_workflow/precollect_workfile.py
@@ -1,7 +1,7 @@
import os
import pyblish.api
from openpype.hosts.hiero import api as phiero
-from avalon import api as avalon
+from openpype.pipeline import legacy_io
class PreCollectWorkfile(pyblish.api.ContextPlugin):
@@ -11,7 +11,7 @@ class PreCollectWorkfile(pyblish.api.ContextPlugin):
order = pyblish.api.CollectorOrder - 0.51
def process(self, context):
- asset = avalon.Session["AVALON_ASSET"]
+ asset = legacy_io.Session["AVALON_ASSET"]
subset = "workfile"
project = phiero.get_current_project()
diff --git a/openpype/hosts/houdini/api/lib.py b/openpype/hosts/houdini/api/lib.py
index bd41618856..603519069a 100644
--- a/openpype/hosts/houdini/api/lib.py
+++ b/openpype/hosts/houdini/api/lib.py
@@ -4,8 +4,8 @@ from contextlib import contextmanager
import six
-from avalon import api, io
from openpype.api import get_asset
+from openpype.pipeline import legacy_io
import hou
@@ -75,9 +75,13 @@ def generate_ids(nodes, asset_id=None):
if asset_id is None:
# Get the asset ID from the database for the asset of current context
- asset_data = io.find_one({"type": "asset",
- "name": api.Session["AVALON_ASSET"]},
- projection={"_id": True})
+ asset_data = legacy_io.find_one(
+ {
+ "type": "asset",
+ "name": legacy_io.Session["AVALON_ASSET"]
+ },
+ projection={"_id": True}
+ )
assert asset_data, "No current asset found in Session"
asset_id = asset_data['_id']
@@ -155,7 +159,7 @@ def validate_fps():
if parent is None:
pass
else:
- dialog = popup.Popup(parent=parent)
+ dialog = popup.PopupUpdateKeys(parent=parent)
dialog.setModal(True)
dialog.setWindowTitle("Houdini scene does not match project FPS")
dialog.setMessage("Scene %i FPS does not match project %i FPS" %
@@ -163,7 +167,7 @@ def validate_fps():
dialog.setButtonText("Fix")
# on_show is the Fix button clicked callback
- dialog.on_clicked.connect(lambda: set_scene_fps(fps))
+ dialog.on_clicked_state.connect(lambda: set_scene_fps(fps))
dialog.show()
@@ -424,8 +428,8 @@ def maintained_selection():
def reset_framerange():
"""Set frame range to current asset"""
- asset_name = api.Session["AVALON_ASSET"]
- asset = io.find_one({"name": asset_name, "type": "asset"})
+ asset_name = legacy_io.Session["AVALON_ASSET"]
+ asset = legacy_io.find_one({"name": asset_name, "type": "asset"})
frame_start = asset["data"].get("frameStart")
frame_end = asset["data"].get("frameEnd")
diff --git a/openpype/hosts/houdini/api/usd.py b/openpype/hosts/houdini/api/usd.py
index a992f1d082..e9991e38ec 100644
--- a/openpype/hosts/houdini/api/usd.py
+++ b/openpype/hosts/houdini/api/usd.py
@@ -1,11 +1,12 @@
"""Houdini-specific USD Library functions."""
import contextlib
-
import logging
+
from Qt import QtWidgets, QtCore, QtGui
-from avalon import io
+
from openpype import style
+from openpype.pipeline import legacy_io
from openpype.tools.utils.assets_widget import SingleSelectAssetsWidget
from pxr import Sdf
@@ -20,11 +21,12 @@ class SelectAssetDialog(QtWidgets.QWidget):
Args:
parm: Parameter where selected asset name is set.
"""
+
def __init__(self, parm):
self.setWindowTitle("Pick Asset")
self.setWindowFlags(QtCore.Qt.FramelessWindowHint | QtCore.Qt.Popup)
- assets_widget = SingleSelectAssetsWidget(io, parent=self)
+ assets_widget = SingleSelectAssetsWidget(legacy_io, parent=self)
layout = QtWidgets.QHBoxLayout(self)
layout.addWidget(assets_widget)
@@ -44,7 +46,7 @@ class SelectAssetDialog(QtWidgets.QWidget):
select_id = None
name = self._parm.eval()
if name:
- db_asset = io.find_one(
+ db_asset = legacy_io.find_one(
{"name": name, "type": "asset"},
{"_id": True}
)
diff --git a/openpype/hosts/houdini/plugins/create/create_hda.py b/openpype/hosts/houdini/plugins/create/create_hda.py
index 0a9c1bad1e..5fc78c7539 100644
--- a/openpype/hosts/houdini/plugins/create/create_hda.py
+++ b/openpype/hosts/houdini/plugins/create/create_hda.py
@@ -1,6 +1,7 @@
# -*- coding: utf-8 -*-
import hou
-from avalon import io
+
+from openpype.pipeline import legacy_io
from openpype.hosts.houdini.api import lib
from openpype.hosts.houdini.api import plugin
@@ -22,13 +23,16 @@ class CreateHDA(plugin.Creator):
# type: (str) -> bool
"""Check if existing subset name versions already exists."""
# Get all subsets of the current asset
- asset_id = io.find_one({"name": self.data["asset"], "type": "asset"},
- projection={"_id": True})['_id']
- subset_docs = io.find(
+ asset_id = legacy_io.find_one(
+ {"name": self.data["asset"], "type": "asset"},
+ projection={"_id": True}
+ )['_id']
+ subset_docs = legacy_io.find(
{
"type": "subset",
"parent": asset_id
- }, {"name": 1}
+ },
+ {"name": 1}
)
existing_subset_names = set(subset_docs.distinct("name"))
existing_subset_names_low = {
diff --git a/openpype/hosts/houdini/plugins/publish/collect_usd_bootstrap.py b/openpype/hosts/houdini/plugins/publish/collect_usd_bootstrap.py
index 66dfba64df..3f0d10e0ba 100644
--- a/openpype/hosts/houdini/plugins/publish/collect_usd_bootstrap.py
+++ b/openpype/hosts/houdini/plugins/publish/collect_usd_bootstrap.py
@@ -1,6 +1,6 @@
import pyblish.api
-from avalon import io
+from openpype.pipeline import legacy_io
import openpype.lib.usdlib as usdlib
@@ -50,7 +50,10 @@ class CollectUsdBootstrap(pyblish.api.InstancePlugin):
self.log.debug("Add bootstrap for: %s" % bootstrap)
- asset = io.find_one({"name": instance.data["asset"], "type": "asset"})
+ asset = legacy_io.find_one({
+ "name": instance.data["asset"],
+ "type": "asset"
+ })
assert asset, "Asset must exist: %s" % asset
# Check which are not about to be created and don't exist yet
@@ -104,7 +107,8 @@ class CollectUsdBootstrap(pyblish.api.InstancePlugin):
# Or, if they already exist in the database we can
# skip them too.
return bool(
- io.find_one(
- {"name": subset, "type": "subset", "parent": asset["_id"]}
+ legacy_io.find_one(
+ {"name": subset, "type": "subset", "parent": asset["_id"]},
+ {"_id": True}
)
)
diff --git a/openpype/hosts/houdini/plugins/publish/extract_usd_layered.py b/openpype/hosts/houdini/plugins/publish/extract_usd_layered.py
index 3e842ae766..bfcd93c1cb 100644
--- a/openpype/hosts/houdini/plugins/publish/extract_usd_layered.py
+++ b/openpype/hosts/houdini/plugins/publish/extract_usd_layered.py
@@ -7,7 +7,10 @@ from collections import deque
import pyblish.api
import openpype.api
-from openpype.pipeline import get_representation_path
+from openpype.pipeline import (
+ get_representation_path,
+ legacy_io,
+)
import openpype.hosts.houdini.api.usd as hou_usdlib
from openpype.hosts.houdini.api.lib import render_rop
@@ -266,8 +269,6 @@ class ExtractUSDLayered(openpype.api.Extractor):
instance.data["files"].append(fname)
def _compare_with_latest_publish(self, dependency, new_file):
-
- from avalon import api, io
import filecmp
_, ext = os.path.splitext(new_file)
@@ -275,10 +276,10 @@ class ExtractUSDLayered(openpype.api.Extractor):
# Compare this dependency with the latest published version
# to detect whether we should make this into a new publish
# version. If not, skip it.
- asset = io.find_one(
+ asset = legacy_io.find_one(
{"name": dependency.data["asset"], "type": "asset"}
)
- subset = io.find_one(
+ subset = legacy_io.find_one(
{
"name": dependency.data["subset"],
"type": "subset",
@@ -290,7 +291,7 @@ class ExtractUSDLayered(openpype.api.Extractor):
self.log.debug("No existing subset..")
return False
- version = io.find_one(
+ version = legacy_io.find_one(
{"type": "version", "parent": subset["_id"], },
sort=[("name", -1)]
)
@@ -298,7 +299,7 @@ class ExtractUSDLayered(openpype.api.Extractor):
self.log.debug("No existing version..")
return False
- representation = io.find_one(
+ representation = legacy_io.find_one(
{
"name": ext.lstrip("."),
"type": "representation",
diff --git a/openpype/hosts/houdini/plugins/publish/validate_usd_shade_model_exists.py b/openpype/hosts/houdini/plugins/publish/validate_usd_shade_model_exists.py
index fcfbf6b22d..44719ae488 100644
--- a/openpype/hosts/houdini/plugins/publish/validate_usd_shade_model_exists.py
+++ b/openpype/hosts/houdini/plugins/publish/validate_usd_shade_model_exists.py
@@ -1,9 +1,9 @@
import re
import pyblish.api
-import openpype.api
-from avalon import io
+import openpype.api
+from openpype.pipeline import legacy_io
class ValidateUSDShadeModelExists(pyblish.api.InstancePlugin):
@@ -23,16 +23,20 @@ class ValidateUSDShadeModelExists(pyblish.api.InstancePlugin):
shade_subset = subset.split(".", 1)[0]
model_subset = re.sub("^usdShade", "usdModel", shade_subset)
- asset_doc = io.find_one({"name": asset, "type": "asset"})
+ asset_doc = legacy_io.find_one(
+ {"name": asset, "type": "asset"},
+ {"_id": True}
+ )
if not asset_doc:
raise RuntimeError("Asset does not exist: %s" % asset)
- subset_doc = io.find_one(
+ subset_doc = legacy_io.find_one(
{
"name": model_subset,
"type": "subset",
"parent": asset_doc["_id"],
- }
+ },
+ {"_id": True}
)
if not subset_doc:
raise RuntimeError(
diff --git a/openpype/hosts/houdini/vendor/husdoutputprocessors/avalon_uri_processor.py b/openpype/hosts/houdini/vendor/husdoutputprocessors/avalon_uri_processor.py
index 8cd51e6641..01a29472e7 100644
--- a/openpype/hosts/houdini/vendor/husdoutputprocessors/avalon_uri_processor.py
+++ b/openpype/hosts/houdini/vendor/husdoutputprocessors/avalon_uri_processor.py
@@ -1,17 +1,21 @@
+import os
import hou
import husdoutputprocessors.base as base
-import os
-import re
-import logging
import colorbleed.usdlib as usdlib
+from openpype.pipeline import (
+ legacy_io,
+ registered_root,
+)
+
def _get_project_publish_template():
"""Return publish template from database for current project"""
- from avalon import io
- project = io.find_one({"type": "project"},
- projection={"config.template.publish": True})
+ project = legacy_io.find_one(
+ {"type": "project"},
+ projection={"config.template.publish": True}
+ )
return project["config"]["template"]["publish"]
@@ -133,12 +137,11 @@ class AvalonURIOutputProcessor(base.OutputProcessorBase):
"""
- from avalon import api, io
- from openpype.pipeline import registered_root
-
- PROJECT = api.Session["AVALON_PROJECT"]
- asset_doc = io.find_one({"name": asset,
- "type": "asset"})
+ PROJECT = legacy_io.Session["AVALON_PROJECT"]
+ asset_doc = legacy_io.find_one({
+ "name": asset,
+ "type": "asset"
+ })
if not asset_doc:
raise RuntimeError("Invalid asset name: '%s'" % asset)
diff --git a/openpype/hosts/maya/api/action.py b/openpype/hosts/maya/api/action.py
index ab26748c8a..ca1006b6aa 100644
--- a/openpype/hosts/maya/api/action.py
+++ b/openpype/hosts/maya/api/action.py
@@ -2,8 +2,8 @@
from __future__ import absolute_import
import pyblish.api
-from avalon import io
+from openpype.pipeline import legacy_io
from openpype.api import get_errored_instances_from_context
@@ -75,8 +75,10 @@ class GenerateUUIDsOnInvalidAction(pyblish.api.Action):
from . import lib
asset = instance.data['asset']
- asset_id = io.find_one({"name": asset, "type": "asset"},
- projection={"_id": True})['_id']
+ asset_id = legacy_io.find_one(
+ {"name": asset, "type": "asset"},
+ projection={"_id": True}
+ )['_id']
for node, _id in lib.generate_ids(nodes, asset_id=asset_id):
lib.set_id(node, _id, overwrite=True)
diff --git a/openpype/hosts/maya/api/commands.py b/openpype/hosts/maya/api/commands.py
index a1e0be2cfe..dd616b6dd6 100644
--- a/openpype/hosts/maya/api/commands.py
+++ b/openpype/hosts/maya/api/commands.py
@@ -1,7 +1,8 @@
# -*- coding: utf-8 -*-
"""OpenPype script commands to be used directly in Maya."""
from maya import cmds
-from avalon import api, io
+
+from openpype.pipeline import legacy_io
class ToolWindows:
@@ -73,13 +74,13 @@ def reset_frame_range():
59.94: '59.94fps',
44100: '44100fps',
48000: '48000fps'
- }.get(float(api.Session.get("AVALON_FPS", 25)), "pal")
+ }.get(float(legacy_io.Session.get("AVALON_FPS", 25)), "pal")
cmds.currentUnit(time=fps)
# Set frame start/end
- asset_name = api.Session["AVALON_ASSET"]
- asset = io.find_one({"name": asset_name, "type": "asset"})
+ asset_name = legacy_io.Session["AVALON_ASSET"]
+ asset = legacy_io.find_one({"name": asset_name, "type": "asset"})
frame_start = asset["data"].get("frameStart")
frame_end = asset["data"].get("frameEnd")
@@ -144,8 +145,8 @@ def reset_resolution():
resolution_height = 1080
# Get resolution from asset
- asset_name = api.Session["AVALON_ASSET"]
- asset_doc = io.find_one({"name": asset_name, "type": "asset"})
+ asset_name = legacy_io.Session["AVALON_ASSET"]
+ asset_doc = legacy_io.find_one({"name": asset_name, "type": "asset"})
resolution = _resolution_from_document(asset_doc)
# Try get resolution from project
if resolution is None:
@@ -154,7 +155,7 @@ def reset_resolution():
"Asset \"{}\" does not have set resolution."
" Trying to get resolution from project"
).format(asset_name))
- project_doc = io.find_one({"type": "project"})
+ project_doc = legacy_io.find_one({"type": "project"})
resolution = _resolution_from_document(project_doc)
if resolution is None:
diff --git a/openpype/hosts/maya/api/lib.py b/openpype/hosts/maya/api/lib.py
index 9e99b96477..088304ab05 100644
--- a/openpype/hosts/maya/api/lib.py
+++ b/openpype/hosts/maya/api/lib.py
@@ -17,11 +17,10 @@ import bson
from maya import cmds, mel
import maya.api.OpenMaya as om
-from avalon import api, io
-
from openpype import lib
from openpype.api import get_anatomy_settings
from openpype.pipeline import (
+ legacy_io,
discover_loader_plugins,
loaders_from_representation,
get_representation_path,
@@ -1388,9 +1387,13 @@ def generate_ids(nodes, asset_id=None):
if asset_id is None:
# Get the asset ID from the database for the asset of current context
- asset_data = io.find_one({"type": "asset",
- "name": api.Session["AVALON_ASSET"]},
- projection={"_id": True})
+ asset_data = legacy_io.find_one(
+ {
+ "type": "asset",
+ "name": legacy_io.Session["AVALON_ASSET"]
+ },
+ projection={"_id": True}
+ )
assert asset_data, "No current asset found in Session"
asset_id = asset_data['_id']
@@ -1545,9 +1548,11 @@ def list_looks(asset_id):
# # get all subsets with look leading in
# the name associated with the asset
- subset = io.find({"parent": bson.ObjectId(asset_id),
- "type": "subset",
- "name": {"$regex": "look*"}})
+ subset = legacy_io.find({
+ "parent": bson.ObjectId(asset_id),
+ "type": "subset",
+ "name": {"$regex": "look*"}
+ })
return list(subset)
@@ -1566,13 +1571,17 @@ def assign_look_by_version(nodes, version_id):
"""
# Get representations of shader file and relationships
- look_representation = io.find_one({"type": "representation",
- "parent": version_id,
- "name": "ma"})
+ look_representation = legacy_io.find_one({
+ "type": "representation",
+ "parent": version_id,
+ "name": "ma"
+ })
- json_representation = io.find_one({"type": "representation",
- "parent": version_id,
- "name": "json"})
+ json_representation = legacy_io.find_one({
+ "type": "representation",
+ "parent": version_id,
+ "name": "json"
+ })
# See if representation is already loaded, if so reuse it.
host = registered_host()
@@ -1637,9 +1646,11 @@ def assign_look(nodes, subset="lookDefault"):
except bson.errors.InvalidId:
log.warning("Asset ID is not compatible with bson")
continue
- subset_data = io.find_one({"type": "subset",
- "name": subset,
- "parent": asset_id})
+ subset_data = legacy_io.find_one({
+ "type": "subset",
+ "name": subset,
+ "parent": asset_id
+ })
if not subset_data:
log.warning("No subset '{}' found for {}".format(subset, asset_id))
@@ -1647,13 +1658,18 @@ def assign_look(nodes, subset="lookDefault"):
# get last version
# with backwards compatibility
- version = io.find_one({"parent": subset_data['_id'],
- "type": "version",
- "data.families":
- {"$in": ["look"]}
- },
- sort=[("name", -1)],
- projection={"_id": True, "name": True})
+ version = legacy_io.find_one(
+ {
+ "parent": subset_data['_id'],
+ "type": "version",
+ "data.families": {"$in": ["look"]}
+ },
+ sort=[("name", -1)],
+ projection={
+ "_id": True,
+ "name": True
+ }
+ )
log.debug("Assigning look '{}' ".format(subset,
version["name"]))
@@ -2136,7 +2152,7 @@ def reset_scene_resolution():
None
"""
- project_doc = io.find_one({"type": "project"})
+ project_doc = legacy_io.find_one({"type": "project"})
project_data = project_doc["data"]
asset_data = lib.get_asset()["data"]
@@ -2169,13 +2185,13 @@ def set_context_settings():
"""
# Todo (Wijnand): apply renderer and resolution of project
- project_doc = io.find_one({"type": "project"})
+ project_doc = legacy_io.find_one({"type": "project"})
project_data = project_doc["data"]
asset_data = lib.get_asset()["data"]
# Set project fps
fps = asset_data.get("fps", project_data.get("fps", 25))
- api.Session["AVALON_FPS"] = str(fps)
+ legacy_io.Session["AVALON_FPS"] = str(fps)
set_scene_fps(fps)
reset_scene_resolution()
@@ -2210,15 +2226,17 @@ def validate_fps():
parent = get_main_window()
- dialog = popup.Popup2(parent=parent)
+ dialog = popup.PopupUpdateKeys(parent=parent)
dialog.setModal(True)
- dialog.setWindowTitle("Maya scene not in line with project")
- dialog.setMessage("The FPS is out of sync, please fix")
+ dialog.setWindowTitle("Maya scene does not match project FPS")
+ dialog.setMessage("Scene %i FPS does not match project %i FPS" %
+ (current_fps, fps))
+ dialog.setButtonText("Fix")
# Set new text for button (add optional argument for the popup?)
toggle = dialog.widgets["toggle"]
update = toggle.isChecked()
- dialog.on_show.connect(lambda: set_scene_fps(fps, update))
+ dialog.on_clicked_state.connect(lambda: set_scene_fps(fps, update))
dialog.show()
@@ -2935,7 +2953,7 @@ def update_content_on_context_change():
This will update scene content to match new asset on context change
"""
scene_sets = cmds.listSets(allSets=True)
- new_asset = api.Session["AVALON_ASSET"]
+ new_asset = legacy_io.Session["AVALON_ASSET"]
new_data = lib.get_asset()["data"]
for s in scene_sets:
try:
diff --git a/openpype/hosts/maya/api/menu.py b/openpype/hosts/maya/api/menu.py
index 5f0fc39bf3..97f06c43af 100644
--- a/openpype/hosts/maya/api/menu.py
+++ b/openpype/hosts/maya/api/menu.py
@@ -6,10 +6,9 @@ from Qt import QtWidgets, QtGui
import maya.utils
import maya.cmds as cmds
-import avalon.api
-
from openpype.api import BuildWorkfile
from openpype.settings import get_project_settings
+from openpype.pipeline import legacy_io
from openpype.tools.utils import host_tools
from openpype.hosts.maya.api import lib
from .lib import get_main_window, IS_HEADLESS
@@ -40,15 +39,15 @@ def install():
parent_widget = get_main_window()
cmds.menu(
MENU_NAME,
- label=avalon.api.Session["AVALON_LABEL"],
+ label=legacy_io.Session["AVALON_LABEL"],
tearOff=True,
parent="MayaWindow"
)
# Create context menu
context_label = "{}, {}".format(
- avalon.api.Session["AVALON_ASSET"],
- avalon.api.Session["AVALON_TASK"]
+ legacy_io.Session["AVALON_ASSET"],
+ legacy_io.Session["AVALON_TASK"]
)
cmds.menuItem(
"currentContext",
@@ -211,7 +210,7 @@ def update_menu_task_label():
return
label = "{}, {}".format(
- avalon.api.Session["AVALON_ASSET"],
- avalon.api.Session["AVALON_TASK"]
+ legacy_io.Session["AVALON_ASSET"],
+ legacy_io.Session["AVALON_TASK"]
)
cmds.menuItem(object_name, edit=True, label=label)
diff --git a/openpype/hosts/maya/api/pipeline.py b/openpype/hosts/maya/api/pipeline.py
index f6f3472eef..b0e8fac635 100644
--- a/openpype/hosts/maya/api/pipeline.py
+++ b/openpype/hosts/maya/api/pipeline.py
@@ -7,7 +7,6 @@ from maya import utils, cmds, OpenMaya
import maya.api.OpenMaya as om
import pyblish.api
-import avalon.api
import openpype.hosts.maya
from openpype.tools.utils import host_tools
@@ -18,6 +17,7 @@ from openpype.lib import (
)
from openpype.lib.path_tools import HostDirmap
from openpype.pipeline import (
+ legacy_io,
register_loader_plugin_path,
register_inventory_action_path,
register_creator_plugin_path,
@@ -93,7 +93,7 @@ def _set_project():
None
"""
- workdir = avalon.api.Session["AVALON_WORKDIR"]
+ workdir = legacy_io.Session["AVALON_WORKDIR"]
try:
os.makedirs(workdir)
@@ -448,7 +448,7 @@ def on_open():
dialog.setWindowTitle("Maya scene has outdated content")
dialog.setMessage("There are outdated containers in "
"your Maya scene.")
- dialog.on_show.connect(_on_show_inventory)
+ dialog.on_clicked.connect(_on_show_inventory)
dialog.show()
@@ -473,7 +473,7 @@ def on_task_changed():
# Run
menu.update_menu_task_label()
- workdir = avalon.api.Session["AVALON_WORKDIR"]
+ workdir = legacy_io.Session["AVALON_WORKDIR"]
if os.path.exists(workdir):
log.info("Updating Maya workspace for task change to %s", workdir)
@@ -494,9 +494,9 @@ def on_task_changed():
lib.update_content_on_context_change()
msg = " project: {}\n asset: {}\n task:{}".format(
- avalon.api.Session["AVALON_PROJECT"],
- avalon.api.Session["AVALON_ASSET"],
- avalon.api.Session["AVALON_TASK"]
+ legacy_io.Session["AVALON_PROJECT"],
+ legacy_io.Session["AVALON_ASSET"],
+ legacy_io.Session["AVALON_TASK"]
)
lib.show_message(
diff --git a/openpype/hosts/maya/api/setdress.py b/openpype/hosts/maya/api/setdress.py
index 0b60564e5e..f8d3ed79b8 100644
--- a/openpype/hosts/maya/api/setdress.py
+++ b/openpype/hosts/maya/api/setdress.py
@@ -10,8 +10,9 @@ from bson.objectid import ObjectId
from maya import cmds
-from avalon import io
from openpype.pipeline import (
+ schema,
+ legacy_io,
discover_loader_plugins,
loaders_from_representation,
load_container,
@@ -253,7 +254,6 @@ def get_contained_containers(container):
"""
- import avalon.schema
from .pipeline import parse_container
# Get avalon containers in this package setdress container
@@ -263,7 +263,7 @@ def get_contained_containers(container):
try:
member_container = parse_container(node)
containers.append(member_container)
- except avalon.schema.ValidationError:
+ except schema.ValidationError:
pass
return containers
@@ -283,21 +283,23 @@ def update_package_version(container, version):
"""
# Versioning (from `core.maya.pipeline`)
- current_representation = io.find_one({
+ current_representation = legacy_io.find_one({
"_id": ObjectId(container["representation"])
})
assert current_representation is not None, "This is a bug"
- version_, subset, asset, project = io.parenthood(current_representation)
+ version_, subset, asset, project = legacy_io.parenthood(
+ current_representation
+ )
if version == -1:
- new_version = io.find_one({
+ new_version = legacy_io.find_one({
"type": "version",
"parent": subset["_id"]
}, sort=[("name", -1)])
else:
- new_version = io.find_one({
+ new_version = legacy_io.find_one({
"type": "version",
"parent": subset["_id"],
"name": version,
@@ -306,7 +308,7 @@ def update_package_version(container, version):
assert new_version is not None, "This is a bug"
# Get the new representation (new file)
- new_representation = io.find_one({
+ new_representation = legacy_io.find_one({
"type": "representation",
"parent": new_version["_id"],
"name": current_representation["name"]
@@ -328,7 +330,7 @@ def update_package(set_container, representation):
"""
# Load the original package data
- current_representation = io.find_one({
+ current_representation = legacy_io.find_one({
"_id": ObjectId(set_container['representation']),
"type": "representation"
})
@@ -479,10 +481,10 @@ def update_scene(set_container, containers, current_data, new_data, new_file):
# Check whether the conversion can be done by the Loader.
# They *must* use the same asset, subset and Loader for
# `update_container` to make sense.
- old = io.find_one({
+ old = legacy_io.find_one({
"_id": ObjectId(representation_current)
})
- new = io.find_one({
+ new = legacy_io.find_one({
"_id": ObjectId(representation_new)
})
is_valid = compare_representations(old=old, new=new)
diff --git a/openpype/hosts/maya/plugins/create/create_render.py b/openpype/hosts/maya/plugins/create/create_render.py
index 15230519d2..70e6aee9b4 100644
--- a/openpype/hosts/maya/plugins/create/create_render.py
+++ b/openpype/hosts/maya/plugins/create/create_render.py
@@ -18,9 +18,10 @@ from openpype.api import (
get_project_settings,
get_asset)
from openpype.modules import ModulesManager
-from openpype.pipeline import CreatorError
-
-from avalon.api import Session
+from openpype.pipeline import (
+ CreatorError,
+ legacy_io,
+)
class CreateRender(plugin.Creator):
@@ -103,7 +104,7 @@ class CreateRender(plugin.Creator):
self.deadline_servers = {}
return
self._project_settings = get_project_settings(
- Session["AVALON_PROJECT"])
+ legacy_io.Session["AVALON_PROJECT"])
# project_settings/maya/create/CreateRender/aov_separator
try:
diff --git a/openpype/hosts/maya/plugins/create/create_unreal_skeletalmesh.py b/openpype/hosts/maya/plugins/create/create_unreal_skeletalmesh.py
index a6deeeee2e..1a8e84c80d 100644
--- a/openpype/hosts/maya/plugins/create/create_unreal_skeletalmesh.py
+++ b/openpype/hosts/maya/plugins/create/create_unreal_skeletalmesh.py
@@ -1,7 +1,7 @@
# -*- coding: utf-8 -*-
"""Creator for Unreal Skeletal Meshes."""
from openpype.hosts.maya.api import plugin, lib
-from avalon.api import Session
+from openpype.pipeline import legacy_io
from maya import cmds # noqa
@@ -26,7 +26,7 @@ class CreateUnrealSkeletalMesh(plugin.Creator):
dynamic_data = super(CreateUnrealSkeletalMesh, cls).get_dynamic_data(
variant, task_name, asset_id, project_name, host_name
)
- dynamic_data["asset"] = Session.get("AVALON_ASSET")
+ dynamic_data["asset"] = legacy_io.Session.get("AVALON_ASSET")
return dynamic_data
def process(self):
diff --git a/openpype/hosts/maya/plugins/create/create_unreal_staticmesh.py b/openpype/hosts/maya/plugins/create/create_unreal_staticmesh.py
index f62d15fe62..4e4417ff34 100644
--- a/openpype/hosts/maya/plugins/create/create_unreal_staticmesh.py
+++ b/openpype/hosts/maya/plugins/create/create_unreal_staticmesh.py
@@ -1,8 +1,8 @@
# -*- coding: utf-8 -*-
"""Creator for Unreal Static Meshes."""
from openpype.hosts.maya.api import plugin, lib
-from avalon.api import Session
from openpype.api import get_project_settings
+from openpype.pipeline import legacy_io
from maya import cmds # noqa
@@ -18,7 +18,7 @@ class CreateUnrealStaticMesh(plugin.Creator):
"""Constructor."""
super(CreateUnrealStaticMesh, self).__init__(*args, **kwargs)
self._project_settings = get_project_settings(
- Session["AVALON_PROJECT"])
+ legacy_io.Session["AVALON_PROJECT"])
@classmethod
def get_dynamic_data(
@@ -27,7 +27,7 @@ class CreateUnrealStaticMesh(plugin.Creator):
dynamic_data = super(CreateUnrealStaticMesh, cls).get_dynamic_data(
variant, task_name, asset_id, project_name, host_name
)
- dynamic_data["asset"] = Session.get("AVALON_ASSET")
+ dynamic_data["asset"] = legacy_io.Session.get("AVALON_ASSET")
return dynamic_data
def process(self):
diff --git a/openpype/hosts/maya/plugins/create/create_vrayscene.py b/openpype/hosts/maya/plugins/create/create_vrayscene.py
index 98dfabbbcb..45c4b7e443 100644
--- a/openpype/hosts/maya/plugins/create/create_vrayscene.py
+++ b/openpype/hosts/maya/plugins/create/create_vrayscene.py
@@ -18,11 +18,12 @@ from openpype.api import (
)
from openpype.lib import requests_get
-from openpype.pipeline import CreatorError
+from openpype.pipeline import (
+ CreatorError,
+ legacy_io,
+)
from openpype.modules import ModulesManager
-from avalon.api import Session
-
class CreateVRayScene(plugin.Creator):
"""Create Vray Scene."""
@@ -47,7 +48,7 @@ class CreateVRayScene(plugin.Creator):
self.deadline_servers = {}
return
self._project_settings = get_project_settings(
- Session["AVALON_PROJECT"])
+ legacy_io.Session["AVALON_PROJECT"])
try:
default_servers = deadline_settings["deadline_urls"]
diff --git a/openpype/hosts/maya/plugins/inventory/import_modelrender.py b/openpype/hosts/maya/plugins/inventory/import_modelrender.py
index c2e43f196f..a5367f16e5 100644
--- a/openpype/hosts/maya/plugins/inventory/import_modelrender.py
+++ b/openpype/hosts/maya/plugins/inventory/import_modelrender.py
@@ -1,9 +1,10 @@
import json
-from avalon import io
from bson.objectid import ObjectId
+
from openpype.pipeline import (
InventoryAction,
get_representation_context,
+ legacy_io,
)
from openpype.hosts.maya.api.lib import (
maintained_selection,
@@ -39,7 +40,7 @@ class ImportModelRender(InventoryAction):
else:
nodes.append(n)
- repr_doc = io.find_one({
+ repr_doc = legacy_io.find_one({
"_id": ObjectId(container["representation"]),
})
version_id = repr_doc["parent"]
@@ -63,7 +64,7 @@ class ImportModelRender(InventoryAction):
from maya import cmds
# Get representations of shader file and relationships
- look_repr = io.find_one({
+ look_repr = legacy_io.find_one({
"type": "representation",
"parent": version_id,
"name": {"$regex": self.scene_type_regex},
@@ -72,7 +73,7 @@ class ImportModelRender(InventoryAction):
print("No model render sets for this model version..")
return
- json_repr = io.find_one({
+ json_repr = legacy_io.find_one({
"type": "representation",
"parent": version_id,
"name": self.look_data_type,
diff --git a/openpype/hosts/maya/plugins/load/load_audio.py b/openpype/hosts/maya/plugins/load/load_audio.py
index d8844ffea6..ce814e1299 100644
--- a/openpype/hosts/maya/plugins/load/load_audio.py
+++ b/openpype/hosts/maya/plugins/load/load_audio.py
@@ -1,8 +1,9 @@
from maya import cmds, mel
-from avalon import io
+
from openpype.pipeline import (
+ legacy_io,
load,
- get_representation_path
+ get_representation_path,
)
from openpype.hosts.maya.api.pipeline import containerise
from openpype.hosts.maya.api.lib import unique_namespace
@@ -64,9 +65,9 @@ class AudioLoader(load.LoaderPlugin):
)
# Set frame range.
- version = io.find_one({"_id": representation["parent"]})
- subset = io.find_one({"_id": version["parent"]})
- asset = io.find_one({"_id": subset["parent"]})
+ version = legacy_io.find_one({"_id": representation["parent"]})
+ subset = legacy_io.find_one({"_id": version["parent"]})
+ asset = legacy_io.find_one({"_id": subset["parent"]})
audio_node.sourceStart.set(1 - asset["data"]["frameStart"])
audio_node.sourceEnd.set(asset["data"]["frameEnd"])
diff --git a/openpype/hosts/maya/plugins/load/load_image_plane.py b/openpype/hosts/maya/plugins/load/load_image_plane.py
index b250986489..b67c2cb209 100644
--- a/openpype/hosts/maya/plugins/load/load_image_plane.py
+++ b/openpype/hosts/maya/plugins/load/load_image_plane.py
@@ -1,7 +1,7 @@
from Qt import QtWidgets, QtCore
-from avalon import io
from openpype.pipeline import (
+ legacy_io,
load,
get_representation_path
)
@@ -216,9 +216,9 @@ class ImagePlaneLoader(load.LoaderPlugin):
)
# Set frame range.
- version = io.find_one({"_id": representation["parent"]})
- subset = io.find_one({"_id": version["parent"]})
- asset = io.find_one({"_id": subset["parent"]})
+ version = legacy_io.find_one({"_id": representation["parent"]})
+ subset = legacy_io.find_one({"_id": version["parent"]})
+ asset = legacy_io.find_one({"_id": subset["parent"]})
start_frame = asset["data"]["frameStart"]
end_frame = asset["data"]["frameEnd"]
image_plane_shape.frameOffset.set(1 - start_frame)
diff --git a/openpype/hosts/maya/plugins/load/load_look.py b/openpype/hosts/maya/plugins/load/load_look.py
index 8f02ed59b8..80eac8e0b5 100644
--- a/openpype/hosts/maya/plugins/load/load_look.py
+++ b/openpype/hosts/maya/plugins/load/load_look.py
@@ -5,8 +5,10 @@ from collections import defaultdict
from Qt import QtWidgets
-from avalon import io
-from openpype.pipeline import get_representation_path
+from openpype.pipeline import (
+ legacy_io,
+ get_representation_path,
+)
import openpype.hosts.maya.api.plugin
from openpype.hosts.maya.api import lib
from openpype.widgets.message_window import ScrollMessageBox
@@ -71,7 +73,7 @@ class LookLoader(openpype.hosts.maya.api.plugin.ReferenceLoader):
shader_nodes = cmds.ls(members, type='shadingEngine')
nodes = set(self._get_nodes_with_shader(shader_nodes))
- json_representation = io.find_one({
+ json_representation = legacy_io.find_one({
"type": "representation",
"parent": representation['parent'],
"name": "json"
diff --git a/openpype/hosts/maya/plugins/load/load_reference.py b/openpype/hosts/maya/plugins/load/load_reference.py
index a7222edfd4..a8875cf216 100644
--- a/openpype/hosts/maya/plugins/load/load_reference.py
+++ b/openpype/hosts/maya/plugins/load/load_reference.py
@@ -1,10 +1,12 @@
import os
from maya import cmds
-from avalon import api
from openpype.api import get_project_settings
from openpype.lib import get_creator_by_name
-from openpype.pipeline import legacy_create
+from openpype.pipeline import (
+ legacy_io,
+ legacy_create,
+)
import openpype.hosts.maya.api.plugin
from openpype.hosts.maya.api.lib import maintained_selection
@@ -143,7 +145,7 @@ class ReferenceLoader(openpype.hosts.maya.api.plugin.ReferenceLoader):
roots = cmds.ls(self[:], assemblies=True, long=True)
assert roots, "No root nodes in rig, this is a bug."
- asset = api.Session["AVALON_ASSET"]
+ asset = legacy_io.Session["AVALON_ASSET"]
dependency = str(context["representation"]["_id"])
self.log.info("Creating subset: {}".format(namespace))
diff --git a/openpype/hosts/maya/plugins/load/load_vrayproxy.py b/openpype/hosts/maya/plugins/load/load_vrayproxy.py
index 69d54df62b..22d56139f6 100644
--- a/openpype/hosts/maya/plugins/load/load_vrayproxy.py
+++ b/openpype/hosts/maya/plugins/load/load_vrayproxy.py
@@ -11,9 +11,9 @@ from bson.objectid import ObjectId
import maya.cmds as cmds
-from avalon import io
from openpype.api import get_project_settings
from openpype.pipeline import (
+ legacy_io,
load,
get_representation_path
)
@@ -185,12 +185,11 @@ class VRayProxyLoader(load.LoaderPlugin):
"""
self.log.debug(
"Looking for abc in published representations of this version.")
- abc_rep = io.find_one(
- {
- "type": "representation",
- "parent": ObjectId(version_id),
- "name": "abc"
- })
+ abc_rep = legacy_io.find_one({
+ "type": "representation",
+ "parent": ObjectId(version_id),
+ "name": "abc"
+ })
if abc_rep:
self.log.debug("Found, we'll link alembic to vray proxy.")
diff --git a/openpype/hosts/maya/plugins/load/load_yeti_cache.py b/openpype/hosts/maya/plugins/load/load_yeti_cache.py
index c64e1c540b..fb903785ae 100644
--- a/openpype/hosts/maya/plugins/load/load_yeti_cache.py
+++ b/openpype/hosts/maya/plugins/load/load_yeti_cache.py
@@ -7,9 +7,9 @@ from pprint import pprint
from maya import cmds
-from avalon import io
from openpype.api import get_project_settings
from openpype.pipeline import (
+ legacy_io,
load,
get_representation_path
)
@@ -111,11 +111,11 @@ class YetiCacheLoader(load.LoaderPlugin):
def update(self, container, representation):
- io.install()
+ legacy_io.install()
namespace = container["namespace"]
container_node = container["objectName"]
- fur_settings = io.find_one(
+ fur_settings = legacy_io.find_one(
{"parent": representation["parent"], "name": "fursettings"}
)
diff --git a/openpype/hosts/maya/plugins/publish/collect_ass.py b/openpype/hosts/maya/plugins/publish/collect_ass.py
index 8e6691120a..7c9a1b76fb 100644
--- a/openpype/hosts/maya/plugins/publish/collect_ass.py
+++ b/openpype/hosts/maya/plugins/publish/collect_ass.py
@@ -1,23 +1,16 @@
from maya import cmds
-import pymel.core as pm
import pyblish.api
-import avalon.api
+
class CollectAssData(pyblish.api.InstancePlugin):
- """Collect Ass data
-
- """
+ """Collect Ass data."""
order = pyblish.api.CollectorOrder + 0.2
label = 'Collect Ass'
families = ["ass"]
def process(self, instance):
-
-
- context = instance.context
-
objsets = instance.data['setMembers']
for objset in objsets:
diff --git a/openpype/hosts/maya/plugins/publish/collect_render.py b/openpype/hosts/maya/plugins/publish/collect_render.py
index 14b9157005..ea17ae020d 100644
--- a/openpype/hosts/maya/plugins/publish/collect_render.py
+++ b/openpype/hosts/maya/plugins/publish/collect_render.py
@@ -49,8 +49,8 @@ import maya.app.renderSetup.model.renderSetup as renderSetup
import pyblish.api
-from avalon import api
from openpype.lib import get_formatted_current_time
+from openpype.pipeline import legacy_io
from openpype.hosts.maya.api.lib_renderproducts import get as get_layer_render_products # noqa: E501
from openpype.hosts.maya.api import lib
@@ -93,7 +93,7 @@ class CollectMayaRender(pyblish.api.ContextPlugin):
render_globals = render_instance
collected_render_layers = render_instance.data["setMembers"]
filepath = context.data["currentFile"].replace("\\", "/")
- asset = api.Session["AVALON_ASSET"]
+ asset = legacy_io.Session["AVALON_ASSET"]
workspace = context.data["workspaceDir"]
deadline_settings = (
diff --git a/openpype/hosts/maya/plugins/publish/collect_review.py b/openpype/hosts/maya/plugins/publish/collect_review.py
index 60183341f9..1af92c3bfc 100644
--- a/openpype/hosts/maya/plugins/publish/collect_review.py
+++ b/openpype/hosts/maya/plugins/publish/collect_review.py
@@ -2,7 +2,8 @@ from maya import cmds, mel
import pymel.core as pm
import pyblish.api
-import avalon.api
+
+from openpype.pipeline import legacy_io
class CollectReview(pyblish.api.InstancePlugin):
@@ -19,7 +20,7 @@ class CollectReview(pyblish.api.InstancePlugin):
self.log.debug('instance: {}'.format(instance))
- task = avalon.api.Session["AVALON_TASK"]
+ task = legacy_io.Session["AVALON_TASK"]
# get cameras
members = instance.data['setMembers']
diff --git a/openpype/hosts/maya/plugins/publish/collect_vrayscene.py b/openpype/hosts/maya/plugins/publish/collect_vrayscene.py
index 327fc836dc..afdb570cbc 100644
--- a/openpype/hosts/maya/plugins/publish/collect_vrayscene.py
+++ b/openpype/hosts/maya/plugins/publish/collect_vrayscene.py
@@ -6,7 +6,8 @@ import maya.app.renderSetup.model.renderSetup as renderSetup
from maya import cmds
import pyblish.api
-from avalon import api
+
+from openpype.pipeline import legacy_io
from openpype.lib import get_formatted_current_time
from openpype.hosts.maya.api import lib
@@ -117,7 +118,7 @@ class CollectVrayScene(pyblish.api.InstancePlugin):
# instance subset
"family": "vrayscene_layer",
"families": ["vrayscene_layer"],
- "asset": api.Session["AVALON_ASSET"],
+ "asset": legacy_io.Session["AVALON_ASSET"],
"time": get_formatted_current_time(),
"author": context.data["user"],
# Add source to allow tracing back to the scene from
diff --git a/openpype/hosts/maya/plugins/publish/collect_workfile.py b/openpype/hosts/maya/plugins/publish/collect_workfile.py
index ee676f50d0..12d86869ea 100644
--- a/openpype/hosts/maya/plugins/publish/collect_workfile.py
+++ b/openpype/hosts/maya/plugins/publish/collect_workfile.py
@@ -1,7 +1,8 @@
-import pyblish.api
-import avalon.api
import os
+import pyblish.api
+
from maya import cmds
+from openpype.pipeline import legacy_io
class CollectWorkfile(pyblish.api.ContextPlugin):
@@ -19,7 +20,7 @@ class CollectWorkfile(pyblish.api.ContextPlugin):
folder, file = os.path.split(current_file)
filename, ext = os.path.splitext(file)
- task = avalon.api.Session["AVALON_TASK"]
+ task = legacy_io.Session["AVALON_TASK"]
data = {}
diff --git a/openpype/hosts/maya/plugins/publish/extract_look.py b/openpype/hosts/maya/plugins/publish/extract_look.py
index 6fcc308f78..881705b92c 100644
--- a/openpype/hosts/maya/plugins/publish/extract_look.py
+++ b/openpype/hosts/maya/plugins/publish/extract_look.py
@@ -12,9 +12,9 @@ from collections import OrderedDict
from maya import cmds # noqa
import pyblish.api
-from avalon import io
import openpype.api
+from openpype.pipeline import legacy_io
from openpype.hosts.maya.api import lib
# Modes for transfer
@@ -40,7 +40,7 @@ def find_paths_by_hash(texture_hash):
"""
key = "data.sourceHashes.{0}".format(texture_hash)
- return io.distinct(key, {"type": "version"})
+ return legacy_io.distinct(key, {"type": "version"})
def maketx(source, destination, *args):
diff --git a/openpype/hosts/maya/plugins/publish/submit_maya_muster.py b/openpype/hosts/maya/plugins/publish/submit_maya_muster.py
index 255ed96901..c4250a20bd 100644
--- a/openpype/hosts/maya/plugins/publish/submit_maya_muster.py
+++ b/openpype/hosts/maya/plugins/publish/submit_maya_muster.py
@@ -7,11 +7,10 @@ import appdirs
from maya import cmds
-from avalon import api
-
import pyblish.api
from openpype.lib import requests_post
from openpype.hosts.maya.api import lib
+from openpype.pipeline import legacy_io
from openpype.api import get_system_settings
@@ -489,7 +488,6 @@ class MayaSubmitMuster(pyblish.api.InstancePlugin):
"MAYA_RENDER_DESC_PATH",
"MAYA_MODULE_PATH",
"ARNOLD_PLUGIN_PATH",
- "AVALON_SCHEMA",
"FTRACK_API_KEY",
"FTRACK_API_USER",
"FTRACK_SERVER",
@@ -503,7 +501,7 @@ class MayaSubmitMuster(pyblish.api.InstancePlugin):
"TOOL_ENV"
]
environment = dict({key: os.environ[key] for key in keys
- if key in os.environ}, **api.Session)
+ if key in os.environ}, **legacy_io.Session)
# self.log.debug("enviro: {}".format(pprint(environment)))
for path in os.environ:
if path.lower().startswith('pype_'):
@@ -548,4 +546,3 @@ class MayaSubmitMuster(pyblish.api.InstancePlugin):
"%f=%d was rounded off to nearest integer"
% (value, int(value))
)
-
diff --git a/openpype/hosts/maya/plugins/publish/validate_model_name.py b/openpype/hosts/maya/plugins/publish/validate_model_name.py
index 3757e13a9b..50acf2b8b7 100644
--- a/openpype/hosts/maya/plugins/publish/validate_model_name.py
+++ b/openpype/hosts/maya/plugins/publish/validate_model_name.py
@@ -1,16 +1,17 @@
# -*- coding: utf-8 -*-
"""Validate model nodes names."""
+import os
+import re
from maya import cmds
import pyblish.api
+
import openpype.api
-import avalon.api
+from openpype.pipeline import legacy_io
import openpype.hosts.maya.api.action
from openpype.hosts.maya.api.shader_definition_editor import (
DEFINITION_FILENAME)
from openpype.lib.mongo import OpenPypeMongoConnection
import gridfs
-import re
-import os
class ValidateModelName(pyblish.api.InstancePlugin):
@@ -68,7 +69,7 @@ class ValidateModelName(pyblish.api.InstancePlugin):
invalid.append(top_group)
else:
if "asset" in r.groupindex:
- if m.group("asset") != avalon.api.Session["AVALON_ASSET"]:
+ if m.group("asset") != legacy_io.Session["AVALON_ASSET"]:
cls.log.error("Invalid asset name in top level group.")
return top_group
if "subset" in r.groupindex:
@@ -76,7 +77,7 @@ class ValidateModelName(pyblish.api.InstancePlugin):
cls.log.error("Invalid subset name in top level group.")
return top_group
if "project" in r.groupindex:
- if m.group("project") != avalon.api.Session["AVALON_PROJECT"]:
+ if m.group("project") != legacy_io.Session["AVALON_PROJECT"]:
cls.log.error("Invalid project name in top level group.")
return top_group
diff --git a/openpype/hosts/maya/plugins/publish/validate_node_ids_in_database.py b/openpype/hosts/maya/plugins/publish/validate_node_ids_in_database.py
index c5f675c8ca..068d6b38a1 100644
--- a/openpype/hosts/maya/plugins/publish/validate_node_ids_in_database.py
+++ b/openpype/hosts/maya/plugins/publish/validate_node_ids_in_database.py
@@ -1,8 +1,7 @@
import pyblish.api
-from avalon import io
-
import openpype.api
+from openpype.pipeline import legacy_io
import openpype.hosts.maya.api.action
from openpype.hosts.maya.api import lib
@@ -43,7 +42,7 @@ class ValidateNodeIdsInDatabase(pyblish.api.InstancePlugin):
nodes=instance[:])
# check ids against database ids
- db_asset_ids = io.find({"type": "asset"}).distinct("_id")
+ db_asset_ids = legacy_io.find({"type": "asset"}).distinct("_id")
db_asset_ids = set(str(i) for i in db_asset_ids)
# Get all asset IDs
diff --git a/openpype/hosts/maya/plugins/publish/validate_node_ids_related.py b/openpype/hosts/maya/plugins/publish/validate_node_ids_related.py
index 276b6713f4..38407e4176 100644
--- a/openpype/hosts/maya/plugins/publish/validate_node_ids_related.py
+++ b/openpype/hosts/maya/plugins/publish/validate_node_ids_related.py
@@ -1,9 +1,8 @@
import pyblish.api
import openpype.api
-from avalon import io
+from openpype.pipeline import legacy_io
import openpype.hosts.maya.api.action
-
from openpype.hosts.maya.api import lib
@@ -38,7 +37,7 @@ class ValidateNodeIDsRelated(pyblish.api.InstancePlugin):
invalid = list()
asset = instance.data['asset']
- asset_data = io.find_one(
+ asset_data = legacy_io.find_one(
{
"name": asset,
"type": "asset"
diff --git a/openpype/hosts/maya/plugins/publish/validate_renderlayer_aovs.py b/openpype/hosts/maya/plugins/publish/validate_renderlayer_aovs.py
index 4eb445ac68..e65150eb0f 100644
--- a/openpype/hosts/maya/plugins/publish/validate_renderlayer_aovs.py
+++ b/openpype/hosts/maya/plugins/publish/validate_renderlayer_aovs.py
@@ -1,7 +1,7 @@
import pyblish.api
import openpype.hosts.maya.api.action
-from avalon import io
+from openpype.pipeline import legacy_io
import openpype.api
@@ -48,8 +48,8 @@ class ValidateRenderLayerAOVs(pyblish.api.InstancePlugin):
def validate_subset_registered(self, asset_name, subset_name):
"""Check if subset is registered in the database under the asset"""
- asset = io.find_one({"type": "asset", "name": asset_name})
- is_valid = io.find_one({
+ asset = legacy_io.find_one({"type": "asset", "name": asset_name})
+ is_valid = legacy_io.find_one({
"type": "subset",
"name": subset_name,
"parent": asset["_id"]
diff --git a/openpype/hosts/maya/plugins/publish/validate_unreal_staticmesh_naming.py b/openpype/hosts/maya/plugins/publish/validate_unreal_staticmesh_naming.py
index 43f6c85827..33788d1835 100644
--- a/openpype/hosts/maya/plugins/publish/validate_unreal_staticmesh_naming.py
+++ b/openpype/hosts/maya/plugins/publish/validate_unreal_staticmesh_naming.py
@@ -1,12 +1,12 @@
# -*- coding: utf-8 -*-
"""Validator for correct naming of Static Meshes."""
-from maya import cmds # noqa
+import re
+
import pyblish.api
import openpype.api
import openpype.hosts.maya.api.action
-from avalon.api import Session
+from openpype.pipeline import legacy_io
from openpype.api import get_project_settings
-import re
class ValidateUnrealStaticMeshName(pyblish.api.InstancePlugin):
@@ -63,7 +63,9 @@ class ValidateUnrealStaticMeshName(pyblish.api.InstancePlugin):
invalid = []
- project_settings = get_project_settings(Session["AVALON_PROJECT"])
+ project_settings = get_project_settings(
+ legacy_io.Session["AVALON_PROJECT"]
+ )
collision_prefixes = (
project_settings
["maya"]
diff --git a/openpype/hosts/nuke/api/command.py b/openpype/hosts/nuke/api/command.py
index 6f74c08e97..c756c48a12 100644
--- a/openpype/hosts/nuke/api/command.py
+++ b/openpype/hosts/nuke/api/command.py
@@ -3,8 +3,7 @@ import contextlib
import nuke
from bson.objectid import ObjectId
-from avalon import api, io
-
+from openpype.pipeline import legacy_io
log = logging.getLogger(__name__)
@@ -15,11 +14,11 @@ def reset_frame_range():
displayed handles
"""
- fps = float(api.Session.get("AVALON_FPS", 25))
+ fps = float(legacy_io.Session.get("AVALON_FPS", 25))
nuke.root()["fps"].setValue(fps)
- name = api.Session["AVALON_ASSET"]
- asset = io.find_one({"name": name, "type": "asset"})
+ name = legacy_io.Session["AVALON_ASSET"]
+ asset = legacy_io.find_one({"name": name, "type": "asset"})
asset_data = asset["data"]
handles = get_handles(asset)
@@ -71,10 +70,10 @@ def get_handles(asset):
if "visualParent" in data:
vp = data["visualParent"]
if vp is not None:
- parent_asset = io.find_one({"_id": ObjectId(vp)})
+ parent_asset = legacy_io.find_one({"_id": ObjectId(vp)})
if parent_asset is None:
- parent_asset = io.find_one({"_id": ObjectId(asset["parent"])})
+ parent_asset = legacy_io.find_one({"_id": ObjectId(asset["parent"])})
if parent_asset is not None:
return get_handles(parent_asset)
@@ -84,7 +83,7 @@ def get_handles(asset):
def reset_resolution():
"""Set resolution to project resolution."""
- project = io.find_one({"type": "project"})
+ project = legacy_io.find_one({"type": "project"})
p_data = project["data"]
width = p_data.get("resolution_width",
diff --git a/openpype/hosts/nuke/api/lib.py b/openpype/hosts/nuke/api/lib.py
index e05c6aecbd..4e38f811c9 100644
--- a/openpype/hosts/nuke/api/lib.py
+++ b/openpype/hosts/nuke/api/lib.py
@@ -10,8 +10,6 @@ from bson.objectid import ObjectId
import nuke
-from avalon import api, io
-
from openpype.api import (
Logger,
Anatomy,
@@ -26,7 +24,10 @@ from openpype.tools.utils import host_tools
from openpype.lib.path_tools import HostDirmap
from openpype.settings import get_project_settings
from openpype.modules import ModulesManager
-from openpype.pipeline import discover_legacy_creator_plugins
+from openpype.pipeline import (
+ discover_legacy_creator_plugins,
+ legacy_io,
+)
from .workio import (
save_file,
@@ -569,7 +570,7 @@ def check_inventory_versions():
avalon_knob_data = read(node)
# get representation from io
- representation = io.find_one({
+ representation = legacy_io.find_one({
"type": "representation",
"_id": ObjectId(avalon_knob_data["representation"])
})
@@ -583,13 +584,13 @@ def check_inventory_versions():
continue
# Get start frame from version data
- version = io.find_one({
+ version = legacy_io.find_one({
"type": "version",
"_id": representation["parent"]
})
# get all versions in list
- versions = io.find({
+ versions = legacy_io.find({
"type": "version",
"parent": version["parent"]
}).distinct('name')
@@ -726,8 +727,8 @@ def format_anatomy(data):
file = script_name()
data["version"] = get_version_from_path(file)
- project_doc = io.find_one({"type": "project"})
- asset_doc = io.find_one({
+ project_doc = legacy_io.find_one({"type": "project"})
+ asset_doc = legacy_io.find_one({
"type": "asset",
"name": data["avalon"]["asset"]
})
@@ -1062,6 +1063,14 @@ def add_deadline_tab(node):
knob.setValue(0)
node.addKnob(knob)
+ knob = nuke.Text_Knob("divd", '')
+ knob.setValue('')
+ node.addKnob(knob)
+
+ knob = nuke.Boolean_Knob("suspend_publish", "Suspend publish")
+ knob.setValue(False)
+ node.addKnob(knob)
+
def get_deadline_knob_names():
return [
@@ -1138,8 +1147,11 @@ class WorkfileSettings(object):
nodes=None,
**kwargs):
Context._project_doc = kwargs.get(
- "project") or io.find_one({"type": "project"})
- self._asset = kwargs.get("asset_name") or api.Session["AVALON_ASSET"]
+ "project") or legacy_io.find_one({"type": "project"})
+ self._asset = (
+ kwargs.get("asset_name")
+ or legacy_io.Session["AVALON_ASSET"]
+ )
self._asset_entity = get_asset(self._asset)
self._root_node = root_node or nuke.root()
self._nodes = self.get_nodes(nodes=nodes)
@@ -1486,9 +1498,9 @@ class WorkfileSettings(object):
def reset_resolution(self):
"""Set resolution to project resolution."""
log.info("Resetting resolution")
- project = io.find_one({"type": "project"})
- asset = api.Session["AVALON_ASSET"]
- asset = io.find_one({"name": asset, "type": "asset"})
+ project = legacy_io.find_one({"type": "project"})
+ asset = legacy_io.Session["AVALON_ASSET"]
+ asset = legacy_io.find_one({"name": asset, "type": "asset"})
asset_data = asset.get('data', {})
data = {
@@ -1608,7 +1620,7 @@ def get_hierarchical_attr(entity, attr, default=None):
):
parent_id = entity['data']['visualParent']
- parent = io.find_one({'_id': parent_id})
+ parent = legacy_io.find_one({'_id': parent_id})
return get_hierarchical_attr(parent, attr)
diff --git a/openpype/hosts/nuke/api/pipeline.py b/openpype/hosts/nuke/api/pipeline.py
index 6ee3d2ce05..0194acd196 100644
--- a/openpype/hosts/nuke/api/pipeline.py
+++ b/openpype/hosts/nuke/api/pipeline.py
@@ -38,7 +38,6 @@ from .lib import (
log = Logger.get_logger(__name__)
-AVALON_CONFIG = os.getenv("AVALON_CONFIG", "pype")
HOST_DIR = os.path.dirname(os.path.abspath(openpype.hosts.nuke.__file__))
PLUGINS_DIR = os.path.join(HOST_DIR, "plugins")
PUBLISH_PATH = os.path.join(PLUGINS_DIR, "publish")
@@ -79,11 +78,11 @@ def reload_config():
"""
for module in (
- "{}.api".format(AVALON_CONFIG),
- "{}.hosts.nuke.api.actions".format(AVALON_CONFIG),
- "{}.hosts.nuke.api.menu".format(AVALON_CONFIG),
- "{}.hosts.nuke.api.plugin".format(AVALON_CONFIG),
- "{}.hosts.nuke.api.lib".format(AVALON_CONFIG),
+ "openpype.api",
+ "openpype.hosts.nuke.api.actions",
+ "openpype.hosts.nuke.api.menu",
+ "openpype.hosts.nuke.api.plugin",
+ "openpype.hosts.nuke.api.lib",
):
log.info("Reloading module: {}...".format(module))
diff --git a/openpype/hosts/nuke/api/plugin.py b/openpype/hosts/nuke/api/plugin.py
index 3ac750a48f..eaf0ab6911 100644
--- a/openpype/hosts/nuke/api/plugin.py
+++ b/openpype/hosts/nuke/api/plugin.py
@@ -1,6 +1,8 @@
import os
import random
import string
+from collections import OrderedDict
+from abc import abstractmethod
import nuke
@@ -594,3 +596,139 @@ class ExporterReviewMov(ExporterReview):
nuke.scriptSave()
return self.data
+
+
+class AbstractWriteRender(OpenPypeCreator):
+ """Abstract creator to gather similar implementation for Write creators"""
+ name = ""
+ label = ""
+ hosts = ["nuke"]
+ n_class = "Write"
+ family = "render"
+ icon = "sign-out"
+ defaults = ["Main", "Mask"]
+
+ def __init__(self, *args, **kwargs):
+ super(AbstractWriteRender, self).__init__(*args, **kwargs)
+
+ data = OrderedDict()
+
+ data["family"] = self.family
+ data["families"] = self.n_class
+
+ for k, v in self.data.items():
+ if k not in data.keys():
+ data.update({k: v})
+
+ self.data = data
+ self.nodes = nuke.selectedNodes()
+ self.log.debug("_ self.data: '{}'".format(self.data))
+
+ def process(self):
+
+ inputs = []
+ outputs = []
+ instance = nuke.toNode(self.data["subset"])
+ selected_node = None
+
+ # use selection
+ if (self.options or {}).get("useSelection"):
+ nodes = self.nodes
+
+ if not (len(nodes) < 2):
+ msg = ("Select only one node. "
+ "The node you want to connect to, "
+ "or tick off `Use selection`")
+ self.log.error(msg)
+ nuke.message(msg)
+ return
+
+ if len(nodes) == 0:
+ msg = (
+ "No nodes selected. Please select a single node to connect"
+ " to or tick off `Use selection`"
+ )
+ self.log.error(msg)
+ nuke.message(msg)
+ return
+
+ selected_node = nodes[0]
+ inputs = [selected_node]
+ outputs = selected_node.dependent()
+
+ if instance:
+ if (instance.name() in selected_node.name()):
+ selected_node = instance.dependencies()[0]
+
+ # if node already exist
+ if instance:
+ # collect input / outputs
+ inputs = instance.dependencies()
+ outputs = instance.dependent()
+ selected_node = inputs[0]
+ # remove old one
+ nuke.delete(instance)
+
+ # recreate new
+ write_data = {
+ "nodeclass": self.n_class,
+ "families": [self.family],
+ "avalon": self.data
+ }
+
+ # add creator data
+ creator_data = {"creator": self.__class__.__name__}
+ self.data.update(creator_data)
+ write_data.update(creator_data)
+
+ if self.presets.get('fpath_template'):
+ self.log.info("Adding template path from preset")
+ write_data.update(
+ {"fpath_template": self.presets["fpath_template"]}
+ )
+ else:
+ self.log.info("Adding template path from plugin")
+ write_data.update({
+ "fpath_template":
+ ("{work}/" + self.family + "s/nuke/{subset}"
+ "/{subset}.{frame}.{ext}")})
+
+ write_node = self._create_write_node(selected_node,
+ inputs, outputs,
+ write_data)
+
+ # relinking to collected connections
+ for i, input in enumerate(inputs):
+ write_node.setInput(i, input)
+
+ write_node.autoplace()
+
+ for output in outputs:
+ output.setInput(0, write_node)
+
+ write_node = self._modify_write_node(write_node)
+
+ return write_node
+
+ @abstractmethod
+ def _create_write_node(self, selected_node, inputs, outputs, write_data):
+ """Family dependent implementation of Write node creation
+
+ Args:
+ selected_node (nuke.Node)
+ inputs (list of nuke.Node) - input dependencies (what is connected)
+ outputs (list of nuke.Node) - output dependencies
+ write_data (dict) - values used to fill Knobs
+ Returns:
+ node (nuke.Node): group node with data as Knobs
+ """
+ pass
+
+ @abstractmethod
+ def _modify_write_node(self, write_node):
+ """Family dependent modification of created 'write_node'
+
+ Returns:
+ node (nuke.Node): group node with data as Knobs
+ """
+ pass
diff --git a/openpype/hosts/nuke/plugins/__init__.py b/openpype/hosts/nuke/plugins/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/openpype/hosts/nuke/plugins/create/__init__.py b/openpype/hosts/nuke/plugins/create/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/openpype/hosts/nuke/plugins/create/create_read.py b/openpype/hosts/nuke/plugins/create/create_read.py
index bdc67add42..87a9dff0f8 100644
--- a/openpype/hosts/nuke/plugins/create/create_read.py
+++ b/openpype/hosts/nuke/plugins/create/create_read.py
@@ -2,8 +2,6 @@ from collections import OrderedDict
import nuke
-import avalon.api
-from openpype import api as pype
from openpype.hosts.nuke.api import plugin
from openpype.hosts.nuke.api.lib import (
set_avalon_knob_data
diff --git a/openpype/hosts/nuke/plugins/create/create_write_prerender.py b/openpype/hosts/nuke/plugins/create/create_write_prerender.py
index 761439fdb2..7297f74c13 100644
--- a/openpype/hosts/nuke/plugins/create/create_write_prerender.py
+++ b/openpype/hosts/nuke/plugins/create/create_write_prerender.py
@@ -1,12 +1,10 @@
-from collections import OrderedDict
-
import nuke
from openpype.hosts.nuke.api import plugin
from openpype.hosts.nuke.api.lib import create_write_node
-class CreateWritePrerender(plugin.OpenPypeCreator):
+class CreateWritePrerender(plugin.AbstractWriteRender):
# change this to template preset
name = "WritePrerender"
label = "Create Write Prerender"
@@ -19,85 +17,7 @@ class CreateWritePrerender(plugin.OpenPypeCreator):
def __init__(self, *args, **kwargs):
super(CreateWritePrerender, self).__init__(*args, **kwargs)
- data = OrderedDict()
-
- data["family"] = self.family
- data["families"] = self.n_class
-
- for k, v in self.data.items():
- if k not in data.keys():
- data.update({k: v})
-
- self.data = data
- self.nodes = nuke.selectedNodes()
- self.log.debug("_ self.data: '{}'".format(self.data))
-
- def process(self):
- inputs = []
- outputs = []
- instance = nuke.toNode(self.data["subset"])
- selected_node = None
-
- # use selection
- if (self.options or {}).get("useSelection"):
- nodes = self.nodes
-
- if not (len(nodes) < 2):
- msg = ("Select only one node. The node "
- "you want to connect to, "
- "or tick off `Use selection`")
- self.log.error(msg)
- nuke.message(msg)
-
- if len(nodes) == 0:
- msg = (
- "No nodes selected. Please select a single node to connect"
- " to or tick off `Use selection`"
- )
- self.log.error(msg)
- nuke.message(msg)
-
- selected_node = nodes[0]
- inputs = [selected_node]
- outputs = selected_node.dependent()
-
- if instance:
- if (instance.name() in selected_node.name()):
- selected_node = instance.dependencies()[0]
-
- # if node already exist
- if instance:
- # collect input / outputs
- inputs = instance.dependencies()
- outputs = instance.dependent()
- selected_node = inputs[0]
- # remove old one
- nuke.delete(instance)
-
- # recreate new
- write_data = {
- "nodeclass": self.n_class,
- "families": [self.family],
- "avalon": self.data
- }
-
- # add creator data
- creator_data = {"creator": self.__class__.__name__}
- self.data.update(creator_data)
- write_data.update(creator_data)
-
- if self.presets.get('fpath_template'):
- self.log.info("Adding template path from preset")
- write_data.update(
- {"fpath_template": self.presets["fpath_template"]}
- )
- else:
- self.log.info("Adding template path from plugin")
- write_data.update({
- "fpath_template": ("{work}/prerenders/nuke/{subset}"
- "/{subset}.{frame}.{ext}")})
-
- self.log.info("write_data: {}".format(write_data))
+ def _create_write_node(self, selected_node, inputs, outputs, write_data):
reviewable = self.presets.get("reviewable")
write_node = create_write_node(
self.data["subset"],
@@ -107,15 +27,9 @@ class CreateWritePrerender(plugin.OpenPypeCreator):
review=reviewable,
linked_knobs=["channels", "___", "first", "last", "use_limit"])
- # relinking to collected connections
- for i, input in enumerate(inputs):
- write_node.setInput(i, input)
-
- write_node.autoplace()
-
- for output in outputs:
- output.setInput(0, write_node)
+ return write_node
+ def _modify_write_node(self, write_node):
# open group node
write_node.begin()
for n in nuke.allNodes():
diff --git a/openpype/hosts/nuke/plugins/create/create_write_render.py b/openpype/hosts/nuke/plugins/create/create_write_render.py
index a9c4b5341e..18a101546f 100644
--- a/openpype/hosts/nuke/plugins/create/create_write_render.py
+++ b/openpype/hosts/nuke/plugins/create/create_write_render.py
@@ -1,12 +1,10 @@
-from collections import OrderedDict
-
import nuke
from openpype.hosts.nuke.api import plugin
from openpype.hosts.nuke.api.lib import create_write_node
-class CreateWriteRender(plugin.OpenPypeCreator):
+class CreateWriteRender(plugin.AbstractWriteRender):
# change this to template preset
name = "WriteRender"
label = "Create Write Render"
@@ -19,87 +17,7 @@ class CreateWriteRender(plugin.OpenPypeCreator):
def __init__(self, *args, **kwargs):
super(CreateWriteRender, self).__init__(*args, **kwargs)
- data = OrderedDict()
-
- data["family"] = self.family
- data["families"] = self.n_class
-
- for k, v in self.data.items():
- if k not in data.keys():
- data.update({k: v})
-
- self.data = data
- self.nodes = nuke.selectedNodes()
- self.log.debug("_ self.data: '{}'".format(self.data))
-
- def process(self):
-
- inputs = []
- outputs = []
- instance = nuke.toNode(self.data["subset"])
- selected_node = None
-
- # use selection
- if (self.options or {}).get("useSelection"):
- nodes = self.nodes
-
- if not (len(nodes) < 2):
- msg = ("Select only one node. "
- "The node you want to connect to, "
- "or tick off `Use selection`")
- self.log.error(msg)
- nuke.message(msg)
- return
-
- if len(nodes) == 0:
- msg = (
- "No nodes selected. Please select a single node to connect"
- " to or tick off `Use selection`"
- )
- self.log.error(msg)
- nuke.message(msg)
- return
-
- selected_node = nodes[0]
- inputs = [selected_node]
- outputs = selected_node.dependent()
-
- if instance:
- if (instance.name() in selected_node.name()):
- selected_node = instance.dependencies()[0]
-
- # if node already exist
- if instance:
- # collect input / outputs
- inputs = instance.dependencies()
- outputs = instance.dependent()
- selected_node = inputs[0]
- # remove old one
- nuke.delete(instance)
-
- # recreate new
- write_data = {
- "nodeclass": self.n_class,
- "families": [self.family],
- "avalon": self.data
- }
-
- # add creator data
- creator_data = {"creator": self.__class__.__name__}
- self.data.update(creator_data)
- write_data.update(creator_data)
-
- if self.presets.get('fpath_template'):
- self.log.info("Adding template path from preset")
- write_data.update(
- {"fpath_template": self.presets["fpath_template"]}
- )
- else:
- self.log.info("Adding template path from plugin")
- write_data.update({
- "fpath_template": ("{work}/renders/nuke/{subset}"
- "/{subset}.{frame}.{ext}")})
-
+ def _create_write_node(self, selected_node, inputs, outputs, write_data):
# add reformat node to cut off all outside of format bounding box
# get width and height
try:
@@ -126,13 +44,7 @@ class CreateWriteRender(plugin.OpenPypeCreator):
input=selected_node,
prenodes=_prenodes)
- # relinking to collected connections
- for i, input in enumerate(inputs):
- write_node.setInput(i, input)
-
- write_node.autoplace()
-
- for output in outputs:
- output.setInput(0, write_node)
-
+ return write_node
+
+ def _modify_write_node(self, write_node):
return write_node
diff --git a/openpype/hosts/nuke/plugins/create/create_write_still.py b/openpype/hosts/nuke/plugins/create/create_write_still.py
index 0037b64ce3..d22b5eab3f 100644
--- a/openpype/hosts/nuke/plugins/create/create_write_still.py
+++ b/openpype/hosts/nuke/plugins/create/create_write_still.py
@@ -1,12 +1,10 @@
-from collections import OrderedDict
-
import nuke
from openpype.hosts.nuke.api import plugin
from openpype.hosts.nuke.api.lib import create_write_node
-class CreateWriteStill(plugin.OpenPypeCreator):
+class CreateWriteStill(plugin.AbstractWriteRender):
# change this to template preset
name = "WriteStillFrame"
label = "Create Write Still Image"
@@ -23,77 +21,8 @@ class CreateWriteStill(plugin.OpenPypeCreator):
def __init__(self, *args, **kwargs):
super(CreateWriteStill, self).__init__(*args, **kwargs)
- data = OrderedDict()
-
- data["family"] = self.family
- data["families"] = self.n_class
-
- for k, v in self.data.items():
- if k not in data.keys():
- data.update({k: v})
-
- self.data = data
- self.nodes = nuke.selectedNodes()
- self.log.debug("_ self.data: '{}'".format(self.data))
-
- def process(self):
-
- inputs = []
- outputs = []
- instance = nuke.toNode(self.data["subset"])
- selected_node = None
-
- # use selection
- if (self.options or {}).get("useSelection"):
- nodes = self.nodes
-
- if not (len(nodes) < 2):
- msg = ("Select only one node. "
- "The node you want to connect to, "
- "or tick off `Use selection`")
- self.log.error(msg)
- nuke.message(msg)
- return
-
- if len(nodes) == 0:
- msg = (
- "No nodes selected. Please select a single node to connect"
- " to or tick off `Use selection`"
- )
- self.log.error(msg)
- nuke.message(msg)
- return
-
- selected_node = nodes[0]
- inputs = [selected_node]
- outputs = selected_node.dependent()
-
- if instance:
- if (instance.name() in selected_node.name()):
- selected_node = instance.dependencies()[0]
-
- # if node already exist
- if instance:
- # collect input / outputs
- inputs = instance.dependencies()
- outputs = instance.dependent()
- selected_node = inputs[0]
- # remove old one
- nuke.delete(instance)
-
- # recreate new
- write_data = {
- "nodeclass": self.n_class,
- "families": [self.family],
- "avalon": self.data
- }
-
- # add creator data
- creator_data = {"creator": self.__class__.__name__}
- self.data.update(creator_data)
- write_data.update(creator_data)
-
- self.log.info("Adding template path from plugin")
+ def _create_write_node(self, selected_node, inputs, outputs, write_data):
+ # explicitly reset template to 'renders', not same as other 2 writes
write_data.update({
"fpath_template": (
"{work}/renders/nuke/{subset}/{subset}.{ext}")})
@@ -118,16 +47,9 @@ class CreateWriteStill(plugin.OpenPypeCreator):
farm=False,
linked_knobs=["channels", "___", "first", "last", "use_limit"])
- # relinking to collected connections
- for i, input in enumerate(inputs):
- write_node.setInput(i, input)
+ return write_node
- write_node.autoplace()
-
- for output in outputs:
- output.setInput(0, write_node)
-
- # link frame hold to group node
+ def _modify_write_node(self, write_node):
write_node.begin()
for n in nuke.allNodes():
# get write node
diff --git a/openpype/hosts/nuke/plugins/load/load_backdrop.py b/openpype/hosts/nuke/plugins/load/load_backdrop.py
index d55dd4cf71..143fdf1f30 100644
--- a/openpype/hosts/nuke/plugins/load/load_backdrop.py
+++ b/openpype/hosts/nuke/plugins/load/load_backdrop.py
@@ -1,8 +1,8 @@
-from avalon import io
import nuke
import nukescripts
from openpype.pipeline import (
+ legacy_io,
load,
get_representation_path,
)
@@ -188,7 +188,7 @@ class LoadBackdropNodes(load.LoaderPlugin):
# get main variables
# Get version from io
- version = io.find_one({
+ version = legacy_io.find_one({
"type": "version",
"_id": representation["parent"]
})
@@ -237,7 +237,7 @@ class LoadBackdropNodes(load.LoaderPlugin):
GN["name"].setValue(object_name)
# get all versions in list
- versions = io.find({
+ versions = legacy_io.find({
"type": "version",
"parent": version["parent"]
}).distinct('name')
diff --git a/openpype/hosts/nuke/plugins/load/load_camera_abc.py b/openpype/hosts/nuke/plugins/load/load_camera_abc.py
index fb5f7f8ede..964ca5ec90 100644
--- a/openpype/hosts/nuke/plugins/load/load_camera_abc.py
+++ b/openpype/hosts/nuke/plugins/load/load_camera_abc.py
@@ -1,7 +1,7 @@
import nuke
-from avalon import io
from openpype.pipeline import (
+ legacy_io,
load,
get_representation_path,
)
@@ -102,7 +102,7 @@ class AlembicCameraLoader(load.LoaderPlugin):
None
"""
# Get version from io
- version = io.find_one({
+ version = legacy_io.find_one({
"type": "version",
"_id": representation["parent"]
})
@@ -175,7 +175,7 @@ class AlembicCameraLoader(load.LoaderPlugin):
""" Coloring a node by correct color by actual version
"""
# get all versions in list
- versions = io.find({
+ versions = legacy_io.find({
"type": "version",
"parent": version["parent"]
}).distinct('name')
diff --git a/openpype/hosts/nuke/plugins/load/load_clip.py b/openpype/hosts/nuke/plugins/load/load_clip.py
index 9b0588feac..681561e303 100644
--- a/openpype/hosts/nuke/plugins/load/load_clip.py
+++ b/openpype/hosts/nuke/plugins/load/load_clip.py
@@ -1,8 +1,10 @@
import nuke
import qargparse
-from avalon import io
-from openpype.pipeline import get_representation_path
+from openpype.pipeline import (
+ legacy_io,
+ get_representation_path,
+)
from openpype.hosts.nuke.api.lib import (
get_imageio_input_colorspace,
maintained_selection
@@ -194,7 +196,7 @@ class LoadClip(plugin.NukeLoader):
start_at_workfile = bool("start at" in read_node['frame_mode'].value())
- version = io.find_one({
+ version = legacy_io.find_one({
"type": "version",
"_id": representation["parent"]
})
@@ -264,7 +266,7 @@ class LoadClip(plugin.NukeLoader):
# change color of read_node
# get all versions in list
- versions = io.find({
+ versions = legacy_io.find({
"type": "version",
"parent": version["parent"]
}).distinct('name')
diff --git a/openpype/hosts/nuke/plugins/load/load_effects.py b/openpype/hosts/nuke/plugins/load/load_effects.py
index 56c5acbb0a..6a30330ed0 100644
--- a/openpype/hosts/nuke/plugins/load/load_effects.py
+++ b/openpype/hosts/nuke/plugins/load/load_effects.py
@@ -3,9 +3,8 @@ from collections import OrderedDict
import nuke
import six
-from avalon import io
-
from openpype.pipeline import (
+ legacy_io,
load,
get_representation_path,
)
@@ -149,7 +148,7 @@ class LoadEffects(load.LoaderPlugin):
"""
# get main variables
# Get version from io
- version = io.find_one({
+ version = legacy_io.find_one({
"type": "version",
"_id": representation["parent"]
})
@@ -245,7 +244,7 @@ class LoadEffects(load.LoaderPlugin):
self.connect_read_node(GN, namespace, json_f["assignTo"])
# get all versions in list
- versions = io.find({
+ versions = legacy_io.find({
"type": "version",
"parent": version["parent"]
}).distinct('name')
diff --git a/openpype/hosts/nuke/plugins/load/load_effects_ip.py b/openpype/hosts/nuke/plugins/load/load_effects_ip.py
index 0bc5f5a514..eaf151b3b8 100644
--- a/openpype/hosts/nuke/plugins/load/load_effects_ip.py
+++ b/openpype/hosts/nuke/plugins/load/load_effects_ip.py
@@ -3,9 +3,8 @@ from collections import OrderedDict
import six
import nuke
-from avalon import io
-
from openpype.pipeline import (
+ legacy_io,
load,
get_representation_path,
)
@@ -154,7 +153,7 @@ class LoadEffectsInputProcess(load.LoaderPlugin):
# get main variables
# Get version from io
- version = io.find_one({
+ version = legacy_io.find_one({
"type": "version",
"_id": representation["parent"]
})
@@ -252,7 +251,7 @@ class LoadEffectsInputProcess(load.LoaderPlugin):
# return
# get all versions in list
- versions = io.find({
+ versions = legacy_io.find({
"type": "version",
"parent": version["parent"]
}).distinct('name')
diff --git a/openpype/hosts/nuke/plugins/load/load_gizmo.py b/openpype/hosts/nuke/plugins/load/load_gizmo.py
index 6f2b191be9..4ea9d64d7d 100644
--- a/openpype/hosts/nuke/plugins/load/load_gizmo.py
+++ b/openpype/hosts/nuke/plugins/load/load_gizmo.py
@@ -1,8 +1,7 @@
import nuke
-from avalon import io
-
from openpype.pipeline import (
+ legacy_io,
load,
get_representation_path,
)
@@ -102,7 +101,7 @@ class LoadGizmo(load.LoaderPlugin):
# get main variables
# Get version from io
- version = io.find_one({
+ version = legacy_io.find_one({
"type": "version",
"_id": representation["parent"]
})
@@ -150,7 +149,7 @@ class LoadGizmo(load.LoaderPlugin):
GN["name"].setValue(object_name)
# get all versions in list
- versions = io.find({
+ versions = legacy_io.find({
"type": "version",
"parent": version["parent"]
}).distinct('name')
diff --git a/openpype/hosts/nuke/plugins/load/load_gizmo_ip.py b/openpype/hosts/nuke/plugins/load/load_gizmo_ip.py
index 46134afcf0..38dd70935e 100644
--- a/openpype/hosts/nuke/plugins/load/load_gizmo_ip.py
+++ b/openpype/hosts/nuke/plugins/load/load_gizmo_ip.py
@@ -1,8 +1,8 @@
import nuke
import six
-from avalon import io
from openpype.pipeline import (
+ legacy_io,
load,
get_representation_path,
)
@@ -108,7 +108,7 @@ class LoadGizmoInputProcess(load.LoaderPlugin):
# get main variables
# Get version from io
- version = io.find_one({
+ version = legacy_io.find_one({
"type": "version",
"_id": representation["parent"]
})
@@ -156,7 +156,7 @@ class LoadGizmoInputProcess(load.LoaderPlugin):
GN["name"].setValue(object_name)
# get all versions in list
- versions = io.find({
+ versions = legacy_io.find({
"type": "version",
"parent": version["parent"]
}).distinct('name')
diff --git a/openpype/hosts/nuke/plugins/load/load_image.py b/openpype/hosts/nuke/plugins/load/load_image.py
index 9a175a0cba..6df286a4f7 100644
--- a/openpype/hosts/nuke/plugins/load/load_image.py
+++ b/openpype/hosts/nuke/plugins/load/load_image.py
@@ -1,9 +1,9 @@
import nuke
import qargparse
-from avalon import io
from openpype.pipeline import (
+ legacy_io,
load,
get_representation_path,
)
@@ -186,13 +186,13 @@ class LoadImage(load.LoaderPlugin):
format(frame_number, "0{}".format(padding)))
# Get start frame from version data
- version = io.find_one({
+ version = legacy_io.find_one({
"type": "version",
"_id": representation["parent"]
})
# get all versions in list
- versions = io.find({
+ versions = legacy_io.find({
"type": "version",
"parent": version["parent"]
}).distinct('name')
diff --git a/openpype/hosts/nuke/plugins/load/load_model.py b/openpype/hosts/nuke/plugins/load/load_model.py
index e445beca05..9788bb25d2 100644
--- a/openpype/hosts/nuke/plugins/load/load_model.py
+++ b/openpype/hosts/nuke/plugins/load/load_model.py
@@ -1,6 +1,7 @@
import nuke
-from avalon import io
+
from openpype.pipeline import (
+ legacy_io,
load,
get_representation_path,
)
@@ -99,7 +100,7 @@ class AlembicModelLoader(load.LoaderPlugin):
None
"""
# Get version from io
- version = io.find_one({
+ version = legacy_io.find_one({
"type": "version",
"_id": representation["parent"]
})
@@ -172,7 +173,7 @@ class AlembicModelLoader(load.LoaderPlugin):
""" Coloring a node by correct color by actual version
"""
# get all versions in list
- versions = io.find({
+ versions = legacy_io.find({
"type": "version",
"parent": version["parent"]
}).distinct('name')
diff --git a/openpype/hosts/nuke/plugins/load/load_script_precomp.py b/openpype/hosts/nuke/plugins/load/load_script_precomp.py
index 779f101682..bd351ad785 100644
--- a/openpype/hosts/nuke/plugins/load/load_script_precomp.py
+++ b/openpype/hosts/nuke/plugins/load/load_script_precomp.py
@@ -1,8 +1,7 @@
import nuke
-from avalon import io
-
from openpype.pipeline import (
+ legacy_io,
load,
get_representation_path,
)
@@ -117,13 +116,13 @@ class LinkAsGroup(load.LoaderPlugin):
root = get_representation_path(representation).replace("\\", "/")
# Get start frame from version data
- version = io.find_one({
+ version = legacy_io.find_one({
"type": "version",
"_id": representation["parent"]
})
# get all versions in list
- versions = io.find({
+ versions = legacy_io.find({
"type": "version",
"parent": version["parent"]
}).distinct('name')
diff --git a/openpype/hosts/nuke/plugins/publish/collect_reads.py b/openpype/hosts/nuke/plugins/publish/collect_reads.py
index 45e9969eb9..4d6944f523 100644
--- a/openpype/hosts/nuke/plugins/publish/collect_reads.py
+++ b/openpype/hosts/nuke/plugins/publish/collect_reads.py
@@ -2,7 +2,8 @@ import os
import re
import nuke
import pyblish.api
-from avalon import io, api
+
+from openpype.pipeline import legacy_io
@pyblish.api.log
@@ -15,8 +16,10 @@ class CollectNukeReads(pyblish.api.InstancePlugin):
families = ["source"]
def process(self, instance):
- asset_data = io.find_one({"type": "asset",
- "name": api.Session["AVALON_ASSET"]})
+ asset_data = legacy_io.find_one({
+ "type": "asset",
+ "name": legacy_io.Session["AVALON_ASSET"]
+ })
self.log.debug("asset_data: {}".format(asset_data["data"]))
diff --git a/openpype/hosts/nuke/plugins/publish/precollect_instances.py b/openpype/hosts/nuke/plugins/publish/precollect_instances.py
index 29c706f302..1a8fa3e6ad 100644
--- a/openpype/hosts/nuke/plugins/publish/precollect_instances.py
+++ b/openpype/hosts/nuke/plugins/publish/precollect_instances.py
@@ -1,6 +1,7 @@
import nuke
import pyblish.api
-from avalon import io, api
+
+from openpype.pipeline import legacy_io
from openpype.hosts.nuke.api.lib import (
add_publish_knob,
get_avalon_knob_data
@@ -19,9 +20,9 @@ class PreCollectNukeInstances(pyblish.api.ContextPlugin):
sync_workfile_version_on_families = []
def process(self, context):
- asset_data = io.find_one({
+ asset_data = legacy_io.find_one({
"type": "asset",
- "name": api.Session["AVALON_ASSET"]
+ "name": legacy_io.Session["AVALON_ASSET"]
})
self.log.debug("asset_data: {}".format(asset_data["data"]))
@@ -69,6 +70,11 @@ class PreCollectNukeInstances(pyblish.api.ContextPlugin):
instance = context.create_instance(subset)
instance.append(node)
+ suspend_publish = False
+ if "suspend_publish" in node.knobs():
+ suspend_publish = node["suspend_publish"].value()
+ instance.data["suspend_publish"] = suspend_publish
+
# get review knob value
review = False
if "review" in node.knobs():
diff --git a/openpype/hosts/nuke/plugins/publish/precollect_writes.py b/openpype/hosts/nuke/plugins/publish/precollect_writes.py
index 4826b2788f..8669f4f485 100644
--- a/openpype/hosts/nuke/plugins/publish/precollect_writes.py
+++ b/openpype/hosts/nuke/plugins/publish/precollect_writes.py
@@ -3,9 +3,12 @@ import re
from pprint import pformat
import nuke
import pyblish.api
-from avalon import io
+
import openpype.api as pype
-from openpype.pipeline import get_representation_path
+from openpype.pipeline import (
+ legacy_io,
+ get_representation_path,
+)
@pyblish.api.log
@@ -180,7 +183,7 @@ class CollectNukeWrites(pyblish.api.InstancePlugin):
repre_doc = None
if version_doc:
# Try to find it's representation (Expected there is only one)
- repre_doc = io.find_one(
+ repre_doc = legacy_io.find_one(
{"type": "representation", "parent": version_doc["_id"]}
)
diff --git a/openpype/hosts/nuke/plugins/publish/validate_script.py b/openpype/hosts/nuke/plugins/publish/validate_script.py
index c35d09dcde..10c9e93f8b 100644
--- a/openpype/hosts/nuke/plugins/publish/validate_script.py
+++ b/openpype/hosts/nuke/plugins/publish/validate_script.py
@@ -1,6 +1,7 @@
import pyblish.api
-from avalon import io
+
from openpype import lib
+from openpype.pipeline import legacy_io
@pyblish.api.log
@@ -115,7 +116,7 @@ class ValidateScript(pyblish.api.InstancePlugin):
def check_parent_hierarchical(self, entityId, attr):
if entityId is None:
return None
- entity = io.find_one({"_id": entityId})
+ entity = legacy_io.find_one({"_id": entityId})
if attr in entity['data']:
self.log.info(attr)
return entity['data'][attr]
diff --git a/openpype/hosts/photoshop/api/__init__.py b/openpype/hosts/photoshop/api/__init__.py
index 17ea957066..94152b5706 100644
--- a/openpype/hosts/photoshop/api/__init__.py
+++ b/openpype/hosts/photoshop/api/__init__.py
@@ -12,7 +12,10 @@ from .pipeline import (
remove_instance,
install,
uninstall,
- containerise
+ containerise,
+ get_context_data,
+ update_context_data,
+ get_context_title
)
from .plugin import (
PhotoshopLoader,
@@ -43,6 +46,9 @@ __all__ = [
"install",
"uninstall",
"containerise",
+ "get_context_data",
+ "update_context_data",
+ "get_context_title",
# Plugin
"PhotoshopLoader",
diff --git a/openpype/hosts/photoshop/api/launch_logic.py b/openpype/hosts/photoshop/api/launch_logic.py
index 0021905cb5..0bbb19523d 100644
--- a/openpype/hosts/photoshop/api/launch_logic.py
+++ b/openpype/hosts/photoshop/api/launch_logic.py
@@ -11,9 +11,8 @@ from wsrpc_aiohttp import (
from Qt import QtCore
from openpype.api import Logger
+from openpype.pipeline import legacy_io
from openpype.tools.utils import host_tools
-
-from avalon import api
from openpype.tools.adobe_webserver.app import WebServerTool
from .ws_stub import PhotoshopServerStub
@@ -320,13 +319,13 @@ class PhotoshopRoute(WebSocketRoute):
log.info("Setting context change")
log.info("project {} asset {} ".format(project, asset))
if project:
- api.Session["AVALON_PROJECT"] = project
+ legacy_io.Session["AVALON_PROJECT"] = project
os.environ["AVALON_PROJECT"] = project
if asset:
- api.Session["AVALON_ASSET"] = asset
+ legacy_io.Session["AVALON_ASSET"] = asset
os.environ["AVALON_ASSET"] = asset
if task:
- api.Session["AVALON_TASK"] = task
+ legacy_io.Session["AVALON_TASK"] = task
os.environ["AVALON_TASK"] = task
async def read(self):
diff --git a/openpype/hosts/photoshop/api/pipeline.py b/openpype/hosts/photoshop/api/pipeline.py
index 1f069c2636..20a6e3169f 100644
--- a/openpype/hosts/photoshop/api/pipeline.py
+++ b/openpype/hosts/photoshop/api/pipeline.py
@@ -3,11 +3,11 @@ from Qt import QtWidgets
from bson.objectid import ObjectId
import pyblish.api
-from avalon import io
from openpype.api import Logger
from openpype.lib import register_event_callback
from openpype.pipeline import (
+ legacy_io,
register_loader_plugin_path,
register_creator_plugin_path,
deregister_loader_plugin_path,
@@ -33,22 +33,11 @@ def check_inventory():
if not lib.any_outdated():
return
- host = registered_host()
- outdated_containers = []
- for container in host.ls():
- representation = container['representation']
- representation_doc = io.find_one(
- {
- "_id": ObjectId(representation),
- "type": "representation"
- },
- projection={"parent": True}
- )
- if representation_doc and not lib.is_latest(representation_doc):
- outdated_containers.append(container)
-
# Warn about outdated containers.
- print("Starting new QApplication..")
+ _app = QtWidgets.QApplication.instance()
+ if not _app:
+ print("Starting new QApplication..")
+ _app = QtWidgets.QApplication([])
message_box = QtWidgets.QMessageBox()
message_box.setIcon(QtWidgets.QMessageBox.Warning)
@@ -149,13 +138,9 @@ def list_instances():
instances = []
layers_meta = stub.get_layers_metadata()
if layers_meta:
- for key, instance in layers_meta.items():
- schema = instance.get("schema")
- if schema and "container" in schema:
- continue
-
- instance['uuid'] = key
- instances.append(instance)
+ for instance in layers_meta:
+ if instance.get("id") == "pyblish.avalon.instance":
+ instances.append(instance)
return instances
@@ -176,11 +161,18 @@ def remove_instance(instance):
if not stub:
return
- stub.remove_instance(instance.get("uuid"))
- layer = stub.get_layer(instance.get("uuid"))
- if layer:
- stub.rename_layer(instance.get("uuid"),
- layer.name.replace(stub.PUBLISH_ICON, ''))
+ inst_id = instance.get("instance_id") or instance.get("uuid") # legacy
+ if not inst_id:
+ log.warning("No instance identifier for {}".format(instance))
+ return
+
+ stub.remove_instance(inst_id)
+
+ if instance.get("members"):
+ item = stub.get_layer(instance["members"][0])
+ if item:
+ stub.rename_layer(item.id,
+ item.name.replace(stub.PUBLISH_ICON, ''))
def _get_stub():
@@ -232,6 +224,33 @@ def containerise(
"members": [str(layer.id)]
}
stub = lib.stub()
- stub.imprint(layer, data)
+ stub.imprint(layer.id, data)
return layer
+
+
+def get_context_data():
+ """Get stored values for context (validation enable/disable etc)"""
+ meta = _get_stub().get_layers_metadata()
+ for item in meta:
+ if item.get("id") == "publish_context":
+ item.pop("id")
+ return item
+
+ return {}
+
+
+def update_context_data(data, changes):
+ """Store value needed for context"""
+ item = data
+ item["id"] = "publish_context"
+ _get_stub().imprint(item["id"], item)
+
+
+def get_context_title():
+ """Returns title for Creator window"""
+
+ project_name = legacy_io.Session["AVALON_PROJECT"]
+ asset_name = legacy_io.Session["AVALON_ASSET"]
+ task_name = legacy_io.Session["AVALON_TASK"]
+ return "{}/{}/{}".format(project_name, asset_name, task_name)
diff --git a/openpype/hosts/photoshop/api/ws_stub.py b/openpype/hosts/photoshop/api/ws_stub.py
index 64d89f5420..fa076ecc7e 100644
--- a/openpype/hosts/photoshop/api/ws_stub.py
+++ b/openpype/hosts/photoshop/api/ws_stub.py
@@ -27,6 +27,7 @@ class PSItem(object):
members = attr.ib(factory=list)
long_name = attr.ib(default=None)
color_code = attr.ib(default=None) # color code of layer
+ instance_id = attr.ib(default=None)
class PhotoshopServerStub:
@@ -76,13 +77,31 @@ class PhotoshopServerStub:
layer: (PSItem)
layers_meta: full list from Headline (for performance in loops)
Returns:
+ (dict) of layer metadata stored in PS file
+
+ Example:
+ {
+ 'id': 'pyblish.avalon.container',
+ 'loader': 'ImageLoader',
+ 'members': ['64'],
+ 'name': 'imageMainMiddle',
+ 'namespace': 'Hero_imageMainMiddle_001',
+ 'representation': '6203dc91e80934d9f6ee7d96',
+ 'schema': 'openpype:container-2.0'
+ }
"""
if layers_meta is None:
layers_meta = self.get_layers_metadata()
- return layers_meta.get(str(layer.id))
+ for layer_meta in layers_meta:
+ layer_id = layer_meta.get("uuid") # legacy
+ if layer_meta.get("members"):
+ layer_id = layer_meta["members"][0]
+ if str(layer.id) == str(layer_id):
+ return layer_meta
+ print("Unable to find layer metadata for {}".format(layer.id))
- def imprint(self, layer, data, all_layers=None, layers_meta=None):
+ def imprint(self, item_id, data, all_layers=None, items_meta=None):
"""Save layer metadata to Headline field of active document
Stores metadata in format:
@@ -108,28 +127,37 @@ class PhotoshopServerStub:
}] - for loaded instances
Args:
- layer (PSItem):
+ item_id (str):
data(string): json representation for single layer
all_layers (list of PSItem): for performance, could be
injected for usage in loop, if not, single call will be
triggered
- layers_meta(string): json representation from Headline
+ items_meta(string): json representation from Headline
(for performance - provide only if imprint is in
loop - value should be same)
Returns: None
"""
- if not layers_meta:
- layers_meta = self.get_layers_metadata()
+ if not items_meta:
+ items_meta = self.get_layers_metadata()
# json.dumps writes integer values in a dictionary to string, so
# anticipating it here.
- if str(layer.id) in layers_meta and layers_meta[str(layer.id)]:
- if data:
- layers_meta[str(layer.id)].update(data)
+ item_id = str(item_id)
+ is_new = True
+ result_meta = []
+ for item_meta in items_meta:
+ if ((item_meta.get('members') and
+ item_id == str(item_meta.get('members')[0])) or
+ item_meta.get("instance_id") == item_id):
+ is_new = False
+ if data:
+ item_meta.update(data)
+ result_meta.append(item_meta)
else:
- layers_meta.pop(str(layer.id))
- else:
- layers_meta[str(layer.id)] = data
+ result_meta.append(item_meta)
+
+ if is_new:
+ result_meta.append(data)
# Ensure only valid ids are stored.
if not all_layers:
@@ -137,12 +165,14 @@ class PhotoshopServerStub:
layer_ids = [layer.id for layer in all_layers]
cleaned_data = []
- for layer_id in layers_meta:
- if int(layer_id) in layer_ids:
- cleaned_data.append(layers_meta[layer_id])
+ for item in result_meta:
+ if item.get("members"):
+ if int(item["members"][0]) not in layer_ids:
+ continue
+
+ cleaned_data.append(item)
payload = json.dumps(cleaned_data, indent=4)
-
self.websocketserver.call(
self.client.call('Photoshop.imprint', payload=payload)
)
@@ -370,38 +400,27 @@ class PhotoshopServerStub:
(Headline accessible by File > File Info)
Returns:
- (string): - json documents
+ (list)
example:
{"8":{"active":true,"subset":"imageBG",
"family":"image","id":"pyblish.avalon.instance",
"asset":"Town"}}
8 is layer(group) id - used for deletion, update etc.
"""
- layers_data = {}
res = self.websocketserver.call(self.client.call('Photoshop.read'))
+ layers_data = []
try:
- layers_data = json.loads(res)
+ if res:
+ layers_data = json.loads(res)
except json.decoder.JSONDecodeError:
- pass
+ raise ValueError("{} cannot be parsed, recreate meta".format(res))
# format of metadata changed from {} to [] because of standardization
# keep current implementation logic as its working
- if not isinstance(layers_data, dict):
- temp_layers_meta = {}
- for layer_meta in layers_data:
- layer_id = layer_meta.get("uuid")
- if not layer_id:
- layer_id = layer_meta.get("members")[0]
-
- temp_layers_meta[layer_id] = layer_meta
- layers_data = temp_layers_meta
- else:
- # legacy version of metadata
+ if isinstance(layers_data, dict):
for layer_id, layer_meta in layers_data.items():
if layer_meta.get("schema") != "openpype:container-2.0":
- layer_meta["uuid"] = str(layer_id)
- else:
layer_meta["members"] = [str(layer_id)]
-
+ layers_data = list(layers_data.values())
return layers_data
def import_smart_object(self, path, layer_name, as_reference=False):
@@ -472,11 +491,12 @@ class PhotoshopServerStub:
)
def remove_instance(self, instance_id):
- cleaned_data = {}
+ cleaned_data = []
- for key, instance in self.get_layers_metadata().items():
- if key != instance_id:
- cleaned_data[key] = instance
+ for item in self.get_layers_metadata():
+ inst_id = item.get("instance_id") or item.get("uuid")
+ if inst_id != instance_id:
+ cleaned_data.append(item)
payload = json.dumps(cleaned_data, indent=4)
@@ -528,6 +548,7 @@ class PhotoshopServerStub:
d.get('type'),
d.get('members'),
d.get('long_name'),
- d.get("color_code")
+ d.get("color_code"),
+ d.get("instance_id")
))
return ret
diff --git a/openpype/hosts/photoshop/plugins/create/create_image.py b/openpype/hosts/photoshop/plugins/create/create_image.py
index 5078cbb587..f15068b031 100644
--- a/openpype/hosts/photoshop/plugins/create/create_image.py
+++ b/openpype/hosts/photoshop/plugins/create/create_image.py
@@ -1,99 +1,145 @@
-from Qt import QtWidgets
-from openpype.pipeline import LegacyCreator
-from openpype.hosts.photoshop import api as photoshop
+from openpype.hosts.photoshop import api
+from openpype.lib import BoolDef
+from openpype.pipeline import (
+ Creator,
+ CreatedInstance,
+ legacy_io
+)
-class CreateImage(LegacyCreator):
- """Image folder for publish."""
-
- name = "imageDefault"
+class ImageCreator(Creator):
+ """Creates image instance for publishing."""
+ identifier = "image"
label = "Image"
family = "image"
- defaults = ["Main"]
+ description = "Image creator"
- def process(self):
- groups = []
- layers = []
- create_group = False
+ def collect_instances(self):
+ for instance_data in api.list_instances():
+ # legacy instances have family=='image'
+ creator_id = (instance_data.get("creator_identifier") or
+ instance_data.get("family"))
- stub = photoshop.stub()
- if (self.options or {}).get("useSelection"):
- multiple_instances = False
- selection = stub.get_selected_layers()
- self.log.info("selection {}".format(selection))
- if len(selection) > 1:
- # Ask user whether to create one image or image per selected
- # item.
- msg_box = QtWidgets.QMessageBox()
- msg_box.setIcon(QtWidgets.QMessageBox.Warning)
- msg_box.setText(
- "Multiple layers selected."
- "\nDo you want to make one image per layer?"
+ if creator_id == self.identifier:
+ instance_data = self._handle_legacy(instance_data)
+ layer = api.stub().get_layer(instance_data["members"][0])
+ instance_data["layer"] = layer
+ instance = CreatedInstance.from_existing(
+ instance_data, self
)
- msg_box.setStandardButtons(
- QtWidgets.QMessageBox.Yes |
- QtWidgets.QMessageBox.No |
- QtWidgets.QMessageBox.Cancel
- )
- ret = msg_box.exec_()
- if ret == QtWidgets.QMessageBox.Yes:
- multiple_instances = True
- elif ret == QtWidgets.QMessageBox.Cancel:
- return
+ self._add_instance_to_context(instance)
- if multiple_instances:
- for item in selection:
- if item.group:
- groups.append(item)
- else:
- layers.append(item)
+ def create(self, subset_name_from_ui, data, pre_create_data):
+ groups_to_create = []
+ top_layers_to_wrap = []
+ create_empty_group = False
+
+ stub = api.stub() # only after PS is up
+ top_level_selected_items = stub.get_selected_layers()
+ if pre_create_data.get("use_selection"):
+ only_single_item_selected = len(top_level_selected_items) == 1
+ for selected_item in top_level_selected_items:
+ if (
+ only_single_item_selected or
+ pre_create_data.get("create_multiple")):
+ if selected_item.group:
+ groups_to_create.append(selected_item)
+ else:
+ top_layers_to_wrap.append(selected_item)
else:
- group = stub.group_selected_layers(self.name)
- groups.append(group)
+ group = stub.group_selected_layers(subset_name_from_ui)
+ groups_to_create.append(group)
- elif len(selection) == 1:
- # One selected item. Use group if its a LayerSet (group), else
- # create a new group.
- if selection[0].group:
- groups.append(selection[0])
- else:
- layers.append(selection[0])
- elif len(selection) == 0:
- # No selection creates an empty group.
- create_group = True
- else:
- group = stub.create_group(self.name)
- groups.append(group)
+ if not groups_to_create and not top_layers_to_wrap:
+ group = stub.create_group(subset_name_from_ui)
+ groups_to_create.append(group)
- if create_group:
- group = stub.create_group(self.name)
- groups.append(group)
-
- for layer in layers:
+ # wrap each top level layer into separate new group
+ for layer in top_layers_to_wrap:
stub.select_layers([layer])
group = stub.group_selected_layers(layer.name)
- groups.append(group)
+ groups_to_create.append(group)
- creator_subset_name = self.data["subset"]
- for group in groups:
- long_names = []
- group.name = group.name.replace(stub.PUBLISH_ICON, ''). \
- replace(stub.LOADED_ICON, '')
+ creating_multiple_groups = len(groups_to_create) > 1
+ for group in groups_to_create:
+ subset_name = subset_name_from_ui # reset to name from creator UI
+ layer_names_in_hierarchy = []
+ created_group_name = self._clean_highlights(stub, group.name)
- subset_name = creator_subset_name
- if len(groups) > 1:
+ if creating_multiple_groups:
+ # concatenate with layer name to differentiate subsets
subset_name += group.name.title().replace(" ", "")
if group.long_name:
for directory in group.long_name[::-1]:
- name = directory.replace(stub.PUBLISH_ICON, '').\
- replace(stub.LOADED_ICON, '')
- long_names.append(name)
+ name = self._clean_highlights(stub, directory)
+ layer_names_in_hierarchy.append(name)
- self.data.update({"subset": subset_name})
- self.data.update({"uuid": str(group.id)})
- self.data.update({"long_name": "_".join(long_names)})
- stub.imprint(group, self.data)
+ data.update({"subset": subset_name})
+ data.update({"members": [str(group.id)]})
+ data.update({"long_name": "_".join(layer_names_in_hierarchy)})
+
+ new_instance = CreatedInstance(self.family, subset_name, data,
+ self)
+
+ stub.imprint(new_instance.get("instance_id"),
+ new_instance.data_to_store())
+ self._add_instance_to_context(new_instance)
# reusing existing group, need to rename afterwards
- if not create_group:
- stub.rename_layer(group.id, stub.PUBLISH_ICON + group.name)
+ if not create_empty_group:
+ stub.rename_layer(group.id,
+ stub.PUBLISH_ICON + created_group_name)
+
+ def update_instances(self, update_list):
+ self.log.debug("update_list:: {}".format(update_list))
+ for created_inst, _changes in update_list:
+ if created_inst.get("layer"):
+ # not storing PSItem layer to metadata
+ created_inst.pop("layer")
+ api.stub().imprint(created_inst.get("instance_id"),
+ created_inst.data_to_store())
+
+ def remove_instances(self, instances):
+ for instance in instances:
+ api.remove_instance(instance)
+ self._remove_instance_from_context(instance)
+
+ def get_default_variants(self):
+ return [
+ "Main"
+ ]
+
+ def get_pre_create_attr_defs(self):
+ output = [
+ BoolDef("use_selection", default=True,
+ label="Create only for selected"),
+ BoolDef("create_multiple",
+ default=True,
+ label="Create separate instance for each selected")
+ ]
+ return output
+
+ def get_detail_description(self):
+ return """Creator for Image instances"""
+
+ def _handle_legacy(self, instance_data):
+ """Converts old instances to new format."""
+ if not instance_data.get("members"):
+ instance_data["members"] = [instance_data.get("uuid")]
+
+ if instance_data.get("uuid"):
+ # uuid not needed, replaced with unique instance_id
+ api.stub().remove_instance(instance_data.get("uuid"))
+ instance_data.pop("uuid")
+
+ if not instance_data.get("task"):
+ instance_data["task"] = legacy_io.Session.get("AVALON_TASK")
+
+ if not instance_data.get("variant"):
+ instance_data["variant"] = ''
+
+ return instance_data
+
+ def _clean_highlights(self, stub, item):
+ return item.replace(stub.PUBLISH_ICON, '').replace(stub.LOADED_ICON,
+ '')
diff --git a/openpype/hosts/photoshop/plugins/create/create_legacy_image.py b/openpype/hosts/photoshop/plugins/create/create_legacy_image.py
new file mode 100644
index 0000000000..9736471a26
--- /dev/null
+++ b/openpype/hosts/photoshop/plugins/create/create_legacy_image.py
@@ -0,0 +1,100 @@
+from Qt import QtWidgets
+from openpype.pipeline import create
+from openpype.hosts.photoshop import api as photoshop
+
+
+class CreateImage(create.LegacyCreator):
+ """Image folder for publish."""
+
+ name = "imageDefault"
+ label = "Image"
+ family = "image"
+ defaults = ["Main"]
+
+ def process(self):
+ groups = []
+ layers = []
+ create_group = False
+
+ stub = photoshop.stub()
+ if (self.options or {}).get("useSelection"):
+ multiple_instances = False
+ selection = stub.get_selected_layers()
+ self.log.info("selection {}".format(selection))
+ if len(selection) > 1:
+ # Ask user whether to create one image or image per selected
+ # item.
+ msg_box = QtWidgets.QMessageBox()
+ msg_box.setIcon(QtWidgets.QMessageBox.Warning)
+ msg_box.setText(
+ "Multiple layers selected."
+ "\nDo you want to make one image per layer?"
+ )
+ msg_box.setStandardButtons(
+ QtWidgets.QMessageBox.Yes |
+ QtWidgets.QMessageBox.No |
+ QtWidgets.QMessageBox.Cancel
+ )
+ ret = msg_box.exec_()
+ if ret == QtWidgets.QMessageBox.Yes:
+ multiple_instances = True
+ elif ret == QtWidgets.QMessageBox.Cancel:
+ return
+
+ if multiple_instances:
+ for item in selection:
+ if item.group:
+ groups.append(item)
+ else:
+ layers.append(item)
+ else:
+ group = stub.group_selected_layers(self.name)
+ groups.append(group)
+
+ elif len(selection) == 1:
+ # One selected item. Use group if its a LayerSet (group), else
+ # create a new group.
+ if selection[0].group:
+ groups.append(selection[0])
+ else:
+ layers.append(selection[0])
+ elif len(selection) == 0:
+ # No selection creates an empty group.
+ create_group = True
+ else:
+ group = stub.create_group(self.name)
+ groups.append(group)
+
+ if create_group:
+ group = stub.create_group(self.name)
+ groups.append(group)
+
+ for layer in layers:
+ stub.select_layers([layer])
+ group = stub.group_selected_layers(layer.name)
+ groups.append(group)
+
+ creator_subset_name = self.data["subset"]
+ for group in groups:
+ long_names = []
+ group.name = group.name.replace(stub.PUBLISH_ICON, ''). \
+ replace(stub.LOADED_ICON, '')
+
+ subset_name = creator_subset_name
+ if len(groups) > 1:
+ subset_name += group.name.title().replace(" ", "")
+
+ if group.long_name:
+ for directory in group.long_name[::-1]:
+ name = directory.replace(stub.PUBLISH_ICON, '').\
+ replace(stub.LOADED_ICON, '')
+ long_names.append(name)
+
+ self.data.update({"subset": subset_name})
+ self.data.update({"uuid": str(group.id)})
+ self.data.update({"members": [str(group.id)]})
+ self.data.update({"long_name": "_".join(long_names)})
+ stub.imprint(group, self.data)
+ # reusing existing group, need to rename afterwards
+ if not create_group:
+ stub.rename_layer(group.id, stub.PUBLISH_ICON + group.name)
diff --git a/openpype/hosts/photoshop/plugins/create/workfile_creator.py b/openpype/hosts/photoshop/plugins/create/workfile_creator.py
new file mode 100644
index 0000000000..875a9b8a94
--- /dev/null
+++ b/openpype/hosts/photoshop/plugins/create/workfile_creator.py
@@ -0,0 +1,78 @@
+import openpype.hosts.photoshop.api as api
+from openpype.pipeline import (
+ AutoCreator,
+ CreatedInstance,
+ legacy_io
+)
+
+
+class PSWorkfileCreator(AutoCreator):
+ identifier = "workfile"
+ family = "workfile"
+
+ def get_instance_attr_defs(self):
+ return []
+
+ def collect_instances(self):
+ for instance_data in api.list_instances():
+ creator_id = instance_data.get("creator_identifier")
+ if creator_id == self.identifier:
+ subset_name = instance_data["subset"]
+ instance = CreatedInstance(
+ self.family, subset_name, instance_data, self
+ )
+ self._add_instance_to_context(instance)
+
+ def update_instances(self, update_list):
+ # nothing to change on workfiles
+ pass
+
+ def create(self, options=None):
+ existing_instance = None
+ for instance in self.create_context.instances:
+ if instance.family == self.family:
+ existing_instance = instance
+ break
+
+ variant = ''
+ project_name = legacy_io.Session["AVALON_PROJECT"]
+ asset_name = legacy_io.Session["AVALON_ASSET"]
+ task_name = legacy_io.Session["AVALON_TASK"]
+ host_name = legacy_io.Session["AVALON_APP"]
+ if existing_instance is None:
+ asset_doc = legacy_io.find_one({
+ "type": "asset",
+ "name": asset_name
+ })
+ subset_name = self.get_subset_name(
+ variant, task_name, asset_doc, project_name, host_name
+ )
+ data = {
+ "asset": asset_name,
+ "task": task_name,
+ "variant": variant
+ }
+ data.update(self.get_dynamic_data(
+ variant, task_name, asset_doc, project_name, host_name
+ ))
+
+ new_instance = CreatedInstance(
+ self.family, subset_name, data, self
+ )
+ self._add_instance_to_context(new_instance)
+ api.stub().imprint(new_instance.get("instance_id"),
+ new_instance.data_to_store())
+
+ elif (
+ existing_instance["asset"] != asset_name
+ or existing_instance["task"] != task_name
+ ):
+ asset_doc = legacy_io.find_one({
+ "type": "asset",
+ "name": asset_name
+ })
+ subset_name = self.get_subset_name(
+ variant, task_name, asset_doc, project_name, host_name
+ )
+ existing_instance["asset"] = asset_name
+ existing_instance["task"] = task_name
diff --git a/openpype/hosts/photoshop/plugins/load/load_image.py b/openpype/hosts/photoshop/plugins/load/load_image.py
index 0a9421b8f2..91a9787781 100644
--- a/openpype/hosts/photoshop/plugins/load/load_image.py
+++ b/openpype/hosts/photoshop/plugins/load/load_image.py
@@ -61,7 +61,7 @@ class ImageLoader(photoshop.PhotoshopLoader):
)
stub.imprint(
- layer, {"representation": str(representation["_id"])}
+ layer.id, {"representation": str(representation["_id"])}
)
def remove(self, container):
@@ -73,7 +73,7 @@ class ImageLoader(photoshop.PhotoshopLoader):
stub = self.get_stub()
layer = container.pop("layer")
- stub.imprint(layer, {})
+ stub.imprint(layer.id, {})
stub.delete_layer(layer.id)
def switch(self, container, representation):
diff --git a/openpype/hosts/photoshop/plugins/load/load_reference.py b/openpype/hosts/photoshop/plugins/load/load_reference.py
index f5f0545d39..1f32a5d23c 100644
--- a/openpype/hosts/photoshop/plugins/load/load_reference.py
+++ b/openpype/hosts/photoshop/plugins/load/load_reference.py
@@ -61,7 +61,7 @@ class ReferenceLoader(photoshop.PhotoshopLoader):
)
stub.imprint(
- layer, {"representation": str(representation["_id"])}
+ layer.id, {"representation": str(representation["_id"])}
)
def remove(self, container):
@@ -72,7 +72,7 @@ class ReferenceLoader(photoshop.PhotoshopLoader):
"""
stub = self.get_stub()
layer = container.pop("layer")
- stub.imprint(layer, {})
+ stub.imprint(layer.id, {})
stub.delete_layer(layer.id)
def switch(self, container, representation):
diff --git a/openpype/hosts/photoshop/plugins/publish/collect_batch_data.py b/openpype/hosts/photoshop/plugins/publish/collect_batch_data.py
new file mode 100644
index 0000000000..448493d370
--- /dev/null
+++ b/openpype/hosts/photoshop/plugins/publish/collect_batch_data.py
@@ -0,0 +1,74 @@
+"""Parses batch context from json and continues in publish process.
+
+Provides:
+ context -> Loaded batch file.
+ - asset
+ - task (task name)
+ - taskType
+ - project_name
+ - variant
+
+Code is practically a copy of `openpype/hosts/webpublish/collect_batch_data` as
+webpublisher should eventually be ejected as an addon, e.g. the mentioned plugin
+shouldn't be pushed into general publish plugins.
+"""
+
+import os
+
+import pyblish.api
+
+from openpype.lib.plugin_tools import (
+ parse_json,
+ get_batch_asset_task_info
+)
+from openpype.pipeline import legacy_io
+
+
+class CollectBatchData(pyblish.api.ContextPlugin):
+ """Collect batch data from json stored in 'OPENPYPE_PUBLISH_DATA' env dir.
+
+ The directory must contain 'manifest.json' file where batch data should be
+ stored.
+ """
+ # must be really early, context values are only in json file
+ order = pyblish.api.CollectorOrder - 0.495
+ label = "Collect batch data"
+ hosts = ["photoshop"]
+ targets = ["remotepublish"]
+
+ def process(self, context):
+ self.log.info("CollectBatchData")
+ batch_dir = os.environ.get("OPENPYPE_PUBLISH_DATA")
+
+ assert batch_dir, (
+ "Missing `OPENPYPE_PUBLISH_DATA`")
+
+ assert os.path.exists(batch_dir), \
+ "Folder {} doesn't exist".format(batch_dir)
+
+ project_name = os.environ.get("AVALON_PROJECT")
+ if project_name is None:
+ raise AssertionError(
+ "Environment `AVALON_PROJECT` was not found."
+ "Could not set project `root` which may cause issues."
+ )
+
+ batch_data = parse_json(os.path.join(batch_dir, "manifest.json"))
+
+ context.data["batchDir"] = batch_dir
+ context.data["batchData"] = batch_data
+
+ asset_name, task_name, task_type = get_batch_asset_task_info(
+ batch_data["context"]
+ )
+
+ os.environ["AVALON_ASSET"] = asset_name
+ os.environ["AVALON_TASK"] = task_name
+ legacy_io.Session["AVALON_ASSET"] = asset_name
+ legacy_io.Session["AVALON_TASK"] = task_name
+
+ context.data["asset"] = asset_name
+ context.data["task"] = task_name
+ context.data["taskType"] = task_type
+ context.data["project_name"] = project_name
+ context.data["variant"] = batch_data["variant"]
diff --git a/openpype/hosts/photoshop/plugins/publish/collect_color_coded_instances.py b/openpype/hosts/photoshop/plugins/publish/collect_color_coded_instances.py
index 7d44d55a80..122428eea0 100644
--- a/openpype/hosts/photoshop/plugins/publish/collect_color_coded_instances.py
+++ b/openpype/hosts/photoshop/plugins/publish/collect_color_coded_instances.py
@@ -4,7 +4,6 @@ import re
import pyblish.api
from openpype.lib import prepare_template_data
-from openpype.lib.plugin_tools import parse_json, get_batch_asset_task_info
from openpype.hosts.photoshop import api as photoshop
@@ -46,7 +45,10 @@ class CollectColorCodedInstances(pyblish.api.ContextPlugin):
existing_subset_names = self._get_existing_subset_names(context)
- asset_name, task_name, variant = self._parse_batch(batch_dir)
+ # from CollectBatchData
+ asset_name = context.data["asset"]
+ task_name = context.data["task"]
+ variant = context.data["variant"]
stub = photoshop.stub()
layers = stub.get_layers()
@@ -130,25 +132,6 @@ class CollectColorCodedInstances(pyblish.api.ContextPlugin):
return existing_subset_names
- def _parse_batch(self, batch_dir):
- """Parses asset_name, task_name, variant from batch manifest."""
- task_data = None
- if batch_dir and os.path.exists(batch_dir):
- task_data = parse_json(os.path.join(batch_dir,
- "manifest.json"))
- if not task_data:
- raise ValueError(
- "Cannot parse batch meta in {} folder".format(batch_dir))
- variant = task_data["variant"]
-
- asset, task_name, task_type = get_batch_asset_task_info(
- task_data["context"])
-
- if not task_name:
- task_name = task_type
-
- return asset, task_name, variant
-
def _create_instance(self, context, layer, family,
asset, subset, task_name):
instance = context.create_instance(layer.name)
diff --git a/openpype/hosts/photoshop/plugins/publish/collect_instances.py b/openpype/hosts/photoshop/plugins/publish/collect_instances.py
index 6198ed0156..b466ec8687 100644
--- a/openpype/hosts/photoshop/plugins/publish/collect_instances.py
+++ b/openpype/hosts/photoshop/plugins/publish/collect_instances.py
@@ -1,16 +1,18 @@
-from avalon import api
+import pprint
+
import pyblish.api
from openpype.settings import get_project_settings
from openpype.hosts.photoshop import api as photoshop
from openpype.lib import prepare_template_data
+from openpype.pipeline import legacy_io
class CollectInstances(pyblish.api.ContextPlugin):
"""Gather instances by LayerSet and file metadata
- This collector takes into account assets that are associated with
- an LayerSet and marked with a unique identifier;
+ Collects publishable instances from file metadata or enhances those
+ already collected by the creator (family == "image").
If no image instances are explicitly created, it looks if there is value
in `flatten_subset_template` (configurable in Settings), in that case it
@@ -20,7 +22,7 @@ class CollectInstances(pyblish.api.ContextPlugin):
id (str): "pyblish.avalon.instance"
"""
- label = "Instances"
+ label = "Collect Instances"
order = pyblish.api.CollectorOrder
hosts = ["photoshop"]
families_mapping = {
@@ -30,42 +32,53 @@ class CollectInstances(pyblish.api.ContextPlugin):
flatten_subset_template = ""
def process(self, context):
+ instance_by_layer_id = {}
+ for instance in context:
+ if (
+ instance.data["family"] == "image" and
+ instance.data.get("members")):
+ layer_id = str(instance.data["members"][0])
+ instance_by_layer_id[layer_id] = instance
+
stub = photoshop.stub()
- layers = stub.get_layers()
+ layer_items = stub.get_layers()
layers_meta = stub.get_layers_metadata()
instance_names = []
+
all_layer_ids = []
- for layer in layers:
- all_layer_ids.append(layer.id)
- layer_data = stub.read(layer, layers_meta)
+ for layer_item in layer_items:
+ layer_meta_data = stub.read(layer_item, layers_meta)
+ all_layer_ids.append(layer_item.id)
# Skip layers without metadata.
- if layer_data is None:
+ if layer_meta_data is None:
continue
# Skip containers.
- if "container" in layer_data["id"]:
+ if "container" in layer_meta_data["id"]:
continue
- # child_layers = [*layer.Layers]
- # self.log.debug("child_layers {}".format(child_layers))
- # if not child_layers:
- # self.log.info("%s skipped, it was empty." % layer.Name)
- # continue
+ # active might not be in legacy meta
+ if not layer_meta_data.get("active", True):
+ continue
- instance = context.create_instance(layer_data["subset"])
- instance.data["layer"] = layer
- instance.data.update(layer_data)
+ instance = instance_by_layer_id.get(str(layer_item.id))
+ if instance is None:
+ instance = context.create_instance(layer_meta_data["subset"])
+
+ instance.data["layer"] = layer_item
+ instance.data.update(layer_meta_data)
instance.data["families"] = self.families_mapping[
- layer_data["family"]
+ layer_meta_data["family"]
]
- instance.data["publish"] = layer.visible
- instance_names.append(layer_data["subset"])
+ instance.data["publish"] = layer_item.visible
+ instance_names.append(layer_meta_data["subset"])
# Produce diagnostic message for any graphical
# user interface interested in visualising it.
self.log.info("Found: \"%s\" " % instance.data["name"])
- self.log.info("instance: {} ".format(instance.data))
+ self.log.info("instance: {} ".format(
+ pprint.pformat(instance.data, indent=4)))
if len(instance_names) != len(set(instance_names)):
self.log.warning("Duplicate instances found. " +
@@ -79,11 +92,12 @@ class CollectInstances(pyblish.api.ContextPlugin):
"CreateImage", {}).get(
"defaults", [''])
family = "image"
- task_name = api.Session["AVALON_TASK"]
+ task_name = legacy_io.Session["AVALON_TASK"]
asset_name = context.data["assetEntity"]["name"]
+ variant = context.data.get("variant") or variants[0]
fill_pairs = {
- "variant": variants[0],
+ "variant": variant,
"family": family,
"task": task_name
}
diff --git a/openpype/hosts/photoshop/plugins/publish/collect_review.py b/openpype/hosts/photoshop/plugins/publish/collect_review.py
index f3842b9ee5..2ea5503f3f 100644
--- a/openpype/hosts/photoshop/plugins/publish/collect_review.py
+++ b/openpype/hosts/photoshop/plugins/publish/collect_review.py
@@ -1,3 +1,11 @@
+"""
+Requires:
+ None
+
+Provides:
+ instance -> family ("review")
+"""
+
import os
import pyblish.api
@@ -6,33 +14,35 @@ from openpype.lib import get_subset_name_with_asset_doc
class CollectReview(pyblish.api.ContextPlugin):
- """Gather the active document as review instance."""
+ """Gather the active document as review instance.
+ Triggers once even if no 'image' is published, as by default it creates a
+ flatten image from a workfile.
+ """
+
+ label = "Collect Review"
label = "Review"
- order = pyblish.api.CollectorOrder + 0.1
hosts = ["photoshop"]
+ order = pyblish.api.CollectorOrder + 0.1
def process(self, context):
family = "review"
subset = get_subset_name_with_asset_doc(
family,
- "",
+ context.data.get("variant", ''),
context.data["anatomyData"]["task"]["name"],
context.data["assetEntity"],
context.data["anatomyData"]["project"]["name"],
host_name=context.data["hostName"]
)
- file_path = context.data["currentFile"]
- base_name = os.path.basename(file_path)
-
instance = context.create_instance(subset)
instance.data.update({
"subset": subset,
- "label": base_name,
- "name": base_name,
+ "label": subset,
+ "name": subset,
"family": family,
- "families": ["ftrack"],
+ "families": [],
"representations": [],
"asset": os.environ["AVALON_ASSET"]
})
diff --git a/openpype/hosts/photoshop/plugins/publish/collect_workfile.py b/openpype/hosts/photoshop/plugins/publish/collect_workfile.py
index 0dbe2c6609..e4f0a07b34 100644
--- a/openpype/hosts/photoshop/plugins/publish/collect_workfile.py
+++ b/openpype/hosts/photoshop/plugins/publish/collect_workfile.py
@@ -12,6 +12,13 @@ class CollectWorkfile(pyblish.api.ContextPlugin):
hosts = ["photoshop"]
def process(self, context):
+ existing_instance = None
+ for instance in context:
+ if instance.data["family"] == "workfile":
+ self.log.debug("Workfile instance found, won't create new")
+ existing_instance = instance
+ break
+
family = "workfile"
subset = get_subset_name_with_asset_doc(
family,
@@ -27,16 +34,19 @@ class CollectWorkfile(pyblish.api.ContextPlugin):
base_name = os.path.basename(file_path)
# Create instance
- instance = context.create_instance(subset)
- instance.data.update({
- "subset": subset,
- "label": base_name,
- "name": base_name,
- "family": family,
- "families": [],
- "representations": [],
- "asset": os.environ["AVALON_ASSET"]
- })
+ if existing_instance is None:
+ instance = context.create_instance(subset)
+ instance.data.update({
+ "subset": subset,
+ "label": base_name,
+ "name": base_name,
+ "family": family,
+ "families": [],
+ "representations": [],
+ "asset": os.environ["AVALON_ASSET"]
+ })
+ else:
+ instance = existing_instance
# creating representation
_, ext = os.path.splitext(file_path)
diff --git a/openpype/hosts/photoshop/plugins/publish/extract_image.py b/openpype/hosts/photoshop/plugins/publish/extract_image.py
index b07d0740c1..a133e33409 100644
--- a/openpype/hosts/photoshop/plugins/publish/extract_image.py
+++ b/openpype/hosts/photoshop/plugins/publish/extract_image.py
@@ -16,7 +16,6 @@ class ExtractImage(openpype.api.Extractor):
formats = ["png", "jpg"]
def process(self, instance):
-
staging_dir = self.staging_dir(instance)
self.log.info("Outputting image to {}".format(staging_dir))
diff --git a/openpype/hosts/photoshop/plugins/publish/help/validate_naming.xml b/openpype/hosts/photoshop/plugins/publish/help/validate_naming.xml
new file mode 100644
index 0000000000..5a1e266748
--- /dev/null
+++ b/openpype/hosts/photoshop/plugins/publish/help/validate_naming.xml
@@ -0,0 +1,21 @@
+
+
+
+Subset name
+
+## Invalid subset or layer name
+
+Subset or layer name cannot contain specific characters (spaces etc.) which could cause issues when the subset name is used in a published file name.
+ {msg}
+
+### How to repair?
+
+You can fix this with "repair" button on the right.
+
+
+### __Detailed Info__ (optional)
+
+Not all characters are available in a file names on all OS. Wrong characters could be configured in Settings.
+
+
+
\ No newline at end of file
diff --git a/openpype/hosts/photoshop/plugins/publish/help/validate_unique_subsets.xml b/openpype/hosts/photoshop/plugins/publish/help/validate_unique_subsets.xml
new file mode 100644
index 0000000000..4b47973193
--- /dev/null
+++ b/openpype/hosts/photoshop/plugins/publish/help/validate_unique_subsets.xml
@@ -0,0 +1,14 @@
+
+
+
+Subset not unique
+
+## Non unique subset name found
+
+ Non unique subset names: '{non_unique}'
+### How to repair?
+
+Remove the offending instance or rename it to have a unique name. Perhaps the same layer name was used for multiple instances?
+
+
+
\ No newline at end of file
diff --git a/openpype/hosts/photoshop/plugins/publish/validate_instance_asset.py b/openpype/hosts/photoshop/plugins/publish/validate_instance_asset.py
index ebe9cc21ea..b65f9d259f 100644
--- a/openpype/hosts/photoshop/plugins/publish/validate_instance_asset.py
+++ b/openpype/hosts/photoshop/plugins/publish/validate_instance_asset.py
@@ -1,6 +1,7 @@
-from avalon import api
import pyblish.api
+
import openpype.api
+from openpype.pipeline import legacy_io
from openpype.hosts.photoshop import api as photoshop
@@ -26,7 +27,7 @@ class ValidateInstanceAssetRepair(pyblish.api.Action):
for instance in instances:
data = stub.read(instance[0])
- data["asset"] = api.Session["AVALON_ASSET"]
+ data["asset"] = legacy_io.Session["AVALON_ASSET"]
stub.imprint(instance[0], data)
@@ -48,7 +49,7 @@ class ValidateInstanceAsset(pyblish.api.InstancePlugin):
def process(self, instance):
instance_asset = instance.data["asset"]
- current_asset = api.Session["AVALON_ASSET"]
+ current_asset = legacy_io.Session["AVALON_ASSET"]
msg = (
f"Instance asset {instance_asset} is not the same "
f"as current context {current_asset}. PLEASE DO:\n"
diff --git a/openpype/hosts/photoshop/plugins/publish/validate_naming.py b/openpype/hosts/photoshop/plugins/publish/validate_naming.py
index 583e9c7a4e..bcae24108c 100644
--- a/openpype/hosts/photoshop/plugins/publish/validate_naming.py
+++ b/openpype/hosts/photoshop/plugins/publish/validate_naming.py
@@ -2,6 +2,7 @@ import re
import pyblish.api
import openpype.api
+from openpype.pipeline import PublishXmlValidationError
from openpype.hosts.photoshop import api as photoshop
@@ -22,33 +23,34 @@ class ValidateNamingRepair(pyblish.api.Action):
failed.append(result["instance"])
invalid_chars, replace_char = plugin.get_replace_chars()
- self.log.info("{} --- {}".format(invalid_chars, replace_char))
+ self.log.debug("{} --- {}".format(invalid_chars, replace_char))
# Apply pyblish.logic to get the instances for the plug-in
instances = pyblish.api.instances_by_plugin(failed, plugin)
stub = photoshop.stub()
for instance in instances:
- self.log.info("validate_naming instance {}".format(instance))
- layer_item = instance.data["layer"]
- metadata = stub.read(layer_item)
- self.log.info("metadata instance {}".format(metadata))
- layer_name = None
- if metadata.get("uuid"):
- layer_data = stub.get_layer(metadata["uuid"])
- self.log.info("layer_data {}".format(layer_data))
- if layer_data:
- layer_name = re.sub(invalid_chars,
- replace_char,
- layer_data.name)
+ self.log.debug("validate_naming instance {}".format(instance))
+ current_layer_state = stub.get_layer(instance.data["layer"].id)
+ self.log.debug("current_layer{}".format(current_layer_state))
- stub.rename_layer(instance.data["uuid"], layer_name)
+ layer_meta = stub.read(current_layer_state)
+ instance_id = (layer_meta.get("instance_id") or
+ layer_meta.get("uuid"))
+ if not instance_id:
+ self.log.warning("Unable to repair, cannot find layer")
+ continue
+
+ layer_name = re.sub(invalid_chars,
+ replace_char,
+ current_layer_state.name)
+
+ stub.rename_layer(current_layer_state.id, layer_name)
subset_name = re.sub(invalid_chars, replace_char,
instance.data["subset"])
- layer_item.name = layer_name or subset_name
- metadata["subset"] = subset_name
- stub.imprint(layer_item, metadata)
+ layer_meta["subset"] = subset_name
+ stub.imprint(instance_id, layer_meta)
return True
@@ -73,11 +75,18 @@ class ValidateNaming(pyblish.api.InstancePlugin):
help_msg = ' Use Repair action (A) in Pyblish to fix it.'
msg = "Name \"{}\" is not allowed.{}".format(instance.data["name"],
help_msg)
- assert not re.search(self.invalid_chars, instance.data["name"]), msg
+
+ formatting_data = {"msg": msg}
+ if re.search(self.invalid_chars, instance.data["name"]):
+ raise PublishXmlValidationError(self, msg,
+ formatting_data=formatting_data)
msg = "Subset \"{}\" is not allowed.{}".format(instance.data["subset"],
help_msg)
- assert not re.search(self.invalid_chars, instance.data["subset"]), msg
+ formatting_data = {"msg": msg}
+ if re.search(self.invalid_chars, instance.data["subset"]):
+ raise PublishXmlValidationError(self, msg,
+ formatting_data=formatting_data)
@classmethod
def get_replace_chars(cls):
diff --git a/openpype/hosts/photoshop/plugins/publish/validate_unique_subsets.py b/openpype/hosts/photoshop/plugins/publish/validate_unique_subsets.py
index 40abfb1bbd..01f2323157 100644
--- a/openpype/hosts/photoshop/plugins/publish/validate_unique_subsets.py
+++ b/openpype/hosts/photoshop/plugins/publish/validate_unique_subsets.py
@@ -1,6 +1,7 @@
import collections
import pyblish.api
import openpype.api
+from openpype.pipeline import PublishXmlValidationError
class ValidateSubsetUniqueness(pyblish.api.ContextPlugin):
@@ -27,4 +28,10 @@ class ValidateSubsetUniqueness(pyblish.api.ContextPlugin):
if count > 1]
msg = ("Instance subset names {} are not unique. ".format(non_unique) +
"Remove duplicates via SubsetManager.")
- assert not non_unique, msg
+ formatting_data = {
+ "non_unique": ",".join(non_unique)
+ }
+
+ if non_unique:
+ raise PublishXmlValidationError(self, msg,
+ formatting_data=formatting_data)
diff --git a/openpype/hosts/resolve/api/pipeline.py b/openpype/hosts/resolve/api/pipeline.py
index 636c826a11..4a7d1c5bea 100644
--- a/openpype/hosts/resolve/api/pipeline.py
+++ b/openpype/hosts/resolve/api/pipeline.py
@@ -7,10 +7,9 @@ from collections import OrderedDict
from pyblish import api as pyblish
-from avalon import schema
-
from openpype.api import Logger
from openpype.pipeline import (
+ schema,
register_loader_plugin_path,
register_creator_plugin_path,
deregister_loader_plugin_path,
diff --git a/openpype/hosts/resolve/plugins/load/load_clip.py b/openpype/hosts/resolve/plugins/load/load_clip.py
index 71850d95f6..cf88b14e81 100644
--- a/openpype/hosts/resolve/plugins/load/load_clip.py
+++ b/openpype/hosts/resolve/plugins/load/load_clip.py
@@ -1,9 +1,11 @@
from copy import deepcopy
from importlib import reload
-from avalon import io
from openpype.hosts import resolve
-from openpype.pipeline import get_representation_path
+from openpype.pipeline import (
+ get_representation_path,
+ legacy_io,
+)
from openpype.hosts.resolve.api import lib, plugin
reload(plugin)
reload(lib)
@@ -94,7 +96,7 @@ class LoadClip(resolve.TimelineItemLoader):
namespace = container['namespace']
timeline_item_data = resolve.get_pype_timeline_item_by_name(namespace)
timeline_item = timeline_item_data["clip"]["item"]
- version = io.find_one({
+ version = legacy_io.find_one({
"type": "version",
"_id": representation["parent"]
})
@@ -140,7 +142,7 @@ class LoadClip(resolve.TimelineItemLoader):
# define version name
version_name = version.get("name", None)
# get all versions in list
- versions = io.find({
+ versions = legacy_io.find({
"type": "version",
"parent": version["parent"]
}).distinct('name')
diff --git a/openpype/hosts/resolve/plugins/publish/precollect_workfile.py b/openpype/hosts/resolve/plugins/publish/precollect_workfile.py
index 1333516177..a58f288770 100644
--- a/openpype/hosts/resolve/plugins/publish/precollect_workfile.py
+++ b/openpype/hosts/resolve/plugins/publish/precollect_workfile.py
@@ -1,10 +1,9 @@
import pyblish.api
-from openpype.hosts import resolve
-from avalon import api as avalon
from pprint import pformat
-
-# dev
from importlib import reload
+
+from openpype.hosts import resolve
+from openpype.pipeline import legacy_io
from openpype.hosts.resolve.otio import davinci_export
reload(davinci_export)
@@ -17,7 +16,7 @@ class PrecollectWorkfile(pyblish.api.ContextPlugin):
def process(self, context):
- asset = avalon.Session["AVALON_ASSET"]
+ asset = legacy_io.Session["AVALON_ASSET"]
subset = "workfile"
project = resolve.get_current_project()
fps = project.GetSetting("timelineFrameRate")
diff --git a/openpype/hosts/standalonepublisher/plugins/publish/collect_bulk_mov_instances.py b/openpype/hosts/standalonepublisher/plugins/publish/collect_bulk_mov_instances.py
index 9f075d66cf..3e7fb19c00 100644
--- a/openpype/hosts/standalonepublisher/plugins/publish/collect_bulk_mov_instances.py
+++ b/openpype/hosts/standalonepublisher/plugins/publish/collect_bulk_mov_instances.py
@@ -2,8 +2,8 @@ import copy
import json
import pyblish.api
-from avalon import io
from openpype.lib import get_subset_name_with_asset_doc
+from openpype.pipeline import legacy_io
class CollectBulkMovInstances(pyblish.api.InstancePlugin):
@@ -26,7 +26,7 @@ class CollectBulkMovInstances(pyblish.api.InstancePlugin):
context = instance.context
asset_name = instance.data["asset"]
- asset_doc = io.find_one({
+ asset_doc = legacy_io.find_one({
"type": "asset",
"name": asset_name
})
@@ -52,7 +52,7 @@ class CollectBulkMovInstances(pyblish.api.InstancePlugin):
self.subset_name_variant,
task_name,
asset_doc,
- io.Session["AVALON_PROJECT"]
+ legacy_io.Session["AVALON_PROJECT"]
)
instance_name = f"{asset_name}_{subset_name}"
diff --git a/openpype/hosts/standalonepublisher/plugins/publish/collect_context.py b/openpype/hosts/standalonepublisher/plugins/publish/collect_context.py
index 6913e0836d..2bf3917e2f 100644
--- a/openpype/hosts/standalonepublisher/plugins/publish/collect_context.py
+++ b/openpype/hosts/standalonepublisher/plugins/publish/collect_context.py
@@ -19,7 +19,8 @@ import copy
from pprint import pformat
import clique
import pyblish.api
-from avalon import io
+
+from openpype.pipeline import legacy_io
class CollectContextDataSAPublish(pyblish.api.ContextPlugin):
@@ -37,7 +38,7 @@ class CollectContextDataSAPublish(pyblish.api.ContextPlugin):
def process(self, context):
# get json paths from os and load them
- io.install()
+ legacy_io.install()
# get json file context
input_json_path = os.environ.get("SAPUBLISH_INPATH")
@@ -247,7 +248,8 @@ class CollectContextDataSAPublish(pyblish.api.ContextPlugin):
self.log.debug("collecting sequence: {}".format(collections))
instance.data["frameStart"] = int(component["frameStart"])
instance.data["frameEnd"] = int(component["frameEnd"])
- instance.data["fps"] = int(component["fps"])
+ if component.get("fps"):
+ instance.data["fps"] = int(component["fps"])
ext = component["ext"]
if ext.startswith("."):
diff --git a/openpype/hosts/standalonepublisher/plugins/publish/collect_hierarchy.py b/openpype/hosts/standalonepublisher/plugins/publish/collect_hierarchy.py
index b2735f3428..77163651c4 100644
--- a/openpype/hosts/standalonepublisher/plugins/publish/collect_hierarchy.py
+++ b/openpype/hosts/standalonepublisher/plugins/publish/collect_hierarchy.py
@@ -1,8 +1,10 @@
-import pyblish.api
-import re
import os
-from avalon import io
+import re
from copy import deepcopy
+import pyblish.api
+
+from openpype.pipeline import legacy_io
+
class CollectHierarchyInstance(pyblish.api.ContextPlugin):
"""Collecting hierarchy context from `parents` and `hierarchy` data
@@ -63,7 +65,7 @@ class CollectHierarchyInstance(pyblish.api.ContextPlugin):
hierarchy = list()
visual_hierarchy = [instance.context.data["assetEntity"]]
while True:
- visual_parent = io.find_one(
+ visual_parent = legacy_io.find_one(
{"_id": visual_hierarchy[-1]["data"]["visualParent"]}
)
if visual_parent:
@@ -129,7 +131,8 @@ class CollectHierarchyInstance(pyblish.api.ContextPlugin):
if self.shot_add_tasks:
tasks_to_add = dict()
- project_tasks = io.find_one({"type": "project"})["config"]["tasks"]
+ project_doc = legacy_io.find_one({"type": "project"})
+ project_tasks = project_doc["config"]["tasks"]
for task_name, task_data in self.shot_add_tasks.items():
_task_data = deepcopy(task_data)
diff --git a/openpype/hosts/standalonepublisher/plugins/publish/collect_matching_asset.py b/openpype/hosts/standalonepublisher/plugins/publish/collect_matching_asset.py
index 0d629b1b44..9d94bfdc91 100644
--- a/openpype/hosts/standalonepublisher/plugins/publish/collect_matching_asset.py
+++ b/openpype/hosts/standalonepublisher/plugins/publish/collect_matching_asset.py
@@ -2,9 +2,10 @@ import os
import re
import collections
import pyblish.api
-from avalon import io
from pprint import pformat
+from openpype.pipeline import legacy_io
+
class CollectMatchingAssetToInstance(pyblish.api.InstancePlugin):
"""
@@ -119,7 +120,7 @@ class CollectMatchingAssetToInstance(pyblish.api.InstancePlugin):
def _asset_docs_by_parent_id(self, instance):
# Query all assets for project and store them by parent's id to list
asset_docs_by_parent_id = collections.defaultdict(list)
- for asset_doc in io.find({"type": "asset"}):
+ for asset_doc in legacy_io.find({"type": "asset"}):
parent_id = asset_doc["data"]["visualParent"]
asset_docs_by_parent_id[parent_id].append(asset_doc)
return asset_docs_by_parent_id
diff --git a/openpype/hosts/standalonepublisher/plugins/publish/extract_bg_for_compositing.py b/openpype/hosts/standalonepublisher/plugins/publish/extract_bg_for_compositing.py
index f07499c15d..9621d70739 100644
--- a/openpype/hosts/standalonepublisher/plugins/publish/extract_bg_for_compositing.py
+++ b/openpype/hosts/standalonepublisher/plugins/publish/extract_bg_for_compositing.py
@@ -1,8 +1,9 @@
import os
import json
import copy
+
import openpype.api
-from avalon import io
+from openpype.pipeline import legacy_io
PSDImage = None
@@ -221,7 +222,7 @@ class ExtractBGForComp(openpype.api.Extractor):
self.log.debug("resourcesDir: \"{}\"".format(resources_folder))
def find_last_version(self, subset_name, asset_doc):
- subset_doc = io.find_one({
+ subset_doc = legacy_io.find_one({
"type": "subset",
"name": subset_name,
"parent": asset_doc["_id"]
@@ -230,7 +231,7 @@ class ExtractBGForComp(openpype.api.Extractor):
if subset_doc is None:
self.log.debug("Subset entity does not exist yet.")
else:
- version_doc = io.find_one(
+ version_doc = legacy_io.find_one(
{
"type": "version",
"parent": subset_doc["_id"]
diff --git a/openpype/hosts/standalonepublisher/plugins/publish/extract_bg_main_groups.py b/openpype/hosts/standalonepublisher/plugins/publish/extract_bg_main_groups.py
index 2c92366ae9..b45f04e574 100644
--- a/openpype/hosts/standalonepublisher/plugins/publish/extract_bg_main_groups.py
+++ b/openpype/hosts/standalonepublisher/plugins/publish/extract_bg_main_groups.py
@@ -1,9 +1,11 @@
import os
import copy
import json
-import openpype.api
+
import pyblish.api
-from avalon import io
+
+import openpype.api
+from openpype.pipeline import legacy_io
PSDImage = None
@@ -225,7 +227,7 @@ class ExtractBGMainGroups(openpype.api.Extractor):
self.log.debug("resourcesDir: \"{}\"".format(resources_folder))
def find_last_version(self, subset_name, asset_doc):
- subset_doc = io.find_one({
+ subset_doc = legacy_io.find_one({
"type": "subset",
"name": subset_name,
"parent": asset_doc["_id"]
@@ -234,7 +236,7 @@ class ExtractBGMainGroups(openpype.api.Extractor):
if subset_doc is None:
self.log.debug("Subset entity does not exist yet.")
else:
- version_doc = io.find_one(
+ version_doc = legacy_io.find_one(
{
"type": "version",
"parent": subset_doc["_id"]
diff --git a/openpype/hosts/standalonepublisher/plugins/publish/extract_images_from_psd.py b/openpype/hosts/standalonepublisher/plugins/publish/extract_images_from_psd.py
index e3094b2e3f..8485fa0915 100644
--- a/openpype/hosts/standalonepublisher/plugins/publish/extract_images_from_psd.py
+++ b/openpype/hosts/standalonepublisher/plugins/publish/extract_images_from_psd.py
@@ -1,8 +1,9 @@
import os
import copy
-import openpype.api
import pyblish.api
-from avalon import io
+
+import openpype.api
+from openpype.pipeline import legacy_io
PSDImage = None
@@ -149,7 +150,7 @@ class ExtractImagesFromPSD(openpype.api.Extractor):
new_instance.data["representations"] = [new_repre]
def find_last_version(self, subset_name, asset_doc):
- subset_doc = io.find_one({
+ subset_doc = legacy_io.find_one({
"type": "subset",
"name": subset_name,
"parent": asset_doc["_id"]
@@ -158,7 +159,7 @@ class ExtractImagesFromPSD(openpype.api.Extractor):
if subset_doc is None:
self.log.debug("Subset entity does not exist yet.")
else:
- version_doc = io.find_one(
+ version_doc = legacy_io.find_one(
{
"type": "version",
"parent": subset_doc["_id"]
diff --git a/openpype/hosts/standalonepublisher/plugins/publish/validate_task_existence.py b/openpype/hosts/standalonepublisher/plugins/publish/validate_task_existence.py
index 825092c81b..4c761c7a4c 100644
--- a/openpype/hosts/standalonepublisher/plugins/publish/validate_task_existence.py
+++ b/openpype/hosts/standalonepublisher/plugins/publish/validate_task_existence.py
@@ -1,7 +1,9 @@
import pyblish.api
-from avalon import io
-from openpype.pipeline import PublishXmlValidationError
+from openpype.pipeline import (
+ PublishXmlValidationError,
+ legacy_io,
+)
class ValidateTaskExistence(pyblish.api.ContextPlugin):
@@ -18,7 +20,7 @@ class ValidateTaskExistence(pyblish.api.ContextPlugin):
for instance in context:
asset_names.add(instance.data["asset"])
- asset_docs = io.find(
+ asset_docs = legacy_io.find(
{
"type": "asset",
"name": {"$in": list(asset_names)}
diff --git a/openpype/hosts/testhost/api/__init__.py b/openpype/hosts/testhost/api/__init__.py
index 7840b25892..a929a891aa 100644
--- a/openpype/hosts/testhost/api/__init__.py
+++ b/openpype/hosts/testhost/api/__init__.py
@@ -1,8 +1,8 @@
import os
import logging
import pyblish.api
-import avalon.api
-from openpype.pipeline import BaseCreator
+
+from openpype.pipeline import register_creator_plugin_path
from .pipeline import (
ls,
@@ -27,7 +27,7 @@ def install():
log.info("OpenPype - Installing TestHost integration")
pyblish.api.register_host("testhost")
pyblish.api.register_plugin_path(PUBLISH_PATH)
- avalon.api.register_plugin_path(BaseCreator, CREATE_PATH)
+ register_creator_plugin_path(CREATE_PATH)
__all__ = (
diff --git a/openpype/hosts/testhost/api/pipeline.py b/openpype/hosts/testhost/api/pipeline.py
index 1f5d680705..285fe8f8d6 100644
--- a/openpype/hosts/testhost/api/pipeline.py
+++ b/openpype/hosts/testhost/api/pipeline.py
@@ -1,5 +1,6 @@
import os
import json
+from openpype.pipeline import legacy_io
class HostContext:
@@ -16,9 +17,7 @@ class HostContext:
if not asset_name:
return project_name
- from avalon import io
-
- asset_doc = io.find_one(
+ asset_doc = legacy_io.find_one(
{"type": "asset", "name": asset_name},
{"data.parents": 1}
)
diff --git a/openpype/hosts/testhost/plugins/create/auto_creator.py b/openpype/hosts/testhost/plugins/create/auto_creator.py
index 4c22eea9dd..06b95375b1 100644
--- a/openpype/hosts/testhost/plugins/create/auto_creator.py
+++ b/openpype/hosts/testhost/plugins/create/auto_creator.py
@@ -1,7 +1,7 @@
-from avalon import io
from openpype.lib import NumberDef
from openpype.hosts.testhost.api import pipeline
from openpype.pipeline import (
+ legacy_io,
AutoCreator,
CreatedInstance,
)
@@ -38,13 +38,16 @@ class MyAutoCreator(AutoCreator):
break
variant = "Main"
- project_name = io.Session["AVALON_PROJECT"]
- asset_name = io.Session["AVALON_ASSET"]
- task_name = io.Session["AVALON_TASK"]
- host_name = io.Session["AVALON_APP"]
+ project_name = legacy_io.Session["AVALON_PROJECT"]
+ asset_name = legacy_io.Session["AVALON_ASSET"]
+ task_name = legacy_io.Session["AVALON_TASK"]
+ host_name = legacy_io.Session["AVALON_APP"]
if existing_instance is None:
- asset_doc = io.find_one({"type": "asset", "name": asset_name})
+ asset_doc = legacy_io.find_one({
+ "type": "asset",
+ "name": asset_name
+ })
subset_name = self.get_subset_name(
variant, task_name, asset_doc, project_name, host_name
)
@@ -66,7 +69,10 @@ class MyAutoCreator(AutoCreator):
existing_instance["asset"] != asset_name
or existing_instance["task"] != task_name
):
- asset_doc = io.find_one({"type": "asset", "name": asset_name})
+ asset_doc = legacy_io.find_one({
+ "type": "asset",
+ "name": asset_name
+ })
subset_name = self.get_subset_name(
variant, task_name, asset_doc, project_name, host_name
)
diff --git a/openpype/hosts/testhost/run_publish.py b/openpype/hosts/testhost/run_publish.py
index cc80bdc604..c7ad63aafd 100644
--- a/openpype/hosts/testhost/run_publish.py
+++ b/openpype/hosts/testhost/run_publish.py
@@ -22,13 +22,11 @@ openpype_dir = multi_dirname(current_file, 4)
os.environ["OPENPYPE_MONGO"] = mongo_url
os.environ["OPENPYPE_ROOT"] = openpype_dir
-os.environ["AVALON_MONGO"] = mongo_url
os.environ["AVALON_PROJECT"] = project_name
os.environ["AVALON_ASSET"] = asset_name
os.environ["AVALON_TASK"] = task_name
os.environ["AVALON_APP"] = host_name
os.environ["OPENPYPE_DATABASE_NAME"] = "openpype"
-os.environ["AVALON_CONFIG"] = "openpype"
os.environ["AVALON_TIMEOUT"] = "1000"
os.environ["AVALON_DB"] = "avalon"
os.environ["FTRACK_SERVER"] = ftrack_url
diff --git a/openpype/hosts/traypublisher/api/pipeline.py b/openpype/hosts/traypublisher/api/pipeline.py
index 24175883d9..954a0bae47 100644
--- a/openpype/hosts/traypublisher/api/pipeline.py
+++ b/openpype/hosts/traypublisher/api/pipeline.py
@@ -3,11 +3,12 @@ import json
import tempfile
import atexit
-from avalon import io
-import avalon.api
import pyblish.api
-from openpype.pipeline import register_creator_plugin_path
+from openpype.pipeline import (
+ register_creator_plugin_path,
+ legacy_io,
+)
ROOT_DIR = os.path.dirname(os.path.dirname(
os.path.abspath(__file__)
@@ -175,6 +176,6 @@ def install():
def set_project_name(project_name):
# TODO Deregister project specific plugins and register new project plugins
os.environ["AVALON_PROJECT"] = project_name
- avalon.api.Session["AVALON_PROJECT"] = project_name
- io.install()
+ legacy_io.Session["AVALON_PROJECT"] = project_name
+ legacy_io.install()
HostContext.set_project_name(project_name)
diff --git a/openpype/hosts/tvpaint/api/lib.py b/openpype/hosts/tvpaint/api/lib.py
index 9e6404e72f..0c63dbe5be 100644
--- a/openpype/hosts/tvpaint/api/lib.py
+++ b/openpype/hosts/tvpaint/api/lib.py
@@ -2,8 +2,6 @@ import os
import logging
import tempfile
-import avalon.io
-
from . import CommunicationWrapper
log = logging.getLogger(__name__)
diff --git a/openpype/hosts/tvpaint/api/pipeline.py b/openpype/hosts/tvpaint/api/pipeline.py
index 78c10c3dae..f473f51457 100644
--- a/openpype/hosts/tvpaint/api/pipeline.py
+++ b/openpype/hosts/tvpaint/api/pipeline.py
@@ -7,14 +7,12 @@ import logging
import requests
import pyblish.api
-import avalon.api
-
-from avalon import io
from openpype.hosts import tvpaint
from openpype.api import get_current_project_settings
from openpype.lib import register_event_callback
from openpype.pipeline import (
+ legacy_io,
register_loader_plugin_path,
register_creator_plugin_path,
deregister_loader_plugin_path,
@@ -70,10 +68,10 @@ def install():
"""Install TVPaint-specific functionality."""
log.info("OpenPype - Installing TVPaint integration")
- io.install()
+ legacy_io.install()
# Create workdir folder if does not exist yet
- workdir = io.Session["AVALON_WORKDIR"]
+ workdir = legacy_io.Session["AVALON_WORKDIR"]
if not os.path.exists(workdir):
os.makedirs(workdir)
@@ -446,12 +444,12 @@ def set_context_settings(asset_doc=None):
"""
if asset_doc is None:
# Use current session asset if not passed
- asset_doc = avalon.io.find_one({
+ asset_doc = legacy_io.find_one({
"type": "asset",
- "name": avalon.io.Session["AVALON_ASSET"]
+ "name": legacy_io.Session["AVALON_ASSET"]
})
- project_doc = avalon.io.find_one({"type": "project"})
+ project_doc = legacy_io.find_one({"type": "project"})
framerate = asset_doc["data"].get("fps")
if framerate is None:
diff --git a/openpype/hosts/tvpaint/api/workio.py b/openpype/hosts/tvpaint/api/workio.py
index 88bdd7117e..1a5ad00ca8 100644
--- a/openpype/hosts/tvpaint/api/workio.py
+++ b/openpype/hosts/tvpaint/api/workio.py
@@ -3,8 +3,10 @@
has_unsaved_changes
"""
-from avalon import api
-from openpype.pipeline import HOST_WORKFILE_EXTENSIONS
+from openpype.pipeline import (
+ HOST_WORKFILE_EXTENSIONS,
+ legacy_io,
+)
from .lib import (
execute_george,
execute_george_through_file
@@ -24,9 +26,9 @@ def save_file(filepath):
"""Save the open scene file."""
# Store context to workfile before save
context = {
- "project": api.Session["AVALON_PROJECT"],
- "asset": api.Session["AVALON_ASSET"],
- "task": api.Session["AVALON_TASK"]
+ "project": legacy_io.Session["AVALON_PROJECT"],
+ "asset": legacy_io.Session["AVALON_ASSET"],
+ "task": legacy_io.Session["AVALON_TASK"]
}
save_current_workfile_context(context)
diff --git a/openpype/hosts/tvpaint/hooks/pre_launch_args.py b/openpype/hosts/tvpaint/hooks/pre_launch_args.py
index 2a8f49d5b0..c31403437a 100644
--- a/openpype/hosts/tvpaint/hooks/pre_launch_args.py
+++ b/openpype/hosts/tvpaint/hooks/pre_launch_args.py
@@ -1,14 +1,8 @@
-import os
-import shutil
-
-from openpype.hosts import tvpaint
from openpype.lib import (
PreLaunchHook,
get_openpype_execute_args
)
-import avalon
-
class TvpaintPrelaunchHook(PreLaunchHook):
"""Launch arguments preparation.
diff --git a/openpype/hosts/tvpaint/plugins/load/load_workfile.py b/openpype/hosts/tvpaint/plugins/load/load_workfile.py
index 1ce5449065..0eab083c22 100644
--- a/openpype/hosts/tvpaint/plugins/load/load_workfile.py
+++ b/openpype/hosts/tvpaint/plugins/load/load_workfile.py
@@ -1,13 +1,15 @@
import os
-from avalon import io
from openpype.lib import (
StringTemplate,
get_workfile_template_key_from_context,
get_workdir_data,
get_last_workfile_with_version,
)
-from openpype.pipeline import registered_host
+from openpype.pipeline import (
+ registered_host,
+ legacy_io,
+)
from openpype.api import Anatomy
from openpype.hosts.tvpaint.api import lib, pipeline, plugin
@@ -46,13 +48,13 @@ class LoadWorkfile(plugin.Loader):
task_name = context.get("task")
# Far cases when there is workfile without context
if not asset_name:
- asset_name = io.Session["AVALON_ASSET"]
- task_name = io.Session["AVALON_TASK"]
+ asset_name = legacy_io.Session["AVALON_ASSET"]
+ task_name = legacy_io.Session["AVALON_TASK"]
- project_doc = io.find_one({
+ project_doc = legacy_io.find_one({
"type": "project"
})
- asset_doc = io.find_one({
+ asset_doc = legacy_io.find_one({
"type": "asset",
"name": asset_name
})
@@ -63,7 +65,7 @@ class LoadWorkfile(plugin.Loader):
task_name,
host_name,
project_name=project_name,
- dbcon=io
+ dbcon=legacy_io
)
anatomy = Anatomy(project_name)
diff --git a/openpype/hosts/tvpaint/plugins/publish/collect_instances.py b/openpype/hosts/tvpaint/plugins/publish/collect_instances.py
index 5e8d13592c..188aa8c41a 100644
--- a/openpype/hosts/tvpaint/plugins/publish/collect_instances.py
+++ b/openpype/hosts/tvpaint/plugins/publish/collect_instances.py
@@ -1,10 +1,9 @@
-import os
import json
import copy
import pyblish.api
-from avalon import io
from openpype.lib import get_subset_name_with_asset_doc
+from openpype.pipeline import legacy_io
class CollectInstances(pyblish.api.ContextPlugin):
@@ -82,7 +81,7 @@ class CollectInstances(pyblish.api.ContextPlugin):
# - not sure if it's good idea to require asset id in
# get_subset_name?
asset_name = context.data["workfile_context"]["asset"]
- asset_doc = io.find_one({
+ asset_doc = legacy_io.find_one({
"type": "asset",
"name": asset_name
})
@@ -93,7 +92,7 @@ class CollectInstances(pyblish.api.ContextPlugin):
host_name = context.data["hostName"]
# Use empty variant value
variant = ""
- task_name = io.Session["AVALON_TASK"]
+ task_name = legacy_io.Session["AVALON_TASK"]
new_subset_name = get_subset_name_with_asset_doc(
family,
variant,
@@ -157,7 +156,7 @@ class CollectInstances(pyblish.api.ContextPlugin):
# Change subset name
# Final family of an instance will be `render`
new_family = "render"
- task_name = io.Session["AVALON_TASK"]
+ task_name = legacy_io.Session["AVALON_TASK"]
new_subset_name = "{}{}_{}_Beauty".format(
new_family, task_name.capitalize(), name
)
@@ -202,7 +201,7 @@ class CollectInstances(pyblish.api.ContextPlugin):
# Final family of an instance will be `render`
new_family = "render"
old_subset_name = instance_data["subset"]
- task_name = io.Session["AVALON_TASK"]
+ task_name = legacy_io.Session["AVALON_TASK"]
new_subset_name = "{}{}_{}_{}".format(
new_family, task_name.capitalize(), render_layer, pass_name
)
diff --git a/openpype/hosts/tvpaint/plugins/publish/collect_scene_render.py b/openpype/hosts/tvpaint/plugins/publish/collect_scene_render.py
index 0af9a9a400..1c042a62fb 100644
--- a/openpype/hosts/tvpaint/plugins/publish/collect_scene_render.py
+++ b/openpype/hosts/tvpaint/plugins/publish/collect_scene_render.py
@@ -1,9 +1,9 @@
import json
import copy
import pyblish.api
-from avalon import io
from openpype.lib import get_subset_name_with_asset_doc
+from openpype.pipeline import legacy_io
class CollectRenderScene(pyblish.api.ContextPlugin):
@@ -57,7 +57,7 @@ class CollectRenderScene(pyblish.api.ContextPlugin):
# get_subset_name?
workfile_context = context.data["workfile_context"]
asset_name = workfile_context["asset"]
- asset_doc = io.find_one({
+ asset_doc = legacy_io.find_one({
"type": "asset",
"name": asset_name
})
diff --git a/openpype/hosts/tvpaint/plugins/publish/collect_workfile.py b/openpype/hosts/tvpaint/plugins/publish/collect_workfile.py
index 89348037d3..70d92f82e9 100644
--- a/openpype/hosts/tvpaint/plugins/publish/collect_workfile.py
+++ b/openpype/hosts/tvpaint/plugins/publish/collect_workfile.py
@@ -1,9 +1,9 @@
import os
import json
import pyblish.api
-from avalon import io
from openpype.lib import get_subset_name_with_asset_doc
+from openpype.pipeline import legacy_io
class CollectWorkfile(pyblish.api.ContextPlugin):
@@ -28,7 +28,7 @@ class CollectWorkfile(pyblish.api.ContextPlugin):
# get_subset_name?
family = "workfile"
asset_name = context.data["workfile_context"]["asset"]
- asset_doc = io.find_one({
+ asset_doc = legacy_io.find_one({
"type": "asset",
"name": asset_name
})
@@ -39,7 +39,7 @@ class CollectWorkfile(pyblish.api.ContextPlugin):
host_name = os.environ["AVALON_APP"]
# Use empty variant value
variant = ""
- task_name = io.Session["AVALON_TASK"]
+ task_name = legacy_io.Session["AVALON_TASK"]
subset_name = get_subset_name_with_asset_doc(
family,
variant,
diff --git a/openpype/hosts/tvpaint/plugins/publish/collect_workfile_data.py b/openpype/hosts/tvpaint/plugins/publish/collect_workfile_data.py
index f5c86c613b..c59ef82f85 100644
--- a/openpype/hosts/tvpaint/plugins/publish/collect_workfile_data.py
+++ b/openpype/hosts/tvpaint/plugins/publish/collect_workfile_data.py
@@ -3,7 +3,8 @@ import json
import tempfile
import pyblish.api
-import avalon.api
+
+from openpype.pipeline import legacy_io
from openpype.hosts.tvpaint.api import pipeline, lib
@@ -49,9 +50,9 @@ class CollectWorkfileData(pyblish.api.ContextPlugin):
# Collect and store current context to have reference
current_context = {
- "project": avalon.api.Session["AVALON_PROJECT"],
- "asset": avalon.api.Session["AVALON_ASSET"],
- "task": avalon.api.Session["AVALON_TASK"]
+ "project": legacy_io.Session["AVALON_PROJECT"],
+ "asset": legacy_io.Session["AVALON_ASSET"],
+ "task": legacy_io.Session["AVALON_TASK"]
}
context.data["previous_context"] = current_context
self.log.debug("Current context is: {}".format(current_context))
@@ -69,7 +70,7 @@ class CollectWorkfileData(pyblish.api.ContextPlugin):
("AVALON_TASK", "task")
)
for env_key, key in key_map:
- avalon.api.Session[env_key] = workfile_context[key]
+ legacy_io.Session[env_key] = workfile_context[key]
os.environ[env_key] = workfile_context[key]
self.log.info("Context changed to: {}".format(workfile_context))
diff --git a/openpype/hosts/unreal/plugins/load/load_camera.py b/openpype/hosts/unreal/plugins/load/load_camera.py
index 40bca0b0c7..63c0845ec2 100644
--- a/openpype/hosts/unreal/plugins/load/load_camera.py
+++ b/openpype/hosts/unreal/plugins/load/load_camera.py
@@ -2,8 +2,10 @@
"""Load camera from FBX."""
import os
-from avalon import io
-from openpype.pipeline import AVALON_CONTAINER_ID
+from openpype.pipeline import (
+ AVALON_CONTAINER_ID,
+ legacy_io,
+)
from openpype.hosts.unreal.api import plugin
from openpype.hosts.unreal.api import pipeline as unreal_pipeline
import unreal # noqa
@@ -87,8 +89,8 @@ class CameraLoader(plugin.Loader):
factory=unreal.LevelSequenceFactoryNew()
)
- io_asset = io.Session["AVALON_ASSET"]
- asset_doc = io.find_one({
+ io_asset = legacy_io.Session["AVALON_ASSET"]
+ asset_doc = legacy_io.find_one({
"type": "asset",
"name": io_asset
})
@@ -172,8 +174,8 @@ class CameraLoader(plugin.Loader):
factory=unreal.LevelSequenceFactoryNew()
)
- io_asset = io.Session["AVALON_ASSET"]
- asset_doc = io.find_one({
+ io_asset = legacy_io.Session["AVALON_ASSET"]
+ asset_doc = legacy_io.find_one({
"type": "asset",
"name": io_asset
})
diff --git a/openpype/hosts/unreal/plugins/publish/extract_layout.py b/openpype/hosts/unreal/plugins/publish/extract_layout.py
index f34a47b89f..87e6693a97 100644
--- a/openpype/hosts/unreal/plugins/publish/extract_layout.py
+++ b/openpype/hosts/unreal/plugins/publish/extract_layout.py
@@ -10,7 +10,7 @@ from unreal import EditorLevelLibrary as ell
from unreal import EditorAssetLibrary as eal
import openpype.api
-from avalon import io
+from openpype.pipeline import legacy_io
class ExtractLayout(openpype.api.Extractor):
@@ -61,7 +61,7 @@ class ExtractLayout(openpype.api.Extractor):
family = eal.get_metadata_tag(asset_container, "family")
self.log.info("Parent: {}".format(parent))
- blend = io.find_one(
+ blend = legacy_io.find_one(
{
"type": "representation",
"parent": ObjectId(parent),
diff --git a/openpype/hosts/webpublisher/api/__init__.py b/openpype/hosts/webpublisher/api/__init__.py
index 72bbffd099..18e3a16cf5 100644
--- a/openpype/hosts/webpublisher/api/__init__.py
+++ b/openpype/hosts/webpublisher/api/__init__.py
@@ -1,9 +1,9 @@
import os
import logging
-from avalon import io
from pyblish import api as pyblish
import openpype.hosts.webpublisher
+from openpype.pipeline import legacy_io
log = logging.getLogger("openpype.hosts.webpublisher")
@@ -19,7 +19,7 @@ def install():
pyblish.register_plugin_path(PUBLISH_PATH)
log.info(PUBLISH_PATH)
- io.install()
+ legacy_io.install()
def uninstall():
diff --git a/openpype/hosts/webpublisher/plugins/publish/collect_batch_data.py b/openpype/hosts/webpublisher/plugins/publish/collect_batch_data.py
index ca14538d7d..9ff779636a 100644
--- a/openpype/hosts/webpublisher/plugins/publish/collect_batch_data.py
+++ b/openpype/hosts/webpublisher/plugins/publish/collect_batch_data.py
@@ -1,18 +1,24 @@
-"""Loads batch context from json and continues in publish process.
+"""Parses batch context from json and continues in publish process.
Provides:
context -> Loaded batch file.
+ - asset
+ - task (task name)
+ - taskType
+ - project_name
+ - variant
"""
import os
import pyblish.api
-from avalon import io
+
from openpype.lib.plugin_tools import (
parse_json,
get_batch_asset_task_info
)
from openpype.lib.remote_publish import get_webpublish_conn, IN_PROGRESS_STATUS
+from openpype.pipeline import legacy_io
class CollectBatchData(pyblish.api.ContextPlugin):
@@ -24,7 +30,7 @@ class CollectBatchData(pyblish.api.ContextPlugin):
# must be really early, context values are only in json file
order = pyblish.api.CollectorOrder - 0.495
label = "Collect batch data"
- host = ["webpublisher"]
+ hosts = ["webpublisher"]
def process(self, context):
batch_dir = os.environ.get("OPENPYPE_PUBLISH_DATA")
@@ -52,14 +58,15 @@ class CollectBatchData(pyblish.api.ContextPlugin):
)
os.environ["AVALON_ASSET"] = asset_name
- io.Session["AVALON_ASSET"] = asset_name
+ legacy_io.Session["AVALON_ASSET"] = asset_name
os.environ["AVALON_TASK"] = task_name
- io.Session["AVALON_TASK"] = task_name
+ legacy_io.Session["AVALON_TASK"] = task_name
context.data["asset"] = asset_name
context.data["task"] = task_name
context.data["taskType"] = task_type
context.data["project_name"] = project_name
+ context.data["variant"] = batch_data["variant"]
self._set_ctx_path(batch_data)
diff --git a/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py b/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py
index 8edaf4f67b..bdd3caccfd 100644
--- a/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py
+++ b/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py
@@ -12,7 +12,6 @@ import clique
import tempfile
import math
-from avalon import io
import pyblish.api
from openpype.lib import (
prepare_template_data,
@@ -24,6 +23,7 @@ from openpype.lib.plugin_tools import (
parse_json,
get_subset_name_with_asset_doc
)
+from openpype.pipeline import legacy_io
class CollectPublishedFiles(pyblish.api.ContextPlugin):
@@ -40,7 +40,7 @@ class CollectPublishedFiles(pyblish.api.ContextPlugin):
# must be really early, context values are only in json file
order = pyblish.api.CollectorOrder - 0.490
label = "Collect rendered frames"
- host = ["webpublisher"]
+ hosts = ["webpublisher"]
targets = ["filespublish"]
# from Settings
@@ -61,6 +61,7 @@ class CollectPublishedFiles(pyblish.api.ContextPlugin):
task_name = context.data["task"]
task_type = context.data["taskType"]
project_name = context.data["project_name"]
+ variant = context.data["variant"]
for task_dir in task_subfolders:
task_data = parse_json(os.path.join(task_dir,
"manifest.json"))
@@ -76,7 +77,7 @@ class CollectPublishedFiles(pyblish.api.ContextPlugin):
extension.replace(".", ''))
subset_name = get_subset_name_with_asset_doc(
- family, task_data["variant"], task_name, asset_doc,
+ family, variant, task_name, asset_doc,
project_name=project_name, host_name="webpublisher"
)
version = self._get_last_version(asset_name, subset_name) + 1
@@ -261,7 +262,7 @@ class CollectPublishedFiles(pyblish.api.ContextPlugin):
}
}
]
- version = list(io.aggregate(query))
+ version = list(legacy_io.aggregate(query))
if version:
return version[0].get("version") or 0
diff --git a/openpype/hosts/webpublisher/plugins/publish/extract_thumbnail.py b/openpype/hosts/webpublisher/plugins/publish/extract_thumbnail.py
index cb6ed8481c..a56521891b 100644
--- a/openpype/hosts/webpublisher/plugins/publish/extract_thumbnail.py
+++ b/openpype/hosts/webpublisher/plugins/publish/extract_thumbnail.py
@@ -8,7 +8,7 @@ from openpype.lib import (
run_subprocess,
get_transcode_temp_directory,
- convert_for_ffmpeg,
+ convert_input_paths_for_ffmpeg,
should_convert_for_ffmpeg
)
@@ -59,11 +59,9 @@ class ExtractThumbnail(pyblish.api.InstancePlugin):
if do_convert:
convert_dir = get_transcode_temp_directory()
filename = os.path.basename(full_input_path)
- convert_for_ffmpeg(
- full_input_path,
+ convert_input_paths_for_ffmpeg(
+ [full_input_path],
convert_dir,
- None,
- None,
self.log
)
full_input_path = os.path.join(convert_dir, filename)
diff --git a/openpype/hosts/webpublisher/webserver_service/webpublish_routes.py b/openpype/hosts/webpublisher/webserver_service/webpublish_routes.py
index 1f9089aa27..e82ba7f2b8 100644
--- a/openpype/hosts/webpublisher/webserver_service/webpublish_routes.py
+++ b/openpype/hosts/webpublisher/webserver_service/webpublish_routes.py
@@ -7,18 +7,20 @@ import collections
from aiohttp.web_response import Response
import subprocess
-from avalon.api import AvalonMongoDB
-
-from openpype.lib import OpenPypeMongoConnection
-from openpype_modules.avalon_apps.rest_api import _RestApiEndpoint
-from openpype.settings import get_project_settings
-
-from openpype.lib import PypeLogger
+from openpype.lib import (
+ OpenPypeMongoConnection,
+ PypeLogger,
+)
from openpype.lib.remote_publish import (
get_task_data,
ERROR_STATUS,
REPROCESS_STATUS
)
+from openpype.pipeline import AvalonMongoDB
+from openpype_modules.avalon_apps.rest_api import _RestApiEndpoint
+from openpype.settings import get_project_settings
+
+
log = PypeLogger.get_logger("WebServer")
diff --git a/openpype/lib/__init__.py b/openpype/lib/__init__.py
index b57e469f5b..29719b63bd 100644
--- a/openpype/lib/__init__.py
+++ b/openpype/lib/__init__.py
@@ -105,6 +105,7 @@ from .transcoding import (
get_transcode_temp_directory,
should_convert_for_ffmpeg,
convert_for_ffmpeg,
+ convert_input_paths_for_ffmpeg,
get_ffprobe_data,
get_ffprobe_streams,
get_ffmpeg_codec_args,
@@ -276,6 +277,7 @@ __all__ = [
"get_transcode_temp_directory",
"should_convert_for_ffmpeg",
"convert_for_ffmpeg",
+ "convert_input_paths_for_ffmpeg",
"get_ffprobe_data",
"get_ffprobe_streams",
"get_ffmpeg_codec_args",
diff --git a/openpype/lib/abstract_collect_render.py b/openpype/lib/abstract_collect_render.py
index 7c768e280c..3d81f6d794 100644
--- a/openpype/lib/abstract_collect_render.py
+++ b/openpype/lib/abstract_collect_render.py
@@ -9,9 +9,10 @@ from abc import abstractmethod
import attr
import six
-from avalon import api
import pyblish.api
+from openpype.pipeline import legacy_io
+
from .abstract_metaplugins import AbstractMetaContextPlugin
@@ -30,6 +31,7 @@ class RenderInstance(object):
source = attr.ib() # path to source scene file
label = attr.ib() # label to show in GUI
subset = attr.ib() # subset name
+ task = attr.ib() # task name
asset = attr.ib() # asset name (AVALON_ASSET)
attachTo = attr.ib() # subset name to attach render to
setMembers = attr.ib() # list of nodes/members producing render output
@@ -127,7 +129,7 @@ class AbstractCollectRender(pyblish.api.ContextPlugin):
"""Constructor."""
super(AbstractCollectRender, self).__init__(*args, **kwargs)
self._file_path = None
- self._asset = api.Session["AVALON_ASSET"]
+ self._asset = legacy_io.Session["AVALON_ASSET"]
self._context = None
def process(self, context):
@@ -138,7 +140,9 @@ class AbstractCollectRender(pyblish.api.ContextPlugin):
try:
if "workfile" in instance.data["families"]:
instance.data["publish"] = True
- if "renderFarm" in instance.data["families"]:
+ # TODO merge renderFarm and render.farm
+ if ("renderFarm" in instance.data["families"] or
+ "render.farm" in instance.data["families"]):
instance.data["remove"] = True
except KeyError:
# be tolerant if 'families' is missing.
diff --git a/openpype/lib/applications.py b/openpype/lib/applications.py
index 07b91dda03..b52da52dc9 100644
--- a/openpype/lib/applications.py
+++ b/openpype/lib/applications.py
@@ -1295,7 +1295,7 @@ def get_app_environments_for_context(
Returns:
dict: Environments for passed context and application.
"""
- from avalon.api import AvalonMongoDB
+ from openpype.pipeline import AvalonMongoDB
# Avalon database connection
dbcon = AvalonMongoDB()
diff --git a/openpype/lib/avalon_context.py b/openpype/lib/avalon_context.py
index e82dcc558f..3fcddef745 100644
--- a/openpype/lib/avalon_context.py
+++ b/openpype/lib/avalon_context.py
@@ -20,9 +20,7 @@ from .profiles_filtering import filter_profiles
from .events import emit_event
from .path_templates import StringTemplate
-# avalon module is not imported at the top
-# - may not be in path at the time of pype.lib initialization
-avalon = None
+legacy_io = None
log = logging.getLogger("AvalonContext")
@@ -64,8 +62,8 @@ def create_project(
"""
from openpype.settings import ProjectSettings, SaveWarningExc
- from avalon.api import AvalonMongoDB
- from avalon.schema import validate
+ from openpype.pipeline import AvalonMongoDB
+ from openpype.pipeline.schema import validate
if dbcon is None:
dbcon = AvalonMongoDB()
@@ -120,17 +118,17 @@ def create_project(
return project_doc
-def with_avalon(func):
+def with_pipeline_io(func):
@functools.wraps(func)
- def wrap_avalon(*args, **kwargs):
- global avalon
- if avalon is None:
- import avalon
+ def wrapped(*args, **kwargs):
+ global legacy_io
+ if legacy_io is None:
+ from openpype.pipeline import legacy_io
return func(*args, **kwargs)
- return wrap_avalon
+ return wrapped
-@with_avalon
+@with_pipeline_io
def is_latest(representation):
"""Return whether the representation is from latest version
@@ -142,12 +140,12 @@ def is_latest(representation):
"""
- version = avalon.io.find_one({"_id": representation['parent']})
+ version = legacy_io.find_one({"_id": representation['parent']})
if version["type"] == "hero_version":
return True
# Get highest version under the parent
- highest_version = avalon.io.find_one({
+ highest_version = legacy_io.find_one({
"type": "version",
"parent": version["parent"]
}, sort=[("name", -1)], projection={"name": True})
@@ -158,7 +156,7 @@ def is_latest(representation):
return False
-@with_avalon
+@with_pipeline_io
def any_outdated():
"""Return whether the current scene has any outdated content"""
from openpype.pipeline import registered_host
@@ -170,7 +168,7 @@ def any_outdated():
if representation in checked:
continue
- representation_doc = avalon.io.find_one(
+ representation_doc = legacy_io.find_one(
{
"_id": ObjectId(representation),
"type": "representation"
@@ -189,7 +187,7 @@ def any_outdated():
return False
-@with_avalon
+@with_pipeline_io
def get_asset(asset_name=None):
""" Returning asset document from database by its name.
@@ -202,9 +200,9 @@ def get_asset(asset_name=None):
(MongoDB document)
"""
if not asset_name:
- asset_name = avalon.api.Session["AVALON_ASSET"]
+ asset_name = legacy_io.Session["AVALON_ASSET"]
- asset_document = avalon.io.find_one({
+ asset_document = legacy_io.find_one({
"name": asset_name,
"type": "asset"
})
@@ -215,7 +213,7 @@ def get_asset(asset_name=None):
return asset_document
-@with_avalon
+@with_pipeline_io
def get_hierarchy(asset_name=None):
"""
Obtain asset hierarchy path string from mongo db
@@ -228,12 +226,12 @@ def get_hierarchy(asset_name=None):
"""
if not asset_name:
- asset_name = avalon.io.Session.get(
+ asset_name = legacy_io.Session.get(
"AVALON_ASSET",
os.environ["AVALON_ASSET"]
)
- asset_entity = avalon.io.find_one({
+ asset_entity = legacy_io.find_one({
"type": 'asset',
"name": asset_name
})
@@ -252,13 +250,13 @@ def get_hierarchy(asset_name=None):
parent_id = entity.get("data", {}).get("visualParent")
if not parent_id:
break
- entity = avalon.io.find_one({"_id": parent_id})
+ entity = legacy_io.find_one({"_id": parent_id})
hierarchy_items.append(entity["name"])
# Add parents to entity data for next query
entity_data = asset_entity.get("data", {})
entity_data["parents"] = hierarchy_items
- avalon.io.update_many(
+ legacy_io.update_many(
{"_id": asset_entity["_id"]},
{"$set": {"data": entity_data}}
)
@@ -305,7 +303,7 @@ def get_linked_asset_ids(asset_doc):
return output
-@with_avalon
+@with_pipeline_io
def get_linked_assets(asset_doc):
"""Return linked assets for `asset_doc` from DB
@@ -319,10 +317,10 @@ def get_linked_assets(asset_doc):
if not link_ids:
return []
- return list(avalon.io.find({"_id": {"$in": link_ids}}))
+ return list(legacy_io.find({"_id": {"$in": link_ids}}))
-@with_avalon
+@with_pipeline_io
def get_latest_version(asset_name, subset_name, dbcon=None, project_name=None):
"""Retrieve latest version from `asset_name`, and `subset_name`.
@@ -333,8 +331,7 @@ def get_latest_version(asset_name, subset_name, dbcon=None, project_name=None):
Args:
asset_name (str): Name of asset.
subset_name (str): Name of subset.
- dbcon (avalon.mongodb.AvalonMongoDB, optional): Avalon Mongo connection
- with Session.
+ dbcon (AvalonMongoDB, optional): Avalon Mongo connection with Session.
project_name (str, optional): Find latest version in specific project.
Returns:
@@ -343,13 +340,13 @@ def get_latest_version(asset_name, subset_name, dbcon=None, project_name=None):
"""
if not dbcon:
- log.debug("Using `avalon.io` for query.")
- dbcon = avalon.io
+ log.debug("Using `legacy_io` for query.")
+ dbcon = legacy_io
# Make sure is installed
dbcon.install()
if project_name and project_name != dbcon.Session.get("AVALON_PROJECT"):
- # `avalon.io` has only `_database` attribute
+ # `legacy_io` has only `_database` attribute
# but `AvalonMongoDB` has `database`
database = getattr(dbcon, "database", dbcon._database)
collection = database[project_name]
@@ -429,7 +426,7 @@ def get_workfile_template_key_from_context(
"`get_workfile_template_key_from_context` requires to pass"
" one of 'dbcon' or 'project_name' arguments."
))
- from avalon.api import AvalonMongoDB
+ from openpype.pipeline import AvalonMongoDB
dbcon = AvalonMongoDB()
dbcon.Session["AVALON_PROJECT"] = project_name
@@ -649,6 +646,7 @@ def get_workdir(
)
+@with_pipeline_io
def template_data_from_session(session=None):
""" Return dictionary with template from session keys.
@@ -658,15 +656,15 @@ def template_data_from_session(session=None):
Returns:
dict: All available data from session.
"""
- from avalon import io
- import avalon.api
if session is None:
- session = avalon.api.Session
+ session = legacy_io.Session
project_name = session["AVALON_PROJECT"]
- project_doc = io._database[project_name].find_one({"type": "project"})
- asset_doc = io._database[project_name].find_one({
+ project_doc = legacy_io.database[project_name].find_one({
+ "type": "project"
+ })
+ asset_doc = legacy_io.database[project_name].find_one({
"type": "asset",
"name": session["AVALON_ASSET"]
})
@@ -675,6 +673,7 @@ def template_data_from_session(session=None):
return get_workdir_data(project_doc, asset_doc, task_name, host_name)
+@with_pipeline_io
def compute_session_changes(
session, task=None, asset=None, app=None, template_key=None
):
@@ -713,10 +712,8 @@ def compute_session_changes(
asset = asset["name"]
if not asset_document or not asset_tasks:
- from avalon import io
-
# Assume asset name
- asset_document = io.find_one(
+ asset_document = legacy_io.find_one(
{
"name": asset,
"type": "asset"
@@ -748,11 +745,10 @@ def compute_session_changes(
return changes
+@with_pipeline_io
def get_workdir_from_session(session=None, template_key=None):
- import avalon.api
-
if session is None:
- session = avalon.api.Session
+ session = legacy_io.Session
project_name = session["AVALON_PROJECT"]
host_name = session["AVALON_APP"]
anatomy = Anatomy(project_name)
@@ -769,6 +765,7 @@ def get_workdir_from_session(session=None, template_key=None):
return anatomy_filled[template_key]["folder"]
+@with_pipeline_io
def update_current_task(task=None, asset=None, app=None, template_key=None):
"""Update active Session to a new task work area.
@@ -783,10 +780,8 @@ def update_current_task(task=None, asset=None, app=None, template_key=None):
dict: The changed key, values in the current Session.
"""
- import avalon.api
-
changes = compute_session_changes(
- avalon.api.Session,
+ legacy_io.Session,
task=task,
asset=asset,
app=app,
@@ -796,7 +791,7 @@ def update_current_task(task=None, asset=None, app=None, template_key=None):
# Update the Session and environments. Pop from environments all keys with
# value set to None.
for key, value in changes.items():
- avalon.api.Session[key] = value
+ legacy_io.Session[key] = value
if value is None:
os.environ.pop(key, None)
else:
@@ -808,7 +803,7 @@ def update_current_task(task=None, asset=None, app=None, template_key=None):
return changes
-@with_avalon
+@with_pipeline_io
def get_workfile_doc(asset_id, task_name, filename, dbcon=None):
"""Return workfile document for entered context.
@@ -820,14 +815,14 @@ def get_workfile_doc(asset_id, task_name, filename, dbcon=None):
task_name (str): Name of task under which the workfile belongs.
filename (str): Name of a workfile.
dbcon (AvalonMongoDB): Optionally enter avalon AvalonMongoDB object and
- `avalon.io` is used if not entered.
+ `legacy_io` is used if not entered.
Returns:
dict: Workfile document or None.
"""
- # Use avalon.io if dbcon is not entered
+ # Use legacy_io if dbcon is not entered
if not dbcon:
- dbcon = avalon.io
+ dbcon = legacy_io
return dbcon.find_one({
"type": "workfile",
@@ -837,7 +832,7 @@ def get_workfile_doc(asset_id, task_name, filename, dbcon=None):
})
-@with_avalon
+@with_pipeline_io
def create_workfile_doc(asset_doc, task_name, filename, workdir, dbcon=None):
"""Creates or replace workfile document in mongo.
@@ -850,11 +845,11 @@ def create_workfile_doc(asset_doc, task_name, filename, workdir, dbcon=None):
filename (str): Filename of workfile.
workdir (str): Path to directory where `filename` is located.
dbcon (AvalonMongoDB): Optionally enter avalon AvalonMongoDB object and
- `avalon.io` is used if not entered.
+ `legacy_io` is used if not entered.
"""
- # Use avalon.io if dbcon is not entered
+ # Use legacy_io if dbcon is not entered
if not dbcon:
- dbcon = avalon.io
+ dbcon = legacy_io
# Filter of workfile document
doc_filter = {
@@ -899,7 +894,7 @@ def create_workfile_doc(asset_doc, task_name, filename, workdir, dbcon=None):
)
-@with_avalon
+@with_pipeline_io
def save_workfile_data_to_doc(workfile_doc, data, dbcon=None):
if not workfile_doc:
# TODO add log message
@@ -908,9 +903,9 @@ def save_workfile_data_to_doc(workfile_doc, data, dbcon=None):
if not data:
return
- # Use avalon.io if dbcon is not entered
+ # Use legacy_io if dbcon is not entered
if not dbcon:
- dbcon = avalon.io
+ dbcon = legacy_io
# Convert data to mongo modification keys/values
# - this is naive implementation which does not expect nested
@@ -960,7 +955,7 @@ class BuildWorkfile:
return containers
- @with_avalon
+ @with_pipeline_io
def build_workfile(self):
"""Prepares and load containers into workfile.
@@ -987,8 +982,8 @@ class BuildWorkfile:
from openpype.pipeline import discover_loader_plugins
# Get current asset name and entity
- current_asset_name = avalon.io.Session["AVALON_ASSET"]
- current_asset_entity = avalon.io.find_one({
+ current_asset_name = legacy_io.Session["AVALON_ASSET"]
+ current_asset_entity = legacy_io.find_one({
"type": "asset",
"name": current_asset_name
})
@@ -1016,7 +1011,7 @@ class BuildWorkfile:
return
# Get current task name
- current_task_name = avalon.io.Session["AVALON_TASK"]
+ current_task_name = legacy_io.Session["AVALON_TASK"]
# Load workfile presets for task
self.build_presets = self.get_build_presets(
@@ -1104,7 +1099,7 @@ class BuildWorkfile:
# Return list of loaded containers
return loaded_containers
- @with_avalon
+ @with_pipeline_io
def get_build_presets(self, task_name, asset_doc):
""" Returns presets to build workfile for task name.
@@ -1120,7 +1115,7 @@ class BuildWorkfile:
"""
host_name = os.environ["AVALON_APP"]
project_settings = get_project_settings(
- avalon.io.Session["AVALON_PROJECT"]
+ legacy_io.Session["AVALON_PROJECT"]
)
host_settings = project_settings.get(host_name) or {}
@@ -1370,7 +1365,7 @@ class BuildWorkfile:
"containers": containers
}
- @with_avalon
+ @with_pipeline_io
def _load_containers(
self, repres_by_subset_id, subsets_by_id,
profiles_per_subset_id, loaders_by_name
@@ -1496,7 +1491,7 @@ class BuildWorkfile:
return loaded_containers
- @with_avalon
+ @with_pipeline_io
def _collect_last_version_repres(self, asset_entities):
"""Collect subsets, versions and representations for asset_entities.
@@ -1535,13 +1530,13 @@ class BuildWorkfile:
asset_entity_by_ids = {asset["_id"]: asset for asset in asset_entities}
- subsets = list(avalon.io.find({
+ subsets = list(legacy_io.find({
"type": "subset",
"parent": {"$in": asset_entity_by_ids.keys()}
}))
subset_entity_by_ids = {subset["_id"]: subset for subset in subsets}
- sorted_versions = list(avalon.io.find({
+ sorted_versions = list(legacy_io.find({
"type": "version",
"parent": {"$in": subset_entity_by_ids.keys()}
}).sort("name", -1))
@@ -1555,7 +1550,7 @@ class BuildWorkfile:
subset_id_with_latest_version.append(subset_id)
last_versions_by_id[version["_id"]] = version
- repres = avalon.io.find({
+ repres = legacy_io.find({
"type": "representation",
"parent": {"$in": last_versions_by_id.keys()}
})
@@ -1593,7 +1588,7 @@ class BuildWorkfile:
return output
-@with_avalon
+@with_pipeline_io
def get_creator_by_name(creator_name, case_sensitive=False):
"""Find creator plugin by name.
@@ -1623,7 +1618,7 @@ def get_creator_by_name(creator_name, case_sensitive=False):
return None
-@with_avalon
+@with_pipeline_io
def change_timer_to_current_context():
"""Called after context change to change timers.
@@ -1642,9 +1637,9 @@ def change_timer_to_current_context():
log.warning("Couldn't start timer")
return
data = {
- "project_name": avalon.io.Session["AVALON_PROJECT"],
- "asset_name": avalon.io.Session["AVALON_ASSET"],
- "task_name": avalon.io.Session["AVALON_TASK"]
+ "project_name": legacy_io.Session["AVALON_PROJECT"],
+ "asset_name": legacy_io.Session["AVALON_ASSET"],
+ "task_name": legacy_io.Session["AVALON_TASK"]
}
requests.post(rest_api_url, json=data)
@@ -1794,7 +1789,7 @@ def get_custom_workfile_template_by_string_context(
"""
if dbcon is None:
- from avalon.api import AvalonMongoDB
+ from openpype.pipeline import AvalonMongoDB
dbcon = AvalonMongoDB()
@@ -1828,10 +1823,11 @@ def get_custom_workfile_template_by_string_context(
)
+@with_pipeline_io
def get_custom_workfile_template(template_profiles):
"""Filter and fill workfile template profiles by current context.
- Current context is defined by `avalon.api.Session`. That's why this
+ Current context is defined by `legacy_io.Session`. That's why this
function should be used only inside host where context is set and stable.
Args:
@@ -1841,15 +1837,13 @@ def get_custom_workfile_template(template_profiles):
str: Path to template or None if none of profiles match current
context. (Existence of formatted path is not validated.)
"""
- # Use `avalon.io` as Mongo connection
- from avalon import io
return get_custom_workfile_template_by_string_context(
template_profiles,
- io.Session["AVALON_PROJECT"],
- io.Session["AVALON_ASSET"],
- io.Session["AVALON_TASK"],
- io
+ legacy_io.Session["AVALON_PROJECT"],
+ legacy_io.Session["AVALON_ASSET"],
+ legacy_io.Session["AVALON_TASK"],
+ legacy_io
)
@@ -1973,3 +1967,119 @@ def get_last_workfile(
return os.path.normpath(os.path.join(workdir, filename))
return filename
+
+
+@with_pipeline_io
+def get_linked_ids_for_representations(project_name, repre_ids, dbcon=None,
+ link_type=None, max_depth=0):
+ """Returns list of linked ids of particular type (if provided).
+
+ Goes from representations to version, back to representations
+ Args:
+ project_name (str)
+ repre_ids (list) or (ObjectId)
+ dbcon (AvalonMongoDB, optional): Avalon Mongo connection
+ with Session.
+ link_type (str, optional): Filter links by type, e.g. "reference". All link types are returned if None.
+ max_depth (int): limit how many levels of recursion
+ Returns:
+ (list) of ObjectId - linked representations
+ """
+ # Create new dbcon if not passed and use passed project name
+ if not dbcon:
+ from openpype.pipeline import AvalonMongoDB
+ dbcon = AvalonMongoDB()
+ dbcon.Session["AVALON_PROJECT"] = project_name
+ # Validate that passed dbcon has same project
+ elif dbcon.Session["AVALON_PROJECT"] != project_name:
+ raise ValueError("Passed connection does not have right project")
+
+ if not isinstance(repre_ids, list):
+ repre_ids = [repre_ids]
+
+ version_ids = dbcon.distinct("parent", {
+ "_id": {"$in": repre_ids},
+ "type": "representation"
+ })
+
+ match = {
+ "_id": {"$in": version_ids},
+ "type": "version"
+ }
+
+ graph_lookup = {
+ "from": project_name,
+ "startWith": "$data.inputLinks.id",
+ "connectFromField": "data.inputLinks.id",
+ "connectToField": "_id",
+ "as": "outputs_recursive",
+ "depthField": "depth"
+ }
+ if max_depth != 0:
+ # We offset by -1 since 0 basically means no recursion
+ # but the recursion only happens after the initial lookup
+ # for outputs.
+ graph_lookup["maxDepth"] = max_depth - 1
+
+ pipeline_ = [
+ # Match
+ {"$match": match},
+ # Recursive graph lookup for inputs
+ {"$graphLookup": graph_lookup}
+ ]
+
+ result = dbcon.aggregate(pipeline_)
+ referenced_version_ids = _process_referenced_pipeline_result(result,
+ link_type)
+
+ ref_ids = dbcon.distinct(
+ "_id",
+ filter={
+ "parent": {"$in": list(referenced_version_ids)},
+ "type": "representation"
+ }
+ )
+
+ return list(ref_ids)
+
+
+def _process_referenced_pipeline_result(result, link_type):
+ """Filters result from pipeline for particular link_type.
+
+ Pipeline cannot use link_type directly in a query.
+ Returns:
+ (list)
+ """
+ referenced_version_ids = set()
+ correctly_linked_ids = set()
+ for item in result:
+ input_links = item["data"].get("inputLinks", [])
+ correctly_linked_ids = _filter_input_links(input_links,
+ link_type,
+ correctly_linked_ids)
+
+ # outputs_recursive in random order, sort by depth
+ outputs_recursive = sorted(item.get("outputs_recursive", []),
+ key=lambda d: d["depth"])
+
+ for output in outputs_recursive:
+ if output["_id"] not in correctly_linked_ids: # leaf
+ continue
+
+ correctly_linked_ids = _filter_input_links(
+ output["data"].get("inputLinks", []),
+ link_type,
+ correctly_linked_ids)
+
+ referenced_version_ids.add(output["_id"])
+
+ return referenced_version_ids
+
+
+def _filter_input_links(input_links, link_type, correctly_linked_ids):
+ for input_link in input_links:
+ if not link_type or input_link["type"] == link_type:
+ correctly_linked_ids.add(input_link.get("id") or
+ input_link.get("_id")) # legacy
+
+ return correctly_linked_ids
diff --git a/openpype/lib/log.py b/openpype/lib/log.py
index f33385e0ba..2cdb7ec8e4 100644
--- a/openpype/lib/log.py
+++ b/openpype/lib/log.py
@@ -216,8 +216,8 @@ class PypeLogger:
# Collection name under database in Mongo
log_collection_name = "logs"
- # OPENPYPE_DEBUG
- pype_debug = 0
+ # Logging level - OPENPYPE_LOG_LEVEL
+ log_level = None
# Data same for all record documents
process_data = None
@@ -231,10 +231,7 @@ class PypeLogger:
logger = logging.getLogger(name or "__main__")
- if cls.pype_debug > 0:
- logger.setLevel(logging.DEBUG)
- else:
- logger.setLevel(logging.INFO)
+ logger.setLevel(cls.log_level)
add_mongo_handler = cls.use_mongo_logging
add_console_handler = True
@@ -333,6 +330,9 @@ class PypeLogger:
# Define if should logging to mongo be used
use_mongo_logging = bool(log4mongo is not None)
+ if use_mongo_logging:
+ use_mongo_logging = os.environ.get("OPENPYPE_LOG_TO_SERVER") == "1"
+
# Set mongo id for process (ONLY ONCE)
if use_mongo_logging and cls.mongo_process_id is None:
try:
@@ -357,8 +357,16 @@ class PypeLogger:
# Store result to class definition
cls.use_mongo_logging = use_mongo_logging
- # Define if is in OPENPYPE_DEBUG mode
- cls.pype_debug = int(os.getenv("OPENPYPE_DEBUG") or "0")
+ # Define what is logging level
+ log_level = os.getenv("OPENPYPE_LOG_LEVEL")
+ if not log_level:
+ # Check OPENPYPE_DEBUG for backwards compatibility
+ op_debug = os.getenv("OPENPYPE_DEBUG")
+ if op_debug and int(op_debug) > 0:
+ log_level = 10
+ else:
+ log_level = 20
+ cls.log_level = int(log_level)
if not os.environ.get("OPENPYPE_MONGO"):
cls.use_mongo_logging = False
diff --git a/openpype/lib/plugin_tools.py b/openpype/lib/plugin_tools.py
index f11ba56865..bcbf06a0e8 100644
--- a/openpype/lib/plugin_tools.py
+++ b/openpype/lib/plugin_tools.py
@@ -72,9 +72,9 @@ def get_subset_name_with_asset_doc(
family = family.rsplit(".", 1)[-1]
if project_name is None:
- import avalon.api
+ from openpype.pipeline import legacy_io
- project_name = avalon.api.Session["AVALON_PROJECT"]
+ project_name = legacy_io.Session["AVALON_PROJECT"]
asset_tasks = asset_doc.get("data", {}).get("tasks") or {}
task_info = asset_tasks.get(task_name) or {}
@@ -136,7 +136,7 @@ def get_subset_name(
`get_subset_name_with_asset_doc` where asset document is expected.
"""
if dbcon is None:
- from avalon.api import AvalonMongoDB
+ from openpype.pipeline import AvalonMongoDB
dbcon = AvalonMongoDB()
dbcon.Session["AVALON_PROJECT"] = project_name
diff --git a/openpype/lib/project_backpack.py b/openpype/lib/project_backpack.py
index 11fd0c0c3e..396479c725 100644
--- a/openpype/lib/project_backpack.py
+++ b/openpype/lib/project_backpack.py
@@ -25,7 +25,7 @@ from bson.json_util import (
CANONICAL_JSON_OPTIONS
)
-from avalon.api import AvalonMongoDB
+from openpype.pipeline import AvalonMongoDB
DOCUMENTS_FILE_NAME = "database"
METADATA_FILE_NAME = "metadata"
diff --git a/openpype/lib/transcoding.py b/openpype/lib/transcoding.py
index c2fecf6628..fcec5d4216 100644
--- a/openpype/lib/transcoding.py
+++ b/openpype/lib/transcoding.py
@@ -382,6 +382,11 @@ def should_convert_for_ffmpeg(src_filepath):
return False
+# Deprecated since 2022 4 20
+# - Reason - Doesn't convert sequences right way: Can't handle gaps, reuse
+# first frame for all frames and changes filenames when input
+# is sequence.
+# - use 'convert_input_paths_for_ffmpeg' instead
def convert_for_ffmpeg(
first_input_path,
output_dir,
@@ -409,6 +414,12 @@ def convert_for_ffmpeg(
if logger is None:
logger = logging.getLogger(__name__)
+ logger.warning((
+ "DEPRECATED: 'openpype.lib.transcoding.convert_for_ffmpeg' is"
+ " deprecated function of conversion for FFMpeg. Please replace usage"
+ " with 'openpype.lib.transcoding.convert_input_paths_for_ffmpeg'"
+ ))
+
ext = os.path.splitext(first_input_path)[1].lower()
if ext != ".exr":
raise ValueError((
@@ -516,6 +527,130 @@ def convert_for_ffmpeg(
run_subprocess(oiio_cmd, logger=logger)
+def convert_input_paths_for_ffmpeg(
+ input_paths,
+ output_dir,
+ logger=None
+):
+ """Convert source files to a format supported by ffmpeg.
+
+ Currently can convert only exrs. The input filepaths should be files
+ with same type. Information about input is loaded only from first found
+ file.
+
+ Filenames of input files are kept, so make sure that the output
+ directory is not the same as the input files' directory.
+ - This way it can handle gaps and can keep input filenames without handling
+ frame template
+
+ Args:
+ input_paths (list[str]): Paths that should be converted. It is expected
+ that it contains a single file or an image sequence of the same type.
+ output_dir (str): Path to directory where output will be rendered.
+ Must not be same as input's directory.
+ logger (logging.Logger): Logger used for logging.
+
+ Raises:
+ ValueError: If input filepath has extension not supported by function.
+ Currently is supported only ".exr" extension.
+ """
+ if logger is None:
+ logger = logging.getLogger(__name__)
+
+ first_input_path = input_paths[0]
+ ext = os.path.splitext(first_input_path)[1].lower()
+ if ext != ".exr":
+ raise ValueError((
+ "Function 'convert_input_paths_for_ffmpeg' currently supports"
+ " only \".exr\" extension. Got \"{}\"."
+ ).format(ext))
+
+ input_info = get_oiio_info_for_input(first_input_path)
+
+ # Change compression only if source compression is "dwaa" or "dwab"
+ # - they're not supported in ffmpeg
+ compression = input_info["attribs"].get("compression")
+ if compression in ("dwaa", "dwab"):
+ compression = "none"
+
+ # Collect channels to export
+ channel_names = input_info["channelnames"]
+ review_channels = get_convert_rgb_channels(channel_names)
+ if review_channels is None:
+ raise ValueError(
+ "Couldn't find channels that can be used for conversion."
+ )
+
+ red, green, blue, alpha = review_channels
+ input_channels = [red, green, blue]
+ channels_arg = "R={},G={},B={}".format(red, green, blue)
+ if alpha is not None:
+ channels_arg += ",A={}".format(alpha)
+ input_channels.append(alpha)
+ input_channels_str = ",".join(input_channels)
+
+ for input_path in input_paths:
+ # Prepare subprocess arguments
+ oiio_cmd = [
+ get_oiio_tools_path(),
+
+ # Don't add any additional attributes
+ "--nosoftwareattrib",
+ ]
+ # Add input compression if available
+ if compression:
+ oiio_cmd.extend(["--compression", compression])
+
+ oiio_cmd.extend([
+ # Tell oiiotool which channels should be loaded
+ # - other channels are not loaded to memory so helps to
+ # avoid memory leak issues
+ "-i:ch={}".format(input_channels_str), input_path,
+ # Tell oiiotool which channels should be put to top stack
+ # (and output)
+ "--ch", channels_arg
+ ])
+
+ for attr_name, attr_value in input_info["attribs"].items():
+ if not isinstance(attr_value, str):
+ continue
+
+ # Remove attributes that have string value longer than allowed
+ # length for ffmpeg or when they contain disallowed symbols
+ erase_reason = "Missing reason"
+ erase_attribute = False
+ if len(attr_value) > MAX_FFMPEG_STRING_LEN:
+ erase_attribute = True
+ erase_reason = "has too long value ({} chars).".format(
+ len(attr_value))
+
+ if not erase_attribute:
+ for char in NOT_ALLOWED_FFMPEG_CHARS:
+ if char in attr_value:
+ erase_attribute = True
+ erase_reason = (
+ "contains unsupported character \"{}\"."
+ ).format(char)
+ break
+
+ if erase_attribute:
+ # Set attribute to empty string
+ logger.info((
+ "Removed attribute \"{}\" from metadata because {}."
+ ).format(attr_name, erase_reason))
+ oiio_cmd.extend(["--eraseattrib", attr_name])
+
+ # Add last argument - path to output
+ base_filename = os.path.basename(input_path)
+ output_path = os.path.join(output_dir, base_filename)
+ oiio_cmd.extend([
+ "-o", output_path
+ ])
+
+ logger.debug("Conversion command: {}".format(" ".join(oiio_cmd)))
+ run_subprocess(oiio_cmd, logger=logger)
+
+
# FFMPEG functions
def get_ffprobe_data(path_to_file, logger=None):
"""Load data about entered filepath via ffprobe.
diff --git a/openpype/lib/usdlib.py b/openpype/lib/usdlib.py
index 7b3b7112de..86de19b4be 100644
--- a/openpype/lib/usdlib.py
+++ b/openpype/lib/usdlib.py
@@ -8,8 +8,10 @@ except ImportError:
# Allow to fall back on Multiverse 6.3.0+ pxr usd library
from mvpxr import Usd, UsdGeom, Sdf, Kind
-from avalon import io, api
-from openpype.pipeline import registered_root
+from openpype.pipeline import (
+ registered_root,
+ legacy_io,
+)
log = logging.getLogger(__name__)
@@ -126,7 +128,7 @@ def create_model(filename, asset, variant_subsets):
"""
- asset_doc = io.find_one({"name": asset, "type": "asset"})
+ asset_doc = legacy_io.find_one({"name": asset, "type": "asset"})
assert asset_doc, "Asset not found: %s" % asset
variants = []
@@ -176,7 +178,7 @@ def create_shade(filename, asset, variant_subsets):
"""
- asset_doc = io.find_one({"name": asset, "type": "asset"})
+ asset_doc = legacy_io.find_one({"name": asset, "type": "asset"})
assert asset_doc, "Asset not found: %s" % asset
variants = []
@@ -211,7 +213,7 @@ def create_shade_variation(filename, asset, model_variant, shade_variants):
"""
- asset_doc = io.find_one({"name": asset, "type": "asset"})
+ asset_doc = legacy_io.find_one({"name": asset, "type": "asset"})
assert asset_doc, "Asset not found: %s" % asset
variants = []
@@ -311,7 +313,7 @@ def get_usd_master_path(asset, subset, representation):
"""
- project = io.find_one(
+ project = legacy_io.find_one(
{"type": "project"}, projection={"config.template.publish": True}
)
template = project["config"]["template"]["publish"]
@@ -320,12 +322,12 @@ def get_usd_master_path(asset, subset, representation):
# Allow explicitly passing asset document
asset_doc = asset
else:
- asset_doc = io.find_one({"name": asset, "type": "asset"})
+ asset_doc = legacy_io.find_one({"name": asset, "type": "asset"})
path = template.format(
**{
"root": registered_root(),
- "project": api.Session["AVALON_PROJECT"],
+ "project": legacy_io.Session["AVALON_PROJECT"],
"asset": asset_doc["name"],
"subset": subset,
"representation": representation,
diff --git a/openpype/modules/avalon_apps/rest_api.py b/openpype/modules/avalon_apps/rest_api.py
index 533050fc0c..b35f5bf357 100644
--- a/openpype/modules/avalon_apps/rest_api.py
+++ b/openpype/modules/avalon_apps/rest_api.py
@@ -1,4 +1,3 @@
-import os
import json
import datetime
@@ -6,7 +5,7 @@ from bson.objectid import ObjectId
from aiohttp.web_response import Response
-from avalon.api import AvalonMongoDB
+from openpype.pipeline import AvalonMongoDB
from openpype_modules.webserver.base_routes import RestApiEndpoint
diff --git a/openpype/modules/clockify/launcher_actions/ClockifyStart.py b/openpype/modules/clockify/launcher_actions/ClockifyStart.py
index 6428d5e7aa..4669f98b01 100644
--- a/openpype/modules/clockify/launcher_actions/ClockifyStart.py
+++ b/openpype/modules/clockify/launcher_actions/ClockifyStart.py
@@ -1,7 +1,8 @@
-from avalon import io
-
from openpype.api import Logger
-from openpype.pipeline import LauncherAction
+from openpype.pipeline import (
+ legacy_io,
+ LauncherAction,
+)
from openpype_modules.clockify.clockify_api import ClockifyAPI
@@ -28,7 +29,7 @@ class ClockifyStart(LauncherAction):
task_name = session['AVALON_TASK']
description = asset_name
- asset = io.find_one({
+ asset = legacy_io.find_one({
'type': 'asset',
'name': asset_name
})
diff --git a/openpype/modules/clockify/launcher_actions/ClockifySync.py b/openpype/modules/clockify/launcher_actions/ClockifySync.py
index 3c81e2766c..356bbd0306 100644
--- a/openpype/modules/clockify/launcher_actions/ClockifySync.py
+++ b/openpype/modules/clockify/launcher_actions/ClockifySync.py
@@ -1,8 +1,9 @@
-from avalon import io
-
from openpype_modules.clockify.clockify_api import ClockifyAPI
from openpype.api import Logger
-from openpype.pipeline import LauncherAction
+from openpype.pipeline import (
+ legacy_io,
+ LauncherAction,
+)
log = Logger.get_logger(__name__)
@@ -25,10 +26,10 @@ class ClockifySync(LauncherAction):
projects_to_sync = []
if project_name.strip() == '' or project_name is None:
- for project in io.projects():
+ for project in legacy_io.projects():
projects_to_sync.append(project)
else:
- project = io.find_one({'type': 'project'})
+ project = legacy_io.find_one({'type': 'project'})
projects_to_sync.append(project)
projects_info = {}
diff --git a/openpype/modules/deadline/plugins/publish/submit_aftereffects_deadline.py b/openpype/modules/deadline/plugins/publish/submit_aftereffects_deadline.py
index 1295d40654..b6584f239e 100644
--- a/openpype/modules/deadline/plugins/publish/submit_aftereffects_deadline.py
+++ b/openpype/modules/deadline/plugins/publish/submit_aftereffects_deadline.py
@@ -3,10 +3,9 @@ import attr
import getpass
import pyblish.api
-from avalon import api
-
from openpype.lib import env_value_to_bool
from openpype.lib.delivery import collect_frames
+from openpype.pipeline import legacy_io
from openpype_modules.deadline import abstract_submit_deadline
from openpype_modules.deadline.abstract_submit_deadline import DeadlineJobInfo
@@ -87,7 +86,7 @@ class AfterEffectsSubmitDeadline(
keys.append("OPENPYPE_MONGO")
environment = dict({key: os.environ[key] for key in keys
- if key in os.environ}, **api.Session)
+ if key in os.environ}, **legacy_io.Session)
for key in keys:
val = environment.get(key)
if val:
diff --git a/openpype/modules/deadline/plugins/publish/submit_harmony_deadline.py b/openpype/modules/deadline/plugins/publish/submit_harmony_deadline.py
index e320b6df4b..912f0f4026 100644
--- a/openpype/modules/deadline/plugins/publish/submit_harmony_deadline.py
+++ b/openpype/modules/deadline/plugins/publish/submit_harmony_deadline.py
@@ -8,8 +8,8 @@ import re
import attr
import pyblish.api
-from avalon import api
+from openpype.pipeline import legacy_io
from openpype_modules.deadline import abstract_submit_deadline
from openpype_modules.deadline.abstract_submit_deadline import DeadlineJobInfo
@@ -280,7 +280,7 @@ class HarmonySubmitDeadline(
keys.append("OPENPYPE_MONGO")
environment = dict({key: os.environ[key] for key in keys
- if key in os.environ}, **api.Session)
+ if key in os.environ}, **legacy_io.Session)
for key in keys:
val = environment.get(key)
if val:
diff --git a/openpype/modules/deadline/plugins/publish/submit_houdini_remote_publish.py b/openpype/modules/deadline/plugins/publish/submit_houdini_remote_publish.py
index c683eb68a8..f834ae7e92 100644
--- a/openpype/modules/deadline/plugins/publish/submit_houdini_remote_publish.py
+++ b/openpype/modules/deadline/plugins/publish/submit_houdini_remote_publish.py
@@ -4,10 +4,10 @@ import json
import requests
import hou
-from avalon import api, io
-
import pyblish.api
+from openpype.pipeline import legacy_io
+
class HoudiniSubmitPublishDeadline(pyblish.api.ContextPlugin):
"""Submit Houdini scene to perform a local publish in Deadline.
@@ -35,7 +35,7 @@ class HoudiniSubmitPublishDeadline(pyblish.api.ContextPlugin):
), "Errors found, aborting integration.."
# Deadline connection
- AVALON_DEADLINE = api.Session.get(
+ AVALON_DEADLINE = legacy_io.Session.get(
"AVALON_DEADLINE", "http://localhost:8082"
)
assert AVALON_DEADLINE, "Requires AVALON_DEADLINE"
@@ -55,7 +55,7 @@ class HoudiniSubmitPublishDeadline(pyblish.api.ContextPlugin):
scenename = os.path.basename(scene)
# Get project code
- project = io.find_one({"type": "project"})
+ project = legacy_io.find_one({"type": "project"})
code = project["data"].get("code", project["name"])
job_name = "{scene} [PUBLISH]".format(scene=scenename)
@@ -137,7 +137,7 @@ class HoudiniSubmitPublishDeadline(pyblish.api.ContextPlugin):
environment = dict(
{key: os.environ[key] for key in keys if key in os.environ},
- **api.Session
+ **legacy_io.Session
)
environment["PYBLISH_ACTIVE_INSTANCES"] = ",".join(instances)
diff --git a/openpype/modules/deadline/plugins/publish/submit_houdini_render_deadline.py b/openpype/modules/deadline/plugins/publish/submit_houdini_render_deadline.py
index 82ff723e84..aca88c7440 100644
--- a/openpype/modules/deadline/plugins/publish/submit_houdini_render_deadline.py
+++ b/openpype/modules/deadline/plugins/publish/submit_houdini_render_deadline.py
@@ -3,12 +3,12 @@ import json
import getpass
import requests
-from avalon import api
-
import pyblish.api
# import hou ???
+from openpype.pipeline import legacy_io
+
class HoudiniSubmitRenderDeadline(pyblish.api.InstancePlugin):
"""Submit Solaris USD Render ROPs to Deadline.
@@ -107,7 +107,7 @@ class HoudiniSubmitRenderDeadline(pyblish.api.InstancePlugin):
keys.append("OPENPYPE_MONGO")
environment = dict({key: os.environ[key] for key in keys
- if key in os.environ}, **api.Session)
+ if key in os.environ}, **legacy_io.Session)
payload["JobInfo"].update({
"EnvironmentKeyValue%d" % index: "{key}={value}".format(
@@ -141,7 +141,7 @@ class HoudiniSubmitRenderDeadline(pyblish.api.InstancePlugin):
def submit(self, instance, payload):
- AVALON_DEADLINE = api.Session.get("AVALON_DEADLINE",
+ AVALON_DEADLINE = legacy_io.Session.get("AVALON_DEADLINE",
"http://localhost:8082")
assert AVALON_DEADLINE, "Requires AVALON_DEADLINE"
diff --git a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py
index 02e89edd1e..819e8fb585 100644
--- a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py
+++ b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py
@@ -32,11 +32,11 @@ import requests
from maya import cmds
-from avalon import api
import pyblish.api
from openpype.lib import requests_post
from openpype.hosts.maya.api import lib
+from openpype.pipeline import legacy_io
# Documentation for keys available at:
# https://docs.thinkboxsoftware.com
@@ -489,7 +489,7 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin):
keys.append("OPENPYPE_MONGO")
environment = dict({key: os.environ[key] for key in keys
- if key in os.environ}, **api.Session)
+ if key in os.environ}, **legacy_io.Session)
environment["OPENPYPE_LOG_NO_COLORS"] = "1"
environment["OPENPYPE_MAYA_VERSION"] = cmds.about(v=True)
# to recognize job from PYPE for turning Event On/Off
diff --git a/openpype/modules/deadline/plugins/publish/submit_nuke_deadline.py b/openpype/modules/deadline/plugins/publish/submit_nuke_deadline.py
index 2980193254..94c703d66d 100644
--- a/openpype/modules/deadline/plugins/publish/submit_nuke_deadline.py
+++ b/openpype/modules/deadline/plugins/publish/submit_nuke_deadline.py
@@ -4,10 +4,10 @@ import json
import getpass
import requests
-
-from avalon import api
import pyblish.api
+
import nuke
+from openpype.pipeline import legacy_io
class NukeSubmitDeadline(pyblish.api.InstancePlugin):
@@ -242,7 +242,6 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin):
keys = [
"PYTHONPATH",
"PATH",
- "AVALON_SCHEMA",
"AVALON_PROJECT",
"AVALON_ASSET",
"AVALON_TASK",
@@ -264,7 +263,7 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin):
keys += self.env_allowed_keys
environment = dict({key: os.environ[key] for key in keys
- if key in os.environ}, **api.Session)
+ if key in os.environ}, **legacy_io.Session)
for _path in os.environ:
if _path.lower().startswith('openpype_'):
diff --git a/openpype/modules/deadline/plugins/publish/submit_publish_job.py b/openpype/modules/deadline/plugins/publish/submit_publish_job.py
index 4f781de62d..306237c78c 100644
--- a/openpype/modules/deadline/plugins/publish/submit_publish_job.py
+++ b/openpype/modules/deadline/plugins/publish/submit_publish_job.py
@@ -7,14 +7,15 @@ import re
from copy import copy, deepcopy
import requests
import clique
-import openpype.api
-from openpype.pipeline.farm.patterning import match_aov_pattern
-
-from avalon import api, io
import pyblish.api
-from openpype.pipeline import get_representation_path
+import openpype.api
+from openpype.pipeline import (
+ get_representation_path,
+ legacy_io,
+)
+from openpype.pipeline.farm.patterning import match_aov_pattern
def get_resources(version, extension=None):
@@ -23,7 +24,7 @@ def get_resources(version, extension=None):
if extension:
query["name"] = extension
- representation = io.find_one(query)
+ representation = legacy_io.find_one(query)
assert representation, "This is a bug"
directory = get_representation_path(representation)
@@ -222,9 +223,9 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
self._create_metadata_path(instance)
environment = job["Props"].get("Env", {})
- environment["AVALON_PROJECT"] = io.Session["AVALON_PROJECT"]
- environment["AVALON_ASSET"] = io.Session["AVALON_ASSET"]
- environment["AVALON_TASK"] = io.Session["AVALON_TASK"]
+ environment["AVALON_PROJECT"] = legacy_io.Session["AVALON_PROJECT"]
+ environment["AVALON_ASSET"] = legacy_io.Session["AVALON_ASSET"]
+ environment["AVALON_TASK"] = legacy_io.Session["AVALON_TASK"]
environment["AVALON_APP_NAME"] = os.environ.get("AVALON_APP_NAME")
environment["OPENPYPE_LOG_NO_COLORS"] = "1"
environment["OPENPYPE_USERNAME"] = instance.context.data["user"]
@@ -284,6 +285,9 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
else:
payload["JobInfo"]["JobDependency0"] = job["_id"]
+ if instance.data.get("suspend_publish"):
+ payload["JobInfo"]["InitialStatus"] = "Suspended"
+
index = 0
for key in environment:
if key.upper() in self.enviro_filter:
@@ -666,7 +670,7 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
if hasattr(instance, "_log"):
data['_log'] = instance._log
- asset = data.get("asset") or api.Session["AVALON_ASSET"]
+ asset = data.get("asset") or legacy_io.Session["AVALON_ASSET"]
subset = data.get("subset")
start = instance.data.get("frameStart")
@@ -958,7 +962,7 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
"intent": context.data.get("intent"),
"comment": context.data.get("comment"),
"job": render_job or None,
- "session": api.Session.copy(),
+ "session": legacy_io.Session.copy(),
"instances": instances
}
@@ -1066,7 +1070,7 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
else:
# solve deprecated situation when `folder` key is not underneath
# `publish` anatomy
- project_name = api.Session["AVALON_PROJECT"]
+ project_name = legacy_io.Session["AVALON_PROJECT"]
self.log.warning((
"Deprecation warning: Anatomy does not have set `folder`"
" key underneath `publish` (in global of for project `{}`)."
diff --git a/openpype/modules/ftrack/event_handlers_server/action_prepare_project.py b/openpype/modules/ftrack/event_handlers_server/action_prepare_project.py
index 2e55be2743..975e49cb28 100644
--- a/openpype/modules/ftrack/event_handlers_server/action_prepare_project.py
+++ b/openpype/modules/ftrack/event_handlers_server/action_prepare_project.py
@@ -1,8 +1,8 @@
import json
-from avalon.api import AvalonMongoDB
from openpype.api import ProjectSettings
from openpype.lib import create_project
+from openpype.pipeline import AvalonMongoDB
from openpype.settings import SaveWarningExc
from openpype_modules.ftrack.lib import (
diff --git a/openpype/modules/ftrack/event_handlers_server/event_sync_links.py b/openpype/modules/ftrack/event_handlers_server/event_sync_links.py
index 9610e7f5de..ae70c6756f 100644
--- a/openpype/modules/ftrack/event_handlers_server/event_sync_links.py
+++ b/openpype/modules/ftrack/event_handlers_server/event_sync_links.py
@@ -1,7 +1,7 @@
from pymongo import UpdateOne
from bson.objectid import ObjectId
-from avalon.api import AvalonMongoDB
+from openpype.pipeline import AvalonMongoDB
from openpype_modules.ftrack.lib import (
CUST_ATTR_ID_KEY,
diff --git a/openpype/modules/ftrack/event_handlers_server/event_sync_to_avalon.py b/openpype/modules/ftrack/event_handlers_server/event_sync_to_avalon.py
index 46c333c4c4..b5f199b3e4 100644
--- a/openpype/modules/ftrack/event_handlers_server/event_sync_to_avalon.py
+++ b/openpype/modules/ftrack/event_handlers_server/event_sync_to_avalon.py
@@ -12,8 +12,7 @@ from pymongo import UpdateOne
import arrow
import ftrack_api
-from avalon import schema
-from avalon.api import AvalonMongoDB
+from openpype.pipeline import AvalonMongoDB, schema
from openpype_modules.ftrack.lib import (
get_openpype_attr,
diff --git a/openpype/modules/ftrack/event_handlers_server/event_user_assigment.py b/openpype/modules/ftrack/event_handlers_server/event_user_assigment.py
index 96243c8c36..593fc5e596 100644
--- a/openpype/modules/ftrack/event_handlers_server/event_user_assigment.py
+++ b/openpype/modules/ftrack/event_handlers_server/event_user_assigment.py
@@ -1,10 +1,9 @@
-import os
import re
import subprocess
from openpype_modules.ftrack.lib import BaseEvent
from openpype_modules.ftrack.lib.avalon_sync import CUST_ATTR_ID_KEY
-from avalon.api import AvalonMongoDB
+from openpype.pipeline import AvalonMongoDB
from bson.objectid import ObjectId
diff --git a/openpype/modules/ftrack/event_handlers_user/action_applications.py b/openpype/modules/ftrack/event_handlers_user/action_applications.py
index 48a0dea006..b25bc1b5cb 100644
--- a/openpype/modules/ftrack/event_handlers_user/action_applications.py
+++ b/openpype/modules/ftrack/event_handlers_user/action_applications.py
@@ -1,5 +1,4 @@
import os
-from uuid import uuid4
from openpype_modules.ftrack.lib import BaseAction
from openpype.lib.applications import (
@@ -8,7 +7,7 @@ from openpype.lib.applications import (
ApplictionExecutableNotFound,
CUSTOM_LAUNCH_APP_GROUPS
)
-from avalon.api import AvalonMongoDB
+from openpype.pipeline import AvalonMongoDB
class AppplicationsAction(BaseAction):
diff --git a/openpype/modules/ftrack/event_handlers_user/action_create_folders.py b/openpype/modules/ftrack/event_handlers_user/action_create_folders.py
index 0ed12bd03e..81f38e0c39 100644
--- a/openpype/modules/ftrack/event_handlers_user/action_create_folders.py
+++ b/openpype/modules/ftrack/event_handlers_user/action_create_folders.py
@@ -1,6 +1,8 @@
import os
-from openpype_modules.ftrack.lib import BaseAction, statics_icon
+import collections
+import copy
from openpype.api import Anatomy
+from openpype_modules.ftrack.lib import BaseAction, statics_icon
class CreateFolders(BaseAction):
@@ -9,55 +11,59 @@ class CreateFolders(BaseAction):
icon = statics_icon("ftrack", "action_icons", "CreateFolders.svg")
def discover(self, session, entities, event):
- if len(entities) != 1:
- return False
-
- not_allowed = ["assetversion", "project"]
- if entities[0].entity_type.lower() in not_allowed:
- return False
-
- return True
+ for entity_item in event["data"]["selection"]:
+ if entity_item.get("entityType").lower() in ("task", "show"):
+ return True
+ return False
def interface(self, session, entities, event):
if event["data"].get("values", {}):
return
- entity = entities[0]
- without_interface = True
- for child in entity["children"]:
- if child["object_type"]["name"].lower() != "task":
- without_interface = False
+
+ with_interface = False
+ for entity in entities:
+ if entity.entity_type.lower() != "task":
+ with_interface = True
break
- self.without_interface = without_interface
- if without_interface:
+
+ if "values" not in event["data"]:
+ event["data"]["values"] = {}
+
+ event["data"]["values"]["with_interface"] = with_interface
+ if not with_interface:
return
+
title = "Create folders"
entity_name = entity["name"]
msg = (
"Do you want create folders also"
- " for all children of \"{}\"?
"
+ " for all children of your selection?"
)
if entity.entity_type.lower() == "project":
entity_name = entity["full_name"]
msg = msg.replace(" also", "")
msg += "(Project root won't be created if not checked)
"
- items = []
- item_msg = {
- "type": "label",
- "value": msg.format(entity_name)
- }
- item_label = {
- "type": "label",
- "value": "With all chilren entities"
- }
- item = {
- "name": "children_included",
- "type": "boolean",
- "value": False
- }
- items.append(item_msg)
- items.append(item_label)
- items.append(item)
+ items = [
+ {
+ "type": "label",
+ "value": msg.format(entity_name)
+ },
+ {
+ "type": "label",
+ "value": "With all chilren entities"
+ },
+ {
+ "name": "children_included",
+ "type": "boolean",
+ "value": False
+ },
+ {
+ "type": "hidden",
+ "name": "with_interface",
+ "value": with_interface
+ }
+ ]
return {
"items": items,
@@ -66,30 +72,47 @@ class CreateFolders(BaseAction):
def launch(self, session, entities, event):
'''Callback method for custom action.'''
+
+ if "values" not in event["data"]:
+ return
+
+ with_interface = event["data"]["values"]["with_interface"]
with_childrens = True
- if self.without_interface is False:
- if "values" not in event["data"]:
- return
+ if with_interface:
with_childrens = event["data"]["values"]["children_included"]
- entity = entities[0]
- if entity.entity_type.lower() == "project":
- proj = entity
- else:
- proj = entity["project"]
- project_name = proj["full_name"]
- project_code = proj["name"]
+ filtered_entities = []
+ for entity in entities:
+ low_context_type = entity["context_type"].lower()
+ if low_context_type in ("task", "show"):
+ if not with_childrens and low_context_type == "show":
+ continue
+ filtered_entities.append(entity)
- if entity.entity_type.lower() == 'project' and with_childrens is False:
+ if not filtered_entities:
return {
- 'success': True,
- 'message': 'Nothing was created'
+ "success": True,
+ "message": 'Nothing was created'
}
- all_entities = []
- all_entities.append(entity)
- if with_childrens:
- all_entities = self.get_notask_children(entity)
+ project_entity = self.get_project_from_entity(filtered_entities[0])
+
+ project_name = project_entity["full_name"]
+ project_code = project_entity["name"]
+
+ task_entities = []
+ other_entities = []
+ self.get_all_entities(
+ session, entities, task_entities, other_entities
+ )
+ hierarchy = self.get_entities_hierarchy(
+ session, task_entities, other_entities
+ )
+ task_types = session.query("select id, name from Type").all()
+ task_type_names_by_id = {
+ task_type["id"]: task_type["name"]
+ for task_type in task_types
+ }
anatomy = Anatomy(project_name)
@@ -97,77 +120,67 @@ class CreateFolders(BaseAction):
work_template = anatomy.templates
for key in work_keys:
work_template = work_template[key]
- work_has_apps = "{app" in work_template
publish_keys = ["publish", "folder"]
publish_template = anatomy.templates
for key in publish_keys:
publish_template = publish_template[key]
- publish_has_apps = "{app" in publish_template
+
+ project_data = {
+ "project": {
+ "name": project_name,
+ "code": project_code
+ }
+ }
collected_paths = []
- for entity in all_entities:
- if entity.entity_type.lower() == "project":
- continue
- ent_data = {
- "project": {
- "name": project_name,
- "code": project_code
- }
- }
+ for item in hierarchy:
+ parent_entity, task_entities = item
- ent_data["asset"] = entity["name"]
+ parent_data = copy.deepcopy(project_data)
- parents = entity["link"][1:-1]
+ parents = parent_entity["link"][1:-1]
hierarchy_names = [p["name"] for p in parents]
- hierarchy = ""
+ hierarchy = "/".join(hierarchy_names)
+
if hierarchy_names:
- hierarchy = os.path.sep.join(hierarchy_names)
- ent_data["hierarchy"] = hierarchy
+ parent_name = hierarchy_names[-1]
+ else:
+ parent_name = project_name
- tasks_created = False
- for child in entity["children"]:
- if child["object_type"]["name"].lower() != "task":
- continue
- tasks_created = True
- task_data = ent_data.copy()
- task_data["task"] = child["name"]
+ parent_data.update({
+ "asset": parent_entity["name"],
+ "hierarchy": hierarchy,
+ "parent": parent_name
+ })
- apps = []
-
- # Template wok
- if work_has_apps:
- app_data = task_data.copy()
- for app in apps:
- app_data["app"] = app
- collected_paths.append(self.compute_template(
- anatomy, app_data, work_keys
- ))
- else:
- collected_paths.append(self.compute_template(
- anatomy, task_data, work_keys
- ))
-
- # Template publish
- if publish_has_apps:
- app_data = task_data.copy()
- for app in apps:
- app_data["app"] = app
- collected_paths.append(self.compute_template(
- anatomy, app_data, publish_keys
- ))
- else:
- collected_paths.append(self.compute_template(
- anatomy, task_data, publish_keys
- ))
-
- if not tasks_created:
+ if not task_entities:
# create path for entity
collected_paths.append(self.compute_template(
- anatomy, ent_data, work_keys
+ anatomy, parent_data, work_keys
))
collected_paths.append(self.compute_template(
- anatomy, ent_data, publish_keys
+ anatomy, parent_data, publish_keys
+ ))
+ continue
+
+ for task_entity in task_entities:
+ task_type_id = task_entity["type_id"]
+ task_type_name = task_type_names_by_id[task_type_id]
+ task_data = copy.deepcopy(parent_data)
+ task_data["task"] = {
+ "name": task_entity["name"],
+ "type": task_type_name
+ }
+
+ # Template work
+ collected_paths.append(self.compute_template(
+ anatomy, task_data, work_keys
+ ))
+
+ # Template publish
+ collected_paths.append(self.compute_template(
+ anatomy, task_data, publish_keys
))
if len(collected_paths) == 0:
@@ -188,14 +201,65 @@ class CreateFolders(BaseAction):
"message": "Successfully created project folders."
}
- def get_notask_children(self, entity):
+ def get_all_entities(
+ self, session, entities, task_entities, other_entities
+ ):
+ if not entities:
+ return
+
+ no_task_entities = []
+ for entity in entities:
+ if entity.entity_type.lower() == "task":
+ task_entities.append(entity)
+ else:
+ no_task_entities.append(entity)
+
+ if not no_task_entities:
+ return task_entities
+
+ other_entities.extend(no_task_entities)
+
+ no_task_entity_ids = [entity["id"] for entity in no_task_entities]
+ next_entities = session.query((
+ "select id, parent_id"
+ " from TypedContext where parent_id in ({})"
+ ).format(self.join_query_keys(no_task_entity_ids))).all()
+
+ self.get_all_entities(
+ session, next_entities, task_entities, other_entities
+ )
+
+ def get_entities_hierarchy(self, session, task_entities, other_entities):
+ task_entity_ids = [entity["id"] for entity in task_entities]
+ full_task_entities = session.query((
+ "select id, name, type_id, parent_id"
+ " from TypedContext where id in ({})"
+ ).format(self.join_query_keys(task_entity_ids)))
+ task_entities_by_parent_id = collections.defaultdict(list)
+ for entity in full_task_entities:
+ parent_id = entity["parent_id"]
+ task_entities_by_parent_id[parent_id].append(entity)
+
output = []
- if entity.entity_type.lower() == "task":
+ if not task_entities_by_parent_id:
return output
- output.append(entity)
- for child in entity["children"]:
- output.extend(self.get_notask_children(child))
+ other_ids = set()
+ for entity in other_entities:
+ other_ids.add(entity["id"])
+ other_ids |= set(task_entities_by_parent_id.keys())
+
+ parent_entities = session.query((
+ "select id, name from TypedContext where id in ({})"
+ ).format(self.join_query_keys(other_ids))).all()
+
+ for parent_entity in parent_entities:
+ parent_id = parent_entity["id"]
+ output.append((
+ parent_entity,
+ task_entities_by_parent_id[parent_id]
+ ))
+
return output
def compute_template(self, anatomy, data, anatomy_keys):
diff --git a/openpype/modules/ftrack/event_handlers_user/action_create_project_structure.py b/openpype/modules/ftrack/event_handlers_user/action_create_project_structure.py
index 94f359c317..ebea8872f9 100644
--- a/openpype/modules/ftrack/event_handlers_user/action_create_project_structure.py
+++ b/openpype/modules/ftrack/event_handlers_user/action_create_project_structure.py
@@ -1,6 +1,4 @@
-import os
import re
-import json
from openpype_modules.ftrack.lib import BaseAction, statics_icon
from openpype.api import get_project_basic_paths, create_project_folders
diff --git a/openpype/modules/ftrack/event_handlers_user/action_delete_asset.py b/openpype/modules/ftrack/event_handlers_user/action_delete_asset.py
index 94385a36c5..ee5c3d0d97 100644
--- a/openpype/modules/ftrack/event_handlers_user/action_delete_asset.py
+++ b/openpype/modules/ftrack/event_handlers_user/action_delete_asset.py
@@ -3,7 +3,8 @@ import uuid
from datetime import datetime
from bson.objectid import ObjectId
-from avalon.api import AvalonMongoDB
+
+from openpype.pipeline import AvalonMongoDB
from openpype_modules.ftrack.lib import BaseAction, statics_icon
from openpype_modules.ftrack.lib.avalon_sync import create_chunks
diff --git a/openpype/modules/ftrack/event_handlers_user/action_delete_old_versions.py b/openpype/modules/ftrack/event_handlers_user/action_delete_old_versions.py
index 5871646b20..f5addde8ae 100644
--- a/openpype/modules/ftrack/event_handlers_user/action_delete_old_versions.py
+++ b/openpype/modules/ftrack/event_handlers_user/action_delete_old_versions.py
@@ -5,10 +5,10 @@ import uuid
import clique
from pymongo import UpdateOne
-from avalon.api import AvalonMongoDB
from openpype.api import Anatomy
from openpype.lib import StringTemplate, TemplateUnsolved
+from openpype.pipeline import AvalonMongoDB
from openpype_modules.ftrack.lib import BaseAction, statics_icon
diff --git a/openpype/modules/ftrack/event_handlers_user/action_delivery.py b/openpype/modules/ftrack/event_handlers_user/action_delivery.py
index 1f28b18900..9ef2a1668e 100644
--- a/openpype/modules/ftrack/event_handlers_user/action_delivery.py
+++ b/openpype/modules/ftrack/event_handlers_user/action_delivery.py
@@ -15,7 +15,7 @@ from openpype.lib.delivery import (
process_single_file,
process_sequence
)
-from avalon.api import AvalonMongoDB
+from openpype.pipeline import AvalonMongoDB
class Delivery(BaseAction):
diff --git a/openpype/modules/ftrack/event_handlers_user/action_fill_workfile_attr.py b/openpype/modules/ftrack/event_handlers_user/action_fill_workfile_attr.py
index 3888379e04..c7237a1150 100644
--- a/openpype/modules/ftrack/event_handlers_user/action_fill_workfile_attr.py
+++ b/openpype/modules/ftrack/event_handlers_user/action_fill_workfile_attr.py
@@ -7,7 +7,6 @@ import datetime
import ftrack_api
-from avalon.api import AvalonMongoDB
from openpype.api import get_project_settings
from openpype.lib import (
get_workfile_template_key,
@@ -15,6 +14,7 @@ from openpype.lib import (
Anatomy,
StringTemplate,
)
+from openpype.pipeline import AvalonMongoDB
from openpype_modules.ftrack.lib import BaseAction, statics_icon
from openpype_modules.ftrack.lib.avalon_sync import create_chunks
diff --git a/openpype/modules/ftrack/event_handlers_user/action_prepare_project.py b/openpype/modules/ftrack/event_handlers_user/action_prepare_project.py
index 3759bc81ac..0b14e7aa2b 100644
--- a/openpype/modules/ftrack/event_handlers_user/action_prepare_project.py
+++ b/openpype/modules/ftrack/event_handlers_user/action_prepare_project.py
@@ -1,8 +1,8 @@
import json
-from avalon.api import AvalonMongoDB
from openpype.api import ProjectSettings
from openpype.lib import create_project
+from openpype.pipeline import AvalonMongoDB
from openpype.settings import SaveWarningExc
from openpype_modules.ftrack.lib import (
diff --git a/openpype/modules/ftrack/event_handlers_user/action_rv.py b/openpype/modules/ftrack/event_handlers_user/action_rv.py
index bdb0eaf250..040ca75582 100644
--- a/openpype/modules/ftrack/event_handlers_user/action_rv.py
+++ b/openpype/modules/ftrack/event_handlers_user/action_rv.py
@@ -4,8 +4,11 @@ import traceback
import json
import ftrack_api
-from avalon import io, api
-from openpype.pipeline import get_representation_path
+
+from openpype.pipeline import (
+ get_representation_path,
+ legacy_io,
+)
from openpype_modules.ftrack.lib import BaseAction, statics_icon
@@ -253,8 +256,8 @@ class RVAction(BaseAction):
)["version"]["asset"]["parent"]["link"][0]
project = session.get(link["type"], link["id"])
os.environ["AVALON_PROJECT"] = project["name"]
- api.Session["AVALON_PROJECT"] = project["name"]
- io.install()
+ legacy_io.Session["AVALON_PROJECT"] = project["name"]
+ legacy_io.install()
location = ftrack_api.Session().pick_location()
@@ -278,22 +281,22 @@ class RVAction(BaseAction):
if online_source:
continue
- asset = io.find_one({"type": "asset", "name": parent_name})
- subset = io.find_one(
+ asset = legacy_io.find_one({"type": "asset", "name": parent_name})
+ subset = legacy_io.find_one(
{
"type": "subset",
"name": component["version"]["asset"]["name"],
"parent": asset["_id"]
}
)
- version = io.find_one(
+ version = legacy_io.find_one(
{
"type": "version",
"name": component["version"]["version"],
"parent": subset["_id"]
}
)
- representation = io.find_one(
+ representation = legacy_io.find_one(
{
"type": "representation",
"parent": version["_id"],
@@ -301,7 +304,7 @@ class RVAction(BaseAction):
}
)
if representation is None:
- representation = io.find_one(
+ representation = legacy_io.find_one(
{
"type": "representation",
"parent": version["_id"],
diff --git a/openpype/modules/ftrack/event_handlers_user/action_store_thumbnails_to_avalon.py b/openpype/modules/ftrack/event_handlers_user/action_store_thumbnails_to_avalon.py
index 4820925844..62fdfa2bdd 100644
--- a/openpype/modules/ftrack/event_handlers_user/action_store_thumbnails_to_avalon.py
+++ b/openpype/modules/ftrack/event_handlers_user/action_store_thumbnails_to_avalon.py
@@ -4,9 +4,10 @@ import json
import requests
from bson.objectid import ObjectId
+
from openpype_modules.ftrack.lib import BaseAction, statics_icon
from openpype.api import Anatomy
-from avalon.api import AvalonMongoDB
+from openpype.pipeline import AvalonMongoDB
from openpype_modules.ftrack.lib.avalon_sync import CUST_ATTR_ID_KEY
diff --git a/openpype/modules/ftrack/ftrack_server/lib.py b/openpype/modules/ftrack/ftrack_server/lib.py
index f8319b67d4..5c6d6352d2 100644
--- a/openpype/modules/ftrack/ftrack_server/lib.py
+++ b/openpype/modules/ftrack/ftrack_server/lib.py
@@ -31,10 +31,13 @@ TOPIC_STATUS_SERVER = "openpype.event.server.status"
TOPIC_STATUS_SERVER_RESULT = "openpype.event.server.status.result"
-def check_ftrack_url(url, log_errors=True):
+def check_ftrack_url(url, log_errors=True, logger=None):
"""Checks if Ftrack server is responding"""
+ if logger is None:
+ logger = Logger.get_logger(__name__)
+
if not url:
- print('ERROR: Ftrack URL is not set!')
+ logger.error("Ftrack URL is not set!")
return None
url = url.strip('/ ')
@@ -48,15 +51,15 @@ def check_ftrack_url(url, log_errors=True):
result = requests.get(url, allow_redirects=False)
except requests.exceptions.RequestException:
if log_errors:
- print('ERROR: Entered Ftrack URL is not accesible!')
+ logger.error("Entered Ftrack URL is not accessible!")
return False
if (result.status_code != 200 or 'FTRACK_VERSION' not in result.headers):
if log_errors:
- print('ERROR: Entered Ftrack URL is not accesible!')
+ logger.error("Entered Ftrack URL is not accessible!")
return False
- print('DEBUG: Ftrack server {} is accessible.'.format(url))
+ logger.debug("Ftrack server {} is accessible.".format(url))
return url
@@ -133,7 +136,7 @@ class ProcessEventHub(SocketBaseEventHub):
hearbeat_msg = b"processor"
is_collection_created = False
- pypelog = Logger().get_logger("Session Processor")
+ pypelog = Logger.get_logger("Session Processor")
def __init__(self, *args, **kwargs):
self.mongo_url = None
@@ -192,7 +195,7 @@ class ProcessEventHub(SocketBaseEventHub):
except pymongo.errors.AutoReconnect:
self.pypelog.error((
"Mongo server \"{}\" is not responding, exiting."
- ).format(os.environ["AVALON_MONGO"]))
+ ).format(os.environ["OPENPYPE_MONGO"]))
sys.exit(0)
# Additional special processing of events.
if event['topic'] == 'ftrack.meta.disconnected':
diff --git a/openpype/modules/ftrack/lib/avalon_sync.py b/openpype/modules/ftrack/lib/avalon_sync.py
index c5b58ca94d..124787e467 100644
--- a/openpype/modules/ftrack/lib/avalon_sync.py
+++ b/openpype/modules/ftrack/lib/avalon_sync.py
@@ -6,16 +6,12 @@ import numbers
import six
-from avalon.api import AvalonMongoDB
-
-import avalon
-
from openpype.api import (
Logger,
- Anatomy,
get_anatomy_settings
)
from openpype.lib import ApplicationManager
+from openpype.pipeline import AvalonMongoDB, schema
from .constants import CUST_ATTR_ID_KEY, FPS_KEYS
from .custom_attributes import get_openpype_attr, query_custom_attributes
@@ -175,7 +171,7 @@ def check_regex(name, entity_type, in_schema=None, schema_patterns=None):
if not name_pattern:
default_pattern = "^[a-zA-Z0-9_.]*$"
- schema_obj = avalon.schema._cache.get(schema_name + ".json")
+ schema_obj = schema._cache.get(schema_name + ".json")
if not schema_obj:
name_pattern = default_pattern
else:
diff --git a/openpype/modules/ftrack/plugins/publish/collect_ftrack_api.py b/openpype/modules/ftrack/plugins/publish/collect_ftrack_api.py
index 436a61cc18..14da188150 100644
--- a/openpype/modules/ftrack/plugins/publish/collect_ftrack_api.py
+++ b/openpype/modules/ftrack/plugins/publish/collect_ftrack_api.py
@@ -1,6 +1,7 @@
import logging
import pyblish.api
-import avalon.api
+
+from openpype.pipeline import legacy_io
class CollectFtrackApi(pyblish.api.ContextPlugin):
@@ -23,9 +24,9 @@ class CollectFtrackApi(pyblish.api.ContextPlugin):
self.log.debug("Ftrack user: \"{0}\"".format(session.api_user))
# Collect task
- project_name = avalon.api.Session["AVALON_PROJECT"]
- asset_name = avalon.api.Session["AVALON_ASSET"]
- task_name = avalon.api.Session["AVALON_TASK"]
+ project_name = legacy_io.Session["AVALON_PROJECT"]
+ asset_name = legacy_io.Session["AVALON_ASSET"]
+ task_name = legacy_io.Session["AVALON_TASK"]
# Find project entity
project_query = 'Project where full_name is "{0}"'.format(project_name)
diff --git a/openpype/modules/ftrack/plugins/publish/collect_ftrack_family.py b/openpype/modules/ftrack/plugins/publish/collect_ftrack_family.py
index 158135c952..5758068f86 100644
--- a/openpype/modules/ftrack/plugins/publish/collect_ftrack_family.py
+++ b/openpype/modules/ftrack/plugins/publish/collect_ftrack_family.py
@@ -6,8 +6,8 @@ Provides:
instance -> families ([])
"""
import pyblish.api
-import avalon.api
+from openpype.pipeline import legacy_io
from openpype.lib.plugin_tools import filter_profiles
@@ -36,8 +36,8 @@ class CollectFtrackFamily(pyblish.api.InstancePlugin):
add_ftrack_family = False
task_name = instance.data.get("task",
- avalon.api.Session["AVALON_TASK"])
- host_name = avalon.api.Session["AVALON_APP"]
+ legacy_io.Session["AVALON_TASK"])
+ host_name = legacy_io.Session["AVALON_APP"]
family = instance.data["family"]
filtering_criteria = {
diff --git a/openpype/modules/ftrack/plugins/publish/integrate_hierarchy_ftrack.py b/openpype/modules/ftrack/plugins/publish/integrate_hierarchy_ftrack.py
index 61892240d7..cf90c11b65 100644
--- a/openpype/modules/ftrack/plugins/publish/integrate_hierarchy_ftrack.py
+++ b/openpype/modules/ftrack/plugins/publish/integrate_hierarchy_ftrack.py
@@ -2,7 +2,8 @@ import sys
import collections
import six
import pyblish.api
-from avalon import io
+
+from openpype.pipeline import legacy_io
# Copy of constant `openpype_modules.ftrack.lib.avalon_sync.CUST_ATTR_AUTO_SYNC`
CUST_ATTR_AUTO_SYNC = "avalon_auto_sync"
@@ -80,8 +81,8 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin):
auto_sync_state = project[
"custom_attributes"][CUST_ATTR_AUTO_SYNC]
- if not io.Session:
- io.install()
+ if not legacy_io.Session:
+ legacy_io.install()
self.ft_project = None
@@ -271,7 +272,7 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin):
# Create new links.
for input in entity_data.get("inputs", []):
- input_id = io.find_one({"_id": input})["data"]["ftrackId"]
+ input_id = legacy_io.find_one({"_id": input})["data"]["ftrackId"]
assetbuild = self.session.get("AssetBuild", input_id)
self.log.debug(
"Creating link from {0} to {1}".format(
diff --git a/openpype/modules/ftrack/scripts/sub_event_storer.py b/openpype/modules/ftrack/scripts/sub_event_storer.py
index 5543ed74e2..946ecbff79 100644
--- a/openpype/modules/ftrack/scripts/sub_event_storer.py
+++ b/openpype/modules/ftrack/scripts/sub_event_storer.py
@@ -67,7 +67,7 @@ def launch(event):
except pymongo.errors.AutoReconnect:
log.error("Mongo server \"{}\" is not responding, exiting.".format(
- os.environ["AVALON_MONGO"]
+ os.environ["OPENPYPE_MONGO"]
))
sys.exit(0)
diff --git a/openpype/modules/royalrender/plugins/publish/collect_sequences_from_job.py b/openpype/modules/royalrender/plugins/publish/collect_sequences_from_job.py
index 4d216c1c0a..65af90e8a6 100644
--- a/openpype/modules/royalrender/plugins/publish/collect_sequences_from_job.py
+++ b/openpype/modules/royalrender/plugins/publish/collect_sequences_from_job.py
@@ -7,7 +7,8 @@ import json
from pprint import pformat
import pyblish.api
-from avalon import api
+
+from openpype.pipeline import legacy_io
def collect(root,
@@ -127,7 +128,7 @@ class CollectSequencesFromJob(pyblish.api.ContextPlugin):
session = metadata.get("session")
if session:
self.log.info("setting session using metadata")
- api.Session.update(session)
+ legacy_io.Session.update(session)
os.environ.update(session)
else:
@@ -187,7 +188,9 @@ class CollectSequencesFromJob(pyblish.api.ContextPlugin):
"family": families[0], # backwards compatibility / pyblish
"families": list(families),
"subset": subset,
- "asset": data.get("asset", api.Session["AVALON_ASSET"]),
+ "asset": data.get(
+ "asset", legacy_io.Session["AVALON_ASSET"]
+ ),
"stagingDir": root,
"frameStart": start,
"frameEnd": end,
diff --git a/openpype/modules/royalrender/rr_root/plugins/control_job/perjob/m50__openpype_publish_render.py b/openpype/modules/royalrender/rr_root/plugins/control_job/perjob/m50__openpype_publish_render.py
index 82a79daf3b..cdc37588cd 100644
--- a/openpype/modules/royalrender/rr_root/plugins/control_job/perjob/m50__openpype_publish_render.py
+++ b/openpype/modules/royalrender/rr_root/plugins/control_job/perjob/m50__openpype_publish_render.py
@@ -119,7 +119,7 @@ class OpenPypeContextSelector:
# app names and versions, but since app_name is not used
# currently down the line (but it is required by OP publish command
# right now).
- self.context["app_name"] = "maya/2020"
+ # self.context["app_name"] = "maya/2022"
return True
@staticmethod
@@ -139,7 +139,8 @@ class OpenPypeContextSelector:
env = {"AVALON_PROJECT": str(self.context.get("project")),
"AVALON_ASSET": str(self.context.get("asset")),
"AVALON_TASK": str(self.context.get("task")),
- "AVALON_APP_NAME": str(self.context.get("app_name"))}
+ # "AVALON_APP_NAME": str(self.context.get("app_name"))
+ }
print(">>> setting environment:")
for k, v in env.items():
@@ -184,7 +185,7 @@ selector = OpenPypeContextSelector()
selector.context["project"] = os.getenv("AVALON_PROJECT")
selector.context["asset"] = os.getenv("AVALON_ASSET")
selector.context["task"] = os.getenv("AVALON_TASK")
-selector.context["app_name"] = os.getenv("AVALON_APP_NAME")
+# selector.context["app_name"] = os.getenv("AVALON_APP_NAME")
# if anything inside is None, scratch the whole thing and
# ask user for context.
diff --git a/openpype/modules/slack/plugins/publish/collect_slack_family.py b/openpype/modules/slack/plugins/publish/collect_slack_family.py
index 7475bdc89e..39b05937dc 100644
--- a/openpype/modules/slack/plugins/publish/collect_slack_family.py
+++ b/openpype/modules/slack/plugins/publish/collect_slack_family.py
@@ -1,7 +1,7 @@
-from avalon import io
import pyblish.api
from openpype.lib.profiles_filtering import filter_profiles
+from openpype.pipeline import legacy_io
class CollectSlackFamilies(pyblish.api.InstancePlugin):
@@ -18,7 +18,7 @@ class CollectSlackFamilies(pyblish.api.InstancePlugin):
profiles = None
def process(self, instance):
- task_name = io.Session.get("AVALON_TASK")
+ task_name = legacy_io.Session.get("AVALON_TASK")
family = self.main_family_from_instance(instance)
key_values = {
"families": family,
diff --git a/openpype/modules/sync_server/sync_server_module.py b/openpype/modules/sync_server/sync_server_module.py
index 3744a21b43..45ff8bc4d1 100644
--- a/openpype/modules/sync_server/sync_server_module.py
+++ b/openpype/modules/sync_server/sync_server_module.py
@@ -6,7 +6,6 @@ import platform
import copy
from collections import deque, defaultdict
-from avalon.api import AvalonMongoDB
from openpype.modules import OpenPypeModule
from openpype_interfaces import ITrayModule
@@ -14,16 +13,19 @@ from openpype.api import (
Anatomy,
get_project_settings,
get_system_settings,
- get_local_site_id)
+ get_local_site_id
+)
from openpype.lib import PypeLogger
+from openpype.pipeline import AvalonMongoDB
from openpype.settings.lib import (
get_default_anatomy_settings,
- get_anatomy_settings)
+ get_anatomy_settings
+)
from .providers.local_drive import LocalDriveHandler
from .providers import lib
-from .utils import time_function, SyncStatus
+from .utils import time_function, SyncStatus, SiteAlreadyPresentError
log = PypeLogger().get_logger("SyncServer")
@@ -131,21 +133,25 @@ class SyncServerModule(OpenPypeModule, ITrayModule):
def add_site(self, collection, representation_id, site_name=None,
force=False):
"""
- Adds new site to representation to be synced.
+ Adds new site to representation to be synced.
- 'collection' must have synchronization enabled (globally or
- project only)
+ 'collection' must have synchronization enabled (globally or
+ project only)
- Used as a API endpoint from outside applications (Loader etc)
+ Used as an API endpoint from outside applications (Loader etc).
- Args:
- collection (string): project name (must match DB)
- representation_id (string): MongoDB _id value
- site_name (string): name of configured and active site
- force (bool): reset site if exists
+ Use 'force' to reset existing site.
- Returns:
- throws ValueError if any issue
+ Args:
+ collection (string): project name (must match DB)
+ representation_id (string): MongoDB _id value
+ site_name (string): name of configured and active site
+ force (bool): reset site if exists
+
+ Throws:
+ SiteAlreadyPresentError - if adding already existing site and
+ not 'force'
+ ValueError - other errors (repre not found, misconfiguration)
"""
if not self.get_sync_project_setting(collection):
raise ValueError("Project not configured")
@@ -155,7 +161,8 @@ class SyncServerModule(OpenPypeModule, ITrayModule):
self.reset_site_on_representation(collection,
representation_id,
- site_name=site_name, force=force)
+ site_name=site_name,
+ force=force)
def remove_site(self, collection, representation_id, site_name,
remove_local_files=False):
@@ -351,36 +358,38 @@ class SyncServerModule(OpenPypeModule, ITrayModule):
def create_validate_project_task(self, collection, site_name):
"""Adds metadata about project files validation on a queue.
- This process will loop through all representation and check if
- their files actually exist on an active site.
+ This process will loop through all representations and check if
+ their files actually exist on an active site.
- This might be useful for edge cases when artists is switching
- between sites, remote site is actually physically mounted and
- active site has same file urls etc.
+ It also checks if site is set in DB, but file is physically not
+ present
- Task will run on a asyncio loop, shouldn't be blocking.
+ This might be useful for edge cases when an artist is switching
+ between sites, remote site is actually physically mounted and
+ active site has same file urls etc.
+
+ Task will run on an asyncio loop, shouldn't be blocking.
"""
task = {
"type": "validate",
"project_name": collection,
- "func": lambda: self.validate_project(collection, site_name)
+ "func": lambda: self.validate_project(collection, site_name,
+ reset_missing=True)
}
self.projects_processed.add(collection)
self.long_running_tasks.append(task)
- def validate_project(self, collection, site_name, remove_missing=False):
- """
- Validate 'collection' of 'site_name' and its local files
+ def validate_project(self, collection, site_name, reset_missing=False):
+ """Validate 'collection' of 'site_name' and its local files
- If file present and not marked with a 'site_name' in DB, DB is
- updated with site name and file modified date.
+ If file present and not marked with a 'site_name' in DB, DB is
+ updated with site name and file modified date.
- Args:
- module (SyncServerModule)
- collection (string): project name
- site_name (string): active site name
- remove_missing (bool): if True remove sites in DB if missing
- physically
+ Args:
+ collection (string): project name
+ site_name (string): active site name
+ reset_missing (bool): if True reset site in DB if missing
+ physically
"""
self.log.debug("Validation of {} for {} started".format(collection,
site_name))
@@ -395,29 +404,32 @@ class SyncServerModule(OpenPypeModule, ITrayModule):
return
sites_added = 0
- sites_removed = 0
+ sites_reset = 0
for repre in representations:
repre_id = repre["_id"]
for repre_file in repre.get("files", []):
try:
- has_site = site_name in [site["name"]
- for site in repre_file["sites"]]
- except TypeError:
+ is_on_site = site_name in [site["name"]
+ for site in repre_file["sites"]
+ if (site.get("created_dt") and
+ not site.get("error"))]
+ except (TypeError, AttributeError):
self.log.debug("Structure error in {}".format(repre_id))
continue
- if has_site and not remove_missing:
- continue
-
file_path = repre_file.get("path", "")
local_file_path = self.get_local_file_path(collection,
site_name,
file_path)
- if local_file_path and os.path.exists(local_file_path):
- self.log.debug("Adding site {} for {}".format(site_name,
- repre_id))
- if not has_site:
+ file_exists = (local_file_path and
+ os.path.exists(local_file_path))
+ if not is_on_site:
+ if file_exists:
+ self.log.debug(
+ "Adding site {} for {}".format(site_name,
+ repre_id))
+
query = {
"_id": repre_id
}
@@ -425,27 +437,27 @@ class SyncServerModule(OpenPypeModule, ITrayModule):
os.path.getmtime(local_file_path))
elem = {"name": site_name,
"created_dt": created_dt}
- self._add_site(collection, query, [repre], elem,
+ self._add_site(collection, query, repre, elem,
site_name=site_name,
- file_id=repre_file["_id"])
+ file_id=repre_file["_id"],
+ force=True)
sites_added += 1
else:
- if has_site and remove_missing:
- self.log.debug("Removing site {} for {}".
+ if not file_exists and reset_missing:
+ self.log.debug("Resetting site {} for {}".
format(site_name, repre_id))
- self.reset_provider_for_file(collection,
- repre_id,
- file_id=repre_file["_id"],
- remove=True)
- sites_removed += 1
+ self.reset_site_on_representation(
+ collection, repre_id, site_name=site_name,
+ file_id=repre_file["_id"])
+ sites_reset += 1
if sites_added % 100 == 0:
self.log.debug("Sites added {}".format(sites_added))
self.log.debug("Validation of {} for {} ended".format(collection,
site_name))
- self.log.info("Sites added {}, sites removed {}".format(sites_added,
- sites_removed))
+ self.log.info("Sites added {}, sites reset {}".format(sites_added,
+ sites_reset))
def pause_representation(self, collection, representation_id, site_name):
"""
@@ -963,7 +975,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule):
self.log.debug("Adding alternate {} to {}".format(
alt_site, representation["_id"]))
self._add_site(collection, query,
- [representation], elem,
+ representation, elem,
alt_site, file_id=file_id, force=True)
""" End of Public API """
@@ -1567,14 +1579,16 @@ class SyncServerModule(OpenPypeModule, ITrayModule):
pause (bool or None): if True - pause, False - unpause
force (bool): hard reset - currently only for add_site
- Returns:
- throws ValueError
+ Raises:
+ SiteAlreadyPresentError - if adding already existing site and
+ not 'force'
+ ValueError - other errors (repre not found, misconfiguration)
"""
query = {
"_id": ObjectId(representation_id)
}
- representation = list(self.connection.database[collection].find(query))
+ representation = self.connection.database[collection].find_one(query)
if not representation:
raise ValueError("Representation {} not found in {}".
format(representation_id, collection))
@@ -1605,7 +1619,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule):
representation, site_name, pause)
else: # add new site to all files for representation
self._add_site(collection, query, representation, elem, site_name,
- force)
+ force=force)
def _update_site(self, collection, query, update, arr_filter):
"""
@@ -1660,7 +1674,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule):
Throws ValueError if 'site_name' not found on 'representation'
"""
found = False
- for repre_file in representation.pop().get("files"):
+ for repre_file in representation.get("files"):
for site in repre_file.get("sites"):
if site.get("name") == site_name:
found = True
@@ -1686,7 +1700,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule):
"""
found = False
site = None
- for repre_file in representation.pop().get("files"):
+ for repre_file in representation.get("files"):
for site in repre_file.get("sites"):
if site["name"] == site_name:
found = True
@@ -1718,29 +1732,34 @@ class SyncServerModule(OpenPypeModule, ITrayModule):
Adds 'site_name' to 'representation' on 'collection'
Args:
- representation (list of 1 dict)
+ representation (dict)
file_id (ObjectId)
Use 'force' to remove existing or raises ValueError
"""
- reseted_existing = False
- for repre_file in representation.pop().get("files"):
+ reset_existing = False
+ files = representation.get("files", [])
+ if not files:
+ log.debug("No files for {}".format(representation["_id"]))
+ return
+
+ for repre_file in files:
if file_id and file_id != repre_file["_id"]:
continue
for site in repre_file.get("sites"):
if site["name"] == site_name:
- if force:
+ if force or site.get("error"):
self._reset_site_for_file(collection, query,
elem, repre_file["_id"],
site_name)
- reseted_existing = True
+ reset_existing = True
else:
msg = "Site {} already present".format(site_name)
log.info(msg)
- raise ValueError(msg)
+ raise SiteAlreadyPresentError(msg)
- if reseted_existing:
+ if reset_existing:
return
if not file_id:
@@ -1904,7 +1923,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule):
(int) - number of failed attempts
"""
_, rec = self._get_site_rec(file.get("sites", []), provider)
- return rec.get("tries", 0)
+ return self._get_tries_count_from_rec(rec)
def _get_progress_dict(self, progress):
"""
diff --git a/openpype/modules/sync_server/utils.py b/openpype/modules/sync_server/utils.py
index 85e4e03f77..03f362202f 100644
--- a/openpype/modules/sync_server/utils.py
+++ b/openpype/modules/sync_server/utils.py
@@ -8,6 +8,11 @@ class ResumableError(Exception):
pass
+class SiteAlreadyPresentError(Exception):
+ """Representation has already site skeleton present."""
+ pass
+
+
class SyncStatus:
DO_NOTHING = 0
DO_UPLOAD = 1
diff --git a/openpype/modules/timers_manager/timers_manager.py b/openpype/modules/timers_manager/timers_manager.py
index 47d020104b..3f77a2b7dc 100644
--- a/openpype/modules/timers_manager/timers_manager.py
+++ b/openpype/modules/timers_manager/timers_manager.py
@@ -1,13 +1,14 @@
import os
import platform
-from avalon.api import AvalonMongoDB
from openpype.modules import OpenPypeModule
from openpype_interfaces import (
ITrayService,
ILaunchHookPaths
)
+from openpype.pipeline import AvalonMongoDB
+
from .exceptions import InvalidContextError
diff --git a/openpype/pipeline/__init__.py b/openpype/pipeline/__init__.py
index 308be6da64..2e441fbf27 100644
--- a/openpype/pipeline/__init__.py
+++ b/openpype/pipeline/__init__.py
@@ -3,12 +3,15 @@ from .constants import (
HOST_WORKFILE_EXTENSIONS,
)
+from .mongodb import (
+ AvalonMongoDB,
+)
+
from .create import (
BaseCreator,
Creator,
AutoCreator,
CreatedInstance,
-
CreatorError,
LegacyCreator,
@@ -90,13 +93,15 @@ __all__ = (
"AVALON_CONTAINER_ID",
"HOST_WORKFILE_EXTENSIONS",
- "attribute_definitions",
+ # --- MongoDB ---
+ "AvalonMongoDB",
# --- Create ---
"BaseCreator",
"Creator",
"AutoCreator",
"CreatedInstance",
+ "attribute_definitions",
"CreatorError",
diff --git a/openpype/pipeline/context_tools.py b/openpype/pipeline/context_tools.py
index 1bef260ec9..06bd639776 100644
--- a/openpype/pipeline/context_tools.py
+++ b/openpype/pipeline/context_tools.py
@@ -11,8 +11,6 @@ import platform
import pyblish.api
from pyblish.lib import MessageHandler
-from avalon import io, Session
-
import openpype
from openpype.modules import load_modules
from openpype.settings import get_project_settings
@@ -24,6 +22,7 @@ from openpype.lib import (
)
from . import (
+ legacy_io,
register_loader_plugin_path,
register_inventory_action,
register_creator_plugin_path,
@@ -57,7 +56,7 @@ def registered_root():
if root:
return root
- root = Session.get("AVALON_PROJECTS")
+ root = legacy_io.Session.get("AVALON_PROJECTS")
if root:
return os.path.normpath(root)
return ""
@@ -74,20 +73,20 @@ def install_host(host):
_is_installed = True
- io.install()
+ legacy_io.install()
missing = list()
for key in ("AVALON_PROJECT", "AVALON_ASSET"):
- if key not in Session:
+ if key not in legacy_io.Session:
missing.append(key)
assert not missing, (
"%s missing from environment, %s" % (
", ".join(missing),
- json.dumps(Session, indent=4, sort_keys=True)
+ json.dumps(legacy_io.Session, indent=4, sort_keys=True)
))
- project_name = Session["AVALON_PROJECT"]
+ project_name = legacy_io.Session["AVALON_PROJECT"]
log.info("Activating %s.." % project_name)
# Optional host install function
@@ -170,7 +169,7 @@ def uninstall_host():
deregister_host()
- io.uninstall()
+ legacy_io.uninstall()
log.info("Successfully uninstalled Avalon!")
diff --git a/openpype/pipeline/create/context.py b/openpype/pipeline/create/context.py
index 0cc2819172..6f862e0588 100644
--- a/openpype/pipeline/create/context.py
+++ b/openpype/pipeline/create/context.py
@@ -6,8 +6,13 @@ import inspect
from uuid import uuid4
from contextlib import contextmanager
+from openpype.pipeline import legacy_io
+from openpype.pipeline.mongodb import (
+ AvalonMongoDB,
+ session_data_from_environment,
+)
+
from .creator_plugins import (
- BaseCreator,
Creator,
AutoCreator,
discover_creator_plugins,
@@ -659,10 +664,8 @@ class CreateContext:
):
# Create conncetion if is not passed
if dbcon is None:
- import avalon.api
-
- session = avalon.api.session_data_from_environment(True)
- dbcon = avalon.api.AvalonMongoDB(session)
+ session = session_data_from_environment(True)
+ dbcon = AvalonMongoDB(session)
dbcon.install()
self.dbcon = dbcon
@@ -770,12 +773,11 @@ class CreateContext:
"""Give ability to reset avalon context.
Reset is based on optional host implementation of `get_current_context`
- function or using `avalon.api.Session`.
+ function or using `legacy_io.Session`.
Some hosts have ability to change context file without using workfiles
tool but that change is not propagated to
"""
- import avalon.api
project_name = asset_name = task_name = None
if hasattr(self.host, "get_current_context"):
@@ -786,11 +788,11 @@ class CreateContext:
task_name = host_context.get("task_name")
if not project_name:
- project_name = avalon.api.Session.get("AVALON_PROJECT")
+ project_name = legacy_io.Session.get("AVALON_PROJECT")
if not asset_name:
- asset_name = avalon.api.Session.get("AVALON_ASSET")
+ asset_name = legacy_io.Session.get("AVALON_ASSET")
if not task_name:
- task_name = avalon.api.Session.get("AVALON_TASK")
+ task_name = legacy_io.Session.get("AVALON_TASK")
if project_name:
self.dbcon.Session["AVALON_PROJECT"] = project_name
@@ -805,7 +807,6 @@ class CreateContext:
Reloads creators from preregistered paths and can load publish plugins
if it's enabled on context.
"""
- import avalon.api
import pyblish.logic
from openpype.pipeline import OpenPypePyblishPluginMixin
diff --git a/openpype/pipeline/create/creator_plugins.py b/openpype/pipeline/create/creator_plugins.py
index 36bccd427e..cbe19da064 100644
--- a/openpype/pipeline/create/creator_plugins.py
+++ b/openpype/pipeline/create/creator_plugins.py
@@ -89,7 +89,9 @@ class BaseCreator:
@property
def log(self):
if self._log is None:
- self._log = logging.getLogger(self.__class__.__name__)
+ from openpype.api import Logger
+
+ self._log = Logger.get_logger(self.__class__.__name__)
return self._log
def _add_instance_to_context(self, instance):
diff --git a/openpype/pipeline/legacy_io.py b/openpype/pipeline/legacy_io.py
new file mode 100644
index 0000000000..c41406b208
--- /dev/null
+++ b/openpype/pipeline/legacy_io.py
@@ -0,0 +1,146 @@
+"""Wrapper around interactions with the database"""
+
+import sys
+import logging
+import functools
+
+from . import schema
+from .mongodb import AvalonMongoDB, session_data_from_environment
+
+module = sys.modules[__name__]
+
+Session = {}
+_is_installed = False
+_connection_object = AvalonMongoDB(Session)
+_mongo_client = None
+_database = database = None
+
+log = logging.getLogger(__name__)
+
+
+def install():
+ """Establish a persistent connection to the database"""
+ if module._is_installed:
+ return
+
+ session = session_data_from_environment(context_keys=True)
+
+ session["schema"] = "openpype:session-2.0"
+ try:
+ schema.validate(session)
+ except schema.ValidationError as e:
+ # TODO(marcus): Make this mandatory
+ log.warning(e)
+
+ _connection_object.Session.update(session)
+ _connection_object.install()
+
+ module._mongo_client = _connection_object.mongo_client
+ module._database = module.database = _connection_object.database
+
+ module._is_installed = True
+
+
+def uninstall():
+ """Close any connection to the database"""
+ module._mongo_client = None
+ module._database = module.database = None
+ module._is_installed = False
+ try:
+ module._connection_object.uninstall()
+ except AttributeError:
+ pass
+
+
+def requires_install(func):
+ @functools.wraps(func)
+ def decorated(*args, **kwargs):
+ if not module._is_installed:
+ install()
+ return func(*args, **kwargs)
+ return decorated
+
+
+@requires_install
+def projects(*args, **kwargs):
+ return _connection_object.projects(*args, **kwargs)
+
+
+@requires_install
+def insert_one(doc, *args, **kwargs):
+ return _connection_object.insert_one(doc, *args, **kwargs)
+
+
+@requires_install
+def insert_many(docs, *args, **kwargs):
+ return _connection_object.insert_many(docs, *args, **kwargs)
+
+
+@requires_install
+def update_one(*args, **kwargs):
+ return _connection_object.update_one(*args, **kwargs)
+
+
+@requires_install
+def update_many(*args, **kwargs):
+ return _connection_object.update_many(*args, **kwargs)
+
+
+@requires_install
+def replace_one(*args, **kwargs):
+ return _connection_object.replace_one(*args, **kwargs)
+
+
+@requires_install
+def replace_many(*args, **kwargs):
+ return _connection_object.replace_many(*args, **kwargs)
+
+
+@requires_install
+def delete_one(*args, **kwargs):
+ return _connection_object.delete_one(*args, **kwargs)
+
+
+@requires_install
+def delete_many(*args, **kwargs):
+ return _connection_object.delete_many(*args, **kwargs)
+
+
+@requires_install
+def find(*args, **kwargs):
+ return _connection_object.find(*args, **kwargs)
+
+
+@requires_install
+def find_one(*args, **kwargs):
+ return _connection_object.find_one(*args, **kwargs)
+
+
+@requires_install
+def distinct(*args, **kwargs):
+ return _connection_object.distinct(*args, **kwargs)
+
+
+@requires_install
+def aggregate(*args, **kwargs):
+ return _connection_object.aggregate(*args, **kwargs)
+
+
+@requires_install
+def save(*args, **kwargs):
+ return _connection_object.save(*args, **kwargs)
+
+
+@requires_install
+def drop(*args, **kwargs):
+ return _connection_object.drop(*args, **kwargs)
+
+
+@requires_install
+def parenthood(*args, **kwargs):
+ return _connection_object.parenthood(*args, **kwargs)
+
+
+@requires_install
+def bulk_write(*args, **kwargs):
+ return _connection_object.bulk_write(*args, **kwargs)
diff --git a/openpype/pipeline/load/utils.py b/openpype/pipeline/load/utils.py
index cb7c76f133..99e5d11f82 100644
--- a/openpype/pipeline/load/utils.py
+++ b/openpype/pipeline/load/utils.py
@@ -9,10 +9,11 @@ import numbers
import six
from bson.objectid import ObjectId
-from avalon import io, schema
-from avalon.api import Session
-
from openpype.lib import Anatomy
+from openpype.pipeline import (
+ schema,
+ legacy_io,
+)
log = logging.getLogger(__name__)
@@ -59,7 +60,7 @@ def get_repres_contexts(representation_ids, dbcon=None):
"""
if not dbcon:
- dbcon = io
+ dbcon = legacy_io
contexts = {}
if not representation_ids:
@@ -166,7 +167,7 @@ def get_subset_contexts(subset_ids, dbcon=None):
dict: The full representation context by representation id.
"""
if not dbcon:
- dbcon = io
+ dbcon = legacy_io
contexts = {}
if not subset_ids:
@@ -229,10 +230,10 @@ def get_representation_context(representation):
assert representation is not None, "This is a bug"
if isinstance(representation, (six.string_types, ObjectId)):
- representation = io.find_one(
+ representation = legacy_io.find_one(
{"_id": ObjectId(str(representation))})
- version, subset, asset, project = io.parenthood(representation)
+ version, subset, asset, project = legacy_io.parenthood(representation)
assert all([representation, version, subset, asset, project]), (
"This is a bug"
@@ -404,17 +405,17 @@ def update_container(container, version=-1):
"""Update a container"""
# Compute the different version from 'representation'
- current_representation = io.find_one({
+ current_representation = legacy_io.find_one({
"_id": ObjectId(container["representation"])
})
assert current_representation is not None, "This is a bug"
- current_version, subset, asset, project = io.parenthood(
+ current_version, subset, asset, project = legacy_io.parenthood(
current_representation)
if version == -1:
- new_version = io.find_one({
+ new_version = legacy_io.find_one({
"type": "version",
"parent": subset["_id"]
}, sort=[("name", -1)])
@@ -430,11 +431,11 @@ def update_container(container, version=-1):
"type": "version",
"name": version
}
- new_version = io.find_one(version_query)
+ new_version = legacy_io.find_one(version_query)
assert new_version is not None, "This is a bug"
- new_representation = io.find_one({
+ new_representation = legacy_io.find_one({
"type": "representation",
"parent": new_version["_id"],
"name": current_representation["name"]
@@ -481,7 +482,7 @@ def switch_container(container, representation, loader_plugin=None):
))
# Get the new representation to switch to
- new_representation = io.find_one({
+ new_representation = legacy_io.find_one({
"type": "representation",
"_id": representation["_id"],
})
@@ -500,7 +501,7 @@ def get_representation_path_from_context(context):
representation = context['representation']
project_doc = context.get("project")
root = None
- session_project = Session.get("AVALON_PROJECT")
+ session_project = legacy_io.Session.get("AVALON_PROJECT")
if project_doc and project_doc["name"] != session_project:
anatomy = Anatomy(project_doc["name"])
root = anatomy.roots
@@ -529,7 +530,7 @@ def get_representation_path(representation, root=None, dbcon=None):
from openpype.lib import StringTemplate, TemplateUnsolved
if dbcon is None:
- dbcon = io
+ dbcon = legacy_io
if root is None:
from openpype.pipeline import registered_root
diff --git a/openpype/pipeline/mongodb.py b/openpype/pipeline/mongodb.py
new file mode 100644
index 0000000000..565e26b966
--- /dev/null
+++ b/openpype/pipeline/mongodb.py
@@ -0,0 +1,272 @@
+import os
+import time
+import functools
+import logging
+import pymongo
+from uuid import uuid4
+
+from . import schema
+
+
+def requires_install(func):
+ func_obj = getattr(func, "__self__", None)
+
+ @functools.wraps(func)
+ def decorated(*args, **kwargs):
+ if func_obj is not None:
+ _obj = func_obj
+ else:
+ _obj = args[0]
+ if not _obj.is_installed():
+ if _obj.auto_install:
+ _obj.install()
+ else:
+ raise IOError(
+ "'{}.{}()' requires to run install() first".format(
+ _obj.__class__.__name__, func.__name__
+ )
+ )
+ return func(*args, **kwargs)
+ return decorated
+
+
+def auto_reconnect(func):
+ """Handle pymongo AutoReconnect by retrying the call up to 3 times."""
+ retry_times = 3
+ reconnect_msg = "Reconnecting..."
+ func_obj = getattr(func, "__self__", None)
+
+ @functools.wraps(func)
+ def decorated(*args, **kwargs):
+ if func_obj is not None:
+ _obj = func_obj
+ else:
+ _obj = args[0]
+
+ for retry in range(1, retry_times + 1):
+ try:
+ return func(*args, **kwargs)
+ except pymongo.errors.AutoReconnect:
+ if hasattr(_obj, "log"):
+ _obj.log.warning(reconnect_msg)
+ else:
+ print(reconnect_msg)
+
+ if retry >= retry_times:
+ raise
+ time.sleep(0.1)
+ return decorated
+
+
+SESSION_CONTEXT_KEYS = (
+ # Root directory of projects on disk
+ "AVALON_PROJECTS",
+ # Name of current Project
+ "AVALON_PROJECT",
+ # Name of current Asset
+ "AVALON_ASSET",
+ # Name of current task
+ "AVALON_TASK",
+ # Name of current app
+ "AVALON_APP",
+ # Path to working directory
+ "AVALON_WORKDIR",
+ # Optional path to scenes directory (see Work Files API)
+ "AVALON_SCENEDIR"
+)
+
+
+def session_data_from_environment(context_keys=False):
+ session_data = {}
+ if context_keys:
+ for key in SESSION_CONTEXT_KEYS:
+ value = os.environ.get(key)
+ session_data[key] = value or ""
+ else:
+ for key in SESSION_CONTEXT_KEYS:
+ session_data[key] = None
+
+ for key, default_value in (
+ # Name of Avalon in graphical user interfaces
+ # Use this to customise the visual appearance of Avalon
+ # to better integrate with your surrounding pipeline
+ ("AVALON_LABEL", "Avalon"),
+
+ # Used during any connections to the outside world
+ ("AVALON_TIMEOUT", "1000"),
+
+ # Name of database used in MongoDB
+ ("AVALON_DB", "avalon"),
+ ):
+ value = os.environ.get(key) or default_value
+ if value is not None:
+ session_data[key] = value
+
+ return session_data
+
+
+class AvalonMongoDB:
+ def __init__(self, session=None, auto_install=True):
+ self._id = uuid4()
+ self._database = None
+ self.auto_install = auto_install
+ self._installed = False
+
+ if session is None:
+ session = session_data_from_environment(context_keys=False)
+
+ self.Session = session
+
+ self.log = logging.getLogger(self.__class__.__name__)
+
+ def __getattr__(self, attr_name):
+ attr = None
+ if not self.is_installed() and self.auto_install:
+ self.install()
+
+ if not self.is_installed():
+ raise IOError(
+ "'{}.{}()' requires to run install() first".format(
+ self.__class__.__name__, attr_name
+ )
+ )
+
+ project_name = self.active_project()
+ if project_name is None:
+ raise ValueError(
+ "Value of 'Session[\"AVALON_PROJECT\"]' is not set."
+ )
+
+ collection = self._database[project_name]
+ not_set = object()
+ attr = getattr(collection, attr_name, not_set)
+
+ if attr is not_set:
+ # Raise attribute error
+ raise AttributeError(
+ "{} has no attribute '{}'.".format(
+ collection.__class__.__name__, attr_name
+ )
+ )
+
+ # Decorate function
+ if callable(attr):
+ attr = auto_reconnect(attr)
+ return attr
+
+ @property
+ def mongo_client(self):
+ from openpype.lib import OpenPypeMongoConnection
+
+ return OpenPypeMongoConnection.get_mongo_client()
+
+ @property
+ def id(self):
+ return self._id
+
+ @property
+ def database(self):
+ if not self.is_installed() and self.auto_install:
+ self.install()
+
+ if self.is_installed():
+ return self._database
+
+ raise IOError(
+ "'{}.database' requires to run install() first".format(
+ self.__class__.__name__
+ )
+ )
+
+ def is_installed(self):
+ return self._installed
+
+ def install(self):
+ """Establish a persistent connection to the database"""
+ if self.is_installed():
+ return
+
+ self._installed = True
+ self._database = self.mongo_client[str(os.environ["AVALON_DB"])]
+
+ def uninstall(self):
+ """Close any connection to the database"""
+ self._installed = False
+ self._database = None
+
+ @requires_install
+ def active_project(self):
+ """Return the name of the active project"""
+ return self.Session["AVALON_PROJECT"]
+
+ @requires_install
+ @auto_reconnect
+ def projects(self, projection=None, only_active=True):
+ """Iter project documents
+
+ Args:
+ projection (optional): MongoDB query projection operation
+ only_active (optional): Skip inactive projects, default True.
+
+ Returns:
+ Project documents iterator
+
+ """
+ query_filter = {"type": "project"}
+ if only_active:
+ query_filter.update({
+ "$or": [
+ {"data.active": {"$exists": 0}},
+ {"data.active": True},
+ ]
+ })
+
+ for project_name in self._database.collection_names():
+ if project_name in ("system.indexes",):
+ continue
+
+ # Each collection will have exactly one project document
+
+ doc = self._database[project_name].find_one(
+ query_filter, projection=projection
+ )
+ if doc is not None:
+ yield doc
+
+ @auto_reconnect
+ def insert_one(self, item, *args, **kwargs):
+ assert isinstance(item, dict), "item must be of type <dict>"
+ schema.validate(item)
+ return self._database[self.active_project()].insert_one(
+ item, *args, **kwargs
+ )
+
+ @auto_reconnect
+ def insert_many(self, items, *args, **kwargs):
+ # check if all items are valid
+ assert isinstance(items, list), "`items` must be of type <list>"
+ for item in items:
+ assert isinstance(item, dict), "`item` must be of type <dict>"
+ schema.validate(item)
+
+ return self._database[self.active_project()].insert_many(
+ items, *args, **kwargs
+ )
+
+ def parenthood(self, document):
+ assert document is not None, "This is a bug"
+
+ parents = list()
+
+ while document.get("parent") is not None:
+ document = self.find_one({"_id": document["parent"]})
+ if document is None:
+ break
+
+ if document.get("type") == "hero_version":
+ _document = self.find_one({"_id": document["version_id"]})
+ document["data"] = _document["data"]
+
+ parents.append(document)
+
+ return parents
diff --git a/openpype/pipeline/schema.py b/openpype/pipeline/schema.py
new file mode 100644
index 0000000000..7e96bfe1b1
--- /dev/null
+++ b/openpype/pipeline/schema.py
@@ -0,0 +1,137 @@
+"""Wrapper around :mod:`jsonschema`
+
+Schemas are implicitly loaded from the /schema directory of this project.
+
+Attributes:
+ _cache: Cache of previously loaded schemas
+
+Resources:
+ http://json-schema.org/
+ http://json-schema.org/latest/json-schema-core.html
+ http://spacetelescope.github.io/understanding-json-schema/index.html
+
+"""
+
+import os
+import re
+import json
+import logging
+
+import jsonschema
+import six
+
+log_ = logging.getLogger(__name__)
+
+ValidationError = jsonschema.ValidationError
+SchemaError = jsonschema.SchemaError
+
+_CACHED = False
+
+
+def get_schema_version(schema_name):
+ """Extract version form schema name.
+
+ It is expected that the schema name contains only major and minor version.
+
+ Expected name should match to:
+ "{name}:{type}-{major version}.{minor version}"
+ - `name` - must not contain colon
+ - `type` - must not contain dash
+ - major and minor versions must be numbers separated by dot
+
+ Args:
+ schema_name(str): Name of schema that should be parsed.
+
+ Returns:
+ tuple: Contains two values, major version first and minor version
+ second. When the schema name does not match the parsing regex,
+ `(0, 0)` is returned.
+ """
+ schema_regex = re.compile(r"[^:]+:[^-]+-(\d\.\d)")
+ groups = schema_regex.findall(schema_name)
+ if not groups:
+ return 0, 0
+
+ maj_version, min_version = groups[0].split(".")
+ return int(maj_version), int(min_version)
+
+
+def validate(data, schema=None):
+ """Validate `data` with `schema`
+
+ Arguments:
+ data (dict): JSON-compatible data
+ schema (str): DEPRECATED Name of schema. Now included in the data.
+
+ Raises:
+ ValidationError on invalid schema
+
+ """
+ if not _CACHED:
+ _precache()
+
+ root, schema = data["schema"].rsplit(":", 1)
+ # assert root in (
+ # "mindbender-core", # Backwards compatibility
+ # "avalon-core",
+ # "pype"
+ # )
+
+ if isinstance(schema, six.string_types):
+ schema = _cache[schema + ".json"]
+
+ resolver = jsonschema.RefResolver(
+ "",
+ None,
+ store=_cache,
+ cache_remote=True
+ )
+
+ jsonschema.validate(data,
+ schema,
+ types={"array": (list, tuple)},
+ resolver=resolver)
+
+
+_cache = {
+ # A mock schema for docstring tests
+ "_doctest.json": {
+ "$schema": "http://json-schema.org/schema#",
+
+ "title": "_doctest",
+ "description": "A test schema",
+
+ "type": "object",
+
+ "additionalProperties": False,
+
+ "required": ["key"],
+
+ "properties": {
+ "key": {
+ "description": "A test key",
+ "type": "string"
+ }
+ }
+ }
+}
+
+
+def _precache():
+ """Store available schemas in-memory for reduced disk access"""
+ global _CACHED
+
+ repos_root = os.environ["OPENPYPE_REPOS_ROOT"]
+ schema_dir = os.path.join(repos_root, "schema")
+
+ for schema in os.listdir(schema_dir):
+ if schema.startswith(("_", ".")):
+ continue
+ if not schema.endswith(".json"):
+ continue
+ if not os.path.isfile(os.path.join(schema_dir, schema)):
+ continue
+ with open(os.path.join(schema_dir, schema)) as f:
+ log_.debug("Installing schema '%s'.." % schema)
+ _cache[schema] = json.load(f)
+ _CACHED = True
diff --git a/openpype/pipeline/thumbnail.py b/openpype/pipeline/thumbnail.py
index c09dab70eb..ec97b36954 100644
--- a/openpype/pipeline/thumbnail.py
+++ b/openpype/pipeline/thumbnail.py
@@ -2,6 +2,7 @@ import os
import copy
import logging
+from . import legacy_io
from .plugin_discover import (
discover,
register_plugin,
@@ -17,8 +18,7 @@ def get_thumbnail_binary(thumbnail_entity, thumbnail_type, dbcon=None):
resolvers = discover_thumbnail_resolvers()
resolvers = sorted(resolvers, key=lambda cls: cls.priority)
if dbcon is None:
- from avalon import io
- dbcon = io
+ dbcon = legacy_io
for Resolver in resolvers:
available_types = Resolver.thumbnail_types
diff --git a/openpype/plugin.py b/openpype/plugin.py
index 3569936dac..bb9bc2ff85 100644
--- a/openpype/plugin.py
+++ b/openpype/plugin.py
@@ -1,7 +1,6 @@
import tempfile
import os
import pyblish.api
-import avalon.api
ValidatePipelineOrder = pyblish.api.ValidatorOrder + 0.05
ValidateContentsOrder = pyblish.api.ValidatorOrder + 0.1
diff --git a/openpype/plugins/load/add_site.py b/openpype/plugins/load/add_site.py
index 95001691e2..55fda55d17 100644
--- a/openpype/plugins/load/add_site.py
+++ b/openpype/plugins/load/add_site.py
@@ -1,9 +1,19 @@
from openpype.modules import ModulesManager
from openpype.pipeline import load
+from openpype.lib.avalon_context import get_linked_ids_for_representations
+from openpype.modules.sync_server.utils import SiteAlreadyPresentError
class AddSyncSite(load.LoaderPlugin):
- """Add sync site to representation"""
+ """Add sync site to representation
+
+ If family of synced representation is 'workfile', it looks for all
+ representations which are referenced (loaded) in workfile with content of
+ 'inputLinks'.
+ It doesn't do any checks for site, most common use case is when artist is
+ downloading workfile to his local site, but it might be helpful when
+ artist is re-uploading broken representation on remote site also.
+ """
representations = ["*"]
families = ["*"]
@@ -12,21 +22,42 @@ class AddSyncSite(load.LoaderPlugin):
icon = "download"
color = "#999999"
+ _sync_server = None
+ is_add_site_loader = True
+
+ @property
+ def sync_server(self):
+ if not self._sync_server:
+ manager = ModulesManager()
+ self._sync_server = manager.modules_by_name["sync_server"]
+
+ return self._sync_server
+
def load(self, context, name=None, namespace=None, data=None):
self.log.info("Adding {} to representation: {}".format(
data["site_name"], data["_id"]))
- self.add_site_to_representation(data["project_name"],
- data["_id"],
- data["site_name"])
- self.log.debug("Site added.")
+ family = context["representation"]["context"]["family"]
+ project_name = data["project_name"]
+ repre_id = data["_id"]
+ site_name = data["site_name"]
- @staticmethod
- def add_site_to_representation(project_name, representation_id, site_name):
- """Adds new site to representation_id, resets if exists"""
- manager = ModulesManager()
- sync_server = manager.modules_by_name["sync_server"]
- sync_server.add_site(project_name, representation_id, site_name,
- force=True)
+ self.sync_server.add_site(project_name, repre_id, site_name,
+ force=True)
+
+ if family == "workfile":
+ links = get_linked_ids_for_representations(project_name,
+ [repre_id],
+ link_type="reference")
+ for link_repre_id in links:
+ try:
+ self.sync_server.add_site(project_name, link_repre_id,
+ site_name,
+ force=False)
+ except SiteAlreadyPresentError:
+ # do not add/reset working site for references
+ self.log.debug("Site present", exc_info=True)
+
+ self.log.debug("Site added.")
def filepath_from_context(self, context):
"""No real file loading"""
diff --git a/openpype/plugins/load/delete_old_versions.py b/openpype/plugins/load/delete_old_versions.py
index 2789f4ea23..c3e9e9fa0a 100644
--- a/openpype/plugins/load/delete_old_versions.py
+++ b/openpype/plugins/load/delete_old_versions.py
@@ -8,9 +8,8 @@ import ftrack_api
import qargparse
from Qt import QtWidgets, QtCore
-from avalon.api import AvalonMongoDB
from openpype import style
-from openpype.pipeline import load
+from openpype.pipeline import load, AvalonMongoDB
from openpype.lib import StringTemplate
from openpype.api import Anatomy
diff --git a/openpype/plugins/load/delivery.py b/openpype/plugins/load/delivery.py
index 04080053e3..7df07e3f64 100644
--- a/openpype/plugins/load/delivery.py
+++ b/openpype/plugins/load/delivery.py
@@ -3,9 +3,7 @@ from collections import defaultdict
from Qt import QtWidgets, QtCore, QtGui
-from avalon.api import AvalonMongoDB
-
-from openpype.pipeline import load
+from openpype.pipeline import load, AvalonMongoDB
from openpype.api import Anatomy, config
from openpype import resources, style
diff --git a/openpype/plugins/load/remove_site.py b/openpype/plugins/load/remove_site.py
index adffec9986..c5f442b2f5 100644
--- a/openpype/plugins/load/remove_site.py
+++ b/openpype/plugins/load/remove_site.py
@@ -12,22 +12,26 @@ class RemoveSyncSite(load.LoaderPlugin):
icon = "download"
color = "#999999"
+ _sync_server = None
+ is_remove_site_loader = True
+
+ @property
+ def sync_server(self):
+ if not self._sync_server:
+ manager = ModulesManager()
+ self._sync_server = manager.modules_by_name["sync_server"]
+
+ return self._sync_server
+
def load(self, context, name=None, namespace=None, data=None):
self.log.info("Removing {} on representation: {}".format(
data["site_name"], data["_id"]))
- self.remove_site_on_representation(data["project_name"],
- data["_id"],
- data["site_name"])
+ self.sync_server.remove_site(data["project_name"],
+ data["_id"],
+ data["site_name"],
+ True)
self.log.debug("Site added.")
- @staticmethod
- def remove_site_on_representation(project_name, representation_id,
- site_name):
- manager = ModulesManager()
- sync_server = manager.modules_by_name["sync_server"]
- sync_server.remove_site(project_name, representation_id,
- site_name, True)
-
def filepath_from_context(self, context):
"""No real file loading"""
return ""
diff --git a/openpype/plugins/publish/cleanup_farm.py b/openpype/plugins/publish/cleanup_farm.py
index ab0c6e469e..2c6c1625bb 100644
--- a/openpype/plugins/publish/cleanup_farm.py
+++ b/openpype/plugins/publish/cleanup_farm.py
@@ -3,7 +3,8 @@
import os
import shutil
import pyblish.api
-import avalon.api
+
+from openpype.pipeline import legacy_io
class CleanUpFarm(pyblish.api.ContextPlugin):
@@ -22,7 +23,7 @@ class CleanUpFarm(pyblish.api.ContextPlugin):
def process(self, context):
# Get source host from which farm publishing was started
- src_host_name = avalon.api.Session.get("AVALON_APP")
+ src_host_name = legacy_io.Session.get("AVALON_APP")
self.log.debug("Host name from session is {}".format(src_host_name))
# Skip process if is not in list of source hosts in which this
# plugin should run
diff --git a/openpype/plugins/publish/collect_anatomy_context_data.py b/openpype/plugins/publish/collect_anatomy_context_data.py
index bd8d9e50c4..0794adfb67 100644
--- a/openpype/plugins/publish/collect_anatomy_context_data.py
+++ b/openpype/plugins/publish/collect_anatomy_context_data.py
@@ -13,11 +13,12 @@ Provides:
"""
import json
+import pyblish.api
+
from openpype.lib import (
get_system_general_anatomy_data
)
-from avalon import api
-import pyblish.api
+from openpype.pipeline import legacy_io
class CollectAnatomyContextData(pyblish.api.ContextPlugin):
@@ -65,7 +66,7 @@ class CollectAnatomyContextData(pyblish.api.ContextPlugin):
asset_entity = context.data.get("assetEntity")
if asset_entity:
- task_name = api.Session["AVALON_TASK"]
+ task_name = legacy_io.Session["AVALON_TASK"]
asset_tasks = asset_entity["data"]["tasks"]
task_type = asset_tasks.get(task_name, {}).get("type")
diff --git a/openpype/plugins/publish/collect_anatomy_instance_data.py b/openpype/plugins/publish/collect_anatomy_instance_data.py
index 42836e796b..6a6ea170b5 100644
--- a/openpype/plugins/publish/collect_anatomy_instance_data.py
+++ b/openpype/plugins/publish/collect_anatomy_instance_data.py
@@ -25,9 +25,10 @@ import copy
import json
import collections
-from avalon import io
import pyblish.api
+from openpype.pipeline import legacy_io
+
class CollectAnatomyInstanceData(pyblish.api.ContextPlugin):
"""Collect Instance specific Anatomy data.
@@ -83,7 +84,7 @@ class CollectAnatomyInstanceData(pyblish.api.ContextPlugin):
self.log.debug("Querying asset documents with names: {}".format(
", ".join(["\"{}\"".format(name) for name in asset_names])
))
- asset_docs = io.find({
+ asset_docs = legacy_io.find({
"type": "asset",
"name": {"$in": asset_names}
})
@@ -153,7 +154,7 @@ class CollectAnatomyInstanceData(pyblish.api.ContextPlugin):
subset_docs = []
if subset_filters:
- subset_docs = list(io.find({
+ subset_docs = list(legacy_io.find({
"type": "subset",
"$or": subset_filters
}))
@@ -202,7 +203,7 @@ class CollectAnatomyInstanceData(pyblish.api.ContextPlugin):
]
last_version_by_subset_id = {}
- for doc in io.aggregate(_pipeline):
+ for doc in legacy_io.aggregate(_pipeline):
subset_id = doc["_id"]
last_version_by_subset_id[subset_id] = doc["name"]
diff --git a/openpype/plugins/publish/collect_avalon_entities.py b/openpype/plugins/publish/collect_avalon_entities.py
index c099a2cf75..3e7843407f 100644
--- a/openpype/plugins/publish/collect_avalon_entities.py
+++ b/openpype/plugins/publish/collect_avalon_entities.py
@@ -8,9 +8,10 @@ Provides:
context -> assetEntity - asset entity from database
"""
-from avalon import io, api
import pyblish.api
+from openpype.pipeline import legacy_io
+
class CollectAvalonEntities(pyblish.api.ContextPlugin):
"""Collect Anatomy into Context"""
@@ -19,12 +20,12 @@ class CollectAvalonEntities(pyblish.api.ContextPlugin):
label = "Collect Avalon Entities"
def process(self, context):
- io.install()
- project_name = api.Session["AVALON_PROJECT"]
- asset_name = api.Session["AVALON_ASSET"]
- task_name = api.Session["AVALON_TASK"]
+ legacy_io.install()
+ project_name = legacy_io.Session["AVALON_PROJECT"]
+ asset_name = legacy_io.Session["AVALON_ASSET"]
+ task_name = legacy_io.Session["AVALON_TASK"]
- project_entity = io.find_one({
+ project_entity = legacy_io.find_one({
"type": "project",
"name": project_name
})
@@ -38,7 +39,7 @@ class CollectAvalonEntities(pyblish.api.ContextPlugin):
if not asset_name:
self.log.info("Context is not set. Can't collect global data.")
return
- asset_entity = io.find_one({
+ asset_entity = legacy_io.find_one({
"type": "asset",
"name": asset_name,
"parent": project_entity["_id"]
diff --git a/openpype/plugins/publish/collect_from_create_context.py b/openpype/plugins/publish/collect_from_create_context.py
index 16e3f669c3..f6ead98809 100644
--- a/openpype/plugins/publish/collect_from_create_context.py
+++ b/openpype/plugins/publish/collect_from_create_context.py
@@ -3,7 +3,8 @@
"""
import os
import pyblish.api
-import avalon.api
+
+from openpype.pipeline import legacy_io
class CollectFromCreateContext(pyblish.api.ContextPlugin):
@@ -25,12 +26,12 @@ class CollectFromCreateContext(pyblish.api.ContextPlugin):
# Update global data to context
context.data.update(create_context.context_data_to_store())
-
+ context.data["newPublishing"] = True
# Update context data
for key in ("AVALON_PROJECT", "AVALON_ASSET", "AVALON_TASK"):
value = create_context.dbcon.Session.get(key)
if value is not None:
- avalon.api.Session[key] = value
+ legacy_io.Session[key] = value
os.environ[key] = value
def create_instance(self, context, in_data):
diff --git a/openpype/plugins/publish/collect_hierarchy.py b/openpype/plugins/publish/collect_hierarchy.py
index efb40407d9..4e94acce4a 100644
--- a/openpype/plugins/publish/collect_hierarchy.py
+++ b/openpype/plugins/publish/collect_hierarchy.py
@@ -1,5 +1,6 @@
import pyblish.api
-import avalon.api as avalon
+
+from openpype.pipeline import legacy_io
class CollectHierarchy(pyblish.api.ContextPlugin):
@@ -19,7 +20,7 @@ class CollectHierarchy(pyblish.api.ContextPlugin):
def process(self, context):
temp_context = {}
- project_name = avalon.Session["AVALON_PROJECT"]
+ project_name = legacy_io.Session["AVALON_PROJECT"]
final_context = {}
final_context[project_name] = {}
final_context[project_name]['entity_type'] = 'Project'
diff --git a/openpype/plugins/publish/collect_rendered_files.py b/openpype/plugins/publish/collect_rendered_files.py
index 1005c38b9d..670e57ed10 100644
--- a/openpype/plugins/publish/collect_rendered_files.py
+++ b/openpype/plugins/publish/collect_rendered_files.py
@@ -11,7 +11,8 @@ import os
import json
import pyblish.api
-from avalon import api
+
+from openpype.pipeline import legacy_io
class CollectRenderedFiles(pyblish.api.ContextPlugin):
@@ -150,7 +151,7 @@ class CollectRenderedFiles(pyblish.api.ContextPlugin):
session_data["AVALON_WORKDIR"] = remapped
self.log.info("Setting session using data from file")
- api.Session.update(session_data)
+ legacy_io.Session.update(session_data)
os.environ.update(session_data)
session_is_set = True
self._process_path(data, anatomy)
diff --git a/openpype/plugins/publish/collect_resources_path.py b/openpype/plugins/publish/collect_resources_path.py
index 1f509365c7..89df031fb0 100644
--- a/openpype/plugins/publish/collect_resources_path.py
+++ b/openpype/plugins/publish/collect_resources_path.py
@@ -12,7 +12,8 @@ import os
import copy
import pyblish.api
-from avalon import api
+
+from openpype.pipeline import legacy_io
class CollectResourcesPath(pyblish.api.InstancePlugin):
@@ -84,7 +85,7 @@ class CollectResourcesPath(pyblish.api.InstancePlugin):
else:
# solve deprecated situation when `folder` key is not underneath
# `publish` anatomy
- project_name = api.Session["AVALON_PROJECT"]
+ project_name = legacy_io.Session["AVALON_PROJECT"]
self.log.warning((
"Deprecation warning: Anatomy does not have set `folder`"
" key underneath `publish` (in global of for project `{}`)."
diff --git a/openpype/plugins/publish/collect_scene_loaded_versions.py b/openpype/plugins/publish/collect_scene_loaded_versions.py
index e54592abb8..f2ade1ac28 100644
--- a/openpype/plugins/publish/collect_scene_loaded_versions.py
+++ b/openpype/plugins/publish/collect_scene_loaded_versions.py
@@ -1,8 +1,11 @@
from bson.objectid import ObjectId
import pyblish.api
-from avalon import io
-from openpype.pipeline import registered_host
+
+from openpype.pipeline import (
+ registered_host,
+ legacy_io,
+)
class CollectSceneLoadedVersions(pyblish.api.ContextPlugin):
@@ -40,7 +43,10 @@ class CollectSceneLoadedVersions(pyblish.api.ContextPlugin):
_repr_ids = [ObjectId(c["representation"]) for c in _containers]
version_by_repr = {
str(doc["_id"]): doc["parent"] for doc in
- io.find({"_id": {"$in": _repr_ids}}, projection={"parent": 1})
+ legacy_io.find(
+ {"_id": {"$in": _repr_ids}},
+ projection={"parent": 1}
+ )
}
for con in _containers:
diff --git a/openpype/plugins/publish/extract_burnin.py b/openpype/plugins/publish/extract_burnin.py
index 41c84103a6..544c763b52 100644
--- a/openpype/plugins/publish/extract_burnin.py
+++ b/openpype/plugins/publish/extract_burnin.py
@@ -16,7 +16,7 @@ from openpype.lib import (
run_openpype_process,
get_transcode_temp_directory,
- convert_for_ffmpeg,
+ convert_input_paths_for_ffmpeg,
should_convert_for_ffmpeg,
CREATE_NO_WINDOW
@@ -187,8 +187,13 @@ class ExtractBurnin(openpype.api.Extractor):
repre_files = repre["files"]
if isinstance(repre_files, (tuple, list)):
filename = repre_files[0]
+ src_filepaths = [
+ os.path.join(src_repre_staging_dir, filename)
+ for filename in repre_files
+ ]
else:
filename = repre_files
+ src_filepaths = [os.path.join(src_repre_staging_dir, filename)]
first_input_path = os.path.join(src_repre_staging_dir, filename)
# Determine if representation requires pre conversion for ffmpeg
@@ -209,11 +214,9 @@ class ExtractBurnin(openpype.api.Extractor):
new_staging_dir = get_transcode_temp_directory()
repre["stagingDir"] = new_staging_dir
- convert_for_ffmpeg(
- first_input_path,
+ convert_input_paths_for_ffmpeg(
+ src_filepaths,
new_staging_dir,
- _temp_data["frameStart"],
- _temp_data["frameEnd"],
self.log
)
diff --git a/openpype/plugins/publish/extract_hierarchy_avalon.py b/openpype/plugins/publish/extract_hierarchy_avalon.py
index b062a9c4b5..2f528d4469 100644
--- a/openpype/plugins/publish/extract_hierarchy_avalon.py
+++ b/openpype/plugins/publish/extract_hierarchy_avalon.py
@@ -1,7 +1,10 @@
-import pyblish.api
-from avalon import io
from copy import deepcopy
+import pyblish.api
+
+from openpype.pipeline import legacy_io
+
+
class ExtractHierarchyToAvalon(pyblish.api.ContextPlugin):
"""Create entities in Avalon based on collected data."""
@@ -16,8 +19,8 @@ class ExtractHierarchyToAvalon(pyblish.api.ContextPlugin):
return
hierarchy_context = deepcopy(context.data["hierarchyContext"])
- if not io.Session:
- io.install()
+ if not legacy_io.Session:
+ legacy_io.install()
active_assets = []
# filter only the active publishing insatnces
@@ -78,7 +81,7 @@ class ExtractHierarchyToAvalon(pyblish.api.ContextPlugin):
update_data = True
# Process project
if entity_type.lower() == "project":
- entity = io.find_one({"type": "project"})
+ entity = legacy_io.find_one({"type": "project"})
# TODO: should be in validator?
assert (entity is not None), "Did not find project in DB"
@@ -95,7 +98,7 @@ class ExtractHierarchyToAvalon(pyblish.api.ContextPlugin):
)
# Else process assset
else:
- entity = io.find_one({"type": "asset", "name": name})
+ entity = legacy_io.find_one({"type": "asset", "name": name})
if entity:
# Do not override data, only update
cur_entity_data = entity.get("data") or {}
@@ -119,7 +122,7 @@ class ExtractHierarchyToAvalon(pyblish.api.ContextPlugin):
# Skip updating data
update_data = False
- archived_entities = io.find({
+ archived_entities = legacy_io.find({
"type": "archived_asset",
"name": name
})
@@ -143,7 +146,7 @@ class ExtractHierarchyToAvalon(pyblish.api.ContextPlugin):
if update_data:
# Update entity data with input data
- io.update_many(
+ legacy_io.update_many(
{"_id": entity["_id"]},
{"$set": {"data": data}}
)
@@ -161,7 +164,7 @@ class ExtractHierarchyToAvalon(pyblish.api.ContextPlugin):
"type": "asset",
"data": data
}
- io.replace_one(
+ legacy_io.replace_one(
{"_id": entity["_id"]},
new_entity
)
@@ -176,9 +179,9 @@ class ExtractHierarchyToAvalon(pyblish.api.ContextPlugin):
"data": data
}
self.log.debug("Creating asset: {}".format(item))
- entity_id = io.insert_one(item).inserted_id
+ entity_id = legacy_io.insert_one(item).inserted_id
- return io.find_one({"_id": entity_id})
+ return legacy_io.find_one({"_id": entity_id})
def _get_assets(self, input_dict):
""" Returns only asset dictionary.
diff --git a/openpype/plugins/publish/extract_jpeg_exr.py b/openpype/plugins/publish/extract_jpeg_exr.py
index 468ed96199..d6d6854092 100644
--- a/openpype/plugins/publish/extract_jpeg_exr.py
+++ b/openpype/plugins/publish/extract_jpeg_exr.py
@@ -8,7 +8,7 @@ from openpype.lib import (
path_to_subprocess_arg,
get_transcode_temp_directory,
- convert_for_ffmpeg,
+ convert_input_paths_for_ffmpeg,
should_convert_for_ffmpeg
)
@@ -79,11 +79,9 @@ class ExtractJpegEXR(pyblish.api.InstancePlugin):
if do_convert:
convert_dir = get_transcode_temp_directory()
filename = os.path.basename(full_input_path)
- convert_for_ffmpeg(
- full_input_path,
+ convert_input_paths_for_ffmpeg(
+ [full_input_path],
convert_dir,
- None,
- None,
self.log
)
full_input_path = os.path.join(convert_dir, filename)
diff --git a/openpype/plugins/publish/extract_review.py b/openpype/plugins/publish/extract_review.py
index d569d82762..f2473839d9 100644
--- a/openpype/plugins/publish/extract_review.py
+++ b/openpype/plugins/publish/extract_review.py
@@ -18,7 +18,7 @@ from openpype.lib import (
path_to_subprocess_arg,
should_convert_for_ffmpeg,
- convert_for_ffmpeg,
+ convert_input_paths_for_ffmpeg,
get_transcode_temp_directory
)
import speedcopy
@@ -194,16 +194,20 @@ class ExtractReview(pyblish.api.InstancePlugin):
src_repre_staging_dir = repre["stagingDir"]
# Receive filepath to first file in representation
first_input_path = None
+ input_filepaths = []
if not self.input_is_sequence(repre):
first_input_path = os.path.join(
src_repre_staging_dir, repre["files"]
)
+ input_filepaths.append(first_input_path)
else:
for filename in repre["files"]:
- first_input_path = os.path.join(
+ filepath = os.path.join(
src_repre_staging_dir, filename
)
- break
+ input_filepaths.append(filepath)
+ if first_input_path is None:
+ first_input_path = filepath
# Skip if file is not set
if first_input_path is None:
@@ -230,13 +234,9 @@ class ExtractReview(pyblish.api.InstancePlugin):
new_staging_dir = get_transcode_temp_directory()
repre["stagingDir"] = new_staging_dir
- frame_start = instance.data["frameStart"]
- frame_end = instance.data["frameEnd"]
- convert_for_ffmpeg(
- first_input_path,
+ convert_input_paths_for_ffmpeg(
+ input_filepaths,
new_staging_dir,
- frame_start,
- frame_end,
self.log
)
diff --git a/openpype/plugins/publish/integrate_hero_version.py b/openpype/plugins/publish/integrate_hero_version.py
index ded149bdd0..a706b653c4 100644
--- a/openpype/plugins/publish/integrate_hero_version.py
+++ b/openpype/plugins/publish/integrate_hero_version.py
@@ -8,11 +8,14 @@ from bson.objectid import ObjectId
from pymongo import InsertOne, ReplaceOne
import pyblish.api
-from avalon import api, io, schema
from openpype.lib import (
create_hard_link,
filter_profiles
)
+from openpype.pipeline import (
+ schema,
+ legacy_io,
+)
class IntegrateHeroVersion(pyblish.api.InstancePlugin):
@@ -62,7 +65,7 @@ class IntegrateHeroVersion(pyblish.api.InstancePlugin):
template_key = self._get_template_key(instance)
anatomy = instance.context.data["anatomy"]
- project_name = api.Session["AVALON_PROJECT"]
+ project_name = legacy_io.Session["AVALON_PROJECT"]
if template_key not in anatomy.templates:
self.log.warning((
"!!! Anatomy of project \"{}\" does not have set"
@@ -220,7 +223,7 @@ class IntegrateHeroVersion(pyblish.api.InstancePlugin):
if old_repres_by_name:
old_repres_to_delete = old_repres_by_name
- archived_repres = list(io.find({
+ archived_repres = list(legacy_io.find({
# Check what is type of archived representation
"type": "archived_repsentation",
"parent": new_version_id
@@ -441,7 +444,8 @@ class IntegrateHeroVersion(pyblish.api.InstancePlugin):
)
if bulk_writes:
- io._database[io.Session["AVALON_PROJECT"]].bulk_write(
+ project_name = legacy_io.Session["AVALON_PROJECT"]
+ legacy_io.database[project_name].bulk_write(
bulk_writes
)
@@ -503,7 +507,7 @@ class IntegrateHeroVersion(pyblish.api.InstancePlugin):
anatomy_filled = anatomy.format(template_data)
# solve deprecated situation when `folder` key is not underneath
# `publish` anatomy
- project_name = api.Session["AVALON_PROJECT"]
+ project_name = legacy_io.Session["AVALON_PROJECT"]
self.log.warning((
"Deprecation warning: Anatomy does not have set `folder`"
" key underneath `publish` (in global of for project `{}`)."
@@ -584,12 +588,12 @@ class IntegrateHeroVersion(pyblish.api.InstancePlugin):
def version_from_representations(self, repres):
for repre in repres:
- version = io.find_one({"_id": repre["parent"]})
+ version = legacy_io.find_one({"_id": repre["parent"]})
if version:
return version
def current_hero_ents(self, version):
- hero_version = io.find_one({
+ hero_version = legacy_io.find_one({
"parent": version["parent"],
"type": "hero_version"
})
@@ -597,7 +601,7 @@ class IntegrateHeroVersion(pyblish.api.InstancePlugin):
if not hero_version:
return (None, [])
- hero_repres = list(io.find({
+ hero_repres = list(legacy_io.find({
"parent": hero_version["_id"],
"type": "representation"
}))
diff --git a/openpype/plugins/publish/integrate_inputlinks.py b/openpype/plugins/publish/integrate_inputlinks.py
index 11cffc4638..6964f2d938 100644
--- a/openpype/plugins/publish/integrate_inputlinks.py
+++ b/openpype/plugins/publish/integrate_inputlinks.py
@@ -3,7 +3,7 @@ from collections import OrderedDict
from bson.objectid import ObjectId
import pyblish.api
-from avalon import io
+from openpype.pipeline import legacy_io
class IntegrateInputLinks(pyblish.api.ContextPlugin):
@@ -129,5 +129,7 @@ class IntegrateInputLinks(pyblish.api.ContextPlugin):
if input_links is None:
continue
- io.update_one({"_id": version_doc["_id"]},
- {"$set": {"data.inputLinks": input_links}})
+ legacy_io.update_one(
+ {"_id": version_doc["_id"]},
+ {"$set": {"data.inputLinks": input_links}}
+ )
diff --git a/openpype/plugins/publish/integrate_new.py b/openpype/plugins/publish/integrate_new.py
index 5dcbb8fabd..bf13a4050e 100644
--- a/openpype/plugins/publish/integrate_new.py
+++ b/openpype/plugins/publish/integrate_new.py
@@ -9,14 +9,13 @@ import six
import re
import shutil
from collections import deque, defaultdict
+from datetime import datetime
from bson.objectid import ObjectId
from pymongo import DeleteOne, InsertOne
import pyblish.api
-from avalon import io
+
import openpype.api
-from datetime import datetime
-# from pype.modules import ModulesManager
from openpype.lib.profiles_filtering import filter_profiles
from openpype.lib import (
prepare_template_data,
@@ -24,6 +23,7 @@ from openpype.lib import (
StringTemplate,
TemplateUnsolved
)
+from openpype.pipeline import legacy_io
# this is needed until speedcopy for linux is fixed
if sys.platform == "win32":
@@ -113,7 +113,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
"usdOverride",
"simpleUnrealTexture"
]
- exclude_families = ["clip"]
+ exclude_families = ["clip", "render.farm"]
db_representation_context_keys = [
"project", "asset", "task", "subset", "version", "representation",
"family", "hierarchy", "task", "username"
@@ -131,11 +131,15 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
subset_grouping_profiles = None
def process(self, instance):
- self.integrated_file_sizes = {}
- if [ef for ef in self.exclude_families
- if instance.data["family"] in ef]:
- return
+ for ef in self.exclude_families:
+ if (
+ instance.data["family"] == ef or
+ ef in instance.data["families"]):
+ self.log.debug("Excluded family '{}' in '{}' or {}".format(
+ ef, instance.data["family"], instance.data["families"]))
+ return
+ self.integrated_file_sizes = {}
try:
self.register(instance)
self.log.info("Integrated Asset in to the database ...")
@@ -152,7 +156,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
# Required environment variables
anatomy_data = instance.data["anatomyData"]
- io.install()
+ legacy_io.install()
context = instance.context
@@ -166,7 +170,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
asset_name = instance.data["asset"]
asset_entity = instance.data.get("assetEntity")
if not asset_entity or asset_entity["name"] != context_asset_name:
- asset_entity = io.find_one({
+ asset_entity = legacy_io.find_one({
"type": "asset",
"name": asset_name,
"parent": project_entity["_id"]
@@ -228,7 +232,10 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
# Ensure at least one file is set up for transfer in staging dir.
repres = instance.data.get("representations")
- assert repres, "Instance has no files to transfer"
+ repres = instance.data.get("representations")
+ msg = "Instance {} has no files to transfer".format(
+ instance.data["family"])
+ assert repres, msg
assert isinstance(repres, (list, tuple)), (
"Instance 'files' must be a list, got: {0} {1}".format(
str(type(repres)), str(repres)
@@ -259,14 +266,14 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
new_repre_names_low = [_repre["name"].lower() for _repre in repres]
- existing_version = io.find_one({
+ existing_version = legacy_io.find_one({
'type': 'version',
'parent': subset["_id"],
'name': version_number
})
if existing_version is None:
- version_id = io.insert_one(version).inserted_id
+ version_id = legacy_io.insert_one(version).inserted_id
else:
# Check if instance have set `append` mode which cause that
# only replicated representations are set to archive
@@ -274,7 +281,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
# Update version data
# TODO query by _id and
- io.update_many({
+ legacy_io.update_many({
'type': 'version',
'parent': subset["_id"],
'name': version_number
@@ -284,7 +291,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
version_id = existing_version['_id']
# Find representations of existing version and archive them
- current_repres = list(io.find({
+ current_repres = list(legacy_io.find({
"type": "representation",
"parent": version_id
}))
@@ -307,14 +314,15 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
# bulk updates
if bulk_writes:
- io._database[io.Session["AVALON_PROJECT"]].bulk_write(
+ project_name = legacy_io.Session["AVALON_PROJECT"]
+ legacy_io.database[project_name].bulk_write(
bulk_writes
)
- version = io.find_one({"_id": version_id})
+ version = legacy_io.find_one({"_id": version_id})
instance.data["versionEntity"] = version
- existing_repres = list(io.find({
+ existing_repres = list(legacy_io.find({
"parent": version_id,
"type": "archived_representation"
}))
@@ -654,12 +662,12 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
repre_ids_to_remove = []
for repre in existing_repres:
repre_ids_to_remove.append(repre["_id"])
- io.delete_many({"_id": {"$in": repre_ids_to_remove}})
+ legacy_io.delete_many({"_id": {"$in": repre_ids_to_remove}})
for rep in instance.data["representations"]:
self.log.debug("__ rep: {}".format(rep))
- io.insert_many(representations)
+ legacy_io.insert_many(representations)
instance.data["published_representations"] = (
published_representations
)
@@ -761,7 +769,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
def get_subset(self, asset, instance):
subset_name = instance.data["subset"]
- subset = io.find_one({
+ subset = legacy_io.find_one({
"type": "subset",
"parent": asset["_id"],
"name": subset_name
@@ -782,7 +790,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
if _family not in families:
families.append(_family)
- _id = io.insert_one({
+ _id = legacy_io.insert_one({
"schema": "openpype:subset-3.0",
"type": "subset",
"name": subset_name,
@@ -792,7 +800,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
"parent": asset["_id"]
}).inserted_id
- subset = io.find_one({"_id": _id})
+ subset = legacy_io.find_one({"_id": _id})
# QUESTION Why is changing of group and updating it's
# families in 'get_subset'?
@@ -801,7 +809,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
# Update families on subset.
families = [instance.data["family"]]
families.extend(instance.data.get("families", []))
- io.update_many(
+ legacy_io.update_many(
{"type": "subset", "_id": ObjectId(subset["_id"])},
{"$set": {"data.families": families}}
)
@@ -825,7 +833,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
subset_group = self._get_subset_group(instance)
if subset_group:
- io.update_many({
+ legacy_io.update_many({
'type': 'subset',
'_id': ObjectId(subset_id)
}, {'$set': {'data.subsetGroup': subset_group}})
diff --git a/openpype/plugins/publish/integrate_thumbnail.py b/openpype/plugins/publish/integrate_thumbnail.py
index 28a93efb9a..5d6fc561ea 100644
--- a/openpype/plugins/publish/integrate_thumbnail.py
+++ b/openpype/plugins/publish/integrate_thumbnail.py
@@ -8,7 +8,7 @@ import six
import pyblish.api
from bson.objectid import ObjectId
-from avalon import api, io
+from openpype.pipeline import legacy_io
class IntegrateThumbnails(pyblish.api.InstancePlugin):
@@ -38,7 +38,7 @@ class IntegrateThumbnails(pyblish.api.InstancePlugin):
)
return
- project_name = api.Session["AVALON_PROJECT"]
+ project_name = legacy_io.Session["AVALON_PROJECT"]
anatomy = instance.context.data["anatomy"]
if "publish" not in anatomy.templates:
@@ -66,11 +66,11 @@ class IntegrateThumbnails(pyblish.api.InstancePlugin):
)
return
- io.install()
+ legacy_io.install()
thumbnail_template = anatomy.templates["publish"]["thumbnail"]
- version = io.find_one({"_id": thumb_repre["parent"]})
+ version = legacy_io.find_one({"_id": thumb_repre["parent"]})
if not version:
raise AssertionError(
"There does not exist version with id {}".format(
@@ -137,12 +137,12 @@ class IntegrateThumbnails(pyblish.api.InstancePlugin):
}
}
# Create thumbnail entity
- io.insert_one(thumbnail_entity)
+ legacy_io.insert_one(thumbnail_entity)
self.log.debug(
"Creating entity in database {}".format(str(thumbnail_entity))
)
# Set thumbnail id for version
- io.update_many(
+ legacy_io.update_many(
{"_id": version["_id"]},
{"$set": {"data.thumbnail_id": thumbnail_id}}
)
@@ -151,7 +151,7 @@ class IntegrateThumbnails(pyblish.api.InstancePlugin):
))
asset_entity = instance.data["assetEntity"]
- io.update_many(
+ legacy_io.update_many(
{"_id": asset_entity["_id"]},
{"$set": {"data.thumbnail_id": thumbnail_id}}
)
diff --git a/openpype/plugins/publish/validate_editorial_asset_name.py b/openpype/plugins/publish/validate_editorial_asset_name.py
index 4a65f3c64a..f9cdaebf0c 100644
--- a/openpype/plugins/publish/validate_editorial_asset_name.py
+++ b/openpype/plugins/publish/validate_editorial_asset_name.py
@@ -1,7 +1,9 @@
-import pyblish.api
-from avalon import io
from pprint import pformat
+import pyblish.api
+
+from openpype.pipeline import legacy_io
+
class ValidateEditorialAssetName(pyblish.api.ContextPlugin):
""" Validating if editorial's asset names are not already created in db.
@@ -24,10 +26,10 @@ class ValidateEditorialAssetName(pyblish.api.ContextPlugin):
asset_and_parents = self.get_parents(context)
self.log.debug("__ asset_and_parents: {}".format(asset_and_parents))
- if not io.Session:
- io.install()
+ if not legacy_io.Session:
+ legacy_io.install()
- db_assets = list(io.find(
+ db_assets = list(legacy_io.find(
{"type": "asset"}, {"name": 1, "data.parents": 1}))
self.log.debug("__ db_assets: {}".format(db_assets))
diff --git a/openpype/pype_commands.py b/openpype/pype_commands.py
index e0c8847040..d945a1f697 100644
--- a/openpype/pype_commands.py
+++ b/openpype/pype_commands.py
@@ -25,7 +25,7 @@ class PypeCommands:
Most of its methods are called by :mod:`cli` module.
"""
@staticmethod
- def launch_tray(debug=False):
+ def launch_tray():
PypeLogger.set_process_name("Tray")
from openpype.tools import tray
@@ -125,13 +125,14 @@ class PypeCommands:
if not any(paths):
raise RuntimeError("No publish paths specified")
- env = get_app_environments_for_context(
- os.environ["AVALON_PROJECT"],
- os.environ["AVALON_ASSET"],
- os.environ["AVALON_TASK"],
- os.environ["AVALON_APP_NAME"]
- )
- os.environ.update(env)
+ if os.getenv("AVALON_APP_NAME"):
+ env = get_app_environments_for_context(
+ os.environ["AVALON_PROJECT"],
+ os.environ["AVALON_ASSET"],
+ os.environ["AVALON_TASK"],
+ os.environ["AVALON_APP_NAME"]
+ )
+ os.environ.update(env)
pyblish.api.register_host("shell")
diff --git a/openpype/scripts/fusion_switch_shot.py b/openpype/scripts/fusion_switch_shot.py
index 3ba150902e..245fc665f0 100644
--- a/openpype/scripts/fusion_switch_shot.py
+++ b/openpype/scripts/fusion_switch_shot.py
@@ -4,7 +4,6 @@ import sys
import logging
# Pipeline imports
-from avalon import io
from openpype.hosts.fusion import api
import openpype.hosts.fusion.api.lib as fusion_lib
@@ -13,6 +12,7 @@ from openpype.lib import version_up
from openpype.pipeline import (
install_host,
registered_host,
+ legacy_io,
)
from openpype.lib.avalon_context import get_workdir_from_session
@@ -131,7 +131,7 @@ def update_frame_range(comp, representations):
"""
version_ids = [r["parent"] for r in representations]
- versions = io.find({"type": "version", "_id": {"$in": version_ids}})
+ versions = legacy_io.find({"type": "version", "_id": {"$in": version_ids}})
versions = list(versions)
start = min(v["data"]["frameStart"] for v in versions)
@@ -162,13 +162,13 @@ def switch(asset_name, filepath=None, new=True):
# Assert asset name exists
# It is better to do this here then to wait till switch_shot does it
- asset = io.find_one({"type": "asset", "name": asset_name})
+ asset = legacy_io.find_one({"type": "asset", "name": asset_name})
assert asset, "Could not find '%s' in the database" % asset_name
# Get current project
- self._project = io.find_one({
+ self._project = legacy_io.find_one({
"type": "project",
- "name": io.Session["AVALON_PROJECT"]
+ "name": legacy_io.Session["AVALON_PROJECT"]
})
# Go to comp
@@ -198,7 +198,7 @@ def switch(asset_name, filepath=None, new=True):
current_comp.Print(message)
# Build the session to switch to
- switch_to_session = io.Session.copy()
+ switch_to_session = legacy_io.Session.copy()
switch_to_session["AVALON_ASSET"] = asset['name']
if new:
diff --git a/openpype/settings/defaults/project_anatomy/imageio.json b/openpype/settings/defaults/project_anatomy/imageio.json
index 1c86509155..7a3f49452e 100644
--- a/openpype/settings/defaults/project_anatomy/imageio.json
+++ b/openpype/settings/defaults/project_anatomy/imageio.json
@@ -185,8 +185,8 @@
"linux": []
},
"renderSpace": "ACEScg",
- "viewName": "ACES 1.0 SDR-video",
- "displayName": "sRGB"
+ "displayName": "sRGB",
+ "viewName": "ACES 1.0 SDR-video"
},
"colorManagementPreference": {
"configFilePath": {
diff --git a/openpype/settings/defaults/project_settings/aftereffects.json b/openpype/settings/defaults/project_settings/aftereffects.json
index 6a9a399069..8083aa0972 100644
--- a/openpype/settings/defaults/project_settings/aftereffects.json
+++ b/openpype/settings/defaults/project_settings/aftereffects.json
@@ -1,4 +1,11 @@
{
+ "create": {
+ "RenderCreator": {
+ "defaults": [
+ "Main"
+ ]
+ }
+ },
"publish": {
"ValidateSceneSettings": {
"enabled": true,
diff --git a/openpype/settings/defaults/project_settings/ftrack.json b/openpype/settings/defaults/project_settings/ftrack.json
index deade08c0b..a846a596c2 100644
--- a/openpype/settings/defaults/project_settings/ftrack.json
+++ b/openpype/settings/defaults/project_settings/ftrack.json
@@ -349,6 +349,18 @@
"tasks": [],
"add_ftrack_family": true,
"advanced_filtering": []
+ },
+ {
+ "hosts": [
+ "photoshop"
+ ],
+ "families": [
+ "review"
+ ],
+ "task_types": [],
+ "tasks": [],
+ "add_ftrack_family": true,
+ "advanced_filtering": []
}
]
},
diff --git a/openpype/settings/defaults/project_settings/global.json b/openpype/settings/defaults/project_settings/global.json
index 58659d5d41..7317a3da1c 100644
--- a/openpype/settings/defaults/project_settings/global.json
+++ b/openpype/settings/defaults/project_settings/global.json
@@ -315,6 +315,7 @@
"workfile"
],
"hosts": [
+ "aftereffects",
"tvpaint"
],
"task_types": [],
diff --git a/openpype/settings/defaults/project_settings/standalonepublisher.json b/openpype/settings/defaults/project_settings/standalonepublisher.json
index bc91a5ea8a..e36232d3f7 100644
--- a/openpype/settings/defaults/project_settings/standalonepublisher.json
+++ b/openpype/settings/defaults/project_settings/standalonepublisher.json
@@ -141,6 +141,14 @@
"defaults": [],
"help": "Texture files with Unreal naming convention"
},
+ "create_vdb": {
+ "name": "vdb",
+ "label": "VDB Volumetric Data",
+ "family": "vdbcache",
+ "icon": "cloud",
+ "defaults": [],
+ "help": "Hierarchical data structure for the efficient storage and manipulation of sparse volumetric data discretized on three-dimensional grids"
+ },
"__dynamic_keys_labels__": {
"create_workfile": "Workfile",
"create_model": "Model",
@@ -154,7 +162,8 @@
"create_render": "Render",
"create_mov_batch": "Batch Mov",
"create_texture_batch": "Batch Texture",
- "create_simple_unreal_texture": "Simple Unreal Texture"
+ "create_simple_unreal_texture": "Simple Unreal Texture",
+ "create_vdb": "VDB Cache"
}
},
"publish": {
diff --git a/openpype/settings/defaults/system_settings/general.json b/openpype/settings/defaults/system_settings/general.json
index e1785f8709..a06947ba77 100644
--- a/openpype/settings/defaults/system_settings/general.json
+++ b/openpype/settings/defaults/system_settings/general.json
@@ -7,6 +7,7 @@
"global": []
}
},
+ "log_to_server": true,
"disk_mapping": {
"windows": [],
"linux": [],
diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_aftereffects.json b/openpype/settings/entities/schemas/projects_schema/schema_project_aftereffects.json
index 4c4cd225ab..1a3eaef540 100644
--- a/openpype/settings/entities/schemas/projects_schema/schema_project_aftereffects.json
+++ b/openpype/settings/entities/schemas/projects_schema/schema_project_aftereffects.json
@@ -5,6 +5,29 @@
"label": "AfterEffects",
"is_file": true,
"children": [
+ {
+ "type": "dict",
+ "collapsible": true,
+ "key": "create",
+ "label": "Creator plugins",
+ "children": [
+ {
+ "type": "dict",
+ "collapsible": true,
+ "key": "RenderCreator",
+ "label": "Create render",
+ "children": [
+ {
+ "type": "list",
+ "key": "defaults",
+ "label": "Default Variants",
+ "object_type": "text",
+ "docstring": "Fill default variant(s) (like 'Main' or 'Default') used in subset name creation."
+ }
+ ]
+ }
+ ]
+ },
{
"type": "dict",
"collapsible": true,
diff --git a/openpype/settings/entities/schemas/system_schema/schema_general.json b/openpype/settings/entities/schemas/system_schema/schema_general.json
index 695ab8bceb..5b6d8d5d62 100644
--- a/openpype/settings/entities/schemas/system_schema/schema_general.json
+++ b/openpype/settings/entities/schemas/system_schema/schema_general.json
@@ -39,6 +39,11 @@
{
"type": "splitter"
},
+ {
+ "type": "boolean",
+ "key": "log_to_server",
+ "label": "Log to mongo"
+ },
{
"type": "dict",
"key": "disk_mapping",
diff --git a/openpype/settings/handlers.py b/openpype/settings/handlers.py
index 2109b53b09..c99fc6080b 100644
--- a/openpype/settings/handlers.py
+++ b/openpype/settings/handlers.py
@@ -324,6 +324,7 @@ class MongoSettingsHandler(SettingsHandler):
global_general_keys = (
"openpype_path",
"admin_password",
+ "log_to_server",
"disk_mapping",
"production_version",
"staging_version"
@@ -337,7 +338,7 @@ class MongoSettingsHandler(SettingsHandler):
def __init__(self):
# Get mongo connection
from openpype.lib import OpenPypeMongoConnection
- from avalon.api import AvalonMongoDB
+ from openpype.pipeline import AvalonMongoDB
settings_collection = OpenPypeMongoConnection.get_mongo_client()
diff --git a/openpype/tools/adobe_webserver/app.py b/openpype/tools/adobe_webserver/app.py
index b79d6c6c60..3911baf7ac 100644
--- a/openpype/tools/adobe_webserver/app.py
+++ b/openpype/tools/adobe_webserver/app.py
@@ -16,7 +16,7 @@ from wsrpc_aiohttp import (
WSRPCClient
)
-from avalon import api
+from openpype.pipeline import legacy_io
log = logging.getLogger(__name__)
@@ -80,9 +80,9 @@ class WebServerTool:
loop=asyncio.get_event_loop())
await client.connect()
- project = api.Session["AVALON_PROJECT"]
- asset = api.Session["AVALON_ASSET"]
- task = api.Session["AVALON_TASK"]
+ project = legacy_io.Session["AVALON_PROJECT"]
+ asset = legacy_io.Session["AVALON_ASSET"]
+ task = legacy_io.Session["AVALON_TASK"]
log.info("Sending context change to {}-{}-{}".format(project,
asset,
task))
diff --git a/openpype/tools/context_dialog/window.py b/openpype/tools/context_dialog/window.py
index 9e030853bf..3b544bd375 100644
--- a/openpype/tools/context_dialog/window.py
+++ b/openpype/tools/context_dialog/window.py
@@ -2,9 +2,9 @@ import os
import json
from Qt import QtWidgets, QtCore, QtGui
-from avalon.api import AvalonMongoDB
from openpype import style
+from openpype.pipeline import AvalonMongoDB
from openpype.tools.utils.lib import center_window
from openpype.tools.utils.assets_widget import SingleSelectAssetsWidget
from openpype.tools.utils.constants import (
diff --git a/openpype/tools/creator/window.py b/openpype/tools/creator/window.py
index 51cc66e715..e0c329fb78 100644
--- a/openpype/tools/creator/window.py
+++ b/openpype/tools/creator/window.py
@@ -4,16 +4,14 @@ import re
from Qt import QtWidgets, QtCore
-from avalon import api, io
-
from openpype import style
from openpype.api import get_current_project_settings
from openpype.tools.utils.lib import qt_app_context
+from openpype.pipeline import legacy_io
from openpype.pipeline.create import (
SUBSET_NAME_ALLOWED_SYMBOLS,
legacy_create,
CreatorError,
- LegacyCreator,
)
from .model import CreatorsModel
@@ -220,7 +218,7 @@ class CreatorWindow(QtWidgets.QDialog):
asset_doc = None
if creator_plugin:
# Get the asset from the database which match with the name
- asset_doc = io.find_one(
+ asset_doc = legacy_io.find_one(
{"name": asset_name, "type": "asset"},
projection={"_id": 1}
)
@@ -237,9 +235,9 @@ class CreatorWindow(QtWidgets.QDialog):
self._set_valid_state(False)
return
- project_name = io.Session["AVALON_PROJECT"]
+ project_name = legacy_io.Session["AVALON_PROJECT"]
asset_id = asset_doc["_id"]
- task_name = io.Session["AVALON_TASK"]
+ task_name = legacy_io.Session["AVALON_TASK"]
# Calculate subset name with Creator plugin
subset_name = creator_plugin.get_subset_name(
@@ -271,7 +269,7 @@ class CreatorWindow(QtWidgets.QDialog):
self._subset_name_input.setText(subset_name)
# Get all subsets of the current asset
- subset_docs = io.find(
+ subset_docs = legacy_io.find(
{
"type": "subset",
"parent": asset_id
@@ -372,7 +370,7 @@ class CreatorWindow(QtWidgets.QDialog):
self.setStyleSheet(style.load_stylesheet())
def refresh(self):
- self._asset_name_input.setText(io.Session["AVALON_ASSET"])
+ self._asset_name_input.setText(legacy_io.Session["AVALON_ASSET"])
self._creators_model.reset()
@@ -385,7 +383,7 @@ class CreatorWindow(QtWidgets.QDialog):
)
current_index = None
family = None
- task_name = io.Session.get("AVALON_TASK", None)
+ task_name = legacy_io.Session.get("AVALON_TASK", None)
lowered_task_name = task_name.lower()
if task_name:
for _family, _task_names in pype_project_setting.items():
@@ -471,7 +469,7 @@ class CreatorWindow(QtWidgets.QDialog):
self._msg_timer.start()
-def show(debug=False, parent=None):
+def show(parent=None):
"""Display asset creator GUI
Arguments:
@@ -488,24 +486,6 @@ def show(debug=False, parent=None):
except (AttributeError, RuntimeError):
pass
- if debug:
- from avalon import mock
- for creator in mock.creators:
- api.register_plugin(LegacyCreator, creator)
-
- import traceback
- sys.excepthook = lambda typ, val, tb: traceback.print_last()
-
- io.install()
-
- any_project = next(
- project for project in io.projects()
- if project.get("active", True) is not False
- )
-
- api.Session["AVALON_PROJECT"] = any_project["name"]
- module.project = any_project["name"]
-
with qt_app_context():
window = CreatorWindow(parent)
window.refresh()
diff --git a/openpype/tools/launcher/window.py b/openpype/tools/launcher/window.py
index d80b3eabf0..dab6949613 100644
--- a/openpype/tools/launcher/window.py
+++ b/openpype/tools/launcher/window.py
@@ -3,10 +3,9 @@ import logging
from Qt import QtWidgets, QtCore, QtGui
-from avalon.api import AvalonMongoDB
-
from openpype import style
from openpype.api import resources
+from openpype.pipeline import AvalonMongoDB
import qtawesome
from .models import (
diff --git a/openpype/tools/libraryloader/app.py b/openpype/tools/libraryloader/app.py
index 328e16205c..7fda6bd6f9 100644
--- a/openpype/tools/libraryloader/app.py
+++ b/openpype/tools/libraryloader/app.py
@@ -2,8 +2,8 @@ import sys
from Qt import QtWidgets, QtCore, QtGui
-from avalon.api import AvalonMongoDB
from openpype import style
+from openpype.pipeline import AvalonMongoDB
from openpype.tools.utils import lib as tools_lib
from openpype.tools.loader.widgets import (
ThumbnailWidget,
diff --git a/openpype/tools/loader/__main__.py b/openpype/tools/loader/__main__.py
index 146ba7fd10..acf357aa97 100644
--- a/openpype/tools/loader/__main__.py
+++ b/openpype/tools/loader/__main__.py
@@ -19,12 +19,10 @@ def my_exception_hook(exctype, value, traceback):
if __name__ == '__main__':
- os.environ["AVALON_MONGO"] = "mongodb://localhost:27017"
os.environ["OPENPYPE_MONGO"] = "mongodb://localhost:27017"
os.environ["AVALON_DB"] = "avalon"
os.environ["AVALON_TIMEOUT"] = "1000"
os.environ["OPENPYPE_DEBUG"] = "1"
- os.environ["AVALON_CONFIG"] = "pype"
os.environ["AVALON_ASSET"] = "Jungle"
# Set the exception hook to our wrapping function
diff --git a/openpype/tools/loader/app.py b/openpype/tools/loader/app.py
index fad284d82b..bb589c199d 100644
--- a/openpype/tools/loader/app.py
+++ b/openpype/tools/loader/app.py
@@ -1,11 +1,14 @@
import sys
+import traceback
from Qt import QtWidgets, QtCore
-from avalon import api, io
from openpype import style
from openpype.lib import register_event_callback
-from openpype.pipeline import install_openpype_plugins
+from openpype.pipeline import (
+ install_openpype_plugins,
+ legacy_io,
+)
from openpype.tools.utils import (
lib,
PlaceholderLineEdit
@@ -36,14 +39,14 @@ class LoaderWindow(QtWidgets.QDialog):
def __init__(self, parent=None):
super(LoaderWindow, self).__init__(parent)
title = "Asset Loader 2.1"
- project_name = api.Session.get("AVALON_PROJECT")
+ project_name = legacy_io.Session.get("AVALON_PROJECT")
if project_name:
title += " - {}".format(project_name)
self.setWindowTitle(title)
# Groups config
- self.groups_config = lib.GroupsConfig(io)
- self.family_config_cache = lib.FamilyConfigCache(io)
+ self.groups_config = lib.GroupsConfig(legacy_io)
+ self.family_config_cache = lib.FamilyConfigCache(legacy_io)
# Enable minimize and maximize for app
window_flags = QtCore.Qt.Window
@@ -60,13 +63,13 @@ class LoaderWindow(QtWidgets.QDialog):
# Assets widget
assets_widget = MultiSelectAssetsWidget(
- io, parent=left_side_splitter
+ legacy_io, parent=left_side_splitter
)
assets_widget.set_current_asset_btn_visibility(True)
# Families widget
families_filter_view = FamilyListView(
- io, self.family_config_cache, left_side_splitter
+ legacy_io, self.family_config_cache, left_side_splitter
)
left_side_splitter.addWidget(assets_widget)
left_side_splitter.addWidget(families_filter_view)
@@ -76,7 +79,7 @@ class LoaderWindow(QtWidgets.QDialog):
# --- Middle part ---
# Subsets widget
subsets_widget = SubsetWidget(
- io,
+ legacy_io,
self.groups_config,
self.family_config_cache,
tool_name=self.tool_name,
@@ -87,8 +90,12 @@ class LoaderWindow(QtWidgets.QDialog):
thumb_ver_splitter = QtWidgets.QSplitter(main_splitter)
thumb_ver_splitter.setOrientation(QtCore.Qt.Vertical)
- thumbnail_widget = ThumbnailWidget(io, parent=thumb_ver_splitter)
- version_info_widget = VersionWidget(io, parent=thumb_ver_splitter)
+ thumbnail_widget = ThumbnailWidget(
+ legacy_io, parent=thumb_ver_splitter
+ )
+ version_info_widget = VersionWidget(
+ legacy_io, parent=thumb_ver_splitter
+ )
thumb_ver_splitter.addWidget(thumbnail_widget)
thumb_ver_splitter.addWidget(version_info_widget)
@@ -105,7 +112,7 @@ class LoaderWindow(QtWidgets.QDialog):
repres_widget = None
if sync_server_enabled:
repres_widget = RepresentationWidget(
- io, self.tool_name, parent=thumb_ver_splitter
+ legacy_io, self.tool_name, parent=thumb_ver_splitter
)
thumb_ver_splitter.addWidget(repres_widget)
@@ -259,13 +266,15 @@ class LoaderWindow(QtWidgets.QDialog):
# Refresh families config
self._families_filter_view.refresh()
# Change to context asset on context change
- self._assets_widget.select_asset_by_name(io.Session["AVALON_ASSET"])
+ self._assets_widget.select_asset_by_name(
+ legacy_io.Session["AVALON_ASSET"]
+ )
def _refresh(self):
"""Load assets from database"""
# Ensure a project is loaded
- project = io.find_one({"type": "project"}, {"type": 1})
+ project = legacy_io.find_one({"type": "project"}, {"type": 1})
assert project, "Project was not found! This is a bug"
self._assets_widget.refresh()
@@ -562,17 +571,16 @@ def show(debug=False, parent=None, use_context=False):
module.window = None
if debug:
- import traceback
sys.excepthook = lambda typ, val, tb: traceback.print_last()
- io.install()
+ legacy_io.install()
any_project = next(
- project for project in io.projects()
+ project for project in legacy_io.projects()
if project.get("active", True) is not False
)
- api.Session["AVALON_PROJECT"] = any_project["name"]
+ legacy_io.Session["AVALON_PROJECT"] = any_project["name"]
module.project = any_project["name"]
with lib.qt_app_context():
@@ -580,7 +588,7 @@ def show(debug=False, parent=None, use_context=False):
window.show()
if use_context:
- context = {"asset": api.Session["AVALON_ASSET"]}
+ context = {"asset": legacy_io.Session["AVALON_ASSET"]}
window.set_context(context, refresh=True)
else:
window.refresh()
@@ -604,10 +612,10 @@ def cli(args):
print("Entering Project: %s" % project)
- io.install()
+ legacy_io.install()
# Store settings
- api.Session["AVALON_PROJECT"] = project
+ legacy_io.Session["AVALON_PROJECT"] = project
install_openpype_plugins(project)
diff --git a/openpype/tools/loader/model.py b/openpype/tools/loader/model.py
index 6cc6fae1fb..8cb8f30013 100644
--- a/openpype/tools/loader/model.py
+++ b/openpype/tools/loader/model.py
@@ -6,8 +6,10 @@ from uuid import uuid4
from Qt import QtCore, QtGui
import qtawesome
-from avalon import schema
-from openpype.pipeline import HeroVersionType
+from openpype.pipeline import (
+ HeroVersionType,
+ schema,
+)
from openpype.style import get_default_entity_icon_color
from openpype.tools.utils.models import TreeModel, Item
diff --git a/openpype/tools/mayalookassigner/app.py b/openpype/tools/mayalookassigner/app.py
index 0e633a21e3..1b6cad77a8 100644
--- a/openpype/tools/mayalookassigner/app.py
+++ b/openpype/tools/mayalookassigner/app.py
@@ -4,8 +4,8 @@ import logging
from Qt import QtWidgets, QtCore
-from avalon import io
from openpype import style
+from openpype.pipeline import legacy_io
from openpype.tools.utils.lib import qt_app_context
from openpype.hosts.maya.api.lib import assign_look_by_version
@@ -227,9 +227,13 @@ class MayaLookAssignerWindow(QtWidgets.QWidget):
continue
# Get the latest version of this asset's look subset
- version = io.find_one({"type": "version",
- "parent": assign_look["_id"]},
- sort=[("name", -1)])
+ version = legacy_io.find_one(
+ {
+ "type": "version",
+ "parent": assign_look["_id"]
+ },
+ sort=[("name", -1)]
+ )
subset_name = assign_look["name"]
self.echo("{} Assigning {} to {}\t".format(prefix,
diff --git a/openpype/tools/mayalookassigner/commands.py b/openpype/tools/mayalookassigner/commands.py
index 8fd592d347..d41d8ca5a2 100644
--- a/openpype/tools/mayalookassigner/commands.py
+++ b/openpype/tools/mayalookassigner/commands.py
@@ -5,9 +5,8 @@ import os
from bson.objectid import ObjectId
import maya.cmds as cmds
-from avalon import io
-
from openpype.pipeline import (
+ legacy_io,
remove_container,
registered_host,
)
@@ -161,8 +160,10 @@ def create_items_from_nodes(nodes):
return asset_view_items
for _id, id_nodes in id_hashes.items():
- asset = io.find_one({"_id": ObjectId(_id)},
- projection={"name": True})
+ asset = legacy_io.find_one(
+ {"_id": ObjectId(_id)},
+ projection={"name": True}
+ )
# Skip if asset id is not found
if not asset:
diff --git a/openpype/tools/mayalookassigner/vray_proxies.py b/openpype/tools/mayalookassigner/vray_proxies.py
index c97664f3cb..3523b24bf3 100644
--- a/openpype/tools/mayalookassigner/vray_proxies.py
+++ b/openpype/tools/mayalookassigner/vray_proxies.py
@@ -11,9 +11,8 @@ from bson.objectid import ObjectId
import alembic.Abc
from maya import cmds
-from avalon import io
-
from openpype.pipeline import (
+ legacy_io,
load_container,
loaders_from_representation,
discover_loader_plugins,
@@ -158,9 +157,11 @@ def get_look_relationships(version_id):
dict: Dictionary of relations.
"""
- json_representation = io.find_one({"type": "representation",
- "parent": version_id,
- "name": "json"})
+ json_representation = legacy_io.find_one({
+ "type": "representation",
+ "parent": version_id,
+ "name": "json"
+ })
# Load relationships
shader_relation = get_representation_path(json_representation)
@@ -184,9 +185,11 @@ def load_look(version_id):
"""
# Get representations of shader file and relationships
- look_representation = io.find_one({"type": "representation",
- "parent": version_id,
- "name": "ma"})
+ look_representation = legacy_io.find_one({
+ "type": "representation",
+ "parent": version_id,
+ "name": "ma"
+ })
# See if representation is already loaded, if so reuse it.
host = registered_host()
@@ -232,15 +235,21 @@ def get_latest_version(asset_id, subset):
RuntimeError: When subset or version doesn't exist.
"""
- subset = io.find_one({"name": subset,
- "parent": ObjectId(asset_id),
- "type": "subset"})
+ subset = legacy_io.find_one({
+ "name": subset,
+ "parent": ObjectId(asset_id),
+ "type": "subset"
+ })
if not subset:
raise RuntimeError("Subset does not exist: %s" % subset)
- version = io.find_one({"type": "version",
- "parent": subset["_id"]},
- sort=[("name", -1)])
+ version = legacy_io.find_one(
+ {
+ "type": "version",
+ "parent": subset["_id"]
+ },
+ sort=[("name", -1)]
+ )
if not version:
raise RuntimeError("Version does not exist.")
diff --git a/openpype/tools/project_manager/project_manager/model.py b/openpype/tools/project_manager/project_manager/model.py
index 1c3ec089f6..871704e13c 100644
--- a/openpype/tools/project_manager/project_manager/model.py
+++ b/openpype/tools/project_manager/project_manager/model.py
@@ -7,6 +7,11 @@ from pymongo import UpdateOne, DeleteOne
from Qt import QtCore, QtGui
+from openpype.lib import (
+ CURRENT_DOC_SCHEMAS,
+ PypeLogger,
+)
+
from .constants import (
IDENTIFIER_ROLE,
ITEM_TYPE_ROLE,
@@ -18,8 +23,6 @@ from .constants import (
)
from .style import ResourceCache
-from openpype.lib import CURRENT_DOC_SCHEMAS
-
class ProjectModel(QtGui.QStandardItemModel):
"""Load possible projects to modify from MongoDB.
@@ -185,6 +188,7 @@ class HierarchyModel(QtCore.QAbstractItemModel):
for key in self.multiselection_columns
}
+ self._log = None
# TODO Reset them on project change
self._current_project = None
self._root_item = None
@@ -194,6 +198,12 @@ class HierarchyModel(QtCore.QAbstractItemModel):
self._reset_root_item()
+ @property
+ def log(self):
+ if self._log is None:
+ self._log = PypeLogger.get_logger("ProjectManagerModel")
+ return self._log
+
@property
def items_by_id(self):
return self._items_by_id
@@ -1367,6 +1377,9 @@ class HierarchyModel(QtCore.QAbstractItemModel):
to_process = collections.deque()
to_process.append(project_item)
+ created_count = 0
+ updated_count = 0
+ removed_count = 0
bulk_writes = []
while to_process:
parent = to_process.popleft()
@@ -1381,6 +1394,7 @@ class HierarchyModel(QtCore.QAbstractItemModel):
insert_list.append(item)
elif item.data(REMOVED_ROLE):
+ removed_count += 1
if item.data(HIERARCHY_CHANGE_ABLE_ROLE):
bulk_writes.append(DeleteOne(
{"_id": item.asset_id}
@@ -1394,6 +1408,7 @@ class HierarchyModel(QtCore.QAbstractItemModel):
else:
update_data = item.update_data()
if update_data:
+ updated_count += 1
bulk_writes.append(UpdateOne(
{"_id": item.asset_id},
update_data
@@ -1406,11 +1421,21 @@ class HierarchyModel(QtCore.QAbstractItemModel):
result = project_col.insert_many(new_docs)
for idx, mongo_id in enumerate(result.inserted_ids):
+ created_count += 1
insert_list[idx].mongo_id = mongo_id
+ if sum([created_count, updated_count, removed_count]) == 0:
+ self.log.info("Nothing has changed")
+ return
+
if bulk_writes:
project_col.bulk_write(bulk_writes)
+ self.log.info((
+ "Save finished."
+ " Created {} | Updated {} | Removed {} asset documents"
+ ).format(created_count, updated_count, removed_count))
+
self.refresh_project()
def copy_mime_data(self, indexes):
@@ -1819,12 +1844,16 @@ class AssetItem(BaseItem):
}
query_projection = {
"_id": 1,
- "data.tasks": 1,
- "data.visualParent": 1,
- "schema": 1,
-
"name": 1,
+ "schema": 1,
"type": 1,
+ "parent": 1,
+
+ "data.visualParent": 1,
+ "data.parents": 1,
+
+ "data.tasks": 1,
+
"data.frameStart": 1,
"data.frameEnd": 1,
"data.fps": 1,
@@ -1835,7 +1864,7 @@ class AssetItem(BaseItem):
"data.clipIn": 1,
"data.clipOut": 1,
"data.pixelAspect": 1,
- "data.tools_env": 1
+ "data.tools_env": 1,
}
def __init__(self, asset_doc):
diff --git a/openpype/tools/project_manager/project_manager/widgets.py b/openpype/tools/project_manager/project_manager/widgets.py
index 39ea833961..dc75b30bd7 100644
--- a/openpype/tools/project_manager/project_manager/widgets.py
+++ b/openpype/tools/project_manager/project_manager/widgets.py
@@ -10,11 +10,11 @@ from openpype.lib import (
PROJECT_NAME_REGEX
)
from openpype.style import load_stylesheet
+from openpype.pipeline import AvalonMongoDB
from openpype.tools.utils import (
PlaceholderLineEdit,
get_warning_pixmap
)
-from avalon.api import AvalonMongoDB
from Qt import QtWidgets, QtCore, QtGui
diff --git a/openpype/tools/project_manager/project_manager/window.py b/openpype/tools/project_manager/project_manager/window.py
index bdf32c7415..c281479d4f 100644
--- a/openpype/tools/project_manager/project_manager/window.py
+++ b/openpype/tools/project_manager/project_manager/window.py
@@ -16,6 +16,7 @@ from .style import ResourceCache
from openpype.style import load_stylesheet
from openpype.lib import is_admin_password_required
from openpype.widgets import PasswordDialog
+from openpype.pipeline import AvalonMongoDB
from openpype import resources
from openpype.api import (
@@ -23,7 +24,6 @@ from openpype.api import (
create_project_folders,
Logger
)
-from avalon.api import AvalonMongoDB
class ProjectManagerWindow(QtWidgets.QWidget):
diff --git a/openpype/tools/sceneinventory/model.py b/openpype/tools/sceneinventory/model.py
index f8fd8a911a..8d72020c98 100644
--- a/openpype/tools/sceneinventory/model.py
+++ b/openpype/tools/sceneinventory/model.py
@@ -7,8 +7,9 @@ from Qt import QtCore, QtGui
import qtawesome
from bson.objectid import ObjectId
-from avalon import io, schema
from openpype.pipeline import (
+ legacy_io,
+ schema,
HeroVersionType,
registered_host,
)
@@ -54,7 +55,7 @@ class InventoryModel(TreeModel):
if not self.sync_enabled:
return
- project_name = io.Session["AVALON_PROJECT"]
+ project_name = legacy_io.Session["AVALON_PROJECT"]
active_site = sync_server.get_active_site(project_name)
remote_site = sync_server.get_remote_site(project_name)
@@ -303,32 +304,32 @@ class InventoryModel(TreeModel):
for repre_id, group_dict in sorted(grouped.items()):
group_items = group_dict["items"]
# Get parenthood per group
- representation = io.find_one({"_id": ObjectId(repre_id)})
+ representation = legacy_io.find_one({"_id": ObjectId(repre_id)})
if not representation:
not_found["representation"].append(group_items)
not_found_ids.append(repre_id)
continue
- version = io.find_one({"_id": representation["parent"]})
+ version = legacy_io.find_one({"_id": representation["parent"]})
if not version:
not_found["version"].append(group_items)
not_found_ids.append(repre_id)
continue
elif version["type"] == "hero_version":
- _version = io.find_one({
+ _version = legacy_io.find_one({
"_id": version["version_id"]
})
version["name"] = HeroVersionType(_version["name"])
version["data"] = _version["data"]
- subset = io.find_one({"_id": version["parent"]})
+ subset = legacy_io.find_one({"_id": version["parent"]})
if not subset:
not_found["subset"].append(group_items)
not_found_ids.append(repre_id)
continue
- asset = io.find_one({"_id": subset["parent"]})
+ asset = legacy_io.find_one({"_id": subset["parent"]})
if not asset:
not_found["asset"].append(group_items)
not_found_ids.append(repre_id)
@@ -389,7 +390,7 @@ class InventoryModel(TreeModel):
# Store the highest available version so the model can know
# whether current version is currently up-to-date.
- highest_version = io.find_one({
+ highest_version = legacy_io.find_one({
"type": "version",
"parent": version["parent"]
}, sort=[("name", -1)])
diff --git a/openpype/tools/sceneinventory/switch_dialog.py b/openpype/tools/sceneinventory/switch_dialog.py
index bb3e2615ac..b2d770330f 100644
--- a/openpype/tools/sceneinventory/switch_dialog.py
+++ b/openpype/tools/sceneinventory/switch_dialog.py
@@ -4,7 +4,7 @@ from Qt import QtWidgets, QtCore
import qtawesome
from bson.objectid import ObjectId
-from avalon import io
+from openpype.pipeline import legacy_io
from openpype.pipeline.load import (
discover_loader_plugins,
switch_container,
@@ -151,7 +151,7 @@ class SwitchAssetDialog(QtWidgets.QDialog):
repre_ids.add(ObjectId(item["representation"]))
content_loaders.add(item["loader"])
- repres = list(io.find({
+ repres = list(legacy_io.find({
"type": {"$in": ["representation", "archived_representation"]},
"_id": {"$in": list(repre_ids)}
}))
@@ -179,7 +179,7 @@ class SwitchAssetDialog(QtWidgets.QDialog):
content_repres[repre_id] = repres_by_id[repre_id]
version_ids.append(repre["parent"])
- versions = io.find({
+ versions = legacy_io.find({
"type": {"$in": ["version", "hero_version"]},
"_id": {"$in": list(set(version_ids))}
})
@@ -198,7 +198,7 @@ class SwitchAssetDialog(QtWidgets.QDialog):
else:
subset_ids.append(content_versions[version_id]["parent"])
- subsets = io.find({
+ subsets = legacy_io.find({
"type": {"$in": ["subset", "archived_subset"]},
"_id": {"$in": subset_ids}
})
@@ -220,7 +220,7 @@ class SwitchAssetDialog(QtWidgets.QDialog):
asset_ids.append(subset["parent"])
content_subsets[subset_id] = subset
- assets = io.find({
+ assets = legacy_io.find({
"type": {"$in": ["asset", "archived_asset"]},
"_id": {"$in": list(asset_ids)}
})
@@ -472,7 +472,7 @@ class SwitchAssetDialog(QtWidgets.QDialog):
# Prepare asset document if asset is selected
asset_doc = None
if selected_asset:
- asset_doc = io.find_one(
+ asset_doc = legacy_io.find_one(
{"type": "asset", "name": selected_asset},
{"_id": True}
)
@@ -523,7 +523,7 @@ class SwitchAssetDialog(QtWidgets.QDialog):
def _get_current_output_repre_ids_xxx(
self, asset_doc, selected_subset, selected_repre
):
- subset_doc = io.find_one(
+ subset_doc = legacy_io.find_one(
{
"type": "subset",
"name": selected_subset,
@@ -537,7 +537,7 @@ class SwitchAssetDialog(QtWidgets.QDialog):
if not version_doc:
return []
- repre_docs = io.find(
+ repre_docs = legacy_io.find(
{
"type": "representation",
"parent": version_doc["_id"],
@@ -548,7 +548,7 @@ class SwitchAssetDialog(QtWidgets.QDialog):
return [repre_doc["_id"] for repre_doc in repre_docs]
def _get_current_output_repre_ids_xxo(self, asset_doc, selected_subset):
- subset_doc = io.find_one(
+ subset_doc = legacy_io.find_one(
{
"type": "subset",
"parent": asset_doc["_id"],
@@ -563,7 +563,7 @@ class SwitchAssetDialog(QtWidgets.QDialog):
for repre_doc in self.content_repres.values():
repre_names.add(repre_doc["name"])
- repre_docs = io.find(
+ repre_docs = legacy_io.find(
{
"type": "representation",
"parent": subset_doc["_id"],
@@ -578,7 +578,7 @@ class SwitchAssetDialog(QtWidgets.QDialog):
for subset_doc in self.content_subsets.values():
susbet_names.add(subset_doc["name"])
- subset_docs = io.find(
+ subset_docs = legacy_io.find(
{
"type": "subset",
"name": {"$in": list(susbet_names)},
@@ -587,7 +587,7 @@ class SwitchAssetDialog(QtWidgets.QDialog):
{"_id": True}
)
subset_ids = [subset_doc["_id"] for subset_doc in subset_docs]
- repre_docs = io.find(
+ repre_docs = legacy_io.find(
{
"type": "representation",
"parent": {"$in": subset_ids},
@@ -606,7 +606,7 @@ class SwitchAssetDialog(QtWidgets.QDialog):
subset_name = subset_doc["name"]
repres_by_subset_name[subset_name].add(repre_name)
- subset_docs = list(io.find(
+ subset_docs = list(legacy_io.find(
{
"type": "subset",
"parent": asset_doc["_id"],
@@ -637,7 +637,7 @@ class SwitchAssetDialog(QtWidgets.QDialog):
"parent": version_id,
"name": {"$in": list(repre_names)}
})
- repre_docs = io.find(
+ repre_docs = legacy_io.find(
{"$or": repre_or_query},
{"_id": True}
)
@@ -646,7 +646,7 @@ class SwitchAssetDialog(QtWidgets.QDialog):
def _get_current_output_repre_ids_oxx(
self, selected_subset, selected_repre
):
- subset_docs = list(io.find({
+ subset_docs = list(legacy_io.find({
"type": "subset",
"parent": {"$in": list(self.content_assets.keys())},
"name": selected_subset
@@ -657,7 +657,7 @@ class SwitchAssetDialog(QtWidgets.QDialog):
last_version["_id"]
for last_version in last_versions_by_subset_id.values()
]
- repre_docs = io.find({
+ repre_docs = legacy_io.find({
"type": "representation",
"parent": {"$in": last_version_ids},
"name": selected_repre
@@ -666,7 +666,7 @@ class SwitchAssetDialog(QtWidgets.QDialog):
return [repre_doc["_id"] for repre_doc in repre_docs]
def _get_current_output_repre_ids_oxo(self, selected_subset):
- subset_docs = list(io.find(
+ subset_docs = list(legacy_io.find(
{
"type": "subset",
"parent": {"$in": list(self.content_assets.keys())},
@@ -713,7 +713,7 @@ class SwitchAssetDialog(QtWidgets.QDialog):
"parent": last_version_id,
"name": {"$in": list(repre_names)}
})
- repre_docs = io.find(
+ repre_docs = legacy_io.find(
{
"type": "representation",
"$or": repre_or_query
@@ -724,7 +724,7 @@ class SwitchAssetDialog(QtWidgets.QDialog):
return [repre_doc["_id"] for repre_doc in repre_docs]
def _get_current_output_repre_ids_oox(self, selected_repre):
- repre_docs = io.find(
+ repre_docs = legacy_io.find(
{
"name": selected_repre,
"parent": {"$in": list(self.content_versions.keys())}
@@ -734,7 +734,7 @@ class SwitchAssetDialog(QtWidgets.QDialog):
return [repre_doc["_id"] for repre_doc in repre_docs]
def _get_asset_box_values(self):
- asset_docs = io.find(
+ asset_docs = legacy_io.find(
{"type": "asset"},
{"_id": 1, "name": 1}
)
@@ -742,7 +742,7 @@ class SwitchAssetDialog(QtWidgets.QDialog):
asset_doc["_id"]: asset_doc["name"]
for asset_doc in asset_docs
}
- subsets = io.find(
+ subsets = legacy_io.find(
{
"type": "subset",
"parent": {"$in": list(asset_names_by_id.keys())}
@@ -762,12 +762,15 @@ class SwitchAssetDialog(QtWidgets.QDialog):
def _get_subset_box_values(self):
selected_asset = self._assets_box.get_valid_value()
if selected_asset:
- asset_doc = io.find_one({"type": "asset", "name": selected_asset})
+ asset_doc = legacy_io.find_one({
+ "type": "asset",
+ "name": selected_asset
+ })
asset_ids = [asset_doc["_id"]]
else:
asset_ids = list(self.content_assets.keys())
- subsets = io.find(
+ subsets = legacy_io.find(
{
"type": "subset",
"parent": {"$in": asset_ids}
@@ -804,7 +807,7 @@ class SwitchAssetDialog(QtWidgets.QDialog):
# [ ] [ ] [?]
if not selected_asset and not selected_subset:
# Find all representations of selection's subsets
- possible_repres = list(io.find(
+ possible_repres = list(legacy_io.find(
{
"type": "representation",
"parent": {"$in": list(self.content_versions.keys())}
@@ -833,11 +836,11 @@ class SwitchAssetDialog(QtWidgets.QDialog):
# [x] [x] [?]
if selected_asset and selected_subset:
- asset_doc = io.find_one(
+ asset_doc = legacy_io.find_one(
{"type": "asset", "name": selected_asset},
{"_id": 1}
)
- subset_doc = io.find_one(
+ subset_doc = legacy_io.find_one(
{
"type": "subset",
"name": selected_subset,
@@ -848,7 +851,7 @@ class SwitchAssetDialog(QtWidgets.QDialog):
subset_id = subset_doc["_id"]
last_versions_by_subset_id = self.find_last_versions([subset_id])
version_doc = last_versions_by_subset_id.get(subset_id)
- repre_docs = io.find(
+ repre_docs = legacy_io.find(
{
"type": "representation",
"parent": version_doc["_id"]
@@ -865,7 +868,7 @@ class SwitchAssetDialog(QtWidgets.QDialog):
# [x] [ ] [?]
# If asset only is selected
if selected_asset:
- asset_doc = io.find_one(
+ asset_doc = legacy_io.find_one(
{"type": "asset", "name": selected_asset},
{"_id": 1}
)
@@ -876,7 +879,7 @@ class SwitchAssetDialog(QtWidgets.QDialog):
subset_names = set()
for subset_doc in self.content_subsets.values():
subset_names.add(subset_doc["name"])
- subset_docs = io.find(
+ subset_docs = legacy_io.find(
{
"type": "subset",
"parent": asset_doc["_id"],
@@ -900,7 +903,7 @@ class SwitchAssetDialog(QtWidgets.QDialog):
if not subset_id_by_version_id:
return list()
- repre_docs = list(io.find(
+ repre_docs = list(legacy_io.find(
{
"type": "representation",
"parent": {"$in": list(subset_id_by_version_id.keys())}
@@ -930,7 +933,7 @@ class SwitchAssetDialog(QtWidgets.QDialog):
return list(available_repres)
# [ ] [x] [?]
- subset_docs = list(io.find(
+ subset_docs = list(legacy_io.find(
{
"type": "subset",
"parent": {"$in": list(self.content_assets.keys())},
@@ -957,7 +960,7 @@ class SwitchAssetDialog(QtWidgets.QDialog):
if not subset_id_by_version_id:
return list()
- repre_docs = list(io.find(
+ repre_docs = list(legacy_io.find(
{
"type": "representation",
"parent": {"$in": list(subset_id_by_version_id.keys())}
@@ -1013,11 +1016,11 @@ class SwitchAssetDialog(QtWidgets.QDialog):
return
# [x] [ ] [?]
- asset_doc = io.find_one(
+ asset_doc = legacy_io.find_one(
{"type": "asset", "name": selected_asset},
{"_id": 1}
)
- subset_docs = io.find(
+ subset_docs = legacy_io.find(
{"type": "subset", "parent": asset_doc["_id"]},
{"name": 1}
)
@@ -1048,7 +1051,7 @@ class SwitchAssetDialog(QtWidgets.QDialog):
}}
]
last_versions_by_subset_id = dict()
- for doc in io.aggregate(_pipeline):
+ for doc in legacy_io.aggregate(_pipeline):
doc["parent"] = doc["_id"]
doc["_id"] = doc.pop("_version_id")
last_versions_by_subset_id[doc["parent"]] = doc
@@ -1076,11 +1079,11 @@ class SwitchAssetDialog(QtWidgets.QDialog):
# [x] [x] [ ]
if selected_asset is not None and selected_subset is not None:
- asset_doc = io.find_one(
+ asset_doc = legacy_io.find_one(
{"type": "asset", "name": selected_asset},
{"_id": 1}
)
- subset_doc = io.find_one(
+ subset_doc = legacy_io.find_one(
{
"type": "subset",
"parent": asset_doc["_id"],
@@ -1096,7 +1099,7 @@ class SwitchAssetDialog(QtWidgets.QDialog):
validation_state.repre_ok = False
return
- repre_docs = io.find(
+ repre_docs = legacy_io.find(
{
"type": "representation",
"parent": last_version["_id"]
@@ -1116,11 +1119,11 @@ class SwitchAssetDialog(QtWidgets.QDialog):
# [x] [ ] [ ]
if selected_asset is not None:
- asset_doc = io.find_one(
+ asset_doc = legacy_io.find_one(
{"type": "asset", "name": selected_asset},
{"_id": 1}
)
- subset_docs = list(io.find(
+ subset_docs = list(legacy_io.find(
{
"type": "subset",
"parent": asset_doc["_id"]
@@ -1142,7 +1145,7 @@ class SwitchAssetDialog(QtWidgets.QDialog):
version_id = last_version["_id"]
subset_id_by_version_id[version_id] = subset_id
- repre_docs = io.find(
+ repre_docs = legacy_io.find(
{
"type": "representation",
"parent": {"$in": list(subset_id_by_version_id.keys())}
@@ -1173,7 +1176,7 @@ class SwitchAssetDialog(QtWidgets.QDialog):
# [ ] [x] [ ]
# Subset documents
- subset_docs = io.find(
+ subset_docs = legacy_io.find(
{
"type": "subset",
"parent": {"$in": list(self.content_assets.keys())},
@@ -1194,7 +1197,7 @@ class SwitchAssetDialog(QtWidgets.QDialog):
version_id = last_version["_id"]
subset_id_by_version_id[version_id] = subset_id
- repre_docs = io.find(
+ repre_docs = legacy_io.find(
{
"type": "representation",
"parent": {"$in": list(subset_id_by_version_id.keys())}
@@ -1225,7 +1228,7 @@ class SwitchAssetDialog(QtWidgets.QDialog):
def _on_current_asset(self):
# Set initial asset as current.
- asset_name = io.Session["AVALON_ASSET"]
+ asset_name = legacy_io.Session["AVALON_ASSET"]
index = self._assets_box.findText(
asset_name, QtCore.Qt.MatchFixedString
)
@@ -1243,7 +1246,10 @@ class SwitchAssetDialog(QtWidgets.QDialog):
selected_representation = self._representations_box.get_valid_value()
if selected_asset:
- asset_doc = io.find_one({"type": "asset", "name": selected_asset})
+ asset_doc = legacy_io.find_one({
+ "type": "asset",
+ "name": selected_asset
+ })
asset_docs_by_id = {asset_doc["_id"]: asset_doc}
else:
asset_docs_by_id = self.content_assets
@@ -1262,7 +1268,7 @@ class SwitchAssetDialog(QtWidgets.QDialog):
if selected_subset:
subset_query["name"] = selected_subset
- subset_docs = list(io.find(subset_query))
+ subset_docs = list(legacy_io.find(subset_query))
subset_ids = []
subset_docs_by_parent_and_name = collections.defaultdict(dict)
for subset in subset_docs:
@@ -1272,12 +1278,12 @@ class SwitchAssetDialog(QtWidgets.QDialog):
subset_docs_by_parent_and_name[parent_id][name] = subset
# versions
- version_docs = list(io.find({
+ version_docs = list(legacy_io.find({
"type": "version",
"parent": {"$in": subset_ids}
}, sort=[("name", -1)]))
- hero_version_docs = list(io.find({
+ hero_version_docs = list(legacy_io.find({
"type": "hero_version",
"parent": {"$in": subset_ids}
}))
@@ -1297,7 +1303,7 @@ class SwitchAssetDialog(QtWidgets.QDialog):
parent_id = hero_version_doc["parent"]
hero_version_docs_by_parent_id[parent_id] = hero_version_doc
- repre_docs = io.find({
+ repre_docs = legacy_io.find({
"type": "representation",
"parent": {"$in": version_ids}
})
diff --git a/openpype/tools/sceneinventory/view.py b/openpype/tools/sceneinventory/view.py
index 2df6d00406..448e3f4e6f 100644
--- a/openpype/tools/sceneinventory/view.py
+++ b/openpype/tools/sceneinventory/view.py
@@ -6,10 +6,9 @@ from Qt import QtWidgets, QtCore
import qtawesome
from bson.objectid import ObjectId
-from avalon import io
-
from openpype import style
from openpype.pipeline import (
+ legacy_io,
HeroVersionType,
update_container,
remove_container,
@@ -84,7 +83,7 @@ class SceneInventoryView(QtWidgets.QTreeView):
if item_id not in repre_ids:
repre_ids.append(item_id)
- repre_docs = io.find(
+ repre_docs = legacy_io.find(
{
"type": "representation",
"_id": {"$in": repre_ids}
@@ -98,7 +97,7 @@ class SceneInventoryView(QtWidgets.QTreeView):
if version_id not in version_ids:
version_ids.append(version_id)
- loaded_versions = io.find({
+ loaded_versions = legacy_io.find({
"_id": {"$in": version_ids},
"type": {"$in": ["version", "hero_version"]}
})
@@ -115,7 +114,7 @@ class SceneInventoryView(QtWidgets.QTreeView):
if parent_id not in version_parents:
version_parents.append(parent_id)
- all_versions = io.find({
+ all_versions = legacy_io.find({
"type": {"$in": ["hero_version", "version"]},
"parent": {"$in": version_parents}
})
@@ -151,7 +150,7 @@ class SceneInventoryView(QtWidgets.QTreeView):
if item_id not in repre_ids:
repre_ids.append(item_id)
- repre_docs = io.find(
+ repre_docs = legacy_io.find(
{
"type": "representation",
"_id": {"$in": repre_ids}
@@ -166,7 +165,7 @@ class SceneInventoryView(QtWidgets.QTreeView):
version_id_by_repre_id[repre_doc["_id"]] = version_id
if version_id not in version_ids:
version_ids.append(version_id)
- hero_versions = io.find(
+ hero_versions = legacy_io.find(
{
"_id": {"$in": version_ids},
"type": "hero_version"
@@ -184,7 +183,7 @@ class SceneInventoryView(QtWidgets.QTreeView):
if current_version_id == hero_version_id:
version_id_by_repre_id[_repre_id] = version_id
- version_docs = io.find(
+ version_docs = legacy_io.find(
{
"_id": {"$in": list(version_ids)},
"type": "version"
@@ -367,11 +366,11 @@ class SceneInventoryView(QtWidgets.QTreeView):
repre_ids (list)
side (str): 'active_site'|'remote_site'
"""
- project_name = io.Session["AVALON_PROJECT"]
+ project_name = legacy_io.Session["AVALON_PROJECT"]
active_site = self.sync_server.get_active_site(project_name)
remote_site = self.sync_server.get_remote_site(project_name)
- repre_docs = io.find({
+ repre_docs = legacy_io.find({
"type": "representation",
"_id": {"$in": repre_ids}
})
@@ -661,12 +660,12 @@ class SceneInventoryView(QtWidgets.QTreeView):
# Get available versions for active representation
representation_id = ObjectId(active["representation"])
- representation = io.find_one({"_id": representation_id})
- version = io.find_one({
+ representation = legacy_io.find_one({"_id": representation_id})
+ version = legacy_io.find_one({
"_id": representation["parent"]
})
- versions = list(io.find(
+ versions = list(legacy_io.find(
{
"parent": version["parent"],
"type": "version"
@@ -674,7 +673,7 @@ class SceneInventoryView(QtWidgets.QTreeView):
sort=[("name", 1)]
))
- hero_version = io.find_one({
+ hero_version = legacy_io.find_one({
"parent": version["parent"],
"type": "hero_version"
})
diff --git a/openpype/tools/sceneinventory/window.py b/openpype/tools/sceneinventory/window.py
index b40fbb69e4..054c2a2daa 100644
--- a/openpype/tools/sceneinventory/window.py
+++ b/openpype/tools/sceneinventory/window.py
@@ -3,8 +3,8 @@ import sys
from Qt import QtWidgets, QtCore
import qtawesome
-from avalon import io, api
+from openpype.pipeline import legacy_io
from openpype import style
from openpype.tools.utils.delegates import VersionDelegate
from openpype.tools.utils.lib import (
@@ -72,7 +72,7 @@ class SceneInventoryWindow(QtWidgets.QDialog):
control_layout.addWidget(refresh_button)
# endregion control
- family_config_cache = FamilyConfigCache(io)
+ family_config_cache = FamilyConfigCache(legacy_io)
model = InventoryModel(family_config_cache)
proxy = FilterProxyModel()
@@ -91,7 +91,7 @@ class SceneInventoryWindow(QtWidgets.QDialog):
view.setColumnWidth(4, 100) # namespace
# apply delegates
- version_delegate = VersionDelegate(io, self)
+ version_delegate = VersionDelegate(legacy_io, self)
column = model.Columns.index("version")
view.setItemDelegateForColumn(column, version_delegate)
@@ -191,17 +191,18 @@ def show(root=None, debug=False, parent=None, items=None):
pass
if debug is True:
- io.install()
+ legacy_io.install()
if not os.environ.get("AVALON_PROJECT"):
any_project = next(
- project for project in io.projects()
+ project for project in legacy_io.projects()
if project.get("active", True) is not False
)
- api.Session["AVALON_PROJECT"] = any_project["name"]
+ project_name = any_project["name"]
else:
- api.Session["AVALON_PROJECT"] = os.environ.get("AVALON_PROJECT")
+ project_name = os.environ.get("AVALON_PROJECT")
+ legacy_io.Session["AVALON_PROJECT"] = project_name
with qt_app_context():
window = SceneInventoryWindow(parent)
diff --git a/openpype/tools/settings/settings/widgets.py b/openpype/tools/settings/settings/widgets.py
index 6db001f2f6..45c21d5685 100644
--- a/openpype/tools/settings/settings/widgets.py
+++ b/openpype/tools/settings/settings/widgets.py
@@ -1,13 +1,9 @@
-import os
import copy
import uuid
from Qt import QtWidgets, QtCore, QtGui
import qtawesome
-from avalon.mongodb import (
- AvalonMongoConnection,
- AvalonMongoDB
-)
+from openpype.pipeline import AvalonMongoDB
from openpype.style import get_objected_colors
from openpype.tools.utils.widgets import ImageButton
from openpype.tools.utils.lib import paint_image_with_color
@@ -1209,15 +1205,6 @@ class ProjectListWidget(QtWidgets.QWidget):
selected_project = index.data(PROJECT_NAME_ROLE)
break
- mongo_url = os.environ["OPENPYPE_MONGO"]
-
- # Force uninstall of whole avalon connection if url does not match
- # to current environment and set it as environment
- if mongo_url != os.environ["AVALON_MONGO"]:
- AvalonMongoConnection.uninstall(self.dbcon, force=True)
- os.environ["AVALON_MONGO"] = mongo_url
- self.dbcon = None
-
if not self.dbcon:
try:
self.dbcon = AvalonMongoDB()
diff --git a/openpype/tools/standalonepublish/app.py b/openpype/tools/standalonepublish/app.py
index 3630d92c83..1ad5cd119e 100644
--- a/openpype/tools/standalonepublish/app.py
+++ b/openpype/tools/standalonepublish/app.py
@@ -12,7 +12,7 @@ from .widgets import (
from .widgets.constants import HOST_NAME
from openpype import style
from openpype.api import resources
-from avalon.api import AvalonMongoDB
+from openpype.pipeline import AvalonMongoDB
from openpype.modules import ModulesManager
diff --git a/openpype/tools/standalonepublish/publish.py b/openpype/tools/standalonepublish/publish.py
index 582e7eccf8..e1e9edebb9 100644
--- a/openpype/tools/standalonepublish/publish.py
+++ b/openpype/tools/standalonepublish/publish.py
@@ -1,14 +1,14 @@
import os
import sys
-import openpype
import pyblish.api
+from openpype.pipeline import install_openpype_plugins
from openpype.tools.utils.host_tools import show_publish
def main(env):
# Registers pype's Global pyblish plugins
- openpype.install()
+ install_openpype_plugins()
# Register additional paths
addition_paths_str = env.get("PUBLISH_PATHS") or ""
diff --git a/openpype/tools/standalonepublish/widgets/widget_components.py b/openpype/tools/standalonepublish/widgets/widget_components.py
index 4d7f94f825..fbafc7142a 100644
--- a/openpype/tools/standalonepublish/widgets/widget_components.py
+++ b/openpype/tools/standalonepublish/widgets/widget_components.py
@@ -5,16 +5,18 @@ import random
import string
from Qt import QtWidgets, QtCore
-from . import DropDataFrame
-from .constants import HOST_NAME
-from avalon import io
+
from openpype.api import execute, Logger
+from openpype.pipeline import legacy_io
from openpype.lib import (
get_openpype_execute_args,
apply_project_environments_value
)
-log = Logger().get_logger("standalonepublisher")
+from . import DropDataFrame
+from .constants import HOST_NAME
+
+log = Logger.get_logger("standalonepublisher")
class ComponentsWidget(QtWidgets.QWidget):
@@ -152,18 +154,18 @@ def set_context(project, asset, task):
:type asset: str
'''
os.environ["AVALON_PROJECT"] = project
- io.Session["AVALON_PROJECT"] = project
+ legacy_io.Session["AVALON_PROJECT"] = project
os.environ["AVALON_ASSET"] = asset
- io.Session["AVALON_ASSET"] = asset
+ legacy_io.Session["AVALON_ASSET"] = asset
if not task:
task = ''
os.environ["AVALON_TASK"] = task
- io.Session["AVALON_TASK"] = task
+ legacy_io.Session["AVALON_TASK"] = task
- io.Session["current_dir"] = os.path.normpath(os.getcwd())
+ legacy_io.Session["current_dir"] = os.path.normpath(os.getcwd())
os.environ["AVALON_APP"] = HOST_NAME
- io.Session["AVALON_APP"] = HOST_NAME
+ legacy_io.Session["AVALON_APP"] = HOST_NAME
def cli_publish(data, publish_paths, gui=True):
@@ -171,7 +173,7 @@ def cli_publish(data, publish_paths, gui=True):
os.path.dirname(os.path.dirname(__file__)),
"publish.py"
)
- io.install()
+ legacy_io.install()
# Create hash name folder in temp
chars = "".join([random.choice(string.ascii_letters) for i in range(15)])
@@ -203,6 +205,6 @@ def cli_publish(data, publish_paths, gui=True):
log.info(f"Publish result: {result}")
- io.uninstall()
+ legacy_io.uninstall()
return False
diff --git a/openpype/tools/standalonepublish/widgets/widget_drop_frame.py b/openpype/tools/standalonepublish/widgets/widget_drop_frame.py
index c1c59d65b6..e6c7328e88 100644
--- a/openpype/tools/standalonepublish/widgets/widget_drop_frame.py
+++ b/openpype/tools/standalonepublish/widgets/widget_drop_frame.py
@@ -37,6 +37,10 @@ class DropDataFrame(QtWidgets.QFrame):
"video_file": video_extensions
}
+ sequence_types = [
+ ".bgeo", ".vdb"
+ ]
+
def __init__(self, parent):
super().__init__()
self.parent_widget = parent
@@ -176,7 +180,7 @@ class DropDataFrame(QtWidgets.QFrame):
non_collectionable_paths = []
for path in in_paths:
ext = os.path.splitext(path)[1]
- if ext in self.image_extensions:
+ if ext in self.image_extensions or ext in self.sequence_types:
collectionable_paths.append(path)
else:
non_collectionable_paths.append(path)
@@ -289,7 +293,7 @@ class DropDataFrame(QtWidgets.QFrame):
def get_file_data(self, data):
filepath = data['files'][0]
ext = data['ext'].lower()
- output = {}
+ output = {"fps": None}
file_info = None
if 'file_info' in data:
diff --git a/openpype/tools/standalonepublish/widgets/widget_family_desc.py b/openpype/tools/standalonepublish/widgets/widget_family_desc.py
index 79681615b9..2095b332bd 100644
--- a/openpype/tools/standalonepublish/widgets/widget_family_desc.py
+++ b/openpype/tools/standalonepublish/widgets/widget_family_desc.py
@@ -52,6 +52,7 @@ class FamilyDescriptionWidget(QtWidgets.QWidget):
family.setAlignment(QtCore.Qt.AlignBottom | QtCore.Qt.AlignLeft)
help = QtWidgets.QLabel("help")
+ help.setWordWrap(True)
help.setAlignment(QtCore.Qt.AlignTop | QtCore.Qt.AlignLeft)
label_layout.addWidget(family)
diff --git a/openpype/tools/texture_copy/app.py b/openpype/tools/texture_copy/app.py
index 0c3c260e51..fd8d6dc02e 100644
--- a/openpype/tools/texture_copy/app.py
+++ b/openpype/tools/texture_copy/app.py
@@ -1,14 +1,12 @@
import os
import re
import click
-from avalon import io, api
-from pprint import pprint
+
+import speedcopy
from openpype.lib import Terminal
from openpype.api import Anatomy
-
-import shutil
-import speedcopy
+from openpype.pipeline import legacy_io
t = Terminal()
@@ -20,8 +18,8 @@ texture_extensions = ['.tif', '.tiff', '.jpg', '.jpeg', '.tx', '.png', '.tga',
class TextureCopy:
def __init__(self):
- if not io.Session:
- io.install()
+ if not legacy_io.Session:
+ legacy_io.install()
def _get_textures(self, path):
textures = []
@@ -32,14 +30,14 @@ class TextureCopy:
return textures
def _get_project(self, project_name):
- project = io.find_one({
+ project = legacy_io.find_one({
'type': 'project',
'name': project_name
})
return project
def _get_asset(self, asset_name):
- asset = io.find_one({
+ asset = legacy_io.find_one({
'type': 'asset',
'name': asset_name
})
diff --git a/openpype/tools/traypublisher/window.py b/openpype/tools/traypublisher/window.py
index a550c88ead..972e89a3ae 100644
--- a/openpype/tools/traypublisher/window.py
+++ b/openpype/tools/traypublisher/window.py
@@ -8,8 +8,10 @@ publishing plugins.
from Qt import QtWidgets, QtCore
-from avalon.api import AvalonMongoDB
-from openpype.pipeline import install_host
+from openpype.pipeline import (
+ install_host,
+ AvalonMongoDB,
+)
from openpype.hosts.traypublisher import (
api as traypublisher
)
diff --git a/openpype/tools/utils/host_tools.py b/openpype/tools/utils/host_tools.py
index b0c30f6dfb..d8f4570120 100644
--- a/openpype/tools/utils/host_tools.py
+++ b/openpype/tools/utils/host_tools.py
@@ -4,9 +4,14 @@ It is possible to create `HostToolsHelper` in host implementation or
use singleton approach with global functions (using helper anyway).
"""
import os
-import avalon.api
+
import pyblish.api
-from openpype.pipeline import registered_host
+
+from openpype.pipeline import (
+ registered_host,
+ legacy_io,
+)
+
from .lib import qt_app_context
@@ -73,8 +78,8 @@ class HostToolsHelper:
if use_context:
context = {
- "asset": avalon.api.Session["AVALON_ASSET"],
- "task": avalon.api.Session["AVALON_TASK"]
+ "asset": legacy_io.Session["AVALON_ASSET"],
+ "task": legacy_io.Session["AVALON_TASK"]
}
workfiles_tool.set_context(context)
@@ -105,7 +110,7 @@ class HostToolsHelper:
use_context = False
if use_context:
- context = {"asset": avalon.api.Session["AVALON_ASSET"]}
+ context = {"asset": legacy_io.Session["AVALON_ASSET"]}
loader_tool.set_context(context, refresh=True)
else:
loader_tool.refresh()
diff --git a/openpype/tools/utils/lib.py b/openpype/tools/utils/lib.py
index 8e2044482a..20fea6600b 100644
--- a/openpype/tools/utils/lib.py
+++ b/openpype/tools/utils/lib.py
@@ -727,11 +727,11 @@ def is_sync_loader(loader):
def is_remove_site_loader(loader):
- return hasattr(loader, "remove_site_on_representation")
+ return hasattr(loader, "is_remove_site_loader")
def is_add_site_loader(loader):
- return hasattr(loader, "add_site_to_representation")
+ return hasattr(loader, "is_add_site_loader")
class WrappedCallbackItem:
diff --git a/openpype/tools/workfiles/app.py b/openpype/tools/workfiles/app.py
index 38e1911060..352847ede8 100644
--- a/openpype/tools/workfiles/app.py
+++ b/openpype/tools/workfiles/app.py
@@ -1,9 +1,10 @@
import sys
import logging
-from avalon import api
-
-from openpype.pipeline import registered_host
+from openpype.pipeline import (
+ registered_host,
+ legacy_io,
+)
from openpype.tools.utils import qt_app_context
from .window import Window
@@ -52,8 +53,8 @@ def show(root=None, debug=False, parent=None, use_context=True, save=True):
validate_host_requirements(host)
if debug:
- api.Session["AVALON_ASSET"] = "Mock"
- api.Session["AVALON_TASK"] = "Testing"
+ legacy_io.Session["AVALON_ASSET"] = "Mock"
+ legacy_io.Session["AVALON_TASK"] = "Testing"
with qt_app_context():
window = Window(parent=parent)
@@ -61,8 +62,8 @@ def show(root=None, debug=False, parent=None, use_context=True, save=True):
if use_context:
context = {
- "asset": api.Session["AVALON_ASSET"],
- "task": api.Session["AVALON_TASK"]
+ "asset": legacy_io.Session["AVALON_ASSET"],
+ "task": legacy_io.Session["AVALON_TASK"]
}
window.set_context(context)
diff --git a/openpype/tools/workfiles/files_widget.py b/openpype/tools/workfiles/files_widget.py
index bb2ded3b94..977111b71b 100644
--- a/openpype/tools/workfiles/files_widget.py
+++ b/openpype/tools/workfiles/files_widget.py
@@ -4,7 +4,6 @@ import shutil
import Qt
from Qt import QtWidgets, QtCore
-from avalon import io, api
from openpype.tools.utils import PlaceholderLineEdit
from openpype.tools.utils.delegates import PrettyTimeDelegate
@@ -18,7 +17,10 @@ from openpype.lib.avalon_context import (
update_current_task,
compute_session_changes
)
-from openpype.pipeline import registered_host
+from openpype.pipeline import (
+ registered_host,
+ legacy_io,
+)
from .model import (
WorkAreaFilesModel,
PublishFilesModel,
@@ -87,7 +89,7 @@ class FilesWidget(QtWidgets.QWidget):
self._task_type = None
# Pype's anatomy object for current project
- self.anatomy = Anatomy(io.Session["AVALON_PROJECT"])
+ self.anatomy = Anatomy(legacy_io.Session["AVALON_PROJECT"])
# Template key used to get work template from anatomy templates
self.template_key = "work"
@@ -147,7 +149,9 @@ class FilesWidget(QtWidgets.QWidget):
workarea_files_view.setColumnWidth(0, 330)
# --- Publish files view ---
- publish_files_model = PublishFilesModel(extensions, io, self.anatomy)
+ publish_files_model = PublishFilesModel(
+ extensions, legacy_io, self.anatomy
+ )
publish_proxy_model = QtCore.QSortFilterProxyModel()
publish_proxy_model.setSourceModel(publish_files_model)
@@ -380,13 +384,13 @@ class FilesWidget(QtWidgets.QWidget):
return None
if self._asset_doc is None:
- self._asset_doc = io.find_one({"_id": self._asset_id})
+ self._asset_doc = legacy_io.find_one({"_id": self._asset_id})
return self._asset_doc
def _get_session(self):
"""Return a modified session for the current asset and task"""
- session = api.Session.copy()
+ session = legacy_io.Session.copy()
self.template_key = get_workfile_template_key(
self._task_type,
session["AVALON_APP"],
@@ -405,7 +409,7 @@ class FilesWidget(QtWidgets.QWidget):
def _enter_session(self):
"""Enter the asset and task session currently selected"""
- session = api.Session.copy()
+ session = legacy_io.Session.copy()
changes = compute_session_changes(
session,
asset=self._get_asset_doc(),
@@ -595,10 +599,10 @@ class FilesWidget(QtWidgets.QWidget):
# Create extra folders
create_workdir_extra_folders(
self._workdir_path,
- api.Session["AVALON_APP"],
+ legacy_io.Session["AVALON_APP"],
self._task_type,
self._task_name,
- api.Session["AVALON_PROJECT"]
+ legacy_io.Session["AVALON_PROJECT"]
)
# Trigger after save events
emit_event(
diff --git a/openpype/tools/workfiles/save_as_dialog.py b/openpype/tools/workfiles/save_as_dialog.py
index 0a7c7821ba..3e97d6c938 100644
--- a/openpype/tools/workfiles/save_as_dialog.py
+++ b/openpype/tools/workfiles/save_as_dialog.py
@@ -5,13 +5,14 @@ import logging
from Qt import QtWidgets, QtCore
-from avalon import api, io
-
from openpype.lib import (
get_last_workfile_with_version,
get_workdir_data,
)
-from openpype.pipeline import registered_host
+from openpype.pipeline import (
+ registered_host,
+ legacy_io,
+)
from openpype.tools.utils import PlaceholderLineEdit
log = logging.getLogger(__name__)
@@ -24,7 +25,7 @@ def build_workfile_data(session):
asset_name = session["AVALON_ASSET"]
task_name = session["AVALON_TASK"]
host_name = session["AVALON_APP"]
- project_doc = io.find_one(
+ project_doc = legacy_io.find_one(
{"type": "project"},
{
"name": True,
@@ -33,7 +34,7 @@ def build_workfile_data(session):
}
)
- asset_doc = io.find_one(
+ asset_doc = legacy_io.find_one(
{
"type": "asset",
"name": asset_name
@@ -208,7 +209,7 @@ class SaveAsDialog(QtWidgets.QDialog):
if not session:
# Fallback to active session
- session = api.Session
+ session = legacy_io.Session
self.data = build_workfile_data(session)
@@ -283,7 +284,7 @@ class SaveAsDialog(QtWidgets.QDialog):
if current_filepath:
# We match the current filename against the current session
# instead of the session where the user is saving to.
- current_data = build_workfile_data(api.Session)
+ current_data = build_workfile_data(legacy_io.Session)
matcher = CommentMatcher(anatomy, template_key, current_data)
comment = matcher.parse_comment(current_filepath)
if comment:
diff --git a/openpype/tools/workfiles/window.py b/openpype/tools/workfiles/window.py
index 73e63d30b5..02a22af26c 100644
--- a/openpype/tools/workfiles/window.py
+++ b/openpype/tools/workfiles/window.py
@@ -2,14 +2,13 @@ import os
import datetime
from Qt import QtCore, QtWidgets
-from avalon import io
-
from openpype import style
from openpype.lib import (
get_workfile_doc,
create_workfile_doc,
save_workfile_data_to_doc,
)
+from openpype.pipeline import legacy_io
from openpype.tools.utils.assets_widget import SingleSelectAssetsWidget
from openpype.tools.utils.tasks_widget import TasksWidget
@@ -158,10 +157,12 @@ class Window(QtWidgets.QMainWindow):
home_page_widget = QtWidgets.QWidget(pages_widget)
home_body_widget = QtWidgets.QWidget(home_page_widget)
- assets_widget = SingleSelectAssetsWidget(io, parent=home_body_widget)
+ assets_widget = SingleSelectAssetsWidget(
+ legacy_io, parent=home_body_widget
+ )
assets_widget.set_current_asset_btn_visibility(True)
- tasks_widget = TasksWidget(io, home_body_widget)
+ tasks_widget = TasksWidget(legacy_io, home_body_widget)
files_widget = FilesWidget(home_body_widget)
side_panel = SidePanelWidget(home_body_widget)
@@ -250,7 +251,7 @@ class Window(QtWidgets.QMainWindow):
if asset_id and task_name and filepath:
filename = os.path.split(filepath)[1]
workfile_doc = get_workfile_doc(
- asset_id, task_name, filename, io
+ asset_id, task_name, filename, legacy_io
)
self.side_panel.set_context(
asset_id, task_name, filepath, workfile_doc
@@ -272,7 +273,7 @@ class Window(QtWidgets.QMainWindow):
self._create_workfile_doc(filepath, force=True)
workfile_doc = self._get_current_workfile_doc()
- save_workfile_data_to_doc(workfile_doc, data, io)
+ save_workfile_data_to_doc(workfile_doc, data, legacy_io)
def _get_current_workfile_doc(self, filepath=None):
if filepath is None:
@@ -284,7 +285,7 @@ class Window(QtWidgets.QMainWindow):
filename = os.path.split(filepath)[1]
return get_workfile_doc(
- asset_id, task_name, filename, io
+ asset_id, task_name, filename, legacy_io
)
def _create_workfile_doc(self, filepath, force=False):
@@ -295,9 +296,11 @@ class Window(QtWidgets.QMainWindow):
if not workfile_doc:
workdir, filename = os.path.split(filepath)
asset_id = self.assets_widget.get_selected_asset_id()
- asset_doc = io.find_one({"_id": asset_id})
+ asset_doc = legacy_io.find_one({"_id": asset_id})
task_name = self.tasks_widget.get_selected_task_name()
- create_workfile_doc(asset_doc, task_name, filename, workdir, io)
+ create_workfile_doc(
+ asset_doc, task_name, filename, workdir, legacy_io
+ )
def refresh(self):
# Refresh asset widget
@@ -319,7 +322,7 @@ class Window(QtWidgets.QMainWindow):
self._context_to_set, context = None, self._context_to_set
if "asset" in context:
- asset_doc = io.find_one(
+ asset_doc = legacy_io.find_one(
{
"name": context["asset"],
"type": "asset"
diff --git a/openpype/version.py b/openpype/version.py
index 9e2525e3b8..662adf28ca 100644
--- a/openpype/version.py
+++ b/openpype/version.py
@@ -1,3 +1,3 @@
# -*- coding: utf-8 -*-
"""Package declaring Pype version."""
-__version__ = "3.10.0-nightly.1"
+__version__ = "3.10.0-nightly.2"
diff --git a/openpype/widgets/popup.py b/openpype/widgets/popup.py
index e661d3d293..9fc33ccbb8 100644
--- a/openpype/widgets/popup.py
+++ b/openpype/widgets/popup.py
@@ -1,16 +1,19 @@
import sys
-import logging
import contextlib
from Qt import QtCore, QtWidgets
-log = logging.getLogger(__name__)
-
class Popup(QtWidgets.QDialog):
+ """A Popup that moves itself to bottom right of screen on show event.
- on_show = QtCore.Signal()
+ The UI contains a message label and a red highlighted button to "show"
+ or perform another custom action from this pop-up.
+
+ """
+
+ on_clicked = QtCore.Signal()
def __init__(self, parent=None, *args, **kwargs):
super(Popup, self).__init__(parent=parent, *args, **kwargs)
@@ -19,32 +22,34 @@ class Popup(QtWidgets.QDialog):
# Layout
layout = QtWidgets.QHBoxLayout(self)
layout.setContentsMargins(10, 5, 10, 10)
+
+ # Increase spacing slightly for readability
+ layout.setSpacing(10)
+
message = QtWidgets.QLabel("")
message.setStyleSheet("""
QLabel {
font-size: 12px;
}
""")
- show = QtWidgets.QPushButton("Show")
- show.setSizePolicy(QtWidgets.QSizePolicy.Maximum,
+ button = QtWidgets.QPushButton("Show")
+ button.setSizePolicy(QtWidgets.QSizePolicy.Maximum,
QtWidgets.QSizePolicy.Maximum)
- show.setStyleSheet("""QPushButton { background-color: #BB0000 }""")
+ button.setStyleSheet("""QPushButton { background-color: #BB0000 }""")
layout.addWidget(message)
- layout.addWidget(show)
+ layout.addWidget(button)
- # Size
+ # Default size
self.resize(400, 40)
- geometry = self.calculate_window_geometry()
- self.setGeometry(geometry)
self.widgets = {
"message": message,
- "show": show,
+ "button": button,
}
# Signals
- show.clicked.connect(self._on_show_clicked)
+ button.clicked.connect(self._on_clicked)
# Set default title
self.setWindowTitle("Popup")
@@ -52,7 +57,10 @@ class Popup(QtWidgets.QDialog):
def setMessage(self, message):
self.widgets['message'].setText(message)
- def _on_show_clicked(self):
+ def setButtonText(self, text):
+ self.widgets["button"].setText(text)
+
+ def _on_clicked(self):
"""Callback for when the 'show' button is clicked.
Raises the parent (if any)
@@ -63,11 +71,19 @@ class Popup(QtWidgets.QDialog):
self.close()
# Trigger the signal
- self.on_show.emit()
+ self.on_clicked.emit()
if parent:
parent.raise_()
+ def showEvent(self, event):
+
+ # Position popup based on contents on show event
+ geo = self.calculate_window_geometry()
+ self.setGeometry(geo)
+
+ return super(Popup, self).showEvent(event)
+
def calculate_window_geometry(self):
"""Respond to status changes
@@ -104,45 +120,29 @@ class Popup(QtWidgets.QDialog):
return QtCore.QRect(x, y, width, height)
-class Popup2(Popup):
+class PopupUpdateKeys(Popup):
+ """Popup with Update Keys checkbox (intended for Maya)"""
- on_show = QtCore.Signal()
+ on_clicked_state = QtCore.Signal(bool)
def __init__(self, parent=None, *args, **kwargs):
Popup.__init__(self, parent=parent, *args, **kwargs)
layout = self.layout()
- # Add toggle
+ # Insert toggle for Update keys
toggle = QtWidgets.QCheckBox("Update Keys")
layout.insertWidget(1, toggle)
self.widgets["toggle"] = toggle
+ self.on_clicked.connect(self.emit_click_with_state)
+
layout.insertStretch(1, 1)
- # Update button text
- fix = self.widgets["show"]
- fix.setText("Fix")
-
- def calculate_window_geometry(self):
- """Respond to status changes
-
- On creation, align window with screen bottom right.
-
- """
- parent_widget = self.parent()
-
- desktop = QtWidgets.QApplication.desktop()
- if parent_widget:
- screen = desktop.screenNumber(parent_widget)
- else:
- screen = desktop.screenNumber(desktop.cursor().pos())
- center_point = desktop.screenGeometry(screen).center()
-
- frame_geo = self.frameGeometry()
- frame_geo.moveCenter(center_point)
-
- return frame_geo
+ def emit_click_with_state(self):
+ """Emit the on_clicked_state signal with the toggled state"""
+ checked = self.widgets["toggle"].isChecked()
+ self.on_clicked_state.emit(checked)
@contextlib.contextmanager
diff --git a/pyproject.toml b/pyproject.toml
index 4c65ac9bda..f32e385e80 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,6 +1,6 @@
[tool.poetry]
name = "OpenPype"
-version = "3.10.0-nightly.1" # OpenPype
+version = "3.10.0-nightly.2" # OpenPype
description = "Open VFX and Animation pipeline with support."
authors = ["OpenPype Team "]
license = "MIT License"
diff --git a/repos/avalon-core b/repos/avalon-core
deleted file mode 160000
index 2fa14cea6f..0000000000
--- a/repos/avalon-core
+++ /dev/null
@@ -1 +0,0 @@
-Subproject commit 2fa14cea6f6a9d86eec70bbb96860cbe4c75c8eb
diff --git a/setup.py b/setup.py
index bf42602b52..899e9375c0 100644
--- a/setup.py
+++ b/setup.py
@@ -123,7 +123,6 @@ bin_includes = [
include_files = [
"igniter",
"openpype",
- "repos",
"schema",
"LICENSE",
"README.md"
diff --git a/start.py b/start.py
index f8a01dd9ab..38eb9e9bf4 100644
--- a/start.py
+++ b/start.py
@@ -191,6 +191,51 @@ else:
if os.getenv("OPENPYPE_HEADLESS_MODE") != "1":
os.environ.pop("OPENPYPE_HEADLESS_MODE", None)
+# Set logging verbosity level when "--verbose" is passed
+if "--verbose" in sys.argv:
+ expected_values = (
+ "Expected: notset, debug, info, warning, error, critical"
+ " or integer [0-50]."
+ )
+ idx = sys.argv.index("--verbose")
+ sys.argv.pop(idx)
+ if idx < len(sys.argv):
+ value = sys.argv.pop(idx)
+ else:
+ raise RuntimeError((
+ "Expect value after \"--verbose\" argument. {}"
+ ).format(expected_values))
+
+ log_level = None
+ low_value = value.lower()
+ if low_value.isdigit():
+ log_level = int(low_value)
+ elif low_value == "notset":
+ log_level = 0
+ elif low_value == "debug":
+ log_level = 10
+ elif low_value == "info":
+ log_level = 20
+ elif low_value == "warning":
+ log_level = 30
+ elif low_value == "error":
+ log_level = 40
+ elif low_value == "critical":
+ log_level = 50
+
+ if log_level is None:
+ raise RuntimeError((
+ "Unexpected value after \"--verbose\" argument \"{}\". {}"
+ ).format(value, expected_values))
+
+ os.environ["OPENPYPE_LOG_LEVEL"] = str(log_level)
+
+# Enable debug mode, may affect log level if log level is not defined
+if "--debug" in sys.argv:
+ sys.argv.remove("--debug")
+ os.environ["OPENPYPE_DEBUG"] = "1"
+
+
import igniter # noqa: E402
from igniter import BootstrapRepos # noqa: E402
from igniter.tools import (
@@ -320,6 +365,7 @@ def run_disk_mapping_commands(settings):
destination))
raise
+
def set_avalon_environments():
"""Set avalon specific environments.
@@ -327,28 +373,12 @@ def set_avalon_environments():
before avalon module is imported because avalon works with globals set with
environment variables.
"""
- from openpype import PACKAGE_DIR
- # Path to OpenPype's schema
- schema_path = os.path.join(
- os.path.dirname(PACKAGE_DIR),
- "schema"
- )
- # Avalon mongo URL
- avalon_mongo_url = (
- os.environ.get("AVALON_MONGO")
- or os.environ["OPENPYPE_MONGO"]
- )
avalon_db = os.environ.get("AVALON_DB") or "avalon" # for tests
os.environ.update({
- # Mongo url (use same as OpenPype has)
- "AVALON_MONGO": avalon_mongo_url,
-
- "AVALON_SCHEMA": schema_path,
# Mongo DB name where avalon docs are stored
"AVALON_DB": avalon_db,
# Name of config
- "AVALON_CONFIG": "openpype",
"AVALON_LABEL": "OpenPype"
})
@@ -838,17 +868,15 @@ def _bootstrap_from_code(use_version, use_staging):
version_path = Path(_openpype_root)
os.environ["OPENPYPE_REPOS_ROOT"] = _openpype_root
- repos = os.listdir(os.path.join(_openpype_root, "repos"))
- repos = [os.path.join(_openpype_root, "repos", repo) for repo in repos]
- # add self to python paths
- repos.insert(0, _openpype_root)
- for repo in repos:
- sys.path.insert(0, repo)
+ # add self to sys.path of current process
+ # NOTE: this seems to be duplicate of 'add_paths_from_directory'
+ sys.path.insert(0, _openpype_root)
# add venv 'site-packages' to PYTHONPATH
python_path = os.getenv("PYTHONPATH", "")
split_paths = python_path.split(os.pathsep)
- # Add repos as first in list
- split_paths = repos + split_paths
+ # add self to python paths
+ split_paths.insert(0, _openpype_root)
+
# last one should be venv site-packages
# this is slightly convoluted as we can get here from frozen code too
# in case when we are running without any version installed.
@@ -927,6 +955,16 @@ def boot():
_print(">>> run disk mapping command ...")
run_disk_mapping_commands(global_settings)
+ # Logging to server enabled/disabled
+ log_to_server = global_settings.get("log_to_server", True)
+ if log_to_server:
+ os.environ["OPENPYPE_LOG_TO_SERVER"] = "1"
+ log_to_server_msg = "ON"
+ else:
+ os.environ.pop("OPENPYPE_LOG_TO_SERVER", None)
+ log_to_server_msg = "OFF"
+ _print(f">>> Logging to server is turned {log_to_server_msg}")
+
# Get openpype path from database and set it to environment so openpype can
# find its versions there and bootstrap them.
openpype_path = get_openpype_path_from_settings(global_settings)
diff --git a/tests/lib/assert_classes.py b/tests/lib/assert_classes.py
index 7f4d8efc10..9a94f89fd0 100644
--- a/tests/lib/assert_classes.py
+++ b/tests/lib/assert_classes.py
@@ -24,13 +24,14 @@ class DBAssert:
else:
args[key] = val
+ no_of_docs = dbcon.count_documents(args)
+
+ msg = None
args.pop("type")
detail_str = " "
if args:
detail_str = " with '{}'".format(args)
- msg = None
- no_of_docs = dbcon.count_documents(args)
if expected != no_of_docs:
msg = "Not expected no of '{}'{}."\
"Expected {}, found {}".format(queried_type,
diff --git a/tests/lib/testing_classes.py b/tests/lib/testing_classes.py
index 0a9da1aca8..7dfbf6fd0d 100644
--- a/tests/lib/testing_classes.py
+++ b/tests/lib/testing_classes.py
@@ -273,8 +273,6 @@ class PublishTest(ModuleUnitTest):
)
os.environ["AVALON_SCHEMA"] = schema_path
- import openpype
- openpype.install()
os.environ["OPENPYPE_EXECUTABLE"] = sys.executable
from openpype.lib import ApplicationManager
diff --git a/tests/unit/igniter/test_bootstrap_repos.py b/tests/unit/igniter/test_bootstrap_repos.py
index 65cd5a2399..10278c4928 100644
--- a/tests/unit/igniter/test_bootstrap_repos.py
+++ b/tests/unit/igniter/test_bootstrap_repos.py
@@ -152,8 +152,6 @@ def test_install_live_repos(fix_bootstrap, printer, monkeypatch, pytestconfig):
openpype_version = fix_bootstrap.create_version_from_live_code()
sep = os.path.sep
expected_paths = [
- f"{openpype_version.path}{sep}repos{sep}avalon-core",
- f"{openpype_version.path}{sep}repos{sep}avalon-unreal-integration",
f"{openpype_version.path}"
]
printer("testing zip creation")
diff --git a/website/docs/admin_openpype_commands.md b/website/docs/admin_openpype_commands.md
index 74cb895ac9..53b4799d6e 100644
--- a/website/docs/admin_openpype_commands.md
+++ b/website/docs/admin_openpype_commands.md
@@ -24,7 +24,11 @@ openpype_console --use-version=3.0.0-foo+bar
`--list-versions [--use-staging]` - to list available versions.
-`--validate-version` to validate integrity of given version
+`--validate-version` - to validate integrity of given version
+
+`--verbose` `` - change log verbose level of OpenPype loggers
+
+`--debug` - set the debug flag, which affects logging
For more information [see here](admin_use.md#run-openpype).
@@ -47,13 +51,9 @@ For more information [see here](admin_use.md#run-openpype).
---
### `tray` arguments {#tray-arguments}
-| Argument | Description |
-| --- | --- |
-| `--debug` | print verbose information useful for debugging (works with `openpype_console`) |
-To launch Tray with debugging information:
```shell
-openpype_console tray --debug
+openpype_console tray
```
---
### `launch` arguments {#eventserver-arguments}
@@ -62,7 +62,6 @@ option to specify them.
| Argument | Description |
| --- | --- |
-| `--debug` | print debug info |
| `--ftrack-url` | URL to ftrack server (can be set with `FTRACK_SERVER`) |
| `--ftrack-user` |user name to log in to ftrack (can be set with `FTRACK_API_USER`) |
| `--ftrack-api-key` | ftrack api key (can be set with `FTRACK_API_KEY`) |
@@ -98,12 +97,16 @@ pype launch --app python --project my_project --asset my_asset --task my_task
---
### `publish` arguments {#publish-arguments}
+Run publishing based on metadata passed in json file e.g. on farm.
+
| Argument | Description |
| --- | --- |
-| `--debug` | print more verbose information |
+| `--targets` | define publishing targets (e.g. "farm") |
+| `--gui` (`-g`) | Show publishing GUI |
+| Positional argument | Path to metadata json file |
```shell
-pype publish
+openpype publish --targets farm
```
---
diff --git a/website/docs/admin_use.md b/website/docs/admin_use.md
index 178241ad19..f84905c486 100644
--- a/website/docs/admin_use.md
+++ b/website/docs/admin_use.md
@@ -69,6 +69,22 @@ stored in `checksums` file.
Add `--headless` to run OpenPype without graphical UI (useful on server or on automated tasks, etc.)
:::
+`--verbose` `` - change log verbose level of OpenPype loggers.
+
+Level value can be integer in range `0-50` or one of enum strings `"notset" (0)`, `"debug" (10)`, `"info" (20)`, `"warning" (30)`, `"error" (40)`, `"critical" (50)`. Value is stored to `OPENPYPE_LOG_LEVEL` environment variable for next processes.
+
+```shell
+openpype_console --verbose debug
+```
+
+`--debug` - set the debug flag, which affects logging
+
+Enable debug flag for OpenPype process. Change value of environment variable `OPENPYPE_DEBUG` to `"1"`. At this moment affects only OpenPype loggers. Argument `--verbose` or environment variable `OPENPYPE_LOG_LEVEL` are used in preference to affect log level.
+
+```shell
+openpype_console --debug
+```
+
### Details
When you run OpenPype from executable, few check are made:
diff --git a/website/docs/artist_hosts_photoshop.md b/website/docs/artist_hosts_photoshop.md
index a140170c49..36670054ee 100644
--- a/website/docs/artist_hosts_photoshop.md
+++ b/website/docs/artist_hosts_photoshop.md
@@ -111,3 +111,67 @@ You can switch to a previous version of the image or update to the latest.


+
+
+### New Publisher
+
+All previous screenshots came from the regular [pyblish](https://pyblish.com/) process, there is also a different UI available. This process extends existing implementation and adds new functionalities.
+
+To test this in Photoshop, the artist needs first to enable experimental `New publisher` in Settings. (Tray > Settings > Experimental tools)
+
+
+New dialog opens after clicking on `Experimental tools` button in Openpype extension menu.
+
+
+After you click on this button, this dialog will show up.
+
+
+
+You can see the first instance, called `workfileYourTaskName`. (Name depends on studio naming convention for Photoshop's workfiles.) This instance is so-called "automatic":
+it was created without instigation by the artist. You shouldn't delete this instance as it might hold necessary values for future publishing, but you can choose to skip it
+from publishing (by toggling the pill button inside of the rectangular object denoting the instance).
+
+New publisher allows publishing into different context, just click on a workfile instance, update `Variant`, `Asset` or `Task` in the form in the middle and don't forget to click on the 'Confirm' button.
+
+Similarly to the old publishing approach, you need to create instances for everything you want to publish. You will initiate by clicking on the '+' sign in the bottom left corner.
+
+
+
+In this dialog you can select the family for the published layer or group. Currently only 'image' is implemented.
+
+On right hand side you can see creator attributes:
+- `Create only for selected` - mimics `Use selected` option of regular publish
+- `Create separate instance for each selected` - if separate instance should be created for each layer if multiple selected
+
+
+
+Here you can see a newly created instance of image family. (Name depends on studio naming convention for image family.) You can disable instance from publishing in the same fashion as a workfile instance.
+You could also delete an instance by selecting it and clicking on the trashcan icon (next to the plus button on the bottom left).
+
+Buttons on the bottom right are for:
+- `Refresh publishing` - set publishing process to starting position - useful if previous publish failed, or you changed configuration of a publish
+- `Stop/pause publishing` - if you would like to pause publishing process at any time
+- `Validate` - if you would like to run only collecting and validating phases (nothing will be published yet)
+- `Publish` - standard way how to kick off full publishing process
+
+In the unfortunate case of some error during publishing, you would receive this kind of error dialog.
+
+
+
+In this case there is an issue that you are publishing two or more instances with the same subset name ('imageMaing'). If the error is recoverable by the artist, you should
+see helpful information in a `How to repair?` section or fix it automatically by clicking on a 'Wrench' button on the right if present.
+
+If you would like to ask for help admin or support, you could use any of the three buttons on bottom left:
+- `Copy report` - stash full publishing log to a clipboard
+- `Export and save report` - save log into a file for sending it via mail or any communication tool
+- `Show details` - switches into a more detailed list of published instances and plugins. Similar to the old pyblish list.
+
+If you are able to fix the workfile yourself, use the first button on the right to set the UI to initial state before publish. (Click the `Publish` button to start again.)
+
+New publishing process should be backward compatible, e.g. if you have a workfile with instances created in the previous publishing approach, they will be translated automatically and
+could be used right away.
+
+If you would create instances in a new publisher, you cannot use them in the old approach though!
+
+If you would hit on unexpected behaviour with old instances, contact support first, then you could try some steps to recover your publish. Delete instances in New publisher UI, or try `Subset manager` in the extension menu.
+Nuclear option is to purge workfile metadata in `File > File Info > Origin > Headline`. This is only for most determined daredevils though!
diff --git a/website/docs/assets/artist_photoshop_new_publisher_instance.png b/website/docs/assets/artist_photoshop_new_publisher_instance.png
new file mode 100644
index 0000000000..723a032c94
Binary files /dev/null and b/website/docs/assets/artist_photoshop_new_publisher_instance.png differ
diff --git a/website/docs/assets/artist_photoshop_new_publisher_instance_created.png b/website/docs/assets/artist_photoshop_new_publisher_instance_created.png
new file mode 100644
index 0000000000..0cf6d1d18c
Binary files /dev/null and b/website/docs/assets/artist_photoshop_new_publisher_instance_created.png differ
diff --git a/website/docs/assets/artist_photoshop_new_publisher_publish_failed.png b/website/docs/assets/artist_photoshop_new_publisher_publish_failed.png
new file mode 100644
index 0000000000..e34497b77d
Binary files /dev/null and b/website/docs/assets/artist_photoshop_new_publisher_publish_failed.png differ
diff --git a/website/docs/assets/artist_photoshop_new_publisher_workfile.png b/website/docs/assets/artist_photoshop_new_publisher_workfile.png
new file mode 100644
index 0000000000..006206519f
Binary files /dev/null and b/website/docs/assets/artist_photoshop_new_publisher_workfile.png differ
diff --git a/website/docs/assets/experimental_tools_menu.png b/website/docs/assets/experimental_tools_menu.png
new file mode 100644
index 0000000000..79fa8d3655
Binary files /dev/null and b/website/docs/assets/experimental_tools_menu.png differ
diff --git a/website/docs/assets/experimental_tools_settings.png b/website/docs/assets/experimental_tools_settings.png
new file mode 100644
index 0000000000..4d514e8a8f
Binary files /dev/null and b/website/docs/assets/experimental_tools_settings.png differ
diff --git a/website/yarn.lock b/website/yarn.lock
index e01f0c4ef2..04b9dd658b 100644
--- a/website/yarn.lock
+++ b/website/yarn.lock
@@ -2311,9 +2311,9 @@ asap@~2.0.3:
integrity sha1-5QNHYR1+aQlDIIu9r+vLwvuGbUY=
async@^2.6.2:
- version "2.6.3"
- resolved "https://registry.yarnpkg.com/async/-/async-2.6.3.tgz#d72625e2344a3656e3a3ad4fa749fa83299d82ff"
- integrity sha512-zflvls11DCy+dQWzTW2dzuilv8Z5X/pjfmZOWba6TNIVDm+2UDaJmXSOXlasHKfNBs8oo3M0aT50fDEWfKZjXg==
+ version "2.6.4"
+ resolved "https://registry.yarnpkg.com/async/-/async-2.6.4.tgz#706b7ff6084664cd7eae713f6f965433b5504221"
+ integrity sha512-mzo5dfJYwAn29PeiJ0zvwTo04zj8HDJj0Mn8TD7sno7q12prdbnasKJHhkm2c1LgrhlJ0teaea8860oxi51mGA==
dependencies:
lodash "^4.17.14"