🔀 Merge remote-tracking branch 'origin/develop' into maya_collect_instance_remove_handles_fallback_to_context

Ondrej Samohel 2023-01-16 12:13:17 +01:00
commit f473224841
No known key found for this signature in database
GPG key ID: 02376E18990A97C6
1489 changed files with 107834 additions and 25193 deletions


@ -6,6 +6,8 @@ labels: bug
assignees: ''
---
**Running version**
[ex. 3.14.1-nightly.2]
**Describe the bug**
A clear and concise description of what the bug is.

.github/workflows/milestone_assign.yml (new file, 28 lines)

@ -0,0 +1,28 @@
name: Milestone - assign to PRs
on:
pull_request_target:
types: [closed]
jobs:
run_if_release:
if: startsWith(github.base_ref, 'release/')
runs-on: ubuntu-latest
steps:
- name: 'Assign Milestone [next-minor]'
if: github.event.pull_request.milestone == null
uses: zoispag/action-assign-milestone@v1
with:
repo-token: "${{ secrets.GITHUB_TOKEN }}"
milestone: 'next-minor'
run_if_develop:
if: ${{ github.base_ref == 'develop' }}
runs-on: ubuntu-latest
steps:
- name: 'Assign Milestone [next-patch]'
if: github.event.pull_request.milestone == null
uses: zoispag/action-assign-milestone@v1
with:
repo-token: "${{ secrets.GITHUB_TOKEN }}"
milestone: 'next-patch'

.github/workflows/milestone_create.yml (new file, 62 lines)

@ -0,0 +1,62 @@
name: Milestone - create default
on:
milestone:
types: [closed, edited]
jobs:
generate-next-patch:
runs-on: ubuntu-latest
steps:
- name: 'Get Milestones'
uses: "WyriHaximus/github-action-get-milestones@master"
id: milestones
env:
GITHUB_TOKEN: "${{ secrets.GITHUB_TOKEN }}"
- run: printf "name=number::%s" $(printenv MILESTONES | jq --arg MILESTONE $(printenv MILESTONE) '.[] | select(.title == $MILESTONE) | .number')
id: querymilestone
env:
MILESTONES: ${{ steps.milestones.outputs.milestones }}
MILESTONE: "next-patch"
- name: Read output
run: |
echo "${{ steps.querymilestone.outputs.number }}"
- name: 'Create `next-patch` milestone'
if: steps.querymilestone.outputs.number == ''
id: createmilestone
uses: "WyriHaximus/github-action-create-milestone@v1"
with:
title: 'next-patch'
env:
GITHUB_TOKEN: "${{ secrets.GITHUB_TOKEN }}"
generate-next-minor:
runs-on: ubuntu-latest
steps:
- name: 'Get Milestones'
uses: "WyriHaximus/github-action-get-milestones@master"
id: milestones
env:
GITHUB_TOKEN: "${{ secrets.GITHUB_TOKEN }}"
- run: printf "name=number::%s" $(printenv MILESTONES | jq --arg MILESTONE $(printenv MILESTONE) '.[] | select(.title == $MILESTONE) | .number')
id: querymilestone
env:
MILESTONES: ${{ steps.milestones.outputs.milestones }}
MILESTONE: "next-minor"
- name: Read output
run: |
echo "${{ steps.querymilestone.outputs.number }}"
- name: 'Create `next-minor` milestone'
if: steps.querymilestone.outputs.number == ''
id: createmilestone
uses: "WyriHaximus/github-action-create-milestone@v1"
with:
title: 'next-minor'
env:
GITHUB_TOKEN: "${{ secrets.GITHUB_TOKEN }}"


@ -37,27 +37,27 @@ jobs:
echo ::set-output name=next_tag::$RESULT
- name: "✏️ Generate full changelog"
if: steps.version_type.outputs.type != 'skip'
id: generate-full-changelog
uses: heinrichreimer/github-changelog-generator-action@v2.2
with:
token: ${{ secrets.ADMIN_TOKEN }}
addSections: '{"documentation":{"prefix":"### 📖 Documentation","labels":["type: documentation"]},"tests":{"prefix":"### ✅ Testing","labels":["tests"]},"feature":{"prefix":"**🆕 New features**", "labels":["type: feature"]},"breaking":{"prefix":"**💥 Breaking**", "labels":["breaking"]},"enhancements":{"prefix":"**🚀 Enhancements**", "labels":["type: enhancement"]},"bugs":{"prefix":"**🐛 Bug fixes**", "labels":["type: bug"]},"deprecated":{"prefix":"**⚠️ Deprecations**", "labels":["depreciated"]}, "refactor":{"prefix":"**🔀 Refactored code**", "labels":["refactor"]}}'
issues: false
issuesWoLabels: false
sinceTag: "3.0.0"
maxIssues: 100
pullRequests: true
prWoLabels: false
author: false
unreleased: true
compareLink: true
stripGeneratorNotice: true
verbose: true
unreleasedLabel: ${{ steps.version.outputs.next_tag }}
excludeTagsRegex: "CI/.+"
releaseBranch: "main"
# - name: "✏️ Generate full changelog"
# if: steps.version_type.outputs.type != 'skip'
# id: generate-full-changelog
# uses: heinrichreimer/github-changelog-generator-action@v2.3
# with:
# token: ${{ secrets.ADMIN_TOKEN }}
# addSections: '{"documentation":{"prefix":"### 📖 Documentation","labels":["type: documentation"]},"tests":{"prefix":"### ✅ Testing","labels":["tests"]},"feature":{"prefix":"**🆕 New features**", "labels":["type: feature"]},"breaking":{"prefix":"**💥 Breaking**", "labels":["breaking"]},"enhancements":{"prefix":"**🚀 Enhancements**", "labels":["type: enhancement"]},"bugs":{"prefix":"**🐛 Bug fixes**", "labels":["type: bug"]},"deprecated":{"prefix":"**⚠️ Deprecations**", "labels":["depreciated"]}, "refactor":{"prefix":"**🔀 Refactored code**", "labels":["refactor"]}}'
# issues: false
# issuesWoLabels: false
# sinceTag: "3.12.0"
# maxIssues: 100
# pullRequests: true
# prWoLabels: false
# author: false
# unreleased: true
# compareLink: true
# stripGeneratorNotice: true
# verbose: true
# unreleasedLabel: ${{ steps.version.outputs.next_tag }}
# excludeTagsRegex: "CI/.+"
# releaseBranch: "main"
- name: "🖨️ Print changelog to console"
if: steps.version_type.outputs.type != 'skip'
@ -85,11 +85,11 @@ jobs:
tags: true
unprotect_reviews: true
- name: 🔨 Merge main back to develop
uses: everlytic/branch-merge@1.1.0
if: steps.version_type.outputs.type != 'skip'
with:
github_token: ${{ secrets.ADMIN_TOKEN }}
source_ref: 'main'
target_branch: 'develop'
commit_message_template: '[Automated] Merged {source_ref} into {target_branch}'


@ -2,7 +2,7 @@ name: Stable Release
on:
release:
types:
- prereleased
jobs:
@ -13,7 +13,7 @@ jobs:
steps:
- name: 🚛 Checkout Code
uses: actions/checkout@v2
with:
fetch-depth: 0
- name: Set up Python
@ -33,27 +33,27 @@ jobs:
echo ::set-output name=last_release::$LASTRELEASE
echo ::set-output name=release_tag::$RESULT
- name: "✏️ Generate full changelog"
if: steps.version.outputs.release_tag != 'skip'
id: generate-full-changelog
uses: heinrichreimer/github-changelog-generator-action@v2.2
with:
token: ${{ secrets.ADMIN_TOKEN }}
addSections: '{"documentation":{"prefix":"### 📖 Documentation","labels":["type: documentation"]},"tests":{"prefix":"### ✅ Testing","labels":["tests"]},"feature":{"prefix":"**🆕 New features**", "labels":["type: feature"]},"breaking":{"prefix":"**💥 Breaking**", "labels":["breaking"]},"enhancements":{"prefix":"**🚀 Enhancements**", "labels":["type: enhancement"]},"bugs":{"prefix":"**🐛 Bug fixes**", "labels":["type: bug"]},"deprecated":{"prefix":"**⚠️ Deprecations**", "labels":["depreciated"]}, "refactor":{"prefix":"**🔀 Refactored code**", "labels":["refactor"]}}'
issues: false
issuesWoLabels: false
sinceTag: "3.0.0"
maxIssues: 100
pullRequests: true
prWoLabels: false
author: false
unreleased: true
compareLink: true
stripGeneratorNotice: true
verbose: true
futureRelease: ${{ steps.version.outputs.release_tag }}
excludeTagsRegex: "CI/.+"
releaseBranch: "main"
# - name: "✏️ Generate full changelog"
# if: steps.version.outputs.release_tag != 'skip'
# id: generate-full-changelog
# uses: heinrichreimer/github-changelog-generator-action@v2.3
# with:
# token: ${{ secrets.ADMIN_TOKEN }}
# addSections: '{"documentation":{"prefix":"### 📖 Documentation","labels":["type: documentation"]},"tests":{"prefix":"### ✅ Testing","labels":["tests"]},"feature":{"prefix":"**🆕 New features**", "labels":["type: feature"]},"breaking":{"prefix":"**💥 Breaking**", "labels":["breaking"]},"enhancements":{"prefix":"**🚀 Enhancements**", "labels":["type: enhancement"]},"bugs":{"prefix":"**🐛 Bug fixes**", "labels":["type: bug"]},"deprecated":{"prefix":"**⚠️ Deprecations**", "labels":["depreciated"]}, "refactor":{"prefix":"**🔀 Refactored code**", "labels":["refactor"]}}'
# issues: false
# issuesWoLabels: false
# sinceTag: "3.12.0"
# maxIssues: 100
# pullRequests: true
# prWoLabels: false
# author: false
# unreleased: true
# compareLink: true
# stripGeneratorNotice: true
# verbose: true
# futureRelease: ${{ steps.version.outputs.release_tag }}
# excludeTagsRegex: "CI/.+"
# releaseBranch: "main"
- name: 💾 Commit and Tag
id: git_commit
@ -73,8 +73,8 @@ jobs:
token: ${{ secrets.ADMIN_TOKEN }}
branch: main
tags: true
unprotect_reviews: true
- name: "✏️ Generate last changelog"
if: steps.version.outputs.release_tag != 'skip'
id: generate-last-changelog
@ -114,11 +114,11 @@ jobs:
with:
tag: "${{ steps.version.outputs.current_version }}"
- name: 🔁 Merge main back to develop
if: steps.version.outputs.release_tag != 'skip'
uses: everlytic/branch-merge@1.1.0
with:
github_token: ${{ secrets.ADMIN_TOKEN }}
source_ref: 'main'
target_branch: 'develop'
commit_message_template: '[Automated] Merged release {source_ref} into {target_branch}'

.gitignore (8 lines changed)

@ -102,5 +102,13 @@ website/.docusaurus
.poetry/
.python-version
.editorconfig
.pre-commit-config.yaml
mypy.ini
tools/run_eventserver.*
# Developer tools
tools/dev_*
.github_changelog_generator

.gitmodules (new file, 7 lines)

@ -0,0 +1,7 @@
[submodule "tools/modules/powershell/BurntToast"]
path = tools/modules/powershell/BurntToast
url = https://github.com/Windos/BurntToast.git
[submodule "tools/modules/powershell/PSWriteColor"]
path = tools/modules/powershell/PSWriteColor
url = https://github.com/EvotecIT/PSWriteColor.git

File diff suppressed because it is too large

HISTORY.md (2176 lines changed)

File diff suppressed because it is too large


@ -41,7 +41,7 @@ It can be built and run on all common platforms. We develop and test on the foll
- **Linux**
- **Ubuntu** 20.04 LTS
- **Centos** 7
- **Mac OSX**
- **10.15** Catalina
- **11.1** Big Sur (using Rosetta2)
@ -287,6 +287,14 @@ To run tests, execute `.\tools\run_tests(.ps1|.sh)`.
**Note that it needs an existing virtual environment.**
Developer tools
---------------
If you wish to add your own tools to the `.\tools` folder without git tracking them, you can do so by naming them with the `dev_*` prefix (example: `dev_clear_pyc(.ps1|.sh)`).
## Contributors ✨
Thanks goes to these wonderful people ([emoji key](https://allcontributors.org/docs/en/emoji-key)):


@ -0,0 +1,18 @@
Addon distribution tool
------------------------
Code in this folder is the backend portion of the addon distribution logic for the v4 server.
Each host and module will be a separate addon in the future. Each v4 server could run a different set of addons.
As a first step, the client (running on an artist machine) asks the v4 server for the list of enabled addons.
(It expects a list of JSON documents matching the `addon_distribution.py:AddonInfo` object.)
Next it checks whether each enabled addon version is present in the local folder. If a version of
an addon is missing, the client uses the information in the addon to download a zip file
(from http / a shared local disk / git) and unzip it.
A required part of addon distribution is the sharing of dependencies (Python libraries, utilities), which is not part of this folder.
The location of this folder might change in the future, as a client will need to add it to `sys.path` reliably.
This code needs to be as independent of OpenPype code as possible!
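A minimal usage sketch of this flow, assuming the `AddonDownloader` factory and the `check_addons` entry point defined in `addon_distribution.py` below; the server endpoint and local folder are placeholder values:

from common.openpype_common.distribution.addon_distribution import (
    AddonDownloader,
    OSAddonDownloader,
    HTTPAddonDownloader,
    check_addons,
)
from common.openpype_common.distribution.addon_info import UrlType

# register one downloader per source type the server may return
factory = AddonDownloader()
factory.register_format(UrlType.FILESYSTEM, OSAddonDownloader)
factory.register_format(UrlType.HTTP, HTTPAddonDownloader)

# compare enabled addons on the server with the local folder and download
# and unzip missing versions; raises RuntimeError if any addon fails
check_addons("https://v4-server.example/api/addons", "/local/addons", factory)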


@ -0,0 +1,208 @@
import os
from enum import Enum
from abc import abstractmethod
import attr
import logging
import requests
import platform
import shutil
from .file_handler import RemoteFileHandler
from .addon_info import AddonInfo
class UpdateState(Enum):
EXISTS = "exists"
UPDATED = "updated"
FAILED = "failed"
class AddonDownloader:
log = logging.getLogger(__name__)
def __init__(self):
self._downloaders = {}
def register_format(self, downloader_type, downloader):
self._downloaders[downloader_type.value] = downloader
def get_downloader(self, downloader_type):
downloader = self._downloaders.get(downloader_type)
if not downloader:
raise ValueError(f"{downloader_type} not implemented")
return downloader()
@classmethod
@abstractmethod
def download(cls, source, destination):
"""Returns url to downloaded addon zip file.
Args:
source (dict): {type:"http", "url":"https://} ...}
destination (str): local folder to unzip
Returns:
(str) local path to addon zip file
"""
pass
@classmethod
def check_hash(cls, addon_path, addon_hash):
"""Compares 'hash' of downloaded 'addon_url' file.
Args:
addon_path (str): local path to addon zip file
addon_hash (str): sha256 hash of zip file
Raises:
ValueError if hashes don't match
"""
if not os.path.exists(addon_path):
raise ValueError(f"{addon_path} doesn't exist.")
if not RemoteFileHandler.check_integrity(addon_path,
addon_hash,
hash_type="sha256"):
raise ValueError(f"{addon_path} doesn't match expected hash.")
@classmethod
def unzip(cls, addon_zip_path, destination):
"""Unzips local 'addon_zip_path' to 'destination'.
Args:
addon_zip_path (str): local path to addon zip file
destination (str): local folder to unzip
"""
RemoteFileHandler.unzip(addon_zip_path, destination)
os.remove(addon_zip_path)
@classmethod
def remove(cls, addon_url):
pass
class OSAddonDownloader(AddonDownloader):
@classmethod
def download(cls, source, destination):
# OS doesn't need to download, unzip directly
addon_url = source["path"].get(platform.system().lower())
if not os.path.exists(addon_url):
raise ValueError("{} is not accessible".format(addon_url))
return addon_url
class HTTPAddonDownloader(AddonDownloader):
CHUNK_SIZE = 100000
@classmethod
def download(cls, source, destination):
source_url = source["url"]
cls.log.debug(f"Downloading {source_url} to {destination}")
file_name = os.path.basename(destination)
_, ext = os.path.splitext(file_name)
if (ext.replace(".", '') not
in set(RemoteFileHandler.IMPLEMENTED_ZIP_FORMATS)):
file_name += ".zip"
RemoteFileHandler.download_url(source_url,
destination,
filename=file_name)
return os.path.join(destination, file_name)
def get_addons_info(server_endpoint):
"""Returns list of addon information from Server"""
# TODO temp
# addon_info = AddonInfo(
# **{"name": "openpype_slack",
# "version": "1.0.0",
# "addon_url": "c:/projects/openpype_slack_1.0.0.zip",
# "type": UrlType.FILESYSTEM,
# "hash": "4be25eb6215e91e5894d3c5475aeb1e379d081d3f5b43b4ee15b0891cf5f5658"}) # noqa
#
# http_addon = AddonInfo(
# **{"name": "openpype_slack",
# "version": "1.0.0",
# "addon_url": "https://drive.google.com/file/d/1TcuV8c2OV8CcbPeWi7lxOdqWsEqQNPYy/view?usp=sharing", # noqa
# "type": UrlType.HTTP,
# "hash": "4be25eb6215e91e5894d3c5475aeb1e379d081d3f5b43b4ee15b0891cf5f5658"}) # noqa
response = requests.get(server_endpoint)
if not response.ok:
raise Exception(response.text)
addons_info = []
for addon in response.json():
addons_info.append(AddonInfo(**addon))
return addons_info
def update_addon_state(addon_infos, destination_folder, factory,
log=None):
"""Loops through all 'addon_infos', compares local version, unzips.
Loops through server provided list of dictionaries with information about
available addons. Checks if each addon is already present and deployed.
If it isn't, the addon zip gets downloaded and unzipped into 'destination_folder'.
Args:
addon_infos (list of AddonInfo)
destination_folder (str): local path
factory (AddonDownloader): factory to get appropriate downloader per
addon type
log (logging.Logger)
Returns:
(dict): {"addon_full_name": UpdateState.value
(eg. "exists"|"updated"|"failed")
"""
if not log:
log = logging.getLogger(__name__)
download_states = {}
for addon in addon_infos:
full_name = "{}_{}".format(addon.name, addon.version)
addon_dest = os.path.join(destination_folder, full_name)
if os.path.isdir(addon_dest):
log.debug(f"Addon version folder {addon_dest} already exists.")
download_states[full_name] = UpdateState.EXISTS.value
continue
for source in addon.sources:
download_states[full_name] = UpdateState.FAILED.value
try:
downloader = factory.get_downloader(source.type)
zip_file_path = downloader.download(attr.asdict(source),
addon_dest)
downloader.check_hash(zip_file_path, addon.hash)
downloader.unzip(zip_file_path, addon_dest)
download_states[full_name] = UpdateState.UPDATED.value
break
except Exception:
log.warning(f"Error happened during updating {addon.name}",
exc_info=True)
if os.path.isdir(addon_dest):
log.debug(f"Cleaning {addon_dest}")
shutil.rmtree(addon_dest)
return download_states
def check_addons(server_endpoint, addon_folder, downloaders):
"""Main entry point to compare existing addons with those on server.
Args:
server_endpoint (str): url to v4 server endpoint
addon_folder (str): local dir path for addons
downloaders (AddonDownloader): factory of downloaders
Raises:
(RuntimeError) if any addon failed to update
"""
addons_info = get_addons_info(server_endpoint)
result = update_addon_state(addons_info,
addon_folder,
downloaders)
if UpdateState.FAILED.value in result.values():
raise RuntimeError(f"Unable to update some addons {result}")
def cli(*args):
raise NotImplementedError


@ -0,0 +1,80 @@
import attr
from enum import Enum
class UrlType(Enum):
HTTP = "http"
GIT = "git"
FILESYSTEM = "filesystem"
@attr.s
class MultiPlatformPath(object):
windows = attr.ib(default=None)
linux = attr.ib(default=None)
darwin = attr.ib(default=None)
@attr.s
class AddonSource(object):
type = attr.ib()
@attr.s
class LocalAddonSource(AddonSource):
path = attr.ib(default=attr.Factory(MultiPlatformPath))
@attr.s
class WebAddonSource(AddonSource):
url = attr.ib(default=None)
@attr.s
class VersionData(object):
version_data = attr.ib(default=None)
@attr.s
class AddonInfo(object):
"""Object matching json payload from Server"""
name = attr.ib()
version = attr.ib()
title = attr.ib(default=None)
sources = attr.ib(default=attr.Factory(dict))
hash = attr.ib(default=None)
description = attr.ib(default=None)
license = attr.ib(default=None)
authors = attr.ib(default=None)
@classmethod
def from_dict(cls, data):
sources = []
production_version = data.get("productionVersion")
if not production_version:
return
# server payload contains info about all versions
# active addon must have 'productionVersion' and matching version info
version_data = data.get("versions", {})[production_version]
for source in version_data.get("clientSourceInfo", []):
if source.get("type") == UrlType.FILESYSTEM.value:
source_addon = LocalAddonSource(type=source["type"],
path=source["path"])
if source.get("type") == UrlType.HTTP.value:
source_addon = WebAddonSource(type=source["type"],
url=source["url"])
sources.append(source_addon)
return cls(name=data.get("name"),
version=production_version,
sources=sources,
hash=data.get("hash"),
description=data.get("description"),
title=data.get("title"),
license=data.get("license"),
authors=data.get("authors"))


@ -21,7 +21,7 @@ class RemoteFileHandler:
'tar.gz', 'tar.xz', 'tar.bz2']
@staticmethod
def calculate_md5(fpath, chunk_size):
def calculate_md5(fpath, chunk_size=10000):
md5 = hashlib.md5()
with open(fpath, 'rb') as f:
for chunk in iter(lambda: f.read(chunk_size), b''):
@ -33,17 +33,45 @@ class RemoteFileHandler:
return md5 == RemoteFileHandler.calculate_md5(fpath, **kwargs)
@staticmethod
def check_integrity(fpath, md5=None):
def calculate_sha256(fpath):
"""Calculate sha256 for content of the file.
Args:
fpath (str): Path to file.
Returns:
str: hex encoded sha256
"""
h = hashlib.sha256()
b = bytearray(128 * 1024)
mv = memoryview(b)
with open(fpath, 'rb', buffering=0) as f:
for n in iter(lambda: f.readinto(mv), 0):
h.update(mv[:n])
return h.hexdigest()
@staticmethod
def check_sha256(fpath, sha256, **kwargs):
return sha256 == RemoteFileHandler.calculate_sha256(fpath, **kwargs)
@staticmethod
def check_integrity(fpath, hash_value=None, hash_type=None):
if not os.path.isfile(fpath):
return False
if md5 is None:
if hash_value is None:
return True
return RemoteFileHandler.check_md5(fpath, md5)
if not hash_type:
raise ValueError("Provide hash type, md5 or sha256")
if hash_type == 'md5':
return RemoteFileHandler.check_md5(fpath, hash_value)
if hash_type == "sha256":
return RemoteFileHandler.check_sha256(fpath, hash_value)
@staticmethod
def download_url(
url, root, filename=None,
md5=None, max_redirect_hops=3
sha256=None, max_redirect_hops=3
):
"""Download a file from a url and place it in root.
Args:
@ -51,7 +79,7 @@ class RemoteFileHandler:
root (str): Directory to place downloaded file in
filename (str, optional): Name to save the file under.
If None, use the basename of the URL
md5 (str, optional): MD5 checksum of the download.
sha256 (str, optional): sha256 checksum of the download.
If None, do not check
max_redirect_hops (int, optional): Maximum number of redirect
hops allowed
@ -64,7 +92,8 @@ class RemoteFileHandler:
os.makedirs(root, exist_ok=True)
# check if file is already present locally
if RemoteFileHandler.check_integrity(fpath, md5):
if RemoteFileHandler.check_integrity(fpath,
sha256, hash_type="sha256"):
print('Using downloaded and verified file: ' + fpath)
return
@ -76,7 +105,7 @@ class RemoteFileHandler:
file_id = RemoteFileHandler._get_google_drive_file_id(url)
if file_id is not None:
return RemoteFileHandler.download_file_from_google_drive(
file_id, root, filename, md5)
file_id, root, filename, sha256)
# download the file
try:
@ -92,20 +121,21 @@ class RemoteFileHandler:
raise e
# check integrity of downloaded file
if not RemoteFileHandler.check_integrity(fpath, md5):
if not RemoteFileHandler.check_integrity(fpath,
sha256, hash_type="sha256"):
raise RuntimeError("File not found or corrupted.")
@staticmethod
def download_file_from_google_drive(file_id, root,
filename=None,
md5=None):
sha256=None):
"""Download a Google Drive file from and place it in root.
Args:
file_id (str): id of file to be downloaded
root (str): Directory to place downloaded file in
filename (str, optional): Name to save the file under.
If None, use the id of the file.
md5 (str, optional): MD5 checksum of the download.
sha256 (str, optional): sha256 checksum of the download.
If None, do not check
"""
# Based on https://stackoverflow.com/questions/38511444/python-download-files-from-google-drive-using-url # noqa
@ -119,8 +149,8 @@ class RemoteFileHandler:
os.makedirs(root, exist_ok=True)
if os.path.isfile(fpath) and RemoteFileHandler.check_integrity(fpath,
md5):
if os.path.isfile(fpath) and RemoteFileHandler.check_integrity(
fpath, sha256, hash_type="sha256"):
print('Using downloaded and verified file: ' + fpath)
else:
session = requests.Session()


@ -0,0 +1,167 @@
import pytest
import attr
import tempfile
from common.openpype_common.distribution.addon_distribution import (
AddonDownloader,
OSAddonDownloader,
HTTPAddonDownloader,
AddonInfo,
update_addon_state,
UpdateState
)
from common.openpype_common.distribution.addon_info import UrlType
@pytest.fixture
def addon_downloader():
addon_downloader = AddonDownloader()
addon_downloader.register_format(UrlType.FILESYSTEM, OSAddonDownloader)
addon_downloader.register_format(UrlType.HTTP, HTTPAddonDownloader)
yield addon_downloader
@pytest.fixture
def http_downloader(addon_downloader):
yield addon_downloader.get_downloader(UrlType.HTTP.value)
@pytest.fixture
def temp_folder():
yield tempfile.mkdtemp()
@pytest.fixture
def sample_addon_info():
addon_info = {
"versions": {
"1.0.0": {
"clientPyproject": {
"tool": {
"poetry": {
"dependencies": {
"nxtools": "^1.6",
"orjson": "^3.6.7",
"typer": "^0.4.1",
"email-validator": "^1.1.3",
"python": "^3.10",
"fastapi": "^0.73.0"
}
}
}
},
"hasSettings": True,
"clientSourceInfo": [
{
"type": "http",
"url": "https://drive.google.com/file/d/1TcuV8c2OV8CcbPeWi7lxOdqWsEqQNPYy/view?usp=sharing" # noqa
},
{
"type": "filesystem",
"path": {
"windows": ["P:/sources/some_file.zip",
"W:/sources/some_file.zip"], # noqa
"linux": ["/mnt/srv/sources/some_file.zip"],
"darwin": ["/Volumes/srv/sources/some_file.zip"]
}
}
],
"frontendScopes": {
"project": {
"sidebar": "hierarchy"
}
}
}
},
"description": "",
"title": "Slack addon",
"name": "openpype_slack",
"productionVersion": "1.0.0",
"hash": "4be25eb6215e91e5894d3c5475aeb1e379d081d3f5b43b4ee15b0891cf5f5658" # noqa
}
yield addon_info
def test_register(printer):
addon_downloader = AddonDownloader()
assert len(addon_downloader._downloaders) == 0, "Contains registered"
addon_downloader.register_format(UrlType.FILESYSTEM, OSAddonDownloader)
assert len(addon_downloader._downloaders) == 1, "Should contain one"
def test_get_downloader(printer, addon_downloader):
assert addon_downloader.get_downloader(UrlType.FILESYSTEM.value), "Should find" # noqa
with pytest.raises(ValueError):
addon_downloader.get_downloader("unknown"), "Shouldn't find"
def test_addon_info(printer, sample_addon_info):
"""Tests parsing of expected payload from v4 server into AadonInfo."""
valid_minimum = {
"name": "openpype_slack",
"productionVersion": "1.0.0",
"versions": {
"1.0.0": {
"clientSourceInfo": [
{
"type": "filesystem",
"path": {
"windows": [
"P:/sources/some_file.zip",
"W:/sources/some_file.zip"],
"linux": [
"/mnt/srv/sources/some_file.zip"],
"darwin": [
"/Volumes/srv/sources/some_file.zip"] # noqa
}
}
]
}
}
}
assert AddonInfo.from_dict(valid_minimum), "Missing required fields"
valid_minimum["versions"].pop("1.0.0")
with pytest.raises(KeyError):
assert not AddonInfo.from_dict(valid_minimum), "Must fail without version data" # noqa
valid_minimum.pop("productionVersion")
assert not AddonInfo.from_dict(
valid_minimum), "none if not productionVersion" # noqa
addon = AddonInfo.from_dict(sample_addon_info)
assert addon, "Should be created"
assert addon.name == "openpype_slack", "Incorrect name"
assert addon.version == "1.0.0", "Incorrect version"
with pytest.raises(TypeError):
assert addon["name"], "Dict approach not implemented"
addon_as_dict = attr.asdict(addon)
assert addon_as_dict["name"], "Dict approach should work"
def test_update_addon_state(printer, sample_addon_info,
temp_folder, addon_downloader):
"""Tests possible cases of addon update."""
addon_info = AddonInfo.from_dict(sample_addon_info)
orig_hash = addon_info.hash
addon_info.hash = "brokenhash"
result = update_addon_state([addon_info], temp_folder, addon_downloader)
assert result["openpype_slack_1.0.0"] == UpdateState.FAILED.value, \
"Update should failed because of wrong hash"
addon_info.hash = orig_hash
result = update_addon_state([addon_info], temp_folder, addon_downloader)
assert result["openpype_slack_1.0.0"] == UpdateState.UPDATED.value, \
"Addon should have been updated"
result = update_addon_state([addon_info], temp_folder, addon_downloader)
assert result["openpype_slack_1.0.0"] == UpdateState.EXISTS.value, \
"Addon should already exist"


@ -63,7 +63,8 @@ class OpenPypeVersion(semver.VersionInfo):
"""
staging = False
path = None
_VERSION_REGEX = re.compile(r"(?P<major>0|[1-9]\d*)\.(?P<minor>0|[1-9]\d*)\.(?P<patch>0|[1-9]\d*)(?:-(?P<prerelease>(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\+(?P<buildmetadata>[0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?$") # noqa: E501
# this should match any string complying with https://semver.org/
_VERSION_REGEX = re.compile(r"(?P<major>0|[1-9]\d*)\.(?P<minor>0|[1-9]\d*)\.(?P<patch>0|[1-9]\d*)(?:-(?P<prerelease>[a-zA-Z\d\-.]*))?(?:\+(?P<buildmetadata>[a-zA-Z\d\-.]*))?") # noqa: E501
_installed_version = None
def __init__(self, *args, **kwargs):
@ -122,7 +123,7 @@ class OpenPypeVersion(semver.VersionInfo):
if self.staging:
if kwargs.get("build"):
if "staging" not in kwargs.get("build"):
kwargs["build"] = "{}-staging".format(kwargs.get("build"))
kwargs["build"] = f"{kwargs.get('build')}-staging"
else:
kwargs["build"] = "staging"
@ -136,8 +137,7 @@ class OpenPypeVersion(semver.VersionInfo):
return bool(result and self.staging == other.staging)
def __repr__(self):
return "<{}: {} - path={}>".format(
self.__class__.__name__, str(self), self.path)
return f"<{self.__class__.__name__}: {str(self)} - path={self.path}>"
def __lt__(self, other: OpenPypeVersion):
result = super().__lt__(other)
@ -212,6 +212,8 @@ class OpenPypeVersion(semver.VersionInfo):
OpenPypeVersion: of detected or None.
"""
# strip .zip ext if present
string = re.sub(r"\.zip$", "", string, flags=re.IGNORECASE)
m = re.search(OpenPypeVersion._VERSION_REGEX, string)
if not m:
return None
@ -232,10 +234,7 @@ class OpenPypeVersion(semver.VersionInfo):
return openpype_version
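# Illustration (not from the original file): with the relaxed semver regex
# and the .zip stripping above, a zip file name parses too; the file name
# below is a made-up sample.
#
# >>> OpenPypeVersion.version_in_str("openpype-v3.14.1-nightly.2.zip")
# <OpenPypeVersion: 3.14.1-nightly.2 - path=None>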
def __hash__(self):
if self.path:
return hash(self.path)
else:
return hash(str(self))
return hash(self.path) if self.path else hash(str(self))
@staticmethod
def is_version_in_dir(
@ -384,7 +383,8 @@ class OpenPypeVersion(semver.VersionInfo):
@classmethod
def get_local_versions(
cls, production: bool = None, staging: bool = None
cls, production: bool = None,
staging: bool = None
) -> List:
"""Get all versions available on this machine.
@ -394,6 +394,10 @@ class OpenPypeVersion(semver.VersionInfo):
Args:
production (bool): Return production versions.
staging (bool): Return staging versions.
Returns:
list: of compatible versions available on the machine.
"""
# Return all local versions if arguments are set to None
if production is None and staging is None:
@ -410,10 +414,10 @@ class OpenPypeVersion(semver.VersionInfo):
if not production and not staging:
return []
# DEPRECATED: backwards compatible way to look for versions in root
dir_to_search = Path(user_data_dir("openpype", "pypeclub"))
versions = OpenPypeVersion.get_versions_from_directory(
dir_to_search
)
versions = OpenPypeVersion.get_versions_from_directory(dir_to_search)
filtered_versions = []
for version in versions:
if version.is_staging():
@ -425,7 +429,8 @@ class OpenPypeVersion(semver.VersionInfo):
@classmethod
def get_remote_versions(
cls, production: bool = None, staging: bool = None
cls, production: bool = None,
staging: bool = None
) -> List:
"""Get all versions available in OpenPype Path.
@ -435,6 +440,7 @@ class OpenPypeVersion(semver.VersionInfo):
Args:
production (bool): Return production versions.
staging (bool): Return staging versions.
"""
# Return all local versions if arguments are set to None
if production is None and staging is None:
@ -469,6 +475,7 @@ class OpenPypeVersion(semver.VersionInfo):
return []
versions = cls.get_versions_from_directory(dir_to_search)
filtered_versions = []
for version in versions:
if version.is_staging():
@ -479,7 +486,8 @@ class OpenPypeVersion(semver.VersionInfo):
return list(sorted(set(filtered_versions)))
@staticmethod
def get_versions_from_directory(openpype_dir: Path) -> List:
def get_versions_from_directory(
openpype_dir: Path) -> List:
"""Get all detected OpenPype versions in directory.
Args:
@ -492,15 +500,22 @@ class OpenPypeVersion(semver.VersionInfo):
ValueError: if invalid path is specified.
"""
openpype_versions = []
if not openpype_dir.exists() and not openpype_dir.is_dir():
raise ValueError("specified directory is invalid")
return openpype_versions
_openpype_versions = []
# iterate over directory in first level and find all that might
# contain OpenPype.
for item in openpype_dir.iterdir():
# if the item is directory with major.minor version, dive deeper
# if file, strip extension, in case of dir not.
if item.is_dir() and re.match(r"^\d+\.\d+$", item.name):
_versions = OpenPypeVersion.get_versions_from_directory(
item)
if _versions:
openpype_versions += _versions
# if it is a file, strip the extension; for a directory, don't.
name = item.name if item.is_dir() else item.stem
result = OpenPypeVersion.version_in_str(name)
@ -519,9 +534,9 @@ class OpenPypeVersion(semver.VersionInfo):
continue
detected_version.path = item
_openpype_versions.append(detected_version)
openpype_versions.append(detected_version)
return sorted(_openpype_versions)
return sorted(openpype_versions)
@staticmethod
def get_installed_version_str() -> str:
@ -550,13 +565,13 @@ class OpenPypeVersion(semver.VersionInfo):
staging: bool = False,
local: bool = None,
remote: bool = None
) -> OpenPypeVersion:
"""Get latest available version.
) -> Union[OpenPypeVersion, None]:
"""Get the latest available version.
The version does not contain information about path and source.
This is utility version to get latest version from all found. Build
version is not listed if staging is enabled.
This is a utility to get the latest version from all found.
Build version is not listed if staging is enabled.
Arguments 'local' and 'remote' define if local and remote repository
versions are used. All versions are used if both are not set (or set
@ -568,6 +583,10 @@ class OpenPypeVersion(semver.VersionInfo):
staging (bool, optional): List staging versions if True.
local (bool, optional): List local versions if True.
remote (bool, optional): List remote versions if True.
Returns:
Latest OpenPypeVersion or None
"""
if local is None and remote is None:
local = True
@ -621,6 +640,21 @@ class OpenPypeVersion(semver.VersionInfo):
return None
return OpenPypeVersion(version=result)
def is_compatible(self, version: OpenPypeVersion):
"""Test build compatibility.
This will simply compare major and minor versions (ignoring patch
and the rest).
Args:
version (OpenPypeVersion): Version to check compatibility with.
Returns:
bool: if the version is compatible
"""
return self.major == version.major and self.minor == version.minor
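# Illustration (not from the original file): compatibility compares only
# major.minor, so patch and prerelease are ignored; the version numbers
# below are arbitrary samples.
#
# >>> OpenPypeVersion(version="3.14.1").is_compatible(
# ...     OpenPypeVersion(version="3.14.9-nightly.2"))
# True
# >>> OpenPypeVersion(version="3.14.1").is_compatible(
# ...     OpenPypeVersion(version="3.15.0"))
# False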
class BootstrapRepos:
"""Class for bootstrapping local OpenPype installation.
@ -714,9 +748,9 @@ class BootstrapRepos:
self, repo_dir: Path = None) -> Union[OpenPypeVersion, None]:
"""Copy zip created from OpenPype repositories to user data dir.
This detect OpenPype version either in local "live" OpenPype
This detects OpenPype version either in local "live" OpenPype
repository or in user provided path. Then it will zip it in temporary
directory and finally it will move it to destination which is user
directory, and finally it will move it to destination which is user
data directory. Existing files will be replaced.
Args:
@ -727,7 +761,7 @@ class BootstrapRepos:
"""
# if repo dir is not set, we detect local "live" OpenPype repository
# version and use it as a source. Otherwise repo_dir is user
# version and use it as a source. Otherwise, repo_dir is user
# entered location.
if repo_dir:
version = self.get_version(repo_dir)
@ -741,8 +775,9 @@ class BootstrapRepos:
return
# create destination directory
if not self.data_dir.exists():
self.data_dir.mkdir(parents=True)
destination = self.data_dir / f"{installed_version.major}.{installed_version.minor}" # noqa
if not destination.exists():
destination.mkdir(parents=True)
# create zip inside temporary directory.
with tempfile.TemporaryDirectory() as temp_dir:
@ -770,7 +805,9 @@ class BootstrapRepos:
Path to moved zip on success.
"""
destination = self.data_dir / zip_file.name
version = OpenPypeVersion.version_in_str(zip_file.name)
destination_dir = self.data_dir / f"{version.major}.{version.minor}"
destination = destination_dir / zip_file.name
if destination.exists():
self._print(
@ -781,8 +818,15 @@ class BootstrapRepos:
except Exception as e:
self._print(str(e), LOG_ERROR, exc_info=True)
return None
if not destination_dir.exists():
destination_dir.mkdir(parents=True)
elif not destination_dir.is_dir():
self._print(
"Destination exists but is not directory.", LOG_ERROR)
return None
try:
shutil.move(zip_file.as_posix(), self.data_dir.as_posix())
shutil.move(zip_file.as_posix(), destination_dir.as_posix())
except shutil.Error as e:
self._print(str(e), LOG_ERROR, exc_info=True)
return None
@ -995,6 +1039,16 @@ class BootstrapRepos:
@staticmethod
def _validate_dir(path: Path) -> tuple:
"""Validate checksums in a given path.
Args:
path (Path): path to folder to validate.
Returns:
tuple(bool, str): returns status and reason as a bool
and str in a tuple.
"""
checksums_file = Path(path / "checksums")
if not checksums_file.exists():
# FIXME: This should be set to False sometimes in the future
@ -1076,11 +1130,24 @@ class BootstrapRepos:
sys.path.insert(0, directory.as_posix())
@staticmethod
def find_openpype_version(version, staging):
def find_openpype_version(
version: Union[str, OpenPypeVersion],
staging: bool
) -> Union[OpenPypeVersion, None]:
"""Find location of specified OpenPype version.
Args:
version (Union[str, OpenPypeVersion]): Version to find.
staging (bool): Filter staging versions.
Returns:
requested OpenPypeVersion.
"""
installed_version = OpenPypeVersion.get_installed_version()
if isinstance(version, str):
version = OpenPypeVersion(version=version)
installed_version = OpenPypeVersion.get_installed_version()
if installed_version == version:
return installed_version
@ -1107,7 +1174,18 @@ class BootstrapRepos:
return None
@staticmethod
def find_latest_openpype_version(staging):
def find_latest_openpype_version(
staging: bool
) -> Union[OpenPypeVersion, None]:
"""Find the latest available OpenPype version in all location.
Args:
staging (bool): True to look for staging versions.
Returns:
Latest OpenPype version or None if nothing was found.
"""
installed_version = OpenPypeVersion.get_installed_version()
local_versions = OpenPypeVersion.get_local_versions(
staging=staging
@ -1138,7 +1216,8 @@ class BootstrapRepos:
self,
openpype_path: Union[Path, str] = None,
staging: bool = False,
include_zips: bool = False) -> Union[List[OpenPypeVersion], None]:
include_zips: bool = False
) -> Union[List[OpenPypeVersion], None]:
"""Get ordered dict of detected OpenPype version.
Resolution order for OpenPype is following:
@ -1172,30 +1251,38 @@ class BootstrapRepos:
("Finding OpenPype in non-filesystem locations is"
" not implemented yet."))
dir_to_search = self.data_dir
user_versions = self.get_openpype_versions(self.data_dir, staging)
# if we have openpype_path specified, search only there.
# if checks below for OPENPYPE_PATH and registry fail, use data_dir
# DEPRECATED: lookup in root of this folder is deprecated in favour
# of major.minor sub-folders.
dirs_to_search = [self.data_dir]
if openpype_path:
dir_to_search = openpype_path
dirs_to_search = [openpype_path]
elif os.getenv("OPENPYPE_PATH") \
and Path(os.getenv("OPENPYPE_PATH")).exists():
# first try OPENPYPE_PATH and if that is not available,
# try registry.
dirs_to_search = [Path(os.getenv("OPENPYPE_PATH"))]
else:
if os.getenv("OPENPYPE_PATH"):
if Path(os.getenv("OPENPYPE_PATH")).exists():
dir_to_search = Path(os.getenv("OPENPYPE_PATH"))
else:
try:
registry_dir = Path(
str(self.registry.get_item("openPypePath")))
if registry_dir.exists():
dir_to_search = registry_dir
try:
registry_dir = Path(
str(self.registry.get_item("openPypePath")))
if registry_dir.exists():
dirs_to_search = [registry_dir]
except ValueError:
# nothing found in registry, we'll use data dir
pass
except ValueError:
# nothing found in registry, we'll use data dir
pass
openpype_versions = self.get_openpype_versions(dir_to_search, staging)
openpype_versions += user_versions
openpype_versions = []
for dir_to_search in dirs_to_search:
try:
openpype_versions += self.get_openpype_versions(
dir_to_search, staging)
except ValueError:
# location is invalid, skip it
pass
# remove zip file version if needed.
if not include_zips:
openpype_versions = [
v for v in openpype_versions if v.path.suffix != ".zip"
@ -1308,9 +1395,8 @@ class BootstrapRepos:
raise ValueError(
f"version {version} is not associated with any file")
destination = self.data_dir / version.path.stem
if destination.exists():
assert destination.is_dir()
destination = self.data_dir / f"{version.major}.{version.minor}" / version.path.stem # noqa
if destination.exists() and destination.is_dir():
try:
shutil.rmtree(destination)
except OSError as e:
@ -1379,7 +1465,7 @@ class BootstrapRepos:
else:
dir_name = openpype_version.path.stem
destination = self.data_dir / dir_name
destination = self.data_dir / f"{openpype_version.major}.{openpype_version.minor}" / dir_name # noqa
# test if destination directory already exists; if so, let's delete it.
if destination.exists() and force:
@ -1557,9 +1643,10 @@ class BootstrapRepos:
return False
return True
def get_openpype_versions(self,
openpype_dir: Path,
staging: bool = False) -> list:
def get_openpype_versions(
self,
openpype_dir: Path,
staging: bool = False) -> list:
"""Get all detected OpenPype versions in directory.
Args:
@ -1574,14 +1661,20 @@ class BootstrapRepos:
"""
if not openpype_dir.exists() and not openpype_dir.is_dir():
raise ValueError("specified directory is invalid")
raise ValueError(f"specified directory {openpype_dir} is invalid")
_openpype_versions = []
openpype_versions = []
# iterate over directory in first level and find all that might
# contain OpenPype.
for item in openpype_dir.iterdir():
# if the item is directory with major.minor version, dive deeper
if item.is_dir() and re.match(r"^\d+\.\d+$", item.name):
_versions = self.get_openpype_versions(
item, staging=staging)
if _versions:
openpype_versions += _versions
# if file, strip extension, in case of dir not.
# if it is a file, strip the extension; for a directory, don't.
name = item.name if item.is_dir() else item.stem
result = OpenPypeVersion.version_in_str(name)
@ -1601,12 +1694,12 @@ class BootstrapRepos:
detected_version.path = item
if staging and detected_version.is_staging():
_openpype_versions.append(detected_version)
openpype_versions.append(detected_version)
if not staging and not detected_version.is_staging():
_openpype_versions.append(detected_version)
openpype_versions.append(detected_version)
return sorted(_openpype_versions)
return sorted(openpype_versions)
class OpenPypeVersionExists(Exception):


@ -388,8 +388,11 @@ class InstallDialog(QtWidgets.QDialog):
install_thread.start()
def _installation_finished(self):
# TODO we should find out why status can be set to 'None'?
# - 'InstallThread.run' should handle all cases so not sure where
# that comes from
status = self._install_thread.result()
if status >= 0:
if status is not None and status >= 0:
self._update_progress(100)
QtWidgets.QApplication.processEvents()
self.done(3)


@ -62,7 +62,7 @@ class InstallThread(QThread):
progress_callback=self.set_progress, message=self.message)
local_version = OpenPypeVersion.get_installed_version_str()
# if user did entered nothing, we install OpenPype from local version.
# if user did enter nothing, we install OpenPype from local version.
# zip content of `repos`, copy it to user data dir and append
# version to it.
if not self._path:
@ -93,6 +93,23 @@ class InstallThread(QThread):
detected = bs.find_openpype(include_zips=True)
if detected:
if not OpenPypeVersion.get_installed_version().is_compatible(
detected[-1]):
self.message.emit((
f"Latest detected version {detected[-1]} "
"is not compatible with the currently running "
f"{local_version}"
), True)
self.message.emit((
"Filtering detected versions to compatible ones..."
), False)
detected = [
version for version in detected
if version.is_compatible(
OpenPypeVersion.get_installed_version())
]
if OpenPypeVersion(
version=local_version, path=Path()) < detected[-1]:
self.message.emit((


@ -21,6 +21,11 @@ class OpenPypeVersionNotFound(Exception):
pass
class OpenPypeVersionIncompatible(Exception):
"""OpenPype version is not compatible with the installed one (build)."""
pass
def should_add_certificate_path_to_mongo_url(mongo_url):
"""Check if should add ca certificate to mongo url.


@ -18,7 +18,8 @@ AppPublisher=Orbi Tools s.r.o
AppPublisherURL=http://pype.club
AppSupportURL=http://pype.club
AppUpdatesURL=http://pype.club
DefaultDirName={autopf}\{#MyAppName}
DefaultDirName={autopf}\{#MyAppName}\{#AppVer}
UsePreviousAppDir=no
DisableProgramGroupPage=yes
OutputBaseFilename={#MyAppName}-{#AppVer}-install
AllowCancelDuringInstall=yes
@ -27,7 +28,7 @@ AllowCancelDuringInstall=yes
PrivilegesRequiredOverridesAllowed=dialog
SetupIconFile=igniter\openpype.ico
OutputDir=build\
Compression=lzma
Compression=lzma2
SolidCompression=yes
WizardStyle=modern
@ -37,6 +38,11 @@ Name: "english"; MessagesFile: "compiler:Default.isl"
[Tasks]
Name: "desktopicon"; Description: "{cm:CreateDesktopIcon}"; GroupDescription: "{cm:AdditionalIcons}"; Flags: unchecked
[InstallDelete]
; clean everything in previous installation folder
Type: filesandordirs; Name: "{app}\*"
[Files]
Source: "build\{#build}\*"; DestDir: "{app}"; Flags: ignoreversion recursesubdirs createallsubdirs
; NOTE: Don't use "Flags: ignoreversion" on any shared system files


@ -1,42 +1,75 @@
# absolute_import is needed to counter the `module has no cmds error` in Maya
from __future__ import absolute_import
import warnings
import functools
import pyblish.api
def get_errored_instances_from_context(context):
instances = list()
for result in context.data["results"]:
if result["instance"] is None:
# When instance is None we are on the "context" result
continue
if result["error"]:
instances.append(result["instance"])
return instances
class ActionDeprecatedWarning(DeprecationWarning):
pass
def get_errored_plugins_from_data(context):
"""Get all failed validation plugins
Args:
context (object):
Returns:
list of plugins which failed during validation
def deprecated(new_destination):
"""Mark functions as deprecated.
It will result in a warning being emitted when the function is used.
"""
plugins = list()
results = context.data.get("results", [])
for result in results:
if result["success"] is True:
continue
plugins.append(result["plugin"])
func = None
if callable(new_destination):
func = new_destination
new_destination = None
return plugins
def _decorator(decorated_func):
if new_destination is None:
warning_message = (
" Please check content of deprecated function to figure out"
" possible replacement."
)
else:
warning_message = " Please replace your usage with '{}'.".format(
new_destination
)
@functools.wraps(decorated_func)
def wrapper(*args, **kwargs):
warnings.simplefilter("always", ActionDeprecatedWarning)
warnings.warn(
(
"Call to deprecated function '{}'"
"\nFunction was moved or removed.{}"
).format(decorated_func.__name__, warning_message),
category=ActionDeprecatedWarning,
stacklevel=4
)
return decorated_func(*args, **kwargs)
return wrapper
if func is None:
return _decorator
return _decorator(func)
@deprecated("openpype.pipeline.publish.get_errored_instances_from_context")
def get_errored_instances_from_context(context):
"""
Deprecated:
Since 3.14.*, will be removed in 3.16.* or later.
"""
from openpype.pipeline.publish import get_errored_instances_from_context
return get_errored_instances_from_context(context)
@deprecated("openpype.pipeline.publish.get_errored_plugins_from_context")
def get_errored_plugins_from_data(context):
"""
Deprecated:
Since 3.14.*, will be removed in 3.16.* or later.
"""
from openpype.pipeline.publish import get_errored_plugins_from_context
return get_errored_plugins_from_context(context)
class RepairAction(pyblish.api.Action):
@ -45,6 +78,13 @@ class RepairAction(pyblish.api.Action):
To process the repair this requires a static `repair(instance)` method
to be available on the plugin.
Deprecated:
'RepairAction' and 'RepairContextAction' were moved to
'openpype.pipeline.publish', please change your imports.
There is no "reasonable" way to mark these classes as deprecated
and show a warning on wrong import. Deprecated since 3.14.*, will be
removed in 3.16.*
"""
label = "Repair"
on = "failed" # This action is only available on a failed plug-in
@ -71,6 +111,13 @@ class RepairContextAction(pyblish.api.Action):
To process the repair this requires a static `repair(instance)` method
to be available on the plugin.
Deprecated:
'RepairAction' and 'RepairContextAction' were moved to
'openpype.pipeline.publish', please change your imports.
There is no "reasonable" way to mark these classes as deprecated
and show a warning on wrong import. Deprecated since 3.14.*, will be
removed in 3.16.*
"""
label = "Repair"
on = "failed" # This action is only available on a failed plug-in


@ -9,13 +9,12 @@ from .settings import (
)
from .lib import (
PypeLogger,
Logger,
Anatomy,
config,
execute,
run_subprocess,
version_up,
get_asset,
get_hierarchy,
get_workdir_data,
get_version_from_path,
get_last_version_from_path,
@ -49,7 +48,6 @@ from .plugin import (
ValidateContentsOrder,
ValidateSceneOrder,
ValidateMeshOrder,
ValidationException
)
# temporary fix, might
@ -59,8 +57,6 @@ from .action import (
RepairContextAction
)
# for backward compatibility with Pype 2
Logger = PypeLogger
__all__ = [
"get_system_settings",
@ -75,7 +71,6 @@ __all__ = [
"PypeLogger",
"Logger",
"Anatomy",
"config",
"execute",
"get_default_components",
"ApplicationManager",
@ -96,12 +91,9 @@ __all__ = [
"RepairAction",
"RepairContextAction",
"ValidationException",
# get contextual data
"version_up",
"get_asset",
"get_hierarchy",
"get_workdir_data",
"get_version_from_path",
"get_last_version_from_path",


@ -2,7 +2,7 @@
"""Package for handling pype command line arguments."""
import os
import sys
import code
import click
# import sys
@ -29,8 +29,14 @@ def main(ctx):
It wraps different commands together.
"""
if ctx.invoked_subcommand is None:
ctx.invoke(tray)
# Print help if headless mode is used
if os.environ.get("OPENPYPE_HEADLESS_MODE") == "1":
print(ctx.get_help())
sys.exit(0)
else:
ctx.invoke(tray)
@main.command()
@ -40,18 +46,6 @@ def settings(dev):
PypeCommands().launch_settings_gui(dev)
@main.command()
def standalonepublisher():
"""Show Pype Standalone publisher UI."""
PypeCommands().launch_standalone_publisher()
@main.command()
def traypublisher():
"""Show new OpenPype Standalone publisher UI."""
PypeCommands().launch_traypublisher()
@main.command()
def tray():
"""Launch pype tray.
@ -289,6 +283,13 @@ def projectmanager():
PypeCommands().launch_project_manager()
@main.command(context_settings={"ignore_unknown_options": True})
def publish_report_viewer():
from openpype.tools.publisher.publish_report_viewer import main
sys.exit(main())
@main.command()
@click.argument("output_path")
@click.option("--project", help="Define project context")
@ -424,3 +425,45 @@ def pack_project(project, dirpath):
def unpack_project(zipfile, root):
"""Create a package of project with all files and database dump."""
PypeCommands().unpack_project(zipfile, root)
@main.command()
def interactive():
"""Interative (Python like) console.
Helpfull command not only for development to directly work with python
interpreter.
Warning:
Executable 'openpype_gui' on windows won't work.
"""
from openpype.version import __version__
banner = "OpenPype {}\nPython {} on {}".format(
__version__, sys.version, sys.platform
)
code.interact(banner)
@main.command()
@click.option("--build", help="Print only build version",
is_flag=True, default=False)
def version(build):
"""Print OpenPype version."""
from openpype.version import __version__
from igniter.bootstrap_repos import BootstrapRepos, OpenPypeVersion
from pathlib import Path
import os
if getattr(sys, 'frozen', False):
local_version = BootstrapRepos.get_version(
Path(os.getenv("OPENPYPE_ROOT")))
else:
local_version = OpenPypeVersion.get_installed_version_str()
if build:
print(local_version)
return
print(f"{__version__} (booted: {local_version})")


@ -1,6 +1,11 @@
from .mongo import (
OpenPypeMongoConnection,
)
from .entities import (
get_projects,
get_project,
get_whole_project,
get_asset_by_id,
get_asset_by_name,
@ -24,20 +29,39 @@ from .entities import (
get_last_version_by_subset_name,
get_output_link_versions,
version_is_latest,
get_representation_by_id,
get_representation_by_name,
get_representations,
get_representation_parents,
get_representations_parents,
get_archived_representations,
get_thumbnail,
get_thumbnails,
get_thumbnail_id_from_source,
get_workfile_info,
)
from .entity_links import (
get_linked_asset_ids,
get_linked_assets,
get_linked_representation_id,
)
from .operations import (
create_project,
)
__all__ = (
"OpenPypeMongoConnection",
"get_projects",
"get_project",
"get_whole_project",
"get_asset_by_id",
"get_asset_by_name",
@ -61,13 +85,24 @@ __all__ = (
"get_last_version_by_subset_name",
"get_output_link_versions",
"version_is_latest",
"get_representation_by_id",
"get_representation_by_name",
"get_representations",
"get_representation_parents",
"get_representations_parents",
"get_archived_representations",
"get_thumbnail",
"get_thumbnails",
"get_thumbnail_id_from_source",
"get_workfile_info",
"get_linked_asset_ids",
"get_linked_assets",
"get_linked_representation_id",
"create_project",
)

File diff suppressed because it is too large


@ -0,0 +1,241 @@
from .mongo import get_project_connection
from .entities import (
get_assets,
get_asset_by_id,
get_version_by_id,
get_representation_by_id,
convert_id,
)
def get_linked_asset_ids(project_name, asset_doc=None, asset_id=None):
"""Extract linked asset ids from asset document.
One of asset document or asset id must be passed.
Note:
Asset links currently work only from asset to assets.
Args:
project_name (str): Name of project where to look for the asset.
asset_doc (dict): Asset document from DB.
asset_id (Union[ObjectId, str]): Asset id, used when the document
is not passed.
Returns:
List[Union[ObjectId, str]]: Asset ids of input links.
"""
output = []
if not asset_doc and not asset_id:
return output
if not asset_doc:
asset_doc = get_asset_by_id(
project_name, asset_id, fields=["data.inputLinks"]
)
input_links = asset_doc["data"].get("inputLinks")
if not input_links:
return output
for item in input_links:
# Backwards compatibility for "_id" key which was replaced with
# "id"
if "_id" in item:
link_id = item["_id"]
else:
link_id = item["id"]
output.append(link_id)
return output
def get_linked_assets(
project_name, asset_doc=None, asset_id=None, fields=None
):
"""Return linked assets based on passed asset document.
One of asset document or asset id must be passed.
Args:
project_name (str): Name of project where to look for queried entities.
asset_doc (Dict[str, Any]): Asset document from database.
asset_id (Union[ObjectId, str]): Asset id. Can be used instead of
asset document.
fields (Iterable[str]): Fields that should be returned. All fields are
returned if 'None' is passed.
Returns:
List[Dict[str, Any]]: Asset documents of input links for passed
asset doc.
"""
if not asset_doc:
if not asset_id:
return []
asset_doc = get_asset_by_id(
project_name,
asset_id,
fields=["data.inputLinks"]
)
if not asset_doc:
return []
link_ids = get_linked_asset_ids(project_name, asset_doc=asset_doc)
if not link_ids:
return []
return list(get_assets(project_name, asset_ids=link_ids, fields=fields))
def get_linked_representation_id(
project_name, repre_doc=None, repre_id=None, link_type=None, max_depth=None
):
"""Returns list of linked ids of particular type (if provided).
One of representation document or representation id must be passed.
Note:
Representation links currently work only from a representation through
its version back to representations.
Args:
project_name (str): Name of project where look for links.
repre_doc (Dict[str, Any]): Representation document.
repre_id (Union[ObjectId, str]): Representation id.
link_type (str): Type of link (e.g. 'reference', ...).
max_depth (int): Limit recursion level. Default: 0
Returns:
List[ObjectId]: Linked representation ids.
"""
if repre_doc:
repre_id = repre_doc["_id"]
if repre_id:
repre_id = convert_id(repre_id)
if not repre_id and not repre_doc:
return []
version_id = None
if repre_doc:
version_id = repre_doc.get("parent")
if not version_id:
repre_doc = get_representation_by_id(
project_name, repre_id, fields=["parent"]
)
version_id = repre_doc["parent"]
if not version_id:
return []
version_doc = get_version_by_id(
project_name, version_id, fields=["type", "version_id"]
)
if version_doc["type"] == "hero_version":
version_id = version_doc["version_id"]
if max_depth is None:
max_depth = 0
match = {
"_id": version_id,
# Links are not stored to hero versions at this moment so filter
# is limited to just versions
"type": "version"
}
graph_lookup = {
"from": project_name,
"startWith": "$data.inputLinks.id",
"connectFromField": "data.inputLinks.id",
"connectToField": "_id",
"as": "outputs_recursive",
"depthField": "depth"
}
if max_depth != 0:
# We offset by -1 since 0 basically means no recursion
# but the recursion only happens after the initial lookup
# for outputs.
graph_lookup["maxDepth"] = max_depth - 1
query_pipeline = [
# Match
{"$match": match},
# Recursive graph lookup for inputs
{"$graphLookup": graph_lookup}
]
conn = get_project_connection(project_name)
result = conn.aggregate(query_pipeline)
referenced_version_ids = _process_referenced_pipeline_result(
result, link_type
)
if not referenced_version_ids:
return []
ref_ids = conn.distinct(
"_id",
filter={
"parent": {"$in": list(referenced_version_ids)},
"type": "representation"
}
)
return list(ref_ids)
def _process_referenced_pipeline_result(result, link_type):
"""Filters result from pipeline for particular link_type.
Pipeline cannot use link_type directly in a query.
Returns:
(list)
"""
referenced_version_ids = set()
correctly_linked_ids = set()
for item in result:
input_links = item.get("data", {}).get("inputLinks")
if not input_links:
continue
_filter_input_links(
input_links,
link_type,
correctly_linked_ids
)
# outputs_recursive in random order, sort by depth
outputs_recursive = item.get("outputs_recursive")
if not outputs_recursive:
continue
for output in sorted(outputs_recursive, key=lambda o: o["depth"]):
output_links = output.get("data", {}).get("inputLinks")
if not output_links:
continue
# Leaf
if output["_id"] not in correctly_linked_ids:
continue
_filter_input_links(
output_links,
link_type,
correctly_linked_ids
)
referenced_version_ids.add(output["_id"])
return referenced_version_ids
def _filter_input_links(input_links, link_type, correctly_linked_ids):
for input_link in input_links:
if link_type and input_link["type"] != link_type:
continue
link_id = input_link.get("id") or input_link.get("_id")
if link_id is not None:
correctly_linked_ids.add(link_id)

235
openpype/client/mongo.py Normal file
View file

@ -0,0 +1,235 @@
import os
import sys
import time
import logging
import pymongo
import certifi
if sys.version_info[0] == 2:
from urlparse import urlparse, parse_qs
else:
from urllib.parse import urlparse, parse_qs
class MongoEnvNotSet(Exception):
pass
def _decompose_url(url):
"""Decompose mongo url to basic components.
Used for creation of MongoHandler which expect mongo url components as
separated kwargs. Components are at the end not used as we're setting
connection directly this is just a dumb components for MongoHandler
validation pass.
"""
# Use first url from passed url
# - this is because it is possible to pass multiple urls for multiple
# replica sets which would crash on urlparse otherwise
    # - please don't use comma in username or password
url = url.split(",")[0]
components = {
"scheme": None,
"host": None,
"port": None,
"username": None,
"password": None,
"auth_db": None
}
result = urlparse(url)
if result.scheme is None:
_url = "mongodb://{}".format(url)
result = urlparse(_url)
components["scheme"] = result.scheme
components["host"] = result.hostname
try:
components["port"] = result.port
except ValueError:
raise RuntimeError("invalid port specified")
components["username"] = result.username
components["password"] = result.password
try:
components["auth_db"] = parse_qs(result.query)['authSource'][0]
except KeyError:
# no auth db provided, mongo will use the one we are connecting to
pass
return components
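For example, a made-up url would decompose like this:

```python
components = _decompose_url(
    "mongodb://user:secret@localhost:27017/?authSource=admin"
)
# components == {
#     "scheme": "mongodb",
#     "host": "localhost",
#     "port": 27017,
#     "username": "user",
#     "password": "secret",
#     "auth_db": "admin"
# }
```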
def get_default_components():
mongo_url = os.environ.get("OPENPYPE_MONGO")
if mongo_url is None:
raise MongoEnvNotSet(
"URL for Mongo logging connection is not set."
)
return _decompose_url(mongo_url)
def should_add_certificate_path_to_mongo_url(mongo_url):
"""Check if should add ca certificate to mongo url.
Since 30.9.2021 cloud mongo requires newer certificates that are not
available on most of workstation. This adds path to certifi certificate
which is valid for it. To add the certificate path url must have scheme
'mongodb+srv' or has 'ssl=true' or 'tls=true' in url query.
"""
parsed = urlparse(mongo_url)
query = parse_qs(parsed.query)
lowered_query_keys = set(key.lower() for key in query.keys())
add_certificate = False
    # Check if url query has 'ssl' or 'tls' set to 'true'
    for key in ("ssl", "tls"):
        if key in query and "true" in query[key]:
add_certificate = True
break
# Check if url contains 'mongodb+srv'
if not add_certificate and parsed.scheme == "mongodb+srv":
add_certificate = True
# Check if url does already contain certificate path
if add_certificate and "tlscafile" in lowered_query_keys:
add_certificate = False
return add_certificate
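Illustrative checks (the urls are made up):

```python
# Scheme 'mongodb+srv' -> certificate path should be added.
assert should_add_certificate_path_to_mongo_url(
    "mongodb+srv://cluster0.example.net"
)
# 'ssl=true' in query -> certificate path should be added.
assert should_add_certificate_path_to_mongo_url(
    "mongodb://host:27017/?ssl=true"
)
# Certificate path already in query -> nothing to add.
assert not should_add_certificate_path_to_mongo_url(
    "mongodb+srv://cluster0.example.net/?tlsCAFile=/tmp/ca.pem"
)
```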
def validate_mongo_connection(mongo_uri):
"""Check if provided mongodb URL is valid.
Args:
mongo_uri (str): URL to validate.
Raises:
ValueError: When port in mongo uri is not valid.
pymongo.errors.InvalidURI: If passed mongo is invalid.
        pymongo.errors.ServerSelectionTimeoutError: If the connection timed
            out, so the mongo server probably could not be reached.
"""
client = OpenPypeMongoConnection.create_connection(
mongo_uri, retry_attempts=1
)
client.close()
class OpenPypeMongoConnection:
"""Singleton MongoDB connection.
Keeps MongoDB connections by url.
"""
mongo_clients = {}
log = logging.getLogger("OpenPypeMongoConnection")
@staticmethod
def get_default_mongo_url():
return os.environ["OPENPYPE_MONGO"]
@classmethod
def get_mongo_client(cls, mongo_url=None):
if mongo_url is None:
mongo_url = cls.get_default_mongo_url()
connection = cls.mongo_clients.get(mongo_url)
if connection:
# Naive validation of existing connection
try:
connection.server_info()
with connection.start_session():
pass
except Exception:
connection = None
if not connection:
cls.log.debug("Creating mongo connection to {}".format(mongo_url))
connection = cls.create_connection(mongo_url)
cls.mongo_clients[mongo_url] = connection
return connection
@classmethod
def create_connection(cls, mongo_url, timeout=None, retry_attempts=None):
parsed = urlparse(mongo_url)
# Force validation of scheme
if parsed.scheme not in ["mongodb", "mongodb+srv"]:
raise pymongo.errors.InvalidURI((
"Invalid URI scheme:"
" URI must begin with 'mongodb://' or 'mongodb+srv://'"
))
if timeout is None:
timeout = int(os.environ.get("AVALON_TIMEOUT") or 1000)
kwargs = {
"serverSelectionTimeoutMS": timeout
}
if should_add_certificate_path_to_mongo_url(mongo_url):
kwargs["ssl_ca_certs"] = certifi.where()
mongo_client = pymongo.MongoClient(mongo_url, **kwargs)
if retry_attempts is None:
retry_attempts = 3
elif not retry_attempts:
retry_attempts = 1
last_exc = None
valid = False
t1 = time.time()
for attempt in range(1, retry_attempts + 1):
try:
mongo_client.server_info()
with mongo_client.start_session():
pass
valid = True
break
except Exception as exc:
last_exc = exc
if attempt < retry_attempts:
cls.log.warning(
"Attempt {} failed. Retrying... ".format(attempt)
)
time.sleep(1)
if not valid:
raise last_exc
cls.log.info("Connected to {}, delay {:.3f}s".format(
mongo_url, time.time() - t1
))
return mongo_client
def get_project_database():
db_name = os.environ.get("AVALON_DB") or "avalon"
return OpenPypeMongoConnection.get_mongo_client()[db_name]
def get_project_connection(project_name):
"""Direct access to mongo collection.
    We're trying to avoid using direct access to mongo. This should be used
    only for Create, Update and Remove operations until there are api calls
    implemented for them.
Args:
project_name(str): Project name for which collection should be
returned.
Returns:
        pymongo.Collection: Collection related to passed project.
"""
if not project_name:
raise ValueError("Invalid project name {}".format(str(project_name)))
return get_project_database()[project_name]
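A quick usage sketch of the helpers above (the url and project name are placeholders):

```python
import os
os.environ.setdefault("OPENPYPE_MONGO", "mongodb://localhost:27017")

# Clients are cached per url, so repeated calls reuse the connection.
client = OpenPypeMongoConnection.get_mongo_client()
collection = get_project_connection("demo_project")
asset_count = collection.count_documents({"type": "asset"})
```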

39
openpype/client/notes.md Normal file
View file

@ -0,0 +1,39 @@
# Client functionality
## Reason
Preparation for the OpenPype v4 server. The goal is to remove direct mongo calls from code, to prepare for a different source of data and to start thinking about database calls less as mongo calls and more universally. To do so, a simple wrapper around database calls was implemented so that pymongo specific code is not used directly.
The current goal is not to make a universal database model which can be easily replaced with any different source of data, but to get as close to that as possible. The current implementation of OpenPype is too tightly connected to pymongo and its abilities, so we're trying to get closer with long term changes that can be used even in the current state.
## Queries
Query functions don't use the full potential of mongo queries, like very specific queries based on subdictionaries or unknown structures. We try to avoid these calls as much as possible because they probably won't be available in the future. If it's really necessary, a new function can be added, but only if it's reasonable for the overall logic. All query functions were moved to `~/client/entities.py`. Each function has arguments with available filters and a possible reduction of returned keys for each entity.
## Changes
Changes are a little bit complicated. Mongo has many options for how an update can happen, which had to be reduced; also it would be complicated at this stage to validate values which are created or updated, thus there is almost no automation at this point. Changes can be made using operations available in `~/client/operations.py`. Each operation requires a project name and entity type, but may also require operation specific data.
### Create
Create operations expect already prepared document data; for that there are functions creating skeletal structures of documents (they do not fill all required data) - except `_id`, all data should be right. Existence of the entity is not validated, so if the same creation operation is sent n times it will create the entity n times, which can cause issues.
### Update
Update operations require an entity id and the keys that should be changed; the update dictionary must have the form `{"key": value}`. If a value should be set in a nested dictionary, the key must contain all subkeys joined with a dot `.` (e.g. `{"data": {"fps": 25}}` -> `{"data.fps": 25}`). To simplify creation of update dictionaries there are prepared functions which do that for you, as sketched below; their names follow the template `prepare_<entity type>_update_data` - they work on comparison of the previous document and the new document. If a function for a requested entity type is missing, it is because we didn't need it yet and it requires implementation.
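A minimal sketch of the dot-joining described above (`flatten_dict` is a hypothetical helper shown only for illustration; it is not part of the client API):

```python
def flatten_dict(data, parent_key=""):
    """Flatten nested dicts into dot-joined keys."""
    output = {}
    for key, value in data.items():
        full_key = "{}.{}".format(parent_key, key) if parent_key else key
        if isinstance(value, dict):
            output.update(flatten_dict(value, full_key))
        else:
            output[full_key] = value
    return output

assert flatten_dict({"data": {"fps": 25}}) == {"data.fps": 25}
```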
### Delete
Delete operations need an entity id. The entity will be deleted from mongo.
## What (probably) won't be replaced
Some parts of the code are still using direct mongo calls. In most cases these are very specific calls that are module specific, or their usage will completely change in the future.
- Mongo calls that are not project specific (outside the `avalon` collection) will be removed or will have to use a different mechanism for how the data are stored. At this moment this is related to OpenPype settings and logs, ftrack server events and some other data.
- Sync server queries. They're complex and very specific to the sync server module. Their replacement will require specific calls to the OpenPype server in v4, thus their abstraction with a wrapper is irrelevant and would complicate production in v3.
- Project managers (ftrack, kitsu, shotgrid, embedded Project Manager, etc.). Project managers are creating, updating or removing assets in v3, but in v4 they will create folders with a different structure. Wrapping creation of assets would not help to prepare for v4 because of the new data structures. The same can be said about the editorial Extract Hierarchy Avalon plugin which creates the project structure.
- Code parts that are marked as deprecated in v3 or will be deprecated in v4.
- integrate asset legacy publish plugin - already legacy, kept for safety
- integrate thumbnail - thumbnails will be stored in a different way in v4
- input links - links will be stored in a different way and will have a different mechanism of linking. In v3 links are limited to the same entity type: "asset <-> asset" or "representation <-> representation".
## Known missing replacements
- change subset group in loader tool
- integrate subset group
- query input links in openpype lib
- create project in openpype lib
- save/create workfile doc in openpype lib
- integrate hero version

794
openpype/client/operations.py Normal file
View file

@ -0,0 +1,794 @@
import re
import uuid
import copy
import collections
from abc import ABCMeta, abstractmethod, abstractproperty
import six
from bson.objectid import ObjectId
from pymongo import DeleteOne, InsertOne, UpdateOne
from .mongo import get_project_connection
from .entities import get_project
REMOVED_VALUE = object()
PROJECT_NAME_ALLOWED_SYMBOLS = "a-zA-Z0-9_"
PROJECT_NAME_REGEX = re.compile(
"^[{}]+$".format(PROJECT_NAME_ALLOWED_SYMBOLS)
)
CURRENT_PROJECT_SCHEMA = "openpype:project-3.0"
CURRENT_PROJECT_CONFIG_SCHEMA = "openpype:config-2.0"
CURRENT_ASSET_DOC_SCHEMA = "openpype:asset-3.0"
CURRENT_SUBSET_SCHEMA = "openpype:subset-3.0"
CURRENT_VERSION_SCHEMA = "openpype:version-3.0"
CURRENT_HERO_VERSION_SCHEMA = "openpype:hero_version-1.0"
CURRENT_REPRESENTATION_SCHEMA = "openpype:representation-2.0"
CURRENT_WORKFILE_INFO_SCHEMA = "openpype:workfile-1.0"
CURRENT_THUMBNAIL_SCHEMA = "openpype:thumbnail-1.0"
def _create_or_convert_to_mongo_id(mongo_id):
if mongo_id is None:
return ObjectId()
return ObjectId(mongo_id)
def new_project_document(
project_name, project_code, config, data=None, entity_id=None
):
"""Create skeleton data of project document.
Args:
project_name (str): Name of project. Used as identifier of a project.
        project_code (str): Shorter version of project name without spaces
            and special characters (in most cases). Should also be considered
            a unique name across projects.
        config (Dict[str, Any]): Project config consisting of roots,
            templates, applications and other project Anatomy related data.
        data (Dict[str, Any]): Project data with information about its
            attributes (e.g. 'fps' etc.) or integration specific keys.
entity_id (Union[str, ObjectId]): Predefined id of document. New id is
created if not passed.
Returns:
Dict[str, Any]: Skeleton of project document.
"""
if data is None:
data = {}
data["code"] = project_code
    return {
        "_id": _create_or_convert_to_mongo_id(entity_id),
        "name": project_name,
        "type": "project",
        "schema": CURRENT_PROJECT_SCHEMA,
        "data": data,
        "config": config
    }
def new_asset_document(
name, project_id, parent_id, parents, data=None, entity_id=None
):
"""Create skeleton data of asset document.
Args:
name (str): Is considered as unique identifier of asset in project.
        project_id (Union[str, ObjectId]): Id of project document.
parent_id (Union[str, ObjectId]): Id of parent asset.
parents (List[str]): List of parent assets names.
data (Dict[str, Any]): Asset document data. Empty dictionary is used
if not passed. Value of 'parent_id' is used to fill 'visualParent'.
entity_id (Union[str, ObjectId]): Predefined id of document. New id is
created if not passed.
Returns:
Dict[str, Any]: Skeleton of asset document.
"""
if data is None:
data = {}
if parent_id is not None:
parent_id = ObjectId(parent_id)
data["visualParent"] = parent_id
data["parents"] = parents
return {
"_id": _create_or_convert_to_mongo_id(entity_id),
"type": "asset",
"name": name,
"parent": ObjectId(project_id),
"data": data,
"schema": CURRENT_ASSET_DOC_SCHEMA
}
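A creation sketch (the ids are placeholders):

```python
asset_doc = new_asset_document(
    name="sh010",
    project_id="6233beb90fdeebd8a8a24e0b",
    parent_id=None,
    parents=[],
    data={"fps": 25}
)
# asset_doc["data"]["visualParent"] is None and a new ObjectId was
# generated for asset_doc["_id"].
```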
def new_subset_document(name, family, asset_id, data=None, entity_id=None):
"""Create skeleton data of subset document.
Args:
name (str): Is considered as unique identifier of subset under asset.
family (str): Subset's family.
asset_id (Union[str, ObjectId]): Id of parent asset.
data (Dict[str, Any]): Subset document data. Empty dictionary is used
if not passed. Value of 'family' is used to fill 'family'.
entity_id (Union[str, ObjectId]): Predefined id of document. New id is
created if not passed.
Returns:
Dict[str, Any]: Skeleton of subset document.
"""
if data is None:
data = {}
data["family"] = family
return {
"_id": _create_or_convert_to_mongo_id(entity_id),
"schema": CURRENT_SUBSET_SCHEMA,
"type": "subset",
"name": name,
"data": data,
"parent": asset_id
}
def new_version_doc(version, subset_id, data=None, entity_id=None):
"""Create skeleton data of version document.
Args:
version (int): Is considered as unique identifier of version
under subset.
subset_id (Union[str, ObjectId]): Id of parent subset.
data (Dict[str, Any]): Version document data.
entity_id (Union[str, ObjectId]): Predefined id of document. New id is
created if not passed.
Returns:
Dict[str, Any]: Skeleton of version document.
"""
if data is None:
data = {}
return {
"_id": _create_or_convert_to_mongo_id(entity_id),
"schema": CURRENT_VERSION_SCHEMA,
"type": "version",
"name": int(version),
"parent": subset_id,
"data": data
}
def new_hero_version_doc(version_id, subset_id, data=None, entity_id=None):
"""Create skeleton data of hero version document.
Args:
        version_id (ObjectId): Id of the source version to which the hero
            version points.
subset_id (Union[str, ObjectId]): Id of parent subset.
data (Dict[str, Any]): Version document data.
entity_id (Union[str, ObjectId]): Predefined id of document. New id is
created if not passed.
Returns:
        Dict[str, Any]: Skeleton of hero version document.
"""
if data is None:
data = {}
return {
"_id": _create_or_convert_to_mongo_id(entity_id),
"schema": CURRENT_HERO_VERSION_SCHEMA,
"type": "hero_version",
"version_id": version_id,
"parent": subset_id,
"data": data
}
def new_representation_doc(
name, version_id, context, data=None, entity_id=None
):
"""Create skeleton data of asset document.
Args:
        name (str): Representation name. Considered as unique identifier
            of representation under version.
        version_id (Union[str, ObjectId]): Id of parent version.
        context (Dict[str, Any]): Representation context used to fill
            templates or to query.
data (Dict[str, Any]): Representation document data.
entity_id (Union[str, ObjectId]): Predefined id of document. New id is
created if not passed.
Returns:
        Dict[str, Any]: Skeleton of representation document.
"""
if data is None:
data = {}
return {
"_id": _create_or_convert_to_mongo_id(entity_id),
"schema": CURRENT_REPRESENTATION_SCHEMA,
"type": "representation",
"parent": version_id,
"name": name,
"data": data,
# Imprint shortcut to context for performance reasons.
"context": context
}
def new_thumbnail_doc(data=None, entity_id=None):
"""Create skeleton data of thumbnail document.
Args:
data (Dict[str, Any]): Thumbnail document data.
entity_id (Union[str, ObjectId]): Predefined id of document. New id is
created if not passed.
Returns:
Dict[str, Any]: Skeleton of thumbnail document.
"""
if data is None:
data = {}
return {
"_id": _create_or_convert_to_mongo_id(entity_id),
"type": "thumbnail",
"schema": CURRENT_THUMBNAIL_SCHEMA,
"data": data
}
def new_workfile_info_doc(
filename, asset_id, task_name, files, data=None, entity_id=None
):
"""Create skeleton data of workfile info document.
Workfile document is at this moment used primarily for artist notes.
Args:
filename (str): Filename of workfile.
        asset_id (Union[str, ObjectId]): Id of asset under which workfile
            lives.
task_name (str): Task under which was workfile created.
files (List[str]): List of rootless filepaths related to workfile.
        data (Dict[str, Any]): Additional metadata.
        entity_id (Union[str, ObjectId]): Predefined id of document. New id
            is created if not passed.
    Returns:
Dict[str, Any]: Skeleton of workfile info document.
"""
if not data:
data = {}
return {
"_id": _create_or_convert_to_mongo_id(entity_id),
"type": "workfile",
"parent": ObjectId(asset_id),
"task_name": task_name,
"filename": filename,
"data": data,
"files": files
}
def _prepare_update_data(old_doc, new_doc, replace):
changes = {}
for key, value in new_doc.items():
if key not in old_doc or value != old_doc[key]:
changes[key] = value
if replace:
for key in old_doc.keys():
if key not in new_doc:
changes[key] = REMOVED_VALUE
return changes
def prepare_subset_update_data(old_doc, new_doc, replace=True):
"""Compare two subset documents and prepare update data.
Based on compared values will create update data for 'UpdateOperation'.
Empty output means that documents are identical.
Returns:
Dict[str, Any]: Changes between old and new document.
"""
return _prepare_update_data(old_doc, new_doc, replace)
def prepare_version_update_data(old_doc, new_doc, replace=True):
"""Compare two version documents and prepare update data.
Based on compared values will create update data for 'UpdateOperation'.
Empty output means that documents are identical.
Returns:
Dict[str, Any]: Changes between old and new document.
"""
return _prepare_update_data(old_doc, new_doc, replace)
def prepare_hero_version_update_data(old_doc, new_doc, replace=True):
"""Compare two hero version documents and prepare update data.
Based on compared values will create update data for 'UpdateOperation'.
Empty output means that documents are identical.
Returns:
Dict[str, Any]: Changes between old and new document.
"""
return _prepare_update_data(old_doc, new_doc, replace)
def prepare_representation_update_data(old_doc, new_doc, replace=True):
"""Compare two representation documents and prepare update data.
Based on compared values will create update data for 'UpdateOperation'.
Empty output means that documents are identical.
Returns:
Dict[str, Any]: Changes between old and new document.
"""
return _prepare_update_data(old_doc, new_doc, replace)
def prepare_workfile_info_update_data(old_doc, new_doc, replace=True):
"""Compare two workfile info documents and prepare update data.
Based on compared values will create update data for 'UpdateOperation'.
Empty output means that documents are identical.
Returns:
Dict[str, Any]: Changes between old and new document.
"""
return _prepare_update_data(old_doc, new_doc, replace)
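For example (a sketch; only first level keys are compared, as described above):

```python
old_doc = {"name": "modelMain", "data": {"family": "model"}, "extra": 1}
new_doc = {"name": "modelMain", "data": {"family": "rig"}}

changes = prepare_subset_update_data(old_doc, new_doc)
# {"data": {"family": "rig"}, "extra": REMOVED_VALUE}
# - "data" differs, so the whole nested value is replaced
# - "extra" is missing in new_doc and replace=True, so it is removed
```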
@six.add_metaclass(ABCMeta)
class AbstractOperation(object):
"""Base operation class.
    Operation represents a call into the database. The call can create, change or
remove data.
Args:
project_name (str): On which project operation will happen.
entity_type (str): Type of entity on which change happens.
e.g. 'asset', 'representation' etc.
"""
def __init__(self, project_name, entity_type):
self._project_name = project_name
self._entity_type = entity_type
self._id = str(uuid.uuid4())
@property
def project_name(self):
return self._project_name
@property
def id(self):
"""Identifier of operation."""
return self._id
@property
def entity_type(self):
return self._entity_type
@abstractproperty
def operation_name(self):
"""Stringified type of operation."""
pass
@abstractmethod
def to_mongo_operation(self):
"""Convert operation to Mongo batch operation."""
pass
def to_data(self):
"""Convert opration to data that can be converted to json or others.
Warning:
Current state returns ObjectId objects which cannot be parsed by
json.
Returns:
Dict[str, Any]: Description of operation.
"""
return {
"id": self._id,
"entity_type": self.entity_type,
"project_name": self.project_name,
"operation": self.operation_name
}
class CreateOperation(AbstractOperation):
"""Opeartion to create an entity.
Args:
project_name (str): On which project operation will happen.
entity_type (str): Type of entity on which change happens.
e.g. 'asset', 'representation' etc.
data (Dict[str, Any]): Data of entity that will be created.
"""
operation_name = "create"
def __init__(self, project_name, entity_type, data):
super(CreateOperation, self).__init__(project_name, entity_type)
if not data:
data = {}
else:
data = copy.deepcopy(dict(data))
if "_id" not in data:
data["_id"] = ObjectId()
else:
data["_id"] = ObjectId(data["_id"])
self._entity_id = data["_id"]
self._data = data
def __setitem__(self, key, value):
self.set_value(key, value)
def __getitem__(self, key):
return self.data[key]
def set_value(self, key, value):
self.data[key] = value
def get(self, key, *args, **kwargs):
return self.data.get(key, *args, **kwargs)
@property
def entity_id(self):
return self._entity_id
@property
def data(self):
return self._data
def to_mongo_operation(self):
return InsertOne(copy.deepcopy(self._data))
def to_data(self):
output = super(CreateOperation, self).to_data()
output["data"] = copy.deepcopy(self.data)
return output
class UpdateOperation(AbstractOperation):
"""Opeartion to update an entity.
Args:
project_name (str): On which project operation will happen.
entity_type (str): Type of entity on which change happens.
e.g. 'asset', 'representation' etc.
entity_id (Union[str, ObjectId]): Identifier of an entity.
update_data (Dict[str, Any]): Key -> value changes that will be set in
database. If value is set to 'REMOVED_VALUE' the key will be
removed. Only first level of dictionary is checked (on purpose).
"""
operation_name = "update"
def __init__(self, project_name, entity_type, entity_id, update_data):
super(UpdateOperation, self).__init__(project_name, entity_type)
self._entity_id = ObjectId(entity_id)
self._update_data = update_data
@property
def entity_id(self):
return self._entity_id
@property
def update_data(self):
return self._update_data
def to_mongo_operation(self):
unset_data = {}
set_data = {}
for key, value in self._update_data.items():
if value is REMOVED_VALUE:
unset_data[key] = None
else:
set_data[key] = value
op_data = {}
if unset_data:
op_data["$unset"] = unset_data
if set_data:
op_data["$set"] = set_data
if not op_data:
return None
return UpdateOne(
{"_id": self.entity_id},
op_data
)
def to_data(self):
changes = {}
for key, value in self._update_data.items():
if value is REMOVED_VALUE:
value = None
changes[key] = value
output = super(UpdateOperation, self).to_data()
output.update({
"entity_id": self.entity_id,
"changes": changes
})
return output
class DeleteOperation(AbstractOperation):
"""Opeartion to delete an entity.
Args:
project_name (str): On which project operation will happen.
entity_type (str): Type of entity on which change happens.
e.g. 'asset', 'representation' etc.
entity_id (Union[str, ObjectId]): Entity id that will be removed.
"""
operation_name = "delete"
def __init__(self, project_name, entity_type, entity_id):
super(DeleteOperation, self).__init__(project_name, entity_type)
self._entity_id = ObjectId(entity_id)
@property
def entity_id(self):
return self._entity_id
def to_mongo_operation(self):
return DeleteOne({"_id": self.entity_id})
def to_data(self):
output = super(DeleteOperation, self).to_data()
output["entity_id"] = self.entity_id
return output
class OperationsSession(object):
"""Session storing operations that should happen in an order.
    At this moment it does not handle anything special and can be considered
    a plain list of operations that will happen one after another. If the
    same entity is created multiple times it is not handled in any way and
    document values are not validated.
    Operations may be related to multiple projects; on commit they are
    grouped and committed per project.
    """
def __init__(self):
self._operations = []
def add(self, operation):
"""Add operation to be processed.
Args:
            operation (AbstractOperation): Operation that should be processed.
"""
if not isinstance(
operation,
(CreateOperation, UpdateOperation, DeleteOperation)
):
raise TypeError("Expected Operation object got {}".format(
str(type(operation))
))
self._operations.append(operation)
def append(self, operation):
"""Add operation to be processed.
Args:
            operation (AbstractOperation): Operation that should be processed.
"""
self.add(operation)
def extend(self, operations):
"""Add operations to be processed.
Args:
            operations (List[AbstractOperation]): Operations that should be
processed.
"""
for operation in operations:
self.add(operation)
def remove(self, operation):
"""Remove operation."""
self._operations.remove(operation)
def clear(self):
"""Clear all registered operations."""
self._operations = []
def to_data(self):
return [
operation.to_data()
for operation in self._operations
]
def commit(self):
"""Commit session operations."""
operations, self._operations = self._operations, []
if not operations:
return
operations_by_project = collections.defaultdict(list)
for operation in operations:
operations_by_project[operation.project_name].append(operation)
for project_name, operations in operations_by_project.items():
bulk_writes = []
for operation in operations:
mongo_op = operation.to_mongo_operation()
if mongo_op is not None:
bulk_writes.append(mongo_op)
if bulk_writes:
collection = get_project_connection(project_name)
collection.bulk_write(bulk_writes)
def create_entity(self, project_name, entity_type, data):
"""Fast access to 'CreateOperation'.
Returns:
            CreateOperation: Object of create operation.
"""
operation = CreateOperation(project_name, entity_type, data)
self.add(operation)
return operation
def update_entity(self, project_name, entity_type, entity_id, update_data):
"""Fast access to 'UpdateOperation'.
Returns:
UpdateOperation: Object of update operation.
"""
operation = UpdateOperation(
project_name, entity_type, entity_id, update_data
)
self.add(operation)
return operation
def delete_entity(self, project_name, entity_type, entity_id):
"""Fast access to 'DeleteOperation'.
Returns:
DeleteOperation: Object of delete operation.
"""
operation = DeleteOperation(project_name, entity_type, entity_id)
self.add(operation)
return operation
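A session usage sketch (project name and ids are placeholders):

```python
session = OperationsSession()

# Create a subset from a prepared skeleton document.
subset_doc = new_subset_document(
    "modelMain", "model", ObjectId("6233bebd0fdeebd8a8a24e0d")
)
session.create_entity("demo_project", "subset", subset_doc)

# Set one nested key and remove another on an existing version.
session.update_entity(
    "demo_project",
    "version",
    "6233bebd0fdeebd8a8a24e0f",
    {"data.comment": "fixed normals", "data.source": REMOVED_VALUE}
)
session.commit()  # grouped by project and sent as bulk writes
```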
def create_project(project_name, project_code, library_project=False):
"""Create project using OpenPype settings.
    This project creation function does not validate the project document on
    creation. That is because the project document is created blindly with
    only the minimum required information about the project, which is its
    name, code, type and schema.
    Entered project name must be unique and project must not exist yet.
    Note:
        This function is here to be OP v4 ready, but in v3 it has more logic
        to do. That's why the imports are inside the function body.
Args:
project_name(str): New project name. Should be unique.
project_code(str): Project's code should be unique too.
library_project(bool): Project is library project.
Raises:
ValueError: When project name already exists in MongoDB.
Returns:
dict: Created project document.
"""
from openpype.settings import ProjectSettings, SaveWarningExc
from openpype.pipeline.schema import validate
if get_project(project_name, fields=["name"]):
raise ValueError("Project with name \"{}\" already exists".format(
project_name
))
if not PROJECT_NAME_REGEX.match(project_name):
raise ValueError((
"Project name \"{}\" contain invalid characters"
).format(project_name))
project_doc = {
"type": "project",
"name": project_name,
"data": {
"code": project_code,
"library_project": library_project
},
"schema": CURRENT_PROJECT_SCHEMA
}
op_session = OperationsSession()
# Insert document with basic data
create_op = op_session.create_entity(
project_name, project_doc["type"], project_doc
)
op_session.commit()
# Load ProjectSettings for the project and save it to store all attributes
# and Anatomy
try:
project_settings_entity = ProjectSettings(project_name)
project_settings_entity.save()
except SaveWarningExc as exc:
print(str(exc))
except Exception:
op_session.delete_entity(
project_name, project_doc["type"], create_op.entity_id
)
op_session.commit()
raise
project_doc = get_project(project_name)
try:
# Validate created project document
validate(project_doc)
except Exception:
# Remove project if is not valid
op_session.delete_entity(
project_name, project_doc["type"], create_op.entity_id
)
op_session.commit()
raise
return project_doc
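Usage is a single call (a sketch; the names are placeholders):

```python
project_doc = create_project("demo_project", "dmp", library_project=False)
```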

View file

@ -19,6 +19,7 @@ class AddLastWorkfileToLaunchArgs(PreLaunchHook):
"hiero",
"houdini",
"nukestudio",
"fusion",
"blender",
"photoshop",
"tvpaint",

View file

@ -0,0 +1,177 @@
import os
import shutil
from time import sleep
from openpype.client.entities import (
get_last_version_by_subset_id,
get_representations,
get_subsets,
)
from openpype.lib import PreLaunchHook
from openpype.lib.local_settings import get_local_site_id
from openpype.lib.profiles_filtering import filter_profiles
from openpype.pipeline.load.utils import get_representation_path
from openpype.settings.lib import get_project_settings
class CopyLastPublishedWorkfile(PreLaunchHook):
"""Copy last published workfile as first workfile.
    Prelaunch hook runs only if the last workfile path points to a file
    that does not exist yet.
    - That is possible only if it would be the first workfile version.
"""
# Before `AddLastWorkfileToLaunchArgs`
order = -1
app_groups = ["blender", "photoshop", "tvpaint", "aftereffects"]
def execute(self):
"""Check if local workfile doesn't exist, else copy it.
1- Check if setting for this feature is enabled
2- Check if workfile in work area doesn't exist
3- Check if published workfile exists and is copied locally in publish
4- Substitute copied published workfile as first workfile
Returns:
None: This is a void method.
"""
sync_server = self.modules_manager.get("sync_server")
if not sync_server or not sync_server.enabled:
self.log.debug("Sync server module is not enabled or available")
return
# Check there is no workfile available
last_workfile = self.data.get("last_workfile_path")
if os.path.exists(last_workfile):
self.log.debug(
"Last workfile exists. Skipping {} process.".format(
self.__class__.__name__
)
)
return
# Get data
project_name = self.data["project_name"]
task_name = self.data["task_name"]
task_type = self.data["task_type"]
host_name = self.application.host_name
# Check settings has enabled it
project_settings = get_project_settings(project_name)
profiles = project_settings["global"]["tools"]["Workfiles"][
"last_workfile_on_startup"
]
filter_data = {
"tasks": task_name,
"task_types": task_type,
"hosts": host_name,
}
last_workfile_settings = filter_profiles(profiles, filter_data)
use_last_published_workfile = last_workfile_settings.get(
"use_last_published_workfile"
)
if use_last_published_workfile is None:
self.log.info(
(
"Seems like old version of settings is used."
' Can\'t access custom templates in host "{}".'.format(
host_name
)
)
)
return
elif use_last_published_workfile is False:
self.log.info(
(
'Project "{}" has turned off to use last published'
' workfile as first workfile for host "{}"'.format(
project_name, host_name
)
)
)
return
self.log.info("Trying to fetch last published workfile...")
project_doc = self.data.get("project_doc")
asset_doc = self.data.get("asset_doc")
anatomy = self.data.get("anatomy")
# Check it can proceed
if not project_doc and not asset_doc:
return
# Get subset id
subset_id = next(
(
subset["_id"]
for subset in get_subsets(
project_name,
asset_ids=[asset_doc["_id"]],
fields=["_id", "data.family", "data.families"],
)
if subset["data"].get("family") == "workfile"
# Legacy compatibility
or "workfile" in subset["data"].get("families", {})
),
None,
)
if not subset_id:
self.log.debug(
                'No workfile subset for asset "{}".'.format(asset_doc["name"])
)
return
# Get workfile representation
last_version_doc = get_last_version_by_subset_id(
project_name, subset_id, fields=["_id"]
)
if not last_version_doc:
self.log.debug("Subset does not have any versions")
return
workfile_representation = next(
(
representation
for representation in get_representations(
project_name, version_ids=[last_version_doc["_id"]]
)
if representation["context"]["task"]["name"] == task_name
),
None,
)
if not workfile_representation:
self.log.debug(
'No published workfile for task "{}" and host "{}".'.format(
task_name, host_name
)
)
return
local_site_id = get_local_site_id()
sync_server.add_site(
project_name,
workfile_representation["_id"],
local_site_id,
force=True,
priority=99,
reset_timer=True,
)
while not sync_server.is_representation_on_site(
project_name, workfile_representation["_id"], local_site_id
):
sleep(5)
# Get paths
published_workfile_path = get_representation_path(
workfile_representation, root=anatomy.roots
)
local_workfile_dir = os.path.dirname(last_workfile)
# Copy file and substitute path
self.data["last_workfile_path"] = shutil.copy(
published_workfile_path, local_workfile_dir
)
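For context, a sketch of how the `filter_profiles` call above resolves a profile (the profile values are illustrative; an empty list is typically treated as "match anything"):

```python
from openpype.lib.profiles_filtering import filter_profiles

profiles = [{
    "tasks": [],
    "task_types": [],
    "hosts": ["blender", "photoshop"],
    "use_last_published_workfile": True
}]
filter_data = {
    "tasks": "modeling",
    "task_types": "Modeling",
    "hosts": "blender",
}
profile = filter_profiles(profiles, filter_data)
# profile["use_last_published_workfile"] -> True
```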

View file

@ -1,11 +1,11 @@
import os
import shutil
from openpype.lib import (
PreLaunchHook,
get_custom_workfile_template_by_context,
from openpype.lib import PreLaunchHook
from openpype.settings import get_project_settings
from openpype.pipeline.workfile import (
get_custom_workfile_template,
get_custom_workfile_template_by_string_context
)
from openpype.settings import get_project_settings
class CopyTemplateWorkfile(PreLaunchHook):
@ -54,41 +54,22 @@ class CopyTemplateWorkfile(PreLaunchHook):
project_name = self.data["project_name"]
asset_name = self.data["asset_name"]
task_name = self.data["task_name"]
host_name = self.application.host_name
project_settings = get_project_settings(project_name)
host_settings = project_settings[self.application.host_name]
workfile_builder_settings = host_settings.get("workfile_builder")
if not workfile_builder_settings:
# TODO remove warning when deprecated
self.log.warning((
"Seems like old version of settings is used."
" Can't access custom templates in host \"{}\"."
).format(self.application.full_label))
return
if not workfile_builder_settings["create_first_version"]:
self.log.info((
"Project \"{}\" has turned off to create first workfile for"
" application \"{}\""
).format(project_name, self.application.full_label))
return
# Backwards compatibility
template_profiles = workfile_builder_settings.get("custom_templates")
if not template_profiles:
self.log.info(
"Custom templates are not filled. Skipping template copy."
)
return
project_doc = self.data.get("project_doc")
asset_doc = self.data.get("asset_doc")
anatomy = self.data.get("anatomy")
if project_doc and asset_doc:
self.log.debug("Started filtering of custom template paths.")
template_path = get_custom_workfile_template_by_context(
template_profiles, project_doc, asset_doc, task_name, anatomy
template_path = get_custom_workfile_template(
project_doc,
asset_doc,
task_name,
host_name,
anatomy,
project_settings
)
else:
@ -96,10 +77,13 @@ class CopyTemplateWorkfile(PreLaunchHook):
"Global data collection probably did not execute."
" Using backup solution."
))
dbcon = self.data.get("dbcon")
template_path = get_custom_workfile_template_by_string_context(
template_profiles, project_name, asset_name, task_name,
dbcon, anatomy
project_name,
asset_name,
task_name,
host_name,
anatomy,
project_settings
)
if not template_path:

View file

@ -1,8 +1,6 @@
import os
from openpype.lib import (
PreLaunchHook,
create_workdir_extra_folders
)
from openpype.lib import PreLaunchHook
from openpype.pipeline.workfile import create_workdir_extra_folders
class AddLastWorkfileToLaunchArgs(PreLaunchHook):

View file

@ -1,11 +1,11 @@
from openpype.api import Anatomy
from openpype.client import get_project, get_asset_by_name
from openpype.lib import (
PreLaunchHook,
EnvironmentPrepData,
prepare_app_environments,
prepare_context_environments
)
from openpype.pipeline import AvalonMongoDB
from openpype.pipeline import AvalonMongoDB, Anatomy
class GlobalHostDataHook(PreLaunchHook):
@ -70,7 +70,7 @@ class GlobalHostDataHook(PreLaunchHook):
self.data["dbcon"] = dbcon
# Project document
project_doc = dbcon.find_one({"type": "project"})
project_doc = get_project(project_name)
self.data["project_doc"] = project_doc
asset_name = self.data.get("asset_name")
@ -80,8 +80,5 @@ class GlobalHostDataHook(PreLaunchHook):
)
return
asset_doc = dbcon.find_one({
"type": "asset",
"name": asset_name
})
asset_doc = get_asset_by_name(project_name, asset_name)
self.data["asset_doc"] = asset_doc

24
openpype/host/__init__.py Normal file
View file

@ -0,0 +1,24 @@
from .host import (
HostBase,
)
from .interfaces import (
IWorkfileHost,
ILoadHost,
IPublishHost,
INewPublisher,
)
from .dirmap import HostDirmap
__all__ = (
"HostBase",
"IWorkfileHost",
"ILoadHost",
"IPublishHost",
"INewPublisher",
"HostDirmap",
)

205
openpype/host/dirmap.py Normal file
View file

@ -0,0 +1,205 @@
"""Dirmap functionality used in host integrations inside DCCs.
The idea for the current dirmap implementation comes from Maya, where it is
possible to enter source and destination roots and Maya will try to replace
each source found in a referenced file with each of the destination paths.
The first path which exists is used.
"""
import os
from abc import ABCMeta, abstractmethod
import six
from openpype.lib import Logger
from openpype.modules import ModulesManager
from openpype.settings import get_project_settings
from openpype.settings.lib import get_site_local_overrides
@six.add_metaclass(ABCMeta)
class HostDirmap(object):
"""Abstract class for running dirmap on a workfile in a host.
    Dirmap is used to translate paths inside a host workfile from one
    OS to another. (E.g. an artist created the workfile on Windows and a
    different artist opens the same file on Linux.)
    Expects methods to be implemented inside of host:
        on_enable_dirmap: run host code for enabling dirmap
        dirmap_routine: run host code to do the actual remapping
"""
def __init__(
self, host_name, project_name, project_settings=None, sync_module=None
):
self.host_name = host_name
self.project_name = project_name
self._project_settings = project_settings
self._sync_module = sync_module # to limit reinit of Modules
self._log = None
self._mapping = None # cache mapping
@property
def sync_module(self):
if self._sync_module is None:
manager = ModulesManager()
self._sync_module = manager["sync_server"]
return self._sync_module
@property
def project_settings(self):
if self._project_settings is None:
self._project_settings = get_project_settings(self.project_name)
return self._project_settings
@property
def log(self):
if self._log is None:
self._log = Logger.get_logger(self.__class__.__name__)
return self._log
@abstractmethod
def on_enable_dirmap(self):
"""Run host dependent operation for enabling dirmap if necessary."""
pass
@abstractmethod
def dirmap_routine(self, source_path, destination_path):
"""Run host dependent remapping from source_path to destination_path"""
pass
def process_dirmap(self):
        # type: () -> None
        """Go through all paths in Settings and set them using `dirmap`.
        If the artist has Site Sync enabled, take the dirmap mapping directly
        from Local Settings when the artist is syncing a workfile locally.
"""
if not self._mapping:
self._mapping = self.get_mappings(self.project_settings)
if not self._mapping:
return
self.log.info("Processing directory mapping ...")
self.on_enable_dirmap()
self.log.info("mapping:: {}".format(self._mapping))
for k, sp in enumerate(self._mapping["source-path"]):
dst = self._mapping["destination-path"][k]
try:
print("{} -> {}".format(sp, dst))
self.dirmap_routine(sp, dst)
except IndexError:
# missing corresponding destination path
self.log.error((
"invalid dirmap mapping, missing corresponding"
" destination directory."
))
break
except RuntimeError:
self.log.error(
"invalid path {} -> {}, mapping not registered".format(
sp, dst
)
)
continue
def get_mappings(self, project_settings):
"""Get translation from source-path to destination-path.
        It checks if Site Sync is enabled and the user chose to use the local
        site; in that case configuration in Local Settings takes precedence.
"""
local_mapping = self._get_local_sync_dirmap(project_settings)
dirmap_label = "{}-dirmap".format(self.host_name)
if (
not self.project_settings[self.host_name].get(dirmap_label)
and not local_mapping
):
return {}
mapping_settings = self.project_settings[self.host_name][dirmap_label]
mapping_enabled = mapping_settings["enabled"] or bool(local_mapping)
if not mapping_enabled:
return {}
mapping = (
local_mapping
or mapping_settings["paths"]
or {}
)
if (
not mapping
or not mapping.get("destination-path")
or not mapping.get("source-path")
):
return {}
return mapping
def _get_local_sync_dirmap(self, project_settings):
"""
        Returns dirmap if sync to local project is enabled.
        The only valid mapping is from roots of the remote site to the local
        site set in Local Settings.
Args:
project_settings (dict)
Returns:
dict : { "source-path": [XXX], "destination-path": [YYYY]}
"""
mapping = {}
if not project_settings["global"]["sync_server"]["enabled"]:
return mapping
project_name = os.getenv("AVALON_PROJECT")
active_site = self.sync_module.get_local_normalized_site(
self.sync_module.get_active_site(project_name))
remote_site = self.sync_module.get_local_normalized_site(
self.sync_module.get_remote_site(project_name))
self.log.debug(
"active {} - remote {}".format(active_site, remote_site)
)
if (
active_site == "local"
and project_name in self.sync_module.get_enabled_projects()
and active_site != remote_site
):
sync_settings = self.sync_module.get_sync_project_setting(
project_name,
exclude_locals=False,
cached=False)
active_overrides = get_site_local_overrides(
project_name, active_site)
remote_overrides = get_site_local_overrides(
project_name, remote_site)
self.log.debug("local overrides {}".format(active_overrides))
self.log.debug("remote overrides {}".format(remote_overrides))
for root_name, active_site_dir in active_overrides.items():
remote_site_dir = (
remote_overrides.get(root_name)
or sync_settings["sites"][remote_site]["root"][root_name]
)
if os.path.isdir(active_site_dir):
if "destination-path" not in mapping:
mapping["destination-path"] = []
mapping["destination-path"].append(active_site_dir)
if "source-path" not in mapping:
mapping["source-path"] = []
mapping["source-path"].append(remote_site_dir)
self.log.debug("local sync mapping:: {}".format(mapping))
return mapping
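A minimal subclass sketch (illustrative only; a real host would wire 'dirmap_routine' to its own remapping API):

```python
class ExampleDirmap(HostDirmap):
    """Hypothetical host integration used only for illustration."""

    def on_enable_dirmap(self):
        # Nothing to prepare in this example host.
        pass

    def dirmap_routine(self, source_path, destination_path):
        print("remap {} -> {}".format(source_path, destination_path))

# dirmap = ExampleDirmap("example_host", "demo_project")
# dirmap.process_dirmap()  # requires real project settings
```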

174
openpype/host/host.py Normal file
View file

@ -0,0 +1,174 @@
import logging
import contextlib
from abc import ABCMeta, abstractproperty
import six
# NOTE can't import 'typing' because of issues in Maya 2020
# - shiboken crashes on 'typing' module import
@six.add_metaclass(ABCMeta)
class HostBase(object):
"""Base of host implementation class.
    Host is the pipeline implementation of a DCC application. This class
    should help to identify what must/should/can be implemented for specific
    functionality.
    Compared to the 'avalon' concept:
    What was before considered as functions in the host implementation
    folder. The host implementation should primarily care about adding the
    ability of creation (mark subsets to be published) and optionally about
    referencing published representations as containers.
    A host may need to extend some functionality, like working with workfiles
    or loading. Not all host implementations may allow that; for those
    purposes the logic can be extended by implementing functions for that
    purpose. There are prepared interfaces to identify what must be
    implemented to be able to use that functionality.
    - the current statement is that it is not required to inherit from the
      interfaces, but all of the methods are validated (only their
      existence!)
# Installation of host before (avalon concept):
```python
from openpype.pipeline import install_host
import openpype.hosts.maya.api as host
install_host(host)
```
# Installation of host now:
```python
from openpype.pipeline import install_host
from openpype.hosts.maya.api import MayaHost
host = MayaHost()
install_host(host)
```
Todo:
- move content of 'install_host' as method of this class
- register host object
- install legacy_io
- install global plugin paths
- store registered plugin paths to this object
- handle current context (project, asset, task)
- this must be done in many separated steps
        - have its own object of host tools instead of using globals
This implementation will probably change over time when more
functionality and responsibility will be added.
"""
_log = None
def __init__(self):
"""Initialization of host.
Register DCC callbacks, host specific plugin paths, targets etc.
(Part of what 'install' did in 'avalon' concept.)
Note:
At this moment global "installation" must happen before host
installation. Because of this current limitation it is recommended
to implement 'install' method which is triggered after global
'install'.
"""
pass
def install(self):
"""Install host specific functionality.
This is where should be added menu with tools, registered callbacks
and other host integration initialization.
It is called automatically when 'openpype.pipeline.install_host' is
triggered.
"""
pass
@property
def log(self):
if self._log is None:
self._log = logging.getLogger(self.__class__.__name__)
return self._log
@abstractproperty
def name(self):
"""Host name."""
pass
def get_current_context(self):
"""Get current context information.
        This method should be used to get the current context of the host.
        Usage of this method can be crucial for host implementations in DCCs
        where multiple workfiles can be opened at one moment and a change of
        context can't be caught properly.
Default implementation returns values from 'legacy_io.Session'.
Returns:
dict: Context with 3 keys 'project_name', 'asset_name' and
'task_name'. All of them can be 'None'.
"""
from openpype.pipeline import legacy_io
        if not legacy_io.is_installed():
            legacy_io.install()
return {
"project_name": legacy_io.Session["AVALON_PROJECT"],
"asset_name": legacy_io.Session["AVALON_ASSET"],
"task_name": legacy_io.Session["AVALON_TASK"]
}
def get_context_title(self):
"""Context title shown for UI purposes.
Should return current context title if possible.
Note:
This method is used only for UI purposes so it is possible to
return some logical title for contextless cases.
Is not meant for "Context menu" label.
Returns:
str: Context title.
None: Default title is used based on UI implementation.
"""
# Use current context to fill the context title
current_context = self.get_current_context()
project_name = current_context["project_name"]
asset_name = current_context["asset_name"]
task_name = current_context["task_name"]
items = []
if project_name:
items.append(project_name)
if asset_name:
items.append(asset_name)
if task_name:
items.append(task_name)
if items:
return "/".join(items)
return None
@contextlib.contextmanager
def maintained_selection(self):
"""Some functionlity will happen but selection should stay same.
This is DCC specific. Some may not allow to implement this ability
that is reason why default implementation is empty context manager.
Yields:
None: Yield when is ready to restore selected at the end.
"""
try:
yield
finally:
pass
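A minimal concrete host sketch (illustrative; the abstract 'name' property is satisfied by a plain class attribute):

```python
class ExampleHost(HostBase):
    """Hypothetical host used only for illustration."""

    name = "example"

    def install(self):
        self.log.info("Installing example host integration")

host = ExampleHost()
host.install()
```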

386
openpype/host/interfaces.py Normal file
View file

@ -0,0 +1,386 @@
from abc import ABCMeta, abstractmethod
import six
class MissingMethodsError(ValueError):
"""Exception when host miss some required methods for specific workflow.
Args:
host (HostBase): Host implementation where are missing methods.
missing_methods (list[str]): List of missing methods.
"""
def __init__(self, host, missing_methods):
joined_missing = ", ".join(
['"{}"'.format(item) for item in missing_methods]
)
host_name = getattr(host, "name", None)
if not host_name:
try:
host_name = host.__file__.replace("\\", "/").split("/")[-3]
except Exception:
host_name = str(host)
message = (
"Host \"{}\" miss methods {}".format(host_name, joined_missing)
)
super(MissingMethodsError, self).__init__(message)
class ILoadHost:
"""Implementation requirements to be able use reference of representations.
The load plugins can do referencing even without implementation of methods
here, but switch and removement of containers would not be possible.
Questions:
- Is list container dependency of host or load plugins?
- Should this be directly in HostBase?
- how to find out if referencing is available?
- do we need to know that?
"""
@staticmethod
def get_missing_load_methods(host):
"""Look for missing methods on "old type" host implementation.
Method is used for validation of implemented functions related to
loading. Checks only existence of methods.
Args:
            host (Union[ModuleType, HostBase]): Object of host where to look
                for required methods.
Returns:
list[str]: Missing method implementations for loading workflow.
"""
if isinstance(host, ILoadHost):
return []
required = ["ls"]
missing = []
for name in required:
if not hasattr(host, name):
missing.append(name)
return missing
@staticmethod
def validate_load_methods(host):
"""Validate implemented methods of "old type" host for load workflow.
Args:
            host (Union[ModuleType, HostBase]): Object of host to validate.
Raises:
MissingMethodsError: If there are missing methods on host
implementation.
"""
missing = ILoadHost.get_missing_load_methods(host)
if missing:
raise MissingMethodsError(host, missing)
@abstractmethod
def get_containers(self):
"""Retreive referenced containers from scene.
This can be implemented in hosts where referencing can be used.
Todo:
Rename function to something more self explanatory.
Suggestion: 'get_containers'
Returns:
list[dict]: Information about loaded containers.
"""
pass
# --- Deprecated method names ---
def ls(self):
"""Deprecated variant of 'get_containers'.
Todo:
Remove when all usages are replaced.
"""
return self.get_containers()
@six.add_metaclass(ABCMeta)
class IWorkfileHost:
"""Implementation requirements to be able use workfile utils and tool."""
@staticmethod
def get_missing_workfile_methods(host):
"""Look for missing methods on "old type" host implementation.
Method is used for validation of implemented functions related to
workfiles. Checks only existence of methods.
Args:
            host (Union[ModuleType, HostBase]): Object of host where to look
                for required methods.
Returns:
list[str]: Missing method implementations for workfiles workflow.
"""
if isinstance(host, IWorkfileHost):
return []
required = [
"open_file",
"save_file",
"current_file",
"has_unsaved_changes",
"file_extensions",
"work_root",
]
missing = []
for name in required:
if not hasattr(host, name):
missing.append(name)
return missing
@staticmethod
def validate_workfile_methods(host):
"""Validate methods of "old type" host for workfiles workflow.
Args:
            host (Union[ModuleType, HostBase]): Object of host to validate.
Raises:
MissingMethodsError: If there are missing methods on host
implementation.
"""
missing = IWorkfileHost.get_missing_workfile_methods(host)
if missing:
raise MissingMethodsError(host, missing)
@abstractmethod
def get_workfile_extensions(self):
"""Extensions that can be used as save.
Questions:
This could potentially use 'HostDefinition'.
"""
return []
@abstractmethod
def save_workfile(self, dst_path=None):
"""Save currently opened scene.
Args:
dst_path (str): Where the current scene should be saved. Or use
current path if 'None' is passed.
"""
pass
@abstractmethod
def open_workfile(self, filepath):
"""Open passed filepath in the host.
Args:
filepath (str): Path to workfile.
"""
pass
@abstractmethod
def get_current_workfile(self):
"""Retreive path to current opened file.
Returns:
str: Path to file which is currently opened.
None: If nothing is opened.
"""
return None
def workfile_has_unsaved_changes(self):
"""Currently opened scene is saved.
Not all hosts can know if current scene is saved because the API of
DCC does not support it.
Returns:
bool: True if scene is saved and False if has unsaved
modifications.
None: Can't tell if workfiles has modifications.
"""
return None
def work_root(self, session):
"""Modify workdir per host.
Default implementation keeps workdir untouched.
Warnings:
            We must handle this modification in a more sophisticated way,
            because this can't be called outside of the DCC, so opening of
            the last workfile (calculated before the DCC is launched) is
            complicated. Also breaking the defined work template is not a
            good idea.
            The only place where it's really used and can make sense is
            Maya, where workspace.mel can modify the subfolders in which to
            look for maya files.
Args:
session (dict): Session context data.
Returns:
str: Path to new workdir.
"""
return session["AVALON_WORKDIR"]
# --- Deprecated method names ---
def file_extensions(self):
"""Deprecated variant of 'get_workfile_extensions'.
Todo:
Remove when all usages are replaced.
"""
return self.get_workfile_extensions()
def save_file(self, dst_path=None):
"""Deprecated variant of 'save_workfile'.
Todo:
Remove when all usages are replaced.
"""
self.save_workfile(dst_path)
def open_file(self, filepath):
"""Deprecated variant of 'open_workfile'.
Todo:
Remove when all usages are replaced.
"""
return self.open_workfile(filepath)
def current_file(self):
"""Deprecated variant of 'get_current_workfile'.
Todo:
Remove when all usages are replaced.
"""
return self.get_current_workfile()
def has_unsaved_changes(self):
"""Deprecated variant of 'workfile_has_unsaved_changes'.
Todo:
Remove when all usages are replaced.
"""
return self.workfile_has_unsaved_changes()
class IPublishHost:
"""Functions related to new creation system in new publisher.
    The new publisher is not storing information only about each created
    instance but also some global data. At this moment the data are related
    only to context publish plugins, but that can be extended in the future.
"""
@staticmethod
def get_missing_publish_methods(host):
"""Look for missing methods on "old type" host implementation.
Method is used for validation of implemented functions related to
new publish creation. Checks only existence of methods.
Args:
            host (Union[ModuleType, HostBase]): Host module where to look
                for required methods.
Returns:
            list[str]: Missing method implementations for the new publisher
                workflow.
"""
if isinstance(host, IPublishHost):
return []
required = [
"get_context_data",
"update_context_data",
"get_context_title",
"get_current_context",
]
missing = []
for name in required:
if not hasattr(host, name):
missing.append(name)
return missing
@staticmethod
def validate_publish_methods(host):
"""Validate implemented methods of "old type" host.
Args:
            host (Union[ModuleType, HostBase]): Host module to validate.
Raises:
MissingMethodsError: If there are missing methods on host
implementation.
"""
missing = IPublishHost.get_missing_publish_methods(host)
if missing:
raise MissingMethodsError(host, missing)
@abstractmethod
def get_context_data(self):
"""Get global data related to creation-publishing from workfile.
        These data are not related to any created instance but to the whole
        publishing context. Not saving/returning them will cause each reset
        of publishing to reset all values to defaults.
        Context data can contain information about enabled/disabled publish
        plugins or other values that can be filled by the artist.
Returns:
dict: Context data stored using 'update_context_data'.
"""
pass
@abstractmethod
def update_context_data(self, data, changes):
"""Store global context data to workfile.
        Called when some values in the context data have changed.
        Without storing the values in a way that 'get_context_data' would
        return them, each reset of publishing would lose the values filled
        by the artist. Best practice is to store values into the workfile,
        if possible.
Args:
data (dict): New data as are.
changes (dict): Only data that has been changed. Each value has
tuple with '(<old>, <new>)' value.
"""
pass
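A sketch of satisfying the interface by keeping context data in memory ('InMemoryPublishContext' is hypothetical; a real host should persist the data into the workfile, as recommended above):

```python
class InMemoryPublishContext(IPublishHost):
    """Illustrative only - data does not survive workfile reopen."""

    def __init__(self):
        self._context_data = {}

    def get_context_data(self):
        return dict(self._context_data)

    def update_context_data(self, data, changes):
        self._context_data = dict(data)
```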
class INewPublisher(IPublishHost):
"""Legacy interface replaced by 'IPublishHost'.
Deprecated:
'INewPublisher' is replaced by 'IPublishHost' please change your
imports.
There is no "reasonable" way hot mark these classes as deprecated
to show warning of wrong import. Deprecated since 3.14.* will be
removed in 3.15.*
"""
pass
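For clarity, here is a minimal sketch of a host implementing 'IPublishHost' by persisting the context data as JSON in a scene attribute. The 'read_scene_attribute'/'write_scene_attribute' helpers are hypothetical stand-ins for host-specific storage, not part of the real API:

import json

class MyHost(IPublishHost):
    def get_context_data(self):
        # Read back whatever 'update_context_data' stored; default to {}.
        raw = read_scene_attribute("openpype_context")  # hypothetical helper
        return json.loads(raw) if raw else {}

    def update_context_data(self, data, changes):
        # Persist the full data; 'changes' maps key -> (<old>, <new>) tuples.
        write_scene_attribute("openpype_context", json.dumps(data))  # hypothetical helper

Validating an "old type" host module is then a single call to 'IPublishHost.validate_publish_methods(host_module)', which raises 'MissingMethodsError' when any required method is absent.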

View file

@ -1,9 +1,6 @@
def add_implementation_envs(env, _app):
"""Modify environments to contain all required for implementation."""
defaults = {
"OPENPYPE_LOG_NO_COLORS": "True",
"WEBSOCKET_URL": "ws://localhost:8097/ws/"
}
for key, value in defaults.items():
if not env.get(key):
env[key] = value
from .addon import AfterEffectsAddon
__all__ = (
"AfterEffectsAddon",
)

View file

@ -0,0 +1,22 @@
from openpype.modules import OpenPypeModule, IHostAddon
class AfterEffectsAddon(OpenPypeModule, IHostAddon):
name = "aftereffects"
host_name = "aftereffects"
def initialize(self, module_settings):
self.enabled = True
def add_implementation_envs(self, env, _app):
"""Modify environments to contain all required for implementation."""
defaults = {
"OPENPYPE_LOG_NO_COLORS": "True",
"WEBSOCKET_URL": "ws://localhost:8097/ws/"
}
for key, value in defaults.items():
if not env.get(key):
env[key] = value
def get_workfile_extensions(self):
return [".aep"]

View file

@ -10,8 +10,9 @@ from wsrpc_aiohttp import (
WebSocketAsync
)
from Qt import QtCore
from qtpy import QtCore
from openpype.lib import Logger
from openpype.pipeline import legacy_io
from openpype.tools.utils import host_tools
from openpype.tools.adobe_webserver.app import WebServerTool
@ -84,8 +85,6 @@ class ProcessLauncher(QtCore.QObject):
@property
def log(self):
if self._log is None:
from openpype.api import Logger
self._log = Logger.get_logger("{}-launcher".format(
self.route_name))
return self._log

View file

@ -1,13 +1,16 @@
import os
import sys
import re
import json
import contextlib
import traceback
import logging
from functools import partial
from Qt import QtWidgets
from qtpy import QtWidgets
from openpype.pipeline import install_host
from openpype.lib.remote_publish import headless_publish
from openpype.modules import ModulesManager
from openpype.tools.utils import host_tools
from .launch_logic import ProcessLauncher, get_stub
@ -35,10 +38,18 @@ def main(*subprocess_args):
launcher.start()
if os.environ.get("HEADLESS_PUBLISH"):
launcher.execute_in_main_thread(lambda: headless_publish(
log,
"CloseAE",
os.environ.get("IS_TEST")))
manager = ModulesManager()
webpublisher_addon = manager["webpublisher"]
launcher.execute_in_main_thread(
partial(
webpublisher_addon.headless_publish,
log,
"CloseAE",
os.environ.get("IS_TEST")
)
)
elif os.environ.get("AVALON_PHOTOSHOP_WORKFILES_ON_LAUNCH", True):
save = False
if os.getenv("WORKFILES_SAVE_AS"):
@ -68,3 +79,57 @@ def get_extension_manifest_path():
"CSXS",
"manifest.xml"
)
def get_unique_layer_name(layers, name):
"""
Gets all layer names and, if 'name' is present among them, increases
the suffix by 1 (e.g. creates a unique layer name - for the Loader).
Args:
layers (list): of strings, names only
name (string): checked value
Returns:
(string): name_00X (without version)
"""
names = {}
for layer in layers:
layer_name = re.sub(r'_\d{3}$', '', layer)
if layer_name in names.keys():
names[layer_name] = names[layer_name] + 1
else:
names[layer_name] = 1
occurrences = names.get(name, 0)
return "{}_{:0>3d}".format(name, occurrences + 1)
def get_background_layers(file_url):
"""
Pulls file names from the background json file and enriches them with
the folder url so AE is able to import the files.
Order is important; it follows the order in the json.
Args:
file_url (str): abs url of background json
Returns:
(list): of abs paths to images
"""
with open(file_url) as json_file:
data = json.load(json_file)
layers = list()
bg_folder = os.path.dirname(file_url)
for child in data['children']:
if child.get("filename"):
layers.append(os.path.join(bg_folder, child.get("filename")).
replace("\\", "/"))
else:
for layer in child['children']:
if layer.get("filename"):
layers.append(os.path.join(bg_folder,
layer.get("filename")).
replace("\\", "/"))
return layers
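A quick usage sketch of the two helpers above; the layer names and json shape here are made-up examples:

existing = ["bg_001", "bg_002", "char_001"]
get_unique_layer_name(existing, "bg")   # -> "bg_003"
get_unique_layer_name(existing, "fx")   # -> "fx_001"

# get_background_layers expects json shaped roughly like:
# {"children": [{"filename": "sky.png"},
#               {"children": [{"filename": "hills.png"}]}]}
# and returns absolute forward-slash paths built next to the json file.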

View file

@ -1,12 +1,10 @@
import os
import sys
from Qt import QtWidgets
from qtpy import QtWidgets
import pyblish.api
from openpype import lib
from openpype.api import Logger
from openpype.lib import Logger, register_event_callback
from openpype.pipeline import (
register_loader_plugin_path,
register_creator_plugin_path,
@ -15,10 +13,10 @@ from openpype.pipeline import (
AVALON_CONTAINER_ID,
legacy_io,
)
from openpype.pipeline.load import any_outdated_containers
import openpype.hosts.aftereffects
from openpype.lib import register_event_callback
from .launch_logic import get_stub
from .launch_logic import get_stub, ConnectionNotEstablishedYet
log = Logger.get_logger(__name__)
@ -111,7 +109,7 @@ def ls():
"""
try:
stub = get_stub() # only after AfterEffects is up
except lib.ConnectionNotEstablishedYet:
except ConnectionNotEstablishedYet:
print("Not connected yet, ignoring")
return
@ -136,7 +134,7 @@ def ls():
def check_inventory():
"""Checks loaded containers if they are of highest version"""
if not lib.any_outdated():
if not any_outdated_containers():
return
# Warn about outdated containers.
@ -284,7 +282,7 @@ def _get_stub():
"""
try:
stub = get_stub() # only after Photoshop is up
except lib.ConnectionNotEstablishedYet:
except ConnectionNotEstablishedYet:
print("Not connected yet, ignoring")
return

View file

@ -1,12 +1,11 @@
"""Host API required Work Files tool"""
import os
from openpype.pipeline import HOST_WORKFILE_EXTENSIONS
from .launch_logic import get_stub
def file_extensions():
return HOST_WORKFILE_EXTENSIONS["aftereffects"]
return [".aep"]
def has_unsaved_changes():

View file

@ -17,11 +17,8 @@ class RenderCreator(Creator):
create_allow_context_change = True
def __init__(
self, create_context, system_settings, project_settings, headless=False
):
super(RenderCreator, self).__init__(create_context, system_settings,
project_settings, headless)
def __init__(self, project_settings, *args, **kwargs):
super(RenderCreator, self).__init__(project_settings, *args, **kwargs)
self._default_variants = (project_settings["aftereffects"]
["create"]
["RenderCreator"]

View file

@ -11,6 +11,8 @@ class AEWorkfileCreator(AutoCreator):
identifier = "workfile"
family = "workfile"
default_variant = "Main"
def get_instance_attr_defs(self):
return []
@ -35,7 +37,6 @@ class AEWorkfileCreator(AutoCreator):
existing_instance = instance
break
variant = ''
project_name = legacy_io.Session["AVALON_PROJECT"]
asset_name = legacy_io.Session["AVALON_ASSET"]
task_name = legacy_io.Session["AVALON_TASK"]
@ -44,15 +45,17 @@ class AEWorkfileCreator(AutoCreator):
if existing_instance is None:
asset_doc = get_asset_by_name(project_name, asset_name)
subset_name = self.get_subset_name(
variant, task_name, asset_doc, project_name, host_name
self.default_variant, task_name, asset_doc,
project_name, host_name
)
data = {
"asset": asset_name,
"task": task_name,
"variant": variant
"variant": self.default_variant
}
data.update(self.get_dynamic_data(
variant, task_name, asset_doc, project_name, host_name
self.default_variant, task_name, asset_doc,
project_name, host_name
))
new_instance = CreatedInstance(
@ -69,7 +72,9 @@ class AEWorkfileCreator(AutoCreator):
):
asset_doc = get_asset_by_name(project_name, asset_name)
subset_name = self.get_subset_name(
variant, task_name, asset_doc, project_name, host_name
self.default_variant, task_name, asset_doc,
project_name, host_name
)
existing_instance["asset"] = asset_name
existing_instance["task"] = task_name
existing_instance["subset"] = subset_name

View file

@ -1,14 +1,14 @@
import re
from openpype.lib import (
get_background_layers,
get_unique_layer_name
)
from openpype.pipeline import get_representation_path
from openpype.hosts.aftereffects.api import (
AfterEffectsLoader,
containerise
)
from openpype.hosts.aftereffects.api.lib import (
get_background_layers,
get_unique_layer_name,
)
class BackgroundLoader(AfterEffectsLoader):

View file

@ -1,12 +1,11 @@
import re
from openpype import lib
from openpype.pipeline import get_representation_path
from openpype.hosts.aftereffects.api import (
AfterEffectsLoader,
containerise
)
from openpype.hosts.aftereffects.api.lib import get_unique_layer_name
class FileLoader(AfterEffectsLoader):
@ -28,7 +27,7 @@ class FileLoader(AfterEffectsLoader):
stub = self.get_stub()
layers = stub.get_items(comps=True, folders=True, footages=True)
existing_layers = [layer.name for layer in layers]
comp_name = lib.get_unique_layer_name(
comp_name = get_unique_layer_name(
existing_layers, "{}_{}".format(context["asset"]["name"], name))
import_options = {}
@ -87,7 +86,7 @@ class FileLoader(AfterEffectsLoader):
if namespace_from_container != layer_name:
layers = stub.get_items(comps=True)
existing_layers = [layer.name for layer in layers]
layer_name = lib.get_unique_layer_name(
layer_name = get_unique_layer_name(
existing_layers,
"{}_{}".format(context["asset"], context["subset"]))
else: # switching version - keep same name

View file

@ -6,8 +6,8 @@ import attr
import pyblish.api
from openpype.settings import get_project_settings
from openpype.lib import abstract_collect_render
from openpype.lib.abstract_collect_render import RenderInstance
from openpype.pipeline import publish
from openpype.pipeline.publish import RenderInstance
from openpype.hosts.aftereffects.api import get_stub
@ -25,7 +25,7 @@ class AERenderInstance(RenderInstance):
file_name = attr.ib(default=None)
class CollectAERender(abstract_collect_render.AbstractCollectRender):
class CollectAERender(publish.AbstractCollectRender):
order = pyblish.api.CollectorOrder + 0.405
label = "Collect After Effects Render Layers"
@ -102,7 +102,6 @@ class CollectAERender(abstract_collect_render.AbstractCollectRender):
attachTo=False,
setMembers='',
publish=True,
renderer='aerender',
name=subset_name,
resolutionWidth=render_q.width,
resolutionHeight=render_q.height,
@ -113,7 +112,6 @@ class CollectAERender(abstract_collect_render.AbstractCollectRender):
frameStart=frame_start,
frameEnd=frame_end,
frameStep=1,
toBeRenderedOn='deadline',
fps=fps,
app_version=app_version,
publish_attributes=inst.data.get("publish_attributes", {}),
@ -138,6 +136,9 @@ class CollectAERender(abstract_collect_render.AbstractCollectRender):
fam = "render.farm"
if fam not in instance.families:
instance.families.append(fam)
instance.toBeRenderedOn = "deadline"
instance.renderer = "aerender"
instance.farm = True # to skip integrate
instances.append(instance)
instances_to_remove.append(inst)

View file

@ -1,8 +1,8 @@
import os
import pyblish.api
from openpype.lib import get_subset_name_with_asset_doc
from openpype.pipeline import legacy_io
from openpype.pipeline.create import get_subset_name
class CollectWorkfile(pyblish.api.ContextPlugin):
@ -11,6 +11,8 @@ class CollectWorkfile(pyblish.api.ContextPlugin):
label = "Collect After Effects Workfile Instance"
order = pyblish.api.CollectorOrder + 0.1
default_variant = "Main"
def process(self, context):
existing_instance = None
for instance in context:
@ -69,13 +71,14 @@ class CollectWorkfile(pyblish.api.ContextPlugin):
# workfile instance
family = "workfile"
subset = get_subset_name_with_asset_doc(
subset = get_subset_name(
family,
"",
self.default_variant,
context.data["anatomyData"]["task"]["name"],
context.data["assetEntity"],
context.data["anatomyData"]["project"]["name"],
host_name=context.data["hostName"]
host_name=context.data["hostName"],
project_settings=context.data["project_settings"]
)
# Create instance
instance = context.create_instance(subset)

View file

@ -2,14 +2,18 @@ import os
import sys
import six
import openpype.api
from openpype.lib import (
get_ffmpeg_tool_path,
run_subprocess,
)
from openpype.pipeline import publish
from openpype.hosts.aftereffects.api import get_stub
class ExtractLocalRender(openpype.api.Extractor):
class ExtractLocalRender(publish.Extractor):
"""Render RenderQueue locally."""
order = openpype.api.Extractor.order - 0.47
order = publish.Extractor.order - 0.47
label = "Extract Local Render"
hosts = ["aftereffects"]
families = ["renderLocal", "render.local"]
@ -53,7 +57,7 @@ class ExtractLocalRender(openpype.api.Extractor):
instance.data["representations"] = [repre_data]
ffmpeg_path = openpype.lib.get_ffmpeg_tool_path("ffmpeg")
ffmpeg_path = get_ffmpeg_tool_path("ffmpeg")
# Generate thumbnail.
thumbnail_path = os.path.join(staging_dir, "thumbnail.jpg")
@ -66,7 +70,7 @@ class ExtractLocalRender(openpype.api.Extractor):
]
self.log.debug("Thumbnail args:: {}".format(args))
try:
output = openpype.lib.run_subprocess(args)
output = run_subprocess(args)
except TypeError:
self.log.warning("Error in creating thumbnail")
six.reraise(*sys.exc_info())

View file

@ -1,13 +1,13 @@
import pyblish.api
import openpype.api
from openpype.pipeline import publish
from openpype.hosts.aftereffects.api import get_stub
class ExtractSaveScene(pyblish.api.ContextPlugin):
"""Save scene before extraction."""
order = openpype.api.Extractor.order - 0.48
order = publish.Extractor.order - 0.48
label = "Extract Save Scene"
hosts = ["aftereffects"]

View file

@ -1,6 +1,6 @@
import pyblish.api
from openpype.action import get_errored_plugins_from_data
from openpype.lib import version_up
from openpype.pipeline.publish import get_errored_plugins_from_context
from openpype.hosts.aftereffects.api import get_stub
@ -18,7 +18,7 @@ class IncrementWorkfile(pyblish.api.InstancePlugin):
optional = True
def process(self, instance):
errored_plugins = get_errored_plugins_from_data(instance.context)
errored_plugins = get_errored_plugins_from_context(instance.context)
if errored_plugins:
raise RuntimeError(
"Skipping incrementing current file because publishing failed."

View file

@ -1,8 +1,8 @@
import openpype.api
from openpype.pipeline import publish
from openpype.hosts.aftereffects.api import get_stub
class RemovePublishHighlight(openpype.api.Extractor):
class RemovePublishHighlight(publish.Extractor):
"""Clean utf characters which are not working in DL
Published compositions are marked with unicode icon which causes
@ -10,7 +10,7 @@ class RemovePublishHighlight(openpype.api.Extractor):
rendering; add it back later to avoid confusion.
"""
order = openpype.api.Extractor.order - 0.49 # just before save
order = publish.Extractor.order - 0.49 # just before save
label = "Clean render comp"
hosts = ["aftereffects"]
families = ["render.farm"]

View file

@ -1,9 +1,9 @@
import pyblish.api
import openpype.api
from openpype.pipeline import (
from openpype.pipeline import legacy_io
from openpype.pipeline.publish import (
ValidateContentsOrder,
PublishXmlValidationError,
legacy_io,
)
from openpype.hosts.aftereffects.api import get_stub
@ -50,7 +50,7 @@ class ValidateInstanceAsset(pyblish.api.InstancePlugin):
label = "Validate Instance Asset"
hosts = ["aftereffects"]
actions = [ValidateInstanceAssetRepair]
order = openpype.api.ValidateContentsOrder
order = ValidateContentsOrder
def process(self, instance):
instance_asset = instance.data["asset"]

View file

@ -1,52 +1,6 @@
import os
from .addon import BlenderAddon
def add_implementation_envs(env, _app):
"""Modify environments to contain all required for implementation."""
# Prepare path to implementation script
implementation_user_script_path = os.path.join(
os.path.dirname(os.path.abspath(__file__)),
"blender_addon"
)
# Add blender implementation script path to PYTHONPATH
python_path = env.get("PYTHONPATH") or ""
python_path_parts = [
path
for path in python_path.split(os.pathsep)
if path
]
python_path_parts.insert(0, implementation_user_script_path)
env["PYTHONPATH"] = os.pathsep.join(python_path_parts)
# Modify Blender user scripts path
previous_user_scripts = set()
# Implementation path is added to set for easier paths check inside loops
# - will be removed at the end
previous_user_scripts.add(implementation_user_script_path)
openpype_blender_user_scripts = (
env.get("OPENPYPE_BLENDER_USER_SCRIPTS") or ""
)
for path in openpype_blender_user_scripts.split(os.pathsep):
if path:
previous_user_scripts.add(os.path.normpath(path))
blender_user_scripts = env.get("BLENDER_USER_SCRIPTS") or ""
for path in blender_user_scripts.split(os.pathsep):
if path:
previous_user_scripts.add(os.path.normpath(path))
# Remove implementation path from user script paths as it is set to
# `BLENDER_USER_SCRIPTS`
previous_user_scripts.remove(implementation_user_script_path)
env["BLENDER_USER_SCRIPTS"] = implementation_user_script_path
# Set custom user scripts env
env["OPENPYPE_BLENDER_USER_SCRIPTS"] = os.pathsep.join(
previous_user_scripts
)
# Define Qt binding if not defined
if not env.get("QT_PREFERRED_BINDING"):
env["QT_PREFERRED_BINDING"] = "PySide2"
__all__ = (
"BlenderAddon",
)

View file

@ -0,0 +1,72 @@
import os
from openpype.modules import OpenPypeModule, IHostAddon
BLENDER_ROOT_DIR = os.path.dirname(os.path.abspath(__file__))
class BlenderAddon(OpenPypeModule, IHostAddon):
name = "blender"
host_name = "blender"
def initialize(self, module_settings):
self.enabled = True
def add_implementation_envs(self, env, _app):
"""Modify environments to contain all required for implementation."""
# Prepare path to implementation script
implementation_user_script_path = os.path.join(
BLENDER_ROOT_DIR,
"blender_addon"
)
# Add blender implementation script path to PYTHONPATH
python_path = env.get("PYTHONPATH") or ""
python_path_parts = [
path
for path in python_path.split(os.pathsep)
if path
]
python_path_parts.insert(0, implementation_user_script_path)
env["PYTHONPATH"] = os.pathsep.join(python_path_parts)
# Modify Blender user scripts path
previous_user_scripts = set()
# Implementation path is added to set for easier paths check inside
# loops - will be removed at the end
previous_user_scripts.add(implementation_user_script_path)
openpype_blender_user_scripts = (
env.get("OPENPYPE_BLENDER_USER_SCRIPTS") or ""
)
for path in openpype_blender_user_scripts.split(os.pathsep):
if path:
previous_user_scripts.add(os.path.normpath(path))
blender_user_scripts = env.get("BLENDER_USER_SCRIPTS") or ""
for path in blender_user_scripts.split(os.pathsep):
if path:
previous_user_scripts.add(os.path.normpath(path))
# Remove implementation path from user script paths as it is set to
# `BLENDER_USER_SCRIPTS`
previous_user_scripts.remove(implementation_user_script_path)
env["BLENDER_USER_SCRIPTS"] = implementation_user_script_path
# Set custom user scripts env
env["OPENPYPE_BLENDER_USER_SCRIPTS"] = os.pathsep.join(
previous_user_scripts
)
# Define Qt binding if not defined
if not env.get("QT_PREFERRED_BINDING"):
env["QT_PREFERRED_BINDING"] = "PySide2"
def get_launch_hook_paths(self, app):
if app.host_name != self.host_name:
return []
return [
os.path.join(BLENDER_ROOT_DIR, "hooks")
]
def get_workfile_extensions(self):
return [".blend"]

View file

@ -2,7 +2,7 @@ import bpy
import pyblish.api
from openpype.api import get_errored_instances_from_context
from openpype.pipeline.publish import get_errored_instances_from_context
class SelectInvalidAction(pyblish.api.Action):

View file

@ -6,7 +6,7 @@ from typing import Dict, List, Union
import bpy
import addon_utils
from openpype.api import Logger
from openpype.lib import Logger
from . import pipeline
@ -234,7 +234,7 @@ def lsattrs(attrs: Dict) -> List:
def read(node: bpy.types.bpy_struct_meta_idprop):
"""Return user-defined attributes from `node`"""
data = dict(node.get(pipeline.AVALON_PROPERTY))
data = dict(node.get(pipeline.AVALON_PROPERTY, {}))
# Ignore hidden/internal data
data = {

View file

@ -10,7 +10,7 @@ from pathlib import Path
from types import ModuleType
from typing import Dict, List, Optional, Union
from Qt import QtWidgets, QtCore
from qtpy import QtWidgets, QtCore
import bpy
import bpy.utils.previews
@ -26,7 +26,7 @@ PREVIEW_COLLECTIONS: Dict = dict()
# This seems like a good value to keep the Qt app responsive and doesn't slow
# down Blender. At least on macOS the interface of Blender gets very laggy if
# you make it smaller.
TIMER_INTERVAL: float = 0.01
TIMER_INTERVAL: float = 0.01 if platform.system() == "Windows" else 0.1
class BlenderApplication(QtWidgets.QApplication):
@ -164,6 +164,12 @@ def _process_app_events() -> Optional[float]:
dialog.setDetailedText(detail)
dialog.exec_()
# Refresh Manager
if GlobalClass.app:
manager = GlobalClass.app.get_window("WM_OT_avalon_manager")
if manager:
manager.refresh()
if not GlobalClass.is_windows:
if OpenFileCacher.opening_file:
return TIMER_INTERVAL
@ -192,10 +198,11 @@ class LaunchQtApp(bpy.types.Operator):
self._app = BlenderApplication.get_app()
GlobalClass.app = self._app
bpy.app.timers.register(
_process_app_events,
persistent=True
)
if not bpy.app.timers.is_registered(_process_app_events):
bpy.app.timers.register(
_process_app_events,
persistent=True
)
def execute(self, context):
"""Execute the operator.
@ -220,12 +227,9 @@ class LaunchQtApp(bpy.types.Operator):
self._app.store_window(self.bl_idname, window)
self._window = window
if not isinstance(
self._window,
(QtWidgets.QMainWindow, QtWidgets.QDialog, ModuleType)
):
if not isinstance(self._window, (QtWidgets.QWidget, ModuleType)):
raise AttributeError(
"`window` should be a `QDialog or module`. Got: {}".format(
"`window` should be a `QWidget or module`. Got: {}".format(
str(type(window))
)
)
@ -249,9 +253,9 @@ class LaunchQtApp(bpy.types.Operator):
self._window.setWindowFlags(on_top_flags)
self._window.show()
if on_top_flags != origin_flags:
self._window.setWindowFlags(origin_flags)
self._window.show()
# if on_top_flags != origin_flags:
# self._window.setWindowFlags(origin_flags)
# self._window.show()
return {'FINISHED'}

View file

@ -20,8 +20,8 @@ from openpype.pipeline import (
deregister_creator_plugin_path,
AVALON_CONTAINER_ID,
)
from openpype.api import Logger
from openpype.lib import (
Logger,
register_event_callback,
emit_event
)
@ -93,7 +93,7 @@ def set_start_end_frames():
# Default scene settings
frameStart = scene.frame_start
frameEnd = scene.frame_end
fps = scene.render.fps
fps = scene.render.fps / scene.render.fps_base
resolution_x = scene.render.resolution_x
resolution_y = scene.render.resolution_y
@ -116,7 +116,8 @@ def set_start_end_frames():
scene.frame_start = frameStart
scene.frame_end = frameEnd
scene.render.fps = fps
scene.render.fps = round(fps)
scene.render.fps_base = round(fps) / fps
scene.render.resolution_x = resolution_x
scene.render.resolution_y = resolution_y
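The fps/fps_base split above matters for fractional frame rates, since Blender's 'render.fps' is an integer; a worked example with an assumed 23.976 fps asset setting:

fps = 23.976                       # fractional rate from asset settings
render_fps = round(fps)            # 24, stored in scene.render.fps
fps_base = round(fps) / fps        # ~1.001, stored in scene.render.fps_base
effective = render_fps / fps_base  # 23.976 again, as read back in the first hunk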

View file

@ -5,8 +5,6 @@ from typing import List, Optional
import bpy
from openpype.pipeline import HOST_WORKFILE_EXTENSIONS
class OpenFileCacher:
"""Store information about opening file.
@ -78,7 +76,7 @@ def has_unsaved_changes() -> bool:
def file_extensions() -> List[str]:
"""Return the supported file extensions for Blender scene files."""
return HOST_WORKFILE_EXTENSIONS["blender"]
return [".blend"]
def work_root(session: dict) -> str:

View file

@ -1,4 +1,10 @@
from openpype.pipeline import install_host
from openpype.hosts.blender import api
install_host(api)
def register():
install_host(api)
def unregister():
pass

View file

@ -1,6 +1,7 @@
import os
import re
import subprocess
from platform import system
from openpype.lib import PreLaunchHook
@ -13,12 +14,9 @@ class InstallPySideToBlender(PreLaunchHook):
For the pipeline implementation it is required to have a Qt binding installed
in blender's python packages.
Prelaunch hook can work only on Windows right now.
"""
app_groups = ["blender"]
platforms = ["windows"]
def execute(self):
# Prelaunch hook is not crucial
@ -34,25 +32,28 @@ class InstallPySideToBlender(PreLaunchHook):
# Get blender's python directory
version_regex = re.compile(r"^[2-3]\.[0-9]+$")
platform = system().lower()
executable = self.launch_context.executable.executable_path
if os.path.basename(executable).lower() != "blender.exe":
expected_executable = "blender"
if platform == "windows":
expected_executable += ".exe"
if os.path.basename(executable).lower() != expected_executable:
self.log.info((
"Executable does not lead to blender.exe file. Can't determine"
" blender's python to check/install PySide2."
f"Executable does not lead to {expected_executable} file."
"Can't determine blender's python to check/install PySide2."
))
return
executable_dir = os.path.dirname(executable)
versions_dir = os.path.dirname(executable)
if platform == "darwin":
versions_dir = os.path.join(
os.path.dirname(versions_dir), "Resources"
)
version_subfolders = []
for name in os.listdir(executable_dir):
fullpath = os.path.join(name, executable_dir)
if not os.path.isdir(fullpath):
continue
if not version_regex.match(name):
continue
version_subfolders.append(name)
for dir_entry in os.scandir(versions_dir):
if dir_entry.is_dir() and version_regex.match(dir_entry.name):
version_subfolders.append(dir_entry.name)
if not version_subfolders:
self.log.info(
@ -72,16 +73,21 @@ class InstallPySideToBlender(PreLaunchHook):
version_subfolder = version_subfolders[0]
pythond_dir = os.path.join(
os.path.dirname(executable),
version_subfolder,
"python"
)
python_dir = os.path.join(versions_dir, version_subfolder, "python")
python_lib = os.path.join(python_dir, "lib")
python_version = "python"
if platform != "windows":
for dir_entry in os.scandir(python_lib):
if dir_entry.is_dir() and dir_entry.name.startswith("python"):
python_lib = dir_entry.path
python_version = dir_entry.name
break
# Change PYTHONPATH to contain blender's packages as first
python_paths = [
os.path.join(pythond_dir, "lib"),
os.path.join(pythond_dir, "lib", "site-packages"),
python_lib,
os.path.join(python_lib, "site-packages"),
]
python_path = self.launch_context.env.get("PYTHONPATH") or ""
for path in python_path.split(os.pathsep):
@ -91,7 +97,15 @@ class InstallPySideToBlender(PreLaunchHook):
self.launch_context.env["PYTHONPATH"] = os.pathsep.join(python_paths)
# Get blender's python executable
python_executable = os.path.join(pythond_dir, "bin", "python.exe")
python_bin = os.path.join(python_dir, "bin")
if platform == "windows":
python_executable = os.path.join(python_bin, "python.exe")
else:
python_executable = os.path.join(python_bin, python_version)
# Check for python with enabled 'pymalloc'
if not os.path.exists(python_executable):
python_executable += "m"
if not os.path.exists(python_executable):
self.log.warning(
"Couldn't find python executable for blender. {}".format(
@ -106,7 +120,15 @@ class InstallPySideToBlender(PreLaunchHook):
return
# Install PySide2 in blender's python
self.install_pyside_windows(python_executable)
if platform == "windows":
result = self.install_pyside_windows(python_executable)
else:
result = self.install_pyside(python_executable)
if result:
self.log.info("Successfully installed PySide2 module to blender.")
else:
self.log.warning("Failed to install PySide2 module to blender.")
def install_pyside_windows(self, python_executable):
"""Install PySide2 python module to blender's python.
@ -144,19 +166,41 @@ class InstallPySideToBlender(PreLaunchHook):
lpDirectory=os.path.dirname(python_executable)
)
process_handle = process_info["hProcess"]
obj = win32event.WaitForSingleObject(
process_handle, win32event.INFINITE
)
win32event.WaitForSingleObject(process_handle, win32event.INFINITE)
returncode = win32process.GetExitCodeProcess(process_handle)
if returncode == 0:
self.log.info(
"Successfully installed PySide2 module to blender."
)
return
return returncode == 0
except pywintypes.error:
pass
self.log.warning("Failed to install PySide2 module to blender.")
def install_pyside(self, python_executable):
"""Install PySide2 python module to blender's python."""
try:
# Parameters
# - use "-m pip" as module pip to install PySide2 and argument
# "--ignore-installed" is to force install module to blender's
# site-packages and make sure it is binary compatible
args = [
python_executable,
"-m",
"pip",
"install",
"--ignore-installed",
"PySide2",
]
process = subprocess.Popen(
args, stdout=subprocess.PIPE, universal_newlines=True
)
process.communicate()
return process.returncode == 0
except PermissionError:
self.log.warning(
"Permission denied with command:"
"\"{}\".".format(" ".join(args))
)
except OSError as error:
self.log.warning(f"OS error has occurred: \"{error}\".")
except subprocess.SubprocessError:
pass
def is_pyside_installed(self, python_executable):
"""Check if PySide2 module is in blender's pip list.
@ -169,7 +213,7 @@ class InstallPySideToBlender(PreLaunchHook):
args = [python_executable, "-m", "pip", "list"]
process = subprocess.Popen(args, stdout=subprocess.PIPE)
stdout, _ = process.communicate()
lines = stdout.decode().split("\r\n")
lines = stdout.decode().split(os.linesep)
# Second line contain dashes that define maximum length of module name.
# Second column of dashes define maximum length of module version.
package_dashes, *_ = lines[1].split(" ")
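For context, the parsing above targets 'pip list' output shaped like the sample below; a small illustrative sketch of the column-width logic (the sample output is assumed):

sample = (
    "Package    Version\n"
    "---------- -------\n"
    "pip        22.3.1\n"
    "PySide2    5.15.2\n"
)
lines = sample.split("\n")
package_dashes, *_ = lines[1].split(" ")
width = len(package_dashes)  # width of the package-name column
names = {line[:width].strip().lower() for line in lines[2:] if line}
assert "pyside2" in names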

View file

@ -32,11 +32,6 @@ class CreateCamera(plugin.Creator):
subset = self.data["subset"]
name = plugin.asset_name(asset, subset)
camera = bpy.data.cameras.new(subset)
camera_obj = bpy.data.objects.new(subset, camera)
instances.objects.link(camera_obj)
asset_group = bpy.data.objects.new(name=name, object_data=None)
asset_group.empty_display_type = 'SINGLE_ARROW'
instances.objects.link(asset_group)
@ -53,6 +48,11 @@ class CreateCamera(plugin.Creator):
bpy.ops.object.parent_set(keep_transform=True)
else:
plugin.deselect_all()
camera = bpy.data.cameras.new(subset)
camera_obj = bpy.data.objects.new(subset, camera)
instances.objects.link(camera_obj)
camera_obj.select_set(True)
asset_group.select_set(True)
bpy.context.view_layer.objects.active = asset_group

View file

@ -0,0 +1,82 @@
import bpy
from openpype.hosts.blender.api import plugin
def append_workfile(context, fname, do_import):
asset = context['asset']['name']
subset = context['subset']['name']
group_name = plugin.asset_name(asset, subset)
# We need to preserve the original names of the scenes, otherwise,
# if there are duplicate names in the current workfile, the imported
# scenes will be renamed by Blender to avoid conflicts.
original_scene_names = []
with bpy.data.libraries.load(fname) as (data_from, data_to):
for attr in dir(data_to):
if attr == "scenes":
for scene in data_from.scenes:
original_scene_names.append(scene)
setattr(data_to, attr, getattr(data_from, attr))
current_scene = bpy.context.scene
for scene, s_name in zip(data_to.scenes, original_scene_names):
scene.name = f"{group_name}_{s_name}"
if do_import:
collection = bpy.data.collections.new(f"{group_name}_{s_name}")
for obj in scene.objects:
collection.objects.link(obj)
current_scene.collection.children.link(collection)
for coll in scene.collection.children:
collection.children.link(coll)
class AppendBlendLoader(plugin.AssetLoader):
"""Append workfile in Blender (unmanaged)
Warning:
The loaded content will be unmanaged and is *not* visible in the
scene inventory. It's purely intended to merge content into your scene
so you could also use it as a new base.
"""
representations = ["blend"]
families = ["*"]
label = "Append Workfile"
order = 9
icon = "arrow-circle-down"
color = "#775555"
def load(self, context, name=None, namespace=None, data=None):
append_workfile(context, self.fname, False)
# We do not containerize imported content, it remains unmanaged
return
class ImportBlendLoader(plugin.AssetLoader):
"""Import workfile in the current Blender scene (unmanaged)
Warning:
The loaded content will be unmanaged and is *not* visible in the
scene inventory. It's purely intended to merge content into your scene
so you could also use it as a new base.
"""
representations = ["blend"]
families = ["*"]
label = "Import Workfile"
order = 9
icon = "arrow-circle-down"
color = "#775555"
def load(self, context, name=None, namespace=None, data=None):
append_workfile(context, self.fname, True)
# We do not containerize imported content, it remains unmanaged
return

View file

@ -6,12 +6,12 @@ from typing import Dict, List, Optional
import bpy
from openpype import lib
from openpype.pipeline import (
legacy_create,
get_representation_path,
AVALON_CONTAINER_ID,
)
from openpype.pipeline.create import get_legacy_creator_by_name
from openpype.hosts.blender.api import plugin
from openpype.hosts.blender.api.pipeline import (
AVALON_CONTAINERS,
@ -48,8 +48,14 @@ class BlendLayoutLoader(plugin.AssetLoader):
bpy.data.objects.remove(obj)
def _remove_asset_and_library(self, asset_group):
if not asset_group.get(AVALON_PROPERTY):
return
libpath = asset_group.get(AVALON_PROPERTY).get('libpath')
if not libpath:
return
# Check how many assets use the same library
count = 0
for obj in bpy.data.collections.get(AVALON_CONTAINERS).all_objects:
@ -63,10 +69,12 @@ class BlendLayoutLoader(plugin.AssetLoader):
# If it is the last object to use that library, remove it
if count == 1:
library = bpy.data.libraries.get(bpy.path.basename(libpath))
bpy.data.libraries.remove(library)
if library:
bpy.data.libraries.remove(library)
def _process(
self, libpath, asset_group, group_name, asset, representation, actions
self, libpath, asset_group, group_name, asset, representation,
actions, anim_instances
):
with bpy.data.libraries.load(
libpath, link=True, relative=False
@ -140,12 +148,12 @@ class BlendLayoutLoader(plugin.AssetLoader):
elif local_obj.type == 'ARMATURE':
plugin.prepare_data(local_obj.data)
if action is not None:
if action:
if local_obj.animation_data is None:
local_obj.animation_data_create()
local_obj.animation_data.action = action
elif (local_obj.animation_data and
local_obj.animation_data.action is not None):
local_obj.animation_data.action):
plugin.prepare_data(
local_obj.animation_data.action)
@ -157,19 +165,26 @@ class BlendLayoutLoader(plugin.AssetLoader):
t.id = local_obj
elif local_obj.type == 'EMPTY':
creator_plugin = lib.get_creator_by_name("CreateAnimation")
if not creator_plugin:
raise ValueError("Creator plugin \"CreateAnimation\" was "
"not found.")
if (not anim_instances or
(anim_instances and
local_obj.name not in anim_instances.keys())):
avalon = local_obj.get(AVALON_PROPERTY)
if avalon and avalon.get('family') == 'rig':
creator_plugin = get_legacy_creator_by_name(
"CreateAnimation")
if not creator_plugin:
raise ValueError(
"Creator plugin \"CreateAnimation\" was "
"not found.")
legacy_create(
creator_plugin,
name=local_obj.name.split(':')[-1] + "_animation",
asset=asset,
options={"useSelection": False,
"asset_group": local_obj},
data={"dependencies": representation}
)
legacy_create(
creator_plugin,
name=local_obj.name.split(':')[-1] + "_animation",
asset=asset,
options={"useSelection": False,
"asset_group": local_obj},
data={"dependencies": representation}
)
if not local_obj.get(AVALON_PROPERTY):
local_obj[AVALON_PROPERTY] = dict()
@ -272,7 +287,8 @@ class BlendLayoutLoader(plugin.AssetLoader):
avalon_container.objects.link(asset_group)
objects = self._process(
libpath, asset_group, group_name, asset, representation, None)
libpath, asset_group, group_name, asset, representation,
None, None)
for child in asset_group.children:
if child.get(AVALON_PROPERTY):
@ -352,10 +368,20 @@ class BlendLayoutLoader(plugin.AssetLoader):
return
actions = {}
anim_instances = {}
for obj in asset_group.children:
obj_meta = obj.get(AVALON_PROPERTY)
if obj_meta.get('family') == 'rig':
# Get animation instance
collections = list(obj.users_collection)
for c in collections:
avalon = c.get(AVALON_PROPERTY)
if avalon and avalon.get('family') == 'animation':
anim_instances[obj.name] = c.name
break
# Get armature's action
rig = None
for child in obj.children:
if child.type == 'ARMATURE':
@ -384,9 +410,26 @@ class BlendLayoutLoader(plugin.AssetLoader):
# If it is the last object to use that library, remove it
if count == 1:
library = bpy.data.libraries.get(bpy.path.basename(group_libpath))
bpy.data.libraries.remove(library)
if library:
bpy.data.libraries.remove(library)
self._process(str(libpath), asset_group, object_name, actions)
asset = container.get("asset_name").split("_")[0]
self._process(
str(libpath), asset_group, object_name, asset,
str(representation.get("_id")), actions, anim_instances
)
# Link the new objects to the animation collection
for inst in anim_instances.keys():
try:
obj = bpy.data.objects[inst]
bpy.data.collections[anim_instances[inst]].objects.link(obj)
except KeyError:
self.log.info(f"Object {inst} does not exist anymore.")
coll = bpy.data.collections.get(anim_instances[inst])
if (coll):
bpy.data.collections.remove(coll)
avalon_container = bpy.data.collections.get(AVALON_CONTAINERS)
for child in asset_group.children:

View file

@ -118,7 +118,7 @@ class JsonLayoutLoader(plugin.AssetLoader):
# Camera creation when loading a layout is not necessary for now,
# but the code is worth keeping in case we need it in the future.
# # Create the camera asset and the camera instance
# creator_plugin = lib.get_creator_by_name("CreateCamera")
# creator_plugin = get_legacy_creator_by_name("CreateCamera")
# if not creator_plugin:
# raise ValueError("Creator plugin \"CreateCamera\" was "
# "not found.")

View file

@ -6,12 +6,12 @@ from typing import Dict, List, Optional
import bpy
from openpype import lib
from openpype.pipeline import (
legacy_create,
get_representation_path,
AVALON_CONTAINER_ID,
)
from openpype.pipeline.create import get_legacy_creator_by_name
from openpype.hosts.blender.api import (
plugin,
get_selection,
@ -244,7 +244,7 @@ class BlendRigLoader(plugin.AssetLoader):
objects = self._process(libpath, asset_group, group_name, action)
if create_animation:
creator_plugin = lib.get_creator_by_name("CreateAnimation")
creator_plugin = get_legacy_creator_by_name("CreateAnimation")
if not creator_plugin:
raise ValueError("Creator plugin \"CreateAnimation\" was "
"not found.")

View file

@ -1,6 +1,19 @@
import os
import bpy
import pyblish.api
from openpype.pipeline import legacy_io
from openpype.hosts.blender.api import workio
class SaveWorkfiledAction(pyblish.api.Action):
"""Save Workfile."""
label = "Save Workfile"
on = "failed"
icon = "save"
def process(self, context, plugin):
bpy.ops.wm.avalon_workfiles()
class CollectBlenderCurrentFile(pyblish.api.ContextPlugin):
@ -8,12 +21,52 @@ class CollectBlenderCurrentFile(pyblish.api.ContextPlugin):
order = pyblish.api.CollectorOrder - 0.5
label = "Blender Current File"
hosts = ['blender']
hosts = ["blender"]
actions = [SaveWorkfiledAction]
def process(self, context):
"""Inject the current working file"""
current_file = bpy.data.filepath
context.data['currentFile'] = current_file
current_file = workio.current_file()
assert current_file != '', "Current file is empty. " \
"Save the file before continuing."
context.data["currentFile"] = current_file
assert current_file, (
"Current file is empty. Save the file before continuing."
)
folder, file = os.path.split(current_file)
filename, ext = os.path.splitext(file)
task = legacy_io.Session["AVALON_TASK"]
data = {}
# create instance
instance = context.create_instance(name=filename)
subset = "workfile" + task.capitalize()
data.update({
"subset": subset,
"asset": os.getenv("AVALON_ASSET", None),
"label": subset,
"publish": True,
"family": "workfile",
"families": ["workfile"],
"setMembers": [current_file],
"frameStart": bpy.context.scene.frame_start,
"frameEnd": bpy.context.scene.frame_end,
})
data["representations"] = [{
"name": ext.lstrip("."),
"ext": ext.lstrip("."),
"files": file,
"stagingDir": folder,
}]
instance.data.update(data)
self.log.info("Collected instance: {}".format(file))
self.log.info("Scene path: {}".format(current_file))
self.log.info("staging Dir: {}".format(folder))
self.log.info("subset: {}".format(subset))

View file

@ -2,12 +2,12 @@ import os
import bpy
from openpype import api
from openpype.pipeline import publish
from openpype.hosts.blender.api import plugin
from openpype.hosts.blender.api.pipeline import AVALON_PROPERTY
class ExtractABC(api.Extractor):
class ExtractABC(publish.Extractor):
"""Extract as ABC."""
label = "Extract ABC"

View file

@ -0,0 +1,72 @@
import os
import bpy
from openpype.pipeline import publish
from openpype.hosts.blender.api import plugin
class ExtractAnimationABC(publish.Extractor):
"""Extract as ABC."""
label = "Extract Animation ABC"
hosts = ["blender"]
families = ["animation"]
optional = True
def process(self, instance):
# Define extract output file path
stagingdir = self.staging_dir(instance)
filename = f"{instance.name}.abc"
filepath = os.path.join(stagingdir, filename)
context = bpy.context
# Perform extraction
self.log.info("Performing extraction..")
plugin.deselect_all()
selected = []
asset_group = None
objects = []
for obj in instance:
if isinstance(obj, bpy.types.Collection):
for child in obj.all_objects:
objects.append(child)
for obj in objects:
children = [o for o in bpy.data.objects if o.parent == obj]
for child in children:
objects.append(child)
for obj in objects:
obj.select_set(True)
selected.append(obj)
context = plugin.create_blender_context(
active=asset_group, selected=selected)
# We export the abc
bpy.ops.wm.alembic_export(
context,
filepath=filepath,
selected=True,
flatten=False
)
plugin.deselect_all()
if "representations" not in instance.data:
instance.data["representations"] = []
representation = {
'name': 'abc',
'ext': 'abc',
'files': filename,
"stagingDir": stagingdir,
}
instance.data["representations"].append(representation)
self.log.info("Extracted instance '%s' to: %s",
instance.name, representation)

View file

@ -2,10 +2,10 @@ import os
import bpy
import openpype.api
from openpype.pipeline import publish
class ExtractBlend(openpype.api.Extractor):
class ExtractBlend(publish.Extractor):
"""Extract a blend file."""
label = "Extract Blend"

View file

@ -2,10 +2,10 @@ import os
import bpy
import openpype.api
from openpype.pipeline import publish
class ExtractBlendAnimation(openpype.api.Extractor):
class ExtractBlendAnimation(publish.Extractor):
"""Extract a blend file."""
label = "Extract Blend"

View file

@ -2,11 +2,11 @@ import os
import bpy
from openpype import api
from openpype.pipeline import publish
from openpype.hosts.blender.api import plugin
class ExtractCamera(api.Extractor):
class ExtractCamera(publish.Extractor):
"""Extract as the camera as FBX."""
label = "Extract Camera"

View file

@ -2,12 +2,12 @@ import os
import bpy
from openpype import api
from openpype.pipeline import publish
from openpype.hosts.blender.api import plugin
from openpype.hosts.blender.api.pipeline import AVALON_PROPERTY
class ExtractFBX(api.Extractor):
class ExtractFBX(publish.Extractor):
"""Extract as FBX."""
label = "Extract FBX"

View file

@ -5,12 +5,12 @@ import bpy
import bpy_extras
import bpy_extras.anim_utils
from openpype import api
from openpype.pipeline import publish
from openpype.hosts.blender.api import plugin
from openpype.hosts.blender.api.pipeline import AVALON_PROPERTY
class ExtractAnimationFBX(api.Extractor):
class ExtractAnimationFBX(publish.Extractor):
"""Extract as animation."""
label = "Extract FBX"

View file

@ -6,12 +6,12 @@ import bpy_extras
import bpy_extras.anim_utils
from openpype.client import get_representation_by_name
from openpype.pipeline import publish
from openpype.hosts.blender.api import plugin
from openpype.hosts.blender.api.pipeline import AVALON_PROPERTY
import openpype.api
class ExtractLayout(openpype.api.Extractor):
class ExtractLayout(publish.Extractor):
"""Extract a layout."""
label = "Extract Layout"
@ -180,7 +180,7 @@ class ExtractLayout(openpype.api.Extractor):
"rotation": {
"x": asset.rotation_euler.x,
"y": asset.rotation_euler.y,
"z": asset.rotation_euler.z,
"z": asset.rotation_euler.z
},
"scale": {
"x": asset.scale.x,
@ -189,6 +189,18 @@ class ExtractLayout(openpype.api.Extractor):
}
}
json_element["transform_matrix"] = []
for row in list(asset.matrix_world.transposed()):
json_element["transform_matrix"].append(list(row))
json_element["basis"] = [
[1, 0, 0, 0],
[0, -1, 0, 0],
[0, 0, 1, 0],
[0, 0, 0, 1]
]
# Extract the animation as well
if family == "rig":
f, n = self._export_animation(

View file

@ -1,9 +1,11 @@
from typing import List
import mathutils
import bpy
import pyblish.api
import openpype.hosts.blender.api.action
from openpype.pipeline.publish import ValidateContentsOrder
class ValidateCameraZeroKeyframe(pyblish.api.InstancePlugin):
@ -14,21 +16,18 @@ class ValidateCameraZeroKeyframe(pyblish.api.InstancePlugin):
in Unreal and Blender.
"""
order = openpype.api.ValidateContentsOrder
order = ValidateContentsOrder
hosts = ["blender"]
families = ["camera"]
category = "geometry"
version = (0, 1, 0)
label = "Zero Keyframe"
actions = [openpype.hosts.blender.api.action.SelectInvalidAction]
_identity = mathutils.Matrix()
@classmethod
def get_invalid(cls, instance) -> List:
@staticmethod
def get_invalid(instance) -> List:
invalid = []
for obj in [obj for obj in instance]:
if obj.type == "CAMERA":
for obj in instance:
if isinstance(obj, bpy.types.Object) and obj.type == "CAMERA":
if obj.animation_data and obj.animation_data.action:
action = obj.animation_data.action
frames_set = set()
@ -45,4 +44,5 @@ class ValidateCameraZeroKeyframe(pyblish.api.InstancePlugin):
invalid = self.get_invalid(instance)
if invalid:
raise RuntimeError(
f"Object found in instance is not in Object Mode: {invalid}")
f"Camera must have a keyframe at frame 0: {invalid}"
)

View file

@ -3,13 +3,15 @@ from typing import List
import bpy
import pyblish.api
from openpype.pipeline.publish import ValidateContentsOrder
import openpype.hosts.blender.api.action
class ValidateMeshHasUvs(pyblish.api.InstancePlugin):
"""Validate that the current mesh has UV's."""
order = pyblish.api.ValidatorOrder
order = ValidateContentsOrder
hosts = ["blender"]
families = ["model"]
category = "geometry"
@ -25,7 +27,10 @@ class ValidateMeshHasUvs(pyblish.api.InstancePlugin):
for uv_layer in obj.data.uv_layers:
for polygon in obj.data.polygons:
for loop_index in polygon.loop_indices:
if not uv_layer.data[loop_index].uv:
if (
loop_index >= len(uv_layer.data)
or not uv_layer.data[loop_index].uv
):
return False
return True
@ -33,20 +38,20 @@ class ValidateMeshHasUvs(pyblish.api.InstancePlugin):
@classmethod
def get_invalid(cls, instance) -> List:
invalid = []
# TODO (jasper): only check objects in the collection that will be published?
for obj in [
obj for obj in instance]:
try:
if obj.type == 'MESH':
# Make sure we are in object mode.
bpy.ops.object.mode_set(mode='OBJECT')
if not cls.has_uvs(obj):
invalid.append(obj)
except:
continue
for obj in instance:
if isinstance(obj, bpy.types.Object) and obj.type == 'MESH':
if obj.mode != "OBJECT":
cls.log.warning(
f"Mesh object {obj.name} should be in 'OBJECT' mode"
" to be properly checked."
)
if not cls.has_uvs(obj):
invalid.append(obj)
return invalid
def process(self, instance):
invalid = self.get_invalid(instance)
if invalid:
raise RuntimeError(f"Meshes found in instance without valid UV's: {invalid}")
raise RuntimeError(
f"Meshes found in instance without valid UV's: {invalid}"
)

View file

@ -3,28 +3,28 @@ from typing import List
import bpy
import pyblish.api
from openpype.pipeline.publish import ValidateContentsOrder
import openpype.hosts.blender.api.action
class ValidateMeshNoNegativeScale(pyblish.api.Validator):
"""Ensure that meshes don't have a negative scale."""
order = pyblish.api.ValidatorOrder
order = ValidateContentsOrder
hosts = ["blender"]
families = ["model"]
category = "geometry"
label = "Mesh No Negative Scale"
actions = [openpype.hosts.blender.api.action.SelectInvalidAction]
@staticmethod
def get_invalid(instance) -> List:
invalid = []
# TODO (jasper): only check objects in the collection that will be published?
for obj in [
obj for obj in bpy.data.objects if obj.type == 'MESH'
]:
if any(v < 0 for v in obj.scale):
invalid.append(obj)
for obj in instance:
if isinstance(obj, bpy.types.Object) and obj.type == 'MESH':
if any(v < 0 for v in obj.scale):
invalid.append(obj)
return invalid
def process(self, instance):

View file

@ -1,7 +1,11 @@
from typing import List
import bpy
import pyblish.api
import openpype.hosts.blender.api.action
from openpype.pipeline.publish import ValidateContentsOrder
class ValidateNoColonsInName(pyblish.api.InstancePlugin):
@ -12,20 +16,20 @@ class ValidateNoColonsInName(pyblish.api.InstancePlugin):
"""
order = openpype.api.ValidateContentsOrder
order = ValidateContentsOrder
hosts = ["blender"]
families = ["model", "rig"]
version = (0, 1, 0)
label = "No Colons in names"
actions = [openpype.hosts.blender.api.action.SelectInvalidAction]
@classmethod
def get_invalid(cls, instance) -> List:
@staticmethod
def get_invalid(instance) -> List:
invalid = []
for obj in [obj for obj in instance]:
for obj in instance:
if ':' in obj.name:
invalid.append(obj)
if obj.type == 'ARMATURE':
if isinstance(obj, bpy.types.Object) and obj.type == 'ARMATURE':
for bone in obj.data.bones:
if ':' in bone.name:
invalid.append(obj)
@ -36,4 +40,5 @@ class ValidateNoColonsInName(pyblish.api.InstancePlugin):
invalid = self.get_invalid(instance)
if invalid:
raise RuntimeError(
f"Objects found with colon in name: {invalid}")
f"Objects found with colon in name: {invalid}"
)

View file

@ -1,5 +1,7 @@
from typing import List
import bpy
import pyblish.api
import openpype.hosts.blender.api.action
@ -10,26 +12,21 @@ class ValidateObjectIsInObjectMode(pyblish.api.InstancePlugin):
order = pyblish.api.ValidatorOrder - 0.01
hosts = ["blender"]
families = ["model", "rig", "layout"]
category = "geometry"
label = "Validate Object Mode"
actions = [openpype.hosts.blender.api.action.SelectInvalidAction]
optional = False
@classmethod
def get_invalid(cls, instance) -> List:
@staticmethod
def get_invalid(instance) -> List:
invalid = []
for obj in [obj for obj in instance]:
try:
if obj.type == 'MESH' or obj.type == 'ARMATURE':
# Check if the object is in object mode.
if not obj.mode == 'OBJECT':
invalid.append(obj)
except Exception:
continue
for obj in instance:
if isinstance(obj, bpy.types.Object) and obj.mode != "OBJECT":
invalid.append(obj)
return invalid
def process(self, instance):
invalid = self.get_invalid(instance)
if invalid:
raise RuntimeError(
f"Object found in instance is not in Object Mode: {invalid}")
f"Object found in instance is not in Object Mode: {invalid}"
)

View file

@ -1,9 +1,12 @@
from typing import List
import mathutils
import bpy
import pyblish.api
import openpype.hosts.blender.api.action
from openpype.pipeline.publish import ValidateContentsOrder
class ValidateTransformZero(pyblish.api.InstancePlugin):
@ -15,10 +18,9 @@ class ValidateTransformZero(pyblish.api.InstancePlugin):
"""
order = openpype.api.ValidateContentsOrder
order = ValidateContentsOrder
hosts = ["blender"]
families = ["model"]
category = "geometry"
version = (0, 1, 0)
label = "Transform Zero"
actions = [openpype.hosts.blender.api.action.SelectInvalidAction]
@ -28,8 +30,11 @@ class ValidateTransformZero(pyblish.api.InstancePlugin):
@classmethod
def get_invalid(cls, instance) -> List:
invalid = []
for obj in [obj for obj in instance]:
if obj.matrix_basis != cls._identity:
for obj in instance:
if (
isinstance(obj, bpy.types.Object)
and obj.matrix_basis != cls._identity
):
invalid.append(obj)
return invalid
@ -37,4 +42,6 @@ class ValidateTransformZero(pyblish.api.InstancePlugin):
invalid = self.get_invalid(instance)
if invalid:
raise RuntimeError(
f"Object found in instance is not in Object Mode: {invalid}")
"Object found in instance has not"
f" transform to zero: {invalid}"
)

View file

@ -0,0 +1,10 @@
from .addon import (
CELACTION_ROOT_DIR,
CelactionAddon,
)
__all__ = (
"CELACTION_ROOT_DIR",
"CelactionAddon",
)

View file

@ -0,0 +1,31 @@
import os
from openpype.modules import OpenPypeModule, IHostAddon
CELACTION_ROOT_DIR = os.path.dirname(os.path.abspath(__file__))
class CelactionAddon(OpenPypeModule, IHostAddon):
name = "celaction"
host_name = "celaction"
def initialize(self, module_settings):
self.enabled = True
def get_launch_hook_paths(self, app):
if app.host_name != self.host_name:
return []
return [
os.path.join(CELACTION_ROOT_DIR, "hooks")
]
def add_implementation_envs(self, env, _app):
# Set default values if they are not already set via settings
defaults = {
"LOGLEVEL": "DEBUG"
}
for key, value in defaults.items():
if not env.get(key):
env[key] = value
def get_workfile_extensions(self):
return [".scn"]

View file

@ -1 +0,0 @@
kwargs = None

View file

@ -1,88 +0,0 @@
import os
import sys
import copy
import argparse
import pyblish.api
import pyblish.util
from openpype.api import Logger
import openpype
import openpype.hosts.celaction
from openpype.hosts.celaction import api as celaction
from openpype.tools.utils import host_tools
from openpype.pipeline import install_openpype_plugins
log = Logger().get_logger("Celaction_cli_publisher")
publish_host = "celaction"
HOST_DIR = os.path.dirname(os.path.abspath(openpype.hosts.celaction.__file__))
PLUGINS_DIR = os.path.join(HOST_DIR, "plugins")
PUBLISH_PATH = os.path.join(PLUGINS_DIR, "publish")
def cli():
parser = argparse.ArgumentParser(prog="celaction_publish")
parser.add_argument("--currentFile",
help="Pass file to Context as `currentFile`")
parser.add_argument("--chunk",
help=("Render chanks on farm"))
parser.add_argument("--frameStart",
help=("Start of frame range"))
parser.add_argument("--frameEnd",
help=("End of frame range"))
parser.add_argument("--resolutionWidth",
help=("Width of resolution"))
parser.add_argument("--resolutionHeight",
help=("Height of resolution"))
celaction.kwargs = parser.parse_args(sys.argv[1:]).__dict__
def _prepare_publish_environments():
"""Prepares environments based on request data."""
env = copy.deepcopy(os.environ)
project_name = os.getenv("AVALON_PROJECT")
asset_name = os.getenv("AVALON_ASSET")
env["AVALON_PROJECT"] = project_name
env["AVALON_ASSET"] = asset_name
env["AVALON_TASK"] = os.getenv("AVALON_TASK")
env["AVALON_WORKDIR"] = os.getenv("AVALON_WORKDIR")
env["AVALON_APP"] = f"hosts.{publish_host}"
env["AVALON_APP_NAME"] = "celaction/local"
env["PYBLISH_HOSTS"] = publish_host
os.environ.update(env)
def main():
# prepare all environments
_prepare_publish_environments()
# Registers pype's Global pyblish plugins
install_openpype_plugins()
if os.path.exists(PUBLISH_PATH):
log.info(f"Registering path: {PUBLISH_PATH}")
pyblish.api.register_plugin_path(PUBLISH_PATH)
pyblish.api.register_host(publish_host)
return host_tools.show_publish()
if __name__ == "__main__":
cli()
result = main()
sys.exit(not bool(result))

View file

@ -1,122 +0,0 @@
import os
import shutil
import winreg
from openpype.lib import PreLaunchHook
from openpype.hosts.celaction import api as celaction
class CelactionPrelaunchHook(PreLaunchHook):
"""
Bootstrap celaction with pype
"""
workfile_ext = "scn"
app_groups = ["celaction"]
platforms = ["windows"]
def execute(self):
# Add workfile path to launch arguments
workfile_path = self.workfile_path()
if workfile_path:
self.launch_context.launch_args.append(workfile_path)
project_name = self.data["project_name"]
asset_name = self.data["asset_name"]
task_name = self.data["task_name"]
# get publish version of celaction
app = "celaction_publish"
# setting output parameters
path = r"Software\CelAction\CelAction2D\User Settings"
winreg.CreateKey(winreg.HKEY_CURRENT_USER, path)
hKey = winreg.OpenKey(
winreg.HKEY_CURRENT_USER,
"Software\\CelAction\\CelAction2D\\User Settings", 0,
winreg.KEY_ALL_ACCESS)
# TODO: this will need to be checked more thoroughly
pype_exe = os.getenv("OPENPYPE_EXECUTABLE")
winreg.SetValueEx(hKey, "SubmitAppTitle", 0, winreg.REG_SZ, pype_exe)
parameters = [
"launch",
f"--app {app}",
f"--project {project_name}",
f"--asset {asset_name}",
f"--task {task_name}",
"--currentFile \\\"\"*SCENE*\"\\\"",
"--chunk 10",
"--frameStart *START*",
"--frameEnd *END*",
"--resolutionWidth *X*",
"--resolutionHeight *Y*",
# "--programDir \"'*PROGPATH*'\""
]
winreg.SetValueEx(hKey, "SubmitParametersTitle", 0, winreg.REG_SZ,
" ".join(parameters))
# setting resolution parameters
path = r"Software\CelAction\CelAction2D\User Settings\Dialogs"
path += r"\SubmitOutput"
winreg.CreateKey(winreg.HKEY_CURRENT_USER, path)
hKey = winreg.OpenKey(winreg.HKEY_CURRENT_USER, path, 0,
winreg.KEY_ALL_ACCESS)
winreg.SetValueEx(hKey, "SaveScene", 0, winreg.REG_DWORD, 1)
winreg.SetValueEx(hKey, "CustomX", 0, winreg.REG_DWORD, 1920)
winreg.SetValueEx(hKey, "CustomY", 0, winreg.REG_DWORD, 1080)
# making sure message dialogs don't appear when overwriting
path = r"Software\CelAction\CelAction2D\User Settings\Messages"
path += r"\OverwriteScene"
winreg.CreateKey(winreg.HKEY_CURRENT_USER, path)
hKey = winreg.OpenKey(winreg.HKEY_CURRENT_USER, path, 0,
winreg.KEY_ALL_ACCESS)
winreg.SetValueEx(hKey, "Result", 0, winreg.REG_DWORD, 6)
winreg.SetValueEx(hKey, "Valid", 0, winreg.REG_DWORD, 1)
path = r"Software\CelAction\CelAction2D\User Settings\Messages"
path += r"\SceneSaved"
winreg.CreateKey(winreg.HKEY_CURRENT_USER, path)
hKey = winreg.OpenKey(winreg.HKEY_CURRENT_USER, path, 0,
winreg.KEY_ALL_ACCESS)
winreg.SetValueEx(hKey, "Result", 0, winreg.REG_DWORD, 1)
winreg.SetValueEx(hKey, "Valid", 0, winreg.REG_DWORD, 1)
def workfile_path(self):
workfile_path = self.data["last_workfile_path"]
# copy workfile from template if none exists on path
if not os.path.exists(workfile_path):
# TODO add ability to set different template workfile path via
# settings
pype_celaction_dir = os.path.dirname(os.path.dirname(
os.path.abspath(celaction.__file__)
))
template_path = os.path.join(
pype_celaction_dir,
"resources",
"celaction_template_scene.scn"
)
if not os.path.exists(template_path):
self.log.warning(
"Couldn't find workfile template file in {}".format(
template_path
)
)
return
self.log.info(
f"Creating workfile from template: \"{template_path}\""
)
# Copy template workfile to new destination
shutil.copy2(
os.path.normpath(template_path),
os.path.normpath(workfile_path)
)
self.log.info(f"Workfile to open: \"{workfile_path}\"")
return workfile_path
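
The deleted hook above repeats the same three-step winreg dance (CreateKey, OpenKey, SetValueEx) for every setting. A small helper of the kind below captures that pattern; the function name is illustrative, and this is Windows-only since winreg exists nowhere else:

import winreg

def set_user_value(subkey, name, value, kind=winreg.REG_SZ):
    # Ensure HKEY_CURRENT_USER\<subkey> exists, then write one value.
    winreg.CreateKey(winreg.HKEY_CURRENT_USER, subkey)
    with winreg.OpenKey(
        winreg.HKEY_CURRENT_USER, subkey, 0, winreg.KEY_ALL_ACCESS
    ) as key:
        winreg.SetValueEx(key, name, 0, kind, value)

# e.g. the resolution default written by the hook:
set_user_value(
    r"Software\CelAction\CelAction2D\User Settings\Dialogs\SubmitOutput",
    "CustomX", 1920, winreg.REG_DWORD,
)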

View file

@ -0,0 +1,137 @@
import os
import shutil
import winreg
import subprocess
from openpype.lib import PreLaunchHook, get_openpype_execute_args
from openpype.hosts.celaction import scripts
CELACTION_SCRIPTS_DIR = os.path.dirname(
os.path.abspath(scripts.__file__)
)
class CelactionPrelaunchHook(PreLaunchHook):
"""
Bootstrap celaction with pype
"""
app_groups = ["celaction"]
platforms = ["windows"]
def execute(self):
asset_doc = self.data["asset_doc"]
width = asset_doc["data"]["resolutionWidth"]
height = asset_doc["data"]["resolutionHeight"]
# Add workfile path to launch arguments
workfile_path = self.workfile_path()
if workfile_path:
self.launch_context.launch_args.append(workfile_path)
# setting output parameters
path_user_settings = "\\".join([
"Software", "CelAction", "CelAction2D", "User Settings"
])
winreg.CreateKey(winreg.HKEY_CURRENT_USER, path_user_settings)
hKey = winreg.OpenKey(
winreg.HKEY_CURRENT_USER, path_user_settings, 0,
winreg.KEY_ALL_ACCESS
)
path_to_cli = os.path.join(CELACTION_SCRIPTS_DIR, "publish_cli.py")
subprocess_args = get_openpype_execute_args("run", path_to_cli)
openpype_executable = subprocess_args.pop(0)
winreg.SetValueEx(
hKey,
"SubmitAppTitle",
0,
winreg.REG_SZ,
openpype_executable
)
parameters = subprocess_args + [
"--currentFile", "*SCENE*",
"--chunk", "*CHUNK*",
"--frameStart", "*START*",
"--frameEnd", "*END*",
"--resolutionWidth", "*X*",
"--resolutionHeight", "*Y*"
]
winreg.SetValueEx(
hKey, "SubmitParametersTitle", 0, winreg.REG_SZ,
subprocess.list2cmdline(parameters)
)
# setting resolution parameters
path_submit = "\\".join([
path_user_settings, "Dialogs", "SubmitOutput"
])
winreg.CreateKey(winreg.HKEY_CURRENT_USER, path_submit)
hKey = winreg.OpenKey(
winreg.HKEY_CURRENT_USER, path_submit, 0,
winreg.KEY_ALL_ACCESS
)
winreg.SetValueEx(hKey, "SaveScene", 0, winreg.REG_DWORD, 1)
winreg.SetValueEx(hKey, "CustomX", 0, winreg.REG_DWORD, width)
winreg.SetValueEx(hKey, "CustomY", 0, winreg.REG_DWORD, height)
# making sure message dialogs don't appear when overwriting
path_overwrite_scene = "\\".join([
path_user_settings, "Messages", "OverwriteScene"
])
winreg.CreateKey(winreg.HKEY_CURRENT_USER, path_overwrite_scene)
hKey = winreg.OpenKey(
winreg.HKEY_CURRENT_USER, path_overwrite_scene, 0,
winreg.KEY_ALL_ACCESS
)
winreg.SetValueEx(hKey, "Result", 0, winreg.REG_DWORD, 6)
winreg.SetValueEx(hKey, "Valid", 0, winreg.REG_DWORD, 1)
# set scene as not saved
path_scene_saved = "\\".join([
path_user_settings, "Messages", "SceneSaved"
])
winreg.CreateKey(winreg.HKEY_CURRENT_USER, path_scene_saved)
hKey = winreg.OpenKey(
winreg.HKEY_CURRENT_USER, path_scene_saved, 0,
winreg.KEY_ALL_ACCESS
)
winreg.SetValueEx(hKey, "Result", 0, winreg.REG_DWORD, 1)
winreg.SetValueEx(hKey, "Valid", 0, winreg.REG_DWORD, 1)
def workfile_path(self):
workfile_path = self.data["last_workfile_path"]
# copy workfile from template if none exists on the path
if not os.path.exists(workfile_path):
# TODO add ability to set different template workfile path via
# settings
openpype_celaction_dir = os.path.dirname(CELACTION_SCRIPTS_DIR)
template_path = os.path.join(
openpype_celaction_dir,
"resources",
"celaction_template_scene.scn"
)
if not os.path.exists(template_path):
self.log.warning(
"Couldn't find workfile template file in {}".format(
template_path
)
)
return
self.log.info(
f"Creating workfile from template: \"{template_path}\""
)
# Copy template workfile to new destination
shutil.copy2(
os.path.normpath(template_path),
os.path.normpath(workfile_path)
)
self.log.info(f"Workfile to open: \"{workfile_path}\"")
return workfile_path
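
Unlike the old hook, which hand-quoted the arguments with nested escapes, the new one builds the registry string with subprocess.list2cmdline, which applies the MS C runtime quoting rules. A standalone illustration (the path is made up):

import subprocess

args = ["run", r"C:\path with spaces\publish_cli.py", "--currentFile", "*SCENE*"]
# Arguments containing spaces are quoted, so they survive the
# round-trip through the single registry string.
print(subprocess.list2cmdline(args))
# run "C:\path with spaces\publish_cli.py" --currentFile *SCENE*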

View file

@ -1,113 +0,0 @@
import os
import collections
from pprint import pformat
import pyblish.api
from openpype.client import (
get_subsets,
get_last_versions,
get_representations
)
from openpype.pipeline import legacy_io
class AppendCelactionAudio(pyblish.api.ContextPlugin):
label = "Colect Audio for publishing"
order = pyblish.api.CollectorOrder + 0.1
def process(self, context):
self.log.info('Collecting Audio Data')
asset_doc = context.data["assetEntity"]
# get all available representations
subsets = self.get_subsets(
asset_doc,
representations=["audio", "wav"]
)
self.log.info(f"subsets is: {pformat(subsets)}")
if not subsets.get("audioMain"):
raise AttributeError("`audioMain` subset does not exist")
reprs = subsets.get("audioMain", {}).get("representations", [])
self.log.info(f"reprs is: {pformat(reprs)}")
repre = next(iter(reprs), None)
if not repre:
raise AttributeError("Missing `audioMain` representation")
self.log.info(f"representation is: {repre}")
audio_file = repre.get('data', {}).get('path', "")
if os.path.exists(audio_file):
context.data["audioFile"] = audio_file
self.log.info(
'audio_file: {}, has been added to context'.format(audio_file))
else:
self.log.warning("Couldn't find any audio file on Ftrack.")
def get_subsets(self, asset_doc, representations):
"""
Query subsets with filter on name.
The method returns all found subsets together with their latest
version and matching representations. Representations can be
filtered by name.
Arguments:
asset_doc (dict): Asset (shot) mongo document
representations (list): representation names to filter by
Returns:
dict: subsets with version and representations in keys
"""
# Query all subsets for asset
project_name = legacy_io.active_project()
subset_docs = get_subsets(
project_name, asset_ids=[asset_doc["_id"]], fields=["_id"]
)
# Collect all subset ids
subset_ids = [
subset_doc["_id"]
for subset_doc in subset_docs
]
# Check if we found anything
assert subset_ids, (
"No subsets found. Check correct filter. "
"Try this for start `r'.*'`: asset: `{}`"
).format(asset_doc["name"])
last_versions_by_subset_id = get_last_versions(
project_name, subset_ids, fields=["_id", "parent"]
)
version_docs_by_id = {}
for version_doc in last_versions_by_subset_id.values():
version_docs_by_id[version_doc["_id"]] = version_doc
repre_docs = get_representations(
project_name,
version_ids=version_docs_by_id.keys(),
representation_names=representations
)
repre_docs_by_version_id = collections.defaultdict(list)
for repre_doc in repre_docs:
version_id = repre_doc["parent"]
repre_docs_by_version_id[version_id].append(repre_doc)
output_dict = {}
for version_id, repre_docs in repre_docs_by_version_id.items():
version_doc = version_docs_by_id[version_id]
subset_id = version_doc["parent"]
subset_doc = last_versions_by_subset_id[subset_id]
# Store queried docs by subset name
output_dict[subset_doc["name"]] = {
"representations": repre_docs,
"version": version_doc
}
return output_dict
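
The core of the deleted collector is two grouping passes: representations grouped under their parent version, then versions mapped back to subset names. The first pass in isolation, with made-up stand-ins for the Mongo documents:

import collections

repre_docs = [
    {"_id": "r1", "parent": "v1", "name": "audio"},
    {"_id": "r2", "parent": "v1", "name": "wav"},
    {"_id": "r3", "parent": "v2", "name": "audio"},
]

# Group representation documents under their parent version id.
repre_docs_by_version_id = collections.defaultdict(list)
for repre_doc in repre_docs:
    repre_docs_by_version_id[repre_doc["parent"]].append(repre_doc)

print(dict(repre_docs_by_version_id))
# {'v1': [<two docs>], 'v2': [<one doc>]}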

View file

@ -1,5 +1,7 @@
import pyblish.api
from openpype.hosts.celaction import api as celaction
import argparse
import sys
from pprint import pformat
class CollectCelactionCliKwargs(pyblish.api.Collector):
@ -9,15 +11,31 @@ class CollectCelactionCliKwargs(pyblish.api.Collector):
order = pyblish.api.Collector.order - 0.1
def process(self, context):
kwargs = celaction.kwargs.copy()
parser = argparse.ArgumentParser(prog="celaction")
parser.add_argument("--currentFile",
help="Pass file to Context as `currentFile`")
parser.add_argument("--chunk",
help=("Render chanks on farm"))
parser.add_argument("--frameStart",
help=("Start of frame range"))
parser.add_argument("--frameEnd",
help=("End of frame range"))
parser.add_argument("--resolutionWidth",
help=("Width of resolution"))
parser.add_argument("--resolutionHeight",
help=("Height of resolution"))
passing_kwargs = parser.parse_args(sys.argv[1:]).__dict__
self.log.info("Storing kwargs: %s" % kwargs)
context.set_data("kwargs", kwargs)
self.log.info("Storing kwargs ...")
self.log.debug("_ passing_kwargs: {}".format(pformat(passing_kwargs)))
# set kwargs to context data
context.set_data("passingKwargs", passing_kwargs)
# get kwargs onto context data as keys with values
for k, v in kwargs.items():
for k, v in passing_kwargs.items():
self.log.info(f"Setting `{k}` to instance.data with value: `{v}`")
if k in ["frameStart", "frameEnd"]:
context.data[k] = kwargs[k] = int(v)
context.data[k] = passing_kwargs[k] = int(v)
else:
context.data[k] = v
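
The collector now rebuilds its kwargs from the process arguments instead of a celaction module global. The same argparse pattern standalone; note that parse_args() yields strings, which is why the plugin casts frameStart/frameEnd to int:

import argparse

parser = argparse.ArgumentParser(prog="celaction")
parser.add_argument("--frameStart", help="Start of frame range")
parser.add_argument("--frameEnd", help="End of frame range")

# vars() (equivalent to .__dict__) turns the Namespace into the plain
# dict that gets stored on the pyblish context.
passing_kwargs = vars(parser.parse_args(["--frameStart", "1001", "--frameEnd", "1100"]))
print(passing_kwargs)  # {'frameStart': '1001', 'frameEnd': '1100'}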

View file

@ -36,7 +36,8 @@ class CollectCelactionInstances(pyblish.api.ContextPlugin):
"version": version
}
celaction_kwargs = context.data.get("kwargs", {})
celaction_kwargs = context.data.get(
"passingKwargs", {})
if celaction_kwargs:
shared_instance_data.update(celaction_kwargs)
@ -52,8 +53,8 @@ class CollectCelactionInstances(pyblish.api.ContextPlugin):
"subset": subset,
"label": scene_file,
"family": family,
"families": [family, "ftrack"],
"representations": list()
"families": [],
"representations": []
})
# adding basic script data
@ -72,7 +73,6 @@ class CollectCelactionInstances(pyblish.api.ContextPlugin):
self.log.info('Publishing Celaction workfile')
# render instance
family = "render.farm"
subset = f"render{task}Main"
instance = context.create_instance(name=subset)
# getting instance state
@ -81,8 +81,8 @@ class CollectCelactionInstances(pyblish.api.ContextPlugin):
# add assetEntity data into instance
instance.data.update({
"label": "{} - farm".format(subset),
"family": family,
"families": [family],
"family": "render.farm",
"families": [],
"subset": subset
})
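
For readers new to pyblish: context.create_instance() returns an Instance whose .data dict drives every later plugin in the publish chain. A minimal collector with the same shape as the render instance above (the subset name is made up):

import pyblish.api

class CollectRenderExample(pyblish.api.ContextPlugin):
    order = pyblish.api.CollectorOrder

    def process(self, context):
        instance = context.create_instance(name="renderShot010Main")
        # The same keys the celaction collector fills in above.
        instance.data.update({
            "label": "renderShot010Main - farm",
            "family": "render.farm",
            "families": [],
            "subset": "renderShot010Main",
        })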

View file

@ -11,28 +11,31 @@ class CollectRenderPath(pyblish.api.InstancePlugin):
families = ["render.farm"]
# Presets
anatomy_render_key = None
publish_render_metadata = None
output_extension = "png"
anatomy_template_key_render_files = None
anatomy_template_key_metadata = None
def process(self, instance):
anatomy = instance.context.data["anatomy"]
anatomy_data = copy.deepcopy(instance.data["anatomyData"])
anatomy_data["family"] = "render"
padding = anatomy.templates.get("frame_padding", 4)
anatomy_data.update({
"frame": f"%0{padding}d",
"representation": "png"
"family": "render",
"representation": self.output_extension,
"ext": self.output_extension
})
anatomy_filled = anatomy.format(anatomy_data)
# get anatomy rendering keys
anatomy_render_key = self.anatomy_render_key or "render"
publish_render_metadata = self.publish_render_metadata or "render"
r_anatomy_key = self.anatomy_template_key_render_files
m_anatomy_key = self.anatomy_template_key_metadata
# get folder and path for rendering images from celaction
render_dir = anatomy_filled[anatomy_render_key]["folder"]
render_path = anatomy_filled[anatomy_render_key]["path"]
render_dir = anatomy_filled[r_anatomy_key]["folder"]
render_path = anatomy_filled[r_anatomy_key]["path"]
self.log.debug("__ render_path: `{}`".format(render_path))
# create the dir if it doesn't exist
try:
@ -46,9 +49,9 @@ class CollectRenderPath(pyblish.api.InstancePlugin):
instance.data["path"] = render_path
# get anatomy for published renders folder path
if anatomy_filled.get(publish_render_metadata):
if anatomy_filled.get(m_anatomy_key):
instance.data["publishRenderMetadataFolder"] = anatomy_filled[
publish_render_metadata]["folder"]
m_anatomy_key]["folder"]
self.log.info("Metadata render path: `{}`".format(
instance.data["publishRenderMetadataFolder"]
))
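
The old collector filled the frame slot with a printf-style token built from the anatomy's frame padding, so the filled render path kept a literal "%04d" for the farm to substitute per frame. What that f-string produces:

padding = 4
frame_token = f"%0{padding}d"  # -> "%04d"
print(frame_token % 12)        # -> "0012"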

View file

@ -0,0 +1,37 @@
import os
import sys
import pyblish.api
import pyblish.util
import openpype.hosts.celaction
from openpype.lib import Logger
from openpype.tools.utils import host_tools
from openpype.pipeline import install_openpype_plugins
log = Logger.get_logger("celaction")
PUBLISH_HOST = "celaction"
HOST_DIR = os.path.dirname(os.path.abspath(openpype.hosts.celaction.__file__))
PLUGINS_DIR = os.path.join(HOST_DIR, "plugins")
PUBLISH_PATH = os.path.join(PLUGINS_DIR, "publish")
def main():
# Registers pype's Global pyblish plugins
install_openpype_plugins()
if os.path.exists(PUBLISH_PATH):
log.info(f"Registering path: {PUBLISH_PATH}")
pyblish.api.register_plugin_path(PUBLISH_PATH)
pyblish.api.register_host(PUBLISH_HOST)
pyblish.api.register_target("local")
return host_tools.show_publish()
if __name__ == "__main__":
result = main()
sys.exit(not bool(result))
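
The script only exposes the Qt publish UI via host_tools.show_publish(); pyblish itself also offers pyblish.util.publish() for headless runs. Whether the celaction plugins work without the UI is an assumption, but the call pattern would be:

import pyblish.api
import pyblish.util

pyblish.api.register_host("celaction")
pyblish.api.register_target("local")
# Runs all registered plugin paths without showing a window and
# returns the processed context (assumes a headless run is viable).
context = pyblish.util.publish()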

View file

@ -1,22 +1,10 @@
import os
HOST_DIR = os.path.dirname(
os.path.abspath(__file__)
from .addon import (
HOST_DIR,
FlameAddon,
)
def add_implementation_envs(env, _app):
# Add requirements to DL_PYTHON_HOOK_PATH
pype_root = os.environ["OPENPYPE_REPOS_ROOT"]
env["DL_PYTHON_HOOK_PATH"] = os.path.join(
pype_root, "openpype", "hosts", "flame", "startup")
env.pop("QT_AUTO_SCREEN_SCALE_FACTOR", None)
# Set default values if they are not already set via settings
defaults = {
"LOGLEVEL": "DEBUG"
}
for key, value in defaults.items():
if not env.get(key):
env[key] = value
__all__ = (
"HOST_DIR",
"FlameAddon",
)

View file

@ -0,0 +1,35 @@
import os
from openpype.modules import OpenPypeModule, IHostAddon
HOST_DIR = os.path.dirname(os.path.abspath(__file__))
class FlameAddon(OpenPypeModule, IHostAddon):
name = "flame"
host_name = "flame"
def initialize(self, module_settings):
self.enabled = True
def add_implementation_envs(self, env, _app):
# Add requirements to DL_PYTHON_HOOK_PATH
env["DL_PYTHON_HOOK_PATH"] = os.path.join(HOST_DIR, "startup")
env.pop("QT_AUTO_SCREEN_SCALE_FACTOR", None)
# Set default values if they are not already set via settings
defaults = {
"LOGLEVEL": "DEBUG"
}
for key, value in defaults.items():
if not env.get(key):
env[key] = value
def get_launch_hook_paths(self, app):
if app.host_name != self.host_name:
return []
return [
os.path.join(HOST_DIR, "hooks")
]
def get_workfile_extensions(self):
return [".otoc"]

View file

@ -30,7 +30,8 @@ from .lib import (
maintained_temp_file_path,
get_clip_segment,
get_batch_group_from_desktop,
MediaInfoFile
MediaInfoFile,
TimeEffectMetadata
)
from .utils import (
setup,
@ -50,7 +51,8 @@ from .pipeline import (
)
from .menu import (
FlameMenuProjectConnect,
FlameMenuTimeline
FlameMenuTimeline,
FlameMenuUniversal
)
from .plugin import (
Creator,
@ -107,6 +109,7 @@ __all__ = [
"get_clip_segment",
"get_batch_group_from_desktop",
"MediaInfoFile",
"TimeEffectMetadata",
# pipeline
"install",
@ -129,6 +132,7 @@ __all__ = [
# menu
"FlameMenuProjectConnect",
"FlameMenuTimeline",
"FlameMenuUniversal",
# plugin
"Creator",

Some files were not shown because too many files have changed in this diff