Mirror of https://github.com/ynput/ayon-core.git — synced 2026-01-01 16:34:53 +01:00

Merge branch 'develop' into feature/blender-review
Commit 880aaad027 — 1378 changed files with 93840 additions and 21509 deletions
.github/ISSUE_TEMPLATE/bug_report.md | 2 (vendored)

@@ -6,6 +6,8 @@ labels: bug
 assignees: ''
 ---

+**Running version**
+[ex. 3.14.1-nightly.2]

 **Describe the bug**
 A clear and concise description of what the bug is.
.github/workflows/milestone_assign.yml | 28 (new file, vendored)

@@ -0,0 +1,28 @@
+name: Milestone - assign to PRs
+
+on:
+  pull_request_target:
+    types: [closed]
+
+jobs:
+  run_if_release:
+    if: startsWith(github.base_ref, 'release/')
+    runs-on: ubuntu-latest
+    steps:
+      - name: 'Assign Milestone [next-minor]'
+        if: github.event.pull_request.milestone == null
+        uses: zoispag/action-assign-milestone@v1
+        with:
+          repo-token: "${{ secrets.GITHUB_TOKEN }}"
+          milestone: 'next-minor'
+
+  run_if_develop:
+    if: ${{ github.base_ref == 'develop' }}
+    runs-on: ubuntu-latest
+    steps:
+      - name: 'Assign Milestone [next-patch]'
+        if: github.event.pull_request.milestone == null
+        uses: zoispag/action-assign-milestone@v1
+        with:
+          repo-token: "${{ secrets.GITHUB_TOKEN }}"
+          milestone: 'next-patch'
.github/workflows/milestone_create.yml | 62 (new file, vendored)

@@ -0,0 +1,62 @@
+name: Milestone - create default
+
+on:
+  milestone:
+    types: [closed, edited]
+
+jobs:
+  generate-next-patch:
+    runs-on: ubuntu-latest
+    steps:
+      - name: 'Get Milestones'
+        uses: "WyriHaximus/github-action-get-milestones@master"
+        id: milestones
+        env:
+          GITHUB_TOKEN: "${{ secrets.GITHUB_TOKEN }}"
+
+      - run: printf "name=number::%s" $(printenv MILESTONES | jq --arg MILESTONE $(printenv MILESTONE) '.[] | select(.title == $MILESTONE) | .number')
+        id: querymilestone
+        env:
+          MILESTONES: ${{ steps.milestones.outputs.milestones }}
+          MILESTONE: "next-patch"
+
+      - name: Read output
+        run: |
+          echo "${{ steps.querymilestone.outputs.number }}"
+
+      - name: 'Create `next-patch` milestone'
+        if: steps.querymilestone.outputs.number == ''
+        id: createmilestone
+        uses: "WyriHaximus/github-action-create-milestone@v1"
+        with:
+          title: 'next-patch'
+        env:
+          GITHUB_TOKEN: "${{ secrets.GITHUB_TOKEN }}"
+
+  generate-next-minor:
+    runs-on: ubuntu-latest
+    steps:
+      - name: 'Get Milestones'
+        uses: "WyriHaximus/github-action-get-milestones@master"
+        id: milestones
+        env:
+          GITHUB_TOKEN: "${{ secrets.GITHUB_TOKEN }}"
+
+      - run: printf "name=number::%s" $(printenv MILESTONES | jq --arg MILESTONE $(printenv MILESTONE) '.[] | select(.title == $MILESTONE) | .number')
+        id: querymilestone
+        env:
+          MILESTONES: ${{ steps.milestones.outputs.milestones }}
+          MILESTONE: "next-minor"
+
+      - name: Read output
+        run: |
+          echo "${{ steps.querymilestone.outputs.number }}"
+
+      - name: 'Create `next-minor` milestone'
+        if: steps.querymilestone.outputs.number == ''
+        id: createmilestone
+        uses: "WyriHaximus/github-action-create-milestone@v1"
+        with:
+          title: 'next-minor'
+        env:
+          GITHUB_TOKEN: "${{ secrets.GITHUB_TOKEN }}"
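The `jq` one-liner in the two `querymilestone` steps picks the number of the milestone whose title equals the `MILESTONE` environment variable. A minimal Python sketch of the same selection, assuming `payload` stands in for the JSON list the get-milestones action emits (the sample data is made up):

    import json

    # Stand-in for steps.milestones.outputs.milestones.
    payload = '[{"title": "next-patch", "number": 7}, {"title": "3.14", "number": 5}]'

    milestones = json.loads(payload)
    # jq: .[] | select(.title == $MILESTONE) | .number
    number = next(
        (m["number"] for m in milestones if m["title"] == "next-patch"),
        None)
    print(number)  # -> 7; the workflow only creates the milestone when this is empty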
.github/workflows/prerelease.yml | 46 (vendored)

@@ -37,27 +37,27 @@ jobs:
           echo ::set-output name=next_tag::$RESULT

-      - name: "✏️ Generate full changelog"
-        if: steps.version_type.outputs.type != 'skip'
-        id: generate-full-changelog
-        uses: heinrichreimer/github-changelog-generator-action@v2.2
-        with:
-          token: ${{ secrets.ADMIN_TOKEN }}
-          addSections: '{"documentation":{"prefix":"### 📖 Documentation","labels":["type: documentation"]},"tests":{"prefix":"### ✅ Testing","labels":["tests"]},"feature":{"prefix":"**🆕 New features**", "labels":["type: feature"]},"breaking":{"prefix":"**💥 Breaking**", "labels":["breaking"]},"enhancements":{"prefix":"**🚀 Enhancements**", "labels":["type: enhancement"]},"bugs":{"prefix":"**🐛 Bug fixes**", "labels":["type: bug"]},"deprecated":{"prefix":"**⚠️ Deprecations**", "labels":["depreciated"]}, "refactor":{"prefix":"**🔀 Refactored code**", "labels":["refactor"]}}'
-          issues: false
-          issuesWoLabels: false
-          sinceTag: "3.0.0"
-          maxIssues: 100
-          pullRequests: true
-          prWoLabels: false
-          author: false
-          unreleased: true
-          compareLink: true
-          stripGeneratorNotice: true
-          verbose: true
-          unreleasedLabel: ${{ steps.version.outputs.next_tag }}
-          excludeTagsRegex: "CI/.+"
-          releaseBranch: "main"
+      # - name: "✏️ Generate full changelog"
+      #   if: steps.version_type.outputs.type != 'skip'
+      #   id: generate-full-changelog
+      #   uses: heinrichreimer/github-changelog-generator-action@v2.3
+      #   with:
+      #     token: ${{ secrets.ADMIN_TOKEN }}
+      #     addSections: '{"documentation":{"prefix":"### 📖 Documentation","labels":["type: documentation"]},"tests":{"prefix":"### ✅ Testing","labels":["tests"]},"feature":{"prefix":"**🆕 New features**", "labels":["type: feature"]},"breaking":{"prefix":"**💥 Breaking**", "labels":["breaking"]},"enhancements":{"prefix":"**🚀 Enhancements**", "labels":["type: enhancement"]},"bugs":{"prefix":"**🐛 Bug fixes**", "labels":["type: bug"]},"deprecated":{"prefix":"**⚠️ Deprecations**", "labels":["depreciated"]}, "refactor":{"prefix":"**🔀 Refactored code**", "labels":["refactor"]}}'
+      #     issues: false
+      #     issuesWoLabels: false
+      #     sinceTag: "3.12.0"
+      #     maxIssues: 100
+      #     pullRequests: true
+      #     prWoLabels: false
+      #     author: false
+      #     unreleased: true
+      #     compareLink: true
+      #     stripGeneratorNotice: true
+      #     verbose: true
+      #     unreleasedLabel: ${{ steps.version.outputs.next_tag }}
+      #     excludeTagsRegex: "CI/.+"
+      #     releaseBranch: "main"

       - name: "🖨️ Print changelog to console"
         if: steps.version_type.outputs.type != 'skip'

@@ -85,11 +85,11 @@ jobs:
           tags: true
           unprotect_reviews: true

       - name: 🔨 Merge main back to develop
         uses: everlytic/branch-merge@1.1.0
         if: steps.version_type.outputs.type != 'skip'
         with:
           github_token: ${{ secrets.ADMIN_TOKEN }}
           source_ref: 'main'
           target_branch: 'develop'
           commit_message_template: '[Automated] Merged {source_ref} into {target_branch}'
.github/workflows/release.yml | 54 (vendored)

@@ -2,7 +2,7 @@ name: Stable Release

 on:
   release:
     types:
       - prereleased

 jobs:

@@ -13,7 +13,7 @@ jobs:
     steps:
       - name: 🚛 Checkout Code
        uses: actions/checkout@v2
        with:
          fetch-depth: 0

      - name: Set up Python

@@ -33,27 +33,27 @@ jobs:
           echo ::set-output name=last_release::$LASTRELEASE
           echo ::set-output name=release_tag::$RESULT

-      - name: "✏️ Generate full changelog"
-        if: steps.version.outputs.release_tag != 'skip'
-        id: generate-full-changelog
-        uses: heinrichreimer/github-changelog-generator-action@v2.2
-        with:
-          token: ${{ secrets.ADMIN_TOKEN }}
-          addSections: '{"documentation":{"prefix":"### 📖 Documentation","labels":["type: documentation"]},"tests":{"prefix":"### ✅ Testing","labels":["tests"]},"feature":{"prefix":"**🆕 New features**", "labels":["type: feature"]},"breaking":{"prefix":"**💥 Breaking**", "labels":["breaking"]},"enhancements":{"prefix":"**🚀 Enhancements**", "labels":["type: enhancement"]},"bugs":{"prefix":"**🐛 Bug fixes**", "labels":["type: bug"]},"deprecated":{"prefix":"**⚠️ Deprecations**", "labels":["depreciated"]}, "refactor":{"prefix":"**🔀 Refactored code**", "labels":["refactor"]}}'
-          issues: false
-          issuesWoLabels: false
-          sinceTag: "3.0.0"
-          maxIssues: 100
-          pullRequests: true
-          prWoLabels: false
-          author: false
-          unreleased: true
-          compareLink: true
-          stripGeneratorNotice: true
-          verbose: true
-          futureRelease: ${{ steps.version.outputs.release_tag }}
-          excludeTagsRegex: "CI/.+"
-          releaseBranch: "main"
+      # - name: "✏️ Generate full changelog"
+      #   if: steps.version.outputs.release_tag != 'skip'
+      #   id: generate-full-changelog
+      #   uses: heinrichreimer/github-changelog-generator-action@v2.3
+      #   with:
+      #     token: ${{ secrets.ADMIN_TOKEN }}
+      #     addSections: '{"documentation":{"prefix":"### 📖 Documentation","labels":["type: documentation"]},"tests":{"prefix":"### ✅ Testing","labels":["tests"]},"feature":{"prefix":"**🆕 New features**", "labels":["type: feature"]},"breaking":{"prefix":"**💥 Breaking**", "labels":["breaking"]},"enhancements":{"prefix":"**🚀 Enhancements**", "labels":["type: enhancement"]},"bugs":{"prefix":"**🐛 Bug fixes**", "labels":["type: bug"]},"deprecated":{"prefix":"**⚠️ Deprecations**", "labels":["depreciated"]}, "refactor":{"prefix":"**🔀 Refactored code**", "labels":["refactor"]}}'
+      #     issues: false
+      #     issuesWoLabels: false
+      #     sinceTag: "3.12.0"
+      #     maxIssues: 100
+      #     pullRequests: true
+      #     prWoLabels: false
+      #     author: false
+      #     unreleased: true
+      #     compareLink: true
+      #     stripGeneratorNotice: true
+      #     verbose: true
+      #     futureRelease: ${{ steps.version.outputs.release_tag }}
+      #     excludeTagsRegex: "CI/.+"
+      #     releaseBranch: "main"

       - name: 💾 Commit and Tag
         id: git_commit

@@ -73,8 +73,8 @@ jobs:
           token: ${{ secrets.ADMIN_TOKEN }}
           branch: main
           tags: true
           unprotect_reviews: true

       - name: "✏️ Generate last changelog"
         if: steps.version.outputs.release_tag != 'skip'
         id: generate-last-changelog

@@ -114,11 +114,11 @@ jobs:
         with:
           tag: "${{ steps.version.outputs.current_version }}"

       - name: 🔁 Merge main back to develop
         if: steps.version.outputs.release_tag != 'skip'
         uses: everlytic/branch-merge@1.1.0
         with:
           github_token: ${{ secrets.ADMIN_TOKEN }}
           source_ref: 'main'
           target_branch: 'develop'
           commit_message_template: '[Automated] Merged release {source_ref} into {target_branch}'
.gitignore | 8 (vendored)

@@ -102,5 +102,13 @@ website/.docusaurus

 .poetry/
 .python-version
+.editorconfig
+.pre-commit-config.yaml
+mypy.ini
+
 tools/run_eventserver.*

+# Developer tools
+tools/dev_*
+
+.github_changelog_generator
.gitmodules | 5 (vendored)

@@ -4,7 +4,4 @@

 [submodule "tools/modules/powershell/PSWriteColor"]
     path = tools/modules/powershell/PSWriteColor
    url = https://github.com/EvotecIT/PSWriteColor.git
-[submodule "vendor/configs/OpenColorIO-Configs"]
-    path = vendor/configs/OpenColorIO-Configs
-    url = https://github.com/imageworks/OpenColorIO-Configs
CHANGELOG.md | 1950 — diff suppressed because it is too large
HISTORY.md | 2124 — diff suppressed because it is too large
README.md | 10

@@ -41,7 +41,7 @@ It can be built and ran on all common platforms. We develop and test on the following:
 - **Linux**
   - **Ubuntu** 20.04 LTS
   - **Centos** 7
 - **Mac OSX**
   - **10.15** Catalina
   - **11.1** Big Sur (using Rosetta2)

@@ -287,6 +287,14 @@ To run tests, execute `.\tools\run_tests(.ps1|.sh)`.

 **Note that it needs existing virtual environment.**

+
+Developer tools
+-------------
+
+In case you wish to add your own tools to the `.\tools` folder without git tracking, it is possible by adding them with the `dev_*` prefix (example: `dev_clear_pyc(.ps1|.sh)`).
+
+
 ## Contributors ✨

 Thanks goes to these wonderful people ([emoji key](https://allcontributors.org/docs/en/emoji-key)):
common/openpype_common/distribution/README.md | 18 (new file)

@@ -0,0 +1,18 @@
+Addon distribution tool
+------------------------
+
+Code in this folder is the backend portion of the Addon distribution logic for the v4 server.
+
+Each host and module will be a separate Addon in the future. Each v4 server could run a different set of Addons.
+
+The client (running on an artist machine) will first ask the v4 server for a list of enabled addons.
+(It expects a list of json documents matching the `addon_distribution.py:AddonInfo` object.)
+Next it will check for the presence of each enabled addon version in the local folder. In the case of a missing
+version of an addon, the client will use information in the addon to download (from http/shared local disk/git)
+a zip file and unzip it.
+
+A required part of addon distribution will be the sharing of dependencies (python libraries, utilities), which is not part of this folder.
+
+The location of this folder might change in the future, as it will be required for a client to add this folder to sys.path reliably.
+
+This code needs to be independent of OpenPype code as much as possible!
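The flow described above maps onto the module added below: register one downloader per source type, then let `check_addons()` fetch the enabled-addon list and synchronize the local folder. A minimal sketch of that client-side call — the endpoint URL and addon folder are illustrative values, not defined by this commit:

    from common.openpype_common.distribution.addon_distribution import (
        AddonDownloader, OSAddonDownloader, HTTPAddonDownloader, check_addons)
    from common.openpype_common.distribution.addon_info import UrlType

    # One downloader per source type; unknown types raise ValueError later.
    factory = AddonDownloader()
    factory.register_format(UrlType.FILESYSTEM, OSAddonDownloader)
    factory.register_format(UrlType.HTTP, HTTPAddonDownloader)

    # Hypothetical endpoint and target folder.
    check_addons(
        server_endpoint="https://v4-server.example.com/api/addons",
        addon_folder="/opt/openpype/addons",
        downloaders=factory)  # raises RuntimeError if any addon failed to update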
common/openpype_common/distribution/addon_distribution.py | 208 (new file)

@@ -0,0 +1,208 @@
+import os
+from enum import Enum
+from abc import abstractmethod
+import attr
+import logging
+import requests
+import platform
+import shutil
+
+from .file_handler import RemoteFileHandler
+from .addon_info import AddonInfo
+
+
+class UpdateState(Enum):
+    EXISTS = "exists"
+    UPDATED = "updated"
+    FAILED = "failed"
+
+
+class AddonDownloader:
+    log = logging.getLogger(__name__)
+
+    def __init__(self):
+        self._downloaders = {}
+
+    def register_format(self, downloader_type, downloader):
+        self._downloaders[downloader_type.value] = downloader
+
+    def get_downloader(self, downloader_type):
+        downloader = self._downloaders.get(downloader_type)
+        if not downloader:
+            raise ValueError(f"{downloader_type} not implemented")
+        return downloader()
+
+    @classmethod
+    @abstractmethod
+    def download(cls, source, destination):
+        """Returns url to downloaded addon zip file.
+
+        Args:
+            source (dict): {type:"http", "url":"https://} ...}
+            destination (str): local folder to unzip
+        Returns:
+            (str) local path to addon zip file
+        """
+        pass
+
+    @classmethod
+    def check_hash(cls, addon_path, addon_hash):
+        """Compares 'hash' of downloaded 'addon_url' file.
+
+        Args:
+            addon_path (str): local path to addon zip file
+            addon_hash (str): sha256 hash of zip file
+        Raises:
+            ValueError if hashes don't match
+        """
+        if not os.path.exists(addon_path):
+            raise ValueError(f"{addon_path} doesn't exist.")
+        if not RemoteFileHandler.check_integrity(addon_path,
+                                                 addon_hash,
+                                                 hash_type="sha256"):
+            raise ValueError(f"{addon_path} doesn't match expected hash.")
+
+    @classmethod
+    def unzip(cls, addon_zip_path, destination):
+        """Unzips local 'addon_zip_path' to 'destination'.
+
+        Args:
+            addon_zip_path (str): local path to addon zip file
+            destination (str): local folder to unzip
+        """
+        RemoteFileHandler.unzip(addon_zip_path, destination)
+        os.remove(addon_zip_path)
+
+    @classmethod
+    def remove(cls, addon_url):
+        pass
+
+
+class OSAddonDownloader(AddonDownloader):
+
+    @classmethod
+    def download(cls, source, destination):
+        # OS doesn't need to download, unzip directly
+        addon_url = source["path"].get(platform.system().lower())
+        if not os.path.exists(addon_url):
+            raise ValueError("{} is not accessible".format(addon_url))
+        return addon_url
+
+
+class HTTPAddonDownloader(AddonDownloader):
+    CHUNK_SIZE = 100000
+
+    @classmethod
+    def download(cls, source, destination):
+        source_url = source["url"]
+        cls.log.debug(f"Downloading {source_url} to {destination}")
+        file_name = os.path.basename(destination)
+        _, ext = os.path.splitext(file_name)
+        if (ext.replace(".", '') not
+                in set(RemoteFileHandler.IMPLEMENTED_ZIP_FORMATS)):
+            file_name += ".zip"
+        RemoteFileHandler.download_url(source_url,
+                                       destination,
+                                       filename=file_name)
+
+        return os.path.join(destination, file_name)
+
+
+def get_addons_info(server_endpoint):
+    """Returns list of addon information from Server"""
+    # TODO temp
+    # addon_info = AddonInfo(
+    #     **{"name": "openpype_slack",
+    #        "version": "1.0.0",
+    #        "addon_url": "c:/projects/openpype_slack_1.0.0.zip",
+    #        "type": UrlType.FILESYSTEM,
+    #        "hash": "4be25eb6215e91e5894d3c5475aeb1e379d081d3f5b43b4ee15b0891cf5f5658"})  # noqa
+    #
+    # http_addon = AddonInfo(
+    #     **{"name": "openpype_slack",
+    #        "version": "1.0.0",
+    #        "addon_url": "https://drive.google.com/file/d/1TcuV8c2OV8CcbPeWi7lxOdqWsEqQNPYy/view?usp=sharing",  # noqa
+    #        "type": UrlType.HTTP,
+    #        "hash": "4be25eb6215e91e5894d3c5475aeb1e379d081d3f5b43b4ee15b0891cf5f5658"})  # noqa
+
+    response = requests.get(server_endpoint)
+    if not response.ok:
+        raise Exception(response.text)
+
+    addons_info = []
+    for addon in response.json():
+        addons_info.append(AddonInfo(**addon))
+    return addons_info
+
+
+def update_addon_state(addon_infos, destination_folder, factory,
+                       log=None):
+    """Loops through all 'addon_infos', compares local version, unzips.
+
+    Loops through server provided list of dictionaries with information about
+    available addons. Looks if each addon is already present and deployed.
+    If it isn't, the addon zip gets downloaded and unzipped into
+    'destination_folder'.
+    Args:
+        addon_infos (list of AddonInfo)
+        destination_folder (str): local path
+        factory (AddonDownloader): factory to get appropriate downloader per
+            addon type
+        log (logging.Logger)
+    Returns:
+        (dict): {"addon_full_name": UpdateState.value
+            (eg. "exists"|"updated"|"failed")}
+    """
+    if not log:
+        log = logging.getLogger(__name__)
+
+    download_states = {}
+    for addon in addon_infos:
+        full_name = "{}_{}".format(addon.name, addon.version)
+        addon_dest = os.path.join(destination_folder, full_name)
+
+        if os.path.isdir(addon_dest):
+            log.debug(f"Addon version folder {addon_dest} already exists.")
+            download_states[full_name] = UpdateState.EXISTS.value
+            continue
+
+        for source in addon.sources:
+            download_states[full_name] = UpdateState.FAILED.value
+            try:
+                downloader = factory.get_downloader(source.type)
+                zip_file_path = downloader.download(attr.asdict(source),
+                                                    addon_dest)
+                downloader.check_hash(zip_file_path, addon.hash)
+                downloader.unzip(zip_file_path, addon_dest)
+                download_states[full_name] = UpdateState.UPDATED.value
+                break
+            except Exception:
+                log.warning(f"Error happened during updating {addon.name}",
+                            exc_info=True)
+                if os.path.isdir(addon_dest):
+                    log.debug(f"Cleaning {addon_dest}")
+                    shutil.rmtree(addon_dest)
+
+    return download_states
+
+
+def check_addons(server_endpoint, addon_folder, downloaders):
+    """Main entry point to compare existing addons with those on server.
+
+    Args:
+        server_endpoint (str): url to v4 server endpoint
+        addon_folder (str): local dir path for addons
+        downloaders (AddonDownloader): factory of downloaders
+
+    Raises:
+        (RuntimeError) if any addon failed update
+    """
+    addons_info = get_addons_info(server_endpoint)
+    result = update_addon_state(addons_info,
+                                addon_folder,
+                                downloaders)
+    if UpdateState.FAILED.value in result.values():
+        raise RuntimeError(f"Unable to update some addons {result}")
+
+
+def cli(*args):
+    raise NotImplementedError
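`UrlType` in `addon_info.py` below also declares a `GIT` value, but no git downloader is registered anywhere in this commit. A hypothetical subclass showing how the factory could be extended — the clone step is only a placeholder, not part of the codebase:

    import subprocess

    from common.openpype_common.distribution.addon_distribution import (
        AddonDownloader)


    class GitAddonDownloader(AddonDownloader):
        """Hypothetical downloader for UrlType.GIT sources (not in this commit)."""

        @classmethod
        def download(cls, source, destination):
            # Assumed source shape: {"type": "git", "url": "<repo url>"};
            # clone shallowly straight into the destination folder.
            subprocess.check_call(
                ["git", "clone", "--depth", "1", source["url"], destination])
            # A real implementation would still have to produce a zip and return
            # its path, since update_addon_state() runs check_hash()/unzip() next.
            raise NotImplementedError("zip packaging of the clone is not defined")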
common/openpype_common/distribution/addon_info.py | 80 (new file)

@@ -0,0 +1,80 @@
+import attr
+from enum import Enum
+
+
+class UrlType(Enum):
+    HTTP = "http"
+    GIT = "git"
+    FILESYSTEM = "filesystem"
+
+
+@attr.s
+class MultiPlatformPath(object):
+    windows = attr.ib(default=None)
+    linux = attr.ib(default=None)
+    darwin = attr.ib(default=None)
+
+
+@attr.s
+class AddonSource(object):
+    type = attr.ib()
+
+
+@attr.s
+class LocalAddonSource(AddonSource):
+    path = attr.ib(default=attr.Factory(MultiPlatformPath))
+
+
+@attr.s
+class WebAddonSource(AddonSource):
+    url = attr.ib(default=None)
+
+
+@attr.s
+class VersionData(object):
+    version_data = attr.ib(default=None)
+
+
+@attr.s
+class AddonInfo(object):
+    """Object matching json payload from Server"""
+    name = attr.ib()
+    version = attr.ib()
+    title = attr.ib(default=None)
+    sources = attr.ib(default=attr.Factory(dict))
+    hash = attr.ib(default=None)
+    description = attr.ib(default=None)
+    license = attr.ib(default=None)
+    authors = attr.ib(default=None)
+
+    @classmethod
+    def from_dict(cls, data):
+        sources = []
+
+        production_version = data.get("productionVersion")
+        if not production_version:
+            return
+
+        # server payload contains info about all versions
+        # active addon must have 'productionVersion' and matching version info
+        version_data = data.get("versions", {})[production_version]
+
+        for source in version_data.get("clientSourceInfo", []):
+            if source.get("type") == UrlType.FILESYSTEM.value:
+                source_addon = LocalAddonSource(type=source["type"],
+                                                path=source["path"])
+            if source.get("type") == UrlType.HTTP.value:
+                source_addon = WebAddonSource(type=source["type"],
+                                              url=source["url"])
+
+            sources.append(source_addon)
+
+        return cls(name=data.get("name"),
+                   version=production_version,
+                   sources=sources,
+                   hash=data.get("hash"),
+                   description=data.get("description"),
+                   title=data.get("title"),
+                   license=data.get("license"),
+                   authors=data.get("authors"))
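A quick illustration of `AddonInfo.from_dict()` on a trimmed version of the server payload used in the tests further below — it returns `None` when `productionVersion` is absent, and otherwise flattens the matching version's `clientSourceInfo` into typed sources (the URL here is made up):

    from common.openpype_common.distribution.addon_info import AddonInfo

    payload = {
        "name": "openpype_slack",
        "productionVersion": "1.0.0",
        "versions": {
            "1.0.0": {
                "clientSourceInfo": [
                    {"type": "http", "url": "https://example.com/slack.zip"}
                ]
            }
        },
    }

    addon = AddonInfo.from_dict(payload)
    assert addon.name == "openpype_slack" and addon.version == "1.0.0"
    assert addon.sources[0].url.endswith("slack.zip")

    payload.pop("productionVersion")
    assert AddonInfo.from_dict(payload) is None  # no active production version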
common/openpype_common/distribution/file_handler.py (file header missing in source; path inferred from the `.file_handler` import above)

@@ -21,7 +21,7 @@ class RemoteFileHandler:
         'tar.gz', 'tar.xz', 'tar.bz2']

     @staticmethod
-    def calculate_md5(fpath, chunk_size):
+    def calculate_md5(fpath, chunk_size=10000):
         md5 = hashlib.md5()
         with open(fpath, 'rb') as f:
             for chunk in iter(lambda: f.read(chunk_size), b''):

@@ -33,17 +33,45 @@ class RemoteFileHandler:
         return md5 == RemoteFileHandler.calculate_md5(fpath, **kwargs)

     @staticmethod
-    def check_integrity(fpath, md5=None):
+    def calculate_sha256(fpath):
+        """Calculate sha256 for content of the file.
+
+        Args:
+            fpath (str): Path to file.
+
+        Returns:
+            str: hex encoded sha256
+
+        """
+        h = hashlib.sha256()
+        b = bytearray(128 * 1024)
+        mv = memoryview(b)
+        with open(fpath, 'rb', buffering=0) as f:
+            for n in iter(lambda: f.readinto(mv), 0):
+                h.update(mv[:n])
+        return h.hexdigest()
+
+    @staticmethod
+    def check_sha256(fpath, sha256, **kwargs):
+        return sha256 == RemoteFileHandler.calculate_sha256(fpath, **kwargs)
+
+    @staticmethod
+    def check_integrity(fpath, hash_value=None, hash_type=None):
         if not os.path.isfile(fpath):
             return False
-        if md5 is None:
+        if hash_value is None:
             return True
-        return RemoteFileHandler.check_md5(fpath, md5)
+        if not hash_type:
+            raise ValueError("Provide hash type, md5 or sha256")
+        if hash_type == 'md5':
+            return RemoteFileHandler.check_md5(fpath, hash_value)
+        if hash_type == "sha256":
+            return RemoteFileHandler.check_sha256(fpath, hash_value)

     @staticmethod
     def download_url(
         url, root, filename=None,
-        md5=None, max_redirect_hops=3
+        sha256=None, max_redirect_hops=3
     ):
         """Download a file from a url and place it in root.
         Args:

@@ -51,7 +79,7 @@ class RemoteFileHandler:
             root (str): Directory to place downloaded file in
             filename (str, optional): Name to save the file under.
                 If None, use the basename of the URL
-            md5 (str, optional): MD5 checksum of the download.
+            sha256 (str, optional): sha256 checksum of the download.
                 If None, do not check
             max_redirect_hops (int, optional): Maximum number of redirect
                 hops allowed

@@ -64,7 +92,8 @@ class RemoteFileHandler:
         os.makedirs(root, exist_ok=True)

         # check if file is already present locally
-        if RemoteFileHandler.check_integrity(fpath, md5):
+        if RemoteFileHandler.check_integrity(fpath,
+                                             sha256, hash_type="sha256"):
             print('Using downloaded and verified file: ' + fpath)
             return

@@ -76,7 +105,7 @@ class RemoteFileHandler:
         file_id = RemoteFileHandler._get_google_drive_file_id(url)
         if file_id is not None:
             return RemoteFileHandler.download_file_from_google_drive(
-                file_id, root, filename, md5)
+                file_id, root, filename, sha256)

         # download the file
         try:

@@ -92,20 +121,21 @@ class RemoteFileHandler:
             raise e

         # check integrity of downloaded file
-        if not RemoteFileHandler.check_integrity(fpath, md5):
+        if not RemoteFileHandler.check_integrity(fpath,
+                                                 sha256, hash_type="sha256"):
             raise RuntimeError("File not found or corrupted.")

     @staticmethod
     def download_file_from_google_drive(file_id, root,
                                         filename=None,
-                                        md5=None):
+                                        sha256=None):
         """Download a Google Drive file from and place it in root.
         Args:
             file_id (str): id of file to be downloaded
             root (str): Directory to place downloaded file in
             filename (str, optional): Name to save the file under.
                 If None, use the id of the file.
-            md5 (str, optional): MD5 checksum of the download.
+            sha256 (str, optional): sha256 checksum of the download.
                 If None, do not check
         """
         # Based on https://stackoverflow.com/questions/38511444/python-download-files-from-google-drive-using-url  # noqa

@@ -119,8 +149,8 @@ class RemoteFileHandler:
         os.makedirs(root, exist_ok=True)

-        if os.path.isfile(fpath) and RemoteFileHandler.check_integrity(fpath,
-                                                                       md5):
+        if os.path.isfile(fpath) and RemoteFileHandler.check_integrity(
+                fpath, sha256, hash_type="sha256"):
             print('Using downloaded and verified file: ' + fpath)
         else:
             session = requests.Session()
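With the switch from md5 to sha256, `check_integrity()` now needs an explicit `hash_type`, and passing a hash without one raises. A small usage sketch against a temporary file:

    import os
    import tempfile

    from common.openpype_common.distribution.file_handler import RemoteFileHandler

    # Create a throwaway file so the sketch is runnable end to end.
    fd, fpath = tempfile.mkstemp(suffix=".zip")
    os.write(fd, b"dummy payload")
    os.close(fd)

    expected = RemoteFileHandler.calculate_sha256(fpath)

    # The one-shot check AddonDownloader.check_hash() relies on:
    assert RemoteFileHandler.check_integrity(fpath, expected, hash_type="sha256")

    # Hash value without hash_type is now rejected:
    try:
        RemoteFileHandler.check_integrity(fpath, expected)
    except ValueError as exc:
        print(exc)  # "Provide hash type, md5 or sha256"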
Tests for the distribution tooling (new file; path not captured in source) | 167

@@ -0,0 +1,167 @@
+import pytest
+import attr
+import tempfile
+
+from common.openpype_common.distribution.addon_distribution import (
+    AddonDownloader,
+    OSAddonDownloader,
+    HTTPAddonDownloader,
+    AddonInfo,
+    update_addon_state,
+    UpdateState
+)
+from common.openpype_common.distribution.addon_info import UrlType
+
+
+@pytest.fixture
+def addon_downloader():
+    addon_downloader = AddonDownloader()
+    addon_downloader.register_format(UrlType.FILESYSTEM, OSAddonDownloader)
+    addon_downloader.register_format(UrlType.HTTP, HTTPAddonDownloader)
+
+    yield addon_downloader
+
+
+@pytest.fixture
+def http_downloader(addon_downloader):
+    yield addon_downloader.get_downloader(UrlType.HTTP.value)
+
+
+@pytest.fixture
+def temp_folder():
+    yield tempfile.mkdtemp()
+
+
+@pytest.fixture
+def sample_addon_info():
+    addon_info = {
+        "versions": {
+            "1.0.0": {
+                "clientPyproject": {
+                    "tool": {
+                        "poetry": {
+                            "dependencies": {
+                                "nxtools": "^1.6",
+                                "orjson": "^3.6.7",
+                                "typer": "^0.4.1",
+                                "email-validator": "^1.1.3",
+                                "python": "^3.10",
+                                "fastapi": "^0.73.0"
+                            }
+                        }
+                    }
+                },
+                "hasSettings": True,
+                "clientSourceInfo": [
+                    {
+                        "type": "http",
+                        "url": "https://drive.google.com/file/d/1TcuV8c2OV8CcbPeWi7lxOdqWsEqQNPYy/view?usp=sharing"  # noqa
+                    },
+                    {
+                        "type": "filesystem",
+                        "path": {
+                            "windows": ["P:/sources/some_file.zip",
+                                        "W:/sources/some_file.zip"],  # noqa
+                            "linux": ["/mnt/srv/sources/some_file.zip"],
+                            "darwin": ["/Volumes/srv/sources/some_file.zip"]
+                        }
+                    }
+                ],
+                "frontendScopes": {
+                    "project": {
+                        "sidebar": "hierarchy"
+                    }
+                }
+            }
+        },
+        "description": "",
+        "title": "Slack addon",
+        "name": "openpype_slack",
+        "productionVersion": "1.0.0",
+        "hash": "4be25eb6215e91e5894d3c5475aeb1e379d081d3f5b43b4ee15b0891cf5f5658"  # noqa
+    }
+    yield addon_info
+
+
+def test_register(printer):
+    addon_downloader = AddonDownloader()
+
+    assert len(addon_downloader._downloaders) == 0, "Contains registered"
+
+    addon_downloader.register_format(UrlType.FILESYSTEM, OSAddonDownloader)
+    assert len(addon_downloader._downloaders) == 1, "Should contain one"
+
+
+def test_get_downloader(printer, addon_downloader):
+    assert addon_downloader.get_downloader(UrlType.FILESYSTEM.value), "Should find"  # noqa
+
+    with pytest.raises(ValueError):
+        addon_downloader.get_downloader("unknown"), "Shouldn't find"
+
+
+def test_addon_info(printer, sample_addon_info):
+    """Tests parsing of expected payload from v4 server into AddonInfo."""
+    valid_minimum = {
+        "name": "openpype_slack",
+        "productionVersion": "1.0.0",
+        "versions": {
+            "1.0.0": {
+                "clientSourceInfo": [
+                    {
+                        "type": "filesystem",
+                        "path": {
+                            "windows": [
+                                "P:/sources/some_file.zip",
+                                "W:/sources/some_file.zip"],
+                            "linux": [
+                                "/mnt/srv/sources/some_file.zip"],
+                            "darwin": [
+                                "/Volumes/srv/sources/some_file.zip"]  # noqa
+                        }
+                    }
+                ]
+            }
+        }
+    }
+
+    assert AddonInfo.from_dict(valid_minimum), "Missing required fields"
+
+    valid_minimum["versions"].pop("1.0.0")
+    with pytest.raises(KeyError):
+        assert not AddonInfo.from_dict(valid_minimum), "Must fail without version data"  # noqa
+
+    valid_minimum.pop("productionVersion")
+    assert not AddonInfo.from_dict(
+        valid_minimum), "none if not productionVersion"  # noqa
+
+    addon = AddonInfo.from_dict(sample_addon_info)
+    assert addon, "Should be created"
+    assert addon.name == "openpype_slack", "Incorrect name"
+    assert addon.version == "1.0.0", "Incorrect version"
+
+    with pytest.raises(TypeError):
+        assert addon["name"], "Dict approach not implemented"
+
+    addon_as_dict = attr.asdict(addon)
+    assert addon_as_dict["name"], "Dict approach should work"
+
+
+def test_update_addon_state(printer, sample_addon_info,
+                            temp_folder, addon_downloader):
+    """Tests possible cases of addon update."""
+    addon_info = AddonInfo.from_dict(sample_addon_info)
+    orig_hash = addon_info.hash
+
+    addon_info.hash = "brokenhash"
+    result = update_addon_state([addon_info], temp_folder, addon_downloader)
+    assert result["openpype_slack_1.0.0"] == UpdateState.FAILED.value, \
+        "Update should fail because of wrong hash"
+
+    addon_info.hash = orig_hash
+    result = update_addon_state([addon_info], temp_folder, addon_downloader)
+    assert result["openpype_slack_1.0.0"] == UpdateState.UPDATED.value, \
+        "Addon should have been updated"
+
+    result = update_addon_state([addon_info], temp_folder, addon_downloader)
+    assert result["openpype_slack_1.0.0"] == UpdateState.EXISTS.value, \
+        "Addon should already exist"
OpenPypeVersion / BootstrapRepos module (file header not captured in source)

@@ -63,7 +63,8 @@ class OpenPypeVersion(semver.VersionInfo):
     """
     staging = False
     path = None
-    _VERSION_REGEX = re.compile(r"(?P<major>0|[1-9]\d*)\.(?P<minor>0|[1-9]\d*)\.(?P<patch>0|[1-9]\d*)(?:-(?P<prerelease>(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\+(?P<buildmetadata>[0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?$")  # noqa: E501
+    # this should match any string complying with https://semver.org/
+    _VERSION_REGEX = re.compile(r"(?P<major>0|[1-9]\d*)\.(?P<minor>0|[1-9]\d*)\.(?P<patch>0|[1-9]\d*)(?:-(?P<prerelease>[a-zA-Z\d\-.]*))?(?:\+(?P<buildmetadata>[a-zA-Z\d\-.]*))?")  # noqa: E501
     _installed_version = None

     def __init__(self, *args, **kwargs):

@@ -122,7 +123,7 @@ class OpenPypeVersion(semver.VersionInfo):
         if self.staging:
             if kwargs.get("build"):
                 if "staging" not in kwargs.get("build"):
-                    kwargs["build"] = "{}-staging".format(kwargs.get("build"))
+                    kwargs["build"] = f"{kwargs.get('build')}-staging"
             else:
                 kwargs["build"] = "staging"

@@ -136,8 +137,7 @@ class OpenPypeVersion(semver.VersionInfo):
         return bool(result and self.staging == other.staging)

     def __repr__(self):
-        return "<{}: {} - path={}>".format(
-            self.__class__.__name__, str(self), self.path)
+        return f"<{self.__class__.__name__}: {str(self)} - path={self.path}>"

     def __lt__(self, other: OpenPypeVersion):
         result = super().__lt__(other)

@@ -212,6 +212,8 @@ class OpenPypeVersion(semver.VersionInfo):
             OpenPypeVersion: of detected or None.

         """
+        # strip .zip ext if present
+        string = re.sub(r"\.zip$", "", string, flags=re.IGNORECASE)
         m = re.search(OpenPypeVersion._VERSION_REGEX, string)
         if not m:
             return None

@@ -232,10 +234,7 @@ class OpenPypeVersion(semver.VersionInfo):
         return openpype_version

     def __hash__(self):
-        if self.path:
-            return hash(self.path)
-        else:
-            return hash(str(self))
+        return hash(self.path) if self.path else hash(str(self))

     @staticmethod
     def is_version_in_dir(
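The relaxed semver regex plus the new `.zip` stripping lets `version_in_str()` pull a version straight out of a zip file name. An illustrative check that mirrors the new behaviour (the file name is made up):

    import re

    # Strip a trailing ".zip", then search the relaxed semver pattern.
    name = "openpype-v3.14.1-nightly.2.zip"  # illustrative file name
    string = re.sub(r"\.zip$", "", name, flags=re.IGNORECASE)
    m = re.search(r"(?P<major>0|[1-9]\d*)\.(?P<minor>0|[1-9]\d*)"
                  r"\.(?P<patch>0|[1-9]\d*)"
                  r"(?:-(?P<prerelease>[a-zA-Z\d\-.]*))?", string)
    print(m.group("major"), m.group("minor"),
          m.group("patch"), m.group("prerelease"))
    # -> 3 14 1 nightly.2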
@@ -384,7 +383,8 @@ class OpenPypeVersion(semver.VersionInfo):

     @classmethod
     def get_local_versions(
-            cls, production: bool = None, staging: bool = None
+            cls, production: bool = None,
+            staging: bool = None
     ) -> List:
         """Get all versions available on this machine.

@@ -394,6 +394,10 @@ class OpenPypeVersion(semver.VersionInfo):
         Args:
             production (bool): Return production versions.
             staging (bool): Return staging versions.

+        Returns:
+            list: of compatible versions available on the machine.
+
         """
         # Return all local versions if arguments are set to None
         if production is None and staging is None:

@@ -410,10 +414,10 @@ class OpenPypeVersion(semver.VersionInfo):
         if not production and not staging:
             return []

+        # DEPRECATED: backwards compatible way to look for versions in root
         dir_to_search = Path(user_data_dir("openpype", "pypeclub"))
-        versions = OpenPypeVersion.get_versions_from_directory(
-            dir_to_search
-        )
+        versions = OpenPypeVersion.get_versions_from_directory(dir_to_search)
         filtered_versions = []
         for version in versions:
             if version.is_staging():

@@ -425,7 +429,8 @@ class OpenPypeVersion(semver.VersionInfo):

     @classmethod
     def get_remote_versions(
-            cls, production: bool = None, staging: bool = None
+            cls, production: bool = None,
+            staging: bool = None
     ) -> List:
         """Get all versions available in OpenPype Path.

@@ -435,6 +440,7 @@ class OpenPypeVersion(semver.VersionInfo):
         Args:
             production (bool): Return production versions.
             staging (bool): Return staging versions.

         """
         # Return all local versions if arguments are set to None
         if production is None and staging is None:

@@ -469,6 +475,7 @@ class OpenPypeVersion(semver.VersionInfo):
             return []

         versions = cls.get_versions_from_directory(dir_to_search)

         filtered_versions = []
         for version in versions:
             if version.is_staging():

@@ -479,7 +486,8 @@ class OpenPypeVersion(semver.VersionInfo):
         return list(sorted(set(filtered_versions)))

     @staticmethod
-    def get_versions_from_directory(openpype_dir: Path) -> List:
+    def get_versions_from_directory(
+            openpype_dir: Path) -> List:
         """Get all detected OpenPype versions in directory.

         Args:

@@ -492,15 +500,22 @@ class OpenPypeVersion(semver.VersionInfo):
             ValueError: if invalid path is specified.

         """
+        openpype_versions = []
         if not openpype_dir.exists() and not openpype_dir.is_dir():
-            raise ValueError("specified directory is invalid")
+            return openpype_versions

-        _openpype_versions = []
         # iterate over directory in first level and find all that might
         # contain OpenPype.
         for item in openpype_dir.iterdir():
+            # if the item is directory with major.minor version, dive deeper
+            if item.is_dir() and re.match(r"^\d+\.\d+$", item.name):
+                _versions = OpenPypeVersion.get_versions_from_directory(
+                    item)
+                if _versions:
+                    openpype_versions += _versions

-            # if file, strip extension, in case of dir not.
+            # if file exists, strip extension, in case of dir don't.
             name = item.name if item.is_dir() else item.stem
             result = OpenPypeVersion.version_in_str(name)

@@ -519,9 +534,9 @@ class OpenPypeVersion(semver.VersionInfo):
                 continue

             detected_version.path = item
-            _openpype_versions.append(detected_version)
+            openpype_versions.append(detected_version)

-        return sorted(_openpype_versions)
+        return sorted(openpype_versions)

     @staticmethod
     def get_installed_version_str() -> str:
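Several hunks in this module move version storage from a flat data dir into `major.minor` sub-folders, with `get_versions_from_directory()` now recursing one level into folders named like `3.14`. A sketch of the layout this implies (all names below are illustrative):

    # Illustrative user data dir after this change:
    #
    #   <user_data_dir>/openpype/
    #       openpype-v3.13.0.zip        <- old flat layout, still found via the
    #                                      DEPRECATED root lookup
    #       3.14/                       <- new major.minor sub-folder
    #           openpype-v3.14.0.zip    <- zips land here via _move_zip_to_data_dir
    #           openpype-v3.14.1/       <- unzipped versions are detected too
    #
    # get_versions_from_directory() matches r"^\d+\.\d+$" on folder names and
    # recurses, so both layouts end up in one sorted list.
    import re
    assert re.match(r"^\d+\.\d+$", "3.14") and not re.match(r"^\d+\.\d+$", "3.14.0")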
@@ -550,13 +565,13 @@ class OpenPypeVersion(semver.VersionInfo):
             staging: bool = False,
             local: bool = None,
             remote: bool = None
-    ) -> OpenPypeVersion:
-        """Get latest available version.
+    ) -> Union[OpenPypeVersion, None]:
+        """Get the latest available version.

         The version does not contain information about path and source.

-        This is utility version to get latest version from all found. Build
-        version is not listed if staging is enabled.
+        This is utility version to get the latest version from all found.
+        Build version is not listed if staging is enabled.

         Arguments 'local' and 'remote' define if local and remote repository
         versions are used. All versions are used if both are not set (or set

@@ -568,6 +583,10 @@ class OpenPypeVersion(semver.VersionInfo):
             staging (bool, optional): List staging versions if True.
             local (bool, optional): List local versions if True.
             remote (bool, optional): List remote versions if True.

+        Returns:
+            Latest OpenPypeVersion or None
+
         """
         if local is None and remote is None:
             local = True

@@ -621,6 +640,21 @@ class OpenPypeVersion(semver.VersionInfo):
             return None
         return OpenPypeVersion(version=result)

+    def is_compatible(self, version: OpenPypeVersion):
+        """Test build compatibility.
+
+        This will simply compare major and minor versions (ignoring patch
+        and the rest).
+
+        Args:
+            version (OpenPypeVersion): Version to check compatibility with.
+
+        Returns:
+            bool: if the version is compatible
+
+        """
+        return self.major == version.major and self.minor == version.minor
+

 class BootstrapRepos:
     """Class for bootstrapping local OpenPype installation.
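The new `is_compatible()` compares only the `major.minor` pair, which is what makes the `major.minor` folder layout above meaningful — everything in one folder is mutually compatible. Illustratively (assuming the keyword constructor inherited from `semver.VersionInfo` is forwarded unchanged):

    a = OpenPypeVersion(major=3, minor=14, patch=1)
    b = OpenPypeVersion(major=3, minor=14, patch=9)
    c = OpenPypeVersion(major=3, minor=15, patch=0)

    assert a.is_compatible(b)       # same 3.14 series
    assert not a.is_compatible(c)   # 3.15 is a different build series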
@@ -714,9 +748,9 @@ class BootstrapRepos:
             self, repo_dir: Path = None) -> Union[OpenPypeVersion, None]:
         """Copy zip created from OpenPype repositories to user data dir.

-        This detect OpenPype version either in local "live" OpenPype
+        This detects OpenPype version either in local "live" OpenPype
         repository or in user provided path. Then it will zip it in temporary
-        directory and finally it will move it to destination which is user
+        directory, and finally it will move it to destination which is user
         data directory. Existing files will be replaced.

         Args:

@@ -727,7 +761,7 @@ class BootstrapRepos:

         """
         # if repo dir is not set, we detect local "live" OpenPype repository
-        # version and use it as a source. Otherwise repo_dir is user
+        # version and use it as a source. Otherwise, repo_dir is user
         # entered location.
         if repo_dir:
             version = self.get_version(repo_dir)

@@ -741,8 +775,9 @@ class BootstrapRepos:
             return

         # create destination directory
-        if not self.data_dir.exists():
-            self.data_dir.mkdir(parents=True)
+        destination = self.data_dir / f"{installed_version.major}.{installed_version.minor}"  # noqa
+        if not destination.exists():
+            destination.mkdir(parents=True)

         # create zip inside temporary directory.
         with tempfile.TemporaryDirectory() as temp_dir:

@@ -770,7 +805,9 @@ class BootstrapRepos:
             Path to moved zip on success.

         """
-        destination = self.data_dir / zip_file.name
+        version = OpenPypeVersion.version_in_str(zip_file.name)
+        destination_dir = self.data_dir / f"{version.major}.{version.minor}"
+        destination = destination_dir / zip_file.name

         if destination.exists():
             self._print(

@@ -781,8 +818,15 @@ class BootstrapRepos:
             except Exception as e:
                 self._print(str(e), LOG_ERROR, exc_info=True)
                 return None
+        if not destination_dir.exists():
+            destination_dir.mkdir(parents=True)
+        elif not destination_dir.is_dir():
+            self._print(
+                "Destination exists but is not directory.", LOG_ERROR)
+            return None

         try:
-            shutil.move(zip_file.as_posix(), self.data_dir.as_posix())
+            shutil.move(zip_file.as_posix(), destination_dir.as_posix())
         except shutil.Error as e:
             self._print(str(e), LOG_ERROR, exc_info=True)
             return None
@@ -995,6 +1039,16 @@ class BootstrapRepos:

     @staticmethod
     def _validate_dir(path: Path) -> tuple:
+        """Validate checksums in a given path.
+
+        Args:
+            path (Path): path to folder to validate.
+
+        Returns:
+            tuple(bool, str): returns status and reason as a bool
+                and str in a tuple.
+
+        """
         checksums_file = Path(path / "checksums")
         if not checksums_file.exists():
             # FIXME: This should be set to False sometimes in the future
@ -1076,11 +1130,24 @@ class BootstrapRepos:
|
||||||
sys.path.insert(0, directory.as_posix())
|
sys.path.insert(0, directory.as_posix())
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def find_openpype_version(version, staging):
|
def find_openpype_version(
|
||||||
|
version: Union[str, OpenPypeVersion],
|
||||||
|
staging: bool
|
||||||
|
) -> Union[OpenPypeVersion, None]:
|
||||||
|
"""Find location of specified OpenPype version.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
version (Union[str, OpenPypeVersion): Version to find.
|
||||||
|
staging (bool): Filter staging versions.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
requested OpenPypeVersion.
|
||||||
|
|
||||||
|
"""
|
||||||
|
installed_version = OpenPypeVersion.get_installed_version()
|
||||||
if isinstance(version, str):
|
if isinstance(version, str):
|
||||||
version = OpenPypeVersion(version=version)
|
version = OpenPypeVersion(version=version)
|
||||||
|
|
||||||
installed_version = OpenPypeVersion.get_installed_version()
|
|
||||||
if installed_version == version:
|
if installed_version == version:
|
||||||
return installed_version
|
return installed_version
|
||||||
|
|
||||||
|
|
@@ -1107,7 +1174,18 @@ class BootstrapRepos:
        return None

    @staticmethod
-    def find_latest_openpype_version(staging):
+    def find_latest_openpype_version(
+            staging: bool
+    ) -> Union[OpenPypeVersion, None]:
+        """Find the latest available OpenPype version in all locations.
+
+        Args:
+            staging (bool): True to look for staging versions.
+
+        Returns:
+            Latest OpenPype version or None if nothing was found.
+
+        """
        installed_version = OpenPypeVersion.get_installed_version()
        local_versions = OpenPypeVersion.get_local_versions(
            staging=staging
@@ -1138,7 +1216,8 @@ class BootstrapRepos:
            self,
            openpype_path: Union[Path, str] = None,
            staging: bool = False,
-            include_zips: bool = False) -> Union[List[OpenPypeVersion], None]:
+            include_zips: bool = False
+    ) -> Union[List[OpenPypeVersion], None]:
        """Get ordered dict of detected OpenPype version.

        Resolution order for OpenPype is following:
@@ -1172,30 +1251,38 @@ class BootstrapRepos:
                ("Finding OpenPype in non-filesystem locations is"
                 " not implemented yet."))

-        dir_to_search = self.data_dir
-        user_versions = self.get_openpype_versions(self.data_dir, staging)
-        # if we have openpype_path specified, search only there.
+        # if checks below for OPENPYPE_PATH and registry fail, use data_dir
+        # DEPRECATED: lookup in root of this folder is deprecated in favour
+        # of major.minor sub-folders.
+        dirs_to_search = [self.data_dir]
+
        if openpype_path:
-            dir_to_search = openpype_path
+            dirs_to_search = [openpype_path]
+        elif os.getenv("OPENPYPE_PATH") \
+                and Path(os.getenv("OPENPYPE_PATH")).exists():
+            # first try OPENPYPE_PATH and if that is not available,
+            # try registry.
+            dirs_to_search = [Path(os.getenv("OPENPYPE_PATH"))]
        else:
-            if os.getenv("OPENPYPE_PATH"):
-                if Path(os.getenv("OPENPYPE_PATH")).exists():
-                    dir_to_search = Path(os.getenv("OPENPYPE_PATH"))
-            else:
-                try:
-                    registry_dir = Path(
-                        str(self.registry.get_item("openPypePath")))
-                    if registry_dir.exists():
-                        dir_to_search = registry_dir
-                except ValueError:
-                    # nothing found in registry, we'll use data dir
-                    pass
+            try:
+                registry_dir = Path(
+                    str(self.registry.get_item("openPypePath")))
+                if registry_dir.exists():
+                    dirs_to_search = [registry_dir]
+            except ValueError:
+                # nothing found in registry, we'll use data dir
+                pass

-        openpype_versions = self.get_openpype_versions(dir_to_search, staging)
-        openpype_versions += user_versions
+        openpype_versions = []
+        for dir_to_search in dirs_to_search:
+            try:
+                openpype_versions += self.get_openpype_versions(
+                    dir_to_search, staging)
+            except ValueError:
+                # location is invalid, skip it
+                pass

-        # remove zip file version if needed.
        if not include_zips:
            openpype_versions = [
                v for v in openpype_versions if v.path.suffix != ".zip"
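The block above replaces the single `dir_to_search` with an ordered list so that one invalid location no longer poisons the whole lookup. A condensed sketch of the resolution order (explicit argument, then `OPENPYPE_PATH`, then the registry, then the user data dir), with `registry_get` standing in for `self.registry.get_item`:

```python
import os
from pathlib import Path


def resolve_search_dirs(openpype_path, registry_get, data_dir):
    # Default: user data dir (root-level lookup is deprecated in favour
    # of major.minor sub-folders).
    dirs_to_search = [data_dir]
    env_path = os.getenv("OPENPYPE_PATH")
    if openpype_path:
        dirs_to_search = [openpype_path]
    elif env_path and Path(env_path).exists():
        dirs_to_search = [Path(env_path)]
    else:
        try:
            registry_dir = Path(str(registry_get("openPypePath")))
            if registry_dir.exists():
                dirs_to_search = [registry_dir]
        except ValueError:
            pass  # nothing in registry, keep the data dir
    return dirs_to_search
```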
@@ -1308,9 +1395,8 @@ class BootstrapRepos:
            raise ValueError(
                f"version {version} is not associated with any file")

-        destination = self.data_dir / version.path.stem
-        if destination.exists():
-            assert destination.is_dir()
+        destination = self.data_dir / f"{version.major}.{version.minor}" / version.path.stem  # noqa
+        if destination.exists() and destination.is_dir():
            try:
                shutil.rmtree(destination)
            except OSError as e:
@@ -1379,7 +1465,7 @@ class BootstrapRepos:
        else:
            dir_name = openpype_version.path.stem

-        destination = self.data_dir / dir_name
+        destination = self.data_dir / f"{openpype_version.major}.{openpype_version.minor}" / dir_name  # noqa

        # test if destination directory already exist, if so lets delete it.
        if destination.exists() and force:
@@ -1557,9 +1643,10 @@ class BootstrapRepos:
            return False
        return True

-    def get_openpype_versions(self,
-                              openpype_dir: Path,
-                              staging: bool = False) -> list:
+    def get_openpype_versions(
+            self,
+            openpype_dir: Path,
+            staging: bool = False) -> list:
        """Get all detected OpenPype versions in directory.

        Args:
@@ -1574,14 +1661,20 @@ class BootstrapRepos:

        """
        if not openpype_dir.exists() and not openpype_dir.is_dir():
-            raise ValueError("specified directory is invalid")
+            raise ValueError(f"specified directory {openpype_dir} is invalid")

-        _openpype_versions = []
+        openpype_versions = []
        # iterate over directory in first level and find all that might
        # contain OpenPype.
        for item in openpype_dir.iterdir():
+            # if the item is a directory with major.minor version, dive deeper
+            if item.is_dir() and re.match(r"^\d+\.\d+$", item.name):
+                _versions = self.get_openpype_versions(
+                    item, staging=staging)
+                if _versions:
+                    openpype_versions += _versions
+
-            # if file, strip extension, in case of dir not.
+            # if it is a file, strip extension; in case of dir don't.
            name = item.name if item.is_dir() else item.stem
            result = OpenPypeVersion.version_in_str(name)
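With versions filed one level deeper, `get_openpype_versions` now recurses into any `major.minor` directory it meets. A minimal sketch of that walk, using a plain regex test in place of `OpenPypeVersion.version_in_str`:

```python
import re
from pathlib import Path

VERSION_DIR_RE = re.compile(r"^\d+\.\d+$")


def scan_versions(root: Path) -> list:
    found = []
    for item in root.iterdir():
        # dive one level into "3.14" style sub-folders
        if item.is_dir() and VERSION_DIR_RE.match(item.name):
            found += scan_versions(item)
        # for files strip the ".zip" extension before parsing the name
        name = item.name if item.is_dir() else item.stem
        if re.search(r"\d+\.\d+\.\d+", name):
            found.append(item)
    return sorted(found)
```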
@@ -1601,12 +1694,12 @@ class BootstrapRepos:

                detected_version.path = item
                if staging and detected_version.is_staging():
-                    _openpype_versions.append(detected_version)
+                    openpype_versions.append(detected_version)

                if not staging and not detected_version.is_staging():
-                    _openpype_versions.append(detected_version)
+                    openpype_versions.append(detected_version)

-        return sorted(_openpype_versions)
+        return sorted(openpype_versions)


class OpenPypeVersionExists(Exception):
@@ -388,8 +388,11 @@ class InstallDialog(QtWidgets.QDialog):
        install_thread.start()

    def _installation_finished(self):
+        # TODO we should find out why status can be set to 'None'?
+        # - 'InstallThread.run' should handle all cases so not sure where
+        #   that comes from
        status = self._install_thread.result()
-        if status >= 0:
+        if status is not None and status >= 0:
            self._update_progress(100)
            QtWidgets.QApplication.processEvents()
            self.done(3)
@@ -62,7 +62,7 @@ class InstallThread(QThread):
            progress_callback=self.set_progress, message=self.message)
        local_version = OpenPypeVersion.get_installed_version_str()

-        # if user did entered nothing, we install OpenPype from local version.
+        # if user did enter nothing, we install OpenPype from local version.
        # zip content of `repos`, copy it to user data dir and append
        # version to it.
        if not self._path:
@@ -93,6 +93,23 @@ class InstallThread(QThread):
            detected = bs.find_openpype(include_zips=True)

            if detected:
+                if not OpenPypeVersion.get_installed_version().is_compatible(
+                        detected[-1]):
+                    self.message.emit((
+                        f"Latest detected version {detected[-1]} "
+                        "is not compatible with the currently running "
+                        f"{local_version}"
+                    ), True)
+                    self.message.emit((
+                        "Filtering detected versions to compatible ones..."
+                    ), False)
+
+                detected = [
+                    version for version in detected
+                    if version.is_compatible(
+                        OpenPypeVersion.get_installed_version())
+                ]
+
                if OpenPypeVersion(
                        version=local_version, path=Path()) < detected[-1]:
                    self.message.emit((
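The thread now warns when the newest detected version cannot run on the current build and narrows the candidate list before the version comparison. A sketch of that filtering step; the exact compatibility rule lives in `OpenPypeVersion.is_compatible` and is assumed here to be "same major.minor":

```python
def filter_compatible(detected, installed):
    # Assumed rule: a version is compatible with the installed build
    # when major and minor match (the real check is is_compatible()).
    def is_compatible(version, other):
        return (version.major, version.minor) == (other.major, other.minor)

    return [
        version for version in detected
        if is_compatible(version, installed)
    ]
```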
@@ -21,6 +21,11 @@ class OpenPypeVersionNotFound(Exception):
    pass


+class OpenPypeVersionIncompatible(Exception):
+    """OpenPype version is not compatible with the installed one (build)."""
+    pass
+
+
def should_add_certificate_path_to_mongo_url(mongo_url):
    """Check if should add ca certificate to mongo url.
@@ -1,42 +1,75 @@
-# absolute_import is needed to counter the `module has no cmds error` in Maya
-from __future__ import absolute_import
+import warnings
+import functools

import pyblish.api


-def get_errored_instances_from_context(context):
-
-    instances = list()
-    for result in context.data["results"]:
-        if result["instance"] is None:
-            # When instance is None we are on the "context" result
-            continue
-
-        if result["error"]:
-            instances.append(result["instance"])
-
-    return instances
-
-
-def get_errored_plugins_from_data(context):
-    """Get all failed validation plugins
-
-    Args:
-        context (object):
-
-    Returns:
-        list of plugins which failed during validation
-
-    """
-
-    plugins = list()
-    results = context.data.get("results", [])
-    for result in results:
-        if result["success"] is True:
-            continue
-        plugins.append(result["plugin"])
-
-    return plugins
+class ActionDeprecatedWarning(DeprecationWarning):
+    pass
+
+
+def deprecated(new_destination):
+    """Mark functions as deprecated.
+
+    It will result in a warning being emitted when the function is used.
+    """
+
+    func = None
+    if callable(new_destination):
+        func = new_destination
+        new_destination = None
+
+    def _decorator(decorated_func):
+        if new_destination is None:
+            warning_message = (
+                " Please check content of deprecated function to figure out"
+                " possible replacement."
+            )
+        else:
+            warning_message = " Please replace your usage with '{}'.".format(
+                new_destination
+            )
+
+        @functools.wraps(decorated_func)
+        def wrapper(*args, **kwargs):
+            warnings.simplefilter("always", ActionDeprecatedWarning)
+            warnings.warn(
+                (
+                    "Call to deprecated function '{}'"
+                    "\nFunction was moved or removed.{}"
+                ).format(decorated_func.__name__, warning_message),
+                category=ActionDeprecatedWarning,
+                stacklevel=4
+            )
+            return decorated_func(*args, **kwargs)
+        return wrapper
+
+    if func is None:
+        return _decorator
+    return _decorator(func)
+
+
+@deprecated("openpype.pipeline.publish.get_errored_instances_from_context")
+def get_errored_instances_from_context(context):
+    """
+    Deprecated:
+        Since 3.14.* will be removed in 3.16.* or later.
+    """
+
+    from openpype.pipeline.publish import get_errored_instances_from_context
+
+    return get_errored_instances_from_context(context)
+
+
+@deprecated("openpype.pipeline.publish.get_errored_plugins_from_context")
+def get_errored_plugins_from_data(context):
+    """
+    Deprecated:
+        Since 3.14.* will be removed in 3.16.* or later.
+    """
+
+    from openpype.pipeline.publish import get_errored_plugins_from_context
+
+    return get_errored_plugins_from_context(context)


class RepairAction(pyblish.api.Action):
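The `deprecated` decorator above supports both bare and parametrized use because it first checks whether its single argument is callable. A short usage sketch (hypothetical `old_helper` names), assuming the decorator from the hunk above is in scope:

```python
@deprecated("mypackage.new_helper")
def old_helper():
    # Emits ActionDeprecatedWarning pointing at 'mypackage.new_helper',
    # then runs as before.
    return 42


@deprecated
def other_old_helper():
    # Bare form: same warning, with the generic "check content of
    # deprecated function" message instead of a replacement path.
    return 43
```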
@@ -45,6 +78,13 @@ class RepairAction(pyblish.api.Action):
    To process the repairing this requires a static `repair(instance)` method
    is available on the plugin.

+    Deprecated:
+        'RepairAction' and 'RepairContextAction' were moved to
+        'openpype.pipeline.publish' please change your imports.
+        There is no "reasonable" way how to mark these classes as deprecated
+        to show warning of wrong import. Deprecated since 3.14.* will be
+        removed in 3.16.*
+
    """
    label = "Repair"
    on = "failed"  # This action is only available on a failed plug-in
@@ -71,6 +111,13 @@ class RepairContextAction(pyblish.api.Action):
    To process the repairing this requires a static `repair(instance)` method
    is available on the plugin.

+    Deprecated:
+        'RepairAction' and 'RepairContextAction' were moved to
+        'openpype.pipeline.publish' please change your imports.
+        There is no "reasonable" way how to mark these classes as deprecated
+        to show warning of wrong import. Deprecated since 3.14.* will be
+        removed in 3.16.*
+
    """
    label = "Repair"
    on = "failed"  # This action is only available on a failed plug-in
@@ -11,7 +11,6 @@ from .lib import (
    PypeLogger,
    Logger,
    Anatomy,
-    config,
    execute,
    run_subprocess,
    version_up,
@@ -49,7 +48,6 @@ from .plugin import (
    ValidateContentsOrder,
    ValidateSceneOrder,
    ValidateMeshOrder,
-    ValidationException
)

# temporary fix, might
@@ -73,7 +71,6 @@ __all__ = [
    "PypeLogger",
    "Logger",
    "Anatomy",
-    "config",
    "execute",
    "get_default_components",
    "ApplicationManager",
@@ -94,8 +91,6 @@ __all__ = [
    "RepairAction",
    "RepairContextAction",

-    "ValidationException",
-
    # get contextual data
    "version_up",
    "get_asset",
@@ -29,8 +29,14 @@ def main(ctx):

    It wraps different commands together.
    """

    if ctx.invoked_subcommand is None:
-        ctx.invoke(tray)
+        # Print help if headless mode is used
+        if os.environ.get("OPENPYPE_HEADLESS_MODE") == "1":
+            print(ctx.get_help())
+            sys.exit(0)
+        else:
+            ctx.invoke(tray)


@main.command()
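The tray is a Qt UI, so invoking it with no subcommand in headless mode would fail; the new branch prints help and exits instead. A self-contained click sketch of the same pattern (`run_tray` is a hypothetical stand-in for `ctx.invoke(tray)`):

```python
import os
import sys

import click


def run_tray():
    # Hypothetical stand-in for ctx.invoke(tray).
    print("starting tray...")


@click.group(invoke_without_command=True)
@click.pass_context
def main(ctx):
    if ctx.invoked_subcommand is None:
        if os.environ.get("OPENPYPE_HEADLESS_MODE") == "1":
            # No UI available: show help instead of starting the tray.
            print(ctx.get_help())
            sys.exit(0)
        run_tray()
```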
@@ -40,18 +46,6 @@ def settings(dev):
    PypeCommands().launch_settings_gui(dev)


-@main.command()
-def standalonepublisher():
-    """Show Pype Standalone publisher UI."""
-    PypeCommands().launch_standalone_publisher()
-
-
-@main.command()
-def traypublisher():
-    """Show new OpenPype Standalone publisher UI."""
-    PypeCommands().launch_traypublisher()
-
-
@main.command()
def tray():
    """Launch pype tray.
@@ -289,6 +283,13 @@ def projectmanager():
    PypeCommands().launch_project_manager()


+@main.command(context_settings={"ignore_unknown_options": True})
+def publish_report_viewer():
+    from openpype.tools.publisher.publish_report_viewer import main
+
+    sys.exit(main())
+
+
@main.command()
@click.argument("output_path")
@click.option("--project", help="Define project context")
@@ -443,3 +444,26 @@ def interactive():
        __version__, sys.version, sys.platform
    )
    code.interact(banner)
+
+
+@main.command()
+@click.option("--build", help="Print only build version",
+              is_flag=True, default=False)
+def version(build):
+    """Print OpenPype version."""
+
+    from openpype.version import __version__
+    from igniter.bootstrap_repos import BootstrapRepos, OpenPypeVersion
+    from pathlib import Path
+    import os
+
+    if getattr(sys, 'frozen', False):
+        local_version = BootstrapRepos.get_version(
+            Path(os.getenv("OPENPYPE_ROOT")))
+    else:
+        local_version = OpenPypeVersion.get_installed_version_str()
+
+    if build:
+        print(local_version)
+        return
+    print(f"{__version__} (booted: {local_version})")
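The new `version` command branches on `sys.frozen`, the attribute PyInstaller-style builds set on the `sys` module: frozen builds read their version from files next to the executable, source checkouts from the package itself. A tiny sketch of that dispatch:

```python
import sys


def booted_version(get_build_version, get_installed_version):
    # Frozen (built) executables carry their version beside the binary;
    # a live checkout reports the version of the installed package.
    if getattr(sys, "frozen", False):
        return get_build_version()
    return get_installed_version()
```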
@@ -45,6 +45,17 @@ from .entities import (
    get_workfile_info,
)

+from .entity_links import (
+    get_linked_asset_ids,
+    get_linked_assets,
+    get_linked_representation_id,
+)
+
+from .operations import (
+    create_project,
+)
+
+
__all__ = (
    "OpenPypeMongoConnection",
@@ -88,4 +99,10 @@ __all__ = (
    "get_thumbnail_id_from_source",

    "get_workfile_info",
+
+    "get_linked_asset_ids",
+    "get_linked_assets",
+    "get_linked_representation_id",
+
+    "create_project",
)
@@ -6,38 +6,15 @@ that has project name as a context (e.g. on 'ProjectEntity'?).
+ We will need more specific functions doing very specific queries really fast.
"""

-import os
+import re
import collections

import six
from bson.objectid import ObjectId

-from .mongo import OpenPypeMongoConnection
-
-
-def _get_project_database():
-    db_name = os.environ.get("AVALON_DB") or "avalon"
-    return OpenPypeMongoConnection.get_mongo_client()[db_name]
-
-
-def get_project_connection(project_name):
-    """Direct access to mongo collection.
-
-    We're trying to avoid using direct access to mongo. This should be used
-    only for Create, Update and Remove operations until there are implemented
-    api calls for that.
-
-    Args:
-        project_name(str): Project name for which collection should be
-            returned.
-
-    Returns:
-        pymongo.Collection: Collection related to passed project.
-    """
-
-    if not project_name:
-        raise ValueError("Invalid project name {}".format(str(project_name)))
-    return _get_project_database()[project_name]
+from .mongo import get_project_database, get_project_connection
+
+PatternType = type(re.compile(""))


def _prepare_fields(fields, required_fields=None):
@@ -57,22 +34,42 @@ def _prepare_fields(fields, required_fields=None):
    return output


-def _convert_id(in_id):
+def convert_id(in_id):
+    """Helper function for conversion of id from string to ObjectId.
+
+    Args:
+        in_id (Union[str, ObjectId, Any]): Entity id that should be converted
+            to right type for queries.
+
+    Returns:
+        Union[ObjectId, Any]: Converted ids to ObjectId or in type.
+    """
+
    if isinstance(in_id, six.string_types):
        return ObjectId(in_id)
    return in_id


-def _convert_ids(in_ids):
+def convert_ids(in_ids):
+    """Helper function for conversion of ids from string to ObjectId.
+
+    Args:
+        in_ids (Iterable[Union[str, ObjectId, Any]]): List of entity ids that
+            should be converted to right type for queries.
+
+    Returns:
+        List[ObjectId]: Converted ids to ObjectId.
+    """
+
    _output = set()
    for in_id in in_ids:
        if in_id is not None:
-            _output.add(_convert_id(in_id))
+            _output.add(convert_id(in_id))
    return list(_output)


def get_projects(active=True, inactive=False, fields=None):
-    mongodb = _get_project_database()
+    mongodb = get_project_database()
    for project_name in mongodb.collection_names():
        if project_name in ("system.indexes",):
            continue
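`convert_id` and `convert_ids` lose their underscore because other client modules (the new `entity_links` among them) import them. A small behavioural sketch:

```python
from bson.objectid import ObjectId

from openpype.client.entities import convert_id, convert_ids

# Strings become ObjectId, anything else passes through untouched.
assert isinstance(convert_id("5f4d2a1b9c6e8d7a3b2c1d0e"), ObjectId)
assert convert_id(None) is None

# convert_ids de-duplicates and drops None values.
ids = convert_ids([
    "5f4d2a1b9c6e8d7a3b2c1d0e",
    ObjectId("5f4d2a1b9c6e8d7a3b2c1d0e"),
    None,
])
assert ids == [ObjectId("5f4d2a1b9c6e8d7a3b2c1d0e")]
```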
@@ -83,7 +80,7 @@ def get_projects(active=True, inactive=False, fields=None):
        yield project_doc


-def get_project(project_name, active=True, inactive=False, fields=None):
+def get_project(project_name, active=True, inactive=True, fields=None):
    # Skip if both are disabled
    if not active and not inactive:
        return None
@@ -140,7 +137,7 @@ def get_asset_by_id(project_name, asset_id, fields=None):
        None: Asset was not found by id.
    """

-    asset_id = _convert_id(asset_id)
+    asset_id = convert_id(asset_id)
    if not asset_id:
        return None

@@ -221,7 +218,7 @@ def _get_assets(
    query_filter = {"type": {"$in": asset_types}}

    if asset_ids is not None:
-        asset_ids = _convert_ids(asset_ids)
+        asset_ids = convert_ids(asset_ids)
        if not asset_ids:
            return []
        query_filter["_id"] = {"$in": asset_ids}

@@ -232,7 +229,7 @@ def _get_assets(
        query_filter["name"] = {"$in": list(asset_names)}

    if parent_ids is not None:
-        parent_ids = _convert_ids(parent_ids)
+        parent_ids = convert_ids(parent_ids)
        if not parent_ids:
            return []
        query_filter["data.visualParent"] = {"$in": parent_ids}

@@ -332,7 +329,7 @@ def get_asset_ids_with_subsets(project_name, asset_ids=None):
        "type": "subset"
    }
    if asset_ids is not None:
-        asset_ids = _convert_ids(asset_ids)
+        asset_ids = convert_ids(asset_ids)
        if not asset_ids:
            return []
        subset_query["parent"] = {"$in": asset_ids}

@@ -372,7 +369,7 @@ def get_subset_by_id(project_name, subset_id, fields=None):
        Dict: Subset document which can be reduced to specified 'fields'.
    """

-    subset_id = _convert_id(subset_id)
+    subset_id = convert_id(subset_id)
    if not subset_id:
        return None

@@ -392,14 +389,15 @@ def get_subset_by_name(project_name, subset_name, asset_id, fields=None):
            returned if 'None' is passed.

    Returns:
-        None: If subset with specified filters was not found.
-        Dict: Subset document which can be reduced to specified 'fields'.
-    """
+        Union[None, Dict[str, Any]]: None if subset with specified filters was
+            not found or dict subset document which can be reduced to
+            specified 'fields'.
+
+    """
    if not subset_name:
        return None

-    asset_id = _convert_id(asset_id)
+    asset_id = convert_id(asset_id)
    if not asset_id:
        return None

@@ -453,13 +451,13 @@ def get_subsets(
    query_filter = {"type": {"$in": subset_types}}

    if asset_ids is not None:
-        asset_ids = _convert_ids(asset_ids)
+        asset_ids = convert_ids(asset_ids)
        if not asset_ids:
            return []
        query_filter["parent"] = {"$in": asset_ids}

    if subset_ids is not None:
-        subset_ids = _convert_ids(subset_ids)
+        subset_ids = convert_ids(subset_ids)
        if not subset_ids:
            return []
        query_filter["_id"] = {"$in": subset_ids}

@@ -474,7 +472,7 @@ def get_subsets(
    for asset_id, names in names_by_asset_ids.items():
        if asset_id and names:
            or_query.append({
-                "parent": _convert_id(asset_id),
+                "parent": convert_id(asset_id),
                "name": {"$in": list(names)}
            })
    if not or_query:
@@ -535,7 +533,7 @@ def get_version_by_id(project_name, version_id, fields=None):
        Dict: Version document which can be reduced to specified 'fields'.
    """

-    version_id = _convert_id(version_id)
+    version_id = convert_id(version_id)
    if not version_id:
        return None

@@ -562,7 +560,7 @@ def get_version_by_name(project_name, version, subset_id, fields=None):
        Dict: Version document which can be reduced to specified 'fields'.
    """

-    subset_id = _convert_id(subset_id)
+    subset_id = convert_id(subset_id)
    if not subset_id:
        return None

@@ -592,7 +590,7 @@ def version_is_latest(project_name, version_id):
        bool: True if is latest version from subset else False.
    """

-    version_id = _convert_id(version_id)
+    version_id = convert_id(version_id)
    if not version_id:
        return False
    version_doc = get_version_by_id(

@@ -635,13 +633,13 @@ def _get_versions(
    query_filter = {"type": {"$in": version_types}}

    if subset_ids is not None:
-        subset_ids = _convert_ids(subset_ids)
+        subset_ids = convert_ids(subset_ids)
        if not subset_ids:
            return []
        query_filter["parent"] = {"$in": subset_ids}

    if version_ids is not None:
-        version_ids = _convert_ids(version_ids)
+        version_ids = convert_ids(version_ids)
        if not version_ids:
            return []
        query_filter["_id"] = {"$in": version_ids}

@@ -715,7 +713,7 @@ def get_hero_version_by_subset_id(project_name, subset_id, fields=None):
        Dict: Hero version entity data.
    """

-    subset_id = _convert_id(subset_id)
+    subset_id = convert_id(subset_id)
    if not subset_id:
        return None

@@ -745,7 +743,7 @@ def get_hero_version_by_id(project_name, version_id, fields=None):
        Dict: Hero version entity data.
    """

-    version_id = _convert_id(version_id)
+    version_id = convert_id(version_id)
    if not version_id:
        return None

@@ -811,7 +809,7 @@ def get_output_link_versions(project_name, version_id, fields=None):
        links for passed version.
    """

-    version_id = _convert_id(version_id)
+    version_id = convert_id(version_id)
    if not version_id:
        return []

@@ -819,7 +817,7 @@ def get_output_link_versions(project_name, version_id, fields=None):
    # Does make sense to look for hero versions?
    query_filter = {
        "type": "version",
-        "data.inputLinks.input": version_id
+        "data.inputLinks.id": version_id
    }
    return conn.find(query_filter, _prepare_fields(fields))

@@ -837,7 +835,7 @@ def get_last_versions(project_name, subset_ids, fields=None):
        dict[ObjectId, int]: Key is subset id and value is last version name.
    """

-    subset_ids = _convert_ids(subset_ids)
+    subset_ids = convert_ids(subset_ids)
    if not subset_ids:
        return {}

@@ -923,7 +921,7 @@ def get_last_version_by_subset_id(project_name, subset_id, fields=None):
        Dict: Version document which can be reduced to specified 'fields'.
    """

-    subset_id = _convert_id(subset_id)
+    subset_id = convert_id(subset_id)
    if not subset_id:
        return None
@@ -996,7 +994,7 @@ def get_representation_by_id(project_name, representation_id, fields=None):
        "type": {"$in": repre_types}
    }
    if representation_id is not None:
-        query_filter["_id"] = _convert_id(representation_id)
+        query_filter["_id"] = convert_id(representation_id)

    conn = get_project_connection(project_name)

@@ -1021,7 +1019,7 @@ def get_representation_by_name(
            to specified 'fields'.
    """

-    version_id = _convert_id(version_id)
+    version_id = convert_id(version_id)
    if not version_id or not representation_name:
        return None
    repre_types = ["representation", "archived_representations"]
@@ -1035,17 +1033,70 @@ def get_representation_by_name(
    return conn.find_one(query_filter, _prepare_fields(fields))


+def _flatten_dict(data):
+    flatten_queue = collections.deque()
+    flatten_queue.append(data)
+    output = {}
+    while flatten_queue:
+        item = flatten_queue.popleft()
+        for key, value in item.items():
+            if not isinstance(value, dict):
+                output[key] = value
+                continue
+
+            tmp = {}
+            for subkey, subvalue in value.items():
+                new_key = "{}.{}".format(key, subkey)
+                tmp[new_key] = subvalue
+            flatten_queue.append(tmp)
+    return output
+
+
+def _regex_filters(filters):
+    output = []
+    for key, value in filters.items():
+        regexes = []
+        a_values = []
+        if isinstance(value, PatternType):
+            regexes.append(value)
+        elif isinstance(value, (list, tuple, set)):
+            for item in value:
+                if isinstance(item, PatternType):
+                    regexes.append(item)
+                else:
+                    a_values.append(item)
+        else:
+            a_values.append(value)
+
+        key_filters = []
+        if len(a_values) == 1:
+            key_filters.append({key: a_values[0]})
+        elif a_values:
+            key_filters.append({key: {"$in": a_values}})
+
+        for regex in regexes:
+            key_filters.append({key: {"$regex": regex}})
+
+        if len(key_filters) == 1:
+            output.append(key_filters[0])
+        else:
+            output.append({"$or": key_filters})
+
+    return output
+
+
def _get_representations(
    project_name,
    representation_ids,
    representation_names,
    version_ids,
-    extensions,
+    context_filters,
    names_by_version_ids,
    standard,
    archived,
    fields
):
+    default_output = []
    repre_types = []
    if standard:
        repre_types.append("representation")
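`_flatten_dict` rewrites a nested filter dict into dotted Mongo key paths breadth-first, and `_regex_filters` then turns each key's mix of plain values and compiled patterns into `$in`/`$regex` clauses. A worked example, assuming the two helpers from the hunk above are in scope:

```python
import re

nested = {
    "context": {
        "ext": ["exr", re.compile(r"^dpx$")],
        "subset": "renderMain",
    }
}

flat = _flatten_dict(nested)
# {"context.ext": ["exr", re.compile("^dpx$")],
#  "context.subset": "renderMain"}

filters = _regex_filters(flat)
# [{"$or": [{"context.ext": "exr"},
#           {"context.ext": {"$regex": re.compile("^dpx$")}}]},
#  {"context.subset": "renderMain"}]
```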
@@ -1053,7 +1104,7 @@ def _get_representations(
        repre_types.append("archived_representation")

    if not repre_types:
-        return []
+        return default_output

    if len(repre_types) == 1:
        query_filter = {"type": repre_types[0]}
@@ -1061,38 +1112,62 @@ def _get_representations(
        query_filter = {"type": {"$in": repre_types}}

    if representation_ids is not None:
-        representation_ids = _convert_ids(representation_ids)
+        representation_ids = convert_ids(representation_ids)
        if not representation_ids:
-            return []
+            return default_output
        query_filter["_id"] = {"$in": representation_ids}

    if representation_names is not None:
        if not representation_names:
-            return []
+            return default_output
        query_filter["name"] = {"$in": list(representation_names)}

    if version_ids is not None:
-        version_ids = _convert_ids(version_ids)
+        version_ids = convert_ids(version_ids)
        if not version_ids:
-            return []
+            return default_output
        query_filter["parent"] = {"$in": version_ids}

-    if extensions is not None:
-        if not extensions:
-            return []
-        query_filter["context.ext"] = {"$in": list(extensions)}
+    or_queries = []

    if names_by_version_ids is not None:
        or_query = []
        for version_id, names in names_by_version_ids.items():
            if version_id and names:
                or_query.append({
-                    "parent": _convert_id(version_id),
+                    "parent": convert_id(version_id),
                    "name": {"$in": list(names)}
                })
        if not or_query:
-            return []
-        query_filter["$or"] = or_query
+            return default_output
+        or_queries.append(or_query)
+
+    if context_filters is not None:
+        if not context_filters:
+            return []
+        _flatten_filters = _flatten_dict(context_filters)
+        flatten_filters = {}
+        for key, value in _flatten_filters.items():
+            if not key.startswith("context"):
+                key = "context.{}".format(key)
+            flatten_filters[key] = value
+
+        for item in _regex_filters(flatten_filters):
+            for key, value in item.items():
+                if key != "$or":
+                    query_filter[key] = value
+
+                elif value:
+                    or_queries.append(value)
+
+    if len(or_queries) == 1:
+        query_filter["$or"] = or_queries[0]
+    elif or_queries:
+        and_query = []
+        for or_query in or_queries:
+            if isinstance(or_query, list):
+                or_query = {"$or": or_query}
+            and_query.append(or_query)
+        query_filter["$and"] = and_query

    conn = get_project_connection(project_name)
@@ -1104,7 +1179,7 @@ def get_representations(
    representation_ids=None,
    representation_names=None,
    version_ids=None,
-    extensions=None,
+    context_filters=None,
    names_by_version_ids=None,
    archived=False,
    standard=True,

@@ -1122,8 +1197,8 @@ def get_representations(
            as filter. Filter ignored if 'None' is passed.
        version_ids (Iterable[str]): Subset ids used as parent filter. Filter
            ignored if 'None' is passed.
-        extensions (Iterable[str]): Filter by extension of main representation
-            file (without dot).
+        context_filters (Dict[str, List[str, PatternType]]): Filter by
+            representation context fields.
        names_by_version_ids (dict[ObjectId, list[str]]): Complex filtering
            using version ids and list of names under the version.
        archived (bool): Output will also contain archived representations.
@@ -1139,7 +1214,7 @@ def get_representations(
        representation_ids=representation_ids,
        representation_names=representation_names,
        version_ids=version_ids,
-        extensions=extensions,
+        context_filters=context_filters,
        names_by_version_ids=names_by_version_ids,
        standard=True,
        archived=archived,
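With `extensions` gone, the old extension filter becomes a context filter on `ext`, and compiled patterns are accepted anywhere in the context. A hedged usage sketch, assuming `get_representations` is re-exported from `openpype.client` like the other entity getters:

```python
import re

from openpype.client import get_representations

# Former: get_representations(project_name, extensions=["exr", "mov"])
# Keys without a "context." prefix are nested under it automatically.
repres = get_representations(
    "my_project",
    context_filters={"ext": ["exr", "mov"]},
)

# Compiled patterns become $regex clauses.
beauty = get_representations(
    "my_project",
    context_filters={"subset": re.compile(r"^render.*Beauty$")},
)
```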
@@ -1152,7 +1227,7 @@ def get_archived_representations(
    representation_ids=None,
    representation_names=None,
    version_ids=None,
-    extensions=None,
+    context_filters=None,
    names_by_version_ids=None,
    fields=None
):

@@ -1168,8 +1243,8 @@ def get_archived_representations(
            as filter. Filter ignored if 'None' is passed.
        version_ids (Iterable[str]): Subset ids used as parent filter. Filter
            ignored if 'None' is passed.
-        extensions (Iterable[str]): Filter by extension of main representation
-            file (without dot).
+        context_filters (Dict[str, List[str, PatternType]]): Filter by
+            representation context fields.
        names_by_version_ids (dict[ObjectId, List[str]]): Complex filtering
            using version ids and list of names under the version.
        fields (Iterable[str]): Fields that should be returned. All fields are

@@ -1184,7 +1259,7 @@ def get_archived_representations(
        representation_ids=representation_ids,
        representation_names=representation_names,
        version_ids=version_ids,
-        extensions=extensions,
+        context_filters=context_filters,
        names_by_version_ids=names_by_version_ids,
        standard=False,
        archived=True,
@@ -1207,58 +1282,64 @@ def get_representations_parents(project_name, representations):
        dict[ObjectId, tuple]: Parents by representation id.
    """

-    repres_by_version_id = collections.defaultdict(list)
-    versions_by_version_id = {}
-    versions_by_subset_id = collections.defaultdict(list)
-    subsets_by_subset_id = {}
-    subsets_by_asset_id = collections.defaultdict(list)
+    repre_docs_by_version_id = collections.defaultdict(list)
+    version_docs_by_version_id = {}
+    version_docs_by_subset_id = collections.defaultdict(list)
+    subset_docs_by_subset_id = {}
+    subset_docs_by_asset_id = collections.defaultdict(list)
    output = {}
-    for representation in representations:
-        repre_id = representation["_id"]
+    for repre_doc in representations:
+        repre_id = repre_doc["_id"]
+        version_id = repre_doc["parent"]
        output[repre_id] = (None, None, None, None)
-        version_id = representation["parent"]
-        repres_by_version_id[version_id].append(representation)
+        repre_docs_by_version_id[version_id].append(repre_doc)

-    versions = get_versions(
-        project_name, version_ids=repres_by_version_id.keys()
+    version_docs = get_versions(
+        project_name,
+        version_ids=repre_docs_by_version_id.keys(),
+        hero=True
    )
-    for version in versions:
-        version_id = version["_id"]
-        subset_id = version["parent"]
-        versions_by_version_id[version_id] = version
-        versions_by_subset_id[subset_id].append(version)
+    for version_doc in version_docs:
+        version_id = version_doc["_id"]
+        subset_id = version_doc["parent"]
+        version_docs_by_version_id[version_id] = version_doc
+        version_docs_by_subset_id[subset_id].append(version_doc)

-    subsets = get_subsets(
-        project_name, subset_ids=versions_by_subset_id.keys()
+    subset_docs = get_subsets(
+        project_name, subset_ids=version_docs_by_subset_id.keys()
    )
-    for subset in subsets:
-        subset_id = subset["_id"]
-        asset_id = subset["parent"]
-        subsets_by_subset_id[subset_id] = subset
-        subsets_by_asset_id[asset_id].append(subset)
+    for subset_doc in subset_docs:
+        subset_id = subset_doc["_id"]
+        asset_id = subset_doc["parent"]
+        subset_docs_by_subset_id[subset_id] = subset_doc
+        subset_docs_by_asset_id[asset_id].append(subset_doc)

-    assets = get_assets(project_name, asset_ids=subsets_by_asset_id.keys())
-    assets_by_id = {
-        asset["_id"]: asset
-        for asset in assets
+    asset_docs = get_assets(
+        project_name, asset_ids=subset_docs_by_asset_id.keys()
+    )
+    asset_docs_by_id = {
+        asset_doc["_id"]: asset_doc
+        for asset_doc in asset_docs
    }

-    project = get_project(project_name)
+    project_doc = get_project(project_name)

-    for version_id, representations in repres_by_version_id.items():
-        asset = None
-        subset = None
-        version = versions_by_version_id.get(version_id)
-        if version:
-            subset_id = version["parent"]
-            subset = subsets_by_subset_id.get(subset_id)
-            if subset:
-                asset_id = subset["parent"]
-                asset = assets_by_id.get(asset_id)
+    for version_id, repre_docs in repre_docs_by_version_id.items():
+        asset_doc = None
+        subset_doc = None
+        version_doc = version_docs_by_version_id.get(version_id)
+        if version_doc:
+            subset_id = version_doc["parent"]
+            subset_doc = subset_docs_by_subset_id.get(subset_id)
+            if subset_doc:
+                asset_id = subset_doc["parent"]
+                asset_doc = asset_docs_by_id.get(asset_id)

-        for representation in representations:
-            repre_id = representation["_id"]
-            output[repre_id] = (version, subset, asset, project)
+        for repre_doc in repre_docs:
+            repre_id = repre_doc["_id"]
+            output[repre_id] = (
+                version_doc, subset_doc, asset_doc, project_doc
+            )
    return output
@@ -1303,7 +1384,7 @@ def get_thumbnail_id_from_source(project_name, src_type, src_id):
    if not src_type or not src_id:
        return None

-    query_filter = {"_id": _convert_id(src_id)}
+    query_filter = {"_id": convert_id(src_id)}

    conn = get_project_connection(project_name)
    src_doc = conn.find_one(query_filter, {"data.thumbnail_id"})

@@ -1330,7 +1411,7 @@ def get_thumbnails(project_name, thumbnail_ids, fields=None):
    """

    if thumbnail_ids:
-        thumbnail_ids = _convert_ids(thumbnail_ids)
+        thumbnail_ids = convert_ids(thumbnail_ids)

    if not thumbnail_ids:
        return []

@@ -1358,7 +1439,7 @@ def get_thumbnail(project_name, thumbnail_id, fields=None):

    if not thumbnail_id:
        return None
-    query_filter = {"type": "thumbnail", "_id": _convert_id(thumbnail_id)}
+    query_filter = {"type": "thumbnail", "_id": convert_id(thumbnail_id)}
    conn = get_project_connection(project_name)
    return conn.find_one(query_filter, _prepare_fields(fields))

@@ -1386,7 +1467,7 @@ def get_workfile_info(

    query_filter = {
        "type": "workfile",
-        "parent": _convert_id(asset_id),
+        "parent": convert_id(asset_id),
        "task_name": task_name,
        "filename": filename
    }
@@ -1397,7 +1478,7 @@
    """
    ## Custom data storage:
    - Settings - OP settings overrides and local settings
-    - Logging - logs from PypeLogger
+    - Logging - logs from Logger
    - Webpublisher - jobs
    - Ftrack - events
    - Maya - Shaders
241
openpype/client/entity_links.py
Normal file
@ -0,0 +1,241 @@
|
||||||
|
from .mongo import get_project_connection
|
||||||
|
from .entities import (
|
||||||
|
get_assets,
|
||||||
|
get_asset_by_id,
|
||||||
|
get_version_by_id,
|
||||||
|
get_representation_by_id,
|
||||||
|
convert_id,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def get_linked_asset_ids(project_name, asset_doc=None, asset_id=None):
|
||||||
|
"""Extract linked asset ids from asset document.
|
||||||
|
|
||||||
|
One of asset document or asset id must be passed.
|
||||||
|
|
||||||
|
Note:
|
||||||
|
Asset links now works only from asset to assets.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
asset_doc (dict): Asset document from DB.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
List[Union[ObjectId, str]]: Asset ids of input links.
|
||||||
|
"""
|
||||||
|
|
||||||
|
output = []
|
||||||
|
if not asset_doc and not asset_id:
|
||||||
|
return output
|
||||||
|
|
||||||
|
if not asset_doc:
|
||||||
|
asset_doc = get_asset_by_id(
|
||||||
|
project_name, asset_id, fields=["data.inputLinks"]
|
||||||
|
)
|
||||||
|
|
||||||
|
input_links = asset_doc["data"].get("inputLinks")
|
||||||
|
if not input_links:
|
||||||
|
return output
|
||||||
|
|
||||||
|
for item in input_links:
|
||||||
|
# Backwards compatibility for "_id" key which was replaced with
|
||||||
|
# "id"
|
||||||
|
if "_id" in item:
|
||||||
|
link_id = item["_id"]
|
||||||
|
else:
|
||||||
|
link_id = item["id"]
|
||||||
|
output.append(link_id)
|
||||||
|
return output
|
||||||
|
|
||||||
|
|
||||||
|
def get_linked_assets(
|
||||||
|
project_name, asset_doc=None, asset_id=None, fields=None
|
||||||
|
):
|
||||||
|
"""Return linked assets based on passed asset document.
|
||||||
|
|
||||||
|
One of asset document or asset id must be passed.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
project_name (str): Name of project where to look for queried entities.
|
||||||
|
asset_doc (Dict[str, Any]): Asset document from database.
|
||||||
|
asset_id (Union[ObjectId, str]): Asset id. Can be used instead of
|
||||||
|
asset document.
|
||||||
|
fields (Iterable[str]): Fields that should be returned. All fields are
|
||||||
|
returned if 'None' is passed.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
List[Dict[str, Any]]: Asset documents of input links for passed
|
||||||
|
asset doc.
|
||||||
|
"""
|
||||||
|
|
||||||
|
if not asset_doc:
|
||||||
|
if not asset_id:
|
||||||
|
return []
|
||||||
|
asset_doc = get_asset_by_id(
|
||||||
|
project_name,
|
||||||
|
asset_id,
|
||||||
|
fields=["data.inputLinks"]
|
||||||
|
)
|
||||||
|
if not asset_doc:
|
||||||
|
return []
|
||||||
|
|
||||||
|
link_ids = get_linked_asset_ids(project_name, asset_doc=asset_doc)
|
||||||
|
if not link_ids:
|
||||||
|
return []
|
||||||
|
|
||||||
|
return list(get_assets(project_name, asset_ids=link_ids, fields=fields))
|
||||||
|
|
||||||
|
|
||||||
|
def get_linked_representation_id(
    project_name, repre_doc=None, repre_id=None, link_type=None, max_depth=None
):
    """Returns list of linked ids of particular type (if provided).

    One of representation document or representation id must be passed.

    Note:
        Representation links now work only from representation through
        version back to representations.

    Args:
        project_name (str): Name of project where to look for links.
        repre_doc (Dict[str, Any]): Representation document.
        repre_id (Union[ObjectId, str]): Representation id.
        link_type (str): Type of link (e.g. 'reference', ...).
        max_depth (int): Limit recursion level. Default: 0

    Returns:
        List[ObjectId]: Linked representation ids.
    """

    if repre_doc:
        repre_id = repre_doc["_id"]

    if repre_id:
        repre_id = convert_id(repre_id)

    if not repre_id and not repre_doc:
        return []

    version_id = None
    if repre_doc:
        version_id = repre_doc.get("parent")

    if not version_id:
        repre_doc = get_representation_by_id(
            project_name, repre_id, fields=["parent"]
        )
        version_id = repre_doc["parent"]

    if not version_id:
        return []

    version_doc = get_version_by_id(
        project_name, version_id, fields=["type", "version_id"]
    )
    if version_doc["type"] == "hero_version":
        version_id = version_doc["version_id"]

    if max_depth is None:
        max_depth = 0

    match = {
        "_id": version_id,
        # Links are not stored to hero versions at this moment so the filter
        #   is limited to just versions
        "type": "version"
    }

    graph_lookup = {
        "from": project_name,
        "startWith": "$data.inputLinks.id",
        "connectFromField": "data.inputLinks.id",
        "connectToField": "_id",
        "as": "outputs_recursive",
        "depthField": "depth"
    }
    if max_depth != 0:
        # We offset by -1 since 0 basically means no recursion,
        #   but the recursion only happens after the initial lookup
        #   for outputs.
        graph_lookup["maxDepth"] = max_depth - 1

    query_pipeline = [
        # Match
        {"$match": match},
        # Recursive graph lookup for inputs
        {"$graphLookup": graph_lookup}
    ]

    conn = get_project_connection(project_name)
    result = conn.aggregate(query_pipeline)
    referenced_version_ids = _process_referenced_pipeline_result(
        result, link_type
    )
    if not referenced_version_ids:
        return []

    ref_ids = conn.distinct(
        "_id",
        filter={
            "parent": {"$in": list(referenced_version_ids)},
            "type": "representation"
        }
    )

    return list(ref_ids)


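# --- Editor's note: illustrative usage sketch, not part of the commit. ---
# The project name, representation id and import location are hypothetical
# assumptions. With the default 'max_depth' (None or 0) the $graphLookup
# recursion is not limited; any other value limits how many link hops from
# the starting version are followed.
def _example_linked_representations():
    from bson.objectid import ObjectId

    repre_id = ObjectId("62a1b2c3d4e5f60718293a4b")  # hypothetical id
    return get_linked_representation_id(
        "demo_project",
        repre_id=repre_id,
        link_type="reference",  # keep only 'reference' links
        max_depth=2,
    )

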
def _process_referenced_pipeline_result(result, link_type):
    """Filters result from pipeline for particular link_type.

    Pipeline cannot use link_type directly in a query.

    Returns:
        Set[ObjectId]: Referenced version ids.
    """

    referenced_version_ids = set()
    correctly_linked_ids = set()
    for item in result:
        input_links = item.get("data", {}).get("inputLinks")
        if not input_links:
            continue

        _filter_input_links(
            input_links,
            link_type,
            correctly_linked_ids
        )

        # 'outputs_recursive' is in random order, sort by depth
        outputs_recursive = item.get("outputs_recursive")
        if not outputs_recursive:
            continue

        for output in sorted(outputs_recursive, key=lambda o: o["depth"]):
            output_links = output.get("data", {}).get("inputLinks")
            if not output_links:
                continue

            # Leaf
            if output["_id"] not in correctly_linked_ids:
                continue

            _filter_input_links(
                output_links,
                link_type,
                correctly_linked_ids
            )

            referenced_version_ids.add(output["_id"])

    return referenced_version_ids


def _filter_input_links(input_links, link_type, correctly_linked_ids):
    for input_link in input_links:
        if link_type and input_link["type"] != link_type:
            continue

        link_id = input_link.get("id") or input_link.get("_id")
        if link_id is not None:
            correctly_linked_ids.add(link_id)

@@ -208,3 +208,28 @@ class OpenPypeMongoConnection:
             mongo_url, time.time() - t1
         ))
         return mongo_client
+
+
+def get_project_database():
+    db_name = os.environ.get("AVALON_DB") or "avalon"
+    return OpenPypeMongoConnection.get_mongo_client()[db_name]
+
+
+def get_project_connection(project_name):
+    """Direct access to mongo collection.
+
+    We're trying to avoid using direct access to mongo. This should be used
+    only for Create, Update and Remove operations until there are
+    implemented api calls for that.
+
+    Args:
+        project_name (str): Project name for which collection should be
+            returned.
+
+    Returns:
+        pymongo.Collection: Collection related to passed project.
+    """
+
+    if not project_name:
+        raise ValueError("Invalid project name {}".format(str(project_name)))
+    return get_project_database()[project_name]
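
# --- Editor's note: illustrative usage sketch, not part of the commit. ---
# Direct collection access should stay limited to create/update/delete
# calls, as the docstring above says. The project name is hypothetical;
# 'count_documents' is standard pymongo Collection API.
def _example_direct_collection_access():
    collection = get_project_connection("demo_project")
    return collection.count_documents({"type": "asset"})
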

39 openpype/client/notes.md Normal file
@@ -0,0 +1,39 @@
# Client functionality

## Reason
Preparation for the OpenPype v4 server. The goal is to remove direct mongo calls in code, to prepare a little bit for a different source of data, and to start thinking about database calls less as mongo calls and more universally. To do so, a simple wrapper around database calls was implemented so that pymongo specific code is not used directly.

The current goal is not to make a universal database model which can easily be replaced with any different source of data, but to get as close to that as possible. The current implementation of OpenPype is too tightly connected to pymongo and its abilities, so we're trying to get closer with long term changes that can be used even in the current state.

## Queries
Query functions don't use the full potential of mongo queries, like very specific queries based on subdictionaries or unknown structures. We try to avoid such calls as much as possible because they probably won't be available in the future. If one is really necessary, a new function can be added, but only if it is reasonable for the overall logic. All query functions were moved to `~/client/entities.py`. Each function has arguments for the available filters and for an optional reduction of returned keys for each entity.
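
A minimal usage sketch (the project and asset names are hypothetical, and the exact filter arguments should be treated as an assumption; `fields` reduces the returned keys as described above):

```python
from openpype.client.entities import get_assets

# Only "_id" and "name" are returned for each matched asset document.
for asset_doc in get_assets(
    "demo_project",
    asset_names=["sh010", "sh020"],
    fields=["_id", "name"],
):
    print(asset_doc["name"])
```
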
## Changes
Changes are a little bit complicated. Mongo has many options for how an update can happen, which had to be reduced; it would also be complicated at this stage to validate values which are created or updated, so there is almost no automation at this point. Changes can be made using operations available in `~/client/operations.py`. Each operation requires a project name and an entity type, but may also require operation specific data.

### Create
Create operations expect already prepared document data. For that purpose there are functions creating skeletal structures of documents (they do not fill all required data); except for `_id`, all data should be right. Existence of the entity is not validated, so if the same creation operation is sent n times it will create the entity n times, which can cause issues.
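
A minimal create sketch (names and ids are hypothetical; the helpers are the ones introduced in `operations.py` in this commit):

```python
from bson.objectid import ObjectId

from openpype.client.operations import OperationsSession, new_asset_document

project_id = ObjectId()  # hypothetical id of an existing project document
asset_doc = new_asset_document("sh010", project_id, None, [])

session = OperationsSession()
session.create_entity("demo_project", "asset", asset_doc)
session.commit()
```
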
### Update
Update operations require an entity id and the keys that should be changed; the update dictionary must have the form {"key": value}. If a value should be set in a nested dictionary, the key must contain all subkeys joined with a dot `.` (e.g. `{"data": {"fps": 25}}` -> `{"data.fps": 25}`). To simplify creation of update dictionaries, helper functions were prepared which do that for you; their names follow the template `prepare_<entity type>_update_data` and they work by comparing the previous document with the new document. If a function for a requested entity type is missing, it is because we didn't need it yet and it requires implementation.
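
A minimal update sketch with a dot-joined key (the project name and id are hypothetical):

```python
from bson.objectid import ObjectId

from openpype.client.operations import OperationsSession

asset_id = ObjectId()  # hypothetical id of an existing asset document

session = OperationsSession()
# Nested value {"data": {"fps": 25}} becomes the dot-joined key below.
session.update_entity("demo_project", "asset", asset_id, {"data.fps": 25})
session.commit()
```
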
### Delete
Delete operations need an entity id. The entity will be deleted from mongo.

## What (probably) won't be replaced
Some parts of the code are still using direct mongo calls. In most cases these are very specific calls that are module specific, or their usage will completely change in the future.
- Mongo calls that are not project specific (outside the `avalon` collection) will be removed or will have to use a different mechanism for how the data are stored. At this moment this is related to OpenPype settings and logs, ftrack server events and some other data.
- Sync server queries. They're complex and very specific to the sync server module. Their replacement will require specific calls to the OpenPype server in v4, thus their abstraction with a wrapper is irrelevant and would complicate production in v3.
- Project managers (ftrack, kitsu, shotgrid, embedded Project Manager, etc.). Project managers are creating, updating or removing assets in v3, but in v4 they will create folders with a different structure. Wrapping creation of assets would not help to prepare for v4 because of the new data structures. The same can be said about the editorial Extract Hierarchy Avalon plugin which creates the project structure.
- Code parts that are marked as deprecated in v3 or will be deprecated in v4.
- integrate asset legacy publish plugin - already legacy, kept for safety
- integrate thumbnail - thumbnails will be stored in a different way in v4
- input links - links will be stored in a different way and will have a different mechanism of linking. In v3 links are limited to the same entity type: "asset <-> asset" or "representation <-> representation".

## Known missing replacements
- change subset group in loader tool
- integrate subset group
- query input links in openpype lib
- create project in openpype lib
- save/create workfile doc in openpype lib
- integrate hero version

794 openpype/client/operations.py Normal file
@@ -0,0 +1,794 @@
import re
import uuid
import copy
import collections
from abc import ABCMeta, abstractmethod, abstractproperty

import six
from bson.objectid import ObjectId
from pymongo import DeleteOne, InsertOne, UpdateOne

from .mongo import get_project_connection
from .entities import get_project

REMOVED_VALUE = object()

PROJECT_NAME_ALLOWED_SYMBOLS = "a-zA-Z0-9_"
PROJECT_NAME_REGEX = re.compile(
    "^[{}]+$".format(PROJECT_NAME_ALLOWED_SYMBOLS)
)

CURRENT_PROJECT_SCHEMA = "openpype:project-3.0"
CURRENT_PROJECT_CONFIG_SCHEMA = "openpype:config-2.0"
CURRENT_ASSET_DOC_SCHEMA = "openpype:asset-3.0"
CURRENT_SUBSET_SCHEMA = "openpype:subset-3.0"
CURRENT_VERSION_SCHEMA = "openpype:version-3.0"
CURRENT_HERO_VERSION_SCHEMA = "openpype:hero_version-1.0"
CURRENT_REPRESENTATION_SCHEMA = "openpype:representation-2.0"
CURRENT_WORKFILE_INFO_SCHEMA = "openpype:workfile-1.0"
CURRENT_THUMBNAIL_SCHEMA = "openpype:thumbnail-1.0"


def _create_or_convert_to_mongo_id(mongo_id):
    if mongo_id is None:
        return ObjectId()
    return ObjectId(mongo_id)


def new_project_document(
    project_name, project_code, config, data=None, entity_id=None
):
    """Create skeleton data of project document.

    Args:
        project_name (str): Name of project. Used as identifier of a project.
        project_code (str): Shorter version of project name without spaces
            and special characters (in most cases). Should also be considered
            a unique name across projects.
        config (Dict[str, Any]): Project config consisting of roots,
            templates, applications and other project Anatomy related data.
        data (Dict[str, Any]): Project data with information about its
            attributes (e.g. 'fps' etc.) or integration specific keys.
        entity_id (Union[str, ObjectId]): Predefined id of document. New id is
            created if not passed.

    Returns:
        Dict[str, Any]: Skeleton of project document.
    """

    if data is None:
        data = {}

    data["code"] = project_code

    return {
        "_id": _create_or_convert_to_mongo_id(entity_id),
        "name": project_name,
        "type": CURRENT_PROJECT_SCHEMA,
        "entity_data": data,
        "config": config
    }


def new_asset_document(
    name, project_id, parent_id, parents, data=None, entity_id=None
):
    """Create skeleton data of asset document.

    Args:
        name (str): Is considered as unique identifier of asset in project.
        project_id (Union[str, ObjectId]): Id of project document.
        parent_id (Union[str, ObjectId]): Id of parent asset.
        parents (List[str]): List of parent assets names.
        data (Dict[str, Any]): Asset document data. Empty dictionary is used
            if not passed. Value of 'parent_id' is used to fill 'visualParent'.
        entity_id (Union[str, ObjectId]): Predefined id of document. New id is
            created if not passed.

    Returns:
        Dict[str, Any]: Skeleton of asset document.
    """

    if data is None:
        data = {}
    if parent_id is not None:
        parent_id = ObjectId(parent_id)
    data["visualParent"] = parent_id
    data["parents"] = parents

    return {
        "_id": _create_or_convert_to_mongo_id(entity_id),
        "type": "asset",
        "name": name,
        "parent": ObjectId(project_id),
        "data": data,
        "schema": CURRENT_ASSET_DOC_SCHEMA
    }


def new_subset_document(name, family, asset_id, data=None, entity_id=None):
    """Create skeleton data of subset document.

    Args:
        name (str): Is considered as unique identifier of subset under asset.
        family (str): Subset's family.
        asset_id (Union[str, ObjectId]): Id of parent asset.
        data (Dict[str, Any]): Subset document data. Empty dictionary is used
            if not passed. Value of 'family' argument is used to fill the
            'family' key.
        entity_id (Union[str, ObjectId]): Predefined id of document. New id is
            created if not passed.

    Returns:
        Dict[str, Any]: Skeleton of subset document.
    """

    if data is None:
        data = {}
    data["family"] = family
    return {
        "_id": _create_or_convert_to_mongo_id(entity_id),
        "schema": CURRENT_SUBSET_SCHEMA,
        "type": "subset",
        "name": name,
        "data": data,
        "parent": asset_id
    }


def new_version_doc(version, subset_id, data=None, entity_id=None):
    """Create skeleton data of version document.

    Args:
        version (int): Is considered as unique identifier of version
            under subset.
        subset_id (Union[str, ObjectId]): Id of parent subset.
        data (Dict[str, Any]): Version document data.
        entity_id (Union[str, ObjectId]): Predefined id of document. New id is
            created if not passed.

    Returns:
        Dict[str, Any]: Skeleton of version document.
    """

    if data is None:
        data = {}

    return {
        "_id": _create_or_convert_to_mongo_id(entity_id),
        "schema": CURRENT_VERSION_SCHEMA,
        "type": "version",
        "name": int(version),
        "parent": subset_id,
        "data": data
    }


def new_hero_version_doc(version_id, subset_id, data=None, entity_id=None):
    """Create skeleton data of hero version document.

    Args:
        version_id (ObjectId): Is considered as unique identifier of version
            under subset.
        subset_id (Union[str, ObjectId]): Id of parent subset.
        data (Dict[str, Any]): Version document data.
        entity_id (Union[str, ObjectId]): Predefined id of document. New id is
            created if not passed.

    Returns:
        Dict[str, Any]: Skeleton of hero version document.
    """

    if data is None:
        data = {}

    return {
        "_id": _create_or_convert_to_mongo_id(entity_id),
        "schema": CURRENT_HERO_VERSION_SCHEMA,
        "type": "hero_version",
        "version_id": version_id,
        "parent": subset_id,
        "data": data
    }


def new_representation_doc(
    name, version_id, context, data=None, entity_id=None
):
    """Create skeleton data of representation document.

    Args:
        name (str): Is considered as unique identifier of representation
            under version.
        version_id (Union[str, ObjectId]): Id of parent version.
        context (Dict[str, Any]): Representation context used to fill
            templates or to query.
        data (Dict[str, Any]): Representation document data.
        entity_id (Union[str, ObjectId]): Predefined id of document. New id is
            created if not passed.

    Returns:
        Dict[str, Any]: Skeleton of representation document.
    """

    if data is None:
        data = {}

    return {
        "_id": _create_or_convert_to_mongo_id(entity_id),
        "schema": CURRENT_REPRESENTATION_SCHEMA,
        "type": "representation",
        "parent": version_id,
        "name": name,
        "data": data,

        # Imprint shortcut to context for performance reasons.
        "context": context
    }


def new_thumbnail_doc(data=None, entity_id=None):
    """Create skeleton data of thumbnail document.

    Args:
        data (Dict[str, Any]): Thumbnail document data.
        entity_id (Union[str, ObjectId]): Predefined id of document. New id is
            created if not passed.

    Returns:
        Dict[str, Any]: Skeleton of thumbnail document.
    """

    if data is None:
        data = {}

    return {
        "_id": _create_or_convert_to_mongo_id(entity_id),
        "type": "thumbnail",
        "schema": CURRENT_THUMBNAIL_SCHEMA,
        "data": data
    }


def new_workfile_info_doc(
    filename, asset_id, task_name, files, data=None, entity_id=None
):
    """Create skeleton data of workfile info document.

    Workfile document is at this moment used primarily for artist notes.

    Args:
        filename (str): Filename of workfile.
        asset_id (Union[str, ObjectId]): Id of asset under which the workfile
            lives.
        task_name (str): Task under which the workfile was created.
        files (List[str]): List of rootless filepaths related to workfile.
        data (Dict[str, Any]): Additional metadata.
        entity_id (Union[str, ObjectId]): Predefined id of document. New id is
            created if not passed.

    Returns:
        Dict[str, Any]: Skeleton of workfile info document.
    """

    if not data:
        data = {}

    return {
        "_id": _create_or_convert_to_mongo_id(entity_id),
        "type": "workfile",
        "parent": ObjectId(asset_id),
        "task_name": task_name,
        "filename": filename,
        "data": data,
        "files": files
    }


def _prepare_update_data(old_doc, new_doc, replace):
    changes = {}
    for key, value in new_doc.items():
        if key not in old_doc or value != old_doc[key]:
            changes[key] = value

    if replace:
        for key in old_doc.keys():
            if key not in new_doc:
                changes[key] = REMOVED_VALUE
    return changes


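# --- Editor's note: behavior sketch, not part of the commit. ---
# With 'replace=True', keys present in the old document but missing from
# the new one are marked with REMOVED_VALUE, which 'UpdateOperation' later
# translates to a mongo '$unset'. Only the first dictionary level is
# compared, so the whole "data" value below is replaced, not diffed.
def _example_prepare_update_data():
    old_doc = {"name": "main", "group": "layout", "data": {"fps": 24}}
    new_doc = {"name": "main", "data": {"fps": 25}}
    changes = _prepare_update_data(old_doc, new_doc, replace=True)
    # changes == {"data": {"fps": 25}, "group": REMOVED_VALUE}
    return changes

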
def prepare_subset_update_data(old_doc, new_doc, replace=True):
    """Compare two subset documents and prepare update data.

    Based on the compared values, update data for 'UpdateOperation' are
    created.

    Empty output means that the documents are identical.

    Returns:
        Dict[str, Any]: Changes between old and new document.
    """

    return _prepare_update_data(old_doc, new_doc, replace)


def prepare_version_update_data(old_doc, new_doc, replace=True):
    """Compare two version documents and prepare update data.

    Based on the compared values, update data for 'UpdateOperation' are
    created.

    Empty output means that the documents are identical.

    Returns:
        Dict[str, Any]: Changes between old and new document.
    """

    return _prepare_update_data(old_doc, new_doc, replace)


def prepare_hero_version_update_data(old_doc, new_doc, replace=True):
    """Compare two hero version documents and prepare update data.

    Based on the compared values, update data for 'UpdateOperation' are
    created.

    Empty output means that the documents are identical.

    Returns:
        Dict[str, Any]: Changes between old and new document.
    """

    return _prepare_update_data(old_doc, new_doc, replace)


def prepare_representation_update_data(old_doc, new_doc, replace=True):
    """Compare two representation documents and prepare update data.

    Based on the compared values, update data for 'UpdateOperation' are
    created.

    Empty output means that the documents are identical.

    Returns:
        Dict[str, Any]: Changes between old and new document.
    """

    return _prepare_update_data(old_doc, new_doc, replace)


def prepare_workfile_info_update_data(old_doc, new_doc, replace=True):
    """Compare two workfile info documents and prepare update data.

    Based on the compared values, update data for 'UpdateOperation' are
    created.

    Empty output means that the documents are identical.

    Returns:
        Dict[str, Any]: Changes between old and new document.
    """

    return _prepare_update_data(old_doc, new_doc, replace)


@six.add_metaclass(ABCMeta)
class AbstractOperation(object):
    """Base operation class.

    Operation represents a call into database. The call can create, change
    or remove data.

    Args:
        project_name (str): On which project operation will happen.
        entity_type (str): Type of entity on which change happens.
            e.g. 'asset', 'representation' etc.
    """

    def __init__(self, project_name, entity_type):
        self._project_name = project_name
        self._entity_type = entity_type
        self._id = str(uuid.uuid4())

    @property
    def project_name(self):
        return self._project_name

    @property
    def id(self):
        """Identifier of operation."""

        return self._id

    @property
    def entity_type(self):
        return self._entity_type

    @abstractproperty
    def operation_name(self):
        """Stringified type of operation."""

        pass

    @abstractmethod
    def to_mongo_operation(self):
        """Convert operation to Mongo batch operation."""

        pass

    def to_data(self):
        """Convert operation to data that can be converted to json or others.

        Warning:
            Current state returns ObjectId objects which cannot be parsed by
                json.

        Returns:
            Dict[str, Any]: Description of operation.
        """

        return {
            "id": self._id,
            "entity_type": self.entity_type,
            "project_name": self.project_name,
            "operation": self.operation_name
        }


class CreateOperation(AbstractOperation):
    """Operation to create an entity.

    Args:
        project_name (str): On which project operation will happen.
        entity_type (str): Type of entity on which change happens.
            e.g. 'asset', 'representation' etc.
        data (Dict[str, Any]): Data of entity that will be created.
    """

    operation_name = "create"

    def __init__(self, project_name, entity_type, data):
        super(CreateOperation, self).__init__(project_name, entity_type)

        if not data:
            data = {}
        else:
            data = copy.deepcopy(dict(data))

        if "_id" not in data:
            data["_id"] = ObjectId()
        else:
            data["_id"] = ObjectId(data["_id"])

        self._entity_id = data["_id"]
        self._data = data

    def __setitem__(self, key, value):
        self.set_value(key, value)

    def __getitem__(self, key):
        return self.data[key]

    def set_value(self, key, value):
        self.data[key] = value

    def get(self, key, *args, **kwargs):
        return self.data.get(key, *args, **kwargs)

    @property
    def entity_id(self):
        return self._entity_id

    @property
    def data(self):
        return self._data

    def to_mongo_operation(self):
        return InsertOne(copy.deepcopy(self._data))

    def to_data(self):
        output = super(CreateOperation, self).to_data()
        output["data"] = copy.deepcopy(self.data)
        return output


class UpdateOperation(AbstractOperation):
    """Operation to update an entity.

    Args:
        project_name (str): On which project operation will happen.
        entity_type (str): Type of entity on which change happens.
            e.g. 'asset', 'representation' etc.
        entity_id (Union[str, ObjectId]): Identifier of an entity.
        update_data (Dict[str, Any]): Key -> value changes that will be set in
            database. If value is set to 'REMOVED_VALUE' the key will be
            removed. Only first level of dictionary is checked (on purpose).
    """

    operation_name = "update"

    def __init__(self, project_name, entity_type, entity_id, update_data):
        super(UpdateOperation, self).__init__(project_name, entity_type)

        self._entity_id = ObjectId(entity_id)
        self._update_data = update_data

    @property
    def entity_id(self):
        return self._entity_id

    @property
    def update_data(self):
        return self._update_data

    def to_mongo_operation(self):
        unset_data = {}
        set_data = {}
        for key, value in self._update_data.items():
            if value is REMOVED_VALUE:
                unset_data[key] = None
            else:
                set_data[key] = value

        op_data = {}
        if unset_data:
            op_data["$unset"] = unset_data
        if set_data:
            op_data["$set"] = set_data

        if not op_data:
            return None

        return UpdateOne(
            {"_id": self.entity_id},
            op_data
        )

    def to_data(self):
        changes = {}
        for key, value in self._update_data.items():
            if value is REMOVED_VALUE:
                value = None
            changes[key] = value

        output = super(UpdateOperation, self).to_data()
        output.update({
            "entity_id": self.entity_id,
            "changes": changes
        })
        return output


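# --- Editor's note: behavior sketch, not part of the commit. ---
# REMOVED_VALUE entries end up in '$unset', everything else in '$set'.
# The entity id is hypothetical.
def _example_update_operation():
    update_op = UpdateOperation(
        "demo_project", "asset", ObjectId(), {
            "data.fps": 25,
            "data.deadline": REMOVED_VALUE,
        }
    )
    # -> UpdateOne({"_id": <id>}, {"$unset": {"data.deadline": None},
    #                              "$set": {"data.fps": 25}})
    return update_op.to_mongo_operation()

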
class DeleteOperation(AbstractOperation):
    """Operation to delete an entity.

    Args:
        project_name (str): On which project operation will happen.
        entity_type (str): Type of entity on which change happens.
            e.g. 'asset', 'representation' etc.
        entity_id (Union[str, ObjectId]): Entity id that will be removed.
    """

    operation_name = "delete"

    def __init__(self, project_name, entity_type, entity_id):
        super(DeleteOperation, self).__init__(project_name, entity_type)

        self._entity_id = ObjectId(entity_id)

    @property
    def entity_id(self):
        return self._entity_id

    def to_mongo_operation(self):
        return DeleteOne({"_id": self.entity_id})

    def to_data(self):
        output = super(DeleteOperation, self).to_data()
        output["entity_id"] = self.entity_id
        return output


class OperationsSession(object):
    """Session storing operations that should happen in an order.

    At this moment the session does not handle anything special and can be
    considered a plain list of operations that will happen one after
    another. If creation of the same entity is queued multiple times it is
    not handled in any way and document values are not validated.

    Operations are grouped by their project name when committed.
    """

    def __init__(self):
        self._operations = []

    def add(self, operation):
        """Add operation to be processed.

        Args:
            operation (AbstractOperation): Operation that should be
                processed.
        """
        if not isinstance(
            operation,
            (CreateOperation, UpdateOperation, DeleteOperation)
        ):
            raise TypeError("Expected Operation object got {}".format(
                str(type(operation))
            ))

        self._operations.append(operation)

    def append(self, operation):
        """Add operation to be processed.

        Args:
            operation (AbstractOperation): Operation that should be
                processed.
        """

        self.add(operation)

    def extend(self, operations):
        """Add operations to be processed.

        Args:
            operations (List[AbstractOperation]): Operations that should be
                processed.
        """

        for operation in operations:
            self.add(operation)

    def remove(self, operation):
        """Remove operation."""

        self._operations.remove(operation)

    def clear(self):
        """Clear all registered operations."""

        self._operations = []

    def to_data(self):
        return [
            operation.to_data()
            for operation in self._operations
        ]

    def commit(self):
        """Commit session operations."""

        operations, self._operations = self._operations, []
        if not operations:
            return

        operations_by_project = collections.defaultdict(list)
        for operation in operations:
            operations_by_project[operation.project_name].append(operation)

        for project_name, operations in operations_by_project.items():
            bulk_writes = []
            for operation in operations:
                mongo_op = operation.to_mongo_operation()
                if mongo_op is not None:
                    bulk_writes.append(mongo_op)

            if bulk_writes:
                collection = get_project_connection(project_name)
                collection.bulk_write(bulk_writes)

    def create_entity(self, project_name, entity_type, data):
        """Fast access to 'CreateOperation'.

        Returns:
            CreateOperation: Object of create operation.
        """

        operation = CreateOperation(project_name, entity_type, data)
        self.add(operation)
        return operation

    def update_entity(self, project_name, entity_type, entity_id, update_data):
        """Fast access to 'UpdateOperation'.

        Returns:
            UpdateOperation: Object of update operation.
        """

        operation = UpdateOperation(
            project_name, entity_type, entity_id, update_data
        )
        self.add(operation)
        return operation

    def delete_entity(self, project_name, entity_type, entity_id):
        """Fast access to 'DeleteOperation'.

        Returns:
            DeleteOperation: Object of delete operation.
        """

        operation = DeleteOperation(project_name, entity_type, entity_id)
        self.add(operation)
        return operation


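# --- Editor's note: usage sketch, not part of the commit. ---
# Queued operations are grouped by project and flushed with one
# 'bulk_write' per project on commit. Names and ids are hypothetical.
def _example_operations_session():
    asset_id = ObjectId()  # hypothetical id of an existing asset
    session = OperationsSession()
    session.create_entity(
        "demo_project", "subset",
        new_subset_document("modelMain", "model", asset_id)
    )
    session.update_entity("demo_project", "asset", asset_id, {"data.fps": 25})
    session.commit()

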
def create_project(project_name, project_code, library_project=False):
    """Create project using OpenPype settings.

    This project creation function is not validating project document on
    creation. It is because the project document is created blindly with only
    the minimum required information about the project, which is its name,
    code, type and schema.

    Entered project name must be unique and project must not exist yet.

    Note:
        This function is here to be OP v4 ready but in v3 has more logic
        to do. That's why inner imports are in the body.

    Args:
        project_name (str): New project name. Should be unique.
        project_code (str): Project's code should be unique too.
        library_project (bool): Project is library project.

    Raises:
        ValueError: When project name already exists in MongoDB.

    Returns:
        dict: Created project document.
    """

    from openpype.settings import ProjectSettings, SaveWarningExc
    from openpype.pipeline.schema import validate

    if get_project(project_name, fields=["name"]):
        raise ValueError("Project with name \"{}\" already exists".format(
            project_name
        ))

    if not PROJECT_NAME_REGEX.match(project_name):
        raise ValueError((
            "Project name \"{}\" contains invalid characters"
        ).format(project_name))

    project_doc = {
        "type": "project",
        "name": project_name,
        "data": {
            "code": project_code,
            "library_project": library_project
        },
        "schema": CURRENT_PROJECT_SCHEMA
    }

    op_session = OperationsSession()
    # Insert document with basic data
    create_op = op_session.create_entity(
        project_name, project_doc["type"], project_doc
    )
    op_session.commit()

    # Load ProjectSettings for the project and save it to store all
    #   attributes and Anatomy
    try:
        project_settings_entity = ProjectSettings(project_name)
        project_settings_entity.save()
    except SaveWarningExc as exc:
        print(str(exc))
    except Exception:
        op_session.delete_entity(
            project_name, project_doc["type"], create_op.entity_id
        )
        op_session.commit()
        raise

    project_doc = get_project(project_name)

    try:
        # Validate created project document
        validate(project_doc)
    except Exception:
        # Remove project if it is not valid
        op_session.delete_entity(
            project_name, project_doc["type"], create_op.entity_id
        )
        op_session.commit()
        raise

    return project_doc
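
# --- Editor's note: usage sketch, not part of the commit. ---
# Name and code are hypothetical; a ValueError is raised when the name is
# already taken or contains characters outside PROJECT_NAME_ALLOWED_SYMBOLS.
def _example_create_project():
    return create_project("demo_project", "demo", library_project=False)
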
@@ -19,6 +19,7 @@ class AddLastWorkfileToLaunchArgs(PreLaunchHook):
         "hiero",
         "houdini",
         "nukestudio",
+        "fusion",
         "blender",
         "photoshop",
         "tvpaint",

177 openpype/hooks/pre_copy_last_published_workfile.py Normal file
@@ -0,0 +1,177 @@
import os
import shutil
from time import sleep
from openpype.client.entities import (
    get_last_version_by_subset_id,
    get_representations,
    get_subsets,
)
from openpype.lib import PreLaunchHook
from openpype.lib.local_settings import get_local_site_id
from openpype.lib.profiles_filtering import filter_profiles
from openpype.pipeline.load.utils import get_representation_path
from openpype.settings.lib import get_project_settings


class CopyLastPublishedWorkfile(PreLaunchHook):
    """Copy last published workfile as first workfile.

    Prelaunch hook works only if the last workfile leads to a file that
    does not exist yet.
        - That is possible only if it is the first version.
    """

    # Before `AddLastWorkfileToLaunchArgs`
    order = -1
    app_groups = ["blender", "photoshop", "tvpaint", "aftereffects"]

    def execute(self):
        """Check if local workfile doesn't exist, else copy it.

        1- Check if setting for this feature is enabled
        2- Check if workfile in work area doesn't exist
        3- Check if published workfile exists and is copied locally in publish
        4- Substitute copied published workfile as first workfile

        Returns:
            None: This is a void method.
        """

        sync_server = self.modules_manager.get("sync_server")
        if not sync_server or not sync_server.enabled:
            self.log.debug("Sync server module is not enabled or available")
            return

        # Check there is no workfile available
        last_workfile = self.data.get("last_workfile_path")
        if os.path.exists(last_workfile):
            self.log.debug(
                "Last workfile exists. Skipping {} process.".format(
                    self.__class__.__name__
                )
            )
            return

        # Get data
        project_name = self.data["project_name"]
        task_name = self.data["task_name"]
        task_type = self.data["task_type"]
        host_name = self.application.host_name

        # Check if settings have the feature enabled
        project_settings = get_project_settings(project_name)
        profiles = project_settings["global"]["tools"]["Workfiles"][
            "last_workfile_on_startup"
        ]
        filter_data = {
            "tasks": task_name,
            "task_types": task_type,
            "hosts": host_name,
        }
        last_workfile_settings = filter_profiles(profiles, filter_data)
        use_last_published_workfile = last_workfile_settings.get(
            "use_last_published_workfile"
        )
        if use_last_published_workfile is None:
            self.log.info(
                (
                    "Seems like an old version of settings is used."
                    ' Can\'t access custom templates in host "{}".'.format(
                        host_name
                    )
                )
            )
            return
        elif use_last_published_workfile is False:
            self.log.info(
                (
                    'Project "{}" has turned off using last published'
                    ' workfile as first workfile for host "{}"'.format(
                        project_name, host_name
                    )
                )
            )
            return

        self.log.info("Trying to fetch last published workfile...")

        project_doc = self.data.get("project_doc")
        asset_doc = self.data.get("asset_doc")
        anatomy = self.data.get("anatomy")

        # Check it can proceed
        if not project_doc and not asset_doc:
            return

        # Get subset id
        subset_id = next(
            (
                subset["_id"]
                for subset in get_subsets(
                    project_name,
                    asset_ids=[asset_doc["_id"]],
                    fields=["_id", "data.family", "data.families"],
                )
                if subset["data"].get("family") == "workfile"
                # Legacy compatibility
                or "workfile" in subset["data"].get("families", {})
            ),
            None,
        )
        if not subset_id:
            self.log.debug(
                'No workfile for asset "{}".'.format(asset_doc["name"])
            )
            return

        # Get workfile representation
        last_version_doc = get_last_version_by_subset_id(
            project_name, subset_id, fields=["_id"]
        )
        if not last_version_doc:
            self.log.debug("Subset does not have any versions")
            return

        workfile_representation = next(
            (
                representation
                for representation in get_representations(
                    project_name, version_ids=[last_version_doc["_id"]]
                )
                if representation["context"]["task"]["name"] == task_name
            ),
            None,
        )

        if not workfile_representation:
            self.log.debug(
                'No published workfile for task "{}" and host "{}".'.format(
                    task_name, host_name
                )
            )
            return

        local_site_id = get_local_site_id()
        sync_server.add_site(
            project_name,
            workfile_representation["_id"],
            local_site_id,
            force=True,
            priority=99,
            reset_timer=True,
        )

        while not sync_server.is_representation_on_site(
            project_name, workfile_representation["_id"], local_site_id
        ):
            sleep(5)

        # Get paths
        published_workfile_path = get_representation_path(
            workfile_representation, root=anatomy.roots
        )
        local_workfile_dir = os.path.dirname(last_workfile)

        # Copy file and substitute path
        self.data["last_workfile_path"] = shutil.copy(
            published_workfile_path, local_workfile_dir
        )
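
# --- Editor's note: data-shape sketch, not part of the commit. ---
# 'filter_profiles' picks the profile whose filters best match the current
# task/type/host combination; the profile below is a hypothetical example
# of a 'last_workfile_on_startup' settings entry driving this hook.
def _example_profile_filtering():
    profiles = [{
        "tasks": [],
        "task_types": [],
        "hosts": ["blender", "photoshop"],
        "use_last_published_workfile": True,
    }]
    filter_data = {
        "tasks": "modeling",
        "task_types": "Modeling",
        "hosts": "blender",
    }
    return filter_profiles(profiles, filter_data)
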
@@ -1,11 +1,11 @@
 import os
 import shutil
-from openpype.lib import (
-    PreLaunchHook,
-    get_custom_workfile_template_by_context,
-    get_custom_workfile_template_by_string_context
-)
-from openpype.settings import get_project_settings
+from openpype.lib import PreLaunchHook
+from openpype.settings import get_project_settings
+from openpype.pipeline.workfile import (
+    get_custom_workfile_template,
+    get_custom_workfile_template_by_string_context
+)


 class CopyTemplateWorkfile(PreLaunchHook):
@@ -54,41 +54,22 @@ class CopyTemplateWorkfile(PreLaunchHook):
         project_name = self.data["project_name"]
         asset_name = self.data["asset_name"]
         task_name = self.data["task_name"]
+        host_name = self.application.host_name
+
         project_settings = get_project_settings(project_name)
-        host_settings = project_settings[self.application.host_name]
-        workfile_builder_settings = host_settings.get("workfile_builder")
-        if not workfile_builder_settings:
-            # TODO remove warning when deprecated
-            self.log.warning((
-                "Seems like old version of settings is used."
-                " Can't access custom templates in host \"{}\"."
-            ).format(self.application.full_label))
-            return
-
-        if not workfile_builder_settings["create_first_version"]:
-            self.log.info((
-                "Project \"{}\" has turned off to create first workfile for"
-                " application \"{}\""
-            ).format(project_name, self.application.full_label))
-            return
-
-        # Backwards compatibility
-        template_profiles = workfile_builder_settings.get("custom_templates")
-        if not template_profiles:
-            self.log.info(
-                "Custom templates are not filled. Skipping template copy."
-            )
-            return
-
         project_doc = self.data.get("project_doc")
         asset_doc = self.data.get("asset_doc")
         anatomy = self.data.get("anatomy")
         if project_doc and asset_doc:
             self.log.debug("Started filtering of custom template paths.")
-            template_path = get_custom_workfile_template_by_context(
-                template_profiles, project_doc, asset_doc, task_name, anatomy
+            template_path = get_custom_workfile_template(
+                project_doc,
+                asset_doc,
+                task_name,
+                host_name,
+                anatomy,
+                project_settings
             )
-
         else:
@@ -96,10 +77,13 @@ class CopyTemplateWorkfile(PreLaunchHook):
                 "Global data collection probably did not execute."
                 " Using backup solution."
             ))
-            dbcon = self.data.get("dbcon")
             template_path = get_custom_workfile_template_by_string_context(
-                template_profiles, project_name, asset_name, task_name,
-                dbcon, anatomy
+                project_name,
+                asset_name,
+                task_name,
+                host_name,
+                anatomy,
+                project_settings
             )

         if not template_path:
@@ -1,8 +1,6 @@
 import os
-from openpype.lib import (
-    PreLaunchHook,
-    create_workdir_extra_folders
-)
+from openpype.lib import PreLaunchHook
+from openpype.pipeline.workfile import create_workdir_extra_folders


 class AddLastWorkfileToLaunchArgs(PreLaunchHook):
@@ -1,13 +1,24 @@
 from .host import (
     HostBase,
+)
+
+from .interfaces import (
     IWorkfileHost,
     ILoadHost,
+    IPublishHost,
     INewPublisher,
 )

+from .dirmap import HostDirmap
+
+
 __all__ = (
     "HostBase",
+
     "IWorkfileHost",
     "ILoadHost",
+    "IPublishHost",
     "INewPublisher",
+
+    "HostDirmap",
 )

205 openpype/host/dirmap.py Normal file
@@ -0,0 +1,205 @@
"""Dirmap functionality used in host integrations inside DCCs.
|
||||||
|
|
||||||
|
Idea for current dirmap implementation was used from Maya where is possible to
|
||||||
|
enter source and destination roots and maya will try each found source
|
||||||
|
in referenced file replace with each destionation paths. First path which
|
||||||
|
exists is used.
|
||||||
|
"""
|
||||||
|
|
||||||
|
import os
|
||||||
|
from abc import ABCMeta, abstractmethod
|
||||||
|
|
||||||
|
import six
|
||||||
|
|
||||||
|
from openpype.lib import Logger
|
||||||
|
from openpype.modules import ModulesManager
|
||||||
|
from openpype.settings import get_project_settings
|
||||||
|
from openpype.settings.lib import get_site_local_overrides
|
||||||
|
|
||||||
|
|
||||||
|
@six.add_metaclass(ABCMeta)
|
||||||
|
class HostDirmap(object):
|
||||||
|
"""Abstract class for running dirmap on a workfile in a host.
|
||||||
|
|
||||||
|
Dirmap is used to translate paths inside of host workfile from one
|
||||||
|
OS to another. (Eg. arstist created workfile on Win, different artists
|
||||||
|
opens same file on Linux.)
|
||||||
|
|
||||||
|
Expects methods to be implemented inside of host:
|
||||||
|
on_dirmap_enabled: run host code for enabling dirmap
|
||||||
|
do_dirmap: run host code to do actual remapping
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(
|
||||||
|
self, host_name, project_name, project_settings=None, sync_module=None
|
||||||
|
):
|
||||||
|
self.host_name = host_name
|
||||||
|
self.project_name = project_name
|
||||||
|
self._project_settings = project_settings
|
||||||
|
self._sync_module = sync_module # to limit reinit of Modules
|
||||||
|
self._log = None
|
||||||
|
self._mapping = None # cache mapping
|
||||||
|
|
||||||
|
@property
|
||||||
|
def sync_module(self):
|
||||||
|
if self._sync_module is None:
|
||||||
|
manager = ModulesManager()
|
||||||
|
self._sync_module = manager["sync_server"]
|
||||||
|
return self._sync_module
|
||||||
|
|
||||||
|
@property
|
||||||
|
def project_settings(self):
|
||||||
|
if self._project_settings is None:
|
||||||
|
self._project_settings = get_project_settings(self.project_name)
|
||||||
|
return self._project_settings
|
||||||
|
|
||||||
|
@property
|
||||||
|
def log(self):
|
||||||
|
if self._log is None:
|
||||||
|
self._log = Logger.get_logger(self.__class__.__name__)
|
||||||
|
return self._log
|
||||||
|
|
||||||
|
@abstractmethod
|
||||||
|
def on_enable_dirmap(self):
|
||||||
|
"""Run host dependent operation for enabling dirmap if necessary."""
|
||||||
|
pass
|
||||||
|
|
||||||
|
@abstractmethod
|
||||||
|
def dirmap_routine(self, source_path, destination_path):
|
||||||
|
"""Run host dependent remapping from source_path to destination_path"""
|
||||||
|
pass
|
||||||
|
|
||||||
|
def process_dirmap(self):
|
||||||
|
# type: (dict) -> None
|
||||||
|
"""Go through all paths in Settings and set them using `dirmap`.
|
||||||
|
|
||||||
|
If artists has Site Sync enabled, take dirmap mapping directly from
|
||||||
|
Local Settings when artist is syncing workfile locally.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
project_settings (dict): Settings for current project.
|
||||||
|
"""
|
||||||
|
|
||||||
|
if not self._mapping:
|
||||||
|
self._mapping = self.get_mappings(self.project_settings)
|
||||||
|
if not self._mapping:
|
||||||
|
return
|
||||||
|
|
||||||
|
self.log.info("Processing directory mapping ...")
|
||||||
|
self.on_enable_dirmap()
|
||||||
|
self.log.info("mapping:: {}".format(self._mapping))
|
||||||
|
|
||||||
|
for k, sp in enumerate(self._mapping["source-path"]):
|
||||||
|
dst = self._mapping["destination-path"][k]
|
||||||
|
try:
|
||||||
|
print("{} -> {}".format(sp, dst))
|
||||||
|
self.dirmap_routine(sp, dst)
|
||||||
|
except IndexError:
|
||||||
|
# missing corresponding destination path
|
||||||
|
self.log.error((
|
||||||
|
"invalid dirmap mapping, missing corresponding"
|
||||||
|
" destination directory."
|
||||||
|
))
|
||||||
|
break
|
||||||
|
except RuntimeError:
|
||||||
|
self.log.error(
|
||||||
|
"invalid path {} -> {}, mapping not registered".format(
|
||||||
|
sp, dst
|
||||||
|
)
|
||||||
|
)
|
||||||
|
continue
|
||||||
|
|
||||||
|
def get_mappings(self, project_settings):
|
||||||
|
"""Get translation from source-path to destination-path.
|
||||||
|
|
||||||
|
It checks if Site Sync is enabled and user chose to use local
|
||||||
|
site, in that case configuration in Local Settings takes precedence
|
||||||
|
"""
|
||||||
|
|
||||||
|
local_mapping = self._get_local_sync_dirmap(project_settings)
|
||||||
|
dirmap_label = "{}-dirmap".format(self.host_name)
|
||||||
|
if (
|
||||||
|
not self.project_settings[self.host_name].get(dirmap_label)
|
||||||
|
and not local_mapping
|
||||||
|
):
|
||||||
|
return {}
|
||||||
|
mapping_settings = self.project_settings[self.host_name][dirmap_label]
|
||||||
|
mapping_enabled = mapping_settings["enabled"] or bool(local_mapping)
|
||||||
|
if not mapping_enabled:
|
||||||
|
return {}
|
||||||
|
|
||||||
|
mapping = (
|
||||||
|
local_mapping
|
||||||
|
or mapping_settings["paths"]
|
||||||
|
or {}
|
||||||
|
)
|
||||||
|
|
||||||
|
if (
|
||||||
|
not mapping
|
||||||
|
or not mapping.get("destination-path")
|
||||||
|
or not mapping.get("source-path")
|
||||||
|
):
|
||||||
|
return {}
|
||||||
|
return mapping
|
||||||
|
|
||||||
|
def _get_local_sync_dirmap(self, project_settings):
|
||||||
|
"""
|
||||||
|
Returns dirmap if synch to local project is enabled.
|
||||||
|
|
||||||
|
Only valid mapping is from roots of remote site to local site set
|
||||||
|
in Local Settings.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
project_settings (dict)
|
||||||
|
Returns:
|
||||||
|
dict : { "source-path": [XXX], "destination-path": [YYYY]}
|
||||||
|
"""
|
||||||
|
|
||||||
|
mapping = {}
|
||||||
|
|
||||||
|
if not project_settings["global"]["sync_server"]["enabled"]:
|
||||||
|
return mapping
|
||||||
|
|
||||||
|
project_name = os.getenv("AVALON_PROJECT")
|
||||||
|
|
||||||
|
active_site = self.sync_module.get_local_normalized_site(
|
||||||
|
self.sync_module.get_active_site(project_name))
|
||||||
|
remote_site = self.sync_module.get_local_normalized_site(
|
||||||
|
self.sync_module.get_remote_site(project_name))
|
||||||
|
self.log.debug(
|
||||||
|
"active {} - remote {}".format(active_site, remote_site)
|
||||||
|
)
|
||||||
|
|
||||||
|
if (
|
||||||
|
active_site == "local"
|
||||||
|
and project_name in self.sync_module.get_enabled_projects()
|
||||||
|
and active_site != remote_site
|
||||||
|
):
|
||||||
|
sync_settings = self.sync_module.get_sync_project_setting(
|
||||||
|
project_name,
|
||||||
|
exclude_locals=False,
|
||||||
|
cached=False)
|
||||||
|
|
||||||
|
active_overrides = get_site_local_overrides(
|
||||||
|
project_name, active_site)
|
||||||
|
remote_overrides = get_site_local_overrides(
|
||||||
|
project_name, remote_site)
|
||||||
|
|
||||||
|
self.log.debug("local overrides {}".format(active_overrides))
|
||||||
|
self.log.debug("remote overrides {}".format(remote_overrides))
|
||||||
|
for root_name, active_site_dir in active_overrides.items():
|
||||||
|
remote_site_dir = (
|
||||||
|
remote_overrides.get(root_name)
|
||||||
|
or sync_settings["sites"][remote_site]["root"][root_name]
|
||||||
|
)
|
||||||
|
if os.path.isdir(active_site_dir):
|
||||||
|
if "destination-path" not in mapping:
|
||||||
|
mapping["destination-path"] = []
|
||||||
|
mapping["destination-path"].append(active_site_dir)
|
||||||
|
|
||||||
|
if "source-path" not in mapping:
|
||||||
|
mapping["source-path"] = []
|
||||||
|
mapping["source-path"].append(remote_site_dir)
|
||||||
|
|
||||||
|
self.log.debug("local sync mapping:: {}".format(mapping))
|
||||||
|
return mapping
|
||||||
|
|
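
# --- Editor's note: minimal subclass sketch, not part of the commit. ---
# A host integration implements the two abstract methods; the remapping
# below is a placeholder where a real host would retarget its file nodes
# or references.
class _ExampleHostDirmap(HostDirmap):
    def on_enable_dirmap(self):
        # Nothing to enable for this illustrative host.
        pass

    def dirmap_routine(self, source_path, destination_path):
        self.log.info(
            "remap {} -> {}".format(source_path, destination_path)
        )

# _ExampleHostDirmap("maya", "demo_project").process_dirmap()
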
@@ -1,30 +1,12 @@
 import logging
 import contextlib
-from abc import ABCMeta, abstractproperty, abstractmethod
+from abc import ABCMeta, abstractproperty
 import six

 # NOTE can't import 'typing' because of issues in Maya 2020
 # - shiboken crashes on 'typing' module import

-
-class MissingMethodsError(ValueError):
-    """Exception when host miss some required methods for specific workflow.
-
-    Args:
-        host (HostBase): Host implementation where are missing methods.
-        missing_methods (list[str]): List of missing methods.
-    """
-
-    def __init__(self, host, missing_methods):
-        joined_missing = ", ".join(
-            ['"{}"'.format(item) for item in missing_methods]
-        )
-        message = (
-            "Host \"{}\" miss methods {}".format(host.name, joined_missing)
-        )
-        super(MissingMethodsError, self).__init__(message)
-
-
 @six.add_metaclass(ABCMeta)
 class HostBase(object):
     """Base of host implementation class.
@@ -178,347 +160,3 @@ class HostBase(object):
             yield
         finally:
             pass
-
-
-class ILoadHost:
-    """Implementation requirements to be able use reference of representations.
-
-    The load plugins can do referencing even without implementation of methods
-    here, but switch and removement of containers would not be possible.
-
-    Questions:
-    - Is list container dependency of host or load plugins?
-    - Should this be directly in HostBase?
-    - how to find out if referencing is available?
-    - do we need to know that?
-    """
-
-    @staticmethod
-    def get_missing_load_methods(host):
-        """Look for missing methods on "old type" host implementation.
-
-        Method is used for validation of implemented functions related to
-        loading. Checks only existence of methods.
-
-        Args:
-            Union[ModuleType, HostBase]: Object of host where to look for
-                required methods.
-
-        Returns:
-            list[str]: Missing method implementations for loading workflow.
-        """
-
-        if isinstance(host, ILoadHost):
-            return []
-
-        required = ["ls"]
-        missing = []
-        for name in required:
-            if not hasattr(host, name):
-                missing.append(name)
-        return missing
-
-    @staticmethod
-    def validate_load_methods(host):
-        """Validate implemented methods of "old type" host for load workflow.
-
-        Args:
-            Union[ModuleType, HostBase]: Object of host to validate.
-
-        Raises:
-            MissingMethodsError: If there are missing methods on host
-                implementation.
-        """
-        missing = ILoadHost.get_missing_load_methods(host)
-        if missing:
-            raise MissingMethodsError(host, missing)
-
-    @abstractmethod
-    def get_containers(self):
-        """Retreive referenced containers from scene.
-
-        This can be implemented in hosts where referencing can be used.
-
-        Todo:
-            Rename function to something more self explanatory.
-                Suggestion: 'get_containers'
-
-        Returns:
-            list[dict]: Information about loaded containers.
-        """
-
-        pass
-
-    # --- Deprecated method names ---
-    def ls(self):
-        """Deprecated variant of 'get_containers'.
-
-        Todo:
-            Remove when all usages are replaced.
-        """
-
-        return self.get_containers()
-
-
-@six.add_metaclass(ABCMeta)
-class IWorkfileHost:
-    """Implementation requirements to be able use workfile utils and tool."""
-
-    @staticmethod
-    def get_missing_workfile_methods(host):
-        """Look for missing methods on "old type" host implementation.
-
-        Method is used for validation of implemented functions related to
-        workfiles. Checks only existence of methods.
-
-        Args:
-            Union[ModuleType, HostBase]: Object of host where to look for
-                required methods.
-
-        Returns:
-            list[str]: Missing method implementations for workfiles workflow.
-        """
-
-        if isinstance(host, IWorkfileHost):
-            return []
-
-        required = [
-            "open_file",
-            "save_file",
-            "current_file",
-            "has_unsaved_changes",
-            "file_extensions",
-            "work_root",
-        ]
-        missing = []
-        for name in required:
-            if not hasattr(host, name):
-                missing.append(name)
-        return missing
-
-    @staticmethod
-    def validate_workfile_methods(host):
-        """Validate methods of "old type" host for workfiles workflow.
-
-        Args:
-            Union[ModuleType, HostBase]: Object of host to validate.
-
-        Raises:
-            MissingMethodsError: If there are missing methods on host
-                implementation.
-        """
-
-        missing = IWorkfileHost.get_missing_workfile_methods(host)
-        if missing:
-            raise MissingMethodsError(host, missing)
-
-    @abstractmethod
-    def get_workfile_extensions(self):
-        """Extensions that can be used as save.
-
-        Questions:
-            This could potentially use 'HostDefinition'.
-        """
-
-        return []
-
-    @abstractmethod
-    def save_workfile(self, dst_path=None):
-        """Save currently opened scene.
-
-        Args:
-            dst_path (str): Where the current scene should be saved. Or use
-                current path if 'None' is passed.
-        """
-
-        pass
-
-    @abstractmethod
-    def open_workfile(self, filepath):
-        """Open passed filepath in the host.
-
-        Args:
-            filepath (str): Path to workfile.
-        """
-
-        pass
-
-    @abstractmethod
-    def get_current_workfile(self):
-        """Retreive path to current opened file.
-
-        Returns:
-            str: Path to file which is currently opened.
-            None: If nothing is opened.
-        """
-
-        return None
-
-    def workfile_has_unsaved_changes(self):
-        """Currently opened scene is saved.
-
-        Not all hosts can know if current scene is saved because the API of
-        DCC does not support it.
-
-        Returns:
-            bool: True if scene is saved and False if has unsaved
-                modifications.
-            None: Can't tell if workfiles has modifications.
-        """
-
-        return None
-
-    def work_root(self, session):
-        """Modify workdir per host.
-
-        Default implementation keeps workdir untouched.
-
-        Warnings:
-            We must handle this modification with more sofisticated way because
-            this can't be called out of DCC so opening of last workfile
-            (calculated before DCC is launched) is complicated. Also breaking
-            defined work template is not a good idea.
-            Only place where it's really used and can make sense is Maya. There
-            workspace.mel can modify subfolders where to look for maya files.
-
-        Args:
-            session (dict): Session context data.
-
-        Returns:
-            str: Path to new workdir.
-        """
-
-        return session["AVALON_WORKDIR"]
-
-    # --- Deprecated method names ---
-    def file_extensions(self):
-        """Deprecated variant of 'get_workfile_extensions'.
-
-        Todo:
-            Remove when all usages are replaced.
-        """
-        return self.get_workfile_extensions()
-
-    def save_file(self, dst_path=None):
-        """Deprecated variant of 'save_workfile'.
-
-        Todo:
-            Remove when all usages are replaced.
-        """
-
-        self.save_workfile()
-
-    def open_file(self, filepath):
-        """Deprecated variant of 'open_workfile'.
-
-        Todo:
-            Remove when all usages are replaced.
-        """
-
-        return self.open_workfile(filepath)
-
-    def current_file(self):
-        """Deprecated variant of 'get_current_workfile'.
-
-        Todo:
-            Remove when all usages are replaced.
-        """
-
-        return self.get_current_workfile()
-
-    def has_unsaved_changes(self):
-        """Deprecated variant of 'workfile_has_unsaved_changes'.
-
-        Todo:
-            Remove when all usages are replaced.
-        """
-
-        return self.workfile_has_unsaved_changes()
-
-
-class INewPublisher:
-    """Functions related to new creation system in new publisher.
-
-    New publisher is not storing information only about each created instance
-    but also some global data. At this moment are data related only to context
-    publish plugins but that can extend in future.
-    """
-
-    @staticmethod
-    def get_missing_publish_methods(host):
-        """Look for missing methods on "old type" host implementation.
-
-        Method is used for validation of implemented functions related to
-        new publish creation. Checks only existence of methods.
-
-        Args:
-            Union[ModuleType, HostBase]: Host module where to look for
-                required methods.
-
-        Returns:
-            list[str]: Missing method implementations for new publsher
-                workflow.
-        """
-
-        if isinstance(host, INewPublisher):
-            return []
-
-        required = [
-            "get_context_data",
-            "update_context_data",
-        ]
-        missing = []
-        for name in required:
-            if not hasattr(host, name):
-                missing.append(name)
-        return missing
-
-    @staticmethod
-    def validate_publish_methods(host):
-        """Validate implemented methods of "old type" host.
-
-        Args:
-            Union[ModuleType, HostBase]: Host module to validate.
-
-        Raises:
-            MissingMethodsError: If there are missing methods on host
-                implementation.
-        """
-        missing = INewPublisher.get_missing_publish_methods(host)
-        if missing:
-            raise MissingMethodsError(host, missing)
-
-    @abstractmethod
-    def get_context_data(self):
-        """Get global data related to creation-publishing from workfile.
-
-        These data are not related to any created instance but to whole
-        publishing context. Not saving/returning them will cause that each
-        reset of publishing resets all values to default ones.
-
-        Context data can contain information about enabled/disabled publish
-        plugins or other values that can be filled by artist.
-
-        Returns:
-            dict: Context data stored using 'update_context_data'.
-        """
-
-        pass
-
-    @abstractmethod
-    def update_context_data(self, data, changes):
-        """Store global context data to workfile.
-
-        Called when some values in context data has changed.
-
-        Without storing the values in a way that 'get_context_data' would
-        return them will each reset of publishing cause loose of filled values
-        by artist. Best practice is to store values into workfile, if possible.
-
-        Args:
-            data (dict): New data as are.
-            changes (dict): Only data that has been changed. Each value has
-                tuple with '(<old>, <new>)' value.
-        """
-
-        pass
386
openpype/host/interfaces.py
Normal file
@@ -0,0 +1,386 @@
+from abc import ABCMeta, abstractmethod
+import six
+
+
+class MissingMethodsError(ValueError):
+    """Exception raised when a host is missing methods required for a workflow.
+
+    Args:
+        host (HostBase): Host implementation on which methods are missing.
+        missing_methods (list[str]): List of missing methods.
+    """
+
+    def __init__(self, host, missing_methods):
+        joined_missing = ", ".join(
+            ['"{}"'.format(item) for item in missing_methods]
+        )
+        host_name = getattr(host, "name", None)
+        if not host_name:
+            try:
+                host_name = host.__file__.replace("\\", "/").split("/")[-3]
+            except Exception:
+                host_name = str(host)
+        message = (
+            "Host \"{}\" is missing methods {}".format(
+                host_name, joined_missing)
+        )
+        super(MissingMethodsError, self).__init__(message)
+
+
+class ILoadHost:
+    """Implementation requirements to be able to use references of representations.
+
+    The load plugins can do referencing even without implementing the methods
+    here, but switching and removal of containers would not be possible.
+
+    Questions:
+    - Is the container list a dependency of the host or of load plugins?
+    - Should this be directly in HostBase?
+    - How to find out if referencing is available?
+    - Do we need to know that?
+    """
+
+    @staticmethod
+    def get_missing_load_methods(host):
+        """Look for missing methods on an "old type" host implementation.
+
+        The method is used for validation of implemented functions related to
+        loading. It checks only for the existence of methods.
+
+        Args:
+            host (Union[ModuleType, HostBase]): Object of host where to look
+                for required methods.
+
+        Returns:
+            list[str]: Missing method implementations for loading workflow.
+        """
+
+        if isinstance(host, ILoadHost):
+            return []
+
+        required = ["ls"]
+        missing = []
+        for name in required:
+            if not hasattr(host, name):
+                missing.append(name)
+        return missing
+
+    @staticmethod
+    def validate_load_methods(host):
+        """Validate implemented methods of an "old type" host for load workflow.
+
+        Args:
+            host (Union[ModuleType, HostBase]): Object of host to validate.
+
+        Raises:
+            MissingMethodsError: If there are missing methods on the host
+                implementation.
+        """
+        missing = ILoadHost.get_missing_load_methods(host)
+        if missing:
+            raise MissingMethodsError(host, missing)
+
+    @abstractmethod
+    def get_containers(self):
+        """Retrieve referenced containers from scene.
+
+        This can be implemented in hosts where referencing can be used.
+
+        Todo:
+            Rename function to something more self-explanatory.
+                Suggestion: 'get_containers'
+
+        Returns:
+            list[dict]: Information about loaded containers.
+        """
+
+        pass
+
+    # --- Deprecated method names ---
+    def ls(self):
+        """Deprecated variant of 'get_containers'.
+
+        Todo:
+            Remove when all usages are replaced.
+        """
+
+        return self.get_containers()
+
+
+@six.add_metaclass(ABCMeta)
+class IWorkfileHost:
+    """Implementation requirements to be able to use workfile utils and tool."""
+
+    @staticmethod
+    def get_missing_workfile_methods(host):
+        """Look for missing methods on an "old type" host implementation.
+
+        The method is used for validation of implemented functions related to
+        workfiles. It checks only for the existence of methods.
+
+        Args:
+            host (Union[ModuleType, HostBase]): Object of host where to look
+                for required methods.
+
+        Returns:
+            list[str]: Missing method implementations for workfiles workflow.
+        """
+
+        if isinstance(host, IWorkfileHost):
+            return []
+
+        required = [
+            "open_file",
+            "save_file",
+            "current_file",
+            "has_unsaved_changes",
+            "file_extensions",
+            "work_root",
+        ]
+        missing = []
+        for name in required:
+            if not hasattr(host, name):
+                missing.append(name)
+        return missing
+
+    @staticmethod
+    def validate_workfile_methods(host):
+        """Validate methods of an "old type" host for workfiles workflow.
+
+        Args:
+            host (Union[ModuleType, HostBase]): Object of host to validate.
+
+        Raises:
+            MissingMethodsError: If there are missing methods on the host
+                implementation.
+        """
+
+        missing = IWorkfileHost.get_missing_workfile_methods(host)
+        if missing:
+            raise MissingMethodsError(host, missing)
+
+    @abstractmethod
+    def get_workfile_extensions(self):
+        """Extensions that can be used for saving.
+
+        Questions:
+            This could potentially use 'HostDefinition'.
+        """
+
+        return []
+
+    @abstractmethod
+    def save_workfile(self, dst_path=None):
+        """Save currently opened scene.
+
+        Args:
+            dst_path (str): Where the current scene should be saved. Or use
+                current path if 'None' is passed.
+        """
+
+        pass
+
+    @abstractmethod
+    def open_workfile(self, filepath):
+        """Open passed filepath in the host.
+
+        Args:
+            filepath (str): Path to workfile.
+        """
+
+        pass
+
+    @abstractmethod
+    def get_current_workfile(self):
+        """Retrieve path to currently opened file.
+
+        Returns:
+            str: Path to file which is currently opened.
+            None: If nothing is opened.
+        """
+
+        return None
+
+    def workfile_has_unsaved_changes(self):
+        """Whether the currently opened scene is saved.
+
+        Not all hosts can know if the current scene is saved because the API
+        of the DCC does not support it.
+
+        Returns:
+            bool: True if scene is saved, False if it has unsaved
+                modifications.
+            None: Can't tell if the workfile has modifications.
+        """
+
+        return None
+
+    def work_root(self, session):
+        """Modify workdir per host.
+
+        Default implementation keeps workdir untouched.
+
+        Warnings:
+            We must handle this modification in a more sophisticated way
+            because this can't be called outside of the DCC, so opening of
+            the last workfile (calculated before the DCC is launched) is
+            complicated. Also breaking the defined work template is not
+            a good idea.
+            The only place where it's really used and can make sense is Maya.
+            There, workspace.mel can modify subfolders where to look for
+            maya files.
+
+        Args:
+            session (dict): Session context data.
+
+        Returns:
+            str: Path to new workdir.
+        """
+
+        return session["AVALON_WORKDIR"]
+
+    # --- Deprecated method names ---
+    def file_extensions(self):
+        """Deprecated variant of 'get_workfile_extensions'.
+
+        Todo:
+            Remove when all usages are replaced.
+        """
+        return self.get_workfile_extensions()
+
+    def save_file(self, dst_path=None):
+        """Deprecated variant of 'save_workfile'.
+
+        Todo:
+            Remove when all usages are replaced.
+        """
+
+        self.save_workfile(dst_path)
+
+    def open_file(self, filepath):
+        """Deprecated variant of 'open_workfile'.
+
+        Todo:
+            Remove when all usages are replaced.
+        """
+
+        return self.open_workfile(filepath)
+
+    def current_file(self):
+        """Deprecated variant of 'get_current_workfile'.
+
+        Todo:
+            Remove when all usages are replaced.
+        """
+
+        return self.get_current_workfile()
+
+    def has_unsaved_changes(self):
+        """Deprecated variant of 'workfile_has_unsaved_changes'.
+
+        Todo:
+            Remove when all usages are replaced.
+        """
+
+        return self.workfile_has_unsaved_changes()
+
+
+class IPublishHost:
+    """Functions related to the new creation system in the new publisher.
+
+    The new publisher does not store information only about each created
+    instance but also some global data. At this moment the data are related
+    only to context publish plugins, but that can be extended in the future.
+    """
+
+    @staticmethod
+    def get_missing_publish_methods(host):
+        """Look for missing methods on an "old type" host implementation.
+
+        The method is used for validation of implemented functions related to
+        creation for the new publisher. It checks only for the existence of
+        methods.
+
+        Args:
+            host (Union[ModuleType, HostBase]): Host module where to look for
+                required methods.
+
+        Returns:
+            list[str]: Missing method implementations for the new publisher
+                workflow.
+        """
+
+        if isinstance(host, IPublishHost):
+            return []
+
+        required = [
+            "get_context_data",
+            "update_context_data",
+            "get_context_title",
+            "get_current_context",
+        ]
+        missing = []
+        for name in required:
+            if not hasattr(host, name):
+                missing.append(name)
+        return missing
+
+    @staticmethod
+    def validate_publish_methods(host):
+        """Validate implemented methods of an "old type" host.
+
+        Args:
+            host (Union[ModuleType, HostBase]): Host module to validate.
+
+        Raises:
+            MissingMethodsError: If there are missing methods on the host
+                implementation.
+        """
+        missing = IPublishHost.get_missing_publish_methods(host)
+        if missing:
+            raise MissingMethodsError(host, missing)
+
+    @abstractmethod
+    def get_context_data(self):
+        """Get global data related to creation-publishing from workfile.
+
+        These data are not related to any created instance but to the whole
+        publishing context. Not saving/returning them will cause each reset
+        of publishing to reset all values to the default ones.
+
+        Context data can contain information about enabled/disabled publish
+        plugins or other values that can be filled by the artist.
+
+        Returns:
+            dict: Context data stored using 'update_context_data'.
+        """
+
+        pass
+
+    @abstractmethod
+    def update_context_data(self, data, changes):
+        """Store global context data to workfile.
+
+        Called when some values in the context data have changed.
+
+        Without storing the values in a way that 'get_context_data' would
+        return them, each reset of publishing causes loss of the values the
+        artist filled in. Best practice is to store the values into the
+        workfile, if possible.
+
+        Args:
+            data (dict): New data as they are.
+            changes (dict): Only data that have been changed. Each value has
+                a tuple with '(<old>, <new>)' value.
+        """
+
+        pass
+
+
+class INewPublisher(IPublishHost):
+    """Legacy interface replaced by 'IPublishHost'.
+
+    Deprecated:
+        'INewPublisher' is replaced by 'IPublishHost'. Please change your
+        imports.
+        There is no "reasonable" way to mark these classes as deprecated
+        and show a warning about the wrong import. Deprecated since 3.14.*,
+        will be removed in 3.15.*
+    """
+
+    pass
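As a usage sketch of the validation helpers added above: an "old type" host is any plain module exposing the workfile functions, and the static validators report what is missing. The module below is hypothetical, built only to trigger the error.

import types

from openpype.host.interfaces import IWorkfileHost, MissingMethodsError

# Hypothetical "old type" host module exposing only part of the required API.
legacy_host = types.ModuleType("legacy_host")
legacy_host.open_file = lambda filepath: None
legacy_host.save_file = lambda dst_path=None: None

try:
    IWorkfileHost.validate_workfile_methods(legacy_host)
except MissingMethodsError as exc:
    # Reports e.g. missing "current_file", "has_unsaved_changes", ...
    print(exc)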
@@ -1,9 +1,6 @@
-def add_implementation_envs(env, _app):
-    """Modify environments to contain all required for implementation."""
-    defaults = {
-        "OPENPYPE_LOG_NO_COLORS": "True",
-        "WEBSOCKET_URL": "ws://localhost:8097/ws/"
-    }
-    for key, value in defaults.items():
-        if not env.get(key):
-            env[key] = value
+from .addon import AfterEffectsAddon
+
+
+__all__ = (
+    "AfterEffectsAddon",
+)
22
openpype/hosts/aftereffects/addon.py
Normal file
@@ -0,0 +1,22 @@
+from openpype.modules import OpenPypeModule, IHostAddon
+
+
+class AfterEffectsAddon(OpenPypeModule, IHostAddon):
+    name = "aftereffects"
+    host_name = "aftereffects"
+
+    def initialize(self, module_settings):
+        self.enabled = True
+
+    def add_implementation_envs(self, env, _app):
+        """Modify environments to contain all required for implementation."""
+        defaults = {
+            "OPENPYPE_LOG_NO_COLORS": "True",
+            "WEBSOCKET_URL": "ws://localhost:8097/ws/"
+        }
+        for key, value in defaults.items():
+            if not env.get(key):
+                env[key] = value
+
+    def get_workfile_extensions(self):
+        return [".aep"]
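The `add_implementation_envs` pattern above fills only keys that are not already set, so user or studio overrides survive. A standalone sketch of that behavior with a hypothetical override value:

defaults = {
    "OPENPYPE_LOG_NO_COLORS": "True",
    "WEBSOCKET_URL": "ws://localhost:8097/ws/",
}
# Hypothetical pre-existing environment with a studio override:
env = {"WEBSOCKET_URL": "ws://studio-server:9000/ws/"}
for key, value in defaults.items():
    if not env.get(key):
        env[key] = value
print(env["WEBSOCKET_URL"])  # -> ws://studio-server:9000/ws/ (override kept)
print(env["OPENPYPE_LOG_NO_COLORS"])  # -> True (default filled in)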
@@ -10,8 +10,9 @@ from wsrpc_aiohttp import (
     WebSocketAsync
 )

-from Qt import QtCore
+from qtpy import QtCore

+from openpype.lib import Logger
 from openpype.pipeline import legacy_io
 from openpype.tools.utils import host_tools
 from openpype.tools.adobe_webserver.app import WebServerTool
@@ -84,8 +85,6 @@ class ProcessLauncher(QtCore.QObject):
     @property
     def log(self):
         if self._log is None:
-            from openpype.api import Logger
-
             self._log = Logger.get_logger("{}-launcher".format(
                 self.route_name))
         return self._log
@@ -1,13 +1,16 @@
 import os
 import sys
+import re
+import json
 import contextlib
 import traceback
 import logging
+from functools import partial

-from Qt import QtWidgets
+from qtpy import QtWidgets

 from openpype.pipeline import install_host
-from openpype.lib.remote_publish import headless_publish
+from openpype.modules import ModulesManager

 from openpype.tools.utils import host_tools
 from .launch_logic import ProcessLauncher, get_stub
@@ -35,10 +38,18 @@ def main(*subprocess_args):
     launcher.start()

     if os.environ.get("HEADLESS_PUBLISH"):
-        launcher.execute_in_main_thread(lambda: headless_publish(
-            log,
-            "CloseAE",
-            os.environ.get("IS_TEST")))
+        manager = ModulesManager()
+        webpublisher_addon = manager["webpublisher"]
+
+        launcher.execute_in_main_thread(
+            partial(
+                webpublisher_addon.headless_publish,
+                log,
+                "CloseAE",
+                os.environ.get("IS_TEST")
+            )
+        )

     elif os.environ.get("AVALON_PHOTOSHOP_WORKFILES_ON_LAUNCH", True):
         save = False
         if os.getenv("WORKFILES_SAVE_AS"):
@@ -68,3 +79,57 @@ def get_extension_manifest_path():
         "CSXS",
         "manifest.xml"
     )
+
+
+def get_unique_layer_name(layers, name):
+    """
+    Gets all layer names and if 'name' is present in them, increases
+    the suffix by 1 (eg. creates a unique layer name - for Loader)
+    Args:
+        layers (list): of strings, names only
+        name (string): checked value
+
+    Returns:
+        (string): name_00X (without version)
+    """
+    names = {}
+    for layer in layers:
+        layer_name = re.sub(r'_\d{3}$', '', layer)
+        if layer_name in names.keys():
+            names[layer_name] = names[layer_name] + 1
+        else:
+            names[layer_name] = 1
+    occurrences = names.get(name, 0)
+
+    return "{}_{:0>3d}".format(name, occurrences + 1)
+
+
+def get_background_layers(file_url):
+    """
+    Pulls file names from the background json file, enriched with the folder
+    url so AE is able to import the files.
+
+    Order is important, follows order in json.
+
+    Args:
+        file_url (str): abs url of background json
+
+    Returns:
+        (list): of abs paths to images
+    """
+    with open(file_url) as json_file:
+        data = json.load(json_file)
+
+    layers = list()
+    bg_folder = os.path.dirname(file_url)
+    for child in data['children']:
+        if child.get("filename"):
+            layers.append(os.path.join(bg_folder, child.get("filename")).
+                          replace("\\", "/"))
+        else:
+            for layer in child['children']:
+                if layer.get("filename"):
+                    layers.append(os.path.join(bg_folder,
+                                               layer.get("filename")).
+                                  replace("\\", "/"))
+    return layers
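A usage sketch of `get_unique_layer_name` as added above (assuming the new `openpype.hosts.aftereffects.api.lib` module is importable; the layer names are illustrative):

from openpype.hosts.aftereffects.api.lib import get_unique_layer_name

existing = ["bg_001", "bg_002", "char_001"]
# "bg" already occurs twice after stripping the numeric suffix:
print(get_unique_layer_name(existing, "bg"))    # -> bg_003
# Unknown names start at 001:
print(get_unique_layer_name(existing, "prop"))  # -> prop_001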
@@ -1,11 +1,10 @@
 import os

-from Qt import QtWidgets
+from qtpy import QtWidgets

 import pyblish.api

-from openpype import lib
-from openpype.api import Logger
+from openpype.lib import Logger, register_event_callback
 from openpype.pipeline import (
     register_loader_plugin_path,
     register_creator_plugin_path,
@@ -16,9 +15,8 @@ from openpype.pipeline import (
 )
 from openpype.pipeline.load import any_outdated_containers
 import openpype.hosts.aftereffects
-from openpype.lib import register_event_callback

-from .launch_logic import get_stub
+from .launch_logic import get_stub, ConnectionNotEstablishedYet

 log = Logger.get_logger(__name__)
@@ -111,7 +109,7 @@ def ls():
     """
     try:
         stub = get_stub()  # only after AfterEffects is up
-    except lib.ConnectionNotEstablishedYet:
+    except ConnectionNotEstablishedYet:
         print("Not connected yet, ignoring")
         return
@@ -284,7 +282,7 @@ def _get_stub():
     """
     try:
        stub = get_stub()  # only after Photoshop is up
-    except lib.ConnectionNotEstablishedYet:
+    except ConnectionNotEstablishedYet:
         print("Not connected yet, ignoring")
         return
@@ -1,12 +1,11 @@
 """Host API required Work Files tool"""
 import os

-from openpype.pipeline import HOST_WORKFILE_EXTENSIONS
-
 from .launch_logic import get_stub


 def file_extensions():
-    return HOST_WORKFILE_EXTENSIONS["aftereffects"]
+    return [".aep"]


 def has_unsaved_changes():
@@ -11,6 +11,8 @@ class AEWorkfileCreator(AutoCreator):
     identifier = "workfile"
     family = "workfile"

+    default_variant = "Main"
+
     def get_instance_attr_defs(self):
         return []
@@ -35,7 +37,6 @@ class AEWorkfileCreator(AutoCreator):
             existing_instance = instance
             break

-        variant = ''
         project_name = legacy_io.Session["AVALON_PROJECT"]
         asset_name = legacy_io.Session["AVALON_ASSET"]
         task_name = legacy_io.Session["AVALON_TASK"]
@@ -44,15 +45,17 @@ class AEWorkfileCreator(AutoCreator):
         if existing_instance is None:
             asset_doc = get_asset_by_name(project_name, asset_name)
             subset_name = self.get_subset_name(
-                variant, task_name, asset_doc, project_name, host_name
+                self.default_variant, task_name, asset_doc,
+                project_name, host_name
             )
             data = {
                 "asset": asset_name,
                 "task": task_name,
-                "variant": variant
+                "variant": self.default_variant
             }
             data.update(self.get_dynamic_data(
-                variant, task_name, asset_doc, project_name, host_name
+                self.default_variant, task_name, asset_doc,
+                project_name, host_name
             ))

             new_instance = CreatedInstance(
@@ -69,7 +72,9 @@ class AEWorkfileCreator(AutoCreator):
         ):
             asset_doc = get_asset_by_name(project_name, asset_name)
             subset_name = self.get_subset_name(
-                variant, task_name, asset_doc, project_name, host_name
+                self.default_variant, task_name, asset_doc,
+                project_name, host_name
             )
             existing_instance["asset"] = asset_name
             existing_instance["task"] = task_name
+            existing_instance["subset"] = subset_name
@@ -1,14 +1,14 @@
 import re

-from openpype.lib import (
-    get_background_layers,
-    get_unique_layer_name
-)
 from openpype.pipeline import get_representation_path
 from openpype.hosts.aftereffects.api import (
     AfterEffectsLoader,
     containerise
 )
+from openpype.hosts.aftereffects.api.lib import (
+    get_background_layers,
+    get_unique_layer_name,
+)


 class BackgroundLoader(AfterEffectsLoader):
@@ -1,12 +1,11 @@
 import re

-from openpype import lib
-
 from openpype.pipeline import get_representation_path
 from openpype.hosts.aftereffects.api import (
     AfterEffectsLoader,
     containerise
 )
+from openpype.hosts.aftereffects.api.lib import get_unique_layer_name


 class FileLoader(AfterEffectsLoader):
@@ -28,7 +27,7 @@ class FileLoader(AfterEffectsLoader):
         stub = self.get_stub()
         layers = stub.get_items(comps=True, folders=True, footages=True)
         existing_layers = [layer.name for layer in layers]
-        comp_name = lib.get_unique_layer_name(
+        comp_name = get_unique_layer_name(
             existing_layers, "{}_{}".format(context["asset"]["name"], name))

         import_options = {}
@@ -87,7 +86,7 @@ class FileLoader(AfterEffectsLoader):
         if namespace_from_container != layer_name:
             layers = stub.get_items(comps=True)
             existing_layers = [layer.name for layer in layers]
-            layer_name = lib.get_unique_layer_name(
+            layer_name = get_unique_layer_name(
                 existing_layers,
                 "{}_{}".format(context["asset"], context["subset"]))
         else:  # switching version - keep same name
@@ -1,8 +1,8 @@
 import os

 import pyblish.api
-from openpype.lib import get_subset_name_with_asset_doc
 from openpype.pipeline import legacy_io
+from openpype.pipeline.create import get_subset_name


 class CollectWorkfile(pyblish.api.ContextPlugin):
@@ -11,6 +11,8 @@ class CollectWorkfile(pyblish.api.ContextPlugin):
     label = "Collect After Effects Workfile Instance"
     order = pyblish.api.CollectorOrder + 0.1

+    default_variant = "Main"
+
     def process(self, context):
         existing_instance = None
         for instance in context:
@@ -69,13 +71,14 @@ class CollectWorkfile(pyblish.api.ContextPlugin):

         # workfile instance
         family = "workfile"
-        subset = get_subset_name_with_asset_doc(
+        subset = get_subset_name(
             family,
-            "",
+            self.default_variant,
             context.data["anatomyData"]["task"]["name"],
             context.data["assetEntity"],
             context.data["anatomyData"]["project"]["name"],
-            host_name=context.data["hostName"]
+            host_name=context.data["hostName"],
+            project_settings=context.data["project_settings"]
         )
         # Create instance
         instance = context.create_instance(subset)
@@ -2,14 +2,18 @@ import os
 import sys
 import six

-import openpype.api
+from openpype.lib import (
+    get_ffmpeg_tool_path,
+    run_subprocess,
+)
+from openpype.pipeline import publish
 from openpype.hosts.aftereffects.api import get_stub


-class ExtractLocalRender(openpype.api.Extractor):
+class ExtractLocalRender(publish.Extractor):
     """Render RenderQueue locally."""

-    order = openpype.api.Extractor.order - 0.47
+    order = publish.Extractor.order - 0.47
     label = "Extract Local Render"
     hosts = ["aftereffects"]
     families = ["renderLocal", "render.local"]
@@ -53,7 +57,7 @@ class ExtractLocalRender(openpype.api.Extractor):

         instance.data["representations"] = [repre_data]

-        ffmpeg_path = openpype.lib.get_ffmpeg_tool_path("ffmpeg")
+        ffmpeg_path = get_ffmpeg_tool_path("ffmpeg")
         # Generate thumbnail.
         thumbnail_path = os.path.join(staging_dir, "thumbnail.jpg")
@@ -66,7 +70,7 @@ class ExtractLocalRender(openpype.api.Extractor):
         ]
         self.log.debug("Thumbnail args:: {}".format(args))
         try:
-            output = openpype.lib.run_subprocess(args)
+            output = run_subprocess(args)
         except TypeError:
             self.log.warning("Error in creating thumbnail")
             six.reraise(*sys.exc_info())
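For reference, a minimal sketch of the thumbnail extraction this plugin performs, using the helpers imported above (`get_ffmpeg_tool_path`, `run_subprocess`); the paths are hypothetical and the exact argument list the plugin builds is not shown in this hunk:

import os

from openpype.lib import get_ffmpeg_tool_path, run_subprocess

ffmpeg_path = get_ffmpeg_tool_path("ffmpeg")
staging_dir = "/tmp/staging"                   # hypothetical
source_path = "/tmp/staging/render.0001.png"   # hypothetical

thumbnail_path = os.path.join(staging_dir, "thumbnail.jpg")
args = [
    ffmpeg_path,
    "-y",              # overwrite existing output
    "-i", source_path,
    "-frames:v", "1",  # grab a single frame
    thumbnail_path,
]
run_subprocess(args)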
@@ -1,13 +1,13 @@
 import pyblish.api

-import openpype.api
+from openpype.pipeline import publish
 from openpype.hosts.aftereffects.api import get_stub


 class ExtractSaveScene(pyblish.api.ContextPlugin):
     """Save scene before extraction."""

-    order = openpype.api.Extractor.order - 0.48
+    order = publish.Extractor.order - 0.48
     label = "Extract Save Scene"
     hosts = ["aftereffects"]
@@ -1,6 +1,6 @@
 import pyblish.api
-from openpype.action import get_errored_plugins_from_data
 from openpype.lib import version_up
+from openpype.pipeline.publish import get_errored_plugins_from_context

 from openpype.hosts.aftereffects.api import get_stub
@@ -18,7 +18,7 @@ class IncrementWorkfile(pyblish.api.InstancePlugin):
     optional = True

     def process(self, instance):
-        errored_plugins = get_errored_plugins_from_data(instance.context)
+        errored_plugins = get_errored_plugins_from_context(instance.context)
         if errored_plugins:
             raise RuntimeError(
                 "Skipping incrementing current file because publishing failed."
@@ -1,8 +1,8 @@
-import openpype.api
+from openpype.pipeline import publish
 from openpype.hosts.aftereffects.api import get_stub


-class RemovePublishHighlight(openpype.api.Extractor):
+class RemovePublishHighlight(publish.Extractor):
     """Clean utf characters which are not working in DL

     Published compositions are marked with unicode icon which causes
@@ -10,7 +10,7 @@ class RemovePublishHighlight(openpype.api.Extractor):
     rendering, add it later back to avoid confusion.
     """

-    order = openpype.api.Extractor.order - 0.49  # just before save
+    order = publish.Extractor.order - 0.49  # just before save
     label = "Clean render comp"
     hosts = ["aftereffects"]
     families = ["render.farm"]
@@ -1,9 +1,9 @@
 import pyblish.api

-import openpype.api
-from openpype.pipeline import (
+from openpype.pipeline import legacy_io
+from openpype.pipeline.publish import (
+    ValidateContentsOrder,
     PublishXmlValidationError,
-    legacy_io,
 )
 from openpype.hosts.aftereffects.api import get_stub
@@ -50,7 +50,7 @@ class ValidateInstanceAsset(pyblish.api.InstancePlugin):
     label = "Validate Instance Asset"
     hosts = ["aftereffects"]
     actions = [ValidateInstanceAssetRepair]
-    order = openpype.api.ValidateContentsOrder
+    order = ValidateContentsOrder

     def process(self, instance):
         instance_asset = instance.data["asset"]
@@ -1,52 +1,6 @@
-import os
-
-
-def add_implementation_envs(env, _app):
-    """Modify environments to contain all required for implementation."""
-    # Prepare path to implementation script
-    implementation_user_script_path = os.path.join(
-        os.path.dirname(os.path.abspath(__file__)),
-        "blender_addon"
-    )
-
-    # Add blender implementation script path to PYTHONPATH
-    python_path = env.get("PYTHONPATH") or ""
-    python_path_parts = [
-        path
-        for path in python_path.split(os.pathsep)
-        if path
-    ]
-    python_path_parts.insert(0, implementation_user_script_path)
-    env["PYTHONPATH"] = os.pathsep.join(python_path_parts)
-
-    # Modify Blender user scripts path
-    previous_user_scripts = set()
-    # Implementation path is added to set for easier paths check inside loops
-    # - will be removed at the end
-    previous_user_scripts.add(implementation_user_script_path)
-
-    openpype_blender_user_scripts = (
-        env.get("OPENPYPE_BLENDER_USER_SCRIPTS") or ""
-    )
-    for path in openpype_blender_user_scripts.split(os.pathsep):
-        if path:
-            previous_user_scripts.add(os.path.normpath(path))
-
-    blender_user_scripts = env.get("BLENDER_USER_SCRIPTS") or ""
-    for path in blender_user_scripts.split(os.pathsep):
-        if path:
-            previous_user_scripts.add(os.path.normpath(path))
-
-    # Remove implementation path from user script paths as is set to
-    # `BLENDER_USER_SCRIPTS`
-    previous_user_scripts.remove(implementation_user_script_path)
-    env["BLENDER_USER_SCRIPTS"] = implementation_user_script_path
-
-    # Set custom user scripts env
-    env["OPENPYPE_BLENDER_USER_SCRIPTS"] = os.pathsep.join(
-        previous_user_scripts
-    )
-
-    # Define Qt binding if not defined
-    if not env.get("QT_PREFERRED_BINDING"):
-        env["QT_PREFERRED_BINDING"] = "PySide2"
+from .addon import BlenderAddon
+
+
+__all__ = (
+    "BlenderAddon",
+)
72
openpype/hosts/blender/addon.py
Normal file
@@ -0,0 +1,72 @@
+import os
+from openpype.modules import OpenPypeModule, IHostAddon
+
+BLENDER_ROOT_DIR = os.path.dirname(os.path.abspath(__file__))
+
+
+class BlenderAddon(OpenPypeModule, IHostAddon):
+    name = "blender"
+    host_name = "blender"
+
+    def initialize(self, module_settings):
+        self.enabled = True
+
+    def add_implementation_envs(self, env, _app):
+        """Modify environments to contain all required for implementation."""
+        # Prepare path to implementation script
+        implementation_user_script_path = os.path.join(
+            BLENDER_ROOT_DIR,
+            "blender_addon"
+        )
+
+        # Add blender implementation script path to PYTHONPATH
+        python_path = env.get("PYTHONPATH") or ""
+        python_path_parts = [
+            path
+            for path in python_path.split(os.pathsep)
+            if path
+        ]
+        python_path_parts.insert(0, implementation_user_script_path)
+        env["PYTHONPATH"] = os.pathsep.join(python_path_parts)
+
+        # Modify Blender user scripts path
+        previous_user_scripts = set()
+        # Implementation path is added to set for easier paths check inside
+        # loops - will be removed at the end
+        previous_user_scripts.add(implementation_user_script_path)
+
+        openpype_blender_user_scripts = (
+            env.get("OPENPYPE_BLENDER_USER_SCRIPTS") or ""
+        )
+        for path in openpype_blender_user_scripts.split(os.pathsep):
+            if path:
+                previous_user_scripts.add(os.path.normpath(path))
+
+        blender_user_scripts = env.get("BLENDER_USER_SCRIPTS") or ""
+        for path in blender_user_scripts.split(os.pathsep):
+            if path:
+                previous_user_scripts.add(os.path.normpath(path))
+
+        # Remove implementation path from user script paths as is set to
+        # `BLENDER_USER_SCRIPTS`
+        previous_user_scripts.remove(implementation_user_script_path)
+        env["BLENDER_USER_SCRIPTS"] = implementation_user_script_path
+
+        # Set custom user scripts env
+        env["OPENPYPE_BLENDER_USER_SCRIPTS"] = os.pathsep.join(
+            previous_user_scripts
+        )
+
+        # Define Qt binding if not defined
+        if not env.get("QT_PREFERRED_BINDING"):
+            env["QT_PREFERRED_BINDING"] = "PySide2"
+
+    def get_launch_hook_paths(self, app):
+        if app.host_name != self.host_name:
+            return []
+        return [
+            os.path.join(BLENDER_ROOT_DIR, "hooks")
+        ]
+
+    def get_workfile_extensions(self):
+        return [".blend"]
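The PYTHONPATH handling above prepends the implementation scripts while keeping existing entries and dropping empty ones. A standalone sketch of the same pattern with hypothetical paths:

import os

implementation_path = "/opt/openpype/hosts/blender/blender_addon"  # hypothetical
env = {"PYTHONPATH": "/studio/tools" + os.pathsep + "/studio/shared"}

python_path_parts = [
    path
    for path in (env.get("PYTHONPATH") or "").split(os.pathsep)
    if path  # drop empty entries from leading/trailing separators
]
python_path_parts.insert(0, implementation_path)
env["PYTHONPATH"] = os.pathsep.join(python_path_parts)
print(env["PYTHONPATH"])
# on POSIX -> /opt/openpype/hosts/blender/blender_addon:/studio/tools:/studio/shared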
@@ -2,7 +2,7 @@ import bpy

 import pyblish.api

-from openpype.api import get_errored_instances_from_context
+from openpype.pipeline.publish import get_errored_instances_from_context


 class SelectInvalidAction(pyblish.api.Action):
@@ -6,7 +6,7 @@ from typing import Dict, List, Union

 import bpy
 import addon_utils
-from openpype.api import Logger
+from openpype.lib import Logger

 from . import pipeline
@@ -234,7 +234,7 @@ def lsattrs(attrs: Dict) -> List:
 def read(node: bpy.types.bpy_struct_meta_idprop):
     """Return user-defined attributes from `node`"""

-    data = dict(node.get(pipeline.AVALON_PROPERTY))
+    data = dict(node.get(pipeline.AVALON_PROPERTY, {}))

     # Ignore hidden/internal data
     data = {
@@ -10,7 +10,7 @@ from pathlib import Path
 from types import ModuleType
 from typing import Dict, List, Optional, Union

-from Qt import QtWidgets, QtCore
+from qtpy import QtWidgets, QtCore

 import bpy
 import bpy.utils.previews
@@ -26,7 +26,7 @@ PREVIEW_COLLECTIONS: Dict = dict()
 # This seems like a good value to keep the Qt app responsive and doesn't slow
 # down Blender. At least on macOS the interface of Blender gets very laggy if
 # you make it smaller.
-TIMER_INTERVAL: float = 0.01
+TIMER_INTERVAL: float = 0.01 if platform.system() == "Windows" else 0.1
@@ -164,6 +164,12 @@ def _process_app_events() -> Optional[float]:
             dialog.setDetailedText(detail)
             dialog.exec_()

+    # Refresh Manager
+    if GlobalClass.app:
+        manager = GlobalClass.app.get_window("WM_OT_avalon_manager")
+        if manager:
+            manager.refresh()
+
     if not GlobalClass.is_windows:
         if OpenFileCacher.opening_file:
             return TIMER_INTERVAL
@@ -192,10 +198,11 @@ class LaunchQtApp(bpy.types.Operator):
         self._app = BlenderApplication.get_app()
         GlobalClass.app = self._app

-        bpy.app.timers.register(
-            _process_app_events,
-            persistent=True
-        )
+        if not bpy.app.timers.is_registered(_process_app_events):
+            bpy.app.timers.register(
+                _process_app_events,
+                persistent=True
+            )

     def execute(self, context):
         """Execute the operator.
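The new `is_registered` guard prevents a duplicate timer when the operator is invoked more than once. A minimal sketch of the idiom (this only runs inside Blender; `_tick` is a stand-in for `_process_app_events`):

import bpy

def _tick():
    # Stand-in for _process_app_events; the return value is the delay in
    # seconds until the next call.
    return 0.1

# Guard against registering the same timer twice on repeated invocations.
if not bpy.app.timers.is_registered(_tick):
    bpy.app.timers.register(_tick, persistent=True)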
@@ -20,8 +20,8 @@ from openpype.pipeline import (
     deregister_creator_plugin_path,
     AVALON_CONTAINER_ID,
 )
-from openpype.api import Logger
 from openpype.lib import (
+    Logger,
     register_event_callback,
     emit_event
 )
@@ -5,8 +5,6 @@ from typing import List, Optional

 import bpy

-from openpype.pipeline import HOST_WORKFILE_EXTENSIONS
-

 class OpenFileCacher:
     """Store information about opening file.

@@ -78,7 +76,7 @@ def has_unsaved_changes() -> bool:
 def file_extensions() -> List[str]:
     """Return the supported file extensions for Blender scene files."""

-    return HOST_WORKFILE_EXTENSIONS["blender"]
+    return [".blend"]


 def work_root(session: dict) -> str:
@@ -1,4 +1,10 @@
 from openpype.pipeline import install_host
 from openpype.hosts.blender import api

-install_host(api)
+
+def register():
+    install_host(api)
+
+
+def unregister():
+    pass
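Moving `install_host(api)` out of module scope matters because Blender invokes a script's module-level `register()`/`unregister()` pair when it loads the script as an add-on, so installation now happens at a well-defined time instead of as an import side effect. A hedged sketch of the same entry-point convention (names are illustrative):

    def register():
        # Called by Blender when the script/add-on is enabled.
        print("pipeline installed")

    def unregister():
        # Called when it is disabled; keep the pair symmetric.
        print("pipeline removed")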
@@ -32,11 +32,6 @@ class CreateCamera(plugin.Creator):
         subset = self.data["subset"]
         name = plugin.asset_name(asset, subset)

-        camera = bpy.data.cameras.new(subset)
-        camera_obj = bpy.data.objects.new(subset, camera)
-
-        instances.objects.link(camera_obj)
-
         asset_group = bpy.data.objects.new(name=name, object_data=None)
         asset_group.empty_display_type = 'SINGLE_ARROW'
         instances.objects.link(asset_group)
@@ -53,6 +48,11 @@ class CreateCamera(plugin.Creator):
                 bpy.ops.object.parent_set(keep_transform=True)
         else:
             plugin.deselect_all()
+            camera = bpy.data.cameras.new(subset)
+            camera_obj = bpy.data.objects.new(subset, camera)
+
+            instances.objects.link(camera_obj)
+
             camera_obj.select_set(True)
             asset_group.select_set(True)
             bpy.context.view_layer.objects.active = asset_group
openpype/hosts/blender/plugins/load/import_workfile.py (new file, 82 lines)
@@ -0,0 +1,82 @@
+import bpy
+
+from openpype.hosts.blender.api import plugin
+
+
+def append_workfile(context, fname, do_import):
+    asset = context['asset']['name']
+    subset = context['subset']['name']
+
+    group_name = plugin.asset_name(asset, subset)
+
+    # We need to preserve the original names of the scenes, otherwise,
+    # if there are duplicate names in the current workfile, the imported
+    # scenes will be renamed by Blender to avoid conflicts.
+    original_scene_names = []
+
+    with bpy.data.libraries.load(fname) as (data_from, data_to):
+        for attr in dir(data_to):
+            if attr == "scenes":
+                for scene in data_from.scenes:
+                    original_scene_names.append(scene)
+            setattr(data_to, attr, getattr(data_from, attr))
+
+    current_scene = bpy.context.scene
+
+    for scene, s_name in zip(data_to.scenes, original_scene_names):
+        scene.name = f"{group_name}_{s_name}"
+        if do_import:
+            collection = bpy.data.collections.new(f"{group_name}_{s_name}")
+            for obj in scene.objects:
+                collection.objects.link(obj)
+            current_scene.collection.children.link(collection)
+            for coll in scene.collection.children:
+                collection.children.link(coll)
+
+
+class AppendBlendLoader(plugin.AssetLoader):
+    """Append workfile in Blender (unmanaged)
+
+    Warning:
+        The loaded content will be unmanaged and is *not* visible in the
+        scene inventory. It's purely intended to merge content into your scene
+        so you could also use it as a new base.
+    """
+
+    representations = ["blend"]
+    families = ["*"]
+
+    label = "Append Workfile"
+    order = 9
+    icon = "arrow-circle-down"
+    color = "#775555"
+
+    def load(self, context, name=None, namespace=None, data=None):
+        append_workfile(context, self.fname, False)
+
+        # We do not containerize imported content, it remains unmanaged
+        return
+
+
+class ImportBlendLoader(plugin.AssetLoader):
+    """Import workfile in the current Blender scene (unmanaged)
+
+    Warning:
+        The loaded content will be unmanaged and is *not* visible in the
+        scene inventory. It's purely intended to merge content into your scene
+        so you could also use it as a new base.
+    """
+
+    representations = ["blend"]
+    families = ["*"]
+
+    label = "Import Workfile"
+    order = 9
+    icon = "arrow-circle-down"
+    color = "#775555"
+
+    def load(self, context, name=None, namespace=None, data=None):
+        append_workfile(context, self.fname, True)
+
+        # We do not containerize imported content, it remains unmanaged
+        return
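`bpy.data.libraries.load()` used by `append_workfile()` is Blender's stock API for pulling datablocks out of another .blend file: names assigned onto `data_to` inside the context manager are loaded into real datablocks when it exits. A minimal sketch appending only scenes (the path is hypothetical):

    import bpy

    path = "/path/to/workfile.blend"  # hypothetical input file

    with bpy.data.libraries.load(path) as (data_from, data_to):
        # data_from holds names found in the source file; assigning
        # them to data_to schedules those datablocks for loading.
        data_to.scenes = list(data_from.scenes)

    for scene in data_to.scenes:  # now actual bpy.types.Scene objects
        print("appended:", scene.name)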
@@ -6,12 +6,12 @@ from typing import Dict, List, Optional

 import bpy

-from openpype import lib
 from openpype.pipeline import (
     legacy_create,
     get_representation_path,
     AVALON_CONTAINER_ID,
 )
+from openpype.pipeline.create import get_legacy_creator_by_name
 from openpype.hosts.blender.api import plugin
 from openpype.hosts.blender.api.pipeline import (
     AVALON_CONTAINERS,
@@ -48,8 +48,14 @@ class BlendLayoutLoader(plugin.AssetLoader):
                 bpy.data.objects.remove(obj)

     def _remove_asset_and_library(self, asset_group):
+        if not asset_group.get(AVALON_PROPERTY):
+            return
+
         libpath = asset_group.get(AVALON_PROPERTY).get('libpath')

+        if not libpath:
+            return
+
         # Check how many assets use the same library
         count = 0
         for obj in bpy.data.collections.get(AVALON_CONTAINERS).all_objects:
@@ -63,10 +69,12 @@ class BlendLayoutLoader(plugin.AssetLoader):
         # If it is the last object to use that library, remove it
         if count == 1:
             library = bpy.data.libraries.get(bpy.path.basename(libpath))
-            bpy.data.libraries.remove(library)
+            if library:
+                bpy.data.libraries.remove(library)

     def _process(
-        self, libpath, asset_group, group_name, asset, representation, actions
+        self, libpath, asset_group, group_name, asset, representation,
+        actions, anim_instances
     ):
         with bpy.data.libraries.load(
             libpath, link=True, relative=False
@@ -140,12 +148,12 @@ class BlendLayoutLoader(plugin.AssetLoader):
                 elif local_obj.type == 'ARMATURE':
                     plugin.prepare_data(local_obj.data)

-                    if action is not None:
+                    if action:
                         if local_obj.animation_data is None:
                             local_obj.animation_data_create()
                         local_obj.animation_data.action = action
                     elif (local_obj.animation_data and
-                          local_obj.animation_data.action is not None):
+                          local_obj.animation_data.action):
                         plugin.prepare_data(
                             local_obj.animation_data.action)

@@ -157,19 +165,26 @@ class BlendLayoutLoader(plugin.AssetLoader):
                         t.id = local_obj

                 elif local_obj.type == 'EMPTY':
-                    creator_plugin = lib.get_creator_by_name("CreateAnimation")
-                    if not creator_plugin:
-                        raise ValueError("Creator plugin \"CreateAnimation\" was "
-                                         "not found.")
+                    if (not anim_instances or
+                            (anim_instances and
+                             local_obj.name not in anim_instances.keys())):
+                        avalon = local_obj.get(AVALON_PROPERTY)
+                        if avalon and avalon.get('family') == 'rig':
+                            creator_plugin = get_legacy_creator_by_name(
+                                "CreateAnimation")
+                            if not creator_plugin:
+                                raise ValueError(
+                                    "Creator plugin \"CreateAnimation\" was "
+                                    "not found.")

                     legacy_create(
                         creator_plugin,
                         name=local_obj.name.split(':')[-1] + "_animation",
                         asset=asset,
                         options={"useSelection": False,
                                  "asset_group": local_obj},
                         data={"dependencies": representation}
                     )

                 if not local_obj.get(AVALON_PROPERTY):
                     local_obj[AVALON_PROPERTY] = dict()
@@ -272,7 +287,8 @@ class BlendLayoutLoader(plugin.AssetLoader):
         avalon_container.objects.link(asset_group)

         objects = self._process(
-            libpath, asset_group, group_name, asset, representation, None)
+            libpath, asset_group, group_name, asset, representation,
+            None, None)

         for child in asset_group.children:
             if child.get(AVALON_PROPERTY):
@@ -352,10 +368,20 @@ class BlendLayoutLoader(plugin.AssetLoader):
             return

         actions = {}
+        anim_instances = {}

         for obj in asset_group.children:
             obj_meta = obj.get(AVALON_PROPERTY)
             if obj_meta.get('family') == 'rig':
+                # Get animation instance
+                collections = list(obj.users_collection)
+                for c in collections:
+                    avalon = c.get(AVALON_PROPERTY)
+                    if avalon and avalon.get('family') == 'animation':
+                        anim_instances[obj.name] = c.name
+                        break
+
+                # Get armature's action
                 rig = None
                 for child in obj.children:
                     if child.type == 'ARMATURE':
@@ -384,9 +410,26 @@ class BlendLayoutLoader(plugin.AssetLoader):
         # If it is the last object to use that library, remove it
         if count == 1:
             library = bpy.data.libraries.get(bpy.path.basename(group_libpath))
-            bpy.data.libraries.remove(library)
+            if library:
+                bpy.data.libraries.remove(library)

-        self._process(str(libpath), asset_group, object_name, actions)
+        asset = container.get("asset_name").split("_")[0]
+
+        self._process(
+            str(libpath), asset_group, object_name, asset,
+            str(representation.get("_id")), actions, anim_instances
+        )
+
+        # Link the new objects to the animation collection
+        for inst in anim_instances.keys():
+            try:
+                obj = bpy.data.objects[inst]
+                bpy.data.collections[anim_instances[inst]].objects.link(obj)
+            except KeyError:
+                self.log.info(f"Object {inst} does not exist anymore.")
+                coll = bpy.data.collections.get(anim_instances[inst])
+                if (coll):
+                    bpy.data.collections.remove(coll)

         avalon_container = bpy.data.collections.get(AVALON_CONTAINERS)
         for child in asset_group.children:
@@ -118,7 +118,7 @@ class JsonLayoutLoader(plugin.AssetLoader):
         # Camera creation when loading a layout is not necessary for now,
         # but the code is worth keeping in case we need it in the future.
         # # Create the camera asset and the camera instance
-        # creator_plugin = lib.get_creator_by_name("CreateCamera")
+        # creator_plugin = get_legacy_creator_by_name("CreateCamera")
         # if not creator_plugin:
         #     raise ValueError("Creator plugin \"CreateCamera\" was "
         #                      "not found.")
@@ -6,12 +6,12 @@ from typing import Dict, List, Optional

 import bpy

-from openpype import lib
 from openpype.pipeline import (
     legacy_create,
     get_representation_path,
     AVALON_CONTAINER_ID,
 )
+from openpype.pipeline.create import get_legacy_creator_by_name
 from openpype.hosts.blender.api import (
     plugin,
     get_selection,
@@ -244,7 +244,7 @@ class BlendRigLoader(plugin.AssetLoader):
         objects = self._process(libpath, asset_group, group_name, action)

         if create_animation:
-            creator_plugin = lib.get_creator_by_name("CreateAnimation")
+            creator_plugin = get_legacy_creator_by_name("CreateAnimation")
             if not creator_plugin:
                 raise ValueError("Creator plugin \"CreateAnimation\" was "
                                  "not found.")
@@ -1,6 +1,19 @@
+import os
 import bpy

 import pyblish.api
+from openpype.pipeline import legacy_io
+from openpype.hosts.blender.api import workio
+
+
+class SaveWorkfiledAction(pyblish.api.Action):
+    """Save Workfile."""
+    label = "Save Workfile"
+    on = "failed"
+    icon = "save"
+
+    def process(self, context, plugin):
+        bpy.ops.wm.avalon_workfiles()


 class CollectBlenderCurrentFile(pyblish.api.ContextPlugin):
@@ -8,12 +21,52 @@ class CollectBlenderCurrentFile(pyblish.api.ContextPlugin):

     order = pyblish.api.CollectorOrder - 0.5
     label = "Blender Current File"
-    hosts = ['blender']
+    hosts = ["blender"]
+    actions = [SaveWorkfiledAction]

     def process(self, context):
         """Inject the current working file"""
-        current_file = bpy.data.filepath
-        context.data['currentFile'] = current_file
+        current_file = workio.current_file()

-        assert current_file != '', "Current file is empty. " \
-            "Save the file before continuing."
+        context.data["currentFile"] = current_file
+
+        assert current_file, (
+            "Current file is empty. Save the file before continuing."
+        )
+
+        folder, file = os.path.split(current_file)
+        filename, ext = os.path.splitext(file)
+
+        task = legacy_io.Session["AVALON_TASK"]
+
+        data = {}
+
+        # create instance
+        instance = context.create_instance(name=filename)
+        subset = "workfile" + task.capitalize()
+
+        data.update({
+            "subset": subset,
+            "asset": os.getenv("AVALON_ASSET", None),
+            "label": subset,
+            "publish": True,
+            "family": "workfile",
+            "families": ["workfile"],
+            "setMembers": [current_file],
+            "frameStart": bpy.context.scene.frame_start,
+            "frameEnd": bpy.context.scene.frame_end,
+        })
+
+        data["representations"] = [{
+            "name": ext.lstrip("."),
+            "ext": ext.lstrip("."),
+            "files": file,
+            "stagingDir": folder,
+        }]
+
+        instance.data.update(data)
+
+        self.log.info("Collected instance: {}".format(file))
+        self.log.info("Scene path: {}".format(current_file))
+        self.log.info("staging Dir: {}".format(folder))
+        self.log.info("subset: {}".format(subset))
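`SaveWorkfiledAction` relies on pyblish's plugin-action mechanism: `on = "failed"` exposes the action in the publish UI only when the owning plugin fails, which is exactly when re-saving the workfile is useful. A minimal sketch of the same pattern with an illustrative action:

    import pyblish.api

    class LogContextAction(pyblish.api.Action):
        """Offered in the publish UI only after the plugin fails."""
        label = "Log Context"
        on = "failed"
        icon = "info"

        def process(self, context, plugin):
            print("failed plugin:", plugin, "current file:",
                  context.data.get("currentFile"))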
@@ -2,12 +2,12 @@ import os

 import bpy

-from openpype import api
+from openpype.pipeline import publish
 from openpype.hosts.blender.api import plugin
 from openpype.hosts.blender.api.pipeline import AVALON_PROPERTY


-class ExtractABC(api.Extractor):
+class ExtractABC(publish.Extractor):
     """Extract as ABC."""

     label = "Extract ABC"
@@ -0,0 +1,72 @@
+import os
+
+import bpy
+
+from openpype.pipeline import publish
+from openpype.hosts.blender.api import plugin
+
+
+class ExtractAnimationABC(publish.Extractor):
+    """Extract as ABC."""
+
+    label = "Extract Animation ABC"
+    hosts = ["blender"]
+    families = ["animation"]
+    optional = True
+
+    def process(self, instance):
+        # Define extract output file path
+        stagingdir = self.staging_dir(instance)
+        filename = f"{instance.name}.abc"
+        filepath = os.path.join(stagingdir, filename)
+
+        context = bpy.context
+
+        # Perform extraction
+        self.log.info("Performing extraction..")
+
+        plugin.deselect_all()
+
+        selected = []
+        asset_group = None
+
+        objects = []
+        for obj in instance:
+            if isinstance(obj, bpy.types.Collection):
+                for child in obj.all_objects:
+                    objects.append(child)
+        for obj in objects:
+            children = [o for o in bpy.data.objects if o.parent == obj]
+            for child in children:
+                objects.append(child)
+
+        for obj in objects:
+            obj.select_set(True)
+            selected.append(obj)
+
+        context = plugin.create_blender_context(
+            active=asset_group, selected=selected)
+
+        # We export the abc
+        bpy.ops.wm.alembic_export(
+            context,
+            filepath=filepath,
+            selected=True,
+            flatten=False
+        )
+
+        plugin.deselect_all()
+
+        if "representations" not in instance.data:
+            instance.data["representations"] = []
+
+        representation = {
+            'name': 'abc',
+            'ext': 'abc',
+            'files': filename,
+            "stagingDir": stagingdir,
+        }
+        instance.data["representations"].append(representation)
+
+        self.log.info("Extracted instance '%s' to: %s",
+                      instance.name, representation)
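`bpy.ops.wm.alembic_export` is Blender's built-in Alembic exporter; the selection pass above plus `selected=True` restricts the export to the gathered objects. A minimal sketch exporting the current selection (output path hypothetical):

    import bpy

    bpy.ops.wm.alembic_export(
        filepath="/tmp/out.abc",  # hypothetical output path
        selected=True,            # export only selected objects
        flatten=False             # preserve parent/child hierarchy
    )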
@@ -2,10 +2,10 @@ import os

 import bpy

-import openpype.api
+from openpype.pipeline import publish


-class ExtractBlend(openpype.api.Extractor):
+class ExtractBlend(publish.Extractor):
     """Extract a blend file."""

     label = "Extract Blend"
@@ -2,10 +2,10 @@ import os

 import bpy

-import openpype.api
+from openpype.pipeline import publish


-class ExtractBlendAnimation(openpype.api.Extractor):
+class ExtractBlendAnimation(publish.Extractor):
     """Extract a blend file."""

     label = "Extract Blend"
@@ -2,11 +2,11 @@ import os

 import bpy

-from openpype import api
+from openpype.pipeline import publish
 from openpype.hosts.blender.api import plugin


-class ExtractCamera(api.Extractor):
+class ExtractCamera(publish.Extractor):
     """Extract as the camera as FBX."""

     label = "Extract Camera"
@@ -2,12 +2,12 @@ import os

 import bpy

-from openpype import api
+from openpype.pipeline import publish
 from openpype.hosts.blender.api import plugin
 from openpype.hosts.blender.api.pipeline import AVALON_PROPERTY


-class ExtractFBX(api.Extractor):
+class ExtractFBX(publish.Extractor):
     """Extract as FBX."""

     label = "Extract FBX"
@@ -5,12 +5,12 @@ import bpy
 import bpy_extras
 import bpy_extras.anim_utils

-from openpype import api
+from openpype.pipeline import publish
 from openpype.hosts.blender.api import plugin
 from openpype.hosts.blender.api.pipeline import AVALON_PROPERTY


-class ExtractAnimationFBX(api.Extractor):
+class ExtractAnimationFBX(publish.Extractor):
     """Extract as animation."""

     label = "Extract FBX"
@@ -6,12 +6,12 @@ import bpy_extras
 import bpy_extras.anim_utils

 from openpype.client import get_representation_by_name
+from openpype.pipeline import publish
 from openpype.hosts.blender.api import plugin
 from openpype.hosts.blender.api.pipeline import AVALON_PROPERTY
-import openpype.api


-class ExtractLayout(openpype.api.Extractor):
+class ExtractLayout(publish.Extractor):
     """Extract a layout."""

     label = "Extract Layout"
@@ -180,7 +180,7 @@ class ExtractLayout(openpype.api.Extractor):
                 "rotation": {
                     "x": asset.rotation_euler.x,
                     "y": asset.rotation_euler.y,
-                    "z": asset.rotation_euler.z,
+                    "z": asset.rotation_euler.z
                 },
                 "scale": {
                     "x": asset.scale.x,
@@ -189,6 +189,18 @@ class ExtractLayout(openpype.api.Extractor):
                 }
             }

+            json_element["transform_matrix"] = []
+
+            for row in list(asset.matrix_world.transposed()):
+                json_element["transform_matrix"].append(list(row))
+
+            json_element["basis"] = [
+                [1, 0, 0, 0],
+                [0, -1, 0, 0],
+                [0, 0, 1, 0],
+                [0, 0, 0, 1]
+            ]
+
             # Extract the animation as well
             if family == "rig":
                 f, n = self._export_animation(
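The `matrix_world.transposed()` call added above turns the 4x4 `mathutils.Matrix` into nested lists that serialize cleanly to JSON; transposing swaps rows and columns, presumably to match the matrix convention of the layout consumer. A short sketch of the same conversion (the object name is hypothetical):

    import bpy

    obj = bpy.data.objects["Cube"]  # hypothetical object name

    # Each nested list is one row of the transposed world matrix.
    transform_matrix = [list(row) for row in obj.matrix_world.transposed()]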
@@ -1,9 +1,11 @@
 from typing import List

-import mathutils
+import bpy

 import pyblish.api

 import openpype.hosts.blender.api.action
+from openpype.pipeline.publish import ValidateContentsOrder


 class ValidateCameraZeroKeyframe(pyblish.api.InstancePlugin):
@@ -14,21 +16,18 @@ class ValidateCameraZeroKeyframe(pyblish.api.InstancePlugin):
     in Unreal and Blender.
     """

-    order = openpype.api.ValidateContentsOrder
+    order = ValidateContentsOrder
     hosts = ["blender"]
     families = ["camera"]
-    category = "geometry"
     version = (0, 1, 0)
     label = "Zero Keyframe"
     actions = [openpype.hosts.blender.api.action.SelectInvalidAction]

-    _identity = mathutils.Matrix()
-
-    @classmethod
-    def get_invalid(cls, instance) -> List:
+    @staticmethod
+    def get_invalid(instance) -> List:
         invalid = []
-        for obj in [obj for obj in instance]:
-            if obj.type == "CAMERA":
+        for obj in instance:
+            if isinstance(obj, bpy.types.Object) and obj.type == "CAMERA":
                 if obj.animation_data and obj.animation_data.action:
                     action = obj.animation_data.action
                     frames_set = set()
@@ -45,4 +44,5 @@ class ValidateCameraZeroKeyframe(pyblish.api.InstancePlugin):
         invalid = self.get_invalid(instance)
         if invalid:
             raise RuntimeError(
-                f"Object found in instance is not in Object Mode: {invalid}")
+                f"Camera must have a keyframe at frame 0: {invalid}"
+            )
@@ -3,13 +3,15 @@ from typing import List
 import bpy

 import pyblish.api

+from openpype.pipeline.publish import ValidateContentsOrder
 import openpype.hosts.blender.api.action


 class ValidateMeshHasUvs(pyblish.api.InstancePlugin):
     """Validate that the current mesh has UV's."""

-    order = pyblish.api.ValidatorOrder
+    order = ValidateContentsOrder
     hosts = ["blender"]
     families = ["model"]
     category = "geometry"
@@ -25,7 +27,10 @@ class ValidateMeshHasUvs(pyblish.api.InstancePlugin):
         for uv_layer in obj.data.uv_layers:
             for polygon in obj.data.polygons:
                 for loop_index in polygon.loop_indices:
-                    if not uv_layer.data[loop_index].uv:
+                    if (
+                        loop_index >= len(uv_layer.data)
+                        or not uv_layer.data[loop_index].uv
+                    ):
                         return False

         return True
@@ -33,20 +38,20 @@ class ValidateMeshHasUvs(pyblish.api.InstancePlugin):
     @classmethod
     def get_invalid(cls, instance) -> List:
         invalid = []
-        # TODO (jasper): only check objects in the collection that will be published?
-        for obj in [
-                obj for obj in instance]:
-            try:
-                if obj.type == 'MESH':
-                    # Make sure we are in object mode.
-                    bpy.ops.object.mode_set(mode='OBJECT')
-                    if not cls.has_uvs(obj):
-                        invalid.append(obj)
-            except:
-                continue
+        for obj in instance:
+            if isinstance(obj, bpy.types.Object) and obj.type == 'MESH':
+                if obj.mode != "OBJECT":
+                    cls.log.warning(
+                        f"Mesh object {obj.name} should be in 'OBJECT' mode"
+                        " to be properly checked."
+                    )
+                if not cls.has_uvs(obj):
+                    invalid.append(obj)
         return invalid

     def process(self, instance):
         invalid = self.get_invalid(instance)
         if invalid:
-            raise RuntimeError(f"Meshes found in instance without valid UV's: {invalid}")
+            raise RuntimeError(
+                f"Meshes found in instance without valid UV's: {invalid}"
+            )
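The bounds check added to `has_uvs()` guards against UV layers whose data array is shorter than the loop indices a polygon references, reporting such meshes as invalid instead of raising. A self-contained sketch of the full check under that assumption:

    import bpy

    def has_uvs(obj) -> bool:
        # A mesh passes only if every polygon loop has a UV entry.
        if not obj.data.uv_layers:
            return False
        for uv_layer in obj.data.uv_layers:
            for polygon in obj.data.polygons:
                for loop_index in polygon.loop_indices:
                    if (loop_index >= len(uv_layer.data)
                            or not uv_layer.data[loop_index].uv):
                        return False
        return True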
@@ -3,28 +3,28 @@ from typing import List
 import bpy

 import pyblish.api

+from openpype.pipeline.publish import ValidateContentsOrder
 import openpype.hosts.blender.api.action


 class ValidateMeshNoNegativeScale(pyblish.api.Validator):
     """Ensure that meshes don't have a negative scale."""

-    order = pyblish.api.ValidatorOrder
+    order = ValidateContentsOrder
     hosts = ["blender"]
     families = ["model"]
+    category = "geometry"
     label = "Mesh No Negative Scale"
     actions = [openpype.hosts.blender.api.action.SelectInvalidAction]

     @staticmethod
     def get_invalid(instance) -> List:
         invalid = []
-        # TODO (jasper): only check objects in the collection that will be published?
-        for obj in [
-            obj for obj in bpy.data.objects if obj.type == 'MESH'
-        ]:
-            if any(v < 0 for v in obj.scale):
-                invalid.append(obj)
+        for obj in instance:
+            if isinstance(obj, bpy.types.Object) and obj.type == 'MESH':
+                if any(v < 0 for v in obj.scale):
+                    invalid.append(obj)

         return invalid

     def process(self, instance):
@@ -1,7 +1,11 @@
 from typing import List

+import bpy
+
 import pyblish.api

 import openpype.hosts.blender.api.action
+from openpype.pipeline.publish import ValidateContentsOrder


 class ValidateNoColonsInName(pyblish.api.InstancePlugin):
@@ -12,20 +16,20 @@ class ValidateNoColonsInName(pyblish.api.InstancePlugin):

     """

-    order = openpype.api.ValidateContentsOrder
+    order = ValidateContentsOrder
     hosts = ["blender"]
     families = ["model", "rig"]
     version = (0, 1, 0)
     label = "No Colons in names"
     actions = [openpype.hosts.blender.api.action.SelectInvalidAction]

-    @classmethod
-    def get_invalid(cls, instance) -> List:
+    @staticmethod
+    def get_invalid(instance) -> List:
         invalid = []
-        for obj in [obj for obj in instance]:
+        for obj in instance:
             if ':' in obj.name:
                 invalid.append(obj)
-            if obj.type == 'ARMATURE':
+            if isinstance(obj, bpy.types.Object) and obj.type == 'ARMATURE':
                 for bone in obj.data.bones:
                     if ':' in bone.name:
                         invalid.append(obj)
@@ -36,4 +40,5 @@ class ValidateNoColonsInName(pyblish.api.InstancePlugin):
         invalid = self.get_invalid(instance)
         if invalid:
             raise RuntimeError(
-                f"Objects found with colon in name: {invalid}")
+                f"Objects found with colon in name: {invalid}"
+            )
@@ -1,5 +1,7 @@
 from typing import List

+import bpy
+
 import pyblish.api
 import openpype.hosts.blender.api.action
@@ -10,26 +12,21 @@ class ValidateObjectIsInObjectMode(pyblish.api.InstancePlugin):
     order = pyblish.api.ValidatorOrder - 0.01
     hosts = ["blender"]
     families = ["model", "rig", "layout"]
-    category = "geometry"
     label = "Validate Object Mode"
     actions = [openpype.hosts.blender.api.action.SelectInvalidAction]
    optional = False

-    @classmethod
-    def get_invalid(cls, instance) -> List:
+    @staticmethod
+    def get_invalid(instance) -> List:
         invalid = []
-        for obj in [obj for obj in instance]:
-            try:
-                if obj.type == 'MESH' or obj.type == 'ARMATURE':
-                    # Check if the object is in object mode.
-                    if not obj.mode == 'OBJECT':
-                        invalid.append(obj)
-            except Exception:
-                continue
+        for obj in instance:
+            if isinstance(obj, bpy.types.Object) and obj.mode != "OBJECT":
+                invalid.append(obj)
         return invalid

     def process(self, instance):
         invalid = self.get_invalid(instance)
         if invalid:
             raise RuntimeError(
-                f"Object found in instance is not in Object Mode: {invalid}")
+                f"Object found in instance is not in Object Mode: {invalid}"
+            )
@@ -1,9 +1,12 @@
 from typing import List

 import mathutils
+import bpy

 import pyblish.api

 import openpype.hosts.blender.api.action
+from openpype.pipeline.publish import ValidateContentsOrder


 class ValidateTransformZero(pyblish.api.InstancePlugin):
@@ -15,10 +18,9 @@ class ValidateTransformZero(pyblish.api.InstancePlugin):

     """

-    order = openpype.api.ValidateContentsOrder
+    order = ValidateContentsOrder
     hosts = ["blender"]
     families = ["model"]
-    category = "geometry"
     version = (0, 1, 0)
     label = "Transform Zero"
     actions = [openpype.hosts.blender.api.action.SelectInvalidAction]
@@ -28,8 +30,11 @@ class ValidateTransformZero(pyblish.api.InstancePlugin):
     @classmethod
     def get_invalid(cls, instance) -> List:
         invalid = []
-        for obj in [obj for obj in instance]:
-            if obj.matrix_basis != cls._identity:
+        for obj in instance:
+            if (
+                isinstance(obj, bpy.types.Object)
+                and obj.matrix_basis != cls._identity
+            ):
                 invalid.append(obj)
         return invalid
@@ -37,4 +42,6 @@ class ValidateTransformZero(pyblish.api.InstancePlugin):
         invalid = self.get_invalid(instance)
         if invalid:
             raise RuntimeError(
-                f"Object found in instance is not in Object Mode: {invalid}")
+                "Object found in instance has not"
+                f" transform to zero: {invalid}"
+            )
@@ -0,0 +1,10 @@
+from .addon import (
+    CELACTION_ROOT_DIR,
+    CelactionAddon,
+)
+
+
+__all__ = (
+    "CELACTION_ROOT_DIR",
+    "CelactionAddon",
+)
openpype/hosts/celaction/addon.py (new file, 31 lines)
@@ -0,0 +1,31 @@
+import os
+from openpype.modules import OpenPypeModule, IHostAddon
+
+CELACTION_ROOT_DIR = os.path.dirname(os.path.abspath(__file__))
+
+
+class CelactionAddon(OpenPypeModule, IHostAddon):
+    name = "celaction"
+    host_name = "celaction"
+
+    def initialize(self, module_settings):
+        self.enabled = True
+
+    def get_launch_hook_paths(self, app):
+        if app.host_name != self.host_name:
+            return []
+        return [
+            os.path.join(CELACTION_ROOT_DIR, "hooks")
+        ]
+
+    def add_implementation_envs(self, env, _app):
+        # Set default values if are not already set via settings
+        defaults = {
+            "LOGLEVEL": "DEBUG"
+        }
+        for key, value in defaults.items():
+            if not env.get(key):
+                env[key] = value
+
+    def get_workfile_extensions(self):
+        return [".scn"]
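`add_implementation_envs()` fills environment defaults without clobbering anything the settings already provided; the `if not env.get(key)` guard is what makes it non-destructive (empty strings also count as unset). A minimal standalone sketch of the pattern:

    def apply_env_defaults(env, defaults):
        # Fill only keys that are missing or empty; never overwrite
        # values set explicitly (e.g. via studio settings).
        for key, value in defaults.items():
            if not env.get(key):
                env[key] = value
        return env

    print(apply_env_defaults({"LOGLEVEL": ""}, {"LOGLEVEL": "DEBUG"}))
    # {'LOGLEVEL': 'DEBUG'}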
@@ -1 +0,0 @@
-kwargs = None
@@ -1,88 +0,0 @@
-import os
-import sys
-import copy
-import argparse
-
-import pyblish.api
-import pyblish.util
-
-from openpype.api import Logger
-import openpype
-import openpype.hosts.celaction
-from openpype.hosts.celaction import api as celaction
-from openpype.tools.utils import host_tools
-from openpype.pipeline import install_openpype_plugins
-
-
-log = Logger().get_logger("Celaction_cli_publisher")
-
-publish_host = "celaction"
-
-HOST_DIR = os.path.dirname(os.path.abspath(openpype.hosts.celaction.__file__))
-PLUGINS_DIR = os.path.join(HOST_DIR, "plugins")
-PUBLISH_PATH = os.path.join(PLUGINS_DIR, "publish")
-
-
-def cli():
-    parser = argparse.ArgumentParser(prog="celaction_publish")
-
-    parser.add_argument("--currentFile",
-                        help="Pass file to Context as `currentFile`")
-
-    parser.add_argument("--chunk",
-                        help=("Render chanks on farm"))
-
-    parser.add_argument("--frameStart",
-                        help=("Start of frame range"))
-
-    parser.add_argument("--frameEnd",
-                        help=("End of frame range"))
-
-    parser.add_argument("--resolutionWidth",
-                        help=("Width of resolution"))
-
-    parser.add_argument("--resolutionHeight",
-                        help=("Height of resolution"))
-
-    celaction.kwargs = parser.parse_args(sys.argv[1:]).__dict__
-
-
-def _prepare_publish_environments():
-    """Prepares environments based on request data."""
-    env = copy.deepcopy(os.environ)
-
-    project_name = os.getenv("AVALON_PROJECT")
-    asset_name = os.getenv("AVALON_ASSET")
-
-    env["AVALON_PROJECT"] = project_name
-    env["AVALON_ASSET"] = asset_name
-    env["AVALON_TASK"] = os.getenv("AVALON_TASK")
-    env["AVALON_WORKDIR"] = os.getenv("AVALON_WORKDIR")
-    env["AVALON_APP"] = f"hosts.{publish_host}"
-    env["AVALON_APP_NAME"] = "celaction/local"
-
-    env["PYBLISH_HOSTS"] = publish_host
-
-    os.environ.update(env)
-
-
-def main():
-    # prepare all environments
-    _prepare_publish_environments()
-
-    # Registers pype's Global pyblish plugins
-    install_openpype_plugins()
-
-    if os.path.exists(PUBLISH_PATH):
-        log.info(f"Registering path: {PUBLISH_PATH}")
-        pyblish.api.register_plugin_path(PUBLISH_PATH)
-
-    pyblish.api.register_host(publish_host)
-
-    return host_tools.show_publish()
-
-
-if __name__ == "__main__":
-    cli()
-    result = main()
-    sys.exit(not bool(result))
@@ -1,122 +0,0 @@
-import os
-import shutil
-import winreg
-from openpype.lib import PreLaunchHook
-from openpype.hosts.celaction import api as celaction
-
-
-class CelactionPrelaunchHook(PreLaunchHook):
-    """
-    Bootstrap celacion with pype
-    """
-    workfile_ext = "scn"
-    app_groups = ["celaction"]
-    platforms = ["windows"]
-
-    def execute(self):
-        # Add workfile path to launch arguments
-        workfile_path = self.workfile_path()
-        if workfile_path:
-            self.launch_context.launch_args.append(workfile_path)
-
-        project_name = self.data["project_name"]
-        asset_name = self.data["asset_name"]
-        task_name = self.data["task_name"]
-
-        # get publish version of celaction
-        app = "celaction_publish"
-
-        # setting output parameters
-        path = r"Software\CelAction\CelAction2D\User Settings"
-        winreg.CreateKey(winreg.HKEY_CURRENT_USER, path)
-        hKey = winreg.OpenKey(
-            winreg.HKEY_CURRENT_USER,
-            "Software\\CelAction\\CelAction2D\\User Settings", 0,
-            winreg.KEY_ALL_ACCESS)
-
-        # TODO: this will need to be checked more thoroughly
-        pype_exe = os.getenv("OPENPYPE_EXECUTABLE")
-
-        winreg.SetValueEx(hKey, "SubmitAppTitle", 0, winreg.REG_SZ, pype_exe)
-
-        parameters = [
-            "launch",
-            f"--app {app}",
-            f"--project {project_name}",
-            f"--asset {asset_name}",
-            f"--task {task_name}",
-            "--currentFile \\\"\"*SCENE*\"\\\"",
-            "--chunk 10",
-            "--frameStart *START*",
-            "--frameEnd *END*",
-            "--resolutionWidth *X*",
-            "--resolutionHeight *Y*",
-            # "--programDir \"'*PROGPATH*'\""
-        ]
-        winreg.SetValueEx(hKey, "SubmitParametersTitle", 0, winreg.REG_SZ,
-                          " ".join(parameters))
-
-        # setting resolution parameters
-        path = r"Software\CelAction\CelAction2D\User Settings\Dialogs"
-        path += r"\SubmitOutput"
-        winreg.CreateKey(winreg.HKEY_CURRENT_USER, path)
-        hKey = winreg.OpenKey(winreg.HKEY_CURRENT_USER, path, 0,
-                              winreg.KEY_ALL_ACCESS)
-        winreg.SetValueEx(hKey, "SaveScene", 0, winreg.REG_DWORD, 1)
-        winreg.SetValueEx(hKey, "CustomX", 0, winreg.REG_DWORD, 1920)
-        winreg.SetValueEx(hKey, "CustomY", 0, winreg.REG_DWORD, 1080)
-
-        # making sure message dialogs don't appear when overwriting
-        path = r"Software\CelAction\CelAction2D\User Settings\Messages"
-        path += r"\OverwriteScene"
-        winreg.CreateKey(winreg.HKEY_CURRENT_USER, path)
-        hKey = winreg.OpenKey(winreg.HKEY_CURRENT_USER, path, 0,
-                              winreg.KEY_ALL_ACCESS)
-        winreg.SetValueEx(hKey, "Result", 0, winreg.REG_DWORD, 6)
-        winreg.SetValueEx(hKey, "Valid", 0, winreg.REG_DWORD, 1)
-
-        path = r"Software\CelAction\CelAction2D\User Settings\Messages"
-        path += r"\SceneSaved"
-        winreg.CreateKey(winreg.HKEY_CURRENT_USER, path)
-        hKey = winreg.OpenKey(winreg.HKEY_CURRENT_USER, path, 0,
-                              winreg.KEY_ALL_ACCESS)
-        winreg.SetValueEx(hKey, "Result", 0, winreg.REG_DWORD, 1)
-        winreg.SetValueEx(hKey, "Valid", 0, winreg.REG_DWORD, 1)
-
-    def workfile_path(self):
-        workfile_path = self.data["last_workfile_path"]
-
-        # copy workfile from template if doesnt exist any on path
-        if not os.path.exists(workfile_path):
-            # TODO add ability to set different template workfile path via
-            # settings
-            pype_celaction_dir = os.path.dirname(os.path.dirname(
-                os.path.abspath(celaction.__file__)
-            ))
-            template_path = os.path.join(
-                pype_celaction_dir,
-                "resources",
-                "celaction_template_scene.scn"
-            )
-
-            if not os.path.exists(template_path):
-                self.log.warning(
-                    "Couldn't find workfile template file in {}".format(
-                        template_path
-                    )
-                )
-                return
-
-            self.log.info(
-                f"Creating workfile from template: \"{template_path}\""
-            )
-
-            # Copy template workfile to new destinantion
-            shutil.copy2(
-                os.path.normpath(template_path),
-                os.path.normpath(workfile_path)
-            )
-
-        self.log.info(f"Workfile to open: \"{workfile_path}\"")
-
-        return workfile_path
openpype/hosts/celaction/hooks/pre_celaction_setup.py (new file, 137 lines)
@@ -0,0 +1,137 @@
+import os
+import shutil
+import winreg
+import subprocess
+from openpype.lib import PreLaunchHook, get_openpype_execute_args
+from openpype.hosts.celaction import scripts
+
+CELACTION_SCRIPTS_DIR = os.path.dirname(
+    os.path.abspath(scripts.__file__)
+)
+
+
+class CelactionPrelaunchHook(PreLaunchHook):
+    """
+    Bootstrap celacion with pype
+    """
+    app_groups = ["celaction"]
+    platforms = ["windows"]
+
+    def execute(self):
+        asset_doc = self.data["asset_doc"]
+        width = asset_doc["data"]["resolutionWidth"]
+        height = asset_doc["data"]["resolutionHeight"]
+
+        # Add workfile path to launch arguments
+        workfile_path = self.workfile_path()
+        if workfile_path:
+            self.launch_context.launch_args.append(workfile_path)
+
+        # setting output parameters
+        path_user_settings = "\\".join([
+            "Software", "CelAction", "CelAction2D", "User Settings"
+        ])
+        winreg.CreateKey(winreg.HKEY_CURRENT_USER, path_user_settings)
+        hKey = winreg.OpenKey(
+            winreg.HKEY_CURRENT_USER, path_user_settings, 0,
+            winreg.KEY_ALL_ACCESS
+        )
+
+        path_to_cli = os.path.join(CELACTION_SCRIPTS_DIR, "publish_cli.py")
+        subproces_args = get_openpype_execute_args("run", path_to_cli)
+        openpype_executable = subproces_args.pop(0)
+
+        winreg.SetValueEx(
+            hKey,
+            "SubmitAppTitle",
+            0,
+            winreg.REG_SZ,
+            openpype_executable
+        )
+
+        parameters = subproces_args + [
+            "--currentFile", "*SCENE*",
+            "--chunk", "*CHUNK*",
+            "--frameStart", "*START*",
+            "--frameEnd", "*END*",
+            "--resolutionWidth", "*X*",
+            "--resolutionHeight", "*Y*"
+        ]
+
+        winreg.SetValueEx(
+            hKey, "SubmitParametersTitle", 0, winreg.REG_SZ,
+            subprocess.list2cmdline(parameters)
+        )
+
+        # setting resolution parameters
+        path_submit = "\\".join([
+            path_user_settings, "Dialogs", "SubmitOutput"
+        ])
+        winreg.CreateKey(winreg.HKEY_CURRENT_USER, path_submit)
+        hKey = winreg.OpenKey(
+            winreg.HKEY_CURRENT_USER, path_submit, 0,
+            winreg.KEY_ALL_ACCESS
+        )
+        winreg.SetValueEx(hKey, "SaveScene", 0, winreg.REG_DWORD, 1)
+        winreg.SetValueEx(hKey, "CustomX", 0, winreg.REG_DWORD, width)
+        winreg.SetValueEx(hKey, "CustomY", 0, winreg.REG_DWORD, height)
+
+        # making sure message dialogs don't appear when overwriting
+        path_overwrite_scene = "\\".join([
+            path_user_settings, "Messages", "OverwriteScene"
+        ])
+        winreg.CreateKey(winreg.HKEY_CURRENT_USER, path_overwrite_scene)
+        hKey = winreg.OpenKey(
+            winreg.HKEY_CURRENT_USER, path_overwrite_scene, 0,
+            winreg.KEY_ALL_ACCESS
+        )
+        winreg.SetValueEx(hKey, "Result", 0, winreg.REG_DWORD, 6)
+        winreg.SetValueEx(hKey, "Valid", 0, winreg.REG_DWORD, 1)
+
+        # set scane as not saved
+        path_scene_saved = "\\".join([
+            path_user_settings, "Messages", "SceneSaved"
+        ])
+        winreg.CreateKey(winreg.HKEY_CURRENT_USER, path_scene_saved)
+        hKey = winreg.OpenKey(
+            winreg.HKEY_CURRENT_USER, path_scene_saved, 0,
+            winreg.KEY_ALL_ACCESS
+        )
+        winreg.SetValueEx(hKey, "Result", 0, winreg.REG_DWORD, 1)
+        winreg.SetValueEx(hKey, "Valid", 0, winreg.REG_DWORD, 1)
+
+    def workfile_path(self):
+        workfile_path = self.data["last_workfile_path"]
+
+        # copy workfile from template if doesnt exist any on path
+        if not os.path.exists(workfile_path):
+            # TODO add ability to set different template workfile path via
+            # settings
+            openpype_celaction_dir = os.path.dirname(CELACTION_SCRIPTS_DIR)
+            template_path = os.path.join(
+                openpype_celaction_dir,
+                "resources",
+                "celaction_template_scene.scn"
+            )
+
+            if not os.path.exists(template_path):
+                self.log.warning(
+                    "Couldn't find workfile template file in {}".format(
+                        template_path
+                    )
+                )
+                return
+
+            self.log.info(
+                f"Creating workfile from template: \"{template_path}\""
+            )
+
+            # Copy template workfile to new destinantion
+            shutil.copy2(
+                os.path.normpath(template_path),
+                os.path.normpath(workfile_path)
+            )
+
+        self.log.info(f"Workfile to open: \"{workfile_path}\"")
+
+        return workfile_path
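`subprocess.list2cmdline()` in the new hook joins the argument list into a single Windows-quoted command line before it is written to the registry, replacing the hand-rolled quoting of the old hook. A quick illustration:

    import subprocess

    args = ["run", "C:\\scripts\\publish_cli.py", "--currentFile", "*SCENE*"]
    print(subprocess.list2cmdline(args))
    # run C:\scripts\publish_cli.py --currentFile *SCENE*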
@@ -1,113 +0,0 @@
-import os
-import collections
-from pprint import pformat
-
-import pyblish.api
-
-from openpype.client import (
-    get_subsets,
-    get_last_versions,
-    get_representations
-)
-from openpype.pipeline import legacy_io
-
-
-class AppendCelactionAudio(pyblish.api.ContextPlugin):
-
-    label = "Collect Audio for publishing"
-    order = pyblish.api.CollectorOrder + 0.1
-
-    def process(self, context):
-        self.log.info('Collecting Audio Data')
-        asset_doc = context.data["assetEntity"]
-
-        # get all available representations
-        subsets = self.get_subsets(
-            asset_doc,
-            representations=["audio", "wav"]
-        )
-        self.log.info(f"subsets is: {pformat(subsets)}")
-
-        if not subsets.get("audioMain"):
-            raise AttributeError("`audioMain` subset does not exist")
-
-        reprs = subsets.get("audioMain", {}).get("representations", [])
-        self.log.info(f"reprs is: {pformat(reprs)}")
-
-        repr = next((r for r in reprs), None)
-        if not repr:
-            raise ValueError("Missing `audioMain` representation")
-        self.log.info(f"representation is: {repr}")
-
-        audio_file = repr.get('data', {}).get('path', "")
-
-        if os.path.exists(audio_file):
-            context.data["audioFile"] = audio_file
-            self.log.info(
-                'audio_file: {}, has been added to context'.format(audio_file))
-        else:
-            self.log.warning("Couldn't find any audio file on Ftrack.")
-
-    def get_subsets(self, asset_doc, representations):
-        """Query subsets with filter on name.
-
-        The method returns all found subsets with their last version
-        and representations. Representations can be filtered by name.
-
-        Arguments:
-            asset_doc (dict): Asset (shot) mongo document
-            representations (list): representation names to filter by
-
-        Returns:
-            dict: subsets with version and representations in keys
-        """
-
-        # Query all subsets for asset
-        project_name = legacy_io.active_project()
-        subset_docs = get_subsets(
-            project_name, asset_ids=[asset_doc["_id"]], fields=["_id"]
-        )
-        # Collect all subset ids
-        subset_ids = [
-            subset_doc["_id"]
-            for subset_doc in subset_docs
-        ]
-
-        # Check if we found anything
-        assert subset_ids, (
-            "No subsets found. Check correct filter. "
-            "Try this for start `r'.*'`: asset: `{}`"
-        ).format(asset_doc["name"])
-
-        last_versions_by_subset_id = get_last_versions(
-            project_name, subset_ids, fields=["_id", "parent"]
-        )
-
-        version_docs_by_id = {}
-        for version_doc in last_versions_by_subset_id.values():
-            version_docs_by_id[version_doc["_id"]] = version_doc
-
-        repre_docs = get_representations(
-            project_name,
-            version_ids=version_docs_by_id.keys(),
-            representation_names=representations
-        )
-        repre_docs_by_version_id = collections.defaultdict(list)
-        for repre_doc in repre_docs:
-            version_id = repre_doc["parent"]
-            repre_docs_by_version_id[version_id].append(repre_doc)
-
-        output_dict = {}
-        for version_id, repre_docs in repre_docs_by_version_id.items():
-            version_doc = version_docs_by_id[version_id]
-            subset_id = version_doc["parent"]
-            subset_doc = last_versions_by_subset_id[subset_id]
-            # Store queried docs by subset name
-            output_dict[subset_doc["name"]] = {
-                "representations": repre_docs,
-                "version": version_doc
-            }
-
-        return output_dict
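For reference, the dictionary shape the removed get_subsets() helper returned, shown with hypothetical placeholder values (keys are subset names; each entry holds the last version document and its filtered representations):

# Hypothetical illustration of the removed helper's return value.
subsets = {
    "audioMain": {
        "version": {"_id": "<version-id>", "parent": "<subset-id>"},
        "representations": [
            {"_id": "<repre-id>", "parent": "<version-id>",
             "data": {"path": "/projects/sh010/publish/audio/audioMain.wav"}},
        ],
    },
}
audio_file = subsets["audioMain"]["representations"][0]["data"]["path"]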
@@ -1,5 +1,7 @@
 import pyblish.api
-from openpype.hosts.celaction import api as celaction
+import argparse
+import sys
+from pprint import pformat
 
 
 class CollectCelactionCliKwargs(pyblish.api.Collector):
@@ -9,15 +11,31 @@ class CollectCelactionCliKwargs(pyblish.api.Collector):
     order = pyblish.api.Collector.order - 0.1
 
     def process(self, context):
-        kwargs = celaction.kwargs.copy()
+        parser = argparse.ArgumentParser(prog="celaction")
+        parser.add_argument("--currentFile",
+                            help="Pass file to Context as `currentFile`")
+        parser.add_argument("--chunk",
+                            help=("Render chunks on farm"))
+        parser.add_argument("--frameStart",
+                            help=("Start of frame range"))
+        parser.add_argument("--frameEnd",
+                            help=("End of frame range"))
+        parser.add_argument("--resolutionWidth",
+                            help=("Width of resolution"))
+        parser.add_argument("--resolutionHeight",
+                            help=("Height of resolution"))
+        passing_kwargs = parser.parse_args(sys.argv[1:]).__dict__
 
-        self.log.info("Storing kwargs: %s" % kwargs)
-        context.set_data("kwargs", kwargs)
+        self.log.info("Storing kwargs ...")
+        self.log.debug("_ passing_kwargs: {}".format(pformat(passing_kwargs)))
+
+        # set kwargs to context data
+        context.set_data("passingKwargs", passing_kwargs)
 
         # get kwargs onto context data as keys with values
-        for k, v in kwargs.items():
+        for k, v in passing_kwargs.items():
             self.log.info(f"Setting `{k}` to instance.data with value: `{v}`")
             if k in ["frameStart", "frameEnd"]:
-                context.data[k] = kwargs[k] = int(v)
+                context.data[k] = passing_kwargs[k] = int(v)
             else:
                 context.data[k] = v
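A standalone sketch of how the collector above consumes the CLI flags; the flag names come from the diff, while the sample argv values are illustrative only:

import argparse

parser = argparse.ArgumentParser(prog="celaction")
for flag in ("--currentFile", "--chunk", "--frameStart", "--frameEnd",
             "--resolutionWidth", "--resolutionHeight"):
    parser.add_argument(flag)

sample_argv = [
    "--currentFile", "sh010_compositing_v001.scn",
    "--frameStart", "1001", "--frameEnd", "1100",
    "--resolutionWidth", "1920", "--resolutionHeight", "1080",
]
passing_kwargs = vars(parser.parse_args(sample_argv))
# argparse returns strings; the plugin casts frame values to int afterwards
assert int(passing_kwargs["frameStart"]) == 1001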
@@ -36,7 +36,8 @@ class CollectCelactionInstances(pyblish.api.ContextPlugin):
             "version": version
         }
 
-        celaction_kwargs = context.data.get("kwargs", {})
+        celaction_kwargs = context.data.get(
+            "passingKwargs", {})
 
         if celaction_kwargs:
             shared_instance_data.update(celaction_kwargs)
@@ -52,8 +53,8 @@ class CollectCelactionInstances(pyblish.api.ContextPlugin):
             "subset": subset,
             "label": scene_file,
             "family": family,
-            "families": [family, "ftrack"],
-            "representations": list()
+            "families": [],
+            "representations": []
         })
 
         # adding basic script data
@@ -72,7 +73,6 @@ class CollectCelactionInstances(pyblish.api.ContextPlugin):
         self.log.info('Publishing Celaction workfile')
 
         # render instance
-        family = "render.farm"
         subset = f"render{task}Main"
         instance = context.create_instance(name=subset)
         # getting instance state
@@ -81,8 +81,8 @@ class CollectCelactionInstances(pyblish.api.ContextPlugin):
         # add assetEntity data into instance
         instance.data.update({
             "label": "{} - farm".format(subset),
-            "family": family,
-            "families": [family],
+            "family": "render.farm",
+            "families": [],
             "subset": subset
         })
 
@@ -11,28 +11,31 @@ class CollectRenderPath(pyblish.api.InstancePlugin):
     families = ["render.farm"]
 
     # Presets
-    anatomy_render_key = None
-    publish_render_metadata = None
+    output_extension = "png"
+    anatomy_template_key_render_files = None
+    anatomy_template_key_metadata = None
 
     def process(self, instance):
         anatomy = instance.context.data["anatomy"]
         anatomy_data = copy.deepcopy(instance.data["anatomyData"])
-        anatomy_data["family"] = "render"
         padding = anatomy.templates.get("frame_padding", 4)
         anatomy_data.update({
             "frame": f"%0{padding}d",
-            "representation": "png"
+            "family": "render",
+            "representation": self.output_extension,
+            "ext": self.output_extension
         })
 
         anatomy_filled = anatomy.format(anatomy_data)
 
         # get anatomy rendering keys
-        anatomy_render_key = self.anatomy_render_key or "render"
-        publish_render_metadata = self.publish_render_metadata or "render"
+        r_anatomy_key = self.anatomy_template_key_render_files
+        m_anatomy_key = self.anatomy_template_key_metadata
 
         # get folder and path for rendering images from celaction
-        render_dir = anatomy_filled[anatomy_render_key]["folder"]
-        render_path = anatomy_filled[anatomy_render_key]["path"]
+        render_dir = anatomy_filled[r_anatomy_key]["folder"]
+        render_path = anatomy_filled[r_anatomy_key]["path"]
+        self.log.debug("__ render_path: `{}`".format(render_path))
 
         # create dir if it doesn't exist
         try:
@@ -46,9 +49,9 @@ class CollectRenderPath(pyblish.api.InstancePlugin):
         instance.data["path"] = render_path
 
         # get anatomy for published renders folder path
-        if anatomy_filled.get(publish_render_metadata):
+        if anatomy_filled.get(m_anatomy_key):
             instance.data["publishRenderMetadataFolder"] = anatomy_filled[
-                publish_render_metadata]["folder"]
+                m_anatomy_key]["folder"]
             self.log.info("Metadata render path: `{}`".format(
                 instance.data["publishRenderMetadataFolder"]
            ))
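The "frame": f"%0{padding}d" entry above injects a printf-style frame token into the anatomy template. A quick standalone illustration (the file name is hypothetical):

padding = 4
frame_token = f"%0{padding}d"
print(frame_token)  # %04d

# A filled template keeps the token, which expands per rendered frame:
render_path = f"sh010_renderCompositingMain.{frame_token}.png"
print(render_path % 1001)  # sh010_renderCompositingMain.1001.png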
 0   openpype/hosts/celaction/scripts/__init__.py   Normal file
 37  openpype/hosts/celaction/scripts/publish_cli.py   Normal file
@@ -0,0 +1,37 @@
+import os
+import sys
+
+import pyblish.api
+import pyblish.util
+
+import openpype.hosts.celaction
+from openpype.lib import Logger
+from openpype.tools.utils import host_tools
+from openpype.pipeline import install_openpype_plugins
+
+
+log = Logger.get_logger("celaction")
+
+PUBLISH_HOST = "celaction"
+HOST_DIR = os.path.dirname(os.path.abspath(openpype.hosts.celaction.__file__))
+PLUGINS_DIR = os.path.join(HOST_DIR, "plugins")
+PUBLISH_PATH = os.path.join(PLUGINS_DIR, "publish")
+
+
+def main():
+    # Registers pype's Global pyblish plugins
+    install_openpype_plugins()
+
+    if os.path.exists(PUBLISH_PATH):
+        log.info(f"Registering path: {PUBLISH_PATH}")
+        pyblish.api.register_plugin_path(PUBLISH_PATH)
+
+    pyblish.api.register_host(PUBLISH_HOST)
+    pyblish.api.register_target("local")
+
+    return host_tools.show_publish()
+
+
+if __name__ == "__main__":
+    result = main()
+    sys.exit(not bool(result))
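The registration calls in main() are standard pyblish API; a minimal sketch of the same pattern outside OpenPype (the plugin path is hypothetical):

import os
import pyblish.api

pyblish.api.register_host("celaction")   # marks plugins compatible with host
pyblish.api.register_target("local")     # selects locally-targeted plugins

plugin_path = "/path/to/plugins/publish"  # hypothetical
if os.path.exists(plugin_path):
    pyblish.api.register_plugin_path(plugin_path)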
@@ -1,22 +1,10 @@
-import os
-
-HOST_DIR = os.path.dirname(
-    os.path.abspath(__file__)
-)
-
-
-def add_implementation_envs(env, _app):
-    # Add requirements to DL_PYTHON_HOOK_PATH
-    pype_root = os.environ["OPENPYPE_REPOS_ROOT"]
-
-    env["DL_PYTHON_HOOK_PATH"] = os.path.join(
-        pype_root, "openpype", "hosts", "flame", "startup")
-    env.pop("QT_AUTO_SCREEN_SCALE_FACTOR", None)
-
-    # Set default values if are not already set via settings
-    defaults = {
-        "LOGLEVEL": "DEBUG"
-    }
-    for key, value in defaults.items():
-        if not env.get(key):
-            env[key] = value
+from .addon import (
+    HOST_DIR,
+    FlameAddon,
+)
+
+
+__all__ = (
+    "HOST_DIR",
+    "FlameAddon",
+)
 35  openpype/hosts/flame/addon.py   Normal file
@@ -0,0 +1,35 @@
+import os
+from openpype.modules import OpenPypeModule, IHostAddon
+
+HOST_DIR = os.path.dirname(os.path.abspath(__file__))
+
+
+class FlameAddon(OpenPypeModule, IHostAddon):
+    name = "flame"
+    host_name = "flame"
+
+    def initialize(self, module_settings):
+        self.enabled = True
+
+    def add_implementation_envs(self, env, _app):
+        # Add requirements to DL_PYTHON_HOOK_PATH
+        env["DL_PYTHON_HOOK_PATH"] = os.path.join(HOST_DIR, "startup")
+        env.pop("QT_AUTO_SCREEN_SCALE_FACTOR", None)
+
+        # Set default values if are not already set via settings
+        defaults = {
+            "LOGLEVEL": "DEBUG"
+        }
+        for key, value in defaults.items():
+            if not env.get(key):
+                env[key] = value
+
+    def get_launch_hook_paths(self, app):
+        if app.host_name != self.host_name:
+            return []
+        return [
+            os.path.join(HOST_DIR, "hooks")
+        ]
+
+    def get_workfile_extensions(self):
+        return [".otoc"]
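A self-contained sketch of what add_implementation_envs above does to a launch environment; the plain dict and the install path are stand-ins for the real app environment:

import os

HOST_DIR = "/opt/openpype/hosts/flame"  # hypothetical install location

def add_implementation_envs(env, _app):
    # Same logic as FlameAddon.add_implementation_envs above
    env["DL_PYTHON_HOOK_PATH"] = os.path.join(HOST_DIR, "startup")
    env.pop("QT_AUTO_SCREEN_SCALE_FACTOR", None)
    for key, value in {"LOGLEVEL": "DEBUG"}.items():
        if not env.get(key):
            env[key] = value

env = {"QT_AUTO_SCREEN_SCALE_FACTOR": "1", "LOGLEVEL": "INFO"}
add_implementation_envs(env, None)
assert "QT_AUTO_SCREEN_SCALE_FACTOR" not in env   # scale factor removed
assert env["LOGLEVEL"] == "INFO"                  # explicit value wins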
@@ -30,7 +30,8 @@ from .lib import (
     maintained_temp_file_path,
     get_clip_segment,
     get_batch_group_from_desktop,
-    MediaInfoFile
+    MediaInfoFile,
+    TimeEffectMetadata
 )
 from .utils import (
     setup,
@@ -50,7 +51,8 @@ from .pipeline import (
 )
 from .menu import (
     FlameMenuProjectConnect,
-    FlameMenuTimeline
+    FlameMenuTimeline,
+    FlameMenuUniversal
 )
 from .plugin import (
     Creator,
@@ -107,6 +109,7 @@ __all__ = [
     "get_clip_segment",
     "get_batch_group_from_desktop",
     "MediaInfoFile",
+    "TimeEffectMetadata",
 
     # pipeline
     "install",
@@ -129,6 +132,7 @@ __all__ = [
     # menu
     "FlameMenuProjectConnect",
     "FlameMenuTimeline",
+    "FlameMenuUniversal",
 
     # plugin
     "Creator",
@@ -5,12 +5,16 @@ import json
 import pickle
 import clique
 import tempfile
+import traceback
 import itertools
 import contextlib
 import xml.etree.cElementTree as cET
-from copy import deepcopy
+from copy import deepcopy, copy
 from xml.etree import ElementTree as ET
 from pprint import pformat
 
+from openpype.lib import Logger, run_subprocess
+
 from .constants import (
     MARKER_COLOR,
     MARKER_DURATION,
@@ -19,9 +23,7 @@ from .constants import (
     MARKER_PUBLISH_DEFAULT
 )
 
-import openpype.api as openpype
-
-log = openpype.Logger.get_logger(__name__)
+log = Logger.get_logger(__name__)
 
 FRAME_PATTERN = re.compile(r"[\._](\d+)[\.]")
@@ -266,7 +268,7 @@ def get_current_sequence(selection):
 def rescan_hooks():
     import flame
     try:
-        flame.execute_shortcut('Rescan Python Hooks')
+        flame.execute_shortcut("Rescan Python Hooks")
     except Exception:
         pass
 
@@ -765,11 +767,11 @@ class MediaInfoFile(object):
     _drop_mode = None
     _file_pattern = None
 
-    def __init__(self, path, **kwargs):
+    def __init__(self, path, logger=None):
 
         # replace log if any
-        if kwargs.get("logger"):
-            self.log = kwargs["logger"]
+        if logger:
+            self.log = logger
 
         # test if `dl_get_media_info` path exists
         self._validate_media_script_path()
@@ -1015,7 +1017,7 @@ class MediaInfoFile(object):
 
         try:
             # execute creation of clip xml template data
-            openpype.run_subprocess(cmd_args)
+            run_subprocess(cmd_args)
         except TypeError as error:
             raise TypeError(
                 "Error creating `{}` due: {}".format(fpath, error))
@@ -1082,21 +1084,21 @@ class MediaInfoFile(object):
             xml_data (ET.Element): clip data
         """
         try:
-            for out_track in xml_data.iter('track'):
-                for out_feed in out_track.iter('feed'):
+            for out_track in xml_data.iter("track"):
+                for out_feed in out_track.iter("feed"):
                     # start frame
                     out_feed_nb_ticks_obj = out_feed.find(
-                        'startTimecode/nbTicks')
+                        "startTimecode/nbTicks")
                     self.start_frame = out_feed_nb_ticks_obj.text
 
                     # fps
                     out_feed_fps_obj = out_feed.find(
-                        'startTimecode/rate')
+                        "startTimecode/rate")
                     self.fps = out_feed_fps_obj.text
 
                     # drop frame mode
                     out_feed_drop_mode_obj = out_feed.find(
-                        'startTimecode/dropMode')
+                        "startTimecode/dropMode")
                     self.drop_mode = out_feed_drop_mode_obj.text
                     break
         except Exception as msg:
@@ -1118,8 +1120,153 @@ class MediaInfoFile(object):
             tree = cET.ElementTree(xml_element_data)
             tree.write(
                 fpath, xml_declaration=True,
-                method='xml', encoding='UTF-8'
+                method="xml", encoding="UTF-8"
             )
         except IOError as error:
             raise IOError(
                 "Not able to write data to file: {}".format(error))
+
+
+class TimeEffectMetadata(object):
+    log = log
+    _data = {}
+    _retime_modes = {
+        0: "speed",
+        1: "timewarp",
+        2: "duration"
+    }
+
+    def __init__(self, segment, logger=None):
+        if logger:
+            self.log = logger
+
+        self._data = self._get_metadata(segment)
+
+    @property
+    def data(self):
+        """Returns timewarp effect data
+
+        Returns:
+            dict: retime data
+        """
+        return self._data
+
+    def _get_metadata(self, segment):
+        effects = segment.effects or []
+        for effect in effects:
+            if effect.type == "Timewarp":
+                with maintained_temp_file_path(".timewarp_node") as tmp_path:
+                    self.log.info("Temp File: {}".format(tmp_path))
+                    effect.save_setup(tmp_path)
+                    return self._get_attributes_from_xml(tmp_path)
+
+        return {}
+
+    def _get_attributes_from_xml(self, tmp_path):
+        with open(tmp_path, "r") as tw_setup_file:
+            tw_setup_string = tw_setup_file.read()
+
+        tw_setup_xml = ET.fromstring(tw_setup_string)
+        tw_setup = self._dictify(tw_setup_xml)
+        try:
+            tw_setup_state = tw_setup["Setup"]["State"][0]
+            mode = int(
+                tw_setup_state["TW_RetimerMode"][0]["_text"]
+            )
+            r_data = {
+                "type": self._retime_modes[mode],
+                "effectStart": int(
+                    tw_setup["Setup"]["Base"][0]["Range"][0]["Start"]),
+                "effectEnd": int(
+                    tw_setup["Setup"]["Base"][0]["Range"][0]["End"])
+            }
+
+            if mode == 0:  # speed
+                r_data[self._retime_modes[mode]] = float(
+                    tw_setup_state["TW_Speed"]
+                    [0]["Channel"][0]["Value"][0]["_text"]
+                ) / 100
+            elif mode == 1:  # timewarp
+                r_data[self._retime_modes[mode]] = self._get_anim_keys(
+                    tw_setup_state["TW_Timing"]
+                )
+            elif mode == 2:  # duration
+                r_data[self._retime_modes[mode]] = {
+                    "start": {
+                        "source": int(
+                            tw_setup_state["TW_DurationTiming"][0]["Channel"]
+                            [0]["KFrames"][0]["Key"][0]["Value"][0]["_text"]
+                        ),
+                        "timeline": int(
+                            tw_setup_state["TW_DurationTiming"][0]["Channel"]
+                            [0]["KFrames"][0]["Key"][0]["Frame"][0]["_text"]
+                        )
+                    },
+                    "end": {
+                        "source": int(
+                            tw_setup_state["TW_DurationTiming"][0]["Channel"]
+                            [0]["KFrames"][0]["Key"][1]["Value"][0]["_text"]
+                        ),
+                        "timeline": int(
+                            tw_setup_state["TW_DurationTiming"][0]["Channel"]
+                            [0]["KFrames"][0]["Key"][1]["Frame"][0]["_text"]
+                        )
+                    }
+                }
+        except Exception:
+            lines = traceback.format_exception(*sys.exc_info())
+            self.log.error("\n".join(lines))
+            return
+
+        return r_data
+
+    def _get_anim_keys(self, setup_cat, index=None):
+        return_data = {
+            "extrapolation": (
+                setup_cat[0]["Channel"][0]["Extrap"][0]["_text"]
+            ),
+            "animKeys": []
+        }
+        for key in setup_cat[0]["Channel"][0]["KFrames"][0]["Key"]:
+            if index and int(key["Index"]) != index:
+                continue
+            key_data = {
+                "source": float(key["Value"][0]["_text"]),
+                "timeline": float(key["Frame"][0]["_text"]),
+                "index": int(key["Index"]),
+                "curveMode": key["CurveMode"][0]["_text"],
+                "curveOrder": key["CurveOrder"][0]["_text"]
+            }
+            if key.get("TangentMode"):
+                key_data["tangentMode"] = key["TangentMode"][0]["_text"]
+
+            return_data["animKeys"].append(key_data)
+
+        return return_data
+
+    def _dictify(self, xml_, root=True):
+        """Convert xml object to dictionary
+
+        Args:
+            xml_ (xml.etree.ElementTree.Element): xml data
+            root (bool, optional): is root available. Defaults to True.
+
+        Returns:
+            dict: dictionarized xml
+        """
+        if root:
+            return {xml_.tag: self._dictify(xml_, False)}
+
+        d = copy(xml_.attrib)
+        if xml_.text:
+            d["_text"] = xml_.text
+
+        for x in xml_.findall("./*"):
+            if x.tag not in d:
+                d[x.tag] = []
+            d[x.tag].append(self._dictify(x, False))
+        return d
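A standalone sketch of the _dictify() convention used above: element text is stored under "_text" and child tags collect into lists, which is why the lookups index with [0]. The XML snippet is made up for illustration:

from copy import copy
from xml.etree import ElementTree as ET

def dictify(xml_, root=True):
    # same convention as TimeEffectMetadata._dictify above
    if root:
        return {xml_.tag: dictify(xml_, False)}
    d = copy(xml_.attrib)
    if xml_.text:
        d["_text"] = xml_.text
    for x in xml_.findall("./*"):
        if x.tag not in d:
            d[x.tag] = []
        d[x.tag].append(dictify(x, False))
    return d

setup = ET.fromstring(
    "<Setup><State><TW_RetimerMode>0</TW_RetimerMode></State></Setup>"
)
data = dictify(setup)
mode = int(data["Setup"]["State"][0]["TW_RetimerMode"][0]["_text"])
print(mode)  # 0 -> "speed" in TimeEffectMetadata._retime_modes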
@@ -1,5 +1,5 @@
 import os
-from Qt import QtWidgets
+from qtpy import QtWidgets
 from copy import deepcopy
 from pprint import pformat
 from openpype.tools.utils.host_tools import HostToolsHelper
@@ -201,3 +201,54 @@ class FlameMenuTimeline(_FlameMenuApp):
         if self.flame:
             self.flame.execute_shortcut('Rescan Python Hooks')
             self.log.info('Rescan Python Hooks')
+
+
+class FlameMenuUniversal(_FlameMenuApp):
+
+    # flameMenuProjectconnect app takes care of the preferences dialog as well
+
+    def __init__(self, framework):
+        _FlameMenuApp.__init__(self, framework)
+
+    def __getattr__(self, name):
+        def method(*args, **kwargs):
+            project = self.dynamic_menu_data.get(name)
+            if project:
+                self.link_project(project)
+        return method
+
+    def build_menu(self):
+        if not self.flame:
+            return []
+
+        menu = deepcopy(self.menu)
+
+        menu['actions'].append({
+            "name": "Load...",
+            "execute": lambda x: callback_selection(
+                x, self.tools_helper.show_loader)
+        })
+        menu['actions'].append({
+            "name": "Manage...",
+            "execute": lambda x: self.tools_helper.show_scene_inventory()
+        })
+        menu['actions'].append({
+            "name": "Library...",
+            "execute": lambda x: self.tools_helper.show_library_loader()
+        })
+        return menu
+
+    def refresh(self, *args, **kwargs):
+        self.rescan()
+
+    def rescan(self, *args, **kwargs):
+        if not self.flame:
+            try:
+                import flame
+                self.flame = flame
+            except ImportError:
+                self.flame = None
+
+        if self.flame:
+            self.flame.execute_shortcut('Rescan Python Hooks')
+            self.log.info('Rescan Python Hooks')
@@ -5,7 +5,7 @@ import os
 import contextlib
 from pyblish import api as pyblish
 
-from openpype.api import Logger
+from openpype.lib import Logger
 from openpype.pipeline import (
     register_loader_plugin_path,
     register_creator_plugin_path,
@@ -90,8 +90,7 @@ def containerise(flame_clip_segment,
 def ls():
     """List available containers.
     """
-    # TODO: ls
-    pass
+    return []
 
 
 def parse_container(tl_segment, validate=True):
@@ -107,6 +106,7 @@ def update_container(tl_segment, data=None):
     # TODO: update_container
     pass
 
+
 def on_pyblish_instance_toggled(instance, old_value, new_value):
     """Toggle node passthrough states on instance toggles."""
@@ -4,18 +4,19 @@ import shutil
 from copy import deepcopy
 from xml.etree import ElementTree as ET
 
-from Qt import QtCore, QtWidgets
-
-import openpype.api as openpype
 import qargparse
+from qtpy import QtCore, QtWidgets
 
 from openpype import style
+from openpype.lib import Logger, StringTemplate
 from openpype.pipeline import LegacyCreator, LoaderPlugin
+from openpype.settings import get_current_project_settings
+
 from . import constants
 from . import lib as flib
 from . import pipeline as fpipeline
 
-log = openpype.Logger.get_logger(__name__)
+log = Logger.get_logger(__name__)
 
 
 class CreatorWidget(QtWidgets.QDialog):
@@ -305,7 +306,7 @@ class Creator(LegacyCreator):
 
     def __init__(self, *args, **kwargs):
         super(Creator, self).__init__(*args, **kwargs)
-        self.presets = openpype.get_current_project_settings()[
+        self.presets = get_current_project_settings()[
             "flame"]["create"].get(self.__class__.__name__, {})
 
         # adding basic current context flame objects
@@ -361,6 +362,8 @@ class PublishableClip:
     index_from_segment_default = False
     use_shot_name_default = False
    include_handles_default = False
+    retimed_handles_default = True
+    retimed_framerange_default = True
 
     def __init__(self, segment, **kwargs):
         self.rename_index = kwargs["rename_index"]
@@ -496,6 +499,14 @@ class PublishableClip:
             "audio", {}).get("value") or False
         self.include_handles = self.ui_inputs.get(
             "includeHandles", {}).get("value") or self.include_handles_default
+        self.retimed_handles = (
+            self.ui_inputs.get("retimedHandles", {}).get("value")
+            or self.retimed_handles_default
+        )
+        self.retimed_framerange = (
+            self.ui_inputs.get("retimedFramerange", {}).get("value")
+            or self.retimed_framerange_default
+        )
 
         # build subset name from layer name
         if self.subset_name == "[ track name ]":
@@ -585,18 +596,28 @@ class PublishableClip:
         if not hero_track and self.vertical_sync:
             # driving layer is set as negative match
             for (_in, _out), hero_data in self.vertical_clip_match.items():
-                hero_data.update({"heroTrack": False})
-                if _in == self.clip_in and _out == self.clip_out:
+                """
+                Since only one hero clip is expected in
+                `self.vertical_clip_match`, this loops only until a
+                non-hero clip is matched against the hero clip.
+
+                `tag_hierarchy_data` is set only once for every clip
+                which is not a hero clip.
+                """
+                _hero_data = deepcopy(hero_data)
+                _hero_data.update({"heroTrack": False})
+                if _in <= self.clip_in and _out >= self.clip_out:
                     data_subset = hero_data["subset"]
                     # add track index in case duplicity of names in hero data
                     if self.subset in data_subset:
-                        hero_data["subset"] = self.subset + str(
+                        _hero_data["subset"] = self.subset + str(
                             self.track_index)
                     # in case track name and subset name is the same then add
                     if self.subset_name == self.track_name:
-                        hero_data["subset"] = self.subset
+                        _hero_data["subset"] = self.subset
                     # assign data to return hierarchy data to tag
-                    tag_hierarchy_data = hero_data
+                    tag_hierarchy_data = _hero_data
+                    break
 
         # add data to return data dict
         self.marker_data.update(tag_hierarchy_data)
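Why the diff deep-copies hero_data before mutating it: the same dict is reused across loop iterations, so in-place updates would leak into later matches. A toy illustration:

from copy import deepcopy

vertical_clip_match = {
    (1001, 1100): {"heroTrack": True, "subset": "plateMain"}  # toy data
}
for (_in, _out), hero_data in vertical_clip_match.items():
    _hero_data = deepcopy(hero_data)
    _hero_data.update({"heroTrack": False})

# the source mapping stays untouched for later iterations/matches
assert vertical_clip_match[(1001, 1100)]["heroTrack"] is True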
@@ -668,6 +689,7 @@ class ClipLoader(LoaderPlugin):
     `update` logic.
 
     """
+    log = log
 
     options = [
         qargparse.Boolean(
@@ -678,22 +700,74 @@ class ClipLoader(LoaderPlugin):
         )
     ]
 
+    _mapping = None
+
+    def get_colorspace(self, context):
+        """Get colorspace name
+
+        Look either to version data or representation data.
+
+        Args:
+            context (dict): version context data
+
+        Returns:
+            str: colorspace name or None
+        """
+        version = context['version']
+        version_data = version.get("data", {})
+        colorspace = version_data.get(
+            "colorspace", None
+        )
+
+        if (
+            not colorspace
+            or colorspace == "Unknown"
+        ):
+            colorspace = context["representation"]["data"].get(
+                "colorspace", None)
+
+        return colorspace
+
+    @classmethod
+    def get_native_colorspace(cls, input_colorspace):
+        """Return native colorspace name.
+
+        Args:
+            input_colorspace (str | None): colorspace name
+
+        Returns:
+            str: native colorspace name defined in mapping or None
+        """
+        if not cls._mapping:
+            settings = get_current_project_settings()["flame"]
+            mapping = settings["imageio"]["profilesMapping"]["inputs"]
+            cls._mapping = {
+                input["ocioName"]: input["flameName"]
+                for input in mapping
+            }
+
+        return cls._mapping.get(input_colorspace)
+
 
 class OpenClipSolver(flib.MediaInfoFile):
     create_new_clip = False
 
     log = log
 
-    def __init__(self, openclip_file_path, feed_data):
+    def __init__(self, openclip_file_path, feed_data, logger=None):
         self.out_file = openclip_file_path
 
+        # replace log if any
+        if logger:
+            self.log = logger
+
         # new feed variables:
         feed_path = feed_data.pop("path")
 
         # initialize parent class
         super(OpenClipSolver, self).__init__(
             feed_path,
-            **feed_data
+            logger=logger
         )
 
         # get other metadata
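A sketch of the ocio-to-flame name lookup cached by get_native_colorspace(); the mapping entries are hypothetical examples of the settings shape:

# Hypothetical settings entries mirroring the "profilesMapping" inputs shape.
mapping = [
    {"ocioName": "ACES - ACEScg", "flameName": "ACEScg"},
    {"ocioName": "Utility - Raw", "flameName": "Raw"},
]
_mapping = {item["ocioName"]: item["flameName"] for item in mapping}
print(_mapping.get("ACES - ACEScg"))  # ACEScg
print(_mapping.get("not-mapped"))     # None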
@@ -701,6 +775,11 @@ class OpenClipSolver(flib.MediaInfoFile):
         self.feed_colorspace = feed_data.get("colorspace")
         self.log.debug("feed_version_name: {}".format(self.feed_version_name))
 
+        # layer rename variables
+        self.layer_rename_template = feed_data["layer_rename_template"]
+        self.layer_rename_patterns = feed_data["layer_rename_patterns"]
+        self.context_data = feed_data["context_data"]
+
         # derive other feed variables
         self.feed_basename = os.path.basename(feed_path)
         self.feed_dir = os.path.dirname(feed_path)
@@ -739,19 +818,22 @@ class OpenClipSolver(flib.MediaInfoFile):
 
     def _create_new_open_clip(self):
         self.log.info("Building new openClip")
-        self.log.debug(">> self.clip_data: {}".format(self.clip_data))
 
-        # clip data coming from MediaInfoFile
-        tmp_xml_feeds = self.clip_data.find('tracks/track/feeds')
-        tmp_xml_feeds.set('currentVersion', self.feed_version_name)
-        for tmp_feed in tmp_xml_feeds:
-            tmp_feed.set('vuid', self.feed_version_name)
+        for tmp_xml_track in self.clip_data.iter("track"):
+            # solve track (layer) name
+            self._rename_track_name(tmp_xml_track)
 
-            # add colorspace if any is set
-            if self.feed_colorspace:
-                self._add_colorspace(tmp_feed, self.feed_colorspace)
+            tmp_xml_feeds = tmp_xml_track.find('feeds')
+            tmp_xml_feeds.set('currentVersion', self.feed_version_name)
 
-            self._clear_handler(tmp_feed)
+            for tmp_feed in tmp_xml_track.iter("feed"):
+                tmp_feed.set('vuid', self.feed_version_name)
+
+                # add colorspace if any is set
+                if self.feed_colorspace:
+                    self._add_colorspace(tmp_feed, self.feed_colorspace)
+
+                self._clear_handler(tmp_feed)
 
         tmp_xml_versions_obj = self.clip_data.find('versions')
         tmp_xml_versions_obj.set('currentVersion', self.feed_version_name)
@@ -764,6 +846,59 @@ class OpenClipSolver(flib.MediaInfoFile):
 
         self.write_clip_data_to_file(self.out_file, self.clip_data)
 
+    def _get_xml_track_obj_by_uid(self, xml_data, uid):
+        # loop all tracks of input xml data
+        for xml_track in xml_data.iter("track"):
+            track_uid = xml_track.get("uid")
+            self.log.debug(
+                ">> track_uid:uid: {}:{}".format(track_uid, uid))
+
+            # return the track matching the requested uid
+            if uid == track_uid:
+                return xml_track
+
+    def _rename_track_name(self, xml_track_data):
+        layer_uid = xml_track_data.get("uid")
+        name_obj = xml_track_data.find("name")
+        layer_name = name_obj.text
+
+        # skip renaming when no pattern matches the layer name
+        if (
+            self.layer_rename_patterns
+            and not any(
+                re.search(lp_.lower(), layer_name.lower())
+                for lp_ in self.layer_rename_patterns
+            )
+        ):
+            return
+
+        formating_data = self._update_formating_data(
+            layerName=layer_name,
+            layerUID=layer_uid
+        )
+        name_obj.text = StringTemplate(
+            self.layer_rename_template
+        ).format(formating_data)
+
+    def _update_formating_data(self, **kwargs):
+        """Update formatting data for layer rename.
+
+        Attributes:
+            key=value (optional): will be included in formatting data
+                as {key: value}
+
+        Returns:
+            dict: anatomy context data for formatting
+        """
+        self.log.debug(">> self.clip_data: {}".format(self.clip_data))
+        clip_name_obj = self.clip_data.find("name")
+        data = {
+            "originalBasename": clip_name_obj.text
+        }
+        # include version context data
+        data.update(self.context_data)
+        # include input kwargs data
+        data.update(kwargs)
+        return data
+
     def _update_open_clip(self):
         self.log.info("Updating openClip ..")
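A standalone sketch of the pattern-gated rename implemented by _rename_track_name above; plain str.format stands in for openpype.lib.StringTemplate, and the template, patterns, and data are hypothetical:

import re

layer_rename_patterns = ["beauty", "ao"]            # hypothetical presets
layer_rename_template = "{shot}_{layerName}"        # hypothetical template
formating_data = {"shot": "sh010", "layerName": "BEAUTY"}

layer_name = formating_data["layerName"]
# rename only when at least one pattern matches (case-insensitive)
if any(re.search(p.lower(), layer_name.lower())
       for p in layer_rename_patterns):
    print(layer_rename_template.format(**formating_data))  # sh010_BEAUTY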
@@ -771,54 +906,84 @@ class OpenClipSolver(flib.MediaInfoFile):
         out_xml = out_xml.getroot()
 
         self.log.debug(">> out_xml: {}".format(out_xml))
-        self.log.debug(">> self.clip_data: {}".format(self.clip_data))
-
-        # Get new feed from tmp file
-        tmp_xml_feed = self.clip_data.find('tracks/track/feeds/feed')
-
-        self._clear_handler(tmp_xml_feed)
-
-        # update fps from MediaInfoFile class
-        if self.fps:
-            tmp_feed_fps_obj = tmp_xml_feed.find(
-                "startTimecode/rate")
-            tmp_feed_fps_obj.text = str(self.fps)
-
-        # update start_frame from MediaInfoFile class
-        if self.start_frame:
-            tmp_feed_nb_ticks_obj = tmp_xml_feed.find(
-                "startTimecode/nbTicks")
-            tmp_feed_nb_ticks_obj.text = str(self.start_frame)
-
-        # update drop_mode from MediaInfoFile class
-        if self.drop_mode:
-            tmp_feed_drop_mode_obj = tmp_xml_feed.find(
-                "startTimecode/dropMode")
-            tmp_feed_drop_mode_obj.text = str(self.drop_mode)
-
-        new_path_obj = tmp_xml_feed.find(
-            "spans/span/path")
-        new_path = new_path_obj.text
-
-        feed_added = False
-        if not self._feed_exists(out_xml, new_path):
-            tmp_xml_feed.set('vuid', self.feed_version_name)
-            # Append new temp file feed to .clip source out xml
-            out_track = out_xml.find("tracks/track")
-            # add colorspace if any is set
-            if self.feed_colorspace:
-                self._add_colorspace(tmp_xml_feed, self.feed_colorspace)
-
-            out_feeds = out_track.find('feeds')
-            out_feeds.set('currentVersion', self.feed_version_name)
-            out_feeds.append(tmp_xml_feed)
-
-            self.log.info(
-                "Appending new feed: {}".format(
-                    self.feed_version_name))
-            feed_added = True
-
-        if feed_added:
+        # loop tmp tracks
+        updated_any = False
+        for tmp_xml_track in self.clip_data.iter("track"):
+            # solve track (layer) name
+            self._rename_track_name(tmp_xml_track)
+
+            # get tmp track uid
+            tmp_track_uid = tmp_xml_track.get("uid")
+            self.log.debug(">> tmp_track_uid: {}".format(tmp_track_uid))
+
+            # get out data track by uid
+            out_track_element = self._get_xml_track_obj_by_uid(
+                out_xml, tmp_track_uid)
+            self.log.debug(
+                ">> out_track_element: {}".format(out_track_element))
+
+            # loop tmp feeds
+            for tmp_xml_feed in tmp_xml_track.iter("feed"):
+                new_path_obj = tmp_xml_feed.find(
+                    "spans/span/path")
+                new_path = new_path_obj.text
+
+                # check if feed path already exists in track's feeds
+                if (
+                    out_track_element is not None
+                    and self._feed_exists(out_track_element, new_path)
+                ):
+                    continue
+
+                # rename versions on feeds
+                tmp_xml_feed.set('vuid', self.feed_version_name)
+                self._clear_handler(tmp_xml_feed)
+
+                # update fps from MediaInfoFile class
+                if self.fps is not None:
+                    tmp_feed_fps_obj = tmp_xml_feed.find(
+                        "startTimecode/rate")
+                    tmp_feed_fps_obj.text = str(self.fps)
+
+                # update start_frame from MediaInfoFile class
+                if self.start_frame is not None:
+                    tmp_feed_nb_ticks_obj = tmp_xml_feed.find(
+                        "startTimecode/nbTicks")
+                    tmp_feed_nb_ticks_obj.text = str(self.start_frame)
+
+                # update drop_mode from MediaInfoFile class
+                if self.drop_mode is not None:
+                    tmp_feed_drop_mode_obj = tmp_xml_feed.find(
+                        "startTimecode/dropMode")
+                    tmp_feed_drop_mode_obj.text = str(self.drop_mode)
+
+                # add colorspace if any is set
+                if self.feed_colorspace is not None:
+                    self._add_colorspace(tmp_xml_feed, self.feed_colorspace)
+
+                # then append/update feed to correct track in output
+                if out_track_element:
+                    self.log.debug("updating track element ..")
+                    # update already present track
+                    out_feeds = out_track_element.find('feeds')
+                    out_feeds.set('currentVersion', self.feed_version_name)
+                    out_feeds.append(tmp_xml_feed)
+
+                    self.log.info(
+                        "Appending new feed: {}".format(
+                            self.feed_version_name))
+                else:
+                    self.log.debug("adding new track element ..")
+                    # create new track as it doesn't exist yet
+                    # set current version to feeds on tmp
+                    tmp_xml_feeds = tmp_xml_track.find('feeds')
+                    tmp_xml_feeds.set('currentVersion', self.feed_version_name)
+                    out_tracks = out_xml.find("tracks")
+                    out_tracks.append(tmp_xml_track)
+
+                updated_any = True
+
+        if updated_any:
             # Append vUID to versions
             out_xml_versions_obj = out_xml.find('versions')
             out_xml_versions_obj.set(
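Why the switch to iter("track") matters in the rewrite above: find('tracks/track') returns only the first track element, while iter("track") visits every track, which is what lets the updated solver handle multi-layer openClips. A toy XML for illustration:

from xml.etree import ElementTree as ET

clip = ET.fromstring(
    "<clip><tracks>"
    "<track uid='t1'><feeds/></track>"
    "<track uid='t2'><feeds/></track>"
    "</tracks></clip>"
)
print(clip.find("tracks/track").get("uid"))        # t1 (first track only)
print([t.get("uid") for t in clip.iter("track")])  # ['t1', 't2'] (all tracks)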
@@ -1,6 +1,6 @@
 import os
 from xml.etree import ElementTree as ET
-from openpype.api import Logger
+from openpype.lib import Logger
 
 log = Logger.get_logger(__name__)
 
Some files were not shown because too many files have changed in this diff.