mirror of https://github.com/ynput/ayon-core.git (synced 2025-12-24 21:04:40 +01:00)

Merge branch 'develop' into feature/attribute_defs_dialog

commit 81a9f26459, 544 changed files with 23947 additions and 10262 deletions
.github/ISSUE_TEMPLATE/bug_report.md (vendored), 2 changes

@@ -6,6 +6,8 @@ labels: bug
 assignees: ''
 ---

+**Running version**
+[ex. 3.14.1-nightly.2]

 **Describe the bug**
 A clear and concise description of what the bug is.
.github/workflows/milestone_assign.yml (vendored, new file), 28 lines

@@ -0,0 +1,28 @@
name: Milestone - assign to PRs

on:
  pull_request_target:
    types: [opened, reopened, edited, synchronize]

jobs:
  run_if_release:
    if: startsWith(github.base_ref, 'release/')
    runs-on: ubuntu-latest
    steps:
      - name: 'Assign Milestone [next-minor]'
        if: github.event.pull_request.milestone == null
        uses: zoispag/action-assign-milestone@v1
        with:
          repo-token: "${{ secrets.GITHUB_TOKEN }}"
          milestone: 'next-minor'

  run_if_develop:
    if: ${{ github.base_ref == 'develop' }}
    runs-on: ubuntu-latest
    steps:
      - name: 'Assign Milestone [next-patch]'
        if: github.event.pull_request.milestone == null
        uses: zoispag/action-assign-milestone@v1
        with:
          repo-token: "${{ secrets.GITHUB_TOKEN }}"
          milestone: 'next-patch'
.github/workflows/milestone_create.yml (vendored, new file), 62 lines

@@ -0,0 +1,62 @@
name: Milestone - create default

on:
  milestone:
    types: [closed, edited]

jobs:
  generate-next-patch:
    runs-on: ubuntu-latest
    steps:
      - name: 'Get Milestones'
        uses: "WyriHaximus/github-action-get-milestones@master"
        id: milestones
        env:
          GITHUB_TOKEN: "${{ secrets.GITHUB_TOKEN }}"

      - run: printf "name=number::%s" $(printenv MILESTONES | jq --arg MILESTONE $(printenv MILESTONE) '.[] | select(.title == $MILESTONE) | .number')
        id: querymilestone
        env:
          MILESTONES: ${{ steps.milestones.outputs.milestones }}
          MILESTONE: "next-patch"

      - name: Read output
        run: |
          echo "${{ steps.querymilestone.outputs.number }}"

      - name: 'Create `next-patch` milestone'
        if: steps.querymilestone.outputs.number == ''
        id: createmilestone
        uses: "WyriHaximus/github-action-create-milestone@v1"
        with:
          title: 'next-patch'
        env:
          GITHUB_TOKEN: "${{ secrets.GITHUB_TOKEN }}"

  generate-next-minor:
    runs-on: ubuntu-latest
    steps:
      - name: 'Get Milestones'
        uses: "WyriHaximus/github-action-get-milestones@master"
        id: milestones
        env:
          GITHUB_TOKEN: "${{ secrets.GITHUB_TOKEN }}"

      - run: printf "name=number::%s" $(printenv MILESTONES | jq --arg MILESTONE $(printenv MILESTONE) '.[] | select(.title == $MILESTONE) | .number')
        id: querymilestone
        env:
          MILESTONES: ${{ steps.milestones.outputs.milestones }}
          MILESTONE: "next-minor"

      - name: Read output
        run: |
          echo "${{ steps.querymilestone.outputs.number }}"

      - name: 'Create `next-minor` milestone'
        if: steps.querymilestone.outputs.number == ''
        id: createmilestone
        uses: "WyriHaximus/github-action-create-milestone@v1"
        with:
          title: 'next-minor'
        env:
          GITHUB_TOKEN: "${{ secrets.GITHUB_TOKEN }}"
.github/workflows/prerelease.yml (vendored), 46 changes

@@ -37,27 +37,27 @@ jobs:
          echo ::set-output name=next_tag::$RESULT

      - name: "✏️ Generate full changelog"
        if: steps.version_type.outputs.type != 'skip'
        id: generate-full-changelog
        uses: heinrichreimer/github-changelog-generator-action@v2.2
        with:
          token: ${{ secrets.ADMIN_TOKEN }}
          addSections: '{"documentation":{"prefix":"### 📖 Documentation","labels":["type: documentation"]},"tests":{"prefix":"### ✅ Testing","labels":["tests"]},"feature":{"prefix":"**🆕 New features**", "labels":["type: feature"]},"breaking":{"prefix":"**💥 Breaking**", "labels":["breaking"]},"enhancements":{"prefix":"**🚀 Enhancements**", "labels":["type: enhancement"]},"bugs":{"prefix":"**🐛 Bug fixes**", "labels":["type: bug"]},"deprecated":{"prefix":"**⚠️ Deprecations**", "labels":["depreciated"]}, "refactor":{"prefix":"**🔀 Refactored code**", "labels":["refactor"]}}'
          issues: false
          issuesWoLabels: false
          sinceTag: "3.0.0"
          maxIssues: 100
          pullRequests: true
          prWoLabels: false
          author: false
          unreleased: true
          compareLink: true
          stripGeneratorNotice: true
          verbose: true
          unreleasedLabel: ${{ steps.version.outputs.next_tag }}
          excludeTagsRegex: "CI/.+"
          releaseBranch: "main"
      # - name: "✏️ Generate full changelog"
      #   if: steps.version_type.outputs.type != 'skip'
      #   id: generate-full-changelog
      #   uses: heinrichreimer/github-changelog-generator-action@v2.3
      #   with:
      #     token: ${{ secrets.ADMIN_TOKEN }}
      #     addSections: '{"documentation":{"prefix":"### 📖 Documentation","labels":["type: documentation"]},"tests":{"prefix":"### ✅ Testing","labels":["tests"]},"feature":{"prefix":"**🆕 New features**", "labels":["type: feature"]},"breaking":{"prefix":"**💥 Breaking**", "labels":["breaking"]},"enhancements":{"prefix":"**🚀 Enhancements**", "labels":["type: enhancement"]},"bugs":{"prefix":"**🐛 Bug fixes**", "labels":["type: bug"]},"deprecated":{"prefix":"**⚠️ Deprecations**", "labels":["depreciated"]}, "refactor":{"prefix":"**🔀 Refactored code**", "labels":["refactor"]}}'
      #     issues: false
      #     issuesWoLabels: false
      #     sinceTag: "3.12.0"
      #     maxIssues: 100
      #     pullRequests: true
      #     prWoLabels: false
      #     author: false
      #     unreleased: true
      #     compareLink: true
      #     stripGeneratorNotice: true
      #     verbose: true
      #     unreleasedLabel: ${{ steps.version.outputs.next_tag }}
      #     excludeTagsRegex: "CI/.+"
      #     releaseBranch: "main"

      - name: "🖨️ Print changelog to console"
        if: steps.version_type.outputs.type != 'skip'

@@ -85,11 +85,11 @@ jobs:
          tags: true
          unprotect_reviews: true

      - name: 🔨 Merge main back to develop
        uses: everlytic/branch-merge@1.1.0
        if: steps.version_type.outputs.type != 'skip'
        with:
          github_token: ${{ secrets.ADMIN_TOKEN }}
          source_ref: 'main'
          target_branch: 'develop'
          commit_message_template: '[Automated] Merged {source_ref} into {target_branch}'
.github/workflows/release.yml (vendored), 54 changes

@@ -2,7 +2,7 @@ name: Stable Release

on:
  release:
    types:
      - prereleased

jobs:

@@ -13,7 +13,7 @@ jobs:
    steps:
      - name: 🚛 Checkout Code
        uses: actions/checkout@v2
        with:
          fetch-depth: 0

      - name: Set up Python

@@ -33,27 +33,27 @@ jobs:
          echo ::set-output name=last_release::$LASTRELEASE
          echo ::set-output name=release_tag::$RESULT

      - name: "✏️ Generate full changelog"
        if: steps.version.outputs.release_tag != 'skip'
        id: generate-full-changelog
        uses: heinrichreimer/github-changelog-generator-action@v2.2
        with:
          token: ${{ secrets.ADMIN_TOKEN }}
          addSections: '{"documentation":{"prefix":"### 📖 Documentation","labels":["type: documentation"]},"tests":{"prefix":"### ✅ Testing","labels":["tests"]},"feature":{"prefix":"**🆕 New features**", "labels":["type: feature"]},"breaking":{"prefix":"**💥 Breaking**", "labels":["breaking"]},"enhancements":{"prefix":"**🚀 Enhancements**", "labels":["type: enhancement"]},"bugs":{"prefix":"**🐛 Bug fixes**", "labels":["type: bug"]},"deprecated":{"prefix":"**⚠️ Deprecations**", "labels":["depreciated"]}, "refactor":{"prefix":"**🔀 Refactored code**", "labels":["refactor"]}}'
          issues: false
          issuesWoLabels: false
          sinceTag: "3.0.0"
          maxIssues: 100
          pullRequests: true
          prWoLabels: false
          author: false
          unreleased: true
          compareLink: true
          stripGeneratorNotice: true
          verbose: true
          futureRelease: ${{ steps.version.outputs.release_tag }}
          excludeTagsRegex: "CI/.+"
          releaseBranch: "main"
      # - name: "✏️ Generate full changelog"
      #   if: steps.version.outputs.release_tag != 'skip'
      #   id: generate-full-changelog
      #   uses: heinrichreimer/github-changelog-generator-action@v2.3
      #   with:
      #     token: ${{ secrets.ADMIN_TOKEN }}
      #     addSections: '{"documentation":{"prefix":"### 📖 Documentation","labels":["type: documentation"]},"tests":{"prefix":"### ✅ Testing","labels":["tests"]},"feature":{"prefix":"**🆕 New features**", "labels":["type: feature"]},"breaking":{"prefix":"**💥 Breaking**", "labels":["breaking"]},"enhancements":{"prefix":"**🚀 Enhancements**", "labels":["type: enhancement"]},"bugs":{"prefix":"**🐛 Bug fixes**", "labels":["type: bug"]},"deprecated":{"prefix":"**⚠️ Deprecations**", "labels":["depreciated"]}, "refactor":{"prefix":"**🔀 Refactored code**", "labels":["refactor"]}}'
      #     issues: false
      #     issuesWoLabels: false
      #     sinceTag: "3.12.0"
      #     maxIssues: 100
      #     pullRequests: true
      #     prWoLabels: false
      #     author: false
      #     unreleased: true
      #     compareLink: true
      #     stripGeneratorNotice: true
      #     verbose: true
      #     futureRelease: ${{ steps.version.outputs.release_tag }}
      #     excludeTagsRegex: "CI/.+"
      #     releaseBranch: "main"

      - name: 💾 Commit and Tag
        id: git_commit

@@ -73,8 +73,8 @@ jobs:
          token: ${{ secrets.ADMIN_TOKEN }}
          branch: main
          tags: true
          unprotect_reviews: true

      - name: "✏️ Generate last changelog"
        if: steps.version.outputs.release_tag != 'skip'
        id: generate-last-changelog

@@ -114,11 +114,11 @@ jobs:
        with:
          tag: "${{ steps.version.outputs.current_version }}"

      - name: 🔁 Merge main back to develop
        if: steps.version.outputs.release_tag != 'skip'
        uses: everlytic/branch-merge@1.1.0
        with:
          github_token: ${{ secrets.ADMIN_TOKEN }}
          source_ref: 'main'
          target_branch: 'develop'
          commit_message_template: '[Automated] Merged release {source_ref} into {target_branch}'
.gitignore (vendored), 5 changes

@@ -107,3 +107,8 @@ website/.docusaurus
 mypy.ini

 tools/run_eventserver.*
+
+# Developer tools
+tools/dev_*
+
+.github_changelog_generator
CHANGELOG.md, 1773 changes (diff suppressed because it is too large)
HISTORY.md, 1941 changes (diff suppressed because it is too large)
README.md, 10 changes

@@ -41,7 +41,7 @@ It can be built and ran on all common platforms. We develop and test on the foll
 - **Linux**
   - **Ubuntu** 20.04 LTS
   - **Centos** 7
 - **Mac OSX**
   - **10.15** Catalina
   - **11.1** Big Sur (using Rosetta2)

@@ -287,6 +287,14 @@ To run tests, execute `.\tools\run_tests(.ps1|.sh)`.

 **Note that it needs an existing virtual environment.**

+
+Developer tools
+-------------
+
+If you wish to add your own tools to the `.\tools` folder without git tracking, you can do so by adding them with the `dev_*` prefix (example: `dev_clear_pyc(.ps1|.sh)`).
+

 ## Contributors ✨

 Thanks goes to these wonderful people ([emoji key](https://allcontributors.org/docs/en/emoji-key)):
common/openpype_common/distribution/README.md (new file), 18 lines

@@ -0,0 +1,18 @@
Addon distribution tool
------------------------

The code in this folder is the backend portion of the addon distribution logic for the v4 server.

Each host and module will be a separate addon in the future, and each v4 server could run a different set of addons.

As a first step, the client (running on an artist machine) asks the v4 server for a list of enabled addons.
(It expects a list of JSON documents matching the `addon_distribution.py:AddonInfo` object.)
Next it checks whether each enabled addon version is present in the local folder. If a version of
an addon is missing, the client uses the information in the addon to download a zip file
(from http, a shared local disk, or git) and unzip it.

A required part of addon distribution will be the sharing of dependencies (python libraries, utilities), which is not part of this folder.

The location of this folder might change in the future, as a client will need to add this folder to sys.path reliably.

This code needs to be as independent of OpenPype code as possible!
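The flow described above maps onto the functions introduced in `addon_distribution.py` later in this diff. A minimal sketch of the wiring, assuming a made-up server endpoint and local addon folder:

from common.openpype_common.distribution.addon_distribution import (
    AddonDownloader,
    OSAddonDownloader,
    HTTPAddonDownloader,
    check_addons,
)
from common.openpype_common.distribution.addon_info import UrlType

# One downloader per source type; the factory picks the right one per addon.
factory = AddonDownloader()
factory.register_format(UrlType.FILESYSTEM, OSAddonDownloader)
factory.register_format(UrlType.HTTP, HTTPAddonDownloader)

# Placeholder endpoint and destination, both deployment specific.
# Asks the server, compares local folders, downloads and unzips whatever is
# missing; raises RuntimeError if any addon failed to update.
check_addons("https://v4-server.example.com/api/addons",
             "/opt/openpype/addons",
             factory)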
common/openpype_common/distribution/__init__.py (new file, empty)
common/openpype_common/distribution/addon_distribution.py (new file), 208 lines

@@ -0,0 +1,208 @@
import os
from enum import Enum
from abc import abstractmethod
import attr
import logging
import requests
import platform
import shutil

from .file_handler import RemoteFileHandler
from .addon_info import AddonInfo


class UpdateState(Enum):
    EXISTS = "exists"
    UPDATED = "updated"
    FAILED = "failed"


class AddonDownloader:
    log = logging.getLogger(__name__)

    def __init__(self):
        self._downloaders = {}

    def register_format(self, downloader_type, downloader):
        self._downloaders[downloader_type.value] = downloader

    def get_downloader(self, downloader_type):
        downloader = self._downloaders.get(downloader_type)
        if not downloader:
            raise ValueError(f"{downloader_type} not implemented")
        return downloader()

    @classmethod
    @abstractmethod
    def download(cls, source, destination):
        """Returns url to downloaded addon zip file.

        Args:
            source (dict): {"type": "http", "url": "https://...", ...}
            destination (str): local folder to unzip
        Returns:
            (str) local path to addon zip file
        """
        pass

    @classmethod
    def check_hash(cls, addon_path, addon_hash):
        """Compares 'hash' of downloaded 'addon_url' file.

        Args:
            addon_path (str): local path to addon zip file
            addon_hash (str): sha256 hash of zip file
        Raises:
            ValueError if hashes don't match
        """
        if not os.path.exists(addon_path):
            raise ValueError(f"{addon_path} doesn't exist.")
        if not RemoteFileHandler.check_integrity(addon_path,
                                                 addon_hash,
                                                 hash_type="sha256"):
            raise ValueError(f"{addon_path} doesn't match expected hash.")

    @classmethod
    def unzip(cls, addon_zip_path, destination):
        """Unzips local 'addon_zip_path' to 'destination'.

        Args:
            addon_zip_path (str): local path to addon zip file
            destination (str): local folder to unzip
        """
        RemoteFileHandler.unzip(addon_zip_path, destination)
        os.remove(addon_zip_path)

    @classmethod
    def remove(cls, addon_url):
        pass


class OSAddonDownloader(AddonDownloader):

    @classmethod
    def download(cls, source, destination):
        # OS doesn't need to download, unzip directly
        addon_url = source["path"].get(platform.system().lower())
        if not os.path.exists(addon_url):
            raise ValueError("{} is not accessible".format(addon_url))
        return addon_url


class HTTPAddonDownloader(AddonDownloader):
    CHUNK_SIZE = 100000

    @classmethod
    def download(cls, source, destination):
        source_url = source["url"]
        cls.log.debug(f"Downloading {source_url} to {destination}")
        file_name = os.path.basename(destination)
        _, ext = os.path.splitext(file_name)
        if (ext.replace(".", '') not
                in set(RemoteFileHandler.IMPLEMENTED_ZIP_FORMATS)):
            file_name += ".zip"
        RemoteFileHandler.download_url(source_url,
                                       destination,
                                       filename=file_name)

        return os.path.join(destination, file_name)


def get_addons_info(server_endpoint):
    """Returns list of addon information from Server"""
    # TODO temp
    # addon_info = AddonInfo(
    #     **{"name": "openpype_slack",
    #        "version": "1.0.0",
    #        "addon_url": "c:/projects/openpype_slack_1.0.0.zip",
    #        "type": UrlType.FILESYSTEM,
    #        "hash": "4be25eb6215e91e5894d3c5475aeb1e379d081d3f5b43b4ee15b0891cf5f5658"})  # noqa
    #
    # http_addon = AddonInfo(
    #     **{"name": "openpype_slack",
    #        "version": "1.0.0",
    #        "addon_url": "https://drive.google.com/file/d/1TcuV8c2OV8CcbPeWi7lxOdqWsEqQNPYy/view?usp=sharing",  # noqa
    #        "type": UrlType.HTTP,
    #        "hash": "4be25eb6215e91e5894d3c5475aeb1e379d081d3f5b43b4ee15b0891cf5f5658"})  # noqa

    response = requests.get(server_endpoint)
    if not response.ok:
        raise Exception(response.text)

    addons_info = []
    for addon in response.json():
        addons_info.append(AddonInfo(**addon))
    return addons_info


def update_addon_state(addon_infos, destination_folder, factory,
                       log=None):
    """Loops through all 'addon_infos', compares local version, unzips.

    Loops through server provided list of dictionaries with information about
    available addons. Looks if each addon is already present and deployed.
    If it isn't, the addon zip gets downloaded and unzipped into
    'destination_folder'.
    Args:
        addon_infos (list of AddonInfo)
        destination_folder (str): local path
        factory (AddonDownloader): factory to get appropriate downloader per
            addon type
        log (logging.Logger)
    Returns:
        (dict): {"addon_full_name": UpdateState.value
            (eg. "exists"|"updated"|"failed")}
    """
    if not log:
        log = logging.getLogger(__name__)

    download_states = {}
    for addon in addon_infos:
        full_name = "{}_{}".format(addon.name, addon.version)
        addon_dest = os.path.join(destination_folder, full_name)

        if os.path.isdir(addon_dest):
            log.debug(f"Addon version folder {addon_dest} already exists.")
            download_states[full_name] = UpdateState.EXISTS.value
            continue

        for source in addon.sources:
            download_states[full_name] = UpdateState.FAILED.value
            try:
                downloader = factory.get_downloader(source.type)
                zip_file_path = downloader.download(attr.asdict(source),
                                                    addon_dest)
                downloader.check_hash(zip_file_path, addon.hash)
                downloader.unzip(zip_file_path, addon_dest)
                download_states[full_name] = UpdateState.UPDATED.value
                break
            except Exception:
                log.warning(f"Error happened during updating {addon.name}",
                            exc_info=True)
                if os.path.isdir(addon_dest):
                    log.debug(f"Cleaning {addon_dest}")
                    shutil.rmtree(addon_dest)

    return download_states


def check_addons(server_endpoint, addon_folder, downloaders):
    """Main entry point to compare existing addons with those on server.

    Args:
        server_endpoint (str): url to v4 server endpoint
        addon_folder (str): local dir path for addons
        downloaders (AddonDownloader): factory of downloaders

    Raises:
        (RuntimeError) if any addon failed update
    """
    addons_info = get_addons_info(server_endpoint)
    result = update_addon_state(addons_info,
                                addon_folder,
                                downloaders)
    if UpdateState.FAILED.value in result.values():
        raise RuntimeError(f"Unable to update some addons {result}")


def cli(*args):
    raise NotImplementedError
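Because `register_format` keys downloaders by the `UrlType` value, adding a transport means one more subclass. A hypothetical sketch (no `git` downloader exists in this PR):

from common.openpype_common.distribution.addon_distribution import (
    AddonDownloader,
)
from common.openpype_common.distribution.addon_info import UrlType


class GitAddonDownloader(AddonDownloader):
    """Hypothetical downloader, not part of this PR."""

    @classmethod
    def download(cls, source, destination):
        # A real implementation would clone or archive the repository and
        # return a local path to the produced zip file.
        raise NotImplementedError


factory = AddonDownloader()
factory.register_format(UrlType.GIT, GitAddonDownloader)
# Lookup uses the enum's value ("git") and returns a fresh instance.
downloader = factory.get_downloader(UrlType.GIT.value)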
common/openpype_common/distribution/addon_info.py (new file), 80 lines

@@ -0,0 +1,80 @@
import attr
from enum import Enum


class UrlType(Enum):
    HTTP = "http"
    GIT = "git"
    FILESYSTEM = "filesystem"


@attr.s
class MultiPlatformPath(object):
    windows = attr.ib(default=None)
    linux = attr.ib(default=None)
    darwin = attr.ib(default=None)


@attr.s
class AddonSource(object):
    type = attr.ib()


@attr.s
class LocalAddonSource(AddonSource):
    path = attr.ib(default=attr.Factory(MultiPlatformPath))


@attr.s
class WebAddonSource(AddonSource):
    url = attr.ib(default=None)


@attr.s
class VersionData(object):
    version_data = attr.ib(default=None)


@attr.s
class AddonInfo(object):
    """Object matching json payload from Server"""
    name = attr.ib()
    version = attr.ib()
    title = attr.ib(default=None)
    sources = attr.ib(default=attr.Factory(dict))
    hash = attr.ib(default=None)
    description = attr.ib(default=None)
    license = attr.ib(default=None)
    authors = attr.ib(default=None)

    @classmethod
    def from_dict(cls, data):
        sources = []

        production_version = data.get("productionVersion")
        if not production_version:
            return

        # server payload contains info about all versions
        # active addon must have 'productionVersion' and matching version info
        version_data = data.get("versions", {})[production_version]

        for source in version_data.get("clientSourceInfo", []):
            if source.get("type") == UrlType.FILESYSTEM.value:
                source_addon = LocalAddonSource(type=source["type"],
                                                path=source["path"])
            if source.get("type") == UrlType.HTTP.value:
                source_addon = WebAddonSource(type=source["type"],
                                              url=source["url"])

            sources.append(source_addon)

        return cls(name=data.get("name"),
                   version=production_version,
                   sources=sources,
                   hash=data.get("hash"),
                   description=data.get("description"),
                   title=data.get("title"),
                   license=data.get("license"),
                   authors=data.get("authors"))
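A small sketch of how `AddonInfo.from_dict` reacts to payload shape; the payload below is a trimmed assumption modeled on the test fixture later in this diff:

from common.openpype_common.distribution.addon_info import AddonInfo

payload = {
    "name": "openpype_slack",
    "productionVersion": "1.0.0",
    "versions": {
        "1.0.0": {
            "clientSourceInfo": [
                {"type": "http", "url": "https://example.com/slack.zip"}
            ]
        }
    },
}

addon = AddonInfo.from_dict(payload)
print(addon.name, addon.version)   # openpype_slack 1.0.0
print(addon.sources[0].url)        # https://example.com/slack.zip

# Without an active production version the parser returns None.
payload.pop("productionVersion")
print(AddonInfo.from_dict(payload))  # None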
@@ -21,7 +21,7 @@ class RemoteFileHandler:
         'tar.gz', 'tar.xz', 'tar.bz2']

     @staticmethod
-    def calculate_md5(fpath, chunk_size):
+    def calculate_md5(fpath, chunk_size=10000):
         md5 = hashlib.md5()
         with open(fpath, 'rb') as f:
             for chunk in iter(lambda: f.read(chunk_size), b''):

@@ -33,17 +33,45 @@ class RemoteFileHandler:
         return md5 == RemoteFileHandler.calculate_md5(fpath, **kwargs)

     @staticmethod
-    def check_integrity(fpath, md5=None):
+    def calculate_sha256(fpath):
+        """Calculate sha256 for content of the file.
+
+        Args:
+            fpath (str): Path to file.
+
+        Returns:
+            str: hex encoded sha256
+
+        """
+        h = hashlib.sha256()
+        b = bytearray(128 * 1024)
+        mv = memoryview(b)
+        with open(fpath, 'rb', buffering=0) as f:
+            for n in iter(lambda: f.readinto(mv), 0):
+                h.update(mv[:n])
+        return h.hexdigest()
+
+    @staticmethod
+    def check_sha256(fpath, sha256, **kwargs):
+        return sha256 == RemoteFileHandler.calculate_sha256(fpath, **kwargs)
+
+    @staticmethod
+    def check_integrity(fpath, hash_value=None, hash_type=None):
         if not os.path.isfile(fpath):
             return False
-        if md5 is None:
+        if hash_value is None:
             return True
-        return RemoteFileHandler.check_md5(fpath, md5)
+        if not hash_type:
+            raise ValueError("Provide hash type, md5 or sha256")
+        if hash_type == 'md5':
+            return RemoteFileHandler.check_md5(fpath, hash_value)
+        if hash_type == "sha256":
+            return RemoteFileHandler.check_sha256(fpath, hash_value)

     @staticmethod
     def download_url(
         url, root, filename=None,
-        md5=None, max_redirect_hops=3
+        sha256=None, max_redirect_hops=3
     ):
         """Download a file from a url and place it in root.
         Args:

@@ -51,7 +79,7 @@ class RemoteFileHandler:
             root (str): Directory to place downloaded file in
             filename (str, optional): Name to save the file under.
                 If None, use the basename of the URL
-            md5 (str, optional): MD5 checksum of the download.
+            sha256 (str, optional): sha256 checksum of the download.
                 If None, do not check
             max_redirect_hops (int, optional): Maximum number of redirect
                 hops allowed

@@ -64,7 +92,8 @@ class RemoteFileHandler:
         os.makedirs(root, exist_ok=True)

         # check if file is already present locally
-        if RemoteFileHandler.check_integrity(fpath, md5):
+        if RemoteFileHandler.check_integrity(fpath,
+                                             sha256, hash_type="sha256"):
             print('Using downloaded and verified file: ' + fpath)
             return

@@ -76,7 +105,7 @@ class RemoteFileHandler:
         file_id = RemoteFileHandler._get_google_drive_file_id(url)
         if file_id is not None:
             return RemoteFileHandler.download_file_from_google_drive(
-                file_id, root, filename, md5)
+                file_id, root, filename, sha256)

         # download the file
         try:

@@ -92,20 +121,21 @@ class RemoteFileHandler:
             raise e

         # check integrity of downloaded file
-        if not RemoteFileHandler.check_integrity(fpath, md5):
+        if not RemoteFileHandler.check_integrity(fpath,
+                                                 sha256, hash_type="sha256"):
             raise RuntimeError("File not found or corrupted.")

     @staticmethod
     def download_file_from_google_drive(file_id, root,
                                         filename=None,
-                                        md5=None):
+                                        sha256=None):
         """Download a Google Drive file from and place it in root.
         Args:
             file_id (str): id of file to be downloaded
             root (str): Directory to place downloaded file in
             filename (str, optional): Name to save the file under.
                 If None, use the id of the file.
-            md5 (str, optional): MD5 checksum of the download.
+            sha256 (str, optional): sha256 checksum of the download.
                 If None, do not check
         """
         # Based on https://stackoverflow.com/questions/38511444/python-download-files-from-google-drive-using-url  # noqa

@@ -119,8 +149,8 @@ class RemoteFileHandler:

         os.makedirs(root, exist_ok=True)

-        if os.path.isfile(fpath) and RemoteFileHandler.check_integrity(fpath,
-                                                                       md5):
+        if os.path.isfile(fpath) and RemoteFileHandler.check_integrity(
+                fpath, sha256, hash_type="sha256"):
             print('Using downloaded and verified file: ' + fpath)
         else:
             session = requests.Session()
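The memoryview loop above hashes the file in 128 KiB chunks without allocating a new buffer per read. A usage sketch, assuming the module path implied by the relative import in `addon_distribution.py` and a placeholder zip path:

import hashlib

from common.openpype_common.distribution.file_handler import RemoteFileHandler

path = "/tmp/openpype_slack_1.0.0.zip"  # placeholder; hash comes from server
with open(path, "rb") as f:
    expected = hashlib.sha256(f.read()).hexdigest()

# Both entry points should agree with the chunked computation above.
assert RemoteFileHandler.calculate_sha256(path) == expected
assert RemoteFileHandler.check_integrity(path, expected, hash_type="sha256")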
@@ -0,0 +1,167 @@
import pytest
import attr
import tempfile

from common.openpype_common.distribution.addon_distribution import (
    AddonDownloader,
    OSAddonDownloader,
    HTTPAddonDownloader,
    AddonInfo,
    update_addon_state,
    UpdateState
)
from common.openpype_common.distribution.addon_info import UrlType


@pytest.fixture
def addon_downloader():
    addon_downloader = AddonDownloader()
    addon_downloader.register_format(UrlType.FILESYSTEM, OSAddonDownloader)
    addon_downloader.register_format(UrlType.HTTP, HTTPAddonDownloader)

    yield addon_downloader


@pytest.fixture
def http_downloader(addon_downloader):
    yield addon_downloader.get_downloader(UrlType.HTTP.value)


@pytest.fixture
def temp_folder():
    yield tempfile.mkdtemp()


@pytest.fixture
def sample_addon_info():
    addon_info = {
        "versions": {
            "1.0.0": {
                "clientPyproject": {
                    "tool": {
                        "poetry": {
                            "dependencies": {
                                "nxtools": "^1.6",
                                "orjson": "^3.6.7",
                                "typer": "^0.4.1",
                                "email-validator": "^1.1.3",
                                "python": "^3.10",
                                "fastapi": "^0.73.0"
                            }
                        }
                    }
                },
                "hasSettings": True,
                "clientSourceInfo": [
                    {
                        "type": "http",
                        "url": "https://drive.google.com/file/d/1TcuV8c2OV8CcbPeWi7lxOdqWsEqQNPYy/view?usp=sharing"  # noqa
                    },
                    {
                        "type": "filesystem",
                        "path": {
                            "windows": ["P:/sources/some_file.zip",
                                        "W:/sources/some_file.zip"],  # noqa
                            "linux": ["/mnt/srv/sources/some_file.zip"],
                            "darwin": ["/Volumes/srv/sources/some_file.zip"]
                        }
                    }
                ],
                "frontendScopes": {
                    "project": {
                        "sidebar": "hierarchy"
                    }
                }
            }
        },
        "description": "",
        "title": "Slack addon",
        "name": "openpype_slack",
        "productionVersion": "1.0.0",
        "hash": "4be25eb6215e91e5894d3c5475aeb1e379d081d3f5b43b4ee15b0891cf5f5658"  # noqa
    }
    yield addon_info


def test_register(printer):
    addon_downloader = AddonDownloader()

    assert len(addon_downloader._downloaders) == 0, "Contains registered"

    addon_downloader.register_format(UrlType.FILESYSTEM, OSAddonDownloader)
    assert len(addon_downloader._downloaders) == 1, "Should contain one"


def test_get_downloader(printer, addon_downloader):
    assert addon_downloader.get_downloader(UrlType.FILESYSTEM.value), "Should find"  # noqa

    with pytest.raises(ValueError):
        addon_downloader.get_downloader("unknown"), "Shouldn't find"


def test_addon_info(printer, sample_addon_info):
    """Tests parsing of expected payload from v4 server into AddonInfo."""
    valid_minimum = {
        "name": "openpype_slack",
        "productionVersion": "1.0.0",
        "versions": {
            "1.0.0": {
                "clientSourceInfo": [
                    {
                        "type": "filesystem",
                        "path": {
                            "windows": [
                                "P:/sources/some_file.zip",
                                "W:/sources/some_file.zip"],
                            "linux": [
                                "/mnt/srv/sources/some_file.zip"],
                            "darwin": [
                                "/Volumes/srv/sources/some_file.zip"]  # noqa
                        }
                    }
                ]
            }
        }
    }

    assert AddonInfo.from_dict(valid_minimum), "Missing required fields"

    valid_minimum["versions"].pop("1.0.0")
    with pytest.raises(KeyError):
        assert not AddonInfo.from_dict(valid_minimum), "Must fail without version data"  # noqa

    valid_minimum.pop("productionVersion")
    assert not AddonInfo.from_dict(
        valid_minimum), "none if not productionVersion"  # noqa

    addon = AddonInfo.from_dict(sample_addon_info)
    assert addon, "Should be created"
    assert addon.name == "openpype_slack", "Incorrect name"
    assert addon.version == "1.0.0", "Incorrect version"

    with pytest.raises(TypeError):
        assert addon["name"], "Dict approach not implemented"

    addon_as_dict = attr.asdict(addon)
    assert addon_as_dict["name"], "Dict approach should work"


def test_update_addon_state(printer, sample_addon_info,
                            temp_folder, addon_downloader):
    """Tests possible cases of addon update."""
    addon_info = AddonInfo.from_dict(sample_addon_info)
    orig_hash = addon_info.hash

    addon_info.hash = "brokenhash"
    result = update_addon_state([addon_info], temp_folder, addon_downloader)
    assert result["openpype_slack_1.0.0"] == UpdateState.FAILED.value, \
        "Update should fail because of wrong hash"

    addon_info.hash = orig_hash
    result = update_addon_state([addon_info], temp_folder, addon_downloader)
    assert result["openpype_slack_1.0.0"] == UpdateState.UPDATED.value, \
        "Addon should have been updated"

    result = update_addon_state([addon_info], temp_folder, addon_downloader)
    assert result["openpype_slack_1.0.0"] == UpdateState.EXISTS.value, \
        "Addon should already exist"
@@ -815,6 +815,13 @@ class BootstrapRepos:
         except Exception as e:
             self._print(str(e), LOG_ERROR, exc_info=True)
             return None
+        if not destination_dir.exists():
+            destination_dir.mkdir(parents=True)
+        elif not destination_dir.is_dir():
+            self._print(
+                "Destination exists but is not directory.", LOG_ERROR)
+            return None

         try:
             shutil.move(zip_file.as_posix(), destination_dir.as_posix())
         except shutil.Error as e:
@@ -388,8 +388,11 @@ class InstallDialog(QtWidgets.QDialog):
         install_thread.start()

     def _installation_finished(self):
+        # TODO we should find out why status can be set to 'None'?
+        # - 'InstallThread.run' should handle all cases so not sure where
+        #   that come from
         status = self._install_thread.result()
-        if status >= 0:
+        if status is not None and status >= 0:
             self._update_progress(100)
             QtWidgets.QApplication.processEvents()
             self.done(3)
@@ -11,7 +11,6 @@ from .lib import (
     PypeLogger,
     Logger,
     Anatomy,
     config,
     execute,
     run_subprocess,
     version_up,

@@ -72,7 +71,6 @@ __all__ = [
     "PypeLogger",
     "Logger",
     "Anatomy",
     "config",
     "execute",
     "get_default_components",
     "ApplicationManager",
@@ -277,6 +277,13 @@ def projectmanager():
     PypeCommands().launch_project_manager()


+@main.command(context_settings={"ignore_unknown_options": True})
+def publish_report_viewer():
+    from openpype.tools.publisher.publish_report_viewer import main
+
+    sys.exit(main())


 @main.command()
 @click.argument("output_path")
 @click.option("--project", help="Define project context")
@@ -14,6 +14,8 @@ from bson.objectid import ObjectId

 from .mongo import get_project_database, get_project_connection

+PatternType = type(re.compile(""))


 def _prepare_fields(fields, required_fields=None):
     if not fields:

@@ -1054,11 +1056,11 @@ def _regex_filters(filters):
     for key, value in filters.items():
         regexes = []
         a_values = []
-        if isinstance(value, re.Pattern):
+        if isinstance(value, PatternType):
             regexes.append(value)
         elif isinstance(value, (list, tuple, set)):
             for item in value:
-                if isinstance(item, re.Pattern):
+                if isinstance(item, PatternType):
                     regexes.append(item)
                 else:
                     a_values.append(item)

@@ -1194,7 +1196,7 @@ def get_representations(
             as filter. Filter ignored if 'None' is passed.
         version_ids (Iterable[str]): Subset ids used as parent filter. Filter
             ignored if 'None' is passed.
-        context_filters (Dict[str, List[str, re.Pattern]]): Filter by
+        context_filters (Dict[str, List[str, PatternType]]): Filter by
             representation context fields.
         names_by_version_ids (dict[ObjectId, list[str]]): Complex filtering
             using version ids and list of names under the version.

@@ -1240,7 +1242,7 @@ def get_archived_representations(
             as filter. Filter ignored if 'None' is passed.
         version_ids (Iterable[str]): Subset ids used as parent filter. Filter
             ignored if 'None' is passed.
-        context_filters (Dict[str, List[str, re.Pattern]]): Filter by
+        context_filters (Dict[str, List[str, PatternType]]): Filter by
             representation context fields.
         names_by_version_ids (dict[ObjectId, List[str]]): Complex filtering
             using version ids and list of names under the version.
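`re.Pattern` only exists as a public name since Python 3.8, so deriving the compiled-pattern type at runtime keeps these isinstance checks portable across interpreters. A standalone sketch of the idea:

import re

# Works on any Python version: derive the compiled-pattern type from an
# actual compiled pattern instead of relying on re.Pattern (3.8+ only).
PatternType = type(re.compile(""))

value = re.compile(r"beauty.*")
assert isinstance(value, PatternType)
assert not isinstance("beauty_main", PatternType)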
@@ -2,6 +2,7 @@ from .mongo import get_project_connection
 from .entities import (
     get_assets,
     get_asset_by_id,
+    get_version_by_id,
     get_representation_by_id,
     convert_id,
 )

@@ -127,12 +128,20 @@ def get_linked_representation_id(
     if not version_id:
         return []

+    version_doc = get_version_by_id(
+        project_name, version_id, fields=["type", "version_id"]
+    )
+    if version_doc["type"] == "hero_version":
+        version_id = version_doc["version_id"]

     if max_depth is None:
         max_depth = 0

     match = {
         "_id": version_id,
-        "type": {"$in": ["version", "hero_version"]}
+        # Links are not stored to hero versions at this moment so filter
+        #   is limited to just versions
+        "type": "version"
     }

     graph_lookup = {

@@ -187,7 +196,7 @@ def _process_referenced_pipeline_result(result, link_type):
     referenced_version_ids = set()
     correctly_linked_ids = set()
     for item in result:
-        input_links = item["data"].get("inputLinks")
+        input_links = item.get("data", {}).get("inputLinks")
         if not input_links:
             continue

@@ -203,7 +212,7 @@ def _process_referenced_pipeline_result(result, link_type):
             continue

         for output in sorted(outputs_recursive, key=lambda o: o["depth"]):
-            output_links = output["data"].get("inputLinks")
+            output_links = output.get("data", {}).get("inputLinks")
             if not output_links:
                 continue
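Hero versions carry no links of their own, so the lookup above first swaps a hero version for the concrete version it mirrors. A tiny sketch of that resolution step (the helper is an illustration, not code from this PR):

# Hero versions store no links; links live on the concrete version.
def resolve_link_query_id(version_doc):
    """Illustrative helper mirroring the added lookup."""
    if version_doc["type"] == "hero_version":
        return version_doc["version_id"]
    return version_doc["_id"]


assert resolve_link_query_id(
    {"_id": 2, "type": "hero_version", "version_id": 1}) == 1
assert resolve_link_query_id({"_id": 3, "type": "version"}) == 3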
@@ -23,6 +23,7 @@ CURRENT_PROJECT_CONFIG_SCHEMA = "openpype:config-2.0"
 CURRENT_ASSET_DOC_SCHEMA = "openpype:asset-3.0"
 CURRENT_SUBSET_SCHEMA = "openpype:subset-3.0"
 CURRENT_VERSION_SCHEMA = "openpype:version-3.0"
+CURRENT_HERO_VERSION_SCHEMA = "openpype:hero_version-1.0"
 CURRENT_REPRESENTATION_SCHEMA = "openpype:representation-2.0"
 CURRENT_WORKFILE_INFO_SCHEMA = "openpype:workfile-1.0"
 CURRENT_THUMBNAIL_SCHEMA = "openpype:thumbnail-1.0"

@@ -162,6 +163,34 @@ def new_version_doc(version, subset_id, data=None, entity_id=None):
     }


+def new_hero_version_doc(version_id, subset_id, data=None, entity_id=None):
+    """Create skeleton data of hero version document.
+
+    Args:
+        version_id (ObjectId): Is considered as unique identifier of version
+            under subset.
+        subset_id (Union[str, ObjectId]): Id of parent subset.
+        data (Dict[str, Any]): Version document data.
+        entity_id (Union[str, ObjectId]): Predefined id of document. New id is
+            created if not passed.
+
+    Returns:
+        Dict[str, Any]: Skeleton of version document.
+    """
+
+    if data is None:
+        data = {}
+
+    return {
+        "_id": _create_or_convert_to_mongo_id(entity_id),
+        "schema": CURRENT_HERO_VERSION_SCHEMA,
+        "type": "hero_version",
+        "version_id": version_id,
+        "parent": subset_id,
+        "data": data
+    }


 def new_representation_doc(
     name, version_id, context, data=None, entity_id=None
 ):

@@ -293,6 +322,20 @@ def prepare_version_update_data(old_doc, new_doc, replace=True):
     return _prepare_update_data(old_doc, new_doc, replace)


+def prepare_hero_version_update_data(old_doc, new_doc, replace=True):
+    """Compare two hero version documents and prepare update data.
+
+    Based on compared values will create update data for 'UpdateOperation'.
+
+    Empty output means that documents are identical.
+
+    Returns:
+        Dict[str, Any]: Changes between old and new document.
+    """
+
+    return _prepare_update_data(old_doc, new_doc, replace)


 def prepare_representation_update_data(old_doc, new_doc, replace=True):
     """Compare two representation documents and prepare update data.
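A short sketch of how the two new helpers compose, assuming they are importable from `openpype.client.operations` (the ids are made up):

from bson.objectid import ObjectId

from openpype.client.operations import (
    new_hero_version_doc,
    prepare_hero_version_update_data,
)

subset_id = ObjectId()
old_doc = new_hero_version_doc(ObjectId(), subset_id)
# Re-point the hero version at a different concrete version.
new_doc = dict(old_doc, version_id=ObjectId())

# Only the changed 'version_id' should show up in the update data.
changes = prepare_hero_version_update_data(old_doc, new_doc)
print(changes)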
@@ -19,6 +19,7 @@ class AddLastWorkfileToLaunchArgs(PreLaunchHook):
     "hiero",
     "houdini",
     "nukestudio",
     "fusion",
     "blender",
     "photoshop",
     "tvpaint",
@@ -1,8 +1,6 @@
 import os
-from openpype.lib import (
-    PreLaunchHook,
-    create_workdir_extra_folders
-)
+from openpype.lib import PreLaunchHook
+from openpype.pipeline.workfile import create_workdir_extra_folders


 class AddLastWorkfileToLaunchArgs(PreLaunchHook):
@@ -5,6 +5,7 @@ from .host import (
 from .interfaces import (
     IWorkfileHost,
     ILoadHost,
+    IPublishHost,
     INewPublisher,
 )

@@ -16,6 +17,7 @@ __all__ = (

     "IWorkfileHost",
     "ILoadHost",
+    "IPublishHost",
     "INewPublisher",

     "HostDirmap",
@@ -282,7 +282,7 @@ class IWorkfileHost:
         return self.workfile_has_unsaved_changes()


-class INewPublisher:
+class IPublishHost:
     """Functions related to new creation system in new publisher.

     New publisher is not storing information only about each created instance

@@ -306,12 +306,14 @@ class INewPublisher:
         workflow.
     """

-    if isinstance(host, INewPublisher):
+    if isinstance(host, IPublishHost):
         return []

     required = [
         "get_context_data",
         "update_context_data",
         "get_context_title",
         "get_current_context",
     ]
     missing = []
     for name in required:

@@ -330,7 +332,7 @@ class INewPublisher:
         MissingMethodsError: If there are missing methods on host
             implementation.
     """
-    missing = INewPublisher.get_missing_publish_methods(host)
+    missing = IPublishHost.get_missing_publish_methods(host)
     if missing:
         raise MissingMethodsError(host, missing)

@@ -368,3 +370,17 @@ class INewPublisher:
     """

     pass


+class INewPublisher(IPublishHost):
+    """Legacy interface replaced by 'IPublishHost'.
+
+    Deprecated:
+        'INewPublisher' is replaced by 'IPublishHost'. Please change your
+        imports.
+        There is no "reasonable" way to mark these classes as deprecated
+        and show a warning on wrong import. Deprecated since 3.14.*, will be
+        removed in 3.15.*.
+    """
+
+    pass
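A minimal sketch of a host satisfying the renamed interface; the class body is an illustrative stub, assuming `IPublishHost` is exported from the `openpype.host` package shown in the previous hunks:

from openpype.host import IPublishHost


class MyHost(IPublishHost):
    """Illustrative stub implementing the four required methods."""

    def __init__(self):
        self._context_data = {}

    def get_context_data(self):
        return self._context_data

    def update_context_data(self, data, changes):
        self._context_data.update(data)

    def get_context_title(self):
        return "My Host"

    def get_current_context(self):
        return {"project_name": None, "asset_name": None, "task_name": None}


# Nothing is missing, so validation passes without raising.
assert IPublishHost.get_missing_publish_methods(MyHost()) == []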
@@ -12,6 +12,7 @@ from wsrpc_aiohttp import (

 from Qt import QtCore

+from openpype.lib import Logger
 from openpype.pipeline import legacy_io
 from openpype.tools.utils import host_tools
 from openpype.tools.adobe_webserver.app import WebServerTool

@@ -84,8 +85,6 @@ class ProcessLauncher(QtCore.QObject):
     @property
     def log(self):
         if self._log is None:
-            from openpype.api import Logger
-
             self._log = Logger.get_logger("{}-launcher".format(
                 self.route_name))
         return self._log
@@ -4,8 +4,7 @@ from Qt import QtWidgets

 import pyblish.api

-from openpype import lib
-from openpype.api import Logger
+from openpype.lib import Logger, register_event_callback
 from openpype.pipeline import (
     register_loader_plugin_path,
     register_creator_plugin_path,

@@ -16,9 +15,8 @@ from openpype.pipeline import (
 )
 from openpype.pipeline.load import any_outdated_containers
 import openpype.hosts.aftereffects
-from openpype.lib import register_event_callback

-from .launch_logic import get_stub
+from .launch_logic import get_stub, ConnectionNotEstablishedYet

 log = Logger.get_logger(__name__)

@@ -111,7 +109,7 @@ def ls():
     """
     try:
         stub = get_stub()  # only after AfterEffects is up
-    except lib.ConnectionNotEstablishedYet:
+    except ConnectionNotEstablishedYet:
         print("Not connected yet, ignoring")
         return

@@ -284,7 +282,7 @@ def _get_stub():
     """
     try:
         stub = get_stub()  # only after Photoshop is up
-    except lib.ConnectionNotEstablishedYet:
+    except ConnectionNotEstablishedYet:
         print("Not connected yet, ignoring")
         return
@@ -6,7 +6,7 @@ from typing import Dict, List, Union

 import bpy
 import addon_utils
-from openpype.api import Logger
+from openpype.lib import Logger

 from . import pipeline
@@ -20,8 +20,8 @@ from openpype.pipeline import (
     deregister_creator_plugin_path,
     AVALON_CONTAINER_ID,
 )
-from openpype.api import Logger
 from openpype.lib import (
+    Logger,
     register_event_callback,
     emit_event
 )
@@ -3,7 +3,7 @@ from typing import List
 import bpy

 import pyblish.api
-import openpype.api

 import openpype.hosts.blender.api.action
+from openpype.pipeline.publish import ValidateContentsOrder
@@ -3,14 +3,15 @@ from typing import List
 import bpy

 import pyblish.api
-import openpype.api

+from openpype.pipeline.publish import ValidateContentsOrder
 import openpype.hosts.blender.api.action


 class ValidateMeshHasUvs(pyblish.api.InstancePlugin):
     """Validate that the current mesh has UV's."""

-    order = openpype.api.ValidateContentsOrder
+    order = ValidateContentsOrder
     hosts = ["blender"]
     families = ["model"]
     category = "geometry"
@@ -3,14 +3,15 @@ from typing import List
 import bpy

 import pyblish.api
-import openpype.api

+from openpype.pipeline.publish import ValidateContentsOrder
 import openpype.hosts.blender.api.action


 class ValidateMeshNoNegativeScale(pyblish.api.Validator):
     """Ensure that meshes don't have a negative scale."""

-    order = openpype.api.ValidateContentsOrder
+    order = ValidateContentsOrder
     hosts = ["blender"]
     families = ["model"]
     category = "geometry"
@@ -3,7 +3,7 @@ from typing import List
 import bpy

 import pyblish.api
-import openpype.api

 import openpype.hosts.blender.api.action
+from openpype.pipeline.publish import ValidateContentsOrder
@@ -4,7 +4,7 @@ import mathutils
 import bpy

 import pyblish.api
-import openpype.api

 import openpype.hosts.blender.api.action
+from openpype.pipeline.publish import ValidateContentsOrder
@@ -6,9 +6,8 @@ import argparse
 import pyblish.api
 import pyblish.util

-from openpype.api import Logger
-import openpype
-import openpype.hosts.celaction
+from openpype.lib import Logger
+from openpype.hosts.celaction import api as celaction
 from openpype.tools.utils import host_tools
 from openpype.pipeline import install_openpype_plugins
@@ -1,113 +0,0 @@ (file deleted)
import os
import collections
from pprint import pformat

import pyblish.api

from openpype.client import (
    get_subsets,
    get_last_versions,
    get_representations
)
from openpype.pipeline import legacy_io


class AppendCelactionAudio(pyblish.api.ContextPlugin):

    label = "Colect Audio for publishing"
    order = pyblish.api.CollectorOrder + 0.1

    def process(self, context):
        self.log.info('Collecting Audio Data')
        asset_doc = context.data["assetEntity"]

        # get all available representations
        subsets = self.get_subsets(
            asset_doc,
            representations=["audio", "wav"]
        )
        self.log.info(f"subsets is: {pformat(subsets)}")

        if not subsets.get("audioMain"):
            raise AttributeError("`audioMain` subset does not exist")

        reprs = subsets.get("audioMain", {}).get("representations", [])
        self.log.info(f"reprs is: {pformat(reprs)}")

        repr = next((r for r in reprs), None)
        if not repr:
            raise "Missing `audioMain` representation"
        self.log.info(f"representation is: {repr}")

        audio_file = repr.get('data', {}).get('path', "")

        if os.path.exists(audio_file):
            context.data["audioFile"] = audio_file
            self.log.info(
                'audio_file: {}, has been added to context'.format(audio_file))
        else:
            self.log.warning("Couldn't find any audio file on Ftrack.")

    def get_subsets(self, asset_doc, representations):
        """
        Query subsets with filter on name.

        The method will return all found subsets and its defined version
        and subsets. Version could be specified with number. Representation
        can be filtered.

        Arguments:
            asset_doct (dict): Asset (shot) mongo document
            representations (list): list for all representations

        Returns:
            dict: subsets with version and representations in keys
        """

        # Query all subsets for asset
        project_name = legacy_io.active_project()
        subset_docs = get_subsets(
            project_name, asset_ids=[asset_doc["_id"]], fields=["_id"]
        )
        # Collect all subset ids
        subset_ids = [
            subset_doc["_id"]
            for subset_doc in subset_docs
        ]

        # Check if we found anything
        assert subset_ids, (
            "No subsets found. Check correct filter. "
            "Try this for start `r'.*'`: asset: `{}`"
        ).format(asset_doc["name"])

        last_versions_by_subset_id = get_last_versions(
            project_name, subset_ids, fields=["_id", "parent"]
        )

        version_docs_by_id = {}
        for version_doc in last_versions_by_subset_id.values():
            version_docs_by_id[version_doc["_id"]] = version_doc

        repre_docs = get_representations(
            project_name,
            version_ids=version_docs_by_id.keys(),
            representation_names=representations
        )
        repre_docs_by_version_id = collections.defaultdict(list)
        for repre_doc in repre_docs:
            version_id = repre_doc["parent"]
            repre_docs_by_version_id[version_id].append(repre_doc)

        output_dict = {}
        for version_id, repre_docs in repre_docs_by_version_id.items():
            version_doc = version_docs_by_id[version_id]
            subset_id = version_doc["parent"]
            subset_doc = last_versions_by_subset_id[subset_id]
            # Store queried docs by subset name
            output_dict[subset_doc["name"]] = {
                "representations": repre_docs,
                "version": version_doc
            }

        return output_dict
@@ -51,7 +51,8 @@ from .pipeline import (
 )
 from .menu import (
     FlameMenuProjectConnect,
-    FlameMenuTimeline
+    FlameMenuTimeline,
+    FlameMenuUniversal
 )
 from .plugin import (
     Creator,

@@ -131,6 +132,7 @@ __all__ = [
     # menu
     "FlameMenuProjectConnect",
     "FlameMenuTimeline",
+    "FlameMenuUniversal",

     # plugin
     "Creator",
@@ -12,6 +12,9 @@ import xml.etree.cElementTree as cET
 from copy import deepcopy, copy
 from xml.etree import ElementTree as ET
 from pprint import pformat
+
+from openpype.lib import Logger, run_subprocess

 from .constants import (
     MARKER_COLOR,
     MARKER_DURATION,

@@ -20,9 +23,7 @@ from .constants import (
     MARKER_PUBLISH_DEFAULT
 )

-import openpype.api as openpype
-
-log = openpype.Logger.get_logger(__name__)
+log = Logger.get_logger(__name__)

 FRAME_PATTERN = re.compile(r"[\._](\d+)[\.]")

@@ -766,11 +767,11 @@ class MediaInfoFile(object):
     _drop_mode = None
     _file_pattern = None

-    def __init__(self, path, **kwargs):
+    def __init__(self, path, logger=None):

         # replace log if any
-        if kwargs.get("logger"):
-            self.log = kwargs["logger"]
+        if logger:
+            self.log = logger

         # test if `dl_get_media_info` path exists
         self._validate_media_script_path()

@@ -1016,7 +1017,7 @@ class MediaInfoFile(object):

         try:
             # execute creation of clip xml template data
-            openpype.run_subprocess(cmd_args)
+            run_subprocess(cmd_args)
         except TypeError as error:
             raise TypeError(
                 "Error creating `{}` due: {}".format(fpath, error))
@@ -201,3 +201,53 @@ class FlameMenuTimeline(_FlameMenuApp):
         if self.flame:
             self.flame.execute_shortcut('Rescan Python Hooks')
             self.log.info('Rescan Python Hooks')


+class FlameMenuUniversal(_FlameMenuApp):
+
+    # flameMenuProjectconnect app takes care of the preferences dialog as well
+
+    def __init__(self, framework):
+        _FlameMenuApp.__init__(self, framework)
+
+    def __getattr__(self, name):
+        def method(*args, **kwargs):
+            project = self.dynamic_menu_data.get(name)
+            if project:
+                self.link_project(project)
+        return method
+
+    def build_menu(self):
+        if not self.flame:
+            return []
+
+        menu = deepcopy(self.menu)
+
+        menu['actions'].append({
+            "name": "Load...",
+            "execute": lambda x: self.tools_helper.show_loader()
+        })
+        menu['actions'].append({
+            "name": "Manage...",
+            "execute": lambda x: self.tools_helper.show_scene_inventory()
+        })
+        menu['actions'].append({
+            "name": "Library...",
+            "execute": lambda x: self.tools_helper.show_library_loader()
+        })
+        return menu
+
+    def refresh(self, *args, **kwargs):
+        self.rescan()
+
+    def rescan(self, *args, **kwargs):
+        if not self.flame:
+            try:
+                import flame
+                self.flame = flame
+            except ImportError:
+                self.flame = None
+
+        if self.flame:
+            self.flame.execute_shortcut('Rescan Python Hooks')
+            self.log.info('Rescan Python Hooks')
@@ -5,7 +5,7 @@ import os
 import contextlib
 from pyblish import api as pyblish

-from openpype.api import Logger
+from openpype.lib import Logger
 from openpype.pipeline import (
     register_loader_plugin_path,
     register_creator_plugin_path,

@@ -90,8 +90,7 @@ def containerise(flame_clip_segment,
 def ls():
     """List available containers.
     """
-    # TODO: ls
-    pass
+    return []


 def parse_container(tl_segment, validate=True):

@@ -107,6 +106,7 @@ def update_container(tl_segment, data=None):
     # TODO: update_container
     pass


 def on_pyblish_instance_toggled(instance, old_value, new_value):
     """Toggle node passthrough states on instance toggles."""
@@ -6,16 +6,17 @@ from xml.etree import ElementTree as ET

 from Qt import QtCore, QtWidgets

-import openpype.api as openpype
 import qargparse
 from openpype import style
+from openpype.settings import get_current_project_settings
+from openpype.lib import Logger
 from openpype.pipeline import LegacyCreator, LoaderPlugin

 from . import constants
 from . import lib as flib
 from . import pipeline as fpipeline

-log = openpype.Logger.get_logger(__name__)
+log = Logger.get_logger(__name__)


 class CreatorWidget(QtWidgets.QDialog):

@@ -305,7 +306,7 @@ class Creator(LegacyCreator):

     def __init__(self, *args, **kwargs):
         super(Creator, self).__init__(*args, **kwargs)
-        self.presets = openpype.get_current_project_settings()[
+        self.presets = get_current_project_settings()[
             "flame"]["create"].get(self.__class__.__name__, {})

         # adding basic current context flame objects

@@ -361,6 +362,8 @@ class PublishableClip:
     index_from_segment_default = False
     use_shot_name_default = False
     include_handles_default = False
+    retimed_handles_default = True
+    retimed_framerange_default = True

     def __init__(self, segment, **kwargs):
         self.rename_index = kwargs["rename_index"]
@@ -496,6 +499,14 @@ class PublishableClip:
             "audio", {}).get("value") or False
         self.include_handles = self.ui_inputs.get(
             "includeHandles", {}).get("value") or self.include_handles_default
+        self.retimed_handles = (
+            self.ui_inputs.get("retimedHandles", {}).get("value")
+            or self.retimed_handles_default
+        )
+        self.retimed_framerange = (
+            self.ui_inputs.get("retimedFramerange", {}).get("value")
+            or self.retimed_framerange_default
+        )

         # build subset name from layer name
         if self.subset_name == "[ track name ]":
@@ -668,6 +679,7 @@ class ClipLoader(LoaderPlugin):
     `update` logic.

     """
+    log = log

     options = [
         qargparse.Boolean(
@@ -684,16 +696,20 @@ class OpenClipSolver(flib.MediaInfoFile):

     log = log

-    def __init__(self, openclip_file_path, feed_data):
+    def __init__(self, openclip_file_path, feed_data, logger=None):
         self.out_file = openclip_file_path

+        # replace log if any
+        if logger:
+            self.log = logger
+
         # new feed variables:
         feed_path = feed_data.pop("path")

         # initialize parent class
         super(OpenClipSolver, self).__init__(
             feed_path,
-            **feed_data
+            logger=logger
         )

         # get other metadata
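`OpenClipSolver` follows the same pattern: it pops `path` off `feed_data` and forwards the logger to its `MediaInfoFile` base class. A sketch of the call-site shape, mirroring the loaders further down (the dict values are illustrative):

    feed_data = {
        "path": "/mnt/renders/sh010_comp_v003.exr",  # consumed by feed_data.pop("path")
        "colorspace": "ACES - ACEScg",               # illustrative
        "version": "v003",
    }
    OpenClipSolver("/mnt/clips/sh010.clip", feed_data, logger=log).make()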
@@ -741,17 +757,18 @@ class OpenClipSolver(flib.MediaInfoFile):
         self.log.info("Building new openClip")
         self.log.debug(">> self.clip_data: {}".format(self.clip_data))

-        # clip data coming from MediaInfoFile
-        tmp_xml_feeds = self.clip_data.find('tracks/track/feeds')
-        tmp_xml_feeds.set('currentVersion', self.feed_version_name)
-        for tmp_feed in tmp_xml_feeds:
-            tmp_feed.set('vuid', self.feed_version_name)
+        # clip data coming from MediaInfoFile
+        for tmp_xml_track in self.clip_data.iter("track"):
+            tmp_xml_feeds = tmp_xml_track.find('feeds')
+            tmp_xml_feeds.set('currentVersion', self.feed_version_name)

-            # add colorspace if any is set
-            if self.feed_colorspace:
-                self._add_colorspace(tmp_feed, self.feed_colorspace)
+            for tmp_feed in tmp_xml_track.iter("feed"):
+                tmp_feed.set('vuid', self.feed_version_name)

-            self._clear_handler(tmp_feed)
+                # add colorspace if any is set
+                if self.feed_colorspace:
+                    self._add_colorspace(tmp_feed, self.feed_colorspace)
+
+                self._clear_handler(tmp_feed)

         tmp_xml_versions_obj = self.clip_data.find('versions')
         tmp_xml_versions_obj.set('currentVersion', self.feed_version_name)
@@ -764,6 +781,17 @@ class OpenClipSolver(flib.MediaInfoFile):

         self.write_clip_data_to_file(self.out_file, self.clip_data)

+    def _get_xml_track_obj_by_uid(self, xml_data, uid):
+        # loop all tracks of input xml data
+        for xml_track in xml_data.iter("track"):
+            track_uid = xml_track.get("uid")
+            self.log.debug(
+                ">> track_uid:uid: {}:{}".format(track_uid, uid))
+
+            # get matching uids
+            if uid == track_uid:
+                return xml_track
+
     def _update_open_clip(self):
         self.log.info("Updating openClip ..")
@@ -773,52 +801,81 @@ class OpenClipSolver(flib.MediaInfoFile):
         self.log.debug(">> out_xml: {}".format(out_xml))
         self.log.debug(">> self.clip_data: {}".format(self.clip_data))

-        # Get new feed from tmp file
-        tmp_xml_feed = self.clip_data.find('tracks/track/feeds/feed')
-
-        self._clear_handler(tmp_xml_feed)
-
-        # update fps from MediaInfoFile class
-        if self.fps:
-            tmp_feed_fps_obj = tmp_xml_feed.find(
-                "startTimecode/rate")
-            tmp_feed_fps_obj.text = str(self.fps)
-
-        # update start_frame from MediaInfoFile class
-        if self.start_frame:
-            tmp_feed_nb_ticks_obj = tmp_xml_feed.find(
-                "startTimecode/nbTicks")
-            tmp_feed_nb_ticks_obj.text = str(self.start_frame)
-
-        # update drop_mode from MediaInfoFile class
-        if self.drop_mode:
-            tmp_feed_drop_mode_obj = tmp_xml_feed.find(
-                "startTimecode/dropMode")
-            tmp_feed_drop_mode_obj.text = str(self.drop_mode)
-
-        new_path_obj = tmp_xml_feed.find(
-            "spans/span/path")
-        new_path = new_path_obj.text
-
-        feed_added = False
-        if not self._feed_exists(out_xml, new_path):
-            tmp_xml_feed.set('vuid', self.feed_version_name)
-            # Append new temp file feed to .clip source out xml
-            out_track = out_xml.find("tracks/track")
-            # add colorspace if any is set
-            if self.feed_colorspace:
-                self._add_colorspace(tmp_xml_feed, self.feed_colorspace)
-
-            out_feeds = out_track.find('feeds')
-            out_feeds.set('currentVersion', self.feed_version_name)
-            out_feeds.append(tmp_xml_feed)
-
-            self.log.info(
-                "Appending new feed: {}".format(
-                    self.feed_version_name))
-            feed_added = True
-
-        if feed_added:
+        # loop tmp tracks
+        updated_any = False
+        for tmp_xml_track in self.clip_data.iter("track"):
+            # get tmp track uid
+            tmp_track_uid = tmp_xml_track.get("uid")
+            self.log.debug(">> tmp_track_uid: {}".format(tmp_track_uid))
+
+            # get out data track by uid
+            out_track_element = self._get_xml_track_obj_by_uid(
+                out_xml, tmp_track_uid)
+            self.log.debug(
+                ">> out_track_element: {}".format(out_track_element))
+
+            # loop tmp feeds
+            for tmp_xml_feed in tmp_xml_track.iter("feed"):
+                new_path_obj = tmp_xml_feed.find(
+                    "spans/span/path")
+                new_path = new_path_obj.text
+
+                # check if feed path already exists in track's feeds
+                if (
+                    out_track_element is not None
+                    and self._feed_exists(out_track_element, new_path)
+                ):
+                    continue
+
+                # rename versions on feeds
+                tmp_xml_feed.set('vuid', self.feed_version_name)
+                self._clear_handler(tmp_xml_feed)
+
+                # update fps from MediaInfoFile class
+                if self.fps is not None:
+                    tmp_feed_fps_obj = tmp_xml_feed.find(
+                        "startTimecode/rate")
+                    tmp_feed_fps_obj.text = str(self.fps)
+
+                # update start_frame from MediaInfoFile class
+                if self.start_frame is not None:
+                    tmp_feed_nb_ticks_obj = tmp_xml_feed.find(
+                        "startTimecode/nbTicks")
+                    tmp_feed_nb_ticks_obj.text = str(self.start_frame)
+
+                # update drop_mode from MediaInfoFile class
+                if self.drop_mode is not None:
+                    tmp_feed_drop_mode_obj = tmp_xml_feed.find(
+                        "startTimecode/dropMode")
+                    tmp_feed_drop_mode_obj.text = str(self.drop_mode)
+
+                # add colorspace if any is set
+                if self.feed_colorspace is not None:
+                    self._add_colorspace(tmp_xml_feed, self.feed_colorspace)
+
+                # then append/update feed to correct track in output
+                if out_track_element:
+                    self.log.debug("updating track element ..")
+                    # update already present track
+                    out_feeds = out_track_element.find('feeds')
+                    out_feeds.set('currentVersion', self.feed_version_name)
+                    out_feeds.append(tmp_xml_feed)
+
+                    self.log.info(
+                        "Appending new feed: {}".format(
+                            self.feed_version_name))
+                else:
+                    self.log.debug("adding new track element ..")
+                    # create new track as it doesn't exist yet
+                    # set current version to feeds on tmp
+                    tmp_xml_feeds = tmp_xml_track.find('feeds')
+                    tmp_xml_feeds.set('currentVersion', self.feed_version_name)
+                    out_tracks = out_xml.find("tracks")
+                    out_tracks.append(tmp_xml_track)
+
+                updated_any = True
+
+        if updated_any:
             # Append vUID to versions
             out_xml_versions_obj = out_xml.find('versions')
             out_xml_versions_obj.set(
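The rewritten update path walks every `<track>` of the temporary clip XML, pairs it with the on-disk clip by `uid`, and skips feeds whose span path already exists. The lookup itself is plain `xml.etree.ElementTree`; a self-contained sketch of that part (toy XML, no Flame required):

    from xml.etree import ElementTree as ET

    out_xml = ET.fromstring(
        '<clip><tracks>'
        '<track uid="t1"><feeds currentVersion="v001"/></track>'
        '</tracks></clip>'
    )

    def get_track_by_uid(xml_data, uid):
        # same matching as _get_xml_track_obj_by_uid, minus the logging
        for xml_track in xml_data.iter("track"):
            if xml_track.get("uid") == uid:
                return xml_track

    track = get_track_by_uid(out_xml, "t1")
    print(track.find("feeds").get("currentVersion"))  # v001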
@@ -1,6 +1,6 @@
 import os
 from xml.etree import ElementTree as ET
-from openpype.api import Logger
+from openpype.lib import Logger

 log = Logger.get_logger(__name__)


@@ -4,7 +4,7 @@ Flame utils for syncing scripts

 import os
 import shutil
-from openpype.api import Logger
+from openpype.lib import Logger
 log = Logger.get_logger(__name__)


@@ -1,7 +1,7 @@
 """Host API required Work Files tool"""

 import os
-from openpype.api import Logger
+from openpype.lib import Logger
 # from .. import (
 #     get_project_manager,
 #     get_current_project
@@ -3,16 +3,17 @@ import json
 import tempfile
 import contextlib
 import socket
+from pprint import pformat

 from openpype.lib import (
     PreLaunchHook,
-    get_openpype_username
+    get_openpype_username,
+    run_subprocess,
 )
+from openpype.lib.applications import (
+    ApplicationLaunchFailed
+)
 from openpype.hosts import flame as opflame
 import openpype
-from pprint import pformat


 class FlamePrelaunch(PreLaunchHook):

@@ -22,6 +23,7 @@ class FlamePrelaunch(PreLaunchHook):
     in environment var FLAME_SCRIPT_DIR.
     """
     app_groups = ["flame"]
+    permissions = 0o777

     wtc_script_path = os.path.join(
         opflame.HOST_DIR, "api", "scripts", "wiretap_com.py")
@@ -38,19 +40,12 @@ class FlamePrelaunch(PreLaunchHook):
         """Hook entry method."""
         project_doc = self.data["project_doc"]
         project_name = project_doc["name"]
+        volume_name = _env.get("FLAME_WIRETAP_VOLUME")

         # get image io
-        project_anatomy = self.data["anatomy"]
+        project_settings = self.data["project_settings"]

-        # make sure anatomy settings are having flame key
-        if not project_anatomy["imageio"].get("flame"):
-            raise ApplicationLaunchFailed((
-                "Anatomy project settings are missing `flame` key. "
-                "Please make sure you remove project overides on "
-                "Anatomy Image io")
-            )
-
-        imageio_flame = project_anatomy["imageio"]["flame"]
+        imageio_flame = project_settings["flame"]["imageio"]

         # get user name and host name
         user_name = get_openpype_username()

@@ -81,7 +76,7 @@ class FlamePrelaunch(PreLaunchHook):
         data_to_script = {
             # from settings
             "host_name": _env.get("FLAME_WIRETAP_HOSTNAME") or hostname,
-            "volume_name": _env.get("FLAME_WIRETAP_VOLUME"),
+            "volume_name": volume_name,
             "group_name": _env.get("FLAME_WIRETAP_GROUP"),
             "color_policy": str(imageio_flame["project"]["colourPolicy"]),
@@ -99,8 +94,40 @@ class FlamePrelaunch(PreLaunchHook):

         app_arguments = self._get_launch_arguments(data_to_script)

+        # fix project data permission issue
+        self._fix_permissions(project_name, volume_name)
+
         self.launch_context.launch_args.extend(app_arguments)

+    def _fix_permissions(self, project_name, volume_name):
+        """Workaround for project data permissions.
+
+        Reported issue: when a project is created locally on one machine,
+        it is impossible to migrate it to another machine. Autodesk Flame
+        is creating some unmanageable files which need to be opened to 0o777.
+
+        Args:
+            project_name (str): project name
+            volume_name (str): studio volume
+        """
+        dirs_to_modify = [
+            "/usr/discreet/project/{}".format(project_name),
+            "/opt/Autodesk/clip/{}/{}.prj".format(volume_name, project_name),
+            "/usr/discreet/clip/{}/{}.prj".format(volume_name, project_name)
+        ]
+
+        for dirtm in dirs_to_modify:
+            for root, dirs, files in os.walk(dirtm):
+                try:
+                    for name in set(dirs) | set(files):
+                        path = os.path.join(root, name)
+                        st = os.stat(path)
+                        if oct(st.st_mode) != self.permissions:
+                            os.chmod(path, self.permissions)
+
+                except OSError as exc:
+                    self.log.warning("Not able to open files: {}".format(exc))
+
     def _get_flame_fps(self, fps_num):
         fps_table = {
             float(23.976): "23.976 fps",
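One caveat in `_fix_permissions` as committed: `oct(st.st_mode)` returns a string such as `'0o100777'` while `self.permissions` is the integer `0o777`, so the comparison is always unequal and `chmod` runs on every file. If skipping already-correct files is the intent, the permission bits would need to be masked out first; a hedged sketch of that variant (not what the diff does):

    import os
    import stat

    permissions = 0o777
    st = os.stat(path)  # `path` as built in the loop above
    # S_IMODE strips the file-type bits so the comparison is int vs int
    if stat.S_IMODE(st.st_mode) != permissions:
        os.chmod(path, permissions)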
@@ -152,7 +179,7 @@ class FlamePrelaunch(PreLaunchHook):
             "env": self.launch_context.env
         }

-        openpype.api.run_subprocess(args, **process_kwargs)
+        run_subprocess(args, **process_kwargs)

         # process returned json file to pass launch args
         return_json_data = open(tmp_json_path).read()
@@ -23,10 +23,11 @@ class CreateShotClip(opfapi.Creator):
             # nested dictionary (only one level allowed
             # for sections and dict)
             for _k, _v in v["value"].items():
-                if presets.get(_k):
+                if presets.get(_k) is not None:
                     gui_inputs[k][
                         "value"][_k]["value"] = presets[_k]
-        if presets.get(k):
+
+        if presets.get(k) is not None:
             gui_inputs[k]["value"] = presets[k]

     # open widget for plugins inputs
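The move from `if presets.get(k):` to `if presets.get(k) is not None:` is not cosmetic: checkbox presets can legitimately be `False`, which is falsy, so the old test dropped an explicit `False` override while the new test only skips keys that are truly absent:

    presets = {"retimedHandles": False}

    if presets.get("retimedHandles"):              # old: a stored False is ignored
        pass
    if presets.get("retimedHandles") is not None:  # new: the False override is applied
        pass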
@@ -276,6 +277,22 @@ class CreateShotClip(opfapi.Creator):
                     "target": "tag",
                     "toolTip": "By default handles are excluded",  # noqa
                     "order": 3
                 },
+                "retimedHandles": {
+                    "value": True,
+                    "type": "QCheckBox",
+                    "label": "Retimed handles",
+                    "target": "tag",
+                    "toolTip": "By default handles are retimed.",  # noqa
+                    "order": 4
+                },
+                "retimedFramerange": {
+                    "value": True,
+                    "type": "QCheckBox",
+                    "label": "Retimed framerange",
+                    "target": "tag",
+                    "toolTip": "By default framerange is retimed.",  # noqa
+                    "order": 5
+                }
             }
         }
@@ -4,6 +4,7 @@ from pprint import pformat
 import openpype.hosts.flame.api as opfapi
+from openpype.lib import StringTemplate


 class LoadClip(opfapi.ClipLoader):
     """Load a subset to timeline as clip

@@ -60,8 +61,6 @@ class LoadClip(opfapi.ClipLoader):
             "path": self.fname.replace("\\", "/"),
             "colorspace": colorspace,
             "version": "v{:0>3}".format(version_name),
-            "logger": self.log
-
         }
         self.log.debug(pformat(
             loading_context

@@ -69,7 +68,8 @@ class LoadClip(opfapi.ClipLoader):
         self.log.debug(openclip_path)

         # make openpype clip file
-        opfapi.OpenClipSolver(openclip_path, loading_context).make()
+        opfapi.OpenClipSolver(
+            openclip_path, loading_context, logger=self.log).make()

         # prepare Reel group in actual desktop
         opc = self._get_clip(
@@ -64,8 +64,6 @@ class LoadClipBatch(opfapi.ClipLoader):
             "path": self.fname.replace("\\", "/"),
             "colorspace": colorspace,
             "version": "v{:0>3}".format(version_name),
-            "logger": self.log
-
         }
         self.log.debug(pformat(
             loading_context

@@ -73,7 +71,8 @@ class LoadClipBatch(opfapi.ClipLoader):
         self.log.debug(openclip_path)

         # make openpype clip file
-        opfapi.OpenClipSolver(openclip_path, loading_context).make()
+        opfapi.OpenClipSolver(
+            openclip_path, loading_context, logger=self.log).make()

         # prepare Reel group in actual desktop
         opc = self._get_clip(
@@ -131,6 +131,9 @@ class CollectTimelineInstances(pyblish.api.ContextPlugin):
                 "fps": self.fps,
                 "workfileFrameStart": workfile_start,
                 "sourceFirstFrame": int(first_frame),
+                "retimedHandles": marker_data.get("retimedHandles"),
+                "shotDurationFromSource": (
+                    not marker_data.get("retimedFramerange")),
                 "path": file_path,
                 "flameAddTasks": self.add_tasks,
                 "tasks": {
@@ -1,10 +1,10 @@
 import os
 import pyblish.api
-import openpype.api
 import opentimelineio as otio
+from openpype.pipeline import publish


-class ExtractOTIOFile(openpype.api.Extractor):
+class ExtractOTIOFile(publish.Extractor):
     """
     Extractor exporting an OTIO file
     """
@@ -1,11 +1,11 @@
 import os
 import re
 import tempfile
 from pprint import pformat
 from copy import deepcopy

 import pyblish.api
-import openpype.api

+from openpype.pipeline import publish
 from openpype.hosts.flame import api as opfapi
 from openpype.hosts.flame.api import MediaInfoFile
 from openpype.pipeline.editorial import (

@@ -15,7 +15,7 @@ from openpype.pipeline.editorial import (
 import flame


-class ExtractSubsetResources(openpype.api.Extractor):
+class ExtractSubsetResources(publish.Extractor):
     """
     Extractor for transcoding files from Flame clip
     """
@@ -80,40 +80,53 @@ class ExtractSubsetResources(publish.Extractor):
         retimed_data = self._get_retimed_attributes(instance)

         # get individual keys
-        r_handle_start = retimed_data["handle_start"]
-        r_handle_end = retimed_data["handle_end"]
-        r_source_dur = retimed_data["source_duration"]
-        r_speed = retimed_data["speed"]
+        retimed_handle_start = retimed_data["handle_start"]
+        retimed_handle_end = retimed_data["handle_end"]
+        retimed_source_duration = retimed_data["source_duration"]
+        retimed_speed = retimed_data["speed"]

         # get handles value - take only the max from both
         handle_start = instance.data["handleStart"]
         handle_end = instance.data["handleEnd"]
         handles = max(handle_start, handle_end)
         include_handles = instance.data.get("includeHandles")
+        retimed_handles = instance.data.get("retimedHandles")

         # get media source range with handles
         source_start_handles = instance.data["sourceStartH"]
         source_end_handles = instance.data["sourceEndH"]

         # retime if needed
-        if r_speed != 1.0:
-            source_start_handles = (
-                instance.data["sourceStart"] - r_handle_start)
-            source_end_handles = (
-                source_start_handles
-                + (r_source_dur - 1)
-                + r_handle_start
-                + r_handle_end
-            )
+        if retimed_speed != 1.0:
+            if retimed_handles:
+                # handles are retimed
+                source_start_handles = (
+                    instance.data["sourceStart"] - retimed_handle_start)
+                source_end_handles = (
+                    source_start_handles
+                    + (retimed_source_duration - 1)
+                    + retimed_handle_start
+                    + retimed_handle_end
+                )
+
+            else:
+                # handles are not retimed
+                source_end_handles = (
+                    source_start_handles
+                    + (retimed_source_duration - 1)
+                    + handle_start
+                    + handle_end
+                )

         # get frame range with handles for representation range
         frame_start_handle = frame_start - handle_start
+        repre_frame_start = frame_start_handle
         if include_handles:
-            if r_speed == 1.0:
+            if retimed_speed == 1.0 or not retimed_handles:
                 frame_start_handle = frame_start
             else:
                 frame_start_handle = (
-                    frame_start - handle_start) + r_handle_start
+                    frame_start - handle_start) + retimed_handle_start

         self.log.debug("_ frame_start_handle: {}".format(
             frame_start_handle))
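A quick worked example of the retimed-handles branch above, with illustrative numbers: `sourceStart` 1001, retimed handles of 5 on each side, and a retimed source duration of 100 frames:

    source_start = 1001
    retimed_handle_start = retimed_handle_end = 5
    retimed_source_duration = 100

    source_start_handles = source_start - retimed_handle_start  # 996
    source_end_handles = (
        source_start_handles
        + (retimed_source_duration - 1)  # 99
        + retimed_handle_start           # 5
        + retimed_handle_end             # 5
    )                                    # 1105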
@@ -124,6 +137,9 @@ class ExtractSubsetResources(publish.Extractor):
         source_duration_handles = (
             source_end_handles - source_start_handles) + 1

+        self.log.debug("_ source_duration_handles: {}".format(
+            source_duration_handles))
+
         # create staging dir path
         staging_dir = self.staging_dir(instance)
@@ -147,15 +163,30 @@ class ExtractSubsetResources(publish.Extractor):
         if version_data:
             instance.data["versionData"].update(version_data)

-        if r_speed != 1.0:
-            instance.data["versionData"].update({
-                "frameStart": frame_start_handle,
-                "frameEnd": (
-                    (frame_start_handle + source_duration_handles - 1)
-                    - (r_handle_start + r_handle_end)
-                )
-            })
-        self.log.debug("_ i_version_data: {}".format(
+        # version data start frame
+        version_frame_start = frame_start
+        if include_handles:
+            version_frame_start = frame_start_handle
+
+        if retimed_speed != 1.0:
+            if retimed_handles:
+                instance.data["versionData"].update({
+                    "frameStart": version_frame_start,
+                    "frameEnd": (
+                        (version_frame_start + source_duration_handles - 1)
+                        - (retimed_handle_start + retimed_handle_end)
+                    )
+                })
+            else:
+                instance.data["versionData"].update({
+                    "handleStart": handle_start,
+                    "handleEnd": handle_end,
+                    "frameStart": version_frame_start,
+                    "frameEnd": (
+                        (version_frame_start + source_duration_handles - 1)
+                        - (handle_start + handle_end)
+                    )
+                })
+        self.log.debug("_ version_data: {}".format(
             instance.data["versionData"]
         ))
@@ -73,6 +73,8 @@ def load_apps():
         opfapi.FlameMenuProjectConnect(opfapi.CTX.app_framework))
     opfapi.CTX.flame_apps.append(
         opfapi.FlameMenuTimeline(opfapi.CTX.app_framework))
+    opfapi.CTX.flame_apps.append(
+        opfapi.FlameMenuUniversal(opfapi.CTX.app_framework))
     opfapi.CTX.app_framework.log.info("Apps are loaded")
@@ -191,3 +193,27 @@ def get_timeline_custom_ui_actions():
     openpype_install()

     return _build_app_menu("FlameMenuTimeline")
+
+
+def get_batch_custom_ui_actions():
+    """Hook to create submenu in batch
+
+    Returns:
+        list: menu object
+    """
+    # install openpype and the host
+    openpype_install()
+
+    return _build_app_menu("FlameMenuUniversal")
+
+
+def get_media_panel_custom_ui_actions():
+    """Hook to create submenu in desktop
+
+    Returns:
+        list: menu object
+    """
+    # install openpype and the host
+    openpype_install()
+
+    return _build_app_menu("FlameMenuUniversal")
@@ -0,0 +1,10 @@
+from .addon import (
+    FusionAddon,
+    FUSION_HOST_DIR,
+)
+
+
+__all__ = (
+    "FusionAddon",
+    "FUSION_HOST_DIR",
+)
32
openpype/hosts/fusion/addon.py
Normal file
@@ -0,0 +1,32 @@
+import os
+from openpype.modules import OpenPypeModule
+from openpype.modules.interfaces import IHostAddon
+
+FUSION_HOST_DIR = os.path.dirname(os.path.abspath(__file__))
+
+
+class FusionAddon(OpenPypeModule, IHostAddon):
+    name = "fusion"
+    host_name = "fusion"
+
+    def initialize(self, module_settings):
+        self.enabled = True
+
+    def get_launch_hook_paths(self, app):
+        if app.host_name != self.host_name:
+            return []
+        return [
+            os.path.join(FUSION_HOST_DIR, "hooks")
+        ]
+
+    def add_implementation_envs(self, env, _app):
+        # Set default values if they are not already set via settings
+        defaults = {
+            "OPENPYPE_LOG_NO_COLORS": "Yes"
+        }
+        for key, value in defaults.items():
+            if not env.get(key):
+                env[key] = value
+
+    def get_workfile_extensions(self):
+        return [".comp"]
@@ -5,10 +5,7 @@ from .pipeline import (
     ls,

     imprint_container,
-    parse_container,
-
-    get_current_comp,
-    comp_lock_and_undo_chunk
+    parse_container
 )

 from .workio import (

@@ -22,8 +19,10 @@ from .workio import (

 from .lib import (
     maintained_selection,
-    get_additional_data,
-    update_frame_range
+    update_frame_range,
+    set_asset_framerange,
+    get_current_comp,
+    comp_lock_and_undo_chunk
 )

 from .menu import launch_openpype_menu

@@ -38,9 +37,6 @@ __all__ = [
     "imprint_container",
     "parse_container",

-    "get_current_comp",
-    "comp_lock_and_undo_chunk",
-
     # workio
     "open_file",
     "save_file",

@@ -51,8 +47,10 @@ __all__ = [

     # lib
     "maintained_selection",
-    "get_additional_data",
     "update_frame_range",
+    "set_asset_framerange",
+    "get_current_comp",
+    "comp_lock_and_undo_chunk",

     # menu
     "launch_openpype_menu",
@@ -3,8 +3,7 @@ import sys
 import re
 import contextlib

-from Qt import QtGui
-
+from openpype.lib import Logger
 from openpype.client import (
     get_asset_by_name,
     get_subset_by_name,

@@ -17,13 +16,14 @@ from openpype.pipeline import (
     switch_container,
     legacy_io,
 )
-from .pipeline import get_current_comp, comp_lock_and_undo_chunk
 from openpype.pipeline.context_tools import get_current_project_asset

 self = sys.modules[__name__]
 self._project = None


-def update_frame_range(start, end, comp=None, set_render_range=True):
+def update_frame_range(start, end, comp=None, set_render_range=True,
+                       handle_start=0, handle_end=0):
     """Set Fusion comp's start and end frame range

     Args:
@@ -32,6 +32,8 @@ def update_frame_range(start, end, comp=None, set_render_range=True):
         comp (object, Optional): comp object from fusion
         set_render_range (bool, Optional): When True this will also set the
             composition's render start and end frame.
+        handle_start (float, int, Optional): frame handles before start frame
+        handle_end (float, int, Optional): frame handles after end frame

     Returns:
         None

@@ -41,11 +43,16 @@ def update_frame_range(start, end, comp=None, set_render_range=True,
     if not comp:
         comp = get_current_comp()

+    # Convert any potential none type to zero
+    handle_start = handle_start or 0
+    handle_end = handle_end or 0
+
     attrs = {
-        "COMPN_GlobalStart": start,
-        "COMPN_GlobalEnd": end
+        "COMPN_GlobalStart": start - handle_start,
+        "COMPN_GlobalEnd": end + handle_end
     }

     # set frame range
     if set_render_range:
         attrs.update({
             "COMPN_RenderStart": start,
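With the extended signature, a caller can widen the comp's global range by the handles while the render range stays on the trimmed frames, which is exactly how `set_asset_framerange` in the next hunk uses it. A minimal call, frame values illustrative:

    # global range becomes 991-1110, render range stays at 1001-1100
    update_frame_range(1001, 1100, set_render_range=True,
                       handle_start=10, handle_end=10)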
@@ -56,24 +63,122 @@ def update_frame_range(start, end, comp=None, set_render_range=True,
     comp.SetAttrs(attrs)


-def get_additional_data(container):
-    """Get Fusion related data for the container
-
-    Args:
-        container(dict): the container found by the ls() function
-
-    Returns:
-        dict
-    """
-
-    tool = container["_tool"]
-    tile_color = tool.TileColor
-    if tile_color is None:
-        return {}
-
-    return {"color": QtGui.QColor.fromRgbF(tile_color["R"],
-                                           tile_color["G"],
-                                           tile_color["B"])}
+def set_asset_framerange():
+    """Set Comp's frame range based on current asset"""
+    asset_doc = get_current_project_asset()
+    start = asset_doc["data"]["frameStart"]
+    end = asset_doc["data"]["frameEnd"]
+    handle_start = asset_doc["data"]["handleStart"]
+    handle_end = asset_doc["data"]["handleEnd"]
+    update_frame_range(start, end, set_render_range=True,
+                       handle_start=handle_start,
+                       handle_end=handle_end)
+
+
+def set_asset_resolution():
+    """Set Comp's resolution width x height default based on current asset"""
+    asset_doc = get_current_project_asset()
+    width = asset_doc["data"]["resolutionWidth"]
+    height = asset_doc["data"]["resolutionHeight"]
+    comp = get_current_comp()
+
+    print("Setting comp frame format resolution to {}x{}".format(width,
+                                                                 height))
+    comp.SetPrefs({
+        "Comp.FrameFormat.Width": width,
+        "Comp.FrameFormat.Height": height,
+    })
+
+
+def validate_comp_prefs(comp=None, force_repair=False):
+    """Validate current comp defaults with asset settings.
+
+    Validates fps, resolutionWidth, resolutionHeight, aspectRatio.
+
+    This does *not* validate frameStart, frameEnd, handleStart and handleEnd.
+    """
+
+    if comp is None:
+        comp = get_current_comp()
+
+    log = Logger.get_logger("validate_comp_prefs")
+
+    fields = [
+        "name",
+        "data.fps",
+        "data.resolutionWidth",
+        "data.resolutionHeight",
+        "data.pixelAspect"
+    ]
+    asset_doc = get_current_project_asset(fields=fields)
+    asset_data = asset_doc["data"]
+
+    comp_frame_format_prefs = comp.GetPrefs("Comp.FrameFormat")
+
+    # Pixel aspect ratio in Fusion is set as AspectX and AspectY so we convert
+    # the data to something that is more sensible to Fusion
+    asset_data["pixelAspectX"] = asset_data.pop("pixelAspect")
+    asset_data["pixelAspectY"] = 1.0
+
+    validations = [
+        ("fps", "Rate", "FPS"),
+        ("resolutionWidth", "Width", "Resolution Width"),
+        ("resolutionHeight", "Height", "Resolution Height"),
+        ("pixelAspectX", "AspectX", "Pixel Aspect Ratio X"),
+        ("pixelAspectY", "AspectY", "Pixel Aspect Ratio Y")
+    ]
+
+    invalid = []
+    for key, comp_key, label in validations:
+        asset_value = asset_data[key]
+        comp_value = comp_frame_format_prefs.get(comp_key)
+        if asset_value != comp_value:
+            invalid_msg = "{} {} should be {}".format(label,
+                                                      comp_value,
+                                                      asset_value)
+            invalid.append(invalid_msg)
+
+            if not force_repair:
+                # Do not log warning if we force repair anyway
+                log.warning(
+                    "Comp {pref} {value} does not match asset "
+                    "'{asset_name}' {pref} {asset_value}".format(
+                        pref=label,
+                        value=comp_value,
+                        asset_name=asset_doc["name"],
+                        asset_value=asset_value)
+                )
+
+    if invalid:
+
+        def _on_repair():
+            attributes = dict()
+            for key, comp_key, _label in validations:
+                value = asset_data[key]
+                comp_key_full = "Comp.FrameFormat.{}".format(comp_key)
+                attributes[comp_key_full] = value
+            comp.SetPrefs(attributes)
+
+        if force_repair:
+            log.info("Applying default Comp preferences..")
+            _on_repair()
+            return
+
+        from . import menu
+        from openpype.widgets import popup
+        from openpype.style import load_stylesheet
+        dialog = popup.Popup(parent=menu.menu)
+        dialog.setWindowTitle("Fusion comp has invalid configuration")
+
+        msg = "Comp preferences mismatches '{}'".format(asset_doc["name"])
+        msg += "\n" + "\n".join(invalid)
+        dialog.setMessage(msg)
+        dialog.setButtonText("Repair")
+        dialog.on_clicked.connect(_on_repair)
+        dialog.show()
+        dialog.raise_()
+        dialog.activateWindow()
+        dialog.setStyleSheet(load_stylesheet())


 def switch_item(container,
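`validate_comp_prefs` is driven from the event callbacks added in `pipeline.py` further down, in two modes: interactive (warn and offer a Repair popup) and forced. In short:

    # on save/open: warn the artist and offer the "Repair" button
    validate_comp_prefs(comp)

    # on a new comp: conform to the asset settings without asking
    validate_comp_prefs(comp, force_repair=True)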
@@ -195,3 +300,21 @@ def get_frame_path(path):
     padding = 4  # default Fusion padding

     return filename, padding, ext
+
+
+def get_current_comp():
+    """Hack to get current comp in this session"""
+    fusion = getattr(sys.modules["__main__"], "fusion", None)
+    return fusion.CurrentComp if fusion else None
+
+
+@contextlib.contextmanager
+def comp_lock_and_undo_chunk(comp, undo_queue_name="Script CMD"):
+    """Lock comp and open an undo chunk during the context"""
+    try:
+        comp.Lock()
+        comp.StartUndo(undo_queue_name)
+        yield
+    finally:
+        comp.Unlock()
+        comp.EndUndo()
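With this move, `get_current_comp` and `comp_lock_and_undo_chunk` now live side by side in `lib.py`, giving the usual pattern for mutating a comp safely. A hedged usage sketch (`AddTool("Background")` stands in for any comp edit):

    comp = get_current_comp()
    with comp_lock_and_undo_chunk(comp, "Add background"):
        # everything in here is wrapped in one lock and one undo step
        comp.AddTool("Background")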
@@ -1,43 +1,26 @@
 import os
 import sys

-from Qt import QtWidgets, QtCore
+from Qt import QtWidgets, QtCore, QtGui

-from openpype import style
 from openpype.tools.utils import host_tools
+from openpype.style import load_stylesheet
+from openpype.lib import register_event_callback
 from openpype.hosts.fusion.scripts import (
     set_rendermode,
     duplicate_with_inputs
 )
+from openpype.hosts.fusion.api.lib import (
+    set_asset_framerange,
+    set_asset_resolution
+)
+from openpype.pipeline import legacy_io
+from openpype.resources import get_openpype_icon_filepath

+from .pipeline import FusionEventHandler
+from .pulse import FusionPulse

-def load_stylesheet():
-    path = os.path.join(os.path.dirname(__file__), "menu_style.qss")
-    if not os.path.exists(path):
-        print("Unable to load stylesheet, file not found in resources")
-        return ""
-
-    with open(path, "r") as file_stream:
-        stylesheet = file_stream.read()
-    return stylesheet
-
-
-class Spacer(QtWidgets.QWidget):
-    def __init__(self, height, *args, **kwargs):
-        super(Spacer, self).__init__(*args, **kwargs)
-
-        self.setFixedHeight(height)
-
-        real_spacer = QtWidgets.QWidget(self)
-        real_spacer.setObjectName("Spacer")
-        real_spacer.setFixedHeight(height)
-
-        layout = QtWidgets.QVBoxLayout(self)
-        layout.setContentsMargins(0, 0, 0, 0)
-        layout.addWidget(real_spacer)
-
-        self.setLayout(layout)
+self = sys.modules[__name__]
+self.menu = None


 class OpenPypeMenu(QtWidgets.QWidget):
@@ -46,15 +29,29 @@ class OpenPypeMenu(QtWidgets.QWidget):

         self.setObjectName("OpenPypeMenu")

+        icon_path = get_openpype_icon_filepath()
+        icon = QtGui.QIcon(icon_path)
+        self.setWindowIcon(icon)
+
         self.setWindowFlags(
             QtCore.Qt.Window
             | QtCore.Qt.CustomizeWindowHint
             | QtCore.Qt.WindowTitleHint
+            | QtCore.Qt.WindowMinimizeButtonHint
             | QtCore.Qt.WindowCloseButtonHint
+            | QtCore.Qt.WindowStaysOnTopHint
         )
         self.render_mode_widget = None
         self.setWindowTitle("OpenPype")

+        asset_label = QtWidgets.QLabel("Context", self)
+        asset_label.setStyleSheet("""QLabel {
+            font-size: 14px;
+            font-weight: 600;
+            color: #5f9fb8;
+        }""")
+        asset_label.setAlignment(QtCore.Qt.AlignHCenter)
+
         workfiles_btn = QtWidgets.QPushButton("Workfiles...", self)
         create_btn = QtWidgets.QPushButton("Create...", self)
         publish_btn = QtWidgets.QPushButton("Publish...", self)
@@ -62,77 +59,111 @@ class OpenPypeMenu(QtWidgets.QWidget):
         manager_btn = QtWidgets.QPushButton("Manage...", self)
         libload_btn = QtWidgets.QPushButton("Library...", self)
         rendermode_btn = QtWidgets.QPushButton("Set render mode...", self)
+        set_framerange_btn = QtWidgets.QPushButton("Set Frame Range", self)
+        set_resolution_btn = QtWidgets.QPushButton("Set Resolution", self)
         duplicate_with_inputs_btn = QtWidgets.QPushButton(
             "Duplicate with input connections", self
         )
-        reset_resolution_btn = QtWidgets.QPushButton(
-            "Reset Resolution from project", self
-        )

         layout = QtWidgets.QVBoxLayout(self)
         layout.setContentsMargins(10, 20, 10, 20)

+        layout.addWidget(asset_label)
+
+        layout.addSpacing(20)
+
         layout.addWidget(workfiles_btn)

+        layout.addSpacing(20)
+
         layout.addWidget(create_btn)
-        layout.addWidget(publish_btn)
         layout.addWidget(load_btn)
+        layout.addWidget(publish_btn)
         layout.addWidget(manager_btn)

-        layout.addWidget(Spacer(15, self))
+        layout.addSpacing(20)

         layout.addWidget(libload_btn)

-        layout.addWidget(Spacer(15, self))
+        layout.addSpacing(20)

+        layout.addWidget(set_framerange_btn)
+        layout.addWidget(set_resolution_btn)
         layout.addWidget(rendermode_btn)

-        layout.addWidget(Spacer(15, self))
+        layout.addSpacing(20)

         layout.addWidget(duplicate_with_inputs_btn)
-        layout.addWidget(reset_resolution_btn)

         self.setLayout(layout)

+        # Store reference so we can update the label
+        self.asset_label = asset_label
+
         workfiles_btn.clicked.connect(self.on_workfile_clicked)
         create_btn.clicked.connect(self.on_create_clicked)
         publish_btn.clicked.connect(self.on_publish_clicked)
         load_btn.clicked.connect(self.on_load_clicked)
         manager_btn.clicked.connect(self.on_manager_clicked)
         libload_btn.clicked.connect(self.on_libload_clicked)
-        rendermode_btn.clicked.connect(self.on_rendernode_clicked)
+        rendermode_btn.clicked.connect(self.on_rendermode_clicked)
         duplicate_with_inputs_btn.clicked.connect(
             self.on_duplicate_with_inputs_clicked)
-        reset_resolution_btn.clicked.connect(self.on_reset_resolution_clicked)
+        set_resolution_btn.clicked.connect(self.on_set_resolution_clicked)
+        set_framerange_btn.clicked.connect(self.on_set_framerange_clicked)

+        self._callbacks = []
+        self.register_callback("taskChanged", self.on_task_changed)
+        self.on_task_changed()
+
+        # Force close current process if Fusion is closed
+        self._pulse = FusionPulse(parent=self)
+        self._pulse.start()
+
+        # Detect Fusion events as OpenPype events
+        self._event_handler = FusionEventHandler(parent=self)
+        self._event_handler.start()
+
+    def on_task_changed(self):
+        # Update current context label
+        label = legacy_io.Session["AVALON_ASSET"]
+        self.asset_label.setText(label)
+
+    def register_callback(self, name, fn):
+
+        # Create a wrapper callback that we only store
+        # for as long as we want it to persist as callback
+        def _callback(*args):
+            fn()
+
+        self._callbacks.append(_callback)
+        register_event_callback(name, _callback)
+
+    def deregister_all_callbacks(self):
+        self._callbacks[:] = []
+
     def on_workfile_clicked(self):
         print("Clicked Workfile")
         host_tools.show_workfiles()

     def on_create_clicked(self):
         print("Clicked Create")
         host_tools.show_creator()

     def on_publish_clicked(self):
         print("Clicked Publish")
         host_tools.show_publish()

     def on_load_clicked(self):
         print("Clicked Load")
         host_tools.show_loader(use_context=True)

     def on_manager_clicked(self):
         print("Clicked Manager")
         host_tools.show_scene_inventory()

     def on_libload_clicked(self):
         print("Clicked Library")
         host_tools.show_library_loader()

-    def on_rendernode_clicked(self):
-        print("Clicked Set Render Mode")
+    def on_rendermode_clicked(self):
         if self.render_mode_widget is None:
             window = set_rendermode.SetRenderMode()
-            window.setStyleSheet(style.load_stylesheet())
+            window.setStyleSheet(load_stylesheet())
             window.show()
             self.render_mode_widget = window
         else:
@@ -140,15 +171,16 @@ class OpenPypeMenu(QtWidgets.QWidget):

     def on_duplicate_with_inputs_clicked(self):
         duplicate_with_inputs.duplicate_with_input_connections()
-        print("Clicked Set Colorspace")

-    def on_reset_resolution_clicked(self):
-        print("Clicked Reset Resolution")
+    def on_set_resolution_clicked(self):
+        set_asset_resolution()
+
+    def on_set_framerange_clicked(self):
+        set_asset_framerange()


 def launch_openpype_menu():
     app = QtWidgets.QApplication(sys.argv)
     app.setQuitOnLastWindowClosed(False)

     pype_menu = OpenPypeMenu()
@@ -156,5 +188,8 @@ def launch_openpype_menu():
     pype_menu.setStyleSheet(stylesheet)

     pype_menu.show()
+    self.menu = pype_menu

-    sys.exit(app.exec_())
+    result = app.exec_()
+    print("Shutting down..")
+    sys.exit(result)
@@ -1,29 +0,0 @@
-QWidget {
-    background-color: #282828;
-    border-radius: 3;
-}
-
-QPushButton {
-    border: 1px solid #090909;
-    background-color: #201f1f;
-    color: #ffffff;
-    padding: 5;
-}
-
-QPushButton:focus {
-    background-color: "#171717";
-    color: #d0d0d0;
-}
-
-QPushButton:hover {
-    background-color: "#171717";
-    color: #e64b3d;
-}
-
-#OpenPypeMenu {
-    border: 1px solid #fef9ef;
-}
-
-#Spacer {
-    background-color: #282828;
-}
@@ -4,11 +4,15 @@ Basic avalon integration
 import os
 import sys
 import logging
-import contextlib

 import pyblish.api
+from Qt import QtCore

-from openpype.lib import Logger
+from openpype.lib import (
+    Logger,
+    register_event_callback,
+    emit_event
+)
 from openpype.pipeline import (
     register_loader_plugin_path,
     register_creator_plugin_path,

@@ -18,12 +22,19 @@ from openpype.pipeline import (
     deregister_inventory_action_path,
     AVALON_CONTAINER_ID,
 )
-import openpype.hosts.fusion
+from openpype.pipeline.load import any_outdated_containers
+from openpype.hosts.fusion import FUSION_HOST_DIR
+from openpype.tools.utils import host_tools
+
+from .lib import (
+    get_current_comp,
+    comp_lock_and_undo_chunk,
+    validate_comp_prefs
+)

 log = Logger.get_logger(__name__)

-HOST_DIR = os.path.dirname(os.path.abspath(openpype.hosts.fusion.__file__))
-PLUGINS_DIR = os.path.join(HOST_DIR, "plugins")
+PLUGINS_DIR = os.path.join(FUSION_HOST_DIR, "plugins")

 PUBLISH_PATH = os.path.join(PLUGINS_DIR, "publish")
 LOAD_PATH = os.path.join(PLUGINS_DIR, "load")
@@ -31,16 +42,32 @@ CREATE_PATH = os.path.join(PLUGINS_DIR, "create")
 INVENTORY_PATH = os.path.join(PLUGINS_DIR, "inventory")


-class CompLogHandler(logging.Handler):
+class FusionLogHandler(logging.Handler):
+    # Keep a reference to fusion's Print function (Remote Object)
+    _print = None
+
+    @property
+    def print(self):
+        if self._print is not None:
+            # Use cached
+            return self._print
+
+        _print = getattr(sys.modules["__main__"], "fusion").Print
+        if _print is None:
+            # Backwards compatibility: Print method on Fusion instance was
+            # added around Fusion 17.4 and wasn't available on PyRemote Object
+            # before
+            _print = get_current_comp().Print
+        self._print = _print
+        return _print
+
     def emit(self, record):
         entry = self.format(record)
-        comp = get_current_comp()
-        if comp:
-            comp.Print(entry)
+        self.print(entry)


 def install():
-    """Install fusion-specific functionality of avalon-core.
+    """Install fusion-specific functionality of OpenPype.

     This is where you install menus and register families, data
     and loaders into fusion.
@@ -52,20 +79,18 @@ def install():

     """
     # Remove all handlers associated with the root logger object, because
-    # that one sometimes logs as "warnings" incorrectly.
+    # that one always logs as "warnings" incorrectly.
     for handler in logging.root.handlers[:]:
         logging.root.removeHandler(handler)

     # Attach default logging handler that prints to active comp
     logger = logging.getLogger()
     formatter = logging.Formatter(fmt="%(message)s\n")
-    handler = CompLogHandler()
+    handler = FusionLogHandler()
     handler.setFormatter(formatter)
     logger.addHandler(handler)
     logger.setLevel(logging.DEBUG)

-    log.info("openpype.hosts.fusion installed")
-
     pyblish.api.register_host("fusion")
     pyblish.api.register_plugin_path(PUBLISH_PATH)
     log.info("Registering Fusion plug-ins..")
@@ -78,6 +103,11 @@ def install():
         "instanceToggled", on_pyblish_instance_toggled
     )

+    # Register events
+    register_event_callback("open", on_after_open)
+    register_event_callback("save", on_save)
+    register_event_callback("new", on_new)
+

 def uninstall():
     """Uninstall all that was installed
@@ -103,7 +133,7 @@ def uninstall():
     )


-def on_pyblish_instance_toggled(instance, new_value, old_value):
+def on_pyblish_instance_toggled(instance, old_value, new_value):
     """Toggle saver tool passthrough states on instance toggles."""
     comp = instance.context.data.get("currentComp")
     if not comp:
@@ -126,6 +156,48 @@ def on_pyblish_instance_toggled(instance, old_value, new_value):
         tool.SetAttrs({"TOOLB_PassThrough": passthrough})


+def on_new(event):
+    comp = event["Rets"]["comp"]
+    validate_comp_prefs(comp, force_repair=True)
+
+
+def on_save(event):
+    comp = event["sender"]
+    validate_comp_prefs(comp)
+
+
+def on_after_open(event):
+    comp = event["sender"]
+    validate_comp_prefs(comp)
+
+    if any_outdated_containers():
+        log.warning("Scene has outdated content.")
+
+        # Find OpenPype menu to attach to
+        from . import menu
+
+        def _on_show_scene_inventory():
+            # ensure that comp is active
+            frame = comp.CurrentFrame
+            if not frame:
+                print("Comp is closed, skipping show scene inventory")
+                return
+            frame.ActivateFrame()   # raise comp window
+            host_tools.show_scene_inventory()
+
+        from openpype.widgets import popup
+        from openpype.style import load_stylesheet
+        dialog = popup.Popup(parent=menu.menu)
+        dialog.setWindowTitle("Fusion comp has outdated content")
+        dialog.setMessage("There are outdated containers in "
+                          "your Fusion comp.")
+        dialog.on_clicked.connect(_on_show_scene_inventory)
+        dialog.show()
+        dialog.raise_()
+        dialog.activateWindow()
+        dialog.setStyleSheet(load_stylesheet())
+
+
 def ls():
     """List containers from active Fusion scene
@@ -139,7 +211,7 @@ def ls():
     """

     comp = get_current_comp()
-    tools = comp.GetToolList(False, "Loader").values()
+    tools = comp.GetToolList(False).values()

     for tool in tools:
         container = parse_container(tool)
@@ -211,19 +283,114 @@ def parse_container(tool):
     return container


-def get_current_comp():
-    """Hack to get current comp in this session"""
-    fusion = getattr(sys.modules["__main__"], "fusion", None)
-    return fusion.CurrentComp if fusion else None
+class FusionEventThread(QtCore.QThread):
+    """QThread which will periodically ping Fusion app for any events.
+
+    The fusion.UIManager must be set up to be notified of events before
+    they'll be reported by this thread, for example:
+        fusion.UIManager.AddNotify("Comp_Save", None)
+
+    """
+
+    on_event = QtCore.Signal(dict)
+
+    def run(self):
+
+        app = getattr(sys.modules["__main__"], "app", None)
+        if app is None:
+            # No Fusion app found
+            return
+
+        # As optimization store the GetEvent method directly because every
+        # getattr of UIManager.GetEvent tries to resolve the Remote Function
+        # through the PyRemoteObject
+        get_event = app.UIManager.GetEvent
+        delay = int(os.environ.get("OPENPYPE_FUSION_CALLBACK_INTERVAL", 1000))
+        while True:
+            if self.isInterruptionRequested():
+                return
+
+            # Process all events that have been queued up until now
+            while True:
+                event = get_event(False)
+                if not event:
+                    break
+                self.on_event.emit(event)
+
+            # Wait some time before processing events again
+            # to not keep blocking the UI
+            self.msleep(delay)


-@contextlib.contextmanager
-def comp_lock_and_undo_chunk(comp, undo_queue_name="Script CMD"):
-    """Lock comp and open an undo chunk during the context"""
-    try:
-        comp.Lock()
-        comp.StartUndo(undo_queue_name)
-        yield
-    finally:
-        comp.Unlock()
-        comp.EndUndo()
+class FusionEventHandler(QtCore.QObject):
+    """Emits OpenPype events based on Fusion events captured in a QThread.
+
+    This will emit the following OpenPype events based on Fusion actions:
+        save: Comp_Save, Comp_SaveAs
+        open: Comp_Opened
+        new: Comp_New
+
+    To use this you can attach it to your Qt UI so it runs in the background.
+    E.g.
+        >>> handler = FusionEventHandler(parent=window)
+        >>> handler.start()
+
+    """
+    ACTION_IDS = [
+        "Comp_Save",
+        "Comp_SaveAs",
+        "Comp_New",
+        "Comp_Opened"
+    ]
+
+    def __init__(self, parent=None):
+        super(FusionEventHandler, self).__init__(parent=parent)
+
+        # Set up Fusion event callbacks
+        fusion = getattr(sys.modules["__main__"], "fusion", None)
+        ui = fusion.UIManager
+
+        # Add notifications for the ones we want to listen to
+        notifiers = []
+        for action_id in self.ACTION_IDS:
+            notifier = ui.AddNotify(action_id, None)
+            notifiers.append(notifier)
+
+        # TODO: Not entirely sure whether these must be kept to avoid
+        #       garbage collection
+        self._notifiers = notifiers
+
+        self._event_thread = FusionEventThread(parent=self)
+        self._event_thread.on_event.connect(self._on_event)
+
+    def start(self):
+        self._event_thread.start()
+
+    def stop(self):
+        self._event_thread.stop()
+
+    def _on_event(self, event):
+        """Handle Fusion events to emit OpenPype events"""
+        if not event:
+            return
+
+        what = event["what"]
+
+        # Comp Save
+        if what in {"Comp_Save", "Comp_SaveAs"}:
+            if not event["Rets"].get("success"):
+                # If the Save action is cancelled it will still emit an
+                # event but with "success": False so we ignore those cases
+                return
+            # Comp was saved
+            emit_event("save", data=event)
+            return
+
+        # Comp New
+        elif what in {"Comp_New"}:
+            emit_event("new", data=event)
+
+        # Comp Opened
+        elif what in {"Comp_Opened"}:
+            emit_event("open", data=event)
63
openpype/hosts/fusion/api/pulse.py
Normal file
@@ -0,0 +1,63 @@
+import os
+import sys
+
+from Qt import QtCore
+
+
+class PulseThread(QtCore.QThread):
+    no_response = QtCore.Signal()
+
+    def __init__(self, parent=None):
+        super(PulseThread, self).__init__(parent=parent)
+
+    def run(self):
+        app = getattr(sys.modules["__main__"], "app", None)
+
+        # Interval in milliseconds
+        interval = os.environ.get("OPENPYPE_FUSION_PULSE_INTERVAL", 1000)
+
+        while True:
+            if self.isInterruptionRequested():
+                return
+
+            # We don't need to call Test because PyRemoteObject of the app
+            # will actually fail to even resolve the Test function if it has
+            # gone down. So we can actually already just check by confirming
+            # the method is still getting resolved. (Optimization)
+            if app.Test is None:
+                self.no_response.emit()
+
+            self.msleep(interval)
+
+
+class FusionPulse(QtCore.QObject):
+    """A Timer that checks whether host app is still alive.
+
+    This checks whether the Fusion process is still active at a certain
+    interval. This is useful due to how Fusion runs its scripts. Each script
+    runs in its own environment and process (a `fusionscript` process each).
+    If Fusion would go down and we have a UI process running at the same time
+    then it can happen that the `fusionscript.exe` will remain running in the
+    background in limbo due to e.g. a Qt interface's QApplication that keeps
+    running infinitely.
+
+    Warning:
+        When the host is not detected this will automatically exit
+        the current process.
+
+    """
+
+    def __init__(self, parent=None):
+        super(FusionPulse, self).__init__(parent=parent)
+        self._thread = PulseThread(parent=self)
+        self._thread.no_response.connect(self.on_no_response)
+
+    def on_no_response(self):
+        print("Pulse detected no response from Fusion..")
+        sys.exit(1)
+
+    def start(self):
+        self._thread.start()
+
+    def stop(self):
+        self._thread.requestInterruption()
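A small caveat in `PulseThread.run`: `os.environ.get` returns a string whenever `OPENPYPE_FUSION_PULSE_INTERVAL` is set, and `QThread.msleep` expects an integer, so only the unset default of `1000` behaves as intended. `FusionEventThread` above already guards its interval with `int(...)`; the same cast would apply here:

    # cast so an env-var override doesn't hand msleep a string
    interval = int(os.environ.get("OPENPYPE_FUSION_PULSE_INTERVAL", 1000))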
@@ -2,13 +2,11 @@
 import sys
 import os

-from openpype.pipeline import HOST_WORKFILE_EXTENSIONS
-
-from .pipeline import get_current_comp
+from .lib import get_current_comp


 def file_extensions():
-    return HOST_WORKFILE_EXTENSIONS["fusion"]
+    return [".comp"]


 def has_unsaved_changes():
60
openpype/hosts/fusion/deploy/Config/openpype_menu.fu
Normal file
@@ -0,0 +1,60 @@
+{
+    Action
+    {
+        ID = "OpenPype_Menu",
+        Category = "OpenPype",
+        Name = "OpenPype Menu",
+
+        Targets =
+        {
+            Composition =
+            {
+                Execute = _Lua [=[
+                    local scriptPath = app:MapPath("OpenPype:MenuScripts/openpype_menu.py")
+                    if bmd.fileexists(scriptPath) == false then
+                        print("[OpenPype Error] Can't run file: " .. scriptPath)
+                    else
+                        target:RunScript(scriptPath)
+                    end
+                ]=],
+            },
+        },
+    },
+    Action
+    {
+        ID = "OpenPype_Install_PySide2",
+        Category = "OpenPype",
+        Name = "Install PySide2",
+
+        Targets =
+        {
+            Composition =
+            {
+                Execute = _Lua [=[
+                    local scriptPath = app:MapPath("OpenPype:MenuScripts/install_pyside2.py")
+                    if bmd.fileexists(scriptPath) == false then
+                        print("[OpenPype Error] Can't run file: " .. scriptPath)
+                    else
+                        target:RunScript(scriptPath)
+                    end
+                ]=],
+            },
+        },
+    },
+    Menus
+    {
+        Target = "ChildFrame",
+
+        Before "Help"
+        {
+            Sub "OpenPype"
+            {
+                "OpenPype_Menu{}",
+                "_",
+                Sub "Admin" {
+                    "OpenPype_Install_PySide2{}"
+                }
+            }
+        },
+    },
+}
6
openpype/hosts/fusion/deploy/MenuScripts/README.md
Normal file
@@ -0,0 +1,6 @@
+### OpenPype deploy MenuScripts
+
+Note that `MenuScripts` is not an official Fusion folder.
+OpenPype only uses this folder in `{fusion}/deploy/` to trigger the OpenPype menu actions.
+
+The scripts are used in the actions defined in `.fu` files in `{fusion}/deploy/Config`.
29
openpype/hosts/fusion/deploy/MenuScripts/install_pyside2.py
Normal file
@ -0,0 +1,29 @@
|
|||
# This is just a quick hack for users running Py3 locally but having no
# Qt library installed
import os
import subprocess
import importlib


try:
    from Qt import QtWidgets  # noqa: F401
    from Qt import __binding__
    print(f"Qt binding: {__binding__}")
    mod = importlib.import_module(__binding__)
    print(f"Qt path: {mod.__file__}")
    print("Qt library found, nothing to do..")

except ImportError:
    print("Assuming no Qt library is installed..")
    print('Installing PySide2 for Python 3.6: '
          f'{os.environ["FUSION16_PYTHON36_HOME"]}')

    # Get full path to python executable
    exe = "python.exe" if os.name == 'nt' else "python"
    python = os.path.join(os.environ["FUSION16_PYTHON36_HOME"], exe)
    assert os.path.exists(python), f"Python doesn't exist: {python}"

    # Do python -m pip install PySide2
    args = [python, "-m", "pip", "install", "PySide2"]
    print(f"Args: {args}")
    subprocess.Popen(args)
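Note that `subprocess.Popen` returns immediately without waiting for pip to
finish. If a blocking variant were wanted, the standard library offers one
(a sketch, not part of this diff):

    import subprocess
    # Raises CalledProcessError if the pip install fails:
    subprocess.check_call(args)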

@@ -9,6 +9,10 @@ from openpype.pipeline import (


 def main(env):
+    # This script working directory starts in Fusion application folder.
+    # However the contents of that folder can conflict with Qt library dlls
+    # so we make sure to move out of it to avoid DLL Load Failed errors.
+    os.chdir("..")
     from openpype.hosts.fusion import api
     from openpype.hosts.fusion.api import menu

@@ -20,6 +24,11 @@ def main(env):

     menu.launch_openpype_menu()

+    # Initiate a QTimer to check if Fusion is still alive every X interval
+    # If Fusion is not found - kill itself
+    # todo(roy): Implement timer that ensures UI doesn't remain when e.g.
+    # Fusion closes down
+

 if __name__ == "__main__":
     result = main(os.environ)

openpype/hosts/fusion/deploy/fusion_shared.prefs (new file, 19 lines)
@@ -0,0 +1,19 @@
{
    Locked = true,
    Global = {
        Paths = {
            Map = {
                ["OpenPype:"] = "$(OPENPYPE_FUSION)/deploy",
                ["Reactor:"] = "$(REACTOR)",

                ["Config:"] = "UserPaths:Config;OpenPype:Config",
                ["Scripts:"] = "UserPaths:Scripts;Reactor:System/Scripts;OpenPype:Scripts",
                ["UserPaths:"] = "UserData:;AllData:;Fusion:;Reactor:Deploy"
            },
        },
        Script = {
            PythonVersion = 3,
            Python3Forced = true
        },
    },
}

openpype/hosts/fusion/hooks/pre_fusion_ocio_hook.py (new file, 34 lines)
@@ -0,0 +1,34 @@
import os
import platform

from openpype.lib import PreLaunchHook, ApplicationLaunchFailed


class FusionPreLaunchOCIO(PreLaunchHook):
    """Set OCIO environment variable for Fusion"""
    app_groups = ["fusion"]

    def execute(self):
        """Hook entry method."""

        # Get the Fusion imageio settings from the project settings
        project_settings = self.data["project_settings"]
        imageio_fusion = project_settings["fusion"]["imageio"]

        ocio = imageio_fusion.get("ocio")
        enabled = ocio.get("enabled", False)
        if not enabled:
            return

        platform_key = platform.system().lower()
        ocio_path = ocio["configFilePath"][platform_key]
        if not ocio_path:
            raise ApplicationLaunchFailed(
                "Fusion OCIO is enabled in project settings but no OCIO "
                f"config path is set for your current platform: {platform_key}"
            )

        self.log.info(f"Setting OCIO config path: {ocio_path}")
        self.launch_context.env["OCIO"] = os.pathsep.join(ocio_path)
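For context, a sketch of the settings shape this hook consumes; the keys
shown here are assumptions for illustration, the authoritative schema is the
OpenPype project settings:

    project_settings = {
        "fusion": {
            "imageio": {
                "ocio": {
                    "enabled": True,
                    # Per-platform list of config paths, which is why the
                    # hook can join the value with os.pathsep:
                    "configFilePath": {
                        "windows": ["C:/ocio/config.ocio"],
                        "linux": ["/mnt/ocio/config.ocio"],
                        "darwin": [],
                    },
                }
            }
        }
    }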

@@ -1,114 +1,61 @@
 import os
-import shutil

-import openpype.hosts.fusion
 from openpype.lib import PreLaunchHook, ApplicationLaunchFailed
+from openpype.hosts.fusion import FUSION_HOST_DIR


 class FusionPrelaunch(PreLaunchHook):
-    """
-    This hook will check if current workfile path has Fusion
-    project inside.
+    """Prepares OpenPype Fusion environment
+
+    Requires FUSION_PYTHON3_HOME to be defined in the environment for Fusion
+    to point at a valid Python 3 build for Fusion. That is Python 3.3-3.10
+    for Fusion 18 and Python 3.6 for Fusion 16 and 17.
+
+    This also sets FUSION16_MasterPrefs to apply the fusion master prefs
+    as set in openpype/hosts/fusion/deploy/fusion_shared.prefs to enable
+    the OpenPype menu and force Python 3 over Python 2.
+
     """
     app_groups = ["fusion"]

     def execute(self):
-        # making sure python 3.6 is installed at provided path
-        py36_dir = self.launch_context.env.get("PYTHON36")
-        if not py36_dir:
+        # making sure python 3 is installed at provided path
+        # Py 3.3-3.10 for Fusion 18+ or Py 3.6 for Fu 16-17
+        py3_var = "FUSION_PYTHON3_HOME"
+        fusion_python3_home = self.launch_context.env.get(py3_var, "")
+
+        self.log.info(f"Looking for Python 3 in: {fusion_python3_home}")
+        for path in fusion_python3_home.split(os.pathsep):
+            # Allow defining multiple paths to allow "fallback" to another
+            # path. But make sure to set only a single path as the final
+            # variable.
+            py3_dir = os.path.normpath(path)
+            if os.path.isdir(py3_dir):
+                break
+        else:
             raise ApplicationLaunchFailed(
-                "Required environment variable \"PYTHON36\" is not set."
-                "\n\nFusion implementation requires to have"
-                " installed Python 3.6"
+                "Python 3 is not installed at the provided path.\n"
+                "Make sure the environment in fusion settings has "
+                "'FUSION_PYTHON3_HOME' set correctly and make sure "
+                "Python 3 is installed in the given path."
+                f"\n\nFUSION_PYTHON3_HOME: {fusion_python3_home}"
             )

-        py36_dir = os.path.normpath(py36_dir)
-        if not os.path.isdir(py36_dir):
-            raise ApplicationLaunchFailed(
-                "Python 3.6 is not installed at the provided path.\n"
-                "Either make sure the environments in fusion settings has"
-                " 'PYTHON36' set correctly or make sure Python 3.6 is"
-                f" installed in the given path.\n\nPYTHON36: {py36_dir}"
-            )
-        self.log.info(f"Path to Fusion Python folder: '{py36_dir}'...")
-        self.launch_context.env["PYTHON36"] = py36_dir
+        self.log.info(f"Setting {py3_var}: '{py3_dir}'...")
+        self.launch_context.env[py3_var] = py3_dir

-        utility_dir = self.launch_context.env.get("FUSION_UTILITY_SCRIPTS_DIR")
-        if not utility_dir:
-            raise ApplicationLaunchFailed(
-                "Required Fusion utility script dir environment variable"
-                " \"FUSION_UTILITY_SCRIPTS_DIR\" is not set."
-            )
+        # Fusion 18+ requires FUSION_PYTHON3_HOME to also be on PATH
+        self.launch_context.env["PATH"] += ";" + py3_dir

-        # setting utility scripts dir for scripts syncing
-        utility_dir = os.path.normpath(utility_dir)
-        if not os.path.isdir(utility_dir):
-            raise ApplicationLaunchFailed(
-                "Fusion utility script dir does not exist. Either make sure "
-                "the environments in fusion settings has"
-                " 'FUSION_UTILITY_SCRIPTS_DIR' set correctly or reinstall "
-                f"Fusion.\n\nFUSION_UTILITY_SCRIPTS_DIR: '{utility_dir}'"
-            )
+        # Fusion 16 and 17 use FUSION16_PYTHON36_HOME instead of
+        # FUSION_PYTHON3_HOME and will only work with a Python 3.6 version
+        # TODO: Detect Fusion version to only set for specific Fusion build
+        self.launch_context.env["FUSION16_PYTHON36_HOME"] = py3_dir

-        self._sync_utility_scripts(self.launch_context.env)
-        self.log.info("Fusion Pype wrapper has been installed")
+        # Add our Fusion Master Prefs which is the only way to customize
+        # Fusion to define where it can read custom scripts and tools from
+        self.log.info(f"Setting OPENPYPE_FUSION: {FUSION_HOST_DIR}")
+        self.launch_context.env["OPENPYPE_FUSION"] = FUSION_HOST_DIR

-    def _sync_utility_scripts(self, env):
-        """Synchronize basic utility scripts for Fusion.
-
-        To be able to run scripts from inside the `Fusion/Workspace/Scripts`
-        menu all scripts have to be accessible from the defined folder.
-        """
-        if not env:
-            env = {k: v for k, v in os.environ.items()}
-
-        # initiate inputs
-        scripts = {}
-        us_env = env.get("FUSION_UTILITY_SCRIPTS_SOURCE_DIR")
-        us_dir = env.get("FUSION_UTILITY_SCRIPTS_DIR", "")
-        us_paths = [os.path.join(
-            os.path.dirname(os.path.abspath(openpype.hosts.fusion.__file__)),
-            "utility_scripts"
-        )]
-
-        # collect script dirs
-        if us_env:
-            self.log.info(f"Utility Scripts Env: `{us_env}`")
-            us_paths = us_env.split(
-                os.pathsep) + us_paths
-
-        # collect scripts from dirs
-        for path in us_paths:
-            scripts.update({path: os.listdir(path)})
-
-        self.log.info(f"Utility Scripts Dir: `{us_paths}`")
-        self.log.info(f"Utility Scripts: `{scripts}`")
-
-        # make sure no script file is in folder
-        if next((s for s in os.listdir(us_dir)), None):
-            for s in os.listdir(us_dir):
-                path = os.path.normpath(
-                    os.path.join(us_dir, s))
-                self.log.info(f"Removing `{path}`...")
-
-                # remove file or directory if not in our folders
-                if not os.path.isdir(path):
-                    os.remove(path)
-                else:
-                    shutil.rmtree(path)
-
-        # copy scripts into Fusion's utility scripts dir
-        for d, sl in scripts.items():
-            # directory and scripts list
-            for s in sl:
-                # script in script list
-                src = os.path.normpath(os.path.join(d, s))
-                dst = os.path.normpath(os.path.join(us_dir, s))
-
-                self.log.info(f"Copying `{src}` to `{dst}`...")
-
-                # copy file or directory from our folders to fusion's folder
-                if not os.path.isdir(src):
-                    shutil.copy2(src, dst)
-                else:
-                    shutil.copytree(src, dst)
+        pref_var = "FUSION16_MasterPrefs"  # used by Fusion 16, 17 and 18
+        prefs = os.path.join(FUSION_HOST_DIR, "deploy", "fusion_shared.prefs")
+        self.log.info(f"Setting {pref_var}: {prefs}")
+        self.launch_context.env[pref_var] = prefs
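The for/else fallback above allows FUSION_PYTHON3_HOME to carry several
candidate directories; the first existing one wins. A standalone sketch with
illustrative paths:

    import os

    candidates = os.pathsep.join([
        "C:/Python36-missing",        # skipped: does not exist
        "C:/Program Files/Python39",  # first existing dir gets selected
    ])

    for path in candidates.split(os.pathsep):
        py3_dir = os.path.normpath(path)
        if os.path.isdir(py3_dir):
            break
    else:
        raise RuntimeError("No Python 3 found in any candidate path")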

@@ -1,6 +1,9 @@
 import os

-from openpype.pipeline import LegacyCreator
+from openpype.pipeline import (
+    LegacyCreator,
+    legacy_io
+)
 from openpype.hosts.fusion.api import (
     get_current_comp,
     comp_lock_and_undo_chunk
@@ -21,12 +24,9 @@ class CreateOpenEXRSaver(LegacyCreator):

         comp = get_current_comp()

-        # todo: improve method of getting current environment
-        # todo: pref avalon.Session over os.environ
-        workdir = os.path.normpath(os.environ["AVALON_WORKDIR"])
+        workdir = os.path.normpath(legacy_io.Session["AVALON_WORKDIR"])

-        filename = "{}..tiff".format(self.name)
+        filename = "{}..exr".format(self.name)
         filepath = os.path.join(workdir, "render", filename)

         with comp_lock_and_undo_chunk(comp):
@@ -39,10 +39,10 @@ class CreateOpenEXRSaver(LegacyCreator):
             saver["Clip"] = filepath
             saver["OutputFormat"] = file_format

-            # # # Set standard TIFF settings
+            # Check file format settings are available
             if saver[file_format] is None:
-                raise RuntimeError("File format is not set to TiffFormat, "
-                                   "this is a bug")
+                raise RuntimeError("File format is not set to {}, "
+                                   "this is a bug".format(file_format))

             # Set file format attributes
             saver[file_format]["Depth"] = 1  # int8 | int16 | float32 | other

openpype/hosts/fusion/plugins/load/load_alembic.py (new file, 70 lines)
@@ -0,0 +1,70 @@
from openpype.pipeline import (
    load,
    get_representation_path,
)
from openpype.hosts.fusion.api import (
    imprint_container,
    get_current_comp,
    comp_lock_and_undo_chunk
)


class FusionLoadAlembicMesh(load.LoaderPlugin):
    """Load Alembic mesh into Fusion"""

    families = ["pointcache", "model"]
    representations = ["abc"]

    label = "Load alembic mesh"
    order = -10
    icon = "code-fork"
    color = "orange"

    tool_type = "SurfaceAlembicMesh"

    def load(self, context, name, namespace, data):
        # Fallback to asset name when namespace is None
        if namespace is None:
            namespace = context['asset']['name']

        # Create the Loader with the filename path set
        comp = get_current_comp()
        with comp_lock_and_undo_chunk(comp, "Create tool"):

            path = self.fname

            args = (-32768, -32768)
            tool = comp.AddTool(self.tool_type, *args)
            tool["Filename"] = path

            imprint_container(tool,
                              name=name,
                              namespace=namespace,
                              context=context,
                              loader=self.__class__.__name__)

    def switch(self, container, representation):
        self.update(container, representation)

    def update(self, container, representation):
        """Update Alembic path"""

        tool = container["_tool"]
        assert tool.ID == self.tool_type, f"Must be {self.tool_type}"
        comp = tool.Comp()

        path = get_representation_path(representation)

        with comp_lock_and_undo_chunk(comp, "Update tool"):
            tool["Filename"] = path

            # Update the imprinted representation
            tool.SetData("avalon.representation", str(representation["_id"]))

    def remove(self, container):
        tool = container["_tool"]
        assert tool.ID == self.tool_type, f"Must be {self.tool_type}"
        comp = tool.Comp()

        with comp_lock_and_undo_chunk(comp, "Remove tool"):
            tool.Delete()

openpype/hosts/fusion/plugins/load/load_fbx.py (new file, 71 lines)
@@ -0,0 +1,71 @@
from openpype.pipeline import (
    load,
    get_representation_path,
)
from openpype.hosts.fusion.api import (
    imprint_container,
    get_current_comp,
    comp_lock_and_undo_chunk
)


class FusionLoadFBXMesh(load.LoaderPlugin):
    """Load FBX mesh into Fusion"""

    families = ["*"]
    representations = ["fbx"]

    label = "Load FBX mesh"
    order = -10
    icon = "code-fork"
    color = "orange"

    tool_type = "SurfaceFBXMesh"

    def load(self, context, name, namespace, data):
        # Fallback to asset name when namespace is None
        if namespace is None:
            namespace = context['asset']['name']

        # Create the Loader with the filename path set
        comp = get_current_comp()
        with comp_lock_and_undo_chunk(comp, "Create tool"):

            path = self.fname

            args = (-32768, -32768)
            tool = comp.AddTool(self.tool_type, *args)
            tool["ImportFile"] = path

            imprint_container(tool,
                              name=name,
                              namespace=namespace,
                              context=context,
                              loader=self.__class__.__name__)

    def switch(self, container, representation):
        self.update(container, representation)

    def update(self, container, representation):
        """Update path"""

        tool = container["_tool"]
        assert tool.ID == self.tool_type, f"Must be {self.tool_type}"
        comp = tool.Comp()

        path = get_representation_path(representation)

        with comp_lock_and_undo_chunk(comp, "Update tool"):
            tool["ImportFile"] = path

            # Update the imprinted representation
            tool.SetData("avalon.representation", str(representation["_id"]))

    def remove(self, container):
        tool = container["_tool"]
        assert tool.ID == self.tool_type, f"Must be {self.tool_type}"
        comp = tool.Comp()

        with comp_lock_and_undo_chunk(comp, "Remove tool"):
            tool.Delete()

@@ -101,6 +101,9 @@ def loader_shift(loader, frame, relative=True):
     else:
         shift = frame - old_in

+    if not shift:
+        return 0
+
     # Shifting global in will try to automatically compensate for the change
     # in the "ClipTimeStart" and "HoldFirstFrame" inputs, so we preserve those
     # input values to "just shift" the clip
@@ -149,9 +152,8 @@ class FusionLoadSequence(load.LoaderPlugin):
             tool["Clip"] = path

             # Set global in point to start frame (if in version.data)
-            start = context["version"]["data"].get("frameStart", None)
-            if start is not None:
-                loader_shift(tool, start, relative=False)
+            start = self._get_start(context["version"], tool)
+            loader_shift(tool, start, relative=False)

             imprint_container(tool,
                               name=name,
@@ -214,12 +216,7 @@ class FusionLoadSequence(load.LoaderPlugin):
         # Get start frame from version data
         project_name = legacy_io.active_project()
         version = get_version_by_id(project_name, representation["parent"])
-        start = version["data"].get("frameStart")
-        if start is None:
-            self.log.warning("Missing start frame for updated version, "
-                             "assuming starts at frame 0 for: "
-                             "{} ({})".format(tool.Name, representation))
-            start = 0
+        start = self._get_start(version, tool)

         with comp_lock_and_undo_chunk(comp, "Update Loader"):
@@ -256,3 +253,27 @@ class FusionLoadSequence(load.LoaderPlugin):
         """Get first file in representation root"""
         files = sorted(os.listdir(root))
         return os.path.join(root, files[0])
+
+    def _get_start(self, version_doc, tool):
+        """Return real start frame of published files (incl. handles)"""
+        data = version_doc["data"]
+
+        # Get start frame directly with handle if it's in data
+        start = data.get("frameStartHandle")
+        if start is not None:
+            return start
+
+        # Get frame start without handles
+        start = data.get("frameStart")
+        if start is None:
+            self.log.warning("Missing start frame for version, "
+                             "assuming starts at frame 0 for: "
+                             "{}".format(tool.Name))
+            return 0
+
+        # Use `handleStart` if the data is available
+        handle_start = data.get("handleStart")
+        if handle_start:
+            start -= handle_start
+
+        return start
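The handle arithmetic in `_get_start` is easy to get backwards; a worked
example with illustrative values:

    # Hypothetical version data without a precomputed "frameStartHandle":
    data = {"frameStart": 1001, "handleStart": 10}

    start = data["frameStart"] - data["handleStart"]
    assert start == 991  # first published file includes the 10 handle frames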

openpype/hosts/fusion/plugins/publish/collect_inputs.py (new file, 114 lines)
@@ -0,0 +1,114 @@
from bson.objectid import ObjectId

import pyblish.api

from openpype.pipeline import registered_host


def collect_input_containers(tools):
    """Collect containers that contain any of the tools in `tools`.

    This will return any loaded Avalon container that contains at least one
    of the tools. As such, the Avalon container is an input for it. Or in
    short, there are member tools of that container.

    Returns:
        list: Input avalon containers

    """

    # Lookup by tool names
    lookup = frozenset([tool.Name for tool in tools])

    containers = []
    host = registered_host()
    for container in host.ls():

        name = container["_tool"].Name

        # We currently assume no "groups" as containers but just single tools
        # like a single "Loader" operator. As such we just check whether the
        # Loader is part of the processing queue.
        if name in lookup:
            containers.append(container)

    return containers


def iter_upstream(tool):
    """Yield all upstream inputs for the current tool.

    Yields:
        tool: The input tools.

    """

    def get_connected_input_tools(tool):
        """Helper function that returns connected input tools for a tool."""
        inputs = []

        # Filter only to actual types that will have sensible upstream
        # connections. So we ignore just "Number" inputs as they can be
        # many to iterate, slowing things down quite a bit - and in practice
        # they don't have upstream connections.
        VALID_INPUT_TYPES = ['Image', 'Particles', 'Mask', 'DataType3D']
        for type_ in VALID_INPUT_TYPES:
            for input_ in tool.GetInputList(type_).values():
                output = input_.GetConnectedOutput()
                if output:
                    input_tool = output.GetTool()
                    inputs.append(input_tool)

        return inputs

    # Initialize the process queue with the tool's own inputs
    queue = get_connected_input_tools(tool)

    # We keep track of which tool names we have processed so far, to ensure we
    # don't process the same hierarchy again. We are not pushing the tool
    # itself into the set as that doesn't correctly recognize the same tool.
    # Since tool names are unique in a comp in Fusion we rely on that.
    collected = set(tool.Name for tool in queue)

    # Traverse upstream references for all tools and yield them as we
    # process the queue.
    while queue:
        upstream_tool = queue.pop()
        yield upstream_tool

        # Find upstream tools that are not collected yet.
        upstream_inputs = get_connected_input_tools(upstream_tool)
        upstream_inputs = [t for t in upstream_inputs if
                           t.Name not in collected]

        queue.extend(upstream_inputs)
        collected.update(tool.Name for tool in upstream_inputs)


class CollectUpstreamInputs(pyblish.api.InstancePlugin):
    """Collect source input containers used for this publish.

    This will include `inputs` data of which loaded publishes were used in the
    generation of this publish. This leaves an upstream trace to what was used
    as input.

    """

    label = "Collect Inputs"
    order = pyblish.api.CollectorOrder + 0.2
    hosts = ["fusion"]

    def process(self, instance):

        # Get all upstream and include itself
        tool = instance[0]
        nodes = list(iter_upstream(tool))
        nodes.append(tool)

        # Collect containers for the given set of nodes
        containers = collect_input_containers(nodes)

        inputs = [ObjectId(c["representation"]) for c in containers]
        instance.data["inputRepresentations"] = inputs

        self.log.info("Collected inputs: %s" % inputs)
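A sketch of how the traversal and the container lookup compose, assuming a
comp whose Saver is fed by a Loader (tool names and the `comp` handle are
illustrative):

    saver = comp.FindTool("Saver1")
    upstream = list(iter_upstream(saver))      # e.g. [Merge1, Loader1]

    # Any of those tools that belongs to a loaded Avalon container marks
    # that container's representation as an input of this publish:
    containers = collect_input_containers(upstream + [saver])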

@@ -4,19 +4,21 @@ import pyblish.api


 def get_comp_render_range(comp):
-    """Return comp's start and end render range."""
+    """Return comp's start-end render range and global start-end range."""
     comp_attrs = comp.GetAttrs()
     start = comp_attrs["COMPN_RenderStart"]
     end = comp_attrs["COMPN_RenderEnd"]
+    global_start = comp_attrs["COMPN_GlobalStart"]
+    global_end = comp_attrs["COMPN_GlobalEnd"]

     # Whenever render ranges are undefined fall back
     # to the comp's global start and end
     if start == -1000000000:
-        start = comp_attrs["COMPN_GlobalEnd"]
+        start = global_start
     if end == -1000000000:
-        end = comp_attrs["COMPN_GlobalStart"]
+        end = global_end

-    return start, end
+    return start, end, global_start, global_end


 class CollectInstances(pyblish.api.ContextPlugin):
@@ -42,9 +44,11 @@ class CollectInstances(pyblish.api.ContextPlugin):
         tools = comp.GetToolList(False).values()
         savers = [tool for tool in tools if tool.ID == "Saver"]

-        start, end = get_comp_render_range(comp)
+        start, end, global_start, global_end = get_comp_render_range(comp)
         context.data["frameStart"] = int(start)
         context.data["frameEnd"] = int(end)
+        context.data["frameStartHandle"] = int(global_start)
+        context.data["frameEndHandle"] = int(global_end)

         for tool in savers:
             path = tool["Clip"][comp.TIME_UNDEFINED]
@@ -78,8 +82,10 @@ class CollectInstances(pyblish.api.ContextPlugin):
                 "label": label,
                 "frameStart": context.data["frameStart"],
                 "frameEnd": context.data["frameEnd"],
+                "frameStartHandle": context.data["frameStartHandle"],
+                "frameEndHandle": context.data["frameEndHandle"],
                 "fps": context.data["fps"],
-                "families": ["render", "review", "ftrack"],
+                "families": ["render", "review"],
                 "family": "render",
                 "active": active,
                 "publish": active  # backwards compatibility
@@ -20,6 +20,8 @@ class Fusionlocal(pyblish.api.InstancePlugin):

     def process(self, instance):

+        # This plug-in runs only once and thus assumes all instances
+        # currently will render the same frame range
         context = instance.context
         key = "__hasRun{}".format(self.__class__.__name__)
         if context.data.get(key, False):
@@ -28,8 +30,8 @@ class Fusionlocal(pyblish.api.InstancePlugin):
         context.data[key] = True

         current_comp = context.data["currentComp"]
-        frame_start = current_comp.GetAttrs("COMPN_RenderStart")
-        frame_end = current_comp.GetAttrs("COMPN_RenderEnd")
+        frame_start = context.data["frameStartHandle"]
+        frame_end = context.data["frameEndHandle"]
         path = instance.data["path"]
         output_dir = instance.data["outputDir"]
@@ -40,7 +42,11 @@ class Fusionlocal(pyblish.api.InstancePlugin):
         self.log.info("End frame: {}".format(frame_end))

         with comp_lock_and_undo_chunk(current_comp):
-            result = current_comp.Render()
+            result = current_comp.Render({
+                "Start": frame_start,
+                "End": frame_end,
+                "Wait": True
+            })

         if "representations" not in instance.data:
             instance.data["representations"] = []
(deleted file, 284 lines)
@@ -1,284 +0,0 @@
import os
import re
import sys
import logging

from openpype.client import (
    get_asset_by_name,
    get_versions,
)
from openpype.pipeline import (
    legacy_io,
    install_host,
    registered_host,
)
from openpype.lib import version_up
from openpype.hosts.fusion import api
from openpype.hosts.fusion.api import lib
from openpype.pipeline.context_tools import get_workdir_from_session

log = logging.getLogger("Update Slap Comp")


def _format_version_folder(folder):
    """Format a version folder based on the filepath

    The assumption here is that, if the path does not exist, the folder
    will be "v001".

    Args:
        folder: file path to a folder

    Returns:
        str: new version folder name
    """

    new_version = 1
    if os.path.isdir(folder):
        re_version = re.compile(r"v\d+$")
        versions = [i for i in os.listdir(folder) if os.path.isdir(i)
                    and re_version.match(i)]
        if versions:
            # ensure the "v" is not included
            new_version = int(max(versions)[1:]) + 1

    version_folder = "v{:03d}".format(new_version)

    return version_folder


def _get_fusion_instance():
    fusion = getattr(sys.modules["__main__"], "fusion", None)
    if fusion is None:
        try:
            # Support for FuScript.exe, BlackmagicFusion module for py2 only
            import BlackmagicFusion as bmf
            fusion = bmf.scriptapp("Fusion")
        except ImportError:
            raise RuntimeError("Could not find a Fusion instance")
    return fusion


def _format_filepath(session):

    project = session["AVALON_PROJECT"]
    asset = session["AVALON_ASSET"]

    # Save updated slap comp
    work_path = get_workdir_from_session(session)
    walk_to_dir = os.path.join(work_path, "scenes", "slapcomp")
    slapcomp_dir = os.path.abspath(walk_to_dir)

    # Ensure destination exists
    if not os.path.isdir(slapcomp_dir):
        log.warning("Folder did not exist, creating folder structure")
        os.makedirs(slapcomp_dir)

    # Compute output path
    new_filename = "{}_{}_slapcomp_v001.comp".format(project, asset)
    new_filepath = os.path.join(slapcomp_dir, new_filename)

    # Create new unique filepath
    if os.path.exists(new_filepath):
        new_filepath = version_up(new_filepath)

    return new_filepath


def _update_savers(comp, session):
    """Update all savers of the current comp to ensure the output is correct

    This will refactor the Saver file outputs to the renders of the new
    session that is provided.

    In the case the original saver path had a path set relative to a /fusion/
    folder then that relative path will be matched, with the exception that
    all "version" (e.g. v010) references will be reset to v001. Otherwise
    only a version folder will be computed in the new session's work "render"
    folder to dump the files in, keeping the original filenames.

    Args:
        comp (object): current comp instance
        session (dict): the current Avalon session

    Returns:
        None
    """

    new_work = get_workdir_from_session(session)
    renders = os.path.join(new_work, "renders")
    version_folder = _format_version_folder(renders)
    renders_version = os.path.join(renders, version_folder)

    comp.Print("New renders to: %s\n" % renders)

    with api.comp_lock_and_undo_chunk(comp):
        savers = comp.GetToolList(False, "Saver").values()
        for saver in savers:
            filepath = saver.GetAttrs("TOOLST_Clip_Name")[1.0]

            # Get old relative path to the "fusion" app folder so we can apply
            # the same relative path afterwards. If not found fall back to
            # using just a version folder with the filename in it.
            # todo: can we make this less magical?
            relpath = filepath.replace("\\", "/").rsplit("/fusion/", 1)[-1]

            if os.path.isabs(relpath):
                # If not relative to a "/fusion/" folder then just use filename
                filename = os.path.basename(filepath)
                log.warning("Can't parse relative path, refactoring to only "
                            "filename in a version folder: %s" % filename)
                new_path = os.path.join(renders_version, filename)

            else:
                # Else reuse the relative path
                # Reset version in folder and filename in the relative path
                # to v001. The version is only detected when prefixed
                # with either `_v` (underscore) or `/v` (folder)
                version_pattern = r"(/|_)v[0-9]+"
                if re.search(version_pattern, relpath):
                    new_relpath = re.sub(version_pattern,
                                         r"\1v001",
                                         relpath)
                    log.info("Resetting version folders to v001: "
                             "%s -> %s" % (relpath, new_relpath))
                    relpath = new_relpath

                new_path = os.path.join(new_work, relpath)

            saver["Clip"] = new_path


def update_frame_range(comp, representations):
    """Update the frame range of the comp and render length

    The start and end frame are based on the lowest start frame and the
    highest end frame.

    Args:
        comp (object): current focused comp
        representations (list): collection of dicts

    Returns:
        None

    """

    project_name = legacy_io.active_project()
    version_ids = {r["parent"] for r in representations}
    versions = list(get_versions(project_name, version_ids))

    versions = [v for v in versions
                if v["data"].get("frameStart", None) is not None]

    if not versions:
        log.warning("No versions loaded to match frame range to.\n")
        return

    start = min(v["data"]["frameStart"] for v in versions)
    end = max(v["data"]["frameEnd"] for v in versions)

    lib.update_frame_range(start, end, comp=comp)


def switch(asset_name, filepath=None, new=True):
    """Switch the current containers of the file to the other asset (shot)

    Args:
        asset_name (str): name of the asset (shot)
        filepath (str): file path of the comp file
        new (bool): Save updated comp under a different name

    Returns:
        comp path (str): new filepath of the updated comp

    """

    # If filepath provided, ensure it is valid absolute path
    if filepath is not None:
        if not os.path.isabs(filepath):
            filepath = os.path.abspath(filepath)

        assert os.path.exists(filepath), "%s must exist " % filepath

    # Assert asset name exists
    # It is better to do this here than to wait till switch_shot does it
    project_name = legacy_io.active_project()
    asset = get_asset_by_name(project_name, asset_name)
    assert asset, "Could not find '%s' in the database" % asset_name

    # Go to comp
    if not filepath:
        current_comp = api.get_current_comp()
        assert current_comp is not None, "Could not find current comp"
    else:
        fusion = _get_fusion_instance()
        current_comp = fusion.LoadComp(filepath, quiet=True)
        assert current_comp is not None, (
            "Fusion could not load '{}'").format(filepath)

    host = registered_host()
    containers = list(host.ls())
    assert containers, "Nothing to update"

    representations = []
    for container in containers:
        try:
            representation = lib.switch_item(
                container,
                asset_name=asset_name)
            representations.append(representation)
        except Exception as e:
            current_comp.Print("Error in switching! %s\n" % e.message)

    message = "Switched %i Loaders of the %i\n" % (len(representations),
                                                   len(containers))
    current_comp.Print(message)

    # Build the session to switch to
    switch_to_session = legacy_io.Session.copy()
    switch_to_session["AVALON_ASSET"] = asset['name']

    if new:
        comp_path = _format_filepath(switch_to_session)

        # Update savers output based on new session
        _update_savers(current_comp, switch_to_session)
    else:
        comp_path = version_up(filepath)

    current_comp.Print(comp_path)

    current_comp.Print("\nUpdating frame range")
    update_frame_range(current_comp, representations)

    current_comp.Save(comp_path)

    return comp_path


if __name__ == '__main__':

    # QUESTION: can we convert this to a gui rather than a standalone script?
    # TODO: convert to gui tool
    import argparse

    parser = argparse.ArgumentParser(description="Switch to a shot within an "
                                                 "existing comp file")

    parser.add_argument("--file_path",
                        type=str,
                        default=True,
                        help="File path of the comp to use")

    parser.add_argument("--asset_name",
                        type=str,
                        default=True,
                        help="Name of the asset (shot) to switch")

    args, unknown = parser.parse_args()

    install_host(api)
    switch(args.asset_name, args.file_path)

    sys.exit(0)
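For reference, the version-reset substitution in the removed `_update_savers`
behaves like this (paths are illustrative):

    import re

    version_pattern = r"(/|_)v[0-9]+"
    relpath = "renders/v012/sh010_comp_v012.exr"

    # Both the folder version and the filename version collapse to v001:
    new_relpath = re.sub(version_pattern, r"\1v001", relpath)
    assert new_relpath == "renders/v001/sh010_comp_v001.exr"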

@@ -6,10 +6,10 @@ import csv
 from PIL import Image, ImageDraw, ImageFont

 import openpype.hosts.harmony.api as harmony
-import openpype.api
+from openpype.pipeline import publish


-class ExtractPalette(openpype.api.Extractor):
+class ExtractPalette(publish.Extractor):
     """Extract palette."""

     label = "Extract Palette"
@@ -3,12 +3,11 @@
 import os
 import shutil

-import openpype.api
+from openpype.pipeline import publish
 import openpype.hosts.harmony.api as harmony
-import openpype.hosts.harmony


-class ExtractTemplate(openpype.api.Extractor):
+class ExtractTemplate(publish.Extractor):
     """Extract the connected nodes to the composite instance."""

     label = "Extract Template"
@@ -50,7 +49,7 @@ class ExtractTemplate(publish.Extractor):
         dependencies.remove(instance.data["setMembers"][0])

         # Export template.
-        openpype.hosts.harmony.api.export_template(
+        harmony.export_template(
             unique_backdrops, dependencies, filepath
         )
@@ -4,10 +4,10 @@ import os
 import shutil
 from zipfile import ZipFile

-import openpype.api
+from openpype.pipeline import publish


-class ExtractWorkfile(openpype.api.Extractor):
+class ExtractWorkfile(publish.Extractor):
     """Extract and zip complete workfile folder into zip."""

     label = "Extract Workfile"
@@ -13,14 +13,10 @@ import hiero

 from Qt import QtWidgets

-from openpype.client import (
-    get_project,
-    get_versions,
-    get_last_versions,
-    get_representations,
-)
-from openpype.settings import get_anatomy_settings
+from openpype.client import get_project
+from openpype.settings import get_project_settings
 from openpype.pipeline import legacy_io, Anatomy
+from openpype.pipeline.load import filter_containers
 from openpype.lib import Logger
 from . import tags
@@ -882,8 +878,7 @@ def apply_colorspace_project():
     project.close()

     # get presets for hiero
-    imageio = get_anatomy_settings(
-        project_name)["imageio"].get("hiero", None)
+    imageio = get_project_settings(project_name)["hiero"]["imageio"]
     presets = imageio.get("workfile")

     # save the workfile as subversion "comment:_colorspaceChange"
@@ -936,8 +931,7 @@ def apply_colorspace_clips():
     clips = project.clips()

     # get presets for hiero
-    imageio = get_anatomy_settings(
-        project_name)["imageio"].get("hiero", None)
+    imageio = get_project_settings(project_name)["hiero"]["imageio"]
     from pprint import pprint

     presets = imageio.get("regexInputs", {}).get("inputs", {})
@@ -1055,6 +1049,10 @@ def sync_clip_name_to_data_asset(track_items_list):
             print("asset was changed in clip: {}".format(ti_name))


+def set_track_color(track_item, color):
+    track_item.source().binItem().setColor(color)
+
+
 def check_inventory_versions(track_items=None):
     """
     Actual version color identifier of Loaded containers
@@ -1066,68 +1064,29 @@ def check_inventory_versions(track_items=None):
     """
     from . import parse_container

-    track_item = track_items or get_track_items()
+    track_items = track_items or get_track_items()
     # presets
     clip_color_last = "green"
     clip_color = "red"

-    item_with_repre_id = []
-    repre_ids = set()
+    containers = []
     # Find all containers and collect their node and representation ids
-    for track_item in track_item:
+    for track_item in track_items:
         container = parse_container(track_item)
         if container:
-            repre_id = container["representation"]
-            repre_ids.add(repre_id)
-            item_with_repre_id.append((track_item, repre_id))
+            containers.append(container)

     # Skip if nothing was found
-    if not repre_ids:
+    if not containers:
         return

     project_name = legacy_io.active_project()
-    # Find representations based on found containers
-    repre_docs = get_representations(
-        project_name,
-        repre_ids=repre_ids,
-        fields=["_id", "parent"]
-    )
-    # Store representations by id and collect version ids
-    repre_docs_by_id = {}
-    version_ids = set()
-    for repre_doc in repre_docs:
-        # Use stringed representation id to match value in containers
-        repre_id = str(repre_doc["_id"])
-        repre_docs_by_id[repre_id] = repre_doc
-        version_ids.add(repre_doc["parent"])
+    filter_result = filter_containers(containers, project_name)
+    for container in filter_result.latest:
+        set_track_color(container["_track_item"], clip_color_last)

-    version_docs = get_versions(
-        project_name, version_ids, fields=["_id", "name", "parent"]
-    )
-    # Store versions by id and collect subset ids
-    version_docs_by_id = {}
-    subset_ids = set()
-    for version_doc in version_docs:
-        version_docs_by_id[version_doc["_id"]] = version_doc
-        subset_ids.add(version_doc["parent"])
-
-    # Query last versions based on subset ids
-    last_versions_by_subset_id = get_last_versions(
-        project_name, subset_ids=subset_ids, fields=["_id", "parent"]
-    )
-
-    for item in item_with_repre_id:
-        # Some python versions of nuke can't unfold tuple in for loop
-        track_item, repre_id = item
-
-        repre_doc = repre_docs_by_id[repre_id]
-        version_doc = version_docs_by_id[repre_doc["parent"]]
-        last_version_doc = last_versions_by_subset_id[version_doc["parent"]]
-        # Check if last version is same as current version
-        if version_doc["_id"] == last_version_doc["_id"]:
-            track_item.source().binItem().setColor(clip_color_last)
-        else:
-            track_item.source().binItem().setColor(clip_color)
+    for container in filter_result.outdated:
+        set_track_color(container["_track_item"], clip_color)


 def selection_changed_timeline(event):
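A sketch of how the new `filter_containers` result is consumed, assuming it
groups containers into at least `latest` and `outdated` (the exact result
type comes from `openpype.pipeline.load`):

    project_name = legacy_io.active_project()
    filter_result = filter_containers(containers, project_name)

    for container in filter_result.latest:
        set_track_color(container["_track_item"], "green")  # up to date
    for container in filter_result.outdated:
        set_track_color(container["_track_item"], "red")    # newer exists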

@@ -4,7 +4,7 @@ import sys
 import hiero.core
 from hiero.ui import findMenuAction

-from openpype.api import Logger
+from openpype.lib import Logger
 from openpype.pipeline import legacy_io
 from openpype.tools.utils import host_tools
@@ -251,7 +251,6 @@ def reload_config():
     import importlib

     for module in (
-        "openpype.api",
         "openpype.hosts.hiero.lib",
         "openpype.hosts.hiero.menu",
         "openpype.hosts.hiero.tags"
@@ -8,7 +8,7 @@ import hiero
 from Qt import QtWidgets, QtCore
 import qargparse

-import openpype.api as openpype
+from openpype.settings import get_current_project_settings
 from openpype.lib import Logger
 from openpype.pipeline import LoaderPlugin, LegacyCreator
 from openpype.pipeline.context_tools import get_current_project_asset
@@ -606,7 +606,7 @@ class Creator(LegacyCreator):
     def __init__(self, *args, **kwargs):
         super(Creator, self).__init__(*args, **kwargs)
         import openpype.hosts.hiero.api as phiero
-        self.presets = openpype.get_current_project_settings()[
+        self.presets = get_current_project_settings()[
             "hiero"]["create"].get(self.__class__.__name__, {})

         # adding basic current context hiero objects
@@ -3,7 +3,7 @@ import os
 import hiero

 from openpype.client import get_project, get_assets
-from openpype.api import Logger
+from openpype.lib import Logger
 from openpype.pipeline import legacy_io

 log = Logger.get_logger(__name__)
@@ -1,7 +1,7 @@
 import os
 import hiero

-from openpype.api import Logger
+from openpype.lib import Logger

 log = Logger.get_logger(__name__)
@@ -2,10 +2,11 @@
 import os
 import json
 import pyblish.api
-import openpype
+
+from openpype.pipeline import publish


-class ExtractClipEffects(openpype.api.Extractor):
+class ExtractClipEffects(publish.Extractor):
     """Extract clip effects instances."""

     order = pyblish.api.ExtractorOrder
@@ -1,9 +1,14 @@
 import os
 import pyblish.api
-import openpype

+from openpype.lib import (
+    get_oiio_tools_path,
+    run_subprocess,
+)
+from openpype.pipeline import publish


-class ExtractFrames(openpype.api.Extractor):
+class ExtractFrames(publish.Extractor):
     """Extracts frames"""

     order = pyblish.api.ExtractorOrder
@@ -13,7 +18,7 @@ class ExtractFrames(publish.Extractor):
     movie_extensions = ["mov", "mp4"]

     def process(self, instance):
-        oiio_tool_path = openpype.lib.get_oiio_tools_path()
+        oiio_tool_path = get_oiio_tools_path()
         staging_dir = self.staging_dir(instance)
         output_template = os.path.join(staging_dir, instance.data["name"])
         sequence = instance.context.data["activeTimeline"]
@@ -43,7 +48,7 @@ class ExtractFrames(publish.Extractor):
             args.extend(["--powc", "0.45,0.45,0.45,1.0"])

         args.extend([input_path, "-o", output_path])
-        output = openpype.api.run_subprocess(args)
+        output = run_subprocess(args)

         failed_output = "oiiotool produced no output."
         if failed_output in output:
@@ -1,9 +1,10 @@
 import os
 import pyblish.api
-import openpype.api
+
+from openpype.pipeline import publish


-class ExtractThumnail(openpype.api.Extractor):
+class ExtractThumnail(publish.Extractor):
     """
     Extractor for track item's thumbnails
     """
@@ -1,5 +1,6 @@
 from pyblish import api
-import openpype.api as pype
+
+from openpype.lib import version_up


 class IntegrateVersionUpWorkfile(api.ContextPlugin):
@@ -15,7 +16,7 @@ class IntegrateVersionUpWorkfile(api.ContextPlugin):
     def process(self, context):
         project = context.data["activeProject"]
         path = context.data.get("currentFile")
-        new_path = pype.version_up(path)
+        new_path = version_up(path)

         if project:
             project.saveAs(new_path)
@@ -318,17 +318,15 @@ class PrecollectInstances(pyblish.api.ContextPlugin):

     @staticmethod
     def create_otio_time_range_from_timeline_item_data(track_item):
-        speed = track_item.playbackSpeed()
         timeline = phiero.get_current_sequence()
         frame_start = int(track_item.timelineIn())
-        frame_duration = int((track_item.duration() - 1) / speed)
+        frame_duration = int(track_item.duration())
         fps = timeline.framerate().toFloat()

         return hiero_export.create_otio_time_range(
             frame_start, frame_duration, fps)

-    @staticmethod
-    def collect_sub_track_items(tracks):
+    def collect_sub_track_items(self, tracks):
         """
         Returns dictionary with track index as key and list of subtracks
         """
@@ -337,8 +335,10 @@ class PrecollectInstances(pyblish.api.ContextPlugin):
         for track in tracks:
             items = track.items()

+            effet_items = track.subTrackItems()
+
             # skip if no clips on track > need track with effect only
-            if items:
+            if not effet_items:
                 continue

             # skip all disabled tracks
@@ -346,10 +346,11 @@ class PrecollectInstances(pyblish.api.ContextPlugin):
                 continue

             track_index = track.trackIndex()
-            _sub_track_items = phiero.flatten(track.subTrackItems())
+            _sub_track_items = phiero.flatten(effet_items)

+            _sub_track_items = list(_sub_track_items)
             # continue only if any subtrack items are collected
-            if not list(_sub_track_items):
+            if not _sub_track_items:
                 continue

             enabled_sti = []
@@ -14,7 +14,7 @@ from openpype.pipeline import (
 )
 from openpype.pipeline.load import any_outdated_containers
 from openpype.hosts.houdini import HOUDINI_HOST_DIR
-from openpype.hosts.houdini.api import lib
+from openpype.hosts.houdini.api import lib, shelves

 from openpype.lib import (
     register_event_callback,
@@ -73,6 +73,7 @@ def install():
     # so it initializes into the correct scene FPS, Frame Range, etc.
     # todo: make sure this doesn't trigger when opening with last workfile
     _set_context_settings()
+    shelves.generate_shelves()


 def uninstall():

openpype/hosts/houdini/api/shelves.py (new file, 182 lines)
@@ -0,0 +1,182 @@
import os
import logging
import platform

from openpype.settings import get_project_settings

import hou

log = logging.getLogger("openpype.hosts.houdini.shelves")


def generate_shelves():
    """Generate complete shelves, from shelf sets down to tools, in Houdini
    from the OpenPype project settings Houdini shelf definition.
    """
    current_os = platform.system().lower()

    # load configuration of houdini shelves
    project_settings = get_project_settings(os.getenv("AVALON_PROJECT"))
    shelves_set_config = project_settings["houdini"]["shelves"]

    if not shelves_set_config:
        log.debug("No custom shelves found in project settings.")
        return

    for shelf_set_config in shelves_set_config:
        shelf_set_filepath = shelf_set_config.get('shelf_set_source_path')
        shelf_set_os_filepath = shelf_set_filepath[current_os]
        if shelf_set_os_filepath:
            if not os.path.isfile(shelf_set_os_filepath):
                log.error("Shelf path doesn't exist - "
                          "{}".format(shelf_set_os_filepath))
                continue

            hou.shelves.newShelfSet(file_path=shelf_set_os_filepath)
            continue

        shelf_set_name = shelf_set_config.get('shelf_set_name')
        if not shelf_set_name:
            log.warning("No name found in shelf set definition.")
            continue

        shelves_definition = shelf_set_config.get('shelf_definition')
        if not shelves_definition:
            log.debug(
                "No shelf definition found for shelf set named '{}'".format(
                    shelf_set_name
                )
            )
            continue

        shelf_set = get_or_create_shelf_set(shelf_set_name)
        for shelf_definition in shelves_definition:
            shelf_name = shelf_definition.get('shelf_name')
            if not shelf_name:
                log.warning("No name found in shelf definition.")
                continue

            shelf = get_or_create_shelf(shelf_name)

            if not shelf_definition.get('tools_list'):
                log.debug(
                    "No tool definition found for shelf named {}".format(
                        shelf_name
                    )
                )
                continue

            mandatory_attributes = {'name', 'script'}
            for tool_definition in shelf_definition.get('tools_list'):
                # We verify that the name and script attributes of the tool
                # are set
                if not all(
                    tool_definition[key] for key in mandatory_attributes
                ):
                    log.warning(
                        "You need to specify at least the name and the "
                        "script path of the tool.")
                    continue

                tool = get_or_create_tool(tool_definition, shelf)

                if not tool:
                    continue

                # Add the tool to the shelf if not already in it
                if tool not in shelf.tools():
                    shelf.setTools(list(shelf.tools()) + [tool])

            # Add the shelf in the shelf set if not already in it
            if shelf not in shelf_set.shelves():
                shelf_set.setShelves(shelf_set.shelves() + (shelf,))


def get_or_create_shelf_set(shelf_set_label):
    """Verify if the shelf set label exists. If not, create a new shelf set.

    Arguments:
        shelf_set_label (str): The label of the shelf set

    Returns:
        hou.ShelfSet: The existing shelf set or the new one
    """
    all_shelves_sets = hou.shelves.shelfSets().values()

    shelf_set = next((shelf for shelf in all_shelves_sets if
                      shelf.label() == shelf_set_label), None)
    if shelf_set:
        return shelf_set

    shelf_set_name = shelf_set_label.replace(' ', '_').lower()
    new_shelf_set = hou.shelves.newShelfSet(
        name=shelf_set_name,
        label=shelf_set_label
    )
    return new_shelf_set


def get_or_create_shelf(shelf_label):
    """Verify if the shelf label exists. If not, create a new shelf.

    Arguments:
        shelf_label (str): The label of the shelf

    Returns:
        hou.Shelf: The existing shelf or the new one
    """
    all_shelves = hou.shelves.shelves().values()

    shelf = next((s for s in all_shelves if s.label() == shelf_label), None)
    if shelf:
        return shelf

    shelf_name = shelf_label.replace(' ', '_').lower()
    new_shelf = hou.shelves.newShelf(
        name=shelf_name,
        label=shelf_label
    )
    return new_shelf


def get_or_create_tool(tool_definition, shelf):
    """Verify if the tool exists and update it. If not, create a new one.

    Arguments:
        tool_definition (dict): Dict with label, script, icon and help
        shelf (hou.Shelf): The parent shelf of the tool

    Returns:
        hou.Tool: The updated tool or the new one
    """
    existing_tools = shelf.tools()
    tool_label = tool_definition.get('label')

    existing_tool = next(
        (tool for tool in existing_tools if tool.label() == tool_label),
        None
    )
    if existing_tool:
        tool_definition.pop('name', None)
        tool_definition.pop('label', None)
        existing_tool.setData(**tool_definition)
        return existing_tool

    tool_name = tool_label.replace(' ', '_').lower()

    if not os.path.exists(tool_definition['script']):
        log.warning(
            "This path doesn't exist - {}".format(tool_definition['script'])
        )
        return

    with open(tool_definition['script']) as f:
        script = f.read()
        tool_definition.update({'script': script})

    new_tool = hou.shelves.newTool(name=tool_name, **tool_definition)

    return new_tool
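A sketch of the settings structure `generate_shelves` consumes, with
illustrative values (the authoritative schema lives in the OpenPype project
settings for Houdini):

    shelves_settings = [
        {
            # Either point at an existing shelf file per platform...
            "shelf_set_source_path": {
                "windows": "", "linux": "", "darwin": ""
            },
            # ...or describe the shelf set inline:
            "shelf_set_name": "OpenPype Tools",
            "shelf_definition": [
                {
                    "shelf_name": "Rendering",
                    "tools_list": [
                        {
                            "name": "submit_render",  # mandatory
                            "label": "Submit Render",
                            # mandatory; read and inlined at creation:
                            "script": "/pipeline/tools/submit_render.py",
                        }
                    ],
                }
            ],
        }
    ]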

@@ -1,11 +1,10 @@
 import os
+import re

-import clique
 from openpype.pipeline import (
     load,
     get_representation_path,
 )

 from openpype.hosts.houdini.api import pipeline
@@ -20,7 +19,6 @@ class AssLoader(load.LoaderPlugin):
     color = "orange"

     def load(self, context, name=None, namespace=None, data=None):
-        import hou

         # Get the root node
@@ -32,7 +30,11 @@ class AssLoader(load.LoaderPlugin):

         # Create a new geo node
         procedural = obj.createNode("arnold::procedural", node_name=node_name)
-        procedural.setParms({"ar_filename": self.get_path(self.fname)})
+
+        procedural.setParms(
+            {
+                "ar_filename": self.format_path(context["representation"])
+            })

         nodes = [procedural]
         self[:] = nodes
@@ -46,62 +48,43 @@ class AssLoader(load.LoaderPlugin):
             suffix="",
         )

-    def get_path(self, path):
-
-        # Find all frames in the folder
-        ext = ".ass.gz" if path.endswith(".ass.gz") else ".ass"
-        folder = os.path.dirname(path)
-        frames = [f for f in os.listdir(folder) if f.endswith(ext)]
-
-        # Get the collection of frames to detect frame padding
-        patterns = [clique.PATTERNS["frames"]]
-        collections, remainder = clique.assemble(frames,
-                                                 minimum_items=1,
-                                                 patterns=patterns)
-        self.log.debug("Detected collections: {}".format(collections))
-        self.log.debug("Detected remainder: {}".format(remainder))
-
-        if not collections and remainder:
-            if len(remainder) != 1:
-                raise ValueError("Frames not correctly detected "
-                                 "in: {}".format(remainder))
-
-            # A single frame without frame range detected
-            filepath = remainder[0]
-            return os.path.normpath(filepath).replace("\\", "/")
-
-        # Frames detected with a valid "frame" number pattern
-        # Then we don't want to have any remainder files found
-        assert len(collections) == 1 and not remainder
-        collection = collections[0]
-
-        num_frames = len(collection.indexes)
-        if num_frames == 1:
-            # Return the input path without dynamic $F variable
-            result = path
-        else:
-            # More than a single frame detected - use $F{padding}
-            fname = "{}$F{}{}".format(collection.head,
-                                      collection.padding,
-                                      collection.tail)
-            result = os.path.join(folder, fname)
-
-        # Format file name, Houdini only wants forward slashes
-        return os.path.normpath(result).replace("\\", "/")
-
     def update(self, container, representation):

         # Update the file path
-        file_path = get_representation_path(representation)
-        file_path = file_path.replace("\\", "/")
-
         procedural = container["node"]
-        procedural.setParms({"ar_filename": self.get_path(file_path)})
+        procedural.setParms({"ar_filename": self.format_path(representation)})

         # Update attribute
         procedural.setParms({"representation": str(representation["_id"])})

     def remove(self, container):

         node = container["node"]
         node.destroy()

+    @staticmethod
+    def format_path(representation):
+        """Format file path correctly for single ass.* or ass.* sequence.
+
+        Args:
+            representation (dict): representation to be loaded.
+
+        Returns:
+            str: Formatted path to be used by the input node.
+
+        """
+        path = get_representation_path(representation)
+        if not os.path.exists(path):
+            raise RuntimeError("Path does not exist: {}".format(path))
+
+        is_sequence = bool(representation["context"].get("frame"))
+        # The path is either a single file or sequence in a folder.
+        if is_sequence:
+            dir_path, file_name = os.path.split(path)
+            path = os.path.join(
+                dir_path,
+                re.sub(r"(.*)\.(\d+)\.(ass.*)", "\\1.$F4.\\3", file_name)
+            )
+
+        return os.path.normpath(path).replace("\\", "/")
+
     def switch(self, container, representation):
         self.update(container, representation)
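The `$F4` substitution in `format_path` is worth a concrete example (the
file name is hypothetical):

    import re

    file_name = "char_pointcache_v003.1001.ass.gz"

    # The frame digits between the dots become Houdini's $F4 token while the
    # extension (including .gz) stays intact:
    result = re.sub(r"(.*)\.(\d+)\.(ass.*)", "\\1.$F4.\\3", file_name)
    assert result == "char_pointcache_v003.$F4.ass.gz"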

Some files were not shown because too many files have changed in this diff.