Merge branch 'develop' into bugfix/stdout_stderr_application_launch

commit a0beeddfd7
Author: iLLiCiTiT
Date:   2021-07-21 16:14:06 +02:00
187 changed files with 6231 additions and 1793 deletions


@@ -43,10 +43,10 @@ jobs:
         uses: heinrichreimer/github-changelog-generator-action@v2.2
         with:
           token: ${{ secrets.ADMIN_TOKEN }}
-          breakingLabel: '#### 💥 Breaking'
-          enhancementLabel: '#### 🚀 Enhancements'
-          bugsLabel: '#### 🐛 Bug fixes'
-          deprecatedLabel: '#### ⚠️ Deprecations'
+          breakingLabel: '**💥 Breaking**'
+          enhancementLabel: '**🚀 Enhancements**'
+          bugsLabel: '**🐛 Bug fixes**'
+          deprecatedLabel: '**⚠️ Deprecations**'
           addSections: '{"documentation":{"prefix":"### 📖 Documentation","labels":["documentation"]},"tests":{"prefix":"### ✅ Testing","labels":["tests"]}}'
           issues: false
           issuesWoLabels: false


@@ -1,9 +1,9 @@
 name: Stable Release
 on:
-  push:
-    tags:
-      - '*[0-9].*[0-9].*[0-9]*'
+  release:
+    types:
+      - prereleased
 jobs:
   create_release:
@@ -23,35 +23,26 @@ jobs:
       - name: Install Python requirements
        run: pip install gitpython semver
-      - name: Set env
-        run: |
-          echo "RELEASE_VERSION=${GITHUB_REF#refs/*/}" >> $GITHUB_ENV
-          git config user.email ${{ secrets.CI_EMAIL }}
-          git config user.name ${{ secrets.CI_USER }}
-          git fetch
-          git checkout -b main origin/main
-          git tag -d ${GITHUB_REF#refs/*/}
-          git remote set-url --push origin https://pypebot:${{ secrets.ADMIN_TOKEN }}@github.com/pypeclub/openpype
-          git push origin --delete ${GITHUB_REF#refs/*/}
-          echo PREVIOUS_VERSION=`git describe --tags --match="[0-9]*" --abbrev=0` >> $GITHUB_ENV
       - name: 💉 Inject new version into files
        id: version
-       if: steps.version_type.outputs.type != 'skip'
        run: |
-          python ./tools/ci_tools.py --version ${{ env.RELEASE_VERSION }}
+          echo ::set-output name=current_version::${GITHUB_REF#refs/*/}
+          RESULT=$(python ./tools/ci_tools.py --finalize ${GITHUB_REF#refs/*/})
+          LASTRELEASE=$(python ./tools/ci_tools.py --lastversion release)
+          echo ::set-output name=last_release::$LASTRELEASE
+          echo ::set-output name=release_tag::$RESULT
       - name: "✏️ Generate full changelog"
-       if: steps.version_type.outputs.type != 'skip'
+       if: steps.version.outputs.release_tag != 'skip'
        id: generate-full-changelog
        uses: heinrichreimer/github-changelog-generator-action@v2.2
        with:
          token: ${{ secrets.ADMIN_TOKEN }}
-         breakingLabel: '#### 💥 Breaking'
-         enhancementLabel: '#### 🚀 Enhancements'
-         bugsLabel: '#### 🐛 Bug fixes'
-         deprecatedLabel: '#### ⚠️ Deprecations'
+         breakingLabel: '**💥 Breaking**'
+         enhancementLabel: '**🚀 Enhancements**'
+         bugsLabel: '**🐛 Bug fixes**'
+         deprecatedLabel: '**⚠️ Deprecations**'
          addSections: '{"documentation":{"prefix":"### 📖 Documentation","labels":["documentation"]},"tests":{"prefix":"### ✅ Testing","labels":["tests"]}}'
          issues: false
          issuesWoLabels: false
@@ -64,39 +55,76 @@ jobs:
          compareLink: true
          stripGeneratorNotice: true
          verbose: true
-         futureRelease: ${{ env.RELEASE_VERSION }}
+         futureRelease: ${{ steps.version.outputs.release_tag }}
          excludeTagsRegex: "CI/.+"
          releaseBranch: "main"
-      - name: "🖨️ Print changelog to console"
-        run: echo ${{ steps.generate-last-changelog.outputs.changelog }}
       - name: 💾 Commit and Tag
        id: git_commit
-       if: steps.version_type.outputs.type != 'skip'
+       if: steps.version.outputs.release_tag != 'skip'
        run: |
+         git config user.email ${{ secrets.CI_EMAIL }}
+         git config user.name ${{ secrets.CI_USER }}
          git add .
          git commit -m "[Automated] Release"
-         tag_name="${{ env.RELEASE_VERSION }}"
-         git push
-         git tag -fa $tag_name -m "stable release"
-         git remote set-url --push origin https://pypebot:${{ secrets.ADMIN_TOKEN }}@github.com/pypeclub/openpype
-         git push origin $tag_name
-      - name: "🚀 Github Release"
-        uses: docker://antonyurchenko/git-release:latest
-        env:
-          GITHUB_TOKEN: ${{ secrets.ADMIN_TOKEN }}
-          DRAFT_RELEASE: "false"
-          PRE_RELEASE: "false"
-          CHANGELOG_FILE: "CHANGELOG.md"
-          ALLOW_EMPTY_CHANGELOG: "false"
-          ALLOW_TAG_PREFIX: "true"
-      - name: 🔨 Merge main back to develop
+         tag_name="${{ steps.version.outputs.release_tag }}"
+         git tag -a $tag_name -m "stable release"
+      - name: 🔏 Push to protected main branch
+       if: steps.version.outputs.release_tag != 'skip'
+       uses: CasperWA/push-protected@v2
+       with:
+         token: ${{ secrets.ADMIN_TOKEN }}
+         branch: main
+         tags: true
+         unprotect_reviews: true
+      - name: "✏️ Generate last changelog"
+       if: steps.version.outputs.release_tag != 'skip'
+       id: generate-last-changelog
+       uses: heinrichreimer/github-changelog-generator-action@v2.2
+       with:
+         token: ${{ secrets.ADMIN_TOKEN }}
+         breakingLabel: '**💥 Breaking**'
+         enhancementLabel: '**🚀 Enhancements**'
+         bugsLabel: '**🐛 Bug fixes**'
+         deprecatedLabel: '**⚠️ Deprecations**'
+         addSections: '{"documentation":{"prefix":"### 📖 Documentation","labels":["documentation"]},"tests":{"prefix":"### ✅ Testing","labels":["tests"]}}'
+         issues: false
+         issuesWoLabels: false
+         sinceTag: ${{ steps.version.outputs.last_release }}
+         maxIssues: 100
+         pullRequests: true
+         prWoLabels: false
+         author: false
+         unreleased: true
+         compareLink: true
+         stripGeneratorNotice: true
+         verbose: true
+         futureRelease: ${{ steps.version.outputs.release_tag }}
+         excludeTagsRegex: "CI/.+"
+         releaseBranch: "main"
+         stripHeaders: true
+         base: 'none'
+      - name: 🚀 Github Release
+       if: steps.version.outputs.release_tag != 'skip'
+       uses: ncipollo/release-action@v1
+       with:
+         body: ${{ steps.generate-last-changelog.outputs.changelog }}
+         tag: ${{ steps.version.outputs.release_tag }}
+         token: ${{ secrets.ADMIN_TOKEN }}
+      - name: ☠ Delete Pre-release
+       if: steps.version.outputs.release_tag != 'skip'
+       uses: cb80/delrel@latest
+       with:
+         tag: "${{ steps.version.outputs.current_version }}"
+      - name: 🔁 Merge main back to develop
+       if: steps.version.outputs.release_tag != 'skip'
        uses: everlytic/branch-merge@1.1.0
-       if: steps.version_type.outputs.type != 'skip'
        with:
          github_token: ${{ secrets.ADMIN_TOKEN }}
          source_ref: 'main'

.gitignore

@@ -97,4 +97,5 @@ website/.docusaurus
 # Poetry
 ########
 .poetry/
+.python-version


@@ -1,22 +1,131 @@
 # Changelog
-## [3.1.0-nightly.2](https://github.com/pypeclub/OpenPype/tree/HEAD)
-[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.0.0...HEAD)
-#### 🚀 Enhancements
-- Nuke - Publish simplification [\#1653](https://github.com/pypeclub/OpenPype/pull/1653)
-- \#1333 - added tooltip hints to Pyblish buttons [\#1649](https://github.com/pypeclub/OpenPype/pull/1649)
-#### 🐛 Bug fixes
-- Mac launch arguments fix [\#1660](https://github.com/pypeclub/OpenPype/pull/1660)
-- Fix missing dbm python module [\#1652](https://github.com/pypeclub/OpenPype/pull/1652)
-- Transparent branches in view on Mac [\#1648](https://github.com/pypeclub/OpenPype/pull/1648)
-- Add asset on task item [\#1646](https://github.com/pypeclub/OpenPype/pull/1646)
-- Project manager save and queue [\#1645](https://github.com/pypeclub/OpenPype/pull/1645)
-- New project anatomy values [\#1644](https://github.com/pypeclub/OpenPype/pull/1644)
+## [3.3.0-nightly.3](https://github.com/pypeclub/OpenPype/tree/HEAD)
+
+[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.2.0...HEAD)
+
+**🚀 Enhancements**
+
+- nuke: settings create missing default subsets [\#1829](https://github.com/pypeclub/OpenPype/pull/1829)
+- Update poetry lock [\#1823](https://github.com/pypeclub/OpenPype/pull/1823)
+- Settings: settings for plugins [\#1819](https://github.com/pypeclub/OpenPype/pull/1819)
+- Nuke: ftrack family plugin settings preset [\#1805](https://github.com/pypeclub/OpenPype/pull/1805)
+- Maya: Deadline custom settings [\#1797](https://github.com/pypeclub/OpenPype/pull/1797)
+
+**🐛 Bug fixes**
+
+- nuke: write render node skipped with crop [\#1836](https://github.com/pypeclub/OpenPype/pull/1836)
+- Project folder structure overrides [\#1813](https://github.com/pypeclub/OpenPype/pull/1813)
+- Maya: fix yeti settings path in extractor [\#1809](https://github.com/pypeclub/OpenPype/pull/1809)
+- Houdini colector formatting keys fix [\#1802](https://github.com/pypeclub/OpenPype/pull/1802)
+
+**Merged pull requests:**
+
+- PS, AE - send actual context when another webserver is running [\#1811](https://github.com/pypeclub/OpenPype/pull/1811)
+
+## [3.2.0](https://github.com/pypeclub/OpenPype/tree/3.2.0) (2021-07-13)
+
+[Full Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.2.0-nightly.7...3.2.0)
+
+**🚀 Enhancements**
+
+- Standalone publisher last project [\#1799](https://github.com/pypeclub/OpenPype/pull/1799)
+- Ftrack Multiple notes as server action [\#1795](https://github.com/pypeclub/OpenPype/pull/1795)
+- Settings conditional dict [\#1777](https://github.com/pypeclub/OpenPype/pull/1777)
+- Settings application use python 2 only where needed [\#1776](https://github.com/pypeclub/OpenPype/pull/1776)
+- Settings UI copy/paste [\#1769](https://github.com/pypeclub/OpenPype/pull/1769)
+- Workfile tool widths [\#1766](https://github.com/pypeclub/OpenPype/pull/1766)
+- Push hierarchical attributes care about task parent changes [\#1763](https://github.com/pypeclub/OpenPype/pull/1763)
+- Application executables with environment variables [\#1757](https://github.com/pypeclub/OpenPype/pull/1757)
+- Deadline: Nuke submission additional attributes [\#1756](https://github.com/pypeclub/OpenPype/pull/1756)
+- Settings schema without prefill [\#1753](https://github.com/pypeclub/OpenPype/pull/1753)
+- Settings Hosts enum [\#1739](https://github.com/pypeclub/OpenPype/pull/1739)
+- Validate containers settings [\#1736](https://github.com/pypeclub/OpenPype/pull/1736)
+- PS - added loader from sequence [\#1726](https://github.com/pypeclub/OpenPype/pull/1726)
+- Toggle Ftrack upload in StandalonePublisher [\#1708](https://github.com/pypeclub/OpenPype/pull/1708)
+
+**🐛 Bug fixes**
+
+- nuke: fixing wrong name of family folder when `used existing frames` [\#1803](https://github.com/pypeclub/OpenPype/pull/1803)
+- Collect ftrack family bugs [\#1801](https://github.com/pypeclub/OpenPype/pull/1801)
+- Invitee email can be None which break the Ftrack commit. [\#1788](https://github.com/pypeclub/OpenPype/pull/1788)
+- Fix: staging and `--use-version` option [\#1786](https://github.com/pypeclub/OpenPype/pull/1786)
+- Otio unrelated error on import [\#1782](https://github.com/pypeclub/OpenPype/pull/1782)
+- FFprobe streams order [\#1775](https://github.com/pypeclub/OpenPype/pull/1775)
+- Fix - single file files are str only, cast it to list to count properly [\#1772](https://github.com/pypeclub/OpenPype/pull/1772)
+- Environments in app executable for MacOS [\#1768](https://github.com/pypeclub/OpenPype/pull/1768)
+- Project specific environments [\#1767](https://github.com/pypeclub/OpenPype/pull/1767)
+- Settings UI with refresh button [\#1764](https://github.com/pypeclub/OpenPype/pull/1764)
+- Standalone publisher thumbnail extractor fix [\#1761](https://github.com/pypeclub/OpenPype/pull/1761)
+- Anatomy others templates don't cause crash [\#1758](https://github.com/pypeclub/OpenPype/pull/1758)
+- Backend acre module commit update [\#1745](https://github.com/pypeclub/OpenPype/pull/1745)
+- hiero: precollect instances failing when audio selected [\#1743](https://github.com/pypeclub/OpenPype/pull/1743)
+- Hiero: creator instance error [\#1742](https://github.com/pypeclub/OpenPype/pull/1742)
+- Nuke: fixing render creator for no selection format failing [\#1741](https://github.com/pypeclub/OpenPype/pull/1741)
+- StandalonePublisher: failing collector for editorial [\#1738](https://github.com/pypeclub/OpenPype/pull/1738)
+- Local settings UI crash on missing defaults [\#1737](https://github.com/pypeclub/OpenPype/pull/1737)
+- TVPaint white background on thumbnail [\#1735](https://github.com/pypeclub/OpenPype/pull/1735)
+
+**Merged pull requests:**
+
+- Build: don't add Poetry to `PATH` [\#1808](https://github.com/pypeclub/OpenPype/pull/1808)
+- Bump prismjs from 1.23.0 to 1.24.0 in /website [\#1773](https://github.com/pypeclub/OpenPype/pull/1773)
+- Bc/fix/docs [\#1771](https://github.com/pypeclub/OpenPype/pull/1771)
+- TVPaint ftrack family [\#1755](https://github.com/pypeclub/OpenPype/pull/1755)
+
+## [2.18.4](https://github.com/pypeclub/OpenPype/tree/2.18.4) (2021-06-24)
+
+[Full Changelog](https://github.com/pypeclub/OpenPype/compare/2.18.3...2.18.4)
+
+**Merged pull requests:**
+
+- celaction fixes [\#1754](https://github.com/pypeclub/OpenPype/pull/1754)
+- celaciton: audio subset changed data structure [\#1750](https://github.com/pypeclub/OpenPype/pull/1750)
+
+## [2.18.3](https://github.com/pypeclub/OpenPype/tree/2.18.3) (2021-06-23)
+
+[Full Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.2.0-nightly.2...2.18.3)
+
+**🐛 Bug fixes**
+
+- Tools names forwards compatibility [\#1727](https://github.com/pypeclub/OpenPype/pull/1727)
+
+**⚠️ Deprecations**
+
+- global: removing obsolete ftrack validator plugin [\#1710](https://github.com/pypeclub/OpenPype/pull/1710)
+
+## [2.18.2](https://github.com/pypeclub/OpenPype/tree/2.18.2) (2021-06-16)
+
+[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.1.0...2.18.2)
+
+**🐛 Bug fixes**
+
+- Maya: Extract review hotfix - 2.x backport [\#1713](https://github.com/pypeclub/OpenPype/pull/1713)
+
+**Merged pull requests:**
+
+- 1698 Nuke: Prerender Frame Range by default [\#1709](https://github.com/pypeclub/OpenPype/pull/1709)
+
+## [3.1.0](https://github.com/pypeclub/OpenPype/tree/3.1.0) (2021-06-15)
+
+[Full Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.1.0-nightly.4...3.1.0)
+
+**🚀 Enhancements**
+
+- Log Viewer with OpenPype style [\#1703](https://github.com/pypeclub/OpenPype/pull/1703)
+- Scrolling in OpenPype info widget [\#1702](https://github.com/pypeclub/OpenPype/pull/1702)
+- OpenPype style in modules [\#1694](https://github.com/pypeclub/OpenPype/pull/1694)
+- Sort applications and tools alphabetically in Settings UI [\#1689](https://github.com/pypeclub/OpenPype/pull/1689)
+
+**🐛 Bug fixes**
+
+- Nuke: broken publishing rendered frames [\#1707](https://github.com/pypeclub/OpenPype/pull/1707)
+- Standalone publisher Thumbnail export args [\#1705](https://github.com/pypeclub/OpenPype/pull/1705)
+- Bad zip can break OpenPype start [\#1691](https://github.com/pypeclub/OpenPype/pull/1691)
+
+**Merged pull requests:**
+
+- update dependencies [\#1697](https://github.com/pypeclub/OpenPype/pull/1697)
+
 # Changelog


@@ -657,7 +657,7 @@ class BootstrapRepos:
         ]
         # remove duplicates
-        openpype_versions = list(set(openpype_versions))
+        openpype_versions = sorted(list(set(openpype_versions)))
         return openpype_versions
@@ -972,8 +972,12 @@ class BootstrapRepos:
                   "openpype/version.py") as version_file:
             zip_version = {}
             exec(version_file.read(), zip_version)
-            version_check = OpenPypeVersion(
-                version=zip_version["__version__"])
+            try:
+                version_check = OpenPypeVersion(
+                    version=zip_version["__version__"])
+            except ValueError as e:
+                self._print(str(e), True)
+                return False
             version_main = version_check.get_main_version()  # noqa: E501
             detected_main = detected_version.get_main_version()  # noqa: E501


@@ -15,6 +15,9 @@ from .pype_commands import PypeCommands
               expose_value=False, help="use specified version")
 @click.option("--use-staging", is_flag=True,
               expose_value=False, help="use staging variants")
+@click.option("--list-versions", is_flag=True, expose_value=False,
+              help=("list all detected versions. Use With `--use-staging "
+                    "to list staging versions."))
 def main(ctx):
     """Pype is main command serving as entry point to pipeline system.
@@ -115,7 +118,9 @@ def extractenvironments(output_json_path, project, asset, task, app):
 @main.command()
 @click.argument("paths", nargs=-1)
 @click.option("-d", "--debug", is_flag=True, help="Print debug messages")
-def publish(debug, paths):
+@click.option("-t", "--targets", help="Targets module", default=None,
+              multiple=True)
+def publish(debug, paths, targets):
     """Start CLI publishing.
     Publish collects json from paths provided as an argument.
@@ -123,7 +128,7 @@ def publish(debug, paths):
     """
     if debug:
         os.environ['OPENPYPE_DEBUG'] = '3'
-    PypeCommands.publish(list(paths))
+    PypeCommands.publish(list(paths), targets)
 @main.command()
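A minimal sketch (standalone script, hypothetical invocation) of how the new option behaves: with `multiple=True`, Click lets the flag be repeated and hands the callback a tuple.

```python
import click


@click.command()
@click.argument("paths", nargs=-1)
@click.option("-t", "--targets", help="Targets module", default=None,
              multiple=True)
def publish(paths, targets):
    # repeated flags arrive as a tuple, e.g. ("deadline", "ftrack")
    click.echo("paths={}, targets={}".format(list(paths), list(targets)))


if __name__ == "__main__":
    # e.g.: python demo.py job.json -t deadline -t ftrack
    publish()
```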


@@ -0,0 +1,61 @@
+from avalon import api
+import pyblish.api
+import openpype.api
+from avalon import aftereffects
+
+
+class ValidateInstanceAssetRepair(pyblish.api.Action):
+    """Repair the instance asset with value from Context."""
+
+    label = "Repair"
+    icon = "wrench"
+    on = "failed"
+
+    def process(self, context, plugin):
+        # Get the errored instances
+        failed = []
+        for result in context.data["results"]:
+            if (result["error"] is not None and result["instance"] is not None
+                    and result["instance"] not in failed):
+                failed.append(result["instance"])
+
+        # Apply pyblish.logic to get the instances for the plug-in
+        instances = pyblish.api.instances_by_plugin(failed, plugin)
+        stub = aftereffects.stub()
+        for instance in instances:
+            data = stub.read(instance[0])
+            data["asset"] = api.Session["AVALON_ASSET"]
+            stub.imprint(instance[0], data)
+
+
+class ValidateInstanceAsset(pyblish.api.InstancePlugin):
+    """Validate the instance asset is the current selected context asset.
+
+    As it might happen that multiple worfiles are opened at same time,
+    switching between them would mess with selected context. (From Launcher
+    or Ftrack).
+
+    In that case outputs might be output under wrong asset!
+
+    Repair action will use Context asset value (from Workfiles or Launcher)
+    Closing and reopening with Workfiles will refresh Context value.
+    """
+
+    label = "Validate Instance Asset"
+    hosts = ["aftereffects"]
+    actions = [ValidateInstanceAssetRepair]
+    order = openpype.api.ValidateContentsOrder
+
+    def process(self, instance):
+        instance_asset = instance.data["asset"]
+        current_asset = api.Session["AVALON_ASSET"]
+        msg = (
+            f"Instance asset {instance_asset} is not the same "
+            f"as current context {current_asset}. PLEASE DO:\n"
+            f"Repair with 'A' action to use '{current_asset}'.\n"
+            f"If that's not correct value, close workfile and "
+            f"reopen via Workfiles!"
+        )
+        assert instance_asset == current_asset, msg


@@ -1,3 +1,4 @@
+from copy import deepcopy
 import openpype.hosts.hiero.api as phiero
 # from openpype.hosts.hiero.api import plugin, lib
 # reload(lib)
@@ -206,20 +207,24 @@ class CreateShotClip(phiero.Creator):
     presets = None
     def process(self):
+        # Creator copy of object attributes that are modified during `process`
+        presets = deepcopy(self.presets)
+        gui_inputs = deepcopy(self.gui_inputs)
+
         # get key pares from presets and match it on ui inputs
-        for k, v in self.gui_inputs.items():
+        for k, v in gui_inputs.items():
             if v["type"] in ("dict", "section"):
                 # nested dictionary (only one level allowed
                 # for sections and dict)
                 for _k, _v in v["value"].items():
-                    if self.presets.get(_k):
-                        self.gui_inputs[k][
-                            "value"][_k]["value"] = self.presets[_k]
-            if self.presets.get(k):
-                self.gui_inputs[k]["value"] = self.presets[k]
+                    if presets.get(_k):
+                        gui_inputs[k][
+                            "value"][_k]["value"] = presets[_k]
+            if presets.get(k):
+                gui_inputs[k]["value"] = presets[k]
         # open widget for plugins inputs
-        widget = self.widget(self.gui_name, self.gui_info, self.gui_inputs)
+        widget = self.widget(self.gui_name, self.gui_info, gui_inputs)
         widget.exec_()
         if len(self.selected) < 1:


@@ -41,16 +41,10 @@ class PrecollectInstances(pyblish.api.ContextPlugin):
         # process all sellected timeline track items
         for track_item in selected_timeline_items:
             data = {}
             clip_name = track_item.name()
             source_clip = track_item.source()
-            self.log.debug("clip_name: {}".format(clip_name))
-
-            # get clips subtracks and anotations
-            annotations = self.clip_annotations(source_clip)
-            subtracks = self.clip_subtrack(track_item)
-            self.log.debug("Annotations: {}".format(annotations))
-            self.log.debug(">> Subtracks: {}".format(subtracks))
             # get openpype tag data
             tag_data = phiero.get_track_item_pype_data(track_item)
@@ -62,6 +56,12 @@ class PrecollectInstances(pyblish.api.ContextPlugin):
             if tag_data.get("id") != "pyblish.avalon.instance":
                 continue
+            # get clips subtracks and anotations
+            annotations = self.clip_annotations(source_clip)
+            subtracks = self.clip_subtrack(track_item)
+            self.log.debug("Annotations: {}".format(annotations))
+            self.log.debug(">> Subtracks: {}".format(subtracks))
+
             # solve handles length
             tag_data["handleStart"] = min(
                 tag_data["handleStart"], int(track_item.handleInLength()))
@@ -128,7 +128,7 @@ class PrecollectInstances(pyblish.api.ContextPlugin):
                 "_ instance.data: {}".format(pformat(instance.data)))
             if not with_audio:
-                return
+                continue
             # create audio subset instance
             self.create_audio_instance(context, **data)


@@ -56,7 +56,7 @@ class CollectInstances(pyblish.api.ContextPlugin):
         # Create nice name if the instance has a frame range.
         label = data.get("name", node.name())
         if "frameStart" in data and "frameEnd" in data:
-            frames = "[{startFrame} - {endFrame}]".format(**data)
+            frames = "[{frameStart} - {frameEnd}]".format(**data)
             label = "{} {}".format(label, frames)
         instance = context.create_instance(label)


@@ -72,7 +72,7 @@ class ExtractPlayblast(openpype.api.Extractor):
         # Isolate view is requested by having objects in the set besides a
         # camera.
-        if preset.pop("isolate_view", False) or instance.data.get("isolate"):
+        if preset.pop("isolate_view", False) and instance.data.get("isolate"):
             preset["isolate"] = instance.data["setMembers"]
         # Show/Hide image planes on request.


@@ -75,7 +75,7 @@ class ExtractThumbnail(openpype.api.Extractor):
         # Isolate view is requested by having objects in the set besides a
         # camera.
-        if preset.pop("isolate_view", False) or instance.data.get("isolate"):
+        if preset.pop("isolate_view", False) and instance.data.get("isolate"):
             preset["isolate"] = instance.data["setMembers"]
         with maintained_time():


@@ -133,10 +133,10 @@ class ExtractYetiRig(openpype.api.Extractor):
         image_search_path = resources_dir = instance.data["resourcesDir"]
         settings = instance.data.get("rigsettings", None)
-        if settings:
-            settings["imageSearchPath"] = image_search_path
-            with open(settings_path, "w") as fp:
-                json.dump(settings, fp, ensure_ascii=False)
+        assert settings, "Yeti rig settings were not collected."
+        settings["imageSearchPath"] = image_search_path
+        with open(settings_path, "w") as fp:
+            json.dump(settings, fp, ensure_ascii=False)
         # add textures to transfers
         if 'transfers' not in instance.data:
@@ -192,12 +192,12 @@ class ExtractYetiRig(openpype.api.Extractor):
                 'stagingDir': dirname
             }
         )
-        self.log.info("settings file: {}".format(settings))
+        self.log.info("settings file: {}".format(settings_path))
         instance.data["representations"].append(
             {
                 'name': 'rigsettings',
                 'ext': 'rigsettings',
-                'files': os.path.basename(settings),
+                'files': os.path.basename(settings_path),
                 'stagingDir': dirname
             }
         )


@@ -286,7 +286,8 @@ def add_button_write_to_read(node):
     node.addKnob(knob)
-def create_write_node(name, data, input=None, prenodes=None, review=True):
+def create_write_node(name, data, input=None, prenodes=None,
+                      review=True, linked_knobs=None):
     ''' Creating write node which is group node
     Arguments:
@@ -390,13 +391,14 @@ def create_write_node(name, data, input=None, prenodes=None, review=True):
     if prenodes:
         for node in prenodes:
             # get attributes
-            name = node["name"]
+            pre_node_name = node["name"]
             klass = node["class"]
             knobs = node["knobs"]
             dependent = node["dependent"]
             # create node
-            now_node = nuke.createNode(klass, "name {}".format(name))
+            now_node = nuke.createNode(
+                klass, "name {}".format(pre_node_name))
             now_node.hideControlPanel()
             # add data to knob
@@ -465,29 +467,40 @@ def create_write_node(name, data, input=None, prenodes=None, review=True):
     GN.addKnob(nuke.Text_Knob('', 'Rendering'))
     # Add linked knobs.
-    linked_knob_names = [
-        "_grp-start_",
-        "use_limit", "first", "last",
-        "_grp-end_",
-        "Render"
-    ]
-    for name in linked_knob_names:
-        if "_grp-start_" in name:
+    linked_knob_names = []
+
+    # add input linked knobs and create group only if any input
+    if linked_knobs:
+        linked_knob_names.append("_grp-start_")
+        linked_knob_names.extend(linked_knobs)
+        linked_knob_names.append("_grp-end_")
+
+    linked_knob_names.append("Render")
+
+    for _k_name in linked_knob_names:
+        if "_grp-start_" in _k_name:
             knob = nuke.Tab_Knob(
                 "rnd_attr", "Rendering attributes", nuke.TABBEGINCLOSEDGROUP)
             GN.addKnob(knob)
-        elif "_grp-end_" in name:
+        elif "_grp-end_" in _k_name:
             knob = nuke.Tab_Knob(
                 "rnd_attr_end", "Rendering attributes", nuke.TABENDGROUP)
             GN.addKnob(knob)
         else:
-            link = nuke.Link_Knob("")
-            link.makeLink(write_node.name(), name)
-            link.setName(name)
-            if "Render" in name:
-                link.setLabel("Render Local")
-            link.setFlag(0x1000)
-            GN.addKnob(link)
+            if "___" in _k_name:
+                # add devider
+                GN.addKnob(nuke.Text_Knob(""))
+            else:
+                # add linked knob by _k_name
+                link = nuke.Link_Knob("")
+                link.makeLink(write_node.name(), _k_name)
+                link.setName(_k_name)
+
+                # make render
+                if "Render" in _k_name:
+                    link.setLabel("Render Local")
+                link.setFlag(0x1000)
+                GN.addKnob(link)
     # adding write to read button
     add_button_write_to_read(GN)


@@ -103,7 +103,8 @@ class CreateWritePrerender(plugin.PypeCreator):
                 write_data,
                 input=selected_node,
                 prenodes=[],
-                review=False)
+                review=False,
+                linked_knobs=["channels", "___", "first", "last", "use_limit"])
             # relinking to collected connections
             for i, input in enumerate(inputs):
@@ -122,19 +123,9 @@ class CreateWritePrerender(plugin.PypeCreator):
                     w_node = n
             write_node.end()
-            # add inner write node Tab
-            write_node.addKnob(nuke.Tab_Knob("WriteLinkedKnobs"))
-
-            # linking knobs to group property panel
-            linking_knobs = ["channels", "___", "first", "last", "use_limit"]
-            for k in linking_knobs:
-                if "___" in k:
-                    write_node.addKnob(nuke.Text_Knob(''))
-                else:
-                    lnk = nuke.Link_Knob(k)
-                    lnk.makeLink(w_node.name(), k)
-                    lnk.setName(k.replace('_', ' ').capitalize())
-                    lnk.clearFlag(nuke.STARTLINE)
-                    write_node.addKnob(lnk)
+            if self.presets.get("use_range_limit"):
+                w_node["use_limit"].setValue(True)
+                w_node["first"].setValue(nuke.root()["first_frame"].value())
+                w_node["last"].setValue(nuke.root()["last_frame"].value())
         return write_node


@@ -100,6 +100,13 @@ class CreateWriteRender(plugin.PypeCreator):
                                "/{subset}.{frame}.{ext}")})
         # add crop node to cut off all outside of format bounding box
+        # get width and height
+        try:
+            width, height = (selected_node.width(), selected_node.height())
+        except AttributeError:
+            actual_format = nuke.root().knob('format').value()
+            width, height = (actual_format.width(), actual_format.height())
+
         _prenodes = [
             {
                 "name": "Crop01",
@@ -108,8 +115,8 @@ class CreateWriteRender(plugin.PypeCreator):
                     ("box", [
                         0.0,
                         0.0,
-                        selected_node.width(),
-                        selected_node.height()
+                        width,
+                        height
                     ])
                 ],
                 "dependent": None


@@ -70,8 +70,9 @@ class PreCollectNukeInstances(pyblish.api.ContextPlugin):
                 review = False
                 if "review" in node.knobs():
                     review = node["review"].value()
-                    if review:
-                        families.append("review")
+                if review:
+                    families.append("review")
+                    families.append("ftrack")
         # Add all nodes in group instances.
         if node.Class() == "Group":
@@ -81,6 +82,7 @@ class PreCollectNukeInstances(pyblish.api.ContextPlugin):
                 if target == "Use existing frames":
                     # Local rendering
                     self.log.info("flagged for no render")
+                    families.append(families_ak.lower())
                 elif target == "Local":
                     # Local rendering
                     self.log.info("flagged for local render")


@@ -1,5 +1,6 @@
 import os
 import re
+from pprint import pformat
 import nuke
 import pyblish.api
 import openpype.api as pype
@@ -17,6 +18,7 @@ class CollectNukeWrites(pyblish.api.InstancePlugin):
     def process(self, instance):
         _families_test = [instance.data["family"]] + instance.data["families"]
+        self.log.debug("_families_test: {}".format(_families_test))
         node = None
         for x in instance:
@@ -133,22 +135,29 @@ class CollectNukeWrites(pyblish.api.InstancePlugin):
             "outputDir": output_dir,
             "ext": ext,
             "label": label,
-            "handleStart": handle_start,
-            "handleEnd": handle_end,
-            "frameStart": first_frame + handle_start,
-            "frameEnd": last_frame - handle_end,
-            "frameStartHandle": first_frame,
-            "frameEndHandle": last_frame,
             "outputType": output_type,
             "colorspace": colorspace,
             "deadlineChunkSize": deadlineChunkSize,
             "deadlinePriority": deadlinePriority
         })
-        if "prerender" in _families_test:
+        if self.is_prerender(_families_test):
             instance.data.update({
-                "family": "prerender",
-                "families": []
+                "handleStart": 0,
+                "handleEnd": 0,
+                "frameStart": first_frame,
+                "frameEnd": last_frame,
+                "frameStartHandle": first_frame,
+                "frameEndHandle": last_frame,
+            })
+        else:
+            instance.data.update({
+                "handleStart": handle_start,
+                "handleEnd": handle_end,
+                "frameStart": first_frame + handle_start,
+                "frameEnd": last_frame - handle_end,
+                "frameStartHandle": first_frame,
+                "frameEndHandle": last_frame,
             })
         # * Add audio to instance if exists.
@@ -170,4 +179,7 @@ class CollectNukeWrites(pyblish.api.InstancePlugin):
                 "filename": api.get_representation_path(repre_doc)
             }]
-        self.log.debug("instance.data: {}".format(instance.data))
+        self.log.debug("instance.data: {}".format(pformat(instance.data)))
+
+    def is_prerender(self, families):
+        return next((f for f in families if "prerender" in f), None)
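Note the helper substring-matches rather than comparing for equality, so suffixed family names also count as prerender. A small sketch of the behaviour:

```python
def is_prerender(families):
    # returns the first matching family name, or None when nothing matches
    return next((f for f in families if "prerender" in f), None)


print(is_prerender(["write", "prerender.local"]))  # prerender.local
print(is_prerender(["write", "render.local"]))     # None
```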


@@ -61,7 +61,6 @@ class ValidateRenderedFrames(pyblish.api.InstancePlugin):
     hosts = ["nuke", "nukestudio"]
     actions = [RepairCollectionActionToLocal, RepairCollectionActionToFarm]
-
     def process(self, instance):
         for repre in instance.data["representations"]:
@@ -78,10 +77,10 @@ class ValidateRenderedFrames(pyblish.api.InstancePlugin):
             collection = collections[0]
-            frame_length = int(
-                instance.data["frameEndHandle"]
-                - instance.data["frameStartHandle"] + 1
-            )
+            fstartH = instance.data["frameStartHandle"]
+            fendH = instance.data["frameEndHandle"]
+
+            frame_length = int(fendH - fstartH + 1)
             if frame_length != 1:
                 if len(collections) != 1:
@@ -95,7 +94,16 @@ class ValidateRenderedFrames(pyblish.api.InstancePlugin):
                     raise ValidationException(msg)
             collected_frames_len = int(len(collection.indexes))
+            coll_start = min(collection.indexes)
+            coll_end = max(collection.indexes)
+
             self.log.info("frame_length: {}".format(frame_length))
+            self.log.info("collected_frames_len: {}".format(
+                collected_frames_len))
+            self.log.info("fstartH-fendH: {}-{}".format(fstartH, fendH))
+            self.log.info(
+                "coll_start-coll_end: {}-{}".format(coll_start, coll_end))
             self.log.info(
                 "len(collection.indexes): {}".format(collected_frames_len)
             )
@@ -103,8 +111,11 @@ class ValidateRenderedFrames(pyblish.api.InstancePlugin):
             if ("slate" in instance.data["families"]) \
                     and (frame_length != collected_frames_len):
                 collected_frames_len -= 1
+                fstartH += 1
-            assert (collected_frames_len == frame_length), (
+            assert ((collected_frames_len >= frame_length)
+                    and (coll_start <= fstartH)
+                    and (coll_end >= fendH)), (
                 "{} missing frames. Use repair to render all frames"
             ).format(__name__)
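A worked example of the relaxed assertion, with assumed numbers and assuming a slate renders one extra frame at the head of the sequence:

```python
fstartH, fendH = 1001, 1010
frame_length = int(fendH - fstartH + 1)            # 10 expected frames

indexes = set(range(1000, 1011))                   # 11 files, slate at 1000
collected_frames_len = len(indexes)                # 11
coll_start, coll_end = min(indexes), max(indexes)  # 1000, 1010

# slate family and a count mismatch: discount the slate frame
if frame_length != collected_frames_len:
    collected_frames_len -= 1                      # 10
    fstartH += 1                                   # 1002

assert ((collected_frames_len >= frame_length)
        and (coll_start <= fstartH)
        and (coll_end >= fendH))                   # passes: nothing missing
```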


@@ -0,0 +1,26 @@
+import re
+
+
+def get_unique_layer_name(layers, asset_name, subset_name):
+    """
+        Gets all layer names and if 'asset_name_subset_name' is present, it
+        increases suffix by 1 (eg. creates unique layer name - for Loader)
+    Args:
+        layers (list) of dict with layers info (name, id etc.)
+        asset_name (string):
+        subset_name (string):
+
+    Returns:
+        (string): name_00X (without version)
+    """
+    name = "{}_{}".format(asset_name, subset_name)
+    names = {}
+    for layer in layers:
+        layer_name = re.sub(r'_\d{3}$', '', layer.name)
+        if layer_name in names.keys():
+            names[layer_name] = names[layer_name] + 1
+        else:
+            names[layer_name] = 1
+    occurrences = names.get(name, 0)
+
+    return "{}_{:0>3d}".format(name, occurrences + 1)


@@ -1,7 +1,9 @@
-from avalon import api, photoshop
+import os
 import re
+
+from avalon import api, photoshop
+from openpype.hosts.photoshop.plugins.lib import get_unique_layer_name
 stub = photoshop.stub()
@@ -15,8 +17,9 @@ class ImageLoader(api.Loader):
     representations = ["*"]
     def load(self, context, name=None, namespace=None, data=None):
-        layer_name = self._get_unique_layer_name(context["asset"]["name"],
-                                                 name)
+        layer_name = get_unique_layer_name(stub.get_layers(),
+                                           context["asset"]["name"],
+                                           name)
         with photoshop.maintained_selection():
             layer = stub.import_smart_object(self.fname, layer_name)
@@ -69,25 +72,3 @@ class ImageLoader(api.Loader):
     def switch(self, container, representation):
         self.update(container, representation)
-
-    def _get_unique_layer_name(self, asset_name, subset_name):
-        """
-            Gets all layer names and if 'name' is present in them, increases
-            suffix by 1 (eg. creates unique layer name - for Loader)
-        Args:
-            name (string): in format asset_subset
-        Returns:
-            (string): name_00X (without version)
-        """
-        name = "{}_{}".format(asset_name, subset_name)
-        names = {}
-        for layer in stub.get_layers():
-            layer_name = re.sub(r'_\d{3}$', '', layer.name)
-            if layer_name in names.keys():
-                names[layer_name] = names[layer_name] + 1
-            else:
-                names[layer_name] = 1
-        occurrences = names.get(name, 0)
-        return "{}_{:0>3d}".format(name, occurrences + 1)


@@ -0,0 +1,98 @@
+import os
+
+from avalon import api
+from avalon import photoshop
+from avalon.pipeline import get_representation_path_from_context
+from avalon.vendor import qargparse
+
+from openpype.lib import Anatomy
+from openpype.hosts.photoshop.plugins.lib import get_unique_layer_name
+
+stub = photoshop.stub()
+
+
+class ImageFromSequenceLoader(api.Loader):
+    """ Load specifing image from sequence
+
+        Used only as quick load of reference file from a sequence.
+
+        Plain ImageLoader picks first frame from sequence.
+
+        Loads only existing files - currently not possible to limit loaders
+        to single select - multiselect. If user selects multiple repres, list
+        for all of them is provided, but selection is only single file.
+        This loader will be triggered multiple times, but selected name will
+        match only to proper path.
+
+        Loader doesnt do containerization as there is currently no data model
+        of 'frame of rendered files' (only rendered sequence), update would be
+        difficult.
+    """
+
+    families = ["render"]
+    representations = ["*"]
+    options = []
+
+    def load(self, context, name=None, namespace=None, data=None):
+        if data.get("frame"):
+            self.fname = os.path.join(os.path.dirname(self.fname),
+                                      data["frame"])
+            if not os.path.exists(self.fname):
+                return
+
+        stub = photoshop.stub()
+        layer_name = get_unique_layer_name(stub.get_layers(),
+                                           context["asset"]["name"],
+                                           name)
+
+        with photoshop.maintained_selection():
+            layer = stub.import_smart_object(self.fname, layer_name)
+
+        self[:] = [layer]
+        namespace = namespace or layer_name
+
+        return namespace
+
+    @classmethod
+    def get_options(cls, repre_contexts):
+        """
+            Returns list of files for selected 'repre_contexts'.
+
+            It returns only files with same extension as in context as it is
+            expected that context points to sequence of frames.
+
+            Returns:
+                (list) of qargparse.Choice
+        """
+        files = []
+        for context in repre_contexts:
+            fname = get_representation_path_from_context(context)
+            _, file_extension = os.path.splitext(fname)
+
+            for file_name in os.listdir(os.path.dirname(fname)):
+                if not file_name.endswith(file_extension):
+                    continue
+                files.append(file_name)
+
+        # return selection only if there is something
+        if not files or len(files) <= 1:
+            return []
+
+        return [
+            qargparse.Choice(
+                "frame",
+                label="Select specific file",
+                items=files,
+                default=0,
+                help="Which frame should be loaded?"
+            )
+        ]
+
+    def update(self, container, representation):
+        """No update possible, not containerized."""
+        pass
+
+    def remove(self, container):
+        """No update possible, not containerized."""
+        pass


@@ -1,5 +1,4 @@
-import os
+from avalon import api
 import pyblish.api
 import openpype.api
 from avalon import photoshop
@@ -27,12 +26,20 @@ class ValidateInstanceAssetRepair(pyblish.api.Action):
         for instance in instances:
             data = stub.read(instance[0])
-            data["asset"] = os.environ["AVALON_ASSET"]
+            data["asset"] = api.Session["AVALON_ASSET"]
             stub.imprint(instance[0], data)
 class ValidateInstanceAsset(pyblish.api.InstancePlugin):
-    """Validate the instance asset is the current asset."""
+    """Validate the instance asset is the current selected context asset.
+
+    As it might happen that multiple worfiles are opened, switching
+    between them would mess with selected context.
+    In that case outputs might be output under wrong asset!
+
+    Repair action will use Context asset value (from Workfiles or Launcher)
+    Closing and reopening with Workfiles will refresh Context value.
+    """
     label = "Validate Instance Asset"
     hosts = ["photoshop"]
@@ -41,9 +48,12 @@ class ValidateInstanceAsset(pyblish.api.InstancePlugin):
     def process(self, instance):
         instance_asset = instance.data["asset"]
-        current_asset = os.environ["AVALON_ASSET"]
+        current_asset = api.Session["AVALON_ASSET"]
         msg = (
-            "Instance asset is not the same as current asset:"
-            f"\nInstance: {instance_asset}\nCurrent: {current_asset}"
+            f"Instance asset {instance_asset} is not the same "
+            f"as current context {current_asset}. PLEASE DO:\n"
+            f"Repair with 'A' action to use '{current_asset}'.\n"
+            f"If that's not correct value, close workfile and "
+            f"reopen via Workfiles!"
         )
         assert instance_asset == current_asset, msg


@@ -34,7 +34,6 @@ class CollectContextDataSAPublish(pyblish.api.ContextPlugin):
     # presets
     batch_extensions = ["edl", "xml", "psd"]
-    default_families = ["ftrack"]
     def process(self, context):
         # get json paths from os and load them
@@ -213,10 +212,6 @@ class CollectContextDataSAPublish(pyblish.api.ContextPlugin):
         subset = in_data["subset"]
         # If instance data already contain families then use it
         instance_families = in_data.get("families") or []
-        # Make sure default families are in instance
-        for default_family in self.default_families or []:
-            if default_family not in instance_families:
-                instance_families.append(default_family)
         instance = context.create_instance(subset)
         instance.data.update(


@@ -2,7 +2,7 @@
 Optional:
     presets -> extensions (
         example of use:
-            [".mov", ".mp4"]
+            ["mov", "mp4"]
     )
     presets -> source_dir (
         example of use:
@@ -11,6 +11,7 @@ Optional:
             "{root[work]}/{project[name]}/inputs"
             "./input"
            "../input"
+            ""
     )
 """
@@ -48,7 +49,7 @@ class CollectEditorial(pyblish.api.InstancePlugin):
     actions = []
     # presets
-    extensions = [".mov", ".mp4"]
+    extensions = ["mov", "mp4"]
     source_dir = None
     def process(self, instance):
@@ -72,7 +73,7 @@ class CollectEditorial(pyblish.api.InstancePlugin):
         video_path = None
         basename = os.path.splitext(os.path.basename(file_path))[0]
-        if self.source_dir:
+        if self.source_dir != "":
             source_dir = self.source_dir.replace("\\", "/")
             if ("./" in source_dir) or ("../" in source_dir):
                 # get current working dir
@@ -98,7 +99,7 @@ class CollectEditorial(pyblish.api.InstancePlugin):
                 if os.path.splitext(f)[0] not in basename:
                     continue
                 # filter out by respected extensions
-                if os.path.splitext(f)[1] not in self.extensions:
+                if os.path.splitext(f)[1][1:] not in self.extensions:
                     continue
                 video_path = os.path.join(
                     staging_dir, f
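The extension presets are now stored without the leading dot, hence the `[1:]` slice on `os.path.splitext`; a quick illustration:

```python
import os

# splitext keeps the dot, the new presets ("mov", "mp4") do not
print(os.path.splitext("shot010.mov")[1])      # .mov
print(os.path.splitext("shot010.mov")[1][1:])  # mov
```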


@@ -8,7 +8,7 @@ class CollectInstances(pyblish.api.InstancePlugin):
     """Collect instances from editorial's OTIO sequence"""
     order = pyblish.api.CollectorOrder + 0.01
-    label = "Collect Instances"
+    label = "Collect Editorial Instances"
     hosts = ["standalonepublisher"]
     families = ["editorial"]
@@ -16,17 +16,13 @@ class CollectInstances(pyblish.api.InstancePlugin):
     subsets = {
         "referenceMain": {
             "family": "review",
-            "families": ["clip", "ftrack"],
-            "extensions": [".mp4"]
+            "families": ["clip"],
+            "extensions": ["mp4"]
         },
         "audioMain": {
             "family": "audio",
-            "families": ["clip", "ftrack"],
-            "extensions": [".wav"],
-        },
-        "shotMain": {
-            "family": "shot",
-            "families": []
+            "families": ["clip"],
+            "extensions": ["wav"],
         }
     }
     timeline_frame_start = 900000  # starndard edl default (10:00:00:00)
@@ -55,7 +51,7 @@ class CollectInstances(pyblish.api.InstancePlugin):
         fps = plib.get_asset()["data"]["fps"]
         tracks = timeline.each_child(
-            descended_from_type=otio.schema.track.Track
+            descended_from_type=otio.schema.Track
         )
         # get data from avalon
@@ -84,6 +80,9 @@ class CollectInstances(pyblish.api.InstancePlugin):
             if clip.name is None:
                 continue
+            if isinstance(clip, otio.schema.Gap):
+                continue
+
             # skip all generators like black ampty
             if isinstance(
                 clip.media_reference,
@@ -92,7 +91,7 @@ class CollectInstances(pyblish.api.InstancePlugin):
             # Transitions are ignored, because Clips have the full frame
             # range.
-            if isinstance(clip, otio.schema.transition.Transition):
+            if isinstance(clip, otio.schema.Transition):
                 continue
             # basic unique asset name
@@ -175,7 +174,16 @@ class CollectInstances(pyblish.api.InstancePlugin):
                     data_key: instance.data.get(data_key)})
             # adding subsets to context as instances
+            self.subsets.update({
+                "shotMain": {
+                    "family": "shot",
+                    "families": []
+                }
+            })
             for subset, properities in self.subsets.items():
+                if properities["version"] == 0:
+                    properities.pop("version")
+
                 # adding Review-able instance
                 subset_instance_data = instance_data.copy()
                 subset_instance_data.update(properities)


@@ -11,7 +11,7 @@ class CollectInstanceResources(pyblish.api.InstancePlugin):
     # must be after `CollectInstances`
     order = pyblish.api.CollectorOrder + 0.011
-    label = "Collect Instance Resources"
+    label = "Collect Editorial Resources"
     hosts = ["standalonepublisher"]
     families = ["clip"]
@@ -177,19 +177,23 @@ class CollectInstanceResources(pyblish.api.InstancePlugin):
         collection_head_name = None
         # loop trough collections and create representations
         for _collection in collections:
-            ext = _collection.tail
+            ext = _collection.tail[1:]
             collection_head_name = _collection.head
             frame_start = list(_collection.indexes)[0]
             frame_end = list(_collection.indexes)[-1]
             repre_data = {
                 "frameStart": frame_start,
                 "frameEnd": frame_end,
-                "name": ext[1:],
-                "ext": ext[1:],
+                "name": ext,
+                "ext": ext,
                 "files": [item for item in _collection],
                 "stagingDir": staging_dir
             }
+
+            if instance_data.get("keepSequence"):
+                repre_data_keep = deepcopy(repre_data)
+                instance_data["representations"].append(repre_data_keep)
+
             if "review" in instance_data["families"]:
                 repre_data.update({
                     "thumbnail": True,
@@ -208,20 +212,20 @@ class CollectInstanceResources(pyblish.api.InstancePlugin):
         # loop trough reminders and create representations
         for _reminding_file in remainder:
-            ext = os.path.splitext(_reminding_file)[-1]
+            ext = os.path.splitext(_reminding_file)[-1][1:]
             if ext not in instance_data["extensions"]:
                 continue
             if collection_head_name and (
-                (collection_head_name + ext[1:]) not in _reminding_file
-            ) and (ext in [".mp4", ".mov"]):
+                (collection_head_name + ext) not in _reminding_file
+            ) and (ext in ["mp4", "mov"]):
                 self.log.info(f"Skipping file: {_reminding_file}")
                 continue
             frame_start = 1
             frame_end = 1
             repre_data = {
-                "name": ext[1:],
-                "ext": ext[1:],
+                "name": ext,
+                "ext": ext,
                 "files": _reminding_file,
                 "stagingDir": staging_dir
             }


@@ -131,20 +131,21 @@ class CollectHierarchyInstance(pyblish.api.ContextPlugin):
             tasks_to_add = dict()
             project_tasks = io.find_one({"type": "project"})["config"]["tasks"]
             for task_name, task_data in self.shot_add_tasks.items():
-                try:
-                    if task_data["type"] in project_tasks.keys():
-                        tasks_to_add.update({task_name: task_data})
-                    else:
-                        raise KeyError(
-                            "Wrong FtrackTaskType `{}` for `{}` is not"
-                            " existing in `{}``".format(
-                                task_data["type"],
-                                task_name,
-                                list(project_tasks.keys())))
-                except KeyError as error:
-                    raise KeyError(
-                        "Wrong presets: `{0}`".format(error)
-                    )
+                _task_data = deepcopy(task_data)
+
+                # fixing enumerator from settings
+                _task_data["type"] = task_data["type"][0]
+
+                # check if task type in project task types
+                if _task_data["type"] in project_tasks.keys():
+                    tasks_to_add.update({task_name: _task_data})
+                else:
+                    raise KeyError(
+                        "Wrong FtrackTaskType `{}` for `{}` is not"
+                        " existing in `{}``".format(
+                            _task_data["type"],
+                            task_name,
+                            list(project_tasks.keys())))
             instance.data["tasks"] = tasks_to_add
         else:


@@ -1,29 +0,0 @@
-"""
-Requires:
-    Nothing
-
-Provides:
-    Instance
-"""
-
-import pyblish.api
-import logging
-
-log = logging.getLogger("collector")
-
-
-class CollectMatchmovePublish(pyblish.api.InstancePlugin):
-    """
-    Collector with only one reason for its existence - remove 'ftrack'
-    family implicitly added by Standalone Publisher
-    """
-
-    label = "Collect Matchmove - SA Publish"
-    order = pyblish.api.CollectorOrder
-    families = ["matchmove"]
-    hosts = ["standalonepublisher"]
-
-    def process(self, instance):
-        if "ftrack" in instance.data["families"]:
-            instance.data["families"].remove("ftrack")


@@ -1,6 +1,5 @@
 import os
 import tempfile
-import subprocess
 import pyblish.api
 import openpype.api
 import openpype.lib
@@ -67,7 +66,6 @@ class ExtractThumbnailSP(pyblish.api.InstancePlugin):
         else:
             # Convert to jpeg if not yet
             full_input_path = os.path.join(thumbnail_repre["stagingDir"], file)
-            full_input_path = '"{}"'.format(full_input_path)
             self.log.info("input {}".format(full_input_path))
             full_thumbnail_path = tempfile.mkstemp(suffix=".jpg")[1]
@@ -77,30 +75,37 @@ class ExtractThumbnailSP(pyblish.api.InstancePlugin):
             ffmpeg_args = self.ffmpeg_args or {}
-            jpeg_items = []
-            jpeg_items.append("\"{}\"".format(ffmpeg_path))
-            # override file if already exists
-            jpeg_items.append("-y")
+            jpeg_items = [
+                "\"{}\"".format(ffmpeg_path),
+                # override file if already exists
+                "-y"
+            ]
             # add input filters from peresets
             jpeg_items.extend(ffmpeg_args.get("input") or [])
             # input file
-            jpeg_items.append("-i {}".format(full_input_path))
+            jpeg_items.append("-i \"{}\"".format(full_input_path))
             # extract only single file
-            jpeg_items.append("-vframes 1")
+            jpeg_items.append("-frames:v 1")
+            # Add black background for transparent images
+            jpeg_items.append((
+                "-filter_complex"
+                " \"color=black,format=rgb24[c]"
+                ";[c][0]scale2ref[c][i]"
+                ";[c][i]overlay=format=auto:shortest=1,setsar=1\""
+            ))
             jpeg_items.extend(ffmpeg_args.get("output") or [])
             # output file
-            jpeg_items.append(full_thumbnail_path)
+            jpeg_items.append("\"{}\"".format(full_thumbnail_path))
             subprocess_jpeg = " ".join(jpeg_items)
             # run subprocess
             self.log.debug("Executing: {}".format(subprocess_jpeg))
-            subprocess.Popen(
-                subprocess_jpeg,
-                stdout=subprocess.PIPE,
-                shell=True
-            )
+            openpype.api.run_subprocess(
+                subprocess_jpeg, shell=True, logger=self.log
+            )
         # remove thumbnail key from origin repre
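For reference, a sketch (with hypothetical paths) of the argument list this now assembles and hands to `run_subprocess`:

```python
ffmpeg_path = "/usr/bin/ffmpeg"              # assumed binary location
full_input_path = "/staging/thumb_src.png"   # assumed input
full_thumbnail_path = "/tmp/thumb.jpg"       # assumed output

jpeg_items = [
    "\"{}\"".format(ffmpeg_path),
    "-y",                                    # overwrite existing output
    "-i \"{}\"".format(full_input_path),
    "-frames:v 1",                           # single frame only
    # composite transparent inputs over a black background
    ("-filter_complex"
     " \"color=black,format=rgb24[c]"
     ";[c][0]scale2ref[c][i]"
     ";[c][i]overlay=format=auto:shortest=1,setsar=1\""),
    "\"{}\"".format(full_thumbnail_path),
]
print(" ".join(jpeg_items))
```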


@@ -43,7 +43,10 @@ class ValidateFrameRange(pyblish.api.InstancePlugin):
             self.log.warning("Cannot check for extension {}".format(ext))
             return
-        frames = len(instance.data.get("representations", [None])[0]["files"])
+        files = instance.data.get("representations", [None])[0]["files"]
+        if isinstance(files, str):
+            files = [files]
+        frames = len(files)
         err_msg = "Frame duration from DB:'{}' ". format(int(duration)) +\
                   " doesn't match number of files:'{}'".format(frames) +\


@@ -58,18 +58,14 @@ class CreateRenderlayer(plugin.Creator):
         # Get currently selected layers
         layers_data = lib.layers_data()
-        group_ids = set()
-        for layer in layers_data:
-            if layer["selected"]:
-                group_ids.add(layer["group_id"])
+        selected_layers = [
+            layer
+            for layer in layers_data
+            if layer["selected"]
+        ]
         # Return layer name if only one is selected
-        if len(group_ids) == 1:
-            group_id = list(group_ids)[0]
-            groups_data = lib.groups_data()
-            for group in groups_data:
-                if group["group_id"] == group_id:
-                    return group["name"]
+        if len(selected_layers) == 1:
+            return selected_layers[0]["name"]
         # Use defaults
         if cls.defaults:


@@ -103,8 +103,6 @@ class CollectInstances(pyblish.api.ContextPlugin):
                 instance.data["layers"] = copy.deepcopy(
                     context.data["layersData"]
                 )
-                # Add ftrack family
-                instance.data["families"].append("ftrack")
             elif family == "renderLayer":
                 instance = self.create_render_layer_instance(
@@ -186,9 +184,6 @@ class CollectInstances(pyblish.api.ContextPlugin):
             instance_data["layers"] = group_layers
-
-            # Add ftrack family
-            instance_data["families"].append("ftrack")
         return context.create_instance(**instance_data)
     def create_render_pass_instance(self, context, instance_data):

View file

@ -1,5 +1,6 @@
import os import os
import json import json
import tempfile
import pyblish.api import pyblish.api
import avalon.api import avalon.api
@ -153,9 +154,45 @@ class CollectWorkfileData(pyblish.api.ContextPlugin):
"sceneMarkIn": int(mark_in_frame), "sceneMarkIn": int(mark_in_frame),
"sceneMarkInState": mark_in_state == "set", "sceneMarkInState": mark_in_state == "set",
"sceneMarkOut": int(mark_out_frame), "sceneMarkOut": int(mark_out_frame),
"sceneMarkOutState": mark_out_state == "set" "sceneMarkOutState": mark_out_state == "set",
"sceneBgColor": self._get_bg_color()
} }
self.log.debug( self.log.debug(
"Scene data: {}".format(json.dumps(scene_data, indent=4)) "Scene data: {}".format(json.dumps(scene_data, indent=4))
) )
context.data.update(scene_data) context.data.update(scene_data)
def _get_bg_color(self):
"""Background color set on scene.
It is important for review export, where the scene bg color is used as
the background.
"""
output_file = tempfile.NamedTemporaryFile(
mode="w", prefix="a_tvp_", suffix=".txt", delete=False
)
output_file.close()
output_filepath = output_file.name.replace("\\", "/")
george_script_lines = [
# Variable containing full path to output file
"output_path = \"{}\"".format(output_filepath),
"tv_background",
"bg_color = result",
# Write data to output file
(
"tv_writetextfile"
" \"strict\" \"append\" '\"'output_path'\"' bg_color"
)
]
george_script = "\n".join(george_script_lines)
lib.execute_george_through_file(george_script)
with open(output_filepath, "r") as stream:
data = stream.read()
os.remove(output_filepath)
data = data.strip()
if not data:
return None
return data.split(" ")

View file

@ -1,5 +1,6 @@
import os import os
import shutil import shutil
import copy
import tempfile import tempfile
import pyblish.api import pyblish.api
@ -13,6 +14,9 @@ class ExtractSequence(pyblish.api.Extractor):
hosts = ["tvpaint"] hosts = ["tvpaint"]
families = ["review", "renderPass", "renderLayer"] families = ["review", "renderPass", "renderLayer"]
# Modifiable with settings
review_bg = [255, 255, 255, 255]
def process(self, instance): def process(self, instance):
self.log.info( self.log.info(
"* Processing instance \"{}\"".format(instance.data["label"]) "* Processing instance \"{}\"".format(instance.data["label"])
@ -53,6 +57,8 @@ class ExtractSequence(pyblish.api.Extractor):
handle_start = instance.context.data["handleStart"] handle_start = instance.context.data["handleStart"]
handle_end = instance.context.data["handleEnd"] handle_end = instance.context.data["handleEnd"]
scene_bg_color = instance.context.data["sceneBgColor"]
# --- Fallbacks ---------------------------------------------------- # --- Fallbacks ----------------------------------------------------
# This is required if validations of ranges are ignored. # This is required if validations of ranges are ignored.
# - all of this code won't change processing if range to render # - all of this code won't change processing if range to render
@ -120,7 +126,8 @@ class ExtractSequence(pyblish.api.Extractor):
if instance.data["family"] == "review": if instance.data["family"] == "review":
output_filenames, thumbnail_fullpath = self.render_review( output_filenames, thumbnail_fullpath = self.render_review(
filename_template, output_dir, mark_in, mark_out filename_template, output_dir, mark_in, mark_out,
scene_bg_color
) )
else: else:
# Render output # Render output
@ -241,7 +248,9 @@ class ExtractSequence(pyblish.api.Extractor):
for path in repre_filepaths for path in repre_filepaths
] ]
def render_review(self, filename_template, output_dir, mark_in, mark_out): def render_review(
self, filename_template, output_dir, mark_in, mark_out, scene_bg_color
):
""" Export images from TVPaint using `tv_savesequence` command. """ Export images from TVPaint using `tv_savesequence` command.
Args: Args:
@ -252,6 +261,8 @@ class ExtractSequence(pyblish.api.Extractor):
output_dir (str): Directory where files will be stored. output_dir (str): Directory where files will be stored.
mark_in (int): Starting frame index from which export will begin. mark_in (int): Starting frame index from which export will begin.
mark_out (int): On which frame index export will end. mark_out (int): On which frame index export will end.
scene_bg_color (list): Bg color set in scene. Result of george
script command `tv_background`.
Returns: Returns:
tuple: With 2 items: first is a list of filenames, second is a path to tuple: With 2 items: first is a list of filenames, second is a path to
@ -263,7 +274,11 @@ class ExtractSequence(pyblish.api.Extractor):
filename_template.format(frame=mark_in) filename_template.format(frame=mark_in)
) )
bg_color = self._get_review_bg_color()
george_script_lines = [ george_script_lines = [
# Change bg color to color from settings
"tv_background \"color\" {} {} {}".format(*bg_color),
"tv_SaveMode \"PNG\"", "tv_SaveMode \"PNG\"",
"export_path = \"{}\"".format( "export_path = \"{}\"".format(
first_frame_filepath.replace("\\", "/") first_frame_filepath.replace("\\", "/")
@ -272,6 +287,18 @@ class ExtractSequence(pyblish.api.Extractor):
mark_in, mark_out mark_in, mark_out
) )
] ]
if scene_bg_color:
# Change bg color back to previous scene bg color
_scene_bg_color = copy.deepcopy(scene_bg_color)
bg_type = _scene_bg_color.pop(0)
orig_color_command = [
"tv_background",
"\"{}\"".format(bg_type)
]
orig_color_command.extend(_scene_bg_color)
george_script_lines.append(" ".join(orig_color_command))
lib.execute_george_through_file("\n".join(george_script_lines)) lib.execute_george_through_file("\n".join(george_script_lines))
first_frame_filepath = None first_frame_filepath = None
@ -291,12 +318,13 @@ class ExtractSequence(pyblish.api.Extractor):
if first_frame_filepath is None: if first_frame_filepath is None:
first_frame_filepath = filepath first_frame_filepath = filepath
thumbnail_filepath = os.path.join(output_dir, "thumbnail.jpg") thumbnail_filepath = None
if first_frame_filepath and os.path.exists(first_frame_filepath): if first_frame_filepath and os.path.exists(first_frame_filepath):
thumbnail_filepath = os.path.join(output_dir, "thumbnail.jpg")
source_img = Image.open(first_frame_filepath) source_img = Image.open(first_frame_filepath)
thumbnail_obj = Image.new("RGB", source_img.size, (255, 255, 255)) if source_img.mode.lower() != "rgb":
thumbnail_obj.paste(source_img) source_img = source_img.convert("RGB")
thumbnail_obj.save(thumbnail_filepath) source_img.save(thumbnail_filepath)
return output_filenames, thumbnail_filepath return output_filenames, thumbnail_filepath
@ -392,12 +420,35 @@ class ExtractSequence(pyblish.api.Extractor):
if thumbnail_src_filepath and os.path.exists(thumbnail_src_filepath): if thumbnail_src_filepath and os.path.exists(thumbnail_src_filepath):
source_img = Image.open(thumbnail_src_filepath) source_img = Image.open(thumbnail_src_filepath)
thumbnail_filepath = os.path.join(output_dir, "thumbnail.jpg") thumbnail_filepath = os.path.join(output_dir, "thumbnail.jpg")
thumbnail_obj = Image.new("RGB", source_img.size, (255, 255, 255)) # Composite background only on rgba images
thumbnail_obj.paste(source_img) # - just making sure
thumbnail_obj.save(thumbnail_filepath) if source_img.mode.lower() == "rgba":
bg_color = self._get_review_bg_color()
self.log.debug("Adding thumbnail background color {}.".format(
" ".join([str(val) for val in bg_color])
))
bg_image = Image.new("RGBA", source_img.size, bg_color)
thumbnail_obj = Image.alpha_composite(bg_image, source_img)
thumbnail_obj.convert("RGB").save(thumbnail_filepath)
else:
self.log.info((
"Source for thumbnail has mode \"{}\" (Expected: RGBA)."
" Can't use thubmanail background color."
).format(source_img.mode))
source_img.save(thumbnail_filepath)
return output_filenames, thumbnail_filepath return output_filenames, thumbnail_filepath
def _get_review_bg_color(self):
red = green = blue = 255
if self.review_bg:
if len(self.review_bg) == 4:
red, green, blue, _ = self.review_bg
elif len(self.review_bg) == 3:
red, green, blue = self.review_bg
return (red, green, blue)
def _render_layer( def _render_layer(
self, self,
layer, layer,

View file

@ -0,0 +1,9 @@
## Unreal Integration
Supported Unreal Engine version is 4.26+ (mainly because of the major Python changes introduced in that version).
### Project naming
Unreal doesn't support project names starting with a non-alphabetic character, so names like `123_myProject` are
invalid. If OpenPype detects such a name, it automatically prepends the letter **P** to make it valid, so `123_myProject`
will become `P123_myProject`. There is also a soft limit on project name length: it should be shorter than 20 characters.
Longer names will trigger a warning in the Unreal Editor about possible side effects.
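For illustration, the renaming rule could be sketched like this (hypothetical helper, not part of OpenPype):

```python
import re

def sanitize_unreal_project_name(name):
    """Hypothetical helper mirroring the renaming rule described above."""
    if not re.match(r"[a-zA-Z]", name):
        # prepend "P" so the name starts with an alphabetic character
        name = "P{}".format(name)
    if len(name) > 20:
        print("Warning: project name is longer than 20 characters")
    return name

print(sanitize_unreal_project_name("123_myProject"))  # -> P123_myProject
```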

View file

@ -1,38 +1,51 @@
# -*- coding: utf-8 -*-
"""Unreal launching and project tools."""
import sys import sys
import os import os
import platform import platform
import json import json
from distutils import dir_util from distutils import dir_util
import subprocess import subprocess
import re
from pathlib import Path
from collections import OrderedDict
from openpype.api import get_project_settings from openpype.api import get_project_settings
def get_engine_versions(): def get_engine_versions(env=None):
""" """Detect Unreal Engine versions.
This will try to detect location and versions of installed Unreal Engine. This will try to detect location and versions of installed Unreal Engine.
Location can be overridden by `UNREAL_ENGINE_LOCATION` environment Location can be overridden by `UNREAL_ENGINE_LOCATION` environment
variable. variable.
Returns: Args:
env (dict, optional): Environment to use.
dict: dictionary with version as a key and dir as value. Returns:
OrderedDict: dictionary with version as a key and dir as value,
sorted by version.
Example: Example:
>>> get_engine_versions()
>>> get_engine_version()
{ {
"4.23": "C:/Epic Games/UE_4.23", "4.23": "C:/Epic Games/UE_4.23",
"4.24": "C:/Epic Games/UE_4.24" "4.24": "C:/Epic Games/UE_4.24"
} }
"""
try:
engine_locations = {}
root, dirs, files = next(os.walk(os.environ["UNREAL_ENGINE_LOCATION"]))
for dir in dirs: """
if dir.startswith("UE_"): env = env or os.environ
ver = dir.split("_")[1] engine_locations = {}
engine_locations[ver] = os.path.join(root, dir) try:
root, dirs, _ = next(os.walk(env["UNREAL_ENGINE_LOCATION"]))
for directory in dirs:
if directory.startswith("UE"):
try:
ver = re.split(r"[-_]", directory)[1]
except IndexError:
continue
engine_locations[ver] = os.path.join(root, directory)
except KeyError: except KeyError:
# environment variable not set # environment variable not set
pass pass
@ -40,32 +53,52 @@ def get_engine_versions():
# specified directory doesn't exist # specified directory doesn't exist
pass pass
# if we've got something, terminate autodetection process # if we've got something, terminate auto-detection process
if engine_locations: if engine_locations:
return engine_locations return OrderedDict(sorted(engine_locations.items()))
# else kick in platform specific detection # else kick in platform specific detection
if platform.system().lower() == "windows": if platform.system().lower() == "windows":
return _win_get_engine_versions() return OrderedDict(sorted(_win_get_engine_versions().items()))
elif platform.system().lower() == "linux": if platform.system().lower() == "linux":
# on linux, there is no installation and getting Unreal Engine involves # on linux, there is no installation and getting Unreal Engine involves
# git clone. So we'll probably depend on `UNREAL_ENGINE_LOCATION`. # git clone. So we'll probably depend on `UNREAL_ENGINE_LOCATION`.
pass pass
elif platform.system().lower() == "darwin": if platform.system().lower() == "darwin":
return _darwin_get_engine_version() return OrderedDict(sorted(_darwin_get_engine_version().items()))
return {} return OrderedDict()
def get_editor_executable_path(engine_path: Path) -> Path:
"""Get UE4 Editor executable path."""
ue4_path = engine_path / "Engine/Binaries"
if platform.system().lower() == "windows":
ue4_path /= "Win64/UE4Editor.exe"
elif platform.system().lower() == "linux":
ue4_path /= "Linux/UE4Editor"
elif platform.system().lower() == "darwin":
ue4_path /= "Mac/UE4Editor"
return ue4_path
def _win_get_engine_versions(): def _win_get_engine_versions():
""" """Get Unreal Engine versions on Windows.
If engines are installed via Epic Games Launcher then there is: If engines are installed via Epic Games Launcher then there is:
`%PROGRAMDATA%/Epic/UnrealEngineLauncher/LauncherInstalled.dat` `%PROGRAMDATA%/Epic/UnrealEngineLauncher/LauncherInstalled.dat`
This file is a JSON file listing installed items; Unreal engines This file is a JSON file listing installed items; Unreal engines
are marked with `"AppName" = "UE_X.XX"` like `UE_4.24` are marked with `"AppName" = "UE_X.XX"` like `UE_4.24`
Returns:
dict: version as a key and path as a value.
""" """
install_json_path = os.path.join( install_json_path = os.path.join(
os.environ.get("PROGRAMDATA"), os.getenv("PROGRAMDATA"),
"Epic", "Epic",
"UnrealEngineLauncher", "UnrealEngineLauncher",
"LauncherInstalled.dat", "LauncherInstalled.dat",
@ -75,11 +108,19 @@ def _win_get_engine_versions():
def _darwin_get_engine_version() -> dict: def _darwin_get_engine_version() -> dict:
""" """Get Unreal Engine versions on MacOS.
It works the same as on Windows, just JSON file location is different. It works the same as on Windows, just JSON file location is different.
Returns:
dict: version as a key and path as a value.
See Also:
:func:`_win_get_engine_versions`.
""" """
install_json_path = os.path.join( install_json_path = os.path.join(
os.environ.get("HOME"), os.getenv("HOME"),
"Library", "Library",
"Application Support", "Application Support",
"Epic", "Epic",
@ -91,25 +132,26 @@ def _darwin_get_engine_version() -> dict:
def _parse_launcher_locations(install_json_path: str) -> dict: def _parse_launcher_locations(install_json_path: str) -> dict:
""" """This will parse locations from json file.
This will parse locations from json file.
Args:
install_json_path (str): Path to `LauncherInstalled.dat`.
Returns:
dict: with unreal engine versions as keys and
paths to those engine installations as value.
:param install_json_path: path to `LauncherInstalled.dat`
:type install_json_path: str
:returns: returns dict with unreal engine versions as keys and
paths to those engine installations as value.
:rtype: dict
""" """
engine_locations = {} engine_locations = {}
if os.path.isfile(install_json_path): if os.path.isfile(install_json_path):
with open(install_json_path, "r") as ilf: with open(install_json_path, "r") as ilf:
try: try:
install_data = json.load(ilf) install_data = json.load(ilf)
except json.JSONDecodeError: except json.JSONDecodeError as e:
raise Exception( raise Exception(
"Invalid `LauncherInstalled.dat file. `" "Invalid `LauncherInstalled.dat file. `"
"Cannot determine Unreal Engine location." "Cannot determine Unreal Engine location."
) ) from e
for installation in install_data.get("InstallationList", []): for installation in install_data.get("InstallationList", []):
if installation.get("AppName").startswith("UE_"): if installation.get("AppName").startswith("UE_"):
@ -121,55 +163,91 @@ def _parse_launcher_locations(install_json_path: str) -> dict:
def create_unreal_project(project_name: str, def create_unreal_project(project_name: str,
ue_version: str, ue_version: str,
pr_dir: str, pr_dir: Path,
engine_path: str, engine_path: Path,
dev_mode: bool = False) -> None: dev_mode: bool = False,
""" env: dict = None) -> None:
This will create `.uproject` file at specified location. As there is no """This will create `.uproject` file at specified location.
way I know to create project via command line, this is easiest option.
Unreal project file is basically JSON file. If we find As there is no way I know to create a project via the command line, this is
the easiest option. An Unreal project file is basically a JSON file. If we find
`AVALON_UNREAL_PLUGIN` environment variable we assume this is the location `AVALON_UNREAL_PLUGIN` environment variable we assume this is the location
of the Avalon Integration Plugin and we copy its content to the project folder of the Avalon Integration Plugin and we copy its content to the project folder
and enable this plugin. and enable this plugin.
:param project_name: project name Args:
:type project_name: str project_name (str): Name of the project.
:param ue_version: unreal engine version (like 4.23) ue_version (str): Unreal engine version (like 4.23).
:type ue_version: str pr_dir (Path): Path to directory where project will be created.
:param pr_dir: path to directory where project will be created engine_path (Path): Path to Unreal Engine installation.
:type pr_dir: str dev_mode (bool, optional): Flag to trigger C++ style Unreal project
:param engine_path: Path to Unreal Engine installation needing Visual Studio and other tools to compile plugins from
:type engine_path: str sources. This will trigger automatically if `Binaries`
:param dev_mode: Flag to trigger C++ style Unreal project needing directory is not found in plugin folders as this indicates
Visual Studio and other tools to compile plugins from this is only source distribution of the plugin. Dev mode
sources. This will trigger automatically if `Binaries` is also set by preset file `unreal/project_setup.json` in
directory is not found in plugin folders as this indicates **OPENPYPE_CONFIG**.
this is only source distribution of the plugin. Dev mode env (dict, optional): Environment to use. If not set, `os.environ`.
is also set by preset file `unreal/project_setup.json` in
**OPENPYPE_CONFIG**.
:type dev_mode: bool
:returns: None
"""
preset = get_project_settings(project_name)["unreal"]["project_setup"]
if os.path.isdir(os.environ.get("AVALON_UNREAL_PLUGIN", "")): Raises:
NotImplementedError: For unsupported platforms.
Returns:
None
"""
env = env or os.environ
preset = get_project_settings(project_name)["unreal"]["project_setup"]
ue_id = ".".join(ue_version.split(".")[:2])
# get unreal engine identifier
# -------------------------------------------------------------------------
# FIXME (antirotor): As of 4.26 this is a problem with UE4 built from
# sources. In that case the Engine ID is calculated per machine/user and
# not from Engine files as this code reads it. This prevents UE4 from
# directly opening the project, as it will complain about the project
# being created in a different UE4 version. When a user converts such a
# project to their UE4 version, the Engine ID is replaced in the uproject
# file. If another user then tries to open it, they get a similar error.
ue4_modules = Path()
if platform.system().lower() == "windows":
ue4_modules = Path(os.path.join(engine_path, "Engine", "Binaries",
"Win64", "UE4Editor.modules"))
if platform.system().lower() == "linux":
ue4_modules = Path(os.path.join(engine_path, "Engine", "Binaries",
"Linux", "UE4Editor.modules"))
if platform.system().lower() == "darwin":
ue4_modules = Path(os.path.join(engine_path, "Engine", "Binaries",
"Mac", "UE4Editor.modules"))
if ue4_modules.exists():
print("--- Loading Engine ID from modules file ...")
with open(ue4_modules, "r") as mp:
loaded_modules = json.load(mp)
if loaded_modules.get("BuildId"):
ue_id = "{" + loaded_modules.get("BuildId") + "}"
plugins_path = None
if os.path.isdir(env.get("AVALON_UNREAL_PLUGIN", "")):
# copy plugin to correct path under project # copy plugin to correct path under project
plugins_path = os.path.join(pr_dir, "Plugins") plugins_path = pr_dir / "Plugins"
avalon_plugin_path = os.path.join(plugins_path, "Avalon") avalon_plugin_path = plugins_path / "Avalon"
if not os.path.isdir(avalon_plugin_path): if not avalon_plugin_path.is_dir():
os.makedirs(avalon_plugin_path, exist_ok=True) avalon_plugin_path.mkdir(parents=True, exist_ok=True)
dir_util._path_created = {} dir_util._path_created = {}
dir_util.copy_tree(os.environ.get("AVALON_UNREAL_PLUGIN"), dir_util.copy_tree(env.get("AVALON_UNREAL_PLUGIN"),
avalon_plugin_path) avalon_plugin_path.as_posix())
if (not os.path.isdir(os.path.join(avalon_plugin_path, "Binaries")) if not (avalon_plugin_path / "Binaries").is_dir() \
or not os.path.join(avalon_plugin_path, "Intermediate")): or not (avalon_plugin_path / "Intermediate").is_dir():
dev_mode = True dev_mode = True
# data for project file # data for project file
data = { data = {
"FileVersion": 3, "FileVersion": 3,
"EngineAssociation": ue_version, "EngineAssociation": ue_id,
"Category": "", "Category": "",
"Description": "", "Description": "",
"Plugins": [ "Plugins": [
@ -179,35 +257,6 @@ def create_unreal_project(project_name: str,
] ]
} }
if preset["install_unreal_python_engine"]:
# If `OPENPYPE_UNREAL_ENGINE_PYTHON_PLUGIN` is set, copy it from there
# to support offline installation.
# Otherwise clone UnrealEnginePython to Plugins directory
# https://github.com/20tab/UnrealEnginePython.git
uep_path = os.path.join(plugins_path, "UnrealEnginePython")
if os.environ.get("OPENPYPE_UNREAL_ENGINE_PYTHON_PLUGIN"):
os.makedirs(uep_path, exist_ok=True)
dir_util._path_created = {}
dir_util.copy_tree(
os.environ.get("OPENPYPE_UNREAL_ENGINE_PYTHON_PLUGIN"),
uep_path)
else:
# WARNING: this will trigger dev_mode, because we need to compile
# this plugin.
dev_mode = True
import git
git.Repo.clone_from(
"https://github.com/20tab/UnrealEnginePython.git",
uep_path)
data["Plugins"].append(
{"Name": "UnrealEnginePython", "Enabled": True})
if (not os.path.isdir(os.path.join(uep_path, "Binaries"))
or not os.path.join(uep_path, "Intermediate")):
dev_mode = True
if dev_mode or preset["dev_mode"]: if dev_mode or preset["dev_mode"]:
# this will add project module and necessary source file to make it # this will add project module and necessary source file to make it
# C++ project and to (hopefully) make Unreal Editor to compile all # C++ project and to (hopefully) make Unreal Editor to compile all
@ -220,51 +269,39 @@ def create_unreal_project(project_name: str,
"AdditionalDependencies": ["Engine"], "AdditionalDependencies": ["Engine"],
}] }]
if preset["install_unreal_python_engine"]:
# now we need to fix python path in:
# `UnrealEnginePython.Build.cs`
# to point to our python
with open(os.path.join(
uep_path, "Source",
"UnrealEnginePython",
"UnrealEnginePython.Build.cs"), mode="r") as f:
build_file = f.read()
fix = build_file.replace(
'private string pythonHome = "";',
'private string pythonHome = "{}";'.format(
sys.base_prefix.replace("\\", "/")))
with open(os.path.join(
uep_path, "Source",
"UnrealEnginePython",
"UnrealEnginePython.Build.cs"), mode="w") as f:
f.write(fix)
# write project file # write project file
project_file = os.path.join(pr_dir, "{}.uproject".format(project_name)) project_file = pr_dir / f"{project_name}.uproject"
with open(project_file, mode="w") as pf: with open(project_file, mode="w") as pf:
json.dump(data, pf, indent=4) json.dump(data, pf, indent=4)
# UE < 4.26 have Python2 by default, so we need PySide # ensure we have PySide2 installed in engine
# but we will not need it in 4.26 and up python_path = None
if int(ue_version.split(".")[1]) < 26: if platform.system().lower() == "windows":
# ensure we have PySide installed in engine python_path = engine_path / ("Engine/Binaries/ThirdParty/"
# TODO: make it work for other platforms 🍎 🐧 "Python3/Win64/pythonw.exe")
if platform.system().lower() == "windows":
python_path = os.path.join(engine_path, "Engine", "Binaries",
"ThirdParty", "Python", "Win64",
"python.exe")
subprocess.run([python_path, "-m", if platform.system().lower() == "linux":
"pip", "install", "pyside"]) python_path = engine_path / ("Engine/Binaries/ThirdParty/"
"Python3/Linux/bin/python3")
if platform.system().lower() == "darwin":
python_path = engine_path / ("Engine/Binaries/ThirdParty/"
"Python3/Mac/bin/python3")
if not python_path:
raise NotImplementedError("Unsupported platform")
if not python_path.exists():
raise RuntimeError(f"Unreal Python not found at {python_path}")
subprocess.run(
[python_path.as_posix(), "-m", "pip", "install", "pyside2"])
if dev_mode or preset["dev_mode"]: if dev_mode or preset["dev_mode"]:
_prepare_cpp_project(project_file, engine_path) _prepare_cpp_project(project_file, engine_path)
def _prepare_cpp_project(project_file: str, engine_path: str) -> None: def _prepare_cpp_project(project_file: Path, engine_path: Path) -> None:
""" """Prepare CPP Unreal Project.
This function will add source files needed for project to be This function will add source files needed for project to be
rebuild along with the avalon integration plugin. rebuild along with the avalon integration plugin.
@ -273,19 +310,19 @@ def _prepare_cpp_project(project_file: str, engine_path: str) -> None:
by some generator. This needs more research as manually writing by some generator. This needs more research as manually writing
those files is rather hackish. :skull_and_crossbones: those files is rather hackish. :skull_and_crossbones:
:param project_file: path to .uproject file
:type project_file: str Args:
:param engine_path: path to unreal engine associated with project project_file (str): Path to .uproject file.
:type engine_path: str engine_path (str): Path to unreal engine associated with project.
""" """
project_name = project_file.stem
project_dir = project_file.parent
targets_dir = project_dir / "Source"
sources_dir = targets_dir / project_name
project_name = os.path.splitext(os.path.basename(project_file))[0] sources_dir.mkdir(parents=True, exist_ok=True)
project_dir = os.path.dirname(project_file) (project_dir / "Content").mkdir(parents=True, exist_ok=True)
targets_dir = os.path.join(project_dir, "Source")
sources_dir = os.path.join(targets_dir, project_name)
os.makedirs(sources_dir, exist_ok=True)
os.makedirs(os.path.join(project_dir, "Content"), exist_ok=True)
module_target = ''' module_target = '''
using UnrealBuildTool; using UnrealBuildTool;
@ -360,59 +397,59 @@ class {1}_API A{0}GameModeBase : public AGameModeBase
}}; }};
'''.format(project_name, project_name.upper()) '''.format(project_name, project_name.upper())
with open(os.path.join( with open(targets_dir / f"{project_name}.Target.cs", mode="w") as f:
targets_dir, f"{project_name}.Target.cs"), mode="w") as f:
f.write(module_target) f.write(module_target)
with open(os.path.join( with open(targets_dir / f"{project_name}Editor.Target.cs", mode="w") as f:
targets_dir, f"{project_name}Editor.Target.cs"), mode="w") as f:
f.write(editor_module_target) f.write(editor_module_target)
with open(os.path.join( with open(sources_dir / f"{project_name}.Build.cs", mode="w") as f:
sources_dir, f"{project_name}.Build.cs"), mode="w") as f:
f.write(module_build) f.write(module_build)
with open(os.path.join( with open(sources_dir / f"{project_name}.cpp", mode="w") as f:
sources_dir, f"{project_name}.cpp"), mode="w") as f:
f.write(module_cpp) f.write(module_cpp)
with open(os.path.join( with open(sources_dir / f"{project_name}.h", mode="w") as f:
sources_dir, f"{project_name}.h"), mode="w") as f:
f.write(module_header) f.write(module_header)
with open(os.path.join( with open(sources_dir / f"{project_name}GameModeBase.cpp", mode="w") as f:
sources_dir, f"{project_name}GameModeBase.cpp"), mode="w") as f:
f.write(game_mode_cpp) f.write(game_mode_cpp)
with open(os.path.join( with open(sources_dir / f"{project_name}GameModeBase.h", mode="w") as f:
sources_dir, f"{project_name}GameModeBase.h"), mode="w") as f:
f.write(game_mode_h) f.write(game_mode_h)
u_build_tool = Path(
engine_path / "Engine/Binaries/DotNET/UnrealBuildTool.exe")
u_header_tool = None
arch = "Win64"
if platform.system().lower() == "windows": if platform.system().lower() == "windows":
u_build_tool = (f"{engine_path}/Engine/Binaries/DotNET/" arch = "Win64"
"UnrealBuildTool.exe") u_header_tool = Path(
u_header_tool = (f"{engine_path}/Engine/Binaries/Win64/" engine_path / "Engine/Binaries/Win64/UnrealHeaderTool.exe")
f"UnrealHeaderTool.exe")
elif platform.system().lower() == "linux": elif platform.system().lower() == "linux":
# WARNING: there is no UnrealBuildTool on linux? arch = "Linux"
u_build_tool = "" u_header_tool = Path(
u_header_tool = "" engine_path / "Engine/Binaries/Linux/UnrealHeaderTool")
elif platform.system().lower() == "darwin": elif platform.system().lower() == "darwin":
# WARNING: there is no UnrealBuildTool on Mac? # we need to test this out
u_build_tool = "" arch = "Mac"
u_header_tool = "" u_header_tool = Path(
engine_path / "Engine/Binaries/Mac/UnrealHeaderTool")
u_build_tool = u_build_tool.replace("\\", "/") if not u_header_tool:
u_header_tool = u_header_tool.replace("\\", "/") raise NotImplementedError("Unsupported platform")
command1 = [u_build_tool, "-projectfiles", f"-project={project_file}", command1 = [u_build_tool.as_posix(), "-projectfiles",
"-progress"] f"-project={project_file}", "-progress"]
subprocess.run(command1) subprocess.run(command1)
command2 = [u_build_tool, f"-ModuleWithSuffix={project_name},3555" command2 = [u_build_tool.as_posix(),
"Win64", "Development", "-TargetType=Editor" f"-ModuleWithSuffix={project_name},3555", arch,
f'-Project="{project_file}"', f'"{project_file}"' "Development", "-TargetType=Editor",
f'-Project={project_file}',
f'{project_file}',
"-IgnoreJunk"] "-IgnoreJunk"]
subprocess.run(command2) subprocess.run(command2)

View file

@ -1,31 +1,49 @@
# -*- coding: utf-8 -*-
"""Hook to launch Unreal and prepare projects."""
import os import os
from pathlib import Path
from openpype.lib import ( from openpype.lib import (
PreLaunchHook, PreLaunchHook,
ApplicationLaunchFailed ApplicationLaunchFailed,
ApplicationNotFound
) )
from openpype.hosts.unreal.api import lib as unreal_lib from openpype.hosts.unreal.api import lib as unreal_lib
class UnrealPrelaunchHook(PreLaunchHook): class UnrealPrelaunchHook(PreLaunchHook):
""" """Hook to handle launching Unreal.
This hook will check if the current workfile path has an Unreal This hook will check if the current workfile path has an Unreal
project inside. If not, it initializes it and finally passes the project inside. If not, it initializes it and finally passes the
path to the project via environment variable to the Unreal launcher path to the project via environment variable to the Unreal launcher
shell script. shell script.
"""
"""
def __init__(self, *args, **kwargs): def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs) super().__init__(*args, **kwargs)
self.signature = "( {} )".format(self.__class__.__name__) self.signature = "( {} )".format(self.__class__.__name__)
def execute(self): def execute(self):
"""Hook entry method."""
asset_name = self.data["asset_name"] asset_name = self.data["asset_name"]
task_name = self.data["task_name"] task_name = self.data["task_name"]
workdir = self.launch_context.env["AVALON_WORKDIR"] workdir = self.launch_context.env["AVALON_WORKDIR"]
engine_version = self.app_name.split("/")[-1].replace("-", ".") engine_version = self.app_name.split("/")[-1].replace("-", ".")
unreal_project_name = f"{asset_name}_{task_name}" unreal_project_name = f"{asset_name}_{task_name}"
try:
if int(engine_version.split(".")[0]) < 4 and \
int(engine_version.split(".")[1]) < 26:
raise ApplicationLaunchFailed((
f"{self.signature} Old unsupported version of UE4 "
f"detected - {engine_version}"))
except ValueError:
# there can be a string in the minor version and in that
# case the int cast fails. This probably happens only with
# early access versions and is of no concern for this check,
# so let's keep it quiet.
...
# Unreal is sensitive about project names longer than 20 chars # Unreal is sensitive about project names longer than 20 chars
if len(unreal_project_name) > 20: if len(unreal_project_name) > 20:
@ -45,19 +63,21 @@ class UnrealPrelaunchHook(PreLaunchHook):
)) ))
unreal_project_name = f"P{unreal_project_name}" unreal_project_name = f"P{unreal_project_name}"
project_path = os.path.join(workdir, unreal_project_name) project_path = Path(os.path.join(workdir, unreal_project_name))
self.log.info(( self.log.info((
f"{self.signature} requested UE4 version: " f"{self.signature} requested UE4 version: "
f"[ {engine_version} ]" f"[ {engine_version} ]"
)) ))
detected = unreal_lib.get_engine_versions() detected = unreal_lib.get_engine_versions(self.launch_context.env)
detected_str = ', '.join(detected.keys()) or 'none' detected_str = ', '.join(detected.keys()) or 'none'
self.log.info(( self.log.info((
f"{self.signature} detected UE4 versions: " f"{self.signature} detected UE4 versions: "
f"[ {detected_str} ]" f"[ {detected_str} ]"
)) ))
if not detected:
raise ApplicationNotFound("No Unreal Engines are found.")
engine_version = ".".join(engine_version.split(".")[:2]) engine_version = ".".join(engine_version.split(".")[:2])
if engine_version not in detected.keys(): if engine_version not in detected.keys():
@ -66,13 +86,14 @@ class UnrealPrelaunchHook(PreLaunchHook):
f"detected [ {engine_version} ]" f"detected [ {engine_version} ]"
)) ))
os.makedirs(project_path, exist_ok=True) ue4_path = unreal_lib.get_editor_executable_path(
Path(detected[engine_version]))
project_file = os.path.join( self.launch_context.launch_args.append(ue4_path.as_posix())
project_path, project_path.mkdir(parents=True, exist_ok=True)
f"{unreal_project_name}.uproject"
) project_file = project_path / f"{unreal_project_name}.uproject"
if not os.path.isfile(project_file): if not project_file.is_file():
engine_path = detected[engine_version] engine_path = detected[engine_version]
self.log.info(( self.log.info((
f"{self.signature} creating unreal " f"{self.signature} creating unreal "
@ -88,8 +109,9 @@ class UnrealPrelaunchHook(PreLaunchHook):
unreal_project_name, unreal_project_name,
engine_version, engine_version,
project_path, project_path,
engine_path=engine_path engine_path=Path(engine_path)
) )
# Append project file to launch arguments # Append project file to launch arguments
self.launch_context.launch_args.append(f"\"{project_file}\"") self.launch_context.launch_args.append(
f"\"{project_file.as_posix()}\"")

View file

@ -18,6 +18,48 @@ import pyblish.api
from .abstract_metaplugins import AbstractMetaInstancePlugin from .abstract_metaplugins import AbstractMetaInstancePlugin
def requests_post(*args, **kwargs):
"""Wrap request post method.
Disables SSL certificate validation if the ``OPENPYPE_DONT_VERIFY_SSL``
environment variable is set. This is useful when the Deadline or Muster
server is running with a self-signed certificate that is not added to
the trusted certificates on client machines.
Warning:
Disabling SSL certificate validation defeats one line
of defense that SSL provides and is not recommended.
"""
if 'verify' not in kwargs:
# verify certificates unless OPENPYPE_DONT_VERIFY_SSL is set
kwargs['verify'] = not os.getenv("OPENPYPE_DONT_VERIFY_SSL")
# add 10sec timeout before bailing out
kwargs['timeout'] = 10
return requests.post(*args, **kwargs)
def requests_get(*args, **kwargs):
"""Wrap request get method.
Disables SSL certificate validation if the ``OPENPYPE_DONT_VERIFY_SSL``
environment variable is set. This is useful when the Deadline or Muster
server is running with a self-signed certificate that is not added to
the trusted certificates on client machines.
Warning:
Disabling SSL certificate validation defeats one line
of defense that SSL provides and is not recommended.
"""
if 'verify' not in kwargs:
# verify certificates unless OPENPYPE_DONT_VERIFY_SSL is set
kwargs['verify'] = not os.getenv("OPENPYPE_DONT_VERIFY_SSL")
# add 10sec timeout before bailing out
kwargs['timeout'] = 10
return requests.get(*args, **kwargs)
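A minimal sketch of how the wrappers behave with and without the environment variable (hypothetical URL, actual calls commented out):

```python
import os

# With the variable set, SSL verification is skipped and a 10s timeout applies:
os.environ["OPENPYPE_DONT_VERIFY_SSL"] = "1"
# requests_get("https://deadline.example:4434/api/jobs")  # hypothetical URL

# With the variable unset, certificates are verified as usual:
os.environ.pop("OPENPYPE_DONT_VERIFY_SSL", None)
# requests_get("https://deadline.example:4434/api/jobs")
```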
@attr.s @attr.s
class DeadlineJobInfo(object): class DeadlineJobInfo(object):
"""Mapping of all Deadline *JobInfo* attributes. """Mapping of all Deadline *JobInfo* attributes.
@ -579,7 +621,7 @@ class AbstractSubmitDeadline(pyblish.api.InstancePlugin):
""" """
url = "{}/api/jobs".format(self._deadline_url) url = "{}/api/jobs".format(self._deadline_url)
response = self._requests_post(url, json=payload) response = requests_post(url, json=payload)
if not response.ok: if not response.ok:
self.log.error("Submission failed!") self.log.error("Submission failed!")
self.log.error(response.status_code) self.log.error(response.status_code)
@ -592,41 +634,3 @@ class AbstractSubmitDeadline(pyblish.api.InstancePlugin):
self._instance.data["deadlineSubmissionJob"] = result self._instance.data["deadlineSubmissionJob"] = result
return result["_id"] return result["_id"]
def _requests_post(self, *args, **kwargs):
"""Wrap request post method.
Disabling SSL certificate validation if ``DONT_VERIFY_SSL`` environment
variable is found. This is useful when Deadline or Muster server are
running with self-signed certificates and their certificate is not
added to trusted certificates on client machines.
Warning:
Disabling SSL certificate validation is defeating one line
of defense SSL is providing and it is not recommended.
"""
if 'verify' not in kwargs:
kwargs['verify'] = False if os.getenv("OPENPYPE_DONT_VERIFY_SSL", True) else True # noqa
# add 10sec timeout before bailing out
kwargs['timeout'] = 10
return requests.post(*args, **kwargs)
def _requests_get(self, *args, **kwargs):
"""Wrap request get method.
Disabling SSL certificate validation if ``DONT_VERIFY_SSL`` environment
variable is found. This is useful when Deadline or Muster server are
running with self-signed certificates and their certificate is not
added to trusted certificates on client machines.
Warning:
Disabling SSL certificate validation is defeating one line
of defense SSL is providing and it is not recommended.
"""
if 'verify' not in kwargs:
kwargs['verify'] = False if os.getenv("OPENPYPE_DONT_VERIFY_SSL", True) else True # noqa
# add 10sec timeout before bailing out
kwargs['timeout'] = 10
return requests.get(*args, **kwargs)

View file

@ -733,6 +733,9 @@ class Templates:
continue continue
default_key_values[key] = templates.pop(key) default_key_values[key] = templates.pop(key)
# Pop "others" key before before expected keys are processed
other_templates = templates.pop("others") or {}
keys_by_subkey = {} keys_by_subkey = {}
for sub_key, sub_value in templates.items(): for sub_key, sub_value in templates.items():
key_values = {} key_values = {}
@ -740,7 +743,6 @@ class Templates:
key_values.update(sub_value) key_values.update(sub_value)
keys_by_subkey[sub_key] = cls.prepare_inner_keys(key_values) keys_by_subkey[sub_key] = cls.prepare_inner_keys(key_values)
other_templates = templates.get("others") or {}
for sub_key, sub_value in other_templates.items(): for sub_key, sub_value in other_templates.items():
if sub_key in keys_by_subkey: if sub_key in keys_by_subkey:
log.warning(( log.warning((

View file

@ -180,7 +180,7 @@ class Application:
if group.enabled: if group.enabled:
enabled = data.get("enabled", True) enabled = data.get("enabled", True)
self.enabled = enabled self.enabled = enabled
self.use_python_2 = data["use_python_2"] self.use_python_2 = data.get("use_python_2", False)
self.label = data.get("variant_label") or name self.label = data.get("variant_label") or name
self.full_name = "/".join((group.name, name)) self.full_name = "/".join((group.name, name))
@ -192,26 +192,32 @@ class Application:
self.full_label = full_label self.full_label = full_label
self._environment = data.get("environment") or {} self._environment = data.get("environment") or {}
arguments = data.get("arguments")
if isinstance(arguments, dict):
arguments = arguments.get(platform.system().lower())
if not arguments:
arguments = []
self.arguments = arguments
if "executables" not in data:
self.executables = [
UndefinedApplicationExecutable()
]
return
_executables = data["executables"] _executables = data["executables"]
if isinstance(_executables, dict):
_executables = _executables.get(platform.system().lower())
if not _executables: if not _executables:
_executables = [] _executables = []
elif isinstance(_executables, dict):
_executables = _executables.get(platform.system().lower()) or []
_arguments = data["arguments"]
if not _arguments:
_arguments = []
elif isinstance(_arguments, dict):
_arguments = _arguments.get(platform.system().lower()) or []
executables = [] executables = []
for executable in _executables: for executable in _executables:
executables.append(ApplicationExecutable(executable)) executables.append(ApplicationExecutable(executable))
self.executables = executables self.executables = executables
self.arguments = _arguments
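A hypothetical settings snippet matching this per-platform parsing (names and paths are made up):

```python
# Hypothetical settings data matching the per-platform parsing above:
data = {
    "executables": {
        "windows": ["C:/Program Files/Foo/foo.exe"],
        "linux": ["/usr/bin/foo"],
        "darwin": ["/Applications/Foo.app"],
    },
    "arguments": {
        "windows": ["--windowed"],
        "linux": [],
        "darwin": [],
    },
}
# On Linux this resolves to _executables == ["/usr/bin/foo"]
# and _arguments == [].
```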
def __repr__(self): def __repr__(self):
return "<{}> - {}".format(self.__class__.__name__, self.full_name) return "<{}> - {}".format(self.__class__.__name__, self.full_name)
@ -444,6 +450,12 @@ class ApplicationExecutable:
"""Representation of executable loaded from settings.""" """Representation of executable loaded from settings."""
def __init__(self, executable): def __init__(self, executable):
# Try to format executable with environments
try:
executable = executable.format(**os.environ)
except Exception:
pass
# On MacOS check if exists path to executable when ends with `.app` # On MacOS check if exists path to executable when ends with `.app`
# - it is common that path will lead to "/Applications/Blender" but # - it is common that path will lead to "/Applications/Blender" but
# real path is "/Applications/Blender.app" # real path is "/Applications/Blender.app"
@ -485,6 +497,27 @@ class ApplicationExecutable:
return bool(self._realpath()) return bool(self._realpath())
class UndefinedApplicationExecutable(ApplicationExecutable):
"""Some applications do not require executable path from settings.
In that case this class is used to "fake" existing executable.
"""
def __init__(self):
pass
def __str__(self):
return self.__class__.__name__
def __repr__(self):
return "<{}>".format(self.__class__.__name__)
def as_args(self):
return []
def exists(self):
return True
@six.add_metaclass(ABCMeta) @six.add_metaclass(ABCMeta)
class LaunchHook: class LaunchHook:
"""Abstract base class of launch hook.""" """Abstract base class of launch hook."""
@ -1131,6 +1164,9 @@ def prepare_host_environments(data, implementation_envs=True):
def apply_project_environments_value(project_name, env, project_settings=None): def apply_project_environments_value(project_name, env, project_settings=None):
"""Apply project specific environments on passed environments. """Apply project specific environments on passed environments.
The environments are applied to the passed `env` argument value so it is not
required to apply changes back.
Args: Args:
project_name (str): Name of project for which environments should be project_name (str): Name of project for which environments should be
received. received.
@ -1139,6 +1175,9 @@ def apply_project_environments_value(project_name, env, project_settings=None):
project_settings (dict): Project settings for passed project name. project_settings (dict): Project settings for passed project name.
Optional if project settings are already prepared. Optional if project settings are already prepared.
Returns:
dict: Passed env values with applied project environments.
Raises: Raises:
KeyError: If project settings do not contain keys for project specific KeyError: If project settings do not contain keys for project specific
environments. environments.
@ -1149,10 +1188,9 @@ def apply_project_environments_value(project_name, env, project_settings=None):
project_settings = get_project_settings(project_name) project_settings = get_project_settings(project_name)
env_value = project_settings["global"]["project_environments"] env_value = project_settings["global"]["project_environments"]
if not env_value: if env_value:
return env env.update(_merge_env(acre.parse(env_value), env))
parsed = acre.parse(env_value) return env
return _merge_env(parsed, env)
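A short sketch of the in-place contract described in the docstring (hypothetical project name; requires a configured OpenPype project):

```python
import os

# Hypothetical usage; "MyProject" must exist in the OpenPype database.
env = dict(os.environ)
result = apply_project_environments_value("MyProject", env)
# `result is env` holds: project overrides were merged into `env` in place.
```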
def prepare_context_environments(data): def prepare_context_environments(data):
@ -1181,9 +1219,8 @@ def prepare_context_environments(data):
# Load project specific environments # Load project specific environments
project_name = project_doc["name"] project_name = project_doc["name"]
data["env"] = apply_project_environments_value( # Apply project specific environments on current env value
project_name, data["env"] apply_project_environments_value(project_name, data["env"])
)
app = data["app"] app = data["app"]
workdir_data = get_workdir_data( workdir_data = get_workdir_data(

View file

@ -7,6 +7,8 @@ try:
import opentimelineio as otio import opentimelineio as otio
from opentimelineio import opentime as _ot from opentimelineio import opentime as _ot
except ImportError: except ImportError:
if not os.environ.get("AVALON_APP"):
raise
otio = discover_host_vendor_module("opentimelineio") otio = discover_host_vendor_module("opentimelineio")
_ot = discover_host_vendor_module("opentimelineio.opentime") _ot = discover_host_vendor_module("opentimelineio.opentime")

View file

@ -89,8 +89,13 @@ def ffprobe_streams(path_to_file, logger=None):
popen_stdout, popen_stderr = popen.communicate() popen_stdout, popen_stderr = popen.communicate()
if popen_stdout: if popen_stdout:
logger.debug("ffprobe stdout: {}".format(popen_stdout)) logger.debug("FFprobe stdout:\n{}".format(
popen_stdout.decode("utf-8")
))
if popen_stderr: if popen_stderr:
logger.debug("ffprobe stderr: {}".format(popen_stderr)) logger.warning("FFprobe stderr:\n{}".format(
popen_stderr.decode("utf-8")
))
return json.loads(popen_stdout)["streams"] return json.loads(popen_stdout)["streams"]

View file

@ -36,6 +36,7 @@ class ClockifyAPI:
self._secure_registry = None self._secure_registry = None
@property
def secure_registry(self): def secure_registry(self):
if self._secure_registry is None: if self._secure_registry is None:
self._secure_registry = OpenPypeSecureRegistry("clockify") self._secure_registry = OpenPypeSecureRegistry("clockify")

View file

@ -1,6 +1,5 @@
from Qt import QtCore, QtGui, QtWidgets from Qt import QtCore, QtGui, QtWidgets
from avalon import style from openpype import resources, style
from openpype import resources
class MessageWidget(QtWidgets.QWidget): class MessageWidget(QtWidgets.QWidget):
@ -22,14 +21,6 @@ class MessageWidget(QtWidgets.QWidget):
QtCore.Qt.WindowMinimizeButtonHint QtCore.Qt.WindowMinimizeButtonHint
) )
# Font
self.font = QtGui.QFont()
self.font.setFamily("DejaVu Sans Condensed")
self.font.setPointSize(9)
self.font.setBold(True)
self.font.setWeight(50)
self.font.setKerning(True)
# Size setting # Size setting
self.resize(self.SIZE_W, self.SIZE_H) self.resize(self.SIZE_W, self.SIZE_H)
self.setMinimumSize(QtCore.QSize(self.SIZE_W, self.SIZE_H)) self.setMinimumSize(QtCore.QSize(self.SIZE_W, self.SIZE_H))
@ -53,7 +44,6 @@ class MessageWidget(QtWidgets.QWidget):
labels = [] labels = []
for message in messages: for message in messages:
label = QtWidgets.QLabel(message) label = QtWidgets.QLabel(message)
label.setFont(self.font)
label.setCursor(QtGui.QCursor(QtCore.Qt.ArrowCursor)) label.setCursor(QtGui.QCursor(QtCore.Qt.ArrowCursor))
label.setTextFormat(QtCore.Qt.RichText) label.setTextFormat(QtCore.Qt.RichText)
label.setWordWrap(True) label.setWordWrap(True)
@ -103,84 +93,64 @@ class ClockifySettings(QtWidgets.QWidget):
icon = QtGui.QIcon(resources.pype_icon_filepath()) icon = QtGui.QIcon(resources.pype_icon_filepath())
self.setWindowIcon(icon) self.setWindowIcon(icon)
self.setWindowTitle("Clockify settings")
self.setWindowFlags( self.setWindowFlags(
QtCore.Qt.WindowCloseButtonHint | QtCore.Qt.WindowCloseButtonHint |
QtCore.Qt.WindowMinimizeButtonHint QtCore.Qt.WindowMinimizeButtonHint
) )
self._translate = QtCore.QCoreApplication.translate
# Font
self.font = QtGui.QFont()
self.font.setFamily("DejaVu Sans Condensed")
self.font.setPointSize(9)
self.font.setBold(True)
self.font.setWeight(50)
self.font.setKerning(True)
# Size setting # Size setting
self.resize(self.SIZE_W, self.SIZE_H) self.resize(self.SIZE_W, self.SIZE_H)
self.setMinimumSize(QtCore.QSize(self.SIZE_W, self.SIZE_H)) self.setMinimumSize(QtCore.QSize(self.SIZE_W, self.SIZE_H))
self.setMaximumSize(QtCore.QSize(self.SIZE_W+100, self.SIZE_H+100)) self.setMaximumSize(QtCore.QSize(self.SIZE_W+100, self.SIZE_H+100))
self.setStyleSheet(style.load_stylesheet()) self.setStyleSheet(style.load_stylesheet())
self.setLayout(self._main()) self._ui_init()
self.setWindowTitle('Clockify settings')
def _main(self): def _ui_init(self):
self.main = QtWidgets.QVBoxLayout() label_api_key = QtWidgets.QLabel("Clockify API key:")
self.main.setObjectName("main")
self.form = QtWidgets.QFormLayout() input_api_key = QtWidgets.QLineEdit()
self.form.setContentsMargins(10, 15, 10, 5) input_api_key.setFrame(True)
self.form.setObjectName("form") input_api_key.setPlaceholderText("e.g. XX1XxXX2x3x4xXxx")
self.label_api_key = QtWidgets.QLabel("Clockify API key:") error_label = QtWidgets.QLabel("")
self.label_api_key.setFont(self.font) error_label.setTextFormat(QtCore.Qt.RichText)
self.label_api_key.setCursor(QtGui.QCursor(QtCore.Qt.ArrowCursor)) error_label.setWordWrap(True)
self.label_api_key.setTextFormat(QtCore.Qt.RichText) error_label.hide()
self.label_api_key.setObjectName("label_api_key")
self.input_api_key = QtWidgets.QLineEdit() form_layout = QtWidgets.QFormLayout()
self.input_api_key.setEnabled(True) form_layout.setContentsMargins(10, 15, 10, 5)
self.input_api_key.setFrame(True) form_layout.addRow(label_api_key, input_api_key)
self.input_api_key.setObjectName("input_api_key") form_layout.addRow(error_label)
self.input_api_key.setPlaceholderText(
self._translate("main", "e.g. XX1XxXX2x3x4xXxx")
)
self.error_label = QtWidgets.QLabel("") btn_ok = QtWidgets.QPushButton("Ok")
self.error_label.setFont(self.font) btn_ok.setToolTip('Sets Clockify API key so you can start/stop the timer')
self.error_label.setTextFormat(QtCore.Qt.RichText)
self.error_label.setObjectName("error_label")
self.error_label.setWordWrap(True)
self.error_label.hide()
self.form.addRow(self.label_api_key, self.input_api_key) btn_cancel = QtWidgets.QPushButton("Cancel")
self.form.addRow(self.error_label)
self.btn_group = QtWidgets.QHBoxLayout()
self.btn_group.addStretch(1)
self.btn_group.setObjectName("btn_group")
self.btn_ok = QtWidgets.QPushButton("Ok")
self.btn_ok.setToolTip('Sets Clockify API Key so can Start/Stop timer')
self.btn_ok.clicked.connect(self.click_ok)
self.btn_cancel = QtWidgets.QPushButton("Cancel")
cancel_tooltip = 'Application won\'t start' cancel_tooltip = 'Application won\'t start'
if self.optional: if self.optional:
cancel_tooltip = 'Close this window' cancel_tooltip = 'Close this window'
self.btn_cancel.setToolTip(cancel_tooltip) btn_cancel.setToolTip(cancel_tooltip)
self.btn_cancel.clicked.connect(self._close_widget)
self.btn_group.addWidget(self.btn_ok) btn_group = QtWidgets.QHBoxLayout()
self.btn_group.addWidget(self.btn_cancel) btn_group.addStretch(1)
btn_group.addWidget(btn_ok)
btn_group.addWidget(btn_cancel)
self.main.addLayout(self.form) main_layout = QtWidgets.QVBoxLayout(self)
self.main.addLayout(self.btn_group) main_layout.addLayout(form_layout)
main_layout.addLayout(btn_group)
return self.main btn_ok.clicked.connect(self.click_ok)
btn_cancel.clicked.connect(self._close_widget)
self.label_api_key = label_api_key
self.input_api_key = input_api_key
self.error_label = error_label
self.btn_ok = btn_ok
self.btn_cancel = btn_cancel
def setError(self, msg): def setError(self, msg):
self.error_label.setText(msg) self.error_label.setText(msg)
@ -212,6 +182,17 @@ class ClockifySettings(QtWidgets.QWidget):
"Entered invalid API key" "Entered invalid API key"
) )
def showEvent(self, event):
super(ClockifySettings, self).showEvent(event)
# Make btns same width
max_width = max(
self.btn_ok.sizeHint().width(),
self.btn_cancel.sizeHint().width()
)
self.btn_ok.setMinimumWidth(max_width)
self.btn_cancel.setMinimumWidth(max_width)
def closeEvent(self, event): def closeEvent(self, event):
if self.optional is True: if self.optional is True:
event.ignore() event.ignore()

View file

@ -271,6 +271,22 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin):
["DEADLINE_REST_URL"] ["DEADLINE_REST_URL"]
) )
self._job_info = (
context.data["project_settings"].get(
"deadline", {}).get(
"publish", {}).get(
"MayaSubmitDeadline", {}).get(
"jobInfo", {})
)
self._plugin_info = (
context.data["project_settings"].get(
"deadline", {}).get(
"publish", {}).get(
"MayaSubmitDeadline", {}).get(
"pluginInfo", {})
)
assert self._deadline_url, "Requires DEADLINE_REST_URL" assert self._deadline_url, "Requires DEADLINE_REST_URL"
context = instance.context context = instance.context
@ -407,7 +423,7 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin):
self.payload_skeleton["JobInfo"]["Priority"] = \ self.payload_skeleton["JobInfo"]["Priority"] = \
self._instance.data.get("priority", 50) self._instance.data.get("priority", 50)
if self.group != "none": if self.group != "none" and self.group:
self.payload_skeleton["JobInfo"]["Group"] = self.group self.payload_skeleton["JobInfo"]["Group"] = self.group
if self.limit_groups: if self.limit_groups:
@ -536,6 +552,10 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin):
self.preflight_check(instance) self.preflight_check(instance)
# add jobInfo and pluginInfo variables from Settings
payload["JobInfo"].update(self._job_info)
payload["PluginInfo"].update(self._plugin_info)
# Prepare tiles data ------------------------------------------------ # Prepare tiles data ------------------------------------------------
if instance.data.get("tileRendering"): if instance.data.get("tileRendering"):
# if we have sequence of files, we need to create tile job for # if we have sequence of files, we need to create tile job for

View file

@ -32,6 +32,8 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin):
department = "" department = ""
limit_groups = {} limit_groups = {}
use_gpu = False use_gpu = False
env_allowed_keys = []
env_search_replace_values = {}
def process(self, instance): def process(self, instance):
instance.data["toBeRenderedOn"] = "deadline" instance.data["toBeRenderedOn"] = "deadline"
@ -242,19 +244,19 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin):
"PYBLISHPLUGINPATH", "PYBLISHPLUGINPATH",
"NUKE_PATH", "NUKE_PATH",
"TOOL_ENV", "TOOL_ENV",
"OPENPYPE_DEV",
"FOUNDRY_LICENSE" "FOUNDRY_LICENSE"
] ]
# add allowed keys from preset if any
if self.env_allowed_keys:
keys += self.env_allowed_keys
environment = dict({key: os.environ[key] for key in keys environment = dict({key: os.environ[key] for key in keys
if key in os.environ}, **api.Session) if key in os.environ}, **api.Session)
# self.log.debug("enviro: {}".format(pprint(environment))) # self.log.debug("enviro: {}".format(pprint(environment)))
for path in os.environ:
if path.lower().startswith('pype_'): for _path in os.environ:
environment[path] = os.environ[path] if _path.lower().startswith('openpype_'):
if path.lower().startswith('nuke_'): environment[_path] = os.environ[_path]
environment[path] = os.environ[path]
if 'license' in path.lower():
environment[path] = os.environ[path]
clean_environment = {} clean_environment = {}
for key, value in environment.items(): for key, value in environment.items():
@ -285,6 +287,13 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin):
environment = clean_environment environment = clean_environment
# to recognize job from PYPE for turning Event On/Off # to recognize job from PYPE for turning Event On/Off
environment["OPENPYPE_RENDER_JOB"] = "1" environment["OPENPYPE_RENDER_JOB"] = "1"
# finally, apply search & replace to the values of all keys
if self.env_search_replace_values:
    for key, value in environment.items():
        for _k, _v in self.env_search_replace_values.items():
            value = value.replace(_k, _v)
        environment[key] = value
payload["JobInfo"].update({ payload["JobInfo"].update({
"EnvironmentKeyValue%d" % index: "{key}={value}".format( "EnvironmentKeyValue%d" % index: "{key}={value}".format(
key=key, key=key,

View file

@ -231,7 +231,8 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
args = [ args = [
'publish', 'publish',
roothless_metadata_path roothless_metadata_path,
"--targets {}".format("deadline")
] ]
# Generate the payload for Deadline submission # Generate the payload for Deadline submission

View file

@ -0,0 +1,186 @@
import os
import json
import pyblish.api
from avalon.vendor import requests
from openpype.api import get_system_settings
from openpype.lib.abstract_submit_deadline import requests_get
from openpype.lib.delivery import collect_frames
class ValidateExpectedFiles(pyblish.api.InstancePlugin):
"""Compare rendered and expected files"""
label = "Validate rendered files from Deadline"
order = pyblish.api.ValidatorOrder
families = ["render"]
targets = ["deadline"]
# check if the actual frame range on the render job wasn't different
# case when the artist wants to render only a subset of frames
allow_user_override = True
def process(self, instance):
frame_list = self._get_frame_list(instance.data["render_job_id"])
for repre in instance.data["representations"]:
expected_files = self._get_expected_files(repre)
staging_dir = repre["stagingDir"]
existing_files = self._get_existing_files(staging_dir)
expected_non_existent = expected_files.difference(
existing_files)
if len(expected_non_existent) != 0:
self.log.info("Some expected files missing {}".format(
expected_non_existent))
if self.allow_user_override:
file_name_template, frame_placeholder = \
self._get_file_name_template_and_placeholder(
expected_files)
if not file_name_template:
return
real_expected_rendered = self._get_real_render_expected(
file_name_template,
frame_placeholder,
frame_list)
real_expected_non_existent = \
real_expected_rendered.difference(existing_files)
if len(real_expected_non_existent) != 0:
raise RuntimeError("Still missing some files {}".
format(real_expected_non_existent))
self.log.info("Update range from actual job range")
repre["files"] = sorted(list(real_expected_rendered))
else:
raise RuntimeError("Some expected files missing {}".format(
expected_non_existent))
def _get_frame_list(self, original_job_id):
"""
Returns list of frame ranges from all render job.
Render job might be requeried so job_id in metadata.json is invalid
GlobalJobPreload injects current ids to RENDER_JOB_IDS.
Args:
original_job_id (str)
Returns:
(list)
"""
all_frame_lists = []
render_job_ids = os.environ.get("RENDER_JOB_IDS")
if render_job_ids:
render_job_ids = render_job_ids.split(',')
else: # fallback
render_job_ids = [original_job_id]
for job_id in render_job_ids:
job_info = self._get_job_info(job_id)
frame_list = job_info["Props"]["Frames"]
if frame_list:
all_frame_lists.extend(frame_list.split(','))
return all_frame_lists
def _get_real_render_expected(self, file_name_template, frame_placeholder,
frame_list):
"""
Calculates list of names of expected rendered files.
Might be different from job expected files if user explicitly and
manually change frame list on Deadline job.
"""
real_expected_rendered = set()
src_padding_exp = "%0{}d".format(len(frame_placeholder))
for frames in frame_list:
if '-' not in frames: # single frame
frames = "{}-{}".format(frames, frames)
start, end = frames.split('-')
for frame in range(int(start), int(end) + 1):
ren_name = file_name_template.replace(
frame_placeholder, src_padding_exp % frame)
real_expected_rendered.add(ren_name)
return real_expected_rendered
def _get_file_name_template_and_placeholder(self, files):
"""Returns file name with frame replaced with # and this placeholder"""
sources_and_frames = collect_frames(files)
file_name_template = frame_placeholder = None
for file_name, frame in sources_and_frames.items():
frame_placeholder = "#" * len(frame)
file_name_template = os.path.basename(
file_name.replace(frame, frame_placeholder))
break
return file_name_template, frame_placeholder
def _get_job_info(self, job_id):
"""
Calls DL for actual job info for 'job_id'
Might be different than job info saved in metadata.json if user
manually changes job pre/during rendering.
"""
deadline_url = (
get_system_settings()
["modules"]
["deadline"]
["DEADLINE_REST_URL"]
)
assert deadline_url, "Requires DEADLINE_REST_URL"
url = "{}/api/jobs?JobID={}".format(deadline_url, job_id)
try:
response = requests_get(url)
except requests.exceptions.ConnectionError:
print("Deadline is not accessible at {}".format(deadline_url))
# self.log("Deadline is not accessible at {}".format(deadline_url))
return {}
if not response.ok:
self.log.error("Submission failed!")
self.log.error(response.status_code)
self.log.error(response.content)
raise RuntimeError(response.text)
json_content = response.json()
if json_content:
return json_content.pop()
return {}
def _parse_metadata_json(self, json_path):
if not os.path.exists(json_path):
msg = "Metadata file {} doesn't exist".format(json_path)
raise RuntimeError(msg)
with open(json_path) as fp:
try:
return json.load(fp)
except Exception as exc:
self.log.error(
"Error loading json: "
"{} - Exception: {}".format(json_path, exc)
)
def _get_existing_files(self, out_dir):
"""Returns set of existing file names from 'out_dir'"""
existing_files = set()
for file_name in os.listdir(out_dir):
existing_files.add(file_name)
return existing_files
def _get_expected_files(self, repre):
"""Returns set of file names from metadata.json"""
expected_files = set()
for file_name in repre["files"]:
expected_files.add(file_name)
return expected_files


@@ -16,11 +16,13 @@ def clone_review_session(session, entity):
     # Add all invitees.
     for invitee in entity["review_session_invitees"]:
+        # Make sure email is a string, not None
+        email = invitee["email"] or ""
         session.create(
             "ReviewSessionInvitee",
             {
                 "name": invitee["name"],
-                "email": invitee["email"],
+                "email": email,
                 "review_session": review_session
             }
         )

@ -0,0 +1,167 @@
from openpype.modules.ftrack.lib import ServerAction
class MultipleNotesServer(ServerAction):
"""Action adds same note for muliple AssetVersions.
Note is added to selection of AssetVersions. Note is created with user
who triggered the action. It is possible to define note category of note.
"""
identifier = "multiple.notes.server"
label = "Multiple Notes (Server)"
description = "Add same note to multiple Asset Versions"
_none_category = "__NONE__"
def discover(self, session, entities, event):
"""Show action only on AssetVersions."""
if not entities:
return False
for entity in entities:
if entity.entity_type.lower() != "assetversion":
return False
return True
def interface(self, session, entities, event):
event_source = event["source"]
user_info = event_source.get("user") or {}
user_id = user_info.get("id")
if not user_id:
return None
values = event["data"].get("values")
if values:
return None
note_label = {
"type": "label",
"value": "# Enter note: #"
}
note_value = {
"name": "note",
"type": "textarea"
}
category_label = {
"type": "label",
"value": "## Category: ##"
}
category_data = []
category_data.append({
"label": "- None -",
"value": self._none_category
})
all_categories = session.query(
"select id, name from NoteCategory"
).all()
for cat in all_categories:
category_data.append({
"label": cat["name"],
"value": cat["id"]
})
category_value = {
"type": "enumerator",
"name": "category",
"data": category_data,
"value": self._none_category
}
splitter = {
"type": "label",
"value": "---"
}
return [
note_label,
note_value,
splitter,
category_label,
category_value
]
def launch(self, session, entities, event):
if "values" not in event["data"]:
return None
values = event["data"]["values"]
if len(values) <= 0 or "note" not in values:
return False
# Get Note text
note_value = values["note"]
if note_value.lower().strip() == "":
return {
"success": True,
"message": "Note was not entered. Skipping"
}
# Get User
event_source = event["source"]
user_info = event_source.get("user") or {}
user_id = user_info.get("id")
user = None
if user_id:
user = session.query(
'User where id is "{}"'.format(user_id)
).first()
if not user:
return {
"success": False,
"message": "Couldn't get user information."
}
# Logging message preparation
# - username
username = user.get("username") or "N/A"
# - AssetVersion ids
asset_version_ids_str = ",".join([entity["id"] for entity in entities])
# Base note data
note_data = {
"content": note_value,
"author": user
}
# Get category
category_id = values["category"]
if category_id == self._none_category:
category_id = None
category_name = None
if category_id is not None:
category = session.query(
"select id, name from NoteCategory where id is \"{}\"".format(
category_id
)
).first()
if category:
note_data["category"] = category
category_name = category["name"]
category_msg = ""
if category_name:
category_msg = " with category: \"{}\"".format(category_name)
self.log.warning((
"Creating note{} as User \"{}\" on "
"AssetVersions: {} with value \"{}\""
).format(category_msg, username, asset_version_ids_str, note_value))
# Create notes for entities
for entity in entities:
new_note = session.create("Note", note_data)
entity["notes"].append(new_note)
session.commit()
return True
def register(session):
'''Register plugin. Called when used as an plugin.'''
MultipleNotesServer(session).register()


@@ -2,7 +2,10 @@ import collections
 import datetime

 import ftrack_api

-from openpype.modules.ftrack.lib import BaseEvent
+from openpype.modules.ftrack.lib import (
+    BaseEvent,
+    query_custom_attributes
+)


 class PushFrameValuesToTaskEvent(BaseEvent):
@@ -55,10 +58,6 @@ class PushFrameValuesToTaskEvent(BaseEvent):
             if entity_info.get("entityType") != "task":
                 continue

-            # Skip `Task` entity type
-            if entity_info["entity_type"].lower() == "task":
-                continue
-
             # Care only about changes of status
             changes = entity_info.get("changes")
             if not changes:
@@ -74,6 +73,14 @@ class PushFrameValuesToTaskEvent(BaseEvent):
             if project_id is None:
                 continue

+            # Skip `Task` entity type if parent didn't change
+            if entity_info["entity_type"].lower() == "task":
+                if (
+                    "parent_id" not in changes
+                    or changes["parent_id"]["new"] is None
+                ):
+                    continue
+
             if project_id not in entities_info_by_project_id:
                 entities_info_by_project_id[project_id] = []
             entities_info_by_project_id[project_id].append(entity_info)
@@ -117,11 +124,24 @@ class PushFrameValuesToTaskEvent(BaseEvent):
             ))
             return

+        interest_attributes = set(interest_attributes)
+        interest_entity_types = set(interest_entity_types)
+
+        # Separate value changes and task parent changes
+        _entities_info = []
+        task_parent_changes = []
+        for entity_info in entities_info:
+            if entity_info["entity_type"].lower() == "task":
+                task_parent_changes.append(entity_info)
+            else:
+                _entities_info.append(entity_info)
+        entities_info = _entities_info
+
         # Filter entities info with changes
         interesting_data, changed_keys_by_object_id = self.filter_changes(
             session, event, entities_info, interest_attributes
         )
-        if not interesting_data:
+        if not interesting_data and not task_parent_changes:
             return

         # Prepare object types
@@ -131,6 +151,289 @@ class PushFrameValuesToTaskEvent(BaseEvent):
             name_low = object_type["name"].lower()
             object_types_by_name[name_low] = object_type

+        # NOTE it would be nice to check that `interesting_data` does not
+        # contain value changes of tasks that were created or moved
+        # - finding that out is complex
+        if interesting_data:
+            self.process_attribute_changes(
+                session, object_types_by_name,
+                interesting_data, changed_keys_by_object_id,
+                interest_entity_types, interest_attributes
+            )
+
+        if task_parent_changes:
+            self.process_task_parent_change(
+                session, object_types_by_name, task_parent_changes,
+                interest_entity_types, interest_attributes
+            )
+
+    def process_task_parent_change(
+        self, session, object_types_by_name, task_parent_changes,
+        interest_entity_types, interest_attributes
+    ):
+        """Push custom attribute values if the task parent has changed.

+        The parent is changed if the task is created or if it is moved under
+        a different entity. We don't care about all task changes, only about
+        those that have their parent in the interest types (from settings).
+
+        The task's hierarchical value should be unset or set based on the
+        parent's real hierarchical value, and the non-hierarchical custom
+        attribute value should be set to the hierarchical value.
+        """
+        # Store task ids which were created or moved under parent with entity
+        # type defined in settings (interest_entity_types).
+        task_ids = set()
+        # Store parent ids of matching task ids
+        matching_parent_ids = set()
+        # Store all entity ids of all entities to be able to query
+        # hierarchical values.
+        whole_hierarchy_ids = set()
+        # Store parent id of each entity id
+        parent_id_by_entity_id = {}
+        for entity_info in task_parent_changes:
+            # Ignore entities with fewer than 2 parents
+            # NOTE entity itself is also part of "parents" value
+            parents = entity_info.get("parents") or []
+            if len(parents) < 2:
+                continue
+
+            parent_info = parents[1]
+            # Check if parent has entity type we care about.
+            if parent_info["entity_type"] not in interest_entity_types:
+                continue
+
+            task_ids.add(entity_info["entityId"])
+            matching_parent_ids.add(parent_info["entityId"])
+
+            # Store whole hierarchy of task entity
+            prev_id = None
+            for item in parents:
+                item_id = item["entityId"]
+                whole_hierarchy_ids.add(item_id)
+
+                if prev_id is None:
+                    prev_id = item_id
+                    continue
+
+                parent_id_by_entity_id[prev_id] = item_id
+                if item["entityType"] == "show":
+                    break
+                prev_id = item_id
+
+        # Just skip if nothing is interesting for our settings
+        if not matching_parent_ids:
+            return
+
+        # Query object type ids of parent ids for custom attribute
+        # definitions query
+        entities = session.query(
+            "select object_type_id from TypedContext where id in ({})".format(
+                self.join_query_keys(matching_parent_ids)
+            )
+        )
+
+        # Prepare task object id
+        task_object_id = object_types_by_name["task"]["id"]
+
+        # All object ids for which we're querying custom attribute definitions
+        object_type_ids = set()
+        object_type_ids.add(task_object_id)
+        for entity in entities:
+            object_type_ids.add(entity["object_type_id"])
+
+        attrs_by_obj_id, hier_attrs = self.attrs_configurations(
+            session, object_type_ids, interest_attributes
+        )
+
+        # Skip if task attributes are not available at all
+        task_attrs = attrs_by_obj_id.get(task_object_id)
+        if not task_attrs:
+            return
+
+        # Skip attributes that are not available in both hierarchical and
+        # nonhierarchical variants
+        # TODO be able to push values if only hierarchical is available
+        for key in interest_attributes:
+            if key not in hier_attrs:
+                task_attrs.pop(key, None)
+            elif key not in task_attrs:
+                hier_attrs.pop(key)
+
+        # Skip if nothing remained
+        if not task_attrs:
+            return
+
+        # Do some preparations for custom attribute values query
+        attr_key_by_id = {}
+        nonhier_id_by_key = {}
+        hier_attr_ids = []
+        for key, attr_id in hier_attrs.items():
+            attr_key_by_id[attr_id] = key
+            hier_attr_ids.append(attr_id)
+
+        conf_ids = list(hier_attr_ids)
+        for key, attr_id in task_attrs.items():
+            attr_key_by_id[attr_id] = key
+            nonhier_id_by_key[key] = attr_id
+            conf_ids.append(attr_id)
+
+        # Query custom attribute values
+        # - result does not contain values for all entities, only the result
+        #   of the query callback to ftrack server
+        result = query_custom_attributes(
+            session, conf_ids, whole_hierarchy_ids
+        )
+
+        # Prepare variables where result will be stored
+        # - hierarchical values should not contain attribute with value by
+        #   default
+        hier_values_by_entity_id = {
+            entity_id: {}
+            for entity_id in whole_hierarchy_ids
+        }
+        # - real values of custom attributes
+        values_by_entity_id = {
+            entity_id: {
+                attr_id: None
+                for attr_id in conf_ids
+            }
+            for entity_id in whole_hierarchy_ids
+        }
+        for item in result:
+            attr_id = item["configuration_id"]
+            entity_id = item["entity_id"]
+            value = item["value"]
+
+            values_by_entity_id[entity_id][attr_id] = value
+
+            if attr_id in hier_attr_ids and value is not None:
+                hier_values_by_entity_id[entity_id][attr_id] = value
+
+        # Prepare values for all task entities
+        # - going through all parents and storing the first value found
+        # - store None to those that are already known not to have any
+        #   value set at all
+        for task_id in tuple(task_ids):
+            for attr_id in hier_attr_ids:
+                entity_ids = []
+                value = None
+                entity_id = task_id
+                while value is None:
+                    entity_value = hier_values_by_entity_id[entity_id]
+                    if attr_id in entity_value:
+                        value = entity_value[attr_id]
+                        if value is None:
+                            break
+                    if value is None:
+                        entity_ids.append(entity_id)
+
+                    entity_id = parent_id_by_entity_id.get(entity_id)
+                    if entity_id is None:
+                        break
+
+                for entity_id in entity_ids:
+                    hier_values_by_entity_id[entity_id][attr_id] = value
+
+        # Prepare changes to commit
+        changes = []
+        for task_id in tuple(task_ids):
+            parent_id = parent_id_by_entity_id[task_id]
+            for attr_id in hier_attr_ids:
+                attr_key = attr_key_by_id[attr_id]
+                nonhier_id = nonhier_id_by_key[attr_key]
+
+                # Real value of hierarchical attribute on parent
+                # - if it is None then the value should be unset
+                real_parent_value = values_by_entity_id[parent_id][attr_id]
+                # Current hierarchical value of a task
+                # - will be compared to real parent value
+                hier_value = hier_values_by_entity_id[task_id][attr_id]
+
+                # Parent value that can be inherited from its parent entity
+                parent_value = hier_values_by_entity_id[parent_id][attr_id]
+
+                # Task value of nonhierarchical custom attribute
+                nonhier_value = values_by_entity_id[task_id][nonhier_id]
+
+                if real_parent_value != hier_value:
+                    changes.append({
+                        "new_value": real_parent_value,
+                        "attr_id": attr_id,
+                        "entity_id": task_id,
+                        "attr_key": attr_key
+                    })
+
+                if parent_value != nonhier_value:
+                    changes.append({
+                        "new_value": parent_value,
+                        "attr_id": nonhier_id,
+                        "entity_id": task_id,
+                        "attr_key": attr_key
+                    })
+
+        self._commit_changes(session, changes)
+
+    def _commit_changes(self, session, changes):
+        uncommited_changes = False
+        for idx, item in enumerate(changes):
+            new_value = item["new_value"]
+            attr_id = item["attr_id"]
+            entity_id = item["entity_id"]
+            attr_key = item["attr_key"]
+
+            entity_key = collections.OrderedDict()
+            entity_key["configuration_id"] = attr_id
+            entity_key["entity_id"] = entity_id
+            self._cached_changes.append({
+                "attr_key": attr_key,
+                "entity_id": entity_id,
+                "value": new_value,
+                "time": datetime.datetime.now()
+            })
+            if new_value is None:
+                op = ftrack_api.operation.DeleteEntityOperation(
+                    "CustomAttributeValue",
+                    entity_key
+                )
+            else:
+                op = ftrack_api.operation.UpdateEntityOperation(
+                    "ContextCustomAttributeValue",
+                    entity_key,
+                    "value",
+                    ftrack_api.symbol.NOT_SET,
+                    new_value
+                )
+
+            session.recorded_operations.push(op)
+            self.log.info((
+                "Changing Custom Attribute \"{}\" to value"
+                " \"{}\" on entity: {}"
+            ).format(attr_key, new_value, entity_id))
+
+            if (idx + 1) % 20 == 0:
+                uncommited_changes = False
+                try:
+                    session.commit()
+                except Exception:
+                    session.rollback()
+                    self.log.warning(
+                        "Changing of values failed.", exc_info=True
+                    )
+            else:
+                uncommited_changes = True
+        if uncommited_changes:
+            try:
+                session.commit()
+            except Exception:
+                session.rollback()
+                self.log.warning("Changing of values failed.", exc_info=True)
+
+    def process_attribute_changes(
+        self, session, object_types_by_name,
+        interesting_data, changed_keys_by_object_id,
+        interest_entity_types, interest_attributes
+    ):
         # Prepare task object id
         task_object_id = object_types_by_name["task"]["id"]
@@ -216,13 +519,13 @@ class PushFrameValuesToTaskEvent(BaseEvent):
             task_entity_ids.add(task_id)
             parent_id_by_task_id[task_id] = task_entity["parent_id"]

-        self.finalize(
+        self.finalize_attribute_changes(
             session, interesting_data,
             changed_keys, attrs_by_obj_id, hier_attrs,
             task_entity_ids, parent_id_by_task_id
         )

-    def finalize(
+    def finalize_attribute_changes(
         self, session, interesting_data,
         changed_keys, attrs_by_obj_id, hier_attrs,
         task_entity_ids, parent_id_by_task_id
@@ -248,6 +551,7 @@ class PushFrameValuesToTaskEvent(BaseEvent):
             session, attr_ids, entity_ids, task_entity_ids, hier_attrs
         )

+        changes = []
         for entity_id, current_values in current_values_by_id.items():
             parent_id = parent_id_by_task_id.get(entity_id)
             if not parent_id:
@@ -272,39 +576,13 @@ class PushFrameValuesToTaskEvent(BaseEvent):
                 if new_value == old_value:
                     continue

-                entity_key = collections.OrderedDict()
-                entity_key["configuration_id"] = attr_id
-                entity_key["entity_id"] = entity_id
-
-                self._cached_changes.append({
-                    "attr_key": attr_key,
-                    "entity_id": entity_id,
-                    "value": new_value,
-                    "time": datetime.datetime.now()
-                })
-                if new_value is None:
-                    op = ftrack_api.operation.DeleteEntityOperation(
-                        "CustomAttributeValue",
-                        entity_key
-                    )
-                else:
-                    op = ftrack_api.operation.UpdateEntityOperation(
-                        "ContextCustomAttributeValue",
-                        entity_key,
-                        "value",
-                        ftrack_api.symbol.NOT_SET,
-                        new_value
-                    )
-                session.recorded_operations.push(op)
-                self.log.info((
-                    "Changing Custom Attribute \"{}\" to value"
-                    " \"{}\" on entity: {}"
-                ).format(attr_key, new_value, entity_id))
-            try:
-                session.commit()
-            except Exception:
-                session.rollback()
-                self.log.warning("Changing of values failed.", exc_info=True)
+                changes.append({
+                    "new_value": new_value,
+                    "attr_id": attr_id,
+                    "entity_id": entity_id,
+                    "attr_key": attr_key
+                })
+        self._commit_changes(session, changes)

     def filter_changes(
         self, session, event, entities_info, interest_attributes


@@ -66,15 +66,7 @@ class VersionToTaskStatus(BaseEvent):
             ))
             return

-        _status_mapping = event_settings["mapping"]
-        if not _status_mapping:
-            self.log.debug(
-                "Project \"{}\" does not have set mapping for {}".format(
-                    project_name, self.__class__.__name__
-                )
-            )
-            return
+        _status_mapping = event_settings["mapping"] or {}
         status_mapping = {
             key.lower(): value
             for key, value in _status_mapping.items()


@@ -1,5 +1,6 @@
 import os
 import re
+import json

 from openpype.modules.ftrack.lib import BaseAction, statics_icon
 from openpype.api import Anatomy, get_project_settings
@@ -84,6 +85,9 @@ class CreateProjectFolders(BaseAction):
         }

         try:
+            if isinstance(project_folder_structure, str):
+                project_folder_structure = json.loads(project_folder_structure)
+
             # Get paths based on presets
             basic_paths = self.get_path_items(project_folder_structure)
             self.create_folders(basic_paths, project_entity)


@@ -13,7 +13,8 @@ from .custom_attributes import (
     default_custom_attributes_definition,
     app_definitions_from_app_manager,
     tool_definitions_from_app_manager,
-    get_openpype_attr
+    get_openpype_attr,
+    query_custom_attributes
 )

 from . import avalon_sync
@@ -37,6 +38,7 @@ __all__ = (
     "app_definitions_from_app_manager",
     "tool_definitions_from_app_manager",
     "get_openpype_attr",
+    "query_custom_attributes",

     "avalon_sync",


@@ -402,16 +402,18 @@ class SyncEntitiesFactory:
             items = []
             items.append({
                 "type": "label",
-                "value": "# Can't access Custom attribute <{}>".format(
-                    CUST_ATTR_ID_KEY
-                )
+                "value": (
+                    "# Can't access Custom attribute: <b>\"{}\"</b>"
+                ).format(CUST_ATTR_ID_KEY)
             })
             items.append({
                 "type": "label",
                 "value": (
-                    "<p>- Check if user \"{}\" has permissions"
-                    " to access the Custom attribute</p>"
-                ).format(self._api_key)
+                    "<p>- Check if your User and API key has permissions"
+                    " to access the Custom attribute."
+                    "<br>Username:\"{}\""
+                    "<br>API key:\"{}\"</p>"
+                ).format(self._api_user, self._api_key)
             })
             items.append({
                 "type": "label",


@@ -81,3 +81,60 @@ def get_openpype_attr(session, split_hierarchical=True, query_keys=None):
         return custom_attributes, hier_custom_attributes
     return custom_attributes
+
+
+def join_query_keys(keys):
+    """Helper to join keys to query."""
+    return ",".join(["\"{}\"".format(key) for key in keys])
+
+
+def query_custom_attributes(session, conf_ids, entity_ids, table_name=None):
+    """Query custom attribute values from ftrack database.
+
+    Using the ftrack call method, the result may differ based on the used
+    table name and the version of the ftrack server.
+
+    Args:
+        session(ftrack_api.Session): Connected ftrack session.
+        conf_ids(list, set, tuple): Configuration (attribute) ids which are
+            queried.
+        entity_ids(list, set, tuple): Entity ids for which values are
+            queried.
+        table_name(str): Table name from which values are queried. Not
+            recommended to change unless you know what it means.
+    """
+    output = []
+    # Just skip
+    if not conf_ids or not entity_ids:
+        return output
+
+    if table_name is None:
+        table_name = "ContextCustomAttributeValue"
+
+    # Prepare values to query
+    attributes_joined = join_query_keys(conf_ids)
+    attributes_len = len(conf_ids)
+
+    # Query values in chunks
+    chunk_size = int(5000 / attributes_len)
+    # Make sure entity_ids is `list` for chunk selection
+    entity_ids = list(entity_ids)
+    for idx in range(0, len(entity_ids), chunk_size):
+        entity_ids_joined = join_query_keys(
+            entity_ids[idx:idx + chunk_size]
+        )
+        call_expr = [{
+            "action": "query",
+            "expression": (
+                "select value, entity_id from {}"
+                " where entity_id in ({}) and configuration_id in ({})"
+            ).format(table_name, entity_ids_joined, attributes_joined)
+        }]
+        if hasattr(session, "call"):
+            [result] = session.call(call_expr)
+        else:
+            [result] = session._call(call_expr)
+
+        for item in result["data"]:
+            output.append(item)
+    return output
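
As a usage sketch (assumed ids, not from the diff): the helper batches entity ids so a single server call stays under roughly 5000 value rows; with 3 attributes each chunk holds int(5000 / 3) == 1666 entity ids.

    # minimal sketch, assuming `session` is a connected ftrack_api.Session
    # and the ids below exist on the server (hypothetical values)
    conf_ids = ["frame-start-attr-id", "frame-end-attr-id"]
    entity_ids = ["shot-id-1", "shot-id-2"]
    values = query_custom_attributes(session, conf_ids, entity_ids)
    for item in values:
        print(item["entity_id"], item["configuration_id"], item["value"])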


@@ -1,29 +1,107 @@
-import pyblish.api
-
-
-class CollectFtrackFamilies(pyblish.api.InstancePlugin):
-    """Collect family for ftrack publishing
-
-    Add ftrack family to those instance that should be published to ftrack
-    """
-
-    order = pyblish.api.CollectorOrder + 0.3
-    label = 'Add ftrack family'
-    families = ["model",
-                "setdress",
-                "model",
-                "animation",
-                "look",
-                "rig",
-                "camera"
-                ]
-    hosts = ["maya"]
-
-    def process(self, instance):
-
-        # make ftrack publishable
-        if instance.data.get('families'):
-            instance.data['families'].append('ftrack')
-        else:
-            instance.data['families'] = ['ftrack']
+"""
+Requires:
+    none
+
+Provides:
+    instance     -> families ([])
+"""
+import pyblish.api
+
+import avalon.api
+
+from openpype.lib.plugin_tools import filter_profiles
+
+
+class CollectFtrackFamily(pyblish.api.InstancePlugin):
+    """Adds explicitly 'ftrack' to families to upload instance to FTrack.
+
+    Uses selection by combination of hosts/families/tasks names via
+    profiles resolution.
+
+    Triggered everywhere, checks instance against configured profiles.
+
+    Checks advanced filtering which works on 'families' not on main
+    'family', as some variants dynamically resolve addition of ftrack
+    based on 'families' (editorial drives it by presence of 'review').
+    """
+    label = "Collect Ftrack Family"
+    order = pyblish.api.CollectorOrder + 0.4998
+
+    profiles = None
+
+    def process(self, instance):
+        if not self.profiles:
+            self.log.warning("No profiles present for adding Ftrack family")
+            return
+
+        task_name = instance.data.get("task",
+                                      avalon.api.Session["AVALON_TASK"])
+        host_name = avalon.api.Session["AVALON_APP"]
+        family = instance.data["family"]
+
+        filtering_criteria = {
+            "hosts": host_name,
+            "families": family,
+            "tasks": task_name
+        }
+        profile = filter_profiles(self.profiles, filtering_criteria,
+                                  logger=self.log)
+
+        if profile:
+            families = instance.data.get("families")
+            add_ftrack_family = profile["add_ftrack_family"]
+
+            additional_filters = profile.get("advanced_filtering")
+            if additional_filters:
+                add_ftrack_family = self._get_add_ftrack_f_from_addit_filters(
+                    additional_filters,
+                    families,
+                    add_ftrack_family
+                )
+
+            if add_ftrack_family:
+                self.log.debug("Adding ftrack family for '{}'".
+                               format(instance.data.get("family")))
+
+                if families and "ftrack" not in families:
+                    instance.data["families"].append("ftrack")
+                else:
+                    instance.data["families"] = ["ftrack"]
+        else:
+            self.log.debug("Instance '{}' doesn't match any profile".format(
+                instance.data.get("family")))
+
+    def _get_add_ftrack_f_from_addit_filters(self,
+                                             additional_filters,
+                                             families,
+                                             add_ftrack_family):
+        """Compares additional filters, working on instance's families.
+
+        Triggered for more detailed filtering when the main family matches,
+        but the content of 'families' actually matters.
+
+        (For example 'review' in 'families' should result in adding to
+        Ftrack.)
+
+        Args:
+            additional_filters (dict) - from Settings
+            families (list) - subfamilies
+            add_ftrack_family (bool) - add ftrack to families if True
+        """
+        override_filter = None
+        override_filter_value = -1
+        for additional_filter in additional_filters:
+            filter_families = set(additional_filter["families"])
+            valid = filter_families <= set(families)  # issubset
+            if not valid:
+                continue
+
+            value = len(filter_families)
+            if value > override_filter_value:
+                override_filter = additional_filter
+                override_filter_value = value
+
+        if override_filter:
+            add_ftrack_family = override_filter["add_ftrack_family"]
+
+        return add_ftrack_family
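
To illustrate the profile resolution above (a hypothetical settings fragment, not from the diff): the filter with the largest matching 'families' subset wins, so an instance with families ["plate", "clip", "review"] gets ftrack added even though a less specific filter says it should not.

    # minimal sketch of the "most specific filter wins" rule
    additional_filters = [
        {"families": ["review"], "add_ftrack_family": False},
        {"families": ["clip", "review"], "add_ftrack_family": True},
    ]
    families = {"plate", "clip", "review"}
    best, best_size = None, -1
    for f in additional_filters:
        wanted = set(f["families"])
        if wanted <= families and len(wanted) > best_size:
            best, best_size = f, len(wanted)
    # best["add_ftrack_family"] is True - the two-item filter is more specific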


@@ -1,6 +1,6 @@
 import os
 import requests
-from avalon import style
+from openpype import style
 from openpype.modules.ftrack.lib import credentials
 from . import login_tools
 from openpype import resources
@@ -46,8 +46,11 @@ class CredentialsDialog(QtWidgets.QDialog):
         self.user_label = QtWidgets.QLabel("Username:")
         self.api_label = QtWidgets.QLabel("API Key:")

-        self.ftsite_input = QtWidgets.QLineEdit()
-        self.ftsite_input.setReadOnly(True)
+        self.ftsite_input = QtWidgets.QLabel()
+        self.ftsite_input.setTextInteractionFlags(
+            QtCore.Qt.TextBrowserInteraction
+        )
+        # self.ftsite_input.setReadOnly(True)
         self.ftsite_input.setCursor(QtGui.QCursor(QtCore.Qt.IBeamCursor))

         self.user_input = QtWidgets.QLineEdit()


@@ -1,6 +1,6 @@
 from Qt import QtWidgets, QtCore
 from .widgets import LogsWidget, OutputWidget
-from avalon import style
+from openpype import style


 class LogsWindow(QtWidgets.QWidget):
@@ -14,7 +14,7 @@ class LogsWindow(QtWidgets.QWidget):
         main_layout = QtWidgets.QHBoxLayout()

-        log_splitter = QtWidgets.QSplitter()
+        log_splitter = QtWidgets.QSplitter(self)
         log_splitter.setOrientation(QtCore.Qt.Horizontal)
         log_splitter.addWidget(logs_widget)
         log_splitter.addWidget(log_detail)


@@ -83,7 +83,6 @@ class CustomCombo(QtWidgets.QWidget):
         self.setLayout(layout)

-        # toolmenu.selection_changed.connect(self.on_selection_changed)
         toolmenu.selection_changed.connect(self.selection_changed)

         self.toolbutton = toolbutton
@@ -119,7 +118,6 @@ class LogsWidget(QtWidgets.QWidget):
         filter_layout = QtWidgets.QHBoxLayout()

-        # user_filter = SearchComboBox(self, "Users")
         user_filter = CustomCombo("Users", self)
         users = model.dbcon.distinct("username")
         user_filter.populate(users)
@@ -128,21 +126,18 @@ class LogsWidget(QtWidgets.QWidget):
         proxy_model.update_users_filter(users)

         level_filter = CustomCombo("Levels", self)
-        # levels = [(level, True) for level in model.dbcon.distinct("level")]
         levels = model.dbcon.distinct("level")
         level_filter.addItems(levels)
         level_filter.selection_changed.connect(self._level_changed)

         detail_widget.update_level_filter(levels)

-        spacer = QtWidgets.QWidget()
-
         icon = qtawesome.icon("fa.refresh", color="white")
         refresh_btn = QtWidgets.QPushButton(icon, "")

         filter_layout.addWidget(user_filter)
         filter_layout.addWidget(level_filter)
-        filter_layout.addWidget(spacer, 1)
+        filter_layout.addStretch(1)
         filter_layout.addWidget(refresh_btn)

         view = QtWidgets.QTreeView(self)


@@ -1,13 +1,12 @@
 import os

 from Qt import QtCore, QtGui, QtWidgets
-from avalon import style
-from openpype import resources
+from openpype import resources, style


 class MusterLogin(QtWidgets.QWidget):

     SIZE_W = 300
-    SIZE_H = 130
+    SIZE_H = 150

     loginSignal = QtCore.Signal(object, object, object)
@@ -123,7 +122,6 @@ class MusterLogin(QtWidgets.QWidget):
         super().keyPressEvent(key_event)

     def setError(self, msg):
-
         self.error_label.setText(msg)
         self.error_label.show()
@@ -149,6 +147,17 @@ class MusterLogin(QtWidgets.QWidget):
     def save_credentials(self, username, password):
         self.module.get_auth_token(username, password)

+    def showEvent(self, event):
+        super(MusterLogin, self).showEvent(event)
+
+        # Make btns same width
+        max_width = max(
+            self.btn_ok.sizeHint().width(),
+            self.btn_cancel.sizeHint().width()
+        )
+        self.btn_ok.setMinimumWidth(max_width)
+        self.btn_cancel.setMinimumWidth(max_width)
+
     def closeEvent(self, event):
         event.ignore()
         self._close_widget()


@@ -114,6 +114,7 @@ class LocalSettingsAction(PypeModule, ITrayAction):
         # Tray attributes
         self.settings_window = None
+        self._first_trigger = True

     def connect_with_modules(self, *_a, **_kw):
         return
@@ -153,6 +154,9 @@ class LocalSettingsAction(PypeModule, ITrayAction):
         self.settings_window.raise_()
         self.settings_window.activateWindow()

-        # Reset content if was not visible
-        if not was_visible:
+        # Do not reset if it's the first trigger of the action
+        if self._first_trigger:
+            self._first_trigger = False
+        elif not was_visible:
+            # Reset content if it was not visible
             self.settings_window.reset()


@@ -1,6 +1,5 @@
-from avalon import style
 from Qt import QtCore, QtGui, QtWidgets
-from openpype import resources
+from openpype import resources, style


 class WidgetUserIdle(QtWidgets.QWidget):


@@ -40,6 +40,7 @@ class CollectOcioSubsetResources(pyblish.api.InstancePlugin):
         otio_clip = instance.data["otioClip"]
         otio_avalable_range = otio_clip.available_range()
         media_fps = otio_avalable_range.start_time.rate
+        available_duration = otio_avalable_range.duration.value

         # get available range trimmed with processed retimes
         retimed_attributes = editorial.get_media_range_with_retimes(
@@ -68,6 +69,8 @@ class CollectOcioSubsetResources(pyblish.api.InstancePlugin):
             a_frame_start_h, (a_frame_end_h - a_frame_start_h + 1),
             media_fps
         )
+        trimmed_duration = trimmed_media_range_h.duration.value
+
         self.log.debug("trimmed_media_range_h: {}".format(
             trimmed_media_range_h))
         self.log.debug("a_frame_start_h: {}".format(
@@ -150,12 +153,18 @@ class CollectOcioSubsetResources(pyblish.api.InstancePlugin):
             repre = self._create_representation(
                 frame_start, frame_end, collection=collection)
         else:
+            _trim = False
             dirname, filename = os.path.split(media_ref.target_url)
             self.staging_dir = dirname
+
+            if trimmed_duration < available_duration:
+                self.log.debug("Ready for Trimming")
+                instance.data["families"].append("trim")
+                instance.data["otioTrimmingRange"] = trimmed_media_range_h
+                _trim = True
+
             self.log.debug(filename)
             repre = self._create_representation(
-                frame_start, frame_end, file=filename)
+                frame_start, frame_end, file=filename, trim=_trim)

         if repre:
             # add representation to instance data
@@ -196,7 +205,7 @@ class CollectOcioSubsetResources(pyblish.api.InstancePlugin):
                 "frameStart": start,
                 "frameEnd": end,
             })
-            return representation_data
+
         if kwargs.get("file"):
             file = kwargs.get("file")
             ext = os.path.splitext(file)[-1]
@@ -207,4 +216,9 @@ class CollectOcioSubsetResources(pyblish.api.InstancePlugin):
                 "frameStart": start,
                 "frameEnd": end,
             })
-            return representation_data
+
+        if kwargs.get("trim") is True:
+            representation_data.update({
+                "tags": ["trim"]
+            })
+        return representation_data


@@ -87,11 +87,14 @@ class CollectRenderedFiles(pyblish.api.ContextPlugin):
             instance = self._context.create_instance(
                 instance_data.get("subset")
             )
-            self.log.info("Filling stagignDir...")
+            self.log.info("Filling stagingDir...")

             self._fill_staging_dir(instance_data, anatomy)
             instance.data.update(instance_data)

+            # stash render job id for later validation
+            instance.data["render_job_id"] = data.get("job").get("_id")
+
             representations = []
             for repre_data in instance_data.get("representations") or []:
                 self._fill_staging_dir(repre_data, anatomy)


@@ -0,0 +1,125 @@
"""
Requires:
    instance -> otioTrimmingRange
    instance -> representations
"""
import os
from copy import deepcopy

from pyblish import api
import openpype


class ExtractOTIOTrimmingVideo(openpype.api.Extractor):
    """Trim a video file that is longer than the required length."""

    order = api.ExtractorOrder
    label = "Extract OTIO trim longer video"
    families = ["trim"]
    hosts = ["resolve", "hiero"]

    def process(self, instance):
        self.staging_dir = self.staging_dir(instance)
        otio_trim_range = instance.data["otioTrimmingRange"]
        representations = instance.data["representations"]
        self.log.debug("otio_trim_range: {}".format(otio_trim_range))
        self.log.debug("self.staging_dir: {}".format(self.staging_dir))

        # get corresponding representation
        for _repre in representations:
            if "trim" not in _repre.get("tags", []):
                continue

            input_file = _repre["files"]
            input_file_path = os.path.normpath(os.path.join(
                _repre["stagingDir"], input_file
            ))
            self.log.debug("input_file_path: {}".format(input_file_path))

            # trim via ffmpeg
            new_file = self._ffmpeg_trim_seqment(
                input_file_path, otio_trim_range)

            # prepare new representation data
            repre_data = deepcopy(_repre)
            # remove tags as we don't need them
            repre_data.pop("tags")
            repre_data["stagingDir"] = self.staging_dir
            repre_data["files"] = new_file

            # remove the `trim` tagged representation
            representations.remove(_repre)
            representations.append(repre_data)
            self.log.debug(repre_data)
            self.log.debug("representations: {}".format(representations))

    def _ffmpeg_trim_seqment(self, input_file_path, otio_range):
        """Trim segment of a video file.

        Uses ffmpeg to trim the video to the desired length.

        Args:
            input_file_path (str): path string
            otio_range (opentime.TimeRange): range to trim to
        """
        # get rendering app path
        ffmpeg_path = openpype.lib.get_ffmpeg_tool_path("ffmpeg")

        # create path to destination
        output_path = self._get_ffmpeg_output(input_file_path)

        # start command list
        command = ['"{}"'.format(ffmpeg_path)]

        video_path = input_file_path
        frame_start = otio_range.start_time.value
        input_fps = otio_range.start_time.rate
        frame_duration = (otio_range.duration.value + 1)
        sec_start = openpype.lib.frames_to_secons(frame_start, input_fps)
        sec_duration = openpype.lib.frames_to_secons(frame_duration, input_fps)

        # form command for rendering the trimmed file
        command.extend([
            "-ss {}".format(sec_start),
            "-t {}".format(sec_duration),
            "-i \"{}\"".format(video_path),
            "-c copy",
            output_path
        ])

        # execute
        self.log.debug("Executing: {}".format(" ".join(command)))
        output = openpype.api.run_subprocess(
            " ".join(command), logger=self.log
        )
        self.log.debug("Output: {}".format(output))

        return os.path.basename(output_path)

    def _get_ffmpeg_output(self, file_path):
        """Returns ffmpeg output command arguments.

        Args:
            file_path (str): path string

        Returns:
            str: output path
        """
        basename = os.path.basename(file_path)
        name, ext = os.path.splitext(basename)
        output_file = "{}_{}{}".format(
            name,
            "trimmed",
            ext
        )
        # create path to destination
        return os.path.join(self.staging_dir, output_file)
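
For intuition (a hypothetical range, not from the diff): with a range starting at frame 48 of a 24 fps clip and a duration of 23 frames, the sketch below computes the same "-ss"/"-t" arguments the extractor builds, assuming frames_to_secons simply divides frames by fps.

    # minimal sketch of the trim-argument math used above
    frame_start, input_fps = 48, 24.0
    frame_duration = 23 + 1          # extractor adds one frame of headroom
    sec_start = frame_start / input_fps        # 2.0
    sec_duration = frame_duration / input_fps  # 1.0
    args = ["-ss {}".format(sec_start), "-t {}".format(sec_duration),
            "-c copy"]
    # -> ['-ss 2.0', '-t 1.0', '-c copy']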


@@ -975,20 +975,52 @@ class ExtractReview(pyblish.api.InstancePlugin):
         # NOTE Skipped using instance's resolution
         full_input_path_single_file = temp_data["full_input_path_single_file"]
-        input_data = ffprobe_streams(
-            full_input_path_single_file, self.log
-        )[0]
-        input_width = int(input_data["width"])
-        input_height = int(input_data["height"])
+        try:
+            streams = ffprobe_streams(
+                full_input_path_single_file, self.log
+            )
+        except Exception:
+            raise AssertionError((
+                "FFprobe couldn't read information about input file: \"{}\""
+            ).format(full_input_path_single_file))
+
+        # Try to find first stream with defined 'width' and 'height'
+        # - this is to avoid order of streams where audio can be first
+        # - there may be a better way (checking `codec_type`?)
+        input_width = None
+        input_height = None
+        for stream in streams:
+            if "width" in stream and "height" in stream:
+                input_width = int(stream["width"])
+                input_height = int(stream["height"])
+                break
+
+        # Raise exception if no stream defined the input resolution
+        if input_width is None:
+            raise AssertionError((
+                "FFprobe couldn't read resolution from input file: \"{}\""
+            ).format(full_input_path_single_file))

         # NOTE Setting only one of `width` or `height` is not allowed
         # - settings value can't have None but has value of 0
         output_width = output_def.get("width") or None
         output_height = output_def.get("height") or None

+        # Overscan color
+        overscan_color_value = "black"
+        overscan_color = output_def.get("overscan_color")
+        if overscan_color:
+            bg_red, bg_green, bg_blue, _ = overscan_color
+            overscan_color_value = "#{0:0>2X}{1:0>2X}{2:0>2X}".format(
+                bg_red, bg_green, bg_blue
+            )
+        self.log.debug("Overscan color: `{}`".format(overscan_color_value))
+
         # Convert overscan value video filters
         overscan_crop = output_def.get("overscan_crop")
-        overscan = OverscanCrop(input_width, input_height, overscan_crop)
+        overscan = OverscanCrop(
+            input_width, input_height, overscan_crop, overscan_color_value
+        )
         overscan_crop_filters = overscan.video_filters()

         # Add overscan filters to filters if there are any and modify input
         # resolution by its values
@@ -1158,9 +1190,10 @@ class ExtractReview(pyblish.api.InstancePlugin):
             "scale={}x{}:flags=lanczos".format(
                 width_scale, height_scale
             ),
-            "pad={}:{}:{}:{}:black".format(
+            "pad={}:{}:{}:{}:{}".format(
                 output_width, output_height,
-                width_half_pad, height_half_pad
+                width_half_pad, height_half_pad,
+                overscan_color_value
             ),
             "setsar=1"
         ])
@@ -1707,12 +1740,15 @@ class OverscanCrop:
     item_regex = re.compile(r"([\+\-])?([0-9]+)(.+)?")
     relative_source_regex = re.compile(r"%([\+\-])")

-    def __init__(self, input_width, input_height, string_value):
+    def __init__(
+        self, input_width, input_height, string_value, overscal_color=None
+    ):
         # Make sure that is not None
         string_value = string_value or ""

         self.input_width = input_width
         self.input_height = input_height
+        self.overscal_color = overscal_color

         width, height = self._convert_string_to_values(string_value)
         self._width_value = width
@@ -1767,16 +1803,22 @@ class OverscanCrop:
         elif width >= self.input_width and height >= self.input_height:
             output.append(
-                "pad={}:{}:(iw-ow)/2:(ih-oh)/2".format(width, height)
+                "pad={}:{}:(iw-ow)/2:(ih-oh)/2:{}".format(
+                    width, height, self.overscal_color
+                )
             )
         elif width > self.input_width and height < self.input_height:
             output.append("crop=iw:{}".format(height))
-            output.append("pad={}:ih:(iw-ow)/2:(ih-oh)/2".format(width))
+            output.append("pad={}:ih:(iw-ow)/2:(ih-oh)/2:{}".format(
+                width, self.overscal_color
+            ))
         elif width < self.input_width and height > self.input_height:
             output.append("crop={}:ih".format(width))
-            output.append("pad=iw:{}:(iw-ow)/2:(ih-oh)/2".format(height))
+            output.append("pad=iw:{}:(iw-ow)/2:(ih-oh)/2:{}".format(
+                height, self.overscal_color
+            ))

         return output

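
As a concrete illustration (hypothetical numbers, not from the diff): for a 1920x1080 input, an overscan request of 2000x1080 with color "#FF0000" produces a single pad filter that centers the image on a red background.

    # minimal sketch of the resulting ffmpeg filter string
    input_width, input_height = 1920, 1080
    width, height, color = 2000, 1080, "#FF0000"
    filters = []
    if width >= input_width and height >= input_height:
        filters.append(
            "pad={}:{}:(iw-ow)/2:(ih-oh)/2:{}".format(width, height, color)
        )
    # filters == ['pad=2000:1080:(iw-ow)/2:(ih-oh)/2:#FF0000']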


@@ -26,9 +26,23 @@ class ExtractReviewSlate(openpype.api.Extractor):
         slate_path = inst_data.get("slateFrame")
         ffmpeg_path = openpype.lib.get_ffmpeg_tool_path("ffmpeg")

-        slate_stream = openpype.lib.ffprobe_streams(slate_path, self.log)[0]
-        slate_width = slate_stream["width"]
-        slate_height = slate_stream["height"]
+        slate_streams = openpype.lib.ffprobe_streams(slate_path, self.log)
+        # Try to find first stream with defined 'width' and 'height'
+        # - this is to avoid order of streams where audio can be first
+        # - there may be a better way (checking `codec_type`?)
+        slate_width = None
+        slate_height = None
+        for slate_stream in slate_streams:
+            if "width" in slate_stream and "height" in slate_stream:
+                slate_width = int(slate_stream["width"])
+                slate_height = int(slate_stream["height"])
+                break
+
+        # Raise exception if no stream defined the input resolution
+        if slate_width is None:
+            raise AssertionError((
+                "FFprobe couldn't read resolution from input file: \"{}\""
+            ).format(slate_path))

         if "reviewToWidth" in inst_data:
             use_legacy_code = True
@@ -309,16 +323,29 @@ class ExtractReviewSlate(openpype.api.Extractor):
             )
             return codec_args

-        codec_name = streams[0].get("codec_name")
+        # Try to find first stream that is not an audio stream
+        no_audio_stream = None
+        for stream in streams:
+            if stream.get("codec_type") != "audio":
+                no_audio_stream = stream
+                break
+
+        if no_audio_stream is None:
+            self.log.warning((
+                "Couldn't find a stream that is not audio in file \"{}\""
+            ).format(full_input_path))
+            return codec_args
+
+        codec_name = no_audio_stream.get("codec_name")
         if codec_name:
             codec_args.append("-codec:v {}".format(codec_name))

-        profile_name = streams[0].get("profile")
+        profile_name = no_audio_stream.get("profile")
         if profile_name:
             profile_name = profile_name.replace(" ", "_").lower()
             codec_args.append("-profile:v {}".format(profile_name))

-        pix_fmt = streams[0].get("pix_fmt")
+        pix_fmt = no_audio_stream.get("pix_fmt")
         if pix_fmt:
             codec_args.append("-pix_fmt {}".format(pix_fmt))

         return codec_args

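
For intuition (a made-up probe result, not from the diff): given the first non-audio stream below, the collected codec arguments come out as shown.

    # minimal sketch of how codec args are derived from an ffprobe stream
    no_audio_stream = {
        "codec_type": "video",
        "codec_name": "h264",
        "profile": "High",
        "pix_fmt": "yuv420p",
    }
    codec_args = []
    codec_args.append("-codec:v {}".format(no_audio_stream["codec_name"]))
    codec_args.append(
        "-profile:v {}".format(
            no_audio_stream["profile"].replace(" ", "_").lower()))
    codec_args.append("-pix_fmt {}".format(no_audio_stream["pix_fmt"]))
    # codec_args == ['-codec:v h264', '-profile:v high', '-pix_fmt yuv420p']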


@@ -1,21 +0,0 @@
import pyblish.api


class IntegrateFtrackComponentOverwrite(pyblish.api.InstancePlugin):
    """
    Set `component_overwrite` to True on all instances `ftrackComponentsList`
    """

    order = pyblish.api.IntegratorOrder + 0.49
    label = 'Overwrite ftrack created versions'
    families = ["clip"]
    optional = True
    active = False

    def process(self, instance):
        component_list = instance.data['ftrackComponentsList']

        for cl in component_list:
            cl['component_overwrite'] = True
            self.log.debug('Component {} overwriting'.format(
                cl['component_data']['name']))


@@ -0,0 +1,112 @@
import pyblish.api

from avalon import io
from pprint import pformat


class ValidateEditorialAssetName(pyblish.api.ContextPlugin):
    """Validate that editorial's asset names are not already created in DB.

    Checks name variations that differ only in capitalization or in
    underscores.
    """

    order = pyblish.api.ValidatorOrder
    label = "Validate Asset Name"

    def process(self, context):
        asset_and_parents = self.get_parents(context)

        if not io.Session:
            io.install()

        db_assets = list(io.find(
            {"type": "asset"}, {"name": 1, "data.parents": 1}))
        self.log.debug("__ db_assets: {}".format(db_assets))

        asset_db_docs = {
            str(e["name"]): e["data"]["parents"] for e in db_assets}

        self.log.debug("__ project_entities: {}".format(
            pformat(asset_db_docs)))

        assets_missing_name = {}
        assets_wrong_parent = {}
        for asset in asset_and_parents.keys():
            if asset not in asset_db_docs.keys():
                # collect assets missing in db for the next layer of checks
                assets_missing_name.update({asset: asset_and_parents[asset]})
                continue

            if asset_and_parents[asset] != asset_db_docs[asset]:
                # collect assets with wrong parents for the next layer of
                # checks
                assets_wrong_parent.update({
                    asset: {
                        "required": asset_and_parents[asset],
                        "already_in_db": asset_db_docs[asset]
                    }
                })
                continue

            self.log.info("correct asset: {}".format(asset))

        if assets_missing_name:
            wrong_names = {}
            self.log.debug(
                ">> assets_missing_name: {}".format(assets_missing_name))

            for asset in assets_missing_name.keys():
                _asset = asset.lower().replace("_", "")
                if _asset in [a.lower().replace("_", "")
                              for a in asset_db_docs.keys()]:
                    wrong_names.update({
                        "required_name": asset,
                        "used_variants_in_db": [
                            a for a in asset_db_docs.keys()
                            if a.lower().replace("_", "") == _asset
                        ]
                    })

            if wrong_names:
                self.log.debug(
                    ">> wrong_names: {}".format(wrong_names))
                raise Exception(
                    "Some already existing asset name variants `{}`".format(
                        wrong_names))

        if assets_wrong_parent:
            self.log.debug(
                ">> assets_wrong_parent: {}".format(assets_wrong_parent))
            raise Exception(
                "Wrong parents on assets `{}`".format(assets_wrong_parent))

    def _get_all_assets(self, input_dict):
        """Returns asset names in list.

        List contains all asset names including parents.
        """
        for key in input_dict.keys():
            # check if child key is available
            if input_dict[key].get("childs"):
                # loop deeper
                self._get_all_assets(
                    input_dict[key]["childs"])
            else:
                self.all_testing_assets.append(key)

    def get_parents(self, context):
        return_dict = {}
        for instance in context:
            asset = instance.data["asset"]

            families = instance.data.get("families", []) + [
                instance.data["family"]
            ]
            # filter out non-shot families
            if "shot" not in families:
                continue

            parents = instance.data["parents"]

            return_dict.update({
                asset: [p["entity_name"] for p in parents]
            })
        return return_dict
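
To make the variant check concrete (hypothetical names, not from the diff): "Sh_010" and "SH010" normalize to the same key, so publishing "SH010" while "Sh_010" exists in the DB is reported as a name-variant clash.

    # minimal sketch of the normalization used for variant detection
    def normalize(name):
        return name.lower().replace("_", "")

    db_names = ["Sh_010", "sh020"]
    required = "SH010"
    clashes = [n for n in db_names if normalize(n) == normalize(required)]
    # clashes == ['Sh_010'] even though 'SH010' itself is not in the DB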


@@ -46,16 +46,18 @@ class PypeCommands:
         standalonepublish.main()

     @staticmethod
-    def publish(paths):
+    def publish(paths, targets=None):
         """Start headless publishing.

         Publish uses json from the passed paths argument.

         Args:
             paths (list): Paths to jsons.
+            targets (list): What module should be targeted
+                (to choose validator for example)

         Raises:
-            RuntimeError: When there is no pathto process.
+            RuntimeError: When there is no path to process.
         """
         if not any(paths):
             raise RuntimeError("No publish paths specified")
@@ -82,6 +84,10 @@ class PypeCommands:
         pyblish.api.register_target("filesequence")
         pyblish.api.register_host("shell")

+        if targets:
+            for target in targets:
+                pyblish.api.register_target(target)
+
         os.environ["OPENPYPE_PUBLISH_DATA"] = os.pathsep.join(paths)

         log.info("Running publish ...")

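
As a quick sketch of what the new `targets` argument enables (a hypothetical call, not from the diff): extra pyblish targets are registered before publishing so that target-filtered plugins such as ValidateExpectedFiles run.

    # minimal sketch, assuming pyblish is installed
    import pyblish.api

    targets = ["deadline"]
    for target in targets:
        pyblish.api.register_target(target)
    # plugins declaring `targets = ["deadline"]` are now picked up by pyblish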

@@ -78,6 +78,10 @@
                     {
                         "name": "colorspace",
                         "value": "linear"
+                    },
+                    {
+                        "name": "create_directories",
+                        "value": "True"
                     }
                 ]
             },
@@ -114,6 +118,10 @@
                     {
                         "name": "colorspace",
                         "value": "linear"
+                    },
+                    {
+                        "name": "create_directories",
+                        "value": "True"
                     }
                 ]
             }


@@ -1,5 +1,16 @@
 {
     "publish": {
+        "ValidateExpectedFiles": {
+            "enabled": true,
+            "active": true,
+            "allow_user_override": true,
+            "families": [
+                "render"
+            ],
+            "targets": [
+                "deadline"
+            ]
+        },
         "MayaSubmitDeadline": {
             "enabled": true,
             "optional": false,
@@ -8,7 +19,9 @@
             "use_published": true,
             "asset_dependencies": true,
             "group": "none",
-            "limit": []
+            "limit": [],
+            "jobInfo": {},
+            "pluginInfo": {}
         },
         "NukeSubmitDeadline": {
             "enabled": true,
@@ -22,6 +35,8 @@
             "group": "",
             "department": "",
             "use_gpu": true,
+            "env_allowed_keys": [],
+            "env_search_replace_values": {},
             "limit_groups": {}
         },
         "HarmonySubmitDeadline": {


@@ -200,6 +200,106 @@
         }
     },
     "publish": {
+        "CollectFtrackFamily": {
+            "enabled": true,
+            "profiles": [
+                {
+                    "hosts": [
+                        "standalonepublisher"
+                    ],
+                    "families": [],
+                    "tasks": [],
+                    "add_ftrack_family": true,
+                    "advanced_filtering": []
+                },
+                {
+                    "hosts": [
+                        "standalonepublisher"
+                    ],
+                    "families": [
+                        "matchmove",
+                        "shot"
+                    ],
+                    "tasks": [],
+                    "add_ftrack_family": false,
+                    "advanced_filtering": []
+                },
+                {
+                    "hosts": [
+                        "standalonepublisher"
+                    ],
+                    "families": [
+                        "plate"
+                    ],
+                    "tasks": [],
+                    "add_ftrack_family": false,
+                    "advanced_filtering": [
+                        {
+                            "families": [
+                                "clip",
+                                "review"
+                            ],
+                            "add_ftrack_family": true
+                        }
+                    ]
+                },
+                {
+                    "hosts": [
+                        "maya"
+                    ],
+                    "families": [
+                        "model",
+                        "setdress",
+                        "animation",
+                        "look",
+                        "rig",
+                        "camera"
+                    ],
+                    "tasks": [],
+                    "add_ftrack_family": true,
+                    "advanced_filtering": []
+                },
+                {
+                    "hosts": [
+                        "tvpaint"
+                    ],
+                    "families": [
+                        "renderPass"
+                    ],
+                    "tasks": [],
+                    "add_ftrack_family": false,
+                    "advanced_filtering": []
+                },
+                {
+                    "hosts": [
+                        "tvpaint"
+                    ],
+                    "families": [],
+                    "tasks": [],
+                    "add_ftrack_family": true,
+                    "advanced_filtering": []
+                },
+                {
+                    "hosts": [
+                        "nuke"
+                    ],
+                    "families": [
+                        "write",
+                        "render"
+                    ],
+                    "tasks": [],
+                    "add_ftrack_family": false,
+                    "advanced_filtering": [
+                        {
+                            "families": [
+                                "review"
+                            ],
+                            "add_ftrack_family": true
+                        }
+                    ]
+                }
+            ]
+        },
         "IntegrateFtrackNote": {
             "enabled": true,
             "note_with_intent_template": "{intent}: {comment}",


@ -56,6 +56,12 @@
] ]
}, },
"overscan_crop": "", "overscan_crop": "",
"overscan_color": [
0,
0,
0,
255
],
"width": 0, "width": 0,
"height": 0, "height": 0,
"bg_color": [ "bg_color": [
@ -166,6 +172,8 @@
"deadline_group": "", "deadline_group": "",
"deadline_chunk_size": 1, "deadline_chunk_size": 1,
"deadline_priority": 50, "deadline_priority": 50,
"publishing_script": "",
"skip_integration_repre_list": [],
"aov_filter": { "aov_filter": {
"maya": [ "maya": [
".+(?:\\.|_)([Bb]eauty)(?:\\.|_).*" ".+(?:\\.|_)([Bb]eauty)(?:\\.|_).*"
@ -178,6 +186,10 @@
".*" ".*"
] ]
} }
},
"CleanUp": {
"paterns": [],
"remove_temp_renders": false
} }
}, },
"tools": { "tools": {
@ -226,6 +238,17 @@
], ],
"tasks": [], "tasks": [],
"template": "{family}{Task}_{Render_layer}_{Render_pass}" "template": "{family}{Task}_{Render_layer}_{Render_pass}"
},
{
"families": [
"review",
"workfile"
],
"hosts": [
"tvpaint"
],
"tasks": [],
"template": "{family}{Task}"
} }
] ]
}, },
@ -254,28 +277,7 @@
} }
} }
}, },
"project_folder_structure": { "project_folder_structure": "{\"__project_root__\": {\"prod\": {}, \"resources\": {\"footage\": {\"plates\": {}, \"offline\": {}}, \"audio\": {}, \"art_dept\": {}}, \"editorial\": {}, \"assets[ftrack.Library]\": {\"characters[ftrack]\": {}, \"locations[ftrack]\": {}}, \"shots[ftrack.Sequence]\": {\"scripts\": {}, \"editorial[ftrack.Folder]\": {}}}}",
"__project_root__": {
"prod": {},
"resources": {
"footage": {
"plates": {},
"offline": {}
},
"audio": {},
"art_dept": {}
},
"editorial": {},
"assets[ftrack.Library]": {
"characters[ftrack]": {},
"locations[ftrack]": {}
},
"shots[ftrack.Sequence]": {
"scripts": {},
"editorial[ftrack.Folder]": {}
}
}
},
"sync_server": { "sync_server": {
"enabled": true, "enabled": true,
"config": { "config": {

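The `project_folder_structure` value above is now stored as a JSON-encoded string rather than a nested object (matching the new `store_as_string` option of RawJson entities further below). Consumers therefore decode it before use; a minimal round-trip sketch with a shortened, illustrative structure:

    import json

    raw = '{"__project_root__": {"prod": {}, "editorial": {}}}'
    structure = json.loads(raw)  # back to the nested dict form
    assert "prod" in structure["__project_root__"]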
View file

@ -5,6 +5,11 @@
".*" ".*"
] ]
}, },
"ValidateContainers": {
"enabled": true,
"optional": true,
"active": true
},
"ValidateSceneSettings": { "ValidateSceneSettings": {
"enabled": true, "enabled": true,
"optional": true, "optional": true,

View file

@ -0,0 +1,9 @@
{
"publish": {
"ValidateContainers": {
"enabled": true,
"optional": true,
"active": true
}
}
}

View file

@ -127,6 +127,11 @@
"CollectMayaRender": { "CollectMayaRender": {
"sync_workfile_version": false "sync_workfile_version": false
}, },
"ValidateContainers": {
"enabled": true,
"optional": true,
"active": true
},
"ValidateShaderName": { "ValidateShaderName": {
"enabled": false, "enabled": false,
"regex": "(?P<asset>.*)_(.*)_SHD" "regex": "(?P<asset>.*)_(.*)_SHD"

View file

@ -10,16 +10,33 @@
}, },
"create": { "create": {
"CreateWriteRender": { "CreateWriteRender": {
"fpath_template": "{work}/renders/nuke/{subset}/{subset}.{frame}.{ext}" "fpath_template": "{work}/renders/nuke/{subset}/{subset}.{frame}.{ext}",
"defaults": [
"Main",
"Mask"
]
}, },
"CreateWritePrerender": { "CreateWritePrerender": {
"fpath_template": "{work}/prerenders/nuke/{subset}/{subset}.{frame}.{ext}" "fpath_template": "{work}/prerenders/nuke/{subset}/{subset}.{frame}.{ext}",
"use_range_limit": true,
"defaults": [
"Key01",
"Bg01",
"Fg01",
"Branch01",
"Part01"
]
} }
}, },
"publish": { "publish": {
"PreCollectNukeInstances": { "PreCollectNukeInstances": {
"sync_workfile_version": true "sync_workfile_version": true
}, },
"ValidateContainers": {
"enabled": true,
"optional": true,
"active": true
},
"ValidateKnobs": { "ValidateKnobs": {
"enabled": false, "enabled": false,
"knobs": { "knobs": {

View file

@ -7,6 +7,11 @@
} }
}, },
"publish": { "publish": {
"ValidateContainers": {
"enabled": true,
"optional": true,
"active": true
},
"ExtractImage": { "ExtractImage": {
"formats": [ "formats": [
"png", "png",

View file

@ -105,16 +105,23 @@
"label": "Render", "label": "Render",
"family": "render", "family": "render",
"icon": "image", "icon": "image",
"defaults": ["Animation", "Lighting", "Lookdev", "Compositing"], "defaults": [
"Animation",
"Lighting",
"Lookdev",
"Compositing"
],
"help": "Rendered images or video files" "help": "Rendered images or video files"
}, },
"create_mov_batch": { "create_mov_batch": {
"name": "mov_batch", "name": "mov_batch",
"label": "Batch Mov", "label": "Batch Mov",
"family": "render_mov_batch", "family": "render_mov_batch",
"icon": "image", "icon": "image",
"defaults": ["Main"], "defaults": [
"help": "Process multiple Mov files and publish them for layout and comp." "Main"
],
"help": "Process multiple Mov files and publish them for layout and comp."
}, },
"__dynamic_keys_labels__": { "__dynamic_keys_labels__": {
"create_workfile": "Workfile", "create_workfile": "Workfile",
@ -154,10 +161,62 @@
"ExtractThumbnailSP": { "ExtractThumbnailSP": {
"ffmpeg_args": { "ffmpeg_args": {
"input": [ "input": [
"-gamma 2.2" "-apply_trc gamma22"
], ],
"output": [] "output": []
} }
},
"CollectEditorial": {
"source_dir": "",
"extensions": [
"mov",
"mp4"
]
},
"CollectHierarchyInstance": {
"shot_rename_template": "{project[code]}_{_sequence_}_{_shot_}",
"shot_rename_search_patterns": {
"_sequence_": "(\\d{4})(?=_\\d{4})",
"_shot_": "(\\d{4})(?!_\\d{4})"
},
"shot_add_hierarchy": {
"parents_path": "{project}/{folder}/{sequence}",
"parents": {
"project": "{project[name]}",
"sequence": "{_sequence_}",
"folder": "shots"
}
},
"shot_add_tasks": {}
},
"shot_add_tasks": {
"custom_start_frame": 0,
"timeline_frame_start": 900000,
"timeline_frame_offset": 0,
"subsets": {
"referenceMain": {
"family": "review",
"families": [
"clip"
],
"extensions": [
"mp4"
],
"version": 0,
"keepSequence": false
},
"audioMain": {
"family": "audio",
"families": [
"clip"
],
"extensions": [
"wav"
],
"version": 0,
"keepSequence": false
}
}
} }
} }
} }
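The `shot_rename_search_patterns` above use lookahead assertions to split a clip name into sequence and shot tokens, which are then filled into `shot_rename_template`. A small illustration (the clip name is hypothetical):

    import re

    name = "0010_0020"  # hypothetical source clip name
    sequence = re.search(r"(\d{4})(?=_\d{4})", name).group(1)   # "0010"
    shot = re.search(r"(\d{4})(?!_\d{4})", name).group(1)       # "0020"
    data = {"project": {"code": "PRJ"}, "_sequence_": sequence, "_shot_": shot}
    print("{project[code]}_{_sequence_}_{_shot_}".format(**data))  # PRJ_0010_0020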

View file

@ -1,5 +1,13 @@
{ {
"publish": { "publish": {
"ExtractSequence": {
"review_bg": [
255,
255,
255,
255
]
},
"ValidateProjectSettings": { "ValidateProjectSettings": {
"enabled": true, "enabled": true,
"optional": true, "optional": true,

View file

@ -807,7 +807,6 @@
"environment": {}, "environment": {},
"variants": { "variants": {
"2-83": { "2-83": {
"use_python_2": false,
"executables": { "executables": {
"windows": [ "windows": [
"C:\\Program Files\\Blender Foundation\\Blender 2.83\\blender.exe" "C:\\Program Files\\Blender Foundation\\Blender 2.83\\blender.exe"
@ -829,7 +828,6 @@
"environment": {} "environment": {}
}, },
"2-90": { "2-90": {
"use_python_2": false,
"executables": { "executables": {
"windows": [ "windows": [
"C:\\Program Files\\Blender Foundation\\Blender 2.90\\blender.exe" "C:\\Program Files\\Blender Foundation\\Blender 2.90\\blender.exe"
@ -851,7 +849,6 @@
"environment": {} "environment": {}
}, },
"2-91": { "2-91": {
"use_python_2": false,
"executables": { "executables": {
"windows": [ "windows": [
"C:\\Program Files\\Blender Foundation\\Blender 2.91\\blender.exe" "C:\\Program Files\\Blender Foundation\\Blender 2.91\\blender.exe"
@ -891,7 +888,6 @@
"20": { "20": {
"enabled": true, "enabled": true,
"variant_label": "20", "variant_label": "20",
"use_python_2": false,
"executables": { "executables": {
"windows": [], "windows": [],
"darwin": [], "darwin": [],
@ -907,7 +903,6 @@
"17": { "17": {
"enabled": true, "enabled": true,
"variant_label": "17", "variant_label": "17",
"use_python_2": false,
"executables": { "executables": {
"windows": [], "windows": [],
"darwin": [ "darwin": [
@ -932,7 +927,6 @@
"environment": {}, "environment": {},
"variants": { "variants": {
"animation_11-64bits": { "animation_11-64bits": {
"use_python_2": false,
"executables": { "executables": {
"windows": [ "windows": [
"C:\\Program Files\\TVPaint Developpement\\TVPaint Animation 11 (64bits)\\TVPaint Animation 11 (64bits).exe" "C:\\Program Files\\TVPaint Developpement\\TVPaint Animation 11 (64bits)\\TVPaint Animation 11 (64bits).exe"
@ -948,7 +942,6 @@
"environment": {} "environment": {}
}, },
"animation_11-32bits": { "animation_11-32bits": {
"use_python_2": false,
"executables": { "executables": {
"windows": [ "windows": [
"C:\\Program Files (x86)\\TVPaint Developpement\\TVPaint Animation 11 (32bits)\\TVPaint Animation 11 (32bits).exe" "C:\\Program Files (x86)\\TVPaint Developpement\\TVPaint Animation 11 (32bits)\\TVPaint Animation 11 (32bits).exe"
@ -982,7 +975,6 @@
"2020": { "2020": {
"enabled": true, "enabled": true,
"variant_label": "2020", "variant_label": "2020",
"use_python_2": false,
"executables": { "executables": {
"windows": [ "windows": [
"C:\\Program Files\\Adobe\\Adobe Photoshop 2020\\Photoshop.exe" "C:\\Program Files\\Adobe\\Adobe Photoshop 2020\\Photoshop.exe"
@ -1000,7 +992,6 @@
"2021": { "2021": {
"enabled": true, "enabled": true,
"variant_label": "2021", "variant_label": "2021",
"use_python_2": false,
"executables": { "executables": {
"windows": [ "windows": [
"C:\\Program Files\\Adobe\\Adobe Photoshop 2021\\Photoshop.exe" "C:\\Program Files\\Adobe\\Adobe Photoshop 2021\\Photoshop.exe"
@ -1030,7 +1021,6 @@
"2020": { "2020": {
"enabled": true, "enabled": true,
"variant_label": "2020", "variant_label": "2020",
"use_python_2": false,
"executables": { "executables": {
"windows": [ "windows": [
"" ""
@ -1048,7 +1038,6 @@
"2021": { "2021": {
"enabled": true, "enabled": true,
"variant_label": "2021", "variant_label": "2021",
"use_python_2": false,
"executables": { "executables": {
"windows": [ "windows": [
"C:\\Program Files\\Adobe\\Adobe After Effects 2021\\Support Files\\AfterFX.exe" "C:\\Program Files\\Adobe\\Adobe After Effects 2021\\Support Files\\AfterFX.exe"
@ -1095,22 +1084,12 @@
"unreal": { "unreal": {
"enabled": true, "enabled": true,
"label": "Unreal Editor", "label": "Unreal Editor",
"icon": "{}/app_icons/ue4.png'", "icon": "{}/app_icons/ue4.png",
"host_name": "unreal", "host_name": "unreal",
"environment": {}, "environment": {},
"variants": { "variants": {
"4-26": { "4-26": {
"use_python_2": false, "use_python_2": false,
"executables": {
"windows": [],
"darwin": [],
"linux": []
},
"arguments": {
"windows": [],
"darwin": [],
"linux": []
},
"environment": {} "environment": {}
} }
} }

View file

@ -101,6 +101,7 @@ from .color_entity import ColorEntity
from .enum_entity import ( from .enum_entity import (
BaseEnumEntity, BaseEnumEntity,
EnumEntity, EnumEntity,
HostsEnumEntity,
AppsEnumEntity, AppsEnumEntity,
ToolsEnumEntity, ToolsEnumEntity,
TaskTypeEnumEntity, TaskTypeEnumEntity,
@ -110,6 +111,7 @@ from .enum_entity import (
from .list_entity import ListEntity from .list_entity import ListEntity
from .dict_immutable_keys_entity import DictImmutableKeysEntity from .dict_immutable_keys_entity import DictImmutableKeysEntity
from .dict_mutable_keys_entity import DictMutableKeysEntity from .dict_mutable_keys_entity import DictMutableKeysEntity
from .dict_conditional import DictConditionalEntity
from .anatomy_entities import AnatomyEntity from .anatomy_entities import AnatomyEntity
@ -153,6 +155,7 @@ __all__ = (
"BaseEnumEntity", "BaseEnumEntity",
"EnumEntity", "EnumEntity",
"HostsEnumEntity",
"AppsEnumEntity", "AppsEnumEntity",
"ToolsEnumEntity", "ToolsEnumEntity",
"TaskTypeEnumEntity", "TaskTypeEnumEntity",
@ -164,5 +167,7 @@ __all__ = (
"DictMutableKeysEntity", "DictMutableKeysEntity",
"DictConditionalEntity",
"AnatomyEntity" "AnatomyEntity"
) )

View file

@ -136,6 +136,7 @@ class BaseItemEntity(BaseEntity):
# Override state defines which values are used, saved and how. # Override state defines which values are used, saved and how.
# TODO convert to private attribute # TODO convert to private attribute
self._override_state = OverrideState.NOT_DEFINED self._override_state = OverrideState.NOT_DEFINED
self._ignore_missing_defaults = None
# These attributes may change values during existence of an object # These attributes may change values during existence of an object
# Default value, studio override values and project override values # Default value, studio override values and project override values
@ -279,8 +280,13 @@ class BaseItemEntity(BaseEntity):
self, "Dynamic entity can't require restart." self, "Dynamic entity can't require restart."
) )
@abstractproperty
def root_key(self):
"""Root is represented as this dictionary key."""
pass
@abstractmethod @abstractmethod
def set_override_state(self, state): def set_override_state(self, state, ignore_missing_defaults):
"""Set override state and trigger it on children. """Set override state and trigger it on children.
Method discard all changes in hierarchy and use values, metadata Method discard all changes in hierarchy and use values, metadata
@ -290,8 +296,15 @@ class BaseItemEntity(BaseEntity):
Should start on root entity and when triggered then must be called on Should start on root entity and when triggered then must be called on
all entities in hierarchy. all entities in hierarchy.
Argument `ignore_missing_defaults` should be used when an entity has
children that are not saved or used all the time, but the override state
must be changed and the children may not have any default value.
Args: Args:
state (OverrideState): State to which should be data changed. state (OverrideState): State to which should be data changed.
ignore_missing_defaults (bool): Ignore missing default values.
Entity won't raise `DefaultsNotDefined` and
`StudioDefaultsNotDefined`.
""" """
pass pass
@ -866,6 +879,10 @@ class ItemEntity(BaseItemEntity):
"""Call save on root item.""" """Call save on root item."""
self.root_item.save() self.root_item.save()
@property
def root_key(self):
return self.root_item.root_key
def schema_validations(self): def schema_validations(self):
if not self.label and self.use_label_wrap: if not self.label and self.use_label_wrap:
reason = ( reason = (
@ -885,7 +902,11 @@ class ItemEntity(BaseItemEntity):
def create_schema_object(self, *args, **kwargs): def create_schema_object(self, *args, **kwargs):
"""Reference method for creation of entities defined in RootEntity.""" """Reference method for creation of entities defined in RootEntity."""
return self.root_item.create_schema_object(*args, **kwargs) return self.schema_hub.create_schema_object(*args, **kwargs)
@property
def schema_hub(self):
return self.root_item.schema_hub
def get_entity_from_path(self, path): def get_entity_from_path(self, path):
return self.root_item.get_entity_from_path(path) return self.root_item.get_entity_from_path(path)

View file

@ -12,6 +12,17 @@ class ColorEntity(InputEntity):
def _item_initalization(self): def _item_initalization(self):
self.valid_value_types = (list, ) self.valid_value_types = (list, )
self.value_on_not_set = [0, 0, 0, 255] self.value_on_not_set = [0, 0, 0, 255]
self.use_alpha = self.schema_data.get("use_alpha", True)
def set_override_state(self, *args, **kwargs):
super(ColorEntity, self).set_override_state(*args, **kwargs)
value = self._current_value
if (
not self.use_alpha
and isinstance(value, list)
and len(value) == 4
):
value[3] = 255
def convert_to_valid_type(self, value): def convert_to_valid_type(self, value):
"""Conversion to valid type. """Conversion to valid type.
@ -51,4 +62,8 @@ class ColorEntity(InputEntity):
).format(value) ).format(value)
raise BaseInvalidValueType(reason, self.path) raise BaseInvalidValueType(reason, self.path)
new_value.append(item) new_value.append(item)
# Make sure alpha is forced to opaque when alpha editing is disabled
if not self.use_alpha:
new_value[3] = 255
return new_value return new_value
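# Behaviour sketch of the `use_alpha` handling above, on a plain RGBA list:
#
#     value = [12, 34, 56, 128]
#     use_alpha = False
#     if not use_alpha and len(value) == 4:
#         value[3] = 255  # alpha forced to fully opaque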

View file

@ -0,0 +1,707 @@
import copy
from .lib import (
OverrideState,
NOT_SET
)
from openpype.settings.constants import (
METADATA_KEYS,
M_OVERRIDEN_KEY,
KEY_REGEX
)
from . import (
BaseItemEntity,
ItemEntity,
GUIEntity
)
from .exceptions import (
SchemaDuplicatedKeys,
EntitySchemaError,
InvalidKeySymbols
)
class DictConditionalEntity(ItemEntity):
"""Entity represents dictionay with only one persistent key definition.
The persistent key is enumerator which define rest of children under
dictionary. There is not possibility of shared children.
Entity's keys can't be removed or added. But they may change based on
the persistent key. If you're change value manually (key by key) make sure
you'll change value of the persistent key as first. It is recommended to
use `set` method which handle this for you.
It is possible to use entity similar way as `dict` object. Returned values
are not real settings values but entities representing the value.
"""
schema_types = ["dict-conditional"]
_default_label_wrap = {
"use_label_wrap": False,
"collapsible": False,
"collapsed": True
}
def __getitem__(self, key):
"""Return entity inder key."""
if key == self.enum_key:
return self.enum_entity
return self.non_gui_children[self.current_enum][key]
def __setitem__(self, key, value):
"""Set value of item under key."""
if key == self.enum_key:
child_obj = self.enum_entity
else:
child_obj = self.non_gui_children[self.current_enum][key]
child_obj.set(value)
def __iter__(self):
"""Iter through keys."""
for key in self.keys():
yield key
def __contains__(self, key):
"""Check if key is available."""
if key == self.enum_key:
return True
return key in self.non_gui_children[self.current_enum]
def get(self, key, default=None):
"""Safe entity getter by key."""
if key == self.enum_key:
return self.enum_entity
return self.non_gui_children[self.current_enum].get(key, default)
def keys(self):
"""Entity's keys."""
keys = list(self.non_gui_children[self.current_enum].keys())
keys.insert(0, self.enum_key)
return keys
def values(self):
"""Children entities."""
values = [
self.enum_entity
]
for child_entity in self.non_gui_children[self.current_enum].values():
values.append(child_entity)
return values
def items(self):
"""Children entities paired with their key (key, value)."""
items = [
(self.enum_key, self.enum_entity)
]
for key, value in self.non_gui_children[self.current_enum].items():
items.append((key, value))
return items
def set(self, value):
"""Set value."""
new_value = self.convert_to_valid_type(value)
# First change value of enum key if available
if self.enum_key in new_value:
self.enum_entity.set(new_value.pop(self.enum_key))
for _key, _value in new_value.items():
self.non_gui_children[self.current_enum][_key].set(_value)
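# Illustrative use of `set` (the keys are hypothetical): the enum key is
# applied first so that the matching child set is active before the
# remaining keys are set:
#
#     entity.set({"type": "number", "minimum": 0})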
def _item_initalization(self):
self._default_metadata = NOT_SET
self._studio_override_metadata = NOT_SET
self._project_override_metadata = NOT_SET
self._ignore_child_changes = False
# `current_metadata` are still empty when schema is loaded
# - the only metadata stored with a dict item are group overrides in
# M_OVERRIDEN_KEY
self._current_metadata = {}
self._metadata_are_modified = False
# Entity must be group or in group
if (
self.group_item is None
and not self.is_dynamic_item
and not self.is_in_dynamic_item
):
self.is_group = True
# Children are stored by key as keys are immutable and are defined by
# schema
self.valid_value_types = (dict, )
self.children = {}
self.non_gui_children = {}
self.gui_layout = {}
if self.is_dynamic_item:
self.require_key = False
self.enum_key = self.schema_data.get("enum_key")
self.enum_label = self.schema_data.get("enum_label")
self.enum_children = self.schema_data.get("enum_children")
self.enum_entity = None
self.highlight_content = self.schema_data.get(
"highlight_content", False
)
self.show_borders = self.schema_data.get("show_borders", True)
self._add_children()
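# Illustrative "dict-conditional" schema item (shape inferred from the
# keys read above; the child definitions are hypothetical):
#
#     {
#         "type": "dict-conditional",
#         "key": "profile",
#         "enum_key": "type",
#         "enum_label": "Type",
#         "enum_children": [
#             {"key": "number", "label": "Number", "children": [
#                 {"type": "number", "key": "minimum"}
#             ]},
#             {"key": "text", "children": [
#                 {"type": "text", "key": "value"}
#             ]}
#         ]
#     }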
@property
def current_enum(self):
"""Current value of enum entity.
This value defines which children are used.
"""
if self.enum_entity is None:
return None
return self.enum_entity.value
def schema_validations(self):
"""Validation of schema data."""
# Enum key must be defined
if self.enum_key is None:
raise EntitySchemaError(self, "Key 'enum_key' is not set.")
# Validate type of enum children
if not isinstance(self.enum_children, list):
raise EntitySchemaError(
self, "Key 'enum_children' must be a list. Got: {}".format(
str(type(self.enum_children))
)
)
# Without defined enum children entity has nothing to do
if not self.enum_children:
raise EntitySchemaError(self, (
"Key 'enum_children' have empty value. Entity can't work"
" without children definitions."
))
children_def_keys = []
for children_def in self.enum_children:
if not isinstance(children_def, dict):
raise EntitySchemaError(self, (
"Children definition under key 'enum_children' must"
" be a dictionary."
))
if "key" not in children_def:
raise EntitySchemaError(self, (
"Children definition under key 'enum_children' is missing"
" the 'key' definition."
))
# We don't validate regex of these keys because they will be stored
# as value at the end.
key = children_def["key"]
if key in children_def_keys:
# TODO this should probably be a different exception?
raise SchemaDuplicatedKeys(self, key)
children_def_keys.append(key)
# Validate key duplications per each enum item
for children in self.children.values():
children_keys = set()
children_keys.add(self.enum_key)
for child_entity in children:
if not isinstance(child_entity, BaseItemEntity):
continue
elif child_entity.key not in children_keys:
children_keys.add(child_entity.key)
else:
raise SchemaDuplicatedKeys(self, child_entity.key)
# Enum key must match key regex
if not KEY_REGEX.match(self.enum_key):
raise InvalidKeySymbols(self.path, self.enum_key)
# Validate all remaining keys with key regex
for children_by_key in self.non_gui_children.values():
for key in children_by_key.keys():
if not KEY_REGEX.match(key):
raise InvalidKeySymbols(self.path, key)
super(DictConditionalEntity, self).schema_validations()
# Trigger schema validation on children entities
for children in self.children.values():
for child_obj in children:
child_obj.schema_validations()
def on_change(self):
"""Update metadata on change and pass change to parent."""
self._update_current_metadata()
for callback in self.on_change_callbacks:
callback()
self.parent.on_child_change(self)
def on_child_change(self, child_obj):
"""Trigger on change callback if child changes are not ignored."""
if self._ignore_child_changes:
return
if (
child_obj is self.enum_entity
or child_obj in self.children[self.current_enum]
):
self.on_change()
def _add_children(self):
"""Add children from schema data and repare enum items.
Each enum item must have defined it's children. None are shared across
all enum items.
Nice to have: Have ability to have shared keys across all enum items.
All children are stored by their enum item.
"""
# Skip if they are not defined
# - schema validations should raise an exception
if not self.enum_children or not self.enum_key:
return
valid_enum_items = []
for item in self.enum_children:
if isinstance(item, dict) and "key" in item:
valid_enum_items.append(item)
enum_items = []
for item in valid_enum_items:
item_key = item["key"]
item_label = item.get("label") or item_key
enum_items.append({item_key: item_label})
if not enum_items:
return
# Create Enum child first
enum_key = self.enum_key or "invalid"
enum_schema = {
"type": "enum",
"multiselection": False,
"enum_items": enum_items,
"key": enum_key,
"label": self.enum_label or enum_key
}
enum_entity = self.create_schema_object(enum_schema, self)
self.enum_entity = enum_entity
# Create children per each enum item
for item in valid_enum_items:
item_key = item["key"]
# Make sure all keys have a value set in these variables
# - key 'children' is optional
self.non_gui_children[item_key] = {}
self.children[item_key] = []
self.gui_layout[item_key] = []
children = item.get("children") or []
for children_schema in children:
child_obj = self.create_schema_object(children_schema, self)
self.children[item_key].append(child_obj)
self.gui_layout[item_key].append(child_obj)
if isinstance(child_obj, GUIEntity):
continue
self.non_gui_children[item_key][child_obj.key] = child_obj
def get_child_path(self, child_obj):
"""Get hierarchical path of child entity.
Child must be the entity's direct child. It must be possible to get the
path of any child, even one not under the current enum value.
"""
if child_obj is self.enum_entity:
return "/".join([self.path, self.enum_key])
result_key = None
for children in self.non_gui_children.values():
for key, _child_obj in children.items():
if _child_obj is child_obj:
result_key = key
break
if result_key is None:
raise ValueError("Didn't found child {}".format(child_obj))
return "/".join([self.path, result_key])
def _update_current_metadata(self):
current_metadata = {}
for key, child_obj in self.non_gui_children[self.current_enum].items():
if self._override_state is OverrideState.DEFAULTS:
break
if not child_obj.is_group:
continue
if (
self._override_state is OverrideState.STUDIO
and not child_obj.has_studio_override
):
continue
if (
self._override_state is OverrideState.PROJECT
and not child_obj.has_project_override
):
continue
if M_OVERRIDEN_KEY not in current_metadata:
current_metadata[M_OVERRIDEN_KEY] = []
current_metadata[M_OVERRIDEN_KEY].append(key)
# Define if current metadata are available for current override state
metadata = NOT_SET
if self._override_state is OverrideState.STUDIO:
metadata = self._studio_override_metadata
elif self._override_state is OverrideState.PROJECT:
metadata = self._project_override_metadata
if metadata is NOT_SET:
metadata = {}
self._metadata_are_modified = current_metadata != metadata
self._current_metadata = current_metadata
def set_override_state(self, state, ignore_missing_defaults):
# Trigger override state change of root if is not same
if self.root_item.override_state is not state:
self.root_item.set_override_state(state)
return
# Change has/had override states
self._override_state = state
self._ignore_missing_defaults = ignore_missing_defaults
# Set override state on enum entity first
self.enum_entity.set_override_state(state, ignore_missing_defaults)
# Set override state on other enum children
# - these must not raise an exception about missing defaults
for children_by_key in self.non_gui_children.values():
for child_obj in children_by_key.values():
child_obj.set_override_state(state, True)
self._update_current_metadata()
@property
def value(self):
output = {
self.enum_key: self.enum_entity.value
}
for key, child_obj in self.non_gui_children[self.current_enum].items():
output[key] = child_obj.value
return output
@property
def has_unsaved_changes(self):
if self._metadata_are_modified:
return True
return self._child_has_unsaved_changes
@property
def _child_has_unsaved_changes(self):
if self.enum_entity.has_unsaved_changes:
return True
for child_obj in self.non_gui_children[self.current_enum].values():
if child_obj.has_unsaved_changes:
return True
return False
@property
def has_studio_override(self):
return self._child_has_studio_override
@property
def _child_has_studio_override(self):
if self._override_state >= OverrideState.STUDIO:
if self.enum_entity.has_studio_override:
return True
for child_obj in self.non_gui_children[self.current_enum].values():
if child_obj.has_studio_override:
return True
return False
@property
def has_project_override(self):
return self._child_has_project_override
@property
def _child_has_project_override(self):
if self._override_state >= OverrideState.PROJECT:
if self.enum_entity.has_project_override:
return True
for child_obj in self.non_gui_children[self.current_enum].values():
if child_obj.has_project_override:
return True
return False
def settings_value(self):
if self._override_state is OverrideState.NOT_DEFINED:
return NOT_SET
if self._override_state is OverrideState.DEFAULTS:
children_items = [
(self.enum_key, self.enum_entity)
]
for item in self.non_gui_children[self.current_enum].items():
children_items.append(item)
output = {}
for key, child_obj in children_items:
child_value = child_obj.settings_value()
if not child_obj.is_file and not child_obj.file_item:
for _key, _value in child_value.items():
new_key = "/".join([key, _key])
output[new_key] = _value
else:
output[key] = child_value
return output
if self.is_group:
if self._override_state is OverrideState.STUDIO:
if not self.has_studio_override:
return NOT_SET
elif self._override_state is OverrideState.PROJECT:
if not self.has_project_override:
return NOT_SET
output = {}
children_items = [
(self.enum_key, self.enum_entity)
]
for item in self.non_gui_children[self.current_enum].items():
children_items.append(item)
for key, child_obj in children_items:
value = child_obj.settings_value()
if value is not NOT_SET:
output[key] = value
if not output:
return NOT_SET
output.update(self._current_metadata)
return output
def _prepare_value(self, value):
if value is NOT_SET or self.enum_key not in value:
return NOT_SET, NOT_SET
enum_value = value.get(self.enum_key)
if enum_value not in self.non_gui_children:
return NOT_SET, NOT_SET
# Create copy of value before popping values
value = copy.deepcopy(value)
metadata = {}
for key in METADATA_KEYS:
if key in value:
metadata[key] = value.pop(key)
enum_value = value.get(self.enum_key)
old_metadata = metadata.get(M_OVERRIDEN_KEY)
if old_metadata:
old_metadata_set = set(old_metadata)
new_metadata = []
non_gui_children = self.non_gui_children[enum_value]
for key in non_gui_children.keys():
if key in old_metadata:
new_metadata.append(key)
old_metadata_set.remove(key)
for key in old_metadata_set:
new_metadata.append(key)
metadata[M_OVERRIDEN_KEY] = new_metadata
return value, metadata
def update_default_value(self, value):
"""Update default values.
Not an api method, should be called by parent.
"""
value = self._check_update_value(value, "default")
self.has_default_value = value is not NOT_SET
# TODO add value validation
value, metadata = self._prepare_value(value)
self._default_metadata = metadata
if value is NOT_SET:
self.enum_entity.update_default_value(value)
for children_by_key in self.non_gui_children.values():
for child_obj in children_by_key.values():
child_obj.update_default_value(value)
return
value_keys = set(value.keys())
enum_value = value[self.enum_key]
expected_keys = set(self.non_gui_children[enum_value].keys())
expected_keys.add(self.enum_key)
unknown_keys = value_keys - expected_keys
if unknown_keys:
self.log.warning(
"{} Unknown keys in default values: {}".format(
self.path,
", ".join("\"{}\"".format(key) for key in unknown_keys)
)
)
self.enum_entity.update_default_value(enum_value)
for children_by_key in self.non_gui_children.values():
for key, child_obj in children_by_key.items():
child_value = value.get(key, NOT_SET)
child_obj.update_default_value(child_value)
def update_studio_value(self, value):
"""Update studio override values.
Not an api method, should be called by parent.
"""
value = self._check_update_value(value, "studio override")
value, metadata = self._prepare_value(value)
self._studio_override_metadata = metadata
self.had_studio_override = metadata is not NOT_SET
if value is NOT_SET:
self.enum_entity.update_studio_value(value)
for children_by_key in self.non_gui_children.values():
for child_obj in children_by_key.values():
child_obj.update_studio_value(value)
return
value_keys = set(value.keys())
enum_value = value[self.enum_key]
expected_keys = set(self.non_gui_children[enum_value])
expected_keys.add(self.enum_key)
unknown_keys = value_keys - expected_keys
if unknown_keys:
self.log.warning(
"{} Unknown keys in studio overrides: {}".format(
self.path,
", ".join("\"{}\"".format(key) for key in unknown_keys)
)
)
self.enum_entity.update_studio_value(enum_value)
for children_by_key in self.non_gui_children.values():
for key, child_obj in children_by_key.items():
child_value = value.get(key, NOT_SET)
child_obj.update_studio_value(child_value)
def update_project_value(self, value):
"""Update project override values.
Not an api method, should be called by parent.
"""
value = self._check_update_value(value, "project override")
value, metadata = self._prepare_value(value)
self._project_override_metadata = metadata
self.had_project_override = metadata is not NOT_SET
if value is NOT_SET:
self.enum_entity.update_project_value(value)
for children_by_key in self.non_gui_children.values():
for child_obj in children_by_key.values():
child_obj.update_project_value(value)
return
value_keys = set(value.keys())
enum_value = value[self.enum_key]
expected_keys = set(self.non_gui_children[enum_value])
expected_keys.add(self.enum_key)
unknown_keys = value_keys - expected_keys
if unknown_keys:
self.log.warning(
"{} Unknown keys in project overrides: {}".format(
self.path,
", ".join("\"{}\"".format(key) for key in unknown_keys)
)
)
self.enum_entity.update_project_value(enum_value)
for children_by_key in self.non_gui_children.values():
for key, child_obj in children_by_key.items():
child_value = value.get(key, NOT_SET)
child_obj.update_project_value(child_value)
def _discard_changes(self, on_change_trigger):
self._ignore_child_changes = True
self.enum_entity.discard_changes(on_change_trigger)
for children_by_key in self.non_gui_children.values():
for child_obj in children_by_key.values():
child_obj.discard_changes(on_change_trigger)
self._ignore_child_changes = False
def _add_to_studio_default(self, on_change_trigger):
self._ignore_child_changes = True
self.enum_entity.add_to_studio_default(on_change_trigger)
for children_by_key in self.non_gui_children.values():
for child_obj in children_by_key.values():
child_obj.add_to_studio_default(on_change_trigger)
self._ignore_child_changes = False
self._update_current_metadata()
self.parent.on_child_change(self)
def _remove_from_studio_default(self, on_change_trigger):
self._ignore_child_changes = True
self.enum_entity.remove_from_studio_default(on_change_trigger)
for children_by_key in self.non_gui_children.values():
for child_obj in children_by_key.values():
child_obj.remove_from_studio_default(on_change_trigger)
self._ignore_child_changes = False
def _add_to_project_override(self, on_change_trigger):
self._ignore_child_changes = True
self.enum_entity.add_to_project_override(on_change_trigger)
for children_by_key in self.non_gui_children.values():
for child_obj in children_by_key.values():
child_obj.add_to_project_override(on_change_trigger)
self._ignore_child_changes = False
self._update_current_metadata()
self.parent.on_child_change(self)
def _remove_from_project_override(self, on_change_trigger):
if self._override_state is not OverrideState.PROJECT:
return
self._ignore_child_changes = True
self.enum_entity.remove_from_project_override(on_change_trigger)
for children_by_key in self.non_gui_children.values():
for child_obj in children_by_key.values():
child_obj.remove_from_project_override(on_change_trigger)
self._ignore_child_changes = False
def reset_callbacks(self):
"""Reset registered callbacks on entity and children."""
super(DictConditionalEntity, self).reset_callbacks()
for children in self.children.values():
for child_entity in children:
child_entity.reset_callbacks()

View file

@ -1,4 +1,5 @@
import copy import copy
import collections
from .lib import ( from .lib import (
WRAPPER_TYPES, WRAPPER_TYPES,
@ -138,7 +139,16 @@ class DictImmutableKeysEntity(ItemEntity):
method when handling gui wrappers. method when handling gui wrappers.
""" """
added_children = [] added_children = []
for children_schema in schema_data["children"]: children_deque = collections.deque()
for _children_schema in schema_data["children"]:
children_schemas = self.schema_hub.resolve_schema_data(
_children_schema
)
for children_schema in children_schemas:
children_deque.append(children_schema)
while children_deque:
children_schema = children_deque.popleft()
if children_schema["type"] in WRAPPER_TYPES: if children_schema["type"] in WRAPPER_TYPES:
_children_schema = copy.deepcopy(children_schema) _children_schema = copy.deepcopy(children_schema)
wrapper_children = self._add_children( wrapper_children = self._add_children(
@ -248,7 +258,7 @@ class DictImmutableKeysEntity(ItemEntity):
self._metadata_are_modified = current_metadata != metadata self._metadata_are_modified = current_metadata != metadata
self._current_metadata = current_metadata self._current_metadata = current_metadata
def set_override_state(self, state): def set_override_state(self, state, ignore_missing_defaults):
# Trigger override state change of root if is not same # Trigger override state change of root if is not same
if self.root_item.override_state is not state: if self.root_item.override_state is not state:
self.root_item.set_override_state(state) self.root_item.set_override_state(state)
@ -256,9 +266,10 @@ class DictImmutableKeysEntity(ItemEntity):
# Change has/had override states # Change has/had override states
self._override_state = state self._override_state = state
self._ignore_missing_defaults = ignore_missing_defaults
for child_obj in self.non_gui_children.values(): for child_obj in self.non_gui_children.values():
child_obj.set_override_state(state) child_obj.set_override_state(state, ignore_missing_defaults)
self._update_current_metadata() self._update_current_metadata()

View file

@ -154,7 +154,9 @@ class DictMutableKeysEntity(EndpointEntity):
def add_key(self, key): def add_key(self, key):
new_child = self._add_key(key) new_child = self._add_key(key)
new_child.set_override_state(self._override_state) new_child.set_override_state(
self._override_state, self._ignore_missing_defaults
)
self.on_change() self.on_change()
return new_child return new_child
@ -320,7 +322,7 @@ class DictMutableKeysEntity(EndpointEntity):
def _metadata_for_current_state(self): def _metadata_for_current_state(self):
return self._get_metadata_for_state(self._override_state) return self._get_metadata_for_state(self._override_state)
def set_override_state(self, state): def set_override_state(self, state, ignore_missing_defaults):
# Trigger override state change of root if is not same # Trigger override state change of root if is not same
if self.root_item.override_state is not state: if self.root_item.override_state is not state:
self.root_item.set_override_state(state) self.root_item.set_override_state(state)
@ -328,14 +330,22 @@ class DictMutableKeysEntity(EndpointEntity):
# TODO change metadata # TODO change metadata
self._override_state = state self._override_state = state
self._ignore_missing_defaults = ignore_missing_defaults
# Ignore if is dynamic item and use default in that case # Ignore if is dynamic item and use default in that case
if not self.is_dynamic_item and not self.is_in_dynamic_item: if not self.is_dynamic_item and not self.is_in_dynamic_item:
if state > OverrideState.DEFAULTS: if state > OverrideState.DEFAULTS:
if not self.has_default_value: if (
not self.has_default_value
and not ignore_missing_defaults
):
raise DefaultsNotDefined(self) raise DefaultsNotDefined(self)
elif state > OverrideState.STUDIO: elif state > OverrideState.STUDIO:
if not self.had_studio_override: if (
not self.had_studio_override
and not ignore_missing_defaults
):
raise StudioDefaultsNotDefined(self) raise StudioDefaultsNotDefined(self)
if state is OverrideState.STUDIO: if state is OverrideState.STUDIO:
@ -426,7 +436,7 @@ class DictMutableKeysEntity(EndpointEntity):
if label: if label:
children_label_by_id[child_entity.id] = label children_label_by_id[child_entity.id] = label
child_entity.set_override_state(state) child_entity.set_override_state(state, ignore_missing_defaults)
self.children_label_by_id = children_label_by_id self.children_label_by_id = children_label_by_id
@ -610,7 +620,9 @@ class DictMutableKeysEntity(EndpointEntity):
if not self._can_discard_changes: if not self._can_discard_changes:
return return
self.set_override_state(self._override_state) self.set_override_state(
self._override_state, self._ignore_missing_defaults
)
on_change_trigger.append(self.on_change) on_change_trigger.append(self.on_change)
def _add_to_studio_default(self, _on_change_trigger): def _add_to_studio_default(self, _on_change_trigger):
@ -645,7 +657,9 @@ class DictMutableKeysEntity(EndpointEntity):
if label: if label:
children_label_by_id[child_entity.id] = label children_label_by_id[child_entity.id] = label
child_entity.set_override_state(self._override_state) child_entity.set_override_state(
self._override_state, self._ignore_missing_defaults
)
self.children_label_by_id = children_label_by_id self.children_label_by_id = children_label_by_id
@ -694,7 +708,9 @@ class DictMutableKeysEntity(EndpointEntity):
if label: if label:
children_label_by_id[child_entity.id] = label children_label_by_id[child_entity.id] = label
child_entity.set_override_state(self._override_state) child_entity.set_override_state(
self._override_state, self._ignore_missing_defaults
)
self.children_label_by_id = children_label_by_id self.children_label_by_id = children_label_by_id
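Every `set_override_state` implementation in this commit gains the second `ignore_missing_defaults` argument, and every recursive call passes it down. Callers therefore look roughly like this sketch (the import path is assumed from the relative imports used in these modules):

    from openpype.settings.entities.lib import OverrideState

    entity.set_override_state(OverrideState.STUDIO, ignore_missing_defaults=True)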

View file

@ -101,6 +101,79 @@ class EnumEntity(BaseEnumEntity):
super(EnumEntity, self).schema_validations() super(EnumEntity, self).schema_validations()
class HostsEnumEntity(BaseEnumEntity):
"""Enumeration of host names.
Enum items are hardcoded in definition of the entity.
Hosts enum can have defined empty value as valid option which is
represented by empty string. Schema key to set this option is
`use_empty_value` (true/false). And to set label of empty value set
`empty_label` (string).
Enum can have single and multiselection.
NOTE:
Host name is not the same as application name. Host name defines
implementation instead of application name.
"""
schema_types = ["hosts-enum"]
def _item_initalization(self):
self.multiselection = self.schema_data.get("multiselection", True)
self.use_empty_value = self.schema_data.get(
"use_empty_value", not self.multiselection
)
custom_labels = self.schema_data.get("custom_labels") or {}
host_names = [
"aftereffects",
"blender",
"celaction",
"fusion",
"harmony",
"hiero",
"houdini",
"maya",
"nuke",
"photoshop",
"resolve",
"tvpaint",
"unreal",
"standalonepublisher"
]
if self.use_empty_value:
host_names.insert(0, "")
# Add default label for empty value if not available
if "" not in custom_labels:
custom_labels[""] = "< without host >"
# These are hardcoded as there is no list of available hosts in OpenPype
enum_items = []
valid_keys = set()
for key in host_names:
label = custom_labels.get(key, key)
valid_keys.add(key)
enum_items.append({key: label})
self.enum_items = enum_items
self.valid_keys = valid_keys
if self.multiselection:
self.valid_value_types = (list, )
self.value_on_not_set = []
else:
for key in valid_keys:
if self.value_on_not_set is NOT_SET:
self.value_on_not_set = key
break
self.valid_value_types = (STRING_TYPE, )
# GUI attribute
self.placeholder = self.schema_data.get("placeholder")
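# Illustrative schema usage (keys taken from the reads above; the
# surrounding dict schema is assumed):
#
#     {
#         "type": "hosts-enum",
#         "key": "hosts",
#         "label": "Hosts",
#         "multiselection": true,
#         "use_empty_value": false,
#         "custom_labels": {"standalonepublisher": "Standalone Publisher"}
#     }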
class AppsEnumEntity(BaseEnumEntity): class AppsEnumEntity(BaseEnumEntity):
schema_types = ["apps-enum"] schema_types = ["apps-enum"]

View file

@ -1,5 +1,6 @@
import re import re
import copy import copy
import json
from abc import abstractmethod from abc import abstractmethod
from .base_entity import ItemEntity from .base_entity import ItemEntity
@ -217,21 +218,28 @@ class InputEntity(EndpointEntity):
return True return True
return False return False
def set_override_state(self, state): def set_override_state(self, state, ignore_missing_defaults):
# Trigger override state change of root if is not same # Trigger override state change of root if is not same
if self.root_item.override_state is not state: if self.root_item.override_state is not state:
self.root_item.set_override_state(state) self.root_item.set_override_state(state)
return return
self._override_state = state self._override_state = state
self._ignore_missing_defaults = ignore_missing_defaults
# Ignore if is dynamic item and use default in that case # Ignore if is dynamic item and use default in that case
if not self.is_dynamic_item and not self.is_in_dynamic_item: if not self.is_dynamic_item and not self.is_in_dynamic_item:
if state > OverrideState.DEFAULTS: if state > OverrideState.DEFAULTS:
if not self.has_default_value: if (
not self.has_default_value
and not ignore_missing_defaults
):
raise DefaultsNotDefined(self) raise DefaultsNotDefined(self)
elif state > OverrideState.STUDIO: elif state > OverrideState.STUDIO:
if not self.had_studio_override: if (
not self.had_studio_override
and not ignore_missing_defaults
):
raise StudioDefaultsNotDefined(self) raise StudioDefaultsNotDefined(self)
if state is OverrideState.STUDIO: if state is OverrideState.STUDIO:
@ -433,6 +441,7 @@ class RawJsonEntity(InputEntity):
def _item_initalization(self): def _item_initalization(self):
# Schema must define if valid value is dict or list # Schema must define if valid value is dict or list
store_as_string = self.schema_data.get("store_as_string", False)
is_list = self.schema_data.get("is_list", False) is_list = self.schema_data.get("is_list", False)
if is_list: if is_list:
valid_value_types = (list, ) valid_value_types = (list, )
@ -441,6 +450,8 @@ class RawJsonEntity(InputEntity):
valid_value_types = (dict, ) valid_value_types = (dict, )
value_on_not_set = {} value_on_not_set = {}
self.store_as_string = store_as_string
self._is_list = is_list self._is_list = is_list
self.valid_value_types = valid_value_types self.valid_value_types = valid_value_types
self.value_on_not_set = value_on_not_set self.value_on_not_set = value_on_not_set
@ -484,6 +495,23 @@ class RawJsonEntity(InputEntity):
result = self.metadata != self._metadata_for_current_state() result = self.metadata != self._metadata_for_current_state()
return result return result
def schema_validations(self):
if self.store_as_string and self.is_env_group:
reason = (
"RawJson entity can't store environment group metadata"
" as string."
)
raise EntitySchemaError(self, reason)
super(RawJsonEntity, self).schema_validations()
def _convert_to_valid_type(self, value):
if isinstance(value, STRING_TYPE):
try:
return json.loads(value)
except Exception:
pass
return super(RawJsonEntity, self)._convert_to_valid_type(value)
def _metadata_for_current_state(self): def _metadata_for_current_state(self):
if ( if (
self._override_state is OverrideState.PROJECT self._override_state is OverrideState.PROJECT
@ -503,6 +531,9 @@ class RawJsonEntity(InputEntity):
value = super(RawJsonEntity, self)._settings_value() value = super(RawJsonEntity, self)._settings_value()
if self.is_env_group and isinstance(value, dict): if self.is_env_group and isinstance(value, dict):
value.update(self.metadata) value.update(self.metadata)
if self.store_as_string:
return json.dumps(value)
return value return value
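# The `store_as_string` handling above serializes the JSON value to a
# string on save and decodes string input on load; a minimal round-trip
# sketch of that behaviour:
#
#     import json
#
#     value = {"__project_root__": {"prod": {}}}
#     stored = json.dumps(value)      # as returned by _settings_value()
#     restored = json.loads(stored)   # as accepted by _convert_to_valid_type()
#     assert restored == value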
def _prepare_value(self, value): def _prepare_value(self, value):

View file

@ -150,14 +150,15 @@ class PathEntity(ItemEntity):
def value(self): def value(self):
return self.child_obj.value return self.child_obj.value
def set_override_state(self, state): def set_override_state(self, state, ignore_missing_defaults):
# Trigger override state change of root if is not same # Trigger override state change of root if is not same
if self.root_item.override_state is not state: if self.root_item.override_state is not state:
self.root_item.set_override_state(state) self.root_item.set_override_state(state)
return return
self._override_state = state self._override_state = state
self.child_obj.set_override_state(state) self._ignore_missing_defaults = ignore_missing_defaults
self.child_obj.set_override_state(state, ignore_missing_defaults)
def update_default_value(self, value): def update_default_value(self, value):
self.child_obj.update_default_value(value) self.child_obj.update_default_value(value)
@ -344,25 +345,32 @@ class ListStrictEntity(ItemEntity):
return True return True
return False return False
def set_override_state(self, state): def set_override_state(self, state, ignore_missing_defaults):
# Trigger override state change of root if is not same # Trigger override state change of root if is not same
if self.root_item.override_state is not state: if self.root_item.override_state is not state:
self.root_item.set_override_state(state) self.root_item.set_override_state(state)
return return
self._override_state = state self._override_state = state
self._ignore_missing_defaults = ignore_missing_defaults
# Ignore if is dynamic item and use default in that case # Ignore if is dynamic item and use default in that case
if not self.is_dynamic_item and not self.is_in_dynamic_item: if not self.is_dynamic_item and not self.is_in_dynamic_item:
if state > OverrideState.DEFAULTS: if state > OverrideState.DEFAULTS:
if not self.has_default_value: if (
not self.has_default_value
and not ignore_missing_defaults
):
raise DefaultsNotDefined(self) raise DefaultsNotDefined(self)
elif state > OverrideState.STUDIO: elif state > OverrideState.STUDIO:
if not self.had_studio_override: if (
not self.had_studio_override
and not ignore_missing_defaults
):
raise StudioDefaultsNotDefined(self) raise StudioDefaultsNotDefined(self)
for child_entity in self.children: for child_entity in self.children:
child_entity.set_override_state(state) child_entity.set_override_state(state, ignore_missing_defaults)
self.initial_value = self.settings_value() self.initial_value = self.settings_value()

View file

@ -2,6 +2,7 @@ import os
import re import re
import json import json
import copy import copy
import inspect
from .exceptions import ( from .exceptions import (
SchemaTemplateMissingKeys, SchemaTemplateMissingKeys,
@ -25,335 +26,6 @@ TEMPLATE_METADATA_KEYS = (
template_key_pattern = re.compile(r"(\{.*?[^{0]*\})") template_key_pattern = re.compile(r"(\{.*?[^{0]*\})")
def _pop_metadata_item(template):
found_idx = None
for idx, item in enumerate(template):
if not isinstance(item, dict):
continue
for key in TEMPLATE_METADATA_KEYS:
if key in item:
found_idx = idx
break
if found_idx is not None:
break
metadata_item = {}
if found_idx is not None:
metadata_item = template.pop(found_idx)
return metadata_item
def _fill_schema_template_data(
template, template_data, skip_paths, required_keys=None, missing_keys=None
):
first = False
if required_keys is None:
first = True
if "skip_paths" in template_data:
skip_paths = template_data["skip_paths"]
if not isinstance(skip_paths, list):
skip_paths = [skip_paths]
# Cleanup skip paths (skip empty values)
skip_paths = [path for path in skip_paths if path]
required_keys = set()
missing_keys = set()
# Copy template data as content may change
template = copy.deepcopy(template)
# Get metadata item from template
metadata_item = _pop_metadata_item(template)
# Check for default values for template data
default_values = metadata_item.get(DEFAULT_VALUES_KEY) or {}
for key, value in default_values.items():
if key not in template_data:
template_data[key] = value
if not template:
output = template
elif isinstance(template, list):
# Store paths by first part if path
# - None value says that whole key should be skipped
skip_paths_by_first_key = {}
for path in skip_paths:
parts = path.split("/")
key = parts.pop(0)
if key not in skip_paths_by_first_key:
skip_paths_by_first_key[key] = []
value = "/".join(parts)
skip_paths_by_first_key[key].append(value or None)
output = []
for item in template:
# Get skip paths for children item
_skip_paths = []
if not isinstance(item, dict):
pass
elif item.get("type") in WRAPPER_TYPES:
_skip_paths = copy.deepcopy(skip_paths)
elif skip_paths_by_first_key:
# Check if this item should be skipped
key = item.get("key")
if key and key in skip_paths_by_first_key:
_skip_paths = skip_paths_by_first_key[key]
# Skip whole item if None is in skip paths value
if None in _skip_paths:
continue
output_item = _fill_schema_template_data(
item, template_data, _skip_paths, required_keys, missing_keys
)
if output_item:
output.append(output_item)
elif isinstance(template, dict):
output = {}
for key, value in template.items():
output[key] = _fill_schema_template_data(
value, template_data, skip_paths, required_keys, missing_keys
)
if output.get("type") in WRAPPER_TYPES and not output.get("children"):
return {}
elif isinstance(template, STRING_TYPE):
# TODO find much better way how to handle filling template data
template = template.replace("{{", "__dbcb__").replace("}}", "__decb__")
for replacement_string in template_key_pattern.findall(template):
key = str(replacement_string[1:-1])
required_keys.add(key)
if key not in template_data:
missing_keys.add(key)
continue
value = template_data[key]
if replacement_string == template:
# Replace the value with value from templates data
# - with this is possible to set value with different type
template = value
else:
# Only replace the key in string
template = template.replace(replacement_string, value)
output = template.replace("__dbcb__", "{").replace("__decb__", "}")
else:
output = template
if first and missing_keys:
raise SchemaTemplateMissingKeys(missing_keys, required_keys)
return output
def _fill_schema_template(child_data, schema_collection, schema_templates):
template_name = child_data["name"]
template = schema_templates.get(template_name)
if template is None:
if template_name in schema_collection:
raise KeyError((
"Schema \"{}\" is used as `schema_template`"
).format(template_name))
raise KeyError("Schema template \"{}\" was not found".format(
template_name
))
# Default value must be dictionary (NOT list)
# - empty list would not add any item if `template_data` are not filled
template_data = child_data.get("template_data") or {}
if isinstance(template_data, dict):
template_data = [template_data]
skip_paths = child_data.get("skip_paths") or []
if isinstance(skip_paths, STRING_TYPE):
skip_paths = [skip_paths]
output = []
for single_template_data in template_data:
try:
filled_child = _fill_schema_template_data(
template, single_template_data, skip_paths
)
except SchemaTemplateMissingKeys as exc:
raise SchemaTemplateMissingKeys(
exc.missing_keys, exc.required_keys, template_name
)
for item in filled_child:
filled_item = _fill_inner_schemas(
item, schema_collection, schema_templates
)
if filled_item["type"] == "schema_template":
output.extend(_fill_schema_template(
filled_item, schema_collection, schema_templates
))
else:
output.append(filled_item)
return output
def _fill_inner_schemas(schema_data, schema_collection, schema_templates):
if schema_data["type"] == "schema":
raise ValueError("First item in schema data can't be schema.")
children_key = "children"
object_type_key = "object_type"
for item_key in (children_key, object_type_key):
children = schema_data.get(item_key)
if not children:
continue
if object_type_key == item_key:
if not isinstance(children, dict):
continue
children = [children]
new_children = []
for child in children:
child_type = child["type"]
if child_type == "schema":
schema_name = child["name"]
if schema_name not in schema_collection:
if schema_name in schema_templates:
raise KeyError((
"Schema template \"{}\" is used as `schema`"
).format(schema_name))
raise KeyError(
"Schema \"{}\" was not found".format(schema_name)
)
filled_child = _fill_inner_schemas(
schema_collection[schema_name],
schema_collection,
schema_templates
)
elif child_type in ("template", "schema_template"):
for filled_child in _fill_schema_template(
child, schema_collection, schema_templates
):
new_children.append(filled_child)
continue
else:
filled_child = _fill_inner_schemas(
child, schema_collection, schema_templates
)
new_children.append(filled_child)
if item_key == object_type_key:
if len(new_children) != 1:
raise KeyError((
"Failed to fill object type with type: {} | name {}"
).format(
child_type, str(child.get("name"))
))
new_children = new_children[0]
schema_data[item_key] = new_children
return schema_data
# TODO reimplement logic inside entities
def validate_environment_groups_uniquenes(
schema_data, env_groups=None, keys=None
):
is_first = False
if env_groups is None:
is_first = True
env_groups = {}
keys = []
my_keys = copy.deepcopy(keys)
key = schema_data.get("key")
if key:
my_keys.append(key)
env_group_key = schema_data.get("env_group_key")
if env_group_key:
if env_group_key not in env_groups:
env_groups[env_group_key] = []
env_groups[env_group_key].append("/".join(my_keys))
children = schema_data.get("children")
if not children:
return
for child in children:
validate_environment_groups_uniquenes(
child, env_groups, copy.deepcopy(my_keys)
)
if is_first:
invalid = {}
for env_group_key, key_paths in env_groups.items():
if len(key_paths) > 1:
invalid[env_group_key] = key_paths
if invalid:
raise SchemaDuplicatedEnvGroupKeys(invalid)
def validate_schema(schema_data):
validate_environment_groups_uniquenes(schema_data)
def get_gui_schema(subfolder, main_schema_name):
dirpath = os.path.join(
os.path.dirname(__file__),
"schemas",
subfolder
)
loaded_schemas = {}
loaded_schema_templates = {}
for root, _, filenames in os.walk(dirpath):
for filename in filenames:
basename, ext = os.path.splitext(filename)
if ext != ".json":
continue
filepath = os.path.join(root, filename)
with open(filepath, "r") as json_stream:
try:
schema_data = json.load(json_stream)
except Exception as exc:
raise ValueError((
"Unable to parse JSON file {}\n{}"
).format(filepath, str(exc)))
if isinstance(schema_data, list):
loaded_schema_templates[basename] = schema_data
else:
loaded_schemas[basename] = schema_data
main_schema = _fill_inner_schemas(
loaded_schemas[main_schema_name],
loaded_schemas,
loaded_schema_templates
)
validate_schema(main_schema)
return main_schema
def get_studio_settings_schema():
return get_gui_schema("system_schema", "schema_main")
def get_project_settings_schema():
return get_gui_schema("projects_schema", "schema_main")
class OverrideStateItem: class OverrideStateItem:
"""Object used as item for `OverrideState` enum. """Object used as item for `OverrideState` enum.
@ -426,3 +98,506 @@ class OverrideState:
DEFAULTS = OverrideStateItem(0, "Defaults") DEFAULTS = OverrideStateItem(0, "Defaults")
STUDIO = OverrideStateItem(1, "Studio overrides") STUDIO = OverrideStateItem(1, "Studio overrides")
PROJECT = OverrideStateItem(2, "Project Overrides") PROJECT = OverrideStateItem(2, "Project Overrides")
class SchemasHub:
def __init__(self, schema_subfolder, reset=True):
self._schema_subfolder = schema_subfolder
self._loaded_types = {}
self._gui_types = tuple()
self._crashed_on_load = {}
self._loaded_templates = {}
self._loaded_schemas = {}
# It doesn't make sense to reload types on each reset as they can't be
# changed
self._load_types()
# Trigger reset
if reset:
self.reset()
def reset(self):
self._load_schemas()
@property
def gui_types(self):
return self._gui_types
def get_schema(self, schema_name):
"""Get schema definition data by its name.
Returns:
dict: Copy of schema loaded from json files.
Raises:
KeyError: When the name refers to a loaded template, when the
json file could not be parsed, or when the schema name was
not found.
"""
if schema_name not in self._loaded_schemas:
if schema_name in self._loaded_templates:
raise KeyError((
"Template \"{}\" is used as `schema`"
).format(schema_name))
elif schema_name in self._crashed_on_load:
crashed_item = self._crashed_on_load[schema_name]
raise KeyError(
"Unable to parse schema file \"{}\". {}".format(
crashed_item["filepath"], crashed_item["message"]
)
)
raise KeyError(
"Schema \"{}\" was not found".format(schema_name)
)
return copy.deepcopy(self._loaded_schemas[schema_name])
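# A minimal usage sketch (the subfolder and schema names follow the
# defaults used elsewhere in this file; treat them as illustrative):
#     hub = SchemasHub("system_schema")
#     main_schema = hub.get_schema("schema_main")  # deep copy of the dict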
def get_template(self, template_name):
"""Get template definition data by its name.
Returns:
list: Copy of template items loaded from json files.
Raises:
KeyError: When the name refers to a loaded schema, when the
json file could not be parsed, or when the template name was
not found.
"""
if template_name not in self._loaded_templates:
if template_name in self._loaded_schemas:
raise KeyError((
"Schema \"{}\" is used as `template`"
).format(template_name))
elif template_name in self._crashed_on_load:
crashed_item = self._crashed_on_load[template_name]
raise KeyError(
"Unable to parse template file \"{}\". {}".format(
crashed_item["filepath"], crashed_item["message"]
)
)
raise KeyError(
"Template \"{}\" was not found".format(template_name)
)
return copy.deepcopy(self._loaded_templates[template_name])
def resolve_schema_data(self, schema_data):
"""Resolve a single schema item as a few types can be expanded.
This is mainly for 'schema' and 'template' types. Type 'schema' does
not have an entity representation and 'template' may contain more than
one output schema.
In other cases the passed schema item is returned in a list.
The goal is to have schema and template resolving in one place.
Returns:
list: Resolved schema data.
"""
schema_type = schema_data["type"]
if schema_type not in ("schema", "template", "schema_template"):
return [schema_data]
if schema_type == "schema":
return self.resolve_schema_data(
self.get_schema(schema_data["name"])
)
template_name = schema_data["name"]
template_def = self.get_template(template_name)
filled_template = self._fill_template(
schema_data, template_def
)
return filled_template
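# Illustrative sketch (hypothetical names): an item
#     {"type": "schema", "name": "foo"}
# resolves to the resolved content of "foo.json", while a plain item
#     {"type": "text", "key": "bar"}
# is returned unchanged as a single-item list.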
def create_schema_object(self, schema_data, *args, **kwargs):
"""Create entity for passed schema data.
Args:
schema_data(dict): Schema definition of settings entity.
Returns:
ItemEntity: Created entity for passed schema data item.
Raises:
ValueError: When 'schema', 'template' or any of wrapper types are
passed.
KeyError: When type of passed schema is not known.
"""
schema_type = schema_data["type"]
if schema_type in ("schema", "template", "schema_template"):
raise ValueError(
"Got unresolved schema data of type \"{}\"".format(schema_type)
)
if schema_type in WRAPPER_TYPES:
raise ValueError((
"Function `create_schema_object` can't create entities"
" of any wrapper type. Got type: \"{}\""
).format(schema_type))
klass = self._loaded_types.get(schema_type)
if not klass:
raise KeyError("Unknown type \"{}\"".format(schema_type))
return klass(schema_data, *args, **kwargs)
def _load_types(self):
"""Prepare entity types for creation of their objects.
Currently all classes in `openpype.settings.entities` that inherit
from `BaseEntity` are stored as loaded types. GUI types are stored in
a separate attribute to not mess up api access of entities.
TODOs:
Add a more dynamic way to add custom types from anywhere and
better handling of abstract classes. Skipping them is dangerous.
"""
from openpype.settings import entities
# Define known abstract classes
known_abstract_classes = (
entities.BaseEntity,
entities.BaseItemEntity,
entities.ItemEntity,
entities.EndpointEntity,
entities.InputEntity,
entities.BaseEnumEntity
)
self._loaded_types = {}
_gui_types = []
for attr in dir(entities):
item = getattr(entities, attr)
# Filter classes
if not inspect.isclass(item):
continue
# Skip classes that do not inherit from BaseEntity
if not issubclass(item, entities.BaseEntity):
continue
# Skip class that is abstract by design
if item in known_abstract_classes:
continue
if inspect.isabstract(item):
# Instantiate the abstract class to crash here and get a traceback
item()
# Backwards compatibility
# Single entity may have multiple schema types
for schema_type in item.schema_types:
self._loaded_types[schema_type] = item
if item.gui_type:
_gui_types.append(item)
self._gui_types = tuple(_gui_types)
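# Illustrative sketch of the resulting mapping; the keys come from each
# entity's `schema_types` (class names are examples, not a guaranteed
# list):
#     self._loaded_types == {"list": ListEntity, "text": TextEntity, ...}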
def _load_schemas(self):
"""Load schema definitions from json files."""
# Refresh all affecting variables
self._crashed_on_load = {}
self._loaded_templates = {}
self._loaded_schemas = {}
dirpath = os.path.join(
os.path.dirname(os.path.abspath(__file__)),
"schemas",
self._schema_subfolder
)
loaded_schemas = {}
loaded_templates = {}
for root, _, filenames in os.walk(dirpath):
for filename in filenames:
basename, ext = os.path.splitext(filename)
if ext != ".json":
continue
filepath = os.path.join(root, filename)
with open(filepath, "r") as json_stream:
try:
schema_data = json.load(json_stream)
except Exception as exc:
msg = str(exc)
print("Unable to parse JSON file {}\n{}".format(
filepath, msg
))
self._crashed_on_load[basename] = {
"filepath": filepath,
"message": msg
}
continue
if basename in self._crashed_on_load:
crashed_item = self._crashed_on_load[basename]
raise KeyError((
"Duplicated filename \"{}\"."
" One of them crashed on load \"{}\" {}"
).format(
filename,
crashed_item["filepath"],
crashed_item["message"]
))
if isinstance(schema_data, list):
if basename in loaded_templates:
raise KeyError(
"Duplicated template filename \"{}\"".format(
filename
)
)
loaded_templates[basename] = schema_data
else:
if basename in loaded_schemas:
raise KeyError(
"Duplicated schema filename \"{}\"".format(
filename
)
)
loaded_schemas[basename] = schema_data
self._loaded_templates = loaded_templates
self._loaded_schemas = loaded_schemas
def _fill_template(self, child_data, template_def):
"""Fill template based on schema definition and template definition.
`template_def` is modified based on `child_data` and the result is
returned.
A template definition may define keys to fill which
should be filled with data from the child data.
Child data may produce more than one output definition of a template.
Child data can define paths to skip. A path is the full path of an item
which won't be returned.
TODO:
Be able to handle wrapper items here.
Args:
child_data(dict): Schema data of template item.
template_def(dict): Template definition that will be filled with
child_data.
Returns:
list: Resolved template always returns list of schemas.
"""
template_name = child_data["name"]
# Default value must be a dictionary (NOT a list)
# - an empty list would not add any item if `template_data` is not filled
template_data = child_data.get("template_data") or {}
if isinstance(template_data, dict):
template_data = [template_data]
skip_paths = child_data.get("skip_paths") or []
if isinstance(skip_paths, STRING_TYPE):
skip_paths = [skip_paths]
output = []
for single_template_data in template_data:
try:
output.extend(self._fill_template_data(
template_def, single_template_data, skip_paths
))
except SchemaTemplateMissingKeys as exc:
raise SchemaTemplateMissingKeys(
exc.missing_keys, exc.required_keys, template_name
)
return output
def _fill_template_data(
self,
template,
template_data,
skip_paths,
required_keys=None,
missing_keys=None
):
"""Fill template values with data from schema data.
Templates have more abilities than schemas. It is expected that a
template will be used in multiple places (but may not be). A schema
represents exactly one entity and its children but a template may
represent more entities.
A template can have "keys to fill" defined. Some keys may be
required and some may be optional because the template defines their
default values.
Templates also have the ability to "skip paths", which removes entities
from their content. A template can be used across multiple places with
different requirements.
Raises:
SchemaTemplateMissingKeys: When the fill data does not contain all
keys required by the template.
"""
first = False
if required_keys is None:
first = True
if "skip_paths" in template_data:
skip_paths = template_data["skip_paths"]
if not isinstance(skip_paths, list):
skip_paths = [skip_paths]
# Cleanup skip paths (skip empty values)
skip_paths = [path for path in skip_paths if path]
required_keys = set()
missing_keys = set()
# Copy template data as content may change
template = copy.deepcopy(template)
# Get metadata item from template
metadata_item = self._pop_metadata_item(template)
# Check for default values for template data
default_values = metadata_item.get(DEFAULT_VALUES_KEY) or {}
for key, value in default_values.items():
if key not in template_data:
template_data[key] = value
if not template:
output = template
elif isinstance(template, list):
# Store paths by the first part of the path
# - a None value says that the whole key should be skipped
skip_paths_by_first_key = {}
for path in skip_paths:
parts = path.split("/")
key = parts.pop(0)
if key not in skip_paths_by_first_key:
skip_paths_by_first_key[key] = []
value = "/".join(parts)
skip_paths_by_first_key[key].append(value or None)
output = []
for item in template:
# Get skip paths for children item
_skip_paths = []
if not isinstance(item, dict):
pass
elif item.get("type") in WRAPPER_TYPES:
_skip_paths = copy.deepcopy(skip_paths)
elif skip_paths_by_first_key:
# Check if this item should be skipped
key = item.get("key")
if key and key in skip_paths_by_first_key:
_skip_paths = skip_paths_by_first_key[key]
# Skip whole item if None is in skip paths value
if None in _skip_paths:
continue
output_item = self._fill_template_data(
item,
template_data,
_skip_paths,
required_keys,
missing_keys
)
if output_item:
output.append(output_item)
elif isinstance(template, dict):
output = {}
for key, value in template.items():
output[key] = self._fill_template_data(
value,
template_data,
skip_paths,
required_keys,
missing_keys
)
if (
output.get("type") in WRAPPER_TYPES
and not output.get("children")
):
return {}
elif isinstance(template, STRING_TYPE):
# TODO find a much better way to handle filling template data
template = (
template
.replace("{{", "__dbcb__")
.replace("}}", "__decb__")
)
full_replacement = False
for replacement_string in template_key_pattern.findall(template):
key = str(replacement_string[1:-1])
required_keys.add(key)
if key not in template_data:
missing_keys.add(key)
continue
value = template_data[key]
if replacement_string == template:
# Replace the value with the value from template data
# - this makes it possible to set a value of a different type
template = value
full_replacement = True
else:
# Only replace the key in string
template = template.replace(replacement_string, value)
if not full_replacement:
output = (
template
.replace("__dbcb__", "{")
.replace("__decb__", "}")
)
else:
output = template
else:
output = template
if first and missing_keys:
raise SchemaTemplateMissingKeys(missing_keys, required_keys)
return output
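# Illustrative sketch (hypothetical template and fill data): with a
# template item {"type": "text", "key": "{host_name}_path"} and
# template_data {"host_name": "nuke"}, the filled result is
# {"type": "text", "key": "nuke_path"}. A whole-string replacement such
# as {"key": "{host_name}"} keeps the original type of the filled value.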
def _pop_metadata_item(self, template_def):
"""Pop template metadata from template definition.
Template metadata may define default values used when they are not
passed from schema data.
"""
found_idx = None
for idx, item in enumerate(template_def):
if not isinstance(item, dict):
continue
for key in TEMPLATE_METADATA_KEYS:
if key in item:
found_idx = idx
break
if found_idx is not None:
break
metadata_item = {}
if found_idx is not None:
metadata_item = template_def.pop(found_idx)
return metadata_item
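# Illustrative sketch of a metadata item as it may appear as the first
# item of a template json file. The exact metadata key is defined by
# TEMPLATE_METADATA_KEYS / DEFAULT_VALUES_KEY; "__default_values__" is
# an assumption here:
#     [
#         {"__default_values__": {"host_label": "Nuke"}},
#         {"type": "text", "key": "{host_name}", "label": "{host_label}"}
#     ]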


@@ -102,7 +102,9 @@ class ListEntity(EndpointEntity):
     def add_new_item(self, idx=None, trigger_change=True):
         child_obj = self._add_new_item(idx)
-        child_obj.set_override_state(self._override_state)
+        child_obj.set_override_state(
+            self._override_state, self._ignore_missing_defaults
+        )
 
         if trigger_change:
             self.on_child_change(child_obj)
@@ -205,13 +207,14 @@ class ListEntity(EndpointEntity):
             self._has_project_override = True
 
         self.on_change()
 
-    def set_override_state(self, state):
+    def set_override_state(self, state, ignore_missing_defaults):
         # Trigger override state change of root if is not same
         if self.root_item.override_state is not state:
             self.root_item.set_override_state(state)
             return
 
         self._override_state = state
+        self._ignore_missing_defaults = ignore_missing_defaults
 
         while self.children:
             self.children.pop(0)
@@ -219,11 +222,17 @@ class ListEntity(EndpointEntity):
         # Ignore if is dynamic item and use default in that case
         if not self.is_dynamic_item and not self.is_in_dynamic_item:
             if state > OverrideState.DEFAULTS:
-                if not self.has_default_value:
+                if (
+                    not self.has_default_value
+                    and not ignore_missing_defaults
+                ):
                     raise DefaultsNotDefined(self)
 
             elif state > OverrideState.STUDIO:
-                if not self.had_studio_override:
+                if (
+                    not self.had_studio_override
+                    and not ignore_missing_defaults
+                ):
                     raise StudioDefaultsNotDefined(self)
 
         value = NOT_SET
@@ -257,7 +266,9 @@ class ListEntity(EndpointEntity):
                 child_obj.update_studio_value(item)
 
         for child_obj in self.children:
-            child_obj.set_override_state(self._override_state)
+            child_obj.set_override_state(
+                self._override_state, ignore_missing_defaults
+            )
 
         self.initial_value = self.settings_value()
@@ -395,7 +406,9 @@ class ListEntity(EndpointEntity):
             if self.had_studio_override:
                 child_obj.update_studio_value(item)
-            child_obj.set_override_state(self._override_state)
+            child_obj.set_override_state(
+                self._override_state, self._ignore_missing_defaults
+            )
 
         if self._override_state >= OverrideState.PROJECT:
             self._has_project_override = self.had_project_override
@@ -427,7 +440,9 @@ class ListEntity(EndpointEntity):
         for item in value:
             child_obj = self._add_new_item()
             child_obj.update_default_value(item)
-            child_obj.set_override_state(self._override_state)
+            child_obj.set_override_state(
+                self._override_state, self._ignore_missing_defaults
+            )
 
         self._ignore_child_changes = False
@@ -460,7 +475,10 @@ class ListEntity(EndpointEntity):
             child_obj.update_default_value(item)
             if self._has_studio_override:
                 child_obj.update_studio_value(item)
-            child_obj.set_override_state(self._override_state)
+            child_obj.set_override_state(
+                self._override_state,
+                self._ignore_missing_defaults
+            )
 
         self._ignore_child_changes = False


@@ -1,7 +1,7 @@
 import os
 import json
 import copy
-import inspect
+import collections
 
 from abc import abstractmethod
@@ -10,8 +10,7 @@ from .lib import (
     NOT_SET,
     WRAPPER_TYPES,
     OverrideState,
-    get_studio_settings_schema,
-    get_project_settings_schema
+    SchemasHub
 )
 from .exceptions import (
     SchemaError,
@@ -53,7 +52,12 @@ class RootEntity(BaseItemEntity):
     """
     schema_types = ["root"]
 
-    def __init__(self, schema_data, reset):
+    def __init__(self, schema_hub, reset, main_schema_name=None):
+        self.schema_hub = schema_hub
+        if not main_schema_name:
+            main_schema_name = "schema_main"
+        schema_data = schema_hub.get_schema(main_schema_name)
+
         super(RootEntity, self).__init__(schema_data)
         self._require_restart_callbacks = []
         self._item_ids_require_restart = set()
@@ -130,7 +134,17 @@ class RootEntity(BaseItemEntity):
     def _add_children(self, schema_data, first=True):
         added_children = []
-        for children_schema in schema_data["children"]:
+        children_deque = collections.deque()
+        for _children_schema in schema_data["children"]:
+            children_schemas = self.schema_hub.resolve_schema_data(
+                _children_schema
+            )
+            for children_schema in children_schemas:
+                children_deque.append(children_schema)
+
+        while children_deque:
+            children_schema = children_deque.popleft()
             if children_schema["type"] in WRAPPER_TYPES:
                 _children_schema = copy.deepcopy(children_schema)
                 wrapper_children = self._add_children(
@@ -143,11 +157,13 @@ class RootEntity(BaseItemEntity):
             child_obj = self.create_schema_object(children_schema, self)
             self.children.append(child_obj)
             added_children.append(child_obj)
-            if isinstance(child_obj, self._gui_types):
+            if isinstance(child_obj, self.schema_hub.gui_types):
                 continue
 
             if child_obj.key in self.non_gui_children:
-                raise KeyError("Duplicated key \"{}\"".format(child_obj.key))
+                raise KeyError(
+                    "Duplicated key \"{}\"".format(child_obj.key)
+                )
             self.non_gui_children[child_obj.key] = child_obj
 
         if not first:
@@ -160,9 +176,6 @@ class RootEntity(BaseItemEntity):
         # Store `self` to `root_item` for children entities
         self.root_item = self
 
-        self._loaded_types = None
-        self._gui_types = None
-
         # Children are stored by key as keys are immutable and are defined by
         # schema
         self.valid_value_types = (dict, )
@@ -189,11 +202,10 @@ class RootEntity(BaseItemEntity):
             if not KEY_REGEX.match(key):
                 raise InvalidKeySymbols(self.path, key)
 
+    @abstractmethod
     def get_entity_from_path(self, path):
-        """Return system settings entity."""
-        raise NotImplementedError((
-            "Method `get_entity_from_path` not available for \"{}\""
-        ).format(self.__class__.__name__))
+        """Return entity matching passed path."""
+        pass
 
     def create_schema_object(self, schema_data, *args, **kwargs):
         """Create entity by entered schema data.
@@ -201,56 +213,11 @@ class RootEntity(BaseItemEntity):
         Available entities are loaded on first run. Children entities can call
         this method.
         """
-        if self._loaded_types is None:
-            # Load available entities
-            from openpype.settings import entities
-
-            # Define known abstract classes
-            known_abstract_classes = (
-                entities.BaseEntity,
-                entities.BaseItemEntity,
-                entities.ItemEntity,
-                entities.EndpointEntity,
-                entities.InputEntity,
-                entities.BaseEnumEntity
-            )
-
-            self._loaded_types = {}
-            _gui_types = []
-            for attr in dir(entities):
-                item = getattr(entities, attr)
-                # Filter classes
-                if not inspect.isclass(item):
-                    continue
-
-                # Skip classes that do not inherit from BaseEntity
-                if not issubclass(item, entities.BaseEntity):
-                    continue
-
-                # Skip class that is abstract by design
-                if item in known_abstract_classes:
-                    continue
-
-                if inspect.isabstract(item):
-                    # Create an object to get crash and get traceback
-                    item()
-
-                # Backwards compatibility
-                # Single entity may have multiple schema types
-                for schema_type in item.schema_types:
-                    self._loaded_types[schema_type] = item
-
-                if item.gui_type:
-                    _gui_types.append(item)
-            self._gui_types = tuple(_gui_types)
-
-        klass = self._loaded_types.get(schema_data["type"])
-        if not klass:
-            raise KeyError("Unknown type \"{}\"".format(schema_data["type"]))
-
-        return klass(schema_data, *args, **kwargs)
-
-    def set_override_state(self, state):
+        return self.schema_hub.create_schema_object(
+            schema_data, *args, **kwargs
+        )
+
+    def set_override_state(self, state, ignore_missing_defaults=None):
         """Set override state and trigger it on children.
 
         Method will discard all changes in hierarchy and use values, metadata
@@ -259,9 +226,12 @@ class RootEntity(BaseItemEntity):
         Args:
             state (OverrideState): State to which should be data changed.
         """
+        if not ignore_missing_defaults:
+            ignore_missing_defaults = False
+
         self._override_state = state
         for child_obj in self.non_gui_children.values():
-            child_obj.set_override_state(state)
+            child_obj.set_override_state(state, ignore_missing_defaults)
 
     def on_change(self):
         """Trigger callbacks on change."""
@@ -491,18 +461,32 @@ class SystemSettings(RootEntity):
         schema_data (dict): Pass schema data to entity. This is for development
             and debugging purposes.
     """
+    root_key = SYSTEM_SETTINGS_KEY
+
     def __init__(
-        self, set_studio_state=True, reset=True, schema_data=None
+        self, set_studio_state=True, reset=True, schema_hub=None
     ):
-        if schema_data is None:
+        if schema_hub is None:
             # Load system schemas
-            schema_data = get_studio_settings_schema()
+            schema_hub = SchemasHub("system_schema")
 
-        super(SystemSettings, self).__init__(schema_data, reset)
+        super(SystemSettings, self).__init__(schema_hub, reset)
 
         if set_studio_state:
             self.set_studio_state()
 
+    def get_entity_from_path(self, path):
+        """Return system settings entity."""
+        path_parts = path.split("/")
+        first_part = path_parts[0]
+        output = self
+        if first_part == self.root_key:
+            path_parts.pop(0)
+        for path_part in path_parts:
+            output = output[path_part]
+        return output
+
     def _reset_values(self):
         default_value = get_default_settings()[SYSTEM_SETTINGS_KEY]
         for key, child_obj in self.non_gui_children.items():
@@ -600,22 +584,24 @@ class ProjectSettings(RootEntity):
         schema_data (dict): Pass schema data to entity. This is for development
             and debugging purposes.
     """
+    root_key = PROJECT_SETTINGS_KEY
+
     def __init__(
         self,
         project_name=None,
         change_state=True,
         reset=True,
-        schema_data=None
+        schema_hub=None
     ):
         self._project_name = project_name
 
         self._system_settings_entity = None
 
-        if schema_data is None:
+        if schema_hub is None:
             # Load system schemas
-            schema_data = get_project_settings_schema()
+            schema_hub = SchemasHub("projects_schema")
 
-        super(ProjectSettings, self).__init__(schema_data, reset)
+        super(ProjectSettings, self).__init__(schema_hub, reset)
 
         if change_state:
             if self.project_name is None:


@@ -181,6 +181,103 @@
}
```
## dict-conditional
- is similar to `dict` but has one child entity that is always available
- that entity is an enumerator of possible values, and the other children entities are defined and used based on its value
- each value of the enumerator has its own defined children that will be used
- there is no way to share entities across multiple enum items
- the value from the enumerator is also stored next to the other values
- to define the key under which the enum value will be stored use `enum_key`
- `enum_key` must match the key regex and no enum item may have children with the same key
- `enum_label` is the label of the entity for UI purposes
- enum items are defined with `enum_children`
- it's a list where each item represents one enum item
- all items in `enum_children` must have at least the `key` key, which represents the value stored under `enum_key`
- items can define `label` for UI purposes
- the most important part is that each item can define a `children` key with the definitions of its children (the `children` value works the same way as in `dict`)
- the entity must have `"label"` defined if it is not used as a widget
- is set as a group if none of its parents is a group
- the entered `"label"` will be shown in the GUI
- an item with a label can be collapsible
- that can be set with key `"collapsible"` as `True`/`False` (Default: `True`)
- with key `"collapsed"` as `True`/`False` it can be set whether the item is collapsed when the GUI is opened (Default: `False`)
- it is possible to add a darker background with `"highlight_content"` (Default: `False`)
- the darker background has its limits: after 3-4 nested highlighted items there is no visible difference in the color
- output is a dictionary where the enum value is stored under `enum_key` next to the children values
```
# Example
{
    "type": "dict-conditional",
    "key": "my_key",
    "label": "My Key",
    "enum_key": "type",
    "enum_label": "label",
    "enum_children": [
        # Each item must be a dictionary with 'key'
        {
            "key": "action",
            "label": "Action",
            "children": [
                {
                    "type": "text",
                    "key": "key",
                    "label": "Key"
                },
                {
                    "type": "text",
                    "key": "label",
                    "label": "Label"
                },
                {
                    "type": "text",
                    "key": "command",
                    "label": "Command"
                }
            ]
        },
        {
            "key": "menu",
            "label": "Menu",
            "children": [
                {
                    "key": "children",
                    "label": "Children",
                    "type": "list",
                    "object_type": "text"
                }
            ]
        },
        {
            # Separator does not have children as "separator" value is enough
            "key": "separator",
            "label": "Separator"
        }
    ]
}
```
How the output of this schema could look on save:
```
{
    "type": "separator"
}
{
    "type": "action",
    "key": "action_1",
    "label": "Action 1",
    "command": "run command -arg"
}
{
    "type": "menu",
    "children": [
        "child_1",
        "child_2"
    ]
}
```
## Inputs for setting any kind of value (`Pure` inputs)
- all these inputs must have `"key"` defined, under which the value will be stored, and `"label"`, which will be shown next to the input
- unless they are used in other input types (described later) "as widgets"; in that case `"key"` and `"label"` are not required as there is no place to set them
@@ -240,6 +337,11 @@
- schema also defines the valid value type
- by default it is a dictionary
- to be able to use a list it is required to set `is_list` to `true`
- output can be stored as a string
- this is to allow arbitrary keys in the dictionary
- set key `store_as_string` to `true`
- code using this setting must expect the value to be a string and use the json module to convert it to Python types (see the sketch after the example below)
```
{
    "type": "raw-json",
@@ -272,6 +374,25 @@
}
```
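A minimal sketch of reading such a value with `store_as_string` enabled; the settings dictionary and its key are illustrative only:
```
import json

# Hypothetical saved settings where a raw-json value was stored as a string
settings = {"my_raw_json_key": "{\"any key\": 1}"}

# Convert the string back to python types before using it
value = json.loads(settings["my_raw_json_key"])
print(value["any key"])  # 1
```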
### hosts-enum
- enumeration of available hosts
- multiselection can be allowed by setting key `"multiselection"` to `True` (Default: `False`)
- it is possible to add an empty value (represented by an empty string) by setting `"use_empty_value"` to `True` (Default: `False`)
- it is possible to set `"custom_labels"` for host names, where the key `""` is the empty value (Default: `{}`)
```
{
    "key": "host",
    "label": "Host name",
    "type": "hosts-enum",
    "multiselection": false,
    "use_empty_value": true,
    "custom_labels": {
        "": "N/A",
        "nuke": "Nuke"
    }
}
```
```
## Inputs for setting value using Pure inputs ## Inputs for setting value using Pure inputs
- these inputs also have required `"key"` - these inputs also have required `"key"`
- attribute `"label"` is required in few conditions - attribute `"label"` is required in few conditions


@@ -82,6 +82,10 @@
     {
         "type": "schema",
         "name": "schema_project_hiero"
     },
+    {
+        "type": "schema",
+        "name": "schema_project_houdini"
+    },
     {
         "type": "schema",
         "name": "schema_project_blender"


@@ -11,6 +11,47 @@
         "key": "publish",
         "label": "Publish plugins",
         "children": [
+            {
+                "type": "dict",
+                "collapsible": true,
+                "key": "ValidateExpectedFiles",
+                "label": "Validate Expected Files",
+                "checkbox_key": "enabled",
+                "children": [
+                    {
+                        "type": "boolean",
+                        "key": "enabled",
+                        "label": "Enabled"
+                    },
+                    {
+                        "type": "boolean",
+                        "key": "active",
+                        "label": "Active"
+                    },
+                    {
+                        "type": "label",
+                        "label": "Validate if all expected files were rendered"
+                    },
+                    {
+                        "type": "boolean",
+                        "key": "allow_user_override",
+                        "label": "Allow user to change frame range"
+                    },
+                    {
+                        "type": "list",
+                        "key": "families",
+                        "object_type": "text",
+                        "label": "Trigger on families"
+                    },
+                    {
+                        "type": "list",
+                        "key": "targets",
+                        "object_type": "text",
+                        "label": "Trigger for plugins"
+                    }
+                ]
+            },
             {
                 "type": "dict",
                 "collapsible": true,
@@ -67,6 +108,16 @@
                         "key": "limit",
                         "label": "Limit Groups",
                         "object_type": "text"
+                    },
+                    {
+                        "type": "raw-json",
+                        "key": "jobInfo",
+                        "label": "Additional JobInfo data"
+                    },
+                    {
+                        "type": "raw-json",
+                        "key": "pluginInfo",
+                        "label": "Additional PluginInfo data"
                     }
                 ]
             },
@@ -132,6 +183,20 @@
                         "key": "use_gpu",
                         "label": "Use GPU"
                     },
+                    {
+                        "type": "list",
+                        "key": "env_allowed_keys",
+                        "object_type": "text",
+                        "label": "Allowed environment keys"
+                    },
+                    {
+                        "type": "dict-modifiable",
+                        "key": "env_search_replace_values",
+                        "label": "Search & replace in environment values",
+                        "object_type": {
+                            "type": "text"
+                        }
+                    },
                     {
                         "type": "dict-modifiable",
                         "key": "limit_groups",


@@ -604,6 +604,82 @@
         "key": "publish",
         "label": "Publish plugins",
         "children": [
+            {
+                "type": "dict",
+                "collapsible": true,
+                "checkbox_key": "enabled",
+                "key": "CollectFtrackFamily",
+                "label": "Collect Ftrack Family",
+                "is_group": true,
+                "children": [
+                    {
+                        "type": "boolean",
+                        "key": "enabled",
+                        "label": "Enabled"
+                    },
+                    {
+                        "type": "list",
+                        "collapsible": true,
+                        "key": "profiles",
+                        "label": "Profiles",
+                        "use_label_wrap": true,
+                        "object_type": {
+                            "type": "dict",
+                            "children": [
+                                {
+                                    "key": "hosts",
+                                    "label": "Host names",
+                                    "type": "list",
+                                    "object_type": "text"
+                                },
+                                {
+                                    "key": "families",
+                                    "label": "Families",
+                                    "type": "list",
+                                    "object_type": "text"
+                                },
+                                {
+                                    "key": "tasks",
+                                    "label": "Task names",
+                                    "type": "list",
+                                    "object_type": "text"
+                                },
+                                {
+                                    "type": "separator"
+                                },
+                                {
+                                    "key": "add_ftrack_family",
+                                    "label": "Add Ftrack Family",
+                                    "type": "boolean"
+                                },
+                                {
+                                    "type": "list",
+                                    "collapsible": true,
+                                    "key": "advanced_filtering",
+                                    "label": "Advanced adding if additional families present",
+                                    "use_label_wrap": true,
+                                    "object_type": {
+                                        "type": "dict",
+                                        "children": [
+                                            {
+                                                "key": "families",
+                                                "label": "Additional Families",
+                                                "type": "list",
+                                                "object_type": "text"
+                                            },
+                                            {
+                                                "key": "add_ftrack_family",
+                                                "label": "Add Ftrack Family",
+                                                "type": "boolean"
+                                            }
+                                        ]
+                                    }
+                                }
+                            ]
+                        }
+                    }
+                ]
+            },
             {
                 "type": "dict",
                 "collapsible": true,

Some files were not shown because too many files have changed in this diff.