diff --git a/.github/workflows/prerelease.yml b/.github/workflows/prerelease.yml index 45604e431d..d0853e74d6 100644 --- a/.github/workflows/prerelease.yml +++ b/.github/workflows/prerelease.yml @@ -43,10 +43,10 @@ jobs: uses: heinrichreimer/github-changelog-generator-action@v2.2 with: token: ${{ secrets.ADMIN_TOKEN }} - breakingLabel: '#### 💥 Breaking' - enhancementLabel: '#### 🚀 Enhancements' - bugsLabel: '#### 🐛 Bug fixes' - deprecatedLabel: '#### ⚠️ Deprecations' + breakingLabel: '**💥 Breaking**' + enhancementLabel: '**🚀 Enhancements**' + bugsLabel: '**🐛 Bug fixes**' + deprecatedLabel: '**⚠️ Deprecations**' addSections: '{"documentation":{"prefix":"### 📖 Documentation","labels":["documentation"]},"tests":{"prefix":"### ✅ Testing","labels":["tests"]}}' issues: false issuesWoLabels: false diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index a5d1822310..37e1cb4b15 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -1,9 +1,9 @@ name: Stable Release on: - push: - tags: - - '*[0-9].*[0-9].*[0-9]*' + release: + types: + - prereleased jobs: create_release: @@ -23,35 +23,26 @@ jobs: - name: Install Python requirements run: pip install gitpython semver - - name: Set env - run: | - echo "RELEASE_VERSION=${GITHUB_REF#refs/*/}" >> $GITHUB_ENV - - git config user.email ${{ secrets.CI_EMAIL }} - git config user.name ${{ secrets.CI_USER }} - git fetch - git checkout -b main origin/main - git tag -d ${GITHUB_REF#refs/*/} - git remote set-url --push origin https://pypebot:${{ secrets.ADMIN_TOKEN }}@github.com/pypeclub/openpype - git push origin --delete ${GITHUB_REF#refs/*/} - echo PREVIOUS_VERSION=`git describe --tags --match="[0-9]*" --abbrev=0` >> $GITHUB_ENV - - name: 💉 Inject new version into files id: version - if: steps.version_type.outputs.type != 'skip' run: | - python ./tools/ci_tools.py --version ${{ env.RELEASE_VERSION }} + echo ::set-output name=current_version::${GITHUB_REF#refs/*/} + RESULT=$(python ./tools/ci_tools.py --finalize ${GITHUB_REF#refs/*/}) + LASTRELEASE=$(python ./tools/ci_tools.py --lastversion release) + + echo ::set-output name=last_release::$LASTRELEASE + echo ::set-output name=release_tag::$RESULT - name: "✏️ Generate full changelog" - if: steps.version_type.outputs.type != 'skip' + if: steps.version.outputs.release_tag != 'skip' id: generate-full-changelog uses: heinrichreimer/github-changelog-generator-action@v2.2 with: token: ${{ secrets.ADMIN_TOKEN }} - breakingLabel: '#### 💥 Breaking' - enhancementLabel: '#### 🚀 Enhancements' - bugsLabel: '#### 🐛 Bug fixes' - deprecatedLabel: '#### ⚠️ Deprecations' + breakingLabel: '**💥 Breaking**' + enhancementLabel: '**🚀 Enhancements**' + bugsLabel: '**🐛 Bug fixes**' + deprecatedLabel: '**⚠️ Deprecations**' addSections: '{"documentation":{"prefix":"### 📖 Documentation","labels":["documentation"]},"tests":{"prefix":"### ✅ Testing","labels":["tests"]}}' issues: false issuesWoLabels: false @@ -64,39 +55,76 @@ jobs: compareLink: true stripGeneratorNotice: true verbose: true - futureRelease: ${{ env.RELEASE_VERSION }} + futureRelease: ${{ steps.version.outputs.release_tag }} excludeTagsRegex: "CI/.+" releaseBranch: "main" - - name: "🖨️ Print changelog to console" - run: echo ${{ steps.generate-last-changelog.outputs.changelog }} - - name: 💾 Commit and Tag id: git_commit - if: steps.version_type.outputs.type != 'skip' + if: steps.version.outputs.release_tag != 'skip' run: | + git config user.email ${{ secrets.CI_EMAIL }} + git config user.name ${{ secrets.CI_USER }} git add . 
git commit -m "[Automated] Release" - tag_name="${{ env.RELEASE_VERSION }}" - git push - git tag -fa $tag_name -m "stable release" - git remote set-url --push origin https://pypebot:${{ secrets.ADMIN_TOKEN }}@github.com/pypeclub/openpype - git push origin $tag_name + tag_name="${{ steps.version.outputs.release_tag }}" + git tag -a $tag_name -m "stable release" - - name: "🚀 Github Release" - uses: docker://antonyurchenko/git-release:latest - env: - GITHUB_TOKEN: ${{ secrets.ADMIN_TOKEN }} - DRAFT_RELEASE: "false" - PRE_RELEASE: "false" - CHANGELOG_FILE: "CHANGELOG.md" - ALLOW_EMPTY_CHANGELOG: "false" - ALLOW_TAG_PREFIX: "true" + - name: 🔏 Push to protected main branch + if: steps.version.outputs.release_tag != 'skip' + uses: CasperWA/push-protected@v2 + with: + token: ${{ secrets.ADMIN_TOKEN }} + branch: main + tags: true + unprotect_reviews: true + + - name: "✏️ Generate last changelog" + if: steps.version.outputs.release_tag != 'skip' + id: generate-last-changelog + uses: heinrichreimer/github-changelog-generator-action@v2.2 + with: + token: ${{ secrets.ADMIN_TOKEN }} + breakingLabel: '**💥 Breaking**' + enhancementLabel: '**🚀 Enhancements**' + bugsLabel: '**🐛 Bug fixes**' + deprecatedLabel: '**⚠️ Deprecations**' + addSections: '{"documentation":{"prefix":"### 📖 Documentation","labels":["documentation"]},"tests":{"prefix":"### ✅ Testing","labels":["tests"]}}' + issues: false + issuesWoLabels: false + sinceTag: ${{ steps.version.outputs.last_release }} + maxIssues: 100 + pullRequests: true + prWoLabels: false + author: false + unreleased: true + compareLink: true + stripGeneratorNotice: true + verbose: true + futureRelease: ${{ steps.version.outputs.release_tag }} + excludeTagsRegex: "CI/.+" + releaseBranch: "main" + stripHeaders: true + base: 'none' - - name: 🔨 Merge main back to develop + - name: 🚀 Github Release + if: steps.version.outputs.release_tag != 'skip' + uses: ncipollo/release-action@v1 + with: + body: ${{ steps.generate-last-changelog.outputs.changelog }} + tag: ${{ steps.version.outputs.release_tag }} + token: ${{ secrets.ADMIN_TOKEN }} + + - name: ☠ Delete Pre-release + if: steps.version.outputs.release_tag != 'skip' + uses: cb80/delrel@latest + with: + tag: "${{ steps.version.outputs.current_version }}" + + - name: 🔁 Merge main back to develop + if: steps.version.outputs.release_tag != 'skip' uses: everlytic/branch-merge@1.1.0 - if: steps.version_type.outputs.type != 'skip' with: github_token: ${{ secrets.ADMIN_TOKEN }} source_ref: 'main' diff --git a/.gitignore b/.gitignore index 754e3698e2..221a2f2241 100644 --- a/.gitignore +++ b/.gitignore @@ -97,4 +97,5 @@ website/.docusaurus # Poetry ######## -.poetry/ \ No newline at end of file +.poetry/ +.python-version diff --git a/CHANGELOG.md b/CHANGELOG.md index 9fae98ec11..0ecd583191 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,22 +1,131 @@ # Changelog -## [3.1.0-nightly.2](https://github.com/pypeclub/OpenPype/tree/HEAD) +## [3.3.0-nightly.3](https://github.com/pypeclub/OpenPype/tree/HEAD) -[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.0.0...HEAD) +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.2.0...HEAD) -#### 🚀 Enhancements +**🚀 Enhancements** -- Nuke - Publish simplification [\#1653](https://github.com/pypeclub/OpenPype/pull/1653) -- \#1333 - added tooltip hints to Pyblish buttons [\#1649](https://github.com/pypeclub/OpenPype/pull/1649) +- nuke: settings create missing default subsets [\#1829](https://github.com/pypeclub/OpenPype/pull/1829) +- Update poetry lock 
[\#1823](https://github.com/pypeclub/OpenPype/pull/1823) +- Settings: settings for plugins [\#1819](https://github.com/pypeclub/OpenPype/pull/1819) +- Nuke: ftrack family plugin settings preset [\#1805](https://github.com/pypeclub/OpenPype/pull/1805) +- Maya: Deadline custom settings [\#1797](https://github.com/pypeclub/OpenPype/pull/1797) -#### 🐛 Bug fixes +**🐛 Bug fixes** -- Mac launch arguments fix [\#1660](https://github.com/pypeclub/OpenPype/pull/1660) -- Fix missing dbm python module [\#1652](https://github.com/pypeclub/OpenPype/pull/1652) -- Transparent branches in view on Mac [\#1648](https://github.com/pypeclub/OpenPype/pull/1648) -- Add asset on task item [\#1646](https://github.com/pypeclub/OpenPype/pull/1646) -- Project manager save and queue [\#1645](https://github.com/pypeclub/OpenPype/pull/1645) -- New project anatomy values [\#1644](https://github.com/pypeclub/OpenPype/pull/1644) +- nuke: write render node skipped with crop [\#1836](https://github.com/pypeclub/OpenPype/pull/1836) +- Project folder structure overrides [\#1813](https://github.com/pypeclub/OpenPype/pull/1813) +- Maya: fix yeti settings path in extractor [\#1809](https://github.com/pypeclub/OpenPype/pull/1809) +- Houdini colector formatting keys fix [\#1802](https://github.com/pypeclub/OpenPype/pull/1802) + +**Merged pull requests:** + +- PS, AE - send actual context when another webserver is running [\#1811](https://github.com/pypeclub/OpenPype/pull/1811) + +## [3.2.0](https://github.com/pypeclub/OpenPype/tree/3.2.0) (2021-07-13) + +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.2.0-nightly.7...3.2.0) + +**🚀 Enhancements** + +- Standalone publisher last project [\#1799](https://github.com/pypeclub/OpenPype/pull/1799) +- Ftrack Multiple notes as server action [\#1795](https://github.com/pypeclub/OpenPype/pull/1795) +- Settings conditional dict [\#1777](https://github.com/pypeclub/OpenPype/pull/1777) +- Settings application use python 2 only where needed [\#1776](https://github.com/pypeclub/OpenPype/pull/1776) +- Settings UI copy/paste [\#1769](https://github.com/pypeclub/OpenPype/pull/1769) +- Workfile tool widths [\#1766](https://github.com/pypeclub/OpenPype/pull/1766) +- Push hierarchical attributes care about task parent changes [\#1763](https://github.com/pypeclub/OpenPype/pull/1763) +- Application executables with environment variables [\#1757](https://github.com/pypeclub/OpenPype/pull/1757) +- Deadline: Nuke submission additional attributes [\#1756](https://github.com/pypeclub/OpenPype/pull/1756) +- Settings schema without prefill [\#1753](https://github.com/pypeclub/OpenPype/pull/1753) +- Settings Hosts enum [\#1739](https://github.com/pypeclub/OpenPype/pull/1739) +- Validate containers settings [\#1736](https://github.com/pypeclub/OpenPype/pull/1736) +- PS - added loader from sequence [\#1726](https://github.com/pypeclub/OpenPype/pull/1726) +- Toggle Ftrack upload in StandalonePublisher [\#1708](https://github.com/pypeclub/OpenPype/pull/1708) + +**🐛 Bug fixes** + +- nuke: fixing wrong name of family folder when `used existing frames` [\#1803](https://github.com/pypeclub/OpenPype/pull/1803) +- Collect ftrack family bugs [\#1801](https://github.com/pypeclub/OpenPype/pull/1801) +- Invitee email can be None which break the Ftrack commit. 
[\#1788](https://github.com/pypeclub/OpenPype/pull/1788) +- Fix: staging and `--use-version` option [\#1786](https://github.com/pypeclub/OpenPype/pull/1786) +- Otio unrelated error on import [\#1782](https://github.com/pypeclub/OpenPype/pull/1782) +- FFprobe streams order [\#1775](https://github.com/pypeclub/OpenPype/pull/1775) +- Fix - single file files are str only, cast it to list to count properly [\#1772](https://github.com/pypeclub/OpenPype/pull/1772) +- Environments in app executable for MacOS [\#1768](https://github.com/pypeclub/OpenPype/pull/1768) +- Project specific environments [\#1767](https://github.com/pypeclub/OpenPype/pull/1767) +- Settings UI with refresh button [\#1764](https://github.com/pypeclub/OpenPype/pull/1764) +- Standalone publisher thumbnail extractor fix [\#1761](https://github.com/pypeclub/OpenPype/pull/1761) +- Anatomy others templates don't cause crash [\#1758](https://github.com/pypeclub/OpenPype/pull/1758) +- Backend acre module commit update [\#1745](https://github.com/pypeclub/OpenPype/pull/1745) +- hiero: precollect instances failing when audio selected [\#1743](https://github.com/pypeclub/OpenPype/pull/1743) +- Hiero: creator instance error [\#1742](https://github.com/pypeclub/OpenPype/pull/1742) +- Nuke: fixing render creator for no selection format failing [\#1741](https://github.com/pypeclub/OpenPype/pull/1741) +- StandalonePublisher: failing collector for editorial [\#1738](https://github.com/pypeclub/OpenPype/pull/1738) +- Local settings UI crash on missing defaults [\#1737](https://github.com/pypeclub/OpenPype/pull/1737) +- TVPaint white background on thumbnail [\#1735](https://github.com/pypeclub/OpenPype/pull/1735) + +**Merged pull requests:** + +- Build: don't add Poetry to `PATH` [\#1808](https://github.com/pypeclub/OpenPype/pull/1808) +- Bump prismjs from 1.23.0 to 1.24.0 in /website [\#1773](https://github.com/pypeclub/OpenPype/pull/1773) +- Bc/fix/docs [\#1771](https://github.com/pypeclub/OpenPype/pull/1771) +- TVPaint ftrack family [\#1755](https://github.com/pypeclub/OpenPype/pull/1755) + +## [2.18.4](https://github.com/pypeclub/OpenPype/tree/2.18.4) (2021-06-24) + +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/2.18.3...2.18.4) + +**Merged pull requests:** + +- celaction fixes [\#1754](https://github.com/pypeclub/OpenPype/pull/1754) +- celaciton: audio subset changed data structure [\#1750](https://github.com/pypeclub/OpenPype/pull/1750) + +## [2.18.3](https://github.com/pypeclub/OpenPype/tree/2.18.3) (2021-06-23) + +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.2.0-nightly.2...2.18.3) + +**🐛 Bug fixes** + +- Tools names forwards compatibility [\#1727](https://github.com/pypeclub/OpenPype/pull/1727) + +**⚠️ Deprecations** + +- global: removing obsolete ftrack validator plugin [\#1710](https://github.com/pypeclub/OpenPype/pull/1710) + +## [2.18.2](https://github.com/pypeclub/OpenPype/tree/2.18.2) (2021-06-16) + +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.1.0...2.18.2) + +**🐛 Bug fixes** + +- Maya: Extract review hotfix - 2.x backport [\#1713](https://github.com/pypeclub/OpenPype/pull/1713) + +**Merged pull requests:** + +- 1698 Nuke: Prerender Frame Range by default [\#1709](https://github.com/pypeclub/OpenPype/pull/1709) + +## [3.1.0](https://github.com/pypeclub/OpenPype/tree/3.1.0) (2021-06-15) + +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.1.0-nightly.4...3.1.0) + +**🚀 Enhancements** + +- Log Viewer with OpenPype style 
[\#1703](https://github.com/pypeclub/OpenPype/pull/1703) +- Scrolling in OpenPype info widget [\#1702](https://github.com/pypeclub/OpenPype/pull/1702) +- OpenPype style in modules [\#1694](https://github.com/pypeclub/OpenPype/pull/1694) +- Sort applications and tools alphabetically in Settings UI [\#1689](https://github.com/pypeclub/OpenPype/pull/1689) + +**🐛 Bug fixes** + +- Nuke: broken publishing rendered frames [\#1707](https://github.com/pypeclub/OpenPype/pull/1707) +- Standalone publisher Thumbnail export args [\#1705](https://github.com/pypeclub/OpenPype/pull/1705) +- Bad zip can break OpenPype start [\#1691](https://github.com/pypeclub/OpenPype/pull/1691) + +**Merged pull requests:** + +- update dependencies [\#1697](https://github.com/pypeclub/OpenPype/pull/1697) # Changelog diff --git a/igniter/bootstrap_repos.py b/igniter/bootstrap_repos.py index 7c4f8b4b69..8c081b8614 100644 --- a/igniter/bootstrap_repos.py +++ b/igniter/bootstrap_repos.py @@ -657,7 +657,7 @@ class BootstrapRepos: ] # remove duplicates - openpype_versions = list(set(openpype_versions)) + openpype_versions = sorted(list(set(openpype_versions))) return openpype_versions @@ -972,8 +972,12 @@ class BootstrapRepos: "openpype/version.py") as version_file: zip_version = {} exec(version_file.read(), zip_version) - version_check = OpenPypeVersion( - version=zip_version["__version__"]) + try: + version_check = OpenPypeVersion( + version=zip_version["__version__"]) + except ValueError as e: + self._print(str(e), True) + return False version_main = version_check.get_main_version() # noqa: E501 detected_main = detected_version.get_main_version() # noqa: E501 diff --git a/openpype/cli.py b/openpype/cli.py index 9f4561b82e..ec5b04c468 100644 --- a/openpype/cli.py +++ b/openpype/cli.py @@ -15,6 +15,9 @@ from .pype_commands import PypeCommands expose_value=False, help="use specified version") @click.option("--use-staging", is_flag=True, expose_value=False, help="use staging variants") +@click.option("--list-versions", is_flag=True, expose_value=False, + help=("list all detected versions. Use With `--use-staging " + "to list staging versions.")) def main(ctx): """Pype is main command serving as entry point to pipeline system. @@ -115,7 +118,9 @@ def extractenvironments(output_json_path, project, asset, task, app): @main.command() @click.argument("paths", nargs=-1) @click.option("-d", "--debug", is_flag=True, help="Print debug messages") -def publish(debug, paths): +@click.option("-t", "--targets", help="Targets module", default=None, + multiple=True) +def publish(debug, paths, targets): """Start CLI publishing. Publish collects json from paths provided as an argument. 
@@ -123,7 +128,7 @@ def publish(debug, paths): """ if debug: os.environ['OPENPYPE_DEBUG'] = '3' - PypeCommands.publish(list(paths)) + PypeCommands.publish(list(paths), targets) @main.command() diff --git a/openpype/hosts/aftereffects/plugins/publish/validate_instance_asset.py b/openpype/hosts/aftereffects/plugins/publish/validate_instance_asset.py new file mode 100644 index 0000000000..eff89adcb3 --- /dev/null +++ b/openpype/hosts/aftereffects/plugins/publish/validate_instance_asset.py @@ -0,0 +1,61 @@ +from avalon import api +import pyblish.api +import openpype.api +from avalon import aftereffects + + +class ValidateInstanceAssetRepair(pyblish.api.Action): + """Repair the instance asset with value from Context.""" + + label = "Repair" + icon = "wrench" + on = "failed" + + def process(self, context, plugin): + + # Get the errored instances + failed = [] + for result in context.data["results"]: + if (result["error"] is not None and result["instance"] is not None + and result["instance"] not in failed): + failed.append(result["instance"]) + + # Apply pyblish.logic to get the instances for the plug-in + instances = pyblish.api.instances_by_plugin(failed, plugin) + stub = aftereffects.stub() + for instance in instances: + data = stub.read(instance[0]) + + data["asset"] = api.Session["AVALON_ASSET"] + stub.imprint(instance[0], data) + + +class ValidateInstanceAsset(pyblish.api.InstancePlugin): + """Validate the instance asset is the current selected context asset. + + As it might happen that multiple worfiles are opened at same time, + switching between them would mess with selected context. (From Launcher + or Ftrack). + + In that case outputs might be output under wrong asset! + + Repair action will use Context asset value (from Workfiles or Launcher) + Closing and reopening with Workfiles will refresh Context value. + """ + + label = "Validate Instance Asset" + hosts = ["aftereffects"] + actions = [ValidateInstanceAssetRepair] + order = openpype.api.ValidateContentsOrder + + def process(self, instance): + instance_asset = instance.data["asset"] + current_asset = api.Session["AVALON_ASSET"] + msg = ( + f"Instance asset {instance_asset} is not the same " + f"as current context {current_asset}. PLEASE DO:\n" + f"Repair with 'A' action to use '{current_asset}'.\n" + f"If that's not correct value, close workfile and " + f"reopen via Workfiles!" 
+ ) + assert instance_asset == current_asset, msg diff --git a/openpype/hosts/hiero/plugins/create/create_shot_clip.py b/openpype/hosts/hiero/plugins/create/create_shot_clip.py index 25be9f090b..0c5bf93a3f 100644 --- a/openpype/hosts/hiero/plugins/create/create_shot_clip.py +++ b/openpype/hosts/hiero/plugins/create/create_shot_clip.py @@ -1,3 +1,4 @@ +from copy import deepcopy import openpype.hosts.hiero.api as phiero # from openpype.hosts.hiero.api import plugin, lib # reload(lib) @@ -206,20 +207,24 @@ class CreateShotClip(phiero.Creator): presets = None def process(self): + # Creator copy of object attributes that are modified during `process` + presets = deepcopy(self.presets) + gui_inputs = deepcopy(self.gui_inputs) + # get key pares from presets and match it on ui inputs - for k, v in self.gui_inputs.items(): + for k, v in gui_inputs.items(): if v["type"] in ("dict", "section"): # nested dictionary (only one level allowed # for sections and dict) for _k, _v in v["value"].items(): - if self.presets.get(_k): - self.gui_inputs[k][ - "value"][_k]["value"] = self.presets[_k] - if self.presets.get(k): - self.gui_inputs[k]["value"] = self.presets[k] + if presets.get(_k): + gui_inputs[k][ + "value"][_k]["value"] = presets[_k] + if presets.get(k): + gui_inputs[k]["value"] = presets[k] # open widget for plugins inputs - widget = self.widget(self.gui_name, self.gui_info, self.gui_inputs) + widget = self.widget(self.gui_name, self.gui_info, gui_inputs) widget.exec_() if len(self.selected) < 1: diff --git a/openpype/hosts/hiero/plugins/publish/precollect_instances.py b/openpype/hosts/hiero/plugins/publish/precollect_instances.py index 92e0a70d15..4984849aa7 100644 --- a/openpype/hosts/hiero/plugins/publish/precollect_instances.py +++ b/openpype/hosts/hiero/plugins/publish/precollect_instances.py @@ -41,16 +41,10 @@ class PrecollectInstances(pyblish.api.ContextPlugin): # process all sellected timeline track items for track_item in selected_timeline_items: - data = {} clip_name = track_item.name() source_clip = track_item.source() - - # get clips subtracks and anotations - annotations = self.clip_annotations(source_clip) - subtracks = self.clip_subtrack(track_item) - self.log.debug("Annotations: {}".format(annotations)) - self.log.debug(">> Subtracks: {}".format(subtracks)) + self.log.debug("clip_name: {}".format(clip_name)) # get openpype tag data tag_data = phiero.get_track_item_pype_data(track_item) @@ -62,6 +56,12 @@ class PrecollectInstances(pyblish.api.ContextPlugin): if tag_data.get("id") != "pyblish.avalon.instance": continue + # get clips subtracks and anotations + annotations = self.clip_annotations(source_clip) + subtracks = self.clip_subtrack(track_item) + self.log.debug("Annotations: {}".format(annotations)) + self.log.debug(">> Subtracks: {}".format(subtracks)) + # solve handles length tag_data["handleStart"] = min( tag_data["handleStart"], int(track_item.handleInLength())) @@ -128,7 +128,7 @@ class PrecollectInstances(pyblish.api.ContextPlugin): "_ instance.data: {}".format(pformat(instance.data))) if not with_audio: - return + continue # create audio subset instance self.create_audio_instance(context, **data) diff --git a/openpype/hosts/houdini/plugins/publish/collect_instances.py b/openpype/hosts/houdini/plugins/publish/collect_instances.py index 413553c864..2e294face2 100644 --- a/openpype/hosts/houdini/plugins/publish/collect_instances.py +++ b/openpype/hosts/houdini/plugins/publish/collect_instances.py @@ -56,7 +56,7 @@ class CollectInstances(pyblish.api.ContextPlugin): # Create 
nice name if the instance has a frame range. label = data.get("name", node.name()) if "frameStart" in data and "frameEnd" in data: - frames = "[{startFrame} - {endFrame}]".format(**data) + frames = "[{frameStart} - {frameEnd}]".format(**data) label = "{} {}".format(label, frames) instance = context.create_instance(label) diff --git a/openpype/hosts/maya/plugins/publish/extract_playblast.py b/openpype/hosts/maya/plugins/publish/extract_playblast.py index fa1ce7f9a9..57e3f478f1 100644 --- a/openpype/hosts/maya/plugins/publish/extract_playblast.py +++ b/openpype/hosts/maya/plugins/publish/extract_playblast.py @@ -72,7 +72,7 @@ class ExtractPlayblast(openpype.api.Extractor): # Isolate view is requested by having objects in the set besides a # camera. - if preset.pop("isolate_view", False) or instance.data.get("isolate"): + if preset.pop("isolate_view", False) and instance.data.get("isolate"): preset["isolate"] = instance.data["setMembers"] # Show/Hide image planes on request. diff --git a/openpype/hosts/maya/plugins/publish/extract_thumbnail.py b/openpype/hosts/maya/plugins/publish/extract_thumbnail.py index 5a91888781..aa8adc3986 100644 --- a/openpype/hosts/maya/plugins/publish/extract_thumbnail.py +++ b/openpype/hosts/maya/plugins/publish/extract_thumbnail.py @@ -75,7 +75,7 @@ class ExtractThumbnail(openpype.api.Extractor): # Isolate view is requested by having objects in the set besides a # camera. - if preset.pop("isolate_view", False) or instance.data.get("isolate"): + if preset.pop("isolate_view", False) and instance.data.get("isolate"): preset["isolate"] = instance.data["setMembers"] with maintained_time(): diff --git a/openpype/hosts/maya/plugins/publish/extract_yeti_rig.py b/openpype/hosts/maya/plugins/publish/extract_yeti_rig.py index b9bed47fa5..56d5dfe901 100644 --- a/openpype/hosts/maya/plugins/publish/extract_yeti_rig.py +++ b/openpype/hosts/maya/plugins/publish/extract_yeti_rig.py @@ -133,10 +133,10 @@ class ExtractYetiRig(openpype.api.Extractor): image_search_path = resources_dir = instance.data["resourcesDir"] settings = instance.data.get("rigsettings", None) - if settings: - settings["imageSearchPath"] = image_search_path - with open(settings_path, "w") as fp: - json.dump(settings, fp, ensure_ascii=False) + assert settings, "Yeti rig settings were not collected." 
+ settings["imageSearchPath"] = image_search_path + with open(settings_path, "w") as fp: + json.dump(settings, fp, ensure_ascii=False) # add textures to transfers if 'transfers' not in instance.data: @@ -192,12 +192,12 @@ class ExtractYetiRig(openpype.api.Extractor): 'stagingDir': dirname } ) - self.log.info("settings file: {}".format(settings)) + self.log.info("settings file: {}".format(settings_path)) instance.data["representations"].append( { 'name': 'rigsettings', 'ext': 'rigsettings', - 'files': os.path.basename(settings), + 'files': os.path.basename(settings_path), 'stagingDir': dirname } ) diff --git a/openpype/hosts/nuke/api/lib.py b/openpype/hosts/nuke/api/lib.py index 7c274a03c7..ee03e04360 100644 --- a/openpype/hosts/nuke/api/lib.py +++ b/openpype/hosts/nuke/api/lib.py @@ -286,7 +286,8 @@ def add_button_write_to_read(node): node.addKnob(knob) -def create_write_node(name, data, input=None, prenodes=None, review=True): +def create_write_node(name, data, input=None, prenodes=None, + review=True, linked_knobs=None): ''' Creating write node which is group node Arguments: @@ -390,13 +391,14 @@ def create_write_node(name, data, input=None, prenodes=None, review=True): if prenodes: for node in prenodes: # get attributes - name = node["name"] + pre_node_name = node["name"] klass = node["class"] knobs = node["knobs"] dependent = node["dependent"] # create node - now_node = nuke.createNode(klass, "name {}".format(name)) + now_node = nuke.createNode( + klass, "name {}".format(pre_node_name)) now_node.hideControlPanel() # add data to knob @@ -465,29 +467,40 @@ def create_write_node(name, data, input=None, prenodes=None, review=True): GN.addKnob(nuke.Text_Knob('', 'Rendering')) # Add linked knobs. - linked_knob_names = [ - "_grp-start_", - "use_limit", "first", "last", - "_grp-end_", - "Render" - ] - for name in linked_knob_names: - if "_grp-start_" in name: + linked_knob_names = [] + + # add input linked knobs and create group only if any input + if linked_knobs: + linked_knob_names.append("_grp-start_") + linked_knob_names.extend(linked_knobs) + linked_knob_names.append("_grp-end_") + + linked_knob_names.append("Render") + + for _k_name in linked_knob_names: + if "_grp-start_" in _k_name: knob = nuke.Tab_Knob( "rnd_attr", "Rendering attributes", nuke.TABBEGINCLOSEDGROUP) GN.addKnob(knob) - elif "_grp-end_" in name: + elif "_grp-end_" in _k_name: knob = nuke.Tab_Knob( "rnd_attr_end", "Rendering attributes", nuke.TABENDGROUP) GN.addKnob(knob) else: - link = nuke.Link_Knob("") - link.makeLink(write_node.name(), name) - link.setName(name) - if "Render" in name: - link.setLabel("Render Local") - link.setFlag(0x1000) - GN.addKnob(link) + if "___" in _k_name: + # add devider + GN.addKnob(nuke.Text_Knob("")) + else: + # add linked knob by _k_name + link = nuke.Link_Knob("") + link.makeLink(write_node.name(), _k_name) + link.setName(_k_name) + + # make render + if "Render" in _k_name: + link.setLabel("Render Local") + link.setFlag(0x1000) + GN.addKnob(link) # adding write to read button add_button_write_to_read(GN) diff --git a/openpype/hosts/nuke/plugins/create/create_write_prerender.py b/openpype/hosts/nuke/plugins/create/create_write_prerender.py index 6e1a2ddd96..1b925014ad 100644 --- a/openpype/hosts/nuke/plugins/create/create_write_prerender.py +++ b/openpype/hosts/nuke/plugins/create/create_write_prerender.py @@ -103,7 +103,8 @@ class CreateWritePrerender(plugin.PypeCreator): write_data, input=selected_node, prenodes=[], - review=False) + review=False, + linked_knobs=["channels", "___", 
"first", "last", "use_limit"]) # relinking to collected connections for i, input in enumerate(inputs): @@ -122,19 +123,9 @@ class CreateWritePrerender(plugin.PypeCreator): w_node = n write_node.end() - # add inner write node Tab - write_node.addKnob(nuke.Tab_Knob("WriteLinkedKnobs")) - - # linking knobs to group property panel - linking_knobs = ["channels", "___", "first", "last", "use_limit"] - for k in linking_knobs: - if "___" in k: - write_node.addKnob(nuke.Text_Knob('')) - else: - lnk = nuke.Link_Knob(k) - lnk.makeLink(w_node.name(), k) - lnk.setName(k.replace('_', ' ').capitalize()) - lnk.clearFlag(nuke.STARTLINE) - write_node.addKnob(lnk) + if self.presets.get("use_range_limit"): + w_node["use_limit"].setValue(True) + w_node["first"].setValue(nuke.root()["first_frame"].value()) + w_node["last"].setValue(nuke.root()["last_frame"].value()) return write_node diff --git a/openpype/hosts/nuke/plugins/create/create_write_render.py b/openpype/hosts/nuke/plugins/create/create_write_render.py index 9ddf0e4a87..a1381122ee 100644 --- a/openpype/hosts/nuke/plugins/create/create_write_render.py +++ b/openpype/hosts/nuke/plugins/create/create_write_render.py @@ -100,6 +100,13 @@ class CreateWriteRender(plugin.PypeCreator): "/{subset}.{frame}.{ext}")}) # add crop node to cut off all outside of format bounding box + # get width and height + try: + width, height = (selected_node.width(), selected_node.height()) + except AttributeError: + actual_format = nuke.root().knob('format').value() + width, height = (actual_format.width(), actual_format.height()) + _prenodes = [ { "name": "Crop01", @@ -108,8 +115,8 @@ class CreateWriteRender(plugin.PypeCreator): ("box", [ 0.0, 0.0, - selected_node.width(), - selected_node.height() + width, + height ]) ], "dependent": None diff --git a/openpype/hosts/nuke/plugins/publish/precollect_instances.py b/openpype/hosts/nuke/plugins/publish/precollect_instances.py index cdb0589525..c2c25d0627 100644 --- a/openpype/hosts/nuke/plugins/publish/precollect_instances.py +++ b/openpype/hosts/nuke/plugins/publish/precollect_instances.py @@ -70,8 +70,9 @@ class PreCollectNukeInstances(pyblish.api.ContextPlugin): review = False if "review" in node.knobs(): review = node["review"].value() + + if review: families.append("review") - families.append("ftrack") # Add all nodes in group instances. 
if node.Class() == "Group": @@ -81,6 +82,7 @@ class PreCollectNukeInstances(pyblish.api.ContextPlugin): if target == "Use existing frames": # Local rendering self.log.info("flagged for no render") + families.append(families_ak.lower()) elif target == "Local": # Local rendering self.log.info("flagged for local render") diff --git a/openpype/hosts/nuke/plugins/publish/precollect_writes.py b/openpype/hosts/nuke/plugins/publish/precollect_writes.py index 5eaac89e84..0b5fbc0479 100644 --- a/openpype/hosts/nuke/plugins/publish/precollect_writes.py +++ b/openpype/hosts/nuke/plugins/publish/precollect_writes.py @@ -1,5 +1,6 @@ import os import re +from pprint import pformat import nuke import pyblish.api import openpype.api as pype @@ -17,6 +18,7 @@ class CollectNukeWrites(pyblish.api.InstancePlugin): def process(self, instance): _families_test = [instance.data["family"]] + instance.data["families"] + self.log.debug("_families_test: {}".format(_families_test)) node = None for x in instance: @@ -133,22 +135,29 @@ class CollectNukeWrites(pyblish.api.InstancePlugin): "outputDir": output_dir, "ext": ext, "label": label, - "handleStart": handle_start, - "handleEnd": handle_end, - "frameStart": first_frame + handle_start, - "frameEnd": last_frame - handle_end, - "frameStartHandle": first_frame, - "frameEndHandle": last_frame, "outputType": output_type, "colorspace": colorspace, "deadlineChunkSize": deadlineChunkSize, "deadlinePriority": deadlinePriority }) - if "prerender" in _families_test: + if self.is_prerender(_families_test): instance.data.update({ - "family": "prerender", - "families": [] + "handleStart": 0, + "handleEnd": 0, + "frameStart": first_frame, + "frameEnd": last_frame, + "frameStartHandle": first_frame, + "frameEndHandle": last_frame, + }) + else: + instance.data.update({ + "handleStart": handle_start, + "handleEnd": handle_end, + "frameStart": first_frame + handle_start, + "frameEnd": last_frame - handle_end, + "frameStartHandle": first_frame, + "frameEndHandle": last_frame, }) # * Add audio to instance if exists. 
@@ -170,4 +179,7 @@ class CollectNukeWrites(pyblish.api.InstancePlugin): "filename": api.get_representation_path(repre_doc) }] - self.log.debug("instance.data: {}".format(instance.data)) + self.log.debug("instance.data: {}".format(pformat(instance.data))) + + def is_prerender(self, families): + return next((f for f in families if "prerender" in f), None) diff --git a/openpype/hosts/nuke/plugins/publish/validate_rendered_frames.py b/openpype/hosts/nuke/plugins/publish/validate_rendered_frames.py index 8b71aff1ac..0c88014649 100644 --- a/openpype/hosts/nuke/plugins/publish/validate_rendered_frames.py +++ b/openpype/hosts/nuke/plugins/publish/validate_rendered_frames.py @@ -61,7 +61,6 @@ class ValidateRenderedFrames(pyblish.api.InstancePlugin): hosts = ["nuke", "nukestudio"] actions = [RepairCollectionActionToLocal, RepairCollectionActionToFarm] - def process(self, instance): for repre in instance.data["representations"]: @@ -78,10 +77,10 @@ class ValidateRenderedFrames(pyblish.api.InstancePlugin): collection = collections[0] - frame_length = int( - instance.data["frameEndHandle"] - - instance.data["frameStartHandle"] + 1 - ) + fstartH = instance.data["frameStartHandle"] + fendH = instance.data["frameEndHandle"] + + frame_length = int(fendH - fstartH + 1) if frame_length != 1: if len(collections) != 1: @@ -95,7 +94,16 @@ class ValidateRenderedFrames(pyblish.api.InstancePlugin): raise ValidationException(msg) collected_frames_len = int(len(collection.indexes)) + coll_start = min(collection.indexes) + coll_end = max(collection.indexes) + self.log.info("frame_length: {}".format(frame_length)) + self.log.info("collected_frames_len: {}".format( + collected_frames_len)) + self.log.info("fstartH-fendH: {}-{}".format(fstartH, fendH)) + self.log.info( + "coll_start-coll_end: {}-{}".format(coll_start, coll_end)) + self.log.info( "len(collection.indexes): {}".format(collected_frames_len) ) @@ -103,8 +111,11 @@ class ValidateRenderedFrames(pyblish.api.InstancePlugin): if ("slate" in instance.data["families"]) \ and (frame_length != collected_frames_len): collected_frames_len -= 1 + fstartH += 1 - assert (collected_frames_len == frame_length), ( + assert ((collected_frames_len >= frame_length) + and (coll_start <= fstartH) + and (coll_end >= fendH)), ( "{} missing frames. Use repair to render all frames" ).format(__name__) diff --git a/openpype/hosts/photoshop/plugins/lib.py b/openpype/hosts/photoshop/plugins/lib.py new file mode 100644 index 0000000000..74aff06114 --- /dev/null +++ b/openpype/hosts/photoshop/plugins/lib.py @@ -0,0 +1,26 @@ +import re + + +def get_unique_layer_name(layers, asset_name, subset_name): + """ + Gets all layer names and if 'asset_name_subset_name' is present, it + increases suffix by 1 (eg. creates unique layer name - for Loader) + Args: + layers (list) of dict with layers info (name, id etc.) 
+ asset_name (string): + subset_name (string): + + Returns: + (string): name_00X (without version) + """ + name = "{}_{}".format(asset_name, subset_name) + names = {} + for layer in layers: + layer_name = re.sub(r'_\d{3}$', '', layer.name) + if layer_name in names.keys(): + names[layer_name] = names[layer_name] + 1 + else: + names[layer_name] = 1 + occurrences = names.get(name, 0) + + return "{}_{:0>3d}".format(name, occurrences + 1) diff --git a/openpype/hosts/photoshop/plugins/load/load_image.py b/openpype/hosts/photoshop/plugins/load/load_image.py index 44cc96c96f..d043323768 100644 --- a/openpype/hosts/photoshop/plugins/load/load_image.py +++ b/openpype/hosts/photoshop/plugins/load/load_image.py @@ -1,7 +1,9 @@ -from avalon import api, photoshop -import os import re +from avalon import api, photoshop + +from openpype.hosts.photoshop.plugins.lib import get_unique_layer_name + stub = photoshop.stub() @@ -15,8 +17,9 @@ class ImageLoader(api.Loader): representations = ["*"] def load(self, context, name=None, namespace=None, data=None): - layer_name = self._get_unique_layer_name(context["asset"]["name"], - name) + layer_name = get_unique_layer_name(stub.get_layers(), + context["asset"]["name"], + name) with photoshop.maintained_selection(): layer = stub.import_smart_object(self.fname, layer_name) @@ -69,25 +72,3 @@ class ImageLoader(api.Loader): def switch(self, container, representation): self.update(container, representation) - - def _get_unique_layer_name(self, asset_name, subset_name): - """ - Gets all layer names and if 'name' is present in them, increases - suffix by 1 (eg. creates unique layer name - for Loader) - Args: - name (string): in format asset_subset - - Returns: - (string): name_00X (without version) - """ - name = "{}_{}".format(asset_name, subset_name) - names = {} - for layer in stub.get_layers(): - layer_name = re.sub(r'_\d{3}$', '', layer.name) - if layer_name in names.keys(): - names[layer_name] = names[layer_name] + 1 - else: - names[layer_name] = 1 - occurrences = names.get(name, 0) - - return "{}_{:0>3d}".format(name, occurrences + 1) diff --git a/openpype/hosts/photoshop/plugins/load/load_image_from_sequence.py b/openpype/hosts/photoshop/plugins/load/load_image_from_sequence.py new file mode 100644 index 0000000000..8704627b12 --- /dev/null +++ b/openpype/hosts/photoshop/plugins/load/load_image_from_sequence.py @@ -0,0 +1,98 @@ +import os + +from avalon import api +from avalon import photoshop +from avalon.pipeline import get_representation_path_from_context +from avalon.vendor import qargparse + +from openpype.lib import Anatomy +from openpype.hosts.photoshop.plugins.lib import get_unique_layer_name + +stub = photoshop.stub() + + +class ImageFromSequenceLoader(api.Loader): + """ Load specifing image from sequence + + Used only as quick load of reference file from a sequence. + + Plain ImageLoader picks first frame from sequence. + + Loads only existing files - currently not possible to limit loaders + to single select - multiselect. If user selects multiple repres, list + for all of them is provided, but selection is only single file. + This loader will be triggered multiple times, but selected name will + match only to proper path. + + Loader doesnt do containerization as there is currently no data model + of 'frame of rendered files' (only rendered sequence), update would be + difficult. 
+ """ + + families = ["render"] + representations = ["*"] + options = [] + + def load(self, context, name=None, namespace=None, data=None): + if data.get("frame"): + self.fname = os.path.join(os.path.dirname(self.fname), + data["frame"]) + if not os.path.exists(self.fname): + return + + stub = photoshop.stub() + layer_name = get_unique_layer_name(stub.get_layers(), + context["asset"]["name"], + name) + + with photoshop.maintained_selection(): + layer = stub.import_smart_object(self.fname, layer_name) + + self[:] = [layer] + namespace = namespace or layer_name + + return namespace + + @classmethod + def get_options(cls, repre_contexts): + """ + Returns list of files for selected 'repre_contexts'. + + It returns only files with same extension as in context as it is + expected that context points to sequence of frames. + + Returns: + (list) of qargparse.Choice + """ + files = [] + for context in repre_contexts: + fname = get_representation_path_from_context(context) + _, file_extension = os.path.splitext(fname) + + for file_name in os.listdir(os.path.dirname(fname)): + if not file_name.endswith(file_extension): + continue + files.append(file_name) + + # return selection only if there is something + if not files or len(files) <= 1: + return [] + + return [ + qargparse.Choice( + "frame", + label="Select specific file", + items=files, + default=0, + help="Which frame should be loaded?" + ) + ] + + def update(self, container, representation): + """No update possible, not containerized.""" + pass + + def remove(self, container): + """No update possible, not containerized.""" + pass + diff --git a/openpype/hosts/photoshop/plugins/publish/validate_instance_asset.py b/openpype/hosts/photoshop/plugins/publish/validate_instance_asset.py index a1de02f319..4dc1972074 100644 --- a/openpype/hosts/photoshop/plugins/publish/validate_instance_asset.py +++ b/openpype/hosts/photoshop/plugins/publish/validate_instance_asset.py @@ -1,5 +1,4 @@ -import os - +from avalon import api import pyblish.api import openpype.api from avalon import photoshop @@ -27,12 +26,20 @@ class ValidateInstanceAssetRepair(pyblish.api.Action): for instance in instances: data = stub.read(instance[0]) - data["asset"] = os.environ["AVALON_ASSET"] + data["asset"] = api.Session["AVALON_ASSET"] stub.imprint(instance[0], data) class ValidateInstanceAsset(pyblish.api.InstancePlugin): - """Validate the instance asset is the current asset.""" + """Validate the instance asset is the current selected context asset. + + As it might happen that multiple worfiles are opened, switching + between them would mess with selected context. + In that case outputs might be output under wrong asset! + + Repair action will use Context asset value (from Workfiles or Launcher) + Closing and reopening with Workfiles will refresh Context value. + """ label = "Validate Instance Asset" hosts = ["photoshop"] @@ -41,9 +48,12 @@ class ValidateInstanceAsset(pyblish.api.InstancePlugin): def process(self, instance): instance_asset = instance.data["asset"] - current_asset = os.environ["AVALON_ASSET"] + current_asset = api.Session["AVALON_ASSET"] msg = ( - "Instance asset is not the same as current asset:" - f"\nInstance: {instance_asset}\nCurrent: {current_asset}" + f"Instance asset {instance_asset} is not the same " + f"as current context {current_asset}. PLEASE DO:\n" + f"Repair with 'A' action to use '{current_asset}'.\n" + f"If that's not correct value, close workfile and " + f"reopen via Workfiles!" 
) assert instance_asset == current_asset, msg diff --git a/openpype/hosts/standalonepublisher/plugins/publish/collect_context.py b/openpype/hosts/standalonepublisher/plugins/publish/collect_context.py index 43ab13cd79..6913e0836d 100644 --- a/openpype/hosts/standalonepublisher/plugins/publish/collect_context.py +++ b/openpype/hosts/standalonepublisher/plugins/publish/collect_context.py @@ -34,7 +34,6 @@ class CollectContextDataSAPublish(pyblish.api.ContextPlugin): # presets batch_extensions = ["edl", "xml", "psd"] - default_families = ["ftrack"] def process(self, context): # get json paths from os and load them @@ -213,10 +212,6 @@ class CollectContextDataSAPublish(pyblish.api.ContextPlugin): subset = in_data["subset"] # If instance data already contain families then use it instance_families = in_data.get("families") or [] - # Make sure default families are in instance - for default_family in self.default_families or []: - if default_family not in instance_families: - instance_families.append(default_family) instance = context.create_instance(subset) instance.data.update( diff --git a/openpype/hosts/standalonepublisher/plugins/publish/collect_editorial.py b/openpype/hosts/standalonepublisher/plugins/publish/collect_editorial.py index fc9d95d3d7..0a1d29ccdc 100644 --- a/openpype/hosts/standalonepublisher/plugins/publish/collect_editorial.py +++ b/openpype/hosts/standalonepublisher/plugins/publish/collect_editorial.py @@ -2,7 +2,7 @@ Optional: presets -> extensions ( example of use: - [".mov", ".mp4"] + ["mov", "mp4"] ) presets -> source_dir ( example of use: @@ -11,6 +11,7 @@ Optional: "{root[work]}/{project[name]}/inputs" "./input" "../input" + "" ) """ @@ -48,7 +49,7 @@ class CollectEditorial(pyblish.api.InstancePlugin): actions = [] # presets - extensions = [".mov", ".mp4"] + extensions = ["mov", "mp4"] source_dir = None def process(self, instance): @@ -72,7 +73,7 @@ class CollectEditorial(pyblish.api.InstancePlugin): video_path = None basename = os.path.splitext(os.path.basename(file_path))[0] - if self.source_dir: + if self.source_dir != "": source_dir = self.source_dir.replace("\\", "/") if ("./" in source_dir) or ("../" in source_dir): # get current working dir @@ -98,7 +99,7 @@ class CollectEditorial(pyblish.api.InstancePlugin): if os.path.splitext(f)[0] not in basename: continue # filter out by respected extensions - if os.path.splitext(f)[1] not in self.extensions: + if os.path.splitext(f)[1][1:] not in self.extensions: continue video_path = os.path.join( staging_dir, f diff --git a/openpype/hosts/standalonepublisher/plugins/publish/collect_instances.py b/openpype/hosts/standalonepublisher/plugins/publish/collect_editorial_instances.py similarity index 91% rename from openpype/hosts/standalonepublisher/plugins/publish/collect_instances.py rename to openpype/hosts/standalonepublisher/plugins/publish/collect_editorial_instances.py index eb04217136..dbf2574a9d 100644 --- a/openpype/hosts/standalonepublisher/plugins/publish/collect_instances.py +++ b/openpype/hosts/standalonepublisher/plugins/publish/collect_editorial_instances.py @@ -8,7 +8,7 @@ class CollectInstances(pyblish.api.InstancePlugin): """Collect instances from editorial's OTIO sequence""" order = pyblish.api.CollectorOrder + 0.01 - label = "Collect Instances" + label = "Collect Editorial Instances" hosts = ["standalonepublisher"] families = ["editorial"] @@ -16,17 +16,13 @@ class CollectInstances(pyblish.api.InstancePlugin): subsets = { "referenceMain": { "family": "review", - "families": ["clip", "ftrack"], - 
"extensions": [".mp4"] + "families": ["clip"], + "extensions": ["mp4"] }, "audioMain": { "family": "audio", - "families": ["clip", "ftrack"], - "extensions": [".wav"], - }, - "shotMain": { - "family": "shot", - "families": [] + "families": ["clip"], + "extensions": ["wav"], } } timeline_frame_start = 900000 # starndard edl default (10:00:00:00) @@ -55,7 +51,7 @@ class CollectInstances(pyblish.api.InstancePlugin): fps = plib.get_asset()["data"]["fps"] tracks = timeline.each_child( - descended_from_type=otio.schema.track.Track + descended_from_type=otio.schema.Track ) # get data from avalon @@ -84,6 +80,9 @@ class CollectInstances(pyblish.api.InstancePlugin): if clip.name is None: continue + if isinstance(clip, otio.schema.Gap): + continue + # skip all generators like black ampty if isinstance( clip.media_reference, @@ -92,7 +91,7 @@ class CollectInstances(pyblish.api.InstancePlugin): # Transitions are ignored, because Clips have the full frame # range. - if isinstance(clip, otio.schema.transition.Transition): + if isinstance(clip, otio.schema.Transition): continue # basic unique asset name @@ -175,7 +174,16 @@ class CollectInstances(pyblish.api.InstancePlugin): data_key: instance.data.get(data_key)}) # adding subsets to context as instances + self.subsets.update({ + "shotMain": { + "family": "shot", + "families": [] + } + }) for subset, properities in self.subsets.items(): + if properities["version"] == 0: + properities.pop("version") + # adding Review-able instance subset_instance_data = instance_data.copy() subset_instance_data.update(properities) diff --git a/openpype/hosts/standalonepublisher/plugins/publish/collect_instance_resources.py b/openpype/hosts/standalonepublisher/plugins/publish/collect_editorial_resources.py similarity index 94% rename from openpype/hosts/standalonepublisher/plugins/publish/collect_instance_resources.py rename to openpype/hosts/standalonepublisher/plugins/publish/collect_editorial_resources.py index 565d066fd8..ffa24cfd93 100644 --- a/openpype/hosts/standalonepublisher/plugins/publish/collect_instance_resources.py +++ b/openpype/hosts/standalonepublisher/plugins/publish/collect_editorial_resources.py @@ -11,7 +11,7 @@ class CollectInstanceResources(pyblish.api.InstancePlugin): # must be after `CollectInstances` order = pyblish.api.CollectorOrder + 0.011 - label = "Collect Instance Resources" + label = "Collect Editorial Resources" hosts = ["standalonepublisher"] families = ["clip"] @@ -177,19 +177,23 @@ class CollectInstanceResources(pyblish.api.InstancePlugin): collection_head_name = None # loop trough collections and create representations for _collection in collections: - ext = _collection.tail + ext = _collection.tail[1:] collection_head_name = _collection.head frame_start = list(_collection.indexes)[0] frame_end = list(_collection.indexes)[-1] repre_data = { "frameStart": frame_start, "frameEnd": frame_end, - "name": ext[1:], - "ext": ext[1:], + "name": ext, + "ext": ext, "files": [item for item in _collection], "stagingDir": staging_dir } + if instance_data.get("keepSequence"): + repre_data_keep = deepcopy(repre_data) + instance_data["representations"].append(repre_data_keep) + if "review" in instance_data["families"]: repre_data.update({ "thumbnail": True, @@ -208,20 +212,20 @@ class CollectInstanceResources(pyblish.api.InstancePlugin): # loop trough reminders and create representations for _reminding_file in remainder: - ext = os.path.splitext(_reminding_file)[-1] + ext = os.path.splitext(_reminding_file)[-1][1:] if ext not in 
instance_data["extensions"]: continue if collection_head_name and ( - (collection_head_name + ext[1:]) not in _reminding_file - ) and (ext in [".mp4", ".mov"]): + (collection_head_name + ext) not in _reminding_file + ) and (ext in ["mp4", "mov"]): self.log.info(f"Skipping file: {_reminding_file}") continue frame_start = 1 frame_end = 1 repre_data = { - "name": ext[1:], - "ext": ext[1:], + "name": ext, + "ext": ext, "files": _reminding_file, "stagingDir": staging_dir } diff --git a/openpype/hosts/standalonepublisher/plugins/publish/collect_hierarchy.py b/openpype/hosts/standalonepublisher/plugins/publish/collect_hierarchy.py index be36f30f4b..ba2aed4bfc 100644 --- a/openpype/hosts/standalonepublisher/plugins/publish/collect_hierarchy.py +++ b/openpype/hosts/standalonepublisher/plugins/publish/collect_hierarchy.py @@ -131,20 +131,21 @@ class CollectHierarchyInstance(pyblish.api.ContextPlugin): tasks_to_add = dict() project_tasks = io.find_one({"type": "project"})["config"]["tasks"] for task_name, task_data in self.shot_add_tasks.items(): - try: - if task_data["type"] in project_tasks.keys(): - tasks_to_add.update({task_name: task_data}) - else: - raise KeyError( - "Wrong FtrackTaskType `{}` for `{}` is not" - " existing in `{}``".format( - task_data["type"], - task_name, - list(project_tasks.keys()))) - except KeyError as error: + _task_data = deepcopy(task_data) + + # fixing enumerator from settings + _task_data["type"] = task_data["type"][0] + + # check if task type in project task types + if _task_data["type"] in project_tasks.keys(): + tasks_to_add.update({task_name: _task_data}) + else: raise KeyError( - "Wrong presets: `{0}`".format(error) - ) + "Wrong FtrackTaskType `{}` for `{}` is not" + " existing in `{}``".format( + _task_data["type"], + task_name, + list(project_tasks.keys()))) instance.data["tasks"] = tasks_to_add else: diff --git a/openpype/hosts/standalonepublisher/plugins/publish/collect_matchmove.py b/openpype/hosts/standalonepublisher/plugins/publish/collect_matchmove.py deleted file mode 100644 index 5d9e8ddfb4..0000000000 --- a/openpype/hosts/standalonepublisher/plugins/publish/collect_matchmove.py +++ /dev/null @@ -1,29 +0,0 @@ -""" -Requires: - Nothing - -Provides: - Instance -""" - -import pyblish.api -import logging - - -log = logging.getLogger("collector") - - -class CollectMatchmovePublish(pyblish.api.InstancePlugin): - """ - Collector with only one reason for its existence - remove 'ftrack' - family implicitly added by Standalone Publisher - """ - - label = "Collect Matchmove - SA Publish" - order = pyblish.api.CollectorOrder - families = ["matchmove"] - hosts = ["standalonepublisher"] - - def process(self, instance): - if "ftrack" in instance.data["families"]: - instance.data["families"].remove("ftrack") diff --git a/openpype/hosts/standalonepublisher/plugins/publish/extract_thumbnail.py b/openpype/hosts/standalonepublisher/plugins/publish/extract_thumbnail.py index ac1dfa13d4..0792254716 100644 --- a/openpype/hosts/standalonepublisher/plugins/publish/extract_thumbnail.py +++ b/openpype/hosts/standalonepublisher/plugins/publish/extract_thumbnail.py @@ -1,6 +1,5 @@ import os import tempfile -import subprocess import pyblish.api import openpype.api import openpype.lib @@ -67,7 +66,6 @@ class ExtractThumbnailSP(pyblish.api.InstancePlugin): else: # Convert to jpeg if not yet full_input_path = os.path.join(thumbnail_repre["stagingDir"], file) - full_input_path = '"{}"'.format(full_input_path) self.log.info("input {}".format(full_input_path)) full_thumbnail_path = 
tempfile.mkstemp(suffix=".jpg")[1] @@ -77,30 +75,37 @@ class ExtractThumbnailSP(pyblish.api.InstancePlugin): ffmpeg_args = self.ffmpeg_args or {} - jpeg_items = [] - jpeg_items.append("\"{}\"".format(ffmpeg_path)) - # override file if already exists - jpeg_items.append("-y") + jpeg_items = [ + "\"{}\"".format(ffmpeg_path), + # override file if already exists + "-y" + ] + # add input filters from peresets jpeg_items.extend(ffmpeg_args.get("input") or []) # input file - jpeg_items.append("-i {}".format(full_input_path)) + jpeg_items.append("-i \"{}\"".format(full_input_path)) # extract only single file - jpeg_items.append("-vframes 1") + jpeg_items.append("-frames:v 1") + # Add black background for transparent images + jpeg_items.append(( + "-filter_complex" + " \"color=black,format=rgb24[c]" + ";[c][0]scale2ref[c][i]" + ";[c][i]overlay=format=auto:shortest=1,setsar=1\"" + )) jpeg_items.extend(ffmpeg_args.get("output") or []) # output file - jpeg_items.append(full_thumbnail_path) + jpeg_items.append("\"{}\"".format(full_thumbnail_path)) subprocess_jpeg = " ".join(jpeg_items) # run subprocess self.log.debug("Executing: {}".format(subprocess_jpeg)) - subprocess.Popen( - subprocess_jpeg, - stdout=subprocess.PIPE, - shell=True + openpype.api.run_subprocess( + subprocess_jpeg, shell=True, logger=self.log ) # remove thumbnail key from origin repre diff --git a/openpype/hosts/standalonepublisher/plugins/publish/validate_frame_ranges.py b/openpype/hosts/standalonepublisher/plugins/publish/validate_frame_ranges.py index e3086fb638..943cb73b98 100644 --- a/openpype/hosts/standalonepublisher/plugins/publish/validate_frame_ranges.py +++ b/openpype/hosts/standalonepublisher/plugins/publish/validate_frame_ranges.py @@ -43,7 +43,10 @@ class ValidateFrameRange(pyblish.api.InstancePlugin): self.log.warning("Cannot check for extension {}".format(ext)) return - frames = len(instance.data.get("representations", [None])[0]["files"]) + files = instance.data.get("representations", [None])[0]["files"] + if isinstance(files, str): + files = [files] + frames = len(files) err_msg = "Frame duration from DB:'{}' ". 
format(int(duration)) +\ " doesn't match number of files:'{}'".format(frames) +\ diff --git a/openpype/hosts/tvpaint/plugins/create/create_render_layer.py b/openpype/hosts/tvpaint/plugins/create/create_render_layer.py index eeb7d32d50..af6c0f0eee 100644 --- a/openpype/hosts/tvpaint/plugins/create/create_render_layer.py +++ b/openpype/hosts/tvpaint/plugins/create/create_render_layer.py @@ -58,18 +58,14 @@ class CreateRenderlayer(plugin.Creator): # Get currently selected layers layers_data = lib.layers_data() - group_ids = set() - for layer in layers_data: - if layer["selected"]: - group_ids.add(layer["group_id"]) - + selected_layers = [ + layer + for layer in layers_data + if layer["selected"] + ] # Return layer name if only one is selected - if len(group_ids) == 1: - group_id = list(group_ids)[0] - groups_data = lib.groups_data() - for group in groups_data: - if group["group_id"] == group_id: - return group["name"] + if len(selected_layers) == 1: + return selected_layers[0]["name"] # Use defaults if cls.defaults: diff --git a/openpype/hosts/tvpaint/plugins/publish/collect_instances.py b/openpype/hosts/tvpaint/plugins/publish/collect_instances.py index 4468bfae40..e496b144cd 100644 --- a/openpype/hosts/tvpaint/plugins/publish/collect_instances.py +++ b/openpype/hosts/tvpaint/plugins/publish/collect_instances.py @@ -103,8 +103,6 @@ class CollectInstances(pyblish.api.ContextPlugin): instance.data["layers"] = copy.deepcopy( context.data["layersData"] ) - # Add ftrack family - instance.data["families"].append("ftrack") elif family == "renderLayer": instance = self.create_render_layer_instance( @@ -186,9 +184,6 @@ class CollectInstances(pyblish.api.ContextPlugin): instance_data["layers"] = group_layers - # Add ftrack family - instance_data["families"].append("ftrack") - return context.create_instance(**instance_data) def create_render_pass_instance(self, context, instance_data): diff --git a/openpype/hosts/tvpaint/plugins/publish/collect_workfile_data.py b/openpype/hosts/tvpaint/plugins/publish/collect_workfile_data.py index 13c6c9eb78..d8bb03f541 100644 --- a/openpype/hosts/tvpaint/plugins/publish/collect_workfile_data.py +++ b/openpype/hosts/tvpaint/plugins/publish/collect_workfile_data.py @@ -1,5 +1,6 @@ import os import json +import tempfile import pyblish.api import avalon.api @@ -153,9 +154,45 @@ class CollectWorkfileData(pyblish.api.ContextPlugin): "sceneMarkIn": int(mark_in_frame), "sceneMarkInState": mark_in_state == "set", "sceneMarkOut": int(mark_out_frame), - "sceneMarkOutState": mark_out_state == "set" + "sceneMarkOutState": mark_out_state == "set", + "sceneBgColor": self._get_bg_color() } self.log.debug( "Scene data: {}".format(json.dumps(scene_data, indent=4)) ) context.data.update(scene_data) + + def _get_bg_color(self): + """Background color set on scene. + + Is important for review exporting where scene bg color is used as + background. 
+ """ + output_file = tempfile.NamedTemporaryFile( + mode="w", prefix="a_tvp_", suffix=".txt", delete=False + ) + output_file.close() + output_filepath = output_file.name.replace("\\", "/") + george_script_lines = [ + # Variable containing full path to output file + "output_path = \"{}\"".format(output_filepath), + "tv_background", + "bg_color = result", + # Write data to output file + ( + "tv_writetextfile" + " \"strict\" \"append\" '\"'output_path'\"' bg_color" + ) + ] + + george_script = "\n".join(george_script_lines) + lib.execute_george_through_file(george_script) + + with open(output_filepath, "r") as stream: + data = stream.read() + + os.remove(output_filepath) + data = data.strip() + if not data: + return None + return data.split(" ") diff --git a/openpype/hosts/tvpaint/plugins/publish/extract_sequence.py b/openpype/hosts/tvpaint/plugins/publish/extract_sequence.py index 007b5c41f1..536df2adb0 100644 --- a/openpype/hosts/tvpaint/plugins/publish/extract_sequence.py +++ b/openpype/hosts/tvpaint/plugins/publish/extract_sequence.py @@ -1,5 +1,6 @@ import os import shutil +import copy import tempfile import pyblish.api @@ -13,6 +14,9 @@ class ExtractSequence(pyblish.api.Extractor): hosts = ["tvpaint"] families = ["review", "renderPass", "renderLayer"] + # Modifiable with settings + review_bg = [255, 255, 255, 255] + def process(self, instance): self.log.info( "* Processing instance \"{}\"".format(instance.data["label"]) @@ -53,6 +57,8 @@ class ExtractSequence(pyblish.api.Extractor): handle_start = instance.context.data["handleStart"] handle_end = instance.context.data["handleEnd"] + scene_bg_color = instance.context.data["sceneBgColor"] + # --- Fallbacks ---------------------------------------------------- # This is required if validations of ranges are ignored. # - all of this code won't change processing if range to render @@ -120,7 +126,8 @@ class ExtractSequence(pyblish.api.Extractor): if instance.data["family"] == "review": output_filenames, thumbnail_fullpath = self.render_review( - filename_template, output_dir, mark_in, mark_out + filename_template, output_dir, mark_in, mark_out, + scene_bg_color ) else: # Render output @@ -241,7 +248,9 @@ class ExtractSequence(pyblish.api.Extractor): for path in repre_filepaths ] - def render_review(self, filename_template, output_dir, mark_in, mark_out): + def render_review( + self, filename_template, output_dir, mark_in, mark_out, scene_bg_color + ): """ Export images from TVPaint using `tv_savesequence` command. Args: @@ -252,6 +261,8 @@ class ExtractSequence(pyblish.api.Extractor): output_dir (str): Directory where files will be stored. mark_in (int): Starting frame index from which export will begin. mark_out (int): On which frame index export will end. + scene_bg_color (list): Bg color set in scene. Result of george + script command `tv_background`. 
Retruns: tuple: With 2 items first is list of filenames second is path to @@ -263,7 +274,11 @@ class ExtractSequence(pyblish.api.Extractor): filename_template.format(frame=mark_in) ) + bg_color = self._get_review_bg_color() + george_script_lines = [ + # Change bg color to color from settings + "tv_background \"color\" {} {} {}".format(*bg_color), "tv_SaveMode \"PNG\"", "export_path = \"{}\"".format( first_frame_filepath.replace("\\", "/") @@ -272,6 +287,18 @@ class ExtractSequence(pyblish.api.Extractor): mark_in, mark_out ) ] + if scene_bg_color: + # Change bg color back to previous scene bg color + _scene_bg_color = copy.deepcopy(scene_bg_color) + bg_type = _scene_bg_color.pop(0) + orig_color_command = [ + "tv_background", + "\"{}\"".format(bg_type) + ] + orig_color_command.extend(_scene_bg_color) + + george_script_lines.append(" ".join(orig_color_command)) + lib.execute_george_through_file("\n".join(george_script_lines)) first_frame_filepath = None @@ -291,12 +318,13 @@ class ExtractSequence(pyblish.api.Extractor): if first_frame_filepath is None: first_frame_filepath = filepath - thumbnail_filepath = os.path.join(output_dir, "thumbnail.jpg") + thumbnail_filepath = None if first_frame_filepath and os.path.exists(first_frame_filepath): + thumbnail_filepath = os.path.join(output_dir, "thumbnail.jpg") source_img = Image.open(first_frame_filepath) - thumbnail_obj = Image.new("RGB", source_img.size, (255, 255, 255)) - thumbnail_obj.paste(source_img) - thumbnail_obj.save(thumbnail_filepath) + if source_img.mode.lower() != "rgb": + source_img = source_img.convert("RGB") + source_img.save(thumbnail_filepath) return output_filenames, thumbnail_filepath @@ -392,12 +420,35 @@ class ExtractSequence(pyblish.api.Extractor): if thumbnail_src_filepath and os.path.exists(thumbnail_src_filepath): source_img = Image.open(thumbnail_src_filepath) thumbnail_filepath = os.path.join(output_dir, "thumbnail.jpg") - thumbnail_obj = Image.new("RGB", source_img.size, (255, 255, 255)) - thumbnail_obj.paste(source_img) - thumbnail_obj.save(thumbnail_filepath) + # Composite background only on rgba images + # - just making sure + if source_img.mode.lower() == "rgba": + bg_color = self._get_review_bg_color() + self.log.debug("Adding thumbnail background color {}.".format( + " ".join([str(val) for val in bg_color]) + )) + bg_image = Image.new("RGBA", source_img.size, bg_color) + thumbnail_obj = Image.alpha_composite(bg_image, source_img) + thumbnail_obj.convert("RGB").save(thumbnail_filepath) + + else: + self.log.info(( + "Source for thumbnail has mode \"{}\" (Expected: RGBA)." + " Can't use thubmanail background color." + ).format(source_img.mode)) + source_img.save(thumbnail_filepath) return output_filenames, thumbnail_filepath + def _get_review_bg_color(self): + red = green = blue = 255 + if self.review_bg: + if len(self.review_bg) == 4: + red, green, blue, _ = self.review_bg + elif len(self.review_bg) == 3: + red, green, blue = self.review_bg + return (red, green, blue) + def _render_layer( self, layer, diff --git a/openpype/hosts/unreal/README.md b/openpype/hosts/unreal/README.md new file mode 100644 index 0000000000..0a69b9e0cf --- /dev/null +++ b/openpype/hosts/unreal/README.md @@ -0,0 +1,9 @@ +## Unreal Integration + +Supported Unreal Engine version is 4.26+ (mainly because of major Python changes done there). + +### Project naming +Unreal doesn't support project names starting with non-alphabetic character. So names like `123_myProject` are +invalid. 
If OpenPype detects such name it automatically prepends letter **P** to make it valid name, so `123_myProject` +will become `P123_myProject`. There is also soft-limit on project name length to be shorter than 20 characters. +Longer names will issue warning in Unreal Editor that there might be possible side effects. \ No newline at end of file diff --git a/openpype/hosts/unreal/api/lib.py b/openpype/hosts/unreal/api/lib.py index 7e706a2789..7e34c3ff15 100644 --- a/openpype/hosts/unreal/api/lib.py +++ b/openpype/hosts/unreal/api/lib.py @@ -1,38 +1,51 @@ +# -*- coding: utf-8 -*- +"""Unreal launching and project tools.""" import sys import os import platform import json from distutils import dir_util import subprocess +import re +from pathlib import Path +from collections import OrderedDict from openpype.api import get_project_settings -def get_engine_versions(): - """ +def get_engine_versions(env=None): + """Detect Unreal Engine versions. + This will try to detect location and versions of installed Unreal Engine. Location can be overridden by `UNREAL_ENGINE_LOCATION` environment variable. - Returns: + Args: + env (dict, optional): Environment to use. - dict: dictionary with version as a key and dir as value. + Returns: + OrderedDict: dictionary with version as a key and dir as value. + so the highest version is first. Example: - - >>> get_engine_version() + >>> get_engine_versions() { "4.23": "C:/Epic Games/UE_4.23", "4.24": "C:/Epic Games/UE_4.24" } - """ - try: - engine_locations = {} - root, dirs, files = next(os.walk(os.environ["UNREAL_ENGINE_LOCATION"])) - for dir in dirs: - if dir.startswith("UE_"): - ver = dir.split("_")[1] - engine_locations[ver] = os.path.join(root, dir) + """ + env = env or os.environ + engine_locations = {} + try: + root, dirs, _ = next(os.walk(env["UNREAL_ENGINE_LOCATION"])) + + for directory in dirs: + if directory.startswith("UE"): + try: + ver = re.split(r"[-_]", directory)[1] + except IndexError: + continue + engine_locations[ver] = os.path.join(root, directory) except KeyError: # environment variable not set pass @@ -40,32 +53,52 @@ def get_engine_versions(): # specified directory doesn't exists pass - # if we've got something, terminate autodetection process + # if we've got something, terminate auto-detection process if engine_locations: - return engine_locations + return OrderedDict(sorted(engine_locations.items())) # else kick in platform specific detection if platform.system().lower() == "windows": - return _win_get_engine_versions() - elif platform.system().lower() == "linux": + return OrderedDict(sorted(_win_get_engine_versions().items())) + if platform.system().lower() == "linux": # on linux, there is no installation and getting Unreal Engine involves # git clone. So we'll probably depend on `UNREAL_ENGINE_LOCATION`. pass - elif platform.system().lower() == "darwin": - return _darwin_get_engine_version() + if platform.system().lower() == "darwin": + return OrderedDict(sorted(_darwin_get_engine_version().items())) - return {} + return OrderedDict() + + +def get_editor_executable_path(engine_path: Path) -> Path: + """Get UE4 Editor executable path.""" + ue4_path = engine_path / "Engine/Binaries" + if platform.system().lower() == "windows": + ue4_path /= "Win64/UE4Editor.exe" + + elif platform.system().lower() == "linux": + ue4_path /= "Linux/UE4Editor" + + elif platform.system().lower() == "darwin": + ue4_path /= "Mac/UE4Editor" + + return ue4_path def _win_get_engine_versions(): - """ + """Get Unreal Engine versions on Windows. 
+ If engines are installed via Epic Games Launcher then there is: `%PROGRAMDATA%/Epic/UnrealEngineLauncher/LauncherInstalled.dat` This file is JSON file listing installed stuff, Unreal engines are marked with `"AppName" = "UE_X.XX"`` like `UE_4.24` + + Returns: + dict: version as a key and path as a value. + """ install_json_path = os.path.join( - os.environ.get("PROGRAMDATA"), + os.getenv("PROGRAMDATA"), "Epic", "UnrealEngineLauncher", "LauncherInstalled.dat", @@ -75,11 +108,19 @@ def _win_get_engine_versions(): def _darwin_get_engine_version() -> dict: - """ + """Get Unreal Engine versions on MacOS. + It works the same as on Windows, just JSON file location is different. + + Returns: + dict: version as a key and path as a value. + + See Aslo: + :func:`_win_get_engine_versions`. + """ install_json_path = os.path.join( - os.environ.get("HOME"), + os.getenv("HOME"), "Library", "Application Support", "Epic", @@ -91,25 +132,26 @@ def _darwin_get_engine_version() -> dict: def _parse_launcher_locations(install_json_path: str) -> dict: - """ - This will parse locations from json file. + """This will parse locations from json file. + + Args: + install_json_path (str): Path to `LauncherInstalled.dat`. + + Returns: + dict: with unreal engine versions as keys and + paths to those engine installations as value. - :param install_json_path: path to `LauncherInstalled.dat` - :type install_json_path: str - :returns: returns dict with unreal engine versions as keys and - paths to those engine installations as value. - :rtype: dict """ engine_locations = {} if os.path.isfile(install_json_path): with open(install_json_path, "r") as ilf: try: install_data = json.load(ilf) - except json.JSONDecodeError: + except json.JSONDecodeError as e: raise Exception( "Invalid `LauncherInstalled.dat file. `" "Cannot determine Unreal Engine location." - ) + ) from e for installation in install_data.get("InstallationList", []): if installation.get("AppName").startswith("UE_"): @@ -121,55 +163,91 @@ def _parse_launcher_locations(install_json_path: str) -> dict: def create_unreal_project(project_name: str, ue_version: str, - pr_dir: str, - engine_path: str, - dev_mode: bool = False) -> None: - """ - This will create `.uproject` file at specified location. As there is no - way I know to create project via command line, this is easiest option. - Unreal project file is basically JSON file. If we find + pr_dir: Path, + engine_path: Path, + dev_mode: bool = False, + env: dict = None) -> None: + """This will create `.uproject` file at specified location. + + As there is no way I know to create project via command line, this is + easiest option. Unreal project file is basically JSON file. If we find `AVALON_UNREAL_PLUGIN` environment variable we assume this is location of Avalon Integration Plugin and we copy its content to project folder and enable this plugin. - :param project_name: project name - :type project_name: str - :param ue_version: unreal engine version (like 4.23) - :type ue_version: str - :param pr_dir: path to directory where project will be created - :type pr_dir: str - :param engine_path: Path to Unreal Engine installation - :type engine_path: str - :param dev_mode: Flag to trigger C++ style Unreal project needing - Visual Studio and other tools to compile plugins from - sources. This will trigger automatically if `Binaries` - directory is not found in plugin folders as this indicates - this is only source distribution of the plugin. 
Dev mode - is also set by preset file `unreal/project_setup.json` in - **OPENPYPE_CONFIG**. - :type dev_mode: bool - :returns: None - """ - preset = get_project_settings(project_name)["unreal"]["project_setup"] + Args: + project_name (str): Name of the project. + ue_version (str): Unreal engine version (like 4.23). + pr_dir (Path): Path to directory where project will be created. + engine_path (Path): Path to Unreal Engine installation. + dev_mode (bool, optional): Flag to trigger C++ style Unreal project + needing Visual Studio and other tools to compile plugins from + sources. This will trigger automatically if `Binaries` + directory is not found in plugin folders as this indicates + this is only source distribution of the plugin. Dev mode + is also set by preset file `unreal/project_setup.json` in + **OPENPYPE_CONFIG**. + env (dict, optional): Environment to use. If not set, `os.environ`. - if os.path.isdir(os.environ.get("AVALON_UNREAL_PLUGIN", "")): + Throws: + NotImplementedError: For unsupported platforms. + + Returns: + None + + """ + env = env or os.environ + preset = get_project_settings(project_name)["unreal"]["project_setup"] + ue_id = ".".join(ue_version.split(".")[:2]) + # get unreal engine identifier + # ------------------------------------------------------------------------- + # FIXME (antirotor): As of 4.26 this is problem with UE4 built from + # sources. In that case Engine ID is calculated per machine/user and not + # from Engine files as this code then reads. This then prevents UE4 + # to directly open project as it will complain about project being + # created in different UE4 version. When user convert such project + # to his UE4 version, Engine ID is replaced in uproject file. If some + # other user tries to open it, it will present him with similar error. 
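# Hedged usage sketch (illustration only, not part of this function): how the
# helpers added in this module are meant to be combined. The project name and
# work directory below are hypothetical; the calls follow the signatures of
# get_engine_versions, get_editor_executable_path and create_unreal_project
# defined in this file and assume OpenPype project settings exist for the
# named project.
from pathlib import Path

from openpype.hosts.unreal.api import lib as unreal_lib

versions = unreal_lib.get_engine_versions()  # e.g. {"4.26": "C:/Epic Games/UE_4.26"}
engine_path = Path(versions["4.26"])
editor_exe = unreal_lib.get_editor_executable_path(engine_path)
unreal_lib.create_unreal_project(
    "Pshot010_layout",                        # hypothetical project name
    "4.26",
    Path("C:/work/shot010/Pshot010_layout"),  # hypothetical project directory
    engine_path,
)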
+ ue4_modules = Path() + if platform.system().lower() == "windows": + ue4_modules = Path(os.path.join(engine_path, "Engine", "Binaries", + "Win64", "UE4Editor.modules")) + + if platform.system().lower() == "linux": + ue4_modules = Path(os.path.join(engine_path, "Engine", "Binaries", + "Linux", "UE4Editor.modules")) + + if platform.system().lower() == "darwin": + ue4_modules = Path(os.path.join(engine_path, "Engine", "Binaries", + "Mac", "UE4Editor.modules")) + + if ue4_modules.exists(): + print("--- Loading Engine ID from modules file ...") + with open(ue4_modules, "r") as mp: + loaded_modules = json.load(mp) + + if loaded_modules.get("BuildId"): + ue_id = "{" + loaded_modules.get("BuildId") + "}" + + plugins_path = None + if os.path.isdir(env.get("AVALON_UNREAL_PLUGIN", "")): # copy plugin to correct path under project - plugins_path = os.path.join(pr_dir, "Plugins") - avalon_plugin_path = os.path.join(plugins_path, "Avalon") - if not os.path.isdir(avalon_plugin_path): - os.makedirs(avalon_plugin_path, exist_ok=True) + plugins_path = pr_dir / "Plugins" + avalon_plugin_path = plugins_path / "Avalon" + if not avalon_plugin_path.is_dir(): + avalon_plugin_path.mkdir(parents=True, exist_ok=True) dir_util._path_created = {} dir_util.copy_tree(os.environ.get("AVALON_UNREAL_PLUGIN"), - avalon_plugin_path) + avalon_plugin_path.as_posix()) - if (not os.path.isdir(os.path.join(avalon_plugin_path, "Binaries")) - or not os.path.join(avalon_plugin_path, "Intermediate")): + if not (avalon_plugin_path / "Binaries").is_dir() \ + or not (avalon_plugin_path / "Intermediate").is_dir(): dev_mode = True # data for project file data = { "FileVersion": 3, - "EngineAssociation": ue_version, + "EngineAssociation": ue_id, "Category": "", "Description": "", "Plugins": [ @@ -179,35 +257,6 @@ def create_unreal_project(project_name: str, ] } - if preset["install_unreal_python_engine"]: - # If `OPENPYPE_UNREAL_ENGINE_PYTHON_PLUGIN` is set, copy it from there - # to support offline installation. - # Otherwise clone UnrealEnginePython to Plugins directory - # https://github.com/20tab/UnrealEnginePython.git - uep_path = os.path.join(plugins_path, "UnrealEnginePython") - if os.environ.get("OPENPYPE_UNREAL_ENGINE_PYTHON_PLUGIN"): - - os.makedirs(uep_path, exist_ok=True) - dir_util._path_created = {} - dir_util.copy_tree( - os.environ.get("OPENPYPE_UNREAL_ENGINE_PYTHON_PLUGIN"), - uep_path) - else: - # WARNING: this will trigger dev_mode, because we need to compile - # this plugin. 
- dev_mode = True - import git - git.Repo.clone_from( - "https://github.com/20tab/UnrealEnginePython.git", - uep_path) - - data["Plugins"].append( - {"Name": "UnrealEnginePython", "Enabled": True}) - - if (not os.path.isdir(os.path.join(uep_path, "Binaries")) - or not os.path.join(uep_path, "Intermediate")): - dev_mode = True - if dev_mode or preset["dev_mode"]: # this will add project module and necessary source file to make it # C++ project and to (hopefully) make Unreal Editor to compile all @@ -220,51 +269,39 @@ def create_unreal_project(project_name: str, "AdditionalDependencies": ["Engine"], }] - if preset["install_unreal_python_engine"]: - # now we need to fix python path in: - # `UnrealEnginePython.Build.cs` - # to point to our python - with open(os.path.join( - uep_path, "Source", - "UnrealEnginePython", - "UnrealEnginePython.Build.cs"), mode="r") as f: - build_file = f.read() - - fix = build_file.replace( - 'private string pythonHome = "";', - 'private string pythonHome = "{}";'.format( - sys.base_prefix.replace("\\", "/"))) - - with open(os.path.join( - uep_path, "Source", - "UnrealEnginePython", - "UnrealEnginePython.Build.cs"), mode="w") as f: - f.write(fix) - # write project file - project_file = os.path.join(pr_dir, "{}.uproject".format(project_name)) + project_file = pr_dir / f"{project_name}.uproject" with open(project_file, mode="w") as pf: json.dump(data, pf, indent=4) - # UE < 4.26 have Python2 by default, so we need PySide - # but we will not need it in 4.26 and up - if int(ue_version.split(".")[1]) < 26: - # ensure we have PySide installed in engine - # TODO: make it work for other platforms 🍎 🐧 - if platform.system().lower() == "windows": - python_path = os.path.join(engine_path, "Engine", "Binaries", - "ThirdParty", "Python", "Win64", - "python.exe") + # ensure we have PySide2 installed in engine + python_path = None + if platform.system().lower() == "windows": + python_path = engine_path / ("Engine/Binaries/ThirdParty/" + "Python3/Win64/pythonw.exe") - subprocess.run([python_path, "-m", - "pip", "install", "pyside"]) + if platform.system().lower() == "linux": + python_path = engine_path / ("Engine/Binaries/ThirdParty/" + "Python3/Linux/bin/python3") + + if platform.system().lower() == "darwin": + python_path = engine_path / ("Engine/Binaries/ThirdParty/" + "Python3/Mac/bin/python3") + + if not python_path: + raise NotImplementedError("Unsupported platform") + if not python_path.exists(): + raise RuntimeError(f"Unreal Python not found at {python_path}") + subprocess.run( + [python_path.as_posix(), "-m", "pip", "install", "pyside2"]) if dev_mode or preset["dev_mode"]: _prepare_cpp_project(project_file, engine_path) -def _prepare_cpp_project(project_file: str, engine_path: str) -> None: - """ +def _prepare_cpp_project(project_file: Path, engine_path: Path) -> None: + """Prepare CPP Unreal Project. + This function will add source files needed for project to be rebuild along with the avalon integration plugin. @@ -273,19 +310,19 @@ def _prepare_cpp_project(project_file: str, engine_path: str) -> None: by some generator. This needs more research as manually writing those files is rather hackish. :skull_and_crossbones: - :param project_file: path to .uproject file - :type project_file: str - :param engine_path: path to unreal engine associated with project - :type engine_path: str + + Args: + project_file (str): Path to .uproject file. + engine_path (str): Path to unreal engine associated with project. 
+ """ + project_name = project_file.stem + project_dir = project_file.parent + targets_dir = project_dir / "Source" + sources_dir = targets_dir / project_name - project_name = os.path.splitext(os.path.basename(project_file))[0] - project_dir = os.path.dirname(project_file) - targets_dir = os.path.join(project_dir, "Source") - sources_dir = os.path.join(targets_dir, project_name) - - os.makedirs(sources_dir, exist_ok=True) - os.makedirs(os.path.join(project_dir, "Content"), exist_ok=True) + sources_dir.mkdir(parents=True, exist_ok=True) + (project_dir / "Content").mkdir(parents=True, exist_ok=True) module_target = ''' using UnrealBuildTool; @@ -360,59 +397,59 @@ class {1}_API A{0}GameModeBase : public AGameModeBase }}; '''.format(project_name, project_name.upper()) - with open(os.path.join( - targets_dir, f"{project_name}.Target.cs"), mode="w") as f: + with open(targets_dir / f"{project_name}.Target.cs", mode="w") as f: f.write(module_target) - with open(os.path.join( - targets_dir, f"{project_name}Editor.Target.cs"), mode="w") as f: + with open(targets_dir / f"{project_name}Editor.Target.cs", mode="w") as f: f.write(editor_module_target) - with open(os.path.join( - sources_dir, f"{project_name}.Build.cs"), mode="w") as f: + with open(sources_dir / f"{project_name}.Build.cs", mode="w") as f: f.write(module_build) - with open(os.path.join( - sources_dir, f"{project_name}.cpp"), mode="w") as f: + with open(sources_dir / f"{project_name}.cpp", mode="w") as f: f.write(module_cpp) - with open(os.path.join( - sources_dir, f"{project_name}.h"), mode="w") as f: + with open(sources_dir / f"{project_name}.h", mode="w") as f: f.write(module_header) - with open(os.path.join( - sources_dir, f"{project_name}GameModeBase.cpp"), mode="w") as f: + with open(sources_dir / f"{project_name}GameModeBase.cpp", mode="w") as f: f.write(game_mode_cpp) - with open(os.path.join( - sources_dir, f"{project_name}GameModeBase.h"), mode="w") as f: + with open(sources_dir / f"{project_name}GameModeBase.h", mode="w") as f: f.write(game_mode_h) + u_build_tool = Path( + engine_path / "Engine/Binaries/DotNET/UnrealBuildTool.exe") + u_header_tool = None + + arch = "Win64" if platform.system().lower() == "windows": - u_build_tool = (f"{engine_path}/Engine/Binaries/DotNET/" - "UnrealBuildTool.exe") - u_header_tool = (f"{engine_path}/Engine/Binaries/Win64/" - f"UnrealHeaderTool.exe") + arch = "Win64" + u_header_tool = Path( + engine_path / "Engine/Binaries/Win64/UnrealHeaderTool.exe") elif platform.system().lower() == "linux": - # WARNING: there is no UnrealBuildTool on linux? - u_build_tool = "" - u_header_tool = "" + arch = "Linux" + u_header_tool = Path( + engine_path / "Engine/Binaries/Linux/UnrealHeaderTool") elif platform.system().lower() == "darwin": - # WARNING: there is no UnrealBuildTool on Mac? 
- u_build_tool = "" - u_header_tool = "" + # we need to test this out + arch = "Mac" + u_header_tool = Path( + engine_path / "Engine/Binaries/Mac/UnrealHeaderTool") - u_build_tool = u_build_tool.replace("\\", "/") - u_header_tool = u_header_tool.replace("\\", "/") + if not u_header_tool: + raise NotImplementedError("Unsupported platform") - command1 = [u_build_tool, "-projectfiles", f"-project={project_file}", - "-progress"] + command1 = [u_build_tool.as_posix(), "-projectfiles", + f"-project={project_file}", "-progress"] subprocess.run(command1) - command2 = [u_build_tool, f"-ModuleWithSuffix={project_name},3555" - "Win64", "Development", "-TargetType=Editor" - f'-Project="{project_file}"', f'"{project_file}"' + command2 = [u_build_tool.as_posix(), + f"-ModuleWithSuffix={project_name},3555", arch, + "Development", "-TargetType=Editor", + f'-Project={project_file}', + f'{project_file}', "-IgnoreJunk"] subprocess.run(command2) diff --git a/openpype/hosts/unreal/hooks/pre_workfile_preparation.py b/openpype/hosts/unreal/hooks/pre_workfile_preparation.py index f084cccfc3..01b8b6bc05 100644 --- a/openpype/hosts/unreal/hooks/pre_workfile_preparation.py +++ b/openpype/hosts/unreal/hooks/pre_workfile_preparation.py @@ -1,31 +1,49 @@ +# -*- coding: utf-8 -*- +"""Hook to launch Unreal and prepare projects.""" import os +from pathlib import Path from openpype.lib import ( PreLaunchHook, - ApplicationLaunchFailed + ApplicationLaunchFailed, + ApplicationNotFound ) from openpype.hosts.unreal.api import lib as unreal_lib class UnrealPrelaunchHook(PreLaunchHook): - """ + """Hook to handle launching Unreal. + This hook will check if current workfile path has Unreal project inside. IF not, it initialize it and finally it pass path to the project by environment variable to Unreal launcher shell script. - """ + """ def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.signature = "( {} )".format(self.__class__.__name__) def execute(self): + """Hook entry method.""" asset_name = self.data["asset_name"] task_name = self.data["task_name"] workdir = self.launch_context.env["AVALON_WORKDIR"] engine_version = self.app_name.split("/")[-1].replace("-", ".") unreal_project_name = f"{asset_name}_{task_name}" + try: + if int(engine_version.split(".")[0]) < 4 and \ + int(engine_version.split(".")[1]) < 26: + raise ApplicationLaunchFailed(( + f"{self.signature} Old unsupported version of UE4 " + f"detected - {engine_version}")) + except ValueError: + # there can be string in minor version and in that case + # int cast is failing. This probably happens only with + # early access versions and is of no concert for this check + # so lets keep it quite. + ... 
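# Hedged sketch of the project naming rule described in
# openpype/hosts/unreal/README.md above: names that do not start with a letter
# get a "P" prefix, and names longer than 20 characters only trigger a
# warning. The sample names and the regex are illustrative, not the hook's
# exact implementation.
import re


def normalize_unreal_project_name(name):
    """Return a name Unreal accepts plus a flag for the length soft-limit."""
    if not re.match(r"^[a-zA-Z]", name):
        name = "P{}".format(name)
    return name, len(name) > 20


for sample in ("123_myProject", "shot010_compositing_final"):
    fixed_name, too_long = normalize_unreal_project_name(sample)
    print(fixed_name, "(length warning)" if too_long else "")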
# Unreal is sensitive about project names longer then 20 chars if len(unreal_project_name) > 20: @@ -45,19 +63,21 @@ class UnrealPrelaunchHook(PreLaunchHook): )) unreal_project_name = f"P{unreal_project_name}" - project_path = os.path.join(workdir, unreal_project_name) + project_path = Path(os.path.join(workdir, unreal_project_name)) self.log.info(( f"{self.signature} requested UE4 version: " f"[ {engine_version} ]" )) - detected = unreal_lib.get_engine_versions() + detected = unreal_lib.get_engine_versions(self.launch_context.env) detected_str = ', '.join(detected.keys()) or 'none' self.log.info(( f"{self.signature} detected UE4 versions: " f"[ {detected_str} ]" )) + if not detected: + raise ApplicationNotFound("No Unreal Engines are found.") engine_version = ".".join(engine_version.split(".")[:2]) if engine_version not in detected.keys(): @@ -66,13 +86,14 @@ class UnrealPrelaunchHook(PreLaunchHook): f"detected [ {engine_version} ]" )) - os.makedirs(project_path, exist_ok=True) + ue4_path = unreal_lib.get_editor_executable_path( + Path(detected[engine_version])) - project_file = os.path.join( - project_path, - f"{unreal_project_name}.uproject" - ) - if not os.path.isfile(project_file): + self.launch_context.launch_args.append(ue4_path.as_posix()) + project_path.mkdir(parents=True, exist_ok=True) + + project_file = project_path / f"{unreal_project_name}.uproject" + if not project_file.is_file(): engine_path = detected[engine_version] self.log.info(( f"{self.signature} creating unreal " @@ -88,8 +109,9 @@ class UnrealPrelaunchHook(PreLaunchHook): unreal_project_name, engine_version, project_path, - engine_path=engine_path + engine_path=Path(engine_path) ) # Append project file to launch arguments - self.launch_context.launch_args.append(f"\"{project_file}\"") + self.launch_context.launch_args.append( + f"\"{project_file.as_posix()}\"") diff --git a/openpype/lib/abstract_submit_deadline.py b/openpype/lib/abstract_submit_deadline.py index 12014ddfb5..4a052a4ee2 100644 --- a/openpype/lib/abstract_submit_deadline.py +++ b/openpype/lib/abstract_submit_deadline.py @@ -18,6 +18,48 @@ import pyblish.api from .abstract_metaplugins import AbstractMetaInstancePlugin +def requests_post(*args, **kwargs): + """Wrap request post method. + + Disabling SSL certificate validation if ``DONT_VERIFY_SSL`` environment + variable is found. This is useful when Deadline or Muster server are + running with self-signed certificates and their certificate is not + added to trusted certificates on client machines. + + Warning: + Disabling SSL certificate validation is defeating one line + of defense SSL is providing and it is not recommended. + + """ + if 'verify' not in kwargs: + kwargs['verify'] = False if os.getenv("OPENPYPE_DONT_VERIFY_SSL", + True) else True # noqa + # add 10sec timeout before bailing out + kwargs['timeout'] = 10 + return requests.post(*args, **kwargs) + + +def requests_get(*args, **kwargs): + """Wrap request get method. + + Disabling SSL certificate validation if ``DONT_VERIFY_SSL`` environment + variable is found. This is useful when Deadline or Muster server are + running with self-signed certificates and their certificate is not + added to trusted certificates on client machines. + + Warning: + Disabling SSL certificate validation is defeating one line + of defense SSL is providing and it is not recommended. 
+ + """ + if 'verify' not in kwargs: + kwargs['verify'] = False if os.getenv("OPENPYPE_DONT_VERIFY_SSL", + True) else True # noqa + # add 10sec timeout before bailing out + kwargs['timeout'] = 10 + return requests.get(*args, **kwargs) + + @attr.s class DeadlineJobInfo(object): """Mapping of all Deadline *JobInfo* attributes. @@ -579,7 +621,7 @@ class AbstractSubmitDeadline(pyblish.api.InstancePlugin): """ url = "{}/api/jobs".format(self._deadline_url) - response = self._requests_post(url, json=payload) + response = requests_post(url, json=payload) if not response.ok: self.log.error("Submission failed!") self.log.error(response.status_code) @@ -592,41 +634,3 @@ class AbstractSubmitDeadline(pyblish.api.InstancePlugin): self._instance.data["deadlineSubmissionJob"] = result return result["_id"] - - def _requests_post(self, *args, **kwargs): - """Wrap request post method. - - Disabling SSL certificate validation if ``DONT_VERIFY_SSL`` environment - variable is found. This is useful when Deadline or Muster server are - running with self-signed certificates and their certificate is not - added to trusted certificates on client machines. - - Warning: - Disabling SSL certificate validation is defeating one line - of defense SSL is providing and it is not recommended. - - """ - if 'verify' not in kwargs: - kwargs['verify'] = False if os.getenv("OPENPYPE_DONT_VERIFY_SSL", True) else True # noqa - # add 10sec timeout before bailing out - kwargs['timeout'] = 10 - return requests.post(*args, **kwargs) - - def _requests_get(self, *args, **kwargs): - """Wrap request get method. - - Disabling SSL certificate validation if ``DONT_VERIFY_SSL`` environment - variable is found. This is useful when Deadline or Muster server are - running with self-signed certificates and their certificate is not - added to trusted certificates on client machines. - - Warning: - Disabling SSL certificate validation is defeating one line - of defense SSL is providing and it is not recommended. 
- - """ - if 'verify' not in kwargs: - kwargs['verify'] = False if os.getenv("OPENPYPE_DONT_VERIFY_SSL", True) else True # noqa - # add 10sec timeout before bailing out - kwargs['timeout'] = 10 - return requests.get(*args, **kwargs) diff --git a/openpype/lib/anatomy.py b/openpype/lib/anatomy.py index c16c6e2e99..7a4a55363c 100644 --- a/openpype/lib/anatomy.py +++ b/openpype/lib/anatomy.py @@ -733,6 +733,9 @@ class Templates: continue default_key_values[key] = templates.pop(key) + # Pop "others" key before before expected keys are processed + other_templates = templates.pop("others") or {} + keys_by_subkey = {} for sub_key, sub_value in templates.items(): key_values = {} @@ -740,7 +743,6 @@ class Templates: key_values.update(sub_value) keys_by_subkey[sub_key] = cls.prepare_inner_keys(key_values) - other_templates = templates.get("others") or {} for sub_key, sub_value in other_templates.items(): if sub_key in keys_by_subkey: log.warning(( diff --git a/openpype/lib/applications.py b/openpype/lib/applications.py index ff5ef92d82..e1b304a351 100644 --- a/openpype/lib/applications.py +++ b/openpype/lib/applications.py @@ -180,7 +180,7 @@ class Application: if group.enabled: enabled = data.get("enabled", True) self.enabled = enabled - self.use_python_2 = data["use_python_2"] + self.use_python_2 = data.get("use_python_2", False) self.label = data.get("variant_label") or name self.full_name = "/".join((group.name, name)) @@ -192,26 +192,32 @@ class Application: self.full_label = full_label self._environment = data.get("environment") or {} + arguments = data.get("arguments") + if isinstance(arguments, dict): + arguments = arguments.get(platform.system().lower()) + + if not arguments: + arguments = [] + self.arguments = arguments + + if "executables" not in data: + self.executables = [ + UndefinedApplicationExecutable() + ] + return + _executables = data["executables"] + if isinstance(_executables, dict): + _executables = _executables.get(platform.system().lower()) + if not _executables: _executables = [] - elif isinstance(_executables, dict): - _executables = _executables.get(platform.system().lower()) or [] - - _arguments = data["arguments"] - if not _arguments: - _arguments = [] - - elif isinstance(_arguments, dict): - _arguments = _arguments.get(platform.system().lower()) or [] - executables = [] for executable in _executables: executables.append(ApplicationExecutable(executable)) self.executables = executables - self.arguments = _arguments def __repr__(self): return "<{}> - {}".format(self.__class__.__name__, self.full_name) @@ -444,6 +450,12 @@ class ApplicationExecutable: """Representation of executable loaded from settings.""" def __init__(self, executable): + # Try to format executable with environments + try: + executable = executable.format(**os.environ) + except Exception: + pass + # On MacOS check if exists path to executable when ends with `.app` # - it is common that path will lead to "/Applications/Blender" but # real path is "/Applications/Blender.app" @@ -485,6 +497,27 @@ class ApplicationExecutable: return bool(self._realpath()) +class UndefinedApplicationExecutable(ApplicationExecutable): + """Some applications do not require executable path from settings. + + In that case this class is used to "fake" existing executable. 
+ """ + def __init__(self): + pass + + def __str__(self): + return self.__class__.__name__ + + def __repr__(self): + return "<{}>".format(self.__class__.__name__) + + def as_args(self): + return [] + + def exists(self): + return True + + @six.add_metaclass(ABCMeta) class LaunchHook: """Abstract base class of launch hook.""" @@ -1131,6 +1164,9 @@ def prepare_host_environments(data, implementation_envs=True): def apply_project_environments_value(project_name, env, project_settings=None): """Apply project specific environments on passed environments. + The enviornments are applied on passed `env` argument value so it is not + required to apply changes back. + Args: project_name (str): Name of project for which environemnts should be received. @@ -1139,6 +1175,9 @@ def apply_project_environments_value(project_name, env, project_settings=None): project_settings (dict): Project settings for passed project name. Optional if project settings are already prepared. + Returns: + dict: Passed env values with applied project environments. + Raises: KeyError: If project settings do not contain keys for project specific environments. @@ -1149,10 +1188,9 @@ def apply_project_environments_value(project_name, env, project_settings=None): project_settings = get_project_settings(project_name) env_value = project_settings["global"]["project_environments"] - if not env_value: - return env - parsed = acre.parse(env_value) - return _merge_env(parsed, env) + if env_value: + env.update(_merge_env(acre.parse(env_value), env)) + return env def prepare_context_environments(data): @@ -1181,9 +1219,8 @@ def prepare_context_environments(data): # Load project specific environments project_name = project_doc["name"] - data["env"] = apply_project_environments_value( - project_name, data["env"] - ) + # Apply project specific environments on current env value + apply_project_environments_value(project_name, data["env"]) app = data["app"] workdir_data = get_workdir_data( diff --git a/openpype/lib/editorial.py b/openpype/lib/editorial.py index 158488dd56..8e8e365bdb 100644 --- a/openpype/lib/editorial.py +++ b/openpype/lib/editorial.py @@ -7,6 +7,8 @@ try: import opentimelineio as otio from opentimelineio import opentime as _ot except ImportError: + if not os.environ.get("AVALON_APP"): + raise otio = discover_host_vendor_module("opentimelineio") _ot = discover_host_vendor_module("opentimelineio.opentime") diff --git a/openpype/lib/vendor_bin_utils.py b/openpype/lib/vendor_bin_utils.py index 3b923cb608..a8c75c20da 100644 --- a/openpype/lib/vendor_bin_utils.py +++ b/openpype/lib/vendor_bin_utils.py @@ -89,8 +89,13 @@ def ffprobe_streams(path_to_file, logger=None): popen_stdout, popen_stderr = popen.communicate() if popen_stdout: - logger.debug("ffprobe stdout: {}".format(popen_stdout)) + logger.debug("FFprobe stdout:\n{}".format( + popen_stdout.decode("utf-8") + )) if popen_stderr: - logger.debug("ffprobe stderr: {}".format(popen_stderr)) + logger.warning("FFprobe stderr:\n{}".format( + popen_stderr.decode("utf-8") + )) + return json.loads(popen_stdout)["streams"] diff --git a/openpype/modules/clockify/clockify_api.py b/openpype/modules/clockify/clockify_api.py index 3f0a9799b4..6af911fffc 100644 --- a/openpype/modules/clockify/clockify_api.py +++ b/openpype/modules/clockify/clockify_api.py @@ -36,6 +36,7 @@ class ClockifyAPI: self._secure_registry = None + @property def secure_registry(self): if self._secure_registry is None: self._secure_registry = OpenPypeSecureRegistry("clockify") diff --git 
a/openpype/modules/clockify/widgets.py b/openpype/modules/clockify/widgets.py index 76f3a3f365..fc8e7fa8a3 100644 --- a/openpype/modules/clockify/widgets.py +++ b/openpype/modules/clockify/widgets.py @@ -1,6 +1,5 @@ from Qt import QtCore, QtGui, QtWidgets -from avalon import style -from openpype import resources +from openpype import resources, style class MessageWidget(QtWidgets.QWidget): @@ -22,14 +21,6 @@ class MessageWidget(QtWidgets.QWidget): QtCore.Qt.WindowMinimizeButtonHint ) - # Font - self.font = QtGui.QFont() - self.font.setFamily("DejaVu Sans Condensed") - self.font.setPointSize(9) - self.font.setBold(True) - self.font.setWeight(50) - self.font.setKerning(True) - # Size setting self.resize(self.SIZE_W, self.SIZE_H) self.setMinimumSize(QtCore.QSize(self.SIZE_W, self.SIZE_H)) @@ -53,7 +44,6 @@ class MessageWidget(QtWidgets.QWidget): labels = [] for message in messages: label = QtWidgets.QLabel(message) - label.setFont(self.font) label.setCursor(QtGui.QCursor(QtCore.Qt.ArrowCursor)) label.setTextFormat(QtCore.Qt.RichText) label.setWordWrap(True) @@ -103,84 +93,64 @@ class ClockifySettings(QtWidgets.QWidget): icon = QtGui.QIcon(resources.pype_icon_filepath()) self.setWindowIcon(icon) + self.setWindowTitle("Clockify settings") self.setWindowFlags( QtCore.Qt.WindowCloseButtonHint | QtCore.Qt.WindowMinimizeButtonHint ) - self._translate = QtCore.QCoreApplication.translate - - # Font - self.font = QtGui.QFont() - self.font.setFamily("DejaVu Sans Condensed") - self.font.setPointSize(9) - self.font.setBold(True) - self.font.setWeight(50) - self.font.setKerning(True) - # Size setting self.resize(self.SIZE_W, self.SIZE_H) self.setMinimumSize(QtCore.QSize(self.SIZE_W, self.SIZE_H)) self.setMaximumSize(QtCore.QSize(self.SIZE_W+100, self.SIZE_H+100)) self.setStyleSheet(style.load_stylesheet()) - self.setLayout(self._main()) - self.setWindowTitle('Clockify settings') + self._ui_init() - def _main(self): - self.main = QtWidgets.QVBoxLayout() - self.main.setObjectName("main") + def _ui_init(self): + label_api_key = QtWidgets.QLabel("Clockify API key:") - self.form = QtWidgets.QFormLayout() - self.form.setContentsMargins(10, 15, 10, 5) - self.form.setObjectName("form") + input_api_key = QtWidgets.QLineEdit() + input_api_key.setFrame(True) + input_api_key.setPlaceholderText("e.g. XX1XxXX2x3x4xXxx") - self.label_api_key = QtWidgets.QLabel("Clockify API key:") - self.label_api_key.setFont(self.font) - self.label_api_key.setCursor(QtGui.QCursor(QtCore.Qt.ArrowCursor)) - self.label_api_key.setTextFormat(QtCore.Qt.RichText) - self.label_api_key.setObjectName("label_api_key") + error_label = QtWidgets.QLabel("") + error_label.setTextFormat(QtCore.Qt.RichText) + error_label.setWordWrap(True) + error_label.hide() - self.input_api_key = QtWidgets.QLineEdit() - self.input_api_key.setEnabled(True) - self.input_api_key.setFrame(True) - self.input_api_key.setObjectName("input_api_key") - self.input_api_key.setPlaceholderText( - self._translate("main", "e.g. 
XX1XxXX2x3x4xXxx") - ) + form_layout = QtWidgets.QFormLayout() + form_layout.setContentsMargins(10, 15, 10, 5) + form_layout.addRow(label_api_key, input_api_key) + form_layout.addRow(error_label) - self.error_label = QtWidgets.QLabel("") - self.error_label.setFont(self.font) - self.error_label.setTextFormat(QtCore.Qt.RichText) - self.error_label.setObjectName("error_label") - self.error_label.setWordWrap(True) - self.error_label.hide() + btn_ok = QtWidgets.QPushButton("Ok") + btn_ok.setToolTip('Sets Clockify API Key so can Start/Stop timer') - self.form.addRow(self.label_api_key, self.input_api_key) - self.form.addRow(self.error_label) - - self.btn_group = QtWidgets.QHBoxLayout() - self.btn_group.addStretch(1) - self.btn_group.setObjectName("btn_group") - - self.btn_ok = QtWidgets.QPushButton("Ok") - self.btn_ok.setToolTip('Sets Clockify API Key so can Start/Stop timer') - self.btn_ok.clicked.connect(self.click_ok) - - self.btn_cancel = QtWidgets.QPushButton("Cancel") + btn_cancel = QtWidgets.QPushButton("Cancel") cancel_tooltip = 'Application won\'t start' if self.optional: cancel_tooltip = 'Close this window' - self.btn_cancel.setToolTip(cancel_tooltip) - self.btn_cancel.clicked.connect(self._close_widget) + btn_cancel.setToolTip(cancel_tooltip) - self.btn_group.addWidget(self.btn_ok) - self.btn_group.addWidget(self.btn_cancel) + btn_group = QtWidgets.QHBoxLayout() + btn_group.addStretch(1) + btn_group.addWidget(btn_ok) + btn_group.addWidget(btn_cancel) - self.main.addLayout(self.form) - self.main.addLayout(self.btn_group) + main_layout = QtWidgets.QVBoxLayout(self) + main_layout.addLayout(form_layout) + main_layout.addLayout(btn_group) - return self.main + btn_ok.clicked.connect(self.click_ok) + btn_cancel.clicked.connect(self._close_widget) + + self.label_api_key = label_api_key + self.input_api_key = input_api_key + self.error_label = error_label + + self.btn_ok = btn_ok + self.btn_cancel = btn_cancel def setError(self, msg): self.error_label.setText(msg) @@ -212,6 +182,17 @@ class ClockifySettings(QtWidgets.QWidget): "Entered invalid API key" ) + def showEvent(self, event): + super(ClockifySettings, self).showEvent(event) + + # Make btns same width + max_width = max( + self.btn_ok.sizeHint().width(), + self.btn_cancel.sizeHint().width() + ) + self.btn_ok.setMinimumWidth(max_width) + self.btn_cancel.setMinimumWidth(max_width) + def closeEvent(self, event): if self.optional is True: event.ignore() diff --git a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py index a5841f406c..a652da7786 100644 --- a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py @@ -271,6 +271,22 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin): ["DEADLINE_REST_URL"] ) + self._job_info = ( + context.data["project_settings"].get( + "deadline", {}).get( + "publish", {}).get( + "MayaSubmitDeadline", {}).get( + "jobInfo", {}) + ) + + self._plugin_info = ( + context.data["project_settings"].get( + "deadline", {}).get( + "publish", {}).get( + "MayaSubmitDeadline", {}).get( + "pluginInfo", {}) + ) + assert self._deadline_url, "Requires DEADLINE_REST_URL" context = instance.context @@ -407,7 +423,7 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin): self.payload_skeleton["JobInfo"]["Priority"] = \ self._instance.data.get("priority", 50) - if self.group != "none": + if self.group != "none" and self.group: 
self.payload_skeleton["JobInfo"]["Group"] = self.group if self.limit_groups: @@ -536,6 +552,10 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin): self.preflight_check(instance) + # add jobInfo and pluginInfo variables from Settings + payload["JobInfo"].update(self._job_info) + payload["PluginInfo"].update(self._plugin_info) + # Prepare tiles data ------------------------------------------------ if instance.data.get("tileRendering"): # if we have sequence of files, we need to create tile job for diff --git a/openpype/modules/deadline/plugins/publish/submit_nuke_deadline.py b/openpype/modules/deadline/plugins/publish/submit_nuke_deadline.py index 7faa3393e5..fed98d8a08 100644 --- a/openpype/modules/deadline/plugins/publish/submit_nuke_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_nuke_deadline.py @@ -32,6 +32,8 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin): department = "" limit_groups = {} use_gpu = False + env_allowed_keys = [] + env_search_replace_values = {} def process(self, instance): instance.data["toBeRenderedOn"] = "deadline" @@ -242,19 +244,19 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin): "PYBLISHPLUGINPATH", "NUKE_PATH", "TOOL_ENV", - "OPENPYPE_DEV", "FOUNDRY_LICENSE" ] + # add allowed keys from preset if any + if self.env_allowed_keys: + keys += self.env_allowed_keys + environment = dict({key: os.environ[key] for key in keys if key in os.environ}, **api.Session) # self.log.debug("enviro: {}".format(pprint(environment))) - for path in os.environ: - if path.lower().startswith('pype_'): - environment[path] = os.environ[path] - if path.lower().startswith('nuke_'): - environment[path] = os.environ[path] - if 'license' in path.lower(): - environment[path] = os.environ[path] + + for _path in os.environ: + if _path.lower().startswith('openpype_'): + environment[_path] = os.environ[_path] clean_environment = {} for key, value in environment.items(): @@ -285,6 +287,13 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin): environment = clean_environment # to recognize job from PYPE for turning Event On/Off environment["OPENPYPE_RENDER_JOB"] = "1" + + # finally search replace in values of any key + if self.env_search_replace_values: + for key, value in environment.items(): + for _k, _v in self.env_search_replace_values.items(): + environment[key] = value.replace(_k, _v) + payload["JobInfo"].update({ "EnvironmentKeyValue%d" % index: "{key}={value}".format( key=key, diff --git a/openpype/modules/deadline/plugins/publish/submit_publish_job.py b/openpype/modules/deadline/plugins/publish/submit_publish_job.py index 52ed9bba76..41f8337fd8 100644 --- a/openpype/modules/deadline/plugins/publish/submit_publish_job.py +++ b/openpype/modules/deadline/plugins/publish/submit_publish_job.py @@ -231,7 +231,8 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin): args = [ 'publish', - roothless_metadata_path + roothless_metadata_path, + "--targets {}".format("deadline") ] # Generate the payload for Deadline submission diff --git a/openpype/modules/deadline/plugins/publish/validate_expected_and_rendered_files.py b/openpype/modules/deadline/plugins/publish/validate_expected_and_rendered_files.py new file mode 100644 index 0000000000..c71b5106ec --- /dev/null +++ b/openpype/modules/deadline/plugins/publish/validate_expected_and_rendered_files.py @@ -0,0 +1,186 @@ +import os +import json +import pyblish.api + +from avalon.vendor import requests + +from openpype.api import get_system_settings +from openpype.lib.abstract_submit_deadline import 
requests_get +from openpype.lib.delivery import collect_frames + + +class ValidateExpectedFiles(pyblish.api.InstancePlugin): + """Compare rendered and expected files""" + + label = "Validate rendered files from Deadline" + order = pyblish.api.ValidatorOrder + families = ["render"] + targets = ["deadline"] + + # check if actual frame range on render job wasn't different + # case when artists wants to render only subset of frames + allow_user_override = True + + def process(self, instance): + frame_list = self._get_frame_list(instance.data["render_job_id"]) + + for repre in instance.data["representations"]: + expected_files = self._get_expected_files(repre) + + staging_dir = repre["stagingDir"] + existing_files = self._get_existing_files(staging_dir) + + expected_non_existent = expected_files.difference( + existing_files) + if len(expected_non_existent) != 0: + self.log.info("Some expected files missing {}".format( + expected_non_existent)) + + if self.allow_user_override: + file_name_template, frame_placeholder = \ + self._get_file_name_template_and_placeholder( + expected_files) + + if not file_name_template: + return + + real_expected_rendered = self._get_real_render_expected( + file_name_template, + frame_placeholder, + frame_list) + + real_expected_non_existent = \ + real_expected_rendered.difference(existing_files) + if len(real_expected_non_existent) != 0: + raise RuntimeError("Still missing some files {}". + format(real_expected_non_existent)) + self.log.info("Update range from actual job range") + repre["files"] = sorted(list(real_expected_rendered)) + else: + raise RuntimeError("Some expected files missing {}".format( + expected_non_existent)) + + def _get_frame_list(self, original_job_id): + """ + Returns list of frame ranges from all render job. + + Render job might be requeried so job_id in metadata.json is invalid + GlobalJobPreload injects current ids to RENDER_JOB_IDS. + + Args: + original_job_id (str) + Returns: + (list) + """ + all_frame_lists = [] + render_job_ids = os.environ.get("RENDER_JOB_IDS") + if render_job_ids: + render_job_ids = render_job_ids.split(',') + else: # fallback + render_job_ids = [original_job_id] + + for job_id in render_job_ids: + job_info = self._get_job_info(job_id) + frame_list = job_info["Props"]["Frames"] + if frame_list: + all_frame_lists.extend(frame_list.split(',')) + + return all_frame_lists + + def _get_real_render_expected(self, file_name_template, frame_placeholder, + frame_list): + """ + Calculates list of names of expected rendered files. + + Might be different from job expected files if user explicitly and + manually change frame list on Deadline job. 
+ """ + real_expected_rendered = set() + src_padding_exp = "%0{}d".format(len(frame_placeholder)) + for frames in frame_list: + if '-' not in frames: # single frame + frames = "{}-{}".format(frames, frames) + + start, end = frames.split('-') + for frame in range(int(start), int(end) + 1): + ren_name = file_name_template.replace( + frame_placeholder, src_padding_exp % frame) + real_expected_rendered.add(ren_name) + + return real_expected_rendered + + def _get_file_name_template_and_placeholder(self, files): + """Returns file name with frame replaced with # and this placeholder""" + sources_and_frames = collect_frames(files) + + file_name_template = frame_placeholder = None + for file_name, frame in sources_and_frames.items(): + frame_placeholder = "#" * len(frame) + file_name_template = os.path.basename( + file_name.replace(frame, frame_placeholder)) + break + + return file_name_template, frame_placeholder + + def _get_job_info(self, job_id): + """ + Calls DL for actual job info for 'job_id' + + Might be different than job info saved in metadata.json if user + manually changes job pre/during rendering. + """ + deadline_url = ( + get_system_settings() + ["modules"] + ["deadline"] + ["DEADLINE_REST_URL"] + ) + assert deadline_url, "Requires DEADLINE_REST_URL" + + url = "{}/api/jobs?JobID={}".format(deadline_url, job_id) + try: + response = requests_get(url) + except requests.exceptions.ConnectionError: + print("Deadline is not accessible at {}".format(deadline_url)) + # self.log("Deadline is not accessible at {}".format(deadline_url)) + return {} + + if not response.ok: + self.log.error("Submission failed!") + self.log.error(response.status_code) + self.log.error(response.content) + raise RuntimeError(response.text) + + json_content = response.json() + if json_content: + return json_content.pop() + return {} + + def _parse_metadata_json(self, json_path): + if not os.path.exists(json_path): + msg = "Metadata file {} doesn't exist".format(json_path) + raise RuntimeError(msg) + + with open(json_path) as fp: + try: + return json.load(fp) + except Exception as exc: + self.log.error( + "Error loading json: " + "{} - Exception: {}".format(json_path, exc) + ) + + def _get_existing_files(self, out_dir): + """Returns set of existing file names from 'out_dir'""" + existing_files = set() + for file_name in os.listdir(out_dir): + existing_files.add(file_name) + return existing_files + + def _get_expected_files(self, repre): + """Returns set of file names from metadata.json""" + expected_files = set() + + for file_name in repre["files"]: + expected_files.add(file_name) + return expected_files diff --git a/openpype/modules/ftrack/event_handlers_server/action_clone_review_session.py b/openpype/modules/ftrack/event_handlers_server/action_clone_review_session.py index d29316c795..59c8bffb75 100644 --- a/openpype/modules/ftrack/event_handlers_server/action_clone_review_session.py +++ b/openpype/modules/ftrack/event_handlers_server/action_clone_review_session.py @@ -16,11 +16,13 @@ def clone_review_session(session, entity): # Add all invitees. 
for invitee in entity["review_session_invitees"]: + # Make sure email is not None but string + email = invitee["email"] or "" session.create( "ReviewSessionInvitee", { "name": invitee["name"], - "email": invitee["email"], + "email": email, "review_session": review_session } ) diff --git a/openpype/modules/ftrack/event_handlers_server/action_multiple_notes.py b/openpype/modules/ftrack/event_handlers_server/action_multiple_notes.py new file mode 100644 index 0000000000..9ad7b1a969 --- /dev/null +++ b/openpype/modules/ftrack/event_handlers_server/action_multiple_notes.py @@ -0,0 +1,167 @@ +from openpype.modules.ftrack.lib import ServerAction + + +class MultipleNotesServer(ServerAction): + """Action adds same note for muliple AssetVersions. + + Note is added to selection of AssetVersions. Note is created with user + who triggered the action. It is possible to define note category of note. + """ + + identifier = "multiple.notes.server" + label = "Multiple Notes (Server)" + description = "Add same note to multiple Asset Versions" + + _none_category = "__NONE__" + + def discover(self, session, entities, event): + """Show action only on AssetVersions.""" + if not entities: + return False + + for entity in entities: + if entity.entity_type.lower() != "assetversion": + return False + return True + + def interface(self, session, entities, event): + event_source = event["source"] + user_info = event_source.get("user") or {} + user_id = user_info.get("id") + if not user_id: + return None + + values = event["data"].get("values") + if values: + return None + + note_label = { + "type": "label", + "value": "# Enter note: #" + } + + note_value = { + "name": "note", + "type": "textarea" + } + + category_label = { + "type": "label", + "value": "## Category: ##" + } + + category_data = [] + category_data.append({ + "label": "- None -", + "value": self._none_category + }) + all_categories = session.query( + "select id, name from NoteCategory" + ).all() + for cat in all_categories: + category_data.append({ + "label": cat["name"], + "value": cat["id"] + }) + category_value = { + "type": "enumerator", + "name": "category", + "data": category_data, + "value": self._none_category + } + + splitter = { + "type": "label", + "value": "---" + } + + return [ + note_label, + note_value, + splitter, + category_label, + category_value + ] + + def launch(self, session, entities, event): + if "values" not in event["data"]: + return None + + values = event["data"]["values"] + if len(values) <= 0 or "note" not in values: + return False + + # Get Note text + note_value = values["note"] + if note_value.lower().strip() == "": + return { + "success": True, + "message": "Note was not entered. Skipping" + } + + # Get User + event_source = event["source"] + user_info = event_source.get("user") or {} + user_id = user_info.get("id") + user = None + if user_id: + user = session.query( + 'User where id is "{}"'.format(user_id) + ).first() + + if not user: + return { + "success": False, + "message": "Couldn't get user information." 
+ } + + # Logging message preparation + # - username + username = user.get("username") or "N/A" + + # - AssetVersion ids + asset_version_ids_str = ",".join([entity["id"] for entity in entities]) + + # Base note data + note_data = { + "content": note_value, + "author": user + } + + # Get category + category_id = values["category"] + if category_id == self._none_category: + category_id = None + + category_name = None + if category_id is not None: + category = session.query( + "select id, name from NoteCategory where id is \"{}\"".format( + category_id + ) + ).first() + if category: + note_data["category"] = category + category_name = category["name"] + + category_msg = "" + if category_name: + category_msg = " with category: \"{}\"".format(category_name) + + self.log.warning(( + "Creating note{} as User \"{}\" on " + "AssetVersions: {} with value \"{}\"" + ).format(category_msg, username, asset_version_ids_str, note_value)) + + # Create notes for entities + for entity in entities: + new_note = session.create("Note", note_data) + entity["notes"].append(new_note) + session.commit() + return True + + +def register(session): + '''Register plugin. Called when used as an plugin.''' + + MultipleNotesServer(session).register() diff --git a/openpype/modules/ftrack/event_handlers_server/event_push_frame_values_to_task.py b/openpype/modules/ftrack/event_handlers_server/event_push_frame_values_to_task.py index c0b3137455..81719258e1 100644 --- a/openpype/modules/ftrack/event_handlers_server/event_push_frame_values_to_task.py +++ b/openpype/modules/ftrack/event_handlers_server/event_push_frame_values_to_task.py @@ -2,7 +2,10 @@ import collections import datetime import ftrack_api -from openpype.modules.ftrack.lib import BaseEvent +from openpype.modules.ftrack.lib import ( + BaseEvent, + query_custom_attributes +) class PushFrameValuesToTaskEvent(BaseEvent): @@ -55,10 +58,6 @@ class PushFrameValuesToTaskEvent(BaseEvent): if entity_info.get("entityType") != "task": continue - # Skip `Task` entity type - if entity_info["entity_type"].lower() == "task": - continue - # Care only about changes of status changes = entity_info.get("changes") if not changes: @@ -74,6 +73,14 @@ class PushFrameValuesToTaskEvent(BaseEvent): if project_id is None: continue + # Skip `Task` entity type if parent didn't change + if entity_info["entity_type"].lower() == "task": + if ( + "parent_id" not in changes + or changes["parent_id"]["new"] is None + ): + continue + if project_id not in entities_info_by_project_id: entities_info_by_project_id[project_id] = [] entities_info_by_project_id[project_id].append(entity_info) @@ -117,11 +124,24 @@ class PushFrameValuesToTaskEvent(BaseEvent): )) return + interest_attributes = set(interest_attributes) + interest_entity_types = set(interest_entity_types) + + # Separate value changes and task parent changes + _entities_info = [] + task_parent_changes = [] + for entity_info in entities_info: + if entity_info["entity_type"].lower() == "task": + task_parent_changes.append(entity_info) + else: + _entities_info.append(entity_info) + entities_info = _entities_info + # Filter entities info with changes interesting_data, changed_keys_by_object_id = self.filter_changes( session, event, entities_info, interest_attributes ) - if not interesting_data: + if not interesting_data and not task_parent_changes: return # Prepare object types @@ -131,6 +151,289 @@ class PushFrameValuesToTaskEvent(BaseEvent): name_low = object_type["name"].lower() object_types_by_name[name_low] = object_type + # NOTE it would be 
nice to check if `interesting_data` do not contain + # value changs of tasks that were created or moved + # - it is a complex way how to find out + if interesting_data: + self.process_attribute_changes( + session, object_types_by_name, + interesting_data, changed_keys_by_object_id, + interest_entity_types, interest_attributes + ) + + if task_parent_changes: + self.process_task_parent_change( + session, object_types_by_name, task_parent_changes, + interest_entity_types, interest_attributes + ) + + def process_task_parent_change( + self, session, object_types_by_name, task_parent_changes, + interest_entity_types, interest_attributes + ): + """Push custom attribute values if task parent has changed. + + Parent is changed if task is created or if is moved under different + entity. We don't care about all task changes only about those that + have it's parent in interest types (from settings). + + Tasks hierarchical value should be unset or set based on parents + real hierarchical value and non hierarchical custom attribute value + should be set to hierarchical value. + """ + # Store task ids which were created or moved under parent with entity + # type defined in settings (interest_entity_types). + task_ids = set() + # Store parent ids of matching task ids + matching_parent_ids = set() + # Store all entity ids of all entities to be able query hierarchical + # values. + whole_hierarchy_ids = set() + # Store parent id of each entity id + parent_id_by_entity_id = {} + for entity_info in task_parent_changes: + # Ignore entities with less parents than 2 + # NOTE entity itself is also part of "parents" value + parents = entity_info.get("parents") or [] + if len(parents) < 2: + continue + + parent_info = parents[1] + # Check if parent has entity type we care about. 
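The inheritance rule described in the docstring boils down to walking up a parent map until an explicitly set hierarchical value is found; a simplified sketch with made-up ids and values:

```python
def resolve_hierarchical_value(entity_id, parent_id_by_entity_id, values_by_entity_id):
    """Return the first explicitly set value found on the entity or its ancestors."""
    current_id = entity_id
    while current_id is not None:
        value = values_by_entity_id.get(current_id)
        if value is not None:
            return value
        current_id = parent_id_by_entity_id.get(current_id)
    return None

parents = {"task-1": "shot-1", "shot-1": "seq-1", "seq-1": "project"}
values = {"seq-1": 1001}  # frameStart set only on the sequence level
# resolve_hierarchical_value("task-1", parents, values) -> 1001
```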
+ if parent_info["entity_type"] not in interest_entity_types: + continue + + task_ids.add(entity_info["entityId"]) + matching_parent_ids.add(parent_info["entityId"]) + + # Store whole hierarchi of task entity + prev_id = None + for item in parents: + item_id = item["entityId"] + whole_hierarchy_ids.add(item_id) + + if prev_id is None: + prev_id = item_id + continue + + parent_id_by_entity_id[prev_id] = item_id + if item["entityType"] == "show": + break + prev_id = item_id + + # Just skip if nothing is interesting for our settings + if not matching_parent_ids: + return + + # Query object type ids of parent ids for custom attribute + # definitions query + entities = session.query( + "select object_type_id from TypedContext where id in ({})".format( + self.join_query_keys(matching_parent_ids) + ) + ) + + # Prepare task object id + task_object_id = object_types_by_name["task"]["id"] + + # All object ids for which we're querying custom attribute definitions + object_type_ids = set() + object_type_ids.add(task_object_id) + for entity in entities: + object_type_ids.add(entity["object_type_id"]) + + attrs_by_obj_id, hier_attrs = self.attrs_configurations( + session, object_type_ids, interest_attributes + ) + + # Skip if all task attributes are not available + task_attrs = attrs_by_obj_id.get(task_object_id) + if not task_attrs: + return + + # Skip attributes that is not in both hierarchical and nonhierarchical + # TODO be able to push values if hierarchical is available + for key in interest_attributes: + if key not in hier_attrs: + task_attrs.pop(key, None) + + elif key not in task_attrs: + hier_attrs.pop(key) + + # Skip if nothing remained + if not task_attrs: + return + + # Do some preparations for custom attribute values query + attr_key_by_id = {} + nonhier_id_by_key = {} + hier_attr_ids = [] + for key, attr_id in hier_attrs.items(): + attr_key_by_id[attr_id] = key + hier_attr_ids.append(attr_id) + + conf_ids = list(hier_attr_ids) + for key, attr_id in task_attrs.items(): + attr_key_by_id[attr_id] = key + nonhier_id_by_key[key] = attr_id + conf_ids.append(attr_id) + + # Query custom attribute values + # - result does not contain values for all entities only result of + # query callback to ftrack server + result = query_custom_attributes( + session, conf_ids, whole_hierarchy_ids + ) + + # Prepare variables where result will be stored + # - hierachical values should not contain attribute with value by + # default + hier_values_by_entity_id = { + entity_id: {} + for entity_id in whole_hierarchy_ids + } + # - real values of custom attributes + values_by_entity_id = { + entity_id: { + attr_id: None + for attr_id in conf_ids + } + for entity_id in whole_hierarchy_ids + } + for item in result: + attr_id = item["configuration_id"] + entity_id = item["entity_id"] + value = item["value"] + + values_by_entity_id[entity_id][attr_id] = value + + if attr_id in hier_attr_ids and value is not None: + hier_values_by_entity_id[entity_id][attr_id] = value + + # Prepare values for all task entities + # - going through all parents and storing first value value + # - store None to those that are already known that do not have set + # value at all + for task_id in tuple(task_ids): + for attr_id in hier_attr_ids: + entity_ids = [] + value = None + entity_id = task_id + while value is None: + entity_value = hier_values_by_entity_id[entity_id] + if attr_id in entity_value: + value = entity_value[attr_id] + if value is None: + break + + if value is None: + entity_ids.append(entity_id) + + entity_id = 
parent_id_by_entity_id.get(entity_id) + if entity_id is None: + break + + for entity_id in entity_ids: + hier_values_by_entity_id[entity_id][attr_id] = value + + # Prepare changes to commit + changes = [] + for task_id in tuple(task_ids): + parent_id = parent_id_by_entity_id[task_id] + for attr_id in hier_attr_ids: + attr_key = attr_key_by_id[attr_id] + nonhier_id = nonhier_id_by_key[attr_key] + + # Real value of hierarchical attribute on parent + # - If is none then should be unset + real_parent_value = values_by_entity_id[parent_id][attr_id] + # Current hierarchical value of a task + # - Will be compared to real parent value + hier_value = hier_values_by_entity_id[task_id][attr_id] + + # Parent value that can be inherited from it's parent entity + parent_value = hier_values_by_entity_id[parent_id][attr_id] + # Task value of nonhierarchical custom attribute + nonhier_value = values_by_entity_id[task_id][nonhier_id] + + if real_parent_value != hier_value: + changes.append({ + "new_value": real_parent_value, + "attr_id": attr_id, + "entity_id": task_id, + "attr_key": attr_key + }) + + if parent_value != nonhier_value: + changes.append({ + "new_value": parent_value, + "attr_id": nonhier_id, + "entity_id": task_id, + "attr_key": attr_key + }) + + self._commit_changes(session, changes) + + def _commit_changes(self, session, changes): + uncommited_changes = False + for idx, item in enumerate(changes): + new_value = item["new_value"] + attr_id = item["attr_id"] + entity_id = item["entity_id"] + attr_key = item["attr_key"] + + entity_key = collections.OrderedDict() + entity_key["configuration_id"] = attr_id + entity_key["entity_id"] = entity_id + self._cached_changes.append({ + "attr_key": attr_key, + "entity_id": entity_id, + "value": new_value, + "time": datetime.datetime.now() + }) + if new_value is None: + op = ftrack_api.operation.DeleteEntityOperation( + "CustomAttributeValue", + entity_key + ) + else: + op = ftrack_api.operation.UpdateEntityOperation( + "ContextCustomAttributeValue", + entity_key, + "value", + ftrack_api.symbol.NOT_SET, + new_value + ) + + session.recorded_operations.push(op) + self.log.info(( + "Changing Custom Attribute \"{}\" to value" + " \"{}\" on entity: {}" + ).format(attr_key, new_value, entity_id)) + + if (idx + 1) % 20 == 0: + uncommited_changes = False + try: + session.commit() + except Exception: + session.rollback() + self.log.warning( + "Changing of values failed.", exc_info=True + ) + else: + uncommited_changes = True + if uncommited_changes: + try: + session.commit() + except Exception: + session.rollback() + self.log.warning("Changing of values failed.", exc_info=True) + + def process_attribute_changes( + self, session, object_types_by_name, + interesting_data, changed_keys_by_object_id, + interest_entity_types, interest_attributes + ): # Prepare task object id task_object_id = object_types_by_name["task"]["id"] @@ -216,13 +519,13 @@ class PushFrameValuesToTaskEvent(BaseEvent): task_entity_ids.add(task_id) parent_id_by_task_id[task_id] = task_entity["parent_id"] - self.finalize( + self.finalize_attribute_changes( session, interesting_data, changed_keys, attrs_by_obj_id, hier_attrs, task_entity_ids, parent_id_by_task_id ) - def finalize( + def finalize_attribute_changes( self, session, interesting_data, changed_keys, attrs_by_obj_id, hier_attrs, task_entity_ids, parent_id_by_task_id @@ -248,6 +551,7 @@ class PushFrameValuesToTaskEvent(BaseEvent): session, attr_ids, entity_ids, task_entity_ids, hier_attrs ) + changes = [] for entity_id, current_values in 
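The `_commit_changes` helper batches recorded operations and commits every 20 changes so one failing value does not discard the whole set; the same idea in isolation (a sketch, not the event handler itself):

```python
def commit_in_batches(session, operations, batch_size=20, logger=None):
    """Push operations to the session and commit them in fixed-size batches."""
    pending = False
    for index, operation in enumerate(operations, 1):
        session.recorded_operations.push(operation)
        pending = True
        if index % batch_size == 0:
            try:
                session.commit()
            except Exception:
                session.rollback()
                if logger:
                    logger.warning("Changing of values failed.", exc_info=True)
            pending = False
    if pending:
        try:
            session.commit()
        except Exception:
            session.rollback()
            if logger:
                logger.warning("Changing of values failed.", exc_info=True)
```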
current_values_by_id.items(): parent_id = parent_id_by_task_id.get(entity_id) if not parent_id: @@ -272,39 +576,13 @@ class PushFrameValuesToTaskEvent(BaseEvent): if new_value == old_value: continue - entity_key = collections.OrderedDict() - entity_key["configuration_id"] = attr_id - entity_key["entity_id"] = entity_id - self._cached_changes.append({ - "attr_key": attr_key, + changes.append({ + "new_value": new_value, + "attr_id": attr_id, "entity_id": entity_id, - "value": new_value, - "time": datetime.datetime.now() + "attr_key": attr_key }) - if new_value is None: - op = ftrack_api.operation.DeleteEntityOperation( - "CustomAttributeValue", - entity_key - ) - else: - op = ftrack_api.operation.UpdateEntityOperation( - "ContextCustomAttributeValue", - entity_key, - "value", - ftrack_api.symbol.NOT_SET, - new_value - ) - - session.recorded_operations.push(op) - self.log.info(( - "Changing Custom Attribute \"{}\" to value" - " \"{}\" on entity: {}" - ).format(attr_key, new_value, entity_id)) - try: - session.commit() - except Exception: - session.rollback() - self.log.warning("Changing of values failed.", exc_info=True) + self._commit_changes(session, changes) def filter_changes( self, session, event, entities_info, interest_attributes diff --git a/openpype/modules/ftrack/event_handlers_server/event_version_to_task_statuses.py b/openpype/modules/ftrack/event_handlers_server/event_version_to_task_statuses.py index d20e2ff5a8..f215bedcc2 100644 --- a/openpype/modules/ftrack/event_handlers_server/event_version_to_task_statuses.py +++ b/openpype/modules/ftrack/event_handlers_server/event_version_to_task_statuses.py @@ -66,15 +66,7 @@ class VersionToTaskStatus(BaseEvent): )) return - _status_mapping = event_settings["mapping"] - if not _status_mapping: - self.log.debug( - "Project \"{}\" does not have set mapping for {}".format( - project_name, self.__class__.__name__ - ) - ) - return - + _status_mapping = event_settings["mapping"] or {} status_mapping = { key.lower(): value for key, value in _status_mapping.items() diff --git a/openpype/modules/ftrack/event_handlers_user/action_create_project_structure.py b/openpype/modules/ftrack/event_handlers_user/action_create_project_structure.py index d7ac866e42..035a1c60de 100644 --- a/openpype/modules/ftrack/event_handlers_user/action_create_project_structure.py +++ b/openpype/modules/ftrack/event_handlers_user/action_create_project_structure.py @@ -1,5 +1,6 @@ import os import re +import json from openpype.modules.ftrack.lib import BaseAction, statics_icon from openpype.api import Anatomy, get_project_settings @@ -84,6 +85,9 @@ class CreateProjectFolders(BaseAction): } try: + if isinstance(project_folder_structure, str): + project_folder_structure = json.loads(project_folder_structure) + # Get paths based on presets basic_paths = self.get_path_items(project_folder_structure) self.create_folders(basic_paths, project_entity) diff --git a/openpype/modules/ftrack/lib/__init__.py b/openpype/modules/ftrack/lib/__init__.py index ce6d5284b6..9dc2d67279 100644 --- a/openpype/modules/ftrack/lib/__init__.py +++ b/openpype/modules/ftrack/lib/__init__.py @@ -13,7 +13,8 @@ from .custom_attributes import ( default_custom_attributes_definition, app_definitions_from_app_manager, tool_definitions_from_app_manager, - get_openpype_attr + get_openpype_attr, + query_custom_attributes ) from . 
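The `CreateProjectFolders` change accounts for the folder-structure setting now being stored as a JSON string (see the `project_folder_structure` default later in this patch); a small sketch of the tolerant parsing, with a shortened example structure:

```python
import json

def load_folder_structure(project_folder_structure):
    """Accept either a dict or its JSON-string form and return a dict."""
    if isinstance(project_folder_structure, str):
        return json.loads(project_folder_structure)
    return project_folder_structure

setting = '{"__project_root__": {"prod": {}, "editorial": {}}}'
structure = load_folder_structure(setting)
# structure["__project_root__"] -> {"prod": {}, "editorial": {}}
```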
import avalon_sync @@ -37,6 +38,7 @@ __all__ = ( "app_definitions_from_app_manager", "tool_definitions_from_app_manager", "get_openpype_attr", + "query_custom_attributes", "avalon_sync", diff --git a/openpype/modules/ftrack/lib/avalon_sync.py b/openpype/modules/ftrack/lib/avalon_sync.py index 5d1da005dc..2458308af5 100644 --- a/openpype/modules/ftrack/lib/avalon_sync.py +++ b/openpype/modules/ftrack/lib/avalon_sync.py @@ -402,16 +402,18 @@ class SyncEntitiesFactory: items = [] items.append({ "type": "label", - "value": "# Can't access Custom attribute <{}>".format( - CUST_ATTR_ID_KEY - ) + "value": ( + "# Can't access Custom attribute: \"{}\"" + ).format(CUST_ATTR_ID_KEY) }) items.append({ "type": "label", "value": ( - "

- Check if user \"{}\" has permissions" - " to access the Custom attribute

" - ).format(self._api_key) + "

- Check if your User and API key have permissions" + " to access the Custom attribute." + "
Username:\"{}\"" + "
API key:\"{}\"

" + ).format(self._api_user, self._api_key) }) items.append({ "type": "label", diff --git a/openpype/modules/ftrack/lib/custom_attributes.py b/openpype/modules/ftrack/lib/custom_attributes.py index f6b82c90b1..53facd4ab2 100644 --- a/openpype/modules/ftrack/lib/custom_attributes.py +++ b/openpype/modules/ftrack/lib/custom_attributes.py @@ -81,3 +81,60 @@ def get_openpype_attr(session, split_hierarchical=True, query_keys=None): return custom_attributes, hier_custom_attributes return custom_attributes + + +def join_query_keys(keys): + """Helper to join keys to query.""" + return ",".join(["\"{}\"".format(key) for key in keys]) + + +def query_custom_attributes(session, conf_ids, entity_ids, table_name=None): + """Query custom attribute values from ftrack database. + + Using ftrack call method result may differ based on used table name and + version of ftrack server. + + Args: + session(ftrack_api.Session): Connected ftrack session. + conf_id(list, set, tuple): Configuration(attribute) ids which are + queried. + entity_ids(list, set, tuple): Entity ids for which are values queried. + table_name(str): Table nam from which values are queried. Not + recommended to change until you know what it means. + """ + output = [] + # Just skip + if not conf_ids or not entity_ids: + return output + + if table_name is None: + table_name = "ContextCustomAttributeValue" + + # Prepare values to query + attributes_joined = join_query_keys(conf_ids) + attributes_len = len(conf_ids) + + # Query values in chunks + chunk_size = int(5000 / attributes_len) + # Make sure entity_ids is `list` for chunk selection + entity_ids = list(entity_ids) + for idx in range(0, len(entity_ids), chunk_size): + entity_ids_joined = join_query_keys( + entity_ids[idx:idx + chunk_size] + ) + + call_expr = [{ + "action": "query", + "expression": ( + "select value, entity_id from {}" + " where entity_id in ({}) and configuration_id in ({})" + ).format(table_name, entity_ids_joined, attributes_joined) + }] + if hasattr(session, "call"): + [result] = session.call(call_expr) + else: + [result] = session._call(call_expr) + + for item in result["data"]: + output.append(item) + return output diff --git a/openpype/modules/ftrack/plugins/publish/collect_ftrack_family.py b/openpype/modules/ftrack/plugins/publish/collect_ftrack_family.py index e6daed9a33..8464a43ef7 100644 --- a/openpype/modules/ftrack/plugins/publish/collect_ftrack_family.py +++ b/openpype/modules/ftrack/plugins/publish/collect_ftrack_family.py @@ -1,29 +1,107 @@ +""" +Requires: + none + +Provides: + instance -> families ([]) +""" import pyblish.api +import avalon.api + +from openpype.lib.plugin_tools import filter_profiles -class CollectFtrackFamilies(pyblish.api.InstancePlugin): - """Collect family for ftrack publishing - - Add ftrack family to those instance that should be published to ftrack - +class CollectFtrackFamily(pyblish.api.InstancePlugin): """ + Adds explicitly 'ftrack' to families to upload instance to FTrack. - order = pyblish.api.CollectorOrder + 0.3 - label = 'Add ftrack family' - families = ["model", - "setdress", - "model", - "animation", - "look", - "rig", - "camera" - ] - hosts = ["maya"] + Uses selection by combination of hosts/families/tasks names via + profiles resolution. + + Triggered everywhere, checks instance against configured. 
+ + Checks advanced filtering which works on 'families' not on main + 'family', as some variants dynamically resolves addition of ftrack + based on 'families' (editorial drives it by presence of 'review') + """ + label = "Collect Ftrack Family" + order = pyblish.api.CollectorOrder + 0.4998 + + profiles = None def process(self, instance): + if not self.profiles: + self.log.warning("No profiles present for adding Ftrack family") + return - # make ftrack publishable - if instance.data.get('families'): - instance.data['families'].append('ftrack') + task_name = instance.data.get("task", + avalon.api.Session["AVALON_TASK"]) + host_name = avalon.api.Session["AVALON_APP"] + family = instance.data["family"] + + filtering_criteria = { + "hosts": host_name, + "families": family, + "tasks": task_name + } + profile = filter_profiles(self.profiles, filtering_criteria, + logger=self.log) + + if profile: + families = instance.data.get("families") + add_ftrack_family = profile["add_ftrack_family"] + + additional_filters = profile.get("advanced_filtering") + if additional_filters: + add_ftrack_family = self._get_add_ftrack_f_from_addit_filters( + additional_filters, + families, + add_ftrack_family + ) + + if add_ftrack_family: + self.log.debug("Adding ftrack family for '{}'". + format(instance.data.get("family"))) + + if families and "ftrack" not in families: + instance.data["families"].append("ftrack") + else: + instance.data["families"] = ["ftrack"] else: - instance.data['families'] = ['ftrack'] + self.log.debug("Instance '{}' doesn't match any profile".format( + instance.data.get("family"))) + + def _get_add_ftrack_f_from_addit_filters(self, + additional_filters, + families, + add_ftrack_family): + """ + Compares additional filters - working on instance's families. + + Triggered for more detailed filtering when main family matches, + but content of 'families' actually matter. + (For example 'review' in 'families' should result in adding to + Ftrack) + + Args: + additional_filters (dict) - from Setting + families (list) - subfamilies + add_ftrack_family (bool) - add ftrack to families if True + """ + override_filter = None + override_filter_value = -1 + for additional_filter in additional_filters: + filter_families = set(additional_filter["families"]) + valid = filter_families <= set(families) # issubset + if not valid: + continue + + value = len(filter_families) + if value > override_filter_value: + override_filter = additional_filter + override_filter_value = value + + if override_filter: + add_ftrack_family = override_filter["add_ftrack_family"] + + return add_ftrack_family diff --git a/openpype/modules/ftrack/tray/login_dialog.py b/openpype/modules/ftrack/tray/login_dialog.py index a6360a7380..cc5689bee5 100644 --- a/openpype/modules/ftrack/tray/login_dialog.py +++ b/openpype/modules/ftrack/tray/login_dialog.py @@ -1,6 +1,6 @@ import os import requests -from avalon import style +from openpype import style from openpype.modules.ftrack.lib import credentials from . 
import login_tools from openpype import resources @@ -46,8 +46,11 @@ class CredentialsDialog(QtWidgets.QDialog): self.user_label = QtWidgets.QLabel("Username:") self.api_label = QtWidgets.QLabel("API Key:") - self.ftsite_input = QtWidgets.QLineEdit() - self.ftsite_input.setReadOnly(True) + self.ftsite_input = QtWidgets.QLabel() + self.ftsite_input.setTextInteractionFlags( + QtCore.Qt.TextBrowserInteraction + ) + # self.ftsite_input.setReadOnly(True) self.ftsite_input.setCursor(QtGui.QCursor(QtCore.Qt.IBeamCursor)) self.user_input = QtWidgets.QLineEdit() diff --git a/openpype/modules/log_viewer/tray/app.py b/openpype/modules/log_viewer/tray/app.py index c0e180c8a1..9aab37cd20 100644 --- a/openpype/modules/log_viewer/tray/app.py +++ b/openpype/modules/log_viewer/tray/app.py @@ -1,6 +1,6 @@ from Qt import QtWidgets, QtCore from .widgets import LogsWidget, OutputWidget -from avalon import style +from openpype import style class LogsWindow(QtWidgets.QWidget): @@ -14,7 +14,7 @@ class LogsWindow(QtWidgets.QWidget): main_layout = QtWidgets.QHBoxLayout() - log_splitter = QtWidgets.QSplitter() + log_splitter = QtWidgets.QSplitter(self) log_splitter.setOrientation(QtCore.Qt.Horizontal) log_splitter.addWidget(logs_widget) log_splitter.addWidget(log_detail) diff --git a/openpype/modules/log_viewer/tray/widgets.py b/openpype/modules/log_viewer/tray/widgets.py index cd0df283bf..b9a8499a4c 100644 --- a/openpype/modules/log_viewer/tray/widgets.py +++ b/openpype/modules/log_viewer/tray/widgets.py @@ -83,7 +83,6 @@ class CustomCombo(QtWidgets.QWidget): self.setLayout(layout) - # toolmenu.selection_changed.connect(self.on_selection_changed) toolmenu.selection_changed.connect(self.selection_changed) self.toolbutton = toolbutton @@ -119,7 +118,6 @@ class LogsWidget(QtWidgets.QWidget): filter_layout = QtWidgets.QHBoxLayout() - # user_filter = SearchComboBox(self, "Users") user_filter = CustomCombo("Users", self) users = model.dbcon.distinct("username") user_filter.populate(users) @@ -128,21 +126,18 @@ class LogsWidget(QtWidgets.QWidget): proxy_model.update_users_filter(users) level_filter = CustomCombo("Levels", self) - # levels = [(level, True) for level in model.dbcon.distinct("level")] levels = model.dbcon.distinct("level") level_filter.addItems(levels) level_filter.selection_changed.connect(self._level_changed) detail_widget.update_level_filter(levels) - spacer = QtWidgets.QWidget() - icon = qtawesome.icon("fa.refresh", color="white") refresh_btn = QtWidgets.QPushButton(icon, "") filter_layout.addWidget(user_filter) filter_layout.addWidget(level_filter) - filter_layout.addWidget(spacer, 1) + filter_layout.addStretch(1) filter_layout.addWidget(refresh_btn) view = QtWidgets.QTreeView(self) diff --git a/openpype/modules/muster/widget_login.py b/openpype/modules/muster/widget_login.py index d9af4cb99f..231b52c6bd 100644 --- a/openpype/modules/muster/widget_login.py +++ b/openpype/modules/muster/widget_login.py @@ -1,13 +1,12 @@ import os from Qt import QtCore, QtGui, QtWidgets -from avalon import style -from openpype import resources +from openpype import resources, style class MusterLogin(QtWidgets.QWidget): SIZE_W = 300 - SIZE_H = 130 + SIZE_H = 150 loginSignal = QtCore.Signal(object, object, object) @@ -123,7 +122,6 @@ class MusterLogin(QtWidgets.QWidget): super().keyPressEvent(key_event) def setError(self, msg): - self.error_label.setText(msg) self.error_label.show() @@ -149,6 +147,17 @@ class MusterLogin(QtWidgets.QWidget): def save_credentials(self, username, password): self.module.get_auth_token(username, 
password) + def showEvent(self, event): + super(MusterLogin, self).showEvent(event) + + # Make btns same width + max_width = max( + self.btn_ok.sizeHint().width(), + self.btn_cancel.sizeHint().width() + ) + self.btn_ok.setMinimumWidth(max_width) + self.btn_cancel.setMinimumWidth(max_width) + def closeEvent(self, event): event.ignore() self._close_widget() diff --git a/openpype/modules/settings_action.py b/openpype/modules/settings_action.py index 1035dc0dcd..9db4a252bc 100644 --- a/openpype/modules/settings_action.py +++ b/openpype/modules/settings_action.py @@ -114,6 +114,7 @@ class LocalSettingsAction(PypeModule, ITrayAction): # Tray attributes self.settings_window = None + self._first_trigger = True def connect_with_modules(self, *_a, **_kw): return @@ -153,6 +154,9 @@ class LocalSettingsAction(PypeModule, ITrayAction): self.settings_window.raise_() self.settings_window.activateWindow() - # Reset content if was not visible - if not was_visible: + # Do not reset if it's first trigger of action + if self._first_trigger: + self._first_trigger = False + elif not was_visible: + # Reset content if was not visible self.settings_window.reset() diff --git a/openpype/modules/timers_manager/widget_user_idle.py b/openpype/modules/timers_manager/widget_user_idle.py index 8b614f6a13..25b4e56650 100644 --- a/openpype/modules/timers_manager/widget_user_idle.py +++ b/openpype/modules/timers_manager/widget_user_idle.py @@ -1,6 +1,5 @@ -from avalon import style from Qt import QtCore, QtGui, QtWidgets -from openpype import resources +from openpype import resources, style class WidgetUserIdle(QtWidgets.QWidget): diff --git a/openpype/plugins/publish/collect_otio_subset_resources.py b/openpype/plugins/publish/collect_otio_subset_resources.py index fa376eeac7..010430a303 100644 --- a/openpype/plugins/publish/collect_otio_subset_resources.py +++ b/openpype/plugins/publish/collect_otio_subset_resources.py @@ -40,6 +40,7 @@ class CollectOcioSubsetResources(pyblish.api.InstancePlugin): otio_clip = instance.data["otioClip"] otio_avalable_range = otio_clip.available_range() media_fps = otio_avalable_range.start_time.rate + available_duration = otio_avalable_range.duration.value # get available range trimmed with processed retimes retimed_attributes = editorial.get_media_range_with_retimes( @@ -68,6 +69,8 @@ class CollectOcioSubsetResources(pyblish.api.InstancePlugin): a_frame_start_h, (a_frame_end_h - a_frame_start_h + 1), media_fps ) + trimmed_duration = trimmed_media_range_h.duration.value + self.log.debug("trimmed_media_range_h: {}".format( trimmed_media_range_h)) self.log.debug("a_frame_start_h: {}".format( @@ -150,12 +153,18 @@ class CollectOcioSubsetResources(pyblish.api.InstancePlugin): repre = self._create_representation( frame_start, frame_end, collection=collection) else: + _trim = False dirname, filename = os.path.split(media_ref.target_url) self.staging_dir = dirname + if trimmed_duration < available_duration: + self.log.debug("Ready for Trimming") + instance.data["families"].append("trim") + instance.data["otioTrimmingRange"] = trimmed_media_range_h + _trim = True self.log.debug(filename) repre = self._create_representation( - frame_start, frame_end, file=filename) + frame_start, frame_end, file=filename, trim=_trim) if repre: # add representation to instance data @@ -196,7 +205,7 @@ class CollectOcioSubsetResources(pyblish.api.InstancePlugin): "frameStart": start, "frameEnd": end, }) - return representation_data + if kwargs.get("file"): file = kwargs.get("file") ext = os.path.splitext(file)[-1] @@ 
-207,4 +216,9 @@ class CollectOcioSubsetResources(pyblish.api.InstancePlugin): "frameStart": start, "frameEnd": end, }) - return representation_data + + if kwargs.get("trim") is True: + representation_data.update({ + "tags": ["trim"] + }) + return representation_data diff --git a/openpype/plugins/publish/collect_rendered_files.py b/openpype/plugins/publish/collect_rendered_files.py index edf9b50b92..2f55f2bdb5 100644 --- a/openpype/plugins/publish/collect_rendered_files.py +++ b/openpype/plugins/publish/collect_rendered_files.py @@ -87,11 +87,14 @@ class CollectRenderedFiles(pyblish.api.ContextPlugin): instance = self._context.create_instance( instance_data.get("subset") ) - self.log.info("Filling stagignDir...") + self.log.info("Filling stagingDir...") self._fill_staging_dir(instance_data, anatomy) instance.data.update(instance_data) + # stash render job id for later validation + instance.data["render_job_id"] = data.get("job").get("_id") + representations = [] for repre_data in instance_data.get("representations") or []: self._fill_staging_dir(repre_data, anatomy) diff --git a/openpype/plugins/publish/extract_otio_trimming_video.py b/openpype/plugins/publish/extract_otio_trimming_video.py new file mode 100644 index 0000000000..fdb7c4b096 --- /dev/null +++ b/openpype/plugins/publish/extract_otio_trimming_video.py @@ -0,0 +1,125 @@ +""" +Requires: + instance -> otioTrimmingRange + instance -> representations + +""" + +import os +from pyblish import api +import openpype +from copy import deepcopy + + +class ExtractOTIOTrimmingVideo(openpype.api.Extractor): + """ + Trimming video file longer then required lenght + + """ + order = api.ExtractorOrder + label = "Extract OTIO trim longer video" + families = ["trim"] + hosts = ["resolve", "hiero"] + + def process(self, instance): + self.staging_dir = self.staging_dir(instance) + otio_trim_range = instance.data["otioTrimmingRange"] + representations = instance.data["representations"] + self.log.debug("otio_trim_range: {}".format(otio_trim_range)) + self.log.debug("self.staging_dir: {}".format(self.staging_dir)) + + # get corresponding representation + for _repre in representations: + if "trim" not in _repre.get("tags", []): + continue + + input_file = _repre["files"] + input_file_path = os.path.normpath(os.path.join( + _repre["stagingDir"], input_file + )) + self.log.debug("input_file_path: {}".format(input_file_path)) + + # trim via ffmpeg + new_file = self._ffmpeg_trim_seqment( + input_file_path, otio_trim_range) + + # prepare new representation data + repre_data = deepcopy(_repre) + # remove tags as we dont need them + repre_data.pop("tags") + repre_data["stagingDir"] = self.staging_dir + repre_data["files"] = new_file + + # romove `trim` tagged representation + representations.remove(_repre) + representations.append(repre_data) + self.log.debug(repre_data) + + self.log.debug("representations: {}".format(representations)) + + def _ffmpeg_trim_seqment(self, input_file_path, otio_range): + """ + Trim seqment of video file. + + Using ffmpeg to trim video to desired length. 
+ + Args: + input_file_path (str): path string + otio_range (opentime.TimeRange): range to trim to + + """ + # get rendering app path + ffmpeg_path = openpype.lib.get_ffmpeg_tool_path("ffmpeg") + + # create path to destination + output_path = self._get_ffmpeg_output(input_file_path) + + # start command list + command = ['"{}"'.format(ffmpeg_path)] + + video_path = input_file_path + frame_start = otio_range.start_time.value + input_fps = otio_range.start_time.rate + frame_duration = (otio_range.duration.value + 1) + sec_start = openpype.lib.frames_to_secons(frame_start, input_fps) + sec_duration = openpype.lib.frames_to_secons(frame_duration, input_fps) + + # form command for rendering gap files + command.extend([ + "-ss {}".format(sec_start), + "-t {}".format(sec_duration), + "-i \"{}\"".format(video_path), + "-c copy", + output_path + ]) + + # execute + self.log.debug("Executing: {}".format(" ".join(command))) + output = openpype.api.run_subprocess( + " ".join(command), logger=self.log + ) + self.log.debug("Output: {}".format(output)) + + return os.path.basename(output_path) + + def _get_ffmpeg_output(self, file_path): + """ + Returning ffmpeg output command arguments. + + Arguments" + file_path (str): path string + + Returns: + str: output_path is path + + """ + basename = os.path.basename(file_path) + name, ext = os.path.splitext(basename) + + output_file = "{}_{}{}".format( + name, + "trimmed", + ext + ) + # create path to destination + return os.path.join(self.staging_dir, output_file) diff --git a/openpype/plugins/publish/extract_review.py b/openpype/plugins/publish/extract_review.py index e1e24af3ea..de54b554e3 100644 --- a/openpype/plugins/publish/extract_review.py +++ b/openpype/plugins/publish/extract_review.py @@ -975,20 +975,52 @@ class ExtractReview(pyblish.api.InstancePlugin): # NOTE Skipped using instance's resolution full_input_path_single_file = temp_data["full_input_path_single_file"] - input_data = ffprobe_streams( - full_input_path_single_file, self.log - )[0] - input_width = int(input_data["width"]) - input_height = int(input_data["height"]) + try: + streams = ffprobe_streams( + full_input_path_single_file, self.log + ) + except Exception: + raise AssertionError(( + "FFprobe couldn't read information about input file: \"{}\"" + ).format(full_input_path_single_file)) + + # Try to find first stream with defined 'width' and 'height' + # - this is to avoid order of streams where audio can be as first + # - there may be a better way (checking `codec_type`?) 
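The trimming extractor earlier in this patch effectively performs a stream-copy cut; assuming `ffmpeg` is available on PATH, an equivalent standalone command construction looks roughly like this (paths and times are examples):

```python
import subprocess

def trim_video(ffmpeg_path, input_path, output_path, start_sec, duration_sec):
    """Cut a segment out of a video without re-encoding (stream copy)."""
    cmd = [
        ffmpeg_path,
        "-ss", str(start_sec),      # seek to the segment start
        "-t", str(duration_sec),    # keep only this many seconds
        "-i", input_path,
        "-c", "copy",               # copy streams, no re-encode
        output_path,
    ]
    subprocess.check_call(cmd)

# trim_video("ffmpeg", "plate.mov", "plate_trimmed.mov", 2.0, 5.0)
```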
+ input_width = None + input_height = None + for stream in streams: + if "width" in stream and "height" in stream: + input_width = int(stream["width"]) + input_height = int(stream["height"]) + break + + # Raise exception of any stream didn't define input resolution + if input_width is None: + raise AssertionError(( + "FFprobe couldn't read resolution from input file: \"{}\"" + ).format(full_input_path_single_file)) # NOTE Setting only one of `width` or `heigth` is not allowed # - settings value can't have None but has value of 0 output_width = output_def.get("width") or None output_height = output_def.get("height") or None + # Overscal color + overscan_color_value = "black" + overscan_color = output_def.get("overscan_color") + if overscan_color: + bg_red, bg_green, bg_blue, _ = overscan_color + overscan_color_value = "#{0:0>2X}{1:0>2X}{2:0>2X}".format( + bg_red, bg_green, bg_blue + ) + self.log.debug("Overscan color: `{}`".format(overscan_color_value)) + # Convert overscan value video filters overscan_crop = output_def.get("overscan_crop") - overscan = OverscanCrop(input_width, input_height, overscan_crop) + overscan = OverscanCrop( + input_width, input_height, overscan_crop, overscan_color_value + ) overscan_crop_filters = overscan.video_filters() # Add overscan filters to filters if are any and modify input # resolution by it's values @@ -1158,9 +1190,10 @@ class ExtractReview(pyblish.api.InstancePlugin): "scale={}x{}:flags=lanczos".format( width_scale, height_scale ), - "pad={}:{}:{}:{}:black".format( + "pad={}:{}:{}:{}:{}".format( output_width, output_height, - width_half_pad, height_half_pad + width_half_pad, height_half_pad, + overscan_color_value ), "setsar=1" ]) @@ -1707,12 +1740,15 @@ class OverscanCrop: item_regex = re.compile(r"([\+\-])?([0-9]+)(.+)?") relative_source_regex = re.compile(r"%([\+\-])") - def __init__(self, input_width, input_height, string_value): + def __init__( + self, input_width, input_height, string_value, overscal_color=None + ): # Make sure that is not None string_value = string_value or "" self.input_width = input_width self.input_height = input_height + self.overscal_color = overscal_color width, height = self._convert_string_to_values(string_value) self._width_value = width @@ -1767,16 +1803,22 @@ class OverscanCrop: elif width >= self.input_width and height >= self.input_height: output.append( - "pad={}:{}:(iw-ow)/2:(ih-oh)/2".format(width, height) + "pad={}:{}:(iw-ow)/2:(ih-oh)/2:{}".format( + width, height, self.overscal_color + ) ) elif width > self.input_width and height < self.input_height: output.append("crop=iw:{}".format(height)) - output.append("pad={}:ih:(iw-ow)/2:(ih-oh)/2".format(width)) + output.append("pad={}:ih:(iw-ow)/2:(ih-oh)/2:{}".format( + width, self.overscal_color + )) elif width < self.input_width and height > self.input_height: output.append("crop={}:ih".format(width)) - output.append("pad=iw:{}:(iw-ow)/2:(ih-oh)/2".format(height)) + output.append("pad=iw:{}:(iw-ow)/2:(ih-oh)/2:{}".format( + height, self.overscal_color + )) return output diff --git a/openpype/plugins/publish/extract_review_slate.py b/openpype/plugins/publish/extract_review_slate.py index fb36a930fb..2b07d7db74 100644 --- a/openpype/plugins/publish/extract_review_slate.py +++ b/openpype/plugins/publish/extract_review_slate.py @@ -26,9 +26,23 @@ class ExtractReviewSlate(openpype.api.Extractor): slate_path = inst_data.get("slateFrame") ffmpeg_path = openpype.lib.get_ffmpeg_tool_path("ffmpeg") - slate_stream = openpype.lib.ffprobe_streams(slate_path, self.log)[0] - 
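The overscan colour handling converts the RGBA value from settings into the hex form ffmpeg's `pad` filter accepts; a quick illustration of that conversion with sample values only:

```python
def overscan_color_to_hex(overscan_color):
    """Convert an RGBA settings value such as [255, 128, 0, 255] to '#FF8000'."""
    red, green, blue, _alpha = overscan_color
    return "#{0:0>2X}{1:0>2X}{2:0>2X}".format(red, green, blue)

# overscan_color_to_hex([255, 128, 0, 255]) -> "#FF8000"
# The result ends up in filters such as "pad=1920:1080:0:0:#FF8000".
```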
slate_width = slate_stream["width"] - slate_height = slate_stream["height"] + slate_streams = openpype.lib.ffprobe_streams(slate_path, self.log) + # Try to find first stream with defined 'width' and 'height' + # - this is to avoid order of streams where audio can be as first + # - there may be a better way (checking `codec_type`?)+ + slate_width = None + slate_height = None + for slate_stream in slate_streams: + if "width" in slate_stream and "height" in slate_stream: + slate_width = int(slate_stream["width"]) + slate_height = int(slate_stream["height"]) + break + + # Raise exception of any stream didn't define input resolution + if slate_width is None: + raise AssertionError(( + "FFprobe couldn't read resolution from input file: \"{}\"" + ).format(slate_path)) if "reviewToWidth" in inst_data: use_legacy_code = True @@ -309,16 +323,29 @@ class ExtractReviewSlate(openpype.api.Extractor): ) return codec_args - codec_name = streams[0].get("codec_name") + # Try to find first stream that is not an audio + no_audio_stream = None + for stream in streams: + if stream.get("codec_type") != "audio": + no_audio_stream = stream + break + + if no_audio_stream is None: + self.log.warning(( + "Couldn't find stream that is not an audio from file \"{}\"" + ).format(full_input_path)) + return codec_args + + codec_name = no_audio_stream.get("codec_name") if codec_name: codec_args.append("-codec:v {}".format(codec_name)) - profile_name = streams[0].get("profile") + profile_name = no_audio_stream.get("profile") if profile_name: profile_name = profile_name.replace(" ", "_").lower() codec_args.append("-profile:v {}".format(profile_name)) - pix_fmt = streams[0].get("pix_fmt") + pix_fmt = no_audio_stream.get("pix_fmt") if pix_fmt: codec_args.append("-pix_fmt {}".format(pix_fmt)) return codec_args diff --git a/openpype/plugins/publish/integrate_ftrack_component_overwrite.py b/openpype/plugins/publish/integrate_ftrack_component_overwrite.py deleted file mode 100644 index 047fd8462c..0000000000 --- a/openpype/plugins/publish/integrate_ftrack_component_overwrite.py +++ /dev/null @@ -1,21 +0,0 @@ -import pyblish.api - - -class IntegrateFtrackComponentOverwrite(pyblish.api.InstancePlugin): - """ - Set `component_overwrite` to True on all instances `ftrackComponentsList` - """ - - order = pyblish.api.IntegratorOrder + 0.49 - label = 'Overwrite ftrack created versions' - families = ["clip"] - optional = True - active = False - - def process(self, instance): - component_list = instance.data['ftrackComponentsList'] - - for cl in component_list: - cl['component_overwrite'] = True - self.log.debug('Component {} overwriting'.format( - cl['component_data']['name'])) diff --git a/openpype/plugins/publish/validate_editorial_asset_name.py b/openpype/plugins/publish/validate_editorial_asset_name.py new file mode 100644 index 0000000000..ccea42dc37 --- /dev/null +++ b/openpype/plugins/publish/validate_editorial_asset_name.py @@ -0,0 +1,112 @@ +import pyblish.api +from avalon import io +from pprint import pformat + + +class ValidateEditorialAssetName(pyblish.api.ContextPlugin): + """ Validating if editorial's asset names are not already created in db. + + Checking variations of names with different size of caps or with + or without underscores. 
+ """ + + order = pyblish.api.ValidatorOrder + label = "Validate Asset Name" + + def process(self, context): + + asset_and_parents = self.get_parents(context) + + if not io.Session: + io.install() + + db_assets = list(io.find( + {"type": "asset"}, {"name": 1, "data.parents": 1})) + self.log.debug("__ db_assets: {}".format(db_assets)) + + asset_db_docs = { + str(e["name"]): e["data"]["parents"] for e in db_assets} + + self.log.debug("__ project_entities: {}".format( + pformat(asset_db_docs))) + + assets_missing_name = {} + assets_wrong_parent = {} + for asset in asset_and_parents.keys(): + if asset not in asset_db_docs.keys(): + # add to some nonexistent list for next layer of check + assets_missing_name.update({asset: asset_and_parents[asset]}) + continue + + if asset_and_parents[asset] != asset_db_docs[asset]: + # add to some nonexistent list for next layer of check + assets_wrong_parent.update({ + asset: { + "required": asset_and_parents[asset], + "already_in_db": asset_db_docs[asset] + } + }) + continue + + self.log.info("correct asset: {}".format(asset)) + + if assets_missing_name: + wrong_names = {} + self.log.debug( + ">> assets_missing_name: {}".format(assets_missing_name)) + for asset in assets_missing_name.keys(): + _asset = asset.lower().replace("_", "") + if _asset in [a.lower().replace("_", "") + for a in asset_db_docs.keys()]: + wrong_names.update({ + "required_name": asset, + "used_variants_in_db": [ + a for a in asset_db_docs.keys() + if a.lower().replace("_", "") == _asset + ] + }) + + if wrong_names: + self.log.debug( + ">> wrong_names: {}".format(wrong_names)) + raise Exception( + "Some already existing asset name variants `{}`".format( + wrong_names)) + + if assets_wrong_parent: + self.log.debug( + ">> assets_wrong_parent: {}".format(assets_wrong_parent)) + raise Exception( + "Wrong parents on assets `{}`".format(assets_wrong_parent)) + + def _get_all_assets(self, input_dict): + """ Returns asset names in list. + + List contains all asset names including parents + """ + for key in input_dict.keys(): + # check if child key is available + if input_dict[key].get("childs"): + # loop deeper + self._get_all_assets( + input_dict[key]["childs"]) + else: + self.all_testing_assets.append(key) + + def get_parents(self, context): + return_dict = {} + for instance in context: + asset = instance.data["asset"] + families = instance.data.get("families", []) + [ + instance.data["family"] + ] + # filter out non-shot families + if "shot" not in families: + continue + + parents = instance.data["parents"] + + return_dict.update({ + asset: [p["entity_name"] for p in parents] + }) + return return_dict diff --git a/openpype/pype_commands.py b/openpype/pype_commands.py index f86ece6799..7c47d8c613 100644 --- a/openpype/pype_commands.py +++ b/openpype/pype_commands.py @@ -46,16 +46,18 @@ class PypeCommands: standalonepublish.main() @staticmethod - def publish(paths): + def publish(paths, targets=None): """Start headless publishing. Publish use json from passed paths argument. Args: paths (list): Paths to jsons. + targets (string): What module should be targeted + (to choose validator for example) Raises: - RuntimeError: When there is no pathto process. + RuntimeError: When there is no path to process. 
""" if not any(paths): raise RuntimeError("No publish paths specified") @@ -82,6 +84,10 @@ class PypeCommands: pyblish.api.register_target("filesequence") pyblish.api.register_host("shell") + if targets: + for target in targets: + pyblish.api.register_target(target) + os.environ["OPENPYPE_PUBLISH_DATA"] = os.pathsep.join(paths) log.info("Running publish ...") diff --git a/openpype/settings/defaults/project_anatomy/imageio.json b/openpype/settings/defaults/project_anatomy/imageio.json index ff16c22663..fcebc876f5 100644 --- a/openpype/settings/defaults/project_anatomy/imageio.json +++ b/openpype/settings/defaults/project_anatomy/imageio.json @@ -78,6 +78,10 @@ { "name": "colorspace", "value": "linear" + }, + { + "name": "create_directories", + "value": "True" } ] }, @@ -114,6 +118,10 @@ { "name": "colorspace", "value": "linear" + }, + { + "name": "create_directories", + "value": "True" } ] } diff --git a/openpype/settings/defaults/project_settings/deadline.json b/openpype/settings/defaults/project_settings/deadline.json index 03f3e19a64..2dba20d63c 100644 --- a/openpype/settings/defaults/project_settings/deadline.json +++ b/openpype/settings/defaults/project_settings/deadline.json @@ -1,5 +1,16 @@ { "publish": { + "ValidateExpectedFiles": { + "enabled": true, + "active": true, + "allow_user_override": true, + "families": [ + "render" + ], + "targets": [ + "deadline" + ] + }, "MayaSubmitDeadline": { "enabled": true, "optional": false, @@ -8,7 +19,9 @@ "use_published": true, "asset_dependencies": true, "group": "none", - "limit": [] + "limit": [], + "jobInfo": {}, + "pluginInfo": {} }, "NukeSubmitDeadline": { "enabled": true, @@ -22,6 +35,8 @@ "group": "", "department": "", "use_gpu": true, + "env_allowed_keys": [], + "env_search_replace_values": {}, "limit_groups": {} }, "HarmonySubmitDeadline": { diff --git a/openpype/settings/defaults/project_settings/ftrack.json b/openpype/settings/defaults/project_settings/ftrack.json index b964ce07c3..7cf5568662 100644 --- a/openpype/settings/defaults/project_settings/ftrack.json +++ b/openpype/settings/defaults/project_settings/ftrack.json @@ -200,6 +200,106 @@ } }, "publish": { + "CollectFtrackFamily": { + "enabled": true, + "profiles": [ + { + "hosts": [ + "standalonepublisher" + ], + "families": [], + "tasks": [], + "add_ftrack_family": true, + "advanced_filtering": [] + }, + { + "hosts": [ + "standalonepublisher" + ], + "families": [ + "matchmove", + "shot" + ], + "tasks": [], + "add_ftrack_family": false, + "advanced_filtering": [] + }, + { + "hosts": [ + "standalonepublisher" + ], + "families": [ + "plate" + ], + "tasks": [], + "add_ftrack_family": false, + "advanced_filtering": [ + { + "families": [ + "clip", + "review" + ], + "add_ftrack_family": true + } + ] + }, + { + "hosts": [ + "maya" + ], + "families": [ + "model", + "setdress", + "animation", + "look", + "rig", + "camera" + ], + "tasks": [], + "add_ftrack_family": true, + "advanced_filtering": [] + }, + { + "hosts": [ + "tvpaint" + ], + "families": [ + "renderPass" + ], + "tasks": [], + "add_ftrack_family": false, + "advanced_filtering": [] + }, + { + "hosts": [ + "tvpaint" + ], + "families": [], + "tasks": [], + "add_ftrack_family": true, + "advanced_filtering": [] + }, + { + "hosts": [ + "nuke" + ], + "families": [ + "write", + "render" + ], + "tasks": [], + "add_ftrack_family": false, + "advanced_filtering": [ + { + "families": [ + "review" + ], + "add_ftrack_family": true + } + ] + } + ] + }, "IntegrateFtrackNote": { "enabled": true, "note_with_intent_template": "{intent}: 
{comment}", diff --git a/openpype/settings/defaults/project_settings/global.json b/openpype/settings/defaults/project_settings/global.json index 4351f18a60..43053c38c0 100644 --- a/openpype/settings/defaults/project_settings/global.json +++ b/openpype/settings/defaults/project_settings/global.json @@ -56,6 +56,12 @@ ] }, "overscan_crop": "", + "overscan_color": [ + 0, + 0, + 0, + 255 + ], "width": 0, "height": 0, "bg_color": [ @@ -166,6 +172,8 @@ "deadline_group": "", "deadline_chunk_size": 1, "deadline_priority": 50, + "publishing_script": "", + "skip_integration_repre_list": [], "aov_filter": { "maya": [ ".+(?:\\.|_)([Bb]eauty)(?:\\.|_).*" @@ -178,6 +186,10 @@ ".*" ] } + }, + "CleanUp": { + "paterns": [], + "remove_temp_renders": false } }, "tools": { @@ -226,6 +238,17 @@ ], "tasks": [], "template": "{family}{Task}_{Render_layer}_{Render_pass}" + }, + { + "families": [ + "review", + "workfile" + ], + "hosts": [ + "tvpaint" + ], + "tasks": [], + "template": "{family}{Task}" } ] }, @@ -254,28 +277,7 @@ } } }, - "project_folder_structure": { - "__project_root__": { - "prod": {}, - "resources": { - "footage": { - "plates": {}, - "offline": {} - }, - "audio": {}, - "art_dept": {} - }, - "editorial": {}, - "assets[ftrack.Library]": { - "characters[ftrack]": {}, - "locations[ftrack]": {} - }, - "shots[ftrack.Sequence]": { - "scripts": {}, - "editorial[ftrack.Folder]": {} - } - } - }, + "project_folder_structure": "{\"__project_root__\": {\"prod\": {}, \"resources\": {\"footage\": {\"plates\": {}, \"offline\": {}}, \"audio\": {}, \"art_dept\": {}}, \"editorial\": {}, \"assets[ftrack.Library]\": {\"characters[ftrack]\": {}, \"locations[ftrack]\": {}}, \"shots[ftrack.Sequence]\": {\"scripts\": {}, \"editorial[ftrack.Folder]\": {}}}}", "sync_server": { "enabled": true, "config": { diff --git a/openpype/settings/defaults/project_settings/harmony.json b/openpype/settings/defaults/project_settings/harmony.json index 0c7a35c058..3cc175ae72 100644 --- a/openpype/settings/defaults/project_settings/harmony.json +++ b/openpype/settings/defaults/project_settings/harmony.json @@ -5,6 +5,11 @@ ".*" ] }, + "ValidateContainers": { + "enabled": true, + "optional": true, + "active": true + }, "ValidateSceneSettings": { "enabled": true, "optional": true, diff --git a/openpype/settings/defaults/project_settings/houdini.json b/openpype/settings/defaults/project_settings/houdini.json new file mode 100644 index 0000000000..811a446e59 --- /dev/null +++ b/openpype/settings/defaults/project_settings/houdini.json @@ -0,0 +1,9 @@ +{ + "publish": { + "ValidateContainers": { + "enabled": true, + "optional": true, + "active": true + } + } +} \ No newline at end of file diff --git a/openpype/settings/defaults/project_settings/maya.json b/openpype/settings/defaults/project_settings/maya.json index ba685ae502..284a1a0040 100644 --- a/openpype/settings/defaults/project_settings/maya.json +++ b/openpype/settings/defaults/project_settings/maya.json @@ -127,6 +127,11 @@ "CollectMayaRender": { "sync_workfile_version": false }, + "ValidateContainers": { + "enabled": true, + "optional": true, + "active": true + }, "ValidateShaderName": { "enabled": false, "regex": "(?P.*)_(.*)_SHD" diff --git a/openpype/settings/defaults/project_settings/nuke.json b/openpype/settings/defaults/project_settings/nuke.json index 3736f67268..136f1d6b42 100644 --- a/openpype/settings/defaults/project_settings/nuke.json +++ b/openpype/settings/defaults/project_settings/nuke.json @@ -10,16 +10,33 @@ }, "create": { "CreateWriteRender": { - "fpath_template": 
"{work}/renders/nuke/{subset}/{subset}.{frame}.{ext}" + "fpath_template": "{work}/renders/nuke/{subset}/{subset}.{frame}.{ext}", + "defaults": [ + "Main", + "Mask" + ] }, "CreateWritePrerender": { - "fpath_template": "{work}/prerenders/nuke/{subset}/{subset}.{frame}.{ext}" + "fpath_template": "{work}/prerenders/nuke/{subset}/{subset}.{frame}.{ext}", + "use_range_limit": true, + "defaults": [ + "Key01", + "Bg01", + "Fg01", + "Branch01", + "Part01" + ] } }, "publish": { "PreCollectNukeInstances": { "sync_workfile_version": true }, + "ValidateContainers": { + "enabled": true, + "optional": true, + "active": true + }, "ValidateKnobs": { "enabled": false, "knobs": { diff --git a/openpype/settings/defaults/project_settings/photoshop.json b/openpype/settings/defaults/project_settings/photoshop.json index b306a757a6..4c36e4bd49 100644 --- a/openpype/settings/defaults/project_settings/photoshop.json +++ b/openpype/settings/defaults/project_settings/photoshop.json @@ -7,6 +7,11 @@ } }, "publish": { + "ValidateContainers": { + "enabled": true, + "optional": true, + "active": true + }, "ExtractImage": { "formats": [ "png", diff --git a/openpype/settings/defaults/project_settings/standalonepublisher.json b/openpype/settings/defaults/project_settings/standalonepublisher.json index 979f5285d3..f08212934d 100644 --- a/openpype/settings/defaults/project_settings/standalonepublisher.json +++ b/openpype/settings/defaults/project_settings/standalonepublisher.json @@ -105,16 +105,23 @@ "label": "Render", "family": "render", "icon": "image", - "defaults": ["Animation", "Lighting", "Lookdev", "Compositing"], + "defaults": [ + "Animation", + "Lighting", + "Lookdev", + "Compositing" + ], "help": "Rendered images or video files" }, "create_mov_batch": { - "name": "mov_batch", - "label": "Batch Mov", - "family": "render_mov_batch", - "icon": "image", - "defaults": ["Main"], - "help": "Process multiple Mov files and publish them for layout and comp." + "name": "mov_batch", + "label": "Batch Mov", + "family": "render_mov_batch", + "icon": "image", + "defaults": [ + "Main" + ], + "help": "Process multiple Mov files and publish them for layout and comp." 
}, "__dynamic_keys_labels__": { "create_workfile": "Workfile", @@ -154,10 +161,62 @@ "ExtractThumbnailSP": { "ffmpeg_args": { "input": [ - "-gamma 2.2" + "-apply_trc gamma22" ], "output": [] } + }, + "CollectEditorial": { + "source_dir": "", + "extensions": [ + "mov", + "mp4" + ] + }, + "CollectHierarchyInstance": { + "shot_rename_template": "{project[code]}_{_sequence_}_{_shot_}", + "shot_rename_search_patterns": { + "_sequence_": "(\\d{4})(?=_\\d{4})", + "_shot_": "(\\d{4})(?!_\\d{4})" + }, + "shot_add_hierarchy": { + "parents_path": "{project}/{folder}/{sequence}", + "parents": { + "project": "{project[name]}", + "sequence": "{_sequence_}", + "folder": "shots" + } + }, + "shot_add_tasks": {} + }, + "shot_add_tasks": { + "custom_start_frame": 0, + "timeline_frame_start": 900000, + "timeline_frame_offset": 0, + "subsets": { + "referenceMain": { + "family": "review", + "families": [ + "clip" + ], + "extensions": [ + "mp4" + ], + "version": 0, + "keepSequence": false + }, + "audioMain": { + "family": "audio", + "families": [ + "clip" + ], + "extensions": [ + "wav" + ], + "version": 0, + "keepSequence": false + } + } } } } \ No newline at end of file diff --git a/openpype/settings/defaults/project_settings/tvpaint.json b/openpype/settings/defaults/project_settings/tvpaint.json index b4f3b315ec..763802a73f 100644 --- a/openpype/settings/defaults/project_settings/tvpaint.json +++ b/openpype/settings/defaults/project_settings/tvpaint.json @@ -1,5 +1,13 @@ { "publish": { + "ExtractSequence": { + "review_bg": [ + 255, + 255, + 255, + 255 + ] + }, "ValidateProjectSettings": { "enabled": true, "optional": true, diff --git a/openpype/settings/defaults/system_settings/applications.json b/openpype/settings/defaults/system_settings/applications.json index 020924db67..842c294599 100644 --- a/openpype/settings/defaults/system_settings/applications.json +++ b/openpype/settings/defaults/system_settings/applications.json @@ -807,7 +807,6 @@ "environment": {}, "variants": { "2-83": { - "use_python_2": false, "executables": { "windows": [ "C:\\Program Files\\Blender Foundation\\Blender 2.83\\blender.exe" @@ -829,7 +828,6 @@ "environment": {} }, "2-90": { - "use_python_2": false, "executables": { "windows": [ "C:\\Program Files\\Blender Foundation\\Blender 2.90\\blender.exe" @@ -851,7 +849,6 @@ "environment": {} }, "2-91": { - "use_python_2": false, "executables": { "windows": [ "C:\\Program Files\\Blender Foundation\\Blender 2.91\\blender.exe" @@ -891,7 +888,6 @@ "20": { "enabled": true, "variant_label": "20", - "use_python_2": false, "executables": { "windows": [], "darwin": [], @@ -907,7 +903,6 @@ "17": { "enabled": true, "variant_label": "17", - "use_python_2": false, "executables": { "windows": [], "darwin": [ @@ -932,7 +927,6 @@ "environment": {}, "variants": { "animation_11-64bits": { - "use_python_2": false, "executables": { "windows": [ "C:\\Program Files\\TVPaint Developpement\\TVPaint Animation 11 (64bits)\\TVPaint Animation 11 (64bits).exe" @@ -948,7 +942,6 @@ "environment": {} }, "animation_11-32bits": { - "use_python_2": false, "executables": { "windows": [ "C:\\Program Files (x86)\\TVPaint Developpement\\TVPaint Animation 11 (32bits)\\TVPaint Animation 11 (32bits).exe" @@ -982,7 +975,6 @@ "2020": { "enabled": true, "variant_label": "2020", - "use_python_2": false, "executables": { "windows": [ "C:\\Program Files\\Adobe\\Adobe Photoshop 2020\\Photoshop.exe" @@ -1000,7 +992,6 @@ "2021": { "enabled": true, "variant_label": "2021", - "use_python_2": false, "executables": { "windows": [ "C:\\Program 
Files\\Adobe\\Adobe Photoshop 2021\\Photoshop.exe" @@ -1030,7 +1021,6 @@ "2020": { "enabled": true, "variant_label": "2020", - "use_python_2": false, "executables": { "windows": [ "" @@ -1048,7 +1038,6 @@ "2021": { "enabled": true, "variant_label": "2021", - "use_python_2": false, "executables": { "windows": [ "C:\\Program Files\\Adobe\\Adobe After Effects 2021\\Support Files\\AfterFX.exe" @@ -1095,22 +1084,12 @@ "unreal": { "enabled": true, "label": "Unreal Editor", - "icon": "{}/app_icons/ue4.png'", + "icon": "{}/app_icons/ue4.png", "host_name": "unreal", "environment": {}, "variants": { "4-26": { "use_python_2": false, - "executables": { - "windows": [], - "darwin": [], - "linux": [] - }, - "arguments": { - "windows": [], - "darwin": [], - "linux": [] - }, "environment": {} } } diff --git a/openpype/settings/entities/__init__.py b/openpype/settings/entities/__init__.py index f64ca1e98d..c0eef15e69 100644 --- a/openpype/settings/entities/__init__.py +++ b/openpype/settings/entities/__init__.py @@ -101,6 +101,7 @@ from .color_entity import ColorEntity from .enum_entity import ( BaseEnumEntity, EnumEntity, + HostsEnumEntity, AppsEnumEntity, ToolsEnumEntity, TaskTypeEnumEntity, @@ -110,6 +111,7 @@ from .enum_entity import ( from .list_entity import ListEntity from .dict_immutable_keys_entity import DictImmutableKeysEntity from .dict_mutable_keys_entity import DictMutableKeysEntity +from .dict_conditional import DictConditionalEntity from .anatomy_entities import AnatomyEntity @@ -153,6 +155,7 @@ __all__ = ( "BaseEnumEntity", "EnumEntity", + "HostsEnumEntity", "AppsEnumEntity", "ToolsEnumEntity", "TaskTypeEnumEntity", @@ -164,5 +167,7 @@ __all__ = ( "DictMutableKeysEntity", + "DictConditionalEntity", + "AnatomyEntity" ) diff --git a/openpype/settings/entities/base_entity.py b/openpype/settings/entities/base_entity.py index c6bff1ff47..b4ebe885f5 100644 --- a/openpype/settings/entities/base_entity.py +++ b/openpype/settings/entities/base_entity.py @@ -136,6 +136,7 @@ class BaseItemEntity(BaseEntity): # Override state defines which values are used, saved and how. # TODO convert to private attribute self._override_state = OverrideState.NOT_DEFINED + self._ignore_missing_defaults = None # These attributes may change values during existence of an object # Default value, studio override values and project override values @@ -279,8 +280,13 @@ class BaseItemEntity(BaseEntity): self, "Dynamic entity can't require restart." ) + @abstractproperty + def root_key(self): + """Root is represented as this dictionary key.""" + pass + @abstractmethod - def set_override_state(self, state): + def set_override_state(self, state, ignore_missing_defaults): """Set override state and trigger it on children. Method discard all changes in hierarchy and use values, metadata @@ -290,8 +296,15 @@ class BaseItemEntity(BaseEntity): Should start on root entity and when triggered then must be called on all entities in hierarchy. + Argument `ignore_missing_defaults` should be used when entity has + children that are not saved or used all the time but override statu + must be changed and children must have any default value. + Args: state (OverrideState): State to which should be data changed. + ignore_missing_defaults (bool): Ignore missing default values. + Entity won't raise `DefaultsNotDefined` and + `StudioDefaultsNotDefined`. 
""" pass @@ -866,6 +879,10 @@ class ItemEntity(BaseItemEntity): """Call save on root item.""" self.root_item.save() + @property + def root_key(self): + return self.root_item.root_key + def schema_validations(self): if not self.label and self.use_label_wrap: reason = ( @@ -885,7 +902,11 @@ class ItemEntity(BaseItemEntity): def create_schema_object(self, *args, **kwargs): """Reference method for creation of entities defined in RootEntity.""" - return self.root_item.create_schema_object(*args, **kwargs) + return self.schema_hub.create_schema_object(*args, **kwargs) + + @property + def schema_hub(self): + return self.root_item.schema_hub def get_entity_from_path(self, path): return self.root_item.get_entity_from_path(path) diff --git a/openpype/settings/entities/color_entity.py b/openpype/settings/entities/color_entity.py index 7a1b1d9848..dfaa75e761 100644 --- a/openpype/settings/entities/color_entity.py +++ b/openpype/settings/entities/color_entity.py @@ -12,6 +12,17 @@ class ColorEntity(InputEntity): def _item_initalization(self): self.valid_value_types = (list, ) self.value_on_not_set = [0, 0, 0, 255] + self.use_alpha = self.schema_data.get("use_alpha", True) + + def set_override_state(self, *args, **kwargs): + super(ColorEntity, self).set_override_state(*args, **kwargs) + value = self._current_value + if ( + not self.use_alpha + and isinstance(value, list) + and len(value) == 4 + ): + value[3] = 255 def convert_to_valid_type(self, value): """Conversion to valid type. @@ -51,4 +62,8 @@ class ColorEntity(InputEntity): ).format(value) raise BaseInvalidValueType(reason, self.path) new_value.append(item) + + # Make sure + if not self.use_alpha: + new_value[3] = 255 return new_value diff --git a/openpype/settings/entities/dict_conditional.py b/openpype/settings/entities/dict_conditional.py new file mode 100644 index 0000000000..96065b670e --- /dev/null +++ b/openpype/settings/entities/dict_conditional.py @@ -0,0 +1,707 @@ +import copy + +from .lib import ( + OverrideState, + NOT_SET +) +from openpype.settings.constants import ( + METADATA_KEYS, + M_OVERRIDEN_KEY, + KEY_REGEX +) +from . import ( + BaseItemEntity, + ItemEntity, + GUIEntity +) +from .exceptions import ( + SchemaDuplicatedKeys, + EntitySchemaError, + InvalidKeySymbols +) + + +class DictConditionalEntity(ItemEntity): + """Entity represents dictionay with only one persistent key definition. + + The persistent key is enumerator which define rest of children under + dictionary. There is not possibility of shared children. + + Entity's keys can't be removed or added. But they may change based on + the persistent key. If you're change value manually (key by key) make sure + you'll change value of the persistent key as first. It is recommended to + use `set` method which handle this for you. + + It is possible to use entity similar way as `dict` object. Returned values + are not real settings values but entities representing the value. 
+ """ + schema_types = ["dict-conditional"] + _default_label_wrap = { + "use_label_wrap": False, + "collapsible": False, + "collapsed": True + } + + def __getitem__(self, key): + """Return entity inder key.""" + if key == self.enum_key: + return self.enum_entity + return self.non_gui_children[self.current_enum][key] + + def __setitem__(self, key, value): + """Set value of item under key.""" + if key == self.enum_key: + child_obj = self.enum_entity + else: + child_obj = self.non_gui_children[self.current_enum][key] + child_obj.set(value) + + def __iter__(self): + """Iter through keys.""" + for key in self.keys(): + yield key + + def __contains__(self, key): + """Check if key is available.""" + if key == self.enum_key: + return True + return key in self.non_gui_children[self.current_enum] + + def get(self, key, default=None): + """Safe entity getter by key.""" + if key == self.enum_key: + return self.enum_entity + return self.non_gui_children[self.current_enum].get(key, default) + + def keys(self): + """Entity's keys.""" + keys = list(self.non_gui_children[self.current_enum].keys()) + keys.insert(0, [self.enum_key]) + return keys + + def values(self): + """Children entities.""" + values = [ + self.enum_entity + ] + for child_entiy in self.non_gui_children[self.current_enum].values(): + values.append(child_entiy) + return values + + def items(self): + """Children entities paired with their key (key, value).""" + items = [ + (self.enum_key, self.enum_entity) + ] + for key, value in self.non_gui_children[self.current_enum].items(): + items.append((key, value)) + return items + + def set(self, value): + """Set value.""" + new_value = self.convert_to_valid_type(value) + # First change value of enum key if available + if self.enum_key in new_value: + self.enum_entity.set(new_value.pop(self.enum_key)) + + for _key, _value in new_value.items(): + self.non_gui_children[self.current_enum][_key].set(_value) + + def _item_initalization(self): + self._default_metadata = NOT_SET + self._studio_override_metadata = NOT_SET + self._project_override_metadata = NOT_SET + + self._ignore_child_changes = False + + # `current_metadata` are still when schema is loaded + # - only metadata stored with dict item are gorup overrides in + # M_OVERRIDEN_KEY + self._current_metadata = {} + self._metadata_are_modified = False + + # Entity must be group or in group + if ( + self.group_item is None + and not self.is_dynamic_item + and not self.is_in_dynamic_item + ): + self.is_group = True + + # Children are stored by key as keys are immutable and are defined by + # schema + self.valid_value_types = (dict, ) + self.children = {} + self.non_gui_children = {} + self.gui_layout = {} + + if self.is_dynamic_item: + self.require_key = False + + self.enum_key = self.schema_data.get("enum_key") + self.enum_label = self.schema_data.get("enum_label") + self.enum_children = self.schema_data.get("enum_children") + + self.enum_entity = None + + self.highlight_content = self.schema_data.get( + "highlight_content", False + ) + self.show_borders = self.schema_data.get("show_borders", True) + + self._add_children() + + @property + def current_enum(self): + """Current value of enum entity. + + This value define what children are used. 
+ """ + if self.enum_entity is None: + return None + return self.enum_entity.value + + def schema_validations(self): + """Validation of schema data.""" + # Enum key must be defined + if self.enum_key is None: + raise EntitySchemaError(self, "Key 'enum_key' is not set.") + + # Validate type of enum children + if not isinstance(self.enum_children, list): + raise EntitySchemaError( + self, "Key 'enum_children' must be a list. Got: {}".format( + str(type(self.enum_children)) + ) + ) + + # Without defined enum children entity has nothing to do + if not self.enum_children: + raise EntitySchemaError(self, ( + "Key 'enum_children' have empty value. Entity can't work" + " without children definitions." + )) + + children_def_keys = [] + for children_def in self.enum_children: + if not isinstance(children_def, dict): + raise EntitySchemaError(( + "Children definition under key 'enum_children' must" + " be a dictionary." + )) + + if "key" not in children_def: + raise EntitySchemaError(( + "Children definition under key 'enum_children' miss" + " 'key' definition." + )) + # We don't validate regex of these keys because they will be stored + # as value at the end. + key = children_def["key"] + if key in children_def_keys: + # TODO this hould probably be different exception? + raise SchemaDuplicatedKeys(self, key) + children_def_keys.append(key) + + # Validate key duplications per each enum item + for children in self.children.values(): + children_keys = set() + children_keys.add(self.enum_key) + for child_entity in children: + if not isinstance(child_entity, BaseItemEntity): + continue + elif child_entity.key not in children_keys: + children_keys.add(child_entity.key) + else: + raise SchemaDuplicatedKeys(self, child_entity.key) + + # Enum key must match key regex + if not KEY_REGEX.match(self.enum_key): + raise InvalidKeySymbols(self.path, self.enum_key) + + # Validate all remaining keys with key regex + for children_by_key in self.non_gui_children.values(): + for key in children_by_key.keys(): + if not KEY_REGEX.match(key): + raise InvalidKeySymbols(self.path, key) + + super(DictConditionalEntity, self).schema_validations() + # Trigger schema validation on children entities + for children in self.children.values(): + for child_obj in children: + child_obj.schema_validations() + + def on_change(self): + """Update metadata on change and pass change to parent.""" + self._update_current_metadata() + + for callback in self.on_change_callbacks: + callback() + self.parent.on_child_change(self) + + def on_child_change(self, child_obj): + """Trigger on change callback if child changes are not ignored.""" + if self._ignore_child_changes: + return + + if ( + child_obj is self.enum_entity + or child_obj in self.children[self.current_enum] + ): + self.on_change() + + def _add_children(self): + """Add children from schema data and repare enum items. + + Each enum item must have defined it's children. None are shared across + all enum items. + + Nice to have: Have ability to have shared keys across all enum items. + + All children are stored by their enum item. 
+ """ + # Skip if are not defined + # - schema validations should raise and exception + if not self.enum_children or not self.enum_key: + return + + valid_enum_items = [] + for item in self.enum_children: + if isinstance(item, dict) and "key" in item: + valid_enum_items.append(item) + + enum_items = [] + for item in valid_enum_items: + item_key = item["key"] + item_label = item.get("label") or item_key + enum_items.append({item_key: item_label}) + + if not enum_items: + return + + # Create Enum child first + enum_key = self.enum_key or "invalid" + enum_schema = { + "type": "enum", + "multiselection": False, + "enum_items": enum_items, + "key": enum_key, + "label": self.enum_label or enum_key + } + + enum_entity = self.create_schema_object(enum_schema, self) + self.enum_entity = enum_entity + + # Create children per each enum item + for item in valid_enum_items: + item_key = item["key"] + # Make sure all keys have set value in these variables + # - key 'children' is optional + self.non_gui_children[item_key] = {} + self.children[item_key] = [] + self.gui_layout[item_key] = [] + + children = item.get("children") or [] + for children_schema in children: + child_obj = self.create_schema_object(children_schema, self) + self.children[item_key].append(child_obj) + self.gui_layout[item_key].append(child_obj) + if isinstance(child_obj, GUIEntity): + continue + + self.non_gui_children[item_key][child_obj.key] = child_obj + + def get_child_path(self, child_obj): + """Get hierarchical path of child entity. + + Child must be entity's direct children. This must be possible to get + for any children even if not from current enum value. + """ + if child_obj is self.enum_entity: + return "/".join([self.path, self.enum_key]) + + result_key = None + for children in self.non_gui_children.values(): + for key, _child_obj in children.items(): + if _child_obj is child_obj: + result_key = key + break + + if result_key is None: + raise ValueError("Didn't found child {}".format(child_obj)) + + return "/".join([self.path, result_key]) + + def _update_current_metadata(self): + current_metadata = {} + for key, child_obj in self.non_gui_children[self.current_enum].items(): + if self._override_state is OverrideState.DEFAULTS: + break + + if not child_obj.is_group: + continue + + if ( + self._override_state is OverrideState.STUDIO + and not child_obj.has_studio_override + ): + continue + + if ( + self._override_state is OverrideState.PROJECT + and not child_obj.has_project_override + ): + continue + + if M_OVERRIDEN_KEY not in current_metadata: + current_metadata[M_OVERRIDEN_KEY] = [] + current_metadata[M_OVERRIDEN_KEY].append(key) + + # Define if current metadata are avaialble for current override state + metadata = NOT_SET + if self._override_state is OverrideState.STUDIO: + metadata = self._studio_override_metadata + + elif self._override_state is OverrideState.PROJECT: + metadata = self._project_override_metadata + + if metadata is NOT_SET: + metadata = {} + + self._metadata_are_modified = current_metadata != metadata + self._current_metadata = current_metadata + + def set_override_state(self, state, ignore_missing_defaults): + # Trigger override state change of root if is not same + if self.root_item.override_state is not state: + self.root_item.set_override_state(state) + return + + # Change has/had override states + self._override_state = state + self._ignore_missing_defaults = ignore_missing_defaults + + # Set override state on enum entity first + self.enum_entity.set_override_state(state, ignore_missing_defaults) 
+ + # Set override state on other enum children + # - these must not raise exception about missing defaults + for children_by_key in self.non_gui_children.values(): + for child_obj in children_by_key.values(): + child_obj.set_override_state(state, True) + + self._update_current_metadata() + + @property + def value(self): + output = { + self.enum_key: self.enum_entity.value + } + for key, child_obj in self.non_gui_children[self.current_enum].items(): + output[key] = child_obj.value + return output + + @property + def has_unsaved_changes(self): + if self._metadata_are_modified: + return True + + return self._child_has_unsaved_changes + + @property + def _child_has_unsaved_changes(self): + if self.enum_entity.has_unsaved_changes: + return True + + for child_obj in self.non_gui_children[self.current_enum].values(): + if child_obj.has_unsaved_changes: + return True + return False + + @property + def has_studio_override(self): + return self._child_has_studio_override + + @property + def _child_has_studio_override(self): + if self._override_state >= OverrideState.STUDIO: + if self.enum_entity.has_studio_override: + return True + + for child_obj in self.non_gui_children[self.current_enum].values(): + if child_obj.has_studio_override: + return True + return False + + @property + def has_project_override(self): + return self._child_has_project_override + + @property + def _child_has_project_override(self): + if self._override_state >= OverrideState.PROJECT: + if self.enum_entity.has_project_override: + return True + + for child_obj in self.non_gui_children[self.current_enum].values(): + if child_obj.has_project_override: + return True + return False + + def settings_value(self): + if self._override_state is OverrideState.NOT_DEFINED: + return NOT_SET + + if self._override_state is OverrideState.DEFAULTS: + children_items = [ + (self.enum_key, self.enum_entity) + ] + for item in self.non_gui_children[self.current_enum].items(): + children_items.append(item) + + output = {} + for key, child_obj in children_items: + child_value = child_obj.settings_value() + if not child_obj.is_file and not child_obj.file_item: + for _key, _value in child_value.items(): + new_key = "/".join([key, _key]) + output[new_key] = _value + else: + output[key] = child_value + return output + + if self.is_group: + if self._override_state is OverrideState.STUDIO: + if not self.has_studio_override: + return NOT_SET + elif self._override_state is OverrideState.PROJECT: + if not self.has_project_override: + return NOT_SET + + output = {} + children_items = [ + (self.enum_key, self.enum_entity) + ] + for item in self.non_gui_children[self.current_enum].items(): + children_items.append(item) + + for key, child_obj in children_items: + value = child_obj.settings_value() + if value is not NOT_SET: + output[key] = value + + if not output: + return NOT_SET + + output.update(self._current_metadata) + return output + + def _prepare_value(self, value): + if value is NOT_SET or self.enum_key not in value: + return NOT_SET, NOT_SET + + enum_value = value.get(self.enum_key) + if enum_value not in self.non_gui_children: + return NOT_SET, NOT_SET + + # Create copy of value before poping values + value = copy.deepcopy(value) + metadata = {} + for key in METADATA_KEYS: + if key in value: + metadata[key] = value.pop(key) + + enum_value = value.get(self.enum_key) + + old_metadata = metadata.get(M_OVERRIDEN_KEY) + if old_metadata: + old_metadata_set = set(old_metadata) + new_metadata = [] + non_gui_children = self.non_gui_children[enum_value] + for 
key in non_gui_children.keys(): + if key in old_metadata: + new_metadata.append(key) + old_metadata_set.remove(key) + + for key in old_metadata_set: + new_metadata.append(key) + metadata[M_OVERRIDEN_KEY] = new_metadata + + return value, metadata + + def update_default_value(self, value): + """Update default values. + + Not an api method, should be called by parent. + """ + value = self._check_update_value(value, "default") + self.has_default_value = value is not NOT_SET + # TODO add value validation + value, metadata = self._prepare_value(value) + self._default_metadata = metadata + + if value is NOT_SET: + self.enum_entity.update_default_value(value) + for children_by_key in self.non_gui_children.values(): + for child_obj in children_by_key.values(): + child_obj.update_default_value(value) + return + + value_keys = set(value.keys()) + enum_value = value[self.enum_key] + expected_keys = set(self.non_gui_children[enum_value].keys()) + expected_keys.add(self.enum_key) + unknown_keys = value_keys - expected_keys + if unknown_keys: + self.log.warning( + "{} Unknown keys in default values: {}".format( + self.path, + ", ".join("\"{}\"".format(key) for key in unknown_keys) + ) + ) + + self.enum_entity.update_default_value(enum_value) + for children_by_key in self.non_gui_children.values(): + for key, child_obj in children_by_key.items(): + child_value = value.get(key, NOT_SET) + child_obj.update_default_value(child_value) + + def update_studio_value(self, value): + """Update studio override values. + + Not an api method, should be called by parent. + """ + value = self._check_update_value(value, "studio override") + value, metadata = self._prepare_value(value) + self._studio_override_metadata = metadata + self.had_studio_override = metadata is not NOT_SET + + if value is NOT_SET: + self.enum_entity.update_studio_value(value) + for children_by_key in self.non_gui_children.values(): + for child_obj in children_by_key.values(): + child_obj.update_studio_value(value) + return + + value_keys = set(value.keys()) + enum_value = value[self.enum_key] + expected_keys = set(self.non_gui_children[enum_value]) + expected_keys.add(self.enum_key) + unknown_keys = value_keys - expected_keys + if unknown_keys: + self.log.warning( + "{} Unknown keys in studio overrides: {}".format( + self.path, + ", ".join("\"{}\"".format(key) for key in unknown_keys) + ) + ) + + self.enum_entity.update_studio_value(enum_value) + for children_by_key in self.non_gui_children.values(): + for key, child_obj in children_by_key.items(): + child_value = value.get(key, NOT_SET) + child_obj.update_studio_value(child_value) + + def update_project_value(self, value): + """Update project override values. + + Not an api method, should be called by parent. 
+ """ + value = self._check_update_value(value, "project override") + value, metadata = self._prepare_value(value) + self._project_override_metadata = metadata + self.had_project_override = metadata is not NOT_SET + + if value is NOT_SET: + self.enum_entity.update_project_value(value) + for children_by_key in self.non_gui_children.values(): + for child_obj in children_by_key.values(): + child_obj.update_project_value(value) + return + + value_keys = set(value.keys()) + enum_value = value[self.enum_key] + expected_keys = set(self.non_gui_children[enum_value]) + expected_keys.add(self.enum_key) + unknown_keys = value_keys - expected_keys + if unknown_keys: + self.log.warning( + "{} Unknown keys in project overrides: {}".format( + self.path, + ", ".join("\"{}\"".format(key) for key in unknown_keys) + ) + ) + + self.enum_entity.update_project_value(enum_value) + for children_by_key in self.non_gui_children.values(): + for key, child_obj in children_by_key.items(): + child_value = value.get(key, NOT_SET) + child_obj.update_project_value(child_value) + + def _discard_changes(self, on_change_trigger): + self._ignore_child_changes = True + + self.enum_entity.discard_changes(on_change_trigger) + for children_by_key in self.non_gui_children.values(): + for child_obj in children_by_key.values(): + child_obj.discard_changes(on_change_trigger) + + self._ignore_child_changes = False + + def _add_to_studio_default(self, on_change_trigger): + self._ignore_child_changes = True + + self.enum_entity.add_to_studio_default(on_change_trigger) + for children_by_key in self.non_gui_children.values(): + for child_obj in children_by_key.values(): + child_obj.add_to_studio_default(on_change_trigger) + + self._ignore_child_changes = False + + self._update_current_metadata() + + self.parent.on_child_change(self) + + def _remove_from_studio_default(self, on_change_trigger): + self._ignore_child_changes = True + + self.enum_entity.remove_from_studio_default(on_change_trigger) + for children_by_key in self.non_gui_children.values(): + for child_obj in children_by_key.values(): + child_obj.remove_from_studio_default(on_change_trigger) + + self._ignore_child_changes = False + + def _add_to_project_override(self, on_change_trigger): + self._ignore_child_changes = True + + self.enum_entity.add_to_project_override(on_change_trigger) + for children_by_key in self.non_gui_children.values(): + for child_obj in children_by_key.values(): + child_obj.add_to_project_override(on_change_trigger) + + self._ignore_child_changes = False + + self._update_current_metadata() + + self.parent.on_child_change(self) + + def _remove_from_project_override(self, on_change_trigger): + if self._override_state is not OverrideState.PROJECT: + return + + self._ignore_child_changes = True + + self.enum_entity.remove_from_project_override(on_change_trigger) + for children_by_key in self.non_gui_children.values(): + for child_obj in children_by_key.values(): + child_obj.remove_from_project_override(on_change_trigger) + + self._ignore_child_changes = False + + def reset_callbacks(self): + """Reset registered callbacks on entity and children.""" + super(DictConditionalEntity, self).reset_callbacks() + for children in self.children.values(): + for child_entity in children: + child_entity.reset_callbacks() diff --git a/openpype/settings/entities/dict_immutable_keys_entity.py b/openpype/settings/entities/dict_immutable_keys_entity.py index 052bbda4d0..bde5304787 100644 --- a/openpype/settings/entities/dict_immutable_keys_entity.py +++ 
b/openpype/settings/entities/dict_immutable_keys_entity.py @@ -1,4 +1,5 @@ import copy +import collections from .lib import ( WRAPPER_TYPES, @@ -138,7 +139,16 @@ class DictImmutableKeysEntity(ItemEntity): method when handling gui wrappers. """ added_children = [] - for children_schema in schema_data["children"]: + children_deque = collections.deque() + for _children_schema in schema_data["children"]: + children_schemas = self.schema_hub.resolve_schema_data( + _children_schema + ) + for children_schema in children_schemas: + children_deque.append(children_schema) + + while children_deque: + children_schema = children_deque.popleft() if children_schema["type"] in WRAPPER_TYPES: _children_schema = copy.deepcopy(children_schema) wrapper_children = self._add_children( @@ -248,7 +258,7 @@ class DictImmutableKeysEntity(ItemEntity): self._metadata_are_modified = current_metadata != metadata self._current_metadata = current_metadata - def set_override_state(self, state): + def set_override_state(self, state, ignore_missing_defaults): # Trigger override state change of root if is not same if self.root_item.override_state is not state: self.root_item.set_override_state(state) @@ -256,9 +266,10 @@ class DictImmutableKeysEntity(ItemEntity): # Change has/had override states self._override_state = state + self._ignore_missing_defaults = ignore_missing_defaults for child_obj in self.non_gui_children.values(): - child_obj.set_override_state(state) + child_obj.set_override_state(state, ignore_missing_defaults) self._update_current_metadata() diff --git a/openpype/settings/entities/dict_mutable_keys_entity.py b/openpype/settings/entities/dict_mutable_keys_entity.py index 3c2645e3e5..c3df935269 100644 --- a/openpype/settings/entities/dict_mutable_keys_entity.py +++ b/openpype/settings/entities/dict_mutable_keys_entity.py @@ -154,7 +154,9 @@ class DictMutableKeysEntity(EndpointEntity): def add_key(self, key): new_child = self._add_key(key) - new_child.set_override_state(self._override_state) + new_child.set_override_state( + self._override_state, self._ignore_missing_defaults + ) self.on_change() return new_child @@ -320,7 +322,7 @@ class DictMutableKeysEntity(EndpointEntity): def _metadata_for_current_state(self): return self._get_metadata_for_state(self._override_state) - def set_override_state(self, state): + def set_override_state(self, state, ignore_missing_defaults): # Trigger override state change of root if is not same if self.root_item.override_state is not state: self.root_item.set_override_state(state) @@ -328,14 +330,22 @@ class DictMutableKeysEntity(EndpointEntity): # TODO change metadata self._override_state = state + self._ignore_missing_defaults = ignore_missing_defaults + # Ignore if is dynamic item and use default in that case if not self.is_dynamic_item and not self.is_in_dynamic_item: if state > OverrideState.DEFAULTS: - if not self.has_default_value: + if ( + not self.has_default_value + and not ignore_missing_defaults + ): raise DefaultsNotDefined(self) elif state > OverrideState.STUDIO: - if not self.had_studio_override: + if ( + not self.had_studio_override + and not ignore_missing_defaults + ): raise StudioDefaultsNotDefined(self) if state is OverrideState.STUDIO: @@ -426,7 +436,7 @@ class DictMutableKeysEntity(EndpointEntity): if label: children_label_by_id[child_entity.id] = label - child_entity.set_override_state(state) + child_entity.set_override_state(state, ignore_missing_defaults) self.children_label_by_id = children_label_by_id @@ -610,7 +620,9 @@ class 
DictMutableKeysEntity(EndpointEntity): if not self._can_discard_changes: return - self.set_override_state(self._override_state) + self.set_override_state( + self._override_state, self._ignore_missing_defaults + ) on_change_trigger.append(self.on_change) def _add_to_studio_default(self, _on_change_trigger): @@ -645,7 +657,9 @@ class DictMutableKeysEntity(EndpointEntity): if label: children_label_by_id[child_entity.id] = label - child_entity.set_override_state(self._override_state) + child_entity.set_override_state( + self._override_state, self._ignore_missing_defaults + ) self.children_label_by_id = children_label_by_id @@ -694,7 +708,9 @@ class DictMutableKeysEntity(EndpointEntity): if label: children_label_by_id[child_entity.id] = label - child_entity.set_override_state(self._override_state) + child_entity.set_override_state( + self._override_state, self._ignore_missing_defaults + ) self.children_label_by_id = children_label_by_id diff --git a/openpype/settings/entities/enum_entity.py b/openpype/settings/entities/enum_entity.py index 0b0575a255..d306eca7ef 100644 --- a/openpype/settings/entities/enum_entity.py +++ b/openpype/settings/entities/enum_entity.py @@ -101,6 +101,79 @@ class EnumEntity(BaseEnumEntity): super(EnumEntity, self).schema_validations() +class HostsEnumEntity(BaseEnumEntity): + """Enumeration of host names. + + Enum items are hardcoded in definition of the entity. + + Hosts enum can have defined empty value as valid option which is + represented by empty string. Schema key to set this option is + `use_empty_value` (true/false). And to set label of empty value set + `empty_label` (string). + + Enum can have single and multiselection. + + NOTE: + Host name is not the same as application name. Host name defines + implementation instead of application name. 
+ """ + schema_types = ["hosts-enum"] + + def _item_initalization(self): + self.multiselection = self.schema_data.get("multiselection", True) + self.use_empty_value = self.schema_data.get( + "use_empty_value", not self.multiselection + ) + custom_labels = self.schema_data.get("custom_labels") or {} + + host_names = [ + "aftereffects", + "blender", + "celaction", + "fusion", + "harmony", + "hiero", + "houdini", + "maya", + "nuke", + "photoshop", + "resolve", + "tvpaint", + "unreal", + "standalonepublisher" + ] + if self.use_empty_value: + host_names.insert(0, "") + # Add default label for empty value if not available + if "" not in custom_labels: + custom_labels[""] = "< without host >" + + # These are hardcoded there is not list of available host in OpenPype + enum_items = [] + valid_keys = set() + for key in host_names: + label = custom_labels.get(key, key) + valid_keys.add(key) + enum_items.append({key: label}) + + self.enum_items = enum_items + self.valid_keys = valid_keys + + if self.multiselection: + self.valid_value_types = (list, ) + self.value_on_not_set = [] + else: + for key in valid_keys: + if self.value_on_not_set is NOT_SET: + self.value_on_not_set = key + break + + self.valid_value_types = (STRING_TYPE, ) + + # GUI attribute + self.placeholder = self.schema_data.get("placeholder") + + class AppsEnumEntity(BaseEnumEntity): schema_types = ["apps-enum"] diff --git a/openpype/settings/entities/input_entities.py b/openpype/settings/entities/input_entities.py index 295333eb60..6952529963 100644 --- a/openpype/settings/entities/input_entities.py +++ b/openpype/settings/entities/input_entities.py @@ -1,5 +1,6 @@ import re import copy +import json from abc import abstractmethod from .base_entity import ItemEntity @@ -217,21 +218,28 @@ class InputEntity(EndpointEntity): return True return False - def set_override_state(self, state): + def set_override_state(self, state, ignore_missing_defaults): # Trigger override state change of root if is not same if self.root_item.override_state is not state: self.root_item.set_override_state(state) return self._override_state = state + self._ignore_missing_defaults = ignore_missing_defaults # Ignore if is dynamic item and use default in that case if not self.is_dynamic_item and not self.is_in_dynamic_item: if state > OverrideState.DEFAULTS: - if not self.has_default_value: + if ( + not self.has_default_value + and not ignore_missing_defaults + ): raise DefaultsNotDefined(self) elif state > OverrideState.STUDIO: - if not self.had_studio_override: + if ( + not self.had_studio_override + and not ignore_missing_defaults + ): raise StudioDefaultsNotDefined(self) if state is OverrideState.STUDIO: @@ -433,6 +441,7 @@ class RawJsonEntity(InputEntity): def _item_initalization(self): # Schema must define if valid value is dict or list + store_as_string = self.schema_data.get("store_as_string", False) is_list = self.schema_data.get("is_list", False) if is_list: valid_value_types = (list, ) @@ -441,6 +450,8 @@ class RawJsonEntity(InputEntity): valid_value_types = (dict, ) value_on_not_set = {} + self.store_as_string = store_as_string + self._is_list = is_list self.valid_value_types = valid_value_types self.value_on_not_set = value_on_not_set @@ -484,6 +495,23 @@ class RawJsonEntity(InputEntity): result = self.metadata != self._metadata_for_current_state() return result + def schema_validations(self): + if self.store_as_string and self.is_env_group: + reason = ( + "RawJson entity can't store environment group metadata" + " as string." 
+ ) + raise EntitySchemaError(self, reason) + super(RawJsonEntity, self).schema_validations() + + def _convert_to_valid_type(self, value): + if isinstance(value, STRING_TYPE): + try: + return json.loads(value) + except Exception: + pass + return super(RawJsonEntity, self)._convert_to_valid_type(value) + def _metadata_for_current_state(self): if ( self._override_state is OverrideState.PROJECT @@ -503,6 +531,9 @@ class RawJsonEntity(InputEntity): value = super(RawJsonEntity, self)._settings_value() if self.is_env_group and isinstance(value, dict): value.update(self.metadata) + + if self.store_as_string: + return json.dumps(value) return value def _prepare_value(self, value): diff --git a/openpype/settings/entities/item_entities.py b/openpype/settings/entities/item_entities.py index 48336080b6..7e84f8c801 100644 --- a/openpype/settings/entities/item_entities.py +++ b/openpype/settings/entities/item_entities.py @@ -150,14 +150,15 @@ class PathEntity(ItemEntity): def value(self): return self.child_obj.value - def set_override_state(self, state): + def set_override_state(self, state, ignore_missing_defaults): # Trigger override state change of root if is not same if self.root_item.override_state is not state: self.root_item.set_override_state(state) return self._override_state = state - self.child_obj.set_override_state(state) + self._ignore_missing_defaults = ignore_missing_defaults + self.child_obj.set_override_state(state, ignore_missing_defaults) def update_default_value(self, value): self.child_obj.update_default_value(value) @@ -344,25 +345,32 @@ class ListStrictEntity(ItemEntity): return True return False - def set_override_state(self, state): + def set_override_state(self, state, ignore_missing_defaults): # Trigger override state change of root if is not same if self.root_item.override_state is not state: self.root_item.set_override_state(state) return self._override_state = state + self._ignore_missing_defaults = ignore_missing_defaults # Ignore if is dynamic item and use default in that case if not self.is_dynamic_item and not self.is_in_dynamic_item: if state > OverrideState.DEFAULTS: - if not self.has_default_value: + if ( + not self.has_default_value + and not ignore_missing_defaults + ): raise DefaultsNotDefined(self) elif state > OverrideState.STUDIO: - if not self.had_studio_override: + if ( + not self.had_studio_override + and not ignore_missing_defaults + ): raise StudioDefaultsNotDefined(self) for child_entity in self.children: - child_entity.set_override_state(state) + child_entity.set_override_state(state, ignore_missing_defaults) self.initial_value = self.settings_value() diff --git a/openpype/settings/entities/lib.py b/openpype/settings/entities/lib.py index 05f4ea64f8..e58281644a 100644 --- a/openpype/settings/entities/lib.py +++ b/openpype/settings/entities/lib.py @@ -2,6 +2,7 @@ import os import re import json import copy +import inspect from .exceptions import ( SchemaTemplateMissingKeys, @@ -25,335 +26,6 @@ TEMPLATE_METADATA_KEYS = ( template_key_pattern = re.compile(r"(\{.*?[^{0]*\})") -def _pop_metadata_item(template): - found_idx = None - for idx, item in enumerate(template): - if not isinstance(item, dict): - continue - - for key in TEMPLATE_METADATA_KEYS: - if key in item: - found_idx = idx - break - - if found_idx is not None: - break - - metadata_item = {} - if found_idx is not None: - metadata_item = template.pop(found_idx) - return metadata_item - - -def _fill_schema_template_data( - template, template_data, skip_paths, required_keys=None, missing_keys=None 
-): - first = False - if required_keys is None: - first = True - - if "skip_paths" in template_data: - skip_paths = template_data["skip_paths"] - if not isinstance(skip_paths, list): - skip_paths = [skip_paths] - - # Cleanup skip paths (skip empty values) - skip_paths = [path for path in skip_paths if path] - - required_keys = set() - missing_keys = set() - - # Copy template data as content may change - template = copy.deepcopy(template) - - # Get metadata item from template - metadata_item = _pop_metadata_item(template) - - # Check for default values for template data - default_values = metadata_item.get(DEFAULT_VALUES_KEY) or {} - - for key, value in default_values.items(): - if key not in template_data: - template_data[key] = value - - if not template: - output = template - - elif isinstance(template, list): - # Store paths by first part if path - # - None value says that whole key should be skipped - skip_paths_by_first_key = {} - for path in skip_paths: - parts = path.split("/") - key = parts.pop(0) - if key not in skip_paths_by_first_key: - skip_paths_by_first_key[key] = [] - - value = "/".join(parts) - skip_paths_by_first_key[key].append(value or None) - - output = [] - for item in template: - # Get skip paths for children item - _skip_paths = [] - if not isinstance(item, dict): - pass - - elif item.get("type") in WRAPPER_TYPES: - _skip_paths = copy.deepcopy(skip_paths) - - elif skip_paths_by_first_key: - # Check if this item should be skipped - key = item.get("key") - if key and key in skip_paths_by_first_key: - _skip_paths = skip_paths_by_first_key[key] - # Skip whole item if None is in skip paths value - if None in _skip_paths: - continue - - output_item = _fill_schema_template_data( - item, template_data, _skip_paths, required_keys, missing_keys - ) - if output_item: - output.append(output_item) - - elif isinstance(template, dict): - output = {} - for key, value in template.items(): - output[key] = _fill_schema_template_data( - value, template_data, skip_paths, required_keys, missing_keys - ) - if output.get("type") in WRAPPER_TYPES and not output.get("children"): - return {} - - elif isinstance(template, STRING_TYPE): - # TODO find much better way how to handle filling template data - template = template.replace("{{", "__dbcb__").replace("}}", "__decb__") - for replacement_string in template_key_pattern.findall(template): - key = str(replacement_string[1:-1]) - required_keys.add(key) - if key not in template_data: - missing_keys.add(key) - continue - - value = template_data[key] - if replacement_string == template: - # Replace the value with value from templates data - # - with this is possible to set value with different type - template = value - else: - # Only replace the key in string - template = template.replace(replacement_string, value) - - output = template.replace("__dbcb__", "{").replace("__decb__", "}") - - else: - output = template - - if first and missing_keys: - raise SchemaTemplateMissingKeys(missing_keys, required_keys) - - return output - - -def _fill_schema_template(child_data, schema_collection, schema_templates): - template_name = child_data["name"] - template = schema_templates.get(template_name) - if template is None: - if template_name in schema_collection: - raise KeyError(( - "Schema \"{}\" is used as `schema_template`" - ).format(template_name)) - raise KeyError("Schema template \"{}\" was not found".format( - template_name - )) - - # Default value must be dictionary (NOT list) - # - empty list would not add any item if `template_data` are not filled - 
template_data = child_data.get("template_data") or {} - if isinstance(template_data, dict): - template_data = [template_data] - - skip_paths = child_data.get("skip_paths") or [] - if isinstance(skip_paths, STRING_TYPE): - skip_paths = [skip_paths] - - output = [] - for single_template_data in template_data: - try: - filled_child = _fill_schema_template_data( - template, single_template_data, skip_paths - ) - - except SchemaTemplateMissingKeys as exc: - raise SchemaTemplateMissingKeys( - exc.missing_keys, exc.required_keys, template_name - ) - - for item in filled_child: - filled_item = _fill_inner_schemas( - item, schema_collection, schema_templates - ) - if filled_item["type"] == "schema_template": - output.extend(_fill_schema_template( - filled_item, schema_collection, schema_templates - )) - else: - output.append(filled_item) - return output - - -def _fill_inner_schemas(schema_data, schema_collection, schema_templates): - if schema_data["type"] == "schema": - raise ValueError("First item in schema data can't be schema.") - - children_key = "children" - object_type_key = "object_type" - for item_key in (children_key, object_type_key): - children = schema_data.get(item_key) - if not children: - continue - - if object_type_key == item_key: - if not isinstance(children, dict): - continue - children = [children] - - new_children = [] - for child in children: - child_type = child["type"] - if child_type == "schema": - schema_name = child["name"] - if schema_name not in schema_collection: - if schema_name in schema_templates: - raise KeyError(( - "Schema template \"{}\" is used as `schema`" - ).format(schema_name)) - raise KeyError( - "Schema \"{}\" was not found".format(schema_name) - ) - - filled_child = _fill_inner_schemas( - schema_collection[schema_name], - schema_collection, - schema_templates - ) - - elif child_type in ("template", "schema_template"): - for filled_child in _fill_schema_template( - child, schema_collection, schema_templates - ): - new_children.append(filled_child) - continue - - else: - filled_child = _fill_inner_schemas( - child, schema_collection, schema_templates - ) - - new_children.append(filled_child) - - if item_key == object_type_key: - if len(new_children) != 1: - raise KeyError(( - "Failed to fill object type with type: {} | name {}" - ).format( - child_type, str(child.get("name")) - )) - new_children = new_children[0] - - schema_data[item_key] = new_children - return schema_data - - -# TODO reimplement logic inside entities -def validate_environment_groups_uniquenes( - schema_data, env_groups=None, keys=None -): - is_first = False - if env_groups is None: - is_first = True - env_groups = {} - keys = [] - - my_keys = copy.deepcopy(keys) - key = schema_data.get("key") - if key: - my_keys.append(key) - - env_group_key = schema_data.get("env_group_key") - if env_group_key: - if env_group_key not in env_groups: - env_groups[env_group_key] = [] - env_groups[env_group_key].append("/".join(my_keys)) - - children = schema_data.get("children") - if not children: - return - - for child in children: - validate_environment_groups_uniquenes( - child, env_groups, copy.deepcopy(my_keys) - ) - - if is_first: - invalid = {} - for env_group_key, key_paths in env_groups.items(): - if len(key_paths) > 1: - invalid[env_group_key] = key_paths - - if invalid: - raise SchemaDuplicatedEnvGroupKeys(invalid) - - -def validate_schema(schema_data): - validate_environment_groups_uniquenes(schema_data) - - -def get_gui_schema(subfolder, main_schema_name): - dirpath = os.path.join( - 
os.path.dirname(__file__), - "schemas", - subfolder - ) - loaded_schemas = {} - loaded_schema_templates = {} - for root, _, filenames in os.walk(dirpath): - for filename in filenames: - basename, ext = os.path.splitext(filename) - if ext != ".json": - continue - - filepath = os.path.join(root, filename) - with open(filepath, "r") as json_stream: - try: - schema_data = json.load(json_stream) - except Exception as exc: - raise ValueError(( - "Unable to parse JSON file {}\n{}" - ).format(filepath, str(exc))) - if isinstance(schema_data, list): - loaded_schema_templates[basename] = schema_data - else: - loaded_schemas[basename] = schema_data - - main_schema = _fill_inner_schemas( - loaded_schemas[main_schema_name], - loaded_schemas, - loaded_schema_templates - ) - validate_schema(main_schema) - return main_schema - - -def get_studio_settings_schema(): - return get_gui_schema("system_schema", "schema_main") - - -def get_project_settings_schema(): - return get_gui_schema("projects_schema", "schema_main") - - class OverrideStateItem: """Object used as item for `OverrideState` enum. @@ -426,3 +98,506 @@ class OverrideState: DEFAULTS = OverrideStateItem(0, "Defaults") STUDIO = OverrideStateItem(1, "Studio overrides") PROJECT = OverrideStateItem(2, "Project Overrides") + + +class SchemasHub: + def __init__(self, schema_subfolder, reset=True): + self._schema_subfolder = schema_subfolder + + self._loaded_types = {} + self._gui_types = tuple() + + self._crashed_on_load = {} + self._loaded_templates = {} + self._loaded_schemas = {} + + # It doesn't make sence to reload types on each reset as they can't be + # changed + self._load_types() + + # Trigger reset + if reset: + self.reset() + + def reset(self): + self._load_schemas() + + @property + def gui_types(self): + return self._gui_types + + def get_schema(self, schema_name): + """Get schema definition data by it's name. + + Returns: + dict: Copy of schema loaded from json files. + + Raises: + KeyError: When schema name is stored in loaded templates or json + file was not possible to parse or when schema name was not + found. + """ + if schema_name not in self._loaded_schemas: + if schema_name in self._loaded_templates: + raise KeyError(( + "Template \"{}\" is used as `schema`" + ).format(schema_name)) + + elif schema_name in self._crashed_on_load: + crashed_item = self._crashed_on_load[schema_name] + raise KeyError( + "Unable to parse schema file \"{}\". {}".format( + crashed_item["filepath"], crashed_item["message"] + ) + ) + + raise KeyError( + "Schema \"{}\" was not found".format(schema_name) + ) + return copy.deepcopy(self._loaded_schemas[schema_name]) + + def get_template(self, template_name): + """Get template definition data by it's name. + + Returns: + list: Copy of template items loaded from json files. + + Raises: + KeyError: When template name is stored in loaded schemas or json + file was not possible to parse or when template name was not + found. + """ + if template_name not in self._loaded_templates: + if template_name in self._loaded_schemas: + raise KeyError(( + "Schema \"{}\" is used as `template`" + ).format(template_name)) + + elif template_name in self._crashed_on_load: + crashed_item = self._crashed_on_load[template_name] + raise KeyError( + "Unable to parse template file \"{}\". 
{}".format( + crashed_item["filepath"], crashed_item["message"] + ) + ) + + raise KeyError( + "Template \"{}\" was not found".format(template_name) + ) + return copy.deepcopy(self._loaded_templates[template_name]) + + def resolve_schema_data(self, schema_data): + """Resolve single item schema data as few types can be expanded. + + This is mainly for 'schema' and 'template' types. Type 'schema' does + not have entity representation and 'template' may contain more than one + output schemas. + + In other cases is retuned passed schema item in list. + + Goal is to have schema and template resolving at one place. + + Returns: + list: Resolved schema data. + """ + schema_type = schema_data["type"] + if schema_type not in ("schema", "template", "schema_template"): + return [schema_data] + + if schema_type == "schema": + return self.resolve_schema_data( + self.get_schema(schema_data["name"]) + ) + + template_name = schema_data["name"] + template_def = self.get_template(template_name) + + filled_template = self._fill_template( + schema_data, template_def + ) + return filled_template + + def create_schema_object(self, schema_data, *args, **kwargs): + """Create entity for passed schema data. + + Args: + schema_data(dict): Schema definition of settings entity. + + Returns: + ItemEntity: Created entity for passed schema data item. + + Raises: + ValueError: When 'schema', 'template' or any of wrapper types are + passed. + KeyError: When type of passed schema is not known. + """ + schema_type = schema_data["type"] + if schema_type in ("schema", "template", "schema_template"): + raise ValueError( + "Got unresolved schema data of type \"{}\"".format(schema_type) + ) + + if schema_type in WRAPPER_TYPES: + raise ValueError(( + "Function `create_schema_object` can't create entities" + " of any wrapper type. Got type: \"{}\"" + ).format(schema_type)) + + klass = self._loaded_types.get(schema_type) + if not klass: + raise KeyError("Unknown type \"{}\"".format(schema_type)) + + return klass(schema_data, *args, **kwargs) + + def _load_types(self): + """Prepare entity types for cretion of their objects. + + Currently all classes in `openpype.settings.entities` that inherited + from `BaseEntity` are stored as loaded types. GUI types are stored to + separated attribute to not mess up api access of entities. + + TODOs: + Add more dynamic way how to add custom types from anywhere and + better handling of abstract classes. Skipping them is dangerous. 
+ """ + + from openpype.settings import entities + + # Define known abstract classes + known_abstract_classes = ( + entities.BaseEntity, + entities.BaseItemEntity, + entities.ItemEntity, + entities.EndpointEntity, + entities.InputEntity, + entities.BaseEnumEntity + ) + + self._loaded_types = {} + _gui_types = [] + for attr in dir(entities): + item = getattr(entities, attr) + # Filter classes + if not inspect.isclass(item): + continue + + # Skip classes that do not inherit from BaseEntity + if not issubclass(item, entities.BaseEntity): + continue + + # Skip class that is abstract by design + if item in known_abstract_classes: + continue + + if inspect.isabstract(item): + # Create an object to get crash and get traceback + item() + + # Backwards compatibility + # Single entity may have multiple schema types + for schema_type in item.schema_types: + self._loaded_types[schema_type] = item + + if item.gui_type: + _gui_types.append(item) + self._gui_types = tuple(_gui_types) + + def _load_schemas(self): + """Load schema definitions from json files.""" + + # Refresh all affecting variables + self._crashed_on_load = {} + self._loaded_templates = {} + self._loaded_schemas = {} + + dirpath = os.path.join( + os.path.dirname(os.path.abspath(__file__)), + "schemas", + self._schema_subfolder + ) + loaded_schemas = {} + loaded_templates = {} + for root, _, filenames in os.walk(dirpath): + for filename in filenames: + basename, ext = os.path.splitext(filename) + if ext != ".json": + continue + + filepath = os.path.join(root, filename) + with open(filepath, "r") as json_stream: + try: + schema_data = json.load(json_stream) + except Exception as exc: + msg = str(exc) + print("Unable to parse JSON file {}\n{}".format( + filepath, msg + )) + self._crashed_on_load[basename] = { + "filepath": filepath, + "message": msg + } + continue + + if basename in self._crashed_on_load: + crashed_item = self._crashed_on_load[basename] + raise KeyError(( + "Duplicated filename \"{}\"." + " One of them crashed on load \"{}\" {}" + ).format( + filename, + crashed_item["filepath"], + crashed_item["message"] + )) + + if isinstance(schema_data, list): + if basename in loaded_templates: + raise KeyError( + "Duplicated template filename \"{}\"".format( + filename + ) + ) + loaded_templates[basename] = schema_data + else: + if basename in loaded_schemas: + raise KeyError( + "Duplicated schema filename \"{}\"".format( + filename + ) + ) + loaded_schemas[basename] = schema_data + + self._loaded_templates = loaded_templates + self._loaded_schemas = loaded_schemas + + def _fill_template(self, child_data, template_def): + """Fill template based on schema definition and template definition. + + Based on `child_data` is `template_def` modified and result is + returned. + + Template definition may have defined data to fill which + should be filled with data from child data. + + Child data may contain more than one output definition of an template. + + Child data can define paths to skip. Path is full path of an item + which won't be returned. + + TODO: + Be able to handle wrapper items here. + + Args: + child_data(dict): Schema data of template item. + template_def(dict): Template definition that will be filled with + child_data. + + Returns: + list: Resolved template always returns list of schemas. 
+ """ + template_name = child_data["name"] + + # Default value must be dictionary (NOT list) + # - empty list would not add any item if `template_data` are not filled + template_data = child_data.get("template_data") or {} + if isinstance(template_data, dict): + template_data = [template_data] + + skip_paths = child_data.get("skip_paths") or [] + if isinstance(skip_paths, STRING_TYPE): + skip_paths = [skip_paths] + + output = [] + for single_template_data in template_data: + try: + output.extend(self._fill_template_data( + template_def, single_template_data, skip_paths + )) + + except SchemaTemplateMissingKeys as exc: + raise SchemaTemplateMissingKeys( + exc.missing_keys, exc.required_keys, template_name + ) + return output + + def _fill_template_data( + self, + template, + template_data, + skip_paths, + required_keys=None, + missing_keys=None + ): + """Fill template values with data from schema data. + + Template has more abilities than schemas. It is expected that template + will be used at multiple places (but may not). Schema represents + exactly one entity and it's children but template may represent more + entities. + + Template can have "keys to fill" from their definition. Some key may be + required and some may be optional because template has their default + values defined. + + Template also have ability to "skip paths" which means to skip entities + from it's content. A template can be used across multiple places with + different requirements. + + Raises: + SchemaTemplateMissingKeys: When fill data do not contain all + required keys for template. + """ + first = False + if required_keys is None: + first = True + + if "skip_paths" in template_data: + skip_paths = template_data["skip_paths"] + if not isinstance(skip_paths, list): + skip_paths = [skip_paths] + + # Cleanup skip paths (skip empty values) + skip_paths = [path for path in skip_paths if path] + + required_keys = set() + missing_keys = set() + + # Copy template data as content may change + template = copy.deepcopy(template) + + # Get metadata item from template + metadata_item = self._pop_metadata_item(template) + + # Check for default values for template data + default_values = metadata_item.get(DEFAULT_VALUES_KEY) or {} + + for key, value in default_values.items(): + if key not in template_data: + template_data[key] = value + + if not template: + output = template + + elif isinstance(template, list): + # Store paths by first part if path + # - None value says that whole key should be skipped + skip_paths_by_first_key = {} + for path in skip_paths: + parts = path.split("/") + key = parts.pop(0) + if key not in skip_paths_by_first_key: + skip_paths_by_first_key[key] = [] + + value = "/".join(parts) + skip_paths_by_first_key[key].append(value or None) + + output = [] + for item in template: + # Get skip paths for children item + _skip_paths = [] + if not isinstance(item, dict): + pass + + elif item.get("type") in WRAPPER_TYPES: + _skip_paths = copy.deepcopy(skip_paths) + + elif skip_paths_by_first_key: + # Check if this item should be skipped + key = item.get("key") + if key and key in skip_paths_by_first_key: + _skip_paths = skip_paths_by_first_key[key] + # Skip whole item if None is in skip paths value + if None in _skip_paths: + continue + + output_item = self._fill_template_data( + item, + template_data, + _skip_paths, + required_keys, + missing_keys + ) + if output_item: + output.append(output_item) + + elif isinstance(template, dict): + output = {} + for key, value in template.items(): + output[key] = 
self._fill_template_data( + value, + template_data, + skip_paths, + required_keys, + missing_keys + ) + if ( + output.get("type") in WRAPPER_TYPES + and not output.get("children") + ): + return {} + + elif isinstance(template, STRING_TYPE): + # TODO find much better way how to handle filling template data + template = ( + template + .replace("{{", "__dbcb__") + .replace("}}", "__decb__") + ) + full_replacement = False + for replacement_string in template_key_pattern.findall(template): + key = str(replacement_string[1:-1]) + required_keys.add(key) + if key not in template_data: + missing_keys.add(key) + continue + + value = template_data[key] + if replacement_string == template: + # Replace the value with value from templates data + # - with this is possible to set value with different type + template = value + full_replacement = True + else: + # Only replace the key in string + template = template.replace(replacement_string, value) + + if not full_replacement: + output = ( + template + .replace("__dbcb__", "{") + .replace("__decb__", "}") + ) + else: + output = template + + else: + output = template + + if first and missing_keys: + raise SchemaTemplateMissingKeys(missing_keys, required_keys) + + return output + + def _pop_metadata_item(self, template_def): + """Pop template metadata from template definition. + + Template metadata may define default values if are not passed from + schema data. + """ + + found_idx = None + for idx, item in enumerate(template_def): + if not isinstance(item, dict): + continue + + for key in TEMPLATE_METADATA_KEYS: + if key in item: + found_idx = idx + break + + if found_idx is not None: + break + + metadata_item = {} + if found_idx is not None: + metadata_item = template_def.pop(found_idx) + return metadata_item diff --git a/openpype/settings/entities/list_entity.py b/openpype/settings/entities/list_entity.py index 4b3f7a2659..64bbad28a7 100644 --- a/openpype/settings/entities/list_entity.py +++ b/openpype/settings/entities/list_entity.py @@ -102,7 +102,9 @@ class ListEntity(EndpointEntity): def add_new_item(self, idx=None, trigger_change=True): child_obj = self._add_new_item(idx) - child_obj.set_override_state(self._override_state) + child_obj.set_override_state( + self._override_state, self._ignore_missing_defaults + ) if trigger_change: self.on_child_change(child_obj) @@ -205,13 +207,14 @@ class ListEntity(EndpointEntity): self._has_project_override = True self.on_change() - def set_override_state(self, state): + def set_override_state(self, state, ignore_missing_defaults): # Trigger override state change of root if is not same if self.root_item.override_state is not state: self.root_item.set_override_state(state) return self._override_state = state + self._ignore_missing_defaults = ignore_missing_defaults while self.children: self.children.pop(0) @@ -219,11 +222,17 @@ class ListEntity(EndpointEntity): # Ignore if is dynamic item and use default in that case if not self.is_dynamic_item and not self.is_in_dynamic_item: if state > OverrideState.DEFAULTS: - if not self.has_default_value: + if ( + not self.has_default_value + and not ignore_missing_defaults + ): raise DefaultsNotDefined(self) elif state > OverrideState.STUDIO: - if not self.had_studio_override: + if ( + not self.had_studio_override + and not ignore_missing_defaults + ): raise StudioDefaultsNotDefined(self) value = NOT_SET @@ -257,7 +266,9 @@ class ListEntity(EndpointEntity): child_obj.update_studio_value(item) for child_obj in self.children: - child_obj.set_override_state(self._override_state) 
+ child_obj.set_override_state( + self._override_state, ignore_missing_defaults + ) self.initial_value = self.settings_value() @@ -395,7 +406,9 @@ class ListEntity(EndpointEntity): if self.had_studio_override: child_obj.update_studio_value(item) - child_obj.set_override_state(self._override_state) + child_obj.set_override_state( + self._override_state, self._ignore_missing_defaults + ) if self._override_state >= OverrideState.PROJECT: self._has_project_override = self.had_project_override @@ -427,7 +440,9 @@ class ListEntity(EndpointEntity): for item in value: child_obj = self._add_new_item() child_obj.update_default_value(item) - child_obj.set_override_state(self._override_state) + child_obj.set_override_state( + self._override_state, self._ignore_missing_defaults + ) self._ignore_child_changes = False @@ -460,7 +475,10 @@ class ListEntity(EndpointEntity): child_obj.update_default_value(item) if self._has_studio_override: child_obj.update_studio_value(item) - child_obj.set_override_state(self._override_state) + child_obj.set_override_state( + self._override_state, + self._ignore_missing_defaults + ) self._ignore_child_changes = False diff --git a/openpype/settings/entities/root_entities.py b/openpype/settings/entities/root_entities.py index 401d3980c9..00677480e8 100644 --- a/openpype/settings/entities/root_entities.py +++ b/openpype/settings/entities/root_entities.py @@ -1,7 +1,7 @@ import os import json import copy -import inspect +import collections from abc import abstractmethod @@ -10,8 +10,7 @@ from .lib import ( NOT_SET, WRAPPER_TYPES, OverrideState, - get_studio_settings_schema, - get_project_settings_schema + SchemasHub ) from .exceptions import ( SchemaError, @@ -53,7 +52,12 @@ class RootEntity(BaseItemEntity): """ schema_types = ["root"] - def __init__(self, schema_data, reset): + def __init__(self, schema_hub, reset, main_schema_name=None): + self.schema_hub = schema_hub + if not main_schema_name: + main_schema_name = "schema_main" + schema_data = schema_hub.get_schema(main_schema_name) + super(RootEntity, self).__init__(schema_data) self._require_restart_callbacks = [] self._item_ids_require_restart = set() @@ -130,7 +134,17 @@ class RootEntity(BaseItemEntity): def _add_children(self, schema_data, first=True): added_children = [] - for children_schema in schema_data["children"]: + children_deque = collections.deque() + for _children_schema in schema_data["children"]: + children_schemas = self.schema_hub.resolve_schema_data( + _children_schema + ) + for children_schema in children_schemas: + children_deque.append(children_schema) + + while children_deque: + children_schema = children_deque.popleft() + if children_schema["type"] in WRAPPER_TYPES: _children_schema = copy.deepcopy(children_schema) wrapper_children = self._add_children( @@ -143,11 +157,13 @@ class RootEntity(BaseItemEntity): child_obj = self.create_schema_object(children_schema, self) self.children.append(child_obj) added_children.append(child_obj) - if isinstance(child_obj, self._gui_types): + if isinstance(child_obj, self.schema_hub.gui_types): continue if child_obj.key in self.non_gui_children: - raise KeyError("Duplicated key \"{}\"".format(child_obj.key)) + raise KeyError( + "Duplicated key \"{}\"".format(child_obj.key) + ) self.non_gui_children[child_obj.key] = child_obj if not first: @@ -160,9 +176,6 @@ class RootEntity(BaseItemEntity): # Store `self` to `root_item` for children entities self.root_item = self - self._loaded_types = None - self._gui_types = None - # Children are stored by key as keys are 
immutable and are defined by # schema self.valid_value_types = (dict, ) @@ -189,11 +202,10 @@ class RootEntity(BaseItemEntity): if not KEY_REGEX.match(key): raise InvalidKeySymbols(self.path, key) + @abstractmethod def get_entity_from_path(self, path): - """Return system settings entity.""" - raise NotImplementedError(( - "Method `get_entity_from_path` not available for \"{}\"" - ).format(self.__class__.__name__)) + """Return entity matching passed path.""" + pass def create_schema_object(self, schema_data, *args, **kwargs): """Create entity by entered schema data. @@ -201,56 +213,11 @@ class RootEntity(BaseItemEntity): Available entities are loaded on first run. Children entities can call this method. """ - if self._loaded_types is None: - # Load available entities - from openpype.settings import entities + return self.schema_hub.create_schema_object( + schema_data, *args, **kwargs + ) - # Define known abstract classes - known_abstract_classes = ( - entities.BaseEntity, - entities.BaseItemEntity, - entities.ItemEntity, - entities.EndpointEntity, - entities.InputEntity, - entities.BaseEnumEntity - ) - - self._loaded_types = {} - _gui_types = [] - for attr in dir(entities): - item = getattr(entities, attr) - # Filter classes - if not inspect.isclass(item): - continue - - # Skip classes that do not inherit from BaseEntity - if not issubclass(item, entities.BaseEntity): - continue - - # Skip class that is abstract by design - if item in known_abstract_classes: - continue - - if inspect.isabstract(item): - # Create an object to get crash and get traceback - item() - - # Backwards compatibility - # Single entity may have multiple schema types - for schema_type in item.schema_types: - self._loaded_types[schema_type] = item - - if item.gui_type: - _gui_types.append(item) - self._gui_types = tuple(_gui_types) - - klass = self._loaded_types.get(schema_data["type"]) - if not klass: - raise KeyError("Unknown type \"{}\"".format(schema_data["type"])) - - return klass(schema_data, *args, **kwargs) - - def set_override_state(self, state): + def set_override_state(self, state, ignore_missing_defaults=None): """Set override state and trigger it on children. Method will discard all changes in hierarchy and use values, metadata @@ -259,9 +226,12 @@ class RootEntity(BaseItemEntity): Args: state (OverrideState): State to which should be data changed. """ + if not ignore_missing_defaults: + ignore_missing_defaults = False + self._override_state = state for child_obj in self.non_gui_children.values(): - child_obj.set_override_state(state) + child_obj.set_override_state(state, ignore_missing_defaults) def on_change(self): """Trigger callbacks on change.""" @@ -491,18 +461,32 @@ class SystemSettings(RootEntity): schema_data (dict): Pass schema data to entity. This is for development and debugging purposes. 
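+
+    Example:
+        A minimal, illustrative sketch (the "applications" key is defined
+        by the system settings schemas; other keys follow the same pattern):
+
+            settings = SystemSettings()
+            applications = settings.get_entity_from_path("applications")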
""" - def __init__( - self, set_studio_state=True, reset=True, schema_data=None - ): - if schema_data is None: - # Load system schemas - schema_data = get_studio_settings_schema() + root_key = SYSTEM_SETTINGS_KEY - super(SystemSettings, self).__init__(schema_data, reset) + def __init__( + self, set_studio_state=True, reset=True, schema_hub=None + ): + if schema_hub is None: + # Load system schemas + schema_hub = SchemasHub("system_schema") + + super(SystemSettings, self).__init__(schema_hub, reset) if set_studio_state: self.set_studio_state() + def get_entity_from_path(self, path): + """Return system settings entity.""" + path_parts = path.split("/") + first_part = path_parts[0] + output = self + if first_part == self.root_key: + path_parts.pop(0) + + for path_part in path_parts: + output = output[path_part] + return output + def _reset_values(self): default_value = get_default_settings()[SYSTEM_SETTINGS_KEY] for key, child_obj in self.non_gui_children.items(): @@ -600,22 +584,24 @@ class ProjectSettings(RootEntity): schema_data (dict): Pass schema data to entity. This is for development and debugging purposes. """ + root_key = PROJECT_SETTINGS_KEY + def __init__( self, project_name=None, change_state=True, reset=True, - schema_data=None + schema_hub=None ): self._project_name = project_name self._system_settings_entity = None - if schema_data is None: + if schema_hub is None: # Load system schemas - schema_data = get_project_settings_schema() + schema_hub = SchemasHub("projects_schema") - super(ProjectSettings, self).__init__(schema_data, reset) + super(ProjectSettings, self).__init__(schema_hub, reset) if change_state: if self.project_name is None: diff --git a/openpype/settings/entities/schemas/README.md b/openpype/settings/entities/schemas/README.md index 6c31b61f59..d457e44e74 100644 --- a/openpype/settings/entities/schemas/README.md +++ b/openpype/settings/entities/schemas/README.md @@ -181,6 +181,103 @@ } ``` +## dict-conditional +- is similar to `dict` but has only one child entity that will be always available +- the one entity is enumerator of possible values and based on value of the entity are defined and used other children entities +- each value of enumerator have defined children that will be used + - there is no way how to have shared entities across multiple enum items +- value from enumerator is also stored next to other values + - to define the key under which will be enum value stored use `enum_key` + - `enum_key` must match key regex and any enum item can't have children with same key + - `enum_label` is label of the entity for UI purposes +- enum items are define with `enum_children` + - it's a list where each item represents enum item + - all items in `enum_children` must have at least `key` key which represents value stored under `enum_key` + - items can define `label` for UI purposes + - most important part is that item can define `children` key where are definitions of it's children (`children` value works the same way as in `dict`) +- entity must have defined `"label"` if is not used as widget +- is set as group if any parent is not group +- if `"label"` is entetered there which will be shown in GUI + - item with label can be collapsible + - that can be set with key `"collapsible"` as `True`/`False` (Default: `True`) + - with key `"collapsed"` as `True`/`False` can be set that is collapsed when GUI is opened (Default: `False`) + - it is possible to add darker background with `"highlight_content"` (Default: `False`) + - darker background has limits of maximum 
applies after 3-4 nested highlighted items there is not difference in the color + - output is dictionary `{the "key": children values}` +``` +# Example +{ + "type": "dict-conditional", + "key": "my_key", + "label": "My Key", + "enum_key": "type", + "enum_label": "label", + "enum_children": [ + # Each item must be a dictionary with 'key' + { + "key": "action", + "label": "Action", + "children": [ + { + "type": "text", + "key": "key", + "label": "Key" + }, + { + "type": "text", + "key": "label", + "label": "Label" + }, + { + "type": "text", + "key": "command", + "label": "Comand" + } + ] + }, + { + "key": "menu", + "label": "Menu", + "children": [ + { + "key": "children", + "label": "Children", + "type": "list", + "object_type": "text" + } + ] + }, + { + # Separator does not have children as "separator" value is enough + "key": "separator", + "label": "Separator" + } + ] +} +``` + +How output of the schema could look like on save: +``` +{ + "type": "separator" +} + +{ + "type": "action", + "key": "action_1", + "label": "Action 1", + "command": "run command -arg" +} + +{ + "type": "menu", + "children": [ + "child_1", + "child_2" + ] +} +``` + ## Inputs for setting any kind of value (`Pure` inputs) - all these input must have defined `"key"` under which will be stored and `"label"` which will be shown next to input - unless they are used in different types of inputs (later) "as widgets" in that case `"key"` and `"label"` are not required as there is not place where to set them @@ -240,6 +337,11 @@ - schema also defines valid value type - by default it is dictionary - to be able use list it is required to define `is_list` to `true` +- output can be stored as string + - this is to allow any keys in dictionary + - set key `store_as_string` to `true` + - code using that setting must expected that value is string and use json module to convert it to python types + ``` { "type": "raw-json", @@ -272,6 +374,25 @@ } ``` +### hosts-enum +- enumeration of available hosts +- multiselection can be allowed with setting key `"multiselection"` to `True` (Default: `False`) +- it is possible to add empty value (represented with empty string) with setting `"use_empty_value"` to `True` (Default: `False`) +- it is possible to set `"custom_labels"` for host names where key `""` is empty value (Default: `{}`) +``` +{ + "key": "host", + "label": "Host name", + "type": "hosts-enum", + "multiselection": false, + "use_empty_value": true, + "custom_labels": { + "": "N/A", + "nuke": "Nuke" + } +} +``` + ## Inputs for setting value using Pure inputs - these inputs also have required `"key"` - attribute `"label"` is required in few conditions diff --git a/openpype/settings/entities/schemas/projects_schema/schema_main.json b/openpype/settings/entities/schemas/projects_schema/schema_main.json index bee9712878..4a8a9d496e 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_main.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_main.json @@ -82,6 +82,10 @@ "type": "schema", "name": "schema_project_hiero" }, + { + "type": "schema", + "name": "schema_project_houdini" + }, { "type": "schema", "name": "schema_project_blender" diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_deadline.json b/openpype/settings/entities/schemas/projects_schema/schema_project_deadline.json index d47a6917da..27eeaef559 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_deadline.json +++ 
b/openpype/settings/entities/schemas/projects_schema/schema_project_deadline.json @@ -11,6 +11,47 @@ "key": "publish", "label": "Publish plugins", "children": [ + { + "type": "dict", + "collapsible": true, + "key": "ValidateExpectedFiles", + "label": "Validate Expected Files", + "checkbox_key": "enabled", + "children": [ + { + "type": "boolean", + "key": "enabled", + "label": "Enabled" + }, + { + "type": "boolean", + "key": "active", + "label": "Active" + }, + { + "type": "label", + "label": "Validate if all expected files were rendered" + }, + { + "type": "boolean", + "key": "allow_user_override", + "object_type": "text", + "label": "Allow user change frame range" + }, + { + "type": "list", + "key": "families", + "object_type": "text", + "label": "Trigger on families" + }, + { + "type": "list", + "key": "targets", + "object_type": "text", + "label": "Trigger for plugins" + } + ] + }, { "type": "dict", "collapsible": true, @@ -67,6 +108,16 @@ "key": "limit", "label": "Limit Groups", "object_type": "text" + }, + { + "type": "raw-json", + "key": "jobInfo", + "label": "Additional JobInfo data" + }, + { + "type": "raw-json", + "key": "pluginInfo", + "label": "Additional PluginInfo data" } ] }, @@ -132,6 +183,20 @@ "key": "use_gpu", "label": "Use GPU" }, + { + "type": "list", + "key": "env_allowed_keys", + "object_type": "text", + "label": "Allowed environment keys" + }, + { + "type": "dict-modifiable", + "key": "env_search_replace_values", + "label": "Search & replace in environment values", + "object_type": { + "type": "text" + } + }, { "type": "dict-modifiable", "key": "limit_groups", diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_ftrack.json b/openpype/settings/entities/schemas/projects_schema/schema_project_ftrack.json index aae2bb2539..a94ebc8888 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_ftrack.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_ftrack.json @@ -604,6 +604,82 @@ "key": "publish", "label": "Publish plugins", "children": [ + { + "type": "dict", + "collapsible": true, + "checkbox_key": "enabled", + "key": "CollectFtrackFamily", + "label": "Collect Ftrack Family", + "is_group": true, + "children": [ + { + "type": "boolean", + "key": "enabled", + "label": "Enabled" + }, + { + "type": "list", + "collapsible": true, + "key": "profiles", + "label": "Profiles", + "use_label_wrap": true, + "object_type": { + "type": "dict", + "children": [ + { + "key": "hosts", + "label": "Host names", + "type": "list", + "object_type": "text" + }, + { + "key": "families", + "label": "Families", + "type": "list", + "object_type": "text" + }, + { + "key": "tasks", + "label": "Task names", + "type": "list", + "object_type": "text" + }, + { + "type": "separator" + }, + { + "key": "add_ftrack_family", + "label": "Add Ftrack Family", + "type": "boolean" + }, + { + "type": "list", + "collapsible": true, + "key": "advanced_filtering", + "label": "Advanced adding if additional families present", + "use_label_wrap": true, + "object_type": { + "type": "dict", + "children": [ + { + "key": "families", + "label": "Additional Families", + "type": "list", + "object_type": "text" + }, + { + "key": "add_ftrack_family", + "label": "Add Ftrack Family", + "type": "boolean" + } + ] + } + } + ] + } + } + ] + }, { "type": "dict", "collapsible": true, diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_global.json b/openpype/settings/entities/schemas/projects_schema/schema_project_global.json index 
6e5cf0671c..a8bce47592 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_global.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_global.json @@ -17,7 +17,8 @@ "type": "raw-json", "label": "Project Folder Structure", "key": "project_folder_structure", - "use_label_wrap": true + "use_label_wrap": true, + "store_as_string": true }, { "type": "schema", diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_harmony.json b/openpype/settings/entities/schemas/projects_schema/schema_project_harmony.json index 8b5d638cd8..ce6246a8de 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_harmony.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_harmony.json @@ -25,6 +25,16 @@ } ] }, + { + "type": "schema_template", + "name": "template_publish_plugin", + "template_data": [ + { + "key": "ValidateContainers", + "label": "ValidateContainers" + } + ] + }, { "type": "dict", "collapsible": true, diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_houdini.json b/openpype/settings/entities/schemas/projects_schema/schema_project_houdini.json new file mode 100644 index 0000000000..c6de257a61 --- /dev/null +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_houdini.json @@ -0,0 +1,27 @@ +{ + "type": "dict", + "collapsible": true, + "key": "houdini", + "label": "Houdini", + "is_file": true, + "children": [ + { + "type": "dict", + "collapsible": true, + "key": "publish", + "label": "Publish plugins", + "children": [ + { + "type": "schema_template", + "name": "template_publish_plugin", + "template_data": [ + { + "key": "ValidateContainers", + "label": "ValidateContainers" + } + ] + } + ] + } + ] +} diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_nuke.json b/openpype/settings/entities/schemas/projects_schema/schema_project_nuke.json index f709e84651..e0b21f4037 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_nuke.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_nuke.json @@ -63,6 +63,14 @@ "type": "text", "key": "fpath_template", "label": "Path template" + }, + { + "type": "list", + "key": "defaults", + "label": "Subset name defaults", + "object_type": { + "type": "text" + } } ] }, @@ -77,6 +85,19 @@ "type": "text", "key": "fpath_template", "label": "Path template" + }, + { + "type": "boolean", + "key": "use_range_limit", + "label": "Use Frame range limit by default" + }, + { + "type": "list", + "key": "defaults", + "label": "Subset name defaults", + "object_type": { + "type": "text" + } } ] } diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_photoshop.json b/openpype/settings/entities/schemas/projects_schema/schema_project_photoshop.json index 4eb6c26dbb..3b65f08ac4 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_photoshop.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_photoshop.json @@ -33,6 +33,16 @@ "key": "publish", "label": "Publish plugins", "children": [ + { + "type": "schema_template", + "name": "template_publish_plugin", + "template_data": [ + { + "key": "ValidateContainers", + "label": "ValidateContainers" + } + ] + }, { "type": "dict", "collapsible": true, @@ -50,7 +60,7 @@ "object_type": "text" } ] - } + } ] }, { diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_slack.json 
b/openpype/settings/entities/schemas/projects_schema/schema_project_slack.json index 58708776ca..170de7c8a2 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_slack.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_slack.json @@ -59,10 +59,10 @@ "object_type": "text" }, { + "type": "hosts-enum", "key": "hosts", "label": "Host names", - "type": "list", - "object_type": "text" + "multiselection": true }, { "type": "separator" diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_standalonepublisher.json b/openpype/settings/entities/schemas/projects_schema/schema_project_standalonepublisher.json index 0ef7612805..c627012531 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_standalonepublisher.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_standalonepublisher.json @@ -130,6 +130,165 @@ ] } ] + }, + { + "type": "dict", + "collapsible": true, + "key": "CollectEditorial", + "label": "Collect Editorial", + "is_group": true, + "children": [ + { + "type": "text", + "key": "source_dir", + "label": "Editorial resources pointer" + }, + { + "type": "list", + "key": "extensions", + "label": "Accepted extensions", + "object_type": "text" + } + ] + }, + { + "type": "dict", + "collapsible": true, + "key": "CollectHierarchyInstance", + "label": "Collect Instance Hierarchy", + "is_group": true, + "children": [ + { + "type": "text", + "key": "shot_rename_template", + "label": "Shot rename template" + }, + { + "key": "shot_rename_search_patterns", + "label": "Shot renaming paterns search", + "type": "dict-modifiable", + "highlight_content": true, + "object_type": { + "type": "text" + } + }, + { + "type": "dict", + "key": "shot_add_hierarchy", + "label": "Shot hierarchy", + "children": [ + { + "type": "text", + "key": "parents_path", + "label": "Parents path template" + }, + { + "key": "parents", + "label": "Parents", + "type": "dict-modifiable", + "highlight_content": true, + "object_type": { + "type": "text" + } + } + ] + }, + { + "key": "shot_add_tasks", + "label": "Add tasks to shot", + "type": "dict-modifiable", + "highlight_content": true, + "object_type": { + "type": "dict", + "children": [ + { + "type": "task-types-enum", + "key": "type", + "label": "Task type" + } + ] + } + } + ] + }, + { + "type": "dict", + "collapsible": true, + "key": "shot_add_tasks", + "label": "Collect Clip Instances", + "is_group": true, + "children": [ + { + "type": "number", + "key": "custom_start_frame", + "label": "Custom start frame", + "default": 0, + "minimum": 1, + "maximum": 100000 + }, + { + "type": "number", + "key": "timeline_frame_start", + "label": "Timeline start frame", + "default": 900000, + "minimum": 1, + "maximum": 10000000 + }, + { + "type": "number", + "key": "timeline_frame_offset", + "label": "Timeline frame offset", + "default": 0, + "minimum": -1000000, + "maximum": 1000000 + }, + { + "key": "subsets", + "label": "Subsets", + "type": "dict-modifiable", + "highlight_content": true, + "object_type": { + "type": "dict", + "children": [ + { + "type": "text", + "key": "family", + "label": "Family" + }, + { + "type": "list", + "key": "families", + "label": "Families", + "object_type": "text" + }, + { + "type": "splitter" + }, + { + "type": "list", + "key": "extensions", + "label": "Extensions", + "object_type": "text" + }, + { + "key": "version", + "label": "Version lock", + "type": "number", + "default": 0, + "minimum": 0, + "maximum": 10 + } + , + { + "type": "boolean", + 
"key": "keepSequence", + "label": "Keep sequence if used for review", + "default": false + } + ] + } + } + ] } ] } diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_tvpaint.json b/openpype/settings/entities/schemas/projects_schema/schema_project_tvpaint.json index 6f90bb4263..67aa4b0a06 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_tvpaint.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_tvpaint.json @@ -11,6 +11,25 @@ "key": "publish", "label": "Publish plugins", "children": [ + { + "type": "dict", + "collapsible": true, + "key": "ExtractSequence", + "label": "ExtractSequence", + "is_group": true, + "children": [ + { + "type": "label", + "label": "Review BG color is used for whole scene review and for thumbnails." + }, + { + "type": "color", + "key": "review_bg", + "label": "Review BG color", + "use_alpha": false + } + ] + }, { "type": "schema_template", "name": "template_publish_plugin", diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_unreal.json b/openpype/settings/entities/schemas/projects_schema/schema_project_unreal.json index b6e94d9d03..4e197e9fc8 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_unreal.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_unreal.json @@ -15,11 +15,6 @@ "type": "boolean", "key": "dev_mode", "label": "Dev mode" - }, - { - "type": "boolean", - "key": "install_unreal_python_engine", - "label": "Install unreal python engine" } ] } diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_anatomy_imageio.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_anatomy_imageio.json index 3c589f9492..2b2eab8868 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_anatomy_imageio.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_anatomy_imageio.json @@ -3,7 +3,6 @@ "key": "imageio", "label": "Color Management and Output Formats", "is_file": true, - "is_group": true, "children": [ { "key": "hiero", @@ -15,6 +14,7 @@ "type": "dict", "label": "Workfile", "collapsible": false, + "is_group": true, "children": [ { "type": "form", @@ -89,6 +89,7 @@ "type": "dict", "label": "Colorspace on Inputs by regex detection", "collapsible": true, + "is_group": true, "children": [ { "type": "list", @@ -123,6 +124,7 @@ "type": "dict", "label": "Viewer", "collapsible": false, + "is_group": true, "children": [ { "type": "text", @@ -136,6 +138,7 @@ "type": "dict", "label": "Workfile", "collapsible": false, + "is_group": true, "children": [ { "type": "form", @@ -233,6 +236,7 @@ "type": "dict", "label": "Nodes", "collapsible": true, + "is_group": true, "children": [ { "key": "requiredNodes", @@ -335,6 +339,7 @@ "type": "dict", "label": "Colorspace on Inputs by regex detection", "collapsible": true, + "is_group": true, "children": [ { "type": "list", diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_publish.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_publish.json index 0c89575d74..4715db4888 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_publish.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_publish.json @@ -90,10 +90,10 @@ "object_type": "text" }, { + "type": "hosts-enum", "key": "hosts", "label": "Hosts", - "type": "list", - "object_type": "text" + "multiselection": true }, { 
"type": "splitter" @@ -182,6 +182,16 @@ "key": "overscan_crop", "label": "Overscan crop" }, + { + "type": "label", + "label": "Overscan color is used when input aspect ratio is not same as output aspect ratio." + }, + { + "type": "color", + "label": "Overscan color", + "key": "overscan_color", + "use_alpha": false + }, { "type": "label", "label": "Width and Height must be both set to higher value than 0 else source resolution is used." @@ -348,10 +358,10 @@ "object_type": "text" }, { + "type": "hosts-enum", "key": "hosts", "label": "Hosts", - "type": "list", - "object_type": "text" + "multiselection": true }, { "type": "splitter" @@ -482,10 +492,10 @@ "object_type": "text" }, { + "type": "hosts-enum", "key": "hosts", "label": "Hosts", - "type": "list", - "object_type": "text" + "multiselection": true }, { "key": "tasks", @@ -544,6 +554,22 @@ "key": "deadline_priority", "label": "Deadline Priotity" }, + { + "type": "splitter" + }, + { + "type": "text", + "key": "publishing_script", + "label": "Publishing script path" + }, + { + "type": "list", + "key": "skip_integration_repre_list", + "label": "Skip integration of representation with ext", + "object_type": { + "type": "text" + } + }, { "type": "dict", "key": "aov_filter", @@ -584,6 +610,30 @@ ] } ] + }, + { + "type": "dict", + "collapsible": true, + "key": "CleanUp", + "label": "Clean Up", + "is_group": true, + "children": [ + { + "type": "list", + "key": "paterns", + "label": "Paterrns (regex)", + "object_type": { + "type": "text" + } + }, + { + "type": "boolean", + "key": "remove_temp_renders", + "label": "Remove Temp renders", + "default": false + } + + ] } ] } diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_tools.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_tools.json index 224389d42e..8c92a45a56 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_tools.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_tools.json @@ -35,10 +35,10 @@ "object_type": "text" }, { + "type": "hosts-enum", "key": "hosts", "label": "Hosts", - "type": "list", - "object_type": "text" + "multiselection": true }, { "key": "tasks", @@ -75,10 +75,10 @@ "type": "dict", "children": [ { + "type": "hosts-enum", "key": "hosts", "label": "Hosts", - "type": "list", - "object_type": "text" + "multiselection": true }, { "key": "tasks", diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_publish.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_publish.json index 0abcdd2965..5ca7059ee5 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_publish.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_publish.json @@ -28,7 +28,16 @@ "type": "label", "label": "Validators" }, - + { + "type": "schema_template", + "name": "template_publish_plugin", + "template_data": [ + { + "key": "ValidateContainers", + "label": "ValidateContainers" + } + ] + }, { "type": "dict", "collapsible": true, diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_nuke_publish.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_nuke_publish.json index 6873ed5190..782179cfd1 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_nuke_publish.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_nuke_publish.json @@ -29,6 +29,16 @@ "type": "label", "label": 
"Validators" }, + { + "type": "schema_template", + "name": "template_publish_plugin", + "template_data": [ + { + "key": "ValidateContainers", + "label": "ValidateContainers" + } + ] + }, { "type": "dict", "collapsible": true, diff --git a/openpype/settings/entities/schemas/system_schema/example_schema.json b/openpype/settings/entities/schemas/system_schema/example_schema.json index a4ed56df32..c3287d7452 100644 --- a/openpype/settings/entities/schemas/system_schema/example_schema.json +++ b/openpype/settings/entities/schemas/system_schema/example_schema.json @@ -9,6 +9,54 @@ "label": "Color input", "type": "color" }, + { + "type": "dict-conditional", + "use_label_wrap": true, + "collapsible": true, + "key": "menu_items", + "label": "Menu items", + "enum_key": "type", + "enum_label": "Type", + "enum_children": [ + { + "key": "action", + "label": "Action", + "children": [ + { + "type": "text", + "key": "key", + "label": "Key" + }, + { + "type": "text", + "key": "label", + "label": "Label" + }, + { + "type": "text", + "key": "command", + "label": "Comand" + } + ] + }, + { + "key": "menu", + "label": "Menu", + "children": [ + { + "key": "children", + "label": "Children", + "type": "list", + "object_type": "text" + } + ] + }, + { + "key": "separator", + "label": "Separator" + } + ] + }, { "type": "dict", "key": "schema_template_exaples", diff --git a/openpype/settings/entities/schemas/system_schema/host_settings/schema_aftereffects.json b/openpype/settings/entities/schemas/system_schema/host_settings/schema_aftereffects.json index afadf48173..6c36a9bb8a 100644 --- a/openpype/settings/entities/schemas/system_schema/host_settings/schema_aftereffects.json +++ b/openpype/settings/entities/schemas/system_schema/host_settings/schema_aftereffects.json @@ -29,11 +29,13 @@ "template_data": [ { "app_variant_label": "2020", - "app_variant": "2020" + "app_variant": "2020", + "variant_skip_paths": ["use_python_2"] }, { "app_variant_label": "2021", - "app_variant": "2021" + "app_variant": "2021", + "variant_skip_paths": ["use_python_2"] } ] } diff --git a/openpype/settings/entities/schemas/system_schema/host_settings/schema_blender.json b/openpype/settings/entities/schemas/system_schema/host_settings/schema_blender.json index 0a6c8ca035..27ead6e6da 100644 --- a/openpype/settings/entities/schemas/system_schema/host_settings/schema_blender.json +++ b/openpype/settings/entities/schemas/system_schema/host_settings/schema_blender.json @@ -30,7 +30,8 @@ "children": [ { "type": "schema_template", - "name": "template_host_variant_items" + "name": "template_host_variant_items", + "skip_paths": ["use_python_2"] } ] } diff --git a/openpype/settings/entities/schemas/system_schema/host_settings/schema_harmony.json b/openpype/settings/entities/schemas/system_schema/host_settings/schema_harmony.json index 083885a53b..c122b8930b 100644 --- a/openpype/settings/entities/schemas/system_schema/host_settings/schema_harmony.json +++ b/openpype/settings/entities/schemas/system_schema/host_settings/schema_harmony.json @@ -29,11 +29,13 @@ "template_data": [ { "app_variant_label": "20", - "app_variant": "20" + "app_variant": "20", + "variant_skip_paths": ["use_python_2"] }, { "app_variant_label": "17", - "app_variant": "17" + "app_variant": "17", + "variant_skip_paths": ["use_python_2"] } ] } diff --git a/openpype/settings/entities/schemas/system_schema/host_settings/schema_photoshop.json b/openpype/settings/entities/schemas/system_schema/host_settings/schema_photoshop.json index 9c21166b63..7bcd89c650 100644 --- 
a/openpype/settings/entities/schemas/system_schema/host_settings/schema_photoshop.json +++ b/openpype/settings/entities/schemas/system_schema/host_settings/schema_photoshop.json @@ -29,11 +29,13 @@ "template_data": [ { "app_variant_label": "2020", - "app_variant": "2020" + "app_variant": "2020", + "variant_skip_paths": ["use_python_2"] }, { "app_variant_label": "2021", - "app_variant": "2021" + "app_variant": "2021", + "variant_skip_paths": ["use_python_2"] } ] } diff --git a/openpype/settings/entities/schemas/system_schema/host_settings/schema_tvpaint.json b/openpype/settings/entities/schemas/system_schema/host_settings/schema_tvpaint.json index c39e6f7a30..ff57d767c4 100644 --- a/openpype/settings/entities/schemas/system_schema/host_settings/schema_tvpaint.json +++ b/openpype/settings/entities/schemas/system_schema/host_settings/schema_tvpaint.json @@ -30,7 +30,8 @@ "children": [ { "type": "schema_template", - "name": "template_host_variant_items" + "name": "template_host_variant_items", + "skip_paths": ["use_python_2"] } ] } diff --git a/openpype/settings/entities/schemas/system_schema/host_settings/schema_unreal.json b/openpype/settings/entities/schemas/system_schema/host_settings/schema_unreal.json index df5ec0e6fa..133d6c9eaf 100644 --- a/openpype/settings/entities/schemas/system_schema/host_settings/schema_unreal.json +++ b/openpype/settings/entities/schemas/system_schema/host_settings/schema_unreal.json @@ -30,7 +30,12 @@ "children": [ { "type": "schema_template", - "name": "template_host_variant_items" + "name": "template_host_variant_items", + "skip_paths": [ + "executables", + "separator", + "arguments" + ] } ] } diff --git a/openpype/settings/entities/schemas/system_schema/host_settings/template_host_unchangables.json b/openpype/settings/entities/schemas/system_schema/host_settings/template_host_unchangables.json index e8b2a70076..c4d8d89209 100644 --- a/openpype/settings/entities/schemas/system_schema/host_settings/template_host_unchangables.json +++ b/openpype/settings/entities/schemas/system_schema/host_settings/template_host_unchangables.json @@ -14,25 +14,11 @@ "roles": ["developer"] }, { - "type": "enum", + "type": "hosts-enum", "key": "host_name", "label": "Host implementation", - "enum_items": [ - { "": "< without host >" }, - { "aftereffects": "aftereffects" }, - { "blender": "blender" }, - { "celaction": "celaction" }, - { "fusion": "fusion" }, - { "harmony": "harmony" }, - { "hiero": "hiero" }, - { "houdini": "houdini" }, - { "maya": "maya" }, - { "nuke": "nuke" }, - { "photoshop": "photoshop" }, - { "resolve": "resolve" }, - { "tvpaint": "tvpaint" }, - { "unreal": "unreal" } - ], + "multiselection": false, + "use_empty_value": true, "roles": ["developer"] } ] diff --git a/openpype/settings/entities/schemas/system_schema/host_settings/template_host_variant.json b/openpype/settings/entities/schemas/system_schema/host_settings/template_host_variant.json index 33cde3d216..96a936c27b 100644 --- a/openpype/settings/entities/schemas/system_schema/host_settings/template_host_variant.json +++ b/openpype/settings/entities/schemas/system_schema/host_settings/template_host_variant.json @@ -1,4 +1,9 @@ [ + { + "__default_values__": { + "variant_skip_paths": null + } + }, { "type": "dict", "key": "{app_variant}", @@ -19,7 +24,8 @@ }, { "type": "schema_template", - "name": "template_host_variant_items" + "name": "template_host_variant_items", + "skip_paths": "{variant_skip_paths}" } ] } diff --git 
a/openpype/settings/entities/schemas/system_schema/host_settings/template_host_variant_items.json b/openpype/settings/entities/schemas/system_schema/host_settings/template_host_variant_items.json index ab4d2374a3..409efb006e 100644 --- a/openpype/settings/entities/schemas/system_schema/host_settings/template_host_variant_items.json +++ b/openpype/settings/entities/schemas/system_schema/host_settings/template_host_variant_items.json @@ -14,6 +14,7 @@ "placeholder": "Executable path" }, { + "key": "separator", "type":"separator" }, { diff --git a/openpype/settings/lib.py b/openpype/settings/lib.py index f61166fa69..4a3e66de33 100644 --- a/openpype/settings/lib.py +++ b/openpype/settings/lib.py @@ -532,7 +532,11 @@ def apply_local_settings_on_system_settings(system_settings, local_settings): variants = system_settings["applications"][app_group_name]["variants"] for app_name, app_value in value.items(): - if not app_value or app_name not in variants: + if ( + not app_value + or app_name not in variants + or "executables" not in variants[app_name] + ): continue executable = app_value.get("executable") diff --git a/openpype/style/style.css b/openpype/style/style.css index 645e68ce64..c57b9a8da6 100644 --- a/openpype/style/style.css +++ b/openpype/style/style.css @@ -97,6 +97,29 @@ QToolButton:disabled { background: {color:bg-buttons-disabled}; } +QToolButton[popupMode="1"] { + /* make way for the popup button */ + padding-right: 20px; + border: 1px solid {color:bg-buttons}; +} + +QToolButton::menu-button { + width: 16px; + /* Set border only of left side. */ + border: 1px solid transparent; + border-left: 1px solid {color:bg-buttons}; +} + +QToolButton::menu-arrow { + /* Offset arrow a little bit to center. */ + left: 1px; top: 1px; +} + +QToolButton::menu-arrow:open { + /* Don't offset arrow on open. */ + left: 0px; top: 0px; +} + /* QMenu */ QMenu { border: 1px solid #555555; diff --git a/openpype/tools/launcher/lib.py b/openpype/tools/launcher/lib.py index b4e6a0c3e9..65d40cd0df 100644 --- a/openpype/tools/launcher/lib.py +++ b/openpype/tools/launcher/lib.py @@ -15,7 +15,7 @@ provides a bridge between the file-based project inventory and configuration. """ import os -from Qt import QtGui +from Qt import QtGui, QtCore from avalon.vendor import qtawesome from openpype.api import resources @@ -23,6 +23,51 @@ ICON_CACHE = {} NOT_FOUND = type("NotFound", (object, ), {}) +class ProjectHandler(QtCore.QObject): + """Handler of project model and current project in Launcher tool. + + Helps to organize two separate widgets handling current project selection. + + It is easier to trigger project change callbacks from one place than from + multiple differect places without proper handling or sequence changes. + + Args: + dbcon(AvalonMongoDB): Mongo connection with Session. + model(ProjectModel): Object of projects model which is shared across + all widgets using projects. Arg dbcon should be used as source for + the model. 
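+
+    Example:
+        A minimal, illustrative sketch of how the Launcher window wires
+        this up (callback and project names are placeholders):
+
+            model = ProjectModel(dbcon)
+            model.hide_invisible = True
+            handler = ProjectHandler(dbcon, model)
+            handler.project_changed.connect(on_project_changed)
+            handler.set_project("ProjectName")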
+ """ + # Project list will be refreshed each 10000 msecs + # - this is not part of helper implementation but should be used by widgets + # that may require reshing of projects + refresh_interval = 10000 + + # Signal emmited when project has changed + project_changed = QtCore.Signal(str) + + def __init__(self, dbcon, model): + super(ProjectHandler, self).__init__() + # Store project model for usage + self.model = model + # Store dbcon + self.dbcon = dbcon + + self.current_project = dbcon.Session.get("AVALON_PROJECT") + + def set_project(self, project_name): + # Change current project of this handler + self.current_project = project_name + # Change session project to take effect for other widgets using the + # dbcon object. + self.dbcon.Session["AVALON_PROJECT"] = project_name + + # Trigger change signal when everything is updated to new project + self.project_changed.emit(project_name) + + def refresh_model(self): + self.model.refresh() + + def get_action_icon(action): icon_name = action.icon if not icon_name: diff --git a/openpype/tools/launcher/models.py b/openpype/tools/launcher/models.py index 25b6dcdbf0..846a07e081 100644 --- a/openpype/tools/launcher/models.py +++ b/openpype/tools/launcher/models.py @@ -325,19 +325,59 @@ class ProjectModel(QtGui.QStandardItemModel): self.hide_invisible = False self.project_icon = qtawesome.icon("fa.map", color="white") + self._project_names = set() def refresh(self): - self.clear() - self.beginResetModel() - + project_names = set() for project_doc in self.get_projects(): - item = QtGui.QStandardItem(self.project_icon, project_doc["name"]) - self.appendRow(item) + project_names.add(project_doc["name"]) - self.endResetModel() + origin_project_names = set(self._project_names) + self._project_names = project_names + + project_names_to_remove = origin_project_names - project_names + if project_names_to_remove: + row_counts = {} + continuous = None + for row in range(self.rowCount()): + index = self.index(row, 0) + index_name = index.data(QtCore.Qt.DisplayRole) + if index_name in project_names_to_remove: + if continuous is None: + continuous = row + row_counts[continuous] = 0 + row_counts[continuous] += 1 + else: + continuous = None + + for row in reversed(sorted(row_counts.keys())): + count = row_counts[row] + self.removeRows(row, count) + + continuous = None + row_counts = {} + for idx, project_name in enumerate(sorted(project_names)): + if project_name in origin_project_names: + continuous = None + continue + + if continuous is None: + continuous = idx + row_counts[continuous] = [] + + row_counts[continuous].append(project_name) + + for row in reversed(sorted(row_counts.keys())): + items = [] + for project_name in row_counts[row]: + item = QtGui.QStandardItem(self.project_icon, project_name) + items.append(item) + + self.invisibleRootItem().insertRows(row, items) def get_projects(self): project_docs = [] + for project_doc in sorted( self.dbcon.projects(), key=lambda x: x["name"] ): diff --git a/openpype/tools/launcher/widgets.py b/openpype/tools/launcher/widgets.py index 22b08d7d15..048210115c 100644 --- a/openpype/tools/launcher/widgets.py +++ b/openpype/tools/launcher/widgets.py @@ -20,22 +20,15 @@ from .constants import ( class ProjectBar(QtWidgets.QWidget): - project_changed = QtCore.Signal(int) - - def __init__(self, dbcon, parent=None): + def __init__(self, project_handler, parent=None): super(ProjectBar, self).__init__(parent) - self.dbcon = dbcon - - model = ProjectModel(dbcon) - model.hide_invisible = True - project_combobox = 
QtWidgets.QComboBox(self) # Change delegate so stylysheets are applied project_delegate = QtWidgets.QStyledItemDelegate(project_combobox) project_combobox.setItemDelegate(project_delegate) - project_combobox.setModel(model) + project_combobox.setModel(project_handler.model) project_combobox.setRootModelIndex(QtCore.QModelIndex()) layout = QtWidgets.QHBoxLayout(self) @@ -47,21 +40,43 @@ class ProjectBar(QtWidgets.QWidget): QtWidgets.QSizePolicy.Maximum ) - self.model = model + refresh_timer = QtCore.QTimer() + refresh_timer.setInterval(project_handler.refresh_interval) + + self.project_handler = project_handler self.project_delegate = project_delegate self.project_combobox = project_combobox - - # Initialize - self.refresh() + self.refresh_timer = refresh_timer # Signals - self.project_combobox.currentIndexChanged.connect(self.project_changed) + refresh_timer.timeout.connect(self._on_refresh_timeout) + self.project_combobox.currentIndexChanged.connect(self.on_index_change) + project_handler.project_changed.connect(self._on_project_change) # Set current project by default if it's set. - project_name = self.dbcon.Session.get("AVALON_PROJECT") + project_name = project_handler.current_project if project_name: self.set_project(project_name) + def showEvent(self, event): + if not self.refresh_timer.isActive(): + self.refresh_timer.start() + super(ProjectBar, self).showEvent(event) + + def _on_refresh_timeout(self): + if not self.isVisible(): + # Stop timer if widget is not visible + self.refresh_timer.stop() + + elif self.isActiveWindow(): + # Refresh projects if window is active + self.project_handler.refresh_model() + + def _on_project_change(self, project_name): + if self.get_current_project() == project_name: + return + self.set_project(project_name) + def get_current_project(self): return self.project_combobox.currentText() @@ -69,27 +84,18 @@ class ProjectBar(QtWidgets.QWidget): index = self.project_combobox.findText(project_name) if index < 0: # Try refresh combobox model - self.project_combobox.blockSignals(True) - self.model.refresh() - self.project_combobox.blockSignals(False) - + self.project_handler.refresh_model() index = self.project_combobox.findText(project_name) if index >= 0: self.project_combobox.setCurrentIndex(index) - def refresh(self): - prev_project_name = self.get_current_project() + def on_index_change(self, idx): + if not self.isVisible(): + return - # Refresh without signals - self.project_combobox.blockSignals(True) - - self.model.refresh() - self.set_project(prev_project_name) - - self.project_combobox.blockSignals(False) - - self.project_changed.emit(self.project_combobox.currentIndex()) + project_name = self.get_current_project() + self.project_handler.set_project(project_name) class ActionBar(QtWidgets.QWidget): diff --git a/openpype/tools/launcher/window.py b/openpype/tools/launcher/window.py index af749814b7..979aab42cf 100644 --- a/openpype/tools/launcher/window.py +++ b/openpype/tools/launcher/window.py @@ -12,7 +12,7 @@ from avalon.tools import lib as tools_lib from avalon.tools.widgets import AssetWidget from avalon.vendor import qtawesome from .models import ProjectModel -from .lib import get_action_label +from .lib import get_action_label, ProjectHandler from .widgets import ( ProjectBar, ActionBar, @@ -89,37 +89,49 @@ class ProjectIconView(QtWidgets.QListView): class ProjectsPanel(QtWidgets.QWidget): """Projects Page""" - - project_clicked = QtCore.Signal(str) - - def __init__(self, dbcon, parent=None): + def __init__(self, project_handler, parent=None): 
super(ProjectsPanel, self).__init__(parent=parent) layout = QtWidgets.QVBoxLayout(self) - self.dbcon = dbcon - self.dbcon.install() - view = ProjectIconView(parent=self) view.setSelectionMode(QtWidgets.QListView.NoSelection) flick = FlickCharm(parent=self) flick.activateOn(view) - model = ProjectModel(self.dbcon) - model.hide_invisible = True - model.refresh() - view.setModel(model) + + view.setModel(project_handler.model) layout.addWidget(view) + refresh_timer = QtCore.QTimer() + refresh_timer.setInterval(project_handler.refresh_interval) + + refresh_timer.timeout.connect(self._on_refresh_timeout) view.clicked.connect(self.on_clicked) - self.model = model self.view = view + self.refresh_timer = refresh_timer + self.project_handler = project_handler def on_clicked(self, index): if index.isValid(): project_name = index.data(QtCore.Qt.DisplayRole) - self.project_clicked.emit(project_name) + self.project_handler.set_project(project_name) + + def showEvent(self, event): + self.project_handler.refresh_model() + if not self.refresh_timer.isActive(): + self.refresh_timer.start() + super(ProjectsPanel, self).showEvent(event) + + def _on_refresh_timeout(self): + if not self.isVisible(): + # Stop timer if widget is not visible + self.refresh_timer.stop() + + elif self.isActiveWindow(): + # Refresh projects if window is active + self.project_handler.refresh_model() class AssetsPanel(QtWidgets.QWidget): @@ -127,7 +139,7 @@ class AssetsPanel(QtWidgets.QWidget): back_clicked = QtCore.Signal() session_changed = QtCore.Signal() - def __init__(self, dbcon, parent=None): + def __init__(self, project_handler, dbcon, parent=None): super(AssetsPanel, self).__init__(parent=parent) self.dbcon = dbcon @@ -142,7 +154,7 @@ class AssetsPanel(QtWidgets.QWidget): btn_back = QtWidgets.QPushButton(project_bar_widget) btn_back.setIcon(btn_back_icon) - project_bar = ProjectBar(self.dbcon, project_bar_widget) + project_bar = ProjectBar(project_handler, project_bar_widget) layout.addWidget(btn_back) layout.addWidget(project_bar) @@ -185,24 +197,19 @@ class AssetsPanel(QtWidgets.QWidget): layout.addWidget(body) # signals - project_bar.project_changed.connect(self.on_project_changed) + project_handler.project_changed.connect(self.on_project_changed) assets_widget.selection_changed.connect(self.on_asset_changed) assets_widget.refreshed.connect(self.on_asset_changed) tasks_widget.task_changed.connect(self.on_task_change) btn_back.clicked.connect(self.back_clicked) + self.project_handler = project_handler self.project_bar = project_bar self.assets_widget = assets_widget self.tasks_widget = tasks_widget self._btn_back = btn_back - # Force initial refresh for the assets since we might not be - # trigging a Project switch if we click the project that was set - # prior to launching the Launcher - # todo: remove this behavior when AVALON_PROJECT is not required - assets_widget.refresh() - def showEvent(self, event): super(AssetsPanel, self).showEvent(event) @@ -211,19 +218,7 @@ class AssetsPanel(QtWidgets.QWidget): btn_size = self.project_bar.height() self._btn_back.setFixedSize(QtCore.QSize(btn_size, btn_size)) - def set_project(self, project): - before = self.project_bar.get_current_project() - if before == project: - self.assets_widget.refresh() - return - - self.project_bar.set_project(project) - self.on_project_changed() - def on_project_changed(self): - project_name = self.project_bar.get_current_project() - self.dbcon.Session["AVALON_PROJECT"] = project_name - self.session_changed.emit() self.assets_widget.refresh() @@ -232,11 
+227,8 @@ class AssetsPanel(QtWidgets.QWidget): """Callback on asset selection changed This updates the task view. - """ - print("Asset changed..") - asset_name = None asset_silo = None @@ -276,6 +268,8 @@ class AssetsPanel(QtWidgets.QWidget): class LauncherWindow(QtWidgets.QDialog): """Launcher interface""" + # Refresh actions each 10000msecs + actions_refresh_timeout = 10000 def __init__(self, parent=None): super(LauncherWindow, self).__init__(parent) @@ -298,8 +292,12 @@ class LauncherWindow(QtWidgets.QDialog): self.windowFlags() | QtCore.Qt.WindowMinimizeButtonHint ) - project_panel = ProjectsPanel(self.dbcon) - asset_panel = AssetsPanel(self.dbcon) + project_model = ProjectModel(self.dbcon) + project_model.hide_invisible = True + project_handler = ProjectHandler(self.dbcon, project_model) + + project_panel = ProjectsPanel(project_handler) + asset_panel = AssetsPanel(project_handler, self.dbcon) page_slider = SlidePageWidget() page_slider.addWidget(project_panel) @@ -344,6 +342,12 @@ class LauncherWindow(QtWidgets.QDialog): layout.setSpacing(0) layout.setContentsMargins(0, 0, 0, 0) + actions_refresh_timer = QtCore.QTimer() + actions_refresh_timer.setInterval(self.actions_refresh_timeout) + + self.actions_refresh_timer = actions_refresh_timer + self.project_handler = project_handler + self.message_label = message_label self.project_panel = project_panel self.asset_panel = asset_panel @@ -353,23 +357,21 @@ class LauncherWindow(QtWidgets.QDialog): self._page = 0 # signals + actions_refresh_timer.timeout.connect(self._on_action_timer) actions_bar.action_clicked.connect(self.on_action_clicked) action_history.trigger_history.connect(self.on_history_action) - project_panel.project_clicked.connect(self.on_project_clicked) + project_handler.project_changed.connect(self.on_project_change) asset_panel.back_clicked.connect(self.on_back_clicked) asset_panel.session_changed.connect(self.on_session_changed) - # todo: Simplify this callback connection - asset_panel.project_bar.project_changed.connect( - self.on_project_changed - ) - self.resize(520, 740) def showEvent(self, event): - super().showEvent(event) - # TODO implement refresh/reset which will trigger updating - self.discover_actions() + if not self.actions_refresh_timer.isActive(): + self.actions_refresh_timer.start() + self.discover_actions() + + super(LauncherWindow, self).showEvent(event) def set_page(self, page): current = self.page_slider.currentIndex() @@ -385,13 +387,6 @@ class LauncherWindow(QtWidgets.QDialog): QtCore.QTimer.singleShot(5000, lambda: self.message_label.setText("")) self.log.debug(message) - def on_project_changed(self): - project_name = self.asset_panel.project_bar.get_current_project() - self.dbcon.Session["AVALON_PROJECT"] = project_name - - # Update the Action plug-ins available for the current project - self.discover_actions() - def on_session_changed(self): self.filter_actions() @@ -402,17 +397,23 @@ class LauncherWindow(QtWidgets.QDialog): def filter_actions(self): self.actions_bar.filter_actions() - def on_project_clicked(self, project_name): - self.dbcon.Session["AVALON_PROJECT"] = project_name - # Refresh projects - self.asset_panel.set_project(project_name) + def _on_action_timer(self): + if not self.isVisible(): + # Stop timer if widget is not visible + self.actions_refresh_timer.stop() + + elif self.isActiveWindow(): + # Refresh projects if window is active + self.discover_actions() + + def on_project_change(self, project_name): + # Update the Action plug-ins available for the current project 
self.set_page(1) self.discover_actions() def on_back_clicked(self): - self.dbcon.Session["AVALON_PROJECT"] = None + self.project_handler.set_project(None) self.set_page(0) - self.project_panel.model.refresh() # Refresh projects self.discover_actions() def on_action_clicked(self, action): diff --git a/openpype/tools/settings/local_settings/apps_widget.py b/openpype/tools/settings/local_settings/apps_widget.py index 5f4e5dd1c5..e6a4132955 100644 --- a/openpype/tools/settings/local_settings/apps_widget.py +++ b/openpype/tools/settings/local_settings/apps_widget.py @@ -121,6 +121,9 @@ class AppGroupWidget(QtWidgets.QWidget): widgets_by_variant_name = {} for variant_name, variant_entity in valid_variants.items(): + if "executables" not in variant_entity: + continue + variant_widget = AppVariantWidget( group_label, variant_name, variant_entity, content_widget ) @@ -193,8 +196,12 @@ class LocalApplicationsWidgets(QtWidgets.QWidget): # Create App group specific widget and store it by the key group_widget = AppGroupWidget(entity, self) - self.widgets_by_group_name[key] = group_widget - self.content_layout.addWidget(group_widget) + if group_widget.widgets_by_variant_name: + self.widgets_by_group_name[key] = group_widget + self.content_layout.addWidget(group_widget) + else: + group_widget.setVisible(False) + group_widget.deleteLater() def update_local_settings(self, value): if not value: diff --git a/openpype/tools/settings/local_settings/window.py b/openpype/tools/settings/local_settings/window.py index 69562d0b1f..9e8fd89b23 100644 --- a/openpype/tools/settings/local_settings/window.py +++ b/openpype/tools/settings/local_settings/window.py @@ -168,9 +168,6 @@ class LocalSettingsWindow(QtWidgets.QWidget): scroll_widget = QtWidgets.QScrollArea(self) scroll_widget.setObjectName("GroupWidget") - settings_widget = LocalSettingsWidget(scroll_widget) - - scroll_widget.setWidget(settings_widget) scroll_widget.setWidgetResizable(True) footer = QtWidgets.QWidget(self) @@ -191,7 +188,12 @@ class LocalSettingsWindow(QtWidgets.QWidget): save_btn.clicked.connect(self._on_save_clicked) reset_btn.clicked.connect(self._on_reset_clicked) - self.settings_widget = settings_widget + # Do not create local settings widget in init phase as it's using + # settings objects that must be OK to be able create this widget + # - we want to show dialog if anything goes wrong + # - without reseting nothing is shown + self._settings_widget = None + self._scroll_widget = scroll_widget self.reset_btn = reset_btn self.save_btn = save_btn @@ -203,13 +205,53 @@ class LocalSettingsWindow(QtWidgets.QWidget): def reset(self): if self._reset_on_show: self._reset_on_show = False - value = get_local_settings() - self.settings_widget.update_local_settings(value) + + error_msg = None + try: + # Create settings widget if is not created yet + if self._settings_widget is None: + self._settings_widget = LocalSettingsWidget( + self._scroll_widget + ) + self._scroll_widget.setWidget(self._settings_widget) + + value = get_local_settings() + self._settings_widget.update_local_settings(value) + + except Exception as exc: + error_msg = str(exc) + + crashed = error_msg is not None + # Enable/Disable save button if crashed or not + self.save_btn.setEnabled(not crashed) + # Show/Hide settings widget if crashed or not + if self._settings_widget: + self._settings_widget.setVisible(not crashed) + + if not crashed: + return + + # Show message with error + title = "Something went wrong" + msg = ( + "Bug: Loading of settings failed." 
+ " Please contact your project manager or OpenPype team." + "\n\nError message:\n{}" + ).format(error_msg) + + dialog = QtWidgets.QMessageBox( + QtWidgets.QMessageBox.Critical, + title, + msg, + QtWidgets.QMessageBox.Ok, + self + ) + dialog.exec_() def _on_reset_clicked(self): self.reset() def _on_save_clicked(self): - value = self.settings_widget.settings_value() + value = self._settings_widget.settings_value() save_local_settings(value) self.reset() diff --git a/openpype/tools/settings/settings/base.py b/openpype/tools/settings/settings/base.py index 03f920b7dc..eb5f82ab9a 100644 --- a/openpype/tools/settings/settings/base.py +++ b/openpype/tools/settings/settings/base.py @@ -1,3 +1,5 @@ +import json + from Qt import QtWidgets, QtGui, QtCore from openpype.tools.settings import CHILD_OFFSET from .widgets import ExpandingWidget @@ -125,6 +127,117 @@ class BaseWidget(QtWidgets.QWidget): actions_mapping[action] = remove_from_project_override menu.addAction(action) + def _copy_value_actions(self, menu): + def copy_value(): + mime_data = QtCore.QMimeData() + + if self.entity.is_dynamic_item or self.entity.is_in_dynamic_item: + entity_path = None + else: + entity_path = "/".join( + [self.entity.root_key, self.entity.path] + ) + + value = self.entity.value + # Copy for settings tool + settings_data = { + "root_key": self.entity.root_key, + "value": value, + "path": entity_path + } + settings_encoded_data = QtCore.QByteArray() + settings_stream = QtCore.QDataStream( + settings_encoded_data, QtCore.QIODevice.WriteOnly + ) + settings_stream.writeQString(json.dumps(settings_data)) + mime_data.setData( + "application/copy_settings_value", settings_encoded_data + ) + + # Copy as json + json_encoded_data = None + if isinstance(value, (dict, list)): + json_encoded_data = QtCore.QByteArray() + json_stream = QtCore.QDataStream( + json_encoded_data, QtCore.QIODevice.WriteOnly + ) + json_stream.writeQString(json.dumps(value)) + + mime_data.setData("application/json", json_encoded_data) + + # Copy as text + if json_encoded_data is None: + # Store value as string + mime_data.setText(str(value)) + else: + # Store data as json string + mime_data.setText(json.dumps(value, indent=4)) + + QtWidgets.QApplication.clipboard().setMimeData(mime_data) + + action = QtWidgets.QAction("Copy", menu) + return [(action, copy_value)] + + def _paste_value_actions(self, menu): + output = [] + # Allow paste of value only if were copied from this UI + mime_data = QtWidgets.QApplication.clipboard().mimeData() + mime_value = mime_data.data("application/copy_settings_value") + # Skip if there is nothing to do + if not mime_value: + return output + + settings_stream = QtCore.QDataStream( + mime_value, QtCore.QIODevice.ReadOnly + ) + mime_data_value_str = settings_stream.readQString() + mime_data_value = json.loads(mime_data_value_str) + + value = mime_data_value["value"] + path = mime_data_value["path"] + root_key = mime_data_value["root_key"] + + # Try to find matching entity to be able paste values to same spot + # - entity can't by dynamic or in dynamic item + # - must be in same root entity as source copy + # Can't copy system settings <-> project settings + matching_entity = None + if path and root_key == self.entity.root_key: + try: + matching_entity = self.entity.get_entity_from_path(path) + except Exception: + pass + + def _set_entity_value(_entity, _value): + try: + _entity.set(_value) + except Exception: + dialog = QtWidgets.QMessageBox(self) + dialog.setWindowTitle("Value does not match settings schema") + 
dialog.setIcon(QtWidgets.QMessageBox.Warning) + dialog.setText(( + "Pasted value does not seem to match schema of destination" + " settings entity." + )) + dialog.exec_() + + # Simple paste value method + def paste_value(): + _set_entity_value(self.entity, value) + + action = QtWidgets.QAction("Paste", menu) + output.append((action, paste_value)) + + # Paste value to matchin entity + def paste_value_to_path(): + _set_entity_value(matching_entity, value) + + if matching_entity is not None: + action = QtWidgets.QAction("Paste to same place", menu) + output.append((action, paste_value_to_path)) + + return output + def show_actions_menu(self, event=None): if event and event.button() != QtCore.Qt.RightButton: return @@ -144,6 +257,15 @@ class BaseWidget(QtWidgets.QWidget): self._add_to_project_override_action(menu, actions_mapping) self._remove_from_project_override_action(menu, actions_mapping) + ui_actions = [] + ui_actions.extend(self._copy_value_actions(menu)) + ui_actions.extend(self._paste_value_actions(menu)) + if ui_actions: + menu.addSeparator() + for action, callback in ui_actions: + menu.addAction(action) + actions_mapping[action] = callback + if not actions_mapping: action = QtWidgets.QAction("< No action >") actions_mapping[action] = None diff --git a/openpype/tools/settings/settings/categories.py b/openpype/tools/settings/settings/categories.py index 34ab4c464a..0aacd590db 100644 --- a/openpype/tools/settings/settings/categories.py +++ b/openpype/tools/settings/settings/categories.py @@ -11,6 +11,7 @@ from openpype.settings.entities import ( GUIEntity, DictImmutableKeysEntity, DictMutableKeysEntity, + DictConditionalEntity, ListEntity, PathEntity, ListStrictEntity, @@ -35,6 +36,7 @@ from .base import GUIWidget from .list_item_widget import ListWidget from .list_strict_widget import ListStrictWidget from .dict_mutable_widget import DictMutableKeysWidget +from .dict_conditional import DictConditionalWidget from .item_widgets import ( BoolWidget, DictImmutableKeysWidget, @@ -100,6 +102,9 @@ class SettingsCategoryWidget(QtWidgets.QWidget): if isinstance(entity, GUIEntity): return GUIWidget(*args) + elif isinstance(entity, DictConditionalEntity): + return DictConditionalWidget(*args) + elif isinstance(entity, DictImmutableKeysEntity): return DictImmutableKeysWidget(*args) @@ -183,6 +188,12 @@ class SettingsCategoryWidget(QtWidgets.QWidget): footer_widget = QtWidgets.QWidget(configurations_widget) footer_layout = QtWidgets.QHBoxLayout(footer_widget) + refresh_icon = qtawesome.icon("fa.refresh", color="white") + refresh_btn = QtWidgets.QPushButton(footer_widget) + refresh_btn.setIcon(refresh_icon) + + footer_layout.addWidget(refresh_btn, 0) + if self.user_role == "developer": self._add_developer_ui(footer_layout) @@ -205,8 +216,10 @@ class SettingsCategoryWidget(QtWidgets.QWidget): main_layout.addWidget(configurations_widget, 1) save_btn.clicked.connect(self._save) + refresh_btn.clicked.connect(self._on_refresh) self.save_btn = save_btn + self.refresh_btn = refresh_btn self.require_restart_label = require_restart_label self.scroll_widget = scroll_widget self.content_layout = content_layout @@ -220,10 +233,6 @@ class SettingsCategoryWidget(QtWidgets.QWidget): return def _add_developer_ui(self, footer_layout): - refresh_icon = qtawesome.icon("fa.refresh", color="white") - refresh_button = QtWidgets.QPushButton() - refresh_button.setIcon(refresh_icon) - modify_defaults_widget = QtWidgets.QWidget() modify_defaults_checkbox = QtWidgets.QCheckBox(modify_defaults_widget) 
modify_defaults_checkbox.setChecked(self._hide_studio_overrides) @@ -235,10 +244,8 @@ class SettingsCategoryWidget(QtWidgets.QWidget): modify_defaults_layout.addWidget(label_widget) modify_defaults_layout.addWidget(modify_defaults_checkbox) - footer_layout.addWidget(refresh_button, 0) footer_layout.addWidget(modify_defaults_widget, 0) - refresh_button.clicked.connect(self._on_refresh) modify_defaults_checkbox.stateChanged.connect( self._on_modify_defaults ) diff --git a/openpype/tools/settings/settings/color_widget.py b/openpype/tools/settings/settings/color_widget.py index fa0cd2c989..b38b46f3cb 100644 --- a/openpype/tools/settings/settings/color_widget.py +++ b/openpype/tools/settings/settings/color_widget.py @@ -25,7 +25,9 @@ class ColorWidget(InputWidget): self._dialog.open() return - dialog = ColorDialog(self.input_field.color(), self) + dialog = ColorDialog( + self.input_field.color(), self.entity.use_alpha, self + ) self._dialog = dialog dialog.open() @@ -120,12 +122,12 @@ class ColorViewer(QtWidgets.QWidget): class ColorDialog(QtWidgets.QDialog): - def __init__(self, color=None, parent=None): + def __init__(self, color=None, use_alpha=True, parent=None): super(ColorDialog, self).__init__(parent) self.setWindowTitle("Color picker dialog") - picker_widget = ColorPickerWidget(color, self) + picker_widget = ColorPickerWidget(color, use_alpha, self) footer_widget = QtWidgets.QWidget(self) diff --git a/openpype/tools/settings/settings/dict_conditional.py b/openpype/tools/settings/settings/dict_conditional.py new file mode 100644 index 0000000000..da2f53497e --- /dev/null +++ b/openpype/tools/settings/settings/dict_conditional.py @@ -0,0 +1,304 @@ +from Qt import QtWidgets + +from .widgets import ( + ExpandingWidget, + GridLabelWidget +) +from .wrapper_widgets import ( + WrapperWidget, + CollapsibleWrapper, + FormWrapper +) +from .base import BaseWidget +from openpype.tools.settings import CHILD_OFFSET + + +class DictConditionalWidget(BaseWidget): + def create_ui(self): + self.input_fields = [] + + self._content_by_enum_value = {} + self._last_enum_value = None + + self.label_widget = None + self.body_widget = None + self.content_widget = None + self.content_layout = None + + label = None + if self.entity.is_dynamic_item: + self._ui_as_dynamic_item() + + elif self.entity.use_label_wrap: + self._ui_label_wrap() + + else: + self._ui_item_base() + label = self.entity.label + + self._parent_widget_by_entity_id = {} + self._enum_key_by_wrapper_id = {} + self._added_wrapper_ids = set() + + self.content_layout.setColumnStretch(0, 0) + self.content_layout.setColumnStretch(1, 1) + + # Add enum entity to layout mapping + enum_entity = self.entity.enum_entity + self._parent_widget_by_entity_id[enum_entity.id] = self.content_widget + + # Add rest of entities to wrapper mappings + for enum_key, children in self.entity.gui_layout.items(): + parent_widget_by_entity_id = {} + + content_widget = QtWidgets.QWidget(self.content_widget) + content_layout = QtWidgets.QGridLayout(content_widget) + content_layout.setColumnStretch(0, 0) + content_layout.setColumnStretch(1, 1) + content_layout.setContentsMargins(0, 0, 0, 0) + content_layout.setSpacing(5) + + self._content_by_enum_value[enum_key] = { + "widget": content_widget, + "layout": content_layout + } + + self._prepare_entity_layouts( + children, + content_widget, + parent_widget_by_entity_id + ) + for item_id in parent_widget_by_entity_id.keys(): + self._enum_key_by_wrapper_id[item_id] = enum_key + 
self._parent_widget_by_entity_id.update(parent_widget_by_entity_id) + + enum_input_field = self.create_ui_for_entity( + self.category_widget, self.entity.enum_entity, self + ) + self.enum_input_field = enum_input_field + self.input_fields.append(enum_input_field) + + for item_key, children in self.entity.children.items(): + content_widget = self._content_by_enum_value[item_key]["widget"] + row = self.content_layout.rowCount() + self.content_layout.addWidget(content_widget, row, 0, 1, 2) + + for child_obj in children: + self.input_fields.append( + self.create_ui_for_entity( + self.category_widget, child_obj, self + ) + ) + + if self.entity.use_label_wrap and self.content_layout.count() == 0: + self.body_widget.hide_toolbox(True) + + self.entity_widget.add_widget_to_layout(self, label) + + def _prepare_entity_layouts( + self, gui_layout, widget, parent_widget_by_entity_id + ): + for child in gui_layout: + if not isinstance(child, dict): + parent_widget_by_entity_id[child.id] = widget + continue + + if child["type"] == "collapsible-wrap": + wrapper = CollapsibleWrapper(child, widget) + + elif child["type"] == "form": + wrapper = FormWrapper(child, widget) + + else: + raise KeyError( + "Unknown Wrapper type \"{}\"".format(child["type"]) + ) + + parent_widget_by_entity_id[wrapper.id] = widget + + self._prepare_entity_layouts( + child["children"], wrapper, parent_widget_by_entity_id + ) + + def _ui_item_base(self): + self.setObjectName("DictInvisible") + + self.content_widget = self + self.content_layout = QtWidgets.QGridLayout(self) + self.content_layout.setContentsMargins(0, 0, 0, 0) + self.content_layout.setSpacing(5) + + def _ui_as_dynamic_item(self): + content_widget = QtWidgets.QWidget(self) + content_widget.setObjectName("DictAsWidgetBody") + + show_borders = str(int(self.entity.show_borders)) + content_widget.setProperty("show_borders", show_borders) + + label_widget = QtWidgets.QLabel(self.entity.label) + + content_layout = QtWidgets.QGridLayout(content_widget) + content_layout.setContentsMargins(5, 5, 5, 5) + + main_layout = QtWidgets.QHBoxLayout(self) + main_layout.setContentsMargins(0, 0, 0, 0) + main_layout.setSpacing(5) + main_layout.addWidget(content_widget) + + self.label_widget = label_widget + self.content_widget = content_widget + self.content_layout = content_layout + + def _ui_label_wrap(self): + content_widget = QtWidgets.QWidget(self) + content_widget.setObjectName("ContentWidget") + + if self.entity.highlight_content: + content_state = "hightlighted" + bottom_margin = 5 + else: + content_state = "" + bottom_margin = 0 + content_widget.setProperty("content_state", content_state) + content_layout_margins = (CHILD_OFFSET, 5, 0, bottom_margin) + + body_widget = ExpandingWidget(self.entity.label, self) + label_widget = body_widget.label_widget + body_widget.set_content_widget(content_widget) + + content_layout = QtWidgets.QGridLayout(content_widget) + content_layout.setContentsMargins(*content_layout_margins) + + main_layout = QtWidgets.QHBoxLayout(self) + main_layout.setContentsMargins(0, 0, 0, 0) + main_layout.setSpacing(0) + main_layout.addWidget(body_widget) + + self.label_widget = label_widget + self.body_widget = body_widget + self.content_widget = content_widget + self.content_layout = content_layout + + if self.entity.collapsible: + if not self.entity.collapsed: + body_widget.toggle_content() + else: + body_widget.hide_toolbox(hide_content=False) + + def add_widget_to_layout(self, widget, label=None): + if not widget.entity: + map_id = widget.id + else: + map_id = 
widget.entity.id + + content_widget = self.content_widget + content_layout = self.content_layout + if map_id != self.entity.enum_entity.id: + enum_value = self._enum_key_by_wrapper_id[map_id] + content_widget = self._content_by_enum_value[enum_value]["widget"] + content_layout = self._content_by_enum_value[enum_value]["layout"] + + wrapper = self._parent_widget_by_entity_id[map_id] + if wrapper is not content_widget: + wrapper.add_widget_to_layout(widget, label) + if wrapper.id not in self._added_wrapper_ids: + self.add_widget_to_layout(wrapper) + self._added_wrapper_ids.add(wrapper.id) + return + + row = content_layout.rowCount() + if not label or isinstance(widget, WrapperWidget): + content_layout.addWidget(widget, row, 0, 1, 2) + else: + label_widget = GridLabelWidget(label, widget) + label_widget.input_field = widget + widget.label_widget = label_widget + content_layout.addWidget(label_widget, row, 0, 1, 1) + content_layout.addWidget(widget, row, 1, 1, 1) + + def set_entity_value(self): + for input_field in self.input_fields: + input_field.set_entity_value() + + self._on_entity_change() + + def hierarchical_style_update(self): + self.update_style() + for input_field in self.input_fields: + input_field.hierarchical_style_update() + + def update_style(self): + if not self.body_widget and not self.label_widget: + return + + if self.entity.group_item: + group_item = self.entity.group_item + has_unsaved_changes = group_item.has_unsaved_changes + has_project_override = group_item.has_project_override + has_studio_override = group_item.has_studio_override + else: + has_unsaved_changes = self.entity.has_unsaved_changes + has_project_override = self.entity.has_project_override + has_studio_override = self.entity.has_studio_override + + style_state = self.get_style_state( + self.is_invalid, + has_unsaved_changes, + has_project_override, + has_studio_override + ) + if self._style_state == style_state: + return + + self._style_state = style_state + + if self.body_widget: + if style_state: + child_style_state = "child-{}".format(style_state) + else: + child_style_state = "" + + self.body_widget.side_line_widget.setProperty( + "state", child_style_state + ) + self.body_widget.side_line_widget.style().polish( + self.body_widget.side_line_widget + ) + + # There is nothing to care if there is no label + if not self.label_widget: + return + + # Don't change label if is not group or under group item + if not self.entity.is_group and not self.entity.group_item: + return + + self.label_widget.setProperty("state", style_state) + self.label_widget.style().polish(self.label_widget) + + def _on_entity_change(self): + enum_value = self.enum_input_field.entity.value + if enum_value == self._last_enum_value: + return + + self._last_enum_value = enum_value + for item_key, content in self._content_by_enum_value.items(): + widget = content["widget"] + widget.setVisible(item_key == enum_value) + + @property + def is_invalid(self): + return self._is_invalid or self._child_invalid + + @property + def _child_invalid(self): + for input_field in self.input_fields: + if input_field.is_invalid: + return True + return False + + def get_invalid(self): + invalid = [] + for input_field in self.input_fields: + invalid.extend(input_field.get_invalid()) + return invalid diff --git a/openpype/tools/settings/settings/item_widgets.py b/openpype/tools/settings/settings/item_widgets.py index b23372e9ac..82afbb0a13 100644 --- a/openpype/tools/settings/settings/item_widgets.py +++ b/openpype/tools/settings/settings/item_widgets.py @@ 
-145,7 +145,7 @@ class DictImmutableKeysWidget(BaseWidget):
         self.content_widget = content_widget
         self.content_layout = content_layout

-        if len(self.input_fields) == 1 and self.checkbox_widget:
+        if len(self.input_fields) == 1 and self.checkbox_child:
             body_widget.hide_toolbox(hide_content=True)

         elif self.entity.collapsible:
diff --git a/openpype/tools/standalonepublish/app.py b/openpype/tools/standalonepublish/app.py
index 169abe530a..81a53c52b8 100644
--- a/openpype/tools/standalonepublish/app.py
+++ b/openpype/tools/standalonepublish/app.py
@@ -34,6 +34,13 @@ class Window(QtWidgets.QDialog):
         self._db = AvalonMongoDB()
         self._db.install()

+        try:
+            settings = QtCore.QSettings("pypeclub", "StandalonePublisher")
+        except Exception:
+            settings = None
+
+        self._settings = settings
+
         self.pyblish_paths = pyblish_paths

         self.setWindowTitle("Standalone Publish")
@@ -44,7 +51,7 @@ class Window(QtWidgets.QDialog):
         self.valid_parent = False

         # assets widget
-        widget_assets = AssetWidget(dbcon=self._db, parent=self)
+        widget_assets = AssetWidget(self._db, settings, self)

         # family widget
         widget_family = FamilyWidget(dbcon=self._db, parent=self)
diff --git a/openpype/tools/standalonepublish/widgets/widget_asset.py b/openpype/tools/standalonepublish/widgets/widget_asset.py
index 4680e88344..c39d71b055 100644
--- a/openpype/tools/standalonepublish/widgets/widget_asset.py
+++ b/openpype/tools/standalonepublish/widgets/widget_asset.py
@@ -127,11 +127,12 @@ class AssetWidget(QtWidgets.QWidget):
     current_changed = QtCore.Signal()  # on view current index change
     task_changed = QtCore.Signal()

-    def __init__(self, dbcon, parent=None):
+    def __init__(self, dbcon, settings, parent=None):
         super(AssetWidget, self).__init__(parent=parent)
         self.setContentsMargins(0, 0, 0, 0)

         self.dbcon = dbcon
+        self._settings = settings

         layout = QtWidgets.QVBoxLayout()
         layout.setContentsMargins(0, 0, 0, 0)
@@ -139,6 +140,10 @@ class AssetWidget(QtWidgets.QWidget):

         # Project
         self.combo_projects = QtWidgets.QComboBox()
+        # Change delegate so stylesheets are applied
+        project_delegate = QtWidgets.QStyledItemDelegate(self.combo_projects)
+        self.combo_projects.setItemDelegate(project_delegate)
+
         self._set_projects()
         self.combo_projects.currentTextChanged.connect(self.on_project_change)
         # Tree View
@@ -198,6 +203,7 @@ class AssetWidget(QtWidgets.QWidget):

         self.selection_changed.connect(self._refresh_tasks)

+        self.project_delegate = project_delegate
         self.task_view = task_view
         self.task_model = task_model
         self.refreshButton = refresh
@@ -237,15 +243,59 @@ class AssetWidget(QtWidgets.QWidget):
             output.extend(self.get_parents(parent))
         return output

+    def _get_last_projects(self):
+        if not self._settings:
+            return []
+
+        project_names = []
+        for project_name in self._settings.value("projects", "").split("|"):
+            if project_name:
+                project_names.append(project_name)
+        return project_names
+
+    def _add_last_project(self, project_name):
+        if not self._settings:
+            return
+
+        last_projects = []
+        for _project_name in self._settings.value("projects", "").split("|"):
+            if _project_name:
+                last_projects.append(_project_name)
+
+        if project_name in last_projects:
+            last_projects.remove(project_name)
+
+        last_projects.insert(0, project_name)
+        while len(last_projects) > 5:
+            last_projects.pop(-1)
+
+        self._settings.setValue("projects", "|".join(last_projects))
+
     def _set_projects(self):
-        projects = list()
+        project_names = list()
         for project in self.dbcon.projects():
-            projects.append(project['name'])
+            project_name = project.get("name")
+            if project_name:
+
project_names.append(project_name) self.combo_projects.clear() - if len(projects) > 0: - self.combo_projects.addItems(projects) - self.dbcon.Session["AVALON_PROJECT"] = projects[0] + + if not project_names: + return + + sorted_project_names = list(sorted(project_names)) + self.combo_projects.addItems(list(sorted(sorted_project_names))) + + last_project = sorted_project_names[0] + for project_name in self._get_last_projects(): + if project_name in sorted_project_names: + last_project = project_name + break + + index = sorted_project_names.index(last_project) + self.combo_projects.setCurrentIndex(index) + + self.dbcon.Session["AVALON_PROJECT"] = last_project def on_project_change(self): projects = list() @@ -254,6 +304,7 @@ class AssetWidget(QtWidgets.QWidget): project_name = self.combo_projects.currentText() if project_name in projects: self.dbcon.Session["AVALON_PROJECT"] = project_name + self._add_last_project(project_name) self.project_changed.emit(project_name) diff --git a/openpype/tools/tray/pype_info_widget.py b/openpype/tools/tray/pype_info_widget.py index f95a31f7c2..2965463c37 100644 --- a/openpype/tools/tray/pype_info_widget.py +++ b/openpype/tools/tray/pype_info_widget.py @@ -28,6 +28,8 @@ class EnvironmentsView(QtWidgets.QTreeView): def __init__(self, parent=None): super(EnvironmentsView, self).__init__(parent) + self._scroll_enabled = False + model = QtGui.QStandardItemModel() env = os.environ.copy() @@ -112,8 +114,11 @@ class EnvironmentsView(QtWidgets.QTreeView): else: return super(EnvironmentsView, self).keyPressEvent(event) + def set_scroll_enabled(self, value): + self._scroll_enabled = value + def wheelEvent(self, event): - if not self.hasFocus(): + if not self._scroll_enabled: event.ignore() return return super(EnvironmentsView, self).wheelEvent(event) @@ -200,9 +205,13 @@ class CollapsibleWidget(QtWidgets.QWidget): class PypeInfoWidget(QtWidgets.QWidget): + _resized = QtCore.Signal() + def __init__(self, parent=None): super(PypeInfoWidget, self).__init__(parent) + self._scroll_at_bottom = False + self.setStyleSheet(style.load_stylesheet()) icon = QtGui.QIcon(resources.pype_icon_filepath()) @@ -219,11 +228,39 @@ class PypeInfoWidget(QtWidgets.QWidget): main_layout.addWidget(scroll_area, 1) main_layout.addWidget(self._create_btns_section(), 0) + scroll_area.verticalScrollBar().valueChanged.connect( + self._on_area_scroll + ) + self._resized.connect(self._on_resize) self.resize(740, 540) self.scroll_area = scroll_area self.info_widget = info_widget + def _on_area_scroll(self, value): + vertical_bar = self.scroll_area.verticalScrollBar() + self._scroll_at_bottom = vertical_bar.maximum() == vertical_bar.value() + self.info_widget.set_scroll_enabled(self._scroll_at_bottom) + + def _on_resize(self): + if not self._scroll_at_bottom: + return + vertical_bar = self.scroll_area.verticalScrollBar() + vertical_bar.setValue(vertical_bar.maximum()) + + def resizeEvent(self, event): + super(PypeInfoWidget, self).resizeEvent(event) + self._resized.emit() + self.info_widget.set_content_height( + self.scroll_area.height() + ) + + def showEvent(self, event): + super(PypeInfoWidget, self).showEvent(event) + self.info_widget.set_content_height( + self.scroll_area.height() + ) + def _create_btns_section(self): btns_widget = QtWidgets.QWidget(self) btns_layout = QtWidgets.QHBoxLayout(btns_widget) @@ -282,6 +319,8 @@ class PypeInfoSubWidget(QtWidgets.QWidget): def __init__(self, parent=None): super(PypeInfoSubWidget, self).__init__(parent) + self.env_view = None + main_layout = 
QtWidgets.QVBoxLayout(self) main_layout.setContentsMargins(0, 0, 0, 0) main_layout.setAlignment(QtCore.Qt.AlignTop) @@ -293,6 +332,14 @@ class PypeInfoSubWidget(QtWidgets.QWidget): main_layout.addWidget(self._create_separator(), 0) main_layout.addWidget(self._create_environ_widget(), 1) + def set_content_height(self, height): + if self.env_view: + self.env_view.setMinimumHeight(height) + + def set_scroll_enabled(self, value): + if self.env_view: + self.env_view.set_scroll_enabled(value) + def _create_separator(self): separator_widget = QtWidgets.QWidget(self) separator_widget.setObjectName("Separator") @@ -369,9 +416,10 @@ class PypeInfoSubWidget(QtWidgets.QWidget): env_view = EnvironmentsView(env_widget) env_view.setMinimumHeight(300) - env_widget.set_content_widget(env_view) + self.env_view = env_view + return env_widget def _create_openpype_info_widget(self): diff --git a/openpype/tools/workfiles/app.py b/openpype/tools/workfiles/app.py index c79e55a143..d567e26d74 100644 --- a/openpype/tools/workfiles/app.py +++ b/openpype/tools/workfiles/app.py @@ -944,10 +944,8 @@ class Window(QtWidgets.QMainWindow): split_widget.addWidget(tasks_widget) split_widget.addWidget(files_widget) split_widget.addWidget(side_panel) - split_widget.setStretchFactor(0, 1) - split_widget.setStretchFactor(1, 1) - split_widget.setStretchFactor(2, 3) - split_widget.setStretchFactor(3, 1) + split_widget.setSizes([255, 160, 455, 175]) + body_layout.addWidget(split_widget) # Add top margin for tasks to align it visually with files as @@ -976,7 +974,7 @@ class Window(QtWidgets.QMainWindow): # Force focus on the open button by default, required for Houdini. files_widget.btn_open.setFocus() - self.resize(1000, 600) + self.resize(1200, 600) def keyPressEvent(self, event): """Custom keyPressEvent. 
diff --git a/openpype/version.py b/openpype/version.py index d6d6a4544b..bbf93baec0 100644 --- a/openpype/version.py +++ b/openpype/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring Pype version.""" -__version__ = "3.1.0-nightly.2" +__version__ = "3.3.0-nightly.3" diff --git a/openpype/widgets/color_widgets/color_inputs.py b/openpype/widgets/color_widgets/color_inputs.py index eda8c618f1..6f5d4baa02 100644 --- a/openpype/widgets/color_widgets/color_inputs.py +++ b/openpype/widgets/color_widgets/color_inputs.py @@ -4,35 +4,6 @@ from Qt import QtWidgets, QtCore, QtGui from .color_view import draw_checkerboard_tile -slide_style = """ -QSlider::groove:horizontal { - background: qlineargradient( - x1: 0, y1: 0, x2: 1, y2: 0, stop: 0 #000, stop: 1 #fff - ); - height: 8px; - border-radius: 4px; -} - -QSlider::handle:horizontal { - background: qlineargradient( - x1:0, y1:0, x2:1, y2:1, stop:0 #ddd, stop:1 #bbb - ); - border: 1px solid #777; - width: 8px; - margin-top: -1px; - margin-bottom: -1px; - border-radius: 4px; -} - -QSlider::handle:horizontal:hover { - background: qlineargradient( - x1:0, y1:0, x2:1, y2:1, stop:0 #eee, stop:1 #ddd - ); - border: 1px solid #444;ff - border-radius: 4px; -}""" - - class AlphaSlider(QtWidgets.QSlider): def __init__(self, *args, **kwargs): super(AlphaSlider, self).__init__(*args, **kwargs) @@ -80,7 +51,7 @@ class AlphaSlider(QtWidgets.QSlider): painter.fillRect(event.rect(), QtCore.Qt.transparent) - painter.setRenderHint(QtGui.QPainter.SmoothPixmapTransform) + painter.setRenderHint(QtGui.QPainter.HighQualityAntialiasing) rect = self.style().subControlRect( QtWidgets.QStyle.CC_Slider, opt, @@ -135,19 +106,8 @@ class AlphaSlider(QtWidgets.QSlider): painter.save() - gradient = QtGui.QRadialGradient() - radius = handle_rect.height() / 2 - center_x = handle_rect.width() / 2 + handle_rect.x() - center_y = handle_rect.height() - gradient.setCenter(center_x, center_y) - gradient.setCenterRadius(radius) - gradient.setFocalPoint(center_x, center_y) - - gradient.setColorAt(0.9, QtGui.QColor(127, 127, 127)) - gradient.setColorAt(1, QtCore.Qt.transparent) - painter.setPen(QtCore.Qt.NoPen) - painter.setBrush(gradient) + painter.setBrush(QtGui.QColor(127, 127, 127)) painter.drawEllipse(handle_rect) painter.restore() diff --git a/openpype/widgets/color_widgets/color_picker_widget.py b/openpype/widgets/color_widgets/color_picker_widget.py index 81ec1f87aa..228d35a77c 100644 --- a/openpype/widgets/color_widgets/color_picker_widget.py +++ b/openpype/widgets/color_widgets/color_picker_widget.py @@ -17,19 +17,12 @@ from .color_inputs import ( class ColorPickerWidget(QtWidgets.QWidget): color_changed = QtCore.Signal(QtGui.QColor) - def __init__(self, color=None, parent=None): + def __init__(self, color=None, use_alpha=True, parent=None): super(ColorPickerWidget, self).__init__(parent) # Color triangle color_triangle = QtColorTriangle(self) - alpha_slider_proxy = QtWidgets.QWidget(self) - alpha_slider = AlphaSlider(QtCore.Qt.Horizontal, alpha_slider_proxy) - - alpha_slider_layout = QtWidgets.QHBoxLayout(alpha_slider_proxy) - alpha_slider_layout.setContentsMargins(5, 5, 5, 5) - alpha_slider_layout.addWidget(alpha_slider, 1) - # Eye picked widget pick_widget = PickScreenColorWidget() pick_widget.setMaximumHeight(50) @@ -47,8 +40,6 @@ class ColorPickerWidget(QtWidgets.QWidget): color_view = ColorViewer(self) color_view.setMaximumHeight(50) - alpha_inputs = AlphaInputs(self) - color_inputs_color = QtGui.QColor() col_inputs_by_label = [ ("HEX", 
HEXInputs(color_inputs_color, self)), @@ -58,6 +49,7 @@ class ColorPickerWidget(QtWidgets.QWidget): ] layout = QtWidgets.QGridLayout(self) + empty_col = 1 label_col = empty_col + 1 input_col = label_col + 1 @@ -65,6 +57,9 @@ class ColorPickerWidget(QtWidgets.QWidget): empty_widget.setFixedWidth(10) layout.addWidget(empty_widget, 0, empty_col) + layout.setColumnStretch(0, 1) + layout.setColumnStretch(input_col, 1) + row = 0 layout.addWidget(btn_pick_color, row, label_col) layout.addWidget(color_view, row, input_col) @@ -84,20 +79,41 @@ class ColorPickerWidget(QtWidgets.QWidget): layout.setRowStretch(row, 1) row += 1 - layout.addWidget(alpha_slider_proxy, row, 0) + alpha_label = None + alpha_slider_proxy = None + alpha_slider = None + alpha_inputs = None + if not use_alpha: + color.setAlpha(255) + else: + alpha_inputs = AlphaInputs(self) + alpha_label = QtWidgets.QLabel("Alpha", self) + alpha_slider_proxy = QtWidgets.QWidget(self) + alpha_slider = AlphaSlider( + QtCore.Qt.Horizontal, alpha_slider_proxy + ) + + alpha_slider_layout = QtWidgets.QHBoxLayout(alpha_slider_proxy) + alpha_slider_layout.setContentsMargins(5, 5, 5, 5) + alpha_slider_layout.addWidget(alpha_slider, 1) + + layout.addWidget(alpha_slider_proxy, row, 0) + + layout.addWidget(alpha_label, row, label_col) + layout.addWidget(alpha_inputs, row, input_col) + + row += 1 - layout.addWidget(QtWidgets.QLabel("Alpha", self), row, label_col) - layout.addWidget(alpha_inputs, row, input_col) - row += 1 layout.setRowStretch(row, 1) color_view.set_color(color_triangle.cur_color) color_triangle.color_changed.connect(self.triangle_color_changed) - alpha_slider.valueChanged.connect(self._on_alpha_slider_change) pick_widget.color_selected.connect(self.on_color_change) - alpha_inputs.alpha_changed.connect(self._on_alpha_inputs_changed) btn_pick_color.released.connect(self.pick_color) + if alpha_slider: + alpha_slider.valueChanged.connect(self._on_alpha_slider_change) + alpha_inputs.alpha_changed.connect(self._on_alpha_inputs_changed) self.color_input_fields = color_input_fields self.color_inputs_color = color_inputs_color @@ -131,7 +147,8 @@ class ColorPickerWidget(QtWidgets.QWidget): return self.color_view.color() def set_color(self, color): - self.alpha_inputs.set_alpha(color.alpha()) + if self.alpha_inputs: + self.alpha_inputs.set_alpha(color.alpha()) self.on_color_change(color) def pick_color(self): @@ -163,10 +180,10 @@ class ColorPickerWidget(QtWidgets.QWidget): def alpha_changed(self, value): self.color_view.set_alpha(value) - if self.alpha_slider.value() != value: + if self.alpha_slider and self.alpha_slider.value() != value: self.alpha_slider.setValue(value) - if self.alpha_inputs.alpha_value != value: + if self.alpha_inputs and self.alpha_inputs.alpha_value != value: self.alpha_inputs.set_alpha(value) def _on_alpha_inputs_changed(self, value): diff --git a/openpype/widgets/color_widgets/color_triangle.py b/openpype/widgets/color_widgets/color_triangle.py index d4db175d84..f4a86c4fa5 100644 --- a/openpype/widgets/color_widgets/color_triangle.py +++ b/openpype/widgets/color_widgets/color_triangle.py @@ -241,7 +241,11 @@ class QtColorTriangle(QtWidgets.QWidget): # Blit the static generated background with the hue gradient onto # the double buffer. 
- buf = QtGui.QImage(self.bg_image.copy()) + buf = QtGui.QImage( + self.bg_image.width(), + self.bg_image.height(), + QtGui.QImage.Format_RGB32 + ) # Draw the trigon # Find the color with only the hue, and max value and saturation @@ -254,9 +258,21 @@ class QtColorTriangle(QtWidgets.QWidget): ) # Slow step: convert the image to a pixmap - pix = QtGui.QPixmap.fromImage(buf) + pix = self.bg_image.copy() pix_painter = QtGui.QPainter(pix) - pix_painter.setRenderHint(QtGui.QPainter.Antialiasing) + + pix_painter.setRenderHint(QtGui.QPainter.HighQualityAntialiasing) + + trigon_path = QtGui.QPainterPath() + trigon_path.moveTo(self.point_a) + trigon_path.lineTo(self.point_b) + trigon_path.lineTo(self.point_c) + trigon_path.closeSubpath() + pix_painter.setClipPath(trigon_path) + + pix_painter.drawImage(0, 0, buf) + + pix_painter.setClipping(False) # Draw an outline of the triangle pix_painter.setPen(self._triangle_outline_pen) @@ -724,27 +740,37 @@ class QtColorTriangle(QtWidgets.QWidget): lx = leftX[y] rx = rightX[y] + # if the xdist is 0, don't draw anything. + xdist = rx - lx + if xdist == 0.0: + continue + lxi = int(floor(lx)) rxi = int(floor(rx)) rc = rightColors[y] lc = leftColors[y] - # if the xdist is 0, don't draw anything. - xdist = rx - lx - if xdist != 0.0: - r = lc.r - g = lc.g - b = lc.b - rdelta = (rc.r - r) / xdist - gdelta = (rc.g - g) / xdist - bdelta = (rc.b - b) / xdist + r = lc.r + g = lc.g + b = lc.b + rdelta = (rc.r - r) / xdist + gdelta = (rc.g - g) / xdist + bdelta = (rc.b - b) / xdist - # Inner loop 2. Draws the line from left to right. - for x in range(lxi, rxi + 1): - buf.setPixel(x, y, QtGui.qRgb(int(r), int(g), int(b))) - r += rdelta - g += gdelta - b += bdelta + # Draw 2 more pixels on left side for smoothing + for x in range(lxi - 2, lxi): + buf.setPixel(x, y, QtGui.qRgb(int(r), int(g), int(b))) + + # Inner loop 2. Draws the line from left to right. 
+ for x in range(lxi, rxi): + buf.setPixel(x, y, QtGui.qRgb(int(r), int(g), int(b))) + r += rdelta + g += gdelta + b += bdelta + + # Draw 2 more pixels on right side for smoothing + for x in range(rxi, rxi + 3): + buf.setPixel(x, y, QtGui.qRgb(int(r), int(g), int(b))) def _radius_at(self, pos, rect): mousexdist = pos.x() - float(rect.center().x()) diff --git a/openpype/widgets/color_widgets/color_view.py b/openpype/widgets/color_widgets/color_view.py index 8644281a1d..b5fce28894 100644 --- a/openpype/widgets/color_widgets/color_view.py +++ b/openpype/widgets/color_widgets/color_view.py @@ -5,6 +5,8 @@ def draw_checkerboard_tile(piece_size=None, color_1=None, color_2=None): if piece_size is None: piece_size = 7 + # Make sure piece size is not float + piece_size = int(piece_size) if color_1 is None: color_1 = QtGui.QColor(188, 188, 188) diff --git a/poetry.lock b/poetry.lock index 0951427449..aad1898983 100644 --- a/poetry.lock +++ b/poetry.lock @@ -11,7 +11,7 @@ develop = false type = "git" url = "https://github.com/pypeclub/acre.git" reference = "master" -resolved_reference = "9bf19573acb9328a3e5f5de96c619060177795cf" +resolved_reference = "5a812c6dcfd3aada87adb49be98c548c894d6566" [[package]] name = "aiohttp" @@ -307,7 +307,7 @@ trio = ["trio (>=0.14.0)", "sniffio (>=1.1)"] [[package]] name = "docutils" -version = "0.17.1" +version = "0.16" description = "Docutils -- Python Documentation Utilities" category = "dev" optional = false @@ -398,7 +398,7 @@ typing-extensions = {version = ">=3.7.4.0", markers = "python_version < \"3.8\"" [[package]] name = "google-api-core" -version = "1.29.0" +version = "1.30.0" description = "Google API client core library" category = "main" optional = false @@ -436,7 +436,7 @@ uritemplate = ">=3.0.0,<4dev" [[package]] name = "google-auth" -version = "1.30.1" +version = "1.31.0" description = "Google Authentication Library" category = "main" optional = false @@ -449,7 +449,7 @@ rsa = {version = ">=3.1.4,<5", markers = "python_version >= \"3.6\""} six = ">=1.9.0" [package.extras] -aiohttp = ["aiohttp (>=3.6.2,<4.0.0dev)"] +aiohttp = ["requests (>=2.20.0,<3.0.0dev)", "aiohttp (>=3.6.2,<4.0.0dev)"] pyopenssl = ["pyopenssl (>=20.0.0)"] reauth = ["pyu2f (>=0.1.5)"] @@ -509,7 +509,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" [[package]] name = "importlib-metadata" -version = "4.4.0" +version = "4.5.0" description = "Read metadata from Python packages" category = "main" optional = false @@ -759,7 +759,7 @@ python-versions = "*" [[package]] name = "protobuf" -version = "3.17.2" +version = "3.17.3" description = "Protocol Buffers" category = "main" optional = false @@ -904,7 +904,7 @@ six = "*" [[package]] name = "pyobjc-core" -version = "7.2" +version = "7.3" description = "Python<->ObjC Interoperability Module" category = "main" optional = false @@ -912,26 +912,26 @@ python-versions = ">=3.6" [[package]] name = "pyobjc-framework-cocoa" -version = "7.2" +version = "7.3" description = "Wrappers for the Cocoa frameworks on macOS" category = "main" optional = false python-versions = ">=3.6" [package.dependencies] -pyobjc-core = ">=7.2" +pyobjc-core = ">=7.3" [[package]] name = "pyobjc-framework-quartz" -version = "7.2" +version = "7.3" description = "Wrappers for the Quartz frameworks on macOS" category = "main" optional = false python-versions = ">=3.6" [package.dependencies] -pyobjc-core = ">=7.2" -pyobjc-framework-Cocoa = ">=7.2" +pyobjc-core = ">=7.3" +pyobjc-framework-Cocoa = ">=7.3" [[package]] name = "pyparsing" @@ -1161,6 +1161,18 @@ 
category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +[[package]] +name = "slack-sdk" +version = "3.6.0" +description = "The Slack API Platform SDK for Python" +category = "main" +optional = false +python-versions = ">=3.6.0" + +[package.extras] +optional = ["aiodns (>1.0)", "aiohttp (>=3.7.3,<4)", "boto3 (<=2)", "SQLAlchemy (>=1,<2)", "websockets (>=9.1,<10)", "websocket-client (>=0.57,<1)"] +testing = ["pytest (>=5.4,<6)", "pytest-asyncio (<1)", "Flask-Sockets (>=0.2,<1)", "pytest-cov (>=2,<3)", "codecov (>=2,<3)", "flake8 (>=3,<4)", "black (==21.5b1)", "psutil (>=5,<6)", "databases (>=0.3)"] + [[package]] name = "smmap" version = "4.0.0" @@ -1230,13 +1242,14 @@ sphinx = "*" [[package]] name = "sphinx-rtd-theme" -version = "0.5.1" +version = "0.5.2" description = "Read the Docs theme for Sphinx" category = "dev" optional = false python-versions = "*" [package.dependencies] +docutils = "<0.17" sphinx = "*" [package.extras] @@ -1453,7 +1466,7 @@ testing = ["pytest (>=4.6)", "pytest-checkdocs (>=1.2.3)", "pytest-flake8", "pyt [metadata] lock-version = "1.1" python-versions = "3.7.*" -content-hash = "abfb3fda5422d9119fb942e66d82fad5c7fae46d0ed240876c3a61333e15febe" +content-hash = "8875d530ae66f9763b5b0cb84d9d35edc184ef5c141b63d38bf1ff5a1226e556" [metadata.files] acre = [] @@ -1714,8 +1727,8 @@ dnspython = [ {file = "dnspython-2.1.0.zip", hash = "sha256:e4a87f0b573201a0f3727fa18a516b055fd1107e0e5477cded4a2de497df1dd4"}, ] docutils = [ - {file = "docutils-0.17.1-py2.py3-none-any.whl", hash = "sha256:cf316c8370a737a022b72b56874f6602acf974a37a9fba42ec2876387549fc61"}, - {file = "docutils-0.17.1.tar.gz", hash = "sha256:686577d2e4c32380bb50cbb22f575ed742d58168cee37e99117a854bcd88f125"}, + {file = "docutils-0.16-py2.py3-none-any.whl", hash = "sha256:0c5b78adfbf7762415433f5515cd5c9e762339e23369dbe8000d84a4bf4ab3af"}, + {file = "docutils-0.16.tar.gz", hash = "sha256:c2de3a60e9e7d07be26b7f2b00ca0309c207e06c100f9cc2a94931fc75a478fc"}, ] enlighten = [ {file = "enlighten-1.10.1-py2.py3-none-any.whl", hash = "sha256:3d6c3eec8cf3eb626ee7b65eddc1b3e904d01f4547a2b9fe7f1da8892a0297e8"}, @@ -1744,16 +1757,16 @@ gitpython = [ {file = "GitPython-3.1.17.tar.gz", hash = "sha256:ee24bdc93dce357630764db659edaf6b8d664d4ff5447ccfeedd2dc5c253f41e"}, ] google-api-core = [ - {file = "google-api-core-1.29.0.tar.gz", hash = "sha256:dbe885011111a9afd9ea9a347db6a9c608e82e5c7648c1f7fabf2b4079a579b5"}, - {file = "google_api_core-1.29.0-py2.py3-none-any.whl", hash = "sha256:f83118319d2a28806ec3399671cbe4af5351bd0dac54c40a0395da6162ebe324"}, + {file = "google-api-core-1.30.0.tar.gz", hash = "sha256:0724d354d394b3d763bc10dfee05807813c5210f0bd9b8e2ddf6b6925603411c"}, + {file = "google_api_core-1.30.0-py2.py3-none-any.whl", hash = "sha256:92cd9e9f366e84bfcf2524e34d2dc244906c645e731962617ba620da1620a1e0"}, ] google-api-python-client = [ {file = "google-api-python-client-1.12.8.tar.gz", hash = "sha256:f3b9684442eec2cfe9f9bb48e796ef919456b82142c7528c5fd527e5224f08bb"}, {file = "google_api_python_client-1.12.8-py2.py3-none-any.whl", hash = "sha256:3c4c4ca46b5c21196bec7ee93453443e477d82cbfa79234d1ce0645f81170eaf"}, ] google-auth = [ - {file = "google-auth-1.30.1.tar.gz", hash = "sha256:044d81b1e58012f8ebc71cc134e191c1fa312f543f1fbc99973afe28c25e3228"}, - {file = "google_auth-1.30.1-py2.py3-none-any.whl", hash = "sha256:b3ca7a8ff9ab3bdefee3ad5aefb11fc6485423767eee016f5942d8e606ca23fb"}, + {file = "google-auth-1.31.0.tar.gz", hash = 
"sha256:154f7889c5d679a6f626f36adb12afbd4dbb0a9a04ec575d989d6ba79c4fd65e"}, + {file = "google_auth-1.31.0-py2.py3-none-any.whl", hash = "sha256:6d47c79b5d09fbc7e8355fd9594cc4cf65fdde5d401c63951eaac4baa1ba2ae1"}, ] google-auth-httplib2 = [ {file = "google-auth-httplib2-0.1.0.tar.gz", hash = "sha256:a07c39fd632becacd3f07718dfd6021bf396978f03ad3ce4321d060015cc30ac"}, @@ -1776,8 +1789,8 @@ imagesize = [ {file = "imagesize-1.2.0.tar.gz", hash = "sha256:b1f6b5a4eab1f73479a50fb79fcf729514a900c341d8503d62a62dbc4127a2b1"}, ] importlib-metadata = [ - {file = "importlib_metadata-4.4.0-py3-none-any.whl", hash = "sha256:960d52ba7c21377c990412aca380bf3642d734c2eaab78a2c39319f67c6a5786"}, - {file = "importlib_metadata-4.4.0.tar.gz", hash = "sha256:e592faad8de1bda9fe920cf41e15261e7131bcf266c30306eec00e8e225c1dd5"}, + {file = "importlib_metadata-4.5.0-py3-none-any.whl", hash = "sha256:833b26fb89d5de469b24a390e9df088d4e52e4ba33b01dc5e0e4f41b81a16c00"}, + {file = "importlib_metadata-4.5.0.tar.gz", hash = "sha256:b142cc1dd1342f31ff04bb7d022492b09920cb64fed867cd3ea6f80fe3ebd139"}, ] iniconfig = [ {file = "iniconfig-1.1.1-py2.py3-none-any.whl", hash = "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3"}, @@ -1963,6 +1976,7 @@ pillow = [ {file = "Pillow-8.2.0-pp37-pypy37_pp73-manylinux2010_i686.whl", hash = "sha256:aac00e4bc94d1b7813fe882c28990c1bc2f9d0e1aa765a5f2b516e8a6a16a9e4"}, {file = "Pillow-8.2.0-pp37-pypy37_pp73-manylinux2010_x86_64.whl", hash = "sha256:22fd0f42ad15dfdde6c581347eaa4adb9a6fc4b865f90b23378aa7914895e120"}, {file = "Pillow-8.2.0-pp37-pypy37_pp73-win32.whl", hash = "sha256:e98eca29a05913e82177b3ba3d198b1728e164869c613d76d0de4bde6768a50e"}, + {file = "Pillow-8.2.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:8b56553c0345ad6dcb2e9b433ae47d67f95fc23fe28a0bde15a120f25257e291"}, {file = "Pillow-8.2.0.tar.gz", hash = "sha256:a787ab10d7bb5494e5f76536ac460741788f1fbce851068d73a87ca7c35fc3e1"}, ] pluggy = [ @@ -1974,29 +1988,29 @@ prefixed = [ {file = "prefixed-0.3.2.tar.gz", hash = "sha256:ca48277ba5fa8346dd4b760847da930c7b84416387c39e93affef086add2c029"}, ] protobuf = [ - {file = "protobuf-3.17.2-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:c49e673436e24e925022c090a98905cfe88d056cb5dde67a8e20ae339acd8140"}, - {file = "protobuf-3.17.2-cp27-cp27mu-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:4e62be28dcaab52c7e8e8e3fb9a7005dec6374e698f3b22d79276d95c13151e5"}, - {file = "protobuf-3.17.2-cp35-cp35m-macosx_10_9_intel.whl", hash = "sha256:c2038dec269b65683f16057886c09ca7526471793029bdd43259282e1e0fb668"}, - {file = "protobuf-3.17.2-cp35-cp35m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:b667ddbb77ff619fdbd18be75445d4448ee68493c9bddd1b4d0b177025e2f6f6"}, - {file = "protobuf-3.17.2-cp35-cp35m-win32.whl", hash = "sha256:993814b0199f22523a227696a8e20d2cd4b9cda60c76d2fb3969ce0c77eb9e0f"}, - {file = "protobuf-3.17.2-cp35-cp35m-win_amd64.whl", hash = "sha256:38b2c6e2204731cfebdb2452ffb7addf0367172b35cff8ccda338ccea9e7c87a"}, - {file = "protobuf-3.17.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:42239d47f213f1ce12ddca62c0c70856efbac09797d715e5d606b3fbc3f16c44"}, - {file = "protobuf-3.17.2-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:4da073b6c1a83e4ff1294156844f21343c5094e295c194d8ecc94703c7a6f42a"}, - {file = "protobuf-3.17.2-cp36-cp36m-win32.whl", hash = "sha256:c303ce92c60d069237cfbd41790fde3d00066ca9500fac464454e20c2f12250c"}, - {file = "protobuf-3.17.2-cp36-cp36m-win_amd64.whl", hash = 
"sha256:751d71dc6559dd794d309811d8dcf179d6a128b38a1655ae7137530f33895cb4"}, - {file = "protobuf-3.17.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:f3f057ad59cd4d5ea2b1bb88d36c6f009b8043007cf03d96cbd3b9c06859d4fa"}, - {file = "protobuf-3.17.2-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:c5b512c1982f8b427a302db094cf79f8f235284014d01b23fba461aa2c1459a0"}, - {file = "protobuf-3.17.2-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:9dc01ddc3b195c4538942790c4b195cf17b52d3785a60c1f6f25b794f51e2071"}, - {file = "protobuf-3.17.2-cp37-cp37m-win32.whl", hash = "sha256:45cc0197e9f9192693c8a4dbcba8c9227a53a2720fc3826dfe791113d9ff5e5e"}, - {file = "protobuf-3.17.2-cp37-cp37m-win_amd64.whl", hash = "sha256:816fe5e8b73c29adb13a57e1653da15f24cbb90e86ea92e6f08abe6d8c0cbdb4"}, - {file = "protobuf-3.17.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:557e16884d7276caf92c1fb188fe6dfbec47891d3507d4982db1e3d89ffd22de"}, - {file = "protobuf-3.17.2-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:89613ec119fdcad992f65d7a5bfe3170c17edc839982d0089fc0c9242fb8e517"}, - {file = "protobuf-3.17.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:744f5c9a3b9e7538c4c70f2b0e46f86858b684f5d17bf78643a19a6c21c7936f"}, - {file = "protobuf-3.17.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1e08da38d56b74962d9cb05d86ca5f25d2e5b78f05fd00db7900cad3faa6de00"}, - {file = "protobuf-3.17.2-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:6388e7f300010ea7ac77113c7491c5622645d2447fdf701cbfe026b832d728cd"}, - {file = "protobuf-3.17.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:0b5a73fa43efda0df0191c162680ec40cef45463fa8ff69fbeaeeddda4c760f5"}, - {file = "protobuf-3.17.2-py2.py3-none-any.whl", hash = "sha256:50c657a54592c1bec7b24521fdbbbd2f7b51325ba23ab505ed03e8ebf3a5aeff"}, - {file = "protobuf-3.17.2.tar.gz", hash = "sha256:5a3450acf046716e4a4f02a3f7adfb7b86f1b5b3ae392cec759915e79538d40d"}, + {file = "protobuf-3.17.3-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:ab6bb0e270c6c58e7ff4345b3a803cc59dbee19ddf77a4719c5b635f1d547aa8"}, + {file = "protobuf-3.17.3-cp27-cp27mu-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:13ee7be3c2d9a5d2b42a1030976f760f28755fcf5863c55b1460fd205e6cd637"}, + {file = "protobuf-3.17.3-cp35-cp35m-macosx_10_9_intel.whl", hash = "sha256:1556a1049ccec58c7855a78d27e5c6e70e95103b32de9142bae0576e9200a1b0"}, + {file = "protobuf-3.17.3-cp35-cp35m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:f0e59430ee953184a703a324b8ec52f571c6c4259d496a19d1cabcdc19dabc62"}, + {file = "protobuf-3.17.3-cp35-cp35m-win32.whl", hash = "sha256:a981222367fb4210a10a929ad5983ae93bd5a050a0824fc35d6371c07b78caf6"}, + {file = "protobuf-3.17.3-cp35-cp35m-win_amd64.whl", hash = "sha256:6d847c59963c03fd7a0cd7c488cadfa10cda4fff34d8bc8cba92935a91b7a037"}, + {file = "protobuf-3.17.3-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:145ce0af55c4259ca74993ddab3479c78af064002ec8227beb3d944405123c71"}, + {file = "protobuf-3.17.3-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:6ce4d8bf0321e7b2d4395e253f8002a1a5ffbcfd7bcc0a6ba46712c07d47d0b4"}, + {file = "protobuf-3.17.3-cp36-cp36m-win32.whl", hash = "sha256:7a4c97961e9e5b03a56f9a6c82742ed55375c4a25f2692b625d4087d02ed31b9"}, + {file = "protobuf-3.17.3-cp36-cp36m-win_amd64.whl", hash = "sha256:a22b3a0dbac6544dacbafd4c5f6a29e389a50e3b193e2c70dae6bbf7930f651d"}, + {file = "protobuf-3.17.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = 
"sha256:ffea251f5cd3c0b9b43c7a7a912777e0bc86263436a87c2555242a348817221b"}, + {file = "protobuf-3.17.3-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:9b7a5c1022e0fa0dbde7fd03682d07d14624ad870ae52054849d8960f04bc764"}, + {file = "protobuf-3.17.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:8727ee027157516e2c311f218ebf2260a18088ffb2d29473e82add217d196b1c"}, + {file = "protobuf-3.17.3-cp37-cp37m-win32.whl", hash = "sha256:14c1c9377a7ffbeaccd4722ab0aa900091f52b516ad89c4b0c3bb0a4af903ba5"}, + {file = "protobuf-3.17.3-cp37-cp37m-win_amd64.whl", hash = "sha256:c56c050a947186ba51de4f94ab441d7f04fcd44c56df6e922369cc2e1a92d683"}, + {file = "protobuf-3.17.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2ae692bb6d1992afb6b74348e7bb648a75bb0d3565a3f5eea5bec8f62bd06d87"}, + {file = "protobuf-3.17.3-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:99938f2a2d7ca6563c0ade0c5ca8982264c484fdecf418bd68e880a7ab5730b1"}, + {file = "protobuf-3.17.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:6902a1e4b7a319ec611a7345ff81b6b004b36b0d2196ce7a748b3493da3d226d"}, + {file = "protobuf-3.17.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4ffbd23640bb7403574f7aff8368e2aeb2ec9a5c6306580be48ac59a6bac8bde"}, + {file = "protobuf-3.17.3-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:26010f693b675ff5a1d0e1bdb17689b8b716a18709113288fead438703d45539"}, + {file = "protobuf-3.17.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:e76d9686e088fece2450dbc7ee905f9be904e427341d289acbe9ad00b78ebd47"}, + {file = "protobuf-3.17.3-py2.py3-none-any.whl", hash = "sha256:2bfb815216a9cd9faec52b16fd2bfa68437a44b67c56bee59bc3926522ecb04e"}, + {file = "protobuf-3.17.3.tar.gz", hash = "sha256:72804ea5eaa9c22a090d2803813e280fb273b62d5ae497aaf3553d141c4fdd7b"}, ] py = [ {file = "py-1.10.0-py2.py3-none-any.whl", hash = "sha256:3b80836aa6d1feeaa108e046da6423ab8f6ceda6468545ae8d02d9d58d18818a"}, @@ -2136,28 +2150,28 @@ pynput = [ {file = "pynput-1.7.3.tar.gz", hash = "sha256:4e50b1a0ab86847e87e58f6d1993688b9a44f9f4c88d4712315ea8eb552ef828"}, ] pyobjc-core = [ - {file = "pyobjc-core-7.2.tar.gz", hash = "sha256:9e9ec482d80ea030cdb1613d05a247f31eedabe6666d884d42dd890cc5fb0e05"}, - {file = "pyobjc_core-7.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:94b4d9de9d228db52dd35012096d63bdf8c1ace58ea3be1d5f6f39313cd502f2"}, - {file = "pyobjc_core-7.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:971cbd7189ae1aa03ef0d16124aa5bcd053779e0e6b6011a41c3dbd5b4ea7e88"}, - {file = "pyobjc_core-7.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9d93b20394008373d6d2856d49aaff26f4b97ff42d924a14516c8a82313ec8c0"}, - {file = "pyobjc_core-7.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:860183540d1be792c26426018139ac8ba75e85f675c59ba080ccdc52d8e74c7a"}, - {file = "pyobjc_core-7.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ffe61d3c2a404354daf2d895e34e38c5044453353581b3c396bf5365de26250c"}, + {file = "pyobjc-core-7.3.tar.gz", hash = "sha256:5081aedf8bb40aac1a8ad95adac9e44e148a882686ded614adf46bb67fd67574"}, + {file = "pyobjc_core-7.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:4e93ad769a20b908778fe950f62a843a6d8f0fa71996e5f3cc9fab5ae7d17771"}, + {file = "pyobjc_core-7.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:9f63fd37bbf3785af4ddb2f86cad5ca81c62cfc7d1c0099637ca18343c3656c1"}, + {file = "pyobjc_core-7.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e9b1311f72f2e170742a7ee3a8149f52c35158dc024a21e88d6f1e52ba5d718b"}, + {file 
= "pyobjc_core-7.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:8d5e12a0729dfd1d998a861998b422d0a3e41923d75ea229bacf31372c831d7b"}, + {file = "pyobjc_core-7.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:efdee8c4884405e0c0186c57f87d7bfaa0abc1f50b18e865db3caea3a1f329b9"}, ] pyobjc-framework-cocoa = [ - {file = "pyobjc-framework-Cocoa-7.2.tar.gz", hash = "sha256:c8b23f03dc3f4436d36c0fd006a8a084835c4f6015187df7c3aa5de8ecd5c653"}, - {file = "pyobjc_framework_Cocoa-7.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:8e5dd5daa0096755937ec24c345a4b07c3fa131a457f99e0fdeeb01979178ec7"}, - {file = "pyobjc_framework_Cocoa-7.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:828d183947fc7746953fd0c9b1092cc423745ba0b49719e7b7d1e1614aaa20ec"}, - {file = "pyobjc_framework_Cocoa-7.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:7e4c6d7baa0c2ab5ea5efb8836ad0b3b3976cffcfc6195c1f195e826c6eb5744"}, - {file = "pyobjc_framework_Cocoa-7.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c9a9d1d49cc5a810773c88d6de821e60c8cc41d01113cf1b9e7662938f5f7d66"}, - {file = "pyobjc_framework_Cocoa-7.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:506c2cd09f421eac92b9008a0142174c3d1d70ecd4b0e3fa2b924767995fd14e"}, + {file = "pyobjc-framework-Cocoa-7.3.tar.gz", hash = "sha256:b18d05e7a795a3455ad191c3e43d6bfa673c2a4fd480bb1ccf57191051b80b7e"}, + {file = "pyobjc_framework_Cocoa-7.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:9edffdfa6dd1f71f21b531c3e61fdd3e4d5d3bf6c5a528c98e88828cd60bac11"}, + {file = "pyobjc_framework_Cocoa-7.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:35a6340437a4e0109a302150b7d1f6baf57004ccf74834f9e6062fcafe2fd8d7"}, + {file = "pyobjc_framework_Cocoa-7.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:7c3886f2608ab3ed02482f8b2ebf9f782b324c559e84b52cfd92dba8a1109872"}, + {file = "pyobjc_framework_Cocoa-7.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:2e8e7a1a82cca21d9bfac9115baf065305f3da577bf240085964dfb9c9fff337"}, + {file = "pyobjc_framework_Cocoa-7.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6c15f43077c9a2ba1853eb402ff7a9515df9e584315bc2fcb779d4c95ef46dc5"}, ] pyobjc-framework-quartz = [ - {file = "pyobjc-framework-Quartz-7.2.tar.gz", hash = "sha256:ea554e5697bc6747a4ce793c0b0036da16622b44ff75196d6124603008922afa"}, - {file = "pyobjc_framework_Quartz-7.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:dc61fe61d26f797e4335f3ffc891bcef64624c728c2603e3307b3910580b2cb8"}, - {file = "pyobjc_framework_Quartz-7.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:ad8103cc38923f2708904db11a0992ea960125ce6adf7b4c7a77d8fdafd412c4"}, - {file = "pyobjc_framework_Quartz-7.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:4549d17ca41f0bf62792d5bc4b4293ba9a6cc560014b3e18ba22c65e4a5030d2"}, - {file = "pyobjc_framework_Quartz-7.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:da16e4f1e13cb7b02e30fa538cbb3a356e4a694bbc2bb26d2bd100ca12a54ff6"}, - {file = "pyobjc_framework_Quartz-7.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c1f6471177a39535cd0358ae29b8f3d31fe778a21deb74105c448c4e726619d7"}, + {file = "pyobjc-framework-Quartz-7.3.tar.gz", hash = "sha256:98812844c34262def980bdf60923a875cd43428a8375b6fd53bd2cd800eccf0b"}, + {file = "pyobjc_framework_Quartz-7.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:1ef18f5a16511ded65980bf4f5983ea5d35c88224dbad1b3112abd29c60413ea"}, + {file = "pyobjc_framework_Quartz-7.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = 
"sha256:3b41eec8d4b10c7c7e011e2f9051367f5499ef315ba52dfbae573c3a2e05469c"}, + {file = "pyobjc_framework_Quartz-7.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2c65456ed045dfe1711d0298734e5a3ad670f8c770f7eb3b19979256c388bdd2"}, + {file = "pyobjc_framework_Quartz-7.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:8ddbca6b466584c3dc0e5b701b1c2a9b5d97ddc1d79a949927499ebb1be1f210"}, + {file = "pyobjc_framework_Quartz-7.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7aba3cd966a0768dd58a35680742820f0c5ac596a9cd11014e2057818e65b0af"}, ] pyparsing = [ {file = "pyparsing-2.4.7-py2.py3-none-any.whl", hash = "sha256:ef9d7589ef3c200abe66653d3f1ab1033c3c419ae9b9bdb1240a85b024efc88b"}, @@ -2269,6 +2283,10 @@ six = [ {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, ] +slack-sdk = [ + {file = "slack_sdk-3.6.0-py2.py3-none-any.whl", hash = "sha256:e1b257923a1ef88b8620dd3abff94dc5b3eee16ef37975d101ba9e60123ac3af"}, + {file = "slack_sdk-3.6.0.tar.gz", hash = "sha256:195f044e02a2844579a7a26818ce323e85dde8de224730c859644918d793399e"}, +] smmap = [ {file = "smmap-4.0.0-py2.py3-none-any.whl", hash = "sha256:a9a7479e4c572e2e775c404dcd3080c8dc49f39918c2cf74913d30c4c478e3c2"}, {file = "smmap-4.0.0.tar.gz", hash = "sha256:7e65386bd122d45405ddf795637b7f7d2b532e7e401d46bbe3fb49b9986d5182"}, @@ -2290,8 +2308,8 @@ sphinx-qt-documentation = [ {file = "sphinx_qt_documentation-0.3.tar.gz", hash = "sha256:f09a0c9d9e989172ba3e282b92bf55613bb23ad47315ec5b0d38536b343ac6c8"}, ] sphinx-rtd-theme = [ - {file = "sphinx_rtd_theme-0.5.1-py2.py3-none-any.whl", hash = "sha256:fa6bebd5ab9a73da8e102509a86f3fcc36dec04a0b52ea80e5a033b2aba00113"}, - {file = "sphinx_rtd_theme-0.5.1.tar.gz", hash = "sha256:eda689eda0c7301a80cf122dad28b1861e5605cbf455558f3775e1e8200e83a5"}, + {file = "sphinx_rtd_theme-0.5.2-py2.py3-none-any.whl", hash = "sha256:4a05bdbe8b1446d77a01e20a23ebc6777c74f43237035e76be89699308987d6f"}, + {file = "sphinx_rtd_theme-0.5.2.tar.gz", hash = "sha256:32bd3b5d13dc8186d7a42fc816a23d32e83a4827d7d9882948e7b837c232da5a"}, ] sphinxcontrib-applehelp = [ {file = "sphinxcontrib-applehelp-1.0.2.tar.gz", hash = "sha256:a072735ec80e7675e3f432fcae8610ecf509c5f1869d17e2eecff44389cdbc58"}, diff --git a/pyproject.toml b/pyproject.toml index 1425b88236..e376986606 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -85,6 +85,8 @@ sphinx-qt-documentation = "*" recommonmark = "*" wheel = "*" enlighten = "*" # cool terminal progress bars +toml = "^0.10.2" # for parsing pyproject.toml + [tool.poetry.urls] "Bug Tracker" = "https://github.com/pypeclub/openpype/issues" diff --git a/repos/avalon-core b/repos/avalon-core index efde026e5a..d8be0bdb37 160000 --- a/repos/avalon-core +++ b/repos/avalon-core @@ -1 +1 @@ -Subproject commit efde026e5aad72dac0e69848005419e2c4f067f2 +Subproject commit d8be0bdb37961e32243f1de0eb9696e86acf7443 diff --git a/start.py b/start.py index 8e7c195e95..1b5c25ae3a 100644 --- a/start.py +++ b/start.py @@ -135,18 +135,36 @@ if sys.__stdout__: def _print(message: str): if message.startswith("!!! "): print("{}{}".format(term.orangered2("!!! 
"), message[4:])) + return if message.startswith(">>> "): print("{}{}".format(term.aquamarine3(">>> "), message[4:])) + return if message.startswith("--- "): print("{}{}".format(term.darkolivegreen3("--- "), message[4:])) - if message.startswith(" "): - print("{}{}".format(term.darkseagreen3(" "), message[4:])) + return if message.startswith("*** "): print("{}{}".format(term.gold("*** "), message[4:])) + return if message.startswith(" - "): print("{}{}".format(term.wheat(" - "), message[4:])) + return if message.startswith(" . "): print("{}{}".format(term.tan(" . "), message[4:])) + return + if message.startswith(" - "): + print("{}{}".format(term.seagreen3(" - "), message[7:])) + return + if message.startswith(" ! "): + print("{}{}".format(term.goldenrod(" ! "), message[7:])) + return + if message.startswith(" * "): + print("{}{}".format(term.aquamarine1(" * "), message[7:])) + return + if message.startswith(" "): + print("{}{}".format(term.darkseagreen3(" "), message[4:])) + return + + print(message) else: def _print(message: str): print(message) @@ -175,6 +193,17 @@ silent_commands = ["run", "igniter", "standalonepublisher", "extractenvironments"] +def list_versions(openpype_versions: list, local_version=None) -> None: + """Print list of detected versions.""" + _print(" - Detected versions:") + for v in sorted(openpype_versions): + _print(f" - {v}: {v.path}") + if not openpype_versions: + _print(" ! none in repository detected") + if local_version: + _print(f" * local version {local_version}") + + def set_openpype_global_environments() -> None: """Set global OpenPype's environments.""" import acre @@ -303,22 +332,37 @@ def _process_arguments() -> tuple: # check for `--use-version=3.0.0` argument and `--use-staging` use_version = None use_staging = False + print_versions = False for arg in sys.argv: if arg == "--use-version": _print("!!! Please use option --use-version like:") _print(" --use-version=3.0.0") sys.exit(1) - m = re.search( - r"--use-version=(?P\d+\.\d+\.\d+(?:\S*)?)", arg) - if m and m.group('version'): - use_version = m.group('version') - sys.argv.remove(arg) - break + if arg.startswith("--use-version="): + m = re.search( + r"--use-version=(?P\d+\.\d+\.\d+(?:\S*)?)", arg) + if m and m.group('version'): + use_version = m.group('version') + _print(">>> Requested version [ {} ]".format(use_version)) + sys.argv.remove(arg) + if "+staging" in use_version: + use_staging = True + break + else: + _print("!!! Requested version isn't in correct format.") + _print((" Use --list-versions to find out" + " proper version string.")) + sys.exit(1) + if "--use-staging" in sys.argv: use_staging = True sys.argv.remove("--use-staging") + if "--list-versions" in sys.argv: + print_versions = True + sys.argv.remove("--list-versions") + # handle igniter # this is helper to run igniter before anything else if "igniter" in sys.argv: @@ -334,7 +378,7 @@ def _process_arguments() -> tuple: sys.argv.pop(idx) sys.argv.insert(idx, "tray") - return use_version, use_staging + return use_version, use_staging, print_versions def _determine_mongodb() -> str: @@ -487,7 +531,7 @@ def _find_frozen_openpype(use_version: str = None, openpype_version = openpype_versions[-1] except IndexError: _print(("!!! Something is wrong and we didn't " - "found it again.")) + "found it again.")) sys.exit(1) elif return_code != 2: _print(f" . 
finished ({return_code})") @@ -500,7 +544,7 @@ def _find_frozen_openpype(use_version: str = None, _print("*** Still no luck finding OpenPype.") _print(("*** We'll try to use the one coming " "with OpenPype installation.")) - version_path = _bootstrap_from_code(use_version) + version_path = _bootstrap_from_code(use_version, use_staging) openpype_version = OpenPypeVersion( version=BootstrapRepos.get_version(version_path), path=version_path) @@ -519,13 +563,8 @@ def _find_frozen_openpype(use_version: str = None, if found: openpype_version = sorted(found)[-1] if not openpype_version: - _print(f"!!! requested version {use_version} was not found.") - if openpype_versions: - _print(" - found: ") - for v in sorted(openpype_versions): - _print(f" - {v}: {v.path}") - - _print(f" - local version {local_version}") + _print(f"!!! Requested version {use_version} was not found.") + list_versions(openpype_versions, local_version) sys.exit(1) # test if latest detected is installed (in user data dir) @@ -560,7 +599,7 @@ def _find_frozen_openpype(use_version: str = None, return openpype_version.path -def _bootstrap_from_code(use_version): +def _bootstrap_from_code(use_version, use_staging): """Bootstrap live code (or the one coming with frozen OpenPype. Args: @@ -575,15 +614,33 @@ def _bootstrap_from_code(use_version): _openpype_root = OPENPYPE_ROOT if getattr(sys, 'frozen', False): local_version = bootstrap.get_version(Path(_openpype_root)) - _print(f" - running version: {local_version}") + switch_str = f" - will switch to {use_version}" if use_version else "" + _print(f" - booting version: {local_version}{switch_str}") assert local_version else: # get current version of OpenPype local_version = bootstrap.get_local_live_version() - if use_version and use_version != local_version: - version_to_use = None - openpype_versions = bootstrap.find_openpype(include_zips=True) + version_to_use = None + openpype_versions = bootstrap.find_openpype( + include_zips=True, staging=use_staging) + if use_staging and not use_version: + try: + version_to_use = openpype_versions[-1] + except IndexError: + _print("!!! No staging versions are found.") + list_versions(openpype_versions, local_version) + sys.exit(1) + if version_to_use.path.is_file(): + version_to_use.path = bootstrap.extract_openpype( + version_to_use) + bootstrap.add_paths_from_directory(version_to_use.path) + os.environ["OPENPYPE_VERSION"] = str(version_to_use) + version_path = version_to_use.path + os.environ["OPENPYPE_REPOS_ROOT"] = (version_path / "openpype").as_posix() # noqa: E501 + _openpype_root = version_to_use.path.as_posix() + + elif use_version and use_version != local_version: v: OpenPypeVersion found = [v for v in openpype_versions if str(v) == use_version] if found: @@ -600,13 +657,8 @@ def _bootstrap_from_code(use_version): os.environ["OPENPYPE_REPOS_ROOT"] = (version_path / "openpype").as_posix() # noqa: E501 _openpype_root = version_to_use.path.as_posix() else: - _print(f"!!! requested version {use_version} was not found.") - if openpype_versions: - _print(" - found: ") - for v in sorted(openpype_versions): - _print(f" - {v}: {v.path}") - - _print(f" - local version {local_version}") + _print(f"!!! 
Requested version {use_version} was not found.") + list_versions(openpype_versions, local_version) sys.exit(1) else: os.environ["OPENPYPE_VERSION"] = local_version @@ -675,11 +727,16 @@ def boot(): # Process arguments # ------------------------------------------------------------------------ - use_version, use_staging = _process_arguments() + use_version, use_staging, print_versions = _process_arguments() if os.getenv("OPENPYPE_VERSION"): - use_staging = "staging" in os.getenv("OPENPYPE_VERSION") - use_version = os.getenv("OPENPYPE_VERSION") + if use_version: + _print(("*** environment variable OPENPYPE_VERSION" + "is overridden by command line argument.")) + else: + _print(">>> version set by environment variable") + use_staging = "staging" in os.getenv("OPENPYPE_VERSION") + use_version = os.getenv("OPENPYPE_VERSION") # ------------------------------------------------------------------------ # Determine mongodb connection @@ -704,6 +761,24 @@ def boot(): if not os.getenv("OPENPYPE_PATH") and openpype_path: os.environ["OPENPYPE_PATH"] = openpype_path + if print_versions: + if not use_staging: + _print("--- This will list only non-staging versions detected.") + _print(" To see staging versions, use --use-staging argument.") + else: + _print("--- This will list only staging versions detected.") + _print(" To see other version, omit --use-staging argument.") + _openpype_root = OPENPYPE_ROOT + openpype_versions = bootstrap.find_openpype(include_zips=True, + staging=use_staging) + if getattr(sys, 'frozen', False): + local_version = bootstrap.get_version(Path(_openpype_root)) + else: + local_version = bootstrap.get_local_live_version() + + list_versions(openpype_versions, local_version) + sys.exit(1) + # ------------------------------------------------------------------------ # Find OpenPype versions # ------------------------------------------------------------------------ @@ -718,7 +793,7 @@ def boot(): _print(f"!!! {e}") sys.exit(1) else: - version_path = _bootstrap_from_code(use_version) + version_path = _bootstrap_from_code(use_version, use_staging) # set this to point either to `python` from venv in case of live code # or to `openpype` or `openpype_console` in case of frozen code @@ -754,7 +829,7 @@ def boot(): from openpype.version import __version__ assert version_path, "Version path not defined." 
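# A minimal sketch of the --use-version / --use-staging / --list-versions
# handling that the start.py hunks above introduce. The regex and the
# m.group('version') access mirror the patch; the function name and the
# returned tuple shape are illustrative only, not the shipped code.
import re
import sys
from typing import Optional, Tuple


def parse_version_args(argv: list) -> Tuple[Optional[str], bool, bool]:
    """Return (use_version, use_staging, print_versions) parsed from argv."""
    use_version = None
    use_staging = "--use-staging" in argv
    print_versions = "--list-versions" in argv

    for arg in argv:
        if not arg.startswith("--use-version="):
            continue
        m = re.search(r"--use-version=(?P<version>\d+\.\d+\.\d+(?:\S*)?)", arg)
        if not (m and m.group("version")):
            # the real code prints a hint about --list-versions and exits
            print("!!! Requested version isn't in correct format.")
            sys.exit(1)
        use_version = m.group("version")
        if "+staging" in use_version:
            use_staging = True
        break

    return use_version, use_staging, print_versions

# parse_version_args(["--use-version=3.3.0+staging"]) -> ("3.3.0+staging", True, False)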
- info = get_info() + info = get_info(use_staging) info.insert(0, f">>> Using OpenPype from [ {version_path} ]") t_width = 20 @@ -781,7 +856,7 @@ def boot(): sys.exit(1) -def get_info() -> list: +def get_info(use_staging=None) -> list: """Print additional information to console.""" from openpype.lib.mongo import get_default_components from openpype.lib.log import PypeLogger @@ -789,7 +864,7 @@ def get_info() -> list: components = get_default_components() inf = [] - if not getattr(sys, 'frozen', False): + if use_staging: inf.append(("OpenPype variant", "staging")) else: inf.append(("OpenPype variant", "production")) diff --git a/tests/igniter/test_bootstrap_repos.py b/tests/igniter/test_bootstrap_repos.py index 743131acfa..740a71a5ce 100644 --- a/tests/igniter/test_bootstrap_repos.py +++ b/tests/igniter/test_bootstrap_repos.py @@ -330,8 +330,8 @@ def test_find_openpype(fix_bootstrap, tmp_path_factory, monkeypatch, printer): assert result[-1].path == expected_path, ("not a latest version of " "OpenPype 3") + printer("testing finding OpenPype in OPENPYPE_PATH ...") monkeypatch.setenv("OPENPYPE_PATH", e_path.as_posix()) - result = fix_bootstrap.find_openpype(include_zips=True) # we should have results as file were created assert result is not None, "no OpenPype version found" @@ -349,6 +349,8 @@ def test_find_openpype(fix_bootstrap, tmp_path_factory, monkeypatch, printer): monkeypatch.delenv("OPENPYPE_PATH", raising=False) + printer("testing finding OpenPype in user data dir ...") + # mock appdirs user_data_dir def mock_user_data_dir(*args, **kwargs): """Mock local app data dir.""" @@ -373,18 +375,7 @@ def test_find_openpype(fix_bootstrap, tmp_path_factory, monkeypatch, printer): assert result[-1].path == expected_path, ("not a latest version of " "OpenPype 2") - result = fix_bootstrap.find_openpype(e_path, include_zips=True) - assert result is not None, "no OpenPype version found" - expected_path = Path( - e_path / "{}{}{}".format( - test_versions_1[5].prefix, - test_versions_1[5].version, - test_versions_1[5].suffix - ) - ) - assert result[-1].path == expected_path, ("not a latest version of " - "OpenPype 1") - + printer("testing finding OpenPype zip/dir precedence ...") result = fix_bootstrap.find_openpype(dir_path, include_zips=True) assert result is not None, "no OpenPype versions found" expected_path = Path( diff --git a/tools/build.ps1 b/tools/build.ps1 index c8c2f392ad..cc4253fe24 100644 --- a/tools/build.ps1 +++ b/tools/build.ps1 @@ -83,7 +83,8 @@ function Show-PSWarning() { function Install-Poetry() { Write-Host ">>> " -NoNewline -ForegroundColor Green Write-Host "Installing Poetry ... " - (Invoke-WebRequest -Uri https://raw.githubusercontent.com/python-poetry/poetry/master/get-poetry.py -UseBasicParsing).Content | python - + $env:POETRY_HOME="$openpype_root\.poetry" + (Invoke-WebRequest -Uri https://raw.githubusercontent.com/python-poetry/poetry/master/install-poetry.py -UseBasicParsing).Content | python - } $art = @" @@ -115,11 +116,9 @@ $openpype_root = (Get-Item $script_dir).parent.FullName $env:_INSIDE_OPENPYPE_TOOL = "1" -# make sure Poetry is in PATH if (-not (Test-Path 'env:POETRY_HOME')) { $env:POETRY_HOME = "$openpype_root\.poetry" } -$env:PATH = "$($env:PATH);$($env:POETRY_HOME)\bin" Set-Location -Path $openpype_root @@ -164,7 +163,7 @@ Write-Host " ]" -ForegroundColor white Write-Host ">>> " -NoNewline -ForegroundColor Green Write-Host "Reading Poetry ... 
" -NoNewline -if (-not (Test-Path -PathType Container -Path "$openpype_root\.poetry\bin")) { +if (-not (Test-Path -PathType Container -Path "$($env:POETRY_HOME)\bin")) { Write-Host "NOT FOUND" -ForegroundColor Yellow Write-Host "*** " -NoNewline -ForegroundColor Yellow Write-Host "We need to install Poetry create virtual env first ..." @@ -184,10 +183,10 @@ Write-Host ">>> " -NoNewline -ForegroundColor green Write-Host "Building OpenPype ..." $startTime = [int][double]::Parse((Get-Date -UFormat %s)) -$out = & poetry run python setup.py build 2>&1 +$out = & "$($env:POETRY_HOME)\bin\poetry" run python setup.py build 2>&1 +Set-Content -Path "$($openpype_root)\build\build.log" -Value $out if ($LASTEXITCODE -ne 0) { - Set-Content -Path "$($openpype_root)\build\build.log" -Value $out Write-Host "!!! " -NoNewLine -ForegroundColor Red Write-Host "Build failed. Check the log: " -NoNewline Write-Host ".\build\build.log" -ForegroundColor Yellow @@ -195,7 +194,7 @@ if ($LASTEXITCODE -ne 0) } Set-Content -Path "$($openpype_root)\build\build.log" -Value $out -& poetry run python "$($openpype_root)\tools\build_dependencies.py" +& "$($env:POETRY_HOME)\bin\poetry" run python "$($openpype_root)\tools\build_dependencies.py" Write-Host ">>> " -NoNewline -ForegroundColor green Write-Host "restoring current directory" diff --git a/tools/build.sh b/tools/build.sh index aa8f0121ea..c44e7157af 100755 --- a/tools/build.sh +++ b/tools/build.sh @@ -140,21 +140,6 @@ realpath () { echo $(cd $(dirname "$1") || return; pwd)/$(basename "$1") } -############################################################################## -# Install Poetry when needed -# Globals: -# PATH -# Arguments: -# None -# Returns: -# None -############################################################################### -install_poetry () { - echo -e "${BIGreen}>>>${RST} Installing Poetry ..." - command -v curl >/dev/null 2>&1 || { echo -e "${BIRed}!!!${RST}${BIYellow} Missing ${RST}${BIBlue}curl${BIYellow} command.${RST}"; return 1; } - curl -sSL https://raw.githubusercontent.com/python-poetry/poetry/master/get-poetry.py | python - -} - # Main main () { echo -e "${BGreen}" @@ -171,11 +156,9 @@ main () { _inside_openpype_tool="1" - # make sure Poetry is in PATH if [[ -z $POETRY_HOME ]]; then export POETRY_HOME="$openpype_root/.poetry" fi - export PATH="$POETRY_HOME/bin:$PATH" echo -e "${BIYellow}---${RST} Cleaning build directory ..." rm -rf "$openpype_root/build" && mkdir "$openpype_root/build" > /dev/null @@ -201,11 +184,11 @@ if [ "$disable_submodule_update" == 1 ]; then fi echo -e "${BIGreen}>>>${RST} Building ..." 
if [[ "$OSTYPE" == "linux-gnu"* ]]; then - poetry run python "$openpype_root/setup.py" build > "$openpype_root/build/build.log" || { echo -e "${BIRed}!!!${RST} Build failed, see the build log."; return; } + "$POETRY_HOME/bin/poetry" run python "$openpype_root/setup.py" build > "$openpype_root/build/build.log" || { echo -e "${BIRed}!!!${RST} Build failed, see the build log."; return; } elif [[ "$OSTYPE" == "darwin"* ]]; then - poetry run python "$openpype_root/setup.py" bdist_mac > "$openpype_root/build/build.log" || { echo -e "${BIRed}!!!${RST} Build failed, see the build log."; return; } + "$POETRY_HOME/bin/poetry" run python "$openpype_root/setup.py" bdist_mac > "$openpype_root/build/build.log" || { echo -e "${BIRed}!!!${RST} Build failed, see the build log."; return; } fi - poetry run python "$openpype_root/tools/build_dependencies.py" + "$POETRY_HOME/bin/poetry" run python "$openpype_root/tools/build_dependencies.py" if [[ "$OSTYPE" == "darwin"* ]]; then # fix code signing issue diff --git a/tools/build_win_installer.ps1 b/tools/build_win_installer.ps1 index 05ec0f9823..a0832e0135 100644 --- a/tools/build_win_installer.ps1 +++ b/tools/build_win_installer.ps1 @@ -64,14 +64,6 @@ function Show-PSWarning() { } } -function Install-Poetry() { - Write-Host ">>> " -NoNewline -ForegroundColor Green - Write-Host "Installing Poetry ... " - (Invoke-WebRequest -Uri https://raw.githubusercontent.com/python-poetry/poetry/master/get-poetry.py -UseBasicParsing).Content | python - - # add it to PATH - $env:PATH = "$($env:PATH);$($env:USERPROFILE)\.poetry\bin" -} - $art = @" . . .. . .. diff --git a/tools/ci_tools.py b/tools/ci_tools.py index 85ef6bdcee..436551c243 100644 --- a/tools/ci_tools.py +++ b/tools/ci_tools.py @@ -92,6 +92,24 @@ def calculate_next_nightly(token="nightly"): next_tag = last_pre_v.bump_prerelease(token=token).__str__() return next_tag +def finalize_latest_nightly(): + last_prerelease, last_pre_tag = get_last_version("CI") + last_pre_v = VersionInfo.parse(last_prerelease) + last_pre_v_finalized = last_pre_v.finalize_version() + # print(last_pre_v_finalized) + + return last_pre_v_finalized.__str__() + +def finalize_prerelease(prerelease): + + if "/" in prerelease: + prerelease = prerelease.split("/")[-1] + + prerelease_v = VersionInfo.parse(prerelease) + prerelease_v_finalized = prerelease_v.finalize_version() + + return prerelease_v_finalized.__str__() + def main(): usage = "usage: %prog [options] arg" @@ -102,12 +120,22 @@ def main(): parser.add_option("-b", "--bump", dest="bump", action="store_true", help="Return if there is something to bump") - parser.add_option("-v", "--version", - dest="version", action="store", - help="work with explicit version") + parser.add_option("-r", "--release-latest", + dest="releaselatest", action="store_true", + help="finalize latest prerelease to a release") parser.add_option("-p", "--prerelease", dest="prerelease", action="store", help="define prerelease token") + parser.add_option("-f", "--finalize", + dest="finalize", action="store", + help="define prerelease token") + parser.add_option("-v", "--version", + dest="version", action="store", + help="work with explicit version") + parser.add_option("-l", "--lastversion", + dest="lastversion", action="store", + help="work with explicit version") + (options, args) = parser.parse_args() @@ -124,6 +152,25 @@ def main(): print(next_tag_v) bump_file_versions(next_tag_v) + if options.finalize: + new_release = finalize_prerelease(options.finalize) + print(new_release) + bump_file_versions(new_release) + + 
if options.lastversion: + last_release, last_release_tag = get_last_version(options.lastversion) + print(last_release_tag) + + if options.releaselatest: + new_release = finalize_latest_nightly() + last_release, last_release_tag = get_last_version("release") + + if VersionInfo.parse(new_release) > VersionInfo.parse(last_release): + print(new_release) + bump_file_versions(new_release) + else: + print("skip") + if options.prerelease: current_prerelease = VersionInfo.parse(options.prerelease) new_prerelease = current_prerelease.bump_prerelease().__str__() diff --git a/tools/create_env.ps1 b/tools/create_env.ps1 index 94a91ce48f..6c8124ccb2 100644 --- a/tools/create_env.ps1 +++ b/tools/create_env.ps1 @@ -49,9 +49,7 @@ function Install-Poetry() { Write-Host ">>> " -NoNewline -ForegroundColor Green Write-Host "Installing Poetry ... " $env:POETRY_HOME="$openpype_root\.poetry" - (Invoke-WebRequest -Uri https://raw.githubusercontent.com/python-poetry/poetry/master/get-poetry.py -UseBasicParsing).Content | python - - # add it to PATH - $env:PATH = "$($env:PATH);$openpype_root\.poetry\bin" + (Invoke-WebRequest -Uri https://raw.githubusercontent.com/python-poetry/poetry/master/install-poetry.py -UseBasicParsing).Content | python - } @@ -94,11 +92,10 @@ $current_dir = Get-Location $script_dir = Split-Path -Path $MyInvocation.MyCommand.Definition -Parent $openpype_root = (Get-Item $script_dir).parent.FullName -# make sure Poetry is in PATH if (-not (Test-Path 'env:POETRY_HOME')) { $env:POETRY_HOME = "$openpype_root\.poetry" } -$env:PATH = "$($env:PATH);$($env:POETRY_HOME)\bin" + Set-Location -Path $openpype_root @@ -145,7 +142,7 @@ Test-Python Write-Host ">>> " -NoNewline -ForegroundColor Green Write-Host "Reading Poetry ... " -NoNewline -if (-not (Test-Path -PathType Container -Path "$openpype_root\.poetry\bin")) { +if (-not (Test-Path -PathType Container -Path "$($env:POETRY_HOME)\bin")) { Write-Host "NOT FOUND" -ForegroundColor Yellow Install-Poetry Write-Host "INSTALLED" -ForegroundColor Cyan @@ -160,7 +157,7 @@ if (-not (Test-Path -PathType Leaf -Path "$($openpype_root)\poetry.lock")) { Write-Host ">>> " -NoNewline -ForegroundColor green Write-Host "Installing virtual environment from lock." } -& poetry install --no-root $poetry_verbosity +& "$env:POETRY_HOME\bin\poetry" install --no-root $poetry_verbosity --ansi if ($LASTEXITCODE -ne 0) { Write-Host "!!! " -ForegroundColor yellow -NoNewline Write-Host "Poetry command failed." diff --git a/tools/create_env.sh b/tools/create_env.sh index 226a26e199..cc9eddc317 100755 --- a/tools/create_env.sh +++ b/tools/create_env.sh @@ -109,8 +109,7 @@ install_poetry () { echo -e "${BIGreen}>>>${RST} Installing Poetry ..." 
export POETRY_HOME="$openpype_root/.poetry" command -v curl >/dev/null 2>&1 || { echo -e "${BIRed}!!!${RST}${BIYellow} Missing ${RST}${BIBlue}curl${BIYellow} command.${RST}"; return 1; } - curl -sSL https://raw.githubusercontent.com/python-poetry/poetry/master/get-poetry.py | python - - export PATH="$PATH:$POETRY_HOME/bin" + curl -sSL https://raw.githubusercontent.com/python-poetry/poetry/master/install-poetry.py | python - } ############################################################################## @@ -154,11 +153,10 @@ main () { # Directories openpype_root=$(realpath $(dirname $(dirname "${BASH_SOURCE[0]}"))) - # make sure Poetry is in PATH + if [[ -z $POETRY_HOME ]]; then export POETRY_HOME="$openpype_root/.poetry" fi - export PATH="$POETRY_HOME/bin:$PATH" pushd "$openpype_root" > /dev/null || return > /dev/null @@ -177,7 +175,7 @@ main () { echo -e "${BIGreen}>>>${RST} Installing dependencies ..." fi - poetry install --no-root $poetry_verbosity || { echo -e "${BIRed}!!!${RST} Poetry environment installation failed"; return; } + "$POETRY_HOME/bin/poetry" install --no-root $poetry_verbosity || { echo -e "${BIRed}!!!${RST} Poetry environment installation failed"; return; } echo -e "${BIGreen}>>>${RST} Cleaning cache files ..." clean_pyc @@ -186,10 +184,10 @@ main () { # cx_freeze will crash on missing __pychache__ on these but # reinstalling them solves the problem. echo -e "${BIGreen}>>>${RST} Fixing pycache bug ..." - poetry run python -m pip install --force-reinstall pip - poetry run pip install --force-reinstall setuptools - poetry run pip install --force-reinstall wheel - poetry run python -m pip install --force-reinstall pip + "$POETRY_HOME/bin/poetry" run python -m pip install --force-reinstall pip + "$POETRY_HOME/bin/poetry" run pip install --force-reinstall setuptools + "$POETRY_HOME/bin/poetry" run pip install --force-reinstall wheel + "$POETRY_HOME/bin/poetry" run python -m pip install --force-reinstall pip } main -3 diff --git a/tools/create_zip.ps1 b/tools/create_zip.ps1 index 1a7520eb11..c27857b480 100644 --- a/tools/create_zip.ps1 +++ b/tools/create_zip.ps1 @@ -45,11 +45,9 @@ $openpype_root = (Get-Item $script_dir).parent.FullName $env:_INSIDE_OPENPYPE_TOOL = "1" -# make sure Poetry is in PATH if (-not (Test-Path 'env:POETRY_HOME')) { $env:POETRY_HOME = "$openpype_root\.poetry" } -$env:PATH = "$($env:PATH);$($env:POETRY_HOME)\bin" Set-Location -Path $openpype_root @@ -87,7 +85,7 @@ if (-not $openpype_version) { Write-Host ">>> " -NoNewline -ForegroundColor Green Write-Host "Reading Poetry ... " -NoNewline -if (-not (Test-Path -PathType Container -Path "$openpype_root\.poetry\bin")) { +if (-not (Test-Path -PathType Container -Path "$($env:POETRY_HOME)\bin")) { Write-Host "NOT FOUND" -ForegroundColor Yellow Write-Host "*** " -NoNewline -ForegroundColor Yellow Write-Host "We need to install Poetry create virtual env first ..." @@ -107,5 +105,5 @@ Write-Host ">>> " -NoNewline -ForegroundColor green Write-Host "Generating zip from current sources ..." 
$env:PYTHONPATH="$($openpype_root);$($env:PYTHONPATH)" $env:OPENPYPE_ROOT="$($openpype_root)" -& poetry run python "$($openpype_root)\tools\create_zip.py" $ARGS +& "$($env:POETRY_HOME)\bin\poetry" run python "$($openpype_root)\tools\create_zip.py" $ARGS Set-Location -Path $current_dir diff --git a/tools/create_zip.sh b/tools/create_zip.sh index ec0276b040..85ee18a839 100755 --- a/tools/create_zip.sh +++ b/tools/create_zip.sh @@ -114,11 +114,9 @@ main () { _inside_openpype_tool="1" - # make sure Poetry is in PATH if [[ -z $POETRY_HOME ]]; then export POETRY_HOME="$openpype_root/.poetry" fi - export PATH="$POETRY_HOME/bin:$PATH" pushd "$openpype_root" > /dev/null || return > /dev/null @@ -134,7 +132,7 @@ main () { echo -e "${BIGreen}>>>${RST} Generating zip from current sources ..." PYTHONPATH="$openpype_root:$PYTHONPATH" OPENPYPE_ROOT="$openpype_root" - poetry run python3 "$openpype_root/tools/create_zip.py" "$@" + "$POETRY_HOME/bin/poetry" run python3 "$openpype_root/tools/create_zip.py" "$@" } main "$@" diff --git a/tools/fetch_thirdparty_libs.ps1 b/tools/fetch_thirdparty_libs.ps1 index 23f0b50c7a..16f7b70e7a 100644 --- a/tools/fetch_thirdparty_libs.ps1 +++ b/tools/fetch_thirdparty_libs.ps1 @@ -17,18 +17,15 @@ $openpype_root = (Get-Item $script_dir).parent.FullName $env:_INSIDE_OPENPYPE_TOOL = "1" -# make sure Poetry is in PATH if (-not (Test-Path 'env:POETRY_HOME')) { $env:POETRY_HOME = "$openpype_root\.poetry" } -$env:PATH = "$($env:PATH);$($env:POETRY_HOME)\bin" Set-Location -Path $openpype_root - Write-Host ">>> " -NoNewline -ForegroundColor Green Write-Host "Reading Poetry ... " -NoNewline -if (-not (Test-Path -PathType Container -Path "$openpype_root\.poetry\bin")) { +if (-not (Test-Path -PathType Container -Path "$($env:POETRY_HOME)\bin")) { Write-Host "NOT FOUND" -ForegroundColor Yellow Write-Host "*** " -NoNewline -ForegroundColor Yellow Write-Host "We need to install Poetry create virtual env first ..." @@ -37,5 +34,5 @@ if (-not (Test-Path -PathType Container -Path "$openpype_root\.poetry\bin")) { Write-Host "OK" -ForegroundColor Green } -& poetry run python "$($openpype_root)\tools\fetch_thirdparty_libs.py" +& "$($env:POETRY_HOME)\bin\poetry" run python "$($openpype_root)\tools\fetch_thirdparty_libs.py" Set-Location -Path $current_dir diff --git a/tools/fetch_thirdparty_libs.sh b/tools/fetch_thirdparty_libs.sh index 31f109ba68..93d0674965 100755 --- a/tools/fetch_thirdparty_libs.sh +++ b/tools/fetch_thirdparty_libs.sh @@ -1,8 +1,5 @@ #!/usr/bin/env bash -# Run Pype Tray - - art () { cat <<-EOF @@ -82,11 +79,9 @@ main () { _inside_openpype_tool="1" - # make sure Poetry is in PATH if [[ -z $POETRY_HOME ]]; then export POETRY_HOME="$openpype_root/.poetry" fi - export PATH="$POETRY_HOME/bin:$PATH" echo -e "${BIGreen}>>>${RST} Reading Poetry ... \c" if [ -f "$POETRY_HOME/bin/poetry" ]; then @@ -100,7 +95,7 @@ main () { pushd "$openpype_root" > /dev/null || return > /dev/null echo -e "${BIGreen}>>>${RST} Running Pype tool ..." 
- poetry run python "$openpype_root/tools/fetch_thirdparty_libs.py" + "$POETRY_HOME/bin/poetry" run python "$openpype_root/tools/fetch_thirdparty_libs.py" } main \ No newline at end of file diff --git a/tools/make_docs.ps1 b/tools/make_docs.ps1 index 2f9350eff0..45a11171ae 100644 --- a/tools/make_docs.ps1 +++ b/tools/make_docs.ps1 @@ -19,11 +19,9 @@ $openpype_root = (Get-Item $script_dir).parent.FullName $env:_INSIDE_OPENPYPE_TOOL = "1" -# make sure Poetry is in PATH if (-not (Test-Path 'env:POETRY_HOME')) { $env:POETRY_HOME = "$openpype_root\.poetry" } -$env:PATH = "$($env:PATH);$($env:POETRY_HOME)\bin" Set-Location -Path $openpype_root @@ -50,7 +48,7 @@ Write-Host $art -ForegroundColor DarkGreen Write-Host ">>> " -NoNewline -ForegroundColor Green Write-Host "Reading Poetry ... " -NoNewline -if (-not (Test-Path -PathType Container -Path "$openpype_root\.poetry\bin")) { +if (-not (Test-Path -PathType Container -Path "$($env:POETRY_HOME)\bin")) { Write-Host "NOT FOUND" -ForegroundColor Yellow Write-Host "*** " -NoNewline -ForegroundColor Yellow Write-Host "We need to install Poetry create virtual env first ..." @@ -63,10 +61,10 @@ Write-Host "This will not overwrite existing source rst files, only scan and add Set-Location -Path $openpype_root Write-Host ">>> " -NoNewline -ForegroundColor green Write-Host "Running apidoc ..." -& poetry run sphinx-apidoc -M -e -d 10 --ext-intersphinx --ext-todo --ext-coverage --ext-viewcode -o "$($openpype_root)\docs\source" igniter -& poetry run sphinx-apidoc.exe -M -e -d 10 --ext-intersphinx --ext-todo --ext-coverage --ext-viewcode -o "$($openpype_root)\docs\source" openpype vendor, openpype\vendor +& "$env:POETRY_HOME\bin\poetry" run sphinx-apidoc -M -e -d 10 --ext-intersphinx --ext-todo --ext-coverage --ext-viewcode -o "$($openpype_root)\docs\source" igniter +& "$env:POETRY_HOME\bin\poetry" run sphinx-apidoc.exe -M -e -d 10 --ext-intersphinx --ext-todo --ext-coverage --ext-viewcode -o "$($openpype_root)\docs\source" openpype vendor, openpype\vendor Write-Host ">>> " -NoNewline -ForegroundColor green Write-Host "Building html ..." -& poetry run python "$($openpype_root)\setup.py" build_sphinx +& "$env:POETRY_HOME\bin\poetry" run python "$($openpype_root)\setup.py" build_sphinx Set-Location -Path $current_dir diff --git a/tools/make_docs.sh b/tools/make_docs.sh index 9dfab26a38..52ee57dcf0 100755 --- a/tools/make_docs.sh +++ b/tools/make_docs.sh @@ -83,11 +83,9 @@ main () { _inside_openpype_tool="1" - # make sure Poetry is in PATH if [[ -z $POETRY_HOME ]]; then export POETRY_HOME="$openpype_root/.poetry" fi - export PATH="$POETRY_HOME/bin:$PATH" echo -e "${BIGreen}>>>${RST} Reading Poetry ... \c" if [ -f "$POETRY_HOME/bin/poetry" ]; then @@ -101,11 +99,11 @@ main () { pushd "$openpype_root" > /dev/null || return > /dev/null echo -e "${BIGreen}>>>${RST} Running apidoc ..." - poetry run sphinx-apidoc -M -e -d 10 --ext-intersphinx --ext-todo --ext-coverage --ext-viewcode -o "$openpype_root/docs/source" igniter - poetry run sphinx-apidoc -M -e -d 10 --ext-intersphinx --ext-todo --ext-coverage --ext-viewcode -o "$openpype_root/docs/source" openpype vendor, openpype\vendor + "$POETRY_HOME/bin/poetry" run sphinx-apidoc -M -e -d 10 --ext-intersphinx --ext-todo --ext-coverage --ext-viewcode -o "$openpype_root/docs/source" igniter + "$POETRY_HOME/bin/poetry" run sphinx-apidoc -M -e -d 10 --ext-intersphinx --ext-todo --ext-coverage --ext-viewcode -o "$openpype_root/docs/source" openpype vendor, openpype\vendor echo -e "${BIGreen}>>>${RST} Building html ..." 
- poetry run python3 "$openpype_root/setup.py" build_sphinx + "$POETRY_HOME/bin/poetry" run python3 "$openpype_root/setup.py" build_sphinx } main diff --git a/tools/run_project_manager.ps1 b/tools/run_project_manager.ps1 index 9886a80316..a9cfbb1e7b 100644 --- a/tools/run_project_manager.ps1 +++ b/tools/run_project_manager.ps1 @@ -47,7 +47,7 @@ Set-Location -Path $openpype_root Write-Host ">>> " -NoNewline -ForegroundColor Green Write-Host "Reading Poetry ... " -NoNewline -if (-not (Test-Path -PathType Container -Path "$openpype_root\.poetry\bin")) { +if (-not (Test-Path -PathType Container -Path "$($env:POETRY_HOME)\bin")) { Write-Host "NOT FOUND" -ForegroundColor Yellow Write-Host "*** " -NoNewline -ForegroundColor Yellow Write-Host "We need to install Poetry create virtual env first ..." @@ -56,5 +56,5 @@ if (-not (Test-Path -PathType Container -Path "$openpype_root\.poetry\bin")) { Write-Host "OK" -ForegroundColor Green } -& poetry run python "$($openpype_root)\start.py" projectmanager +& "$env:POETRY_HOME\bin\poetry" run python "$($openpype_root)\start.py" projectmanager Set-Location -Path $current_dir diff --git a/tools/run_projectmanager.sh b/tools/run_projectmanager.sh index 312f321d67..b5c858c34a 100755 --- a/tools/run_projectmanager.sh +++ b/tools/run_projectmanager.sh @@ -79,11 +79,9 @@ main () { _inside_openpype_tool="1" - # make sure Poetry is in PATH if [[ -z $POETRY_HOME ]]; then export POETRY_HOME="$openpype_root/.poetry" fi - export PATH="$POETRY_HOME/bin:$PATH" pushd "$openpype_root" > /dev/null || return > /dev/null @@ -97,7 +95,7 @@ main () { fi echo -e "${BIGreen}>>>${RST} Generating zip from current sources ..." - poetry run python "$openpype_root/start.py" projectmanager + "$POETRY_HOME/bin/poetry" run python "$openpype_root/start.py" projectmanager } main diff --git a/tools/run_settings.ps1 b/tools/run_settings.ps1 index 7477e546b3..1c0aa6e8f3 100644 --- a/tools/run_settings.ps1 +++ b/tools/run_settings.ps1 @@ -27,7 +27,7 @@ Set-Location -Path $openpype_root Write-Host ">>> " -NoNewline -ForegroundColor Green Write-Host "Reading Poetry ... " -NoNewline -if (-not (Test-Path -PathType Container -Path "$openpype_root\.poetry\bin")) { +if (-not (Test-Path -PathType Container -Path "$($env:POETRY_HOME)\bin")) { Write-Host "NOT FOUND" -ForegroundColor Yellow Write-Host "*** " -NoNewline -ForegroundColor Yellow Write-Host "We need to install Poetry create virtual env first ..." @@ -36,5 +36,5 @@ if (-not (Test-Path -PathType Container -Path "$openpype_root\.poetry\bin")) { Write-Host "OK" -ForegroundColor Green } -& poetry run python "$($openpype_root)\start.py" settings --dev +& "$env:POETRY_HOME\bin\poetry" run python "$($openpype_root)\start.py" settings --dev Set-Location -Path $current_dir \ No newline at end of file diff --git a/tools/run_settings.sh b/tools/run_settings.sh index 0287043bb6..5a465dce2c 100755 --- a/tools/run_settings.sh +++ b/tools/run_settings.sh @@ -79,11 +79,9 @@ main () { _inside_openpype_tool="1" - # make sure Poetry is in PATH if [[ -z $POETRY_HOME ]]; then export POETRY_HOME="$openpype_root/.poetry" fi - export PATH="$POETRY_HOME/bin:$PATH" pushd "$openpype_root" > /dev/null || return > /dev/null @@ -97,7 +95,7 @@ main () { fi echo -e "${BIGreen}>>>${RST} Generating zip from current sources ..." 
- poetry run python3 "$openpype_root/start.py" settings --dev + "$POETRY_HOME/bin/poetry" run python3 "$openpype_root/start.py" settings --dev } main diff --git a/tools/run_tests.ps1 b/tools/run_tests.ps1 index a6882e2a09..e631cb72df 100644 --- a/tools/run_tests.ps1 +++ b/tools/run_tests.ps1 @@ -59,11 +59,9 @@ $openpype_root = (Get-Item $script_dir).parent.FullName $env:_INSIDE_OPENPYPE_TOOL = "1" -# make sure Poetry is in PATH if (-not (Test-Path 'env:POETRY_HOME')) { $env:POETRY_HOME = "$openpype_root\.poetry" } -$env:PATH = "$($env:PATH);$($env:POETRY_HOME)\bin" Set-Location -Path $openpype_root @@ -83,7 +81,7 @@ Write-Host " ] ..." -ForegroundColor white Write-Host ">>> " -NoNewline -ForegroundColor Green Write-Host "Reading Poetry ... " -NoNewline -if (-not (Test-Path -PathType Container -Path "$openpype_root\.poetry\bin")) { +if (-not (Test-Path -PathType Container -Path "$($env:POETRY_HOME)\bin")) { Write-Host "NOT FOUND" -ForegroundColor Yellow Write-Host "*** " -NoNewline -ForegroundColor Yellow Write-Host "We need to install Poetry create virtual env first ..." @@ -102,7 +100,7 @@ Write-Host ">>> " -NoNewline -ForegroundColor green Write-Host "Testing OpenPype ..." $original_pythonpath = $env:PYTHONPATH $env:PYTHONPATH="$($openpype_root);$($env:PYTHONPATH)" -& poetry run pytest -x --capture=sys --print -W ignore::DeprecationWarning "$($openpype_root)/tests" +& "$env:POETRY_HOME\bin\poetry" run pytest -x --capture=sys --print -W ignore::DeprecationWarning "$($openpype_root)/tests" $env:PYTHONPATH = $original_pythonpath Write-Host ">>> " -NoNewline -ForegroundColor green diff --git a/tools/run_tests.sh b/tools/run_tests.sh index 90977edc83..8f8f82fd9c 100755 --- a/tools/run_tests.sh +++ b/tools/run_tests.sh @@ -98,11 +98,9 @@ main () { _inside_openpype_tool="1" - # make sure Poetry is in PATH if [[ -z $POETRY_HOME ]]; then export POETRY_HOME="$openpype_root/.poetry" fi - export PATH="$POETRY_HOME/bin:$PATH" echo -e "${BIGreen}>>>${RST} Reading Poetry ... \c" if [ -f "$POETRY_HOME/bin/poetry" ]; then @@ -118,7 +116,7 @@ main () { echo -e "${BIGreen}>>>${RST} Testing OpenPype ..." original_pythonpath=$PYTHONPATH export PYTHONPATH="$openpype_root:$PYTHONPATH" - poetry run pytest -x --capture=sys --print -W ignore::DeprecationWarning "$openpype_root/tests" + "$POETRY_HOME/bin/poetry" run pytest -x --capture=sys --print -W ignore::DeprecationWarning "$openpype_root/tests" PYTHONPATH=$original_pythonpath } diff --git a/tools/run_tray.ps1 b/tools/run_tray.ps1 index 533a791836..872c1524a6 100644 --- a/tools/run_tray.ps1 +++ b/tools/run_tray.ps1 @@ -26,7 +26,7 @@ Set-Location -Path $openpype_root Write-Host ">>> " -NoNewline -ForegroundColor Green Write-Host "Reading Poetry ... " -NoNewline -if (-not (Test-Path -PathType Container -Path "$openpype_root\.poetry\bin")) { +if (-not (Test-Path -PathType Container -Path "$($env:POETRY_HOME)\bin")) { Write-Host "NOT FOUND" -ForegroundColor Yellow Write-Host "*** " -NoNewline -ForegroundColor Yellow Write-Host "We need to install Poetry create virtual env first ..." 
@@ -35,5 +35,5 @@ if (-not (Test-Path -PathType Container -Path "$openpype_root\.poetry\bin")) { Write-Host "OK" -ForegroundColor Green } -& poetry run python "$($openpype_root)\start.py" tray --debug +& "$($env:POETRY_HOME)\bin\poetry" run python "$($openpype_root)\start.py" tray --debug Set-Location -Path $current_dir \ No newline at end of file diff --git a/tools/run_tray.sh b/tools/run_tray.sh index 339ff6f918..2eb9886063 100755 --- a/tools/run_tray.sh +++ b/tools/run_tray.sh @@ -56,11 +56,9 @@ main () { _inside_openpype_tool="1" - # make sure Poetry is in PATH if [[ -z $POETRY_HOME ]]; then export POETRY_HOME="$openpype_root/.poetry" fi - export PATH="$POETRY_HOME/bin:$PATH" echo -e "${BIGreen}>>>${RST} Reading Poetry ... \c" if [ -f "$POETRY_HOME/bin/poetry" ]; then @@ -74,7 +72,7 @@ main () { pushd "$openpype_root" > /dev/null || return > /dev/null echo -e "${BIGreen}>>>${RST} Running OpenPype Tray with debug option ..." - poetry run python3 "$openpype_root/start.py" tray --debug + "$POETRY_HOME/bin/poetry" run python3 "$openpype_root/start.py" tray --debug } main \ No newline at end of file diff --git a/vendor/deadline/custom/plugins/GlobalJobPreLoad.py b/vendor/deadline/custom/plugins/GlobalJobPreLoad.py index 5e64605271..41df9d4dc9 100644 --- a/vendor/deadline/custom/plugins/GlobalJobPreLoad.py +++ b/vendor/deadline/custom/plugins/GlobalJobPreLoad.py @@ -9,6 +9,10 @@ from Deadline.Scripting import RepositoryUtils, FileUtils def inject_openpype_environment(deadlinePlugin): + """ Pull env vars from OpenPype and push them to rendering process. + + Used for correct paths, configuration from OpenPype etc. + """ job = deadlinePlugin.GetJob() job = RepositoryUtils.GetJob(job.JobId, True) # invalidates cache @@ -73,6 +77,21 @@ def inject_openpype_environment(deadlinePlugin): raise +def inject_render_job_id(deadlinePlugin): + """Inject dependency ids to publish process as env var for validation.""" + print("inject_render_job_id start") + job = deadlinePlugin.GetJob() + job = RepositoryUtils.GetJob(job.JobId, True) # invalidates cache + + dependency_ids = job.JobDependencyIDs + print("dependency_ids {}".format(dependency_ids)) + render_job_ids = ",".join(dependency_ids) + + deadlinePlugin.SetProcessEnvironmentVariable("RENDER_JOB_IDS", + render_job_ids) + print("inject_render_job_id end") + + def pype_command_line(executable, arguments, workingDirectory): """Remap paths in comand line argument string. @@ -156,8 +175,7 @@ def __main__(deadlinePlugin): "render and publish.") if openpype_publish_job == '1': - print("Publish job, skipping inject.") - return + inject_render_job_id(deadlinePlugin) elif openpype_render_job == '1': inject_openpype_environment(deadlinePlugin) else: diff --git a/website/docs/admin_hosts_aftereffects.md b/website/docs/admin_hosts_aftereffects.md index dc43820465..e3a09058da 100644 --- a/website/docs/admin_hosts_aftereffects.md +++ b/website/docs/admin_hosts_aftereffects.md @@ -14,7 +14,7 @@ All of them are Project based, eg. 
each project could have different configurati Location: Settings > Project > AfterEffects -![Harmony Project Settings](assets/admin_hosts_aftereffects_settings.png) +![AfterEffects Project Settings](assets/admin_hosts_aftereffects_settings.png) ## Publish plugins diff --git a/website/docs/admin_hosts_tvpaint.md b/website/docs/admin_hosts_tvpaint.md new file mode 100644 index 0000000000..a99cd19010 --- /dev/null +++ b/website/docs/admin_hosts_tvpaint.md @@ -0,0 +1,30 @@ +--- +id: admin_hosts_tvpaint +title: TVPaint +sidebar_label: TVPaint +--- + +import Tabs from '@theme/Tabs'; +import TabItem from '@theme/TabItem'; + +## Subset name templates +Definition of possibile subset name templates in TVPaint integration. + +### [Render Layer](artist_hosts_tvpaint#render-layer) +Render layer has additional keys for subset name template. It is possible to use **render_layer** and **render_pass**. + +- Key **render_layer** is alias for variant (user's input). +- For key **render_pass** is used predefined value `"Beauty"` (ATM value can't be changed). + +### [Render pass](artist_hosts_tvpaint#render-pass) +Render pass has additional keys for subset name template. It is possible to use **render_layer** and **render_pass**. +- Key **render_layer** is filled with value of **render_pass** from `renderLayer` group. +- Key **render_pass** is alias for variant (user's input). + +:::important Render Layer/Pass templates +It is recommended to use same subset name template for both **renderLayer** and **renderPass** families. +- Example template: `"{family}{Task}_{Render_layer}_{Render_pass}"` +::: + +### [Review](artist_hosts_tvpaint#review) and [Workfile](artist_hosts_tvpaint#workfile) +Families **review** and **workfile** are not manually created but are automatically generated during publishing. That's why it is recommended to not use **variant** key in their subset name template. diff --git a/website/docs/admin_openpype_commands.md b/website/docs/admin_openpype_commands.md index 324e0e8481..1a91e2e7fe 100644 --- a/website/docs/admin_openpype_commands.md +++ b/website/docs/admin_openpype_commands.md @@ -21,6 +21,8 @@ openpype_console --use-version=3.0.0-foo+bar `--use-staging` - to use staging versions of OpenPype. +`--list-versions [--use-staging]` - to list available versions. + For more information [see here](admin_use#run-openpype). ## Commands diff --git a/website/docs/admin_use.md b/website/docs/admin_use.md index 4a2b56e6f4..4ad08a0174 100644 --- a/website/docs/admin_use.md +++ b/website/docs/admin_use.md @@ -46,6 +46,16 @@ openpype_console --use-version=3.0.1 `--use-staging` - to specify you prefer staging version. In that case it will be used (if found) instead of production one. +:::tip List available versions +To list all available versions, use: + +```shell +openpype_console --list-versions +``` + +You can add `--use-staging` to list staging versions. +::: + ### Details When you run OpenPype from executable, few check are made: diff --git a/website/docs/artist_hosts_tvpaint.md b/website/docs/artist_hosts_tvpaint.md index 19cb615158..2e831e64d8 100644 --- a/website/docs/artist_hosts_tvpaint.md +++ b/website/docs/artist_hosts_tvpaint.md @@ -45,7 +45,7 @@ In TVPaint you can find the Tools in OpenPype menu extension. The OpenPype Tools ## Create -In TVPaint you can create and publish **[Reviews](#review)**, **[Render Passes](#render-pass)**, and **[Render Layers](#render-layer)**. 
+In TVPaint you can create and publish **[Reviews](#review)**, **[Workfile](#workfile)**, **[Render Passes](#render-pass)** and **[Render Layers](#render-layer)**. You have the possibility to organize your layers by using `Color group`. @@ -67,26 +67,13 @@ OpenPype specifically never tries to guess what you want to publish from the sce When you want to publish `review` or `render layer` or `render pass`, open the `Creator` through the Tools menu `Create` button. -### Review +### Review +`Review` renders the whole file as is and sends the resulting QuickTime to Ftrack. +- Is automatically created during publishing. -
-
-
-`Review` renders the whole file as is and sends the resulting QuickTime to Ftrack.
-
-To create reviewable quicktime of your animation:
-
-- select `Review` in the `Creator`
-- press `Create`
-- When you run [publish](#publish), file will be rendered and converted to quicktime.`
-
-
-
-![createreview](assets/tvp_create_review.png)
-
-
+### Workfile +`Workfile` stores the source workfile as is during publishing (e.g. for backup). +- Is automatically created during publishing. ### Render Layer diff --git a/website/docs/assets/ftrack/ftrack-collect-advanced.png b/website/docs/assets/ftrack/ftrack-collect-advanced.png new file mode 100644 index 0000000000..5249685c86 Binary files /dev/null and b/website/docs/assets/ftrack/ftrack-collect-advanced.png differ diff --git a/website/docs/assets/ftrack/ftrack-collect-main.png b/website/docs/assets/ftrack/ftrack-collect-main.png new file mode 100644 index 0000000000..7c75cd6269 Binary files /dev/null and b/website/docs/assets/ftrack/ftrack-collect-main.png differ diff --git a/website/docs/assets/tvp_create_review.png b/website/docs/assets/tvp_create_review.png deleted file mode 100644 index d6e9f63428..0000000000 Binary files a/website/docs/assets/tvp_create_review.png and /dev/null differ diff --git a/website/docs/dev_build.md b/website/docs/dev_build.md index 2c4bd1e9af..b3e0c24fc2 100644 --- a/website/docs/dev_build.md +++ b/website/docs/dev_build.md @@ -137,12 +137,12 @@ $ pyenv install -v 3.7.10 $ cd /path/to/pype-3 # set local python version -$ pyenv local 3.7.9 +$ pyenv local 3.7.10 ``` :::note Install build requirements for **Ubuntu** ```shell -sudo apt-get update; sudo apt-get install --no-install-recommends make build-essential libssl-dev zlib1g-dev libbz2-dev libreadline-dev libsqlite3-dev wget curl llvm libncurses5-dev xz-utils tk-dev libxml2-dev libxmlsec1-dev libffi-dev liblzma-dev git +sudo apt-get update; sudo apt-get install --no-install-recommends make build-essential libssl-dev zlib1g-dev libbz2-dev libreadline-dev libsqlite3-dev wget curl llvm libncurses5-dev xz-utils tk-dev libxml2-dev libxmlsec1-dev libffi-dev liblzma-dev git patchelf ``` In case you run in error about `xcb` when running Pype, diff --git a/website/docs/module_ftrack.md b/website/docs/module_ftrack.md index bd0dbaef4f..005270b3b9 100644 --- a/website/docs/module_ftrack.md +++ b/website/docs/module_ftrack.md @@ -34,7 +34,7 @@ To prepare Ftrack for working with OpenPype you'll need to run [OpenPype Admin - Ftrack Event Server is the key to automation of many tasks like _status change_, _thumbnail update_, _automatic synchronization to Avalon database_ and many more. Event server should run at all times to perform the required processing as it is not possible to catch some of them retrospectively with enough certainty. ### Running event server -There are specific launch arguments for event server. With `openpype eventserver` you can launch event server but without prior preparation it will terminate immediately. The reason is that event server requires 3 pieces of information: _Ftrack server url_, _paths to events_ and _credentials (Username and API key)_. Ftrack server URL and Event path are set from OpenPype's environments by default, but the credentials must be done separatelly for security reasons. +There are specific launch arguments for event server. With `openpype_console eventserver` you can launch event server but without prior preparation it will terminate immediately. The reason is that event server requires 3 pieces of information: _Ftrack server url_, _paths to events_ and _credentials (Username and API key)_. Ftrack server URL and Event path are set from OpenPype's environments by default, but the credentials must be done separatelly for security reasons. @@ -56,7 +56,7 @@ There are specific launch arguments for event server. 
With `openpype eventserver - `--ftrack-url "https://yourdomain.ftrackapp.com/"` : Ftrack server URL _(it is not needed to enter if you have set `FTRACK_SERVER` in OpenPype' environments)_ - `--ftrack-events-path "//Paths/To/Events/"` : Paths to events folder. May contain multiple paths separated by `;`. _(it is not needed to enter if you have set `FTRACK_EVENTS_PATH` in OpenPype' environments)_ -So if you want to use OpenPype's environments then you can launch event server for first time with these arguments `openpype.exe eventserver --ftrack-user "my.username" --ftrack-api-key "00000aaa-11bb-22cc-33dd-444444eeeee" --store-credentials`. Since that time, if everything was entered correctly, you can launch event server with `openpype.exe eventserver`. +So if you want to use OpenPype's environments then you can launch event server for first time with these arguments `openpype_console.exe eventserver --ftrack-user "my.username" --ftrack-api-key "00000aaa-11bb-22cc-33dd-444444eeeee" --store-credentials`. Since that time, if everything was entered correctly, you can launch event server with `openpype_console.exe eventserver`. @@ -100,14 +100,17 @@ Event server should **not** run more than once! It may cause major issues. - create file: - `sudo vi /opt/OpenPype/run_event_server.sh` + `sudo vi /opt/openpype/run_event_server.sh` - add content to the file: ```sh -#!\usr\bin\env +#!/usr/bin/env export OPENPYPE_DEBUG=3 pushd /mnt/pipeline/prod/openpype-setup -. openpype eventserver --ftrack-user --ftrack-api-key +. openpype_console eventserver --ftrack-user --ftrack-api-key ``` +- change file permission: + `sudo chmod 0755 /opt/openpype/run_event_server.sh` + - create service file: `sudo vi /etc/systemd/system/openpype-ftrack-event-server.service` - add content to the service file @@ -145,7 +148,7 @@ WantedBy=multi-user.target @echo off set OPENPYPE_DEBUG=3 pushd \\path\to\file\ -call openpype.bat eventserver --ftrack-user --ftrack-api-key +openpype_console.exe eventserver --ftrack-user --ftrack-api-key ``` - download and install `nssm.cc` - create Windows service according to nssm.cc manual @@ -196,7 +199,7 @@ Is used to remove value from `Avalon/Mongo Id` Custom Attribute when entity is c ### Sync status from Task to Parent -List of parent boject types where this is triggered ("Shot", "Asset build", etc. Skipped if it is empty) +List of parent object types where this is triggered ("Shot", "Asset build", etc. Skipped if it is empty) ### Sync status from Version to Task @@ -214,3 +217,29 @@ This is usefull for example if first version publish doesn't contain any actual ### Update status on next task Change status on next task by task types order when task status state changed to "Done". All tasks with the same Task mapping of next task status changes From → To. Some status can be ignored. + +## Publish plugins + +### Collect Ftrack Family + +Reviews uploads to Ftrack could be configured by combination of hosts, families and task names. +(Currently implemented only in Standalone Publisher, Maya.) + +#### Profiles + +Profiles are used to select when to add Ftrack family to the instance. One or multiple profiles could be configured, Families, Task names (regex available), Host names combination is needed. + +Eg. 
If I want review created and uploaded to Ftrack for render published from Maya , setting is: + +Host names: 'Maya' +Families: 'render' +Add Ftrack Family: enabled + +![Collect Ftrack Family](assets/ftrack/ftrack-collect-main.png) + +#### Advanced adding if additional families present + +In special cases adding 'ftrack' based on main family ('Families' set higher) is not enough. +(For example upload to Ftrack for 'plate' main family should only happen if 'review' is contained in instance 'families', not added in other cases. ) + +![Collect Ftrack Family](assets/ftrack/ftrack-collect-advanced.png) \ No newline at end of file diff --git a/website/docs/project_settings/assets/global_extract_review_output_defs.png b/website/docs/project_settings/assets/global_extract_review_output_defs.png index ce3c00ca40..f4c1661b11 100644 Binary files a/website/docs/project_settings/assets/global_extract_review_output_defs.png and b/website/docs/project_settings/assets/global_extract_review_output_defs.png differ diff --git a/website/docs/project_settings/assets/global_tools_creator_subset_template.png b/website/docs/project_settings/assets/global_tools_creator_subset_template.png new file mode 100644 index 0000000000..c4e863c4e0 Binary files /dev/null and b/website/docs/project_settings/assets/global_tools_creator_subset_template.png differ diff --git a/website/docs/project_settings/settings_project_global.md b/website/docs/project_settings/settings_project_global.md index 5d23dd75e6..e6336c36e2 100644 --- a/website/docs/project_settings/settings_project_global.md +++ b/website/docs/project_settings/settings_project_global.md @@ -112,6 +112,10 @@ Profile may generate multiple outputs from a single input. Each output must defi | "-10% -200px" | 1800px 800px | | "-10% -0px" | 1800px 1000px | +- **`Overscan color`** + - Color of empty area caused by different aspect ratio of input and output. + - By default is set to black color. + - **`Letter Box`** - **Enabled** - Enable letter boxes - **Ratio** - Ratio of letter boxes @@ -124,6 +128,14 @@ Profile may generate multiple outputs from a single input. Each output must defi ![global_extract_review_letter_box_settings](assets/global_extract_review_letter_box_settings.png) ![global_extract_review_letter_box](assets/global_extract_review_letter_box.png) +- **`Background color`** + - Background color can be used for inputs with possible transparency (e.g. png sequence). + - Input's without possible alpha channel are ignored all the time (e.g. mov). + - Background color slows down rendering process. + - set alpha to `0` to not use this option at all (in most of cases background stays black) + - other than `0` alpha will draw color as background + + ### IntegrateAssetNew Saves information for all published subsets into DB, published assets are available for other hosts, tools and tasks after. @@ -160,6 +172,39 @@ Applicable context filters: ## Tools Settings for OpenPype tools. +## Creator +Settings related to [Creator tool](artist_tools.md#details). + +### Subset name profiles +![global_tools_creator_subset_template](assets/global_tools_creator_subset_template.png) + +Subset name helps to identify published content. More specific name helps with organization and avoid mixing of published content. Subset name is defined using one of templates defined in **Subset name profiles settings**. The template is filled with context information at the time of creation. + +Usage of template is defined by profile filtering using creator's family, host and task name. 
## Workfiles

All settings related to Workfile tool.
diff --git a/website/sidebars.js b/website/sidebars.js
index 59071ec34f..d38973e40f 100644
--- a/website/sidebars.js
+++ b/website/sidebars.js
@@ -90,7 +90,8 @@ module.exports = {
        "admin_hosts_maya",
        "admin_hosts_resolve",
        "admin_hosts_harmony",
-        "admin_hosts_aftereffects"
+        "admin_hosts_aftereffects",
+        "admin_hosts_tvpaint"
      ],
    },
    {
diff --git a/website/yarn.lock b/website/yarn.lock
index 2d5ec103d4..a63bf37731 100644
--- a/website/yarn.lock
+++ b/website/yarn.lock
@@ -2667,15 +2667,6 @@ cli-boxes@^2.2.1:
  resolved "https://registry.yarnpkg.com/cli-boxes/-/cli-boxes-2.2.1.tgz#ddd5035d25094fce220e9cab40a45840a440318f"
  integrity sha512-y4coMcylgSCdVinjiDBuR8PCC2bLjyGTwEmPb9NHR/QaNU6EUOXcTY/s6VjGMD6ENSEaeQYHCY0GNGS5jfMwPw==

-clipboard@^2.0.0:
-  version "2.0.8"
-  resolved "https://registry.yarnpkg.com/clipboard/-/clipboard-2.0.8.tgz#ffc6c103dd2967a83005f3f61976aa4655a4cdba"
-  integrity sha512-Y6WO0unAIQp5bLmk1zdThRhgJt/x3ks6f30s3oE3H1mgIEU33XyQjEf8gsf6DxC7NPX8Y1SsNWjUjL/ywLnnbQ==
-  dependencies:
-    good-listener "^1.2.2"
-    select "^1.1.2"
-    tiny-emitter "^2.0.0"
-
cliui@^5.0.0:
  version "5.0.0"
  resolved "https://registry.yarnpkg.com/cliui/-/cliui-5.0.0.tgz#deefcfdb2e800784aa34f46fa08e06851c7bbbc5"
@@ -3310,11 +3301,6 @@ del@^6.0.0:
  rimraf "^3.0.2"
  slash "^3.0.0"

-delegate@^3.1.2:
-  version "3.2.0"
-  resolved "https://registry.yarnpkg.com/delegate/-/delegate-3.2.0.tgz#b66b71c3158522e8ab5744f720d8ca0c2af59166"
-  integrity sha512-IofjkYBZaZivn0V8nnsMJGBr4jVLxHDheKSW88PyxS5QC4Vo9ZbZVvhzlSxY87fVq3STR6r+4cGepyHkcWOQSw==
-
depd@~1.1.2:
  version "1.1.2"
  resolved "https://registry.yarnpkg.com/depd/-/depd-1.1.2.tgz#9bcd52e14c097763e749b274c4346ed2e560b5a9"
@@ -4224,13 +4210,6 @@ globby@^6.1.0:
  pify "^2.0.0"
  pinkie-promise "^2.0.0"

-good-listener@^1.2.2:
-  version "1.2.2"
-  resolved "https://registry.yarnpkg.com/good-listener/-/good-listener-1.2.2.tgz#d53b30cdf9313dffb7dc9a0d477096aa6d145c50"
-  integrity sha1-1TswzfkxPf+33JoNR3CWqm0UXFA=
-  dependencies:
-    delegate "^3.1.2"
-
got@^9.6.0:
  version "9.6.0"
  resolved "https://registry.yarnpkg.com/got/-/got-9.6.0.tgz#edf45e7d67f99545705de1f7bbeeeb121765ed85"
@@ -6615,11 +6594,9 @@ prism-react-renderer@^1.1.1:
  integrity sha512-GHqzxLYImx1iKN1jJURcuRoA/0ygCcNhfGw1IT8nPIMzarmKQ3Nc+JcG0gi8JXQzuh0C5ShE4npMIoqNin40hg==

prismjs@^1.23.0:
-  version "1.23.0"
-  resolved "https://registry.yarnpkg.com/prismjs/-/prismjs-1.23.0.tgz#d3b3967f7d72440690497652a9d40ff046067f33"
-  integrity
sha512-c29LVsqOaLbBHuIbsTxaKENh1N2EQBOHaWv7gkHN4dgRbxSREqDnDbtFJYdpPauS4YCplMSNCABQ6Eeor69bAA== - optionalDependencies: - clipboard "^2.0.0" + version "1.24.0" + resolved "https://registry.yarnpkg.com/prismjs/-/prismjs-1.24.0.tgz#0409c30068a6c52c89ef7f1089b3ca4de56be2ac" + integrity sha512-SqV5GRsNqnzCL8k5dfAjCNhUrF3pR0A9lTDSCUZeh/LIshheXJEaP0hwLz2t4XHivd2J/v2HR+gRnigzeKe3cQ== process-nextick-args@~2.0.0: version "2.0.1" @@ -7390,11 +7367,6 @@ select-hose@^2.0.0: resolved "https://registry.yarnpkg.com/select-hose/-/select-hose-2.0.0.tgz#625d8658f865af43ec962bfc376a37359a4994ca" integrity sha1-Yl2GWPhlr0Psliv8N2o3NZpJlMo= -select@^1.1.2: - version "1.1.2" - resolved "https://registry.yarnpkg.com/select/-/select-1.1.2.tgz#0e7350acdec80b1108528786ec1d4418d11b396d" - integrity sha1-DnNQrN7ICxEIUoeG7B1EGNEbOW0= - selfsigned@^1.10.8: version "1.10.8" resolved "https://registry.yarnpkg.com/selfsigned/-/selfsigned-1.10.8.tgz#0d17208b7d12c33f8eac85c41835f27fc3d81a30" @@ -8016,11 +7988,6 @@ timsort@^0.3.0: resolved "https://registry.yarnpkg.com/timsort/-/timsort-0.3.0.tgz#405411a8e7e6339fe64db9a234de11dc31e02bd4" integrity sha1-QFQRqOfmM5/mTbmiNN4R3DHgK9Q= -tiny-emitter@^2.0.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/tiny-emitter/-/tiny-emitter-2.1.0.tgz#1d1a56edfc51c43e863cbb5382a72330e3555423" - integrity sha512-NB6Dk1A9xgQPMoGqC5CVXn123gWyte215ONT5Pp5a0yt4nlEoO1ZWeCwpncaekPHXO60i47ihFnZPiRPjRMq4Q== - tiny-invariant@^1.0.2: version "1.1.0" resolved "https://registry.yarnpkg.com/tiny-invariant/-/tiny-invariant-1.1.0.tgz#634c5f8efdc27714b7f386c35e6760991d230875"