Mirror of https://github.com/ynput/ayon-core.git (synced 2025-12-25 05:14:40 +01:00)

Commit b30a95a991: Merge branch 'develop' into enhancement/OP-2825_attach-loaded-containers

215 changed files with 4781 additions and 2401 deletions
.github/workflows/prerelease.yml (vendored) - 2 changed lines

@@ -43,7 +43,7 @@ jobs:
      uses: heinrichreimer/github-changelog-generator-action@v2.2
      with:
        token: ${{ secrets.ADMIN_TOKEN }}
-        addSections: '{"documentation":{"prefix":"### 📖 Documentation","labels":["type: documentation"]},"tests":{"prefix":"### ✅ Testing","labels":["tests"]},"feature":{"prefix":"**🆕 New features**", "labels":["type: feature"]},"breaking":{"prefix":"**💥 Breaking**", "labels":["breaking"]},"enhancements":{"prefix":"**🚀 Enhancements**", "labels":["type: enhancement"]},"bugs":{"prefix":"**🐛 Bug fixes**", "labels":["type: bug"]},"deprecated":{"prefix":"**⚠️ Deprecations**", "labels":["depreciated"]}}'
+        addSections: '{"documentation":{"prefix":"### 📖 Documentation","labels":["type: documentation"]},"tests":{"prefix":"### ✅ Testing","labels":["tests"]},"feature":{"prefix":"**🆕 New features**", "labels":["type: feature"]},"breaking":{"prefix":"**💥 Breaking**", "labels":["breaking"]},"enhancements":{"prefix":"**🚀 Enhancements**", "labels":["type: enhancement"]},"bugs":{"prefix":"**🐛 Bug fixes**", "labels":["type: bug"]},"deprecated":{"prefix":"**⚠️ Deprecations**", "labels":["depreciated"]}, "refactor":{"prefix":"**🔀 Refactored code**", "labels":["refactor"]}}'
        issues: false
        issuesWoLabels: false
        sinceTag: "3.0.0"
.github/workflows/release.yml (vendored) - 4 changed lines

@@ -39,7 +39,7 @@ jobs:
      uses: heinrichreimer/github-changelog-generator-action@v2.2
      with:
        token: ${{ secrets.ADMIN_TOKEN }}
-        addSections: '{"tests":{"prefix":"### ✅ Testing","labels":["tests"]},"feature":{"prefix":"**🆕 New features**", "labels":["type: feature"]},"breaking":{"prefix":"**💥 Breaking**", "labels":["breaking"]},"enhancements":{"prefix":"**🚀 Enhancements**", "labels":["type: enhancement"]},"bugs":{"prefix":"**🐛 Bug fixes**", "labels":["type: bug"]},"deprecated":{"prefix":"**⚠️ Deprecations**", "labels":["depreciated"]},"documentation":{"prefix":"### 📖 Documentation","labels":["type: documentation"]}}'
+        addSections: '{"documentation":{"prefix":"### 📖 Documentation","labels":["type: documentation"]},"tests":{"prefix":"### ✅ Testing","labels":["tests"]},"feature":{"prefix":"**🆕 New features**", "labels":["type: feature"]},"breaking":{"prefix":"**💥 Breaking**", "labels":["breaking"]},"enhancements":{"prefix":"**🚀 Enhancements**", "labels":["type: enhancement"]},"bugs":{"prefix":"**🐛 Bug fixes**", "labels":["type: bug"]},"deprecated":{"prefix":"**⚠️ Deprecations**", "labels":["depreciated"]}, "refactor":{"prefix":"**🔀 Refactored code**", "labels":["refactor"]}}'
        issues: false
        issuesWoLabels: false
        sinceTag: "3.0.0"

@@ -81,7 +81,7 @@ jobs:
      uses: heinrichreimer/github-changelog-generator-action@v2.2
      with:
        token: ${{ secrets.ADMIN_TOKEN }}
-        addSections: '{"documentation":{"prefix":"### 📖 Documentation","labels":["type: documentation"]},"tests":{"prefix":"### ✅ Testing","labels":["tests"]},"feature":{"prefix":"**🆕 New features**", "labels":["type: feature"]},"breaking":{"prefix":"**💥 Breaking**", "labels":["breaking"]},"enhancements":{"prefix":"**🚀 Enhancements**", "labels":["type: enhancement"]},"bugs":{"prefix":"**🐛 Bug fixes**", "labels":["type: bug"]},"deprecated":{"prefix":"**⚠️ Deprecations**", "labels":["depreciated"]}}'
+        addSections: '{"documentation":{"prefix":"### 📖 Documentation","labels":["type: documentation"]},"tests":{"prefix":"### ✅ Testing","labels":["tests"]},"feature":{"prefix":"**🆕 New features**", "labels":["type: feature"]},"breaking":{"prefix":"**💥 Breaking**", "labels":["breaking"]},"enhancements":{"prefix":"**🚀 Enhancements**", "labels":["type: enhancement"]},"bugs":{"prefix":"**🐛 Bug fixes**", "labels":["type: bug"]},"deprecated":{"prefix":"**⚠️ Deprecations**", "labels":["depreciated"]}, "refactor":{"prefix":"**🔀 Refactored code**", "labels":["refactor"]}}'
        issues: false
        issuesWoLabels: false
        sinceTag: ${{ steps.version.outputs.last_release }}
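The only functional change in both workflow files is one extra entry in the `addSections` mapping passed to the changelog generator action. The value is a JSON object keyed by section id, where each entry carries the heading prefix used in CHANGELOG.md and the PR labels collected under it. A minimal sketch of that structure, trimmed to two of the existing sections plus the newly added one (illustrative only, not the full mapping from the workflow):

```python
import json

# Two of the existing sections, abbreviated for illustration.
add_sections = {
    "documentation": {"prefix": "### 📖 Documentation", "labels": ["type: documentation"]},
    "bugs": {"prefix": "**🐛 Bug fixes**", "labels": ["type: bug"]},
}

# The entry introduced by this merge: PRs labelled "refactor"
# are grouped under a "Refactored code" heading.
add_sections["refactor"] = {
    "prefix": "**🔀 Refactored code**",
    "labels": ["refactor"],
}

# The workflow passes the whole mapping as a single JSON string.
print(json.dumps(add_sections, ensure_ascii=False))
```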
CHANGELOG.md - 51 changed lines

@@ -1,67 +1,66 @@
# Changelog

-## [3.9.0-nightly.5](https://github.com/pypeclub/OpenPype/tree/HEAD)
+## [3.9.0-nightly.7](https://github.com/pypeclub/OpenPype/tree/HEAD)

[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.8.2...HEAD)

**Deprecated:**

- AssetCreator: Remove the tool [\#2845](https://github.com/pypeclub/OpenPype/pull/2845)
- Houdini: Remove unused code [\#2779](https://github.com/pypeclub/OpenPype/pull/2779)

### 📖 Documentation

- Documentation: fixed broken links [\#2799](https://github.com/pypeclub/OpenPype/pull/2799)
- Documentation: broken link fix [\#2785](https://github.com/pypeclub/OpenPype/pull/2785)
- Documentation: link fixes [\#2772](https://github.com/pypeclub/OpenPype/pull/2772)
- Update docusaurus to latest version [\#2760](https://github.com/pypeclub/OpenPype/pull/2760)
- Various testing updates [\#2726](https://github.com/pypeclub/OpenPype/pull/2726)

**🚀 Enhancements**

- New: Validation exceptions [\#2841](https://github.com/pypeclub/OpenPype/pull/2841)
- Ftrack: Can sync fps as string [\#2836](https://github.com/pypeclub/OpenPype/pull/2836)
- General: Custom function for find executable [\#2822](https://github.com/pypeclub/OpenPype/pull/2822)
- General: Color dialog UI fixes [\#2817](https://github.com/pypeclub/OpenPype/pull/2817)
- global: letter box calculated on output as last process [\#2812](https://github.com/pypeclub/OpenPype/pull/2812)
- Nuke: adding Reformat to baking mov plugin [\#2811](https://github.com/pypeclub/OpenPype/pull/2811)
- Manager: Update all to latest button [\#2805](https://github.com/pypeclub/OpenPype/pull/2805)
- General: Set context environments for non host applications [\#2803](https://github.com/pypeclub/OpenPype/pull/2803)
- Tray publisher: New Tray Publisher host \(beta\) [\#2778](https://github.com/pypeclub/OpenPype/pull/2778)
- Houdini: Implement Reset Frame Range [\#2770](https://github.com/pypeclub/OpenPype/pull/2770)
- Pyblish Pype: Remove redundant new line in installed fonts printing [\#2758](https://github.com/pypeclub/OpenPype/pull/2758)
- Flame: use Shot Name on segment for asset name [\#2751](https://github.com/pypeclub/OpenPype/pull/2751)
- Flame: adding validator source clip [\#2746](https://github.com/pypeclub/OpenPype/pull/2746)
- Ftrack: Disable ftrack module by default [\#2732](https://github.com/pypeclub/OpenPype/pull/2732)
- RoyalRender: Minor enhancements [\#2700](https://github.com/pypeclub/OpenPype/pull/2700)

**🐛 Bug fixes**

- WebPublisher: Fix username stored in DB [\#2852](https://github.com/pypeclub/OpenPype/pull/2852)
- WebPublisher: Fix wrong number of frames for video file [\#2851](https://github.com/pypeclub/OpenPype/pull/2851)
- Nuke: fix multiple baking profile farm publishing [\#2842](https://github.com/pypeclub/OpenPype/pull/2842)
- Blender: Fixed parameters for FBX export of the camera [\#2840](https://github.com/pypeclub/OpenPype/pull/2840)
- Maya: Stop creation of reviews for Cryptomattes [\#2832](https://github.com/pypeclub/OpenPype/pull/2832)
- Deadline: Remove recreated event [\#2828](https://github.com/pypeclub/OpenPype/pull/2828)
- Deadline: Added missing events folder [\#2827](https://github.com/pypeclub/OpenPype/pull/2827)
- Settings: Missing document with OP versions may break start of OpenPype [\#2825](https://github.com/pypeclub/OpenPype/pull/2825)
- Deadline: more detailed temp file name for environment json [\#2824](https://github.com/pypeclub/OpenPype/pull/2824)
- General: Host name was formed from obsolete code [\#2821](https://github.com/pypeclub/OpenPype/pull/2821)
- Settings UI: Fix "Apply from" action [\#2820](https://github.com/pypeclub/OpenPype/pull/2820)
- Ftrack: Job killer with missing user [\#2819](https://github.com/pypeclub/OpenPype/pull/2819)
- StandalonePublisher: use dynamic groups in subset names [\#2816](https://github.com/pypeclub/OpenPype/pull/2816)
- Settings UI: Search case sensitivity [\#2810](https://github.com/pypeclub/OpenPype/pull/2810)
- Flame Babypublisher optimalization [\#2806](https://github.com/pypeclub/OpenPype/pull/2806)
- resolve: fixing fusion module loading [\#2802](https://github.com/pypeclub/OpenPype/pull/2802)
- Ftrack: Unset task ids from asset versions before tasks are removed [\#2800](https://github.com/pypeclub/OpenPype/pull/2800)
- Slack: fail gracefully if slack exception [\#2798](https://github.com/pypeclub/OpenPype/pull/2798)
- Flame: Fix version string in default settings [\#2783](https://github.com/pypeclub/OpenPype/pull/2783)
- After Effects: Fix typo in name `afftereffects` -\> `aftereffects` [\#2768](https://github.com/pypeclub/OpenPype/pull/2768)
- Avoid renaming udim indexes [\#2765](https://github.com/pypeclub/OpenPype/pull/2765)
- Maya: Fix `unique\_namespace` when in an namespace that is empty [\#2759](https://github.com/pypeclub/OpenPype/pull/2759)
- Loader UI: Fix right click in representation widget [\#2757](https://github.com/pypeclub/OpenPype/pull/2757)
- Aftereffects 2022 and Deadline [\#2748](https://github.com/pypeclub/OpenPype/pull/2748)
- Flame: bunch of bugs [\#2745](https://github.com/pypeclub/OpenPype/pull/2745)
- Maya: Save current scene on workfile publish [\#2744](https://github.com/pypeclub/OpenPype/pull/2744)
- Version Up: Preserve parts of filename after version number \(like subversion\) on version\_up [\#2741](https://github.com/pypeclub/OpenPype/pull/2741)
- Maya: Remove some unused code [\#2709](https://github.com/pypeclub/OpenPype/pull/2709)

**Merged pull requests:**

- General: Move change context functions [\#2839](https://github.com/pypeclub/OpenPype/pull/2839)
- Tools: Don't use avalon tools code [\#2829](https://github.com/pypeclub/OpenPype/pull/2829)
- Move Unreal Implementation to OpenPype [\#2823](https://github.com/pypeclub/OpenPype/pull/2823)
- Ftrack: Job killer with missing user [\#2819](https://github.com/pypeclub/OpenPype/pull/2819)
- Ftrack: Unset task ids from asset versions before tasks are removed [\#2800](https://github.com/pypeclub/OpenPype/pull/2800)
- Slack: fail gracefully if slack exception [\#2798](https://github.com/pypeclub/OpenPype/pull/2798)
- Nuke: Use AVALON\_APP to get value for "app" key [\#2818](https://github.com/pypeclub/OpenPype/pull/2818)
- Ftrack: Moved module one hierarchy level higher [\#2792](https://github.com/pypeclub/OpenPype/pull/2792)
- SyncServer: Moved module one hierarchy level higher [\#2791](https://github.com/pypeclub/OpenPype/pull/2791)
- Royal render: Move module one hierarchy level higher [\#2790](https://github.com/pypeclub/OpenPype/pull/2790)
- Deadline: Move module one hierarchy level higher [\#2789](https://github.com/pypeclub/OpenPype/pull/2789)
- Houdini: Remove duplicate ValidateOutputNode plug-in [\#2780](https://github.com/pypeclub/OpenPype/pull/2780)
- Slack: Added regex for filtering on subset names [\#2775](https://github.com/pypeclub/OpenPype/pull/2775)
- Houdini: Fix open last workfile [\#2767](https://github.com/pypeclub/OpenPype/pull/2767)
- General: Extract template formatting from anatomy [\#2766](https://github.com/pypeclub/OpenPype/pull/2766)
- Harmony: Rendering in Deadline didn't work in other machines than submitter [\#2754](https://github.com/pypeclub/OpenPype/pull/2754)
- Houdini: Move Houdini Save Current File to beginning of ExtractorOrder [\#2747](https://github.com/pypeclub/OpenPype/pull/2747)
- Maya: set Deadline job/batch name to original source workfile name instead of published workfile [\#2733](https://github.com/pypeclub/OpenPype/pull/2733)

## [3.8.2](https://github.com/pypeclub/OpenPype/tree/3.8.2) (2022-02-07)
@@ -5,6 +5,7 @@ import platform
import functools
import logging

+from openpype.pipeline import LegacyCreator
from .settings import get_project_settings
from .lib import (
    Anatomy,

@@ -58,10 +59,15 @@ def patched_discover(superclass):
    """
    # run original discover and get plugins
    plugins = _original_discover(superclass)
+    filtered_plugins = [
+        plugin
+        for plugin in plugins
+        if issubclass(plugin, superclass)
+    ]

-    set_plugin_attributes_from_settings(plugins, superclass)
+    set_plugin_attributes_from_settings(filtered_plugins, superclass)

-    return plugins
+    return filtered_plugins


@import_wrapper

@@ -113,7 +119,7 @@ def install():

    pyblish.register_plugin_path(path)
    avalon.register_plugin_path(avalon.Loader, path)
-    avalon.register_plugin_path(avalon.Creator, path)
+    avalon.register_plugin_path(LegacyCreator, path)
    avalon.register_plugin_path(avalon.InventoryAction, path)

    # apply monkey patched discover to original one
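The point of the change above is that `_original_discover()` can return classes unrelated to the requested superclass; only real subclasses should get settings applied and be returned. A standalone sketch of that filter, with hypothetical stand-in classes (the real bases live in `openpype.pipeline`):

```python
def filter_plugins(plugins, superclass):
    """Keep only classes that genuinely subclass the requested superclass."""
    return [
        plugin for plugin in plugins
        if isinstance(plugin, type) and issubclass(plugin, superclass)
    ]


class LegacyCreator:
    """Stand-in for openpype.pipeline.LegacyCreator."""


class CreateRender(LegacyCreator):
    """Hypothetical discovered plugin."""


class UnrelatedHelper:
    """Discovered by accident; should not be treated as a creator."""


# Only CreateRender survives, so settings are applied to creators only.
assert filter_plugins([CreateRender, UnrelatedHelper], LegacyCreator) == [CreateRender]
```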
@@ -45,9 +45,6 @@ from .lib.avalon_context import (
from . import resources

from .plugin import (
-    PypeCreatorMixin,
-    Creator,
-
    Extractor,

    ValidatePipelineOrder,

@@ -89,9 +86,6 @@ __all__ = [
    # Resources
    "resources",

-    # Pype creator mixin
-    "PypeCreatorMixin",
-    "Creator",
    # plugin classes
    "Extractor",
    # ordering
@@ -9,6 +9,7 @@ from avalon import io, pipeline

from openpype import lib
from openpype.api import Logger
+from openpype.pipeline import LegacyCreator
import openpype.hosts.aftereffects

from .launch_logic import get_stub

@@ -66,7 +67,7 @@ def install():
    pyblish.api.register_plugin_path(PUBLISH_PATH)

    avalon.api.register_plugin_path(avalon.api.Loader, LOAD_PATH)
-    avalon.api.register_plugin_path(avalon.api.Creator, CREATE_PATH)
+    avalon.api.register_plugin_path(LegacyCreator, CREATE_PATH)
    log.info(PUBLISH_PATH)

    pyblish.api.register_callback(

@@ -79,7 +80,7 @@ def install():
def uninstall():
    pyblish.api.deregister_plugin_path(PUBLISH_PATH)
    avalon.api.deregister_plugin_path(avalon.api.Loader, LOAD_PATH)
-    avalon.api.deregister_plugin_path(avalon.api.Creator, CREATE_PATH)
+    avalon.api.deregister_plugin_path(LegacyCreator, CREATE_PATH)


def on_pyblish_instance_toggled(instance, old_value, new_value):
@@ -1,13 +1,12 @@
-from avalon.api import CreatorError
-
import openpype.api
+from openpype.pipeline import create
+from openpype.pipeline import CreatorError
from openpype.hosts.aftereffects.api import (
    get_stub,
    list_instances
)


-class CreateRender(openpype.api.Creator):
+class CreateRender(create.LegacyCreator):
    """Render folder for publish.

    Creates subsets in format 'familyTaskSubsetname',
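Creator plugins across hosts switch from `avalon.api.Creator` (or `openpype.api.Creator`) to `LegacyCreator` from `openpype.pipeline`, as in `CreateRender` above. A minimal sketch of what such a plugin looks like after the move, assuming `LegacyCreator` keeps the attribute-plus-`process()` interface the legacy avalon creators used; the class name and values below are hypothetical:

```python
from openpype.pipeline import create


class CreateReviewFolder(create.LegacyCreator):
    """Hypothetical creator plugin on the new base class."""

    name = "reviewDefault"
    label = "Create Review Folder"
    family = "review"
    defaults = ["Main"]

    def process(self):
        # Host-specific creation logic goes here; subset naming and
        # asset handling are inherited from the base class.
        pass
```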
@@ -0,0 +1,21 @@ (new file)
<?xml version="1.0" encoding="UTF-8"?>
<root>
    <error id="main">
        <title>Subset context</title>
        <description>
            ## Invalid subset context

            Context of the given subset doesn't match your current scene.

            ### How to repair?

            You can fix this with the "repair" button on the right.
        </description>
        <detail>
            ### __Detailed Info__ (optional)

            This might happen if you reuse an old workfile and open it in a different context.
            (Eg. you created subset "renderCompositingDefault" from asset "Robot" in "your_project_Robot_compositing.aep", then opened this workfile in the context "Sloth", but the existing subset for the "Robot" asset stayed in the workfile.)
        </detail>
    </error>
</root>
@@ -0,0 +1,35 @@ (new file)
<?xml version="1.0" encoding="UTF-8"?>
<root>
    <error id="main">
        <title>Scene setting</title>
        <description>
            ## Invalid scene setting found

            One of the settings in the scene doesn't match the asset settings in the database.

            {invalid_setting_str}

            ### How to repair?

            Change the values for {invalid_keys_str} in the scene OR change them in the asset database if they are wrong there.
        </description>
        <detail>
            ### __Detailed Info__ (optional)

            This error is shown when, for example, the resolution in the scene doesn't match the resolution set on the asset in the database.
            Either the value in the database or the value in the scene is wrong.
        </detail>
    </error>
    <error id="file_not_found">
        <title>Scene file doesn't exist</title>
        <description>
            ## Scene file doesn't exist

            Collected scene {scene_url} doesn't exist.

            ### How to repair?

            Re-save the file and start the publish from the beginning again.
        </description>
    </error>
</root>
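These help files pair with `PublishXmlValidationError`: the validator supplies a `key` selecting an `<error>` element and a `formatting_data` dict that fills the `{placeholders}` in the description. A rough, self-contained illustration of that lookup; the real implementation inside `openpype.pipeline` may differ, and the XML snippet and file path below are made up for the example:

```python
import xml.etree.ElementTree as ET

# Trimmed copy of the help file structure (declaration omitted so the
# string can be parsed directly).
HELP_XML = """<root>
    <error id="file_not_found">
        <title>Scene file doesn't exist</title>
        <description>Collected scene {scene_url} doesn't exist.</description>
    </error>
</root>"""


def format_error(xml_text, key, formatting_data):
    """Pick the <error> with the given id and fill its placeholders."""
    root = ET.fromstring(xml_text)
    error = root.find("./error[@id='{}']".format(key))
    return (
        error.findtext("title"),
        error.findtext("description").format(**formatting_data),
    )


title, description = format_error(
    HELP_XML, "file_not_found", {"scene_url": "/tmp/missing_scene.aep"})
assert "missing_scene.aep" in description
```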
@@ -1,6 +1,7 @@
from avalon import api
import pyblish.api
import openpype.api
+from openpype.pipeline import PublishXmlValidationError
from openpype.hosts.aftereffects.api import get_stub


@@ -53,9 +54,8 @@ class ValidateInstanceAsset(pyblish.api.InstancePlugin):
        current_asset = api.Session["AVALON_ASSET"]
        msg = (
            f"Instance asset {instance_asset} is not the same "
-            f"as current context {current_asset}. PLEASE DO:\n"
-            f"Repair with 'A' action to use '{current_asset}'.\n"
-            f"If that's not correct value, close workfile and "
-            f"reopen via Workfiles!"
+            f"as current context {current_asset}."
        )
-        assert instance_asset == current_asset, msg
+
+        if instance_asset != current_asset:
+            raise PublishXmlValidationError(self, msg)
@@ -5,6 +5,7 @@ import re

import pyblish.api

+from openpype.pipeline import PublishXmlValidationError
from openpype.hosts.aftereffects.api import get_asset_settings


@@ -99,12 +100,14 @@ class ValidateSceneSettings(pyblish.api.InstancePlugin):
        self.log.info("current_settings:: {}".format(current_settings))

        invalid_settings = []
+        invalid_keys = set()
        for key, value in expected_settings.items():
            if value != current_settings[key]:
                invalid_settings.append(
                    "{} expected: {} found: {}".format(key, value,
                                                       current_settings[key])
                )
+                invalid_keys.add(key)

        if ((expected_settings.get("handleStart")
                or expected_settings.get("handleEnd"))

@@ -116,7 +119,27 @@ class ValidateSceneSettings(pyblish.api.InstancePlugin):
        msg = "Found invalid settings:\n{}".format(
            "\n".join(invalid_settings)
        )
-        assert not invalid_settings, msg
-        assert os.path.exists(instance.data.get("source")), (
-            "Scene file not found (saved under wrong name)"
-        )
+
+        if invalid_settings:
+            invalid_keys_str = ",".join(invalid_keys)
+            break_str = "<br/>"
+            invalid_setting_str = "<b>Found invalid settings:</b><br/>{}".\
+                format(break_str.join(invalid_settings))
+
+            formatting_data = {
+                "invalid_setting_str": invalid_setting_str,
+                "invalid_keys_str": invalid_keys_str
+            }
+            raise PublishXmlValidationError(self, msg,
+                                            formatting_data=formatting_data)
+
+        if not os.path.exists(instance.data.get("source")):
+            scene_url = instance.data.get("source")
+            msg = "Scene file {} not found (saved under wrong name)".format(
+                scene_url
+            )
+            formatting_data = {
+                "scene_url": scene_url
+            }
+            raise PublishXmlValidationError(self, msg, key="file_not_found",
+                                            formatting_data=formatting_data)
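The comparison itself is plain dictionary diffing. A distilled, standalone version of the loop above (hypothetical helper, shown only to make the shape of the `{invalid_setting_str}` and `{invalid_keys_str}` inputs obvious):

```python
def find_invalid_settings(expected_settings, current_settings):
    """Return difference strings and the offending keys.

    The strings feed the exception message and, joined with <br/>, the
    {invalid_setting_str} placeholder; the keys feed {invalid_keys_str}.
    """
    invalid_settings = []
    invalid_keys = set()
    for key, value in expected_settings.items():
        if value != current_settings.get(key):
            invalid_settings.append(
                "{} expected: {} found: {}".format(
                    key, value, current_settings.get(key)))
            invalid_keys.add(key)
    return invalid_settings, invalid_keys


# Example: scene resolution disagrees with the asset database.
diffs, keys = find_invalid_settings(
    {"resolutionWidth": 1920, "fps": 25.0},
    {"resolutionWidth": 1280, "fps": 25.0})
assert keys == {"resolutionWidth"}
assert diffs == ["resolutionWidth expected: 1920 found: 1280"]
```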
|
|
@ -14,6 +14,7 @@ import avalon.api
|
|||
from avalon import io, schema
|
||||
from avalon.pipeline import AVALON_CONTAINER_ID
|
||||
|
||||
from openpype.pipeline import LegacyCreator
|
||||
from openpype.api import Logger
|
||||
import openpype.hosts.blender
|
||||
|
||||
|
|
@ -46,7 +47,7 @@ def install():
|
|||
pyblish.api.register_plugin_path(str(PUBLISH_PATH))
|
||||
|
||||
avalon.api.register_plugin_path(avalon.api.Loader, str(LOAD_PATH))
|
||||
avalon.api.register_plugin_path(avalon.api.Creator, str(CREATE_PATH))
|
||||
avalon.api.register_plugin_path(LegacyCreator, str(CREATE_PATH))
|
||||
|
||||
lib.append_user_scripts()
|
||||
|
||||
|
|
@ -67,7 +68,7 @@ def uninstall():
|
|||
pyblish.api.deregister_plugin_path(str(PUBLISH_PATH))
|
||||
|
||||
avalon.api.deregister_plugin_path(avalon.api.Loader, str(LOAD_PATH))
|
||||
avalon.api.deregister_plugin_path(avalon.api.Creator, str(CREATE_PATH))
|
||||
avalon.api.deregister_plugin_path(LegacyCreator, str(CREATE_PATH))
|
||||
|
||||
if not IS_HEADLESS:
|
||||
ops.unregister()
|
||||
|
|
@ -202,13 +203,10 @@ def reload_pipeline(*args):
|
|||
avalon.api.uninstall()
|
||||
|
||||
for module in (
|
||||
"avalon.io",
|
||||
"avalon.lib",
|
||||
"avalon.pipeline",
|
||||
"avalon.tools.creator.app",
|
||||
"avalon.tools.manager.app",
|
||||
"avalon.api",
|
||||
"avalon.tools",
|
||||
"avalon.io",
|
||||
"avalon.lib",
|
||||
"avalon.pipeline",
|
||||
"avalon.api",
|
||||
):
|
||||
module = importlib.import_module(module)
|
||||
importlib.reload(module)
|
||||
|
|
|
|||
|
|
@ -6,7 +6,7 @@ from typing import Dict, List, Optional
|
|||
import bpy
|
||||
|
||||
import avalon.api
|
||||
from openpype.api import PypeCreatorMixin
|
||||
from openpype.pipeline import LegacyCreator
|
||||
from .pipeline import AVALON_CONTAINERS
|
||||
from .ops import (
|
||||
MainThreadItem,
|
||||
|
|
@ -129,7 +129,7 @@ def deselect_all():
|
|||
bpy.context.view_layer.objects.active = active
|
||||
|
||||
|
||||
class Creator(PypeCreatorMixin, avalon.api.Creator):
|
||||
class Creator(LegacyCreator):
|
||||
"""Base class for Creator plug-ins."""
|
||||
defaults = ['Main']
|
||||
|
||||
|
|
|
|||
|
|
@ -8,6 +8,7 @@ import bpy
|
|||
|
||||
from avalon import api
|
||||
from openpype import lib
|
||||
from openpype.pipeline import legacy_create
|
||||
from openpype.hosts.blender.api import plugin
|
||||
from openpype.hosts.blender.api.pipeline import (
|
||||
AVALON_CONTAINERS,
|
||||
|
|
@ -159,7 +160,7 @@ class BlendLayoutLoader(plugin.AssetLoader):
|
|||
raise ValueError("Creator plugin \"CreateAnimation\" was "
|
||||
"not found.")
|
||||
|
||||
api.create(
|
||||
legacy_create(
|
||||
creator_plugin,
|
||||
name=local_obj.name.split(':')[-1] + "_animation",
|
||||
asset=asset,
|
||||
|
|
|
|||
|
|
@ -8,7 +8,6 @@ from typing import Dict, Optional
|
|||
import bpy
|
||||
|
||||
from avalon import api
|
||||
from openpype import lib
|
||||
from openpype.hosts.blender.api.pipeline import (
|
||||
AVALON_INSTANCES,
|
||||
AVALON_CONTAINERS,
|
||||
|
|
@ -118,7 +117,7 @@ class JsonLayoutLoader(plugin.AssetLoader):
|
|||
# raise ValueError("Creator plugin \"CreateCamera\" was "
|
||||
# "not found.")
|
||||
|
||||
# api.create(
|
||||
# legacy_create(
|
||||
# creator_plugin,
|
||||
# name="camera",
|
||||
# # name=f"{unique_number}_{subset}_animation",
|
||||
|
|
|
|||
|
|
@ -9,6 +9,7 @@ import bpy
|
|||
from avalon import api
|
||||
from avalon.blender import lib as avalon_lib
|
||||
from openpype import lib
|
||||
from openpype.pipeline import legacy_create
|
||||
from openpype.hosts.blender.api import plugin
|
||||
from openpype.hosts.blender.api.pipeline import (
|
||||
AVALON_CONTAINERS,
|
||||
|
|
@ -248,7 +249,7 @@ class BlendRigLoader(plugin.AssetLoader):
|
|||
|
||||
animation_asset = options.get('animation_asset')
|
||||
|
||||
api.create(
|
||||
legacy_create(
|
||||
creator_plugin,
|
||||
name=namespace + "_animation",
|
||||
# name=f"{unique_number}_{subset}_animation",
|
||||
|
|
|
|||
|
|
@ -50,6 +50,10 @@ class ExtractCamera(api.Extractor):
|
|||
filepath=filepath,
|
||||
use_active_collection=False,
|
||||
use_selection=True,
|
||||
bake_anim_use_nla_strips=False,
|
||||
bake_anim_use_all_actions=False,
|
||||
add_leaf_bones=False,
|
||||
armature_nodetype='ROOT',
|
||||
object_types={'CAMERA'},
|
||||
bake_anim_simplify_factor=0.0
|
||||
)
|
||||
|
|
|
|||
|
|
@ -1,9 +1,9 @@
|
|||
import os
|
||||
import re
|
||||
import json
|
||||
import getpass
|
||||
|
||||
from avalon.vendor import requests
|
||||
import re
|
||||
import requests
|
||||
import pyblish.api
|
||||
|
||||
|
||||
|
|
|
|||
|
|
@ -7,6 +7,7 @@ from avalon import api as avalon
|
|||
from avalon.pipeline import AVALON_CONTAINER_ID
|
||||
from pyblish import api as pyblish
|
||||
from openpype.api import Logger
|
||||
from openpype.pipeline import LegacyCreator
|
||||
from .lib import (
|
||||
set_segment_data_marker,
|
||||
set_publish_attribute,
|
||||
|
|
@ -33,7 +34,7 @@ def install():
|
|||
pyblish.register_host("flame")
|
||||
pyblish.register_plugin_path(PUBLISH_PATH)
|
||||
avalon.register_plugin_path(avalon.Loader, LOAD_PATH)
|
||||
avalon.register_plugin_path(avalon.Creator, CREATE_PATH)
|
||||
avalon.register_plugin_path(LegacyCreator, CREATE_PATH)
|
||||
avalon.register_plugin_path(avalon.InventoryAction, INVENTORY_PATH)
|
||||
log.info("OpenPype Flame plug-ins registred ...")
|
||||
|
||||
|
|
@ -48,7 +49,7 @@ def uninstall():
|
|||
log.info("Deregistering Flame plug-ins..")
|
||||
pyblish.deregister_plugin_path(PUBLISH_PATH)
|
||||
avalon.deregister_plugin_path(avalon.Loader, LOAD_PATH)
|
||||
avalon.deregister_plugin_path(avalon.Creator, CREATE_PATH)
|
||||
avalon.deregister_plugin_path(LegacyCreator, CREATE_PATH)
|
||||
avalon.deregister_plugin_path(avalon.InventoryAction, INVENTORY_PATH)
|
||||
|
||||
# register callback for switching publishable
|
||||
|
|
|
|||
|
|
@ -2,11 +2,12 @@ import os
|
|||
import re
|
||||
import shutil
|
||||
import sys
|
||||
from avalon.vendor import qargparse
|
||||
from xml.etree import ElementTree as ET
|
||||
import six
|
||||
import qargparse
|
||||
from Qt import QtWidgets, QtCore
|
||||
import openpype.api as openpype
|
||||
from openpype.pipeline import LegacyCreator
|
||||
from openpype import style
|
||||
import avalon.api as avalon
|
||||
from . import (
|
||||
|
|
@ -299,7 +300,7 @@ class Spacer(QtWidgets.QWidget):
|
|||
self.setLayout(layout)
|
||||
|
||||
|
||||
class Creator(openpype.Creator):
|
||||
class Creator(LegacyCreator):
|
||||
"""Creator class wrapper
|
||||
"""
|
||||
clip_color = constants.COLOR_MAP["purple"]
|
||||
|
|
|
|||
|
|
@ -11,6 +11,7 @@ import avalon.api
|
|||
from avalon.pipeline import AVALON_CONTAINER_ID
|
||||
|
||||
from openpype.api import Logger
|
||||
from openpype.pipeline import LegacyCreator
|
||||
import openpype.hosts.fusion
|
||||
|
||||
log = Logger().get_logger(__name__)
|
||||
|
|
@ -63,7 +64,7 @@ def install():
|
|||
log.info("Registering Fusion plug-ins..")
|
||||
|
||||
avalon.api.register_plugin_path(avalon.api.Loader, LOAD_PATH)
|
||||
avalon.api.register_plugin_path(avalon.api.Creator, CREATE_PATH)
|
||||
avalon.api.register_plugin_path(LegacyCreator, CREATE_PATH)
|
||||
avalon.api.register_plugin_path(avalon.api.InventoryAction, INVENTORY_PATH)
|
||||
|
||||
pyblish.api.register_callback(
|
||||
|
|
@ -87,7 +88,7 @@ def uninstall():
|
|||
log.info("Deregistering Fusion plug-ins..")
|
||||
|
||||
avalon.api.deregister_plugin_path(avalon.api.Loader, LOAD_PATH)
|
||||
avalon.api.deregister_plugin_path(avalon.api.Creator, CREATE_PATH)
|
||||
avalon.api.deregister_plugin_path(LegacyCreator, CREATE_PATH)
|
||||
avalon.api.deregister_plugin_path(
|
||||
avalon.api.InventoryAction, INVENTORY_PATH
|
||||
)
|
||||
|
|
|
|||
|
|
@ -1,13 +1,13 @@
|
|||
import os
|
||||
|
||||
import openpype.api
|
||||
from openpype.pipeline import create
|
||||
from openpype.hosts.fusion.api import (
|
||||
get_current_comp,
|
||||
comp_lock_and_undo_chunk
|
||||
)
|
||||
|
||||
|
||||
class CreateOpenEXRSaver(openpype.api.Creator):
|
||||
class CreateOpenEXRSaver(create.LegacyCreator):
|
||||
|
||||
name = "openexrDefault"
|
||||
label = "Create OpenEXR Saver"
|
||||
|
|
|
|||
|
|
@ -5,11 +5,12 @@ import logging
|
|||
|
||||
# Pipeline imports
|
||||
import avalon.api
|
||||
from avalon import io, pipeline
|
||||
from avalon import io
|
||||
|
||||
from openpype.lib import version_up
|
||||
from openpype.hosts.fusion import api
|
||||
from openpype.hosts.fusion.api import lib
|
||||
from openpype.lib.avalon_context import get_workdir_from_session
|
||||
|
||||
log = logging.getLogger("Update Slap Comp")
|
||||
|
||||
|
|
@ -44,16 +45,6 @@ def _format_version_folder(folder):
|
|||
return version_folder
|
||||
|
||||
|
||||
def _get_work_folder(session):
|
||||
"""Convenience function to get the work folder path of the current asset"""
|
||||
|
||||
# Get new filename, create path based on asset and work template
|
||||
template_work = self._project["config"]["template"]["work"]
|
||||
work_path = pipeline._format_work_template(template_work, session)
|
||||
|
||||
return os.path.normpath(work_path)
|
||||
|
||||
|
||||
def _get_fusion_instance():
|
||||
fusion = getattr(sys.modules["__main__"], "fusion", None)
|
||||
if fusion is None:
|
||||
|
|
@ -72,7 +63,7 @@ def _format_filepath(session):
|
|||
asset = session["AVALON_ASSET"]
|
||||
|
||||
# Save updated slap comp
|
||||
work_path = _get_work_folder(session)
|
||||
work_path = get_workdir_from_session(session)
|
||||
walk_to_dir = os.path.join(work_path, "scenes", "slapcomp")
|
||||
slapcomp_dir = os.path.abspath(walk_to_dir)
|
||||
|
||||
|
|
@ -112,7 +103,7 @@ def _update_savers(comp, session):
|
|||
None
|
||||
"""
|
||||
|
||||
new_work = _get_work_folder(session)
|
||||
new_work = get_workdir_from_session(session)
|
||||
renders = os.path.join(new_work, "renders")
|
||||
version_folder = _format_version_folder(renders)
|
||||
renders_version = os.path.join(renders, version_folder)
|
||||
|
|
|
|||
|
|
@ -1,5 +1,5 @@
|
|||
from Qt import QtWidgets
|
||||
from avalon.vendor import qtawesome
|
||||
import qtawesome
|
||||
from openpype.hosts.fusion.api import get_current_comp
|
||||
|
||||
|
||||
|
|
|
|||
|
|
@ -5,11 +5,12 @@ import logging
|
|||
from Qt import QtWidgets, QtCore
|
||||
|
||||
import avalon.api
|
||||
from avalon import io, pipeline
|
||||
from avalon.vendor import qtawesome as qta
|
||||
from avalon import io
|
||||
import qtawesome as qta
|
||||
|
||||
from openpype import style
|
||||
from openpype.hosts.fusion import api
|
||||
from openpype.lib.avalon_context import get_workdir_from_session
|
||||
|
||||
log = logging.getLogger("Fusion Switch Shot")
|
||||
|
||||
|
|
@ -123,7 +124,7 @@ class App(QtWidgets.QWidget):
|
|||
|
||||
def _on_open_from_dir(self):
|
||||
|
||||
start_dir = self._get_context_directory()
|
||||
start_dir = get_workdir_from_session()
|
||||
comp_file, _ = QtWidgets.QFileDialog.getOpenFileName(
|
||||
self, "Choose comp", start_dir)
|
||||
|
||||
|
|
@ -157,17 +158,6 @@ class App(QtWidgets.QWidget):
|
|||
import colorbleed.scripts.fusion_switch_shot as switch_shot
|
||||
switch_shot.switch(asset_name=asset, filepath=file_name, new=True)
|
||||
|
||||
def _get_context_directory(self):
|
||||
|
||||
project = io.find_one({"type": "project",
|
||||
"name": avalon.api.Session["AVALON_PROJECT"]},
|
||||
projection={"config": True})
|
||||
|
||||
template = project["config"]["template"]["work"]
|
||||
dir = pipeline._format_work_template(template, avalon.api.Session)
|
||||
|
||||
return dir
|
||||
|
||||
def collect_slap_comps(self, directory):
|
||||
items = glob.glob("{}/*.comp".format(directory))
|
||||
return items
|
||||
|
|
|
|||
|
|
@ -361,7 +361,7 @@ def zip_and_move(source, destination):
|
|||
log.debug(f"Saved '{source}' to '{destination}'")
|
||||
|
||||
|
||||
def show(module_name):
|
||||
def show(tool_name):
|
||||
"""Call show on "module_name".
|
||||
|
||||
This allows to make a QApplication ahead of time and always "exec_" to
|
||||
|
|
@ -375,13 +375,6 @@ def show(module_name):
|
|||
# requests to be received properly.
|
||||
time.sleep(1)
|
||||
|
||||
# Get tool name from module name
|
||||
# TODO this is for backwards compatibility not sure if `TB_sceneOpened.js`
|
||||
# is automatically updated.
|
||||
# Previous javascript sent 'module_name' which contained whole tool import
|
||||
# string e.g. "avalon.tools.workfiles" now it should be only "workfiles"
|
||||
tool_name = module_name.split(".")[-1]
|
||||
|
||||
kwargs = {}
|
||||
if tool_name == "loader":
|
||||
kwargs["use_context"] = True
|
||||
|
|
|
|||
|
|
@ -9,6 +9,7 @@ import avalon.api
|
|||
from avalon.pipeline import AVALON_CONTAINER_ID
|
||||
|
||||
from openpype import lib
|
||||
from openpype.pipeline import LegacyCreator
|
||||
import openpype.hosts.harmony
|
||||
import openpype.hosts.harmony.api as harmony
|
||||
|
||||
|
|
@ -179,7 +180,7 @@ def install():
|
|||
pyblish.api.register_host("harmony")
|
||||
pyblish.api.register_plugin_path(PUBLISH_PATH)
|
||||
avalon.api.register_plugin_path(avalon.api.Loader, LOAD_PATH)
|
||||
avalon.api.register_plugin_path(avalon.api.Creator, CREATE_PATH)
|
||||
avalon.api.register_plugin_path(LegacyCreator, CREATE_PATH)
|
||||
log.info(PUBLISH_PATH)
|
||||
|
||||
# Register callbacks.
|
||||
|
|
@ -193,7 +194,7 @@ def install():
|
|||
def uninstall():
|
||||
pyblish.api.deregister_plugin_path(PUBLISH_PATH)
|
||||
avalon.api.deregister_plugin_path(avalon.api.Loader, LOAD_PATH)
|
||||
avalon.api.deregister_plugin_path(avalon.api.Creator, CREATE_PATH)
|
||||
avalon.api.deregister_plugin_path(LegacyCreator, CREATE_PATH)
|
||||
|
||||
|
||||
def on_pyblish_instance_toggled(instance, old_value, new_value):
|
||||
|
|
|
|||
|
|
@ -1,9 +1,8 @@
|
|||
import avalon.api
|
||||
from openpype.api import PypeCreatorMixin
|
||||
from openpype.pipeline import LegacyCreator
|
||||
import openpype.hosts.harmony.api as harmony
|
||||
|
||||
|
||||
class Creator(PypeCreatorMixin, avalon.api.Creator):
|
||||
class Creator(LegacyCreator):
|
||||
"""Creator plugin to create instances in Harmony.
|
||||
|
||||
By default a Composite node is created to support any number of nodes in
|
||||
|
|
|
|||
|
|
@ -0,0 +1,15 @@
|
|||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<root>
|
||||
<error id="main">
|
||||
<title>Missing audio file</title>
|
||||
<description>
|
||||
## Cannot locate linked audio file
|
||||
|
||||
Audio file at {audio_url} cannot be found.
|
||||
|
||||
### How to repair?
|
||||
|
||||
Copy audio file to the highlighted location or remove audio link in the workfile.
|
||||
</description>
|
||||
</error>
|
||||
</root>
|
||||
|
|
@ -0,0 +1,25 @@
|
|||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<root>
|
||||
<error id="main">
|
||||
<title>Subset context</title>
|
||||
<description>
|
||||
## Invalid subset context
|
||||
|
||||
Asset name found '{found}' in subsets, expected '{expected}'.
|
||||
|
||||
### How to repair?
|
||||
|
||||
You can fix this with `Repair` button on the right. This will use '{expected}' asset name and overwrite '{found}' asset name in scene metadata.
|
||||
|
||||
After that restart `Publish` with a `Reload button`.
|
||||
|
||||
If this is unwanted, close workfile and open again, that way different asset value would be used for context information.
|
||||
</description>
|
||||
<detail>
|
||||
### __Detailed Info__ (optional)
|
||||
|
||||
This might happen if you are reuse old workfile and open it in different context.
|
||||
(Eg. you created subset "renderCompositingDefault" from asset "Robot' in "your_project_Robot_compositing.aep", now you opened this workfile in a context "Sloth" but existing subset for "Robot" asset stayed in the workfile.)
|
||||
</detail>
|
||||
</error>
|
||||
</root>
|
||||
|
|
@ -0,0 +1,35 @@
|
|||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<root>
|
||||
<error id="main">
|
||||
<title>Scene setting</title>
|
||||
<description>
|
||||
## Invalid scene setting found
|
||||
|
||||
One of the settings in a scene doesn't match to asset settings in database.
|
||||
|
||||
{invalid_setting_str}
|
||||
|
||||
### How to repair?
|
||||
|
||||
Change values for {invalid_keys_str} in the scene OR change them in the asset database if they are wrong there.
|
||||
</description>
|
||||
<detail>
|
||||
### __Detailed Info__ (optional)
|
||||
|
||||
This error is shown when for example resolution in the scene doesn't match to resolution set on the asset in the database.
|
||||
Either value in the database or in the scene is wrong.
|
||||
</detail>
|
||||
</error>
|
||||
<error id="file_not_found">
|
||||
<title>Scene file doesn't exist</title>
|
||||
<description>
|
||||
## Scene file doesn't exist
|
||||
|
||||
Collected scene {scene_url} doesn't exist.
|
||||
|
||||
### How to repair?
|
||||
|
||||
Re-save file, start publish from the beginning again.
|
||||
</description>
|
||||
</error>
|
||||
</root>
|
||||
|
|
@@ -4,6 +4,8 @@ import pyblish.api

import openpype.hosts.harmony.api as harmony

+from openpype.pipeline import PublishXmlValidationError
+

class ValidateAudio(pyblish.api.InstancePlugin):
    """Ensures that there is an audio file in the scene.

@@ -42,4 +44,9 @@ class ValidateAudio(pyblish.api.InstancePlugin):

        msg = "You are missing audio file:\n{}".format(audio_path)

-        assert os.path.isfile(audio_path), msg
+        formatting_data = {
+            "audio_url": audio_path
+        }
+        if os.path.isfile(audio_path):
+            raise PublishXmlValidationError(self, msg,
+                                            formatting_data=formatting_data)
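One detail worth noting: the new guard raises only when `os.path.isfile(audio_path)` is true, while the removed assert, the message, and the missing-audio help file all describe the case where the file is absent. A sketch of the check as the message implies it should read (hedged; the code that actually shipped may differ):

```python
import os

from openpype.pipeline import PublishXmlValidationError


def validate_audio_file(plugin, audio_path):
    """Raise the XML-backed error when the linked audio file is absent."""
    msg = "You are missing audio file:\n{}".format(audio_path)
    formatting_data = {"audio_url": audio_path}
    if not os.path.isfile(audio_path):
        raise PublishXmlValidationError(plugin, msg,
                                        formatting_data=formatting_data)
```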
|
|
@ -2,6 +2,7 @@ import os
|
|||
|
||||
import pyblish.api
|
||||
import openpype.api
|
||||
from openpype.pipeline import PublishXmlValidationError
|
||||
import openpype.hosts.harmony.api as harmony
|
||||
|
||||
|
||||
|
|
@ -45,4 +46,11 @@ class ValidateInstance(pyblish.api.InstancePlugin):
|
|||
"Instance asset is not the same as current asset:"
|
||||
f"\nInstance: {instance_asset}\nCurrent: {current_asset}"
|
||||
)
|
||||
assert instance_asset == current_asset, msg
|
||||
|
||||
formatting_data = {
|
||||
"found": instance_asset,
|
||||
"expected": current_asset
|
||||
}
|
||||
if instance_asset != current_asset:
|
||||
raise PublishXmlValidationError(self, msg,
|
||||
formatting_data=formatting_data)
|
||||
|
|
|
|||
|
|
@ -7,7 +7,7 @@ import re
|
|||
import pyblish.api
|
||||
|
||||
import openpype.hosts.harmony.api as harmony
|
||||
import openpype.hosts.harmony
|
||||
from openpype.pipeline import PublishXmlValidationError
|
||||
|
||||
|
||||
class ValidateSceneSettingsRepair(pyblish.api.Action):
|
||||
|
|
@ -19,12 +19,12 @@ class ValidateSceneSettingsRepair(pyblish.api.Action):
|
|||
|
||||
def process(self, context, plugin):
|
||||
"""Repair action entry point."""
|
||||
expected = openpype.hosts.harmony.api.get_asset_settings()
|
||||
expected = harmony.get_asset_settings()
|
||||
asset_settings = _update_frames(dict.copy(expected))
|
||||
asset_settings["frameStart"] = 1
|
||||
asset_settings["frameEnd"] = asset_settings["frameEnd"] + \
|
||||
asset_settings["handleEnd"]
|
||||
openpype.hosts.harmony.api.set_scene_settings(asset_settings)
|
||||
harmony.set_scene_settings(asset_settings)
|
||||
if not os.path.exists(context.data["scenePath"]):
|
||||
self.log.info("correcting scene name")
|
||||
scene_dir = os.path.dirname(context.data["currentFile"])
|
||||
|
|
@ -55,7 +55,7 @@ class ValidateSceneSettings(pyblish.api.InstancePlugin):
|
|||
|
||||
def process(self, instance):
|
||||
"""Plugin entry point."""
|
||||
expected_settings = openpype.hosts.harmony.api.get_asset_settings()
|
||||
expected_settings = harmony.get_asset_settings()
|
||||
self.log.info("scene settings from DB:".format(expected_settings))
|
||||
|
||||
expected_settings = _update_frames(dict.copy(expected_settings))
|
||||
|
|
@ -102,13 +102,13 @@ class ValidateSceneSettings(pyblish.api.InstancePlugin):
|
|||
self.log.debug("current scene settings {}".format(current_settings))
|
||||
|
||||
invalid_settings = []
|
||||
invalid_keys = set()
|
||||
for key, value in expected_settings.items():
|
||||
if value != current_settings[key]:
|
||||
invalid_settings.append({
|
||||
"name": key,
|
||||
"expected": value,
|
||||
"current": current_settings[key]
|
||||
})
|
||||
invalid_settings.append(
|
||||
"{} expected: {} found: {}".format(key, value,
|
||||
current_settings[key]))
|
||||
invalid_keys.add(key)
|
||||
|
||||
if ((expected_settings["handleStart"]
|
||||
or expected_settings["handleEnd"])
|
||||
|
|
@ -120,10 +120,30 @@ class ValidateSceneSettings(pyblish.api.InstancePlugin):
|
|||
msg = "Found invalid settings:\n{}".format(
|
||||
json.dumps(invalid_settings, sort_keys=True, indent=4)
|
||||
)
|
||||
assert not invalid_settings, msg
|
||||
assert os.path.exists(instance.context.data.get("scenePath")), (
|
||||
"Scene file not found (saved under wrong name)"
|
||||
)
|
||||
|
||||
if invalid_settings:
|
||||
invalid_keys_str = ",".join(invalid_keys)
|
||||
break_str = "<br/>"
|
||||
invalid_setting_str = "<b>Found invalid settings:</b><br/>{}".\
|
||||
format(break_str.join(invalid_settings))
|
||||
|
||||
formatting_data = {
|
||||
"invalid_setting_str": invalid_setting_str,
|
||||
"invalid_keys_str": invalid_keys_str
|
||||
}
|
||||
raise PublishXmlValidationError(self, msg,
|
||||
formatting_data=formatting_data)
|
||||
|
||||
scene_url = instance.context.data.get("scenePath")
|
||||
if not os.path.exists(scene_url):
|
||||
msg = "Scene file {} not found (saved under wrong name)".format(
|
||||
scene_url
|
||||
)
|
||||
formatting_data = {
|
||||
"scene_url": scene_url
|
||||
}
|
||||
raise PublishXmlValidationError(self, msg, key="file_not_found",
|
||||
formatting_data=formatting_data)
|
||||
|
||||
|
||||
def _update_frames(expected_settings):
|
||||
|
|
|
|||
|
|
@ -9,6 +9,7 @@ from avalon import api as avalon
|
|||
from avalon import schema
|
||||
from pyblish import api as pyblish
|
||||
from openpype.api import Logger
|
||||
from openpype.pipeline import LegacyCreator
|
||||
from openpype.tools.utils import host_tools
|
||||
from . import lib, menu, events
|
||||
|
||||
|
|
@ -45,7 +46,7 @@ def install():
|
|||
pyblish.register_host("hiero")
|
||||
pyblish.register_plugin_path(PUBLISH_PATH)
|
||||
avalon.register_plugin_path(avalon.Loader, LOAD_PATH)
|
||||
avalon.register_plugin_path(avalon.Creator, CREATE_PATH)
|
||||
avalon.register_plugin_path(LegacyCreator, CREATE_PATH)
|
||||
avalon.register_plugin_path(avalon.InventoryAction, INVENTORY_PATH)
|
||||
|
||||
# register callback for switching publishable
|
||||
|
|
@ -67,7 +68,7 @@ def uninstall():
|
|||
pyblish.deregister_host("hiero")
|
||||
pyblish.deregister_plugin_path(PUBLISH_PATH)
|
||||
avalon.deregister_plugin_path(avalon.Loader, LOAD_PATH)
|
||||
avalon.deregister_plugin_path(avalon.Creator, CREATE_PATH)
|
||||
avalon.deregister_plugin_path(LegacyCreator, CREATE_PATH)
|
||||
|
||||
# register callback for switching publishable
|
||||
pyblish.deregister_callback("instanceToggled", on_pyblish_instance_toggled)
|
||||
|
|
|
|||
|
|
@ -1,12 +1,15 @@
|
|||
import re
|
||||
import os
|
||||
import re
|
||||
from copy import deepcopy
|
||||
|
||||
import hiero
|
||||
|
||||
from Qt import QtWidgets, QtCore
|
||||
from avalon.vendor import qargparse
|
||||
import qargparse
|
||||
import avalon.api as avalon
|
||||
import openpype.api as openpype
|
||||
from openpype.pipeline import LegacyCreator
|
||||
from . import lib
|
||||
from copy import deepcopy
|
||||
|
||||
log = openpype.Logger().get_logger(__name__)
|
||||
|
||||
|
|
@ -589,7 +592,7 @@ class ClipLoader:
|
|||
return track_item
|
||||
|
||||
|
||||
class Creator(openpype.Creator):
|
||||
class Creator(LegacyCreator):
|
||||
"""Creator class wrapper
|
||||
"""
|
||||
clip_color = "Purple"
|
||||
|
|
|
|||
|
|
@ -11,6 +11,7 @@ import avalon.api
|
|||
from avalon.pipeline import AVALON_CONTAINER_ID
|
||||
from avalon.lib import find_submodule
|
||||
|
||||
from openpype.pipeline import LegacyCreator
|
||||
import openpype.hosts.houdini
|
||||
from openpype.hosts.houdini.api import lib
|
||||
|
||||
|
|
@ -48,7 +49,7 @@ def install():
|
|||
|
||||
pyblish.api.register_plugin_path(PUBLISH_PATH)
|
||||
avalon.api.register_plugin_path(avalon.api.Loader, LOAD_PATH)
|
||||
avalon.api.register_plugin_path(avalon.api.Creator, CREATE_PATH)
|
||||
avalon.api.register_plugin_path(LegacyCreator, CREATE_PATH)
|
||||
|
||||
log.info("Installing callbacks ... ")
|
||||
# avalon.on("init", on_init)
|
||||
|
|
|
|||
|
|
@ -2,11 +2,12 @@
|
|||
"""Houdini specific Avalon/Pyblish plugin definitions."""
|
||||
import sys
|
||||
import six
|
||||
import avalon.api
|
||||
from avalon.api import CreatorError
|
||||
|
||||
import hou
|
||||
from openpype.api import PypeCreatorMixin
|
||||
from openpype.pipeline import (
|
||||
CreatorError,
|
||||
LegacyCreator
|
||||
)
|
||||
from .lib import imprint
|
||||
|
||||
|
||||
|
|
@ -14,7 +15,7 @@ class OpenPypeCreatorError(CreatorError):
|
|||
pass
|
||||
|
||||
|
||||
class Creator(PypeCreatorMixin, avalon.api.Creator):
|
||||
class Creator(LegacyCreator):
|
||||
"""Creator plugin to create instances in Houdini
|
||||
|
||||
To support the wide range of node types for render output (Alembic, VDB,
|
||||
|
|
|
|||
|
|
@ -37,17 +37,17 @@ class ToolWindows:
|
|||
|
||||
|
||||
def edit_shader_definitions():
|
||||
from avalon.tools import lib
|
||||
from Qt import QtWidgets
|
||||
from openpype.hosts.maya.api.shader_definition_editor import (
|
||||
ShaderDefinitionsEditor
|
||||
)
|
||||
from openpype.tools.utils import qt_app_context
|
||||
|
||||
top_level_widgets = QtWidgets.QApplication.topLevelWidgets()
|
||||
main_window = next(widget for widget in top_level_widgets
|
||||
if widget.objectName() == "MayaWindow")
|
||||
|
||||
with lib.application():
|
||||
with qt_app_context():
|
||||
window = ToolWindows.get_window("shader_definition_editor")
|
||||
if not window:
|
||||
window = ShaderDefinitionsEditor(parent=main_window)
|
||||
|
|
|
|||
|
|
@ -36,7 +36,7 @@ def install():
|
|||
return
|
||||
|
||||
def deferred():
|
||||
from avalon.tools import publish
|
||||
pyblish_icon = host_tools.get_pyblish_icon()
|
||||
parent_widget = get_main_window()
|
||||
cmds.menu(
|
||||
MENU_NAME,
|
||||
|
|
@ -80,7 +80,7 @@ def install():
|
|||
command=lambda *args: host_tools.show_publish(
|
||||
parent=parent_widget
|
||||
),
|
||||
image=publish.ICON
|
||||
image=pyblish_icon
|
||||
)
|
||||
|
||||
cmds.menuItem(
|
||||
|
|
|
|||
|
|
@ -2,7 +2,6 @@ import os
|
|||
import sys
|
||||
import errno
|
||||
import logging
|
||||
import contextlib
|
||||
|
||||
from maya import utils, cmds, OpenMaya
|
||||
import maya.api.OpenMaya as om
|
||||
|
|
@ -17,6 +16,7 @@ import openpype.hosts.maya
|
|||
from openpype.tools.utils import host_tools
|
||||
from openpype.lib import any_outdated
|
||||
from openpype.lib.path_tools import HostDirmap
|
||||
from openpype.pipeline import LegacyCreator
|
||||
from openpype.hosts.maya.lib import copy_workspace_mel
|
||||
from . import menu, lib
|
||||
|
||||
|
|
@ -50,7 +50,7 @@ def install():
|
|||
pyblish.api.register_host("maya")
|
||||
|
||||
avalon.api.register_plugin_path(avalon.api.Loader, LOAD_PATH)
|
||||
avalon.api.register_plugin_path(avalon.api.Creator, CREATE_PATH)
|
||||
avalon.api.register_plugin_path(LegacyCreator, CREATE_PATH)
|
||||
avalon.api.register_plugin_path(avalon.api.InventoryAction, INVENTORY_PATH)
|
||||
log.info(PUBLISH_PATH)
|
||||
|
||||
|
|
@ -176,7 +176,7 @@ def uninstall():
|
|||
pyblish.api.deregister_host("maya")
|
||||
|
||||
avalon.api.deregister_plugin_path(avalon.api.Loader, LOAD_PATH)
|
||||
avalon.api.deregister_plugin_path(avalon.api.Creator, CREATE_PATH)
|
||||
avalon.api.deregister_plugin_path(LegacyCreator, CREATE_PATH)
|
||||
avalon.api.deregister_plugin_path(
|
||||
avalon.api.InventoryAction, INVENTORY_PATH
|
||||
)
|
||||
|
|
|
|||
|
|
@ -2,10 +2,11 @@ import os
|
|||
|
||||
from maya import cmds
|
||||
|
||||
import qargparse
|
||||
|
||||
from avalon import api
|
||||
from avalon.vendor import qargparse
|
||||
from openpype.api import PypeCreatorMixin
|
||||
from avalon.pipeline import AVALON_CONTAINER_ID
|
||||
from openpype.pipeline import LegacyCreator
|
||||
|
||||
from .pipeline import containerise
|
||||
from . import lib
|
||||
|
|
@ -78,7 +79,7 @@ def get_reference_node_parents(ref):
|
|||
return parents
|
||||
|
||||
|
||||
class Creator(PypeCreatorMixin, api.Creator):
|
||||
class Creator(LegacyCreator):
|
||||
defaults = ['Main']
|
||||
|
||||
def process(self):
|
||||
|
|
|
|||
|
|
@ -19,9 +19,9 @@ from openpype.api import (
|
|||
get_project_settings,
|
||||
get_asset)
|
||||
from openpype.modules import ModulesManager
|
||||
from openpype.pipeline import CreatorError
|
||||
|
||||
from avalon.api import Session
|
||||
from avalon.api import CreatorError
|
||||
|
||||
|
||||
class CreateRender(plugin.Creator):
|
||||
|
|
|
|||
|
|
@ -19,10 +19,10 @@ from openpype.api import (
|
|||
get_project_settings
|
||||
)
|
||||
|
||||
from openpype.pipeline import CreatorError
|
||||
from openpype.modules import ModulesManager
|
||||
|
||||
from avalon.api import Session
|
||||
from avalon.api import CreatorError
|
||||
|
||||
|
||||
class CreateVRayScene(plugin.Creator):
|
||||
|
|
|
|||
|
|
@ -4,6 +4,7 @@ from avalon import api
|
|||
|
||||
from openpype.api import get_project_settings
|
||||
from openpype.lib import get_creator_by_name
|
||||
from openpype.pipeline import legacy_create
|
||||
import openpype.hosts.maya.api.plugin
|
||||
from openpype.hosts.maya.api.lib import maintained_selection
|
||||
|
||||
|
|
@ -158,7 +159,7 @@ class ReferenceLoader(openpype.hosts.maya.api.plugin.ReferenceLoader):
|
|||
creator_plugin = get_creator_by_name(self.animation_creator_name)
|
||||
with maintained_selection():
|
||||
cmds.select([output, controls] + roots, noExpand=True)
|
||||
api.create(
|
||||
legacy_create(
|
||||
creator_plugin,
|
||||
name=namespace,
|
||||
asset=asset,
|
||||
|
|
|
|||
|
|
@ -174,7 +174,7 @@ class LoadVDBtoVRay(api.Loader):
|
|||
fname = files[0]
|
||||
else:
|
||||
# Sequence
|
||||
from avalon.vendor import clique
|
||||
import clique
|
||||
# todo: check support for negative frames as input
|
||||
collections, remainder = clique.assemble(files)
|
||||
assert len(collections) == 1, (
|
||||
|
|
|
|||
|
|
@ -4,7 +4,6 @@ import os
|
|||
import sys
|
||||
import json
|
||||
import tempfile
|
||||
import platform
|
||||
import contextlib
|
||||
import subprocess
|
||||
from collections import OrderedDict
|
||||
|
|
@ -64,10 +63,6 @@ def maketx(source, destination, *args):
|
|||
|
||||
maketx_path = get_oiio_tools_path("maketx")
|
||||
|
||||
if platform.system().lower() == "windows":
|
||||
# Ensure .exe extension
|
||||
maketx_path += ".exe"
|
||||
|
||||
if not os.path.exists(maketx_path):
|
||||
print(
|
||||
"OIIO tool not found in {}".format(maketx_path))
|
||||
|
|
|
|||
|
|
@ -1,6 +1,5 @@
|
|||
import os
|
||||
import re
|
||||
import sys
|
||||
import six
|
||||
import platform
|
||||
import contextlib
|
||||
|
|
@ -679,10 +678,10 @@ def get_render_path(node):
|
|||
}
|
||||
|
||||
nuke_imageio_writes = get_created_node_imageio_setting(**data_preset)
|
||||
host_name = os.environ.get("AVALON_APP")
|
||||
|
||||
application = lib.get_application(os.environ["AVALON_APP_NAME"])
|
||||
data.update({
|
||||
"application": application,
|
||||
"app": host_name,
|
||||
"nuke_imageio_writes": nuke_imageio_writes
|
||||
})
|
||||
|
||||
|
|
@ -805,18 +804,14 @@ def create_write_node(name, data, input=None, prenodes=None,
|
|||
'''
|
||||
|
||||
imageio_writes = get_created_node_imageio_setting(**data)
|
||||
app_manager = ApplicationManager()
|
||||
app_name = os.environ.get("AVALON_APP_NAME")
|
||||
if app_name:
|
||||
app = app_manager.applications.get(app_name)
|
||||
|
||||
for knob in imageio_writes["knobs"]:
|
||||
if knob["name"] == "file_type":
|
||||
representation = knob["value"]
|
||||
|
||||
host_name = os.environ.get("AVALON_APP")
|
||||
try:
|
||||
data.update({
|
||||
"app": app.host_name,
|
||||
"app": host_name,
|
||||
"imageio_writes": imageio_writes,
|
||||
"representation": representation,
|
||||
})
|
||||
|
|
|
|||
|
|
@ -14,6 +14,7 @@ from openpype.api import (
|
|||
BuildWorkfile,
|
||||
get_current_project_settings
|
||||
)
|
||||
from openpype.pipeline import LegacyCreator
|
||||
from openpype.tools.utils import host_tools
|
||||
|
||||
from .command import viewer_update_and_undo_stop

@@ -98,7 +99,7 @@ def install():
log.info("Registering Nuke plug-ins..")
pyblish.api.register_plugin_path(PUBLISH_PATH)
avalon.api.register_plugin_path(avalon.api.Loader, LOAD_PATH)
avalon.api.register_plugin_path(avalon.api.Creator, CREATE_PATH)
avalon.api.register_plugin_path(LegacyCreator, CREATE_PATH)
avalon.api.register_plugin_path(avalon.api.InventoryAction, INVENTORY_PATH)

# Register Avalon event for workfiles loading.

@@ -124,7 +125,7 @@ def uninstall():
pyblish.deregister_host("nuke")
pyblish.api.deregister_plugin_path(PUBLISH_PATH)
avalon.api.deregister_plugin_path(avalon.api.Loader, LOAD_PATH)
avalon.api.deregister_plugin_path(avalon.api.Creator, CREATE_PATH)
avalon.api.deregister_plugin_path(LegacyCreator, CREATE_PATH)

pyblish.api.deregister_callback(
"instanceToggled", on_pyblish_instance_toggled)

@@ -6,10 +6,8 @@ import nuke
import avalon.api

from openpype.api import (
get_current_project_settings,
PypeCreatorMixin
)
from openpype.api import get_current_project_settings
from openpype.pipeline import LegacyCreator
from .lib import (
Knobby,
check_subsetname_exists,

@@ -20,7 +18,7 @@ from .lib import (
)


class OpenPypeCreator(PypeCreatorMixin, avalon.api.Creator):
class OpenPypeCreator(LegacyCreator):
"""Pype Nuke Creator class wrapper"""
node_color = "0xdfea5dff"


@@ -152,6 +150,7 @@ class ExporterReview(object):

"""
data = None
publish_on_farm = False

def __init__(self,
klass,

@@ -210,6 +209,9 @@ class ExporterReview(object):
if self.multiple_presets:
repre["outputName"] = self.name

if self.publish_on_farm:
repre["tags"].append("publish_on_farm")

self.data["representations"].append(repre)

def get_view_input_process_node(self):

@@ -446,6 +448,9 @@ class ExporterReviewMov(ExporterReview):
return path

def generate_mov(self, farm=False, **kwargs):
self.publish_on_farm = farm
reformat_node_add = kwargs["reformat_node_add"]
reformat_node_config = kwargs["reformat_node_config"]
bake_viewer_process = kwargs["bake_viewer_process"]
bake_viewer_input_process_node = kwargs[
"bake_viewer_input_process"]

@@ -483,6 +488,30 @@ class ExporterReviewMov(ExporterReview):
self.previous_node = r_node
self.log.debug("Read... `{}`".format(self._temp_nodes[subset]))

# add reformat node
if reformat_node_add:
# append reformated tag
add_tags.append("reformated")

rf_node = nuke.createNode("Reformat")
for kn_conf in reformat_node_config:
_type = kn_conf["type"]
k_name = str(kn_conf["name"])
k_value = kn_conf["value"]

# to remove unicode as nuke doesn't like it
if _type == "string":
k_value = str(kn_conf["value"])

rf_node[k_name].setValue(k_value)

# connect
rf_node.setInput(0, self.previous_node)
self._temp_nodes[subset].append(rf_node)
self.previous_node = rf_node
self.log.debug(
"Reformat... `{}`".format(self._temp_nodes[subset]))

# only create colorspace baking if toggled on
if bake_viewer_process:
if bake_viewer_input_process_node:

@@ -537,7 +566,7 @@ class ExporterReviewMov(ExporterReview):
# ---------- end nodes creation

# ---------- render or save to nk
if farm:
if self.publish_on_farm:
nuke.scriptSave()
path_nk = self.save_file()
self.data.update({

@@ -547,11 +576,12 @@ class ExporterReviewMov(ExporterReview):
})
else:
self.render(write_node.name())
# ---------- generate representation data
self.get_representation_data(
tags=["review", "delete"] + add_tags,
range=True
)

# ---------- generate representation data
self.get_representation_data(
tags=["review", "delete"] + add_tags,
range=True
)

self.log.debug("Representation... `{}`".format(self.data))
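The new reformat branch above applies a list of knob configurations to a freshly created Reformat node. A minimal sketch of that idea in isolation; it only runs inside a Nuke session, and the `reformat_node_config` values here are made up for illustration:

```python
import nuke  # only available inside a running Nuke session

# hypothetical config, as it might arrive from project settings
reformat_node_config = [
    {"type": "string", "name": "type", "value": "to format"},
    {"type": "string", "name": "format", "value": "HD_1080"},
    {"type": "bool", "name": "black_outside", "value": True},
]

rf_node = nuke.createNode("Reformat")
for kn_conf in reformat_node_config:
    k_name = str(kn_conf["name"])
    k_value = kn_conf["value"]
    # coerce string values explicitly, mirroring the unicode workaround above
    if kn_conf["type"] == "string":
        k_value = str(k_value)
    rf_node[k_name].setValue(k_value)
```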
@@ -1,5 +1,5 @@
import nuke
from avalon.vendor import qargparse
import qargparse
from avalon import api, io

from openpype.hosts.nuke.api.lib import (

@@ -1,7 +1,6 @@
import re
import nuke

from avalon.vendor import qargparse
import qargparse
from avalon import api, io

from openpype.hosts.nuke.api.lib import (

@@ -1,4 +1,5 @@
import os
import re
import pyblish.api
import openpype
from openpype.hosts.nuke.api import plugin

@@ -25,6 +26,7 @@ class ExtractReviewDataMov(openpype.api.Extractor):
def process(self, instance):
families = instance.data["families"]
task_type = instance.context.data["taskType"]
subset = instance.data["subset"]
self.log.info("Creating staging dir...")

if "representations" not in instance.data:

@@ -46,6 +48,7 @@ class ExtractReviewDataMov(openpype.api.Extractor):
for o_name, o_data in self.outputs.items():
f_families = o_data["filter"]["families"]
f_task_types = o_data["filter"]["task_types"]
f_subsets = o_data["filter"]["sebsets"]

# test if family found in context
test_families = any([

@@ -69,11 +72,25 @@ class ExtractReviewDataMov(openpype.api.Extractor):
bool(not f_task_types)
])

# test subsets from filter
test_subsets = any([
# check if any of subset filter inputs
# converted to regex patern is not found in subset
# we keep strict case sensitivity
bool(next((
s for s in f_subsets
if re.search(re.compile(s), subset)
), None)),
# but if no subsets were set then make this acuntable too
bool(not f_subsets)
])

# we need all filters to be positive for this
# preset to be activated
test_all = all([
test_families,
test_task_types
test_task_types,
test_subsets
])

# if it is not positive then skip this preset

@@ -113,13 +130,22 @@ class ExtractReviewDataMov(openpype.api.Extractor):
})
else:
data = exporter.generate_mov(**o_data)
generated_repres.extend(data["representations"])

self.log.info(generated_repres)
# add representation generated by exporter
generated_repres.extend(data["representations"])
self.log.debug(
"__ generated_repres: {}".format(generated_repres))

if generated_repres:
# assign to representations
instance.data["representations"] += generated_repres
else:
instance.data["families"].remove("review")
self.log.info((
"Removing `review` from families. "
"Not available baking profile."
))
self.log.debug(instance.data["families"])

self.log.debug(
"_ representations: {}".format(
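The new subset filter added above follows the same pattern as the existing family and task-type filters: a preset applies only when every filter is either matched or left empty. A self-contained sketch of how such combined filtering can work; the helper name and the sample values are hypothetical, and the real family/task-type tests are abbreviated here:

```python
import re


def preset_matches(subset, families, task_type,
                   f_subsets, f_families, f_task_types):
    """Return True when every filter is either empty or matched."""
    # family filter: any configured family found in instance families
    test_families = any(
        [f in families for f in f_families] + [not f_families]
    )
    # task-type filter: exact task type match, or empty filter
    test_task_types = any([task_type in f_task_types, not f_task_types])
    # subset filter: any regex pattern found in the subset name (case sensitive)
    test_subsets = any([
        bool(next((s for s in f_subsets if re.search(s, subset)), None)),
        not f_subsets,
    ])
    return all([test_families, test_task_types, test_subsets])


# hypothetical values for illustration
print(preset_matches(
    subset="renderCompositingMain",
    families=["render", "review"],
    task_type="Compositing",
    f_subsets=["^render.*Main$"],
    f_families=["render"],
    f_task_types=[],
))  # True: every filter matches or is empty
```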
@@ -48,8 +48,13 @@ class ExtractSlateFrame(openpype.api.Extractor):
self.log.info(
"StagingDir `{0}`...".format(instance.data["stagingDir"]))

frame_start = instance.data["frameStart"]
frame_end = instance.data["frameEnd"]
handle_start = instance.data["handleStart"]
handle_end = instance.data["handleEnd"]

frame_length = int(
instance.data["frameEnd"] - instance.data["frameStart"] + 1
(frame_end - frame_start + 1) + (handle_start + handle_end)
)

temporary_nodes = []
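The change above makes the slate frame count include handles. A quick worked example of the new formula, with purely illustrative values:

```python
# illustrative values, not taken from any real instance
frame_start, frame_end = 1001, 1100   # 100 frames of cut range
handle_start, handle_end = 10, 10     # handles on both sides

frame_length = int((frame_end - frame_start + 1) + (handle_start + handle_end))
print(frame_length)  # 120 -- the old formula would have reported only 100
```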
@@ -80,7 +80,7 @@ class PreCollectNukeInstances(pyblish.api.ContextPlugin):
# Add all nodes in group instances.
if node.Class() == "Group":
# only alter families for render family
if "write" in families_ak.lower():
if families_ak and "write" in families_ak.lower():
target = node["render"].value()
if target == "Use existing frames":
# Local rendering

@@ -34,9 +34,9 @@ class ValidateWriteLegacy(pyblish.api.InstancePlugin):
# test if render in family test knob
# and only one item should be available
assert len(family_test) == 1, msg + " > More avalon attributes"
assert "render" in node[family_test[0]].value(), msg + \
assert "render" in node[family_test[0]].value() \
or "still" in node[family_test[0]].value(), msg + \
" > Not correct family"

# test if `file` knob in node, this way old
# non-group-node write could be detected
assert "file" not in node.knobs(), msg + \

@@ -74,6 +74,8 @@ class ValidateWriteLegacy(pyblish.api.InstancePlugin):
Create_name = "CreateWriteRender"
elif family == "prerender":
Create_name = "CreateWritePrerender"
elif family == "still":
Create_name = "CreateWriteStill"

# get appropriate plugin class
creator_plugin = None

@@ -16,7 +16,6 @@ from .pipeline import (
)
from .plugin import (
PhotoshopLoader,
Creator,
get_unique_layer_name
)
from .workio import (

@@ -42,11 +41,11 @@ __all__ = [
"list_instances",
"remove_instance",
"install",
"uninstall",
"containerise",

# Plugin
"PhotoshopLoader",
"Creator",
"get_unique_layer_name",

# workfiles

@@ -1,5 +1,4 @@
import os
import sys
from Qt import QtWidgets

import pyblish.api

@@ -7,6 +6,7 @@ import avalon.api
from avalon import pipeline, io

from openpype.api import Logger
from openpype.pipeline import LegacyCreator
import openpype.hosts.photoshop

from . import lib

@@ -68,7 +68,7 @@ def install():

pyblish.api.register_plugin_path(PUBLISH_PATH)
avalon.api.register_plugin_path(avalon.api.Loader, LOAD_PATH)
avalon.api.register_plugin_path(avalon.api.Creator, CREATE_PATH)
avalon.api.register_plugin_path(LegacyCreator, CREATE_PATH)
log.info(PUBLISH_PATH)

pyblish.api.register_callback(

@@ -81,7 +81,7 @@ def install():
def uninstall():
pyblish.api.deregister_plugin_path(PUBLISH_PATH)
avalon.api.deregister_plugin_path(avalon.api.Loader, LOAD_PATH)
avalon.api.deregister_plugin_path(avalon.api.Creator, CREATE_PATH)
avalon.api.deregister_plugin_path(LegacyCreator, CREATE_PATH)


def ls():

@@ -33,37 +33,3 @@ class PhotoshopLoader(avalon.api.Loader):
@staticmethod
def get_stub():
return stub()


class Creator(avalon.api.Creator):
"""Creator plugin to create instances in Photoshop

A LayerSet is created to support any number of layers in an instance. If
the selection is used, these layers will be added to the LayerSet.
"""

def process(self):
# Photoshop can have multiple LayerSets with the same name, which does
# not work with Avalon.
msg = "Instance with name \"{}\" already exists.".format(self.name)
stub = lib.stub() # only after Photoshop is up
for layer in stub.get_layers():
if self.name.lower() == layer.Name.lower():
msg = QtWidgets.QMessageBox()
msg.setIcon(QtWidgets.QMessageBox.Warning)
msg.setText(msg)
msg.exec_()
return False

# Store selection because adding a group will change selection.
with lib.maintained_selection():

# Add selection to group.
if (self.options or {}).get("useSelection"):
group = stub.group_selected_layers(self.name)
else:
group = stub.create_group(self.name)

stub.imprint(group, self.data)

return group

@@ -1,9 +1,9 @@
from Qt import QtWidgets
import openpype.api
from openpype.pipeline import create
from openpype.hosts.photoshop import api as photoshop


class CreateImage(openpype.api.Creator):
class CreateImage(create.LegacyCreator):
"""Image folder for publish."""

name = "imageDefault"

@@ -1,7 +1,7 @@
import os

import qargparse
from avalon.pipeline import get_representation_path_from_context
from avalon.vendor import qargparse

from openpype.hosts.photoshop import api as photoshop
from openpype.hosts.photoshop.api import get_unique_layer_name

@@ -92,4 +92,3 @@ class ImageFromSequenceLoader(photoshop.PhotoshopLoader):
def remove(self, container):
"""No update possible, not containerized."""
pass

@@ -9,6 +9,7 @@ from avalon import schema
from avalon.pipeline import AVALON_CONTAINER_ID
from pyblish import api as pyblish
from openpype.api import Logger
from openpype.pipeline import LegacyCreator
from . import lib
from . import PLUGINS_DIR
from openpype.tools.utils import host_tools

@@ -42,7 +43,7 @@ def install():
log.info("Registering DaVinci Resovle plug-ins..")

avalon.register_plugin_path(avalon.Loader, LOAD_PATH)
avalon.register_plugin_path(avalon.Creator, CREATE_PATH)
avalon.register_plugin_path(LegacyCreator, CREATE_PATH)
avalon.register_plugin_path(avalon.InventoryAction, INVENTORY_PATH)

# register callback for switching publishable

@@ -67,7 +68,7 @@ def uninstall():
log.info("Deregistering DaVinci Resovle plug-ins..")

avalon.deregister_plugin_path(avalon.Loader, LOAD_PATH)
avalon.deregister_plugin_path(avalon.Creator, CREATE_PATH)
avalon.deregister_plugin_path(LegacyCreator, CREATE_PATH)
avalon.deregister_plugin_path(avalon.InventoryAction, INVENTORY_PATH)

# register callback for switching publishable

@@ -1,12 +1,15 @@
import re
import uuid

import qargparse
from Qt import QtWidgets, QtCore

from avalon import api
import openpype.api as pype
from openpype.pipeline import LegacyCreator
from openpype.hosts import resolve
from avalon.vendor import qargparse
from . import lib

from Qt import QtWidgets, QtCore


class CreatorWidget(QtWidgets.QDialog):

@@ -493,7 +496,7 @@ class TimelineItemLoader(api.Loader):
pass


class Creator(pype.PypeCreatorMixin, api.Creator):
class Creator(LegacyCreator):
"""Creator class wrapper
"""
marker_color = "Purple"

@@ -81,14 +81,10 @@ class CollectTextures(pyblish.api.ContextPlugin):
parsed_subset = instance.data["subset"].replace(
instance.data["family"], '')

fill_pairs = {
explicit_data = {
"subset": parsed_subset
}

fill_pairs = prepare_template_data(fill_pairs)
workfile_subset = format_template_with_optional_keys(
fill_pairs, self.workfile_subset_template)

processed_instance = False
for repre in instance.data["representations"]:
ext = repre["ext"].replace('.', '')

@@ -102,6 +98,21 @@ class CollectTextures(pyblish.api.ContextPlugin):
if ext in self.main_workfile_extensions or \
ext in self.other_workfile_extensions:

formatting_data = self._get_parsed_groups(
repre_file,
self.input_naming_patterns["workfile"],
self.input_naming_groups["workfile"],
self.color_space
)
self.log.info("Parsed groups from workfile "
"name '{}': {}".format(repre_file,
formatting_data))

formatting_data.update(explicit_data)
fill_pairs = prepare_template_data(formatting_data)
workfile_subset = format_template_with_optional_keys(
fill_pairs, self.workfile_subset_template)

asset_build = self._get_asset_build(
repre_file,
self.input_naming_patterns["workfile"],

@@ -148,11 +159,23 @@ class CollectTextures(pyblish.api.ContextPlugin):
resource_files[workfile_subset].append(item)

if ext in self.texture_extensions:
formatting_data = self._get_parsed_groups(
repre_file,
self.input_naming_patterns["textures"],
self.input_naming_groups["textures"],
self.color_space
)

self.log.info("Parsed groups from texture "
"name '{}': {}".format(repre_file,
formatting_data))

c_space = self._get_color_space(
repre_file,
self.color_space
)

# optional value
channel = self._get_channel_name(
repre_file,
self.input_naming_patterns["textures"],

@@ -160,6 +183,7 @@ class CollectTextures(pyblish.api.ContextPlugin):
self.color_space
)

# optional value
shader = self._get_shader_name(
repre_file,
self.input_naming_patterns["textures"],

@@ -167,13 +191,15 @@ class CollectTextures(pyblish.api.ContextPlugin):
self.color_space
)

formatting_data = {
explicit_data = {
"color_space": c_space or '', # None throws exception
"channel": channel or '',
"shader": shader or '',
"subset": parsed_subset or ''
}

formatting_data.update(explicit_data)

fill_pairs = prepare_template_data(formatting_data)
subset = format_template_with_optional_keys(
fill_pairs, self.texture_subset_template)

@@ -243,6 +269,13 @@ class CollectTextures(pyblish.api.ContextPlugin):
for asset_build, version, subset, family in asset_builds:
if not main_version:
main_version = version

try:
version_int = int(version or main_version or 1)
except ValueError:
self.log.error("Parsed version {} is not "
"an number".format(version))

new_instance = context.create_instance(subset)
new_instance.data.update(
{

@@ -251,7 +284,7 @@ class CollectTextures(pyblish.api.ContextPlugin):
"label": subset,
"name": subset,
"family": family,
"version": int(version or main_version or 1),
"version": version_int,
"asset_build": asset_build # remove in validator
}
)

@@ -320,13 +353,14 @@ class CollectTextures(pyblish.api.ContextPlugin):
"""
asset_name = "NOT_AVAIL"

return self._parse(name, input_naming_patterns, input_naming_groups,
color_spaces, 'asset') or asset_name
return (self._parse_key(name, input_naming_patterns,
input_naming_groups, color_spaces, 'asset') or
asset_name)

def _get_version(self, name, input_naming_patterns, input_naming_groups,
color_spaces):
found = self._parse(name, input_naming_patterns, input_naming_groups,
color_spaces, 'version')
found = self._parse_key(name, input_naming_patterns,
input_naming_groups, color_spaces, 'version')

if found:
return found.replace('v', '')

@@ -336,8 +370,8 @@ class CollectTextures(pyblish.api.ContextPlugin):
def _get_udim(self, name, input_naming_patterns, input_naming_groups,
color_spaces):
"""Parses from 'name' udim value."""
found = self._parse(name, input_naming_patterns, input_naming_groups,
color_spaces, 'udim')
found = self._parse_key(name, input_naming_patterns,
input_naming_groups, color_spaces, 'udim')
if found:
return found

@@ -375,12 +409,15 @@ class CollectTextures(pyblish.api.ContextPlugin):
Unknown format of channel name and color spaces >> cs are known
list - 'color_space' used as a placeholder
"""
found = self._parse(name, input_naming_patterns, input_naming_groups,
color_spaces, 'shader')
if found:
return found
found = None
try:
found = self._parse_key(name, input_naming_patterns,
input_naming_groups, color_spaces,
'shader')
except ValueError:
self.log.warning("Didn't find shader in {}".format(name))

self.log.warning("Didn't find shader in {}".format(name))
return found

def _get_channel_name(self, name, input_naming_patterns,
input_naming_groups, color_spaces):

@@ -389,15 +426,18 @@ class CollectTextures(pyblish.api.ContextPlugin):
Unknown format of channel name and color spaces >> cs are known
list - 'color_space' used as a placeholder
"""
found = self._parse(name, input_naming_patterns, input_naming_groups,
color_spaces, 'channel')
if found:
return found
found = None
try:
found = self._parse_key(name, input_naming_patterns,
input_naming_groups, color_spaces,
'channel')
except ValueError:
self.log.warning("Didn't find channel in {}".format(name))

self.log.warning("Didn't find channel in {}".format(name))
return found

def _parse(self, name, input_naming_patterns, input_naming_groups,
color_spaces, key):
def _parse_key(self, name, input_naming_patterns, input_naming_groups,
color_spaces, key):
"""Universal way to parse 'name' with configurable regex groups.

Args:

@@ -411,23 +451,47 @@ class CollectTextures(pyblish.api.ContextPlugin):
Raises:
ValueError - if broken 'input_naming_groups'
"""
parsed_groups = self._get_parsed_groups(name,
input_naming_patterns,
input_naming_groups,
color_spaces)

try:
parsed_value = parsed_groups[key]
return parsed_value
except (IndexError, KeyError):
msg = ("'Textures group positions' must " +
"have '{}' key".format(key))
raise ValueError(msg)

def _get_parsed_groups(self, name, input_naming_patterns,
input_naming_groups, color_spaces):
"""Universal way to parse 'name' with configurable regex groups.

Args:
name (str): workfile name or texture name
input_naming_patterns (list):
[workfile_pattern] or [texture_pattern]
input_naming_groups (list)
ordinal position of regex groups matching to input_naming..
color_spaces (list) - predefined color spaces

Returns:
(dict) {group_name:parsed_value}
"""
for input_pattern in input_naming_patterns:
for cs in color_spaces:
pattern = input_pattern.replace('{color_space}', cs)
regex_result = re.findall(pattern, name)
if regex_result:
idx = list(input_naming_groups).index(key)
if idx < 0:
msg = "input_naming_groups must " +\
"have '{}' key".format(key)
raise ValueError(msg)
if len(regex_result[0]) == len(input_naming_groups):
return dict(zip(input_naming_groups, regex_result[0]))
else:
self.log.warning("No of parsed groups doesn't match "
"no of group labels")

try:
parsed_value = regex_result[0][idx]
return parsed_value
except IndexError:
self.log.warning("Wrong index, probably "
"wrong name {}".format(name))
raise ValueError("Name '{}' cannot be parsed by any "
"'{}' patterns".format(name, input_naming_patterns))

def _update_representations(self, upd_representations):
"""Frames dont have sense for textures, add collected udims instead."""
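The new `_get_parsed_groups` helper maps ordinal regex groups onto configured group names. A standalone sketch of the same idea; the pattern, group labels and file name below are made up purely for illustration:

```python
import re


def parse_groups(name, patterns, group_names, color_spaces):
    """Return {group_name: value} for the first pattern that matches."""
    for pattern in patterns:
        for cs in color_spaces:
            # substitute a known color space into the pattern placeholder
            resolved = pattern.replace("{color_space}", cs)
            result = re.findall(resolved, name)
            if result and len(result[0]) == len(group_names):
                return dict(zip(group_names, result[0]))
    raise ValueError("Name '{}' cannot be parsed".format(name))


# hypothetical convention: asset_shader_channel_colorspace_version.udim.ext
pattern = r"^(\w+)_(\w+)_(\w+)_({color_space})_(v\d+)\.(\d+)\."
groups = ["asset", "shader", "channel", "color_space", "version", "udim"]
print(parse_groups(
    "chair_wood_diffuse_sRGB_v001.1001.tif", [pattern], groups, ["sRGB", "ACES"]
))
# {'asset': 'chair', 'shader': 'wood', 'channel': 'diffuse',
#  'color_space': 'sRGB', 'version': 'v001', 'udim': '1001'}
```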
@@ -0,0 +1,17 @@
<?xml version="1.0" encoding="UTF-8"?>
<root>
<error id="main">
<title>Missing source video file</title>
<description>
## No attached video file found

Process expects presence of source video file with same name prefix as an editorial file in same folder.
(example `simple_editorial_setup_Layer1.edl` expects `simple_editorial_setup.mp4` in same folder)


### How to repair?

Copy source video file to the folder next to `.edl` file. (On a disk, do not put it into Standalone Publisher.)
</description>
</error>
</root>

@@ -0,0 +1,15 @@
<?xml version="1.0" encoding="UTF-8"?>
<root>
<error id="main">
<title>Invalid frame range</title>
<description>
## Invalid frame range

Expected duration or '{duration}' frames set in database, workfile contains only '{found}' frames.

### How to repair?

Modify configuration in the database or tweak frame range in the workfile.
</description>
</error>
</root>
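These new XML templates carry `{placeholder}` keys (for example `{duration}` and `{found}` above) whose names match the `formatting_data` dicts passed by the validators further below. A rough, self-contained illustration of that substitution, using the standard `xml` module and a trimmed-down, hypothetical template rather than OpenPype's real loader:

```python
import xml.etree.ElementTree as ET

# hypothetical, trimmed-down template mirroring the file above
template = """<root>
  <error id="main">
    <title>Invalid frame range</title>
    <description>Expected '{duration}' frames, workfile contains only '{found}' frames.</description>
  </error>
</root>"""

root = ET.fromstring(template)
description = root.find("./error[@id='main']/description").text

# formatting_data as the validators pass it to PublishXmlValidationError
formatting_data = {"duration": 120, "found": 100}
print(description.format(**formatting_data))
# Expected '120' frames, workfile contains only '100' frames.
```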
@@ -0,0 +1,15 @@
<?xml version="1.0" encoding="UTF-8"?>
<root>
<error id="main">
<title>Duplicate shots</title>
<description>
## Duplicate shot names

Process contains duplicated shot names '{duplicates_str}'.

### How to repair?

Remove shot duplicates.
</description>
</error>
</root>

@@ -0,0 +1,16 @@
<?xml version="1.0" encoding="UTF-8"?>
<root>
<error id="main">
<title>Files not found</title>
<description>
## Source files not found

Process contains duplicated shot names:
'{files_not_found}'

### How to repair?

Add missing files or run Publish again to collect new publishable files.
</description>
</error>
</root>

@@ -0,0 +1,16 @@
<?xml version="1.0" encoding="UTF-8"?>
<root>
<error id="main">
<title>Task not found</title>
<description>
## Task not found in database

Process contains tasks that don't exist in database:
'{task_not_found}'

### How to repair?

Remove set task or add task into database into proper place.
</description>
</error>
</root>

@@ -0,0 +1,15 @@
<?xml version="1.0" encoding="UTF-8"?>
<root>
<error id="main">
<title>No texture files found</title>
<description>
## Batch doesn't contain texture files

Batch must contain at least one texture file.

### How to repair?

Add texture file to the batch or check name if it follows naming convention to match texture files to the batch.
</description>
</error>
</root>

@@ -0,0 +1,15 @@
<?xml version="1.0" encoding="UTF-8"?>
<root>
<error id="main">
<title>No workfile found</title>
<description>
## Batch should contain workfile

It is expected that published contains workfile that served as a source for textures.

### How to repair?

Add workfile to the batch, or disable this validator if you do not want workfile published.
</description>
</error>
</root>

@@ -0,0 +1,32 @@
<?xml version="1.0" encoding="UTF-8"?>
<root>
<error id="main">
<title>Asset name not found</title>
<description>
## Couldn't parse asset name from a file

Unable to parse asset name from '{file_name}'. File name doesn't match configured naming convention.

### How to repair?

Check Settings: project_settings/standalonepublisher/publish/CollectTextures for naming convention.
</description>
<detail>
### __Detailed Info__ (optional)

This error happens when parsing cannot figure out name of asset texture files belong under.
</detail>
</error>
<error id="missing_values">
<title>Missing keys</title>
<description>
## Texture file name is missing some required keys

Texture '{file_name}' is missing values for {missing_str} keys.

### How to repair?

Fix name of texture file and Publish again.
</description>
</error>
</root>

@@ -0,0 +1,35 @@
<?xml version="1.0" encoding="UTF-8"?>
<root>
<error id="main">
<title>Texture version</title>
<description>
## Texture version mismatch with workfile

Workfile '{file_name}' version doesn't match with '{version}' of a texture.

### How to repair?

Rename either workfile or texture to contain matching versions
</description>
<detail>
### __Detailed Info__ (optional)

This might happen if you are trying to publish textures for older version of workfile (or the other way).
(Eg. publishing 'workfile_v001' and 'texture_file_v002')
</detail>
</error>
<error id="too_many">
<title>Too many versions</title>
<description>
## Too many versions published at same time

It is currently expected to publish only batch with single version.

Found {found} versions.

### How to repair?

Please remove files with different version and split publishing into multiple steps.
</description>
</error>
</root>

@@ -0,0 +1,23 @@
<?xml version="1.0" encoding="UTF-8"?>
<root>
<error id="main">
<title>No secondary workfile</title>
<description>
## No secondary workfile found

Current process expects that primary workfile (for example with a extension '{extension}') will contain also 'secondary' workfile.

Secondary workfile for '{file_name}' wasn't found.

### How to repair?

Attach secondary workfile or disable this validator and Publish again.
</description>
<detail>
### __Detailed Info__ (optional)

This process was implemented for a possible use case of first workfile coming from Mari, secondary workfile for textures from Substance.
Publish should contain both if primary workfile is present.
</detail>
</error>
</root>
@@ -1,5 +1,6 @@
import pyblish.api
import openpype.api
from openpype.pipeline import PublishXmlValidationError


class ValidateEditorialResources(pyblish.api.InstancePlugin):

@@ -19,5 +20,7 @@ class ValidateEditorialResources(pyblish.api.InstancePlugin):
f"Instance: {instance}, Families: "
f"{[instance.data['family']] + instance.data['families']}")
check_file = instance.data["editorialSourcePath"]
msg = f"Missing \"{check_file}\"."
assert check_file, msg
msg = "Missing source video file."

if not check_file:
raise PublishXmlValidationError(self, msg)

@@ -1,8 +1,10 @@
import re

import pyblish.api

import openpype.api
from openpype import lib
from openpype.pipeline import PublishXmlValidationError


class ValidateFrameRange(pyblish.api.InstancePlugin):

@@ -48,9 +50,15 @@ class ValidateFrameRange(pyblish.api.InstancePlugin):
files = [files]
frames = len(files)

err_msg = "Frame duration from DB:'{}' ". format(int(duration)) +\
" doesn't match number of files:'{}'".format(frames) +\
" Please change frame range for Asset or limit no. of files"
assert frames == duration, err_msg
msg = "Frame duration from DB:'{}' ". format(int(duration)) +\
" doesn't match number of files:'{}'".format(frames) +\
" Please change frame range for Asset or limit no. of files"

self.log.debug("Valid ranges {} - {}".format(int(duration), frames))
formatting_data = {"duration": duration,
"found": frames}
if frames != duration:
raise PublishXmlValidationError(self, msg,
formatting_data=formatting_data)

self.log.debug("Valid ranges expected '{}' - found '{}'".
format(int(duration), frames))
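Across these validators the bare `assert` statements are being replaced with `PublishXmlValidationError`, which carries a `formatting_data` dict used to fill the XML templates above. A minimal sketch of that pattern with a stand-in exception class; the real class lives in `openpype.pipeline` and additionally resolves the matching XML help file:

```python
class PublishXmlValidationError(Exception):
    """Stand-in for openpype.pipeline.PublishXmlValidationError."""

    def __init__(self, plugin, message, key="main", formatting_data=None):
        super().__init__(message)
        self.plugin = plugin
        self.key = key                     # selects <error id="..."> in the XML
        self.formatting_data = formatting_data or {}


def validate_frame_count(plugin, duration, frames):
    """Raise instead of asserting so the UI can show the formatted help."""
    if frames != duration:
        raise PublishXmlValidationError(
            plugin,
            "Frame duration '{}' doesn't match number of files '{}'".format(
                duration, frames),
            formatting_data={"duration": duration, "found": frames},
        )


validate_frame_count(plugin=None, duration=120, frames=120)  # passes silently
```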
@@ -1,6 +1,7 @@
import pyblish.api
import openpype.api

import openpype.api
from openpype.pipeline import PublishXmlValidationError

class ValidateShotDuplicates(pyblish.api.ContextPlugin):
"""Validating no duplicate names are in context."""

@@ -20,4 +21,8 @@ class ValidateShotDuplicates(pyblish.api.ContextPlugin):
shot_names.append(name)

msg = "There are duplicate shot names:\n{}".format(duplicate_names)
assert not duplicate_names, msg

formatting_data = {"duplicates_str": ','.join(duplicate_names)}
if duplicate_names:
raise PublishXmlValidationError(self, msg,
formatting_data=formatting_data)

@@ -1,8 +1,10 @@
import pyblish.api
import openpype.api

import os

import pyblish.api

import openpype.api
from openpype.pipeline import PublishXmlValidationError


class ValidateSources(pyblish.api.InstancePlugin):
"""Validates source files.

@@ -11,7 +13,6 @@ class ValidateSources(pyblish.api.InstancePlugin):
got deleted between starting of SP and now.

"""

order = openpype.api.ValidateContentsOrder
label = "Check source files"

@@ -22,6 +23,7 @@ class ValidateSources(pyblish.api.InstancePlugin):
def process(self, instance):
self.log.info("instance {}".format(instance.data))

missing_files = set()
for repre in instance.data.get("representations") or []:
files = []
if isinstance(repre["files"], str):

@@ -34,4 +36,10 @@ class ValidateSources(pyblish.api.InstancePlugin):
file_name)

if not os.path.exists(source_file):
raise ValueError("File {} not found".format(source_file))
missing_files.add(source_file)

msg = "Files '{}' not found".format(','.join(missing_files))
formatting_data = {"files_not_found": ' - {}'.join(missing_files)}
if missing_files:
raise PublishXmlValidationError(self, msg,
formatting_data=formatting_data)

@@ -1,6 +1,8 @@
import pyblish.api
from avalon import io

from openpype.pipeline import PublishXmlValidationError


class ValidateTaskExistence(pyblish.api.ContextPlugin):
"""Validating tasks on instances are filled and existing."""

@@ -53,4 +55,9 @@ class ValidateTaskExistence(pyblish.api.ContextPlugin):
"Asset: \"{}\" Task: \"{}\"".format(*missing_pair)
)

raise AssertionError(msg.format("\n".join(pair_msgs)))
msg = msg.format("\n".join(pair_msgs))

formatting_data = {"task_not_found": ' - {}'.join(pair_msgs)}
if pair_msgs:
raise PublishXmlValidationError(self, msg,
formatting_data=formatting_data)

@@ -1,6 +1,8 @@
import pyblish.api
import openpype.api

from openpype.pipeline import PublishXmlValidationError


class ValidateTextureBatch(pyblish.api.InstancePlugin):
"""Validates that some texture files are present."""

@@ -15,8 +17,10 @@ class ValidateTextureBatch(pyblish.api.InstancePlugin):
present = False
for instance in instance.context:
if instance.data["family"] == "textures":
self.log.info("Some textures present.")
self.log.info("At least some textures present.")

return

assert present, "No textures found in published batch!"
msg = "No textures found in published batch!"
if not present:
raise PublishXmlValidationError(self, msg)

@@ -1,5 +1,7 @@
import pyblish.api

import openpype.api
from openpype.pipeline import PublishXmlValidationError


class ValidateTextureHasWorkfile(pyblish.api.InstancePlugin):

@@ -17,4 +19,6 @@ class ValidateTextureHasWorkfile(pyblish.api.InstancePlugin):
def process(self, instance):
wfile = instance.data["versionData"].get("workfile")

assert wfile, "Textures are missing attached workfile"
msg = "Textures are missing attached workfile"
if not wfile:
raise PublishXmlValidationError(self, msg)

@@ -1,6 +1,7 @@
import pyblish.api
import openpype.api

import openpype.api
from openpype.pipeline import PublishXmlValidationError

class ValidateTextureBatchNaming(pyblish.api.InstancePlugin):
"""Validates that all instances had properly formatted name."""

@@ -19,9 +20,13 @@ class ValidateTextureBatchNaming(pyblish.api.InstancePlugin):
msg = "Couldn't find asset name in '{}'\n".format(file_name) + \
"File name doesn't follow configured pattern.\n" + \
"Please rename the file."
assert "NOT_AVAIL" not in instance.data["asset_build"], msg

instance.data.pop("asset_build")
formatting_data = {"file_name": file_name}
if "NOT_AVAIL" in instance.data["asset_build"]:
raise PublishXmlValidationError(self, msg,
formatting_data=formatting_data)

instance.data.pop("asset_build") # not needed anymore

if instance.data["family"] == "textures":
file_name = instance.data["representations"][0]["files"][0]

@@ -47,4 +52,10 @@ class ValidateTextureBatchNaming(pyblish.api.InstancePlugin):
"Name of the texture file doesn't match expected pattern.\n" + \
"Please rename file(s) {}".format(file_name)

assert not missing_key_values, msg
missing_str = ','.join(["'{}'".format(key)
for key in missing_key_values])
formatting_data = {"file_name": file_name,
"missing_str": missing_str}
if missing_key_values:
raise PublishXmlValidationError(self, msg, key="missing_values",
formatting_data=formatting_data)

@@ -1,5 +1,7 @@
import pyblish.api

import openpype.api
from openpype.pipeline import PublishXmlValidationError


class ValidateTextureBatchVersions(pyblish.api.InstancePlugin):

@@ -25,14 +27,21 @@ class ValidateTextureBatchVersions(pyblish.api.InstancePlugin):
self.log.info("No workfile present for textures")
return

msg = "Not matching version: texture v{:03d} - workfile {}"
assert version_str in wfile, \
if version_str not in wfile:
msg = "Not matching version: texture v{:03d} - workfile {}"
msg.format(
instance.data["version"], wfile
)
raise PublishXmlValidationError(self, msg)

present_versions = set()
for instance in instance.context:
present_versions.add(instance.data["version"])

assert len(present_versions) == 1, "Too many versions in a batch!"
if len(present_versions) != 1:
msg = "Too many versions in a batch!"
found = ','.join(["'{}'".format(val) for val in present_versions])
formatting_data = {"found": found}

raise PublishXmlValidationError(self, msg, key="too_many",
formatting_data=formatting_data)

@@ -1,11 +1,13 @@
import pyblish.api

import openpype.api
from openpype.pipeline import PublishXmlValidationError


class ValidateTextureBatchWorkfiles(pyblish.api.InstancePlugin):
"""Validates that textures workfile has collected resources (optional).

Collected recourses means secondary workfiles (in most cases).
Collected resources means secondary workfiles (in most cases).
"""

label = "Validate Texture Workfile Has Resources"

@@ -24,6 +26,13 @@ class ValidateTextureBatchWorkfiles(pyblish.api.InstancePlugin):
self.log.warning("Only secondary workfile present!")
return

msg = "No secondary workfiles present for workfile {}".\
format(instance.data["name"])
assert instance.data.get("resources"), msg
if not instance.data.get("resources"):
msg = "No secondary workfile present for workfile '{}'". \
format(instance.data["name"])
ext = self.main_workfile_extensions[0]
formatting_data = {"file_name": instance.data["name"],
"extension": ext}

raise PublishXmlValidationError(self, msg,
formatting_data=formatting_data
)

@@ -19,7 +19,7 @@ class CollectContextDataTestHost(
hosts = ["testhost"]

@classmethod
def get_instance_attr_defs(cls):
def get_attribute_defs(cls):
return [
attribute_definitions.BoolDef(
"test_bool",

@@ -20,7 +20,7 @@ class CollectInstanceOneTestHost(
hosts = ["testhost"]

@classmethod
def get_instance_attr_defs(cls):
def get_attribute_defs(cls):
return [
attribute_definitions.NumberDef(
"version",

@@ -14,6 +14,7 @@ from avalon.pipeline import AVALON_CONTAINER_ID

from openpype.hosts import tvpaint
from openpype.api import get_current_project_settings
from openpype.pipeline import LegacyCreator

from .lib import (
execute_george,

@@ -76,7 +77,7 @@ def install():
pyblish.api.register_host("tvpaint")
pyblish.api.register_plugin_path(PUBLISH_PATH)
avalon.api.register_plugin_path(avalon.api.Loader, LOAD_PATH)
avalon.api.register_plugin_path(avalon.api.Creator, CREATE_PATH)
avalon.api.register_plugin_path(LegacyCreator, CREATE_PATH)

registered_callbacks = (
pyblish.api.registered_callbacks().get("instanceToggled") or []

@@ -98,7 +99,7 @@ def uninstall():
pyblish.api.deregister_host("tvpaint")
pyblish.api.deregister_plugin_path(PUBLISH_PATH)
avalon.api.deregister_plugin_path(avalon.api.Loader, LOAD_PATH)
avalon.api.deregister_plugin_path(avalon.api.Creator, CREATE_PATH)
avalon.api.deregister_plugin_path(LegacyCreator, CREATE_PATH)


def containerise(

@@ -3,14 +3,14 @@ import uuid

import avalon.api

from openpype.api import PypeCreatorMixin
from openpype.pipeline import LegacyCreator
from openpype.hosts.tvpaint.api import (
pipeline,
lib
)


class Creator(PypeCreatorMixin, avalon.api.Creator):
class Creator(LegacyCreator):
def __init__(self, *args, **kwargs):
super(Creator, self).__init__(*args, **kwargs)
# Add unified identifier created with `uuid` module

@@ -1,5 +1,4 @@
from avalon.api import CreatorError

from openpype.pipeline import CreatorError
from openpype.lib import prepare_template_data
from openpype.hosts.tvpaint.api import (
plugin,

@@ -1,4 +1,4 @@
from avalon.api import CreatorError
from openpype.pipeline import CreatorError
from openpype.lib import prepare_template_data
from openpype.hosts.tvpaint.api import (
plugin,

@@ -1,4 +1,4 @@
from avalon.vendor import qargparse
import qargparse
from openpype.hosts.tvpaint.api import lib, plugin


@@ -1,6 +1,6 @@
import collections
import qargparse
from avalon.pipeline import get_representation_context
from avalon.vendor import qargparse
from openpype.hosts.tvpaint.api import lib, pipeline, plugin


@@ -0,0 +1,22 @@
<?xml version="1.0" encoding="UTF-8"?>
<root>
<error id="main">
<title>Subset context</title>
<description>## Invalid subset context

Context of the given subset doesn't match your current scene.

### How to repair?

Yout can fix this with "Repair" button on the right. This will use '{expected_asset}' asset name and overwrite '{found_asset}' asset name in scene metadata.

After that restart publishing with Reload button.
</description>
<detail>
### How could this happen?

The subset was created in different scene with different context
or the scene file was copy pasted from different context.
</detail>
</error>
</root>

@@ -0,0 +1,22 @@
<?xml version="1.0" encoding="UTF-8"?>
<root>
<error id="main">
<title>Layer names</title>
<description>## Duplicated layer names

Can't determine which layers should be published because there are duplicated layer names in the scene.

### Duplicated layer names

{layer_names}

*Check layer names for all subsets in list on left side.*

### How to repair?

Hide/rename/remove layers that should not be published.

If all of them should be published then you have duplicated subset names in the scene. In that case you have to recrete them and use different variant name.
</description>
</error>
</root>

@@ -0,0 +1,20 @@
<?xml version="1.0" encoding="UTF-8"?>
<root>
<error id="main">
<title>Layers visiblity</title>
<description>## All layers are not visible

Layers visibility was changed during publishing which caused that all layers for subset "{instance_name}" are hidden.

### Layer names for **{instance_name}**

{layer_names}

*Check layer names for all subsets in the list on the left side.*

### How to repair?

Reset publishing and do not change visibility of layers after hitting publish button.
</description>
</error>
</root>

@@ -0,0 +1,21 @@
<?xml version="1.0" encoding="UTF-8"?>
<root>
<error id="main">
<title>Frame range</title>
<description>## Invalid render frame range

Scene frame range which will be rendered is defined by MarkIn and MarkOut. Expected frame range is {expected_frame_range} and current frame range is {current_frame_range}.

It is also required that MarkIn and MarkOut are enabled in the scene. Their color is highlighted on timeline when are enabled.

- MarkIn is {mark_in_enable_state}
- MarkOut is {mark_out_enable_state}

### How to repair?

Yout can fix this with "Repair" button on the right. That will change MarkOut to {expected_mark_out}.

Or you can manually modify MarkIn and MarkOut in the scene timeline.
</description>
</error>
</root>

@@ -0,0 +1,18 @@
<?xml version="1.0" encoding="UTF-8"?>
<root>
<error id="main">
<title>Missing layers</title>
<description>## Missing layers for render pass

Render pass subset "{instance_name}" has stored layer names that belong to it's rendering scope but layers were not found in scene.

### Missing layer names

{layer_names}

### How to repair?

Find layers that belong to subset {instance_name} and rename them back to expected layer names or remove the subset and create new with right layers.
</description>
</error>
</root>

@@ -0,0 +1,14 @@
<?xml version="1.0" encoding="UTF-8"?>
<root>
<error id="main">
<title>Render pass group</title>
<description>## Invalid group of Render Pass layers

Layers of Render Pass {instance_name} belong to Render Group which is defined by TVPaint color group {expected_group}. But the layers are not in the group.

### How to repair?

Change the color group to {expected_group} on layers {layer_names}.
</description>
</error>
</root>

@@ -0,0 +1,26 @@
<?xml version="1.0" encoding="UTF-8"?>
<root>
<error id="main">
<title>Scene settings</title>
<description>## Invalid scene settings

Scene settings do not match to expected values.

**FPS**
- Expected value: {expected_fps}
- Current value: {current_fps}

**Resolution**
- Expected value: {expected_width}x{expected_height}
- Current value: {current_width}x{current_height}

**Pixel ratio**
- Expected value: {expected_pixel_ratio}
- Current value: {current_pixel_ratio}

### How to repair?

FPS and Pixel ratio can be modified in scene setting. Wrong resolution can be fixed with changing resolution of scene but due to TVPaint limitations it is possible that you will need to create new scene.
</description>
</error>
</root>

@@ -0,0 +1,14 @@
<?xml version="1.0" encoding="UTF-8"?>
<root>
<error id="main">
<title>First frame</title>
<description>## MarkIn is not set to 0

MarkIn in your scene must start from 0 fram index but MarkIn is set to {current_start_frame}.

### How to repair?

You can modify MarkIn manually or hit the "Repair" button on the right which will change MarkIn to 0 (does not change MarkOut).
</description>
</error>
</root>