mirror of https://github.com/ynput/ayon-core.git
synced 2026-01-02 00:44:52 +01:00

Merge branch 'develop' of https://github.com/pypeclub/OpenPype into refactor_integrator

commit e37b9f0141
151 changed files with 5719 additions and 2295 deletions
100	CHANGELOG.md
@@ -1,24 +1,76 @@
# Changelog

-## [3.9.2-nightly.1](https://github.com/pypeclub/OpenPype/tree/HEAD)
+## [3.9.2-nightly.4](https://github.com/pypeclub/OpenPype/tree/HEAD)

[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.9.1...HEAD)

### 📖 Documentation

- Documentation: Added mention of adding My Drive as a root [\#2999](https://github.com/pypeclub/OpenPype/pull/2999)
- Docs: Added MongoDB requirements [\#2951](https://github.com/pypeclub/OpenPype/pull/2951)
- Documentation: New publisher develop docs [\#2896](https://github.com/pypeclub/OpenPype/pull/2896)

**🆕 New features**

- nuke: bypass baking [\#2992](https://github.com/pypeclub/OpenPype/pull/2992)
- Multiverse: Initial Support [\#2908](https://github.com/pypeclub/OpenPype/pull/2908)

**🚀 Enhancements**

- CI: change the version bump logic [\#2919](https://github.com/pypeclub/OpenPype/pull/2919)
- Deadline: Add headless argument [\#2916](https://github.com/pypeclub/OpenPype/pull/2916)
- Ftrack: Fill workfile in custom attribute [\#2906](https://github.com/pypeclub/OpenPype/pull/2906)
- Settings UI: Add simple tooltips for settings entities [\#2901](https://github.com/pypeclub/OpenPype/pull/2901)
- Photoshop: create image without instance [\#3001](https://github.com/pypeclub/OpenPype/pull/3001)
- TVPaint: Render scene family [\#3000](https://github.com/pypeclub/OpenPype/pull/3000)
- Nuke: ReviewDataMov Read RAW attribute [\#2985](https://github.com/pypeclub/OpenPype/pull/2985)
- General: `METADATA\_KEYS` constant as `frozenset` for optimal immutable lookup [\#2980](https://github.com/pypeclub/OpenPype/pull/2980)
- General: Tools with host filters [\#2975](https://github.com/pypeclub/OpenPype/pull/2975)
- Hero versions: Use custom templates [\#2967](https://github.com/pypeclub/OpenPype/pull/2967)
- Slack: Added configurable maximum file size of review upload to Slack [\#2945](https://github.com/pypeclub/OpenPype/pull/2945)
- NewPublisher: Prepared implementation of optional pyblish plugin [\#2943](https://github.com/pypeclub/OpenPype/pull/2943)
- TVPaint: Extractor to convert PNG into EXR [\#2942](https://github.com/pypeclub/OpenPype/pull/2942)
- Workfiles: Open published workfiles [\#2925](https://github.com/pypeclub/OpenPype/pull/2925)
- General: Default modules loaded dynamically [\#2923](https://github.com/pypeclub/OpenPype/pull/2923)
- Nuke: Add no-audio Tag [\#2911](https://github.com/pypeclub/OpenPype/pull/2911)
- Nuke: improving readability [\#2903](https://github.com/pypeclub/OpenPype/pull/2903)

**🐛 Bug fixes**

- Ftrack: Missing Ftrack id after editorial publish [\#2905](https://github.com/pypeclub/OpenPype/pull/2905)
- AfterEffects: Fix rendering for single frame in DL [\#2875](https://github.com/pypeclub/OpenPype/pull/2875)
- Hosts: Remove path existence checks in 'add\_implementation\_envs' [\#3004](https://github.com/pypeclub/OpenPype/pull/3004)
- Fix - remove doubled dot in workfile created from template [\#2998](https://github.com/pypeclub/OpenPype/pull/2998)
- PS: fix renaming subset incorrectly in PS [\#2991](https://github.com/pypeclub/OpenPype/pull/2991)
- Fix: Disable setuptools auto discovery [\#2990](https://github.com/pypeclub/OpenPype/pull/2990)
- AEL: fix opening existing workfile if no scene opened [\#2989](https://github.com/pypeclub/OpenPype/pull/2989)
- Maya: Don't do hardlinks on windows for look publishing [\#2986](https://github.com/pypeclub/OpenPype/pull/2986)
- Settings UI: Fix version completer on linux [\#2981](https://github.com/pypeclub/OpenPype/pull/2981)
- Photoshop: Fix creation of subset names in PS review and workfile [\#2969](https://github.com/pypeclub/OpenPype/pull/2969)
- Slack: Added default for review\_upload\_limit for Slack [\#2965](https://github.com/pypeclub/OpenPype/pull/2965)
- General: OIIO conversion for ffmeg can handle sequences [\#2958](https://github.com/pypeclub/OpenPype/pull/2958)
- Settings: Conditional dictionary avoid invalid logs [\#2956](https://github.com/pypeclub/OpenPype/pull/2956)
- General: Smaller fixes and typos [\#2950](https://github.com/pypeclub/OpenPype/pull/2950)
- LogViewer: Don't refresh on initialization [\#2949](https://github.com/pypeclub/OpenPype/pull/2949)
- nuke: python3 compatibility issue with `iteritems` [\#2948](https://github.com/pypeclub/OpenPype/pull/2948)
- General: anatomy data with correct task short key [\#2947](https://github.com/pypeclub/OpenPype/pull/2947)
- SceneInventory: Fix imports in UI [\#2944](https://github.com/pypeclub/OpenPype/pull/2944)
- Slack: add generic exception [\#2941](https://github.com/pypeclub/OpenPype/pull/2941)
- General: Python specific vendor paths on env injection [\#2939](https://github.com/pypeclub/OpenPype/pull/2939)
- General: More fail safe delete old versions [\#2936](https://github.com/pypeclub/OpenPype/pull/2936)
- Settings UI: Collapsed of collapsible wrapper works as expected [\#2934](https://github.com/pypeclub/OpenPype/pull/2934)
- Maya: Do not pass `set` to maya commands \(fixes support for older maya versions\) [\#2932](https://github.com/pypeclub/OpenPype/pull/2932)
- General: Don't print log record on OSError [\#2926](https://github.com/pypeclub/OpenPype/pull/2926)
- Flame: centos related debugging [\#2922](https://github.com/pypeclub/OpenPype/pull/2922)

**🔀 Refactored code**

- General: Move formatting and workfile functions [\#2914](https://github.com/pypeclub/OpenPype/pull/2914)
- General: Move plugins register and discover [\#2935](https://github.com/pypeclub/OpenPype/pull/2935)
- General: Move Attribute Definitions from pipeline [\#2931](https://github.com/pypeclub/OpenPype/pull/2931)
- General: Removed silo references and terminal splash [\#2927](https://github.com/pypeclub/OpenPype/pull/2927)
- General: Move pipeline constants to OpenPype [\#2918](https://github.com/pypeclub/OpenPype/pull/2918)
- General: Move remaining plugins from avalon [\#2912](https://github.com/pypeclub/OpenPype/pull/2912)

**Merged pull requests:**

- Bump paramiko from 2.9.2 to 2.10.1 [\#2973](https://github.com/pypeclub/OpenPype/pull/2973)
- Bump minimist from 1.2.5 to 1.2.6 in /website [\#2954](https://github.com/pypeclub/OpenPype/pull/2954)
- Bump node-forge from 1.2.1 to 1.3.0 in /website [\#2953](https://github.com/pypeclub/OpenPype/pull/2953)
- Maya - added transparency into review creator [\#2952](https://github.com/pypeclub/OpenPype/pull/2952)

## [3.9.1](https://github.com/pypeclub/OpenPype/tree/3.9.1) (2022-03-18)
@@ -52,10 +104,6 @@

[Full Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.9.0-nightly.9...3.9.0)

**Deprecated:**

- AssetCreator: Remove the tool [\#2845](https://github.com/pypeclub/OpenPype/pull/2845)

### 📖 Documentation

- Documentation: Change Photoshop & AfterEffects plugin path [\#2878](https://github.com/pypeclub/OpenPype/pull/2878)
@@ -66,14 +114,6 @@
- NewPublisher: Descriptions and Icons in creator dialog [\#2867](https://github.com/pypeclub/OpenPype/pull/2867)
- NewPublisher: Changing task on publishing instance [\#2863](https://github.com/pypeclub/OpenPype/pull/2863)
- TrayPublisher: Choose project widget is more clear [\#2859](https://github.com/pypeclub/OpenPype/pull/2859)
- New: Validation exceptions [\#2841](https://github.com/pypeclub/OpenPype/pull/2841)
- Maya: add loaded containers to published instance [\#2837](https://github.com/pypeclub/OpenPype/pull/2837)
- Ftrack: Can sync fps as string [\#2836](https://github.com/pypeclub/OpenPype/pull/2836)
- General: Custom function for find executable [\#2822](https://github.com/pypeclub/OpenPype/pull/2822)
- General: Color dialog UI fixes [\#2817](https://github.com/pypeclub/OpenPype/pull/2817)
- global: letter box calculated on output as last process [\#2812](https://github.com/pypeclub/OpenPype/pull/2812)
- Nuke: adding Reformat to baking mov plugin [\#2811](https://github.com/pypeclub/OpenPype/pull/2811)
- Manager: Update all to latest button [\#2805](https://github.com/pypeclub/OpenPype/pull/2805)

**🐛 Bug fixes**
@@ -87,31 +127,11 @@
- General: Fix getattr clalback on dynamic modules [\#2855](https://github.com/pypeclub/OpenPype/pull/2855)
- Nuke: slate resolution to input video resolution [\#2853](https://github.com/pypeclub/OpenPype/pull/2853)
- WebPublisher: Fix username stored in DB [\#2852](https://github.com/pypeclub/OpenPype/pull/2852)
- WebPublisher: Fix wrong number of frames for video file [\#2851](https://github.com/pypeclub/OpenPype/pull/2851)
- Nuke: Fix family test in validate\_write\_legacy to work with stillImage [\#2847](https://github.com/pypeclub/OpenPype/pull/2847)
- Nuke: fix multiple baking profile farm publishing [\#2842](https://github.com/pypeclub/OpenPype/pull/2842)
- Blender: Fixed parameters for FBX export of the camera [\#2840](https://github.com/pypeclub/OpenPype/pull/2840)
- Maya: Stop creation of reviews for Cryptomattes [\#2832](https://github.com/pypeclub/OpenPype/pull/2832)
- Deadline: Remove recreated event [\#2828](https://github.com/pypeclub/OpenPype/pull/2828)
- Deadline: Added missing events folder [\#2827](https://github.com/pypeclub/OpenPype/pull/2827)
- Maya: Deformer node ids validation plugin [\#2826](https://github.com/pypeclub/OpenPype/pull/2826)
- Settings: Missing document with OP versions may break start of OpenPype [\#2825](https://github.com/pypeclub/OpenPype/pull/2825)
- Deadline: more detailed temp file name for environment json [\#2824](https://github.com/pypeclub/OpenPype/pull/2824)
- General: Host name was formed from obsolete code [\#2821](https://github.com/pypeclub/OpenPype/pull/2821)
- Settings UI: Fix "Apply from" action [\#2820](https://github.com/pypeclub/OpenPype/pull/2820)
- Ftrack: Job killer with missing user [\#2819](https://github.com/pypeclub/OpenPype/pull/2819)
- Nuke: Use AVALON\_APP to get value for "app" key [\#2818](https://github.com/pypeclub/OpenPype/pull/2818)
- StandalonePublisher: use dynamic groups in subset names [\#2816](https://github.com/pypeclub/OpenPype/pull/2816)

**🔀 Refactored code**

- Refactor: move webserver tool to openpype [\#2876](https://github.com/pypeclub/OpenPype/pull/2876)
- General: Move create logic from avalon to OpenPype [\#2854](https://github.com/pypeclub/OpenPype/pull/2854)
- General: Add vendors from avalon [\#2848](https://github.com/pypeclub/OpenPype/pull/2848)
- General: Basic event system [\#2846](https://github.com/pypeclub/OpenPype/pull/2846)
- General: Move change context functions [\#2839](https://github.com/pypeclub/OpenPype/pull/2839)
- Tools: Don't use avalon tools code [\#2829](https://github.com/pypeclub/OpenPype/pull/2829)
- Move Unreal Implementation to OpenPype [\#2823](https://github.com/pypeclub/OpenPype/pull/2823)

## [3.8.2](https://github.com/pypeclub/OpenPype/tree/3.8.2) (2022-02-07)
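One enhancement above, [\#2980], turns the `METADATA_KEYS` constant into a `frozenset`. A minimal sketch of why that container fits a constant lookup table — the constant name comes from the PR title, the example keys are made up:

```python
# Membership tests on a frozenset are average O(1) hash lookups and the
# container is immutable, so the constant cannot be mutated by accident.
METADATA_KEYS = frozenset({"asset", "subset", "family", "version"})


def strip_metadata(data):
    """Return a copy of ``data`` without the known metadata keys."""
    return {key: value for key, value in data.items()
            if key not in METADATA_KEYS}


print(strip_metadata({"asset": "sh010", "width": 1920}))
# -> {'width': 1920}
```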
@@ -2,20 +2,16 @@
"""Pype module."""
import os
import platform
import functools
import logging

from .settings import get_project_settings
from .lib import (
    Anatomy,
    filter_pyblish_plugins,
    set_plugin_attributes_from_settings,
    change_timer_to_current_context,
    register_event_callback,
)

pyblish = avalon = _original_discover = None

log = logging.getLogger(__name__)
@@ -27,60 +23,17 @@ PUBLISH_PATH = os.path.join(PLUGINS_DIR, "publish")
LOAD_PATH = os.path.join(PLUGINS_DIR, "load")


def import_wrapper(func):
    """Wrap module imports to specific functions."""
    @functools.wraps(func)
    def decorated(*args, **kwargs):
        global pyblish
        global avalon
        global _original_discover
        if pyblish is None:
            from pyblish import api as pyblish
            from avalon import api as avalon

            # we are monkey patching `avalon.api.discover()` to allow us to
            # load plugin presets on plugins being discovered by avalon.
            # Little bit of hacking, but it allows us to add out own features
            # without need to modify upstream code.

            _original_discover = avalon.discover

        return func(*args, **kwargs)

    return decorated


@import_wrapper
def patched_discover(superclass):
    """Patch `avalon.api.discover()`.

    Monkey patched version of :func:`avalon.api.discover()`. It allows
    us to load presets on plugins being discovered.
    """
    # run original discover and get plugins
    plugins = _original_discover(superclass)
    filtered_plugins = [
        plugin
        for plugin in plugins
        if issubclass(plugin, superclass)
    ]

    set_plugin_attributes_from_settings(filtered_plugins, superclass)

    return filtered_plugins


@import_wrapper
def install():
-    """Install Pype to Avalon."""
+    """Install OpenPype to Avalon."""
    import avalon.api
    import pyblish.api
    from pyblish.lib import MessageHandler
    from openpype.modules import load_modules
    from openpype.pipeline import (
        LegacyCreator,
        register_loader_plugin_path,
        register_inventory_action,
        register_creator_plugin_path,
    )
    from avalon import pipeline

    # Make sure modules are loaded
    load_modules()
@@ -93,8 +46,8 @@ def install():
    MessageHandler.emit = modified_emit

    log.info("Registering global plug-ins..")
-    pyblish.register_plugin_path(PUBLISH_PATH)
-    pyblish.register_discovery_filter(filter_pyblish_plugins)
+    pyblish.api.register_plugin_path(PUBLISH_PATH)
+    pyblish.api.register_discovery_filter(filter_pyblish_plugins)
    register_loader_plugin_path(LOAD_PATH)

    project_name = os.environ.get("AVALON_PROJECT")
@@ -103,7 +56,7 @@ def install():
    if project_name:
        anatomy = Anatomy(project_name)
        anatomy.set_root_environments()
-        avalon.register_root(anatomy.roots)
+        avalon.api.register_root(anatomy.roots)

        project_settings = get_project_settings(project_name)
        platform_name = platform.system().lower()
@@ -122,17 +75,14 @@ def install():
        if not path or not os.path.exists(path):
            continue

-        pyblish.register_plugin_path(path)
+        pyblish.api.register_plugin_path(path)
        register_loader_plugin_path(path)
-        avalon.register_plugin_path(LegacyCreator, path)
+        register_creator_plugin_path(path)
        register_inventory_action(path)

    # apply monkey patched discover to original one
    log.info("Patching discovery")

-    avalon.discover = patched_discover
+    pipeline.discover = patched_discover

    register_event_callback("taskChanged", _on_task_change)
@@ -140,16 +90,13 @@ def _on_task_change():
    change_timer_to_current_context()


@import_wrapper
def uninstall():
    """Uninstall Pype from Avalon."""
    import pyblish.api
    from openpype.pipeline import deregister_loader_plugin_path

    log.info("Deregistering global plug-ins..")
-    pyblish.deregister_plugin_path(PUBLISH_PATH)
-    pyblish.deregister_discovery_filter(filter_pyblish_plugins)
+    pyblish.api.deregister_plugin_path(PUBLISH_PATH)
+    pyblish.api.deregister_discovery_filter(filter_pyblish_plugins)
    deregister_loader_plugin_path(LOAD_PATH)
    log.info("Global plug-ins unregistred")

    # restore original discover
    avalon.discover = _original_discover
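The hunks above re-point the monkey-patched `discover` from `avalon.api` to `avalon.pipeline`. A minimal standalone sketch of the wrap/patch/restore pattern the module relies on — the stand-in module and plugin source below are illustrative, not the real avalon API:

```python
import types

# Stand-in for the avalon "pipeline" module that owns discover().
pipeline = types.ModuleType("pipeline")
pipeline.discover = lambda superclass: superclass.__subclasses__()

_original_discover = pipeline.discover


def patched_discover(superclass):
    """Run the original discover, then filter the result."""
    plugins = _original_discover(superclass)
    # Keep only plugins that really inherit from the requested superclass;
    # this is also the spot where settings-driven attributes get applied.
    return [plugin for plugin in plugins if issubclass(plugin, superclass)]


pipeline.discover = patched_discover  # install the patch
# ... and on uninstall, restore:
# pipeline.discover = _original_discover
```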
@@ -1,35 +0,0 @@
-import os
-from openpype.lib import PreLaunchHook
-
-
-class PrePython2Vendor(PreLaunchHook):
-    """Prepend python 2 dependencies for py2 hosts."""
-    order = 10
-
-    def execute(self):
-        if not self.application.use_python_2:
-            return
-
-        # Prepare vendor dir path
-        self.log.info("adding global python 2 vendor")
-        pype_root = os.getenv("OPENPYPE_REPOS_ROOT")
-        python_2_vendor = os.path.join(
-            pype_root,
-            "openpype",
-            "vendor",
-            "python",
-            "python_2"
-        )
-
-        # Add Python 2 modules
-        python_paths = [
-            python_2_vendor
-        ]
-
-        # Load PYTHONPATH from current launch context
-        python_path = self.launch_context.env.get("PYTHONPATH")
-        if python_path:
-            python_paths.append(python_path)
-
-        # Set new PYTHONPATH to launch context environments
-        self.launch_context.env["PYTHONPATH"] = os.pathsep.join(python_paths)
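The deleted hook above is still a useful template for a common launch-hook chore: prepending a vendor directory to `PYTHONPATH` without losing the inherited value. A standalone sketch with made-up paths:

```python
import os


def prepend_python_path(env, vendor_dir):
    """Prepend ``vendor_dir`` to PYTHONPATH in an environment mapping."""
    paths = [vendor_dir]
    existing = env.get("PYTHONPATH")
    if existing:
        paths.append(existing)
    env["PYTHONPATH"] = os.pathsep.join(paths)


env = {"PYTHONPATH": "/opt/site-packages"}
prepend_python_path(env, "/opt/vendor/python_2")
print(env["PYTHONPATH"])  # "/opt/vendor/python_2:/opt/site-packages" on POSIX
```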
@@ -5,15 +5,15 @@ from Qt import QtWidgets
from bson.objectid import ObjectId

import pyblish.api
import avalon.api
from avalon import io

from openpype import lib
from openpype.api import Logger
from openpype.pipeline import (
    LegacyCreator,
    register_loader_plugin_path,
    register_creator_plugin_path,
    deregister_loader_plugin_path,
    deregister_creator_plugin_path,
    AVALON_CONTAINER_ID,
)
import openpype.hosts.aftereffects
@@ -73,7 +73,7 @@ def install():
    pyblish.api.register_plugin_path(PUBLISH_PATH)

    register_loader_plugin_path(LOAD_PATH)
-    avalon.api.register_plugin_path(LegacyCreator, CREATE_PATH)
+    register_creator_plugin_path(CREATE_PATH)
    log.info(PUBLISH_PATH)

    pyblish.api.register_callback(
@@ -86,7 +86,7 @@ def install():
def uninstall():
    pyblish.api.deregister_plugin_path(PUBLISH_PATH)
    deregister_loader_plugin_path(LOAD_PATH)
-    avalon.api.deregister_plugin_path(LegacyCreator, CREATE_PATH)
+    deregister_creator_plugin_path(CREATE_PATH)


def on_pyblish_instance_toggled(instance, old_value, new_value):
@@ -5,14 +5,6 @@ from openpype.pipeline import HOST_WORKFILE_EXTENSIONS
from .launch_logic import get_stub


-def _active_document():
-    document_name = get_stub().get_active_document_name()
-    if not document_name:
-        return None
-
-    return document_name


def file_extensions():
    return HOST_WORKFILE_EXTENSIONS["aftereffects"]
@@ -39,7 +31,8 @@ def current_file():
        full_name = get_stub().get_active_document_full_name()
        if full_name and full_name != "null":
            return os.path.normpath(full_name).replace("\\", "/")
-    except Exception:
+    except ValueError:
+        print("Nothing opened")
        pass

    return None
@@ -47,3 +40,15 @@ def current_file():

def work_root(session):
    return os.path.normpath(session["AVALON_WORKDIR"]).replace("\\", "/")
+
+
+def _active_document():
+    # TODO merge with current_file - even in extension
+    document_name = None
+    try:
+        document_name = get_stub().get_active_document_name()
+    except ValueError:
+        print("Nothing opened")
+        pass
+
+    return document_name
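Both hunks above narrow a bare `except Exception` to `except ValueError`, apparently the error the AfterEffects stub raises when no document is open. A small sketch of the pattern with a stand-in stub call:

```python
def get_active_document_name():
    """Stand-in for the AfterEffects stub call."""
    raise ValueError("No active document")


def active_document():
    # Catch only the error that means "nothing opened"; anything
    # unexpected (connection problems, real bugs) still propagates.
    try:
        return get_active_document_name()
    except ValueError:
        print("Nothing opened")
        return None


print(active_document())  # prints "Nothing opened", then None
```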
@@ -1,12 +1,14 @@
-from openpype.pipeline import create
-from openpype.pipeline import CreatorError
+from openpype.pipeline import (
+    CreatorError,
+    LegacyCreator
+)
from openpype.hosts.aftereffects.api import (
    get_stub,
    list_instances
)


-class CreateRender(create.LegacyCreator):
+class CreateRender(LegacyCreator):
    """Render folder for publish.

    Creates subsets in format 'familyTaskSubsetname',
@@ -29,12 +29,12 @@ def add_implementation_envs(env, _app):
        env.get("OPENPYPE_BLENDER_USER_SCRIPTS") or ""
    )
    for path in openpype_blender_user_scripts.split(os.pathsep):
-        if path and os.path.exists(path):
+        if path:
            previous_user_scripts.add(os.path.normpath(path))

    blender_user_scripts = env.get("BLENDER_USER_SCRIPTS") or ""
    for path in blender_user_scripts.split(os.pathsep):
-        if path and os.path.exists(path):
+        if path:
            previous_user_scripts.add(os.path.normpath(path))

    # Remove implementation path from user script paths as is set to
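This hunk, and the matching Hiero, Houdini and PYTHONPATH hunks further down, drop the `os.path.exists` test when collecting configured paths (PR [\#3004]), so entries that do not exist yet at collection time are kept rather than silently discarded. A sketch of the resulting filter:

```python
import os


def collect_paths(env_value):
    """Split an env var on os.pathsep, keeping every non-empty entry
    whether or not it currently exists on disk."""
    paths = set()
    for path in (env_value or "").split(os.pathsep):
        if path:
            paths.add(os.path.normpath(path))
    return paths


# POSIX example; on Windows the separator is ";" instead of ":".
print(collect_paths("/studio/scripts:/not/mounted/yet:"))
```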
@@ -14,9 +14,10 @@ import avalon.api
from avalon import io, schema

from openpype.pipeline import (
    LegacyCreator,
    register_loader_plugin_path,
    register_creator_plugin_path,
    deregister_loader_plugin_path,
    deregister_creator_plugin_path,
    AVALON_CONTAINER_ID,
)
from openpype.api import Logger
@@ -54,7 +55,7 @@ def install():
    pyblish.api.register_plugin_path(str(PUBLISH_PATH))

    register_loader_plugin_path(str(LOAD_PATH))
-    avalon.api.register_plugin_path(LegacyCreator, str(CREATE_PATH))
+    register_creator_plugin_path(str(CREATE_PATH))

    lib.append_user_scripts()
@@ -76,7 +77,7 @@ def uninstall():
    pyblish.api.deregister_plugin_path(str(PUBLISH_PATH))

    deregister_loader_plugin_path(str(LOAD_PATH))
-    avalon.api.deregister_plugin_path(LegacyCreator, str(CREATE_PATH))
+    deregister_creator_plugin_path(str(CREATE_PATH))

    if not IS_HEADLESS:
        ops.unregister()
@@ -18,6 +18,7 @@ log = Logger.get_logger(__name__)

+FRAME_PATTERN = re.compile(r"[\._](\d+)[\.]")


class CTX:
    # singleton used for passing data between api modules
    app_framework = None
@@ -538,9 +539,17 @@ def get_segment_attributes(segment):

    # head and tail with forward compatibility
    if segment.head:
-        clip_data["segment_head"] = int(segment.head)
+        # `infinite` can be also returned
+        if isinstance(segment.head, str):
+            clip_data["segment_head"] = 0
+        else:
+            clip_data["segment_head"] = int(segment.head)
    if segment.tail:
-        clip_data["segment_tail"] = int(segment.tail)
+        # `infinite` can be also returned
+        if isinstance(segment.tail, str):
+            clip_data["segment_tail"] = 0
+        else:
+            clip_data["segment_tail"] = int(segment.tail)

    # add all available shot tokens
    shot_tokens = _get_shot_tokens_values(segment, [
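The hunk above guards against Flame returning the string `infinite` for a segment head or tail instead of a frame count. A standalone sketch of the coercion, assuming only that any string value means there is no usable number:

```python
def frame_count(value):
    """Coerce a Flame head/tail value to int; strings such as
    "infinite" collapse to zero."""
    if isinstance(value, str):
        return 0
    return int(value)


print(frame_count(12))          # 12
print(frame_count("infinite"))  # 0
```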
@@ -3,14 +3,14 @@ Basic avalon integration
"""
import os
import contextlib
from avalon import api as avalon
from pyblish import api as pyblish

from openpype.api import Logger
from openpype.pipeline import (
    LegacyCreator,
    register_loader_plugin_path,
    register_creator_plugin_path,
    deregister_loader_plugin_path,
    deregister_creator_plugin_path,
    AVALON_CONTAINER_ID,
)
from .lib import (
@@ -37,7 +37,7 @@ def install():
    pyblish.register_host("flame")
    pyblish.register_plugin_path(PUBLISH_PATH)
    register_loader_plugin_path(LOAD_PATH)
-    avalon.register_plugin_path(LegacyCreator, CREATE_PATH)
+    register_creator_plugin_path(CREATE_PATH)
    log.info("OpenPype Flame plug-ins registred ...")

    # register callback for switching publishable
@@ -52,7 +52,7 @@ def uninstall():
    log.info("Deregistering Flame plug-ins..")
    pyblish.deregister_plugin_path(PUBLISH_PATH)
    deregister_loader_plugin_path(LOAD_PATH)
-    avalon.deregister_plugin_path(LegacyCreator, CREATE_PATH)
+    deregister_creator_plugin_path(CREATE_PATH)

    # register callback for switching publishable
    pyblish.deregister_callback("instanceToggled", on_pyblish_instance_toggled)
@@ -422,7 +422,13 @@ class WireTapCom(object):
        color_policy = color_policy or "Legacy"

        # check if the colour policy in custom dir
        if not os.path.exists(color_policy):
            if "/" in color_policy:
                # if unlikelly full path was used make it redundant
                color_policy = color_policy.replace("/syncolor/policies/", "")
                # expecting input is `Shared/NameOfPolicy`
                color_policy = "/syncolor/policies/{}".format(
                    color_policy)
            else:
                color_policy = "/syncolor/policies/Autodesk/{}".format(
                    color_policy)
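The new branch normalizes whatever the settings hand over: an existing directory is used as-is, a `Shared/NameOfPolicy` style value is re-rooted under `/syncolor/policies/`, and a bare name falls back to the stock Autodesk policies. A sketch of the same decision table as a pure function:

```python
import os


def resolve_color_policy(color_policy):
    color_policy = color_policy or "Legacy"
    if os.path.exists(color_policy):
        return color_policy                     # custom dir on disk
    if "/" in color_policy:                     # e.g. "Shared/MyPolicy"
        color_policy = color_policy.replace("/syncolor/policies/", "")
        return "/syncolor/policies/{}".format(color_policy)
    return "/syncolor/policies/Autodesk/{}".format(color_policy)


print(resolve_color_policy("ACES 1.1"))
# -> /syncolor/policies/Autodesk/ACES 1.1
```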
@@ -34,119 +34,125 @@ class CollectTimelineInstances(pyblish.api.ContextPlugin):
    def process(self, context):
        project = context.data["flameProject"]
        sequence = context.data["flameSequence"]
+        selected_segments = context.data["flameSelectedSegments"]
+        self.log.debug("__ selected_segments: {}".format(selected_segments))

        self.otio_timeline = context.data["otioTimeline"]
        self.clips_in_reels = opfapi.get_clips_in_reels(project)
        self.fps = context.data["fps"]

-        # process all sellected
-        with opfapi.maintained_segment_selection(sequence) as segments:
-            for segment in segments:
+        for segment in selected_segments:
            # get openpype tag data
            marker_data = opfapi.get_segment_data_marker(segment)
            self.log.debug("__ marker_data: {}".format(
                pformat(marker_data)))

            if not marker_data:
                continue

            if marker_data.get("id") != "pyblish.avalon.instance":
                continue

+            self.log.debug("__ segment.name: {}".format(
+                segment.name
+            ))

            comment_attributes = self._get_comment_attributes(segment)
            self.log.debug("_ comment_attributes: {}".format(
                pformat(comment_attributes)))

            clip_data = opfapi.get_segment_attributes(segment)
            clip_name = clip_data["segment_name"]
            self.log.debug("clip_name: {}".format(clip_name))

            # get file path
            file_path = clip_data["fpath"]

            # get source clip
            source_clip = self._get_reel_clip(file_path)

            first_frame = opfapi.get_frame_from_filename(file_path) or 0

            head, tail = self._get_head_tail(clip_data, first_frame)

            # solve handles length
            marker_data["handleStart"] = min(
                marker_data["handleStart"], head)
            marker_data["handleEnd"] = min(
                marker_data["handleEnd"], tail)

            with_audio = bool(marker_data.pop("audio"))

            # add marker data to instance data
            inst_data = dict(marker_data.items())

            asset = marker_data["asset"]
            subset = marker_data["subset"]

            # insert family into families
            family = marker_data["family"]
            families = [str(f) for f in marker_data["families"]]
            families.insert(0, str(family))

            # form label
            label = asset
            if asset != clip_name:
                label += " ({})".format(clip_name)
-                label += " {}".format(subset)
-                label += " {}".format("[" + ", ".join(families) + "]")
+            label += " {} [{}]".format(subset, ", ".join(families))

            inst_data.update({
                "name": "{}_{}".format(asset, subset),
                "label": label,
                "asset": asset,
                "item": segment,
                "families": families,
                "publish": marker_data["publish"],
                "fps": self.fps,
                "flameSourceClip": source_clip,
                "sourceFirstFrame": int(first_frame),
                "path": file_path
            })

            # get otio clip data
            otio_data = self._get_otio_clip_instance_data(clip_data) or {}
            self.log.debug("__ otio_data: {}".format(pformat(otio_data)))

            # add to instance data
            inst_data.update(otio_data)
            self.log.debug("__ inst_data: {}".format(pformat(inst_data)))

            # add resolution
            self._get_resolution_to_data(inst_data, context)

            # add comment attributes if any
            inst_data.update(comment_attributes)

            # create instance
            instance = context.create_instance(**inst_data)

            # add colorspace data
            instance.data.update({
                "versionData": {
                    "colorspace": clip_data["colour_space"],
                }
            })

            # create shot instance for shot attributes create/update
            self._create_shot_instance(context, clip_name, **inst_data)

            self.log.info("Creating instance: {}".format(instance))
            self.log.info(
                "_ instance.data: {}".format(pformat(instance.data)))

            if not with_audio:
                continue

            # add audioReview attribute to plate instance data
            # if reviewTrack is on
            if marker_data.get("reviewTrack") is not None:
                instance.data["reviewAudio"] = True

    def _get_comment_attributes(self, segment):
        comment = segment.comment.get_value()
@@ -188,7 +194,7 @@ class CollectTimelineInstances(pyblish.api.ContextPlugin):

        # get pattern defined by type
        pattern = TXT_PATERN
-        if a_type in ("number" , "float"):
+        if a_type in ("number", "float"):
            pattern = NUM_PATERN

        res_goup = pattern.findall(value)
@@ -31,27 +31,28 @@ class CollecTimelineOTIO(pyblish.api.ContextPlugin):
        )

        # adding otio timeline to context
-        with opfapi.maintained_segment_selection(sequence):
+        with opfapi.maintained_segment_selection(sequence) as selected_seg:
            otio_timeline = flame_export.create_otio_timeline(sequence)

            instance_data = {
                "name": subset_name,
                "asset": asset_doc["name"],
                "subset": subset_name,
                "family": "workfile"
            }

            # create instance with workfile
            instance = context.create_instance(**instance_data)
            self.log.info("Creating instance: {}".format(instance))

            # update context with main project attributes
            context.data.update({
                "flameProject": project,
                "flameSequence": sequence,
                "otioTimeline": otio_timeline,
                "currentFile": "Flame/{}/{}".format(
                    project.name, sequence.name
                ),
+                "flameSelectedSegments": selected_seg,
                "fps": float(str(sequence.frame_rate)[:-4])
            })
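The change above depends on `maintained_segment_selection` yielding the selection it preserves, so the collector can store it in the context as `flameSelectedSegments`. A generic sketch of a context manager that both restores state and yields it to the `with` block:

```python
import contextlib

_selection = ["segment_a", "segment_b"]  # stand-in application state


@contextlib.contextmanager
def maintained_selection():
    """Yield the current selection and restore it afterwards."""
    before = list(_selection)
    try:
        yield before
    finally:
        _selection[:] = before  # restore even if the body raised


with maintained_selection() as selected:
    print(selected)  # ['segment_a', 'segment_b']
```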
@@ -7,14 +7,14 @@ import logging
import contextlib

import pyblish.api
import avalon.api

from openpype.api import Logger
from openpype.pipeline import (
    LegacyCreator,
    register_loader_plugin_path,
    register_creator_plugin_path,
    register_inventory_action_path,
    deregister_loader_plugin_path,
    deregister_creator_plugin_path,
    deregister_inventory_action_path,
    AVALON_CONTAINER_ID,
)
@@ -70,7 +70,7 @@ def install():
    log.info("Registering Fusion plug-ins..")

    register_loader_plugin_path(LOAD_PATH)
-    avalon.api.register_plugin_path(LegacyCreator, CREATE_PATH)
+    register_creator_plugin_path(CREATE_PATH)
    register_inventory_action_path(INVENTORY_PATH)

    pyblish.api.register_callback(
@@ -94,7 +94,7 @@ def uninstall():
    log.info("Deregistering Fusion plug-ins..")

    deregister_loader_plugin_path(LOAD_PATH)
-    avalon.api.deregister_plugin_path(LegacyCreator, CREATE_PATH)
+    deregister_creator_plugin_path(CREATE_PATH)
    deregister_inventory_action_path(INVENTORY_PATH)

    pyblish.api.deregister_callback(
@@ -1,13 +1,13 @@
import os

-from openpype.pipeline import create
+from openpype.pipeline import LegacyCreator
from openpype.hosts.fusion.api import (
    get_current_comp,
    comp_lock_and_undo_chunk
)


-class CreateOpenEXRSaver(create.LegacyCreator):
+class CreateOpenEXRSaver(LegacyCreator):

    name = "openexrDefault"
    label = "Create OpenEXR Saver"
@@ -6,14 +6,14 @@ from bson.objectid import ObjectId
import pyblish.api

from avalon import io
import avalon.api

from openpype import lib
from openpype.lib import register_event_callback
from openpype.pipeline import (
    LegacyCreator,
    register_loader_plugin_path,
    register_creator_plugin_path,
    deregister_loader_plugin_path,
    deregister_creator_plugin_path,
    AVALON_CONTAINER_ID,
)
import openpype.hosts.harmony
|
@ -108,9 +108,8 @@ def check_inventory():
|
|||
if not lib.any_outdated():
|
||||
return
|
||||
|
||||
host = avalon.api.registered_host()
|
||||
outdated_containers = []
|
||||
for container in host.ls():
|
||||
for container in ls():
|
||||
representation = container['representation']
|
||||
representation_doc = io.find_one(
|
||||
{
|
||||
|
|
@@ -186,7 +185,7 @@
    pyblish.api.register_host("harmony")
    pyblish.api.register_plugin_path(PUBLISH_PATH)
    register_loader_plugin_path(LOAD_PATH)
-    avalon.api.register_plugin_path(LegacyCreator, CREATE_PATH)
+    register_creator_plugin_path(CREATE_PATH)
    log.info(PUBLISH_PATH)

    # Register callbacks.
@@ -200,7 +199,7 @@ def install():
def uninstall():
    pyblish.api.deregister_plugin_path(PUBLISH_PATH)
    deregister_loader_plugin_path(LOAD_PATH)
-    avalon.api.deregister_plugin_path(LegacyCreator, CREATE_PATH)
+    deregister_creator_plugin_path(CREATE_PATH)


def on_pyblish_instance_toggled(instance, old_value, new_value):
@@ -10,7 +10,7 @@ def add_implementation_envs(env, _app):
    ]
    old_hiero_path = env.get("HIERO_PLUGIN_PATH") or ""
    for path in old_hiero_path.split(os.pathsep):
-        if not path or not os.path.exists(path):
+        if not path:
            continue

        norm_path = os.path.normpath(path)
@@ -5,13 +5,13 @@ import os
import contextlib
from collections import OrderedDict

from avalon import api as avalon
from avalon import schema
from pyblish import api as pyblish
from openpype.api import Logger
from openpype.pipeline import (
    LegacyCreator,
    register_creator_plugin_path,
    register_loader_plugin_path,
    deregister_creator_plugin_path,
    deregister_loader_plugin_path,
    AVALON_CONTAINER_ID,
)
@@ -50,7 +50,7 @@ def install():
    pyblish.register_host("hiero")
    pyblish.register_plugin_path(PUBLISH_PATH)
    register_loader_plugin_path(LOAD_PATH)
-    avalon.register_plugin_path(LegacyCreator, CREATE_PATH)
+    register_creator_plugin_path(CREATE_PATH)

    # register callback for switching publishable
    pyblish.register_callback("instanceToggled", on_pyblish_instance_toggled)
@@ -71,7 +71,7 @@ def uninstall():
    pyblish.deregister_host("hiero")
    pyblish.deregister_plugin_path(PUBLISH_PATH)
    deregister_loader_plugin_path(LOAD_PATH)
-    avalon.deregister_plugin_path(LegacyCreator, CREATE_PATH)
+    deregister_creator_plugin_path(CREATE_PATH)

    # register callback for switching publishable
    pyblish.deregister_callback("instanceToggled", on_pyblish_instance_toggled)
@@ -15,7 +15,7 @@ def add_implementation_envs(env, _app):
    old_houdini_menu_path = env.get("HOUDINI_MENU_PATH") or ""

    for path in old_houdini_path.split(os.pathsep):
-        if not path or not os.path.exists(path):
+        if not path:
            continue

        norm_path = os.path.normpath(path)

@@ -23,7 +23,7 @@ def add_implementation_envs(env, _app):
        new_houdini_path.append(norm_path)

    for path in old_houdini_menu_path.split(os.pathsep):
-        if not path or not os.path.exists(path):
+        if not path:
            continue

        norm_path = os.path.normpath(path)
@@ -11,7 +11,7 @@ import avalon.api
from avalon.lib import find_submodule

from openpype.pipeline import (
    LegacyCreator,
    register_creator_plugin_path,
    register_loader_plugin_path,
    AVALON_CONTAINER_ID,
)
@@ -54,7 +54,7 @@ def install():

    pyblish.api.register_plugin_path(PUBLISH_PATH)
    register_loader_plugin_path(LOAD_PATH)
-    avalon.api.register_plugin_path(LegacyCreator, CREATE_PATH)
+    register_creator_plugin_path(CREATE_PATH)

    log.info("Installing callbacks ... ")
    # register_event_callback("init", on_init)
@@ -9,7 +9,7 @@ def add_implementation_envs(env, _app):
    ]
    old_python_path = env.get("PYTHONPATH") or ""
    for path in old_python_path.split(os.pathsep):
-        if not path or not os.path.exists(path):
+        if not path:
            continue

        norm_path = os.path.normpath(path)
@@ -1511,7 +1511,7 @@ def get_container_members(container):

    members = cmds.sets(container, query=True) or []
    members = cmds.ls(members, long=True, objectsOnly=True) or []
-    members = set(members)
+    all_members = set(members)

    # Include any referenced nodes from any reference in the container
    # This is required since we've removed adding ALL nodes of a reference

@@ -1530,9 +1530,9 @@ def get_container_members(container):
        reference_members = cmds.ls(reference_members,
                                    long=True,
                                    objectsOnly=True)
-        members.update(reference_members)
+        all_members.update(reference_members)

-    return members
+    return list(all_members)


# region LOOKDEV
@@ -23,8 +23,10 @@ from openpype.pipeline import (
    LegacyCreator,
    register_loader_plugin_path,
    register_inventory_action_path,
    register_creator_plugin_path,
    deregister_loader_plugin_path,
    deregister_inventory_action_path,
    deregister_creator_plugin_path,
    AVALON_CONTAINER_ID,
)
from openpype.hosts.maya.lib import copy_workspace_mel

@@ -60,7 +62,7 @@ def install():
    pyblish.api.register_host("maya")

    register_loader_plugin_path(LOAD_PATH)
-    avalon.api.register_plugin_path(LegacyCreator, CREATE_PATH)
+    register_creator_plugin_path(CREATE_PATH)
    register_inventory_action_path(INVENTORY_PATH)
    log.info(PUBLISH_PATH)
@@ -189,7 +191,7 @@ def uninstall():
    pyblish.api.deregister_host("maya")

    deregister_loader_plugin_path(LOAD_PATH)
-    avalon.api.deregister_plugin_path(LegacyCreator, CREATE_PATH)
+    deregister_creator_plugin_path(CREATE_PATH)
    deregister_inventory_action_path(INVENTORY_PATH)

    menu.uninstall()
@@ -22,4 +22,6 @@ class CreateLook(plugin.Creator):
        self.data["maketx"] = self.make_tx

        # Enable users to force a copy.
        # - on Windows is "forceCopy" always changed to `True` because of
        #   windows implementation of hardlinks
        self.data["forceCopy"] = False
51	openpype/hosts/maya/plugins/create/create_multiverse_usd.py (Normal file)

@@ -0,0 +1,51 @@
+from openpype.hosts.maya.api import plugin, lib
+
+
+class CreateMultiverseUsd(plugin.Creator):
+    """Multiverse USD data"""
+
+    name = "usdMain"
+    label = "Multiverse USD"
+    family = "usd"
+    icon = "cubes"
+
+    def __init__(self, *args, **kwargs):
+        super(CreateMultiverseUsd, self).__init__(*args, **kwargs)
+
+        # Add animation data first, since it maintains order.
+        self.data.update(lib.collect_animation_data(True))
+
+        self.data["stripNamespaces"] = False
+        self.data["mergeTransformAndShape"] = False
+        self.data["writeAncestors"] = True
+        self.data["flattenParentXforms"] = False
+        self.data["writeSparseOverrides"] = False
+        self.data["useMetaPrimPath"] = False
+        self.data["customRootPath"] = ''
+        self.data["customAttributes"] = ''
+        self.data["nodeTypesToIgnore"] = ''
+        self.data["writeMeshes"] = True
+        self.data["writeCurves"] = True
+        self.data["writeParticles"] = True
+        self.data["writeCameras"] = False
+        self.data["writeLights"] = False
+        self.data["writeJoints"] = False
+        self.data["writeCollections"] = False
+        self.data["writePositions"] = True
+        self.data["writeNormals"] = True
+        self.data["writeUVs"] = True
+        self.data["writeColorSets"] = False
+        self.data["writeTangents"] = False
+        self.data["writeRefPositions"] = False
+        self.data["writeBlendShapes"] = False
+        self.data["writeDisplayColor"] = False
+        self.data["writeSkinWeights"] = False
+        self.data["writeMaterialAssignment"] = False
+        self.data["writeHardwareShader"] = False
+        self.data["writeShadingNetworks"] = False
+        self.data["writeTransformMatrix"] = True
+        self.data["writeUsdAttributes"] = False
+        self.data["timeVaryingTopology"] = False
+        self.data["customMaterialNamespace"] = ''
+        self.data["numTimeSamples"] = 1
+        self.data["timeSamplesSpan"] = 0.0
@@ -0,0 +1,23 @@
+from openpype.hosts.maya.api import plugin, lib
+
+
+class CreateMultiverseUsdComp(plugin.Creator):
+    """Create Multiverse USD Composition"""
+
+    name = "usdCompositionMain"
+    label = "Multiverse USD Composition"
+    family = "usdComposition"
+    icon = "cubes"
+
+    def __init__(self, *args, **kwargs):
+        super(CreateMultiverseUsdComp, self).__init__(*args, **kwargs)
+
+        # Add animation data first, since it maintains order.
+        self.data.update(lib.collect_animation_data(True))
+
+        self.data["stripNamespaces"] = False
+        self.data["mergeTransformAndShape"] = False
+        self.data["flattenContent"] = False
+        self.data["writePendingOverrides"] = False
+        self.data["numTimeSamples"] = 1
+        self.data["timeSamplesSpan"] = 0.0
@@ -0,0 +1,28 @@
+from openpype.hosts.maya.api import plugin, lib
+
+
+class CreateMultiverseUsdOver(plugin.Creator):
+    """Multiverse USD data"""
+
+    name = "usdOverrideMain"
+    label = "Multiverse USD Override"
+    family = "usdOverride"
+    icon = "cubes"
+
+    def __init__(self, *args, **kwargs):
+        super(CreateMultiverseUsdOver, self).__init__(*args, **kwargs)
+
+        # Add animation data first, since it maintains order.
+        self.data.update(lib.collect_animation_data(True))
+
+        self.data["writeAll"] = False
+        self.data["writeTransforms"] = True
+        self.data["writeVisibility"] = True
+        self.data["writeAttributes"] = True
+        self.data["writeMaterials"] = True
+        self.data["writeVariants"] = True
+        self.data["writeVariantsDefinition"] = True
+        self.data["writeActiveState"] = True
+        self.data["writeNamespaces"] = False
+        self.data["numTimeSamples"] = 1
+        self.data["timeSamplesSpan"] = 0.0
@@ -15,6 +15,14 @@ class CreateReview(plugin.Creator):
    keepImages = False
    isolate = False
    imagePlane = True
+    transparency = [
+        "preset",
+        "simple",
+        "object sorting",
+        "weighted average",
+        "depth peeling",
+        "alpha cut"
+    ]

    def __init__(self, *args, **kwargs):
        super(CreateReview, self).__init__(*args, **kwargs)

@@ -28,5 +36,6 @@ class CreateReview(plugin.Creator):
        data["isolate"] = self.isolate
        data["keepImages"] = self.keepImages
        data["imagePlane"] = self.imagePlane
+        data["transparency"] = self.transparency

        self.data = data
102	openpype/hosts/maya/plugins/load/load_multiverse_usd.py (Normal file)

@@ -0,0 +1,102 @@
+# -*- coding: utf-8 -*-
+import maya.cmds as cmds
+
+from openpype.pipeline import (
+    load,
+    get_representation_path
+)
+from openpype.hosts.maya.api.lib import (
+    maintained_selection,
+    namespaced,
+    unique_namespace
+)
+from openpype.hosts.maya.api.pipeline import containerise
+
+
+class MultiverseUsdLoader(load.LoaderPlugin):
+    """Load the USD by Multiverse"""
+
+    families = ["model", "usd", "usdComposition", "usdOverride",
+                "pointcache", "animation"]
+    representations = ["usd", "usda", "usdc", "usdz", "abc"]
+
+    label = "Read USD by Multiverse"
+    order = -10
+    icon = "code-fork"
+    color = "orange"
+
+    def load(self, context, name=None, namespace=None, options=None):
+
+        asset = context['asset']['name']
+        namespace = namespace or unique_namespace(
+            asset + "_",
+            prefix="_" if asset[0].isdigit() else "",
+            suffix="_",
+        )
+
+        # Create the shape
+        cmds.loadPlugin("MultiverseForMaya", quiet=True)
+
+        shape = None
+        transform = None
+        with maintained_selection():
+            cmds.namespace(addNamespace=namespace)
+            with namespaced(namespace, new=False):
+                import multiverse
+                shape = multiverse.CreateUsdCompound(self.fname)
+                transform = cmds.listRelatives(
+                    shape, parent=True, fullPath=True)[0]
+
+        # Lock the shape node so the user cannot delete it.
+        cmds.lockNode(shape, lock=True)
+
+        nodes = [transform, shape]
+        self[:] = nodes
+
+        return containerise(
+            name=name,
+            namespace=namespace,
+            nodes=nodes,
+            context=context,
+            loader=self.__class__.__name__)
+
+    def update(self, container, representation):
+        # type: (dict, dict) -> None
+        """Update container with specified representation."""
+        node = container['objectName']
+        assert cmds.objExists(node), "Missing container"
+
+        members = cmds.sets(node, query=True) or []
+        shapes = cmds.ls(members, type="mvUsdCompoundShape")
+        assert shapes, "Cannot find mvUsdCompoundShape in container"
+
+        path = get_representation_path(representation)
+
+        import multiverse
+        for shape in shapes:
+            multiverse.SetUsdCompoundAssetPaths(shape, [path])
+
+        cmds.setAttr("{}.representation".format(node),
+                     str(representation["_id"]),
+                     type="string")
+
+    def switch(self, container, representation):
+        self.update(container, representation)
+
+    def remove(self, container):
+        # type: (dict) -> None
+        """Remove loaded container."""
+        # Delete container and its contents
+        if cmds.objExists(container['objectName']):
+            members = cmds.sets(container['objectName'], query=True) or []
+            cmds.delete([container['objectName']] + members)
+
+        # Remove the namespace, if empty
+        namespace = container['namespace']
+        if cmds.namespace(exists=namespace):
+            members = cmds.namespaceInfo(namespace, listNamespace=True)
+            if not members:
+                cmds.namespace(removeNamespace=namespace)
+            else:
+                self.log.warning("Namespace not deleted because it "
+                                 "still has members: %s", namespace)
@@ -4,6 +4,7 @@ import os
import sys
import json
import tempfile
+import platform
import contextlib
import subprocess
from collections import OrderedDict
@@ -334,7 +335,14 @@ class ExtractLook(openpype.api.Extractor):
        transfers = []
        hardlinks = []
        hashes = {}
-        force_copy = instance.data.get("forceCopy", False)
+
+        # Temporary fix to NOT create hardlinks on windows machines
+        if platform.system().lower() == "windows":
+            self.log.info(
+                "Forcing copy instead of hardlink due to issues on Windows..."
+            )
+            force_copy = True
+        else:
+            force_copy = instance.data.get("forceCopy", False)

        for filepath in files_metadata:
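The hunk above forces plain copies whenever the look extractor runs on Windows, where hardlinks are restricted (same NTFS volume, permissions). A sketch of a link-with-copy-fallback helper built on the same idea — this is an illustration, not the extractor's actual transfer code:

```python
import os
import platform
import shutil


def transfer(src, dst):
    """Hardlink ``src`` to ``dst``, copying when linking is unsafe."""
    if platform.system().lower() == "windows":
        shutil.copy2(src, dst)   # hardlinks are problematic here
        return
    try:
        os.link(src, dst)        # cheap same-volume hardlink
    except OSError:
        shutil.copy2(src, dst)   # cross-device or unsupported filesystem
```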
210	openpype/hosts/maya/plugins/publish/extract_multiverse_usd.py (Normal file)

@@ -0,0 +1,210 @@
import os

import six

from maya import cmds

import openpype.api
from openpype.hosts.maya.api.lib import maintained_selection


class ExtractMultiverseUsd(openpype.api.Extractor):
    """Extractor for USD by Multiverse."""

    label = "Extract Multiverse USD"
    hosts = ["maya"]
    families = ["usd"]

    @property
    def options(self):
        """Overridable options for Multiverse USD Export

        Given in the following format
            - {NAME: EXPECTED TYPE}

        If the overridden option's type does not match,
        the option is not included and a warning is logged.
        """

        return {
            "stripNamespaces": bool,
            "mergeTransformAndShape": bool,
            "writeAncestors": bool,
            "flattenParentXforms": bool,
            "writeSparseOverrides": bool,
            "useMetaPrimPath": bool,
            "customRootPath": str,
            "customAttributes": str,
            "nodeTypesToIgnore": str,
            "writeMeshes": bool,
            "writeCurves": bool,
            "writeParticles": bool,
            "writeCameras": bool,
            "writeLights": bool,
            "writeJoints": bool,
            "writeCollections": bool,
            "writePositions": bool,
            "writeNormals": bool,
            "writeUVs": bool,
            "writeColorSets": bool,
            "writeTangents": bool,
            "writeRefPositions": bool,
            "writeBlendShapes": bool,
            "writeDisplayColor": bool,
            "writeSkinWeights": bool,
            "writeMaterialAssignment": bool,
            "writeHardwareShader": bool,
            "writeShadingNetworks": bool,
            "writeTransformMatrix": bool,
            "writeUsdAttributes": bool,
            "timeVaryingTopology": bool,
            "customMaterialNamespace": str,
            "numTimeSamples": int,
            "timeSamplesSpan": float
        }

    @property
    def default_options(self):
        """The default options for Multiverse USD extraction."""

        return {
            "stripNamespaces": False,
            "mergeTransformAndShape": False,
            "writeAncestors": True,
            "flattenParentXforms": False,
            "writeSparseOverrides": False,
            "useMetaPrimPath": False,
            "customRootPath": str(),
            "customAttributes": str(),
            "nodeTypesToIgnore": str(),
            "writeMeshes": True,
            "writeCurves": True,
            "writeParticles": True,
            "writeCameras": False,
            "writeLights": False,
            "writeJoints": False,
            "writeCollections": False,
            "writePositions": True,
            "writeNormals": True,
            "writeUVs": True,
            "writeColorSets": False,
            "writeTangents": False,
            "writeRefPositions": False,
            "writeBlendShapes": False,
            "writeDisplayColor": False,
            "writeSkinWeights": False,
            "writeMaterialAssignment": False,
            "writeHardwareShader": False,
            "writeShadingNetworks": False,
            "writeTransformMatrix": True,
            "writeUsdAttributes": False,
            "timeVaryingTopology": False,
            "customMaterialNamespace": str(),
            "numTimeSamples": 1,
            "timeSamplesSpan": 0.0
        }

    def parse_overrides(self, instance, options):
        """Inspect data of instance to determine overridden options"""

        for key in instance.data:
            if key not in self.options:
                continue

            # Ensure the data is of correct type
            value = instance.data[key]
            if isinstance(value, six.text_type):
                value = str(value)
            if not isinstance(value, self.options[key]):
                self.log.warning(
                    "Overridden attribute {key} was of "
                    "the wrong type: {invalid_type} "
                    "- should have been {valid_type}".format(
                        key=key,
                        invalid_type=type(value).__name__,
                        valid_type=self.options[key].__name__))
                continue

            options[key] = value

        return options

    def process(self, instance):
        # Load plugin first
        cmds.loadPlugin("MultiverseForMaya", quiet=True)

        # Define output file path
        staging_dir = self.staging_dir(instance)
        file_name = "{}.usd".format(instance.name)
        file_path = os.path.join(staging_dir, file_name)
        file_path = file_path.replace('\\', '/')

        # Parse export options
        options = self.default_options
        options = self.parse_overrides(instance, options)
        self.log.info("Export options: {0}".format(options))

        # Perform extraction
        self.log.info("Performing extraction ...")

        with maintained_selection():
            members = instance.data("setMembers")
            members = cmds.ls(members,
                              dag=True,
                              shapes=True,
                              type=("mesh"),
                              noIntermediate=True,
                              long=True)
            self.log.info('Collected object {}'.format(members))

            import multiverse

            time_opts = None
            frame_start = instance.data['frameStart']
            frame_end = instance.data['frameEnd']
            handle_start = instance.data['handleStart']
            handle_end = instance.data['handleEnd']
            step = instance.data['step']
            fps = instance.data['fps']
            if frame_end != frame_start:
                time_opts = multiverse.TimeOptions()

                time_opts.writeTimeRange = True
                time_opts.frameRange = (
                    frame_start - handle_start, frame_end + handle_end)
                time_opts.frameIncrement = step
                time_opts.numTimeSamples = instance.data["numTimeSamples"]
                time_opts.timeSamplesSpan = instance.data["timeSamplesSpan"]
                time_opts.framePerSecond = fps

            asset_write_opts = multiverse.AssetWriteOptions(time_opts)
            options_discard_keys = {
                'numTimeSamples',
                'timeSamplesSpan',
                'frameStart',
                'frameEnd',
                'handleStart',
                'handleEnd',
                'step',
                'fps'
            }
            for key, value in options.items():
                if key in options_discard_keys:
                    continue
                setattr(asset_write_opts, key, value)

            multiverse.WriteAsset(file_path, members, asset_write_opts)

        if "representations" not in instance.data:
            instance.data["representations"] = []

        representation = {
            'name': 'usd',
            'ext': 'usd',
            'files': file_name,
            "stagingDir": staging_dir
        }
        instance.data["representations"].append(representation)

        self.log.info("Extracted instance {} to {}".format(
            instance.name, file_path))
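To make the override mechanics above concrete, here is a minimal standalone sketch of what `parse_overrides` effectively does, with a plain dict standing in for the pyblish instance data; the option names match the extractor, the values are invented:

```python
# Defaults and expected types mirror the extractor's two properties.
defaults = {"writeMeshes": True, "numTimeSamples": 1}
expected = {"writeMeshes": bool, "numTimeSamples": int}

# Hypothetical instance data: one valid override, one wrong type,
# one unknown key.
instance_data = {"writeMeshes": False, "numTimeSamples": "3", "other": 1}

options = dict(defaults)
for key, value in instance_data.items():
    if key not in expected:
        continue  # "other" is not an exposed option, ignored
    if not isinstance(value, expected[key]):
        continue  # "3" is a str, not an int: default kept, warning logged
    options[key] = value

assert options == {"writeMeshes": False, "numTimeSamples": 1}
```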
@@ -0,0 +1,151 @@
import os

from maya import cmds

import openpype.api
from openpype.hosts.maya.api.lib import maintained_selection


class ExtractMultiverseUsdComposition(openpype.api.Extractor):
    """Extractor of Multiverse USD Composition."""

    label = "Extract Multiverse USD Composition"
    hosts = ["maya"]
    families = ["usdComposition"]

    @property
    def options(self):
        """Overridable options for Multiverse USD Export

        Given in the following format
            - {NAME: EXPECTED TYPE}

        If the overridden option's type does not match,
        the option is not included and a warning is logged.
        """

        return {
            "stripNamespaces": bool,
            "mergeTransformAndShape": bool,
            "flattenContent": bool,
            "writePendingOverrides": bool,
            "numTimeSamples": int,
            "timeSamplesSpan": float
        }

    @property
    def default_options(self):
        """The default options for Multiverse USD extraction."""

        return {
            "stripNamespaces": True,
            "mergeTransformAndShape": False,
            "flattenContent": False,
            "writePendingOverrides": False,
            "numTimeSamples": 1,
            "timeSamplesSpan": 0.0
        }

    def parse_overrides(self, instance, options):
        """Inspect data of instance to determine overridden options"""

        for key in instance.data:
            if key not in self.options:
                continue

            # Ensure the data is of correct type
            value = instance.data[key]
            if not isinstance(value, self.options[key]):
                self.log.warning(
                    "Overridden attribute {key} was of "
                    "the wrong type: {invalid_type} "
                    "- should have been {valid_type}".format(
                        key=key,
                        invalid_type=type(value).__name__,
                        valid_type=self.options[key].__name__))
                continue

            options[key] = value

        return options

    def process(self, instance):
        # Load plugin first
        cmds.loadPlugin("MultiverseForMaya", quiet=True)

        # Define output file path
        staging_dir = self.staging_dir(instance)
        file_name = "{}.usd".format(instance.name)
        file_path = os.path.join(staging_dir, file_name)
        file_path = file_path.replace('\\', '/')

        # Parse export options
        options = self.default_options
        options = self.parse_overrides(instance, options)
        self.log.info("Export options: {0}".format(options))

        # Perform extraction
        self.log.info("Performing extraction ...")

        with maintained_selection():
            members = instance.data("setMembers")
            members = cmds.ls(members,
                              dag=True,
                              shapes=True,
                              type="mvUsdCompoundShape",
                              noIntermediate=True,
                              long=True)
            self.log.info('Collected object {}'.format(members))

            import multiverse

            time_opts = None
            frame_start = instance.data['frameStart']
            frame_end = instance.data['frameEnd']
            handle_start = instance.data['handleStart']
            handle_end = instance.data['handleEnd']
            step = instance.data['step']
            fps = instance.data['fps']
            if frame_end != frame_start:
                time_opts = multiverse.TimeOptions()

                time_opts.writeTimeRange = True
                time_opts.frameRange = (
                    frame_start - handle_start, frame_end + handle_end)
                time_opts.frameIncrement = step
                time_opts.numTimeSamples = instance.data["numTimeSamples"]
                time_opts.timeSamplesSpan = instance.data["timeSamplesSpan"]
                time_opts.framePerSecond = fps

            comp_write_opts = multiverse.CompositionWriteOptions()
            options_discard_keys = {
                'numTimeSamples',
                'timeSamplesSpan',
                'frameStart',
                'frameEnd',
                'handleStart',
                'handleEnd',
                'step',
                'fps'
            }
            for key, value in options.items():
                if key in options_discard_keys:
                    continue
                setattr(comp_write_opts, key, value)

            multiverse.WriteComposition(file_path, members, comp_write_opts)

        if "representations" not in instance.data:
            instance.data["representations"] = []

        representation = {
            'name': 'usd',
            'ext': 'usd',
            'files': file_name,
            "stagingDir": staging_dir
        }
        instance.data["representations"].append(representation)

        self.log.info("Extracted instance {} to {}".format(
            instance.name, file_path))
@@ -0,0 +1,139 @@
import os

import openpype.api
from openpype.hosts.maya.api.lib import maintained_selection

from maya import cmds


class ExtractMultiverseUsdOverride(openpype.api.Extractor):
    """Extractor for USD Override by Multiverse."""

    label = "Extract Multiverse USD Override"
    hosts = ["maya"]
    families = ["usdOverride"]

    @property
    def options(self):
        """Overridable options for Multiverse USD Export

        Given in the following format
            - {NAME: EXPECTED TYPE}

        If the overridden option's type does not match,
        the option is not included and a warning is logged.
        """

        return {
            "writeAll": bool,
            "writeTransforms": bool,
            "writeVisibility": bool,
            "writeAttributes": bool,
            "writeMaterials": bool,
            "writeVariants": bool,
            "writeVariantsDefinition": bool,
            "writeActiveState": bool,
            "writeNamespaces": bool,
            "numTimeSamples": int,
            "timeSamplesSpan": float
        }

    @property
    def default_options(self):
        """The default options for Multiverse USD extraction."""

        return {
            "writeAll": False,
            "writeTransforms": True,
            "writeVisibility": True,
            "writeAttributes": True,
            "writeMaterials": True,
            "writeVariants": True,
            "writeVariantsDefinition": True,
            "writeActiveState": True,
            "writeNamespaces": False,
            "numTimeSamples": 1,
            "timeSamplesSpan": 0.0
        }

    def process(self, instance):
        # Load plugin first
        cmds.loadPlugin("MultiverseForMaya", quiet=True)

        # Define output file path
        staging_dir = self.staging_dir(instance)
        file_name = "{}.usda".format(instance.name)
        file_path = os.path.join(staging_dir, file_name)
        file_path = file_path.replace("\\", "/")

        # Parse export options
        options = self.default_options
        self.log.info("Export options: {0}".format(options))

        # Perform extraction
        self.log.info("Performing extraction ...")

        with maintained_selection():
            members = instance.data("setMembers")
            members = cmds.ls(members,
                              dag=True,
                              shapes=True,
                              type="mvUsdCompoundShape",
                              noIntermediate=True,
                              long=True)
            self.log.info("Collected object {}".format(members))

            # TODO: Deal with asset, composition, override with options.
            import multiverse

            time_opts = None
            frame_start = instance.data["frameStart"]
            frame_end = instance.data["frameEnd"]
            handle_start = instance.data["handleStart"]
            handle_end = instance.data["handleEnd"]
            step = instance.data["step"]
            fps = instance.data["fps"]
            if frame_end != frame_start:
                time_opts = multiverse.TimeOptions()

                time_opts.writeTimeRange = True
                time_opts.frameRange = (
                    frame_start - handle_start, frame_end + handle_end)
                time_opts.frameIncrement = step
                time_opts.numTimeSamples = instance.data["numTimeSamples"]
                time_opts.timeSamplesSpan = instance.data["timeSamplesSpan"]
                time_opts.framePerSecond = fps

            over_write_opts = multiverse.OverridesWriteOptions(time_opts)
            options_discard_keys = {
                "numTimeSamples",
                "timeSamplesSpan",
                "frameStart",
                "frameEnd",
                "handleStart",
                "handleEnd",
                "step",
                "fps"
            }
            for key, value in options.items():
                if key in options_discard_keys:
                    continue
                setattr(over_write_opts, key, value)

            for member in members:
                multiverse.WriteOverrides(file_path, member, over_write_opts)

        if "representations" not in instance.data:
            instance.data["representations"] = []

        representation = {
            "name": "usd",
            "ext": "usd",
            "files": file_name,
            "stagingDir": staging_dir
        }
        instance.data["representations"].append(representation)

        self.log.info("Extracted instance {} to {}".format(
            instance.name, file_path))
@@ -73,6 +73,11 @@ class ExtractPlayblast(openpype.api.Extractor):
        pm.currentTime(refreshFrameInt - 1, edit=True)
        pm.currentTime(refreshFrameInt, edit=True)

+        # Override transparency if requested.
+        transparency = instance.data.get("transparency", 0)
+        if transparency != 0:
+            preset["viewport2_options"]["transparencyAlgorithm"] = transparency
+
        # Isolate view is requested by having objects in the set besides a
        # camera.
        if preset.pop("isolate_view", False) and instance.data.get("isolate"):
@@ -10,7 +10,7 @@ def add_implementation_envs(env, _app):
    ]
    old_nuke_path = env.get("NUKE_PATH") or ""
    for path in old_nuke_path.split(os.pathsep):
-        if not path or not os.path.exists(path):
+        if not path:
            continue

        norm_path = os.path.normpath(path)
@@ -26,6 +26,7 @@ from openpype.tools.utils import host_tools
from openpype.lib.path_tools import HostDirmap
from openpype.settings import get_project_settings
from openpype.modules import ModulesManager
+from openpype.pipeline import discover_legacy_creator_plugins

from .workio import (
    save_file,
@@ -1902,7 +1903,7 @@ def recreate_instance(origin_node, avalon_data=None):
    # create new node
    # get appropriate plugin class
    creator_plugin = None
-    for Creator in api.discover(api.Creator):
+    for Creator in discover_legacy_creator_plugins():
        if Creator.__name__ == data["creator"]:
            creator_plugin = Creator
            break
@@ -5,7 +5,6 @@ from collections import OrderedDict
import nuke

import pyblish.api
-import avalon.api

import openpype
from openpype.api import (
@@ -15,10 +14,11 @@ from openpype.api import (
)
from openpype.lib import register_event_callback
from openpype.pipeline import (
-    LegacyCreator,
    register_loader_plugin_path,
+    register_creator_plugin_path,
    register_inventory_action_path,
    deregister_loader_plugin_path,
+    deregister_creator_plugin_path,
    deregister_inventory_action_path,
    AVALON_CONTAINER_ID,
)
@@ -106,7 +106,7 @@ def install():
    log.info("Registering Nuke plug-ins..")
    pyblish.api.register_plugin_path(PUBLISH_PATH)
    register_loader_plugin_path(LOAD_PATH)
-    avalon.api.register_plugin_path(LegacyCreator, CREATE_PATH)
+    register_creator_plugin_path(CREATE_PATH)
    register_inventory_action_path(INVENTORY_PATH)

    # Register Avalon event for workfiles loading.
@@ -132,7 +132,7 @@ def uninstall():
    pyblish.deregister_host("nuke")
    pyblish.api.deregister_plugin_path(PUBLISH_PATH)
    deregister_loader_plugin_path(LOAD_PATH)
-    avalon.api.deregister_plugin_path(LegacyCreator, CREATE_PATH)
+    deregister_creator_plugin_path(CREATE_PATH)
    deregister_inventory_action_path(INVENTORY_PATH)

    pyblish.api.deregister_callback(
@@ -450,6 +450,7 @@ class ExporterReviewMov(ExporterReview):

    def generate_mov(self, farm=False, **kwargs):
        self.publish_on_farm = farm
+        read_raw = kwargs["read_raw"]
        reformat_node_add = kwargs["reformat_node_add"]
        reformat_node_config = kwargs["reformat_node_config"]
        bake_viewer_process = kwargs["bake_viewer_process"]
@@ -484,6 +485,9 @@ class ExporterReviewMov(ExporterReview):
            r_node["origlast"].setValue(self.last_frame)
            r_node["colorspace"].setValue(self.write_colorspace)

+            if read_raw:
+                r_node["raw"].setValue(1)
+
            # connect
            self._temp_nodes[subset].append(r_node)
            self.previous_node = r_node
@@ -72,7 +72,7 @@ class LoadEffects(load.LoaderPlugin):
        # getting data from json file with unicode conversion
        with open(file, "r") as f:
            json_f = {self.byteify(key): self.byteify(value)
-                      for key, value in json.load(f).iteritems()}
+                      for key, value in json.load(f).items()}

        # get correct order of nodes by positions on track and subtrack
        nodes_order = self.reorder_nodes(json_f)
@@ -188,7 +188,7 @@ class LoadEffects(load.LoaderPlugin):
        # getting data from json file with unicode conversion
        with open(file, "r") as f:
            json_f = {self.byteify(key): self.byteify(value)
-                      for key, value in json.load(f).iteritems()}
+                      for key, value in json.load(f).items()}

        # get correct order of nodes by positions on track and subtrack
        nodes_order = self.reorder_nodes(json_f)
@@ -330,11 +330,11 @@ class LoadEffects(load.LoaderPlugin):

        if isinstance(input, dict):
            return {self.byteify(key): self.byteify(value)
-                    for key, value in input.iteritems()}
+                    for key, value in input.items()}
        elif isinstance(input, list):
            return [self.byteify(element) for element in input]
        elif isinstance(input, unicode):
            return input.encode('utf-8')
        elif isinstance(input, str):
            return str(input)
        else:
            return input
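One note on the `byteify` helpers touched above (the same pattern repeats in the next two loaders): switching `iteritems()` to `items()` makes the comprehensions Python 3 compatible, but the `isinstance(input, unicode)` branch still only exists on Python 2. A version-agnostic variant could lean on `six`, roughly like this sketch; the `six` usage is an assumption on my part, not what the commit does:

```python
import six


def byteify(value):
    # Recursively walk dicts and lists, normalizing text values.
    if isinstance(value, dict):
        return {byteify(key): byteify(val) for key, val in value.items()}
    if isinstance(value, list):
        return [byteify(element) for element in value]
    if six.PY2 and isinstance(value, six.text_type):
        return value.encode("utf-8")  # narrow to bytes only on Python 2
    return value
```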
@@ -74,7 +74,7 @@ class LoadEffectsInputProcess(load.LoaderPlugin):
        # getting data from json file with unicode conversion
        with open(file, "r") as f:
            json_f = {self.byteify(key): self.byteify(value)
-                      for key, value in json.load(f).iteritems()}
+                      for key, value in json.load(f).items()}

        # get correct order of nodes by positions on track and subtrack
        nodes_order = self.reorder_nodes(json_f)
@@ -194,7 +194,7 @@ class LoadEffectsInputProcess(load.LoaderPlugin):
        # getting data from json file with unicode conversion
        with open(file, "r") as f:
            json_f = {self.byteify(key): self.byteify(value)
-                      for key, value in json.load(f).iteritems()}
+                      for key, value in json.load(f).items()}

        # get correct order of nodes by positions on track and subtrack
        nodes_order = self.reorder_nodes(json_f)
@@ -350,11 +350,11 @@ class LoadEffectsInputProcess(load.LoaderPlugin):

        if isinstance(input, dict):
            return {self.byteify(key): self.byteify(value)
-                    for key, value in input.iteritems()}
+                    for key, value in input.items()}
        elif isinstance(input, list):
            return [self.byteify(element) for element in input]
        elif isinstance(input, unicode):
            return input.encode('utf-8')
        elif isinstance(input, str):
            return str(input)
        else:
            return input
@@ -240,7 +240,7 @@ class LoadGizmoInputProcess(load.LoaderPlugin):

        if isinstance(input, dict):
            return {self.byteify(key): self.byteify(value)
-                    for key, value in input.iteritems()}
+                    for key, value in input.items()}
        elif isinstance(input, list):
            return [self.byteify(element) for element in input]
        elif isinstance(input, unicode):

openpype/hosts/nuke/plugins/publish/extract_review_data.py  (new file, 47 lines)
@@ -0,0 +1,47 @@
import os
import pyblish.api
import openpype
from pprint import pformat


class ExtractReviewData(openpype.api.Extractor):
    """Extracts review tag into available representation
    """

    order = pyblish.api.ExtractorOrder + 0.01
    # order = pyblish.api.CollectorOrder + 0.499
    label = "Extract Review Data"

    families = ["review"]
    hosts = ["nuke"]

    def process(self, instance):
        fpath = instance.data["path"]
        ext = os.path.splitext(fpath)[-1][1:]

        representations = instance.data.get("representations", [])

        # review can be removed since `ProcessSubmittedJobOnFarm` will create
        # reviewable representation if needed
        if (
            "render.farm" in instance.data["families"]
            and "review" in instance.data["families"]
        ):
            instance.data["families"].remove("review")

        # iterate representations and add `review` tag
        for repre in representations:
            if ext != repre["ext"]:
                continue

            if not repre.get("tags"):
                repre["tags"] = []

            if "review" not in repre["tags"]:
                repre["tags"].append("review")

            self.log.debug("Matching representation: {}".format(
                pformat(repre)
            ))

        instance.data["representations"] = representations
@@ -24,7 +24,11 @@ class ExtractReviewDataMov(openpype.api.Extractor):
    outputs = {}

    def process(self, instance):
-        families = instance.data["families"]
+        families = set(instance.data["families"])
+
+        # add main family to make sure all families are compared
+        families.add(instance.data["family"])

        task_type = instance.context.data["taskType"]
        subset = instance.data["subset"]
        self.log.info("Creating staging dir...")
@@ -50,51 +54,31 @@ class ExtractReviewDataMov(openpype.api.Extractor):
            f_task_types = o_data["filter"]["task_types"]
            f_subsets = o_data["filter"]["sebsets"]

            self.log.debug(
                "f_families `{}` > families: {}".format(
                    f_families, families))
            self.log.debug(
                "f_task_types `{}` > task_type: {}".format(
                    f_task_types, task_type))
            self.log.debug(
                "f_subsets `{}` > subset: {}".format(
                    f_subsets, subset))

-            # test if family found in context
-            test_families = any([
-                # first if exact family set is matching
-                # make sure only intersection of list is correct
-                bool(set(families).intersection(f_families)),
-                # and if families are set at all
-                # if not then return True because we want this preset
-                # to be active if nothing is set
-                bool(not f_families)
-            ])
+            # using intersection to make sure all defined
+            # families are present in combination
+            if f_families and not families.intersection(f_families):
+                continue

-            # test task types from filter
-            test_task_types = any([
-                # check if actual task type is defined in task types
-                # set in preset's filter
-                bool(task_type in f_task_types),
-                # and if taskTypes are defined in preset filter
-                # if not then return True, because we want this filter
-                # to be active if no taskType is set
-                bool(not f_task_types)
-            ])
+            if f_task_types and task_type not in f_task_types:
+                continue

-            # test subsets from filter
-            test_subsets = any([
-                # check if any of subset filter inputs
-                # converted to regex pattern is not found in subset
-                # we keep strict case sensitivity
-                bool(next((
-                    s for s in f_subsets
-                    if re.search(re.compile(s), subset)
-                ), None)),
-                # but if no subsets were set then make this accountable too
-                bool(not f_subsets)
-            ])
-
-            # we need all filters to be positive for this
-            # preset to be activated
-            test_all = all([
-                test_families,
-                test_task_types,
-                test_subsets
-            ])
-
-            # if it is not positive then skip this preset
-            if not test_all:
+            if f_subsets and not any(
+                    re.search(s, subset) for s in f_subsets):
                continue

            self.log.info(
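The rewritten filtering reduces each preset test to an early `continue`, with an empty filter list meaning "no restriction". The same rule as a standalone sketch, with illustrative names and values only:

```python
import re


def preset_matches(families, task_type, subset,
                   f_families, f_task_types, f_subsets):
    # An empty filter list means "no restriction" for that dimension.
    if f_families and not set(families).intersection(f_families):
        return False
    if f_task_types and task_type not in f_task_types:
        return False
    if f_subsets and not any(re.search(s, subset) for s in f_subsets):
        return False
    return True


# Matches: family intersects, task type is listed, and the subset
# matches one of the regex patterns.
assert preset_matches(
    {"render", "review"}, "Compositing", "renderMain",
    ["review"], ["Compositing"], ["^render.*"])
```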
|
|||
|
|
@ -25,7 +25,7 @@ class RepairNukeWriteDeadlineTab(pyblish.api.Action):
|
|||
|
||||
# Remove existing knobs.
|
||||
knob_names = openpype.hosts.nuke.lib.get_deadline_knob_names()
|
||||
for name, knob in group_node.knobs().iteritems():
|
||||
for name, knob in group_node.knobs().items():
|
||||
if name in knob_names:
|
||||
group_node.removeKnob(knob)
|
||||
|
||||
|
|
|
|||
|
|
@@ -1,11 +1,10 @@
import os
import toml

import nuke

-from avalon import api
import pyblish.api
import openpype.api
+from openpype.pipeline import discover_creator_plugins
from openpype.hosts.nuke.api.lib import get_avalon_knob_data
@@ -79,7 +78,7 @@ class ValidateWriteLegacy(pyblish.api.InstancePlugin):

        # get appropriate plugin class
        creator_plugin = None
-        for Creator in api.discover(api.Creator):
+        for Creator in discover_creator_plugins():
            if Creator.__name__ != Create_name:
                continue
@@ -9,9 +9,10 @@ from avalon import io
from openpype.api import Logger
from openpype.lib import register_event_callback
from openpype.pipeline import (
-    LegacyCreator,
    register_loader_plugin_path,
+    register_creator_plugin_path,
    deregister_loader_plugin_path,
+    deregister_creator_plugin_path,
    AVALON_CONTAINER_ID,
)
import openpype.hosts.photoshop
@@ -75,7 +76,7 @@ def install():

    pyblish.api.register_plugin_path(PUBLISH_PATH)
    register_loader_plugin_path(LOAD_PATH)
-    avalon.api.register_plugin_path(LegacyCreator, CREATE_PATH)
+    register_creator_plugin_path(CREATE_PATH)
    log.info(PUBLISH_PATH)

    pyblish.api.register_callback(
@@ -88,7 +89,7 @@ def install():
def uninstall():
    pyblish.api.deregister_plugin_path(PUBLISH_PATH)
    deregister_loader_plugin_path(LOAD_PATH)
-    avalon.api.deregister_plugin_path(LegacyCreator, CREATE_PATH)
+    deregister_creator_plugin_path(CREATE_PATH)


def ls():
@@ -1,9 +1,9 @@
from Qt import QtWidgets
-from openpype.pipeline import create
+from openpype.pipeline import LegacyCreator
from openpype.hosts.photoshop import api as photoshop


-class CreateImage(create.LegacyCreator):
+class CreateImage(LegacyCreator):
    """Image folder for publish."""

    name = "imageDefault"
@@ -1,6 +1,9 @@
+from avalon import api
import pyblish.api

+from openpype.settings import get_project_settings
from openpype.hosts.photoshop import api as photoshop
+from openpype.lib import prepare_template_data


class CollectInstances(pyblish.api.ContextPlugin):
@@ -9,6 +12,10 @@ class CollectInstances(pyblish.api.ContextPlugin):
    This collector takes into account assets that are associated with
    a LayerSet and marked with a unique identifier;

+    If no image instances are explicitly created, it checks if there is a
+    value in `flatten_subset_template` (configurable in Settings); in that
+    case it produces a flattened image with all visible layers.
+
    Identifier:
        id (str): "pyblish.avalon.instance"
    """
@@ -19,13 +26,17 @@ class CollectInstances(pyblish.api.ContextPlugin):
    families_mapping = {
        "image": []
    }
+    # configurable in Settings
+    flatten_subset_template = ""

    def process(self, context):
        stub = photoshop.stub()
        layers = stub.get_layers()
        layers_meta = stub.get_layers_metadata()
        instance_names = []
+        all_layer_ids = []
        for layer in layers:
+            all_layer_ids.append(layer.id)
            layer_data = stub.read(layer, layers_meta)

            # Skip layers without metadata.
@@ -59,3 +70,33 @@ class CollectInstances(pyblish.api.ContextPlugin):
        if len(instance_names) != len(set(instance_names)):
            self.log.warning("Duplicate instances found. " +
                             "Remove unwanted via SubsetManager")

+        if len(instance_names) == 0 and self.flatten_subset_template:
+            project_name = context.data["projectEntity"]["name"]
+            variants = get_project_settings(project_name).get(
+                "photoshop", {}).get(
+                "create", {}).get(
+                "CreateImage", {}).get(
+                "defaults", [''])
+            family = "image"
+            task_name = api.Session["AVALON_TASK"]
+            asset_name = context.data["assetEntity"]["name"]
+
+            fill_pairs = {
+                "variant": variants[0],
+                "family": family,
+                "task": task_name
+            }
+
+            subset = self.flatten_subset_template.format(
+                **prepare_template_data(fill_pairs))
+
+            instance = context.create_instance(subset)
+            instance.data["family"] = family
+            instance.data["asset"] = asset_name
+            instance.data["subset"] = subset
+            instance.data["ids"] = all_layer_ids
+            instance.data["families"] = self.families_mapping[family]
+            instance.data["publish"] = True
+
+            self.log.info("flatten instance: {} ".format(instance.data))
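A rough sketch of how the flatten subset name comes together. The template and values are invented, and the capitalized-key expansion written out by hand below is an assumption about what `prepare_template_data` provides, not a verified contract:

```python
# Hypothetical template; the real one comes from project Settings.
flatten_subset_template = "image{Task}{Variant}"

# Assumed expansion of fill pairs into lower/capitalized key variants,
# mimicking what prepare_template_data is expected to return.
template_data = {
    "variant": "main", "Variant": "Main",
    "family": "image", "Family": "Image",
    "task": "compositing", "Task": "Compositing",
}

subset = flatten_subset_template.format(**template_data)
assert subset == "imageCompositingMain"
```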
@@ -2,18 +2,26 @@ import os

import pyblish.api

+from openpype.lib import get_subset_name_with_asset_doc
+

class CollectReview(pyblish.api.ContextPlugin):
    """Gather the active document as review instance."""

    label = "Review"
-    order = pyblish.api.CollectorOrder
+    order = pyblish.api.CollectorOrder + 0.1
    hosts = ["photoshop"]

    def process(self, context):
        family = "review"
-        task = os.getenv("AVALON_TASK", None)
-        subset = family + task.capitalize()
+        subset = get_subset_name_with_asset_doc(
+            family,
+            "",
+            context.data["anatomyData"]["task"]["name"],
+            context.data["assetEntity"],
+            context.data["anatomyData"]["project"]["name"],
+            host_name=context.data["hostName"]
+        )

        file_path = context.data["currentFile"]
        base_name = os.path.basename(file_path)
@@ -1,6 +1,8 @@
import os
import pyblish.api

+from openpype.lib import get_subset_name_with_asset_doc
+

class CollectWorkfile(pyblish.api.ContextPlugin):
    """Collect current script for publish."""
@@ -11,8 +13,14 @@ class CollectWorkfile(pyblish.api.ContextPlugin):

    def process(self, context):
        family = "workfile"
-        task = os.getenv("AVALON_TASK", None)
-        subset = family + task.capitalize()
+        subset = get_subset_name_with_asset_doc(
+            family,
+            "",
+            context.data["anatomyData"]["task"]["name"],
+            context.data["assetEntity"],
+            context.data["anatomyData"]["project"]["name"],
+            host_name=context.data["hostName"]
+        )

        file_path = context.data["currentFile"]
        staging_dir = os.path.dirname(file_path)
@@ -26,8 +26,10 @@ class ExtractImage(openpype.api.Extractor):
        with photoshop.maintained_selection():
            self.log.info("Extracting %s" % str(list(instance)))
            with photoshop.maintained_visibility():
+                ids = set()
                layer = instance.data.get("layer")
-                ids = set([layer.id])
+                if layer:
+                    ids.add(layer.id)
                add_ids = instance.data.pop("ids", None)
                if add_ids:
                    ids.update(set(add_ids))
@@ -155,6 +155,9 @@ class ExtractReview(openpype.api.Extractor):
        for image_instance in instance.context:
            if image_instance.data["family"] != "image":
                continue
+            if not image_instance.data.get("layer"):
+                # dummy instance for flatten image
+                continue
            layers.append(image_instance.data.get("layer"))

        return sorted(layers)
@@ -29,7 +29,8 @@ class ValidateNamingRepair(pyblish.api.Action):
        stub = photoshop.stub()
        for instance in instances:
            self.log.info("validate_naming instance {}".format(instance))
-            metadata = stub.read(instance[0])
+            layer_item = instance.data["layer"]
+            metadata = stub.read(layer_item)
            self.log.info("metadata instance {}".format(metadata))
            layer_name = None
            if metadata.get("uuid"):
@@ -43,11 +44,11 @@ class ValidateNamingRepair(pyblish.api.Action):
                stub.rename_layer(instance.data["uuid"], layer_name)

            subset_name = re.sub(invalid_chars, replace_char,
-                                 instance.data["name"])
+                                 instance.data["subset"])

-            instance[0].Name = layer_name or subset_name
+            layer_item.name = layer_name or subset_name
            metadata["subset"] = subset_name
-            stub.imprint(instance[0], metadata)
+            stub.imprint(layer_item, metadata)

            return True
@@ -4,14 +4,17 @@ Basic avalon integration
import os
import contextlib
from collections import OrderedDict
from avalon import api as avalon
-from avalon import schema

from pyblish import api as pyblish

+from avalon import schema

from openpype.api import Logger
from openpype.pipeline import (
    LegacyCreator,
    register_loader_plugin_path,
    register_creator_plugin_path,
    deregister_loader_plugin_path,
    deregister_creator_plugin_path,
    AVALON_CONTAINER_ID,
)
from . import lib
@@ -46,7 +49,7 @@ def install():
    log.info("Registering DaVinci Resolve plug-ins..")

    register_loader_plugin_path(LOAD_PATH)
-    avalon.register_plugin_path(LegacyCreator, CREATE_PATH)
+    register_creator_plugin_path(CREATE_PATH)

    # register callback for switching publishable
    pyblish.register_callback("instanceToggled", on_pyblish_instance_toggled)
@@ -70,7 +73,7 @@ def uninstall():
    log.info("Deregistering DaVinci Resolve plug-ins..")

    deregister_loader_plugin_path(LOAD_PATH)
-    avalon.deregister_plugin_path(LegacyCreator, CREATE_PATH)
+    deregister_creator_plugin_path(CREATE_PATH)

    # register callback for switching publishable
    pyblish.deregister_callback("instanceToggled", on_pyblish_instance_toggled)
@@ -1,10 +1,10 @@
-from avalon import io
+from openpype.lib import NumberDef
from openpype.hosts.testhost.api import pipeline
from openpype.pipeline import (
    AutoCreator,
    CreatedInstance,
-    lib
)
+from avalon import io


class MyAutoCreator(AutoCreator):
@@ -13,7 +13,7 @@ class MyAutoCreator(AutoCreator):

    def get_instance_attr_defs(self):
        output = [
-            lib.NumberDef("number_key", label="Number")
+            NumberDef("number_key", label="Number")
        ]
        return output
@@ -30,7 +30,7 @@ class MyAutoCreator(AutoCreator):
    def update_instances(self, update_list):
        pipeline.update_instances(update_list)

-    def create(self, options=None):
+    def create(self):
        existing_instance = None
        for instance in self.create_context.instances:
            if instance.family == self.family:
@@ -1,10 +1,16 @@
import json
from openpype import resources
from openpype.hosts.testhost.api import pipeline
+from openpype.lib import (
+    UISeparatorDef,
+    UILabelDef,
+    BoolDef,
+    NumberDef,
+    FileDef,
+)
from openpype.pipeline import (
    Creator,
    CreatedInstance,
-    lib
)
@@ -54,17 +60,17 @@ class TestCreatorOne(Creator):

    def get_instance_attr_defs(self):
        output = [
-            lib.NumberDef("number_key", label="Number"),
+            NumberDef("number_key", label="Number"),
        ]
        return output

    def get_pre_create_attr_defs(self):
        output = [
-            lib.BoolDef("use_selection", label="Use selection"),
-            lib.UISeparatorDef(),
-            lib.UILabelDef("Testing label"),
-            lib.FileDef("filepath", folders=True, label="Filepath"),
-            lib.FileDef(
+            BoolDef("use_selection", label="Use selection"),
+            UISeparatorDef(),
+            UILabelDef("Testing label"),
+            FileDef("filepath", folders=True, label="Filepath"),
+            FileDef(
                "filepath_2", multipath=True, folders=True, label="Filepath 2"
            )
        ]
@@ -1,8 +1,8 @@
+from openpype.lib import NumberDef, TextDef
from openpype.hosts.testhost.api import pipeline
from openpype.pipeline import (
    Creator,
    CreatedInstance,
-    lib
)
@@ -40,8 +40,8 @@ class TestCreatorTwo(Creator):

    def get_instance_attr_defs(self):
        output = [
-            lib.NumberDef("number_key"),
-            lib.TextDef("text_key")
+            NumberDef("number_key"),
+            TextDef("text_key")
        ]
        return output
@@ -1,10 +1,8 @@
import json
import pyblish.api

-from openpype.pipeline import (
-    OpenPypePyblishPluginMixin,
-    attribute_definitions
-)
+from openpype.lib import attribute_definitions
+from openpype.pipeline import OpenPypePyblishPluginMixin


class CollectInstanceOneTestHost(
@@ -1,8 +1,8 @@
from openpype.hosts.traypublisher.api import pipeline
+from openpype.lib import FileDef
from openpype.pipeline import (
    Creator,
-    CreatedInstance,
-    lib
+    CreatedInstance
)
@@ -80,7 +80,7 @@ class WorkfileCreator(Creator):

    def get_instance_attr_defs(self):
        output = [
-            lib.FileDef(
+            FileDef(
                "filepath",
                folders=False,
                extensions=self.extensions,
@@ -6,7 +6,7 @@ from openpype.pipeline import PublishValidationError
class ValidateWorkfilePath(pyblish.api.InstancePlugin):
    """Validate existence of workfile instance."""

-    label = "Collect Workfile"
+    label = "Validate Workfile"
    order = pyblish.api.ValidatorOrder - 0.49
    families = ["workfile"]
    hosts = ["traypublisher"]
@@ -15,9 +15,10 @@ from openpype.hosts import tvpaint
from openpype.api import get_current_project_settings
from openpype.lib import register_event_callback
from openpype.pipeline import (
-    LegacyCreator,
    register_loader_plugin_path,
+    register_creator_plugin_path,
    deregister_loader_plugin_path,
+    deregister_creator_plugin_path,
    AVALON_CONTAINER_ID,
)
@@ -82,7 +83,7 @@ def install():
    pyblish.api.register_host("tvpaint")
    pyblish.api.register_plugin_path(PUBLISH_PATH)
    register_loader_plugin_path(LOAD_PATH)
-    avalon.api.register_plugin_path(LegacyCreator, CREATE_PATH)
+    register_creator_plugin_path(CREATE_PATH)

    registered_callbacks = (
        pyblish.api.registered_callbacks().get("instanceToggled") or []
@@ -104,7 +105,7 @@ def uninstall():
    pyblish.api.deregister_host("tvpaint")
    pyblish.api.deregister_plugin_path(PUBLISH_PATH)
    deregister_loader_plugin_path(LOAD_PATH)
-    avalon.api.deregister_plugin_path(LegacyCreator, CREATE_PATH)
+    deregister_creator_plugin_path(CREATE_PATH)


def containerise(
@@ -20,21 +20,30 @@ class CollectInstances(pyblish.api.ContextPlugin):
            json.dumps(workfile_instances, indent=4)
        ))

+        filtered_instance_data = []
        # Backwards compatibility for workfiles that already have review
        # instance in metadata.
        review_instance_exist = False
        for instance_data in workfile_instances:
-            if instance_data["family"] == "review":
+            family = instance_data["family"]
+            if family == "review":
                review_instance_exist = True
-                break

+            elif family not in ("renderPass", "renderLayer"):
+                self.log.info("Unknown family \"{}\". Skipping {}".format(
+                    family, json.dumps(instance_data, indent=4)
+                ))
+                continue
+
+            filtered_instance_data.append(instance_data)

        # Fake review instance if review was not found in metadata families
        if not review_instance_exist:
-            workfile_instances.append(
+            filtered_instance_data.append(
                self._create_review_instance_data(context)
            )

-        for instance_data in workfile_instances:
+        for instance_data in filtered_instance_data:
            instance_data["fps"] = context.data["sceneFps"]

            # Store workfile instance data to instance data
@@ -42,8 +51,11 @@ class CollectInstances(pyblish.api.ContextPlugin):
            # Global instance data modifications
            # Fill families
            family = instance_data["family"]
+            families = [family]
+            if family != "review":
+                families.append("review")
            # Add `review` family for thumbnail integration
-            instance_data["families"] = [family, "review"]
+            instance_data["families"] = families

            # Instance name
            subset_name = instance_data["subset"]
@@ -78,7 +90,7 @@ class CollectInstances(pyblish.api.ContextPlugin):
        # Project name from workfile context
        project_name = context.data["workfile_context"]["project"]
        # Host name from context
-        host_name = os.environ["AVALON_APP"]
+        host_name = context.data["hostName"]
        # Use empty variant value
        variant = ""
        task_name = io.Session["AVALON_TASK"]
@@ -106,12 +118,6 @@ class CollectInstances(pyblish.api.ContextPlugin):
            instance = self.create_render_pass_instance(
                context, instance_data
            )
-        else:
-            raise AssertionError(
-                "Instance with unknown family \"{}\": {}".format(
-                    family, instance_data
-                )
-            )

        if instance is None:
            continue
openpype/hosts/tvpaint/plugins/publish/collect_scene_render.py  (new file, 110 lines)
@@ -0,0 +1,110 @@
import json
import copy
import pyblish.api
from avalon import io

from openpype.lib import get_subset_name_with_asset_doc


class CollectRenderScene(pyblish.api.ContextPlugin):
    """Collect instance which renders whole scene in PNG.

    Creates instance with family 'renderScene' which will have all layers
    to render, composited into one result. The instance is not
    collected from scene.

    The scene is rendered with all visible layers, the same way a review is.

    Instance is disabled if there are any created instances of 'renderLayer'
    or 'renderPass'. That is because it is expected that this instance is
    used as lazy publish of TVPaint file.

    Subset name is created the same way as for the 'renderLayer' family. It
    can use `renderPass` and `renderLayer` keys which can be set using
    settings and `variant` is filled using `renderPass` value.
    """
    label = "Collect Render Scene"
    order = pyblish.api.CollectorOrder - 0.39
    hosts = ["tvpaint"]

    # Value of 'render_pass' in subset name template
    render_pass = "beauty"

    # Settings attributes
    enabled = False
    # Value of 'render_layer' and 'variant' in subset name template
    render_layer = "Main"

    def process(self, context):
        # Check if there are created instances of renderPass and renderLayer
        # - that will define if renderScene instance is enabled after
        #   collection
        any_created_instance = False
        for instance in context:
            family = instance.data["family"]
            if family in ("renderPass", "renderLayer"):
                any_created_instance = True
                break

        # Global instance data modifications
        # Fill families
        family = "renderScene"
        # Add `review` family for thumbnail integration
        families = [family, "review"]

        # Collect asset doc to get asset id
        # - not sure if it's good idea to require asset id in
        #   get_subset_name?
        workfile_context = context.data["workfile_context"]
        asset_name = workfile_context["asset"]
        asset_doc = io.find_one({
            "type": "asset",
            "name": asset_name
        })

        # Project name from workfile context
        project_name = context.data["workfile_context"]["project"]
        # Host name from context
        host_name = context.data["hostName"]
        # Variant is using render layer name
        variant = self.render_layer
        dynamic_data = {
            "render_layer": self.render_layer,
            "render_pass": self.render_pass
        }

        task_name = workfile_context["task"]
        subset_name = get_subset_name_with_asset_doc(
            "render",
            variant,
            task_name,
            asset_doc,
            project_name,
            host_name,
            dynamic_data=dynamic_data
        )

        instance_data = {
            "family": family,
            "families": families,
            "fps": context.data["sceneFps"],
            "subset": subset_name,
            "name": subset_name,
            "label": "{} [{}-{}]".format(
                subset_name,
                context.data["sceneMarkIn"] + 1,
                context.data["sceneMarkOut"] + 1
            ),
            "active": not any_created_instance,
            "publish": not any_created_instance,
            "representations": [],
            "layers": copy.deepcopy(context.data["layersData"]),
            "asset": asset_name,
            "task": task_name
        }

        instance = context.create_instance(**instance_data)

        self.log.debug("Created instance: {}\n{}".format(
            instance, json.dumps(instance.data, indent=4)
        ))
@@ -0,0 +1,99 @@
"""Plugin converting png files from ExtractSequence into exrs.

Requires:
    ExtractSequence - source of PNG
    ExtractReview - review was already created so we can convert to any exr
"""
import os
import json

import pyblish.api
from openpype.lib import (
    get_oiio_tools_path,
    run_subprocess,
)
from openpype.pipeline import KnownPublishError


class ExtractConvertToEXR(pyblish.api.InstancePlugin):
    # Offset to get after ExtractSequence plugin.
    order = pyblish.api.ExtractorOrder + 0.1
    label = "Extract Sequence EXR"
    hosts = ["tvpaint"]
    families = ["render"]

    enabled = False

    # Replace source PNG files or just add
    replace_pngs = True
    # EXR compression
    exr_compression = "ZIP"

    def process(self, instance):
        repres = instance.data.get("representations")
        if not repres:
            return

        oiio_path = get_oiio_tools_path()
        # Raise an exception when oiiotool is not available
        # - this can currently happen on MacOS machines
        if not os.path.exists(oiio_path):
            raise KnownPublishError(
                "OpenImageIO tool is not available on this machine."
            )

        new_repres = []
        for repre in repres:
            if repre["name"] != "png":
                continue

            self.log.info(
                "Processing representation: {}".format(
                    json.dumps(repre, sort_keys=True, indent=4)
                )
            )

            src_filepaths = set()
            new_filenames = []
            for src_filename in repre["files"]:
                dst_filename = os.path.splitext(src_filename)[0] + ".exr"
                new_filenames.append(dst_filename)

                src_filepath = os.path.join(repre["stagingDir"], src_filename)
                dst_filepath = os.path.join(repre["stagingDir"], dst_filename)

                src_filepaths.add(src_filepath)

                args = [
                    oiio_path, src_filepath,
                    "--compression", self.exr_compression,
                    # TODO how to define color conversion?
                    "--colorconvert", "sRGB", "linear",
                    "-o", dst_filepath
                ]
                run_subprocess(args)

            new_repres.append(
                {
                    "name": "exr",
                    "ext": "exr",
                    "files": new_filenames,
                    "stagingDir": repre["stagingDir"],
                    "tags": list(repre["tags"])
                }
            )

            if self.replace_pngs:
                instance.data["representations"].remove(repre)

                for filepath in src_filepaths:
                    instance.context.data["cleanupFullPaths"].append(filepath)

        instance.data["representations"].extend(new_repres)
        self.log.info(
            "Representations: {}".format(
                json.dumps(
                    instance.data["representations"], sort_keys=True, indent=4
                )
            )
        )
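Outside of pyblish, the per-frame conversion above boils down to a single oiiotool call. A minimal standalone sketch, with made-up paths, mirroring the same arguments the plugin builds (including the TODO-marked sRGB-to-linear choice):

```python
import subprocess

# Hypothetical frame paths; oiiotool ships with OpenImageIO.
src = "/tmp/render.0001.png"
dst = "/tmp/render.0001.exr"

subprocess.check_call([
    "oiiotool", src,
    "--compression", "ZIP",               # same default the plugin exposes
    "--colorconvert", "sRGB", "linear",   # bake the color conversion
    "-o", dst,
])
```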
@@ -12,14 +12,13 @@ from openpype.hosts.tvpaint.lib import (
    fill_reference_frames,
    composite_rendered_layers,
    rename_filepaths_by_frame_start,
-    composite_images
)


class ExtractSequence(pyblish.api.Extractor):
    label = "Extract Sequence"
    hosts = ["tvpaint"]
-    families = ["review", "renderPass", "renderLayer"]
+    families = ["review", "renderPass", "renderLayer", "renderScene"]

    # Modifiable with settings
    review_bg = [255, 255, 255, 255]
@@ -160,7 +159,7 @@ class ExtractSequence(pyblish.api.Extractor):

        # Fill tags and new families
        tags = []
-        if family_lowered in ("review", "renderlayer"):
+        if family_lowered in ("review", "renderlayer", "renderscene"):
            tags.append("review")

        # Sequence of one frame
@@ -186,7 +185,7 @@ class ExtractSequence(pyblish.api.Extractor):

        instance.data["representations"].append(new_repre)

-        if family_lowered in ("renderpass", "renderlayer"):
+        if family_lowered in ("renderpass", "renderlayer", "renderscene"):
            # Change family to render
            instance.data["family"] = "render"
@@ -8,7 +8,7 @@ class ValidateLayersVisiblity(pyblish.api.InstancePlugin):

    label = "Validate Layers Visibility"
    order = pyblish.api.ValidatorOrder
-    families = ["review", "renderPass", "renderLayer"]
+    families = ["review", "renderPass", "renderLayer", "renderScene"]

    def process(self, instance):
        layer_names = set()
@@ -7,9 +7,10 @@ import pyblish.api
from avalon import api

from openpype.pipeline import (
-    LegacyCreator,
    register_loader_plugin_path,
+    register_creator_plugin_path,
    deregister_loader_plugin_path,
+    deregister_creator_plugin_path,
    AVALON_CONTAINER_ID,
)
from openpype.tools.utils import host_tools
@@ -49,7 +50,7 @@ def install():
    logger.info("installing OpenPype for Unreal")
    pyblish.api.register_plugin_path(str(PUBLISH_PATH))
    register_loader_plugin_path(str(LOAD_PATH))
-    api.register_plugin_path(LegacyCreator, str(CREATE_PATH))
+    register_creator_plugin_path(str(CREATE_PATH))
    _register_callbacks()
    _register_events()
@@ -58,7 +59,7 @@ def uninstall():
    """Uninstall Unreal configuration for Avalon."""
    pyblish.api.deregister_plugin_path(str(PUBLISH_PATH))
    deregister_loader_plugin_path(str(LOAD_PATH))
-    api.deregister_plugin_path(LegacyCreator, str(CREATE_PATH))
+    deregister_creator_plugin_path(str(CREATE_PATH))


def _register_callbacks():
@@ -29,6 +29,21 @@ from .vendor_bin_utils import (
    is_oiio_supported
)

+from .attribute_definitions import (
+    AbtractAttrDef,
+
+    UIDef,
+    UISeparatorDef,
+    UILabelDef,
+
+    UnknownDef,
+    NumberDef,
+    TextDef,
+    EnumDef,
+    BoolDef,
+    FileDef,
+)
+
from .env_tools import (
    env_value_to_bool,
    get_paths_from_environ,
@@ -233,6 +248,19 @@ __all__ = [
    "get_ffmpeg_tool_path",
    "is_oiio_supported",

+    "AbtractAttrDef",
+
+    "UIDef",
+    "UISeparatorDef",
+    "UILabelDef",
+
+    "UnknownDef",
+    "NumberDef",
+    "TextDef",
+    "EnumDef",
+    "BoolDef",
+    "FileDef",
+
    "import_filepath",
    "modules_from_path",
    "recursive_bases_from_class",
|||
|
|
@ -211,6 +211,7 @@ class ApplicationGroup:
|
|||
data (dict): Group defying data loaded from settings.
|
||||
manager (ApplicationManager): Manager that created the group.
|
||||
"""
|
||||
|
||||
def __init__(self, name, data, manager):
|
||||
self.name = name
|
||||
self.manager = manager
|
||||
|
|
@ -374,6 +375,7 @@ class ApplicationManager:
|
|||
will always use these values. Gives ability to create manager
|
||||
using different settings.
|
||||
"""
|
||||
|
||||
def __init__(self, system_settings=None):
|
||||
self.log = PypeLogger.get_logger(self.__class__.__name__)
|
||||
|
||||
|
|
@ -530,13 +532,13 @@ class EnvironmentToolGroup:
|
|||
variants = data.get("variants") or {}
|
||||
label_by_key = variants.pop(M_DYNAMIC_KEY_LABEL, {})
|
||||
variants_by_name = {}
|
||||
for variant_name, variant_env in variants.items():
|
||||
for variant_name, variant_data in variants.items():
|
||||
if variant_name in METADATA_KEYS:
|
||||
continue
|
||||
|
||||
variant_label = label_by_key.get(variant_name) or variant_name
|
||||
tool = EnvironmentTool(
|
||||
variant_name, variant_label, variant_env, self
|
||||
variant_name, variant_label, variant_data, self
|
||||
)
|
||||
variants_by_name[variant_name] = tool
|
||||
self.variants = variants_by_name
|
||||
|
|
@@ -560,15 +562,30 @@ class EnvironmentTool:

    Args:
        name (str): Name of the tool.
-        environment (dict): Variant environments.
+        variant_data (dict): Variant data with environments and
+            host and app variant filters.
        group (str): Name of group which wraps tool.
    """

-    def __init__(self, name, label, environment, group):
+    def __init__(self, name, label, variant_data, group):
+        # Backwards compatibility 3.9.1 - 3.9.2
+        # - 'variant_data' contained only environments but now contains also
+        #   host and application variant filters
+        host_names = variant_data.get("host_names", [])
+        app_variants = variant_data.get("app_variants", [])
+
+        if "environment" in variant_data:
+            environment = variant_data["environment"]
+        else:
+            environment = variant_data
+
+        self.host_names = host_names
+        self.app_variants = app_variants
        self.name = name
        self.variant_label = label
        self.label = " ".join((group.label, label))
        self.group = group

        self._environment = environment
        self.full_name = "/".join((group.name, name))
@@ -579,6 +596,19 @@ class EnvironmentTool:
    def environment(self):
        return copy.deepcopy(self._environment)

+    def is_valid_for_app(self, app):
+        """Is tool valid for application.
+
+        Args:
+            app (Application): Application for which are prepared environments.
+        """
+        if self.app_variants and app.full_name not in self.app_variants:
+            return False
+
+        if self.host_names and app.host_name not in self.host_names:
+            return False
+        return True
+

class ApplicationExecutable:
    """Representation of executable loaded from settings."""
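A quick sketch of the filter behavior `is_valid_for_app` adds, using a stand-in application object; the attribute names `full_name` and `host_name` come from the code above, the values are invented:

```python
from types import SimpleNamespace

# Stand-in for an Application; real ones come from ApplicationManager.
app = SimpleNamespace(full_name="maya/2022", host_name="maya")


def is_valid_for_app(app, app_variants, host_names):
    # Empty filters keep the tool valid for every application.
    if app_variants and app.full_name not in app_variants:
        return False
    if host_names and app.host_name not in host_names:
        return False
    return True


assert is_valid_for_app(app, [], [])                    # no filters set
assert is_valid_for_app(app, ["maya/2022"], ["maya"])   # both filters match
assert not is_valid_for_app(app, ["nuke/13.0"], [])     # wrong app variant
```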
@@ -1319,6 +1349,41 @@ def _merge_env(env, current_env):
    return result


+def _add_python_version_paths(app, env, logger):
+    """Add vendor packages specific for a Python version."""
+
+    # Skip adding if host name is not set
+    if not app.host_name:
+        return
+
+    # Add Python 2/3 modules
+    openpype_root = os.getenv("OPENPYPE_REPOS_ROOT")
+    python_vendor_dir = os.path.join(
+        openpype_root,
+        "openpype",
+        "vendor",
+        "python"
+    )
+    if app.use_python_2:
+        pythonpath = os.path.join(python_vendor_dir, "python_2")
+    else:
+        pythonpath = os.path.join(python_vendor_dir, "python_3")
+
+    if not os.path.exists(pythonpath):
+        return
+
+    logger.debug("Adding Python version specific paths to PYTHONPATH")
+    python_paths = [pythonpath]
+
+    # Load PYTHONPATH from current launch context
+    python_path = env.get("PYTHONPATH")
+    if python_path:
+        python_paths.append(python_path)
+
+    # Set new PYTHONPATH to launch context environments
+    env["PYTHONPATH"] = os.pathsep.join(python_paths)
+
+
def prepare_app_environments(data, env_group=None, implementation_envs=True):
    """Modify launch environments based on launched app and context.
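The PYTHONPATH handling above is a plain prepend that preserves whatever the launch context already carries; a tiny sketch of the same pattern with invented values:

```python
import os

env = {"PYTHONPATH": "/studio/scripts"}
vendor_path = "/openpype/vendor/python/python_3"  # hypothetical location

# Prepend the version-specific vendor dir, keeping anything already set.
python_paths = [vendor_path]
if env.get("PYTHONPATH"):
    python_paths.append(env["PYTHONPATH"])
env["PYTHONPATH"] = os.pathsep.join(python_paths)

assert env["PYTHONPATH"].startswith(vendor_path)
```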
@@ -1331,6 +1396,8 @@ def prepare_app_environments(data, env_group=None, implementation_envs=True):
     app = data["app"]
     log = data["log"]

+    _add_python_version_paths(app, data["env"], log)
+
     # `added_env_keys` has debug purpose
     added_env_keys = {app.group.name, app.name}
     # Environments for application
@@ -1347,7 +1414,7 @@ def prepare_app_environments(data, env_group=None, implementation_envs=True):
     # Make sure each tool group can be added only once
     for key in asset_doc["data"].get("tools_env") or []:
         tool = app.manager.tools.get(key)
-        if not tool:
+        if not tool or not tool.is_valid_for_app(app):
             continue
         groups_by_name[tool.group.name] = tool.group
         tool_by_group_name[tool.group.name][tool.name] = tool
@@ -1604,13 +1604,13 @@ def get_creator_by_name(creator_name, case_sensitive=False):
     Returns:
         Creator: Return first matching plugin or `None`.
     """
-    from openpype.pipeline import LegacyCreator
+    from openpype.pipeline import discover_legacy_creator_plugins

     # Lower input creator name if is not case sensitive
     if not case_sensitive:
         creator_name = creator_name.lower()

-    for creator_plugin in avalon.api.discover(LegacyCreator):
+    for creator_plugin in discover_legacy_creator_plugins():
         _creator_name = creator_plugin.__name__

         # Lower creator plugin name if is not case sensitive
@@ -1705,7 +1705,7 @@ def _get_task_context_data_for_anatomy(
         "task": {
             "name": task_name,
             "type": task_type,
-            "short_name": project_task_type_data["short_name"]
+            "short": project_task_type_data["short_name"]
         }
     }
@@ -1965,6 +1965,7 @@ def get_last_workfile(
     data.pop("comment", None)
     if not data.get("ext"):
         data["ext"] = extensions[0]
+    data["ext"] = data["ext"].replace('.', '')
     filename = StringTemplate.format_strict_template(file_template, data)

     if full_path:
@@ -5,8 +5,9 @@ import importlib
 import inspect
 import logging

+import six

 log = logging.getLogger(__name__)
-PY3 = sys.version_info[0] == 3


 def import_filepath(filepath, module_name=None):

@@ -28,7 +29,7 @@ def import_filepath(filepath, module_name=None):
     # Prepare module object where content of file will be parsed
     module = types.ModuleType(module_name)

-    if PY3:
+    if six.PY3:
         # Use loader so module has full specs
         module_loader = importlib.machinery.SourceFileLoader(
             module_name, filepath

@@ -38,7 +39,7 @@ def import_filepath(filepath, module_name=None):
     # Execute module code and store content to module
     with open(filepath) as _stream:
         # Execute content and store it to module object
-        exec(_stream.read(), module.__dict__)
+        six.exec_(_stream.read(), module.__dict__)

     module.__file__ = filepath
     return module

@@ -129,20 +130,12 @@ def classes_from_module(superclass, module):
     for name in dir(module):
         # It could be anything at this point
         obj = getattr(module, name)
-        if not inspect.isclass(obj):
+        if not inspect.isclass(obj) or obj is superclass:
             continue

         # These are subclassed from nothing, not even `object`
         if not len(obj.__bases__) > 0:
             continue
+        if issubclass(obj, superclass):
+            classes.append(obj)

-        # Use string comparison rather than `issubclass`
-        # in order to support reloading of this module.
-        bases = recursive_bases_from_class(obj)
-        if not any(base.__name__ == superclass.__name__ for base in bases):
-            continue
-
-        classes.append(obj)
     return classes
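The simplified `classes_from_module` now relies on a direct `issubclass` check (and skips the superclass itself) instead of comparing base-class names. A self-contained sketch of that logic against a hypothetical dynamically built module:

```python
import inspect
import types


class Plugin(object):
    pass


# Hypothetical dynamically loaded module containing one plugin subclass
module = types.ModuleType("fake_plugins")
module.Plugin = Plugin
exec("class MyPlugin(Plugin):\n    pass", module.__dict__)


def classes_from_module(superclass, module):
    classes = []
    for name in dir(module):
        obj = getattr(module, name)
        # Skip non-classes and the superclass itself
        if not inspect.isclass(obj) or obj is superclass:
            continue
        # Skip classes subclassed from nothing, not even `object`
        if not len(obj.__bases__) > 0:
            continue
        if issubclass(obj, superclass):
            classes.append(obj)
    return classes


print(classes_from_module(Plugin, module))  # [<class '...MyPlugin'>]
```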
@@ -228,7 +221,7 @@ def import_module_from_dirpath(dirpath, folder_name, dst_module_name=None):
         dst_module_name(str): Parent module name under which the loaded
             module is added.
     """
-    if PY3:
+    if six.PY3:
         module = _import_module_from_dirpath_py3(
             dirpath, folder_name, dst_module_name
         )
@@ -478,8 +478,14 @@ def convert_for_ffmpeg(
         oiio_cmd.extend(["--eraseattrib", attr_name])

     # Add last argument - path to output
-    base_file_name = os.path.basename(first_input_path)
-    output_path = os.path.join(output_dir, base_file_name)
+    if is_sequence:
+        ext = os.path.splitext(first_input_path)[1]
+        base_filename = "tmp.%{:0>2}d{}".format(
+            len(str(input_frame_end)), ext
+        )
+    else:
+        base_filename = os.path.basename(first_input_path)
+    output_path = os.path.join(output_dir, base_filename)
     oiio_cmd.extend([
         "-o", output_path
     ])
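The sequence branch builds a printf-style frame pattern whose padding follows the digit count of the last frame. A quick sketch with an assumed input path:

```python
# Hypothetical input values for illustration
import os

first_input_path = "/renders/shot010.1001.exr"
input_frame_end = 1001
is_sequence = True

if is_sequence:
    ext = os.path.splitext(first_input_path)[1]
    # len("1001") == 4 -> pattern "tmp.%04d.exr"
    base_filename = "tmp.%{:0>2}d{}".format(len(str(input_frame_end)), ext)
else:
    base_filename = os.path.basename(first_input_path)

print(base_filename)  # tmp.%04d.exr
```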
@@ -28,26 +28,15 @@ from openpype.settings.lib import (
 )
 from openpype.lib import PypeLogger


-DEFAULT_OPENPYPE_MODULES = (
-    "avalon_apps",
-    "clockify",
-    "log_viewer",
-    "deadline",
-    "muster",
-    "royalrender",
-    "python_console_interpreter",
-    "ftrack",
-    "slack",
-    "webserver",
-    "launcher_action",
-    "project_manager_action",
-    "settings_action",
-    "standalonepublish_action",
-    "traypublish_action",
-    "job_queue",
-    "timers_manager",
-    "sync_server",
+# Files that will be always ignored on modules import
+IGNORED_FILENAMES = (
+    "__pycache__",
 )
+# Files ignored on modules import from "./openpype/modules"
+IGNORED_DEFAULT_FILENAMES = (
+    "__init__.py",
+    "base.py",
+    "interfaces.py",
+)
@@ -146,9 +135,16 @@ class _LoadCache:

 def get_default_modules_dir():
     """Path to default OpenPype modules."""

     current_dir = os.path.abspath(os.path.dirname(__file__))

-    return os.path.join(current_dir, "default_modules")
+    output = []
+    for folder_name in ("default_modules", ):
+        path = os.path.join(current_dir, folder_name)
+        if os.path.exists(path) and os.path.isdir(path):
+            output.append(path)
+
+    return output


 def get_dynamic_modules_dirs():
@@ -186,7 +182,7 @@ def get_dynamic_modules_dirs():
 def get_module_dirs():
     """List of paths where OpenPype modules can be found."""
     _dirpaths = []
-    _dirpaths.append(get_default_modules_dir())
+    _dirpaths.extend(get_default_modules_dir())
     _dirpaths.extend(get_dynamic_modules_dirs())

     dirpaths = []
@@ -292,25 +288,45 @@ def _load_modules():

     log = PypeLogger.get_logger("ModulesLoader")

+    current_dir = os.path.abspath(os.path.dirname(__file__))
+    processed_paths = set()
+    processed_paths.add(current_dir)
     # Import default modules imported from 'openpype.modules'
-    for default_module_name in DEFAULT_OPENPYPE_MODULES:
+    for filename in os.listdir(current_dir):
+        # Ignore filenames
+        if (
+            filename in IGNORED_FILENAMES
+            or filename in IGNORED_DEFAULT_FILENAMES
+        ):
+            continue
+
+        fullpath = os.path.join(current_dir, filename)
+        basename, ext = os.path.splitext(filename)
+
+        if not os.path.isdir(fullpath) and ext not in (".py", ):
+            continue
+
         try:
-            import_str = "openpype.modules.{}".format(default_module_name)
-            new_import_str = "{}.{}".format(modules_key, default_module_name)
+            import_str = "openpype.modules.{}".format(basename)
+            new_import_str = "{}.{}".format(modules_key, basename)
             default_module = __import__(import_str, fromlist=("", ))
             sys.modules[new_import_str] = default_module
-            setattr(openpype_modules, default_module_name, default_module)
+            setattr(openpype_modules, basename, default_module)

         except Exception:
             msg = (
                 "Failed to import default module '{}'."
-            ).format(default_module_name)
+            ).format(basename)
             log.error(msg, exc_info=True)

     # Look for OpenPype modules in paths defined with `get_module_dirs`
     # - dynamically imported OpenPype modules and addons
-    dirpaths = get_module_dirs()
-    for dirpath in dirpaths:
+    for dirpath in get_module_dirs():
+        # Skip already processed paths
+        if dirpath in processed_paths:
+            continue
+        processed_paths.add(dirpath)

         if not os.path.exists(dirpath):
             log.warning((
                 "Could not find path when loading OpenPype modules \"{}\""
@@ -319,12 +335,15 @@ def _load_modules():

         for filename in os.listdir(dirpath):
             # Ignore filenames
-            if filename in ("__pycache__", ):
+            if filename in IGNORED_FILENAMES:
                 continue

             fullpath = os.path.join(dirpath, filename)
             basename, ext = os.path.splitext(filename)

+            if not os.path.isdir(fullpath) and ext not in (".py", ):
+                continue
+
             # TODO add more logic how to define if folder is module or not
             # - check manifest and content of manifest
             try:
@@ -492,7 +492,8 @@ class DeleteOldVersions(BaseAction):
                 os.remove(file_path)
                 self.log.debug("Removed file: {}".format(file_path))

-                remainders.remove(file_path_base)
+                if file_path_base in remainders:
+                    remainders.remove(file_path_base)
                 continue

             seq_path_base = os.path.split(seq_path)[1]
@@ -286,21 +286,6 @@ def from_dict_to_set(data, is_project):
     return result


-def get_avalon_project_template(project_name):
-    """Get avalon template
-    Args:
-        project_name: (string)
-    Returns:
-        dictionary with templates
-    """
-    templates = Anatomy(project_name).templates
-    return {
-        "workfile": templates["avalon"]["workfile"],
-        "work": templates["avalon"]["work"],
-        "publish": templates["avalon"]["publish"]
-    }
-
-
 def get_project_apps(in_app_list):
     """ Application definitions for app name.
@@ -26,3 +26,12 @@ class LogsWindow(QtWidgets.QWidget):
         self.log_detail = log_detail

         self.setStyleSheet(style.load_stylesheet())

+        self._first_show = True
+
+    def showEvent(self, event):
+        super(LogsWindow, self).showEvent(event)
+
+        if self._first_show:
+            self._first_show = False
+            self.logs_widget.refresh()
@@ -155,6 +155,11 @@ class LogsWidget(QtWidgets.QWidget):
             QtCore.Qt.DescendingOrder
         )

+        refresh_triggered_timer = QtCore.QTimer()
+        refresh_triggered_timer.setSingleShot(True)
+        refresh_triggered_timer.setInterval(200)
+
+        refresh_triggered_timer.timeout.connect(self._on_refresh_timeout)
         view.selectionModel().selectionChanged.connect(self._on_index_change)
         refresh_btn.clicked.connect(self._on_refresh_clicked)
@@ -169,10 +174,12 @@ class LogsWidget(QtWidgets.QWidget):
         self.detail_widget = detail_widget
         self.refresh_btn = refresh_btn

-        # prepare
-        self.refresh()
+        self._refresh_triggered_timer = refresh_triggered_timer

     def refresh(self):
+        self._refresh_triggered_timer.start()
+
+    def _on_refresh_timeout(self):
         self.model.refresh()
         self.detail_widget.refresh()
@@ -35,20 +35,25 @@ class CollectSlackFamilies(pyblish.api.InstancePlugin):
             return

         # make slack publishable
-        if profile:
-            self.log.info("Found profile: {}".format(profile))
-            if instance.data.get('families'):
-                instance.data['families'].append('slack')
-            else:
-                instance.data['families'] = ['slack']
+        if not profile:
+            return

-        instance.data["slack_channel_message_profiles"] = \
-            profile["channel_messages"]
+        self.log.info("Found profile: {}".format(profile))
+        if instance.data.get('families'):
+            instance.data['families'].append('slack')
+        else:
+            instance.data['families'] = ['slack']

-        slack_token = (instance.context.data["project_settings"]
-                       ["slack"]
-                       ["token"])
-        instance.data["slack_token"] = slack_token
+        selected_profiles = profile["channel_messages"]
+        for prof in selected_profiles:
+            prof["review_upload_limit"] = profile.get("review_upload_limit",
+                                                      50)
+        instance.data["slack_channel_message_profiles"] = selected_profiles
+
+        slack_token = (instance.context.data["project_settings"]
+                       ["slack"]
+                       ["token"])
+        instance.data["slack_token"] = slack_token

     def main_family_from_instance(self, instance):  # TODO yank from integrate
         """Returns main family of entered instance."""
@@ -35,7 +35,7 @@ class IntegrateSlackAPI(pyblish.api.InstancePlugin):
             message = self._get_filled_message(message_profile["message"],
                                                instance,
                                                review_path)
-            self.log.info("message:: {}".format(message))
+            self.log.debug("message:: {}".format(message))
             if not message:
                 return
@@ -43,7 +43,8 @@ class IntegrateSlackAPI(pyblish.api.InstancePlugin):
                 publish_files.add(thumbnail_path)

             if message_profile["upload_review"] and review_path:
-                publish_files.add(review_path)
+                message, publish_files = self._handle_review_upload(
+                    message, message_profile, publish_files, review_path)

             project = instance.context.data["anatomyData"]["project"]["code"]
             for channel in message_profile["channels"]:
@@ -75,6 +76,19 @@ class IntegrateSlackAPI(pyblish.api.InstancePlugin):
             dbcon = mongo_client[database_name]["notification_messages"]
             dbcon.insert_one(msg)

+    def _handle_review_upload(self, message, message_profile, publish_files,
+                              review_path):
+        """Check if uploaded file is not too large"""
+        review_file_size_MB = os.path.getsize(review_path) / 1024 / 1024
+        file_limit = message_profile.get("review_upload_limit", 50)
+        if review_file_size_MB > file_limit:
+            message += "\nReview upload omitted because of file size."
+            if review_path not in message:
+                message += "\nFile located at: {}".format(review_path)
+        else:
+            publish_files.add(review_path)
+        return message, publish_files
+
     def _get_filled_message(self, message_templ, instance, review_path=None):
         """Use message_templ and data from instance to get message content.
@@ -210,6 +224,9 @@ class IntegrateSlackAPI(pyblish.api.InstancePlugin):
             # You will get a SlackApiError if "ok" is False
             error_str = self._enrich_error(str(e.response["error"]), channel)
             self.log.warning("Error happened {}".format(error_str))
+        except Exception as e:
+            error_str = self._enrich_error(str(e), channel)
+            self.log.warning("Not SlackAPI error", exc_info=True)

         return None, []
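The size guard in `_handle_review_upload` can be exercised on its own; this sketch uses a throwaway temp file instead of a real review movie and an artificially low limit:

```python
import os
import tempfile

# Create a 2 MB dummy "review" file (hypothetical stand-in)
with tempfile.NamedTemporaryFile(delete=False, suffix=".mov") as tmp:
    tmp.write(b"\0" * (2 * 1024 * 1024))
    review_path = tmp.name

review_file_size_mb = os.path.getsize(review_path) / 1024 / 1024
file_limit = 1  # pretend the profile allows only 1 MB

message = "Subset published."
if review_file_size_mb > file_limit:
    message += "\nReview upload omitted because of file size."
    if review_path not in message:
        message += "\nFile located at: {}".format(review_path)
print(message)

os.remove(review_path)
```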
@@ -3,8 +3,6 @@ from .constants import (
     HOST_WORKFILE_EXTENSIONS,
 )

-from .lib import attribute_definitions
-
 from .create import (
     BaseCreator,
     Creator,
@@ -15,6 +13,13 @@ from .create import (
     LegacyCreator,
     legacy_create,

+    discover_creator_plugins,
+    discover_legacy_creator_plugins,
+    register_creator_plugin,
+    deregister_creator_plugin,
+    register_creator_plugin_path,
+    deregister_creator_plugin_path,
 )

 from .load import (
@@ -43,7 +48,8 @@ from .publish import (
     PublishValidationError,
     PublishXmlValidationError,
     KnownPublishError,
-    OpenPypePyblishPluginMixin
+    OpenPypePyblishPluginMixin,
+    OptionalPyblishPluginMixin,
 )

 from .actions import (
@@ -81,6 +87,13 @@ __all__ = (
     "LegacyCreator",
     "legacy_create",

+    "discover_creator_plugins",
+    "discover_legacy_creator_plugins",
+    "register_creator_plugin",
+    "deregister_creator_plugin",
+    "register_creator_plugin_path",
+    "deregister_creator_plugin_path",
+
     # --- Load ---
     "HeroVersionType",
     "IncompatibleLoaderError",
@@ -107,6 +120,7 @@ __all__ = (
     "PublishXmlValidationError",
     "KnownPublishError",
     "OpenPypePyblishPluginMixin",
+    "OptionalPyblishPluginMixin",

     # --- Actions ---
     "LauncherAction",
@@ -1,4 +1,11 @@
 import logging
+from openpype.pipeline.plugin_discover import (
+    discover,
+    register_plugin,
+    register_plugin_path,
+    deregister_plugin,
+    deregister_plugin_path
+)


 class LauncherAction(object):
@@ -90,57 +97,39 @@ class InventoryAction(object):

 # Launcher action
 def discover_launcher_actions():
-    import avalon.api
-
-    return avalon.api.discover(LauncherAction)
+    return discover(LauncherAction)


 def register_launcher_action(plugin):
-    import avalon.api
-
-    return avalon.api.register_plugin(LauncherAction, plugin)
+    return register_plugin(LauncherAction, plugin)


 def register_launcher_action_path(path):
-    import avalon.api
-
-    return avalon.api.register_plugin_path(LauncherAction, path)
+    return register_plugin_path(LauncherAction, path)


 # Inventory action
 def discover_inventory_actions():
-    import avalon.api
-
-    actions = avalon.api.discover(InventoryAction)
+    actions = discover(InventoryAction)
     filtered_actions = []
     for action in actions:
         if action is not InventoryAction:
             print("DISCOVERED", action)
             filtered_actions.append(action)
         else:
             print("GOT SOURCE")

     return filtered_actions


 def register_inventory_action(plugin):
-    import avalon.api
-
-    return avalon.api.register_plugin(InventoryAction, plugin)
+    return register_plugin(InventoryAction, plugin)


 def deregister_inventory_action(plugin):
-    import avalon.api
-
-    avalon.api.deregister_plugin(InventoryAction, plugin)
+    deregister_plugin(InventoryAction, plugin)


 def register_inventory_action_path(path):
-    import avalon.api
-
-    return avalon.api.register_plugin_path(InventoryAction, path)
+    return register_plugin_path(InventoryAction, path)


 def deregister_inventory_action_path(path):
-    import avalon.api
-
-    return avalon.api.deregister_plugin_path(InventoryAction, path)
+    return deregister_plugin_path(InventoryAction, path)
@@ -6,7 +6,14 @@ from .creator_plugins import (
     BaseCreator,
     Creator,
-    AutoCreator
+    AutoCreator,
+
+    discover_creator_plugins,
+    discover_legacy_creator_plugins,
+    register_creator_plugin,
+    deregister_creator_plugin,
+    register_creator_plugin_path,
+    deregister_creator_plugin_path,
 )

 from .context import (
@@ -29,6 +36,13 @@ __all__ = (
     "Creator",
     "AutoCreator",

+    "discover_creator_plugins",
+    "discover_legacy_creator_plugins",
+    "register_creator_plugin",
+    "deregister_creator_plugin",
+    "register_creator_plugin_path",
+    "deregister_creator_plugin_path",
+
     "CreatedInstance",
     "CreateContext",
@@ -6,11 +6,11 @@ import inspect
 from uuid import uuid4
 from contextlib import contextmanager

-from ..lib import UnknownDef
 from .creator_plugins import (
     BaseCreator,
     Creator,
-    AutoCreator
+    AutoCreator,
+    discover_creator_plugins,
 )

 from openpype.api import (
@@ -18,6 +18,8 @@ from openpype.api import (
     get_project_settings
 )

+UpdateData = collections.namedtuple("UpdateData", ["instance", "changes"])
+

 class ImmutableKeyError(TypeError):
     """Accessed key is immutable so does not allow changes or removal."""
@@ -87,6 +89,8 @@ class AttributeValues:
         origin_data(dict): Values loaded from host before conversion.
     """
     def __init__(self, attr_defs, values, origin_data=None):
+        from openpype.lib.attribute_definitions import UnknownDef
+
         if origin_data is None:
             origin_data = copy.deepcopy(values)
         self._origin_data = origin_data
@@ -842,7 +846,7 @@ class CreateContext:
         creators = {}
         autocreators = {}
         manual_creators = {}
-        for creator_class in avalon.api.discover(BaseCreator):
+        for creator_class in discover_creator_plugins():
             if inspect.isabstract(creator_class):
                 self.log.info(
                     "Skipping abstract Creator {}".format(str(creator_class))
@@ -1080,7 +1084,7 @@ class CreateContext:
         for instance in cretor_instances:
             instance_changes = instance.changes()
             if instance_changes:
-                update_list.append((instance, instance_changes))
+                update_list.append(UpdateData(instance, instance_changes))

         creator = self.creators[identifier]
         if update_list:
@@ -8,7 +8,19 @@ from abc import (
 )
 import six

-from openpype.lib import get_subset_name_with_asset_doc
+from openpype.lib import (
+    get_subset_name_with_asset_doc,
+    set_plugin_attributes_from_settings,
+)
+from openpype.pipeline.plugin_discover import (
+    discover,
+    register_plugin,
+    register_plugin_path,
+    deregister_plugin,
+    deregister_plugin_path
+)

 from .legacy_create import LegacyCreator


 class CreatorError(Exception):
@@ -46,6 +58,11 @@ class BaseCreator:
     # - may not be used if `get_icon` is reimplemented
     icon = None

+    # Instance attribute definitions that can be changed per instance
+    # - returns list of attribute definitions from
+    #   `openpype.pipeline.attribute_definitions`
+    instance_attr_defs = []
+
     def __init__(
         self, create_context, system_settings, project_settings, headless=False
     ):
@@ -56,10 +73,13 @@ class BaseCreator:
         # - we may use UI inside processing; this attribute should be checked
         self.headless = headless

-    @abstractproperty
+    @property
     def identifier(self):
-        """Identifier of creator (must be unique)."""
-        pass
+        """Identifier of creator (must be unique).
+
+        Default implementation returns plugin's family.
+        """
+        return self.family

     @abstractproperty
     def family(self):
@@ -90,11 +110,39 @@ class BaseCreator:
         pass

     @abstractmethod
-    def collect_instances(self, attr_plugins=None):
+    def collect_instances(self):
         """Collect existing instances related to this creator plugin.

+        The implementation differs by host abilities. The creator has to
+        collect metadata about the instance and create a 'CreatedInstance'
+        object which should be added to 'CreateContext'.
+
+        Example:
+        ```python
+        def collect_instances(self):
+            # Getting existing instances is different per host implementation
+            for instance_data in pipeline.list_instances():
+                # Process only instances that were created by this creator
+                creator_id = instance_data.get("creator_identifier")
+                if creator_id == self.identifier:
+                    # Create instance object from existing data
+                    instance = CreatedInstance.from_existing(
+                        instance_data, self
+                    )
+                    # Add instance to create context
+                    self._add_instance_to_context(instance)
+        ```
         """
         pass

     @abstractmethod
     def update_instances(self, update_list):
         """Store changes of existing instances so they can be recollected.

         Args:
             update_list(list<UpdateData>): List of changed instances. Each
                 item contains the changed instance and its changes.
         """
         pass

     @abstractmethod
@@ -178,7 +226,7 @@ class BaseCreator:
             list<AbtractAttrDef>: Attribute definitions that can be tweaked
                 for created instance.
         """
-        return []
+        return self.instance_attr_defs


 class Creator(BaseCreator):
@@ -191,6 +239,9 @@ class Creator(BaseCreator):
     # - default_variants may not be used if `get_default_variants` is overridden
     default_variants = []

+    # Default variant used in 'get_default_variant'
+    default_variant = None
+
     # Short description of family
     # - may not be used if `get_description` is overridden
     description = None
@@ -204,6 +255,10 @@ class Creator(BaseCreator):
     # e.g. for buld creators
     create_allow_context_change = True

+    # Precreate attribute definitions shown before creation
+    # - similar to instance attribute definitions
+    pre_create_attr_defs = []
+
     @abstractmethod
     def create(self, subset_name, instance_data, pre_create_data):
         """Create new instance and store it.
@@ -263,7 +318,7 @@ class Creator(BaseCreator):
         `get_default_variants` should be used.
         """

-        return None
+        return self.default_variant

     def get_pre_create_attr_defs(self):
         """Plugin attribute definitions needed for creation.
@@ -276,7 +331,7 @@ class Creator(BaseCreator):
             list<AbtractAttrDef>: Attribute definitions that can be tweaked
                 for created instance.
         """
-        return []
+        return self.pre_create_attr_defs


 class AutoCreator(BaseCreator):
@@ -284,6 +339,43 @@ class AutoCreator(BaseCreator):

     Can be used e.g. for `workfile`.
     """

     def remove_instances(self, instances):
         """Skip removal."""
         pass
+
+
+def discover_creator_plugins():
+    return discover(BaseCreator)
+
+
+def discover_legacy_creator_plugins():
+    plugins = discover(LegacyCreator)
+    set_plugin_attributes_from_settings(plugins, LegacyCreator)
+    return plugins
+
+
+def register_creator_plugin(plugin):
+    if issubclass(plugin, BaseCreator):
+        register_plugin(BaseCreator, plugin)
+
+    elif issubclass(plugin, LegacyCreator):
+        register_plugin(LegacyCreator, plugin)
+
+
+def deregister_creator_plugin(plugin):
+    if issubclass(plugin, BaseCreator):
+        deregister_plugin(BaseCreator, plugin)
+
+    elif issubclass(plugin, LegacyCreator):
+        deregister_plugin(LegacyCreator, plugin)
+
+
+def register_creator_plugin_path(path):
+    register_plugin_path(BaseCreator, path)
+    register_plugin_path(LegacyCreator, path)
+
+
+def deregister_creator_plugin_path(path):
+    deregister_plugin_path(BaseCreator, path)
+    deregister_plugin_path(LegacyCreator, path)
@@ -1,30 +0,0 @@
-from .attribute_definitions import (
-    AbtractAttrDef,
-
-    UIDef,
-    UISeparatorDef,
-    UILabelDef,
-
-    UnknownDef,
-    NumberDef,
-    TextDef,
-    EnumDef,
-    BoolDef,
-    FileDef,
-)
-
-
-__all__ = (
-    "AbtractAttrDef",
-
-    "UIDef",
-    "UISeparatorDef",
-    "UILabelDef",
-
-    "UnknownDef",
-    "NumberDef",
-    "TextDef",
-    "EnumDef",
-    "BoolDef",
-    "FileDef",
-)
@@ -1,5 +1,13 @@
 import logging

+from openpype.lib import set_plugin_attributes_from_settings
+from openpype.pipeline.plugin_discover import (
+    discover,
+    register_plugin,
+    register_plugin_path,
+    deregister_plugin,
+    deregister_plugin_path
+)
 from .utils import get_representation_path_from_context
@@ -102,30 +110,22 @@ class SubsetLoaderPlugin(LoaderPlugin):


 def discover_loader_plugins():
-    import avalon.api
-
-    return avalon.api.discover(LoaderPlugin)
+    plugins = discover(LoaderPlugin)
+    set_plugin_attributes_from_settings(plugins, LoaderPlugin)
+    return plugins


 def register_loader_plugin(plugin):
-    import avalon.api
-
-    return avalon.api.register_plugin(LoaderPlugin, plugin)
-
-
-def deregister_loader_plugin_path(path):
-    import avalon.api
-
-    avalon.api.deregister_plugin_path(LoaderPlugin, path)
-
-
-def register_loader_plugin_path(path):
-    import avalon.api
-
-    return avalon.api.register_plugin_path(LoaderPlugin, path)
+    return register_plugin(LoaderPlugin, plugin)


 def deregister_loader_plugin(plugin):
-    import avalon.api
-
-    avalon.api.deregister_plugin(LoaderPlugin, plugin)
+    deregister_plugin(LoaderPlugin, plugin)


+def deregister_loader_plugin_path(path):
+    deregister_plugin_path(LoaderPlugin, path)
+
+
+def register_loader_plugin_path(path):
+    return register_plugin_path(LoaderPlugin, path)
openpype/pipeline/plugin_discover.py — 298 lines (new file)

@@ -0,0 +1,298 @@
import os
import inspect
import traceback

from openpype.api import Logger
from openpype.lib.python_module_tools import (
    modules_from_path,
    classes_from_module,
)

log = Logger.get_logger(__name__)


class DiscoverResult:
    """Result of plug-ins discovery of a single superclass type.

    Stores discovered, duplicated, ignored and abstract plugins and file
    paths which crashed on execution of file.
    """

    def __init__(self, superclass):
        self.superclass = superclass
        self.plugins = []
        self.crashed_file_paths = {}
        self.duplicated_plugins = []
        self.abstract_plugins = []
        self.ignored_plugins = set()
        # Store loaded modules to keep them in memory
        self._modules = set()

    def __iter__(self):
        for plugin in self.plugins:
            yield plugin

    def __getitem__(self, item):
        return self.plugins[item]

    def __setitem__(self, item, value):
        self.plugins[item] = value

    def add_module(self, module):
        """Add dynamically loaded python module to keep it in memory."""
        self._modules.add(module)

    def get_report(self, only_errors=True, exc_info=True, full_report=False):
        lines = []
        if not only_errors:
            # Successfully discovered plugins
            if self.plugins or full_report:
                lines.append(
                    "*** Discovered {} plugins".format(len(self.plugins))
                )
                for cls in self.plugins:
                    lines.append("- {}".format(cls.__name__))

            # Plugins that were defined to be ignored
            if self.ignored_plugins or full_report:
                lines.append("*** Ignored plugins {}".format(len(
                    self.ignored_plugins
                )))
                for cls in self.ignored_plugins:
                    lines.append("- {}".format(cls.__name__))

        # Abstract classes
        if self.abstract_plugins or full_report:
            lines.append("*** Discovered {} abstract plugins".format(len(
                self.abstract_plugins
            )))
            for cls in self.abstract_plugins:
                lines.append("- {}".format(cls.__name__))

        # Duplicated plugins
        if self.duplicated_plugins or full_report:
            lines.append("*** There were {} duplicated plugins".format(len(
                self.duplicated_plugins
            )))
            for cls in self.duplicated_plugins:
                lines.append("- {}".format(cls.__name__))

        if self.crashed_file_paths or full_report:
            lines.append("*** Failed to load {} files".format(len(
                self.crashed_file_paths
            )))
            for path, exc_info_args in self.crashed_file_paths.items():
                lines.append("- {}".format(path))
                if exc_info:
                    lines.append(10 * "*")
                    lines.extend(traceback.format_exception(*exc_info_args))
                    lines.append(10 * "*")

        return "\n".join(lines)

    def log_report(self, only_errors=True, exc_info=True):
        report = self.get_report(only_errors, exc_info)
        if report:
            log.info(report)


class PluginDiscoverContext(object):
    """Store and discover registered types and registered paths to types.

    Keeps in memory all registered types and their paths. Paths are
    dynamically loaded on discover so different discover calls won't return
    the same class objects even if they were loaded from the same file.
    """

    def __init__(self):
        self._registered_plugins = {}
        self._registered_plugin_paths = {}
        self._last_discovered_plugins = {}
        # Store the last result to memory
        self._last_discovered_results = {}

    def get_last_discovered_plugins(self, superclass):
        """Access last discovered plugins by a superclass.

        Returns:
            None: When superclass was not discovered yet.
            list: Lastly discovered plugins of the superclass.
        """

        return self._last_discovered_plugins.get(superclass)

    def discover(
        self,
        superclass,
        allow_duplicates=True,
        ignore_classes=None,
        return_report=False
    ):
        """Find and return subclasses of `superclass`.

        Args:
            superclass (type): Class which determines discovered subclasses.
            allow_duplicates (bool): Validate class name duplications.
            ignore_classes (list): List of classes that will be ignored
                and not added to result.

        Returns:
            DiscoverResult: Object holding successfully discovered plugins,
                ignored plugins, plugins with missing abstract implementation
                and duplicated plugins.
        """

        if not ignore_classes:
            ignore_classes = []

        result = DiscoverResult(superclass)
        plugin_names = set()
        registered_classes = self._registered_plugins.get(superclass) or []
        registered_paths = self._registered_plugin_paths.get(superclass) or []
        for cls in registered_classes:
            if cls is superclass or cls in ignore_classes:
                result.ignored_plugins.add(cls)
                continue

            if inspect.isabstract(cls):
                result.abstract_plugins.append(cls)
                continue

            class_name = cls.__name__
            if class_name in plugin_names:
                result.duplicated_plugins.append(cls)
                continue
            plugin_names.add(class_name)
            result.plugins.append(cls)

        # Include plug-ins from registered paths
        for path in registered_paths:
            modules, crashed = modules_from_path(path)
            for item in crashed:
                filepath, exc_info = item
                result.crashed_file_paths[filepath] = exc_info

            for item in modules:
                filepath, module = item
                result.add_module(module)
                for cls in classes_from_module(superclass, module):
                    if cls is superclass or cls in ignore_classes:
                        result.ignored_plugins.add(cls)
                        continue

                    if inspect.isabstract(cls):
                        result.abstract_plugins.append(cls)
                        continue

                    if not allow_duplicates:
                        class_name = cls.__name__
                        if class_name in plugin_names:
                            result.duplicated_plugins.append(cls)
                            continue
                        plugin_names.add(class_name)

                    result.plugins.append(cls)

        # Store last result in memory to keep loaded modules alive
        self._last_discovered_results[superclass] = result
        self._last_discovered_plugins[superclass] = list(
            result.plugins
        )
        result.log_report()
        if return_report:
            return result
        return result.plugins

    def register_plugin(self, superclass, cls):
        """Register a plug-in of type `superclass`.

        Arguments:
            superclass (type): Superclass of plug-in
            cls (object): Subclass of `superclass`
        """

        if superclass not in self._registered_plugins:
            self._registered_plugins[superclass] = list()

        if cls not in self._registered_plugins[superclass]:
            self._registered_plugins[superclass].append(cls)

    def register_plugin_path(self, superclass, path):
        """Register a directory of one or more plug-ins.

        Arguments:
            superclass (type): Superclass of plug-ins to look for during
                discovery
            path (str): Absolute path to directory in which to discover
                plug-ins
        """

        if superclass not in self._registered_plugin_paths:
            self._registered_plugin_paths[superclass] = list()

        path = os.path.normpath(path)
        if path not in self._registered_plugin_paths[superclass]:
            self._registered_plugin_paths[superclass].append(path)

    def registered_plugin_paths(self):
        """Return all currently registered plug-in paths."""
        # Return shallow copy so the original data can't be changed
        return {
            superclass: paths[:]
            for superclass, paths in self._registered_plugin_paths.items()
        }

    def deregister_plugin(self, superclass, plugin):
        """Opposite of `register_plugin()`."""
        if superclass in self._registered_plugins:
            self._registered_plugins[superclass].remove(plugin)

    def deregister_plugin_path(self, superclass, path):
        """Opposite of `register_plugin_path()`."""
        self._registered_plugin_paths[superclass].remove(path)


class _GlobalDiscover:
    """Access to global object of PluginDiscoverContext.

    Uses a singleton object to register/deregister plugins and plugin paths
    and then discover them by superclass.
    """

    _context = None

    @classmethod
    def get_context(cls):
        if cls._context is None:
            cls._context = PluginDiscoverContext()
        return cls._context


def discover(superclass, allow_duplicates=True):
    context = _GlobalDiscover.get_context()
    return context.discover(superclass, allow_duplicates)


def get_last_discovered_plugins(superclass):
    context = _GlobalDiscover.get_context()
    return context.get_last_discovered_plugins(superclass)


def register_plugin(superclass, cls):
    context = _GlobalDiscover.get_context()
    context.register_plugin(superclass, cls)


def register_plugin_path(superclass, path):
    context = _GlobalDiscover.get_context()
    context.register_plugin_path(superclass, path)


def deregister_plugin(superclass, cls):
    context = _GlobalDiscover.get_context()
    context.deregister_plugin(superclass, cls)


def deregister_plugin_path(superclass, path):
    context = _GlobalDiscover.get_context()
    context.deregister_plugin_path(superclass, path)
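A minimal usage sketch of the new module-level helpers; `MyBasePlugin`/`MyPlugin` are hypothetical and defined locally so the snippet stands alone (in the codebase the real superclasses such as `LauncherAction` are passed in, and `openpype` must be importable):

```python
from openpype.pipeline.plugin_discover import discover, register_plugin


class MyBasePlugin(object):
    """Stand-in superclass; real callers pass e.g. LauncherAction."""


class MyPlugin(MyBasePlugin):
    pass


register_plugin(MyBasePlugin, MyPlugin)
# The superclass itself is ignored and abstract classes are filtered out
plugins = discover(MyBasePlugin)  # -> [MyPlugin]
```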
@@ -3,6 +3,7 @@ from .publish_plugins import (
     PublishXmlValidationError,
     KnownPublishError,
     OpenPypePyblishPluginMixin,
+    OptionalPyblishPluginMixin,
 )

 from .lib import (
@@ -18,6 +19,7 @@ __all__ = (
     "PublishXmlValidationError",
     "KnownPublishError",
     "OpenPypePyblishPluginMixin",
+    "OptionalPyblishPluginMixin",

     "DiscoverResult",
     "publish_plugins_discover",
@@ -1,3 +1,4 @@
+from openpype.lib import BoolDef
 from .lib import load_help_content_from_plugin
@@ -108,3 +109,64 @@ class OpenPypePyblishPluginMixin:
                 plugin_values[key]
             )
         return attribute_values
+
+    def get_attr_values_from_data(self, data):
+        """Get attribute values for attribute definitions from data.
+
+        Args:
+            data(dict): Data from instance or context.
+        """
+        return (
+            data
+            .get("publish_attributes", {})
+            .get(self.__class__.__name__, {})
+        )
+
+
+class OptionalPyblishPluginMixin(OpenPypePyblishPluginMixin):
+    """Prepare mixin for optional plugins.
+
+    Defines the 'active' attribute definition prepared for publishing and
+    provides a method which checks whether the plugin is active.
+
+    ```
+    class ValidateScene(
+        pyblish.api.InstancePlugin, OptionalPyblishPluginMixin
+    ):
+        def process(self, instance):
+            # Skip the instance if it is not active by data on the instance
+            if not self.is_active(instance.data):
+                return
+    ```
+    """
+
+    @classmethod
+    def get_attribute_defs(cls):
+        """Attribute definitions based on plugin's optional attribute."""
+
+        # Empty list if plugin is not optional
+        if not getattr(cls, "optional", None):
+            return []
+
+        # Get active value from class as default value
+        active = getattr(cls, "active", True)
+        # Return boolean stored under 'active' key with label of the class
+        label = cls.label or cls.__name__
+        return [
+            BoolDef("active", default=active, label=label)
+        ]
+
+    def is_active(self, data):
+        """Check if plugin is active for instance/context based on its data.
+
+        Args:
+            data(dict): Data from instance or context.
+        """
+        # Skip if not optional and return True
+        if not getattr(self, "optional", None):
+            return True
+        attr_values = self.get_attr_values_from_data(data)
+        active = attr_values.get("active")
+        if active is None:
+            active = getattr(self, "active", True)
+        return active
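How `is_active` resolves its value can be traced with plain dictionaries; the plugin name below is hypothetical:

```python
# The mixin reads data["publish_attributes"][<PluginClassName>]["active"]
# and falls back to the class-level `active` attribute.
instance_data = {
    "publish_attributes": {
        "ValidateScene": {"active": False}  # hypothetical plugin name
    }
}

plugin_values = instance_data.get("publish_attributes", {}).get(
    "ValidateScene", {}
)
active = plugin_values.get("active")
if active is None:
    active = True  # class-level default
print(active)  # False -> the plugin would skip processing
```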
@@ -2,6 +2,11 @@ import os
 import copy
 import logging

+from .plugin_discover import (
+    discover,
+    register_plugin,
+    register_plugin_path,
+)
 log = logging.getLogger(__name__)
@@ -126,21 +131,15 @@ class BinaryThumbnail(ThumbnailResolver):

 # Thumbnail resolvers
 def discover_thumbnail_resolvers():
-    import avalon.api
-
-    return avalon.api.discover(ThumbnailResolver)
+    return discover(ThumbnailResolver)


 def register_thumbnail_resolver(plugin):
-    import avalon.api
-
-    return avalon.api.register_plugin(ThumbnailResolver, plugin)
+    register_plugin(ThumbnailResolver, plugin)


 def register_thumbnail_resolver_path(path):
-    import avalon.api
-
-    return avalon.api.register_plugin_path(ThumbnailResolver, path)
+    register_plugin_path(ThumbnailResolver, path)


 register_thumbnail_resolver(TemplateResolver)
@@ -126,7 +126,8 @@ class DeleteOldVersions(load.SubsetLoaderPlugin):
                 os.remove(file_path)
                 self.log.debug("Removed file: {}".format(file_path))

-                remainders.remove(file_path_base)
+                if file_path_base in remainders:
+                    remainders.remove(file_path_base)
                 continue

             seq_path_base = os.path.split(seq_path)[1]
@@ -333,6 +334,8 @@ class DeleteOldVersions(load.SubsetLoaderPlugin):
     def main(self, data, remove_publish_folder):
         # Size of files.
         size = 0
+        if not data:
+            return size

         if remove_publish_folder:
             size = self.delete_whole_dir_paths(data["dir_paths"].values())
@@ -418,6 +421,8 @@ class DeleteOldVersions(load.SubsetLoaderPlugin):
             )

             data = self.get_data(context, versions_to_keep)
+            if not data:
+                continue

             size += self.main(data, remove_publish_folder)
             print("Progressing {}/{}".format(count + 1, len(contexts)))
openpype/plugins/publish/collect_cleanup_keys.py — 21 lines (new file)

@@ -0,0 +1,21 @@
"""
Requires:
    None
Provides:
    context
        - cleanupFullPaths (list)
        - cleanupEmptyDirs (list)
"""

import pyblish.api


class CollectCleanupKeys(pyblish.api.ContextPlugin):
    """Prepare keys for 'ExplicitCleanUp' plugin."""

    label = "Collect Cleanup Keys"
    order = pyblish.api.CollectorOrder

    def process(self, context):
        context.data["cleanupFullPaths"] = []
        context.data["cleanupEmptyDirs"] = []
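Any later plugin can then register paths for cleanup. A hedged sketch; the extractor name is made up, and it assumes an 'ExplicitCleanUp' plugin consumes these keys after integration:

```python
import tempfile

import pyblish.api


class ExtractSomething(pyblish.api.ContextPlugin):
    """Hypothetical extractor that leaves a temporary file behind."""

    label = "Extract Something (sketch)"
    order = pyblish.api.ExtractorOrder

    def process(self, context):
        staging_file = tempfile.NamedTemporaryFile(delete=False).name
        # Mark the temporary file for removal at the end of publishing
        context.data["cleanupFullPaths"].append(staging_file)
```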
@@ -7,8 +7,12 @@ import shutil
 from bson.objectid import ObjectId
 from pymongo import InsertOne, ReplaceOne
 import pyblish.api

 from avalon import api, io, schema
-from openpype.lib import create_hard_link
+from openpype.lib import (
+    create_hard_link,
+    filter_profiles
+)


 class IntegrateHeroVersion(pyblish.api.InstancePlugin):
@@ -17,7 +21,9 @@ class IntegrateHeroVersion(pyblish.api.InstancePlugin):
     order = pyblish.api.IntegratorOrder + 0.1

     optional = True
+    active = True

+    # Families are modified using settings
     families = [
         "model",
         "rig",
@@ -33,11 +39,13 @@ class IntegrateHeroVersion(pyblish.api.InstancePlugin):
         "project", "asset", "task", "subset", "representation",
         "family", "hierarchy", "task", "username"
     ]
-    # TODO add family filtering
     # QUESTION/TODO this process should happen on server if crashed due to
     # permissions error on files (files were used or user didn't have perms)
     # *but all other plugins must be successfully completed
+
+    template_name_profiles = []
+    _default_template_name = "hero"

     def process(self, instance):
         self.log.debug(
             "--- Integration of Hero version for subset `{}` begins.".format(
@@ -51,27 +59,35 @@ class IntegrateHeroVersion(pyblish.api.InstancePlugin):
             )
             return

-        project_name = api.Session["AVALON_PROJECT"]
+        template_key = self._get_template_key(instance)

-        # TODO raise error if Hero not set?
         anatomy = instance.context.data["anatomy"]
-        if "hero" not in anatomy.templates:
-            self.log.warning("!!! Anatomy does not have set `hero` key!")
-            return
-
-        if "path" not in anatomy.templates["hero"]:
+        project_name = api.Session["AVALON_PROJECT"]
+        if template_key not in anatomy.templates:
             self.log.warning((
-                "!!! There is not set `path` template in `hero` anatomy"
-                " for project \"{}\"."
-            ).format(project_name))
+                "!!! Anatomy of project \"{}\" does not have set"
+                " \"{}\" template key!"
+            ).format(project_name, template_key))
             return

-        hero_template = anatomy.templates["hero"]["path"]
+        if "path" not in anatomy.templates[template_key]:
+            self.log.warning((
+                "!!! There is not set \"path\" template in \"{}\" anatomy"
+                " for project \"{}\"."
+            ).format(template_key, project_name))
+            return
+
+        hero_template = anatomy.templates[template_key]["path"]
         self.log.debug("`hero` template check was successful. `{}`".format(
             hero_template
         ))

-        hero_publish_dir = self.get_publish_dir(instance)
+        self.integrate_instance(instance, template_key, hero_template)
+
+    def integrate_instance(self, instance, template_key, hero_template):
+        anatomy = instance.context.data["anatomy"]
+        published_repres = instance.data["published_representations"]
+        hero_publish_dir = self.get_publish_dir(instance, template_key)

         src_version_entity = instance.data.get("versionEntity")
         filtered_repre_ids = []
@@ -271,12 +287,12 @@ class IntegrateHeroVersion(pyblish.api.InstancePlugin):
                 continue

             # Prepare anatomy data
-            anatomy_data = repre_info["anatomy_data"]
+            anatomy_data = copy.deepcopy(repre_info["anatomy_data"])
             anatomy_data.pop("version", None)

             # Get filled path to repre context
             anatomy_filled = anatomy.format(anatomy_data)
-            template_filled = anatomy_filled["hero"]["path"]
+            template_filled = anatomy_filled[template_key]["path"]

             repre_data = {
                 "path": str(template_filled),
@@ -308,11 +324,11 @@ class IntegrateHeroVersion(pyblish.api.InstancePlugin):
             collections, remainders = clique.assemble(published_files)
             if remainders or not collections or len(collections) > 1:
                 raise Exception((
-                    "Integrity error. Files of published representation "
-                    "is combination of frame collections and single files."
-                    "Collections: `{}` Single files: `{}`"
-                ).format(str(collections),
-                         str(remainders)))
+                    "Integrity error. Files of published"
+                    " representation is combination of frame"
+                    " collections and single files. Collections:"
+                    " `{}` Single files: `{}`"
+                ).format(str(collections), str(remainders)))

             src_col = collections[0]
@@ -320,13 +336,10 @@ class IntegrateHeroVersion(pyblish.api.InstancePlugin):
             frame_splitter = "_-_FRAME_SPLIT_-_"
             anatomy_data["frame"] = frame_splitter
             _anatomy_filled = anatomy.format(anatomy_data)
-            _template_filled = _anatomy_filled["hero"]["path"]
+            _template_filled = _anatomy_filled[template_key]["path"]
             head, tail = _template_filled.split(frame_splitter)
             padding = int(
-                anatomy.templates["render"].get(
-                    "frame_padding",
-                    anatomy.templates["render"].get("padding")
-                )
+                anatomy.templates[template_key]["frame_padding"]
             )

             dst_col = clique.Collection(
@@ -444,6 +457,8 @@ class IntegrateHeroVersion(pyblish.api.InstancePlugin):
             backup_hero_publish_dir is not None and
             os.path.exists(backup_hero_publish_dir)
         ):
+            if os.path.exists(hero_publish_dir):
+                shutil.rmtree(hero_publish_dir)
             os.rename(backup_hero_publish_dir, hero_publish_dir)
             self.log.error((
                 "!!! Creating of hero version failed."
@@ -466,13 +481,13 @@ class IntegrateHeroVersion(pyblish.api.InstancePlugin):
                 files.append(_path)
         return files

-    def get_publish_dir(self, instance):
+    def get_publish_dir(self, instance, template_key):
         anatomy = instance.context.data["anatomy"]
         template_data = copy.deepcopy(instance.data["anatomyData"])

-        if "folder" in anatomy.templates["hero"]:
+        if "folder" in anatomy.templates[template_key]:
             anatomy_filled = anatomy.format(template_data)
-            publish_folder = anatomy_filled["hero"]["folder"]
+            publish_folder = anatomy_filled[template_key]["folder"]
         else:
             # This is for cases of Deprecated anatomy without `folder`
             # TODO remove when all clients have solved this issue
@@ -489,7 +504,7 @@ class IntegrateHeroVersion(pyblish.api.InstancePlugin):
             " key underneath `publish` (in global or for project `{}`)."
         ).format(project_name))

-        file_path = anatomy_filled["hero"]["path"]
+        file_path = anatomy_filled[template_key]["path"]
         # Directory
         publish_folder = os.path.dirname(file_path)
@@ -499,6 +514,38 @@ class IntegrateHeroVersion(pyblish.api.InstancePlugin):

         return publish_folder

+    def _get_template_key(self, instance):
+        anatomy_data = instance.data["anatomyData"]
+        task_data = anatomy_data.get("task") or {}
+        task_name = task_data.get("name")
+        task_type = task_data.get("type")
+        host_name = instance.context.data["hostName"]
+        # TODO raise error if Hero not set?
+        family = self.main_family_from_instance(instance)
+        key_values = {
+            "families": family,
+            "task_names": task_name,
+            "task_types": task_type,
+            "hosts": host_name
+        }
+        profile = filter_profiles(
+            self.template_name_profiles,
+            key_values,
+            logger=self.log
+        )
+        if profile:
+            template_name = profile["template_name"]
+        else:
+            template_name = self._default_template_name
+        return template_name
+
+    def main_family_from_instance(self, instance):
+        """Returns main family of entered instance."""
+        family = instance.data.get("family")
+        if not family:
+            family = instance.data["families"][0]
+        return family
+
     def copy_file(self, src_path, dst_path):
         # TODO check drives if are the same to check if can hardlink
         dirname = os.path.dirname(dst_path)
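`_get_template_key` delegates the matching to `openpype.lib.filter_profiles`; the sketch below shows the shape of the inputs with a naive stand-in filter (illustrative profile values, not the real matching rules):

```python
# Hypothetical profile list as it could come from project settings
template_name_profiles = [
    {
        "families": ["model"],
        "hosts": ["maya"],
        "task_types": [],
        "task_names": [],
        "template_name": "hero_maya",
    }
]
key_values = {
    "families": "model",
    "task_names": "modeling",
    "task_types": "Modeling",
    "hosts": "maya",
}


def naive_filter(profiles, key_values):
    # Stand-in for filter_profiles: an empty filter list matches anything
    for profile in profiles:
        if all(
            not profile.get(key) or value in profile[key]
            for key, value in key_values.items()
        ):
            return profile
    return None


profile = naive_filter(template_name_profiles, key_values)
template_name = profile["template_name"] if profile else "hero"
print(template_name)  # hero_maya
```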
@@ -564,22 +611,16 @@ class IntegrateHeroVersion(pyblish.api.InstancePlugin):
             src_file (string) - original file path
             dst_file (string) - hero file path
         """
-        _, rootless = anatomy.find_root_template_from_path(
-            dst_file
-        )
-        _, rtls_src = anatomy.find_root_template_from_path(
-            src_file
-        )
+        _, rootless = anatomy.find_root_template_from_path(dst_file)
+        _, rtls_src = anatomy.find_root_template_from_path(src_file)
         return path.replace(rtls_src, rootless)

     def _update_hash(self, hash, src_file_name, dst_file):
         """
         Updates hash value with proper hero name
         """
-        src_file_name = self._get_name_without_ext(
-            src_file_name)
-        hero_file_name = self._get_name_without_ext(
-            dst_file)
+        src_file_name = self._get_name_without_ext(src_file_name)
+        hero_file_name = self._get_name_without_ext(dst_file)
         return hash.replace(src_file_name, hero_file_name)

     def _get_name_without_ext(self, value):
@@ -151,7 +151,9 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
         "effect",
         "xgen",
         "hda",
-        "usd"
+        "usd",
+        "usdComposition",
+        "usdOverride"
     ]
     exclude_families = ["clip"]
     db_representation_context_keys = [
@@ -8,11 +8,11 @@ M_ENVIRONMENT_KEY = "__environment_keys__"
 # Metadata key for storing dynamic created labels
 M_DYNAMIC_KEY_LABEL = "__dynamic_keys_labels__"

-METADATA_KEYS = (
+METADATA_KEYS = frozenset([
     M_OVERRIDDEN_KEY,
     M_ENVIRONMENT_KEY,
     M_DYNAMIC_KEY_LABEL
-)
+])

 # Keys where studio's system overrides are stored
 GLOBAL_SETTINGS_KEY = "global_settings"
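The switch to `frozenset` buys constant-time membership tests and immutability for a constant that is only ever used for lookups. A sketch; the key values are shortened for illustration and only `M_ENVIRONMENT_KEY`/`M_DYNAMIC_KEY_LABEL` are confirmed by the diff:

```python
# Membership test against a frozenset is O(1) and the constant
# cannot be mutated by accident.
METADATA_KEYS = frozenset([
    "__overridden_keys__",      # illustrative; real values are the
    "__environment_keys__",     # M_* constants defined in this module
    "__dynamic_keys_labels__",
])

print("__environment_keys__" in METADATA_KEYS)  # True
```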
@@ -44,20 +44,6 @@
         "enabled": false,
         "profiles": []
     },
-    "IntegrateHeroVersion": {
-        "enabled": true,
-        "optional": true,
-        "families": [
-            "model",
-            "rig",
-            "look",
-            "pointcache",
-            "animation",
-            "setdress",
-            "layout",
-            "mayaScene"
-        ]
-    },
     "ExtractJpegEXR": {
         "enabled": true,
         "ffmpeg_args": {
@@ -206,6 +192,22 @@
             }
         ]
     },
+    "IntegrateHeroVersion": {
+        "enabled": true,
+        "optional": true,
+        "active": true,
+        "families": [
+            "model",
+            "rig",
+            "look",
+            "pointcache",
+            "animation",
+            "setdress",
+            "layout",
+            "mayaScene"
+        ],
+        "template_name_profiles": []
+    },
     "CleanUp": {
         "paterns": [],
         "remove_temp_renders": false
@@ -106,6 +106,9 @@
             ]
         }
     },
+    "ExtractReviewData": {
+        "enabled": false
+    },
     "ExtractReviewDataLut": {
         "enabled": false
     },
@@ -119,11 +122,10 @@
             "families": [],
             "sebsets": []
         },
-        "extension": "mov",
+        "read_raw": false,
         "viewer_process_override": "",
         "bake_viewer_process": true,
         "bake_viewer_input_process": true,
-        "add_tags": [],
         "reformat_node_add": false,
         "reformat_node_config": [
             {
@@ -151,7 +153,9 @@
                     "name": "pbb",
                     "value": false
                 }
-            ]
+            ],
+            "extension": "mov",
+            "add_tags": []
         }
     }
 },
@@ -12,13 +12,16 @@
         "flatten_subset_template": "",
         "color_code_mapping": []
     },
+    "CollectInstances": {
+        "flatten_subset_template": ""
+    },
     "ValidateContainers": {
         "enabled": true,
         "optional": true,
+        "active": true
     },
     "ValidateNaming": {
-        "invalid_chars": "[ \\\\/+\\*\\?\\(\\)\\[\\]\\{\\}:,]",
+        "invalid_chars": "[ \\\\/+\\*\\?\\(\\)\\[\\]\\{\\}:,;]",
         "replace_char": "_"
     },
     "ExtractImage": {
@@ -44,4 +47,4 @@
         "create_first_version": false,
         "custom_templates": []
     }
 }
@@ -11,6 +11,7 @@
                 "task_types": [],
                 "tasks": [],
                 "subsets": [],
+                "review_upload_limit": 50.0,
                 "channel_messages": []
             }
         ]
Some files were not shown because too many files have changed in this diff.