Merge branch 'develop' into bugfix/OP-8005_thumbnail_duration
client/ayon_core/__init__.py  (new file, +9)
@@ -0,0 +1,9 @@
import os


AYON_CORE_ROOT = os.path.dirname(os.path.abspath(__file__))

# TODO remove after '1.x.x'
PACKAGE_DIR = AYON_CORE_ROOT
PLUGINS_DIR = os.path.join(AYON_CORE_ROOT, "plugins")
AYON_SERVER_ENABLED = True
client/ayon_core/__main__.py  (new file, +7)
@@ -0,0 +1,7 @@
# -*- coding: utf-8 -*-
"""Main entry point for AYON command."""
from ayon_core import cli


if __name__ == "__main__":
    cli.main()
client/ayon_core/addon/README.md  (new file, +92)
@@ -0,0 +1,92 @@
# AYON addons

AYON addons contain separated logic for a specific kind of implementation, such as the ftrack connection and its usage code, Deadline farm rendering, or they may contain only special plugins. Addons and modules currently work the same way; there is no difference in functionality.

## Addons concept

- addons are dynamically imported based on the current AYON bundle

## Base class `AYONAddon`

- abstract class used as the base for each addon
- the implementation should contain the addon's api without GUI parts
- may implement the `get_global_environments` method, which should return a dictionary of globally applicable environments whose values are the same for the whole studio no matter which workstation they are launched on (except OS specific paths)
- abstract parts:
    - `name` attribute - name of the addon
    - `initialize` method - the addon's own initialization (it should not override `__init__`)
    - `connect_with_addons` method - where the addon may look for implementations of its interfaces or check for other addons
- `__init__` should not be overridden, and `initialize` should not do time consuming work, only prepare base data about the addon
    - also keep in mind that addons may be initialized in headless mode
- connection with other addons is made with the help of interfaces
- `cli` method - adds cli commands specific to the addon (see the sketch below)
    - command line arguments are handled using the `click_wrap` python module located in `ayon_core.addon`
    - the `cli` method should expect a single argument, a click group, on which any group specific method can be called (e.g. `add_command` to add another click group as a child; see `ExampleAddon`)
    - the addon's cli commands can then be triggered using `./ayon addon <addon name> <command> *args`

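A minimal sketch of an addon honoring the rules above (the class name, settings keys, and the exact `initialize`/`connect_with_addons` signatures are illustrative, not authoritative):

```python
from ayon_core.addon import AYONAddon


class ExampleAddon(AYONAddon):
    """Example addon skeleton (hypothetical)."""

    # Abstract attribute: addon name
    name = "example"

    def initialize(self, settings):
        # Only prepare cheap base data here; no time consuming work
        self._enabled = settings.get("example", {}).get("enabled", True)

    def connect_with_addons(self, enabled_addons):
        # Optionally inspect other enabled addons or interface
        # implementations here
        pass

    def cli(self, click_group):
        # Attach addon specific cli commands here (see 'click_wrap')
        pass
```
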
# Interfaces

- an interface is a class that defines abstract methods to implement and may contain pre-implemented helper methods
- an addon that inherits from an interface must implement those abstract methods, otherwise it won't be initialized
- this makes it easy to find out which interfaces an addon object inherits from, with a 100% chance that the required methods are implemented
- default interfaces are defined in `interfaces.py`

## IPluginPaths

- for an addon that wants to add directory path(s) to avalon or publish plugins
- the addon must implement `get_plugin_paths`, which must return a dictionary with the possible keys `"publish"`, `"load"`, `"create"` or `"actions"` (see the sketch below)
- each key may contain a list or a string with a path to a directory with plugins

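A hedged sketch of an `IPluginPaths` implementation (the addon name and plugin directories are hypothetical):

```python
import os

from ayon_core.addon import AYONAddon, IPluginPaths

CURRENT_DIR = os.path.dirname(os.path.abspath(__file__))


class ExamplePluginsAddon(AYONAddon, IPluginPaths):
    name = "example_plugins"

    def initialize(self, settings):
        pass

    def get_plugin_paths(self):
        # Values may be a single path string or a list of paths
        return {
            "publish": [os.path.join(CURRENT_DIR, "plugins", "publish")],
            "load": os.path.join(CURRENT_DIR, "plugins", "load"),
        }
```
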
## ITrayModule

- for an addon that has more logic when used in the tray
- it is possible that an addon can be used only in the tray
- abstract methods:
    - `tray_init` - initialization triggered after `initialize` when used in `TrayModulesManager` and before `connect_with_addons`
    - `tray_menu` - adds actions representing the addon to the tray widget's menu
    - `tray_start` - start of the addon's logic in the tray
        - the addon is initialized and connected with other addons
    - `tray_exit` - the addon's cleanup, like stopping and joining threads etc.
    - the order of calling is based on the implementation; this order is how it works with `TrayModulesManager`
    - it is recommended to import and use GUI implementations only in these methods
- has the attribute `tray_initialized` (bool), which is False by default and is set to True by `TrayModulesManager` after `tray_init`
    - an addon that has logic only for the tray, or for both, should check the `tray_initialized` attribute to decide how to handle each situation

### ITrayService

- inherits from `ITrayModule` and implements the `tray_menu` method for you
- adds an action, with icon and label, to the "Services" submenu of the tray widget menu
- abstract attribute `label`
    - label shown in the menu
- the interface has pre-implemented methods to change the icon color
    - `set_service_running` - green icon
    - `set_service_failed` - red icon
    - `set_service_idle` - orange icon
    - these states must be set by the addon itself; `set_service_running` is the default state on initialization

### ITrayAction

- inherits from `ITrayModule` and implements the `tray_menu` method for you
- adds an action with a label to the tray widget menu
- abstract attribute `label`
    - label shown in the menu
- abstract method `on_action_trigger`
    - what should happen when the action is triggered
- NOTE: It is a good idea to move the logic of `on_action_trigger` into an api method and trigger that method from the callback, as sketched below. This gives the ability to trigger that method outside the tray.

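A hedged sketch of that callback pattern (the `show_tool` api method is hypothetical):

```python
from ayon_core.addon import AYONAddon, ITrayAction


class ExampleActionAddon(AYONAddon, ITrayAction):
    name = "example_action"
    label = "Example Action"

    def initialize(self, settings):
        pass

    def tray_init(self):
        pass

    def on_action_trigger(self):
        # The menu callback only forwards to the public api method
        self.show_tool()

    def show_tool(self):
        # Public api method that can also be triggered outside the tray
        print("Showing example tool")
```
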
### AddonsManager

- collects addon classes and tries to initialize them
- important attributes:
    - `addons` - list of available addons
    - `addons_by_id` - dictionary of addons mapped by their ids
    - `addons_by_name` - dictionary of addons mapped by their names
    - all these attributes contain all found addons, even those that are not enabled
- helper methods (a usage sketch follows this section):
    - `collect_global_environments` collects all global environments from enabled addons by calling `get_global_environments` on each of them
    - `collect_plugin_paths` collects plugin paths from all enabled addons
        - the output is always a dictionary with all keys, and each value is a list
          ```
          {
              "publish": [],
              "create": [],
              "load": [],
              "actions": [],
              "inventory": []
          }
          ```
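A short usage sketch of the manager's helper methods described above:

```python
from ayon_core.addon import AddonsManager

manager = AddonsManager()
# Global environments from all enabled addons
envs = manager.collect_global_environments()
# Plugin paths grouped by type; every key maps to a list
plugin_paths = manager.collect_plugin_paths()
print(plugin_paths["publish"])
```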

### TrayAddonsManager

- inherits from `AddonsManager`
- has a specific implementation for the Pype Tray tool and handles `ITrayModule` methods
client/ayon_core/addon/__init__.py  (new file, +32)
@@ -0,0 +1,32 @@
# -*- coding: utf-8 -*-
from . import click_wrap
from .interfaces import (
    IPluginPaths,
    ITrayAddon,
    ITrayAction,
    ITrayService,
    IHostAddon,
)

from .base import (
    AYONAddon,
    AddonsManager,
    TrayAddonsManager,
    load_addons,
)


__all__ = (
    "click_wrap",

    "IPluginPaths",
    "ITrayAddon",
    "ITrayAction",
    "ITrayService",
    "IHostAddon",

    "AYONAddon",
    "AddonsManager",
    "TrayAddonsManager",
    "load_addons",
)
client/ayon_core/addon/base.py  (new file, +1417; diff not expanded)

client/ayon_core/addon/click_wrap.py  (new file, +365)
@@ -0,0 +1,365 @@
"""Simplified wrapper for 'click' python module.
|
||||
|
||||
Module 'click' is used as main cli handler in AYON. Addons can
|
||||
register their own subcommands with options. This wrapper allows to define
|
||||
commands and options as with 'click', but without any dependency.
|
||||
|
||||
Why not to use 'click' directly? Version of 'click' used in AYON
|
||||
is not compatible with 'click' version used in some DCCs (e.g. Houdini 20+).
|
||||
And updating 'click' would break other DCCs.
|
||||
|
||||
How to use it? If you already have cli commands defined in addon, just replace
|
||||
'click' with 'click_wrap' and it should work and modify your addon's cli
|
||||
method to convert 'click_wrap' object to 'click' object.
|
||||
|
||||
Before
|
||||
```python
|
||||
import click
|
||||
from ayon_core.modules import AYONAddon
|
||||
|
||||
|
||||
class ExampleAddon(AYONAddon):
|
||||
name = "example"
|
||||
|
||||
def cli(self, click_group):
|
||||
click_group.add_command(cli_main)
|
||||
|
||||
|
||||
@click.group(ExampleAddon.name, help="Example addon")
|
||||
def cli_main():
|
||||
pass
|
||||
|
||||
|
||||
@cli_main.command(help="Example command")
|
||||
@click.option("--arg1", help="Example argument 1", default="default1")
|
||||
@click.option("--arg2", help="Example argument 2", is_flag=True)
|
||||
def mycommand(arg1, arg2):
|
||||
print(arg1, arg2)
|
||||
```
|
||||
|
||||
Now
|
||||
```
|
||||
from ayon_core import click_wrap
|
||||
from ayon_core.modules import AYONAddon
|
||||
|
||||
|
||||
class ExampleAddon(AYONAddon):
|
||||
name = "example"
|
||||
|
||||
def cli(self, click_group):
|
||||
click_group.add_command(cli_main.to_click_obj())
|
||||
|
||||
|
||||
@click_wrap.group(ExampleAddon.name, help="Example addon")
|
||||
def cli_main():
|
||||
pass
|
||||
|
||||
|
||||
@cli_main.command(help="Example command")
|
||||
@click_wrap.option("--arg1", help="Example argument 1", default="default1")
|
||||
@click_wrap.option("--arg2", help="Example argument 2", is_flag=True)
|
||||
def mycommand(arg1, arg2):
|
||||
print(arg1, arg2)
|
||||
```
|
||||
|
||||
|
||||
Added small enhancements:
|
||||
- most of the methods can be used as chained calls
|
||||
- functions/methods 'command' and 'group' can be used in a way that
|
||||
first argument is callback function and the rest are arguments
|
||||
for click
|
||||
|
||||
Example:
|
||||
```python
|
||||
from ayon_core import click_wrap
|
||||
from ayon_core.modules import AYONAddon
|
||||
|
||||
|
||||
class ExampleAddon(AYONAddon):
|
||||
name = "example"
|
||||
|
||||
def cli(self, click_group):
|
||||
# Define main command (name 'example')
|
||||
main = click_wrap.group(
|
||||
self._cli_main, name=self.name, help="Example addon"
|
||||
)
|
||||
# Add subcommand (name 'mycommand')
|
||||
(
|
||||
main.command(
|
||||
self._cli_command, name="mycommand", help="Example command"
|
||||
)
|
||||
.option(
|
||||
"--arg1", help="Example argument 1", default="default1"
|
||||
)
|
||||
.option(
|
||||
"--arg2", help="Example argument 2", is_flag=True,
|
||||
)
|
||||
)
|
||||
# Convert main command to click object and add it to parent group
|
||||
click_group.add_command(main.to_click_obj())
|
||||
|
||||
def _cli_main(self):
|
||||
pass
|
||||
|
||||
def _cli_command(self, arg1, arg2):
|
||||
print(arg1, arg2)
|
||||
```
|
||||
|
||||
```shell
|
||||
ayon addon example mycommand --arg1 value1 --arg2
|
||||
```
|
||||
"""
|
||||
|
||||
import collections
|
||||
|
||||
FUNC_ATTR_NAME = "__ayon_cli_options__"
|
||||
|
||||
|
||||
class Command(object):
|
||||
def __init__(self, func, *args, **kwargs):
|
||||
# Command function
|
||||
self._func = func
|
||||
# Command definition arguments
|
||||
self._args = args
|
||||
# Command definition kwargs
|
||||
self._kwargs = kwargs
|
||||
# Both 'options' and 'arguments' are stored to the same variable
|
||||
# - keep order of options and arguments
|
||||
self._options = getattr(func, FUNC_ATTR_NAME, [])
|
||||
|
||||
def to_click_obj(self):
|
||||
"""Converts this object to click object.
|
||||
|
||||
Returns:
|
||||
click.Command: Click command object.
|
||||
"""
|
||||
return convert_to_click(self)
|
||||
|
||||
# --- Methods for 'convert_to_click' function ---
|
||||
def get_args(self):
|
||||
"""
|
||||
Returns:
|
||||
tuple: Command definition arguments.
|
||||
"""
|
||||
return self._args
|
||||
|
||||
def get_kwargs(self):
|
||||
"""
|
||||
Returns:
|
||||
dict[str, Any]: Command definition kwargs.
|
||||
"""
|
||||
return self._kwargs
|
||||
|
||||
def get_func(self):
|
||||
"""
|
||||
Returns:
|
||||
Function: Function to invoke on command trigger.
|
||||
"""
|
||||
return self._func
|
||||
|
||||
def iter_options(self):
|
||||
"""
|
||||
Yields:
|
||||
tuple[str, tuple, dict]: Option type name with args and kwargs.
|
||||
"""
|
||||
for item in self._options:
|
||||
yield item
|
||||
# -----------------------------------------------
|
||||
|
||||
def add_option(self, *args, **kwargs):
|
||||
return self.add_option_by_type("option", *args, **kwargs)
|
||||
|
||||
def add_argument(self, *args, **kwargs):
|
||||
return self.add_option_by_type("argument", *args, **kwargs)
|
||||
|
||||
option = add_option
|
||||
argument = add_argument
|
||||
|
||||
def add_option_by_type(self, option_name, *args, **kwargs):
|
||||
self._options.append((option_name, args, kwargs))
|
||||
return self
|
||||
|
||||
|
||||
class Group(Command):
|
||||
def __init__(self, func, *args, **kwargs):
|
||||
super(Group, self).__init__(func, *args, **kwargs)
|
||||
# Store sub-groupd and sub-commands to the same variable
|
||||
self._commands = []
|
||||
|
||||
# --- Methods for 'convert_to_click' function ---
|
||||
def iter_commands(self):
|
||||
for command in self._commands:
|
||||
yield command
|
||||
# -----------------------------------------------
|
||||
|
||||
def add_command(self, command):
|
||||
"""Add prepared command object as child.
|
||||
|
||||
Args:
|
||||
command (Command): Prepared command object.
|
||||
"""
|
||||
if command not in self._commands:
|
||||
self._commands.append(command)
|
||||
|
||||
def add_group(self, group):
|
||||
"""Add prepared group object as child.
|
||||
|
||||
Args:
|
||||
group (Group): Prepared group object.
|
||||
"""
|
||||
if group not in self._commands:
|
||||
self._commands.append(group)
|
||||
|
||||
def command(self, *args, **kwargs):
|
||||
"""Add child command.
|
||||
|
||||
Returns:
|
||||
Union[Command, Function]: New command object, or wrapper function.
|
||||
"""
|
||||
return self._add_new(Command, *args, **kwargs)
|
||||
|
||||
def group(self, *args, **kwargs):
|
||||
"""Add child group.
|
||||
|
||||
Returns:
|
||||
Union[Group, Function]: New group object, or wrapper function.
|
||||
"""
|
||||
return self._add_new(Group, *args, **kwargs)
|
||||
|
||||
def _add_new(self, target_cls, *args, **kwargs):
|
||||
func = None
|
||||
if args and callable(args[0]):
|
||||
args = list(args)
|
||||
func = args.pop(0)
|
||||
args = tuple(args)
|
||||
|
||||
def decorator(_func):
|
||||
out = target_cls(_func, *args, **kwargs)
|
||||
self._commands.append(out)
|
||||
return out
|
||||
|
||||
if func is not None:
|
||||
return decorator(func)
|
||||
return decorator
|
||||
|
||||
|
||||
def convert_to_click(obj_to_convert):
|
||||
"""Convert wrapped object to click object.
|
||||
|
||||
Args:
|
||||
obj_to_convert (Command): Object to convert to click object.
|
||||
|
||||
Returns:
|
||||
click.Command: Click command object.
|
||||
"""
|
||||
import click
|
||||
|
||||
commands_queue = collections.deque()
|
||||
commands_queue.append((obj_to_convert, None))
|
||||
top_obj = None
|
||||
while commands_queue:
|
||||
item = commands_queue.popleft()
|
||||
command_obj, parent_obj = item
|
||||
if not isinstance(command_obj, Command):
|
||||
raise TypeError(
|
||||
"Invalid type '{}' expected 'Command'".format(
|
||||
type(command_obj)
|
||||
)
|
||||
)
|
||||
|
||||
if isinstance(command_obj, Group):
|
||||
click_obj = (
|
||||
click.group(
|
||||
*command_obj.get_args(),
|
||||
**command_obj.get_kwargs()
|
||||
)(command_obj.get_func())
|
||||
)
|
||||
|
||||
else:
|
||||
click_obj = (
|
||||
click.command(
|
||||
*command_obj.get_args(),
|
||||
**command_obj.get_kwargs()
|
||||
)(command_obj.get_func())
|
||||
)
|
||||
|
||||
for item in command_obj.iter_options():
|
||||
option_name, args, kwargs = item
|
||||
if option_name == "option":
|
||||
click.option(*args, **kwargs)(click_obj)
|
||||
elif option_name == "argument":
|
||||
click.argument(*args, **kwargs)(click_obj)
|
||||
else:
|
||||
raise ValueError(
|
||||
"Invalid option name '{}'".format(option_name)
|
||||
)
|
||||
|
||||
if top_obj is None:
|
||||
top_obj = click_obj
|
||||
|
||||
if parent_obj is not None:
|
||||
parent_obj.add_command(click_obj)
|
||||
|
||||
if isinstance(command_obj, Group):
|
||||
for command in command_obj.iter_commands():
|
||||
commands_queue.append((command, click_obj))
|
||||
|
||||
return top_obj
|
||||
|
||||
|
||||
def group(*args, **kwargs):
|
||||
func = None
|
||||
if args and callable(args[0]):
|
||||
args = list(args)
|
||||
func = args.pop(0)
|
||||
args = tuple(args)
|
||||
|
||||
def decorator(_func):
|
||||
return Group(_func, *args, **kwargs)
|
||||
|
||||
if func is not None:
|
||||
return decorator(func)
|
||||
return decorator
|
||||
|
||||
|
||||
def command(*args, **kwargs):
|
||||
func = None
|
||||
if args and callable(args[0]):
|
||||
args = list(args)
|
||||
func = args.pop(0)
|
||||
args = tuple(args)
|
||||
|
||||
def decorator(_func):
|
||||
return Command(_func, *args, **kwargs)
|
||||
|
||||
if func is not None:
|
||||
return decorator(func)
|
||||
return decorator
|
||||
|
||||
|
||||
def argument(*args, **kwargs):
|
||||
def decorator(func):
|
||||
return _add_option_to_func(
|
||||
func, "argument", *args, **kwargs
|
||||
)
|
||||
return decorator
|
||||
|
||||
|
||||
def option(*args, **kwargs):
|
||||
def decorator(func):
|
||||
return _add_option_to_func(
|
||||
func, "option", *args, **kwargs
|
||||
)
|
||||
return decorator
|
||||
|
||||
|
||||
def _add_option_to_func(func, option_name, *args, **kwargs):
|
||||
if isinstance(func, Command):
|
||||
func.add_option_by_type(option_name, *args, **kwargs)
|
||||
return func
|
||||
|
||||
if not hasattr(func, FUNC_ATTR_NAME):
|
||||
setattr(func, FUNC_ATTR_NAME, [])
|
||||
cli_options = getattr(func, FUNC_ATTR_NAME)
|
||||
cli_options.append((option_name, args, kwargs))
|
||||
return func
|
||||
client/ayon_core/addon/interfaces.py  (new file, +385)
@@ -0,0 +1,385 @@
from abc import ABCMeta, abstractmethod

import six

from ayon_core import resources


class _AYONInterfaceMeta(ABCMeta):
    """AYONInterface metaclass to print a proper string."""

    def __str__(self):
        return "<'AYONInterface.{}'>".format(self.__name__)

    def __repr__(self):
        return str(self)


@six.add_metaclass(_AYONInterfaceMeta)
class AYONInterface:
    """Base class of an interface, usable as a mixin with abstract parts.

    This is the way an AYON addon can declare that it contains specific
    predefined functionality.

    Child classes of AYONInterface may be used as mixins in different
    AYON addons, which means they have to implement the methods defined
    in the interface. By default, an interface does not have any abstract
    parts.
    """

    pass


class IPluginPaths(AYONInterface):
    """Addon has plugin paths to return.

    The expected result is a dictionary with the keys "publish", "create",
    "load", "actions" or "inventory" and values as a list or string.
    {
        "publish": ["path/to/publish_plugins"]
    }
    """

    @abstractmethod
    def get_plugin_paths(self):
        pass

    def _get_plugin_paths_by_type(self, plugin_type):
        paths = self.get_plugin_paths()
        if not paths or plugin_type not in paths:
            return []

        paths = paths[plugin_type]
        if not paths:
            return []

        if not isinstance(paths, (list, tuple, set)):
            paths = [paths]
        return paths

    def get_create_plugin_paths(self, host_name):
        """Receive create plugin paths.

        Gives addons the ability to add create plugin paths based on the
        host name.

        Notes:
            Default implementation uses 'get_plugin_paths' and always
            returns all create plugin paths.

        Args:
            host_name (str): For which host the plugins are meant.
        """

        return self._get_plugin_paths_by_type("create")

    def get_load_plugin_paths(self, host_name):
        """Receive load plugin paths.

        Gives addons the ability to add load plugin paths based on the
        host name.

        Notes:
            Default implementation uses 'get_plugin_paths' and always
            returns all load plugin paths.

        Args:
            host_name (str): For which host the plugins are meant.
        """

        return self._get_plugin_paths_by_type("load")

    def get_publish_plugin_paths(self, host_name):
        """Receive publish plugin paths.

        Gives addons the ability to add publish plugin paths based on the
        host name.

        Notes:
            Default implementation uses 'get_plugin_paths' and always
            returns all publish plugin paths.

        Args:
            host_name (str): For which host the plugins are meant.
        """

        return self._get_plugin_paths_by_type("publish")

    def get_inventory_action_paths(self, host_name):
        """Receive inventory action paths.

        Gives addons the ability to add inventory action plugin paths.

        Notes:
            Default implementation uses 'get_plugin_paths' and always
            returns all inventory action paths.

        Args:
            host_name (str): For which host the plugins are meant.
        """

        return self._get_plugin_paths_by_type("inventory")


class ITrayAddon(AYONInterface):
    """Addon has special procedures when used in the Tray tool.

    IMPORTANT:
        The addon still must be usable even when not used in the tray,
        even if it would do nothing.
    """

    tray_initialized = False
    _tray_manager = None

    @abstractmethod
    def tray_init(self):
        """Initialization part of the tray implementation.

        Triggered between `initialization` and `connect_with_addons`.

        This is where GUIs should be loaded or tray specific parts should
        be prepared.
        """

        pass

    @abstractmethod
    def tray_menu(self, tray_menu):
        """Add the addon's action to the tray menu."""

        pass

    @abstractmethod
    def tray_start(self):
        """Start procedure in the tray tool."""

        pass

    @abstractmethod
    def tray_exit(self):
        """Cleanup method executed on tray shutdown.

        This is the place where all threads should be shut down.
        """

        pass

    def execute_in_main_thread(self, callback):
        """Push the callback to the queue or process it on the main thread.

        Some callbacks need to be processed on the main thread (menu
        actions must be added on the main thread or they won't get
        triggered etc.)
        """

        if not self.tray_initialized:
            # TODO Called without initialized tray; main thread still needed
            try:
                callback()

            except Exception:
                self.log.warning(
                    "Failed to execute {} in main thread".format(callback),
                    exc_info=True)

            return
        self.manager.tray_manager.execute_in_main_thread(callback)

    def show_tray_message(self, title, message, icon=None, msecs=None):
        """Show a tray message.

        Args:
            title (str): Title of the message.
            message (str): Content of the message.
            icon (QSystemTrayIcon.MessageIcon): The message's icon. Default
                is the Information icon; may differ by Qt version.
            msecs (int): Duration of message visibility in milliseconds.
                Default is 10000 msecs; may differ by Qt version.
        """

        if self._tray_manager:
            self._tray_manager.show_tray_message(title, message, icon, msecs)

    def add_doubleclick_callback(self, callback):
        if hasattr(self.manager, "add_doubleclick_callback"):
            self.manager.add_doubleclick_callback(self, callback)


class ITrayAction(ITrayAddon):
    """Implementation of a tray action.

    Adds an action to the tray menu which triggers `on_action_trigger`.
    It is expected to be used for showing tools.

    Methods `tray_start`, `tray_exit` and `connect_with_addons` are
    overridden, as an action is not expected to use them. But it is
    possible if necessary.
    """

    admin_action = False
    _admin_submenu = None
    _action_item = None

    @property
    @abstractmethod
    def label(self):
        """Action label shown in the menu."""
        pass

    @abstractmethod
    def on_action_trigger(self):
        """What happens on the action's click."""
        pass

    def tray_menu(self, tray_menu):
        from qtpy import QtWidgets

        if self.admin_action:
            menu = self.admin_submenu(tray_menu)
            action = QtWidgets.QAction(self.label, menu)
            menu.addAction(action)
            if not menu.menuAction().isVisible():
                menu.menuAction().setVisible(True)

        else:
            action = QtWidgets.QAction(self.label, tray_menu)
            tray_menu.addAction(action)

        action.triggered.connect(self.on_action_trigger)
        self._action_item = action

    def tray_start(self):
        return

    def tray_exit(self):
        return

    @staticmethod
    def admin_submenu(tray_menu):
        if ITrayAction._admin_submenu is None:
            from qtpy import QtWidgets

            admin_submenu = QtWidgets.QMenu("Admin", tray_menu)
            admin_submenu.menuAction().setVisible(False)
            ITrayAction._admin_submenu = admin_submenu
        return ITrayAction._admin_submenu


class ITrayService(ITrayAddon):
    # Module's property
    menu_action = None

    # Class properties
    _services_submenu = None
    _icon_failed = None
    _icon_running = None
    _icon_idle = None

    @property
    @abstractmethod
    def label(self):
        """Service label shown in the menu."""
        pass

    # TODO be able to get any sort of information to show/print
    # @abstractmethod
    # def get_service_info(self):
    #     pass

    @staticmethod
    def services_submenu(tray_menu):
        if ITrayService._services_submenu is None:
            from qtpy import QtWidgets

            services_submenu = QtWidgets.QMenu("Services", tray_menu)
            services_submenu.menuAction().setVisible(False)
            ITrayService._services_submenu = services_submenu
        return ITrayService._services_submenu

    @staticmethod
    def add_service_action(action):
        ITrayService._services_submenu.addAction(action)
        if not ITrayService._services_submenu.menuAction().isVisible():
            ITrayService._services_submenu.menuAction().setVisible(True)

    @staticmethod
    def _load_service_icons():
        from qtpy import QtGui

        ITrayService._icon_failed = QtGui.QIcon(
            resources.get_resource("icons", "circle_red.png")
        )
        ITrayService._icon_running = QtGui.QIcon(
            resources.get_resource("icons", "circle_green.png")
        )
        ITrayService._icon_idle = QtGui.QIcon(
            resources.get_resource("icons", "circle_orange.png")
        )

    @staticmethod
    def get_icon_running():
        if ITrayService._icon_running is None:
            ITrayService._load_service_icons()
        return ITrayService._icon_running

    @staticmethod
    def get_icon_idle():
        if ITrayService._icon_idle is None:
            ITrayService._load_service_icons()
        return ITrayService._icon_idle

    @staticmethod
    def get_icon_failed():
        if ITrayService._icon_failed is None:
            ITrayService._load_service_icons()
        return ITrayService._icon_failed

    def tray_menu(self, tray_menu):
        from qtpy import QtWidgets

        action = QtWidgets.QAction(
            self.label,
            self.services_submenu(tray_menu)
        )
        self.menu_action = action

        self.add_service_action(action)

        self.set_service_running_icon()

    def set_service_running_icon(self):
        """Change the QAction icon to a green circle."""

        if self.menu_action:
            self.menu_action.setIcon(self.get_icon_running())

    def set_service_failed_icon(self):
        """Change the QAction icon to a red circle."""

        if self.menu_action:
            self.menu_action.setIcon(self.get_icon_failed())

    def set_service_idle_icon(self):
        """Change the QAction icon to an orange circle."""

        if self.menu_action:
            self.menu_action.setIcon(self.get_icon_idle())


class IHostAddon(AYONInterface):
    """Addon which also contains a host implementation."""

    @property
    @abstractmethod
    def host_name(self):
        """Name of the host which the addon represents."""

        pass

    def get_workfile_extensions(self):
        """Define workfile extensions for the host.

        Not all hosts support workfiles, so this is an optional
        implementation.

        Returns:
            List[str]: Extensions used for workfiles, with dot.
        """

        return []
client/ayon_core/cli.py  (new file, +275)
@@ -0,0 +1,275 @@
# -*- coding: utf-8 -*-
"""Package for handling AYON command line arguments."""
import os
import sys
import code
import traceback

import click
import acre

from ayon_core import AYON_CORE_ROOT
from ayon_core.addon import AddonsManager
from ayon_core.settings import get_general_environments

from .cli_commands import Commands


class AliasedGroup(click.Group):
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self._aliases = {}

    def set_alias(self, src_name, dst_name):
        self._aliases[dst_name] = src_name

    def get_command(self, ctx, cmd_name):
        if cmd_name in self._aliases:
            cmd_name = self._aliases[cmd_name]
        return super().get_command(ctx, cmd_name)


@click.group(cls=AliasedGroup, invoke_without_command=True)
@click.pass_context
@click.option("--use-staging", is_flag=True,
              expose_value=False, help="use staging variants")
@click.option("--debug", is_flag=True, expose_value=False,
              help="Enable debug")
@click.option("--verbose", expose_value=False,
              help=("Change AYON log level (debug - critical or 0-50)"))
def main_cli(ctx):
    """AYON is the main command serving as the entry point to the
    pipeline system.

    It wraps different commands together.
    """

    if ctx.invoked_subcommand is None:
        # Print help if headless mode is used
        if os.getenv("AYON_HEADLESS_MODE") == "1":
            print(ctx.get_help())
            sys.exit(0)
        else:
            ctx.invoke(tray)


@main_cli.command()
def tray():
    """Launch the AYON tray.

    The default action of the AYON command is to launch the tray widget,
    which controls basic aspects of AYON. See the documentation for more
    information.
    """
    Commands.launch_tray()


@Commands.add_addons
@main_cli.group(help="Run command line arguments of AYON addons")
@click.pass_context
def addon(ctx):
    """Addon specific commands created dynamically.

    These commands are generated dynamically by currently loaded addons.
    """
    pass


# Add 'module' as an alias for 'addon'
main_cli.set_alias("addon", "module")


@main_cli.command()
@click.argument("output_json_path")
@click.option("--project", help="Project name", default=None)
@click.option("--asset", help="Asset name", default=None)
@click.option("--task", help="Task name", default=None)
@click.option("--app", help="Application name", default=None)
@click.option(
    "--envgroup", help="Environment group (e.g. \"farm\")", default=None
)
def extractenvironments(output_json_path, project, asset, task, app, envgroup):
    """Extract environment variables for the entered context to a json file.

    The entered output filepath will be created if it does not exist.

    All context options must be passed, otherwise only AYON's global
    environments will be extracted.

    Context options are "project", "asset", "task", "app".
    """
    Commands.extractenvironments(
        output_json_path, project, asset, task, app, envgroup
    )


@main_cli.command()
@click.argument("paths", nargs=-1)
@click.option("-t", "--targets", help="Targets module", default=None,
              multiple=True)
@click.option("-g", "--gui", is_flag=True,
              help="Show Publish UI", default=False)
def publish(paths, targets, gui):
    """Start CLI publishing.

    Publish collects json files from the paths provided as arguments.
    More than one path is allowed.
    """

    Commands.publish(list(paths), targets, gui)


@main_cli.command(context_settings={"ignore_unknown_options": True})
def publish_report_viewer():
    from ayon_core.tools.publisher.publish_report_viewer import main

    sys.exit(main())


@main_cli.command()
@click.argument("output_path")
@click.option("--project", help="Define project context")
@click.option("--asset", help="Define asset in project (project must be set)")
@click.option(
    "--strict",
    is_flag=True,
    help="Full context must be set otherwise dialog can't be closed."
)
def contextselection(
    output_path,
    project,
    asset,
    strict
):
    """Show a Qt dialog to select context.

    Context is project name, asset name and task name. The result is
    stored into a json file whose path is passed in the first argument.
    """
    Commands.contextselection(
        output_path,
        project,
        asset,
        strict
    )


@main_cli.command(
    context_settings=dict(
        ignore_unknown_options=True,
        allow_extra_args=True))
@click.argument("script", required=True, type=click.Path(exists=True))
def run(script):
    """Run a python script in the AYON context."""
    import runpy

    if not script:
        print("Error: missing path to script file.")
    else:
        # Remove the 'run' command and the script path from sys.argv so the
        # script sees only its own arguments
        args = sys.argv
        args.remove("run")
        args.remove(script)
        sys.argv = args

        args_string = " ".join(args[1:])
        print(f"... running: {script} {args_string}")
        runpy.run_path(script, run_name="__main__")


@main_cli.command()
def interactive():
    """Interactive (Python like) console.

    A helpful command, not only for development, to work directly with
    the python interpreter.

    Warning:
        The 'ayon.exe' executable on Windows won't work.
    """
    version = os.environ["AYON_VERSION"]
    banner = (
        f"AYON launcher {version}\nPython {sys.version} on {sys.platform}"
    )
    code.interact(banner)


@main_cli.command()
@click.option("--build", help="Print only build version",
              is_flag=True, default=False)
def version(build):
    """Print the AYON launcher version.

    Deprecated:
        This function has questionable usage.
    """
    print(os.environ["AYON_VERSION"])


def _set_global_environments() -> None:
    """Set global AYON environments."""
    general_env = get_general_environments()

    # First resolve the general environment, because merge doesn't expect
    #   values to be lists.
    # TODO: switch to AYON environment functions
    merged_env = acre.merge(
        acre.compute(acre.parse(general_env), cleanup=False),
        dict(os.environ)
    )
    env = acre.compute(
        merged_env,
        cleanup=False
    )
    os.environ.clear()
    os.environ.update(env)

    # Hardcoded default values
    os.environ["PYBLISH_GUI"] = "pyblish_pype"
    # Change the scale factor only if it is not set
    if "QT_AUTO_SCREEN_SCALE_FACTOR" not in os.environ:
        os.environ["QT_AUTO_SCREEN_SCALE_FACTOR"] = "1"


def _set_addons_environments():
    """Set global environments for AYON addons."""

    addons_manager = AddonsManager()

    # Merge environments with current environments and update values
    if module_envs := addons_manager.collect_global_environments():
        parsed_envs = acre.parse(module_envs)
        env = acre.merge(parsed_envs, dict(os.environ))
        os.environ.clear()
        os.environ.update(env)


def main(*args, **kwargs):
    python_path = os.getenv("PYTHONPATH", "")
    split_paths = python_path.split(os.pathsep)

    additional_paths = [
        # add AYON tools for 'pyblish_pype'
        os.path.join(AYON_CORE_ROOT, "tools"),
        # add common AYON vendor
        # (common for multiple Python interpreter versions)
        os.path.join(AYON_CORE_ROOT, "vendor", "python", "common")
    ]
    for path in additional_paths:
        if path not in split_paths:
            split_paths.insert(0, path)
        if path not in sys.path:
            sys.path.insert(0, path)
    os.environ["PYTHONPATH"] = os.pathsep.join(split_paths)

    print(">>> loading environments ...")
    print("  - global AYON ...")
    _set_global_environments()
    print("  - for addons ...")
    _set_addons_environments()

    try:
        main_cli(obj={}, prog_name="ayon")
    except Exception:  # noqa
        exc_info = sys.exc_info()
        print("!!! AYON crashed:")
        traceback.print_exception(*exc_info)
        sys.exit(1)
client/ayon_core/cli_commands.py  (new file, +173)
@@ -0,0 +1,173 @@
# -*- coding: utf-8 -*-
"""Implementation of AYON commands."""
import os
import sys
import json


class Commands:
    """Class implementing commands used by AYON.

    Most of its methods are called by the :mod:`cli` module.
    """
    @staticmethod
    def launch_tray():
        from ayon_core.lib import Logger
        from ayon_core.tools import tray

        Logger.set_process_name("Tray")

        tray.main()

    @staticmethod
    def add_addons(click_func):
        """Modules/addons can add their cli commands dynamically."""

        from ayon_core.lib import Logger
        from ayon_core.addon import AddonsManager

        manager = AddonsManager()
        log = Logger.get_logger("CLI-AddModules")
        for addon in manager.addons:
            try:
                addon.cli(click_func)

            except Exception:
                log.warning(
                    "Failed to add cli command for module \"{}\"".format(
                        addon.name
                    )
                )
        return click_func

    @staticmethod
    def publish(paths, targets=None, gui=False):
        """Start headless publishing.

        Publish uses json files from the passed paths argument.

        Args:
            paths (list): Paths to json files.
            targets (list): What targets should be registered
                (to choose validators, for example).
            gui (bool): Show publish UI.

        Raises:
            RuntimeError: When there is no path to process.
        """

        from ayon_core.lib import Logger
        from ayon_core.lib.applications import (
            get_app_environments_for_context,
            LaunchTypes,
        )
        from ayon_core.addon import AddonsManager
        from ayon_core.pipeline import (
            install_ayon_plugins,
            get_global_context,
        )
        from ayon_core.tools.utils.host_tools import show_publish
        from ayon_core.tools.utils.lib import qt_app_context

        # Register target and host
        import pyblish.api
        import pyblish.util

        log = Logger.get_logger("CLI-publish")

        install_ayon_plugins()

        manager = AddonsManager()

        publish_paths = manager.collect_plugin_paths()["publish"]

        for path in publish_paths:
            pyblish.api.register_plugin_path(path)

        if not any(paths):
            raise RuntimeError("No publish paths specified")

        app_full_name = os.getenv("AVALON_APP_NAME")
        if app_full_name:
            context = get_global_context()
            env = get_app_environments_for_context(
                context["project_name"],
                context["asset_name"],
                context["task_name"],
                app_full_name,
                launch_type=LaunchTypes.farm_publish,
            )
            os.environ.update(env)

        pyblish.api.register_host("shell")

        if targets:
            for target in targets:
                print(f"setting target: {target}")
                pyblish.api.register_target(target)
        else:
            pyblish.api.register_target("farm")

        os.environ["AYON_PUBLISH_DATA"] = os.pathsep.join(paths)
        os.environ["HEADLESS_PUBLISH"] = 'true'  # to use in app lib

        log.info("Running publish ...")

        plugins = pyblish.api.discover()
        print("Using plugins:")
        for plugin in plugins:
            print(plugin)

        if gui:
            with qt_app_context():
                show_publish()
        else:
            # Error exit as soon as any error occurs.
            error_format = ("Failed {plugin.__name__}: "
                            "{error} -- {error.traceback}")

            for result in pyblish.util.publish_iter():
                if result["error"]:
                    log.error(error_format.format(**result))
                    # uninstall()
                    sys.exit(1)

        log.info("Publish finished.")

    @staticmethod
    def extractenvironments(output_json_path, project, asset, task, app,
                            env_group):
        """Produce a json file with the environment based on project and app.

        Called by the Deadline plugin to propagate the environment into
        render jobs.
        """

        from ayon_core.lib.applications import (
            get_app_environments_for_context,
            LaunchTypes,
        )

        if all((project, asset, task, app)):
            env = get_app_environments_for_context(
                project,
                asset,
                task,
                app,
                env_group=env_group,
                launch_type=LaunchTypes.farm_render
            )
        else:
            env = os.environ.copy()

        output_dir = os.path.dirname(output_json_path)
        if not os.path.exists(output_dir):
            os.makedirs(output_dir)

        with open(output_json_path, "w") as file_stream:
            json.dump(env, file_stream, indent=4)

    @staticmethod
    def contextselection(output_path, project_name, asset_name, strict):
        from ayon_core.tools.context_dialog import main

        main(output_path, project_name, asset_name, strict)
client/ayon_core/client/__init__.py  (new file, +110)
@@ -0,0 +1,110 @@
from .utils import get_ayon_server_api_connection

from .entities import (
    get_projects,
    get_project,
    get_whole_project,

    get_asset_by_id,
    get_asset_by_name,
    get_assets,
    get_archived_assets,
    get_asset_ids_with_subsets,

    get_subset_by_id,
    get_subset_by_name,
    get_subsets,
    get_subset_families,

    get_version_by_id,
    get_version_by_name,
    get_versions,
    get_hero_version_by_id,
    get_hero_version_by_subset_id,
    get_hero_versions,
    get_last_versions,
    get_last_version_by_subset_id,
    get_last_version_by_subset_name,
    get_output_link_versions,

    version_is_latest,

    get_representation_by_id,
    get_representation_by_name,
    get_representations,
    get_representation_parents,
    get_representations_parents,
    get_archived_representations,

    get_thumbnail,
    get_thumbnails,
    get_thumbnail_id_from_source,

    get_workfile_info,

    get_asset_name_identifier,
)

from .entity_links import (
    get_linked_asset_ids,
    get_linked_assets,
    get_linked_representation_id,
)

from .operations import (
    create_project,
)


__all__ = (
    "get_ayon_server_api_connection",

    "get_projects",
    "get_project",
    "get_whole_project",

    "get_asset_by_id",
    "get_asset_by_name",
    "get_assets",
    "get_archived_assets",
    "get_asset_ids_with_subsets",

    "get_subset_by_id",
    "get_subset_by_name",
    "get_subsets",
    "get_subset_families",

    "get_version_by_id",
    "get_version_by_name",
    "get_versions",
    "get_hero_version_by_id",
    "get_hero_version_by_subset_id",
    "get_hero_versions",
    "get_last_versions",
    "get_last_version_by_subset_id",
    "get_last_version_by_subset_name",
    "get_output_link_versions",

    "version_is_latest",

    "get_representation_by_id",
    "get_representation_by_name",
    "get_representations",
    "get_representation_parents",
    "get_representations_parents",
    "get_archived_representations",

    "get_thumbnail",
    "get_thumbnails",
    "get_thumbnail_id_from_source",

    "get_workfile_info",

    "get_linked_asset_ids",
    "get_linked_assets",
    "get_linked_representation_id",

    "create_project",

    "get_asset_name_identifier",
)
client/ayon_core/client/constants.py  (new file, +28)
@@ -0,0 +1,28 @@
# --- Folders ---
DEFAULT_FOLDER_FIELDS = {
    "id",
    "name",
    "path",
    "parentId",
    "active",
    "parents",
    "thumbnailId"
}

REPRESENTATION_FILES_FIELDS = {
    "files.name",
    "files.hash",
    "files.id",
    "files.path",
    "files.size",
}

CURRENT_PROJECT_SCHEMA = "openpype:project-3.0"
CURRENT_PROJECT_CONFIG_SCHEMA = "openpype:config-2.0"
CURRENT_ASSET_DOC_SCHEMA = "openpype:asset-3.0"
CURRENT_SUBSET_SCHEMA = "openpype:subset-3.0"
CURRENT_VERSION_SCHEMA = "openpype:version-3.0"
CURRENT_HERO_VERSION_SCHEMA = "openpype:hero_version-1.0"
CURRENT_REPRESENTATION_SCHEMA = "openpype:representation-2.0"
CURRENT_WORKFILE_INFO_SCHEMA = "openpype:workfile-1.0"
CURRENT_THUMBNAIL_SCHEMA = "openpype:thumbnail-1.0"
client/ayon_core/client/conversion_utils.py  (new file, +1362; diff not expanded)

client/ayon_core/client/entities.py  (new file, +741)
@@ -0,0 +1,741 @@
import collections

from .constants import CURRENT_THUMBNAIL_SCHEMA
from .utils import get_ayon_server_api_connection
from .openpype_comp import get_folders_with_tasks
from .conversion_utils import (
    project_fields_v3_to_v4,
    convert_v4_project_to_v3,

    folder_fields_v3_to_v4,
    convert_v4_folder_to_v3,

    subset_fields_v3_to_v4,
    convert_v4_subset_to_v3,

    version_fields_v3_to_v4,
    convert_v4_version_to_v3,

    representation_fields_v3_to_v4,
    convert_v4_representation_to_v3,

    workfile_info_fields_v3_to_v4,
    convert_v4_workfile_info_to_v3,
)


def get_asset_name_identifier(asset_doc):
    """Get the asset name identifier from an asset document.

    This function exists because of the AYON implementation, where the
    name identifier is not just a name but a full path.

    The asset document must have the "name" key, and "data.parents" when
    in AYON mode.

    Args:
        asset_doc (dict[str, Any]): Asset document.
    """

    parents = list(asset_doc["data"]["parents"])
    parents.append(asset_doc["name"])
    return "/" + "/".join(parents)


def get_projects(active=True, inactive=False, library=None, fields=None):
    # Skip if both active and inactive projects are disabled
    if not active and not inactive:
        return

    if active and inactive:
        active = None
    elif active:
        active = True
    elif inactive:
        active = False

    con = get_ayon_server_api_connection()
    fields = project_fields_v3_to_v4(fields, con)
    for project in con.get_projects(active, library, fields=fields):
        yield convert_v4_project_to_v3(project)


def get_project(project_name, active=True, inactive=False, fields=None):
    con = get_ayon_server_api_connection()
    fields = project_fields_v3_to_v4(fields, con)
    return convert_v4_project_to_v3(
        con.get_project(project_name, fields=fields)
    )


def get_whole_project(*args, **kwargs):
    raise NotImplementedError("'get_whole_project' not implemented")


def _get_subsets(
    project_name,
    subset_ids=None,
    subset_names=None,
    folder_ids=None,
    names_by_folder_ids=None,
    archived=False,
    fields=None
):
    # Convert fields and add minimum required fields
    con = get_ayon_server_api_connection()
    fields = subset_fields_v3_to_v4(fields, con)
    if fields is not None:
        for key in (
            "id",
            "active"
        ):
            fields.add(key)

    active = True
    if archived:
        active = None

    for subset in con.get_products(
        project_name,
        product_ids=subset_ids,
        product_names=subset_names,
        folder_ids=folder_ids,
        names_by_folder_ids=names_by_folder_ids,
        active=active,
        fields=fields,
    ):
        yield convert_v4_subset_to_v3(subset)


def _get_versions(
    project_name,
    version_ids=None,
    subset_ids=None,
    versions=None,
    hero=True,
    standard=True,
    latest=None,
    active=None,
    fields=None
):
    con = get_ayon_server_api_connection()

    fields = version_fields_v3_to_v4(fields, con)

    # Make sure 'productId' and 'version' are available when hero versions
    #   are queried
    if fields and hero:
        fields = set(fields)
        fields |= {"productId", "version"}

    queried_versions = con.get_versions(
        project_name,
        version_ids=version_ids,
        product_ids=subset_ids,
        versions=versions,
        hero=hero,
        standard=standard,
        latest=latest,
        active=active,
        fields=fields
    )

    version_entities = []
    hero_versions = []
    for version in queried_versions:
        if version["version"] < 0:
            hero_versions.append(version)
        else:
            version_entities.append(convert_v4_version_to_v3(version))

    if hero_versions:
        subset_ids = set()
        versions_nums = set()
        for hero_version in hero_versions:
            versions_nums.add(abs(hero_version["version"]))
            subset_ids.add(hero_version["productId"])

        hero_eq_versions = con.get_versions(
            project_name,
            product_ids=subset_ids,
            versions=versions_nums,
            hero=False,
            fields=["id", "version", "productId"]
        )
        hero_eq_by_subset_id = collections.defaultdict(list)
        for version in hero_eq_versions:
            hero_eq_by_subset_id[version["productId"]].append(version)

        for hero_version in hero_versions:
            abs_version = abs(hero_version["version"])
            subset_id = hero_version["productId"]
            version_id = None
            for version in hero_eq_by_subset_id.get(subset_id, []):
                if version["version"] == abs_version:
                    version_id = version["id"]
                    break
            conv_hero = convert_v4_version_to_v3(hero_version)
            conv_hero["version_id"] = version_id
            version_entities.append(conv_hero)

    return version_entities


def get_asset_by_id(project_name, asset_id, fields=None):
    assets = get_assets(
        project_name, asset_ids=[asset_id], fields=fields
    )
    for asset in assets:
        return asset
    return None


def get_asset_by_name(project_name, asset_name, fields=None):
    assets = get_assets(
        project_name, asset_names=[asset_name], fields=fields
    )
    for asset in assets:
        return asset
    return None


def _folders_query(project_name, con, fields, **kwargs):
    if fields is None or "tasks" in fields:
        folders = get_folders_with_tasks(
            con, project_name, fields=fields, **kwargs
        )

    else:
        folders = con.get_folders(project_name, fields=fields, **kwargs)

    for folder in folders:
        yield folder


def get_assets(
    project_name,
    asset_ids=None,
    asset_names=None,
    parent_ids=None,
    archived=False,
    fields=None
):
    if not project_name:
        return

    active = True
    if archived:
        active = None

    con = get_ayon_server_api_connection()
    fields = folder_fields_v3_to_v4(fields, con)
    kwargs = dict(
        folder_ids=asset_ids,
        parent_ids=parent_ids,
        active=active,
    )
    if not asset_names:
        for folder in _folders_query(project_name, con, fields, **kwargs):
            yield convert_v4_folder_to_v3(folder, project_name)
        return

    new_asset_names = set()
    folder_paths = set()
    for name in asset_names:
        if "/" in name:
            folder_paths.add(name)
        else:
            new_asset_names.add(name)

    yielded_ids = set()
    if folder_paths:
        for folder in _folders_query(
            project_name, con, fields, folder_paths=folder_paths, **kwargs
        ):
            yielded_ids.add(folder["id"])
            yield convert_v4_folder_to_v3(folder, project_name)

    if not new_asset_names:
        return

    for folder in _folders_query(
        project_name, con, fields, folder_names=new_asset_names, **kwargs
    ):
        if folder["id"] not in yielded_ids:
            yielded_ids.add(folder["id"])
            yield convert_v4_folder_to_v3(folder, project_name)


def get_archived_assets(
    project_name,
    asset_ids=None,
    asset_names=None,
    parent_ids=None,
    fields=None
):
    return get_assets(
        project_name,
        asset_ids,
        asset_names,
        parent_ids,
        True,
        fields
    )


def get_asset_ids_with_subsets(project_name, asset_ids=None):
    con = get_ayon_server_api_connection()
    return con.get_folder_ids_with_products(project_name, asset_ids)


def get_subset_by_id(project_name, subset_id, fields=None):
    subsets = get_subsets(
        project_name, subset_ids=[subset_id], fields=fields
    )
    for subset in subsets:
        return subset
    return None


def get_subset_by_name(project_name, subset_name, asset_id, fields=None):
    subsets = get_subsets(
        project_name,
        subset_names=[subset_name],
        asset_ids=[asset_id],
        fields=fields
    )
    for subset in subsets:
        return subset
    return None


def get_subsets(
    project_name,
    subset_ids=None,
    subset_names=None,
    asset_ids=None,
    names_by_asset_ids=None,
    archived=False,
    fields=None
):
    return _get_subsets(
        project_name,
        subset_ids,
        subset_names,
        asset_ids,
        names_by_asset_ids,
        archived,
        fields=fields
    )


def get_subset_families(project_name, subset_ids=None):
    con = get_ayon_server_api_connection()
    return con.get_product_type_names(project_name, subset_ids)


def get_version_by_id(project_name, version_id, fields=None):
    versions = get_versions(
        project_name,
        version_ids=[version_id],
        fields=fields,
        hero=True
    )
    for version in versions:
        return version
    return None


def get_version_by_name(project_name, version, subset_id, fields=None):
    versions = get_versions(
        project_name,
        subset_ids=[subset_id],
        versions=[version],
        fields=fields
    )
    for version in versions:
        return version
    return None


def get_versions(
    project_name,
    version_ids=None,
    subset_ids=None,
    versions=None,
    hero=False,
    fields=None
):
    return _get_versions(
        project_name,
        version_ids,
        subset_ids,
        versions,
        hero=hero,
        standard=True,
        fields=fields
    )


def get_hero_version_by_id(project_name, version_id, fields=None):
    versions = get_hero_versions(
        project_name,
        version_ids=[version_id],
        fields=fields
    )
    for version in versions:
        return version
    return None


def get_hero_version_by_subset_id(
    project_name, subset_id, fields=None
):
    versions = get_hero_versions(
        project_name,
        subset_ids=[subset_id],
        fields=fields
    )
    for version in versions:
        return version
    return None


def get_hero_versions(
    project_name, subset_ids=None, version_ids=None, fields=None
):
    return _get_versions(
        project_name,
        version_ids=version_ids,
        subset_ids=subset_ids,
        hero=True,
        standard=False,
        fields=fields
    )


def get_last_versions(project_name, subset_ids, active=None, fields=None):
    if fields:
        fields = set(fields)
        fields.add("parent")

    versions = _get_versions(
        project_name,
        subset_ids=subset_ids,
        latest=True,
        hero=False,
        active=active,
        fields=fields
    )
    return {
        version["parent"]: version
        for version in versions
    }


def get_last_version_by_subset_id(project_name, subset_id, fields=None):
    versions = _get_versions(
        project_name,
        subset_ids=[subset_id],
        latest=True,
        hero=False,
        fields=fields
    )
    if not versions:
        return None
    return versions[0]


def get_last_version_by_subset_name(
    project_name,
    subset_name,
    asset_id=None,
    asset_name=None,
    fields=None
):
    if not asset_id and not asset_name:
        return None

    if not asset_id:
        asset = get_asset_by_name(
            project_name, asset_name, fields=["_id"]
        )
        if not asset:
            return None
        asset_id = asset["_id"]

    subset = get_subset_by_name(
        project_name, subset_name, asset_id, fields=["_id"]
    )
    if not subset:
        return None
    return get_last_version_by_subset_id(
        project_name, subset["_id"], fields=fields
    )


def get_output_link_versions(project_name, version_id, fields=None):
    if not version_id:
        return []

    con = get_ayon_server_api_connection()
    version_links = con.get_version_links(
        project_name, version_id, link_direction="out")

    version_ids = {
        link["entityId"]
        for link in version_links
        if link["entityType"] == "version"
    }
    if not version_ids:
        return []

    return get_versions(project_name, version_ids=version_ids, fields=fields)


def version_is_latest(project_name, version_id):
    con = get_ayon_server_api_connection()
    return con.version_is_latest(project_name, version_id)


def get_representation_by_id(project_name, representation_id, fields=None):
|
||||
representations = get_representations(
|
||||
project_name,
|
||||
representation_ids=[representation_id],
|
||||
fields=fields
|
||||
)
|
||||
for representation in representations:
|
||||
return representation
|
||||
return None
|
||||
|
||||
|
||||
def get_representation_by_name(
|
||||
project_name, representation_name, version_id, fields=None
|
||||
):
|
||||
representations = get_representations(
|
||||
project_name,
|
||||
representation_names=[representation_name],
|
||||
version_ids=[version_id],
|
||||
fields=fields
|
||||
)
|
||||
for representation in representations:
|
||||
return representation
|
||||
return None
|
||||
|
||||
|
||||
def get_representations(
|
||||
project_name,
|
||||
representation_ids=None,
|
||||
representation_names=None,
|
||||
version_ids=None,
|
||||
context_filters=None,
|
||||
names_by_version_ids=None,
|
||||
archived=False,
|
||||
standard=True,
|
||||
fields=None
|
||||
):
|
||||
if context_filters is not None:
|
||||
# TODO should we add the support?
|
||||
# - there was an ability to filter using regex
|
||||
raise ValueError("OP v4 can't filter by representation context.")
|
||||
|
||||
if not archived and not standard:
|
||||
return
|
||||
|
||||
if archived and not standard:
|
||||
active = False
|
||||
elif not archived and standard:
|
||||
active = True
|
||||
else:
|
||||
active = None
|
||||
|
||||
con = get_ayon_server_api_connection()
|
||||
fields = representation_fields_v3_to_v4(fields, con)
|
||||
if fields and active is not None:
|
||||
fields.add("active")
|
||||
|
||||
representations = con.get_representations(
|
||||
project_name,
|
||||
representation_ids=representation_ids,
|
||||
representation_names=representation_names,
|
||||
version_ids=version_ids,
|
||||
names_by_version_ids=names_by_version_ids,
|
||||
active=active,
|
||||
fields=fields
|
||||
)
|
||||
for representation in representations:
|
||||
yield convert_v4_representation_to_v3(representation)
|
||||
|
||||
|
||||
def get_representation_parents(project_name, representation):
|
||||
if not representation:
|
||||
return None
|
||||
|
||||
repre_id = representation["_id"]
|
||||
parents_by_repre_id = get_representations_parents(
|
||||
project_name, [representation]
|
||||
)
|
||||
return parents_by_repre_id[repre_id]
|
||||
|
||||
|
||||
def get_representations_parents(project_name, representations):
|
||||
repre_ids = {
|
||||
repre["_id"]
|
||||
for repre in representations
|
||||
}
|
||||
con = get_ayon_server_api_connection()
|
||||
parents_by_repre_id = con.get_representations_parents(project_name,
|
||||
repre_ids)
|
||||
folder_ids = set()
|
||||
for parents in parents_by_repre_id.values():
|
||||
folder_ids.add(parents[2]["id"])
|
||||
|
||||
tasks_by_folder_id = {}
|
||||
|
||||
new_parents = {}
|
||||
for repre_id, parents in parents_by_repre_id.items():
|
||||
version, subset, folder, project = parents
|
||||
folder_tasks = tasks_by_folder_id.get(folder["id"]) or {}
|
||||
folder["tasks"] = folder_tasks
|
||||
new_parents[repre_id] = (
|
||||
convert_v4_version_to_v3(version),
|
||||
convert_v4_subset_to_v3(subset),
|
||||
convert_v4_folder_to_v3(folder, project_name),
|
||||
project
|
||||
)
|
||||
return new_parents
|
||||
|
||||
|
||||
def get_archived_representations(
|
||||
project_name,
|
||||
representation_ids=None,
|
||||
representation_names=None,
|
||||
version_ids=None,
|
||||
context_filters=None,
|
||||
names_by_version_ids=None,
|
||||
fields=None
|
||||
):
|
||||
return get_representations(
|
||||
project_name,
|
||||
representation_ids=representation_ids,
|
||||
representation_names=representation_names,
|
||||
version_ids=version_ids,
|
||||
context_filters=context_filters,
|
||||
names_by_version_ids=names_by_version_ids,
|
||||
archived=True,
|
||||
standard=False,
|
||||
fields=fields
|
||||
)
|
||||
|
||||
|
||||
def get_thumbnail(
|
||||
project_name, thumbnail_id, entity_type, entity_id, fields=None
|
||||
):
|
||||
"""Receive thumbnail entity data.
|
||||
|
||||
Args:
|
||||
project_name (str): Name of project where to look for queried entities.
|
||||
thumbnail_id (Union[str, ObjectId]): Id of thumbnail entity.
|
||||
entity_type (str): Type of entity for which the thumbnail should be
|
||||
received.
|
||||
entity_id (str): Id of entity for which the thumbnail should be
|
||||
received.
|
||||
fields (Iterable[str]): Fields that should be returned. All fields are
|
||||
returned if 'None' is passed.
|
||||
|
||||
Returns:
|
||||
None: If thumbnail with specified id was not found.
|
||||
Dict: Thumbnail entity data which can be reduced to specified 'fields'.
|
||||
"""
|
||||
|
||||
if not thumbnail_id or not entity_type or not entity_id:
|
||||
return None
|
||||
|
||||
if entity_type == "asset":
|
||||
entity_type = "folder"
|
||||
|
||||
elif entity_type == "hero_version":
|
||||
entity_type = "version"
|
||||
|
||||
return {
|
||||
"_id": thumbnail_id,
|
||||
"type": "thumbnail",
|
||||
"schema": CURRENT_THUMBNAIL_SCHEMA,
|
||||
"data": {
|
||||
"entity_type": entity_type,
|
||||
"entity_id": entity_id
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
def get_thumbnails(project_name, thumbnail_contexts, fields=None):
|
||||
"""Get thumbnail entities.
|
||||
|
||||
Warning:
|
||||
This function is not OpenPype compatible. There is no usage of this
|
||||
function in the codebase so there is nothing to convert. The previous
|
||||
implementation cannot be AYON compatible without entity types.
|
||||
"""
|
||||
|
||||
thumbnail_items = []
|
||||
for thumbnail_context in thumbnail_contexts:
|
||||
thumbnail_id, entity_type, entity_id = thumbnail_context
|
||||
thumbnail_item = get_thumbnail(
|
||||
project_name, thumbnail_id, entity_type, entity_id
|
||||
)
|
||||
if thumbnail_item:
|
||||
thumbnail_items.append(thumbnail_item)
|
||||
return thumbnail_items
|
||||
|
||||
|
||||
def get_thumbnail_id_from_source(project_name, src_type, src_id):
|
||||
"""Receive thumbnail id from source entity.
|
||||
|
||||
Args:
|
||||
project_name (str): Name of project where to look for queried entities.
|
||||
src_type (str): Type of source entity ('asset', 'version').
|
||||
src_id (Union[str, ObjectId]): Id of source entity.
|
||||
|
||||
Returns:
|
||||
ObjectId: Thumbnail id assigned to entity.
|
||||
None: If source entity does not have any thumbnail id assigned.
|
||||
"""
|
||||
|
||||
if not src_type or not src_id:
|
||||
return None
|
||||
|
||||
if src_type == "version":
|
||||
version = get_version_by_id(
|
||||
project_name, src_id, fields=["data.thumbnail_id"]
|
||||
) or {}
|
||||
return version.get("data", {}).get("thumbnail_id")
|
||||
|
||||
if src_type == "asset":
|
||||
asset = get_asset_by_id(
|
||||
project_name, src_id, fields=["data.thumbnail_id"]
|
||||
) or {}
|
||||
return asset.get("data", {}).get("thumbnail_id")
|
||||
|
||||
return None
|
||||
|
||||
|
||||
def get_workfile_info(
|
||||
project_name, asset_id, task_name, filename, fields=None
|
||||
):
|
||||
if not asset_id or not task_name or not filename:
|
||||
return None
|
||||
|
||||
con = get_ayon_server_api_connection()
|
||||
task = con.get_task_by_name(
|
||||
project_name, asset_id, task_name, fields=["id", "name", "folderId"]
|
||||
)
|
||||
if not task:
|
||||
return None
|
||||
|
||||
fields = workfile_info_fields_v3_to_v4(fields)
|
||||
|
||||
for workfile_info in con.get_workfiles_info(
|
||||
project_name, task_ids=[task["id"]], fields=fields
|
||||
):
|
||||
if workfile_info["name"] == filename:
|
||||
return convert_v4_workfile_info_to_v3(workfile_info, task)
|
||||
return None
|
||||
157
client/ayon_core/client/entity_links.py
Normal file
|
|
@ -0,0 +1,157 @@
|
|||
from .utils import get_ayon_server_api_connection
|
||||
from .entities import get_assets, get_representation_by_id
|
||||
|
||||
|
||||
def get_linked_asset_ids(project_name, asset_doc=None, asset_id=None):
|
||||
"""Extract linked asset ids from asset document.
|
||||
|
||||
One of asset document or asset id must be passed.
|
||||
|
||||
Note:
|
||||
Asset links now work only from asset to assets.
|
||||
|
||||
Args:
|
||||
project_name (str): Project where to look for asset.
|
||||
asset_doc (dict): Asset document from DB.
|
||||
asset_id (str): Asset id to find its document.
|
||||
|
||||
Returns:
|
||||
List[Union[ObjectId, str]]: Asset ids of input links.
|
||||
"""
|
||||
|
||||
output = []
|
||||
if not asset_doc and not asset_id:
|
||||
return output
|
||||
|
||||
if not asset_id:
|
||||
asset_id = asset_doc["_id"]
|
||||
|
||||
con = get_ayon_server_api_connection()
|
||||
links = con.get_folder_links(project_name, asset_id, link_direction="in")
|
||||
return [
|
||||
link["entityId"]
|
||||
for link in links
|
||||
if link["entityType"] == "folder"
|
||||
]
|
||||
|
||||
|
||||
def get_linked_assets(
|
||||
project_name, asset_doc=None, asset_id=None, fields=None
|
||||
):
|
||||
"""Return linked assets based on passed asset document.
|
||||
|
||||
One of asset document or asset id must be passed.
|
||||
|
||||
Args:
|
||||
project_name (str): Name of project where to look for queried entities.
|
||||
asset_doc (Dict[str, Any]): Asset document from database.
|
||||
asset_id (Union[ObjectId, str]): Asset id. Can be used instead of
|
||||
asset document.
|
||||
fields (Iterable[str]): Fields that should be returned. All fields are
|
||||
returned if 'None' is passed.
|
||||
|
||||
Returns:
|
||||
List[Dict[str, Any]]: Asset documents of input links for passed
|
||||
asset doc.
|
||||
"""
|
||||
|
||||
link_ids = get_linked_asset_ids(project_name, asset_doc, asset_id)
|
||||
if not link_ids:
|
||||
return []
|
||||
return list(get_assets(project_name, asset_ids=link_ids, fields=fields))
|
||||
|
||||
|
||||
|
||||
def get_linked_representation_id(
|
||||
project_name, repre_doc=None, repre_id=None, link_type=None, max_depth=None
|
||||
):
|
||||
"""Returns list of linked ids of particular type (if provided).
|
||||
|
||||
One of representation document or representation id must be passed.
|
||||
Note:
|
||||
Representation links now work only from representation through version
|
||||
back to representations.
|
||||
|
||||
Todos:
|
||||
Missing depth query. Not sure how it found more representations in
|
||||
depth, probably links to version?
|
||||
|
||||
Args:
|
||||
project_name (str): Name of project where to look for links.
|
||||
repre_doc (Dict[str, Any]): Representation document.
|
||||
repre_id (Union[ObjectId, str]): Representation id.
|
||||
link_type (str): Type of link (e.g. 'reference', ...).
|
||||
max_depth (int): Limit recursion level. Default: 0
|
||||
|
||||
Returns:
|
||||
List[ObjectId]: Linked representation ids.
|
||||
"""
|
||||
|
||||
if repre_doc:
|
||||
repre_id = repre_doc["_id"]
|
||||
|
||||
if not repre_id and not repre_doc:
|
||||
return []
|
||||
|
||||
version_id = None
|
||||
if repre_doc:
|
||||
version_id = repre_doc.get("parent")
|
||||
|
||||
if not version_id:
|
||||
repre_doc = get_representation_by_id(
|
||||
project_name, repre_id, fields=["parent"]
|
||||
)
|
||||
if repre_doc:
|
||||
version_id = repre_doc["parent"]
|
||||
|
||||
if not version_id:
|
||||
return []
|
||||
|
||||
if max_depth is None or max_depth == 0:
|
||||
max_depth = 1
|
||||
|
||||
link_types = None
|
||||
if link_type:
|
||||
link_types = [link_type]
|
||||
|
||||
con = get_ayon_server_api_connection()
|
||||
# Store already found version ids to avoid recursion, and also to store
|
||||
# output -> Don't forget to remove 'version_id' at the end!!!
|
||||
linked_version_ids = {version_id}
|
||||
# Each loop of depth will reset this variable
|
||||
versions_to_check = {version_id}
|
||||
for _ in range(max_depth):
|
||||
if not versions_to_check:
|
||||
break
|
||||
|
||||
versions_links = con.get_versions_links(
|
||||
project_name,
|
||||
versions_to_check,
|
||||
link_types=link_types,
|
||||
link_direction="out")
|
||||
|
||||
versions_to_check = set()
|
||||
for links in versions_links.values():
|
||||
for link in links:
|
||||
# Care only about version links
|
||||
if link["entityType"] != "version":
|
||||
continue
|
||||
entity_id = link["entityId"]
|
||||
# Skip already found linked version ids
|
||||
if entity_id in linked_version_ids:
|
||||
continue
|
||||
linked_version_ids.add(entity_id)
|
||||
versions_to_check.add(entity_id)
|
||||
|
||||
linked_version_ids.remove(version_id)
|
||||
if not linked_version_ids:
|
||||
return []
|
||||
con = get_ayon_server_api_connection()
|
||||
representations = con.get_representations(
|
||||
project_name,
|
||||
version_ids=linked_version_ids,
|
||||
fields=["id"])
|
||||
return [
|
||||
repre["id"]
|
||||
for repre in representations
|
||||
]
|
||||
39
client/ayon_core/client/notes.md
Normal file
|
|
@ -0,0 +1,39 @@
|
|||
# Client functionality
|
||||
## Reason
|
||||
Preparation for the OpenPype v4 server. The goal is to remove direct mongo calls from the code and prepare it for a different source of data, and to start thinking about database calls less as mongo calls and more universally. To do so, a simple wrapper around database calls was implemented so that pymongo specific code is not used directly.
|
||||
|
||||
The current goal is not to make a universal database model that can be easily replaced with any different source of data, but to get as close to that as possible. The current implementation of OpenPype is too tightly connected to pymongo and its abilities, so we're trying to get closer with long term changes that can be used even in the current state.
|
||||
|
||||
## Queries
|
||||
Query functions don't use the full potential of mongo queries, like very specific queries based on subdictionaries or unknown structures. We try to avoid these calls as much as possible because they probably won't be available in the future. If it's really necessary a new function can be added, but only if it's reasonable for the overall logic. All query functions were moved to `~/client/entities.py`. Each function has arguments for the available filters and can reduce the returned keys for each entity.
|
||||
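A minimal query sketch, assuming the functions live in `ayon_core.client.entities` as in this changeset; the project and asset names are placeholders:

```python
from ayon_core.client.entities import get_asset_by_name, get_assets

project_name = "demo_project"  # placeholder

# Single asset by name, reducing the returned keys to what is needed.
asset = get_asset_by_name(project_name, "sh010", fields=["_id", "name"])

# 'get_assets' is a generator yielding v3-like asset documents.
for asset_doc in get_assets(project_name, asset_names=["sh010", "sh020"]):
    print(asset_doc["name"])
```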
|
||||
## Changes
|
||||
Changes are a little bit complicated. Mongo has many options for how an update can happen, which had to be reduced, and at this stage it would be complicated to validate values which are created or updated, so there is almost no automation at this point. Changes can be made using operations available in `~/client/operations.py`. Each operation requires a project name and entity type, and may require operation specific data.
|
||||
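The general pattern is queuing operations on a session and committing them, sketched below with `OperationsSession` from `ayon_core.client.operations` (this changeset) and a placeholder id:

```python
from ayon_core.client.operations import OperationsSession

project_name = "demo_project"  # placeholder

session = OperationsSession()

# Queue a delete of a representation entity; nothing is sent to the
# server until the session is committed.
session.delete_entity(project_name, "representation", "<representation id>")
session.commit()
```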
|
||||
### Create
|
||||
Create operations expect already prepared document data; for that there are functions creating skeletal structures of documents (they do not fill all required data), and except for `_id` all data should be correct. Existence of the entity is not validated, so if the same creation operation is sent n times it will create the entity n times, which can cause issues.
|
||||
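A create sketch using the skeleton helpers from `operations.py`; the project name is a placeholder and the asset id must be a real id (uuid) of an existing asset on the server:

```python
from ayon_core.client.operations import OperationsSession, new_subset_document

project_name = "demo_project"      # placeholder
asset_id = "<existing asset id>"   # placeholder, must be a valid uuid

# Prepare the skeletal subset document and queue its creation.
subset_doc = new_subset_document(
    name="modelMain", family="model", asset_id=asset_id
)

session = OperationsSession()
session.create_entity(project_name, "subset", subset_doc)
session.commit()
```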
|
||||
### Update
|
||||
Update operations require an entity id and the keys that should be changed; the update dictionary must have the form {"key": value}. If a value should be set in a nested dictionary the key must contain all subkeys joined with a dot `.` (e.g. `{"data": {"fps": 25}}` -> `{"data.fps": 25}`). To simplify creating update dictionaries there are functions which do that for you; their names follow the template `prepare_<entity type>_update_data` and they work by comparing the previous document and the new document. If a function is missing for a requested entity type it is because we didn't need it yet and it requires implementation.
|
||||
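A minimal sketch of both update styles (an explicit flattened update dictionary, and the compare helper), with placeholder ids and values:

```python
from ayon_core.client.operations import (
    OperationsSession,
    prepare_subset_update_data,
)

project_name = "demo_project"  # placeholder
session = OperationsSession()

# Explicit update dictionary with flattened keys, as described above.
session.update_entity(project_name, "asset", "<asset id>", {"data.fps": 25})

# Or compare two documents and let the helper compute the changes.
old_doc = {"_id": "<subset id>", "data": {"family": "model"}}
new_doc = {"_id": "<subset id>", "data": {"family": "rig"}}
changes = prepare_subset_update_data(old_doc, new_doc)
if changes:
    session.update_entity(project_name, "subset", old_doc["_id"], changes)

session.commit()
```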
|
||||
### Delete
|
||||
Delete operations need an entity id. The entity will be deleted from mongo.
|
||||
|
||||
|
||||
## What (probably) won't be replaced
|
||||
Some parts of the code are still using direct mongo calls. In most cases these are very specific calls that are module specific, or their usage will completely change in the future.
|
||||
- Mongo calls that are not project specific (outside of the `avalon` collection) will be removed or will have to use a different mechanism for how the data are stored. At this moment this is related to OpenPype settings and logs, ftrack server events, and some other data.
|
||||
- Sync server queries. They're complex and very specific to the sync server module. Their replacement will require specific calls to the OpenPype server in v4, thus their abstraction with a wrapper is irrelevant and would complicate production in v3.
|
||||
- Project managers (ftrack, kitsu, shotgrid, embedded Project Manager, etc.). Project managers create, update or remove assets in v3, but in v4 they will create folders with a different structure. Wrapping the creation of assets would not help to prepare for v4 because of the new data structures. The same can be said about the editorial Extract Hierarchy Avalon plugin which creates the project structure.
|
||||
- Code parts that are marked as deprecated in v3 or will be deprecated in v4.
|
||||
- integrate asset legacy publish plugin - already legacy, kept for safety
|
||||
- integrate thumbnail - thumbnails will be stored in a different way in v4
|
||||
- input links - links will be stored in a different way and will have a different mechanism of linking. In v3 links are limited to the same entity type "asset <-> asset" or "representation <-> representation".
|
||||
|
||||
## Known missing replacements
|
||||
- change subset group in loader tool
|
||||
- integrate subset group
|
||||
- query input links in openpype lib
|
||||
- create project in openpype lib
|
||||
- save/create workfile doc in openpype lib
|
||||
- integrate hero version
|
||||
159
client/ayon_core/client/openpype_comp.py
Normal file
|
|
@ -0,0 +1,159 @@
|
|||
import collections
|
||||
import json
|
||||
|
||||
import six
|
||||
from ayon_api.graphql import GraphQlQuery, FIELD_VALUE, fields_to_dict
|
||||
|
||||
from .constants import DEFAULT_FOLDER_FIELDS
|
||||
|
||||
|
||||
def folders_tasks_graphql_query(fields):
|
||||
query = GraphQlQuery("FoldersQuery")
|
||||
project_name_var = query.add_variable("projectName", "String!")
|
||||
folder_ids_var = query.add_variable("folderIds", "[String!]")
|
||||
parent_folder_ids_var = query.add_variable("parentFolderIds", "[String!]")
|
||||
folder_paths_var = query.add_variable("folderPaths", "[String!]")
|
||||
folder_names_var = query.add_variable("folderNames", "[String!]")
|
||||
has_products_var = query.add_variable("folderHasProducts", "Boolean!")
|
||||
|
||||
project_field = query.add_field("project")
|
||||
project_field.set_filter("name", project_name_var)
|
||||
|
||||
folders_field = project_field.add_field_with_edges("folders")
|
||||
folders_field.set_filter("ids", folder_ids_var)
|
||||
folders_field.set_filter("parentIds", parent_folder_ids_var)
|
||||
folders_field.set_filter("names", folder_names_var)
|
||||
folders_field.set_filter("paths", folder_paths_var)
|
||||
folders_field.set_filter("hasProducts", has_products_var)
|
||||
|
||||
fields = set(fields)
|
||||
fields.discard("tasks")
|
||||
tasks_field = folders_field.add_field_with_edges("tasks")
|
||||
tasks_field.add_field("name")
|
||||
tasks_field.add_field("taskType")
|
||||
|
||||
nested_fields = fields_to_dict(fields)
|
||||
|
||||
query_queue = collections.deque()
|
||||
for key, value in nested_fields.items():
|
||||
query_queue.append((key, value, folders_field))
|
||||
|
||||
while query_queue:
|
||||
item = query_queue.popleft()
|
||||
key, value, parent = item
|
||||
field = parent.add_field(key)
|
||||
if value is FIELD_VALUE:
|
||||
continue
|
||||
|
||||
for k, v in value.items():
|
||||
query_queue.append((k, v, field))
|
||||
return query
|
||||
|
||||
|
||||
def get_folders_with_tasks(
|
||||
con,
|
||||
project_name,
|
||||
folder_ids=None,
|
||||
folder_paths=None,
|
||||
folder_names=None,
|
||||
parent_ids=None,
|
||||
active=True,
|
||||
fields=None
|
||||
):
|
||||
"""Query folders with tasks from server.
|
||||
|
||||
This is for v4 compatibility where tasks were stored on assets. This is
|
||||
an inefficient way to query folders and tasks, so it was added only
|
||||
as a compatibility function.
|
||||
|
||||
Todos:
|
||||
Folder name won't be unique identifier, so we should add folder path
|
||||
filtering.
|
||||
|
||||
Notes:
|
||||
Filter 'active' doesn't have a direct filter in GraphQl.
|
||||
|
||||
Args:
|
||||
con (ServerAPI): Connection to server.
|
||||
project_name (str): Name of project where folders are.
|
||||
folder_ids (Iterable[str]): Folder ids to filter.
|
||||
folder_paths (Iterable[str]): Folder paths used for filtering.
|
||||
folder_names (Iterable[str]): Folder names used for filtering.
|
||||
parent_ids (Iterable[str]): Ids of folder parents. Use 'None'
|
||||
if folder is direct child of project.
|
||||
active (Union[bool, None]): Filter active/inactive folders. Both
|
||||
are returned if is set to None.
|
||||
fields (Union[Iterable[str], None]): Fields to be queried
|
||||
for folder. All possible folder fields are returned if 'None'
|
||||
is passed.
|
||||
|
||||
Yields:
|
||||
Dict[str, Any]: Queried folder entities.
|
||||
"""
|
||||
|
||||
if not project_name:
|
||||
return
|
||||
|
||||
filters = {
|
||||
"projectName": project_name
|
||||
}
|
||||
if folder_ids is not None:
|
||||
folder_ids = set(folder_ids)
|
||||
if not folder_ids:
|
||||
return
|
||||
filters["folderIds"] = list(folder_ids)
|
||||
|
||||
if folder_paths is not None:
|
||||
folder_paths = set(folder_paths)
|
||||
if not folder_paths:
|
||||
return
|
||||
filters["folderPaths"] = list(folder_paths)
|
||||
|
||||
if folder_names is not None:
|
||||
folder_names = set(folder_names)
|
||||
if not folder_names:
|
||||
return
|
||||
filters["folderNames"] = list(folder_names)
|
||||
|
||||
if parent_ids is not None:
|
||||
parent_ids = set(parent_ids)
|
||||
if not parent_ids:
|
||||
return
|
||||
if None in parent_ids:
|
||||
# Replace 'None' with '"root"' which is used during GraphQl
|
||||
# query for parent ids filter for folders without folder
|
||||
# parent
|
||||
parent_ids.remove(None)
|
||||
parent_ids.add("root")
|
||||
|
||||
if project_name in parent_ids:
|
||||
# Replace project name with '"root"' which is used during
|
||||
# GraphQl query for parent ids filter for folders without
|
||||
# folder parent
|
||||
parent_ids.remove(project_name)
|
||||
parent_ids.add("root")
|
||||
|
||||
filters["parentFolderIds"] = list(parent_ids)
|
||||
|
||||
if fields:
|
||||
fields = set(fields)
|
||||
else:
|
||||
fields = con.get_default_fields_for_type("folder")
|
||||
fields |= DEFAULT_FOLDER_FIELDS
|
||||
|
||||
if active is not None:
|
||||
fields.add("active")
|
||||
|
||||
query = folders_tasks_graphql_query(fields)
|
||||
for attr, filter_value in filters.items():
|
||||
query.set_variable_value(attr, filter_value)
|
||||
|
||||
parsed_data = query.query(con)
|
||||
folders = parsed_data["project"]["folders"]
|
||||
for folder in folders:
|
||||
if active is not None and folder["active"] is not active:
|
||||
continue
|
||||
folder_data = folder.get("data")
|
||||
if isinstance(folder_data, six.string_types):
|
||||
folder["data"] = json.loads(folder_data)
|
||||
yield folder
|
||||
880
client/ayon_core/client/operations.py
Normal file
|
|
@ -0,0 +1,880 @@
|
|||
import copy
|
||||
import json
|
||||
import collections
|
||||
import uuid
|
||||
import datetime
|
||||
|
||||
from ayon_api.server_api import (
|
||||
PROJECT_NAME_ALLOWED_SYMBOLS,
|
||||
PROJECT_NAME_REGEX,
|
||||
)
|
||||
|
||||
from .constants import (
|
||||
CURRENT_PROJECT_SCHEMA,
|
||||
CURRENT_PROJECT_CONFIG_SCHEMA,
|
||||
CURRENT_ASSET_DOC_SCHEMA,
|
||||
CURRENT_SUBSET_SCHEMA,
|
||||
CURRENT_VERSION_SCHEMA,
|
||||
CURRENT_HERO_VERSION_SCHEMA,
|
||||
CURRENT_REPRESENTATION_SCHEMA,
|
||||
CURRENT_WORKFILE_INFO_SCHEMA,
|
||||
CURRENT_THUMBNAIL_SCHEMA,
|
||||
)
|
||||
from .operations_base import (
|
||||
REMOVED_VALUE,
|
||||
CreateOperation,
|
||||
UpdateOperation,
|
||||
DeleteOperation,
|
||||
BaseOperationsSession
|
||||
)
|
||||
from .conversion_utils import (
|
||||
convert_create_asset_to_v4,
|
||||
convert_create_task_to_v4,
|
||||
convert_create_subset_to_v4,
|
||||
convert_create_version_to_v4,
|
||||
convert_create_hero_version_to_v4,
|
||||
convert_create_representation_to_v4,
|
||||
convert_create_workfile_info_to_v4,
|
||||
|
||||
convert_update_folder_to_v4,
|
||||
convert_update_subset_to_v4,
|
||||
convert_update_version_to_v4,
|
||||
convert_update_hero_version_to_v4,
|
||||
convert_update_representation_to_v4,
|
||||
convert_update_workfile_info_to_v4,
|
||||
)
|
||||
from .utils import create_entity_id, get_ayon_server_api_connection
|
||||
|
||||
|
||||
def _create_or_convert_to_id(entity_id=None):
|
||||
if entity_id is None:
|
||||
return create_entity_id()
|
||||
|
||||
# Validate if can be converted to uuid
|
||||
uuid.UUID(entity_id)
|
||||
return entity_id
|
||||
|
||||
|
||||
def new_project_document(
|
||||
project_name, project_code, config, data=None, entity_id=None
|
||||
):
|
||||
"""Create skeleton data of project document.
|
||||
|
||||
Args:
|
||||
project_name (str): Name of project. Used as identifier of a project.
|
||||
project_code (str): Shorter version of project name without spaces and
|
||||
special characters (in most cases). Should also be considered
|
||||
as unique name across projects.
|
||||
config (Dict[str, Any]): Project config consists of roots, templates,
|
||||
applications and other project Anatomy related data.
|
||||
data (Dict[str, Any]): Project data with information about its
|
||||
attributes (e.g. 'fps' etc.) or integration specific keys.
|
||||
entity_id (Union[str, ObjectId]): Predefined id of document. New id is
|
||||
created if not passed.
|
||||
|
||||
Returns:
|
||||
Dict[str, Any]: Skeleton of project document.
|
||||
"""
|
||||
|
||||
if data is None:
|
||||
data = {}
|
||||
|
||||
data["code"] = project_code
|
||||
|
||||
return {
|
||||
"_id": _create_or_convert_to_id(entity_id),
|
||||
"name": project_name,
|
||||
"type": CURRENT_PROJECT_SCHEMA,
|
||||
"entity_data": data,
|
||||
"config": config
|
||||
}
|
||||
|
||||
|
||||
def new_asset_document(
|
||||
name, project_id, parent_id, parents, data=None, entity_id=None
|
||||
):
|
||||
"""Create skeleton data of asset document.
|
||||
|
||||
Args:
|
||||
name (str): Is considered as unique identifier of asset in project.
|
||||
project_id (Union[str, ObjectId]): Id of project document.
|
||||
parent_id (Union[str, ObjectId]): Id of parent asset.
|
||||
parents (List[str]): List of parent assets names.
|
||||
data (Dict[str, Any]): Asset document data. Empty dictionary is used
|
||||
if not passed. Value of 'parent_id' is used to fill 'visualParent'.
|
||||
entity_id (Union[str, ObjectId]): Predefined id of document. New id is
|
||||
created if not passed.
|
||||
|
||||
Returns:
|
||||
Dict[str, Any]: Skeleton of asset document.
|
||||
"""
|
||||
|
||||
if data is None:
|
||||
data = {}
|
||||
if parent_id is not None:
|
||||
parent_id = _create_or_convert_to_id(parent_id)
|
||||
data["visualParent"] = parent_id
|
||||
data["parents"] = parents
|
||||
|
||||
return {
|
||||
"_id": _create_or_convert_to_id(entity_id),
|
||||
"type": "asset",
|
||||
"name": name,
|
||||
# This will be ignored
|
||||
"parent": project_id,
|
||||
"data": data,
|
||||
"schema": CURRENT_ASSET_DOC_SCHEMA
|
||||
}
|
||||
|
||||
|
||||
def new_subset_document(name, family, asset_id, data=None, entity_id=None):
|
||||
"""Create skeleton data of subset document.
|
||||
|
||||
Args:
|
||||
name (str): Is considered as unique identifier of subset under asset.
|
||||
family (str): Subset's family.
|
||||
asset_id (Union[str, ObjectId]): Id of parent asset.
|
||||
data (Dict[str, Any]): Subset document data. Empty dictionary is used
|
||||
if not passed. Value of 'family' is used to fill 'family'.
|
||||
entity_id (Union[str, ObjectId]): Predefined id of document. New id is
|
||||
created if not passed.
|
||||
|
||||
Returns:
|
||||
Dict[str, Any]: Skeleton of subset document.
|
||||
"""
|
||||
|
||||
if data is None:
|
||||
data = {}
|
||||
data["family"] = family
|
||||
return {
|
||||
"_id": _create_or_convert_to_id(entity_id),
|
||||
"schema": CURRENT_SUBSET_SCHEMA,
|
||||
"type": "subset",
|
||||
"name": name,
|
||||
"data": data,
|
||||
"parent": _create_or_convert_to_id(asset_id)
|
||||
}
|
||||
|
||||
|
||||
def new_version_doc(version, subset_id, data=None, entity_id=None):
|
||||
"""Create skeleton data of version document.
|
||||
|
||||
Args:
|
||||
version (int): Is considered as unique identifier of version
|
||||
under subset.
|
||||
subset_id (Union[str, ObjectId]): Id of parent subset.
|
||||
data (Dict[str, Any]): Version document data.
|
||||
entity_id (Union[str, ObjectId]): Predefined id of document. New id is
|
||||
created if not passed.
|
||||
|
||||
Returns:
|
||||
Dict[str, Any]: Skeleton of version document.
|
||||
"""
|
||||
|
||||
if data is None:
|
||||
data = {}
|
||||
|
||||
return {
|
||||
"_id": _create_or_convert_to_id(entity_id),
|
||||
"schema": CURRENT_VERSION_SCHEMA,
|
||||
"type": "version",
|
||||
"name": int(version),
|
||||
"parent": _create_or_convert_to_id(subset_id),
|
||||
"data": data
|
||||
}
|
||||
|
||||
|
||||
def new_hero_version_doc(subset_id, data, version=None, entity_id=None):
|
||||
"""Create skeleton data of hero version document.
|
||||
|
||||
Args:
|
||||
subset_id (Union[str, ObjectId]): Id of parent subset.
|
||||
data (Dict[str, Any]): Version document data.
|
||||
version (int): Version of source version.
|
||||
entity_id (Union[str, ObjectId]): Predefined id of document. New id is
|
||||
created if not passed.
|
||||
|
||||
Returns:
|
||||
Dict[str, Any]: Skeleton of version document.
|
||||
"""
|
||||
|
||||
if version is None:
|
||||
version = -1
|
||||
elif version > 0:
|
||||
version = -version
|
||||
|
||||
return {
|
||||
"_id": _create_or_convert_to_id(entity_id),
|
||||
"schema": CURRENT_HERO_VERSION_SCHEMA,
|
||||
"type": "hero_version",
|
||||
"version": version,
|
||||
"parent": _create_or_convert_to_id(subset_id),
|
||||
"data": data
|
||||
}
|
||||
|
||||
|
||||
def new_representation_doc(
|
||||
name, version_id, context, data=None, entity_id=None
|
||||
):
|
||||
"""Create skeleton data of representation document.
|
||||
|
||||
Args:
|
||||
name (str): Representation name considered as unique identifier
|
||||
of representation under version.
|
||||
version_id (Union[str, ObjectId]): Id of parent version.
|
||||
context (Dict[str, Any]): Representation context used to fill template
|
||||
or to query.
|
||||
data (Dict[str, Any]): Representation document data.
|
||||
entity_id (Union[str, ObjectId]): Predefined id of document. New id is
|
||||
created if not passed.
|
||||
|
||||
Returns:
|
||||
Dict[str, Any]: Skeleton of representation document.
|
||||
"""
|
||||
|
||||
if data is None:
|
||||
data = {}
|
||||
|
||||
return {
|
||||
"_id": _create_or_convert_to_id(entity_id),
|
||||
"schema": CURRENT_REPRESENTATION_SCHEMA,
|
||||
"type": "representation",
|
||||
"parent": _create_or_convert_to_id(version_id),
|
||||
"name": name,
|
||||
"data": data,
|
||||
|
||||
# Imprint shortcut to context for performance reasons.
|
||||
"context": context
|
||||
}
|
||||
|
||||
|
||||
def new_thumbnail_doc(data=None, entity_id=None):
|
||||
"""Create skeleton data of thumbnail document.
|
||||
|
||||
Args:
|
||||
data (Dict[str, Any]): Thumbnail document data.
|
||||
entity_id (Union[str, ObjectId]): Predefined id of document. New id is
|
||||
created if not passed.
|
||||
|
||||
Returns:
|
||||
Dict[str, Any]: Skeleton of thumbnail document.
|
||||
"""
|
||||
|
||||
if data is None:
|
||||
data = {}
|
||||
|
||||
return {
|
||||
"_id": _create_or_convert_to_id(entity_id),
|
||||
"type": "thumbnail",
|
||||
"schema": CURRENT_THUMBNAIL_SCHEMA,
|
||||
"data": data
|
||||
}
|
||||
|
||||
|
||||
def new_workfile_info_doc(
|
||||
filename, asset_id, task_name, files, data=None, entity_id=None
|
||||
):
|
||||
"""Create skeleton data of workfile info document.
|
||||
|
||||
Workfile document is at this moment used primarily for artist notes.
|
||||
|
||||
Args:
|
||||
filename (str): Filename of workfile.
|
||||
asset_id (Union[str, ObjectId]): Id of asset under which workfile lives.
|
||||
task_name (str): Task under which was workfile created.
|
||||
files (List[str]): List of rootless filepaths related to workfile.
|
||||
data (Dict[str, Any]): Additional metadata.
|
||||
|
||||
Returns:
|
||||
Dict[str, Any]: Skeleton of workfile info document.
|
||||
"""
|
||||
|
||||
if not data:
|
||||
data = {}
|
||||
|
||||
return {
|
||||
"_id": _create_or_convert_to_id(entity_id),
|
||||
"type": "workfile",
|
||||
"parent": _create_or_convert_to_id(asset_id),
|
||||
"task_name": task_name,
|
||||
"filename": filename,
|
||||
"data": data,
|
||||
"files": files
|
||||
}
|
||||
|
||||
|
||||
def _prepare_update_data(old_doc, new_doc, replace):
|
||||
changes = {}
|
||||
for key, value in new_doc.items():
|
||||
if key not in old_doc or value != old_doc[key]:
|
||||
changes[key] = value
|
||||
|
||||
if replace:
|
||||
for key in old_doc.keys():
|
||||
if key not in new_doc:
|
||||
changes[key] = REMOVED_VALUE
|
||||
return changes
|
||||
|
||||
|
||||
def prepare_subset_update_data(old_doc, new_doc, replace=True):
|
||||
"""Compare two subset documents and prepare update data.
|
||||
|
||||
Based on compared values will create update data for
|
||||
'UpdateOperation'.
|
||||
|
||||
Empty output means that documents are identical.
|
||||
|
||||
Returns:
|
||||
Dict[str, Any]: Changes between old and new document.
|
||||
"""
|
||||
|
||||
return _prepare_update_data(old_doc, new_doc, replace)
|
||||
|
||||
|
||||
def prepare_version_update_data(old_doc, new_doc, replace=True):
|
||||
"""Compare two version documents and prepare update data.
|
||||
|
||||
Based on compared values will create update data for
|
||||
'UpdateOperation'.
|
||||
|
||||
Empty output means that documents are identical.
|
||||
|
||||
Returns:
|
||||
Dict[str, Any]: Changes between old and new document.
|
||||
"""
|
||||
|
||||
return _prepare_update_data(old_doc, new_doc, replace)
|
||||
|
||||
|
||||
def prepare_hero_version_update_data(old_doc, new_doc, replace=True):
|
||||
"""Compare two hero version documents and prepare update data.
|
||||
|
||||
Based on compared values will create update data for 'UpdateOperation'.
|
||||
|
||||
Empty output means that documents are identical.
|
||||
|
||||
Returns:
|
||||
Dict[str, Any]: Changes between old and new document.
|
||||
"""
|
||||
|
||||
changes = _prepare_update_data(old_doc, new_doc, replace)
|
||||
changes.pop("version_id", None)
|
||||
return changes
|
||||
|
||||
|
||||
def prepare_representation_update_data(old_doc, new_doc, replace=True):
|
||||
"""Compare two representation documents and prepare update data.
|
||||
|
||||
Based on compared values will create update data for
|
||||
'UpdateOperation'.
|
||||
|
||||
Empty output means that documents are identical.
|
||||
|
||||
Returns:
|
||||
Dict[str, Any]: Changes between old and new document.
|
||||
"""
|
||||
|
||||
changes = _prepare_update_data(old_doc, new_doc, replace)
|
||||
context = changes.get("data", {}).get("context")
|
||||
# Make sure that both 'family' and 'subset' are in changes if
|
||||
# one of them changed (they'll both become 'product').
|
||||
if (
|
||||
context
|
||||
and ("family" in context or "subset" in context)
|
||||
):
|
||||
context["family"] = new_doc["data"]["context"]["family"]
|
||||
context["subset"] = new_doc["data"]["context"]["subset"]
|
||||
|
||||
return changes
|
||||
|
||||
|
||||
def prepare_workfile_info_update_data(old_doc, new_doc, replace=True):
|
||||
"""Compare two workfile info documents and prepare update data.
|
||||
|
||||
Based on compared values will create update data for
|
||||
'UpdateOperation'.
|
||||
|
||||
Empty output means that documents are identical.
|
||||
|
||||
Returns:
|
||||
Dict[str, Any]: Changes between old and new document.
|
||||
"""
|
||||
|
||||
return _prepare_update_data(old_doc, new_doc, replace)
|
||||
|
||||
|
||||
class FailedOperations(Exception):
|
||||
pass
|
||||
|
||||
|
||||
def entity_data_json_default(value):
|
||||
if isinstance(value, datetime.datetime):
|
||||
return int(value.timestamp())
|
||||
|
||||
raise TypeError(
|
||||
"Object of type {} is not JSON serializable".format(str(type(value)))
|
||||
)
|
||||
|
||||
|
||||
def failed_json_default(value):
|
||||
return "< Failed value {} > {}".format(type(value), str(value))
|
||||
|
||||
|
||||
class ServerCreateOperation(CreateOperation):
|
||||
"""Operation to create an entity.
|
||||
|
||||
Args:
|
||||
project_name (str): On which project operation will happen.
|
||||
entity_type (str): Type of entity on which change happens.
|
||||
e.g. 'asset', 'representation' etc.
|
||||
data (Dict[str, Any]): Data of entity that will be created.
|
||||
"""
|
||||
|
||||
def __init__(self, project_name, entity_type, data, session):
|
||||
self._session = session
|
||||
|
||||
if not data:
|
||||
data = {}
|
||||
data = copy.deepcopy(data)
|
||||
if entity_type == "project":
|
||||
raise ValueError("Project cannot be created using operations")
|
||||
|
||||
tasks = None
|
||||
if entity_type == "asset":
|
||||
# TODO handle tasks
|
||||
entity_type = "folder"
|
||||
if "data" in data:
|
||||
tasks = data["data"].get("tasks")
|
||||
|
||||
project = self._session.get_project(project_name)
|
||||
new_data = convert_create_asset_to_v4(data, project, self.con)
|
||||
|
||||
elif entity_type == "task":
|
||||
project = self._session.get_project(project_name)
|
||||
new_data = convert_create_task_to_v4(data, project, self.con)
|
||||
|
||||
elif entity_type == "subset":
|
||||
new_data = convert_create_subset_to_v4(data, self.con)
|
||||
entity_type = "product"
|
||||
|
||||
elif entity_type == "version":
|
||||
new_data = convert_create_version_to_v4(data, self.con)
|
||||
|
||||
elif entity_type == "hero_version":
|
||||
new_data = convert_create_hero_version_to_v4(
|
||||
data, project_name, self.con
|
||||
)
|
||||
entity_type = "version"
|
||||
|
||||
elif entity_type in ("representation", "archived_representation"):
|
||||
new_data = convert_create_representation_to_v4(data, self.con)
|
||||
entity_type = "representation"
|
||||
|
||||
elif entity_type == "workfile":
|
||||
new_data = convert_create_workfile_info_to_v4(
|
||||
data, project_name, self.con
|
||||
)
|
||||
|
||||
else:
|
||||
raise ValueError(
|
||||
"Unhandled entity type \"{}\"".format(entity_type)
|
||||
)
|
||||
|
||||
# Simple check if data can be dumped into json
|
||||
# - should raise error on 'ObjectId' object
|
||||
try:
|
||||
new_data = json.loads(
|
||||
json.dumps(new_data, default=entity_data_json_default)
|
||||
)
|
||||
|
||||
except Exception:
|
||||
raise ValueError("Couldn't json parse body: {}".format(
|
||||
json.dumps(new_data, default=failed_json_default)
|
||||
))
|
||||
|
||||
super(ServerCreateOperation, self).__init__(
|
||||
project_name, entity_type, new_data
|
||||
)
|
||||
|
||||
if "id" not in self._data:
|
||||
self._data["id"] = create_entity_id()
|
||||
|
||||
if tasks:
|
||||
copied_tasks = copy.deepcopy(tasks)
|
||||
for task_name, task in copied_tasks.items():
|
||||
task["name"] = task_name
|
||||
task["folderId"] = self._data["id"]
|
||||
self.session.create_entity(
|
||||
project_name, "task", task, nested_id=self.id
|
||||
)
|
||||
|
||||
@property
|
||||
def con(self):
|
||||
return self.session.con
|
||||
|
||||
@property
|
||||
def session(self):
|
||||
return self._session
|
||||
|
||||
@property
|
||||
def entity_id(self):
|
||||
return self._data["id"]
|
||||
|
||||
def to_server_operation(self):
|
||||
return {
|
||||
"id": self.id,
|
||||
"type": "create",
|
||||
"entityType": self.entity_type,
|
||||
"entityId": self.entity_id,
|
||||
"data": self._data
|
||||
}
|
||||
|
||||
|
||||
class ServerUpdateOperation(UpdateOperation):
|
||||
"""Operation to update an entity.
|
||||
|
||||
Args:
|
||||
project_name (str): On which project operation will happen.
|
||||
entity_type (str): Type of entity on which change happens.
|
||||
e.g. 'asset', 'representation' etc.
|
||||
entity_id (Union[str, ObjectId]): Identifier of an entity.
|
||||
update_data (Dict[str, Any]): Key -> value changes that will be set in
|
||||
database. If value is set to 'REMOVED_VALUE' the key will be
|
||||
removed. Only first level of dictionary is checked (on purpose).
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
self, project_name, entity_type, entity_id, update_data, session
|
||||
):
|
||||
self._session = session
|
||||
|
||||
update_data = copy.deepcopy(update_data)
|
||||
if entity_type == "project":
|
||||
raise ValueError("Project cannot be created using operations")
|
||||
|
||||
if entity_type in ("asset", "archived_asset"):
|
||||
new_update_data = convert_update_folder_to_v4(
|
||||
project_name, entity_id, update_data, self.con
|
||||
)
|
||||
entity_type = "folder"
|
||||
|
||||
elif entity_type == "subset":
|
||||
new_update_data = convert_update_subset_to_v4(
|
||||
project_name, entity_id, update_data, self.con
|
||||
)
|
||||
entity_type = "product"
|
||||
|
||||
elif entity_type == "version":
|
||||
new_update_data = convert_update_version_to_v4(
|
||||
project_name, entity_id, update_data, self.con
|
||||
)
|
||||
|
||||
elif entity_type == "hero_version":
|
||||
new_update_data = convert_update_hero_version_to_v4(
|
||||
project_name, entity_id, update_data, self.con
|
||||
)
|
||||
entity_type = "version"
|
||||
|
||||
elif entity_type in ("representation", "archived_representation"):
|
||||
new_update_data = convert_update_representation_to_v4(
|
||||
project_name, entity_id, update_data, self.con
|
||||
)
|
||||
entity_type = "representation"
|
||||
|
||||
elif entity_type == "workfile":
|
||||
new_update_data = convert_update_workfile_info_to_v4(
|
||||
project_name, entity_id, update_data, self.con
|
||||
)
|
||||
|
||||
else:
|
||||
raise ValueError(
|
||||
"Unhandled entity type \"{}\"".format(entity_type)
|
||||
)
|
||||
|
||||
try:
|
||||
new_update_data = json.loads(
|
||||
json.dumps(new_update_data, default=entity_data_json_default)
|
||||
)
|
||||
|
||||
except Exception:
|
||||
raise ValueError("Couldn't json parse body: {}".format(
|
||||
json.dumps(new_update_data, default=failed_json_default)
|
||||
))
|
||||
|
||||
super(ServerUpdateOperation, self).__init__(
|
||||
project_name, entity_type, entity_id, new_update_data
|
||||
)
|
||||
|
||||
@property
|
||||
def con(self):
|
||||
return self.session.con
|
||||
|
||||
@property
|
||||
def session(self):
|
||||
return self._session
|
||||
|
||||
def to_server_operation(self):
|
||||
if not self._update_data:
|
||||
return None
|
||||
|
||||
update_data = {}
|
||||
for key, value in self._update_data.items():
|
||||
if value is REMOVED_VALUE:
|
||||
value = None
|
||||
update_data[key] = value
|
||||
|
||||
return {
|
||||
"id": self.id,
|
||||
"type": "update",
|
||||
"entityType": self.entity_type,
|
||||
"entityId": self.entity_id,
|
||||
"data": update_data
|
||||
}
|
||||
|
||||
|
||||
class ServerDeleteOperation(DeleteOperation):
|
||||
"""Operation to delete an entity.
|
||||
|
||||
Args:
|
||||
project_name (str): On which project operation will happen.
|
||||
entity_type (str): Type of entity on which change happens.
|
||||
e.g. 'asset', 'representation' etc.
|
||||
entity_id (Union[str, ObjectId]): Entity id that will be removed.
|
||||
"""
|
||||
|
||||
def __init__(self, project_name, entity_type, entity_id, session):
|
||||
self._session = session
|
||||
|
||||
if entity_type == "asset":
|
||||
entity_type = "folder"
|
||||
|
||||
elif entity_type == "hero_version":
|
||||
entity_type = "version"
|
||||
|
||||
elif entity_type == "subset":
|
||||
entity_type = "product"
|
||||
|
||||
super(ServerDeleteOperation, self).__init__(
|
||||
project_name, entity_type, entity_id
|
||||
)
|
||||
|
||||
@property
|
||||
def con(self):
|
||||
return self.session.con
|
||||
|
||||
@property
|
||||
def session(self):
|
||||
return self._session
|
||||
|
||||
def to_server_operation(self):
|
||||
return {
|
||||
"id": self.id,
|
||||
"type": self.operation_name,
|
||||
"entityId": self.entity_id,
|
||||
"entityType": self.entity_type,
|
||||
}
|
||||
|
||||
|
||||
class OperationsSession(BaseOperationsSession):
|
||||
def __init__(self, con=None, *args, **kwargs):
|
||||
super(OperationsSession, self).__init__(*args, **kwargs)
|
||||
if con is None:
|
||||
con = get_ayon_server_api_connection()
|
||||
self._con = con
|
||||
self._project_cache = {}
|
||||
self._nested_operations = collections.defaultdict(list)
|
||||
|
||||
@property
|
||||
def con(self):
|
||||
return self._con
|
||||
|
||||
def get_project(self, project_name):
|
||||
if project_name not in self._project_cache:
|
||||
self._project_cache[project_name] = self.con.get_project(
|
||||
project_name)
|
||||
return copy.deepcopy(self._project_cache[project_name])
|
||||
|
||||
def commit(self):
|
||||
"""Commit session operations."""
|
||||
|
||||
operations, self._operations = self._operations, []
|
||||
if not operations:
|
||||
return
|
||||
|
||||
operations_by_project = collections.defaultdict(list)
|
||||
for operation in operations:
|
||||
operations_by_project[operation.project_name].append(operation)
|
||||
|
||||
body_by_id = {}
|
||||
results = []
|
||||
for project_name, operations in operations_by_project.items():
|
||||
operations_body = []
|
||||
for operation in operations:
|
||||
body = operation.to_server_operation()
|
||||
if body is not None:
|
||||
try:
|
||||
json.dumps(body)
|
||||
except Exception:
|
||||
raise ValueError("Couldn't json parse body: {}".format(
|
||||
json.dumps(
|
||||
body, indent=4, default=failed_json_default
|
||||
)
|
||||
))
|
||||
|
||||
body_by_id[operation.id] = body
|
||||
operations_body.append(body)
|
||||
|
||||
if operations_body:
|
||||
result = self._con.post(
|
||||
"projects/{}/operations".format(project_name),
|
||||
operations=operations_body,
|
||||
canFail=False
|
||||
)
|
||||
results.append(result.data)
|
||||
|
||||
for result in results:
|
||||
if result.get("success"):
|
||||
continue
|
||||
|
||||
if "operations" not in result:
|
||||
raise FailedOperations(
|
||||
"Operation failed. Content: {}".format(str(result))
|
||||
)
|
||||
|
||||
for op_result in result["operations"]:
|
||||
if not op_result["success"]:
|
||||
operation_id = op_result["id"]
|
||||
raise FailedOperations((
|
||||
"Operation \"{}\" failed with data:\n{}\nError: {}."
|
||||
).format(
|
||||
operation_id,
|
||||
json.dumps(body_by_id[operation_id], indent=4),
|
||||
op_result.get("error", "unknown"),
|
||||
))
|
||||
|
||||
def create_entity(self, project_name, entity_type, data, nested_id=None):
|
||||
"""Fast access to 'ServerCreateOperation'.
|
||||
|
||||
Args:
|
||||
project_name (str): On which project the creation happens.
|
||||
entity_type (str): Which entity type will be created.
|
||||
data (Dict[str, Any]): Entity data.
|
||||
nested_id (str): Id of the operation from which this operation
|
||||
is triggered. Operations can trigger suboperations, but they
|
||||
must be added to the operations list after their parent is added.
|
||||
|
||||
Returns:
|
||||
ServerCreateOperation: Object of create operation.
|
||||
"""
|
||||
|
||||
operation = ServerCreateOperation(
|
||||
project_name, entity_type, data, self
|
||||
)
|
||||
|
||||
if nested_id:
|
||||
self._nested_operations[nested_id].append(operation)
|
||||
else:
|
||||
self.add(operation)
|
||||
if operation.id in self._nested_operations:
|
||||
self.extend(self._nested_operations.pop(operation.id))
|
||||
|
||||
return operation
|
||||
|
||||
def update_entity(
|
||||
self, project_name, entity_type, entity_id, update_data, nested_id=None
|
||||
):
|
||||
"""Fast access to 'ServerUpdateOperation'.
|
||||
|
||||
Returns:
|
||||
ServerUpdateOperation: Object of update operation.
|
||||
"""
|
||||
|
||||
operation = ServerUpdateOperation(
|
||||
project_name, entity_type, entity_id, update_data, self
|
||||
)
|
||||
if nested_id:
|
||||
self._nested_operations[nested_id].append(operation)
|
||||
else:
|
||||
self.add(operation)
|
||||
if operation.id in self._nested_operations:
|
||||
self.extend(self._nested_operations.pop(operation.id))
|
||||
return operation
|
||||
|
||||
def delete_entity(
|
||||
self, project_name, entity_type, entity_id, nested_id=None
|
||||
):
|
||||
"""Fast access to 'ServerDeleteOperation'.
|
||||
|
||||
Returns:
|
||||
ServerDeleteOperation: Object of delete operation.
|
||||
"""
|
||||
|
||||
operation = ServerDeleteOperation(
|
||||
project_name, entity_type, entity_id, self
|
||||
)
|
||||
if nested_id:
|
||||
self._nested_operations[nested_id].append(operation)
|
||||
else:
|
||||
self.add(operation)
|
||||
if operation.id in self._nested_operations:
|
||||
self.extend(self._nested_operations.pop(operation.id))
|
||||
return operation
|
||||
|
||||
|
||||
def create_project(
|
||||
project_name,
|
||||
project_code,
|
||||
library_project=False,
|
||||
preset_name=None,
|
||||
con=None
|
||||
):
|
||||
"""Create project using OpenPype settings.
|
||||
|
||||
This project creation function is not validating project document on
|
||||
creation. It is because project document is created blindly with only
|
||||
minimum required information about project which is its name, code, type
|
||||
and schema.
|
||||
|
||||
Entered project name must be unique and project must not exist yet.
|
||||
|
||||
Note:
|
||||
This function is here to be OP v4 ready but in v3 has more logic
|
||||
to do. That's why inner imports are in the body.
|
||||
|
||||
Args:
|
||||
project_name (str): New project name. Should be unique.
|
||||
project_code (str): Project's code should be unique too.
|
||||
library_project (bool): Project is library project.
|
||||
preset_name (str): Name of anatomy preset. Default is used if not
|
||||
passed.
|
||||
con (ServerAPI): Connection to server with logged user.
|
||||
|
||||
Raises:
|
||||
ValueError: When project name already exists in MongoDB.
|
||||
|
||||
Returns:
|
||||
dict: Created project document.
|
||||
"""
|
||||
|
||||
if con is None:
|
||||
con = get_ayon_server_api_connection()
|
||||
|
||||
return con.create_project(
|
||||
project_name,
|
||||
project_code,
|
||||
library_project,
|
||||
preset_name
|
||||
)
|
||||
|
||||
|
||||
def delete_project(project_name, con=None):
|
||||
if con is None:
|
||||
con = get_ayon_server_api_connection()
|
||||
|
||||
return con.delete_project(project_name)
|
||||
|
||||
|
||||
def create_thumbnail(project_name, src_filepath, thumbnail_id=None, con=None):
|
||||
if con is None:
|
||||
con = get_ayon_server_api_connection()
|
||||
return con.create_thumbnail(project_name, src_filepath, thumbnail_id)
|
||||
289
client/ayon_core/client/operations_base.py
Normal file
|
|
@ -0,0 +1,289 @@
|
|||
import uuid
|
||||
import copy
|
||||
from abc import ABCMeta, abstractmethod, abstractproperty
|
||||
import six
|
||||
|
||||
REMOVED_VALUE = object()
|
||||
|
||||
|
||||
@six.add_metaclass(ABCMeta)
|
||||
class AbstractOperation(object):
|
||||
"""Base operation class.
|
||||
|
||||
Operation represent a call into database. The call can create, change or
|
||||
remove data.
|
||||
|
||||
Args:
|
||||
project_name (str): On which project operation will happen.
|
||||
entity_type (str): Type of entity on which change happens.
|
||||
e.g. 'asset', 'representation' etc.
|
||||
"""
|
||||
|
||||
def __init__(self, project_name, entity_type):
|
||||
self._project_name = project_name
|
||||
self._entity_type = entity_type
|
||||
self._id = str(uuid.uuid4())
|
||||
|
||||
@property
|
||||
def project_name(self):
|
||||
return self._project_name
|
||||
|
||||
@property
|
||||
def id(self):
|
||||
"""Identifier of operation."""
|
||||
|
||||
return self._id
|
||||
|
||||
@property
|
||||
def entity_type(self):
|
||||
return self._entity_type
|
||||
|
||||
@abstractproperty
|
||||
def operation_name(self):
|
||||
"""Stringified type of operation."""
|
||||
|
||||
pass
|
||||
|
||||
def to_data(self):
|
||||
"""Convert operation to data that can be converted to json or others.
|
||||
|
||||
Warning:
|
||||
Current state returns ObjectId objects which cannot be parsed by
|
||||
json.
|
||||
|
||||
Returns:
|
||||
Dict[str, Any]: Description of operation.
|
||||
"""
|
||||
|
||||
return {
|
||||
"id": self._id,
|
||||
"entity_type": self.entity_type,
|
||||
"project_name": self.project_name,
|
||||
"operation": self.operation_name
|
||||
}
|
||||
|
||||
|
||||
class CreateOperation(AbstractOperation):
|
||||
"""Operation to create an entity.
|
||||
|
||||
Args:
|
||||
project_name (str): On which project operation will happen.
|
||||
entity_type (str): Type of entity on which change happens.
|
||||
e.g. 'asset', 'representation' etc.
|
||||
data (Dict[str, Any]): Data of entity that will be created.
|
||||
"""
|
||||
|
||||
operation_name = "create"
|
||||
|
||||
def __init__(self, project_name, entity_type, data):
|
||||
super(CreateOperation, self).__init__(project_name, entity_type)
|
||||
|
||||
if not data:
|
||||
data = {}
|
||||
else:
|
||||
data = copy.deepcopy(dict(data))
|
||||
self._data = data
|
||||
|
||||
def __setitem__(self, key, value):
|
||||
self.set_value(key, value)
|
||||
|
||||
def __getitem__(self, key):
|
||||
return self.data[key]
|
||||
|
||||
def set_value(self, key, value):
|
||||
self.data[key] = value
|
||||
|
||||
def get(self, key, *args, **kwargs):
|
||||
return self.data.get(key, *args, **kwargs)
|
||||
|
||||
@abstractproperty
|
||||
def entity_id(self):
|
||||
pass
|
||||
|
||||
@property
|
||||
def data(self):
|
||||
return self._data
|
||||
|
||||
def to_data(self):
|
||||
output = super(CreateOperation, self).to_data()
|
||||
output["data"] = copy.deepcopy(self.data)
|
||||
return output
|
||||
|
||||
|
||||
class UpdateOperation(AbstractOperation):
|
||||
"""Operation to update an entity.
|
||||
|
||||
Args:
|
||||
project_name (str): On which project operation will happen.
|
||||
entity_type (str): Type of entity on which change happens.
|
||||
e.g. 'asset', 'representation' etc.
|
||||
entity_id (Union[str, ObjectId]): Identifier of an entity.
|
||||
update_data (Dict[str, Any]): Key -> value changes that will be set in
|
||||
database. If value is set to 'REMOVED_VALUE' the key will be
|
||||
removed. Only first level of dictionary is checked (on purpose).
|
||||
"""
|
||||
|
||||
operation_name = "update"
|
||||
|
||||
def __init__(self, project_name, entity_type, entity_id, update_data):
|
||||
super(UpdateOperation, self).__init__(project_name, entity_type)
|
||||
|
||||
self._entity_id = entity_id
|
||||
self._update_data = update_data
|
||||
|
||||
@property
|
||||
def entity_id(self):
|
||||
return self._entity_id
|
||||
|
||||
@property
|
||||
def update_data(self):
|
||||
return self._update_data
|
||||
|
||||
def to_data(self):
|
||||
changes = {}
|
||||
for key, value in self._update_data.items():
|
||||
if value is REMOVED_VALUE:
|
||||
value = None
|
||||
changes[key] = value
|
||||
|
||||
output = super(UpdateOperation, self).to_data()
|
||||
output.update({
|
||||
"entity_id": self.entity_id,
|
||||
"changes": changes
|
||||
})
|
||||
return output
|
||||
|
||||
|
||||
class DeleteOperation(AbstractOperation):
|
||||
"""Operation to delete an entity.
|
||||
|
||||
Args:
|
||||
project_name (str): On which project operation will happen.
|
||||
entity_type (str): Type of entity on which change happens.
|
||||
e.g. 'asset', 'representation' etc.
|
||||
entity_id (Union[str, ObjectId]): Entity id that will be removed.
|
||||
"""
|
||||
|
||||
operation_name = "delete"
|
||||
|
||||
def __init__(self, project_name, entity_type, entity_id):
|
||||
super(DeleteOperation, self).__init__(project_name, entity_type)
|
||||
|
||||
self._entity_id = entity_id
|
||||
|
||||
@property
|
||||
def entity_id(self):
|
||||
return self._entity_id
|
||||
|
||||
def to_data(self):
|
||||
output = super(DeleteOperation, self).to_data()
|
||||
output["entity_id"] = self.entity_id
|
||||
return output
|
||||
|
||||
|
||||
class BaseOperationsSession(object):
|
||||
"""Session storing operations that should happen in an order.
|
||||
|
||||
At this moment does not handle anything special can be considered as
|
||||
stupid list of operations that will happen after each other. If creation
|
||||
of same entity is there multiple times it's handled in any way and document
|
||||
values are not validated.
|
||||
"""
|
||||
|
||||
def __init__(self):
|
||||
self._operations = []
|
||||
|
||||
def __len__(self):
|
||||
return len(self._operations)
|
||||
|
||||
def add(self, operation):
|
||||
"""Add operation to be processed.
|
||||
|
||||
Args:
|
||||
operation (BaseOperation): Operation that should be processed.
|
||||
"""
|
||||
if not isinstance(
|
||||
operation,
|
||||
(CreateOperation, UpdateOperation, DeleteOperation)
|
||||
):
|
||||
raise TypeError("Expected Operation object got {}".format(
|
||||
str(type(operation))
|
||||
))
|
||||
|
||||
self._operations.append(operation)
|
||||
|
||||
def append(self, operation):
|
||||
"""Add operation to be processed.
|
||||
|
||||
Args:
|
||||
operation (BaseOperation): Operation that should be processed.
|
||||
"""
|
||||
|
||||
self.add(operation)
|
||||
|
||||
def extend(self, operations):
|
||||
"""Add operations to be processed.
|
||||
|
||||
Args:
|
||||
operations (List[BaseOperation]): Operations that should be
|
||||
processed.
|
||||
"""
|
||||
|
||||
for operation in operations:
|
||||
self.add(operation)
|
||||
|
||||
def remove(self, operation):
|
||||
"""Remove operation."""
|
||||
|
||||
self._operations.remove(operation)
|
||||
|
||||
def clear(self):
|
||||
"""Clear all registered operations."""
|
||||
|
||||
self._operations = []
|
||||
|
||||
def to_data(self):
|
||||
return [
|
||||
operation.to_data()
|
||||
for operation in self._operations
|
||||
]
|
||||
|
||||
@abstractmethod
|
||||
def commit(self):
|
||||
"""Commit session operations."""
|
||||
pass
|
||||
|
||||
def create_entity(self, project_name, entity_type, data):
|
||||
"""Fast access to 'CreateOperation'.
|
||||
|
||||
Returns:
|
||||
CreateOperation: Object of update operation.
|
||||
"""
|
||||
|
||||
operation = CreateOperation(project_name, entity_type, data)
|
||||
self.add(operation)
|
||||
return operation
|
||||
|
||||
def update_entity(self, project_name, entity_type, entity_id, update_data):
|
||||
"""Fast access to 'UpdateOperation'.
|
||||
|
||||
Returns:
|
||||
UpdateOperation: Object of update operation.
|
||||
"""
|
||||
|
||||
operation = UpdateOperation(
|
||||
project_name, entity_type, entity_id, update_data
|
||||
)
|
||||
self.add(operation)
|
||||
return operation
|
||||
|
||||
def delete_entity(self, project_name, entity_type, entity_id):
|
||||
"""Fast access to 'DeleteOperation'.
|
||||
|
||||
Returns:
|
||||
DeleteOperation: Object of delete operation.
|
||||
"""
|
||||
|
||||
operation = DeleteOperation(project_name, entity_type, entity_id)
|
||||
self.add(operation)
|
||||
return operation
|
||||
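A minimal usage sketch of the session API defined above (illustrative only; the subclass name and entity ids are hypothetical, and `create_entity` is left out because `CreateOperation.entity_id` stays abstract in this base module):

```python
from ayon_core.client.operations_base import (
    BaseOperationsSession,
    REMOVED_VALUE,
)


class PrintOperationsSession(BaseOperationsSession):
    """Hypothetical session that only prints collected operations."""

    def commit(self):
        # A real implementation would send the payloads to a backend.
        for operation_data in self.to_data():
            print(operation_data)
        self.clear()


session = PrintOperationsSession()
session.update_entity(
    "my_project", "asset", "some-entity-id",
    {"attrib.fps": 25, "attrib.resolutionWidth": REMOVED_VALUE}
)
session.delete_entity("my_project", "asset", "other-entity-id")
session.commit()  # prints 2 operation payloads and empties the session
```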
229
client/ayon_core/client/thumbnails.py
Normal file
@@ -0,0 +1,229 @@
"""Cache of thumbnails downloaded from AYON server.
|
||||
|
||||
Thumbnails are cached to appdirs to predefined directory.
|
||||
|
||||
This should be moved to thumbnails logic in pipeline but because it would
|
||||
overflow OpenPype logic it's here for now.
|
||||
"""
|
||||
|
||||
import os
|
||||
import time
|
||||
import collections
|
||||
|
||||
import appdirs
|
||||
|
||||
FileInfo = collections.namedtuple(
|
||||
"FileInfo",
|
||||
("path", "size", "modification_time")
|
||||
)
|
||||
|
||||
|
||||
class AYONThumbnailCache:
|
||||
"""Cache of thumbnails on local storage.
|
||||
|
||||
Thumbnails are cached to appdirs to predefined directory. Each project has
|
||||
own subfolder with thumbnails -> that's because each project has own
|
||||
thumbnail id validation and file names are thumbnail ids with matching
|
||||
extension. Extensions are predefined (.png and .jpeg).
|
||||
|
||||
Cache has cleanup mechanism which is triggered on initialized by default.
|
||||
|
||||
The cleanup has 2 levels:
|
||||
1. soft cleanup which remove all files that are older then 'days_alive'
|
||||
2. max size cleanup which remove all files until the thumbnails folder
|
||||
contains less then 'max_filesize'
|
||||
- this is time consuming so it's not triggered automatically
|
||||
|
||||
Args:
|
||||
cleanup (bool): Trigger soft cleanup (Cleanup expired thumbnails).
|
||||
"""
|
||||
|
||||
# Lifetime of thumbnails (in seconds)
|
||||
# - default 3 days
|
||||
days_alive = 3
|
||||
# Max size of thumbnail directory (in bytes)
|
||||
# - default 2 Gb
|
||||
max_filesize = 2 * 1024 * 1024 * 1024
|
||||
|
||||
def __init__(self, cleanup=True):
|
||||
self._thumbnails_dir = None
|
||||
self._days_alive_secs = self.days_alive * 24 * 60 * 60
|
||||
if cleanup:
|
||||
self.cleanup()
|
||||
|
||||
def get_thumbnails_dir(self):
|
||||
"""Root directory where thumbnails are stored.
|
||||
|
||||
Returns:
|
||||
str: Path to thumbnails root.
|
||||
"""
|
||||
|
||||
if self._thumbnails_dir is None:
|
||||
# TODO use generic function
|
||||
directory = appdirs.user_data_dir("AYON", "Ynput")
|
||||
self._thumbnails_dir = os.path.join(directory, "thumbnails")
|
||||
return self._thumbnails_dir
|
||||
|
||||
thumbnails_dir = property(get_thumbnails_dir)
|
||||
|
||||
def get_thumbnails_dir_file_info(self):
|
||||
"""Get information about all files in thumbnails directory.
|
||||
|
||||
Returns:
|
||||
List[FileInfo]: List of file information about all files.
|
||||
"""
|
||||
|
||||
thumbnails_dir = self.thumbnails_dir
|
||||
files_info = []
|
||||
if not os.path.exists(thumbnails_dir):
|
||||
return files_info
|
||||
|
||||
for root, _, filenames in os.walk(thumbnails_dir):
|
||||
for filename in filenames:
|
||||
path = os.path.join(root, filename)
|
||||
files_info.append(FileInfo(
|
||||
path, os.path.getsize(path), os.path.getmtime(path)
|
||||
))
|
||||
return files_info
|
||||
|
||||
def get_thumbnails_dir_size(self, files_info=None):
|
||||
"""Got full size of thumbnail directory.
|
||||
|
||||
Args:
|
||||
files_info (List[FileInfo]): Prepared file information about
|
||||
files in thumbnail directory.
|
||||
|
||||
Returns:
|
||||
int: File size of all files in thumbnail directory.
|
||||
"""
|
||||
|
||||
if files_info is None:
|
||||
files_info = self.get_thumbnails_dir_file_info()
|
||||
|
||||
if not files_info:
|
||||
return 0
|
||||
|
||||
return sum(
|
||||
file_info.size
|
||||
for file_info in files_info
|
||||
)
|
||||
|
||||
def cleanup(self, check_max_size=False):
|
||||
"""Cleanup thumbnails directory.
|
||||
|
||||
Args:
|
||||
check_max_size (bool): Also cleanup files to match max size of
|
||||
thumbnails directory.
|
||||
"""
|
||||
|
||||
thumbnails_dir = self.get_thumbnails_dir()
|
||||
# Skip if thumbnails dir does not exists yet
|
||||
if not os.path.exists(thumbnails_dir):
|
||||
return
|
||||
|
||||
self._soft_cleanup(thumbnails_dir)
|
||||
if check_max_size:
|
||||
self._max_size_cleanup(thumbnails_dir)
|
||||
|
||||
def _soft_cleanup(self, thumbnails_dir):
|
||||
current_time = time.time()
|
||||
for root, _, filenames in os.walk(thumbnails_dir):
|
||||
for filename in filenames:
|
||||
path = os.path.join(root, filename)
|
||||
modification_time = os.path.getmtime(path)
|
||||
if current_time - modification_time > self._days_alive_secs:
|
||||
os.remove(path)
|
||||
|
||||
def _max_size_cleanup(self, thumbnails_dir):
|
||||
files_info = self.get_thumbnails_dir_file_info()
|
||||
size = self.get_thumbnails_dir_size(files_info)
|
||||
if size < self.max_filesize:
|
||||
return
|
||||
|
||||
sorted_file_info = collections.deque(
|
||||
sorted(files_info, key=lambda item: item.modification_time)
|
||||
)
|
||||
diff = size - self.max_filesize
|
||||
while diff > 0:
|
||||
if not sorted_file_info:
|
||||
break
|
||||
|
||||
file_info = sorted_file_info.popleft()
|
||||
diff -= file_info.size
|
||||
os.remove(file_info.path)
|
||||
|
||||
def get_thumbnail_filepath(self, project_name, thumbnail_id):
|
||||
"""Get thumbnail by thumbnail id.
|
||||
|
||||
Args:
|
||||
project_name (str): Name of project.
|
||||
thumbnail_id (str): Thumbnail id.
|
||||
|
||||
Returns:
|
||||
Union[str, None]: Path to thumbnail image or None if thumbnail
|
||||
is not cached yet.
|
||||
"""
|
||||
|
||||
if not thumbnail_id:
|
||||
return None
|
||||
|
||||
for ext in (
|
||||
".png",
|
||||
".jpeg",
|
||||
):
|
||||
filepath = os.path.join(
|
||||
self.thumbnails_dir, project_name, thumbnail_id + ext
|
||||
)
|
||||
if os.path.exists(filepath):
|
||||
return filepath
|
||||
return None
|
||||
|
||||
def get_project_dir(self, project_name):
|
||||
"""Path to root directory for specific project.
|
||||
|
||||
Args:
|
||||
project_name (str): Name of project for which root directory path
|
||||
should be returned.
|
||||
|
||||
Returns:
|
||||
str: Path to root of project's thumbnails.
|
||||
"""
|
||||
|
||||
return os.path.join(self.thumbnails_dir, project_name)
|
||||
|
||||
def make_sure_project_dir_exists(self, project_name):
|
||||
project_dir = self.get_project_dir(project_name)
|
||||
if not os.path.exists(project_dir):
|
||||
os.makedirs(project_dir)
|
||||
return project_dir
|
||||
|
||||
def store_thumbnail(self, project_name, thumbnail_id, content, mime_type):
|
||||
"""Store thumbnail to cache folder.
|
||||
|
||||
Args:
|
||||
project_name (str): Project where the thumbnail belong to.
|
||||
thumbnail_id (str): Id of thumbnail.
|
||||
content (bytes): Byte content of thumbnail file.
|
||||
mime_type (str): Mime type of the content (e.g. 'image/png').
|
||||
|
||||
Returns:
|
||||
str: Path to cached thumbnail image file.
|
||||
"""
|
||||
|
||||
if mime_type == "image/png":
|
||||
ext = ".png"
|
||||
elif mime_type == "image/jpeg":
|
||||
ext = ".jpeg"
|
||||
else:
|
||||
raise ValueError(
|
||||
"Unknown mime type for thumbnail \"{}\"".format(mime_type))
|
||||
|
||||
project_dir = self.make_sure_project_dir_exists(project_name)
|
||||
thumbnail_path = os.path.join(project_dir, thumbnail_id + ext)
|
||||
with open(thumbnail_path, "wb") as stream:
|
||||
stream.write(content)
|
||||
|
||||
current_time = time.time()
|
||||
os.utime(thumbnail_path, (current_time, current_time))
|
||||
|
||||
return thumbnail_path
|
||||
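A short usage sketch of the cache above (the project name, thumbnail id and file path are made up; the source image must exist on disk):

```python
from ayon_core.client.thumbnails import AYONThumbnailCache

cache = AYONThumbnailCache()  # soft cleanup of expired files runs on init

thumbnail_id = "0123456789abcdef0123456789abcdef"
with open("/path/to/thumbnail.png", "rb") as stream:
    content = stream.read()

# Store the downloaded content and read it back from the cache later
path = cache.store_thumbnail("my_project", thumbnail_id, content, "image/png")
assert cache.get_thumbnail_filepath("my_project", thumbnail_id) == path
```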
134
client/ayon_core/client/utils.py
Normal file
@@ -0,0 +1,134 @@
import os
|
||||
import uuid
|
||||
|
||||
import ayon_api
|
||||
|
||||
from ayon_core.client.operations_base import REMOVED_VALUE
|
||||
|
||||
|
||||
class _GlobalCache:
|
||||
initialized = False
|
||||
|
||||
|
||||
def get_ayon_server_api_connection():
|
||||
if _GlobalCache.initialized:
|
||||
con = ayon_api.get_server_api_connection()
|
||||
else:
|
||||
from ayon_core.lib.local_settings import get_local_site_id
|
||||
|
||||
_GlobalCache.initialized = True
|
||||
site_id = get_local_site_id()
|
||||
version = os.getenv("AYON_VERSION")
|
||||
if ayon_api.is_connection_created():
|
||||
con = ayon_api.get_server_api_connection()
|
||||
con.set_site_id(site_id)
|
||||
con.set_client_version(version)
|
||||
else:
|
||||
con = ayon_api.create_connection(site_id, version)
|
||||
return con
|
||||
|
||||
|
||||
def create_entity_id():
|
||||
return uuid.uuid1().hex
|
||||
|
||||
|
||||
def prepare_attribute_changes(old_entity, new_entity, replace=False):
|
||||
"""Prepare changes of attributes on entities.
|
||||
|
||||
Compare 'attrib' of old and new entity data to prepare only changed
|
||||
values that should be sent to server for update.
|
||||
|
||||
Example:
|
||||
>>> # Limited entity data to 'attrib'
|
||||
>>> old_entity = {
|
||||
... "attrib": {"attr_1": 1, "attr_2": "MyString", "attr_3": True}
|
||||
... }
|
||||
>>> new_entity = {
|
||||
... "attrib": {"attr_1": 2, "attr_3": True, "attr_4": 3}
|
||||
... }
|
||||
>>> # Changes if replacement should not happen
|
||||
>>> expected_changes = {
|
||||
... "attr_1": 2,
|
||||
... "attr_4": 3
|
||||
... }
|
||||
>>> changes = prepare_attribute_changes(old_entity, new_entity)
|
||||
>>> changes == expected_changes
|
||||
True
|
||||
|
||||
>>> # Changes if replacement should happen
|
||||
>>> expected_changes_replace = {
|
||||
... "attr_1": 2,
|
||||
... "attr_2": REMOVED_VALUE,
|
||||
... "attr_4": 3
|
||||
... }
|
||||
>>> changes_replace = prepare_attribute_changes(
|
||||
... old_entity, new_entity, True)
|
||||
>>> changes_replace == expected_changes_replace
|
||||
True
|
||||
|
||||
Args:
|
||||
old_entity (dict[str, Any]): Data of entity queried from server.
|
||||
new_entity (dict[str, Any]): Entity data with applied changes.
|
||||
replace (bool): New entity should fully replace all old entity values.
|
||||
|
||||
Returns:
|
||||
Dict[str, Any]: Values from new entity only if value has changed.
|
||||
"""
|
||||
|
||||
attrib_changes = {}
|
||||
new_attrib = new_entity.get("attrib")
|
||||
old_attrib = old_entity.get("attrib")
|
||||
if new_attrib is None:
|
||||
if not replace:
|
||||
return attrib_changes
|
||||
new_attrib = {}
|
||||
|
||||
if old_attrib is None:
|
||||
return new_attrib
|
||||
|
||||
for attr, new_attr_value in new_attrib.items():
|
||||
old_attr_value = old_attrib.get(attr)
|
||||
if old_attr_value != new_attr_value:
|
||||
attrib_changes[attr] = new_attr_value
|
||||
|
||||
if replace:
|
||||
for attr in old_attrib:
|
||||
if attr not in new_attrib:
|
||||
attrib_changes[attr] = REMOVED_VALUE
|
||||
|
||||
return attrib_changes
|
||||
|
||||
|
||||
def prepare_entity_changes(old_entity, new_entity, replace=False):
|
||||
"""Prepare changes of AYON entities.
|
||||
|
||||
Compare old and new entity to filter values from new data that changed.
|
||||
|
||||
Args:
|
||||
old_entity (dict[str, Any]): Data of entity queried from server.
|
||||
new_entity (dict[str, Any]): Entity data with applied changes.
|
||||
replace (bool): All attributes should be replaced by new values. So
|
||||
all attribute values that are not on new entity will be removed.
|
||||
|
||||
Returns:
|
||||
Dict[str, Any]: Only values from new entity that changed.
|
||||
"""
|
||||
|
||||
changes = {}
|
||||
for key, new_value in new_entity.items():
|
||||
if key == "attrib":
|
||||
continue
|
||||
|
||||
old_value = old_entity.get(key)
|
||||
if old_value != new_value:
|
||||
changes[key] = new_value
|
||||
|
||||
if replace:
|
||||
for key in old_entity:
|
||||
if key not in new_entity:
|
||||
changes[key] = REMOVED_VALUE
|
||||
|
||||
attr_changes = prepare_attribute_changes(old_entity, new_entity, replace)
|
||||
if attr_changes:
|
||||
changes["attrib"] = attr_changes
|
||||
return changes
|
||||
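To illustrate `prepare_entity_changes`, a small sketch with made up entity data; only changed values (and changed attributes) end up in the result:

```python
from ayon_core.client.utils import prepare_entity_changes

old_entity = {
    "name": "character01",
    "status": "In progress",
    "attrib": {"fps": 25, "frameStart": 1001},
}
new_entity = {
    "name": "character01",
    "status": "Approved",
    "attrib": {"fps": 24, "frameStart": 1001},
}

changes = prepare_entity_changes(old_entity, new_entity)
# {"status": "Approved", "attrib": {"fps": 24}}
print(changes)
```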
49
client/ayon_core/hooks/pre_add_last_workfile_arg.py
Normal file
@@ -0,0 +1,49 @@
import os
|
||||
|
||||
from ayon_core.lib.applications import PreLaunchHook, LaunchTypes
|
||||
|
||||
|
||||
class AddLastWorkfileToLaunchArgs(PreLaunchHook):
|
||||
"""Add last workfile path to launch arguments.
|
||||
|
||||
This is not possible to do for all applications the same way.
|
||||
Checks 'start_last_workfile', if set to False, it will not open last
|
||||
workfile. This property is set explicitly in Launcher.
|
||||
"""
|
||||
|
||||
# Execute after workfile template copy
|
||||
order = 10
|
||||
app_groups = {
|
||||
"3dsmax", "adsk_3dsmax",
|
||||
"maya",
|
||||
"nuke",
|
||||
"nukex",
|
||||
"hiero",
|
||||
"houdini",
|
||||
"nukestudio",
|
||||
"fusion",
|
||||
"blender",
|
||||
"photoshop",
|
||||
"tvpaint",
|
||||
"substancepainter",
|
||||
"aftereffects",
|
||||
"wrap"
|
||||
}
|
||||
launch_types = {LaunchTypes.local}
|
||||
|
||||
def execute(self):
|
||||
if not self.data.get("start_last_workfile"):
|
||||
self.log.info("It is set to not start last workfile on start.")
|
||||
return
|
||||
|
||||
last_workfile = self.data.get("last_workfile_path")
|
||||
if not last_workfile:
|
||||
self.log.warning("Last workfile was not collected.")
|
||||
return
|
||||
|
||||
if not os.path.exists(last_workfile):
|
||||
self.log.info("Current context does not have any workfile yet.")
|
||||
return
|
||||
|
||||
# Add path to workfile to arguments
|
||||
self.launch_context.launch_args.append(last_workfile)
|
||||
113
client/ayon_core/hooks/pre_copy_template_workfile.py
Normal file
|
|
@ -0,0 +1,113 @@
|
|||
import os
|
||||
import shutil
|
||||
from ayon_core.settings import get_project_settings
|
||||
from ayon_core.lib.applications import PreLaunchHook, LaunchTypes
|
||||
from ayon_core.pipeline.workfile import (
|
||||
get_custom_workfile_template,
|
||||
get_custom_workfile_template_by_string_context
|
||||
)
|
||||
|
||||
|
||||
class CopyTemplateWorkfile(PreLaunchHook):
|
||||
"""Copy workfile template.
|
||||
|
||||
This is not possible to do for all applications the same way.
|
||||
|
||||
Prelaunch hook works only if the last workfile path points to a file that
does not exist yet.
- That is possible only if it's the first version.
|
||||
"""
|
||||
|
||||
# Before `AddLastWorkfileToLaunchArgs`
|
||||
order = 0
|
||||
app_groups = {"blender", "photoshop", "tvpaint", "aftereffects",
|
||||
"wrap"}
|
||||
launch_types = {LaunchTypes.local}
|
||||
|
||||
def execute(self):
|
||||
"""Check if can copy template for context and do it if possible.
|
||||
|
||||
First check if host for current project should create first workfile.
|
||||
Second check is if template is reachable and can be copied.
|
||||
|
||||
Args:
|
||||
last_workfile(str): Path where template will be copied.
|
||||
|
||||
Returns:
|
||||
None: This is a void method.
|
||||
"""
|
||||
|
||||
last_workfile = self.data.get("last_workfile_path")
|
||||
if not last_workfile:
|
||||
self.log.warning((
|
||||
"Last workfile was not collected."
|
||||
" Can't add it to launch arguments or determine if should"
|
||||
" copy template."
|
||||
))
|
||||
return
|
||||
|
||||
if os.path.exists(last_workfile):
|
||||
self.log.debug("Last workfile exists. Skipping {} process.".format(
|
||||
self.__class__.__name__
|
||||
))
|
||||
return
|
||||
|
||||
self.log.info("Last workfile does not exist.")
|
||||
|
||||
project_name = self.data["project_name"]
|
||||
asset_name = self.data["asset_name"]
|
||||
task_name = self.data["task_name"]
|
||||
host_name = self.application.host_name
|
||||
|
||||
project_settings = get_project_settings(project_name)
|
||||
|
||||
project_doc = self.data.get("project_doc")
|
||||
asset_doc = self.data.get("asset_doc")
|
||||
anatomy = self.data.get("anatomy")
|
||||
if project_doc and asset_doc:
|
||||
self.log.debug("Started filtering of custom template paths.")
|
||||
template_path = get_custom_workfile_template(
|
||||
project_doc,
|
||||
asset_doc,
|
||||
task_name,
|
||||
host_name,
|
||||
anatomy,
|
||||
project_settings
|
||||
)
|
||||
|
||||
else:
|
||||
self.log.warning((
|
||||
"Global data collection probably did not execute."
|
||||
" Using backup solution."
|
||||
))
|
||||
template_path = get_custom_workfile_template_by_string_context(
|
||||
project_name,
|
||||
asset_name,
|
||||
task_name,
|
||||
host_name,
|
||||
anatomy,
|
||||
project_settings
|
||||
)
|
||||
|
||||
if not template_path:
|
||||
self.log.info(
|
||||
"Registered custom templates didn't match current context."
|
||||
)
|
||||
return
|
||||
|
||||
if not os.path.exists(template_path):
|
||||
self.log.warning(
|
||||
"Couldn't find workfile template file \"{}\"".format(
|
||||
template_path
|
||||
)
|
||||
)
|
||||
return
|
||||
|
||||
self.log.info(
|
||||
f"Creating workfile from template: \"{template_path}\""
|
||||
)
|
||||
|
||||
# Copy template workfile to new destination
|
||||
shutil.copy2(
|
||||
os.path.normpath(template_path),
|
||||
os.path.normpath(last_workfile)
|
||||
)
|
||||
35
client/ayon_core/hooks/pre_create_extra_workdir_folders.py
Normal file
|
|
@ -0,0 +1,35 @@
|
|||
import os
|
||||
from ayon_core.lib.applications import PreLaunchHook, LaunchTypes
|
||||
from ayon_core.pipeline.workfile import create_workdir_extra_folders
|
||||
|
||||
|
||||
class CreateWorkdirExtraFolders(PreLaunchHook):
|
||||
"""Create extra folders for the work directory.
|
||||
|
||||
Based on setting `project_settings/global/tools/Workfiles/extra_folders`
|
||||
profile filtering will decide whether extra folders need to be created in
|
||||
the work directory.
|
||||
|
||||
"""
|
||||
|
||||
# Execute after workfile template copy
|
||||
order = 15
|
||||
launch_types = {LaunchTypes.local}
|
||||
|
||||
def execute(self):
|
||||
if not self.application.is_host:
|
||||
return
|
||||
|
||||
env = self.data.get("env") or {}
|
||||
workdir = env.get("AVALON_WORKDIR")
|
||||
if not workdir or not os.path.exists(workdir):
|
||||
return
|
||||
|
||||
host_name = self.application.host_name
|
||||
task_type = self.data["task_type"]
|
||||
task_name = self.data["task_name"]
|
||||
project_name = self.data["project_name"]
|
||||
|
||||
create_workdir_extra_folders(
|
||||
workdir, host_name, task_type, task_name, project_name,
|
||||
)
|
||||
77
client/ayon_core/hooks/pre_global_host_data.py
Normal file
|
|
@ -0,0 +1,77 @@
|
|||
from ayon_core.client import get_project, get_asset_by_name
|
||||
from ayon_core.lib.applications import (
|
||||
PreLaunchHook,
|
||||
EnvironmentPrepData,
|
||||
prepare_app_environments,
|
||||
prepare_context_environments
|
||||
)
|
||||
from ayon_core.pipeline import Anatomy
|
||||
|
||||
|
||||
class GlobalHostDataHook(PreLaunchHook):
|
||||
order = -100
|
||||
launch_types = set()
|
||||
|
||||
def execute(self):
|
||||
"""Prepare global objects to `data` that will be used for sure."""
|
||||
self.prepare_global_data()
|
||||
|
||||
if not self.data.get("asset_doc"):
|
||||
return
|
||||
|
||||
app = self.launch_context.application
|
||||
temp_data = EnvironmentPrepData({
|
||||
"project_name": self.data["project_name"],
|
||||
"asset_name": self.data["asset_name"],
|
||||
"task_name": self.data["task_name"],
|
||||
|
||||
"app": app,
|
||||
|
||||
"project_doc": self.data["project_doc"],
|
||||
"asset_doc": self.data["asset_doc"],
|
||||
|
||||
"anatomy": self.data["anatomy"],
|
||||
|
||||
"env": self.launch_context.env,
|
||||
|
||||
"start_last_workfile": self.data.get("start_last_workfile"),
|
||||
"last_workfile_path": self.data.get("last_workfile_path"),
|
||||
|
||||
"log": self.log
|
||||
})
|
||||
|
||||
prepare_app_environments(temp_data, self.launch_context.env_group)
|
||||
prepare_context_environments(temp_data)
|
||||
|
||||
temp_data.pop("log")
|
||||
|
||||
self.data.update(temp_data)
|
||||
|
||||
def prepare_global_data(self):
|
||||
"""Prepare global objects to `data` that will be used for sure."""
|
||||
# Mongo documents
|
||||
project_name = self.data.get("project_name")
|
||||
if not project_name:
|
||||
self.log.info(
|
||||
"Skipping global data preparation."
|
||||
" Key `project_name` was not found in launch context."
|
||||
)
|
||||
return
|
||||
|
||||
self.log.debug("Project name is set to \"{}\"".format(project_name))
|
||||
# Anatomy
|
||||
self.data["anatomy"] = Anatomy(project_name)
|
||||
|
||||
# Project document
|
||||
project_doc = get_project(project_name)
|
||||
self.data["project_doc"] = project_doc
|
||||
|
||||
asset_name = self.data.get("asset_name")
|
||||
if not asset_name:
|
||||
self.log.warning(
|
||||
"Asset name was not set. Skipping asset document query."
|
||||
)
|
||||
return
|
||||
|
||||
asset_doc = get_asset_by_name(project_name, asset_name)
|
||||
self.data["asset_doc"] = asset_doc
|
||||
35
client/ayon_core/hooks/pre_mac_launch.py
Normal file
|
|
@ -0,0 +1,35 @@
|
|||
import os
|
||||
from ayon_core.lib.applications import PreLaunchHook, LaunchTypes
|
||||
|
||||
|
||||
class LaunchWithTerminal(PreLaunchHook):
|
||||
"""Mac specific pre arguments for application.
|
||||
|
||||
Mac applications should be launched using the "open" command, which
internally handles opening of the executable. The "-a" argument tells it
that an application is being opened. This is used only for executables
ending with ".app". It is expected that these executables point to app
packages.
|
||||
"""
|
||||
order = 1000
|
||||
|
||||
platforms = {"darwin"}
|
||||
launch_types = {LaunchTypes.local}
|
||||
|
||||
def execute(self):
|
||||
executable = str(self.launch_context.executable)
|
||||
# Skip executables not ending with ".app" or that are not folder
|
||||
if not executable.endswith(".app") or not os.path.isdir(executable):
|
||||
return
|
||||
|
||||
# Check if first argument match executable path
|
||||
# - Few applications are not executed directly but through AYON
|
||||
# launcher process (Photoshop, AfterEffects, Harmony, ...).
|
||||
# These should not use `open`.
|
||||
if self.launch_context.launch_args[0] != executable:
|
||||
return
|
||||
|
||||
# Tell `open` to pass arguments if there are any
|
||||
if len(self.launch_context.launch_args) > 1:
|
||||
self.launch_context.launch_args.insert(1, "--args")
|
||||
# Prepend open arguments
|
||||
self.launch_context.launch_args.insert(0, ["open", "-na"])
|
||||
31
client/ayon_core/hooks/pre_new_console_apps.py
Normal file
|
|
@ -0,0 +1,31 @@
|
|||
import subprocess
|
||||
from ayon_core.lib.applications import PreLaunchHook, LaunchTypes
|
||||
|
||||
|
||||
class LaunchNewConsoleApps(PreLaunchHook):
|
||||
"""Foundry applications have specific way how to launch them.
|
||||
|
||||
Nuke is executed "like" python process so it is required to pass
|
||||
`CREATE_NEW_CONSOLE` flag on windows to trigger creation of new console.
|
||||
At the same time the newly created console won't create its own stdout
|
||||
and stderr handlers so they should not be redirected to DEVNULL.
|
||||
"""
|
||||
|
||||
# Should be as last hook because must change launch arguments to string
|
||||
order = 1000
|
||||
app_groups = {
|
||||
"nuke", "nukeassist", "nukex", "hiero", "nukestudio", "mayapy"
|
||||
}
|
||||
platforms = {"windows"}
|
||||
launch_types = {LaunchTypes.local}
|
||||
|
||||
def execute(self):
|
||||
# Change `creationflags` to CREATE_NEW_CONSOLE
|
||||
# - on Windows some apps will create new window using its console
|
||||
# Set `stdout` and `stderr` to None so new created console does not
|
||||
# have redirected output to DEVNULL in build
|
||||
self.launch_context.kwargs.update({
|
||||
"creationflags": subprocess.CREATE_NEW_CONSOLE,
|
||||
"stdout": None,
|
||||
"stderr": None
|
||||
})
|
||||
58
client/ayon_core/hooks/pre_non_python_host_launch.py
Normal file
|
|
@ -0,0 +1,58 @@
|
|||
import os
|
||||
|
||||
from ayon_core.lib import get_ayon_launcher_args
|
||||
from ayon_core.lib.applications import (
|
||||
get_non_python_host_kwargs,
|
||||
PreLaunchHook,
|
||||
LaunchTypes,
|
||||
)
|
||||
|
||||
from ayon_core import AYON_CORE_ROOT
|
||||
|
||||
|
||||
class NonPythonHostHook(PreLaunchHook):
|
||||
"""Launch arguments preparation.
|
||||
|
||||
Non python host implementations do not launch the host directly but use a
python script which launches the host. For these cases it is necessary to
prepend the python (or ayon) executable and script path before the
application's executable.
|
||||
"""
|
||||
app_groups = {"harmony", "photoshop", "aftereffects"}
|
||||
|
||||
order = 20
|
||||
launch_types = {LaunchTypes.local}
|
||||
|
||||
def execute(self):
|
||||
# Pop executable
|
||||
executable_path = self.launch_context.launch_args.pop(0)
|
||||
|
||||
# Pop rest of launch arguments - There should not be other arguments!
|
||||
remainders = []
|
||||
while self.launch_context.launch_args:
|
||||
remainders.append(self.launch_context.launch_args.pop(0))
|
||||
|
||||
script_path = os.path.join(
|
||||
AYON_CORE_ROOT,
|
||||
"scripts",
|
||||
"non_python_host_launch.py"
|
||||
)
|
||||
|
||||
new_launch_args = get_ayon_launcher_args(
|
||||
"run", script_path, executable_path
|
||||
)
|
||||
# Add workfile path if exists
|
||||
workfile_path = self.data["last_workfile_path"]
|
||||
if (
|
||||
self.data.get("start_last_workfile")
|
||||
and workfile_path
|
||||
and os.path.exists(workfile_path)):
|
||||
new_launch_args.append(workfile_path)
|
||||
|
||||
# Append as a whole list as these arguments should not be separated
|
||||
self.launch_context.launch_args.append(new_launch_args)
|
||||
|
||||
if remainders:
|
||||
self.launch_context.launch_args.extend(remainders)
|
||||
|
||||
self.launch_context.kwargs = \
|
||||
get_non_python_host_kwargs(self.launch_context.kwargs)
|
||||
56
client/ayon_core/hooks/pre_ocio_hook.py
Normal file
|
|
@ -0,0 +1,56 @@
|
|||
from ayon_core.lib.applications import PreLaunchHook
|
||||
|
||||
from ayon_core.pipeline.colorspace import get_imageio_config
|
||||
from ayon_core.pipeline.template_data import get_template_data_with_names
|
||||
|
||||
|
||||
class OCIOEnvHook(PreLaunchHook):
|
||||
"""Set OCIO environment variable for hosts that use OpenColorIO."""
|
||||
|
||||
order = 0
|
||||
hosts = {
|
||||
"substancepainter",
|
||||
"fusion",
|
||||
"blender",
|
||||
"aftereffects",
|
||||
"3dsmax",
|
||||
"houdini",
|
||||
"maya",
|
||||
"nuke",
|
||||
"hiero",
|
||||
"resolve",
|
||||
}
|
||||
launch_types = set()
|
||||
|
||||
def execute(self):
|
||||
"""Hook entry method."""
|
||||
|
||||
template_data = get_template_data_with_names(
|
||||
project_name=self.data["project_name"],
|
||||
asset_name=self.data["asset_name"],
|
||||
task_name=self.data["task_name"],
|
||||
host_name=self.host_name,
|
||||
system_settings=self.data["system_settings"]
|
||||
)
|
||||
|
||||
config_data = get_imageio_config(
|
||||
project_name=self.data["project_name"],
|
||||
host_name=self.host_name,
|
||||
project_settings=self.data["project_settings"],
|
||||
anatomy_data=template_data,
|
||||
anatomy=self.data["anatomy"],
|
||||
env=self.launch_context.env,
|
||||
)
|
||||
|
||||
if config_data:
|
||||
ocio_path = config_data["path"]
|
||||
|
||||
if self.host_name in ["nuke", "hiero"]:
|
||||
ocio_path = ocio_path.replace("\\", "/")
|
||||
|
||||
self.log.info(
|
||||
f"Setting OCIO environment to config path: {ocio_path}")
|
||||
|
||||
self.launch_context.env["OCIO"] = ocio_path
|
||||
else:
|
||||
self.log.debug("OCIO not set or enabled")
|
||||
24
client/ayon_core/host/__init__.py
Normal file
|
|
@ -0,0 +1,24 @@
|
|||
from .host import (
|
||||
HostBase,
|
||||
)
|
||||
|
||||
from .interfaces import (
|
||||
IWorkfileHost,
|
||||
ILoadHost,
|
||||
IPublishHost,
|
||||
INewPublisher,
|
||||
)
|
||||
|
||||
from .dirmap import HostDirmap
|
||||
|
||||
|
||||
__all__ = (
|
||||
"HostBase",
|
||||
|
||||
"IWorkfileHost",
|
||||
"ILoadHost",
|
||||
"IPublishHost",
|
||||
"INewPublisher",
|
||||
|
||||
"HostDirmap",
|
||||
)
|
||||
222
client/ayon_core/host/dirmap.py
Normal file
@@ -0,0 +1,222 @@
"""Dirmap functionality used in host integrations inside DCCs.
|
||||
|
||||
The idea for the current dirmap implementation was taken from Maya, where it
is possible to enter source and destination roots and Maya will try to
replace each source found in a referenced file with each of the destination
paths. The first path which exists is used.
|
||||
"""
|
||||
|
||||
import os
|
||||
from abc import ABCMeta, abstractmethod
|
||||
import platform
|
||||
|
||||
import six
|
||||
|
||||
from ayon_core.lib import Logger
|
||||
from ayon_core.addon import AddonsManager
|
||||
from ayon_core.settings import get_project_settings
|
||||
from ayon_core.settings.lib import get_site_local_overrides
|
||||
|
||||
|
||||
@six.add_metaclass(ABCMeta)
|
||||
class HostDirmap(object):
|
||||
"""Abstract class for running dirmap on a workfile in a host.
|
||||
|
||||
Dirmap is used to translate paths inside a host workfile from one
OS to another. (E.g. an artist created the workfile on Windows and a
different artist opens the same file on Linux.)

Expects methods to be implemented by the host subclass:
on_enable_dirmap: run host code for enabling dirmap if necessary
dirmap_routine: run host code to do the actual remapping
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
host_name,
|
||||
project_name,
|
||||
project_settings=None,
|
||||
sync_module=None
|
||||
):
|
||||
self.host_name = host_name
|
||||
self.project_name = project_name
|
||||
self._project_settings = project_settings
|
||||
self._sync_module = sync_module
|
||||
# to limit reinit of Modules
|
||||
self._sync_module_discovered = sync_module is not None
|
||||
self._log = None
|
||||
|
||||
@property
|
||||
def sync_module(self):
|
||||
if not self._sync_module_discovered:
|
||||
self._sync_module_discovered = True
|
||||
manager = AddonsManager()
|
||||
self._sync_module = manager.get("sync_server")
|
||||
return self._sync_module
|
||||
|
||||
@property
|
||||
def project_settings(self):
|
||||
if self._project_settings is None:
|
||||
self._project_settings = get_project_settings(self.project_name)
|
||||
return self._project_settings
|
||||
|
||||
@property
|
||||
def log(self):
|
||||
if self._log is None:
|
||||
self._log = Logger.get_logger(self.__class__.__name__)
|
||||
return self._log
|
||||
|
||||
@abstractmethod
|
||||
def on_enable_dirmap(self):
|
||||
"""Run host dependent operation for enabling dirmap if necessary."""
|
||||
pass
|
||||
|
||||
@abstractmethod
|
||||
def dirmap_routine(self, source_path, destination_path):
|
||||
"""Run host dependent remapping from source_path to destination_path"""
|
||||
pass
|
||||
|
||||
def process_dirmap(self, mapping=None):
|
||||
# type: (dict) -> None
|
||||
"""Go through all paths in Settings and set them using `dirmap`.
|
||||
|
||||
If the artist has Site Sync enabled, take the dirmap mapping directly from
Local Settings when the artist is syncing the workfile locally.
|
||||
|
||||
"""
|
||||
|
||||
if not mapping:
|
||||
mapping = self.get_mappings()
|
||||
if not mapping:
|
||||
return
|
||||
|
||||
self.on_enable_dirmap()
|
||||
|
||||
for k, sp in enumerate(mapping["source-path"]):
|
||||
dst = mapping["destination-path"][k]
|
||||
try:
|
||||
# add trailing slash if missing
|
||||
sp = os.path.join(sp, '')
|
||||
dst = os.path.join(dst, '')
|
||||
print("{} -> {}".format(sp, dst))
|
||||
self.dirmap_routine(sp, dst)
|
||||
except IndexError:
|
||||
# missing corresponding destination path
|
||||
self.log.error((
|
||||
"invalid dirmap mapping, missing corresponding"
|
||||
" destination directory."
|
||||
))
|
||||
break
|
||||
except RuntimeError:
|
||||
self.log.error(
|
||||
"invalid path {} -> {}, mapping not registered".format(
|
||||
sp, dst
|
||||
)
|
||||
)
|
||||
continue
|
||||
|
||||
def get_mappings(self):
|
||||
"""Get translation from source-path to destination-path.
|
||||
|
||||
It checks if Site Sync is enabled and user chose to use local
|
||||
site, in that case configuration in Local Settings takes precedence
|
||||
"""
|
||||
|
||||
dirmap_label = "{}-dirmap".format(self.host_name)
|
||||
mapping_sett = self.project_settings[self.host_name].get(dirmap_label,
|
||||
{})
|
||||
local_mapping = self._get_local_sync_dirmap()
|
||||
mapping_enabled = mapping_sett.get("enabled") or bool(local_mapping)
|
||||
if not mapping_enabled:
|
||||
return {}
|
||||
|
||||
mapping = (
|
||||
local_mapping
|
||||
or mapping_sett["paths"]
|
||||
or {}
|
||||
)
|
||||
|
||||
if (
|
||||
not mapping
|
||||
or not mapping.get("destination-path")
|
||||
or not mapping.get("source-path")
|
||||
):
|
||||
return {}
|
||||
self.log.info("Processing directory mapping ...")
|
||||
self.log.info("mapping:: {}".format(mapping))
|
||||
return mapping
|
||||
|
||||
def _get_local_sync_dirmap(self):
|
||||
"""
|
||||
Returns dirmap if sync to local project is enabled.
|
||||
|
||||
Only valid mapping is from roots of remote site to local site set
|
||||
in Local Settings.
|
||||
|
||||
Returns:
|
||||
dict : { "source-path": [XXX], "destination-path": [YYYY]}
|
||||
"""
|
||||
project_name = self.project_name
|
||||
|
||||
sync_module = self.sync_module
|
||||
mapping = {}
|
||||
if (
|
||||
sync_module is None
|
||||
or not sync_module.enabled
|
||||
or project_name not in sync_module.get_enabled_projects()
|
||||
):
|
||||
return mapping
|
||||
|
||||
active_site = sync_module.get_local_normalized_site(
|
||||
sync_module.get_active_site(project_name))
|
||||
remote_site = sync_module.get_local_normalized_site(
|
||||
sync_module.get_remote_site(project_name))
|
||||
self.log.debug(
|
||||
"active {} - remote {}".format(active_site, remote_site)
|
||||
)
|
||||
|
||||
if active_site == "local" and active_site != remote_site:
|
||||
sync_settings = sync_module.get_sync_project_setting(
|
||||
project_name,
|
||||
exclude_locals=False,
|
||||
cached=False)
|
||||
|
||||
active_overrides = get_site_local_overrides(
|
||||
project_name, active_site)
|
||||
remote_overrides = get_site_local_overrides(
|
||||
project_name, remote_site)
|
||||
|
||||
self.log.debug("local overrides {}".format(active_overrides))
|
||||
self.log.debug("remote overrides {}".format(remote_overrides))
|
||||
|
||||
current_platform = platform.system().lower()
|
||||
remote_provider = sync_module.get_provider_for_site(
|
||||
project_name, remote_site
|
||||
)
|
||||
# dirmap has sense only with regular disk provider, in the workfile
|
||||
# won't be root on cloud or sftp provider
|
||||
if remote_provider != "local_drive":
|
||||
remote_site = "studio"
|
||||
for root_name, active_site_dir in active_overrides.items():
|
||||
remote_site_dir = (
|
||||
remote_overrides.get(root_name)
|
||||
or sync_settings["sites"][remote_site]["root"][root_name]
|
||||
)
|
||||
|
||||
if isinstance(remote_site_dir, dict):
|
||||
remote_site_dir = remote_site_dir.get(current_platform)
|
||||
|
||||
if not remote_site_dir:
|
||||
continue
|
||||
|
||||
if os.path.isdir(active_site_dir):
|
||||
if "destination-path" not in mapping:
|
||||
mapping["destination-path"] = []
|
||||
mapping["destination-path"].append(active_site_dir)
|
||||
|
||||
if "source-path" not in mapping:
|
||||
mapping["source-path"] = []
|
||||
mapping["source-path"].append(remote_site_dir)
|
||||
|
||||
self.log.debug("local sync mapping:: {}".format(mapping))
|
||||
return mapping
|
||||
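A minimal sketch of a `HostDirmap` subclass (hypothetical host name and project; it assumes the project and its settings exist on the server):

```python
from ayon_core.host import HostDirmap


class ExampleHostDirmap(HostDirmap):
    """Hypothetical dirmap implementation for an example host."""

    def on_enable_dirmap(self):
        # Nothing special is needed to enable dirmap in this example host.
        pass

    def dirmap_routine(self, source_path, destination_path):
        # A real host would remap matching paths inside the opened workfile.
        print("Remapping {} -> {}".format(source_path, destination_path))


dirmap = ExampleHostDirmap("example_host", "my_project")
dirmap.process_dirmap()  # reads mappings from settings and runs the routine
```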
194
client/ayon_core/host/host.py
Normal file
@@ -0,0 +1,194 @@
import os
|
||||
import logging
|
||||
import contextlib
|
||||
from abc import ABCMeta, abstractproperty
|
||||
import six
|
||||
|
||||
# NOTE can't import 'typing' because of issues in Maya 2020
|
||||
# - shiboken crashes on 'typing' module import
|
||||
|
||||
|
||||
@six.add_metaclass(ABCMeta)
|
||||
class HostBase(object):
|
||||
"""Base of host implementation class.
|
||||
|
||||
Host is the pipeline implementation of a DCC application. This class should
help to identify what must/should/can be implemented for specific
functionality.

Compared to the 'avalon' concept:
What was previously a set of functions in the host implementation folder.
The host implementation should primarily care about adding the ability of
creation (marking subsets to be published) and optionally about referencing
published representations as containers.

A host may need to extend some functionality, like working with workfiles
or loading. Not all host implementations may allow that. For those purposes
the logic can be extended by implementing functions for that purpose. There
are prepared interfaces to identify what must be implemented to be able to
use that functionality.
- current statement is that it is not required to inherit from interfaces
but all of the methods are validated (only their existence!)
|
||||
|
||||
# Installation of host before (avalon concept):
|
||||
```python
|
||||
from ayon_core.pipeline import install_host
|
||||
import ayon_core.hosts.maya.api as host
|
||||
|
||||
install_host(host)
|
||||
```
|
||||
|
||||
# Installation of host now:
|
||||
```python
|
||||
from ayon_core.pipeline import install_host
|
||||
from ayon_core.hosts.maya.api import MayaHost
|
||||
|
||||
host = MayaHost()
|
||||
install_host(host)
|
||||
```
|
||||
|
||||
Todo:
|
||||
- move content of 'install_host' as method of this class
|
||||
- register host object
|
||||
- install legacy_io
|
||||
- install global plugin paths
|
||||
- store registered plugin paths to this object
|
||||
- handle current context (project, asset, task)
|
||||
- this must be done in many separated steps
|
||||
- have it's object of host tools instead of using globals
|
||||
|
||||
This implementation will probably change over time when more
|
||||
functionality and responsibility will be added.
|
||||
"""
|
||||
|
||||
_log = None
|
||||
|
||||
def __init__(self):
|
||||
"""Initialization of host.
|
||||
|
||||
Register DCC callbacks, host specific plugin paths, targets etc.
|
||||
(Part of what 'install' did in 'avalon' concept.)
|
||||
|
||||
Note:
|
||||
At this moment global "installation" must happen before host
|
||||
installation. Because of this current limitation it is recommended
|
||||
to implement 'install' method which is triggered after global
|
||||
'install'.
|
||||
"""
|
||||
|
||||
pass
|
||||
|
||||
def install(self):
|
||||
"""Install host specific functionality.
|
||||
|
||||
This is where should be added menu with tools, registered callbacks
|
||||
and other host integration initialization.
|
||||
|
||||
It is called automatically when 'ayon_core.pipeline.install_host' is
|
||||
triggered.
|
||||
"""
|
||||
|
||||
pass
|
||||
|
||||
@property
|
||||
def log(self):
|
||||
if self._log is None:
|
||||
self._log = logging.getLogger(self.__class__.__name__)
|
||||
return self._log
|
||||
|
||||
@abstractproperty
|
||||
def name(self):
|
||||
"""Host name."""
|
||||
|
||||
pass
|
||||
|
||||
def get_current_project_name(self):
|
||||
"""
|
||||
Returns:
|
||||
Union[str, None]: Current project name.
|
||||
"""
|
||||
|
||||
return os.environ.get("AVALON_PROJECT")
|
||||
|
||||
def get_current_asset_name(self):
|
||||
"""
|
||||
Returns:
|
||||
Union[str, None]: Current asset name.
|
||||
"""
|
||||
|
||||
return os.environ.get("AVALON_ASSET")
|
||||
|
||||
def get_current_task_name(self):
|
||||
"""
|
||||
Returns:
|
||||
Union[str, None]: Current task name.
|
||||
"""
|
||||
|
||||
return os.environ.get("AVALON_TASK")
|
||||
|
||||
def get_current_context(self):
|
||||
"""Get current context information.
|
||||
|
||||
This method should be used to get current context of host. Usage of
|
||||
this method can be crucial for host implementations in DCCs where
|
||||
can be opened multiple workfiles at one moment and change of context
|
||||
can't be caught properly.
|
||||
|
||||
Default implementation returns values from 'legacy_io.Session'.
|
||||
|
||||
Returns:
|
||||
Dict[str, Union[str, None]]: Context with 3 keys 'project_name',
|
||||
'asset_name' and 'task_name'. All of them can be 'None'.
|
||||
"""
|
||||
|
||||
return {
|
||||
"project_name": self.get_current_project_name(),
|
||||
"asset_name": self.get_current_asset_name(),
|
||||
"task_name": self.get_current_task_name()
|
||||
}
|
||||
|
||||
def get_context_title(self):
|
||||
"""Context title shown for UI purposes.
|
||||
|
||||
Should return current context title if possible.
|
||||
|
||||
Note:
|
||||
This method is used only for UI purposes so it is possible to
|
||||
return some logical title for contextless cases.
|
||||
Is not meant for "Context menu" label.
|
||||
|
||||
Returns:
|
||||
str: Context title.
|
||||
None: Default title is used based on UI implementation.
|
||||
"""
|
||||
|
||||
# Use current context to fill the context title
|
||||
current_context = self.get_current_context()
|
||||
project_name = current_context["project_name"]
|
||||
asset_name = current_context["asset_name"]
|
||||
task_name = current_context["task_name"]
|
||||
items = []
|
||||
if project_name:
|
||||
items.append(project_name)
|
||||
if asset_name:
|
||||
items.append(asset_name.lstrip("/"))
|
||||
if task_name:
|
||||
items.append(task_name)
|
||||
if items:
|
||||
return "/".join(items)
|
||||
return None
|
||||
|
||||
@contextlib.contextmanager
|
||||
def maintained_selection(self):
|
||||
"""Some functionlity will happen but selection should stay same.
|
||||
|
||||
This is DCC specific. Some may not allow to implement this ability
|
||||
that is reason why default implementation is empty context manager.
|
||||
|
||||
Yields:
|
||||
None: Yield when is ready to restore selected at the end.
|
||||
"""
|
||||
|
||||
try:
|
||||
yield
|
||||
finally:
|
||||
pass
|
||||
386
client/ayon_core/host/interfaces.py
Normal file
@@ -0,0 +1,386 @@
from abc import ABCMeta, abstractmethod
|
||||
import six
|
||||
|
||||
|
||||
class MissingMethodsError(ValueError):
|
||||
"""Exception when host miss some required methods for specific workflow.
|
||||
|
||||
Args:
|
||||
host (HostBase): Host implementation that is missing methods.
|
||||
missing_methods (list[str]): List of missing methods.
|
||||
"""
|
||||
|
||||
def __init__(self, host, missing_methods):
|
||||
joined_missing = ", ".join(
|
||||
['"{}"'.format(item) for item in missing_methods]
|
||||
)
|
||||
host_name = getattr(host, "name", None)
|
||||
if not host_name:
|
||||
try:
|
||||
host_name = host.__file__.replace("\\", "/").split("/")[-3]
|
||||
except Exception:
|
||||
host_name = str(host)
|
||||
message = (
|
||||
"Host \"{}\" miss methods {}".format(host_name, joined_missing)
|
||||
)
|
||||
super(MissingMethodsError, self).__init__(message)
|
||||
|
||||
|
||||
class ILoadHost:
|
||||
"""Implementation requirements to be able use reference of representations.
|
||||
|
||||
The load plugins can do referencing even without implementation of methods
|
||||
here, but switch and removement of containers would not be possible.
|
||||
|
||||
Questions:
|
||||
- Is list container dependency of host or load plugins?
|
||||
- Should this be directly in HostBase?
|
||||
- how to find out if referencing is available?
|
||||
- do we need to know that?
|
||||
"""
|
||||
|
||||
@staticmethod
|
||||
def get_missing_load_methods(host):
|
||||
"""Look for missing methods on "old type" host implementation.
|
||||
|
||||
Method is used for validation of implemented functions related to
|
||||
loading. Checks only existence of methods.
|
||||
|
||||
Args:
|
||||
Union[ModuleType, HostBase]: Object of host where to look for
|
||||
required methods.
|
||||
|
||||
Returns:
|
||||
list[str]: Missing method implementations for loading workflow.
|
||||
"""
|
||||
|
||||
if isinstance(host, ILoadHost):
|
||||
return []
|
||||
|
||||
required = ["ls"]
|
||||
missing = []
|
||||
for name in required:
|
||||
if not hasattr(host, name):
|
||||
missing.append(name)
|
||||
return missing
|
||||
|
||||
@staticmethod
|
||||
def validate_load_methods(host):
|
||||
"""Validate implemented methods of "old type" host for load workflow.
|
||||
|
||||
Args:
|
||||
Union[ModuleType, HostBase]: Object of host to validate.
|
||||
|
||||
Raises:
|
||||
MissingMethodsError: If there are missing methods on host
|
||||
implementation.
|
||||
"""
|
||||
missing = ILoadHost.get_missing_load_methods(host)
|
||||
if missing:
|
||||
raise MissingMethodsError(host, missing)
|
||||
|
||||
@abstractmethod
|
||||
def get_containers(self):
|
||||
"""Retrieve referenced containers from scene.
|
||||
|
||||
This can be implemented in hosts where referencing can be used.
|
||||
|
||||
Todo:
|
||||
Rename function to something more self explanatory.
|
||||
Suggestion: 'get_containers'
|
||||
|
||||
Returns:
|
||||
list[dict]: Information about loaded containers.
|
||||
"""
|
||||
|
||||
pass
|
||||
|
||||
# --- Deprecated method names ---
|
||||
def ls(self):
|
||||
"""Deprecated variant of 'get_containers'.
|
||||
|
||||
Todo:
|
||||
Remove when all usages are replaced.
|
||||
"""
|
||||
|
||||
return self.get_containers()
|
||||
|
||||
|
||||
@six.add_metaclass(ABCMeta)
|
||||
class IWorkfileHost:
|
||||
"""Implementation requirements to be able use workfile utils and tool."""
|
||||
|
||||
@staticmethod
|
||||
def get_missing_workfile_methods(host):
|
||||
"""Look for missing methods on "old type" host implementation.
|
||||
|
||||
Method is used for validation of implemented functions related to
|
||||
workfiles. Checks only existence of methods.
|
||||
|
||||
Args:
|
||||
Union[ModuleType, HostBase]: Object of host where to look for
|
||||
required methods.
|
||||
|
||||
Returns:
|
||||
list[str]: Missing method implementations for workfiles workflow.
|
||||
"""
|
||||
|
||||
if isinstance(host, IWorkfileHost):
|
||||
return []
|
||||
|
||||
required = [
|
||||
"open_file",
|
||||
"save_file",
|
||||
"current_file",
|
||||
"has_unsaved_changes",
|
||||
"file_extensions",
|
||||
"work_root",
|
||||
]
|
||||
missing = []
|
||||
for name in required:
|
||||
if not hasattr(host, name):
|
||||
missing.append(name)
|
||||
return missing
|
||||
|
||||
@staticmethod
|
||||
def validate_workfile_methods(host):
|
||||
"""Validate methods of "old type" host for workfiles workflow.
|
||||
|
||||
Args:
|
||||
Union[ModuleType, HostBase]: Object of host to validate.
|
||||
|
||||
Raises:
|
||||
MissingMethodsError: If there are missing methods on host
|
||||
implementation.
|
||||
"""
|
||||
|
||||
missing = IWorkfileHost.get_missing_workfile_methods(host)
|
||||
if missing:
|
||||
raise MissingMethodsError(host, missing)
|
||||
|
||||
@abstractmethod
|
||||
def get_workfile_extensions(self):
|
||||
"""Extensions that can be used as save.
|
||||
|
||||
Questions:
|
||||
This could potentially use 'HostDefinition'.
|
||||
"""
|
||||
|
||||
return []
|
||||
|
||||
@abstractmethod
|
||||
def save_workfile(self, dst_path=None):
|
||||
"""Save currently opened scene.
|
||||
|
||||
Args:
|
||||
dst_path (str): Where the current scene should be saved. Or use
|
||||
current path if 'None' is passed.
|
||||
"""
|
||||
|
||||
pass
|
||||
|
||||
@abstractmethod
|
||||
def open_workfile(self, filepath):
|
||||
"""Open passed filepath in the host.
|
||||
|
||||
Args:
|
||||
filepath (str): Path to workfile.
|
||||
"""
|
||||
|
||||
pass
|
||||
|
||||
@abstractmethod
|
||||
def get_current_workfile(self):
|
||||
"""Retrieve path to current opened file.
|
||||
|
||||
Returns:
|
||||
str: Path to file which is currently opened.
|
||||
None: If nothing is opened.
|
||||
"""
|
||||
|
||||
return None
|
||||
|
||||
def workfile_has_unsaved_changes(self):
|
||||
"""Currently opened scene is saved.
|
||||
|
||||
Not all hosts can know if current scene is saved because the API of
|
||||
DCC does not support it.
|
||||
|
||||
Returns:
|
||||
bool: True if scene is saved and False if has unsaved
|
||||
modifications.
|
||||
None: Can't tell if workfiles has modifications.
|
||||
"""
|
||||
|
||||
return None
|
||||
|
||||
def work_root(self, session):
|
||||
"""Modify workdir per host.
|
||||
|
||||
Default implementation keeps workdir untouched.
|
||||
|
||||
Warnings:
|
||||
We must handle this modification in a more sophisticated way, because
this can't be called outside of the DCC, so opening of the last workfile
(calculated before the DCC is launched) is complicated. Also breaking the
defined work template is not a good idea.
The only place where it's really used and makes sense is Maya. There
workspace.mel can modify subfolders where to look for maya files.
|
||||
|
||||
Args:
|
||||
session (dict): Session context data.
|
||||
|
||||
Returns:
|
||||
str: Path to new workdir.
|
||||
"""
|
||||
|
||||
return session["AVALON_WORKDIR"]
|
||||
|
||||
# --- Deprecated method names ---
|
||||
def file_extensions(self):
|
||||
"""Deprecated variant of 'get_workfile_extensions'.
|
||||
|
||||
Todo:
|
||||
Remove when all usages are replaced.
|
||||
"""
|
||||
return self.get_workfile_extensions()
|
||||
|
||||
def save_file(self, dst_path=None):
|
||||
"""Deprecated variant of 'save_workfile'.
|
||||
|
||||
Todo:
|
||||
Remove when all usages are replaced.
|
||||
"""
|
||||
|
||||
self.save_workfile(dst_path)
|
||||
|
||||
def open_file(self, filepath):
|
||||
"""Deprecated variant of 'open_workfile'.
|
||||
|
||||
Todo:
|
||||
Remove when all usages are replaced.
|
||||
"""
|
||||
|
||||
return self.open_workfile(filepath)
|
||||
|
||||
def current_file(self):
|
||||
"""Deprecated variant of 'get_current_workfile'.
|
||||
|
||||
Todo:
|
||||
Remove when all usages are replaced.
|
||||
"""
|
||||
|
||||
return self.get_current_workfile()
|
||||
|
||||
def has_unsaved_changes(self):
|
||||
"""Deprecated variant of 'workfile_has_unsaved_changes'.
|
||||
|
||||
Todo:
|
||||
Remove when all usages are replaced.
|
||||
"""
|
||||
|
||||
return self.workfile_has_unsaved_changes()
|
||||
|
||||
|
||||
class IPublishHost:
|
||||
"""Functions related to new creation system in new publisher.
|
||||
|
||||
The new publisher does not store information only about each created instance
but also some global data. At this moment the data are related only to context
publish plugins, but that can be extended in the future.
|
||||
"""
|
||||
|
||||
@staticmethod
|
||||
def get_missing_publish_methods(host):
|
||||
"""Look for missing methods on "old type" host implementation.
|
||||
|
||||
Method is used for validation of implemented functions related to
|
||||
new publish creation. Checks only existence of methods.
|
||||
|
||||
Args:
|
||||
Union[ModuleType, HostBase]: Host module where to look for
|
||||
required methods.
|
||||
|
||||
Returns:
|
||||
list[str]: Missing method implementations for new publisher
|
||||
workflow.
|
||||
"""
|
||||
|
||||
if isinstance(host, IPublishHost):
|
||||
return []
|
||||
|
||||
required = [
|
||||
"get_context_data",
|
||||
"update_context_data",
|
||||
"get_context_title",
|
||||
"get_current_context",
|
||||
]
|
||||
missing = []
|
||||
for name in required:
|
||||
if not hasattr(host, name):
|
||||
missing.append(name)
|
||||
return missing
|
||||
|
||||
@staticmethod
|
||||
def validate_publish_methods(host):
|
||||
"""Validate implemented methods of "old type" host.
|
||||
|
||||
Args:
|
||||
Union[ModuleType, HostBase]: Host module to validate.
|
||||
|
||||
Raises:
|
||||
MissingMethodsError: If there are missing methods on host
|
||||
implementation.
|
||||
"""
|
||||
missing = IPublishHost.get_missing_publish_methods(host)
|
||||
if missing:
|
||||
raise MissingMethodsError(host, missing)
|
||||
|
||||
@abstractmethod
|
||||
def get_context_data(self):
|
||||
"""Get global data related to creation-publishing from workfile.
|
||||
|
||||
These data are not related to any created instance but to the whole
|
||||
publishing context. If they are not saved and returned, each reset of
|
||||
publishing resets all values back to their defaults.
|
||||
|
||||
Context data can contain information about enabled/disabled publish
|
||||
plugins or other values that can be filled in by the artist.
|
||||
|
||||
Returns:
|
||||
dict: Context data stored using 'update_context_data'.
|
||||
"""
|
||||
|
||||
pass
|
||||
|
||||
@abstractmethod
|
||||
def update_context_data(self, data, changes):
|
||||
"""Store global context data to workfile.
|
||||
|
||||
Called when some values in the context data have changed.
|
||||
|
||||
Unless the values are stored in a way that 'get_context_data' can
|
||||
return them, each reset of publishing loses the values filled in by the
|
||||
artist. Best practice is to store the values in the workfile, if possible.
|
||||
|
||||
Args:
|
||||
data (dict): The new data in full.
|
||||
changes (dict): Only data that has been changed. Each value has
|
||||
tuple with '(<old>, <new>)' value.
|
||||
"""
|
||||
|
||||
pass
|
||||
|
||||
|
||||
class INewPublisher(IPublishHost):
|
||||
"""Legacy interface replaced by 'IPublishHost'.
|
||||
|
||||
Deprecated:
|
||||
'INewPublisher' is replaced by 'IPublishHost', please change your
|
||||
imports.
|
||||
There is no "reasonable" way to mark these classes as deprecated and
|
||||
show a warning about the wrong import. Deprecated since 3.14.*, will be
|
||||
removed in 3.15.*
|
||||
"""
|
||||
|
||||
pass
|
||||
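A minimal sketch of an `IPublishHost` implementation that keeps context data in memory; a real host would persist the data into the workfile, and the class name here is purely illustrative:

```python
# Illustrative only: simplest possible 'IPublishHost' implementation.
class InMemoryPublishHost(IPublishHost):
    def __init__(self):
        self._context_data = {}

    def get_context_data(self):
        # Returning stored values keeps artist choices across publisher resets.
        return dict(self._context_data)

    def update_context_data(self, data, changes):
        # 'data' holds the full new state, 'changes' maps key -> (old, new).
        self._context_data = dict(data)
```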
0
client/ayon_core/hosts/__init__.py
Normal file
6
client/ayon_core/hosts/aftereffects/__init__.py
Normal file
|
|
@ -0,0 +1,6 @@
|
|||
from .addon import AfterEffectsAddon
|
||||
|
||||
|
||||
__all__ = (
|
||||
"AfterEffectsAddon",
|
||||
)
|
||||
22
client/ayon_core/hosts/aftereffects/addon.py
Normal file
|
|
@ -0,0 +1,22 @@
|
|||
from ayon_core.modules import OpenPypeModule, IHostAddon
|
||||
|
||||
|
||||
class AfterEffectsAddon(OpenPypeModule, IHostAddon):
|
||||
name = "aftereffects"
|
||||
host_name = "aftereffects"
|
||||
|
||||
def initialize(self, module_settings):
|
||||
self.enabled = True
|
||||
|
||||
def add_implementation_envs(self, env, _app):
|
||||
"""Modify environments to contain all required for implementation."""
|
||||
defaults = {
|
||||
"AYON_LOG_NO_COLORS": "1",
|
||||
"WEBSOCKET_URL": "ws://localhost:8097/ws/"
|
||||
}
|
||||
for key, value in defaults.items():
|
||||
if not env.get(key):
|
||||
env[key] = value
|
||||
|
||||
def get_workfile_extensions(self):
|
||||
return [".aep"]
|
||||
68
client/ayon_core/hosts/aftereffects/api/README.md
Normal file
|
|
@ -0,0 +1,68 @@
|
|||
# AfterEffects Integration
|
||||
|
||||
Requirements: This extension requires the JavaScript expressions engine, which is
|
||||
available since CC 16.0.
|
||||
Please check File > Project Settings > Expressions > Expressions Engine.
|
||||
|
||||
## Setup
|
||||
|
||||
The After Effects integration requires two components to work: the `extension` and the `server`.
|
||||
|
||||
### Extension
|
||||
|
||||
To install the extension download [Extension Manager Command Line tool (ExManCmd)](https://github.com/Adobe-CEP/Getting-Started-guides/tree/master/Package%20Distribute%20Install#option-2---exmancmd).
|
||||
|
||||
```
|
||||
ExManCmd /install {path to addon}/api/extension.zxp
|
||||
```
|
||||
OR
|
||||
download [Anastasiy’s Extension Manager](https://install.anastasiy.com/)
|
||||
|
||||
`{path to addon}` will most likely be in your AppData on Windows, or in your user data folder on Linux and macOS.
|
||||
|
||||
### Server
|
||||
|
||||
The easiest way to get the server and After Effects launched is with:
|
||||
|
||||
```
|
||||
python -c ^"import ayon_core.hosts.photoshop;ayon_core.hosts.aftereffects.launch(""c:\Program Files\Adobe\Adobe After Effects 2020\Support Files\AfterFX.exe"")^"
|
||||
```
|
||||
|
||||
`ayon_core.hosts.aftereffects.launch` launches the application and the server, and also closes the server when After Effects exits.
|
||||
|
||||
## Usage
|
||||
|
||||
The After Effects extension can be found under `Window > Extensions > AYON`. Once launched you should be presented with a panel like this:
|
||||
|
||||

|
||||
|
||||
|
||||
## Developing
|
||||
|
||||
### Extension
|
||||
When developing the extension you can load it [unsigned](https://github.com/Adobe-CEP/CEP-Resources/blob/master/CEP_9.x/Documentation/CEP%209.0%20HTML%20Extension%20Cookbook.md#debugging-unsigned-extensions).
|
||||
|
||||
When signing the extension you can use this [guide](https://github.com/Adobe-CEP/Getting-Started-guides/tree/master/Package%20Distribute%20Install#package-distribute-install-guide).
|
||||
|
||||
```
|
||||
ZXPSignCmd -selfSignedCert NA NA Ayon Avalon-After-Effects Ayon extension.p12
|
||||
ZXPSignCmd -sign {path to addon}/api/extension {path to addon}/api/extension.zxp extension.p12 Ayon
|
||||
```
|
||||
|
||||
### Plugin Examples
|
||||
|
||||
These plugins were made with the [polly config](https://github.com/mindbender-studio/config). To fully integrate and load, you will have to use this config and add `image` to the [integration plugin](https://github.com/mindbender-studio/config/blob/master/polly/plugins/publish/integrate_asset.py).
|
||||
|
||||
Expected deployed extension location on default Windows:
|
||||
`c:\Program Files (x86)\Common Files\Adobe\CEP\extensions\io.ynput.AE.panel`
|
||||
|
||||
For easier debugging of JavaScript:
|
||||
https://community.adobe.com/t5/download-install/adobe-extension-debuger-problem/td-p/10911704?page=1
|
||||
Optionally add `--enable-blink-features=ShadowDOMV0,CustomElementsV0` when starting Chrome,
|
||||
then open localhost:8092
|
||||
|
||||
Or use Visual Studio Code https://medium.com/adobetech/extendscript-debugger-for-visual-studio-code-public-release-a2ff6161fa01
|
||||
## Resources
|
||||
- https://javascript-tools-guide.readthedocs.io/introduction/index.html
|
||||
- https://github.com/Adobe-CEP/Getting-Started-guides
|
||||
- https://github.com/Adobe-CEP/CEP-Resources
|
||||
45
client/ayon_core/hosts/aftereffects/api/__init__.py
Normal file
|
|
@ -0,0 +1,45 @@
|
|||
"""Public API
|
||||
|
||||
Anything that isn't defined here is INTERNAL and unreliable for external use.
|
||||
|
||||
"""
|
||||
|
||||
from .ws_stub import (
|
||||
get_stub,
|
||||
)
|
||||
|
||||
from .pipeline import (
|
||||
AfterEffectsHost,
|
||||
ls,
|
||||
containerise
|
||||
)
|
||||
|
||||
from .lib import (
|
||||
maintained_selection,
|
||||
get_extension_manifest_path,
|
||||
get_asset_settings,
|
||||
set_settings
|
||||
)
|
||||
|
||||
from .plugin import (
|
||||
AfterEffectsLoader
|
||||
)
|
||||
|
||||
|
||||
__all__ = [
|
||||
# ws_stub
|
||||
"get_stub",
|
||||
|
||||
# pipeline
|
||||
"ls",
|
||||
"containerise",
|
||||
|
||||
# lib
|
||||
"maintained_selection",
|
||||
"get_extension_manifest_path",
|
||||
"get_asset_settings",
|
||||
"set_settings",
|
||||
|
||||
# plugin
|
||||
"AfterEffectsLoader"
|
||||
]
|
||||
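A small sketch of consuming this public API from a pipeline script; it assumes After Effects is running with the AYON extension connected, and the stub call shown is only an assumption for illustration:

```python
# Illustrative only: talking to the running After Effects extension.
from ayon_core.hosts.aftereffects.api import get_stub, maintained_selection

stub = get_stub()  # websocket stub connected to the AE extension panel
with maintained_selection():
    # Method name is an assumption for this sketch; use whichever stub
    # calls your pipeline actually needs.
    print(stub.get_metadata())
```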
BIN
client/ayon_core/hosts/aftereffects/api/extension.zxp
Normal file
31
client/ayon_core/hosts/aftereffects/api/extension/.debug
Normal file
|
|
@ -0,0 +1,31 @@
|
|||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<ExtensionList>
|
||||
<Extension Id="io.ynput.AE.panel">
|
||||
<HostList>
|
||||
|
||||
<!-- Comment Host tags according to the apps you want your panel to support -->
|
||||
|
||||
<!-- Photoshop -->
|
||||
<Host Name="PHXS" Port="8088"/>
|
||||
|
||||
<!-- Illustrator -->
|
||||
<Host Name="ILST" Port="8089"/>
|
||||
|
||||
<!-- InDesign -->
|
||||
<Host Name="IDSN" Port="8090" />
|
||||
|
||||
<!-- Premiere -->
|
||||
<Host Name="PPRO" Port="8091" />
|
||||
|
||||
<!-- AfterEffects -->
|
||||
<Host Name="AEFT" Port="8092" />
|
||||
|
||||
<!-- PRELUDE -->
|
||||
<Host Name="PRLD" Port="8093" />
|
||||
|
||||
<!-- FLASH Pro -->
|
||||
<Host Name="FLPR" Port="8094" />
|
||||
|
||||
</HostList>
|
||||
</Extension>
|
||||
</ExtensionList>
|
||||
|
|
@ -0,0 +1,79 @@
|
|||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<ExtensionManifest Version="8.0" ExtensionBundleId="io.ynput.AE.panel" ExtensionBundleVersion="1.1.0"
|
||||
ExtensionBundleName="io.ynput.AE.panel" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
|
||||
<ExtensionList>
|
||||
<Extension Id="io.ynput.AE.panel" Version="1.0" />
|
||||
</ExtensionList>
|
||||
<ExecutionEnvironment>
|
||||
<HostList>
|
||||
<!-- Uncomment Host tags according to the apps you want your panel to support -->
|
||||
<!-- Photoshop -->
|
||||
<!--<Host Name="PHXS" Version="[14.0,19.0]" /> -->
|
||||
<!-- <Host Name="PHSP" Version="[14.0,19.0]" /> -->
|
||||
|
||||
<!-- Illustrator -->
|
||||
<!-- <Host Name="ILST" Version="[18.0,22.0]" /> -->
|
||||
|
||||
<!-- InDesign -->
|
||||
<!-- <Host Name="IDSN" Version="[10.0,13.0]" /> -->
|
||||
|
||||
<!-- Premiere -->
|
||||
<!-- <Host Name="PPRO" Version="[8.0,12.0]" /> -->
|
||||
|
||||
<!-- AfterEffects -->
|
||||
<Host Name="AEFT" Version="[13.0,99.0]" />
|
||||
|
||||
<!-- PRELUDE -->
|
||||
<!-- <Host Name="PRLD" Version="[3.0,7.0]" /> -->
|
||||
|
||||
<!-- FLASH Pro -->
|
||||
<!-- <Host Name="FLPR" Version="[14.0,18.0]" /> -->
|
||||
|
||||
</HostList>
|
||||
<LocaleList>
|
||||
<Locale Code="All" />
|
||||
</LocaleList>
|
||||
<RequiredRuntimeList>
|
||||
<RequiredRuntime Name="CSXS" Version="9.0" />
|
||||
</RequiredRuntimeList>
|
||||
</ExecutionEnvironment>
|
||||
<DispatchInfoList>
|
||||
<Extension Id="io.ynput.AE.panel">
|
||||
<DispatchInfo >
|
||||
<Resources>
|
||||
<MainPath>./index.html</MainPath>
|
||||
<ScriptPath>./jsx/hostscript.jsx</ScriptPath>
|
||||
</Resources>
|
||||
<Lifecycle>
|
||||
<AutoVisible>true</AutoVisible>
|
||||
</Lifecycle>
|
||||
<UI>
|
||||
<Type>Panel</Type>
|
||||
<Menu>AYON</Menu>
|
||||
<Geometry>
|
||||
<Size>
|
||||
<Height>200</Height>
|
||||
<Width>100</Width>
|
||||
</Size>
|
||||
<!--<MinSize>
|
||||
<Height>550</Height>
|
||||
<Width>400</Width>
|
||||
</MinSize>
|
||||
<MaxSize>
|
||||
<Height>550</Height>
|
||||
<Width>400</Width>
|
||||
</MaxSize>-->
|
||||
|
||||
</Geometry>
|
||||
<Icons>
|
||||
<Icon Type="Normal">./icons/ayon_logo.png</Icon>
|
||||
<Icon Type="RollOver">./icons/iconRollover.png</Icon>
|
||||
<Icon Type="Disabled">./icons/iconDisabled.png</Icon>
|
||||
<Icon Type="DarkNormal">./icons/iconDarkNormal.png</Icon>
|
||||
<Icon Type="DarkRollOver">./icons/iconDarkRollover.png</Icon>
|
||||
</Icons>
|
||||
</UI>
|
||||
</DispatchInfo>
|
||||
</Extension>
|
||||
</DispatchInfoList>
|
||||
</ExtensionManifest>
|
||||
|
|
@ -0,0 +1,327 @@
|
|||
/*
|
||||
* HTML5 ✰ Boilerplate
|
||||
*
|
||||
* What follows is the result of much research on cross-browser styling.
|
||||
* Credit left inline and big thanks to Nicolas Gallagher, Jonathan Neal,
|
||||
* Kroc Camen, and the H5BP dev community and team.
|
||||
*
|
||||
* Detailed information about this CSS: h5bp.com/css
|
||||
*
|
||||
* ==|== normalize ==========================================================
|
||||
*/
|
||||
|
||||
|
||||
/* =============================================================================
|
||||
HTML5 display definitions
|
||||
========================================================================== */
|
||||
|
||||
article, aside, details, figcaption, figure, footer, header, hgroup, nav, section { display: block; }
|
||||
audio, canvas, video { display: inline-block; *display: inline; *zoom: 1; }
|
||||
audio:not([controls]) { display: none; }
|
||||
[hidden] { display: none; }
|
||||
|
||||
|
||||
/* =============================================================================
|
||||
Base
|
||||
========================================================================== */
|
||||
|
||||
/*
|
||||
* 1. Correct text resizing oddly in IE6/7 when body font-size is set using em units
|
||||
* 2. Force vertical scrollbar in non-IE
|
||||
* 3. Prevent iOS text size adjust on device orientation change, without disabling user zoom: h5bp.com/g
|
||||
*/
|
||||
|
||||
html { font-size: 100%; overflow-y: scroll; -webkit-text-size-adjust: 100%; -ms-text-size-adjust: 100%; }
|
||||
|
||||
body { margin: 0; font-size: 100%; line-height: 1.231; }
|
||||
|
||||
body, button, input, select, textarea { font-family: helvetica, arial,"lucida grande", verdana, "メイリオ", "MS Pゴシック", sans-serif; color: #222; }
|
||||
/*
|
||||
* Remove text-shadow in selection highlight: h5bp.com/i
|
||||
* These selection declarations have to be separate
|
||||
* Also: hot pink! (or customize the background color to match your design)
|
||||
*/
|
||||
|
||||
::selection { text-shadow: none; background-color: highlight; color: highlighttext; }
|
||||
|
||||
|
||||
/* =============================================================================
|
||||
Links
|
||||
========================================================================== */
|
||||
|
||||
a { color: #00e; }
|
||||
a:visited { color: #551a8b; }
|
||||
a:hover { color: #06e; }
|
||||
a:focus { outline: thin dotted; }
|
||||
|
||||
/* Improve readability when focused and hovered in all browsers: h5bp.com/h */
|
||||
a:hover, a:active { outline: 0; }
|
||||
|
||||
|
||||
/* =============================================================================
|
||||
Typography
|
||||
========================================================================== */
|
||||
|
||||
abbr[title] { border-bottom: 1px dotted; }
|
||||
|
||||
b, strong { font-weight: bold; }
|
||||
|
||||
blockquote { margin: 1em 40px; }
|
||||
|
||||
dfn { font-style: italic; }
|
||||
|
||||
hr { display: block; height: 1px; border: 0; border-top: 1px solid #ccc; margin: 1em 0; padding: 0; }
|
||||
|
||||
ins { background: #ff9; color: #000; text-decoration: none; }
|
||||
|
||||
mark { background: #ff0; color: #000; font-style: italic; font-weight: bold; }
|
||||
|
||||
/* Redeclare monospace font family: h5bp.com/j */
|
||||
pre, code, kbd, samp { font-family: monospace, serif; _font-family: 'courier new', monospace; font-size: 1em; }
|
||||
|
||||
/* Improve readability of pre-formatted text in all browsers */
|
||||
pre { white-space: pre; white-space: pre-wrap; word-wrap: break-word; }
|
||||
|
||||
q { quotes: none; }
|
||||
q:before, q:after { content: ""; content: none; }
|
||||
|
||||
small { font-size: 85%; }
|
||||
|
||||
/* Position subscript and superscript content without affecting line-height: h5bp.com/k */
|
||||
sub, sup { font-size: 75%; line-height: 0; position: relative; vertical-align: baseline; }
|
||||
sup { top: -0.5em; }
|
||||
sub { bottom: -0.25em; }
|
||||
|
||||
|
||||
/* =============================================================================
|
||||
Lists
|
||||
========================================================================== */
|
||||
|
||||
ul, ol { margin: 1em 0; padding: 0 0 0 40px; }
|
||||
dd { margin: 0 0 0 40px; }
|
||||
nav ul, nav ol { list-style: none; list-style-image: none; margin: 0; padding: 0; }
|
||||
|
||||
|
||||
/* =============================================================================
|
||||
Embedded content
|
||||
========================================================================== */
|
||||
|
||||
/*
|
||||
* 1. Improve image quality when scaled in IE7: h5bp.com/d
|
||||
* 2. Remove the gap between images and borders on image containers: h5bp.com/e
|
||||
*/
|
||||
|
||||
img { border: 0; -ms-interpolation-mode: bicubic; vertical-align: middle; }
|
||||
|
||||
/*
|
||||
* Correct overflow not hidden in IE9
|
||||
*/
|
||||
|
||||
svg:not(:root) { overflow: hidden; }
|
||||
|
||||
|
||||
/* =============================================================================
|
||||
Figures
|
||||
========================================================================== */
|
||||
|
||||
figure { margin: 0; }
|
||||
|
||||
|
||||
/* =============================================================================
|
||||
Forms
|
||||
========================================================================== */
|
||||
|
||||
form { margin: 0; }
|
||||
fieldset { border: 0; margin: 0; padding: 0; }
|
||||
|
||||
/* Indicate that 'label' will shift focus to the associated form element */
|
||||
label { cursor: pointer; }
|
||||
|
||||
/*
|
||||
* 1. Correct color not inheriting in IE6/7/8/9
|
||||
* 2. Correct alignment displayed oddly in IE6/7
|
||||
*/
|
||||
|
||||
legend { border: 0; *margin-left: -7px; padding: 0; }
|
||||
|
||||
/*
|
||||
* 1. Correct font-size not inheriting in all browsers
|
||||
* 2. Remove margins in FF3/4 S5 Chrome
|
||||
* 3. Define consistent vertical alignment display in all browsers
|
||||
*/
|
||||
|
||||
button, input, select, textarea { font-size: 100%; margin: 0; vertical-align: baseline; *vertical-align: middle; }
|
||||
|
||||
/*
|
||||
* 1. Define line-height as normal to match FF3/4 (set using !important in the UA stylesheet)
|
||||
*/
|
||||
|
||||
button, input { line-height: normal; }
|
||||
|
||||
/*
|
||||
* 1. Display hand cursor for clickable form elements
|
||||
* 2. Allow styling of clickable form elements in iOS
|
||||
* 3. Correct inner spacing displayed oddly in IE7 (doesn't effect IE6)
|
||||
*/
|
||||
|
||||
button, input[type="button"], input[type="reset"], input[type="submit"] { cursor: pointer; -webkit-appearance: button; *overflow: visible; }
|
||||
|
||||
/*
|
||||
* Consistent box sizing and appearance
|
||||
*/
|
||||
|
||||
input[type="checkbox"], input[type="radio"] { box-sizing: border-box; padding: 0; }
|
||||
input[type="search"] { -webkit-appearance: textfield; -moz-box-sizing: content-box; -webkit-box-sizing: content-box; box-sizing: content-box; }
|
||||
input[type="search"]::-webkit-search-decoration { -webkit-appearance: none; }
|
||||
|
||||
/*
|
||||
* Remove inner padding and border in FF3/4: h5bp.com/l
|
||||
*/
|
||||
|
||||
button::-moz-focus-inner, input::-moz-focus-inner { border: 0; padding: 0; }
|
||||
|
||||
/*
|
||||
* 1. Remove default vertical scrollbar in IE6/7/8/9
|
||||
* 2. Allow only vertical resizing
|
||||
*/
|
||||
|
||||
textarea { overflow: auto; vertical-align: top; resize: vertical; }
|
||||
|
||||
/* Colors for form validity */
|
||||
input:valid, textarea:valid { }
|
||||
input:invalid, textarea:invalid { background-color: #f0dddd; }
|
||||
|
||||
|
||||
/* =============================================================================
|
||||
Tables
|
||||
========================================================================== */
|
||||
|
||||
table { border-collapse: collapse; border-spacing: 0; }
|
||||
td { vertical-align: top; }
|
||||
|
||||
|
||||
/* ==|== primary styles =====================================================
|
||||
Author:
|
||||
========================================================================== */
|
||||
|
||||
/* ==|== media queries ======================================================
|
||||
PLACEHOLDER Media Queries for Responsive Design.
|
||||
These override the primary ('mobile first') styles
|
||||
Modify as content requires.
|
||||
========================================================================== */
|
||||
|
||||
@media only screen and (min-width: 480px) {
|
||||
/* Style adjustments for viewports 480px and over go here */
|
||||
|
||||
}
|
||||
|
||||
@media only screen and (min-width: 768px) {
|
||||
/* Style adjustments for viewports 768px and over go here */
|
||||
|
||||
}
|
||||
|
||||
|
||||
|
||||
/* ==|== non-semantic helper classes ========================================
|
||||
Please define your styles before this section.
|
||||
========================================================================== */
|
||||
|
||||
/* For image replacement */
|
||||
.ir { display: block; border: 0; text-indent: -999em; overflow: hidden; background-color: transparent; background-repeat: no-repeat; text-align: left; direction: ltr; }
|
||||
.ir br { display: none; }
|
||||
|
||||
/* Hide from both screenreaders and browsers: h5bp.com/u */
|
||||
.hidden { display: none !important; visibility: hidden; }
|
||||
|
||||
/* Hide only visually, but have it available for screenreaders: h5bp.com/v */
|
||||
.visuallyhidden { border: 0; clip: rect(0 0 0 0); height: 1px; margin: -1px; overflow: hidden; padding: 0; position: absolute; width: 1px; }
|
||||
|
||||
/* Extends the .visuallyhidden class to allow the element to be focusable when navigated to via the keyboard: h5bp.com/p */
|
||||
.visuallyhidden.focusable:active, .visuallyhidden.focusable:focus { clip: auto; height: auto; margin: 0; overflow: visible; position: static; width: auto; }
|
||||
|
||||
/* Hide visually and from screenreaders, but maintain layout */
|
||||
.invisible { visibility: hidden; }
|
||||
|
||||
/* Contain floats: h5bp.com/q */
|
||||
.clearfix:before, .clearfix:after { content: ""; display: table; }
|
||||
.clearfix:after { clear: both; }
|
||||
.clearfix { *zoom: 1; }
|
||||
|
||||
|
||||
|
||||
/* ==|== print styles =======================================================
|
||||
Print styles.
|
||||
Inlined to avoid required HTTP connection: h5bp.com/r
|
||||
========================================================================== */
|
||||
|
||||
@media print {
|
||||
* { background: transparent !important; color: black !important; box-shadow:none !important; text-shadow: none !important; filter:none !important; -ms-filter: none !important; } /* Black prints faster: h5bp.com/s */
|
||||
a, a:visited { text-decoration: underline; }
|
||||
a[href]:after { content: " (" attr(href) ")"; }
|
||||
abbr[title]:after { content: " (" attr(title) ")"; }
|
||||
.ir a:after, a[href^="javascript:"]:after, a[href^="#"]:after { content: ""; } /* Don't show links for images, or javascript/internal links */
|
||||
pre, blockquote { border: 1px solid #999; page-break-inside: avoid; }
|
||||
table { display: table-header-group; } /* h5bp.com/t */
|
||||
tr, img { page-break-inside: avoid; }
|
||||
img { max-width: 100% !important; }
|
||||
@page { margin: 0.5cm; }
|
||||
p, h2, h3 { orphans: 3; widows: 3; }
|
||||
h2, h3 { page-break-after: avoid; }
|
||||
}
|
||||
|
||||
/* reflow reset for -webkit-margin-before: 1em */
|
||||
p { margin: 0; }
|
||||
|
||||
html {
|
||||
overflow-y: auto;
|
||||
background-color: transparent;
|
||||
height: 100%;
|
||||
}
|
||||
|
||||
body {
|
||||
background: #fff;
|
||||
font: normal 100%;
|
||||
position: relative;
|
||||
height: 100%;
|
||||
}
|
||||
|
||||
body, div, img, p, button, input, select, textarea {
|
||||
box-sizing: border-box;
|
||||
}
|
||||
|
||||
.image {
|
||||
display: block;
|
||||
}
|
||||
|
||||
input {
|
||||
cursor: default;
|
||||
display: block;
|
||||
}
|
||||
|
||||
input[type=button] {
|
||||
background-color: #e5e9e8;
|
||||
border: 1px solid #9daca9;
|
||||
border-radius: 4px;
|
||||
box-shadow: inset 0 1px #fff;
|
||||
font: inherit;
|
||||
letter-spacing: inherit;
|
||||
text-indent: inherit;
|
||||
color: inherit;
|
||||
}
|
||||
|
||||
input[type=button]:hover {
|
||||
background-color: #eff1f1;
|
||||
}
|
||||
|
||||
input[type=button]:active {
|
||||
background-color: #d2d6d6;
|
||||
border: 1px solid #9daca9;
|
||||
box-shadow: inset 0 1px rgba(0,0,0,0.1);
|
||||
}
|
||||
|
||||
/* Reset anchor styles to an unstyled default to be in parity with design surface. It
|
||||
is presumed that most link styles in real-world designs are custom (non-default). */
|
||||
a, a:visited, a:hover, a:active {
|
||||
color: inherit;
|
||||
text-decoration: inherit;
|
||||
}
|
||||
|
|
@ -0,0 +1,51 @@
|
|||
/*Your styles*/
|
||||
|
||||
body {
|
||||
margin: 10px;
|
||||
}
|
||||
|
||||
|
||||
#content {
|
||||
margin-right:auto;
|
||||
margin-left:auto;
|
||||
vertical-align:middle;
|
||||
width:100%;
|
||||
}
|
||||
|
||||
|
||||
#btn_test{
|
||||
width: 100%;
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
/*
|
||||
Those classes will be edited at runtime with values specified
|
||||
by the settings of the CC application
|
||||
*/
|
||||
.hostFontColor{}
|
||||
.hostFontFamily{}
|
||||
.hostFontSize{}
|
||||
|
||||
/*font family, color and size*/
|
||||
.hostFont{}
|
||||
/*background color*/
|
||||
.hostBgd{}
|
||||
/*lighter background color*/
|
||||
.hostBgdLight{}
|
||||
/*darker background color*/
|
||||
.hostBgdDark{}
|
||||
/*background color and font*/
|
||||
.hostElt{}
|
||||
|
||||
|
||||
.hostButton{
|
||||
border:1px solid;
|
||||
border-radius:2px;
|
||||
height:20px;
|
||||
vertical-align:bottom;
|
||||
font-family:inherit;
|
||||
color:inherit;
|
||||
font-size:inherit;
|
||||
}
|
||||
1
client/ayon_core/hosts/aftereffects/api/extension/css/topcoat-desktop-dark.min.css
vendored
Normal file
|
After Width: | Height: | Size: 3.5 KiB |
|
After Width: | Height: | Size: 18 KiB |
|
After Width: | Height: | Size: 18 KiB |
|
After Width: | Height: | Size: 18 KiB |
|
After Width: | Height: | Size: 18 KiB |
|
After Width: | Height: | Size: 18 KiB |
187
client/ayon_core/hosts/aftereffects/api/extension/index.html
Normal file
|
|
@ -0,0 +1,187 @@
|
|||
<!doctype html>
|
||||
<html>
|
||||
<head>
|
||||
<meta charset="utf-8">
|
||||
|
||||
<link rel="stylesheet" href="css/topcoat-desktop-dark.min.css"/>
|
||||
<link id="hostStyle" rel="stylesheet" href="css/styles.css"/>
|
||||
|
||||
<style type="text/css">
|
||||
html, body, iframe {
|
||||
width: 100%;
|
||||
height: 100%;
|
||||
border: 0px;
|
||||
margin: 0px;
|
||||
overflow: hidden;
|
||||
}
|
||||
button {width: 100%;}
|
||||
</style>
|
||||
|
||||
<style>
|
||||
button {width: 100%;}
|
||||
body {margin:0; padding:0; height: 100%;}
|
||||
html {height: 100%;}
|
||||
</style>
|
||||
|
||||
<title></title>
|
||||
<script src="js/libs/jquery-2.0.2.min.js"></script>
|
||||
|
||||
<script type=text/javascript>
|
||||
$(function() {
|
||||
$("a#workfiles-button").bind("click", function() {
|
||||
|
||||
RPC.call('AfterEffects.workfiles_route').then(function (data) {
|
||||
}, function (error) {
|
||||
alert(error);
|
||||
});
|
||||
});
|
||||
});
|
||||
</script>
|
||||
|
||||
<script type=text/javascript>
|
||||
$(function() {
|
||||
$("a#loader-button").bind("click", function() {
|
||||
RPC.call('AfterEffects.loader_route').then(function (data) {
|
||||
}, function (error) {
|
||||
alert(error);
|
||||
});
|
||||
});
|
||||
});
|
||||
</script>
|
||||
|
||||
<script type=text/javascript>
|
||||
$(function() {
|
||||
$("a#publish-button").bind("click", function() {
|
||||
RPC.call('AfterEffects.publish_route').then(function (data) {
|
||||
}, function (error) {
|
||||
alert(error);
|
||||
});
|
||||
});
|
||||
});
|
||||
</script>
|
||||
|
||||
<script type=text/javascript>
|
||||
$(function() {
|
||||
$("a#sceneinventory-button").bind("click", function() {
|
||||
RPC.call('AfterEffects.sceneinventory_route').then(function (data) {
|
||||
}, function (error) {
|
||||
alert(error);
|
||||
});
|
||||
});
|
||||
});
|
||||
</script>
|
||||
|
||||
<script type=text/javascript>
|
||||
$(function() {
|
||||
$("a#setresolution-button").bind("click", function() {
|
||||
RPC.call('AfterEffects.setresolution_route').then(function (data) {
|
||||
}, function (error) {
|
||||
alert(error);
|
||||
});
|
||||
});
|
||||
});
|
||||
</script>
|
||||
|
||||
<script type=text/javascript>
|
||||
$(function() {
|
||||
$("a#setframes-button").bind("click", function() {
|
||||
RPC.call('AfterEffects.setframes_route').then(function (data) {
|
||||
}, function (error) {
|
||||
alert(error);
|
||||
});
|
||||
});
|
||||
});
|
||||
</script>
|
||||
|
||||
<script type=text/javascript>
|
||||
$(function() {
|
||||
$("a#setall-button").bind("click", function() {
|
||||
RPC.call('AfterEffects.setall_route').then(function (data) {
|
||||
}, function (error) {
|
||||
alert(error);
|
||||
});
|
||||
});
|
||||
});
|
||||
</script>
|
||||
|
||||
<script type=text/javascript>
|
||||
$(function() {
|
||||
$("a#create-placeholder-button").bind("click", function() {
|
||||
RPC.call('AfterEffects.create_placeholder_route').then(function (data) {
|
||||
}, function (error) {
|
||||
alert(error);
|
||||
});
|
||||
});
|
||||
});
|
||||
</script>
|
||||
|
||||
<script type=text/javascript>
|
||||
$(function() {
|
||||
$("a#update-placeholder-button").bind("click", function() {
|
||||
RPC.call('AfterEffects.update_placeholder_route').then(function (data) {
|
||||
}, function (error) {
|
||||
alert(error);
|
||||
});
|
||||
});
|
||||
});
|
||||
</script>
|
||||
|
||||
<script type=text/javascript>
|
||||
$(function() {
|
||||
$("a#build-workfile-button").bind("click", function() {
|
||||
RPC.call('AfterEffects.build_workfile_template_route').then(function (data) {
|
||||
}, function (error) {
|
||||
alert(error);
|
||||
});
|
||||
});
|
||||
});
|
||||
</script>
|
||||
|
||||
<script type=text/javascript>
|
||||
$(function() {
|
||||
$("a#experimental-button").bind("click", function() {
|
||||
RPC.call('AfterEffects.experimental_tools_route').then(function (data) {
|
||||
}, function (error) {
|
||||
alert(error);
|
||||
});
|
||||
});
|
||||
});
|
||||
</script>
|
||||
|
||||
|
||||
</head>
|
||||
|
||||
<body class="hostElt">
|
||||
|
||||
<div id="content">
|
||||
|
||||
<div>
|
||||
<div></div><a href=# id=workfiles-button><button class="hostFontSize">Workfiles...</button></a></div>
|
||||
<div><a href=# id=loader-button><button class="hostFontSize">Load...</button></a></div>
|
||||
<div><a href=# id=publish-button><button class="hostFontSize">Publish...</button></a></div>
|
||||
<div><a href=# id=sceneinventory-button><button class="hostFontSize">Manage...</button></a></div>
|
||||
<div><a href=# id=separator0><button class="hostFontSize"> </button></a></div>
|
||||
<div><a href=# id=setresolution-button><button class="hostFontSize">Set Resolution</button></a></div>
|
||||
<div><a href=# id=setframes-button><button class="hostFontSize">Set Frame Range</button></a></div>
|
||||
<div><a href=# id=setall-button><button class="hostFontSize">Apply All Settings</button></a></div>
|
||||
<div><a href=# id=separator1><button class="hostFontSize"> </button></a></div>
|
||||
<div><a href=# id=create-placeholder-button><button class="hostFontSize">Create placeholder</button></a></div>
|
||||
<div><a href=# id=update-placeholder-button><button class="hostFontSize">Update placeholder</button></a></div>
|
||||
<div><a href=# id=build-workfile-button><button class="hostFontSize">Build Workfile from template</button></a></div>
|
||||
<div><a href=# id=separator3><button class="hostFontSize"> </button></a></div>
|
||||
<div><a href=# id=experimental-button><button class="hostFontSize">Experimental Tools...</button></a></div>
|
||||
</div>
|
||||
|
||||
</div>
|
||||
|
||||
<!-- <script src="js/libs/PlayerDebugMode"></script> -->
|
||||
<script src="js/libs/wsrpc.js"></script>
|
||||
<script src="js/libs/loglevel.min.js"></script>
|
||||
<script src="js/libs/CSInterface.js"></script>
|
||||
|
||||
<script src="js/themeManager.js"></script>
|
||||
<script src="js/main.js"></script>
|
||||
|
||||
|
||||
</body>
|
||||
</html>
|
||||
6
client/ayon_core/hosts/aftereffects/api/extension/js/libs/jquery-2.0.2.min.js
vendored
Normal file
|
|
@ -0,0 +1,530 @@
|
|||
// json2.js
|
||||
// 2017-06-12
|
||||
// Public Domain.
|
||||
// NO WARRANTY EXPRESSED OR IMPLIED. USE AT YOUR OWN RISK.
|
||||
|
||||
// USE YOUR OWN COPY. IT IS EXTREMELY UNWISE TO LOAD CODE FROM SERVERS YOU DO
|
||||
// NOT CONTROL.
|
||||
|
||||
// This file creates a global JSON object containing two methods: stringify
|
||||
// and parse. This file provides the ES5 JSON capability to ES3 systems.
|
||||
// If a project might run on IE8 or earlier, then this file should be included.
|
||||
// This file does nothing on ES5 systems.
|
||||
|
||||
// JSON.stringify(value, replacer, space)
|
||||
// value any JavaScript value, usually an object or array.
|
||||
// replacer an optional parameter that determines how object
|
||||
// values are stringified for objects. It can be a
|
||||
// function or an array of strings.
|
||||
// space an optional parameter that specifies the indentation
|
||||
// of nested structures. If it is omitted, the text will
|
||||
// be packed without extra whitespace. If it is a number,
|
||||
// it will specify the number of spaces to indent at each
|
||||
// level. If it is a string (such as "\t" or " "),
|
||||
// it contains the characters used to indent at each level.
|
||||
// This method produces a JSON text from a JavaScript value.
|
||||
// When an object value is found, if the object contains a toJSON
|
||||
// method, its toJSON method will be called and the result will be
|
||||
// stringified. A toJSON method does not serialize: it returns the
|
||||
// value represented by the name/value pair that should be serialized,
|
||||
// or undefined if nothing should be serialized. The toJSON method
|
||||
// will be passed the key associated with the value, and this will be
|
||||
// bound to the value.
|
||||
|
||||
// For example, this would serialize Dates as ISO strings.
|
||||
|
||||
// Date.prototype.toJSON = function (key) {
|
||||
// function f(n) {
|
||||
// // Format integers to have at least two digits.
|
||||
// return (n < 10)
|
||||
// ? "0" + n
|
||||
// : n;
|
||||
// }
|
||||
// return this.getUTCFullYear() + "-" +
|
||||
// f(this.getUTCMonth() + 1) + "-" +
|
||||
// f(this.getUTCDate()) + "T" +
|
||||
// f(this.getUTCHours()) + ":" +
|
||||
// f(this.getUTCMinutes()) + ":" +
|
||||
// f(this.getUTCSeconds()) + "Z";
|
||||
// };
|
||||
|
||||
// You can provide an optional replacer method. It will be passed the
|
||||
// key and value of each member, with this bound to the containing
|
||||
// object. The value that is returned from your method will be
|
||||
// serialized. If your method returns undefined, then the member will
|
||||
// be excluded from the serialization.
|
||||
|
||||
// If the replacer parameter is an array of strings, then it will be
|
||||
// used to select the members to be serialized. It filters the results
|
||||
// such that only members with keys listed in the replacer array are
|
||||
// stringified.
|
||||
|
||||
// Values that do not have JSON representations, such as undefined or
|
||||
// functions, will not be serialized. Such values in objects will be
|
||||
// dropped; in arrays they will be replaced with null. You can use
|
||||
// a replacer function to replace those with JSON values.
|
||||
|
||||
// JSON.stringify(undefined) returns undefined.
|
||||
|
||||
// The optional space parameter produces a stringification of the
|
||||
// value that is filled with line breaks and indentation to make it
|
||||
// easier to read.
|
||||
|
||||
// If the space parameter is a non-empty string, then that string will
|
||||
// be used for indentation. If the space parameter is a number, then
|
||||
// the indentation will be that many spaces.
|
||||
|
||||
// Example:
|
||||
|
||||
// text = JSON.stringify(["e", {pluribus: "unum"}]);
|
||||
// // text is '["e",{"pluribus":"unum"}]'
|
||||
|
||||
// text = JSON.stringify(["e", {pluribus: "unum"}], null, "\t");
|
||||
// // text is '[\n\t"e",\n\t{\n\t\t"pluribus": "unum"\n\t}\n]'
|
||||
|
||||
// text = JSON.stringify([new Date()], function (key, value) {
|
||||
// return this[key] instanceof Date
|
||||
// ? "Date(" + this[key] + ")"
|
||||
// : value;
|
||||
// });
|
||||
// // text is '["Date(---current time---)"]'
|
||||
|
||||
// JSON.parse(text, reviver)
|
||||
// This method parses a JSON text to produce an object or array.
|
||||
// It can throw a SyntaxError exception.
|
||||
|
||||
// The optional reviver parameter is a function that can filter and
|
||||
// transform the results. It receives each of the keys and values,
|
||||
// and its return value is used instead of the original value.
|
||||
// If it returns what it received, then the structure is not modified.
|
||||
// If it returns undefined then the member is deleted.
|
||||
|
||||
// Example:
|
||||
|
||||
// // Parse the text. Values that look like ISO date strings will
|
||||
// // be converted to Date objects.
|
||||
|
||||
// myData = JSON.parse(text, function (key, value) {
|
||||
// var a;
|
||||
// if (typeof value === "string") {
|
||||
// a =
|
||||
// /^(\d{4})-(\d{2})-(\d{2})T(\d{2}):(\d{2}):(\d{2}(?:\.\d*)?)Z$/.exec(value);
|
||||
// if (a) {
|
||||
// return new Date(Date.UTC(
|
||||
// +a[1], +a[2] - 1, +a[3], +a[4], +a[5], +a[6]
|
||||
// ));
|
||||
// }
|
||||
// return value;
|
||||
// }
|
||||
// });
|
||||
|
||||
// myData = JSON.parse(
|
||||
// "[\"Date(09/09/2001)\"]",
|
||||
// function (key, value) {
|
||||
// var d;
|
||||
// if (
|
||||
// typeof value === "string"
|
||||
// && value.slice(0, 5) === "Date("
|
||||
// && value.slice(-1) === ")"
|
||||
// ) {
|
||||
// d = new Date(value.slice(5, -1));
|
||||
// if (d) {
|
||||
// return d;
|
||||
// }
|
||||
// }
|
||||
// return value;
|
||||
// }
|
||||
// );
|
||||
|
||||
// This is a reference implementation. You are free to copy, modify, or
|
||||
// redistribute.
|
||||
|
||||
/*jslint
|
||||
eval, for, this
|
||||
*/
|
||||
|
||||
/*property
|
||||
JSON, apply, call, charCodeAt, getUTCDate, getUTCFullYear, getUTCHours,
|
||||
getUTCMinutes, getUTCMonth, getUTCSeconds, hasOwnProperty, join,
|
||||
lastIndex, length, parse, prototype, push, replace, slice, stringify,
|
||||
test, toJSON, toString, valueOf
|
||||
*/
|
||||
|
||||
|
||||
// Create a JSON object only if one does not already exist. We create the
|
||||
// methods in a closure to avoid creating global variables.
|
||||
|
||||
if (typeof JSON !== "object") {
|
||||
JSON = {};
|
||||
}
|
||||
|
||||
(function () {
|
||||
"use strict";
|
||||
|
||||
var rx_one = /^[\],:{}\s]*$/;
|
||||
var rx_two = /\\(?:["\\\/bfnrt]|u[0-9a-fA-F]{4})/g;
|
||||
var rx_three = /"[^"\\\n\r]*"|true|false|null|-?\d+(?:\.\d*)?(?:[eE][+\-]?\d+)?/g;
|
||||
var rx_four = /(?:^|:|,)(?:\s*\[)+/g;
|
||||
var rx_escapable = /[\\"\u0000-\u001f\u007f-\u009f\u00ad\u0600-\u0604\u070f\u17b4\u17b5\u200c-\u200f\u2028-\u202f\u2060-\u206f\ufeff\ufff0-\uffff]/g;
|
||||
var rx_dangerous = /[\u0000\u00ad\u0600-\u0604\u070f\u17b4\u17b5\u200c-\u200f\u2028-\u202f\u2060-\u206f\ufeff\ufff0-\uffff]/g;
|
||||
|
||||
function f(n) {
|
||||
// Format integers to have at least two digits.
|
||||
return (n < 10)
|
||||
? "0" + n
|
||||
: n;
|
||||
}
|
||||
|
||||
function this_value() {
|
||||
return this.valueOf();
|
||||
}
|
||||
|
||||
if (typeof Date.prototype.toJSON !== "function") {
|
||||
|
||||
Date.prototype.toJSON = function () {
|
||||
|
||||
return isFinite(this.valueOf())
|
||||
? (
|
||||
this.getUTCFullYear()
|
||||
+ "-"
|
||||
+ f(this.getUTCMonth() + 1)
|
||||
+ "-"
|
||||
+ f(this.getUTCDate())
|
||||
+ "T"
|
||||
+ f(this.getUTCHours())
|
||||
+ ":"
|
||||
+ f(this.getUTCMinutes())
|
||||
+ ":"
|
||||
+ f(this.getUTCSeconds())
|
||||
+ "Z"
|
||||
)
|
||||
: null;
|
||||
};
|
||||
|
||||
Boolean.prototype.toJSON = this_value;
|
||||
Number.prototype.toJSON = this_value;
|
||||
String.prototype.toJSON = this_value;
|
||||
}
|
||||
|
||||
var gap;
|
||||
var indent;
|
||||
var meta;
|
||||
var rep;
|
||||
|
||||
|
||||
function quote(string) {
|
||||
|
||||
// If the string contains no control characters, no quote characters, and no
|
||||
// backslash characters, then we can safely slap some quotes around it.
|
||||
// Otherwise we must also replace the offending characters with safe escape
|
||||
// sequences.
|
||||
|
||||
rx_escapable.lastIndex = 0;
|
||||
return rx_escapable.test(string)
|
||||
? "\"" + string.replace(rx_escapable, function (a) {
|
||||
var c = meta[a];
|
||||
return typeof c === "string"
|
||||
? c
|
||||
: "\\u" + ("0000" + a.charCodeAt(0).toString(16)).slice(-4);
|
||||
}) + "\""
|
||||
: "\"" + string + "\"";
|
||||
}
|
||||
|
||||
|
||||
function str(key, holder) {
|
||||
|
||||
// Produce a string from holder[key].
|
||||
|
||||
var i; // The loop counter.
|
||||
var k; // The member key.
|
||||
var v; // The member value.
|
||||
var length;
|
||||
var mind = gap;
|
||||
var partial;
|
||||
var value = holder[key];
|
||||
|
||||
// If the value has a toJSON method, call it to obtain a replacement value.
|
||||
|
||||
if (
|
||||
value
|
||||
&& typeof value === "object"
|
||||
&& typeof value.toJSON === "function"
|
||||
) {
|
||||
value = value.toJSON(key);
|
||||
}
|
||||
|
||||
// If we were called with a replacer function, then call the replacer to
|
||||
// obtain a replacement value.
|
||||
|
||||
if (typeof rep === "function") {
|
||||
value = rep.call(holder, key, value);
|
||||
}
|
||||
|
||||
// What happens next depends on the value's type.
|
||||
|
||||
switch (typeof value) {
|
||||
case "string":
|
||||
return quote(value);
|
||||
|
||||
case "number":
|
||||
|
||||
// JSON numbers must be finite. Encode non-finite numbers as null.
|
||||
|
||||
return (isFinite(value))
|
||||
? String(value)
|
||||
: "null";
|
||||
|
||||
case "boolean":
|
||||
case "null":
|
||||
|
||||
// If the value is a boolean or null, convert it to a string. Note:
|
||||
// typeof null does not produce "null". The case is included here in
|
||||
// the remote chance that this gets fixed someday.
|
||||
|
||||
return String(value);
|
||||
|
||||
// If the type is "object", we might be dealing with an object or an array or
|
||||
// null.
|
||||
|
||||
case "object":
|
||||
|
||||
// Due to a specification blunder in ECMAScript, typeof null is "object",
|
||||
// so watch out for that case.
|
||||
|
||||
if (!value) {
|
||||
return "null";
|
||||
}
|
||||
|
||||
// Make an array to hold the partial results of stringifying this object value.
|
||||
|
||||
gap += indent;
|
||||
partial = [];
|
||||
|
||||
// Is the value an array?
|
||||
|
||||
if (Object.prototype.toString.apply(value) === "[object Array]") {
|
||||
|
||||
// The value is an array. Stringify every element. Use null as a placeholder
|
||||
// for non-JSON values.
|
||||
|
||||
length = value.length;
|
||||
for (i = 0; i < length; i += 1) {
|
||||
partial[i] = str(i, value) || "null";
|
||||
}
|
||||
|
||||
// Join all of the elements together, separated with commas, and wrap them in
|
||||
// brackets.
|
||||
|
||||
v = partial.length === 0
|
||||
? "[]"
|
||||
: gap
|
||||
? (
|
||||
"[\n"
|
||||
+ gap
|
||||
+ partial.join(",\n" + gap)
|
||||
+ "\n"
|
||||
+ mind
|
||||
+ "]"
|
||||
)
|
||||
: "[" + partial.join(",") + "]";
|
||||
gap = mind;
|
||||
return v;
|
||||
}
|
||||
|
||||
// If the replacer is an array, use it to select the members to be stringified.
|
||||
|
||||
if (rep && typeof rep === "object") {
|
||||
length = rep.length;
|
||||
for (i = 0; i < length; i += 1) {
|
||||
if (typeof rep[i] === "string") {
|
||||
k = rep[i];
|
||||
v = str(k, value);
|
||||
if (v) {
|
||||
partial.push(quote(k) + (
|
||||
(gap)
|
||||
? ": "
|
||||
: ":"
|
||||
) + v);
|
||||
}
|
||||
}
|
||||
}
|
||||
} else {
|
||||
|
||||
// Otherwise, iterate through all of the keys in the object.
|
||||
|
||||
for (k in value) {
|
||||
if (Object.prototype.hasOwnProperty.call(value, k)) {
|
||||
v = str(k, value);
|
||||
if (v) {
|
||||
partial.push(quote(k) + (
|
||||
(gap)
|
||||
? ": "
|
||||
: ":"
|
||||
) + v);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Join all of the member texts together, separated with commas,
|
||||
// and wrap them in braces.
|
||||
|
||||
v = partial.length === 0
|
||||
? "{}"
|
||||
: gap
|
||||
? "{\n" + gap + partial.join(",\n" + gap) + "\n" + mind + "}"
|
||||
: "{" + partial.join(",") + "}";
|
||||
gap = mind;
|
||||
return v;
|
||||
}
|
||||
}
|
||||
|
||||
// If the JSON object does not yet have a stringify method, give it one.
|
||||
|
||||
if (typeof JSON.stringify !== "function") {
|
||||
meta = { // table of character substitutions
|
||||
"\b": "\\b",
|
||||
"\t": "\\t",
|
||||
"\n": "\\n",
|
||||
"\f": "\\f",
|
||||
"\r": "\\r",
|
||||
"\"": "\\\"",
|
||||
"\\": "\\\\"
|
||||
};
|
||||
JSON.stringify = function (value, replacer, space) {
|
||||
|
||||
// The stringify method takes a value and an optional replacer, and an optional
|
||||
// space parameter, and returns a JSON text. The replacer can be a function
|
||||
// that can replace values, or an array of strings that will select the keys.
|
||||
// A default replacer method can be provided. Use of the space parameter can
|
||||
// produce text that is more easily readable.
|
||||
|
||||
var i;
|
||||
gap = "";
|
||||
indent = "";
|
||||
|
||||
// If the space parameter is a number, make an indent string containing that
|
||||
// many spaces.
|
||||
|
||||
if (typeof space === "number") {
|
||||
for (i = 0; i < space; i += 1) {
|
||||
indent += " ";
|
||||
}
|
||||
|
||||
// If the space parameter is a string, it will be used as the indent string.
|
||||
|
||||
} else if (typeof space === "string") {
|
||||
indent = space;
|
||||
}
|
||||
|
||||
// If there is a replacer, it must be a function or an array.
|
||||
// Otherwise, throw an error.
|
||||
|
||||
rep = replacer;
|
||||
if (replacer && typeof replacer !== "function" && (
|
||||
typeof replacer !== "object"
|
||||
|| typeof replacer.length !== "number"
|
||||
)) {
|
||||
throw new Error("JSON.stringify");
|
||||
}
|
||||
|
||||
// Make a fake root object containing our value under the key of "".
|
||||
// Return the result of stringifying the value.
|
||||
|
||||
return str("", {"": value});
|
||||
};
|
||||
}
|
||||
|
||||
|
||||
// If the JSON object does not yet have a parse method, give it one.
|
||||
|
||||
if (typeof JSON.parse !== "function") {
|
||||
JSON.parse = function (text, reviver) {
|
||||
|
||||
// The parse method takes a text and an optional reviver function, and returns
|
||||
// a JavaScript value if the text is a valid JSON text.
|
||||
|
||||
var j;
|
||||
|
||||
function walk(holder, key) {
|
||||
|
||||
// The walk method is used to recursively walk the resulting structure so
|
||||
// that modifications can be made.
|
||||
|
||||
var k;
|
||||
var v;
|
||||
var value = holder[key];
|
||||
if (value && typeof value === "object") {
|
||||
for (k in value) {
|
||||
if (Object.prototype.hasOwnProperty.call(value, k)) {
|
||||
v = walk(value, k);
|
||||
if (v !== undefined) {
|
||||
value[k] = v;
|
||||
} else {
|
||||
delete value[k];
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return reviver.call(holder, key, value);
|
||||
}
|
||||
|
||||
|
||||
// Parsing happens in four stages. In the first stage, we replace certain
|
||||
// Unicode characters with escape sequences. JavaScript handles many characters
|
||||
// incorrectly, either silently deleting them, or treating them as line endings.
|
||||
|
||||
text = String(text);
|
||||
rx_dangerous.lastIndex = 0;
|
||||
if (rx_dangerous.test(text)) {
|
||||
text = text.replace(rx_dangerous, function (a) {
|
||||
return (
|
||||
"\\u"
|
||||
+ ("0000" + a.charCodeAt(0).toString(16)).slice(-4)
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
// In the second stage, we run the text against regular expressions that look
|
||||
// for non-JSON patterns. We are especially concerned with "()" and "new"
|
||||
// because they can cause invocation, and "=" because it can cause mutation.
|
||||
// But just to be safe, we want to reject all unexpected forms.
|
||||
|
||||
// We split the second stage into 4 regexp operations in order to work around
|
||||
// crippling inefficiencies in IE's and Safari's regexp engines. First we
|
||||
// replace the JSON backslash pairs with "@" (a non-JSON character). Second, we
|
||||
// replace all simple value tokens with "]" characters. Third, we delete all
|
||||
// open brackets that follow a colon or comma or that begin the text. Finally,
|
||||
// we look to see that the remaining characters are only whitespace or "]" or
|
||||
// "," or ":" or "{" or "}". If that is so, then the text is safe for eval.
|
||||
|
||||
if (
|
||||
rx_one.test(
|
||||
text
|
||||
.replace(rx_two, "@")
|
||||
.replace(rx_three, "]")
|
||||
.replace(rx_four, "")
|
||||
)
|
||||
) {
|
||||
|
||||
// In the third stage we use the eval function to compile the text into a
|
||||
// JavaScript structure. The "{" operator is subject to a syntactic ambiguity
|
||||
// in JavaScript: it can begin a block or an object literal. We wrap the text
|
||||
// in parens to eliminate the ambiguity.
|
||||
|
||||
j = eval("(" + text + ")");
|
||||
|
||||
// In the optional fourth stage, we recursively walk the new structure, passing
|
||||
// each name/value pair to a reviver function for possible transformation.
|
||||
|
||||
return (typeof reviver === "function")
|
||||
? walk({"": j}, "")
|
||||
: j;
|
||||
}
|
||||
|
||||
// If the text is not JSON parseable, then a SyntaxError is thrown.
|
||||
|
||||
throw new SyntaxError("JSON.parse");
|
||||
};
|
||||
}
|
||||
}());
|
||||
2
client/ayon_core/hosts/aftereffects/api/extension/js/libs/loglevel.min.js
vendored
Normal file
|
|
@ -0,0 +1,2 @@
|
|||
/*! loglevel - v1.6.8 - https://github.com/pimterry/loglevel - (c) 2020 Tim Perry - licensed MIT */
|
||||
!function(a,b){"use strict";"function"==typeof define&&define.amd?define(b):"object"==typeof module&&module.exports?module.exports=b():a.log=b()}(this,function(){"use strict";function a(a,b){var c=a[b];if("function"==typeof c.bind)return c.bind(a);try{return Function.prototype.bind.call(c,a)}catch(b){return function(){return Function.prototype.apply.apply(c,[a,arguments])}}}function b(){console.log&&(console.log.apply?console.log.apply(console,arguments):Function.prototype.apply.apply(console.log,[console,arguments])),console.trace&&console.trace()}function c(c){return"debug"===c&&(c="log"),typeof console!==i&&("trace"===c&&j?b:void 0!==console[c]?a(console,c):void 0!==console.log?a(console,"log"):h)}function d(a,b){for(var c=0;c<k.length;c++){var d=k[c];this[d]=c<a?h:this.methodFactory(d,a,b)}this.log=this.debug}function e(a,b,c){return function(){typeof console!==i&&(d.call(this,b,c),this[a].apply(this,arguments))}}function f(a,b,d){return c(a)||e.apply(this,arguments)}function g(a,b,c){function e(a){var b=(k[a]||"silent").toUpperCase();if(typeof window!==i){try{return void(window.localStorage[l]=b)}catch(a){}try{window.document.cookie=encodeURIComponent(l)+"="+b+";"}catch(a){}}}function g(){var a;if(typeof window!==i){try{a=window.localStorage[l]}catch(a){}if(typeof a===i)try{var b=window.document.cookie,c=b.indexOf(encodeURIComponent(l)+"=");-1!==c&&(a=/^([^;]+)/.exec(b.slice(c))[1])}catch(a){}return void 0===j.levels[a]&&(a=void 0),a}}var h,j=this,l="loglevel";a&&(l+=":"+a),j.name=a,j.levels={TRACE:0,DEBUG:1,INFO:2,WARN:3,ERROR:4,SILENT:5},j.methodFactory=c||f,j.getLevel=function(){return h},j.setLevel=function(b,c){if("string"==typeof b&&void 0!==j.levels[b.toUpperCase()]&&(b=j.levels[b.toUpperCase()]),!("number"==typeof b&&b>=0&&b<=j.levels.SILENT))throw"log.setLevel() called with invalid level: "+b;if(h=b,!1!==c&&e(b),d.call(j,b,a),typeof console===i&&b<j.levels.SILENT)return"No console available for logging"},j.setDefaultLevel=function(a){g()||j.setLevel(a,!1)},j.enableAll=function(a){j.setLevel(j.levels.TRACE,a)},j.disableAll=function(a){j.setLevel(j.levels.SILENT,a)};var m=g();null==m&&(m=null==b?"WARN":b),j.setLevel(m,!1)}var h=function(){},i="undefined",j=typeof window!==i&&typeof window.navigator!==i&&/Trident\/|MSIE /.test(window.navigator.userAgent),k=["trace","debug","info","warn","error"],l=new g,m={};l.getLogger=function(a){if("string"!=typeof a||""===a)throw new TypeError("You must supply a name when creating a logger.");var b=m[a];return b||(b=m[a]=new g(a,l.getLevel(),l.methodFactory)),b};var n=typeof window!==i?window.log:void 0;return l.noConflict=function(){return typeof window!==i&&window.log===l&&(window.log=n),l},l.getLoggers=function(){return m},l});
|
||||
|
|
@ -0,0 +1,393 @@
|
|||
(function (global, factory) {
|
||||
typeof exports === 'object' && typeof module !== 'undefined' ? module.exports = factory() :
|
||||
typeof define === 'function' && define.amd ? define(factory) :
|
||||
(global = global || self, global.WSRPC = factory());
|
||||
}(this, function () { 'use strict';
|
||||
|
||||
function _classCallCheck(instance, Constructor) {
|
||||
if (!(instance instanceof Constructor)) {
|
||||
throw new TypeError("Cannot call a class as a function");
|
||||
}
|
||||
}
|
||||
|
||||
var Deferred = function Deferred() {
|
||||
_classCallCheck(this, Deferred);
|
||||
|
||||
var self = this;
|
||||
self.resolve = null;
|
||||
self.reject = null;
|
||||
self.done = false;
|
||||
|
||||
function wrapper(func) {
|
||||
return function () {
|
||||
if (self.done) throw new Error('Promise already done');
|
||||
self.done = true;
|
||||
return func.apply(this, arguments);
|
||||
};
|
||||
}
|
||||
|
||||
self.promise = new Promise(function (resolve, reject) {
|
||||
self.resolve = wrapper(resolve);
|
||||
self.reject = wrapper(reject);
|
||||
});
|
||||
|
||||
self.promise.isPending = function () {
|
||||
return !self.done;
|
||||
};
|
||||
|
||||
return self;
|
||||
};
|
||||
|
||||
function logGroup(group, level, args) {
|
||||
console.group(group);
|
||||
console[level].apply(this, args);
|
||||
console.groupEnd();
|
||||
}
|
||||
|
||||
function log() {
|
||||
if (!WSRPC.DEBUG) return;
|
||||
logGroup('WSRPC.DEBUG', 'trace', arguments);
|
||||
}
|
||||
|
||||
function trace(msg) {
|
||||
if (!WSRPC.TRACE) return;
|
||||
var payload = msg;
|
||||
if ('data' in msg) payload = JSON.parse(msg.data);
|
||||
logGroup("WSRPC.TRACE", 'trace', [payload]);
|
||||
}
|
||||
|
||||
function getAbsoluteWsUrl(url) {
|
||||
if (/^\w+:\/\//.test(url)) return url;
|
||||
if (typeof window == 'undefined' && window.location.host.length < 1) throw new Error("Can not construct absolute URL from ".concat(window.location));
|
||||
var scheme = window.location.protocol === "https:" ? "wss:" : "ws:";
|
||||
var port = window.location.port === '' ? ":".concat(window.location.port) : '';
|
||||
var host = window.location.host;
|
||||
var path = url.replace(/^\/+/gm, '');
|
||||
return "".concat(scheme, "//").concat(host).concat(port, "/").concat(path);
|
||||
}
|
||||
|
||||
var readyState = Object.freeze({
|
||||
0: 'CONNECTING',
|
||||
1: 'OPEN',
|
||||
2: 'CLOSING',
|
||||
3: 'CLOSED'
|
||||
});
|
||||
|
||||
var WSRPC = function WSRPC(URL) {
|
||||
var reconnectTimeout = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 1000;
|
||||
|
||||
_classCallCheck(this, WSRPC);
|
||||
|
||||
var self = this;
|
||||
URL = getAbsoluteWsUrl(URL);
|
||||
self.id = 1;
|
||||
self.eventId = 0;
|
||||
self.socketStarted = false;
|
||||
self.eventStore = {
|
||||
onconnect: {},
|
||||
onerror: {},
|
||||
onclose: {},
|
||||
onchange: {}
|
||||
};
|
||||
self.connectionNumber = 0;
|
||||
self.oneTimeEventStore = {
|
||||
onconnect: [],
|
||||
onerror: [],
|
||||
onclose: [],
|
||||
onchange: []
|
||||
};
|
||||
self.callQueue = [];
|
||||
|
||||
function createSocket() {
|
||||
var ws = new WebSocket(URL);
|
||||
|
||||
var rejectQueue = function rejectQueue() {
|
||||
self.connectionNumber++; // rejects incoming calls
|
||||
|
||||
var deferred; //reject all pending calls
|
||||
|
||||
while (0 < self.callQueue.length) {
|
||||
var callObj = self.callQueue.shift();
|
||||
deferred = self.store[callObj.id];
|
||||
delete self.store[callObj.id];
|
||||
|
||||
if (deferred && deferred.promise.isPending()) {
|
||||
deferred.reject('WebSocket error occurred');
|
||||
}
|
||||
} // reject all from the store
|
||||
|
||||
|
||||
for (var key in self.store) {
|
||||
if (!self.store.hasOwnProperty(key)) continue;
|
||||
deferred = self.store[key];
|
||||
|
||||
if (deferred && deferred.promise.isPending()) {
|
||||
deferred.reject('WebSocket error occurred');
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
function reconnect(callEvents) {
|
||||
setTimeout(function () {
|
||||
try {
|
||||
self.socket = createSocket();
|
||||
self.id = 1;
|
||||
} catch (exc) {
|
||||
callEvents('onerror', exc);
|
||||
delete self.socket;
|
||||
console.error(exc);
|
||||
}
|
||||
}, reconnectTimeout);
|
||||
}
|
||||
|
||||
ws.onclose = function (err) {
|
||||
log('ONCLOSE CALLED', 'STATE', self.public.state());
|
||||
trace(err);
|
||||
|
||||
for (var serial in self.store) {
|
||||
if (!self.store.hasOwnProperty(serial)) continue;
|
||||
|
||||
if (self.store[serial].hasOwnProperty('reject')) {
|
||||
self.store[serial].reject('Connection closed');
|
||||
}
|
||||
}
|
||||
|
||||
rejectQueue();
|
||||
callEvents('onclose', err);
|
||||
callEvents('onchange', err);
|
||||
reconnect(callEvents);
|
||||
};
|
||||
|
||||
ws.onerror = function (err) {
|
||||
log('ONERROR CALLED', 'STATE', self.public.state());
|
||||
trace(err);
|
||||
rejectQueue();
|
||||
callEvents('onerror', err);
|
||||
callEvents('onchange', err);
|
||||
log('WebSocket has been closed by error: ', err);
|
||||
};
|
||||
|
||||
function tryCallEvent(func, event) {
|
||||
try {
|
||||
return func(event);
|
||||
} catch (e) {
|
||||
if (e.hasOwnProperty('stack')) {
|
||||
log(e.stack);
|
||||
} else {
|
||||
log('Event function', func, 'raised unknown error:', e);
|
||||
}
|
||||
|
||||
console.error(e);
|
||||
}
|
||||
}
|
||||
|
||||
function callEvents(evName, event) {
|
||||
while (0 < self.oneTimeEventStore[evName].length) {
|
||||
var deferred = self.oneTimeEventStore[evName].shift();
|
||||
if (deferred.hasOwnProperty('resolve') && deferred.promise.isPending()) deferred.resolve();
|
||||
}
|
||||
|
||||
for (var i in self.eventStore[evName]) {
|
||||
if (!self.eventStore[evName].hasOwnProperty(i)) continue;
|
||||
var cur = self.eventStore[evName][i];
|
||||
tryCallEvent(cur, event);
|
||||
}
|
||||
}
|
||||
|
||||
ws.onopen = function (ev) {
|
||||
log('ONOPEN CALLED', 'STATE', self.public.state());
|
||||
trace(ev);
|
||||
|
||||
while (0 < self.callQueue.length) {
|
||||
// noinspection JSUnresolvedFunction
|
||||
self.socket.send(JSON.stringify(self.callQueue.shift(), 0, 1));
|
||||
}
|
||||
|
||||
callEvents('onconnect', ev);
|
||||
callEvents('onchange', ev);
|
||||
};
|
||||
|
||||
function handleCall(self, data) {
|
||||
if (!self.routes.hasOwnProperty(data.method)) throw new Error('Route not found');
|
||||
var connectionNumber = self.connectionNumber;
|
||||
var deferred = new Deferred();
|
||||
deferred.promise.then(function (result) {
|
||||
if (connectionNumber !== self.connectionNumber) return;
|
||||
self.socket.send(JSON.stringify({
|
||||
id: data.id,
|
||||
result: result
|
||||
}));
|
||||
}, function (error) {
|
||||
if (connectionNumber !== self.connectionNumber) return;
|
||||
self.socket.send(JSON.stringify({
|
||||
id: data.id,
|
||||
error: error
|
||||
}));
|
||||
});
|
||||
var func = self.routes[data.method];
|
||||
if (self.asyncRoutes[data.method]) return func.apply(deferred, [data.params]);
|
||||
|
||||
function badPromise() {
|
||||
throw new Error("You should register route with async flag.");
|
||||
}
|
||||
|
||||
var promiseMock = {
|
||||
resolve: badPromise,
|
||||
reject: badPromise
|
||||
};
|
||||
|
||||
try {
|
||||
deferred.resolve(func.apply(promiseMock, [data.params]));
|
||||
} catch (e) {
|
||||
deferred.reject(e);
|
||||
console.error(e);
|
||||
}
|
||||
}
|
||||
|
||||
function handleError(self, data) {
|
||||
if (!self.store.hasOwnProperty(data.id)) return log('Unknown callback');
|
||||
var deferred = self.store[data.id];
|
||||
if (typeof deferred === 'undefined') return log('Confirmation without handler');
|
||||
delete self.store[data.id];
|
||||
log('REJECTING', data.error);
|
||||
deferred.reject(data.error);
|
||||
}
|
||||
|
||||
function handleResult(self, data) {
|
||||
var deferred = self.store[data.id];
|
||||
if (typeof deferred === 'undefined') return log('Confirmation without handler');
|
||||
delete self.store[data.id];
|
||||
|
||||
if (data.hasOwnProperty('result')) {
|
||||
return deferred.resolve(data.result);
|
||||
}
|
||||
|
||||
return deferred.reject(data.error);
|
||||
}
|
||||
|
||||
ws.onmessage = function (message) {
|
||||
log('ONMESSAGE CALLED', 'STATE', self.public.state());
|
||||
trace(message);
|
||||
if (message.type !== 'message') return;
|
||||
var data;
|
||||
|
||||
try {
|
||||
data = JSON.parse(message.data);
|
||||
log(data);
|
||||
|
||||
if (data.hasOwnProperty('method')) {
|
||||
return handleCall(self, data);
|
||||
} else if (data.hasOwnProperty('error') && data.error === null) {
|
||||
return handleError(self, data);
|
||||
} else {
|
||||
return handleResult(self, data);
|
||||
}
|
||||
} catch (exception) {
|
||||
var err = {
|
||||
error: exception.message,
|
||||
result: null,
|
||||
id: data ? data.id : null
|
||||
};
|
||||
self.socket.send(JSON.stringify(err));
|
||||
console.error(exception);
|
||||
}
|
||||
};
|
||||
|
||||
return ws;
|
||||
}
|
||||
|
||||
function makeCall(func, args, params) {
|
||||
self.id += 2;
|
||||
var deferred = new Deferred();
|
||||
var callObj = Object.freeze({
|
||||
id: self.id,
|
||||
method: func,
|
||||
params: args
|
||||
});
|
||||
var state = self.public.state();
|
||||
|
||||
if (state === 'OPEN') {
|
||||
self.store[self.id] = deferred;
|
||||
self.socket.send(JSON.stringify(callObj));
|
||||
} else if (state === 'CONNECTING') {
|
||||
log('SOCKET IS', state);
|
||||
self.store[self.id] = deferred;
|
||||
self.callQueue.push(callObj);
|
||||
} else {
|
||||
log('SOCKET IS', state);
|
||||
|
||||
if (params && params['noWait']) {
|
||||
deferred.reject("Socket is: ".concat(state));
|
||||
} else {
|
||||
self.store[self.id] = deferred;
|
||||
self.callQueue.push(callObj);
|
||||
}
|
||||
}
|
||||
|
||||
return deferred.promise;
|
||||
}
|
||||
|
||||
self.asyncRoutes = {};
|
||||
self.routes = {};
|
||||
self.store = {};
|
||||
self.public = Object.freeze({
|
||||
call: function call(func, args, params) {
|
||||
return makeCall(func, args, params);
|
||||
},
|
||||
addRoute: function addRoute(route, callback, isAsync) {
|
||||
self.asyncRoutes[route] = isAsync || false;
|
||||
self.routes[route] = callback;
|
||||
},
|
||||
deleteRoute: function deleteRoute(route) {
|
||||
delete self.asyncRoutes[route];
|
||||
return delete self.routes[route];
|
||||
},
|
||||
addEventListener: function addEventListener(event, func) {
|
||||
var eventId = self.eventId++;
|
||||
self.eventStore[event][eventId] = func;
|
||||
return eventId;
|
||||
},
|
||||
removeEventListener: function removeEventListener(event, index) {
|
||||
if (self.eventStore[event].hasOwnProperty(index)) {
|
||||
delete self.eventStore[event][index];
|
||||
return true;
|
||||
} else {
|
||||
return false;
|
||||
}
|
||||
},
|
||||
onEvent: function onEvent(event) {
|
||||
var deferred = new Deferred();
|
||||
self.oneTimeEventStore[event].push(deferred);
|
||||
return deferred.promise;
|
||||
},
|
||||
destroy: function destroy() {
|
||||
return self.socket.close();
|
||||
},
|
||||
state: function state() {
|
||||
return readyState[this.stateCode()];
|
||||
},
|
||||
stateCode: function stateCode() {
|
||||
if (self.socketStarted && self.socket) return self.socket.readyState;
|
||||
return 3;
|
||||
},
|
||||
connect: function connect() {
|
||||
self.socketStarted = true;
|
||||
self.socket = createSocket();
|
||||
}
|
||||
});
|
||||
self.public.addRoute('log', function (argsObj) {
|
||||
//console.info("Websocket sent: ".concat(argsObj));
|
||||
});
|
||||
self.public.addRoute('ping', function (data) {
|
||||
return data;
|
||||
});
|
||||
return self.public;
|
||||
};
|
||||
|
||||
WSRPC.DEBUG = false;
|
||||
WSRPC.TRACE = false;
|
||||
|
||||
return WSRPC;
|
||||
|
||||
}));
|
||||
//# sourceMappingURL=wsrpc.js.map
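The vendored client above exposes a small frozen API (call, addRoute, deleteRoute, addEventListener, onEvent, connect, state). A minimal usage sketch, with an illustrative URL and route names, assuming the script is loaded in a page context:

var rpc = new WSRPC('ws://localhost:8099/ws/', 5000);   // URL and reconnect timeout are assumptions
// synchronous route: the return value becomes the RPC result
rpc.addRoute('ping', function (data) { return data; });
// async route: the handler resolves or rejects the deferred itself
rpc.addRoute('slow', function (params) { this.resolve(params); }, true);
rpc.addEventListener('onconnect', function () { console.log('connected'); });
rpc.connect();
rpc.call('server.method', {foo: 'bar'}).then(function (result) { console.log(result); });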
|
||||
1
client/ayon_core/hosts/aftereffects/api/extension/js/libs/wsrpc.min.js
vendored
Normal file
412
client/ayon_core/hosts/aftereffects/api/extension/js/main.js
Normal file
|
|
@ -0,0 +1,412 @@
|
|||
/*jslint vars: true, plusplus: true, devel: true, nomen: true, regexp: true,
|
||||
indent: 4, maxerr: 50 */
|
||||
/*global $, window, location, CSInterface, SystemPath, themeManager*/
|
||||
|
||||
|
||||
var csInterface = new CSInterface();
|
||||
|
||||
log.warn("script start");
|
||||
|
||||
WSRPC.DEBUG = false;
|
||||
WSRPC.TRACE = false;
|
||||
|
||||
// get websocket server url from environment value
|
||||
async function startUp(url){
|
||||
var promis = runEvalScript("getEnv('" + url + "')");
|
||||
|
||||
var res = await promis;
|
||||
log.warn("res: " + res);
|
||||
|
||||
promis = runEvalScript("getEnv('AYON_DEBUG')");
|
||||
var debug = await promis;
|
||||
log.warn("debug: " + debug);
|
||||
if (debug && debug.toString() == '3'){
|
||||
WSRPC.DEBUG = true;
|
||||
WSRPC.TRACE = true;
|
||||
}
|
||||
// run rest only after resolved promise
|
||||
main(res);
|
||||
}
|
||||
|
||||
function get_extension_version(){
|
||||
/** Returns version number from extension manifest.xml **/
|
||||
log.debug("get_extension_version")
|
||||
var path = csInterface.getSystemPath(SystemPath.EXTENSION);
|
||||
log.debug("extension path " + path);
|
||||
|
||||
var result = window.cep.fs.readFile(path + "/CSXS/manifest.xml");
|
||||
var version = undefined;
|
||||
if(result.err === 0){
|
||||
if (window.DOMParser) {
|
||||
const parser = new DOMParser();
|
||||
const xmlDoc = parser.parseFromString(result.data.toString(),
|
||||
'text/xml');
|
||||
const children = xmlDoc.children;
|
||||
|
||||
for (let i = 0; i < children.length; i++) {
|
||||
if (children[i] &&
|
||||
children[i].getAttribute('ExtensionBundleVersion')) {
|
||||
version =
|
||||
children[i].getAttribute('ExtensionBundleVersion');
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return '{"result":"' + version + '"}'
|
||||
}
|
||||
|
||||
function main(websocket_url){
|
||||
// creates connection to 'websocket_url', registers routes
|
||||
var default_url = 'ws://localhost:8099/ws/';
|
||||
|
||||
if (websocket_url == ''){
|
||||
websocket_url = default_url;
|
||||
}
|
||||
RPC = new WSRPC(websocket_url, 5000); // spin connection
|
||||
|
||||
RPC.connect();
|
||||
|
||||
log.warn("connected");
|
||||
|
||||
RPC.addRoute('AfterEffects.open', function (data) {
|
||||
log.warn('Server called client route "open":', data);
|
||||
var escapedPath = EscapeStringForJSX(data.path);
|
||||
return runEvalScript("fileOpen('" + escapedPath +"')")
|
||||
.then(function(result){
|
||||
log.warn("open: " + result);
|
||||
return result;
|
||||
});
|
||||
});
|
||||
|
||||
RPC.addRoute('AfterEffects.get_metadata', function (data) {
|
||||
log.warn('Server called client route "get_metadata":', data);
|
||||
return runEvalScript("getMetadata()")
|
||||
.then(function(result){
|
||||
log.warn("getMetadata: " + result);
|
||||
return result;
|
||||
});
|
||||
});
|
||||
|
||||
RPC.addRoute('AfterEffects.get_active_document_name', function (data) {
|
||||
log.warn('Server called client route ' +
|
||||
'"get_active_document_name":', data);
|
||||
return runEvalScript("getActiveDocumentName()")
|
||||
.then(function(result){
|
||||
log.warn("get_active_document_name: " + result);
|
||||
return result;
|
||||
});
|
||||
});
|
||||
|
||||
RPC.addRoute('AfterEffects.get_active_document_full_name', function (data){
|
||||
log.warn('Server called client route ' +
|
||||
'"get_active_document_full_name":', data);
|
||||
return runEvalScript("getActiveDocumentFullName()")
|
||||
.then(function(result){
|
||||
log.warn("get_active_document_full_name: " + result);
|
||||
return result;
|
||||
});
|
||||
});
|
||||
|
||||
RPC.addRoute('AfterEffects.add_item', function (data) {
|
||||
log.warn('Server called client route "add_item":', data);
|
||||
var escapedName = EscapeStringForJSX(data.name);
|
||||
return runEvalScript("addItem('" + escapedName +"', " +
|
||||
"'" + data.item_type + "')")
|
||||
.then(function(result){
|
||||
log.warn("get_items: " + result);
|
||||
return result;
|
||||
});
|
||||
});
|
||||
|
||||
RPC.addRoute('AfterEffects.get_items', function (data) {
|
||||
log.warn('Server called client route "get_items":', data);
|
||||
return runEvalScript("getItems(" + data.comps + "," +
|
||||
data.folders + "," +
|
||||
data.footages + ")")
|
||||
.then(function(result){
|
||||
log.warn("get_items: " + result);
|
||||
return result;
|
||||
});
|
||||
});
|
||||
|
||||
RPC.addRoute('AfterEffects.select_items', function (data) {
|
||||
log.warn('Server called client route "select_items":', data);
|
||||
return runEvalScript("selectItems(" + JSON.stringify(data.items) + ")")
|
||||
.then(function(result){
|
||||
log.warn("select_items: " + result);
|
||||
return result;
|
||||
});
|
||||
});
|
||||
|
||||
|
||||
RPC.addRoute('AfterEffects.get_selected_items', function (data) {
|
||||
log.warn('Server called client route "get_selected_items":', data);
|
||||
return runEvalScript("getSelectedItems(" + data.comps + "," +
|
||||
data.folders + "," +
|
||||
data.footages + ")")
|
||||
.then(function(result){
|
||||
log.warn("get_items: " + result);
|
||||
return result;
|
||||
});
|
||||
});
|
||||
|
||||
RPC.addRoute('AfterEffects.import_file', function (data) {
|
||||
log.warn('Server called client route "import_file":', data);
|
||||
var escapedPath = EscapeStringForJSX(data.path);
|
||||
return runEvalScript("importFile('" + escapedPath +"', " +
|
||||
"'" + data.item_name + "'," +
|
||||
"'" + JSON.stringify(
|
||||
data.import_options) + "')")
|
||||
.then(function(result){
|
||||
log.warn("importFile: " + result);
|
||||
return result;
|
||||
});
|
||||
});
|
||||
|
||||
RPC.addRoute('AfterEffects.replace_item', function (data) {
|
||||
log.warn('Server called client route "replace_item":', data);
|
||||
var escapedPath = EscapeStringForJSX(data.path);
|
||||
return runEvalScript("replaceItem(" + data.item_id + ", " +
|
||||
"'" + escapedPath + "', " +
|
||||
"'" + data.item_name + "')")
|
||||
.then(function(result){
|
||||
log.warn("replaceItem: " + result);
|
||||
return result;
|
||||
});
|
||||
});
|
||||
|
||||
RPC.addRoute('AfterEffects.rename_item', function (data) {
|
||||
log.warn('Server called client route "rename_item":', data);
|
||||
return runEvalScript("renameItem(" + data.item_id + ", " +
|
||||
"'" + data.item_name + "')")
|
||||
.then(function(result){
|
||||
log.warn("renameItem: " + result);
|
||||
return result;
|
||||
});
|
||||
});
|
||||
|
||||
RPC.addRoute('AfterEffects.delete_item', function (data) {
|
||||
log.warn('Server called client route "delete_item":', data);
|
||||
return runEvalScript("deleteItem(" + data.item_id + ")")
|
||||
.then(function(result){
|
||||
log.warn("deleteItem: " + result);
|
||||
return result;
|
||||
});
|
||||
});
|
||||
|
||||
RPC.addRoute('AfterEffects.imprint', function (data) {
|
||||
log.warn('Server called client route "imprint":', data);
|
||||
var escaped = data.payload.replace(/\n/g, "\\n");
|
||||
return runEvalScript("imprint('" + escaped +"')")
|
||||
.then(function(result){
|
||||
log.warn("imprint: " + result);
|
||||
return result;
|
||||
});
|
||||
});
|
||||
|
||||
RPC.addRoute('AfterEffects.set_label_color', function (data) {
|
||||
log.warn('Server called client route "set_label_color":', data);
|
||||
return runEvalScript("setLabelColor(" + data.item_id + "," +
|
||||
data.color_idx + ")")
|
||||
.then(function(result){
|
||||
log.warn("imprint: " + result);
|
||||
return result;
|
||||
});
|
||||
});
|
||||
|
||||
RPC.addRoute('AfterEffects.get_comp_properties', function (data) {
|
||||
log.warn('Server called client route "get_comp_properties":', data);
|
||||
return runEvalScript("getCompProperties(" + data.item_id + ")")
|
||||
.then(function(result){
|
||||
log.warn("get_comp_properties: " + result);
|
||||
return result;
|
||||
});
|
||||
});
|
||||
|
||||
RPC.addRoute('AfterEffects.set_comp_properties', function (data) {
|
||||
log.warn('Server called client route "set_work_area":', data);
|
||||
return runEvalScript("setCompProperties(" + data.item_id + ',' +
|
||||
data.start + ',' +
|
||||
data.duration + ',' +
|
||||
data.frame_rate + ',' +
|
||||
data.width + ',' +
|
||||
data.height + ")")
|
||||
.then(function(result){
|
||||
log.warn("set_comp_properties: " + result);
|
||||
return result;
|
||||
});
|
||||
});
|
||||
|
||||
RPC.addRoute('AfterEffects.saveAs', function (data) {
|
||||
log.warn('Server called client route "saveAs":', data);
|
||||
var escapedPath = EscapeStringForJSX(data.image_path);
|
||||
return runEvalScript("saveAs('" + escapedPath + "', " +
|
||||
data.as_copy + ")")
|
||||
.then(function(result){
|
||||
log.warn("saveAs: " + result);
|
||||
return result;
|
||||
});
|
||||
});
|
||||
|
||||
RPC.addRoute('AfterEffects.save', function (data) {
|
||||
log.warn('Server called client route "save":', data);
|
||||
return runEvalScript("save()")
|
||||
.then(function(result){
|
||||
log.warn("save: " + result);
|
||||
return result;
|
||||
});
|
||||
});
|
||||
|
||||
RPC.addRoute('AfterEffects.get_render_info', function (data) {
|
||||
log.warn('Server called client route "get_render_info":', data);
|
||||
return runEvalScript("getRenderInfo(" + data.comp_id +")")
|
||||
.then(function(result){
|
||||
log.warn("get_render_info: " + result);
|
||||
return result;
|
||||
});
|
||||
});
|
||||
|
||||
RPC.addRoute('AfterEffects.get_audio_url', function (data) {
|
||||
log.warn('Server called client route "get_audio_url":', data);
|
||||
return runEvalScript("getAudioUrlForComp(" + data.item_id + ")")
|
||||
.then(function(result){
|
||||
log.warn("getAudioUrlForComp: " + result);
|
||||
return result;
|
||||
});
|
||||
});
|
||||
|
||||
RPC.addRoute('AfterEffects.import_background', function (data) {
|
||||
log.warn('Server called client route "import_background":', data);
|
||||
return runEvalScript("importBackground(" + data.comp_id + ", " +
|
||||
"'" + data.comp_name + "', " +
|
||||
JSON.stringify(data.files) + ")")
|
||||
.then(function(result){
|
||||
log.warn("importBackground: " + result);
|
||||
return result;
|
||||
});
|
||||
});
|
||||
|
||||
RPC.addRoute('AfterEffects.reload_background', function (data) {
|
||||
log.warn('Server called client route "reload_background":', data);
|
||||
return runEvalScript("reloadBackground(" + data.comp_id + ", " +
|
||||
"'" + data.comp_name + "', " +
|
||||
JSON.stringify(data.files) + ")")
|
||||
.then(function(result){
|
||||
log.warn("reloadBackground: " + result);
|
||||
return result;
|
||||
});
|
||||
});
|
||||
|
||||
RPC.addRoute('AfterEffects.add_item_as_layer', function (data) {
|
||||
log.warn('Server called client route "add_item_as_layer":', data);
|
||||
return runEvalScript("addItemAsLayerToComp(" + data.comp_id + ", " +
|
||||
data.item_id + "," +
|
||||
" null )")
|
||||
.then(function(result){
|
||||
log.warn("addItemAsLayerToComp: " + result);
|
||||
return result;
|
||||
});
|
||||
});
|
||||
|
||||
RPC.addRoute('AfterEffects.add_item_instead_placeholder', function (data) {
|
||||
log.warn('Server called client route "add_item_instead_placeholder":', data);
|
||||
return runEvalScript("addItemInstead(" + data.placeholder_item_id + ", " +
|
||||
data.item_id + ")")
|
||||
.then(function(result){
|
||||
log.warn("add_item_instead_placeholder: " + result);
|
||||
return result;
|
||||
});
|
||||
});
|
||||
|
||||
RPC.addRoute('AfterEffects.render', function (data) {
|
||||
log.warn('Server called client route "render":', data);
|
||||
var escapedPath = EscapeStringForJSX(data.folder_url);
|
||||
return runEvalScript("render('" + escapedPath +"', " + data.comp_id + ")")
|
||||
.then(function(result){
|
||||
log.warn("render: " + result);
|
||||
return result;
|
||||
});
|
||||
});
|
||||
|
||||
RPC.addRoute('AfterEffects.get_extension_version', function (data) {
|
||||
log.warn('Server called client route "get_extension_version":', data);
|
||||
return get_extension_version();
|
||||
});
|
||||
|
||||
RPC.addRoute('AfterEffects.get_app_version', function (data) {
|
||||
log.warn('Server called client route "get_app_version":', data);
|
||||
return runEvalScript("getAppVersion()")
|
||||
.then(function(result){
|
||||
log.warn("get_app_version: " + result);
|
||||
return result;
|
||||
});
|
||||
});
|
||||
|
||||
RPC.addRoute('AfterEffects.add_placeholder', function (data) {
|
||||
log.warn('Server called client route "add_placeholder":', data);
|
||||
var escapedName = EscapeStringForJSX(data.name);
|
||||
return runEvalScript("addPlaceholder('" + escapedName +"',"+
|
||||
data.width + ',' +
|
||||
data.height + ',' +
|
||||
data.fps + ',' +
|
||||
data.duration + ")")
|
||||
.then(function(result){
|
||||
log.warn("add_placeholder: " + result);
|
||||
return result;
|
||||
});
|
||||
});
|
||||
|
||||
RPC.addRoute('AfterEffects.close', function (data) {
|
||||
log.warn('Server called client route "close":', data);
|
||||
return runEvalScript("close()");
|
||||
});
|
||||
|
||||
RPC.addRoute('AfterEffects.print_msg', function (data) {
|
||||
log.warn('Server called client route "print_msg":', data);
|
||||
var escaped_msg = EscapeStringForJSX(data.msg);
|
||||
return runEvalScript("printMsg('" + escaped_msg +"')")
|
||||
.then(function(result){
|
||||
log.warn("print_msg: " + result);
|
||||
return result;
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
/** main entry point **/
|
||||
startUp("WEBSOCKET_URL");
|
||||
|
||||
(function () {
|
||||
'use strict';
|
||||
|
||||
var csInterface = new CSInterface();
|
||||
|
||||
|
||||
function init() {
|
||||
|
||||
themeManager.init();
|
||||
|
||||
$("#btn_test").click(function () {
|
||||
csInterface.evalScript('sayHello()');
|
||||
});
|
||||
}
|
||||
|
||||
init();
|
||||
|
||||
}());
|
||||
|
||||
function EscapeStringForJSX(str){
|
||||
// Replaces:
|
||||
// \ with \\
|
||||
// ' with \'
|
||||
// " with \"
|
||||
// See: https://stackoverflow.com/a/3967927/5285364
|
||||
return str.replace(/\\/g, '\\\\').replace(/'/g, "\\'").replace(/"/g,'\\"');
|
||||
}
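This escaping matters because the routes above splice values like data.path into single-quoted ExtendScript source; a raw backslash or quote would break that literal. A small sketch with a hypothetical Windows path:

var rawPath = 'C:\\projects\\shot_010\\work.aep';              // the characters C:\projects\shot_010\work.aep
var script = "fileOpen('" + EscapeStringForJSX(rawPath) + "')";
// backslashes are doubled and quotes escaped, so ExtendScript parses the literal back to the original path
runEvalScript(script);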
|
||||
|
||||
function runEvalScript(script) {
|
||||
// because of asynchronous nature of functions in jsx
|
||||
// this waits for response
|
||||
return new Promise(function(resolve, reject){
|
||||
csInterface.evalScript(script, resolve);
|
||||
});
|
||||
}
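runEvalScript resolves with whatever string the ExtendScript side hands to the evalScript callback. Since the jsx functions further below return JSON strings, a caller that needs structured data would parse the result, e.g. (the version value is illustrative):

runEvalScript("getAppVersion()")
    .then(function (result) {
        var parsed = JSON.parse(result);      // e.g. {"result": "24.0.0"} per _prepareSingleValue below
        log.warn("AE version: " + parsed.result);
    });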
|
||||
|
|
@ -0,0 +1,128 @@
|
|||
/*jslint vars: true, plusplus: true, devel: true, nomen: true, regexp: true, indent: 4, maxerr: 50 */
|
||||
/*global window, document, CSInterface*/
|
||||
|
||||
|
||||
/*
|
||||
|
||||
Responsible for overwriting CSS at runtime according to CC app
|
||||
settings as defined by the end user.
|
||||
|
||||
*/
|
||||
|
||||
var themeManager = (function () {
|
||||
'use strict';
|
||||
|
||||
/**
|
||||
* Convert the Color object to string in hexadecimal format;
|
||||
*/
|
||||
function toHex(color, delta) {
|
||||
|
||||
function computeValue(value, delta) {
|
||||
var computedValue = !isNaN(delta) ? value + delta : value;
|
||||
if (computedValue < 0) {
|
||||
computedValue = 0;
|
||||
} else if (computedValue > 255) {
|
||||
computedValue = 255;
|
||||
}
|
||||
|
||||
computedValue = Math.floor(computedValue);
|
||||
|
||||
computedValue = computedValue.toString(16);
|
||||
return computedValue.length === 1 ? "0" + computedValue : computedValue;
|
||||
}
|
||||
|
||||
var hex = "";
|
||||
if (color) {
|
||||
hex = computeValue(color.red, delta) + computeValue(color.green, delta) + computeValue(color.blue, delta);
|
||||
}
|
||||
return hex;
|
||||
}
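For orientation, toHex clamps each channel to 0-255, applies the optional delta, and concatenates two-digit hex values (illustrative numbers):

// toHex({red: 83, green: 83, blue: 83})      -> "535353"
// toHex({red: 83, green: 83, blue: 83}, 20)  -> "676767"  (each channel shifted by +20)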
|
||||
|
||||
|
||||
function reverseColor(color, delta) {
|
||||
return toHex({
|
||||
red: Math.abs(255 - color.red),
|
||||
green: Math.abs(255 - color.green),
|
||||
blue: Math.abs(255 - color.blue)
|
||||
},
|
||||
delta);
|
||||
}
|
||||
|
||||
|
||||
function addRule(stylesheetId, selector, rule) {
|
||||
var stylesheet = document.getElementById(stylesheetId);
|
||||
|
||||
if (stylesheet) {
|
||||
stylesheet = stylesheet.sheet;
|
||||
if (stylesheet.addRule) {
|
||||
stylesheet.addRule(selector, rule);
|
||||
} else if (stylesheet.insertRule) {
|
||||
stylesheet.insertRule(selector + ' { ' + rule + ' }', stylesheet.cssRules.length);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
||||
/**
|
||||
* Update the theme with the AppSkinInfo retrieved from the host product.
|
||||
*/
|
||||
function updateThemeWithAppSkinInfo(appSkinInfo) {
|
||||
|
||||
var panelBgColor = appSkinInfo.panelBackgroundColor.color;
|
||||
var bgdColor = toHex(panelBgColor);
|
||||
|
||||
var darkBgdColor = toHex(panelBgColor, 20);
|
||||
|
||||
var fontColor = "F0F0F0";
|
||||
if (panelBgColor.red > 122) {
|
||||
fontColor = "000000";
|
||||
}
|
||||
var lightBgdColor = toHex(panelBgColor, -100);
|
||||
|
||||
var styleId = "hostStyle";
|
||||
|
||||
addRule(styleId, ".hostElt", "background-color:" + "#" + bgdColor);
|
||||
addRule(styleId, ".hostElt", "font-size:" + appSkinInfo.baseFontSize + "px;");
|
||||
addRule(styleId, ".hostElt", "font-family:" + appSkinInfo.baseFontFamily);
|
||||
addRule(styleId, ".hostElt", "color:" + "#" + fontColor);
|
||||
|
||||
addRule(styleId, ".hostBgd", "background-color:" + "#" + bgdColor);
|
||||
addRule(styleId, ".hostBgdDark", "background-color: " + "#" + darkBgdColor);
|
||||
addRule(styleId, ".hostBgdLight", "background-color: " + "#" + lightBgdColor);
|
||||
addRule(styleId, ".hostFontSize", "font-size:" + appSkinInfo.baseFontSize + "px;");
|
||||
addRule(styleId, ".hostFontFamily", "font-family:" + appSkinInfo.baseFontFamily);
|
||||
addRule(styleId, ".hostFontColor", "color:" + "#" + fontColor);
|
||||
|
||||
addRule(styleId, ".hostFont", "font-size:" + appSkinInfo.baseFontSize + "px;");
|
||||
addRule(styleId, ".hostFont", "font-family:" + appSkinInfo.baseFontFamily);
|
||||
addRule(styleId, ".hostFont", "color:" + "#" + fontColor);
|
||||
|
||||
addRule(styleId, ".hostButton", "background-color:" + "#" + darkBgdColor);
|
||||
addRule(styleId, ".hostButton:hover", "background-color:" + "#" + bgdColor);
|
||||
addRule(styleId, ".hostButton:active", "background-color:" + "#" + darkBgdColor);
|
||||
addRule(styleId, ".hostButton", "border-color: " + "#" + lightBgdColor);
|
||||
|
||||
}
|
||||
|
||||
|
||||
function onAppThemeColorChanged(event) {
|
||||
var skinInfo = JSON.parse(window.__adobe_cep__.getHostEnvironment()).appSkinInfo;
|
||||
updateThemeWithAppSkinInfo(skinInfo);
|
||||
}
|
||||
|
||||
|
||||
function init() {
|
||||
|
||||
var csInterface = new CSInterface();
|
||||
|
||||
updateThemeWithAppSkinInfo(csInterface.hostEnvironment.appSkinInfo);
|
||||
|
||||
csInterface.addEventListener(CSInterface.THEME_COLOR_CHANGED_EVENT, onAppThemeColorChanged);
|
||||
}
|
||||
|
||||
return {
|
||||
init: init
|
||||
};
|
||||
|
||||
}());
|
||||
|
|
@ -0,0 +1,946 @@
|
|||
/*jslint vars: true, plusplus: true, devel: true, nomen: true, regexp: true,
|
||||
indent: 4, maxerr: 50 */
|
||||
/*global $, Folder*/
|
||||
//@include "../js/libs/json.js"
|
||||
|
||||
/* All public API function should return JSON! */
|
||||
|
||||
app.preferences.savePrefAsBool("General Section", "Show Welcome Screen", false);
|
||||
|
||||
if(!Array.prototype.indexOf) {
|
||||
Array.prototype.indexOf = function ( item ) {
|
||||
var index = 0, length = this.length;
|
||||
for ( ; index < length; index++ ) {
|
||||
if ( this[index] === item )
|
||||
return index;
|
||||
}
|
||||
return -1;
|
||||
};
|
||||
}
|
||||
|
||||
function sayHello(){
|
||||
alert("hello from ExtendScript");
|
||||
}
|
||||
|
||||
function getEnv(variable){
|
||||
return $.getenv(variable);
|
||||
}
|
||||
|
||||
function getMetadata(){
|
||||
/**
|
||||
* Returns payload in 'Label' field of project's metadata
|
||||
*
|
||||
**/
|
||||
if (ExternalObject.AdobeXMPScript === undefined){
|
||||
ExternalObject.AdobeXMPScript =
|
||||
new ExternalObject('lib:AdobeXMPScript');
|
||||
}
|
||||
|
||||
var proj = app.project;
|
||||
var meta = new XMPMeta(app.project.xmpPacket);
|
||||
var schemaNS = XMPMeta.getNamespaceURI("xmp");
|
||||
var label = "xmp:Label";
|
||||
|
||||
if (meta.doesPropertyExist(schemaNS, label)){
|
||||
var prop = meta.getProperty(schemaNS, label);
|
||||
return prop.value;
|
||||
}
|
||||
|
||||
return _prepareSingleValue([]);
|
||||
|
||||
}
|
||||
|
||||
function imprint(payload){
|
||||
/**
|
||||
* Stores payload in 'Label' field of project's metadata
|
||||
*
|
||||
* Args:
|
||||
* payload (string): json content
|
||||
*/
|
||||
if (ExternalObject.AdobeXMPScript === undefined){
|
||||
ExternalObject.AdobeXMPScript =
|
||||
new ExternalObject('lib:AdobeXMPScript');
|
||||
}
|
||||
|
||||
var proj = app.project;
|
||||
var meta = new XMPMeta(app.project.xmpPacket);
|
||||
var schemaNS = XMPMeta.getNamespaceURI("xmp");
|
||||
var label = "xmp:Label";
|
||||
|
||||
meta.setProperty(schemaNS, label, payload);
|
||||
|
||||
app.project.xmpPacket = meta.serialize();
|
||||
|
||||
}
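getMetadata and imprint are two halves of the same mechanism: a JSON payload string is stored in the project's xmp:Label property and read back later. A small round-trip sketch (the payload content is illustrative):

imprint('{"containers": []}');          // stored into xmp:Label and serialized back to the project
var stored = getMetadata();             // returns the same string on the next read
alert(stored);                          // -> {"containers": []}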
|
||||
|
||||
|
||||
function fileOpen(path){
|
||||
/**
|
||||
* Opens (project) file on 'path'
|
||||
*/
|
||||
fp = new File(path);
|
||||
return _prepareSingleValue(app.open(fp))
|
||||
}
|
||||
|
||||
function getActiveDocumentName(){
|
||||
/**
|
||||
* Returns file name of active document
|
||||
* */
|
||||
var file = app.project.file;
|
||||
|
||||
if (file){
|
||||
return _prepareSingleValue(file.name)
|
||||
}
|
||||
|
||||
return _prepareError("No file open currently");
|
||||
}
|
||||
|
||||
function getActiveDocumentFullName(){
|
||||
/**
|
||||
* Returns absolute path to current project
|
||||
* */
|
||||
var file = app.project.file;
|
||||
|
||||
if (file){
|
||||
var f = new File(file.fullName);
|
||||
var path = f.fsName;
|
||||
f.close();
|
||||
|
||||
return _prepareSingleValue(path)
|
||||
}
|
||||
|
||||
return _prepareError("No file open currently");
|
||||
}
|
||||
|
||||
|
||||
function addItem(name, item_type){
|
||||
/**
|
||||
* Adds comp or folder to project items.
|
||||
*
|
||||
* Could be called when creating publishable instance to prepare
|
||||
* composition (and render queue).
|
||||
*
|
||||
* Args:
|
||||
* name (str): composition name
|
||||
* item_type (str): COMP|FOLDER
|
||||
* Returns:
|
||||
* SingleItemValue: eg {"result": VALUE}
|
||||
*/
|
||||
if (item_type == "COMP"){
|
||||
// dummy values, will be rewritten later
|
||||
item = app.project.items.addComp(name, 1920, 1060, 1, 10, 25);
|
||||
}else if (item_type == "FOLDER"){
|
||||
item = app.project.items.addFolder(name);
|
||||
}else{
|
||||
return _prepareError("Only 'COMP' or 'FOLDER' can be created");
|
||||
}
|
||||
return _prepareSingleValue(item.id);
|
||||
|
||||
}
|
||||
|
||||
function getItems(comps, folders, footages){
|
||||
/**
|
||||
* Returns JSON representation of compositions and
|
||||
* if 'collectLayers' then layers in comps too.
|
||||
*
|
||||
* Args:
|
||||
* comps (bool): return selected compositions
|
||||
* folders (bool): return folders
|
||||
* footages (bool): return FootageItem
|
||||
* Returns:
|
||||
* (list) of JSON items
|
||||
*/
|
||||
var items = []
|
||||
for (i = 1; i <= app.project.items.length; ++i){
|
||||
var item = app.project.items[i];
|
||||
if (!item){
|
||||
continue;
|
||||
}
|
||||
var ret = _getItem(item, comps, folders, footages);
|
||||
if (ret){
|
||||
items.push(ret);
|
||||
}
|
||||
}
|
||||
return '[' + items.join() + ']';
|
||||
|
||||
}
|
||||
|
||||
function selectItems(items){
|
||||
/**
|
||||
* Select all items from `items`, deselect other.
|
||||
*
|
||||
* Args:
|
||||
* items (list)
|
||||
*/
|
||||
for (i = 1; i <= app.project.items.length; ++i){
|
||||
item = app.project.items[i];
|
||||
if (items.indexOf(item.id) > -1){
|
||||
item.selected = true;
|
||||
}else{
|
||||
item.selected = false;
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
function getSelectedItems(comps, folders, footages){
|
||||
/**
|
||||
* Returns list of selected items from Project menu
|
||||
*
|
||||
* Args:
|
||||
* comps (bool): return selected compositions
|
||||
* folders (bool): return folders
|
||||
* footages (bool): return FootageItem
|
||||
* Returns:
|
||||
* (list) of JSON items
|
||||
*/
|
||||
var items = []
|
||||
for (i = 0; i < app.project.selection.length; ++i){
|
||||
var item = app.project.selection[i];
|
||||
if (!item){
|
||||
continue;
|
||||
}
|
||||
var ret = _getItem(item, comps, folders, footages);
|
||||
if (ret){
|
||||
items.push(ret);
|
||||
}
|
||||
}
|
||||
return '[' + items.join() + ']';
|
||||
}
|
||||
|
||||
function _getItem(item, comps, folders, footages){
|
||||
/**
|
||||
* Auxiliary function as project items and selections
|
||||
* are indexed in different way :/
|
||||
* Refactor
|
||||
*/
|
||||
var item_type = '';
|
||||
var path = '';
|
||||
var containing_comps = [];
|
||||
if (item instanceof FolderItem){
|
||||
item_type = 'folder';
|
||||
if (!folders){
|
||||
return "{}";
|
||||
}
|
||||
}
|
||||
if (item instanceof FootageItem){
|
||||
if (!footages){
|
||||
return "{}";
|
||||
}
|
||||
item_type = 'footage';
|
||||
if (item.file){
|
||||
path = item.file.fsName;
|
||||
}
|
||||
if (item.usedIn){
|
||||
for (j = 0; j < item.usedIn.length; ++j){
|
||||
containing_comps.push(item.usedIn[j].id);
|
||||
}
|
||||
}
|
||||
}
|
||||
if (item instanceof CompItem){
|
||||
item_type = 'comp';
|
||||
if (!comps){
|
||||
return "{}";
|
||||
}
|
||||
}
|
||||
|
||||
var item = {"name": item.name,
|
||||
"id": item.id,
|
||||
"type": item_type,
|
||||
"path": path,
|
||||
"containing_comps": containing_comps};
|
||||
return JSON.stringify(item);
|
||||
}
|
||||
|
||||
function importFile(path, item_name, import_options){
|
||||
/**
|
||||
* Imports file (image tested for now) as a FootageItem.
|
||||
* Creates new composition
|
||||
*
|
||||
* Args:
|
||||
* path (string): absolute path to image file
|
||||
* item_name (string): label for composition
|
||||
* Returns:
|
||||
* JSON {name, id}
|
||||
*/
|
||||
var comp;
|
||||
var ret = {};
|
||||
try{
|
||||
import_options = JSON.parse(import_options);
|
||||
} catch (e){
|
||||
return _prepareError("Couldn't parse import options " + import_options);
|
||||
}
|
||||
|
||||
app.beginUndoGroup("Import File");
|
||||
fp = new File(path);
|
||||
if (fp.exists){
|
||||
try {
|
||||
im_opt = new ImportOptions(fp);
|
||||
importAsType = import_options["ImportAsType"];
|
||||
|
||||
if ('ImportAsType' in import_options){ // refactor
|
||||
if (importAsType.indexOf('COMP') >= 0){
|
||||
im_opt.importAs = ImportAsType.COMP;
|
||||
}
|
||||
if (importAsType.indexOf('FOOTAGE') >= 0){
|
||||
im_opt.importAs = ImportAsType.FOOTAGE;
|
||||
}
|
||||
if (importAsType.indexOf('COMP_CROPPED_LAYERS') >= 0){
|
||||
im_opt.importAs = ImportAsType.COMP_CROPPED_LAYERS;
|
||||
}
|
||||
if (importAsType.indexOf('PROJECT') >= 0){
|
||||
im_opt.importAs = ImportAsType.PROJECT;
|
||||
}
|
||||
|
||||
}
|
||||
if ('sequence' in import_options){
|
||||
im_opt.sequence = true;
|
||||
}
|
||||
|
||||
comp = app.project.importFile(im_opt);
|
||||
|
||||
if (app.project.selection.length == 2 &&
|
||||
app.project.selection[0] instanceof FolderItem){
|
||||
comp.parentFolder = app.project.selection[0]
|
||||
}
|
||||
} catch (error) {
|
||||
return _prepareError(error.toString() + " " + path);
|
||||
} finally {
|
||||
fp.close();
|
||||
}
|
||||
}else{
|
||||
return _prepareError("File " + path + " not found.");
|
||||
}
|
||||
if (comp){
|
||||
comp.name = item_name;
|
||||
comp.label = 9; // Green
|
||||
ret = {"name": comp.name, "id": comp.id}
|
||||
}
|
||||
app.endUndoGroup();
|
||||
|
||||
return JSON.stringify(ret);
|
||||
}
|
||||
|
||||
function setLabelColor(comp_id, color_idx){
|
||||
/**
|
||||
* Set item_id label to 'color_idx' color
|
||||
* Args:
|
||||
* item_id (int): item id
|
||||
* color_idx (int): 0-16 index from Label
|
||||
*/
|
||||
var item = app.project.itemByID(comp_id);
|
||||
if (item){
|
||||
item.label = color_idx;
|
||||
}else{
|
||||
return _prepareError("There is no composition with "+ comp_id);
|
||||
}
|
||||
}
|
||||
|
||||
function replaceItem(item_id, path, item_name){
|
||||
/**
|
||||
* Replaces loaded file with new file and updates name
|
||||
*
|
||||
* Args:
|
||||
* item_id (int): id of composition, not a index!
|
||||
* path (string): absolute path to new file
|
||||
* item_name (string): new composition name
|
||||
*/
|
||||
app.beginUndoGroup("Replace File");
|
||||
|
||||
fp = new File(path);
|
||||
if (!fp.exists){
|
||||
return _prepareError("File " + path + " not found.");
|
||||
}
|
||||
var item = app.project.itemByID(item_id);
|
||||
if (item){
|
||||
try{
|
||||
if (isFileSequence(item)) {
|
||||
item.replaceWithSequence(fp, false);
|
||||
}else{
|
||||
item.replace(fp);
|
||||
}
|
||||
|
||||
item.name = item_name;
|
||||
} catch (error) {
|
||||
return _prepareError(error.toString() + path);
|
||||
} finally {
|
||||
fp.close();
|
||||
}
|
||||
}else{
|
||||
return _prepareError("There is no item with "+ item_id);
|
||||
}
|
||||
app.endUndoGroup();
|
||||
}
|
||||
|
||||
function renameItem(item_id, new_name){
|
||||
/**
|
||||
* Renames item with 'item_id' to 'new_name'
|
||||
*
|
||||
* Args:
|
||||
* item_id (int): id to search item
|
||||
* new_name (str)
|
||||
*/
|
||||
var item = app.project.itemByID(item_id);
|
||||
if (item){
|
||||
item.name = new_name;
|
||||
}else{
|
||||
return _prepareError("There is no composition with "+ comp_id);
|
||||
}
|
||||
}
|
||||
|
||||
function deleteItem(item_id){
|
||||
/**
|
||||
* Delete any 'item_id'
|
||||
*
|
||||
* Not restricted only to comp, it could delete
|
||||
* any item with 'id'
|
||||
*/
|
||||
var item = app.project.itemByID(item_id);
|
||||
if (item){
|
||||
item.remove();
|
||||
}else{
|
||||
return _prepareError("There is no composition with "+ comp_id);
|
||||
}
|
||||
}
|
||||
|
||||
function getCompProperties(comp_id){
|
||||
/**
|
||||
* Returns information about composition - area that will be
|
||||
* rendered.
|
||||
*
|
||||
* Returns
|
||||
* (dict)
|
||||
*/
|
||||
var comp = app.project.itemByID(comp_id);
|
||||
if (!comp){
|
||||
return _prepareError("There is no composition with "+ comp_id);
|
||||
}
|
||||
|
||||
return JSON.stringify({
|
||||
"id": comp.id,
|
||||
"name": comp.name,
|
||||
"frameStart": comp.displayStartFrame,
|
||||
"framesDuration": comp.duration * comp.frameRate,
|
||||
"frameRate": comp.frameRate,
|
||||
"width": comp.width,
|
||||
"height": comp.height});
|
||||
}
|
||||
|
||||
function setCompProperties(comp_id, frameStart, framesCount, frameRate,
|
||||
width, height){
|
||||
/**
|
||||
* Sets work area info from outside (from Ftrack via OpenPype)
|
||||
*/
|
||||
var comp = app.project.itemByID(comp_id);
|
||||
if (!comp){
|
||||
return _prepareError("There is no composition with "+ comp_id);
|
||||
}
|
||||
|
||||
app.beginUndoGroup('change comp properties');
|
||||
if (frameStart && framesCount && frameRate){
|
||||
comp.displayStartFrame = frameStart;
|
||||
comp.duration = framesCount / frameRate;
|
||||
comp.frameRate = frameRate;
|
||||
}
|
||||
if (width && height){
|
||||
var widthOld = comp.width;
|
||||
var widthNew = width;
|
||||
var widthDelta = widthNew - widthOld;
|
||||
|
||||
var heightOld = comp.height;
|
||||
var heightNew = height;
|
||||
var heightDelta = heightNew - heightOld;
|
||||
|
||||
var offset = [widthDelta / 2, heightDelta / 2];
|
||||
|
||||
comp.width = widthNew;
|
||||
comp.height = heightNew;
|
||||
|
||||
for (var i = 1, il = comp.numLayers; i <= il; i++) {
|
||||
var layer = comp.layer(i);
|
||||
var positionProperty = layer.property('ADBE Transform Group').property('ADBE Position');
|
||||
|
||||
if (positionProperty.numKeys > 0) {
|
||||
for (var j = 1, jl = positionProperty.numKeys; j <= jl; j++) {
|
||||
var keyValue = positionProperty.keyValue(j);
|
||||
positionProperty.setValueAtKey(j, [keyValue[0] + offset[0], keyValue[1] + offset[1]].concat(keyValue.slice(2)));
|
||||
}
|
||||
} else {
|
||||
var positionValue = positionProperty.value;
|
||||
positionProperty.setValue([positionValue[0] + offset[0], positionValue[1] + offset[1]].concat(positionValue.slice(2)));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
app.endUndoGroup();
|
||||
}
|
||||
|
||||
function save(){
|
||||
/**
|
||||
* Saves current project
|
||||
*/
|
||||
app.project.save(); //TODO path is wrong, File instead
|
||||
}
|
||||
|
||||
function saveAs(path){
|
||||
/**
|
||||
* Saves current project as 'path'
|
||||
* */
|
||||
app.project.save(fp = new File(path));
|
||||
}
|
||||
|
||||
function getRenderInfo(comp_id){
|
||||
/***
|
||||
Get info from render queue.
|
||||
Currently pulls only file name to parse extension and
|
||||
if it is sequence in Python
|
||||
Args:
|
||||
comp_id (int): id of composition
|
||||
Return:
|
||||
(list) [{file_name:"xx.png", width:00, height:00}]
|
||||
**/
|
||||
var item = app.project.itemByID(comp_id);
|
||||
if (!item){
|
||||
return _prepareError("Composition with '" + comp_id + "' wasn't found! Recreate publishable instance(s)")
|
||||
}
|
||||
|
||||
var comp_name = item.name;
|
||||
var output_metadata = []
|
||||
try{
|
||||
// render_item.duplicate() should create new item on renderQueue
|
||||
// BUT it works only sometimes, there are some weird synchronization issues
|
||||
// this method will be called always before render, so prepare items here
|
||||
// for render to spare the hassle
|
||||
for (i = 1; i <= app.project.renderQueue.numItems; ++i){
|
||||
var render_item = app.project.renderQueue.item(i);
|
||||
if (render_item.comp.id != comp_id){
|
||||
continue;
|
||||
}
|
||||
|
||||
if (render_item.status == RQItemStatus.DONE){
|
||||
render_item.duplicate(); // create new, cannot change status if DONE
|
||||
render_item.remove(); // remove existing to limit duplications
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
// properly validate as `numItems` won't change magically
|
||||
var comp_id_count = 0;
|
||||
for (i = 1; i <= app.project.renderQueue.numItems; ++i){
|
||||
var render_item = app.project.renderQueue.item(i);
|
||||
if (render_item.comp.id != comp_id){
|
||||
continue;
|
||||
}
|
||||
comp_id_count += 1;
|
||||
var item = render_item.outputModule(1);
|
||||
|
||||
for (j = 1; j<= render_item.numOutputModules; ++j){
|
||||
var file_url = item.file.toString();
|
||||
output_metadata.push(
|
||||
JSON.stringify({
|
||||
"file_name": file_url,
|
||||
"width": render_item.comp.width,
|
||||
"height": render_item.comp.height
|
||||
})
|
||||
);
|
||||
}
|
||||
}
|
||||
} catch (error) {
|
||||
return _prepareError("There is no render queue, create one");
|
||||
}
|
||||
|
||||
if (comp_id_count > 1){
|
||||
return _prepareError("There cannot be more items in Render Queue for '" + comp_name + "'!")
|
||||
}
|
||||
|
||||
if (comp_id_count == 0){
|
||||
return _prepareError("There is no item in Render Queue for '" + comp_name + "'! Add composition to Render Queue.")
|
||||
}
|
||||
|
||||
return '[' + output_metadata.join() + ']';
|
||||
}
|
||||
|
||||
function getAudioUrlForComp(comp_id){
|
||||
/**
|
||||
* Searches composition for audio layer
|
||||
*
|
||||
* Only single AVLayer is expected!
|
||||
* Used for collecting Audio
|
||||
*
|
||||
* Args:
|
||||
* comp_id (int): id of composition
|
||||
* Return:
|
||||
* (str) with url to audio content
|
||||
*/
|
||||
var item = app.project.itemByID(comp_id);
|
||||
if (item){
|
||||
for (i = 1; i <= item.numLayers; ++i){
|
||||
var layer = item.layers[i];
|
||||
if (layer instanceof AVLayer){
|
||||
if (layer.hasAudio){
|
||||
source_url = layer.source.file.fsName.toString()
|
||||
return _prepareSingleValue(source_url);
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
}else{
|
||||
return _prepareError("There is no composition with "+ comp_id);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
function addItemAsLayerToComp(comp_id, item_id, found_comp){
|
||||
/**
|
||||
* Adds already imported FootageItem ('item_id') as a new
|
||||
* layer to composition ('comp_id').
|
||||
*
|
||||
* Args:
|
||||
* comp_id (int): id of target composition
|
||||
* item_id (int): FootageItem.id
|
||||
* found_comp (CompItem, optional): to limit quering if
|
||||
* comp already found previously
|
||||
*/
|
||||
var comp = found_comp || app.project.itemByID(comp_id);
|
||||
if (comp){
|
||||
item = app.project.itemByID(item_id);
|
||||
if (item){
|
||||
comp.layers.add(item);
|
||||
}else{
|
||||
return _prepareError("There is no item with " + item_id);
|
||||
}
|
||||
}else{
|
||||
return _prepareError("There is no composition with "+ comp_id);
|
||||
}
|
||||
}
|
||||
|
||||
function importBackground(comp_id, composition_name, files_to_import){
|
||||
/**
|
||||
* Imports backgrounds images to existing or new composition.
|
||||
*
|
||||
* If comp_id is not provided, new composition is created, basic
|
||||
* values (width, heights, frameRatio) takes from first imported
|
||||
* image.
|
||||
*
|
||||
* Args:
|
||||
* comp_id (int): id of existing composition (null if new)
|
||||
* composition_name (str): used when new composition
|
||||
* files_to_import (list): list of absolute paths to import and
|
||||
* add as layers
|
||||
*
|
||||
* Returns:
|
||||
* (str): json representation (id, name, members)
|
||||
*/
|
||||
var comp;
|
||||
var folder;
|
||||
var imported_ids = [];
|
||||
if (comp_id){
|
||||
comp = app.project.itemByID(comp_id);
|
||||
folder = comp.parentFolder;
|
||||
}else{
|
||||
if (app.project.selection.length > 1){
|
||||
return _prepareError(
|
||||
"Too many items selected, select only target composition!");
|
||||
}else{
|
||||
selected_item = app.project.activeItem;
|
||||
if (selected_item instanceof Folder){
|
||||
comp = selected_item;
|
||||
folder = selected_item;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (files_to_import){
|
||||
for (i = 0; i < files_to_import.length; ++i){
|
||||
item = _importItem(files_to_import[i]);
|
||||
if (!item){
|
||||
return _prepareError(
|
||||
"No item for " + item_json["id"] +
|
||||
". Import background failed.")
|
||||
}
|
||||
if (!comp){
|
||||
folder = app.project.items.addFolder(composition_name);
|
||||
imported_ids.push(folder.id);
|
||||
comp = app.project.items.addComp(composition_name, item.width,
|
||||
item.height, item.pixelAspect,
|
||||
1, 26.7); // hardcode defaults
|
||||
imported_ids.push(comp.id);
|
||||
comp.parentFolder = folder;
|
||||
}
|
||||
imported_ids.push(item.id)
|
||||
item.parentFolder = folder;
|
||||
|
||||
addItemAsLayerToComp(comp.id, item.id, comp);
|
||||
}
|
||||
}
|
||||
var item = {"name": comp.name,
|
||||
"id": folder.id,
|
||||
"members": imported_ids};
|
||||
return JSON.stringify(item);
|
||||
}
|
||||
|
||||
function reloadBackground(comp_id, composition_name, files_to_import){
|
||||
/**
|
||||
* Reloads existing composition.
|
||||
*
|
||||
* It deletes complete composition with encompassing folder, recreates
|
||||
* from scratch via 'importBackground' functionality.
|
||||
*
|
||||
* Args:
|
||||
* comp_id (int): id of existing composition (null if new)
|
||||
* composition_name (str): used when new composition
|
||||
* files_to_import (list): list of absolute paths to import and
|
||||
* add as layers
|
||||
*
|
||||
* Returns:
|
||||
* (str): json representation (id, name, members)
|
||||
*
|
||||
*/
|
||||
var imported_ids = []; // keep track of members of composition
|
||||
comp = app.project.itemByID(comp_id);
|
||||
folder = comp.parentFolder;
|
||||
if (folder){
|
||||
renameItem(folder.id, composition_name);
|
||||
imported_ids.push(folder.id);
|
||||
}
|
||||
if (comp){
|
||||
renameItem(comp.id, composition_name);
|
||||
imported_ids.push(comp.id);
|
||||
}
|
||||
|
||||
var existing_layer_names = [];
|
||||
var existing_layer_ids = []; // because ExtendScript doesn't have keys()
|
||||
for (i = 1; i <= folder.items.length; ++i){
|
||||
layer = folder.items[i];
|
||||
// because comp.layers[i] doesn't have 'id' accessible
|
||||
if (layer instanceof CompItem){
|
||||
continue;
|
||||
}
|
||||
existing_layer_names.push(layer.name);
|
||||
existing_layer_ids.push(layer.id);
|
||||
}
|
||||
|
||||
var new_filenames = [];
|
||||
if (files_to_import){
|
||||
for (i = 0; i < files_to_import.length; ++i){
|
||||
file_name = _get_file_name(files_to_import[i]);
|
||||
new_filenames.push(file_name);
|
||||
|
||||
idx = existing_layer_names.indexOf(file_name);
|
||||
if (idx >= 0){ // update
|
||||
var layer_id = existing_layer_ids[idx];
|
||||
replaceItem(layer_id, files_to_import[i], file_name);
|
||||
imported_ids.push(layer_id);
|
||||
}else{ // new layer
|
||||
item = _importItem(files_to_import[i]);
|
||||
if (!item){
|
||||
return _prepareError(
|
||||
"No item for " + files_to_import[i] +
|
||||
". Reload background failed.");
|
||||
}
|
||||
imported_ids.push(item.id);
|
||||
item.parentFolder = folder;
|
||||
addItemAsLayerToComp(comp.id, item.id, comp);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
_delete_obsolete_items(folder, new_filenames);
|
||||
|
||||
var item = {"name": comp.name,
|
||||
"id": folder.id,
|
||||
"members": imported_ids};
|
||||
|
||||
return JSON.stringify(item);
|
||||
}
|
||||
|
||||
function _get_file_name(file_url){
|
||||
/**
|
||||
* Returns file name without extension from 'file_url'
|
||||
*
|
||||
* Args:
|
||||
* file_url (str): full absolute url
|
||||
* Returns:
|
||||
* (str)
|
||||
*/
|
||||
fp = new File(file_url);
|
||||
file_name = fp.name.substring(0, fp.name.lastIndexOf("."));
|
||||
return file_name;
|
||||
}
|
||||
|
||||
function _delete_obsolete_items(folder, new_filenames){
|
||||
/***
|
||||
* Goes through 'folder' and removes layers not in new
|
||||
* background
|
||||
*
|
||||
* Args:
|
||||
* folder (FolderItem)
|
||||
* new_filenames (array): list of layer names in new bg
|
||||
*/
|
||||
// remove items in old, but not in new
|
||||
delete_ids = []
|
||||
for (i = 1; i <= folder.items.length; ++i){
|
||||
layer = folder.items[i];
|
||||
// because comp.layers[i] doesn't have 'id' accessible
|
||||
if (layer instanceof CompItem){
|
||||
continue;
|
||||
}
|
||||
if (new_filenames.indexOf(layer.name) < 0){
|
||||
delete_ids.push(layer.id);
|
||||
}
|
||||
}
|
||||
for (i = 0; i < delete_ids.length; ++i){
|
||||
deleteItem(delete_ids[i]);
|
||||
}
|
||||
}
|
||||
|
||||
function _importItem(file_url){
|
||||
/**
|
||||
* Imports 'file_url' as new FootageItem
|
||||
*
|
||||
* Args:
|
||||
* file_url (str): file url with content
|
||||
* Returns:
|
||||
* (FootageItem)
|
||||
*/
|
||||
file_name = _get_file_name(file_url);
|
||||
|
||||
//importFile prepared previously to return json
|
||||
item_json = importFile(file_url, file_name, JSON.stringify({"ImportAsType":"FOOTAGE"}));
|
||||
item_json = JSON.parse(item_json);
|
||||
item = app.project.itemByID(item_json["id"]);
|
||||
|
||||
return item;
|
||||
}
|
||||
|
||||
function isFileSequence (item){
|
||||
/**
|
||||
* Check that item is a recognizable sequence
|
||||
*/
|
||||
if (item instanceof FootageItem && item.mainSource instanceof FileSource && !(item.mainSource.isStill) && item.hasVideo){
|
||||
var extname = item.mainSource.file.fsName.split('.').pop();
|
||||
|
||||
return extname.match(new RegExp("(ai|bmp|bw|cin|cr2|crw|dcr|dng|dib|dpx|eps|erf|exr|gif|hdr|ico|icb|iff|jpe|jpeg|jpg|mos|mrw|nef|orf|pbm|pef|pct|pcx|pdf|pic|pict|png|ps|psd|pxr|raf|raw|rgb|rgbe|rla|rle|rpf|sgi|srf|tdi|tga|tif|tiff|vda|vst|x3f|xyze)", "i")) !== null;
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
function render(target_folder, comp_id){
|
||||
var out_dir = new Folder(target_folder);
|
||||
out_dir = out_dir.fsName;
|
||||
for (i = 1; i <= app.project.renderQueue.numItems; ++i){
|
||||
var render_item = app.project.renderQueue.item(i);
|
||||
var composition = render_item.comp;
|
||||
if (composition.id == comp_id){
|
||||
if (render_item.status == RQItemStatus.DONE){
|
||||
var new_item = render_item.duplicate();
|
||||
render_item.remove();
|
||||
render_item = new_item;
|
||||
}
|
||||
|
||||
render_item.render = true;
|
||||
|
||||
var om1 = app.project.renderQueue.item(i).outputModule(1);
|
||||
var file_name = File.decode( om1.file.name ).replace('℗', ''); // Name contains special character, space?
|
||||
|
||||
var omItem1_settable_str = app.project.renderQueue.item(i).outputModule(1).getSettings( GetSettingsFormat.STRING_SETTABLE );
|
||||
|
||||
var targetFolder = new Folder(target_folder);
|
||||
if (!targetFolder.exists) {
|
||||
targetFolder.create();
|
||||
}
|
||||
|
||||
om1.file = new File(targetFolder.fsName + '/' + file_name);
|
||||
}else{
|
||||
if (render_item.status != RQItemStatus.DONE){
|
||||
render_item.render = false;
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
app.beginSuppressDialogs();
|
||||
app.project.renderQueue.render();
|
||||
app.endSuppressDialogs(false);
|
||||
}
|
||||
|
||||
function close(){
|
||||
app.project.close(CloseOptions.DO_NOT_SAVE_CHANGES);
|
||||
app.quit();
|
||||
}
|
||||
|
||||
function getAppVersion(){
|
||||
return _prepareSingleValue(app.version);
|
||||
}
|
||||
|
||||
function printMsg(msg){
|
||||
alert(msg);
|
||||
}
|
||||
|
||||
function addPlaceholder(name, width, height, fps, duration){
|
||||
/** Add AE PlaceholderItem to Project list.
|
||||
*
|
||||
* PlaceholderItem chosen as it doesn't require existing file and
|
||||
* might potentially allow nice functionality in the future.
|
||||
*
|
||||
*/
|
||||
app.beginUndoGroup('change comp properties');
|
||||
try{
|
||||
item = app.project.importPlaceholder(name, width, height,
|
||||
fps, duration);
|
||||
|
||||
return _prepareSingleValue(item.id);
|
||||
}catch (error) {
|
||||
writeLn(_prepareError("Cannot add placeholder " + error.toString()));
|
||||
}
|
||||
app.endUndoGroup();
|
||||
}
|
||||
|
||||
function addItemInstead(placeholder_item_id, item_id){
|
||||
/** Add new loaded item in place of load placeholder.
|
||||
*
|
||||
* Each placeholder could be placed multiple times into multiple
|
||||
* composition. This loops through all compositions and
|
||||
* places loaded item under placeholder.
|
||||
* Placeholder item gets deleted later separately according
|
||||
* to configuration in Settings.
|
||||
*
|
||||
* Args:
|
||||
* placeholder_item_id (int)
|
||||
* item_id (int)
|
||||
*/
|
||||
var item = app.project.itemByID(item_id);
|
||||
if (!item){
|
||||
return _prepareError("There is no item with "+ item_id);
|
||||
}
|
||||
|
||||
app.beginUndoGroup('Add loaded items');
|
||||
for (i = 1; i <= app.project.items.length; ++i){
|
||||
var comp = app.project.items[i];
|
||||
if (!(comp instanceof CompItem)){
|
||||
continue
|
||||
}
|
||||
|
||||
var j = 1;
|
||||
while (j <= comp.numLayers) {
|
||||
var layer = comp.layer(j);
|
||||
var layer_source = layer.source;
|
||||
if (layer_source && layer_source.id == placeholder_item_id){
|
||||
var new_layer = comp.layers.add(item);
|
||||
new_layer.moveAfter(layer);
|
||||
// copy all(?) properties to new layer
|
||||
layer.property("ADBE Transform Group").copyToComp(new_layer);
|
||||
j = j + 1;
|
||||
}
|
||||
j = j + 1;
|
||||
}
|
||||
}
|
||||
app.endUndoGroup();
|
||||
}
|
||||
|
||||
function _prepareSingleValue(value){
|
||||
return JSON.stringify({"result": value})
|
||||
}
|
||||
function _prepareError(error_msg){
|
||||
return JSON.stringify({"error": error_msg})
|
||||
}
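These two helpers define the return convention for the public API in this file: successes are wrapped as a JSON string {"result": ...}, failures as {"error": "..."}, which the panel side can parse uniformly. For illustration:

// _prepareSingleValue(42)            -> '{"result": 42}'
// _prepareError("File not found.")   -> '{"error": "File not found."}'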
|
||||
389
client/ayon_core/hosts/aftereffects/api/launch_logic.py
Normal file
|
|
@ -0,0 +1,389 @@
|
|||
import os
|
||||
import sys
|
||||
import subprocess
|
||||
import collections
|
||||
import logging
|
||||
import asyncio
|
||||
import functools
|
||||
import traceback
|
||||
|
||||
|
||||
from wsrpc_aiohttp import (
|
||||
WebSocketRoute,
|
||||
WebSocketAsync
|
||||
)
|
||||
|
||||
from qtpy import QtCore
|
||||
|
||||
from ayon_core.lib import Logger
|
||||
from ayon_core.tests.lib import is_in_tests
|
||||
from ayon_core.pipeline import install_host, legacy_io
|
||||
from ayon_core.addon import AddonsManager
|
||||
from ayon_core.tools.utils import host_tools, get_ayon_qt_app
|
||||
from ayon_core.tools.adobe_webserver.app import WebServerTool
|
||||
|
||||
from .ws_stub import get_stub
|
||||
from .lib import set_settings
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
log.setLevel(logging.DEBUG)
|
||||
|
||||
|
||||
def safe_excepthook(*args):
|
||||
traceback.print_exception(*args)
|
||||
|
||||
|
||||
def main(*subprocess_args):
|
||||
"""Main entrypoint to AE launching, called from pre hook."""
|
||||
sys.excepthook = safe_excepthook
|
||||
|
||||
from ayon_core.hosts.aftereffects.api import AfterEffectsHost
|
||||
|
||||
host = AfterEffectsHost()
|
||||
install_host(host)
|
||||
|
||||
os.environ["AYON_LOG_NO_COLORS"] = "0"
|
||||
app = get_ayon_qt_app()
|
||||
app.setQuitOnLastWindowClosed(False)
|
||||
|
||||
launcher = ProcessLauncher(subprocess_args)
|
||||
launcher.start()
|
||||
|
||||
if os.environ.get("HEADLESS_PUBLISH"):
|
||||
manager = AddonsManager()
|
||||
webpublisher_addon = manager["webpublisher"]
|
||||
|
||||
launcher.execute_in_main_thread(
|
||||
functools.partial(
|
||||
webpublisher_addon.headless_publish,
|
||||
log,
|
||||
"CloseAE",
|
||||
is_in_tests()
|
||||
)
|
||||
)
|
||||
|
||||
elif os.environ.get("AVALON_PHOTOSHOP_WORKFILES_ON_LAUNCH", True):
|
||||
save = False
|
||||
if os.getenv("WORKFILES_SAVE_AS"):
|
||||
save = True
|
||||
|
||||
launcher.execute_in_main_thread(
|
||||
lambda: host_tools.show_tool_by_name("workfiles", save=save)
|
||||
)
|
||||
|
||||
sys.exit(app.exec_())
|
||||
|
||||
|
||||
def show_tool_by_name(tool_name):
|
||||
kwargs = {}
|
||||
if tool_name == "loader":
|
||||
kwargs["use_context"] = True
|
||||
|
||||
host_tools.show_tool_by_name(tool_name, **kwargs)
|
||||
|
||||
|
||||
class ProcessLauncher(QtCore.QObject):
|
||||
"""Launches webserver, connects to it, runs main thread."""
|
||||
route_name = "AfterEffects"
|
||||
_main_thread_callbacks = collections.deque()
|
||||
|
||||
def __init__(self, subprocess_args):
|
||||
self._subprocess_args = subprocess_args
|
||||
self._log = None
|
||||
|
||||
super(ProcessLauncher, self).__init__()
|
||||
|
||||
# Keep track if launcher was already started
|
||||
self._started = False
|
||||
|
||||
self._process = None
|
||||
self._websocket_server = None
|
||||
|
||||
start_process_timer = QtCore.QTimer()
|
||||
start_process_timer.setInterval(100)
|
||||
|
||||
loop_timer = QtCore.QTimer()
|
||||
loop_timer.setInterval(200)
|
||||
|
||||
start_process_timer.timeout.connect(self._on_start_process_timer)
|
||||
loop_timer.timeout.connect(self._on_loop_timer)
|
||||
|
||||
self._start_process_timer = start_process_timer
|
||||
self._loop_timer = loop_timer
|
||||
|
||||
@property
|
||||
def log(self):
|
||||
if self._log is None:
|
||||
self._log = Logger.get_logger("{}-launcher".format(
|
||||
self.route_name))
|
||||
return self._log
|
||||
|
||||
@property
|
||||
def websocket_server_is_running(self):
|
||||
if self._websocket_server is not None:
|
||||
return self._websocket_server.is_running
|
||||
return False
|
||||
|
||||
@property
|
||||
def is_process_running(self):
|
||||
if self._process is not None:
|
||||
return self._process.poll() is None
|
||||
return False
|
||||
|
||||
@property
|
||||
def is_host_connected(self):
|
||||
"""Returns True if connected, False if app is not running at all."""
|
||||
if not self.is_process_running:
|
||||
return False
|
||||
|
||||
try:
|
||||
|
||||
_stub = get_stub()
|
||||
if _stub:
|
||||
return True
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
return False
|
||||
|
||||
@classmethod
|
||||
def execute_in_main_thread(cls, callback):
|
||||
cls._main_thread_callbacks.append(callback)
|
||||
|
||||
def start(self):
|
||||
if self._started:
|
||||
return
|
||||
self.log.info("Started launch logic of AfterEffects")
|
||||
self._started = True
|
||||
self._start_process_timer.start()
|
||||
|
||||
def exit(self):
|
||||
""" Exit whole application. """
|
||||
if self._start_process_timer.isActive():
|
||||
self._start_process_timer.stop()
|
||||
if self._loop_timer.isActive():
|
||||
self._loop_timer.stop()
|
||||
|
||||
if self._websocket_server is not None:
|
||||
self._websocket_server.stop()
|
||||
|
||||
if self._process:
|
||||
self._process.kill()
|
||||
self._process.wait()
|
||||
|
||||
QtCore.QCoreApplication.exit()
|
||||
|
||||
def _on_loop_timer(self):
|
||||
# TODO find better way and catch errors
|
||||
# Run only callbacks that are in queue at the moment
|
||||
cls = self.__class__
|
||||
for _ in range(len(cls._main_thread_callbacks)):
|
||||
if cls._main_thread_callbacks:
|
||||
callback = cls._main_thread_callbacks.popleft()
|
||||
callback()
|
||||
|
||||
if not self.is_process_running:
|
||||
self.log.info("Host process is not running. Closing")
|
||||
self.exit()
|
||||
|
||||
elif not self.websocket_server_is_running:
|
||||
self.log.info("Websocket server is not running. Closing")
|
||||
self.exit()
|
||||
|
||||
def _on_start_process_timer(self):
|
||||
# TODO add try except validations for each part in this method
|
||||
# Start server as first thing
|
||||
if self._websocket_server is None:
|
||||
self._init_server()
|
||||
return
|
||||
|
||||
# TODO add waiting time
|
||||
# Wait for webserver
|
||||
if not self.websocket_server_is_running:
|
||||
return
|
||||
|
||||
# Start application process
|
||||
if self._process is None:
|
||||
self._start_process()
|
||||
self.log.info("Waiting for host to connect")
|
||||
return
|
||||
|
||||
# TODO add waiting time
|
||||
# Wait until host is connected
|
||||
if self.is_host_connected:
|
||||
self._start_process_timer.stop()
|
||||
self._loop_timer.start()
|
||||
elif (
|
||||
not self.is_process_running
|
||||
or not self.websocket_server_is_running
|
||||
):
|
||||
self.exit()
|
||||
|
||||
def _init_server(self):
|
||||
if self._websocket_server is not None:
|
||||
return
|
||||
|
||||
self.log.debug(
|
||||
"Initialization of websocket server for host communication"
|
||||
)
|
||||
|
||||
self._websocket_server = websocket_server = WebServerTool()
|
||||
if websocket_server.port_occupied(
|
||||
websocket_server.host_name,
|
||||
websocket_server.port
|
||||
):
|
||||
self.log.info(
|
||||
"Server already running, sending actual context and exit."
|
||||
)
|
||||
asyncio.run(websocket_server.send_context_change(self.route_name))
|
||||
self.exit()
|
||||
return
|
||||
|
||||
# Add Websocket route
|
||||
websocket_server.add_route("*", "/ws/", WebSocketAsync)
|
||||
# Add after effects route to websocket handler
|
||||
|
||||
print("Adding {} route".format(self.route_name))
|
||||
WebSocketAsync.add_route(
|
||||
self.route_name, AfterEffectsRoute
|
||||
)
|
||||
self.log.info("Starting websocket server for host communication")
|
||||
websocket_server.start_server()
|
||||
|
||||
def _start_process(self):
|
||||
if self._process is not None:
|
||||
return
|
||||
self.log.info("Starting host process")
|
||||
try:
|
||||
self._process = subprocess.Popen(
|
||||
self._subprocess_args,
|
||||
stdout=subprocess.DEVNULL,
|
||||
stderr=subprocess.DEVNULL
|
||||
)
|
||||
except Exception:
|
||||
self.log.info("exce", exc_info=True)
|
||||
self.exit()
|
||||
|
||||
|
||||
class AfterEffectsRoute(WebSocketRoute):
|
||||
"""
|
||||
One route, mimicking external application (like Harmony, etc).
|
||||
All functions could be called from client.
|
||||
'do_notify' function calls function on the client - mimicking
|
||||
notification after long running job on the server or similar
|
||||
"""
|
||||
instance = None
|
||||
|
||||
def init(self, **kwargs):
|
||||
# Python __init__ must return "self".
|
||||
# This method might return anything.
|
||||
log.debug("someone called AfterEffects route")
|
||||
self.instance = self
|
||||
return kwargs
|
||||
|
||||
# server functions
|
||||
async def ping(self):
|
||||
log.debug("someone called AfterEffects route ping")
|
||||
|
||||
# This method calls function on the client side
|
||||
# client functions
|
||||
async def set_context(self, project, asset, task):
|
||||
"""
|
||||
Sets 'project', 'asset' and 'task' to envs, eg. setting context
|
||||
|
||||
Args:
|
||||
project (str)
|
||||
asset (str)
task (str)
|
||||
"""
|
||||
log.info("Setting context change")
|
||||
log.info("project {} asset {} ".format(project, asset))
|
||||
if project:
|
||||
legacy_io.Session["AVALON_PROJECT"] = project
|
||||
os.environ["AVALON_PROJECT"] = project
|
||||
if asset:
|
||||
legacy_io.Session["AVALON_ASSET"] = asset
|
||||
os.environ["AVALON_ASSET"] = asset
|
||||
if task:
|
||||
legacy_io.Session["AVALON_TASK"] = task
|
||||
os.environ["AVALON_TASK"] = task
|
||||
|
||||
async def read(self):
|
||||
log.debug("aftereffects.read client calls server server calls "
|
||||
"aftereffects client")
|
||||
return await self.socket.call('aftereffects.read')
|
||||
|
||||
# panel routes for tools
|
||||
async def workfiles_route(self):
|
||||
self._tool_route("workfiles")
|
||||
|
||||
async def loader_route(self):
|
||||
self._tool_route("loader")
|
||||
|
||||
async def publish_route(self):
|
||||
self._tool_route("publisher")
|
||||
|
||||
async def sceneinventory_route(self):
|
||||
self._tool_route("sceneinventory")
|
||||
|
||||
async def setresolution_route(self):
|
||||
self._settings_route(False, True)
|
||||
|
||||
async def setframes_route(self):
|
||||
self._settings_route(True, False)
|
||||
|
||||
async def setall_route(self):
|
||||
self._settings_route(True, True)
|
||||
|
||||
async def experimental_tools_route(self):
|
||||
self._tool_route("experimental_tools")
|
||||
|
||||
def _tool_route(self, _tool_name):
|
||||
"""The address accessed when clicking on the buttons."""
|
||||
|
||||
partial_method = functools.partial(show_tool_by_name,
|
||||
_tool_name)
|
||||
|
||||
ProcessLauncher.execute_in_main_thread(partial_method)
|
||||
|
||||
# Required return statement.
|
||||
return "nothing"
|
||||
|
||||
def _settings_route(self, frames, resolution):
|
||||
partial_method = functools.partial(set_settings,
|
||||
frames,
|
||||
resolution)
|
||||
|
||||
ProcessLauncher.execute_in_main_thread(partial_method)
|
||||
|
||||
# Required return statement.
|
||||
return "nothing"
|
||||
|
||||
def create_placeholder_route(self):
|
||||
from ayon_core.hosts.aftereffects.api.workfile_template_builder import \
|
||||
create_placeholder
|
||||
partial_method = functools.partial(create_placeholder)
|
||||
|
||||
ProcessLauncher.execute_in_main_thread(partial_method)
|
||||
|
||||
# Required return statement.
|
||||
return "nothing"
|
||||
|
||||
def update_placeholder_route(self):
|
||||
from ayon_core.hosts.aftereffects.api.workfile_template_builder import \
|
||||
update_placeholder
|
||||
partial_method = functools.partial(update_placeholder)
|
||||
|
||||
ProcessLauncher.execute_in_main_thread(partial_method)
|
||||
|
||||
# Required return statement.
|
||||
return "nothing"
|
||||
|
||||
def build_workfile_template_route(self):
|
||||
from ayon_core.hosts.aftereffects.api.workfile_template_builder import \
|
||||
build_workfile_template
|
||||
partial_method = functools.partial(build_workfile_template)
|
||||
|
||||
ProcessLauncher.execute_in_main_thread(partial_method)
|
||||
|
||||
# Required return statement.
|
||||
return "nothing"
|
||||
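Routes in `AfterEffectsRoute` never touch Qt tools directly: they wrap the call in `functools.partial` and append it to `ProcessLauncher._main_thread_callbacks`, which `_on_loop_timer` drains on the Qt main thread. A condensed, standalone sketch of that hand-off pattern (simplified names, no Qt timer, purely illustrative):

```python
import collections
import functools


class MainThreadQueue:
    """Simplified stand-in for ProcessLauncher's callback queue."""
    _callbacks = collections.deque()

    @classmethod
    def execute_in_main_thread(cls, callback):
        cls._callbacks.append(callback)

    @classmethod
    def process(cls):
        # In the real launcher this runs from a QTimer on the main thread.
        for _ in range(len(cls._callbacks)):
            cls._callbacks.popleft()()


# A websocket route handler queues GUI work instead of calling it directly:
MainThreadQueue.execute_in_main_thread(
    functools.partial(print, "show 'workfiles' tool"))
MainThreadQueue.process()
```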
160
client/ayon_core/hosts/aftereffects/api/lib.py
Normal file
|
|
@ -0,0 +1,160 @@
|
|||
import os
|
||||
import re
|
||||
import json
|
||||
import contextlib
|
||||
import logging
|
||||
|
||||
from ayon_core.pipeline.context_tools import get_current_context
|
||||
from ayon_core.client import get_asset_by_name
|
||||
from .ws_stub import get_stub
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
log.setLevel(logging.DEBUG)
|
||||
|
||||
|
||||
@contextlib.contextmanager
|
||||
def maintained_selection():
|
||||
"""Maintain selection during context."""
|
||||
selection = get_stub().get_selected_items(True, False, False)
|
||||
try:
|
||||
yield selection
|
||||
finally:
|
||||
pass
|
||||
|
||||
|
||||
def get_extension_manifest_path():
|
||||
return os.path.join(
|
||||
os.path.dirname(os.path.abspath(__file__)),
|
||||
"extension",
|
||||
"CSXS",
|
||||
"manifest.xml"
|
||||
)
|
||||
|
||||
|
||||
def get_unique_layer_name(layers, name):
|
||||
"""
|
||||
Gets all layer names and if 'name' is present in them, increases
|
||||
suffix by 1 (eg. creates unique layer name - for Loader)
|
||||
Args:
|
||||
layers (list): of strings, names only
|
||||
name (string): checked value
|
||||
|
||||
Returns:
|
||||
(string): name_00X (without version)
|
||||
"""
|
||||
names = {}
|
||||
for layer in layers:
|
||||
layer_name = re.sub(r'_\d{3}$', '', layer)
|
||||
if layer_name in names.keys():
|
||||
names[layer_name] = names[layer_name] + 1
|
||||
else:
|
||||
names[layer_name] = 1
|
||||
occurrences = names.get(name, 0)
|
||||
|
||||
return "{}_{:0>3d}".format(name, occurrences + 1)
|
||||
|
||||
|
||||
def get_background_layers(file_url):
|
||||
"""
|
||||
Pulls file names from background json file, enriches them with folder url for
|
||||
AE to be able to import files.
|
||||
|
||||
Order is important, follows order in json.
|
||||
|
||||
Args:
|
||||
file_url (str): abs url of background json
|
||||
|
||||
Returns:
|
||||
(list): of abs paths to images
|
||||
"""
|
||||
with open(file_url) as json_file:
|
||||
data = json.load(json_file)
|
||||
|
||||
layers = list()
|
||||
bg_folder = os.path.dirname(file_url)
|
||||
for child in data['children']:
|
||||
if child.get("filename"):
|
||||
layers.append(os.path.join(bg_folder, child.get("filename")).
|
||||
replace("\\", "/"))
|
||||
else:
|
||||
for layer in child['children']:
|
||||
if layer.get("filename"):
|
||||
layers.append(os.path.join(bg_folder,
|
||||
layer.get("filename")).
|
||||
replace("\\", "/"))
|
||||
return layers
|
||||
|
||||
|
||||
def get_asset_settings(asset_doc):
|
||||
"""Get settings on current asset from database.
|
||||
|
||||
Returns:
|
||||
dict: Scene data.
|
||||
|
||||
"""
|
||||
asset_data = asset_doc["data"]
|
||||
fps = asset_data.get("fps", 0)
|
||||
frame_start = asset_data.get("frameStart", 0)
|
||||
frame_end = asset_data.get("frameEnd", 0)
|
||||
handle_start = asset_data.get("handleStart", 0)
|
||||
handle_end = asset_data.get("handleEnd", 0)
|
||||
resolution_width = asset_data.get("resolutionWidth", 0)
|
||||
resolution_height = asset_data.get("resolutionHeight", 0)
|
||||
duration = (frame_end - frame_start + 1) + handle_start + handle_end
|
||||
|
||||
return {
|
||||
"fps": fps,
|
||||
"frameStart": frame_start,
|
||||
"frameEnd": frame_end,
|
||||
"handleStart": handle_start,
|
||||
"handleEnd": handle_end,
|
||||
"resolutionWidth": resolution_width,
|
||||
"resolutionHeight": resolution_height,
|
||||
"duration": duration
|
||||
}
|
||||
|
||||
|
||||
def set_settings(frames, resolution, comp_ids=None, print_msg=True):
|
||||
"""Sets number of frames and resolution to selected comps.
|
||||
|
||||
Args:
|
||||
frames (bool): True if set frame info
|
||||
resolution (bool): True if set resolution
|
||||
comp_ids (list): specific composition ids, if empty
|
||||
it tries to look for currently selected
|
||||
print_msg (bool): True to show JS alert with msg
|
||||
"""
|
||||
frame_start = frames_duration = fps = width = height = None
|
||||
current_context = get_current_context()
|
||||
|
||||
asset_doc = get_asset_by_name(current_context["project_name"],
|
||||
current_context["asset_name"])
|
||||
settings = get_asset_settings(asset_doc)
|
||||
|
||||
msg = ''
|
||||
if frames:
|
||||
frame_start = settings["frameStart"] - settings["handleStart"]
|
||||
frames_duration = settings["duration"]
|
||||
fps = settings["fps"]
|
||||
msg += f"frame start:{frame_start}, duration:{frames_duration}, "\
|
||||
f"fps:{fps}"
|
||||
if resolution:
|
||||
width = settings["resolutionWidth"]
|
||||
height = settings["resolutionHeight"]
|
||||
msg += f"width:{width} and height:{height}"
|
||||
|
||||
stub = get_stub()
|
||||
if not comp_ids:
|
||||
comps = stub.get_selected_items(True, False, False)
|
||||
comp_ids = [comp.id for comp in comps]
|
||||
if not comp_ids:
|
||||
stub.print_msg("Select at least one composition to apply settings.")
|
||||
return
|
||||
|
||||
for comp_id in comp_ids:
|
||||
msg = f"Setting for comp {comp_id} " + msg
|
||||
log.debug(msg)
|
||||
stub.set_comp_properties(comp_id, frame_start, frames_duration,
|
||||
fps, width, height)
|
||||
if print_msg:
|
||||
stub.print_msg(msg)
|
||||
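`get_unique_layer_name` above strips any existing `_NNN` suffix, counts occurrences of the base name and returns the next free, zero-padded suffix. A short usage sketch (assumes the addon package is importable, e.g. inside an AYON process):

```python
from ayon_core.hosts.aftereffects.api.lib import get_unique_layer_name

existing = ["imageMain_001", "imageMain_002", "imageBG_001"]
print(get_unique_layer_name(existing, "imageMain"))  # imageMain_003
print(get_unique_layer_name(existing, "imageFG"))    # imageFG_001
```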
BIN
client/ayon_core/hosts/aftereffects/api/panel.png
Normal file
|
After Width: | Height: | Size: 16 KiB |
BIN
client/ayon_core/hosts/aftereffects/api/panel_failure.png
Normal file
|
After Width: | Height: | Size: 13 KiB |
293
client/ayon_core/hosts/aftereffects/api/pipeline.py
Normal file
|
|
@ -0,0 +1,293 @@
|
|||
import os
|
||||
|
||||
from qtpy import QtWidgets
|
||||
|
||||
import pyblish.api
|
||||
|
||||
from ayon_core.lib import Logger, register_event_callback
|
||||
from ayon_core.pipeline import (
|
||||
register_loader_plugin_path,
|
||||
register_creator_plugin_path,
|
||||
AVALON_CONTAINER_ID,
|
||||
)
|
||||
from ayon_core.hosts.aftereffects.api.workfile_template_builder import (
|
||||
AEPlaceholderLoadPlugin,
|
||||
AEPlaceholderCreatePlugin
|
||||
)
|
||||
from ayon_core.pipeline.load import any_outdated_containers
|
||||
import ayon_core.hosts.aftereffects
|
||||
|
||||
from ayon_core.host import (
|
||||
HostBase,
|
||||
IWorkfileHost,
|
||||
ILoadHost,
|
||||
IPublishHost
|
||||
)
|
||||
from ayon_core.tools.utils import get_ayon_qt_app
|
||||
|
||||
from .launch_logic import get_stub
|
||||
from .ws_stub import ConnectionNotEstablishedYet
|
||||
|
||||
log = Logger.get_logger(__name__)
|
||||
|
||||
|
||||
HOST_DIR = os.path.dirname(
|
||||
os.path.abspath(ayon_core.hosts.aftereffects.__file__)
|
||||
)
|
||||
PLUGINS_DIR = os.path.join(HOST_DIR, "plugins")
|
||||
PUBLISH_PATH = os.path.join(PLUGINS_DIR, "publish")
|
||||
LOAD_PATH = os.path.join(PLUGINS_DIR, "load")
|
||||
CREATE_PATH = os.path.join(PLUGINS_DIR, "create")
|
||||
|
||||
|
||||
class AfterEffectsHost(HostBase, IWorkfileHost, ILoadHost, IPublishHost):
|
||||
name = "aftereffects"
|
||||
|
||||
def __init__(self):
|
||||
self._stub = None
|
||||
super(AfterEffectsHost, self).__init__()
|
||||
|
||||
@property
|
||||
def stub(self):
|
||||
"""
|
||||
Handle pulling stub from AE to run operations on host
|
||||
Returns:
|
||||
(AEServerStub) or None
|
||||
"""
|
||||
if self._stub:
|
||||
return self._stub
|
||||
|
||||
try:
|
||||
stub = get_stub() # only after AfterEffects is up
|
||||
except ConnectionNotEstablishedYet:
|
||||
print("Not connected yet, ignoring")
|
||||
return
|
||||
|
||||
self._stub = stub
|
||||
return self._stub
|
||||
|
||||
def install(self):
|
||||
print("Installing Pype config...")
|
||||
|
||||
pyblish.api.register_host("aftereffects")
|
||||
pyblish.api.register_plugin_path(PUBLISH_PATH)
|
||||
|
||||
register_loader_plugin_path(LOAD_PATH)
|
||||
register_creator_plugin_path(CREATE_PATH)
|
||||
|
||||
register_event_callback("application.launched", application_launch)
|
||||
|
||||
def get_workfile_extensions(self):
|
||||
return [".aep"]
|
||||
|
||||
def save_workfile(self, dst_path=None):
|
||||
self.stub.saveAs(dst_path, True)
|
||||
|
||||
def open_workfile(self, filepath):
|
||||
self.stub.open(filepath)
|
||||
|
||||
return True
|
||||
|
||||
def get_current_workfile(self):
|
||||
try:
|
||||
full_name = get_stub().get_active_document_full_name()
|
||||
if full_name and full_name != "null":
|
||||
return os.path.normpath(full_name).replace("\\", "/")
|
||||
except ValueError:
|
||||
print("Nothing opened")
|
||||
pass
|
||||
|
||||
return None
|
||||
|
||||
def get_containers(self):
|
||||
return ls()
|
||||
|
||||
def get_context_data(self):
|
||||
meta = self.stub.get_metadata()
|
||||
for item in meta:
|
||||
if item.get("id") == "publish_context":
|
||||
item.pop("id")
|
||||
return item
|
||||
|
||||
return {}
|
||||
|
||||
def update_context_data(self, data, changes):
|
||||
item = data
|
||||
item["id"] = "publish_context"
|
||||
self.stub.imprint(item["id"], item)
|
||||
|
||||
def get_workfile_build_placeholder_plugins(self):
|
||||
return [
|
||||
AEPlaceholderLoadPlugin,
|
||||
AEPlaceholderCreatePlugin
|
||||
]
|
||||
|
||||
# created instances section
|
||||
def list_instances(self):
|
||||
"""List all created instances from current workfile which
|
||||
will be published.
|
||||
|
||||
Pulls from File > File Info
|
||||
|
||||
For SubsetManager
|
||||
|
||||
Returns:
|
||||
(list) of dictionaries matching instances format
|
||||
"""
|
||||
stub = self.stub
|
||||
if not stub:
|
||||
return []
|
||||
|
||||
instances = []
|
||||
layers_meta = stub.get_metadata()
|
||||
|
||||
for instance in layers_meta:
|
||||
if instance.get("id") == "pyblish.avalon.instance":
|
||||
instances.append(instance)
|
||||
return instances
|
||||
|
||||
def remove_instance(self, instance):
|
||||
"""Remove instance from current workfile metadata.
|
||||
|
||||
Updates metadata of current file in File > File Info and removes
|
||||
icon highlight on group layer.
|
||||
|
||||
For SubsetManager
|
||||
|
||||
Args:
|
||||
instance (dict): instance representation from subsetmanager model
|
||||
"""
|
||||
stub = self.stub
|
||||
|
||||
if not stub:
|
||||
return
|
||||
|
||||
inst_id = instance.get("instance_id") or instance.get("uuid") # legacy
|
||||
if not inst_id:
|
||||
log.warning("No instance identifier for {}".format(instance))
|
||||
return
|
||||
|
||||
stub.remove_instance(inst_id)
|
||||
|
||||
if instance.get("members"):
|
||||
item = stub.get_item(instance["members"][0])
|
||||
if item:
|
||||
stub.rename_item(item.id,
|
||||
item.name.replace(stub.PUBLISH_ICON, ''))
|
||||
|
||||
|
||||
def application_launch():
|
||||
"""Triggered after start of app"""
|
||||
check_inventory()
|
||||
|
||||
|
||||
def ls():
|
||||
"""Yields containers from active AfterEffects document.
|
||||
|
||||
This is the host-equivalent of api.ls(), but instead of listing
|
||||
assets on disk, it lists assets already loaded in AE; once loaded
|
||||
they are called 'containers'. Used in Manage tool.
|
||||
|
||||
Containers could be on multiple levels, single images/videos as a
|
||||
FootageItem, or multiple items - backgrounds (folder with automatically
|
||||
created composition and all imported layers).
|
||||
|
||||
Yields:
|
||||
dict: container
|
||||
|
||||
"""
|
||||
try:
|
||||
stub = get_stub() # only after AfterEffects is up
|
||||
except ConnectionNotEstablishedYet:
|
||||
print("Not connected yet, ignoring")
|
||||
return
|
||||
|
||||
layers_meta = stub.get_metadata()
|
||||
for item in stub.get_items(comps=True,
|
||||
folders=True,
|
||||
footages=True):
|
||||
data = stub.read(item, layers_meta)
|
||||
# Skip non-tagged layers.
|
||||
if not data:
|
||||
continue
|
||||
|
||||
# Filter to only containers.
|
||||
if "container" not in data["id"]:
|
||||
continue
|
||||
|
||||
# Append transient data
|
||||
data["objectName"] = item.name.replace(stub.LOADED_ICON, '')
|
||||
data["layer"] = item
|
||||
yield data
|
||||
|
||||
|
||||
def check_inventory():
|
||||
"""Checks loaded containers if they are of highest version"""
|
||||
if not any_outdated_containers():
|
||||
return
|
||||
|
||||
# Warn about outdated containers.
|
||||
_app = get_ayon_qt_app()
|
||||
|
||||
message_box = QtWidgets.QMessageBox()
|
||||
message_box.setIcon(QtWidgets.QMessageBox.Warning)
|
||||
msg = "There are outdated containers in the scene."
|
||||
message_box.setText(msg)
|
||||
message_box.exec_()
|
||||
|
||||
|
||||
def containerise(name,
|
||||
namespace,
|
||||
comp,
|
||||
context,
|
||||
loader=None,
|
||||
suffix="_CON"):
|
||||
"""
|
||||
Containerisation enables a tracking of version, author and origin
|
||||
for loaded assets.
|
||||
|
||||
Creates dictionary payloads that gets saved into file metadata. Each
|
||||
container contains info about who loaded it (loader) and members (single or multiple
|
||||
in case of background).
|
||||
|
||||
Arguments:
|
||||
name (str): Name of resulting assembly
|
||||
namespace (str): Namespace under which to host container
|
||||
comp (AEItem): Composition to containerise
|
||||
context (dict): Asset information
|
||||
loader (str, optional): Name of loader used to produce this container.
|
||||
suffix (str, optional): Suffix of container, defaults to `_CON`.
|
||||
|
||||
Returns:
|
||||
container (str): Name of container assembly
|
||||
"""
|
||||
data = {
|
||||
"schema": "openpype:container-2.0",
|
||||
"id": AVALON_CONTAINER_ID,
|
||||
"name": name,
|
||||
"namespace": namespace,
|
||||
"loader": str(loader),
|
||||
"representation": str(context["representation"]["_id"]),
|
||||
"members": comp.members or [comp.id]
|
||||
}
|
||||
|
||||
stub = get_stub()
|
||||
stub.imprint(comp.id, data)
|
||||
|
||||
return comp
|
||||
|
||||
|
||||
def cache_and_get_instances(creator):
|
||||
"""Cache instances in shared data.
|
||||
|
||||
Stores all instances as a list, as legacy instances might still be present.
|
||||
Args:
|
||||
creator (Creator): Plugin which would like to get instances from host.
|
||||
Returns:
|
||||
List[]: list of all instances stored in metadata
|
||||
"""
|
||||
shared_key = "openpype.photoshop.instances"
|
||||
if shared_key not in creator.collection_shared_data:
|
||||
creator.collection_shared_data[shared_key] = \
|
||||
creator.host.list_instances()
|
||||
return creator.collection_shared_data[shared_key]
|
||||
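`ls()` above yields one dictionary per loaded container found in the workfile metadata, with transient keys (`objectName`, `layer`) added on top of the imprinted payload. A hedged usage sketch; it assumes After Effects is running with the extension connected, otherwise `get_stub()` raises `ConnectionNotEstablishedYet` and nothing is yielded:

```python
from ayon_core.hosts.aftereffects.api.pipeline import ls

for container in ls():
    # 'name' and 'loader' come from containerise(), 'layer' is the AEItem
    print(container["name"], container["loader"], container["layer"].id)
```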
12
client/ayon_core/hosts/aftereffects/api/plugin.py
Normal file
|
|
@ -0,0 +1,12 @@
|
|||
import six
|
||||
from abc import ABCMeta
|
||||
|
||||
from ayon_core.pipeline import LoaderPlugin
|
||||
from .launch_logic import get_stub
|
||||
|
||||
|
||||
@six.add_metaclass(ABCMeta)
|
||||
class AfterEffectsLoader(LoaderPlugin):
|
||||
@staticmethod
|
||||
def get_stub():
|
||||
return get_stub()
|
||||
271
client/ayon_core/hosts/aftereffects/api/workfile_template_builder.py
Normal file
|
|
@ -0,0 +1,271 @@
|
|||
import os.path
|
||||
import uuid
|
||||
import shutil
|
||||
|
||||
from ayon_core.pipeline import registered_host
|
||||
from ayon_core.tools.workfile_template_build import (
|
||||
WorkfileBuildPlaceholderDialog,
|
||||
)
|
||||
from ayon_core.pipeline.workfile.workfile_template_builder import (
|
||||
AbstractTemplateBuilder,
|
||||
PlaceholderPlugin,
|
||||
LoadPlaceholderItem,
|
||||
CreatePlaceholderItem,
|
||||
PlaceholderLoadMixin,
|
||||
PlaceholderCreateMixin
|
||||
)
|
||||
from ayon_core.hosts.aftereffects.api import get_stub
|
||||
from ayon_core.hosts.aftereffects.api.lib import set_settings
|
||||
|
||||
PLACEHOLDER_SET = "PLACEHOLDERS_SET"
|
||||
PLACEHOLDER_ID = "openpype.placeholder"
|
||||
|
||||
|
||||
class AETemplateBuilder(AbstractTemplateBuilder):
|
||||
"""Concrete implementation of AbstractTemplateBuilder for AE"""
|
||||
|
||||
def import_template(self, path):
|
||||
"""Import template into current scene.
|
||||
Block if a template is already loaded.
|
||||
|
||||
Args:
|
||||
path (str): A path to current template (usually given by
|
||||
get_template_preset implementation)
|
||||
|
||||
Returns:
|
||||
bool: Whether the template was successfully imported or not
|
||||
"""
|
||||
stub = get_stub()
|
||||
if not os.path.exists(path):
|
||||
stub.print_msg(f"Template file on {path} doesn't exist.")
|
||||
return
|
||||
|
||||
stub.save()
|
||||
workfile_path = stub.get_active_document_full_name()
|
||||
shutil.copy2(path, workfile_path)
|
||||
stub.open(workfile_path)
|
||||
|
||||
return True
|
||||
|
||||
|
||||
class AEPlaceholderPlugin(PlaceholderPlugin):
|
||||
"""Contains generic methods for all PlaceholderPlugins."""
|
||||
|
||||
def collect_placeholders(self):
|
||||
"""Collect info from file metadata about created placeholders.
|
||||
|
||||
Returns:
|
||||
(list) (LoadPlaceholderItem)
|
||||
"""
|
||||
output = []
|
||||
scene_placeholders = self._collect_scene_placeholders()
|
||||
for item in scene_placeholders:
|
||||
if item.get("plugin_identifier") != self.identifier:
|
||||
continue
|
||||
|
||||
if isinstance(self, AEPlaceholderLoadPlugin):
|
||||
item = LoadPlaceholderItem(item["uuid"],
|
||||
item["data"],
|
||||
self)
|
||||
elif isinstance(self, AEPlaceholderCreatePlugin):
|
||||
item = CreatePlaceholderItem(item["uuid"],
|
||||
item["data"],
|
||||
self)
|
||||
else:
|
||||
raise NotImplementedError(f"Not implemented for {type(self)}")
|
||||
|
||||
output.append(item)
|
||||
|
||||
return output
|
||||
|
||||
def update_placeholder(self, placeholder_item, placeholder_data):
|
||||
"""Resave changed properties for placeholders"""
|
||||
item_id, metadata_item = self._get_item(placeholder_item)
|
||||
stub = get_stub()
|
||||
if not item_id:
|
||||
stub.print_msg("Cannot find item for "
|
||||
f"{placeholder_item.scene_identifier}")
|
||||
return
|
||||
metadata_item["data"] = placeholder_data
|
||||
stub.imprint(item_id, metadata_item)
|
||||
|
||||
def _get_item(self, placeholder_item):
|
||||
"""Returns item id and item metadata for placeholder from file meta"""
|
||||
stub = get_stub()
|
||||
placeholder_uuid = placeholder_item.scene_identifier
|
||||
for metadata_item in stub.get_metadata():
|
||||
if not metadata_item.get("is_placeholder"):
|
||||
continue
|
||||
if placeholder_uuid in metadata_item.get("uuid"):
|
||||
return metadata_item["members"][0], metadata_item
|
||||
return None, None
|
||||
|
||||
def _collect_scene_placeholders(self):
|
||||
"""" Cache placeholder data to shared data.
|
||||
Returns:
|
||||
(list) of dicts
|
||||
"""
|
||||
placeholder_items = self.builder.get_shared_populate_data(
|
||||
"placeholder_items"
|
||||
)
|
||||
if not placeholder_items:
|
||||
placeholder_items = []
|
||||
for item in get_stub().get_metadata():
|
||||
if not item.get("is_placeholder"):
|
||||
continue
|
||||
placeholder_items.append(item)
|
||||
|
||||
self.builder.set_shared_populate_data(
|
||||
"placeholder_items", placeholder_items
|
||||
)
|
||||
return placeholder_items
|
||||
|
||||
def _imprint_item(self, item_id, name, placeholder_data, stub):
|
||||
if not item_id:
|
||||
raise ValueError("Couldn't create a placeholder")
|
||||
container_data = {
|
||||
"id": "openpype.placeholder",
|
||||
"name": name,
|
||||
"is_placeholder": True,
|
||||
"plugin_identifier": self.identifier,
|
||||
"uuid": str(uuid.uuid4()), # scene_identifier
|
||||
"data": placeholder_data,
|
||||
"members": [item_id]
|
||||
}
|
||||
stub.imprint(item_id, container_data)
|
||||
|
||||
|
||||
class AEPlaceholderCreatePlugin(AEPlaceholderPlugin, PlaceholderCreateMixin):
|
||||
"""Adds Create placeholder.
|
||||
|
||||
This adds composition and runs Create
|
||||
"""
|
||||
identifier = "aftereffects.create"
|
||||
label = "AfterEffects create"
|
||||
|
||||
def create_placeholder(self, placeholder_data):
|
||||
stub = get_stub()
|
||||
name = "CREATEPLACEHOLDER"
|
||||
item_id = stub.add_item(name, "COMP")
|
||||
|
||||
self._imprint_item(item_id, name, placeholder_data, stub)
|
||||
|
||||
def populate_placeholder(self, placeholder):
|
||||
"""Replace 'placeholder' with publishable instance.
|
||||
|
||||
Renames prepared composition name, creates publishable instance, sets
|
||||
frame/duration settings according to DB.
|
||||
"""
|
||||
pre_create_data = {"use_selection": True}
|
||||
item_id, item = self._get_item(placeholder)
|
||||
get_stub().select_items([item_id])
|
||||
self.populate_create_placeholder(placeholder, pre_create_data)
|
||||
|
||||
# apply settings for populated composition
|
||||
item_id, metadata_item = self._get_item(placeholder)
|
||||
set_settings(True, True, [item_id])
|
||||
|
||||
def get_placeholder_options(self, options=None):
|
||||
return self.get_create_plugin_options(options)
|
||||
|
||||
|
||||
class AEPlaceholderLoadPlugin(AEPlaceholderPlugin, PlaceholderLoadMixin):
|
||||
identifier = "aftereffects.load"
|
||||
label = "AfterEffects load"
|
||||
|
||||
def create_placeholder(self, placeholder_data):
|
||||
"""Creates AE's Placeholder item in Project items list.
|
||||
|
||||
Sets dummy resolution/duration/fps settings, will be replaced when
|
||||
populated.
|
||||
"""
|
||||
stub = get_stub()
|
||||
name = "LOADERPLACEHOLDER"
|
||||
item_id = stub.add_placeholder(name, 1920, 1060, 25, 10)
|
||||
|
||||
self._imprint_item(item_id, name, placeholder_data, stub)
|
||||
|
||||
def populate_placeholder(self, placeholder):
|
||||
"""Use Openpype Loader from `placeholder` to create new FootageItems
|
||||
|
||||
New FootageItems are created, files are imported.
|
||||
"""
|
||||
self.populate_load_placeholder(placeholder)
|
||||
errors = placeholder.get_errors()
|
||||
stub = get_stub()
|
||||
if errors:
|
||||
stub.print_msg("\n".join(errors))
|
||||
else:
|
||||
if not placeholder.data["keep_placeholder"]:
|
||||
metadata = stub.get_metadata()
|
||||
for item in metadata:
|
||||
if not item.get("is_placeholder"):
|
||||
continue
|
||||
scene_identifier = item.get("uuid")
|
||||
if (scene_identifier and
|
||||
scene_identifier == placeholder.scene_identifier):
|
||||
stub.delete_item(item["members"][0])
|
||||
stub.remove_instance(placeholder.scene_identifier, metadata)
|
||||
|
||||
def get_placeholder_options(self, options=None):
|
||||
return self.get_load_plugin_options(options)
|
||||
|
||||
def load_succeed(self, placeholder, container):
|
||||
placeholder_item_id, _ = self._get_item(placeholder)
|
||||
item_id = container.id
|
||||
get_stub().add_item_instead_placeholder(placeholder_item_id, item_id)
|
||||
|
||||
|
||||
def build_workfile_template(*args, **kwargs):
|
||||
builder = AETemplateBuilder(registered_host())
|
||||
builder.build_template(*args, **kwargs)
|
||||
|
||||
|
||||
def update_workfile_template(*args):
|
||||
builder = AETemplateBuilder(registered_host())
|
||||
builder.rebuild_template()
|
||||
|
||||
|
||||
def create_placeholder(*args):
|
||||
"""Called when new workile placeholder should be created."""
|
||||
host = registered_host()
|
||||
builder = AETemplateBuilder(host)
|
||||
window = WorkfileBuildPlaceholderDialog(host, builder)
|
||||
window.exec_()
|
||||
|
||||
|
||||
def update_placeholder(*args):
|
||||
"""Called after placeholder item is selected to modify it."""
|
||||
host = registered_host()
|
||||
builder = AETemplateBuilder(host)
|
||||
|
||||
stub = get_stub()
|
||||
selected_items = stub.get_selected_items(True, True, True)
|
||||
|
||||
if len(selected_items) != 1:
|
||||
stub.print_msg("Please select just 1 placeholder")
|
||||
return
|
||||
|
||||
selected_id = selected_items[0].id
|
||||
placeholder_item = None
|
||||
|
||||
placeholder_items_by_id = {
|
||||
placeholder_item.scene_identifier: placeholder_item
|
||||
for placeholder_item in builder.get_placeholders()
|
||||
}
|
||||
for metadata_item in stub.get_metadata():
|
||||
if not metadata_item.get("is_placeholder"):
|
||||
continue
|
||||
if selected_id in metadata_item.get("members"):
|
||||
placeholder_item = placeholder_items_by_id.get(
|
||||
metadata_item["uuid"])
|
||||
break
|
||||
|
||||
if not placeholder_item:
|
||||
stub.print_msg("Didn't find placeholder metadata. "
|
||||
"Remove and re-create placeholder.")
|
||||
return
|
||||
|
||||
window = WorkfileBuildPlaceholderDialog(host, builder)
|
||||
window.set_update_mode(placeholder_item)
|
||||
window.exec_()
|
||||
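Every placeholder created by the plugins above ends up as a small metadata record imprinted on the AE item by `_imprint_item`, and is later looked up again through its `uuid`. An illustrative record with hypothetical values (field names taken from `_imprint_item`, values made up):

```python
# Hypothetical example of the payload _imprint_item stores on an AE item.
placeholder_record = {
    "id": "openpype.placeholder",
    "name": "LOADERPLACEHOLDER",
    "is_placeholder": True,
    "plugin_identifier": "aftereffects.load",        # or "aftereffects.create"
    "uuid": "2f0c9a8e-5d31-4f7b-9c2a-0b1d6e4a7f55",  # scene_identifier
    "data": {"keep_placeholder": False},             # options from the dialog
    "members": [42],                                 # AE item id it belongs to
}
```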
731
client/ayon_core/hosts/aftereffects/api/ws_stub.py
Normal file
|
|
@ -0,0 +1,731 @@
|
|||
"""
|
||||
Stub handling connection from server to client.
|
||||
Used anywhere solution is calling client methods.
|
||||
"""
|
||||
import json
|
||||
import logging
|
||||
|
||||
import attr
|
||||
|
||||
from wsrpc_aiohttp import WebSocketAsync
|
||||
from ayon_core.tools.adobe_webserver.app import WebServerTool
|
||||
|
||||
|
||||
class ConnectionNotEstablishedYet(Exception):
|
||||
pass
|
||||
|
||||
|
||||
@attr.s
|
||||
class AEItem(object):
|
||||
"""
|
||||
Object denoting Item in AE. Each item is created in AE by any Loader,
|
||||
but contains same fields, which are being used in later processing.
|
||||
"""
|
||||
# metadata
|
||||
id = attr.ib() # id created by AE, could be used for querying
|
||||
name = attr.ib() # name of item
|
||||
item_type = attr.ib(default=None) # item type (footage, folder, comp)
|
||||
# all imported elements, single for
|
||||
# regular image, array for Backgrounds
|
||||
members = attr.ib(factory=list)
|
||||
frameStart = attr.ib(default=None)
|
||||
framesDuration = attr.ib(default=None)
|
||||
frameRate = attr.ib(default=None)
|
||||
file_name = attr.ib(default=None)
|
||||
instance_id = attr.ib(default=None) # New Publisher
|
||||
width = attr.ib(default=None)
|
||||
height = attr.ib(default=None)
|
||||
is_placeholder = attr.ib(default=False)
|
||||
uuid = attr.ib(default=False)
|
||||
path = attr.ib(default=False) # path to FootageItem to validate
|
||||
# list of composition Footage is in
|
||||
containing_comps = attr.ib(factory=list)
|
||||
|
||||
|
||||
class AfterEffectsServerStub():
|
||||
"""
|
||||
Stub for calling functions on client (AfterEffects js) side.
|
||||
Expects that client is already connected (started when avalon menu
|
||||
is opened).
|
||||
'self.websocketserver.call' is used as async wrapper
|
||||
"""
|
||||
PUBLISH_ICON = '\u2117 '
|
||||
LOADED_ICON = '\u25bc'
|
||||
|
||||
def __init__(self):
|
||||
self.websocketserver = WebServerTool.get_instance()
|
||||
self.client = self.get_client()
|
||||
self.log = logging.getLogger(self.__class__.__name__)
|
||||
|
||||
@staticmethod
|
||||
def get_client():
|
||||
"""
|
||||
Return first connected client to WebSocket
|
||||
TODO implement selection by Route
|
||||
:return: <WebSocketAsync> client
|
||||
"""
|
||||
clients = WebSocketAsync.get_clients()
|
||||
client = None
|
||||
if len(clients) > 0:
|
||||
key = list(clients.keys())[0]
|
||||
client = clients.get(key)
|
||||
|
||||
return client
|
||||
|
||||
def open(self, path):
|
||||
"""
|
||||
Open file located at 'path' (local).
|
||||
Args:
|
||||
path(string): file path locally
|
||||
Returns: None
|
||||
"""
|
||||
res = self.websocketserver.call(self.client.call
|
||||
('AfterEffects.open', path=path))
|
||||
|
||||
return self._handle_return(res)
|
||||
|
||||
def get_metadata(self):
|
||||
"""
|
||||
Get complete stored JSON with metadata from AE.Metadata.Label
|
||||
field.
|
||||
|
||||
It contains containers loaded by any Loader OR instances created
|
||||
by Creator.
|
||||
|
||||
Returns:
|
||||
(list)
|
||||
"""
|
||||
res = self.websocketserver.call(self.client.call
|
||||
('AfterEffects.get_metadata'))
|
||||
metadata = self._handle_return(res)
|
||||
|
||||
return metadata or []
|
||||
|
||||
def read(self, item, layers_meta=None):
|
||||
"""
|
||||
Parses item metadata from Label field of active document.
|
||||
Used as filter to pick metadata for specific 'item' only.
|
||||
|
||||
Args:
|
||||
item (AEItem): pulled info from AE
|
||||
layers_meta (dict): full list from Headline
|
||||
(load and inject for better performance in loops)
|
||||
Returns:
|
||||
(dict):
|
||||
"""
|
||||
if layers_meta is None:
|
||||
layers_meta = self.get_metadata()
|
||||
for item_meta in layers_meta:
|
||||
if 'container' in item_meta.get('id') and \
|
||||
str(item.id) == str(item_meta.get('members')[0]):
|
||||
return item_meta
|
||||
|
||||
self.log.debug("Couldn't find layer metadata")
|
||||
|
||||
def imprint(self, item_id, data, all_items=None, items_meta=None):
|
||||
"""
|
||||
Save item metadata to Label field of metadata of active document
|
||||
Args:
|
||||
item_id (int|str): id of FootageItem or instance_id for workfiles
|
||||
data(string): json representation for single layer
|
||||
all_items (list of item): for performance, could be
|
||||
injected for usage in loop, if not, single call will be
|
||||
triggered
|
||||
items_meta(string): json representation from Headline
|
||||
(for performance - provide only if imprint is in
|
||||
loop - value should be same)
|
||||
Returns: None
|
||||
"""
|
||||
if not items_meta:
|
||||
items_meta = self.get_metadata()
|
||||
|
||||
result_meta = []
|
||||
# fix existing
|
||||
is_new = True
|
||||
|
||||
for item_meta in items_meta:
|
||||
if ((item_meta.get('members') and
|
||||
str(item_id) == str(item_meta.get('members')[0])) or
|
||||
item_meta.get("instance_id") == item_id):
|
||||
is_new = False
|
||||
if data:
|
||||
item_meta.update(data)
|
||||
result_meta.append(item_meta)
|
||||
else:
|
||||
result_meta.append(item_meta)
|
||||
|
||||
if is_new:
|
||||
result_meta.append(data)
|
||||
|
||||
# Ensure only valid ids are stored.
|
||||
if not all_items:
|
||||
# loaders create FootageItem now
|
||||
all_items = self.get_items(comps=True,
|
||||
folders=True,
|
||||
footages=True)
|
||||
item_ids = [int(item.id) for item in all_items]
|
||||
cleaned_data = []
|
||||
for meta in result_meta:
|
||||
# do not add instance with nonexistent item id
|
||||
if meta.get("members"):
|
||||
if int(meta["members"][0]) not in item_ids:
|
||||
continue
|
||||
|
||||
cleaned_data.append(meta)
|
||||
|
||||
payload = json.dumps(cleaned_data, indent=4)
|
||||
|
||||
res = self.websocketserver.call(self.client.call
|
||||
('AfterEffects.imprint',
|
||||
payload=payload))
|
||||
return self._handle_return(res)
|
||||
|
||||
def get_active_document_full_name(self):
|
||||
"""
|
||||
Returns absolute path of active document via ws call
|
||||
Returns(string): file name
|
||||
"""
|
||||
res = self.websocketserver.call(self.client.call(
|
||||
'AfterEffects.get_active_document_full_name'))
|
||||
|
||||
return self._handle_return(res)
|
||||
|
||||
def get_active_document_name(self):
|
||||
"""
|
||||
Returns just a name of active document via ws call
|
||||
Returns(string): file name
|
||||
"""
|
||||
res = self.websocketserver.call(self.client.call(
|
||||
'AfterEffects.get_active_document_name'))
|
||||
|
||||
return self._handle_return(res)
|
||||
|
||||
def get_items(self, comps, folders=False, footages=False):
|
||||
"""
|
||||
Get all items from Project panel according to arguments.
|
||||
There are multiple different types:
|
||||
CompItem (could have multiple layers - source for Creator,
|
||||
will be rendered)
|
||||
FolderItem (collection type, currently used for Background
|
||||
loading)
|
||||
FootageItem (imported file - created by Loader)
|
||||
Args:
|
||||
comps (bool): return CompItems
|
||||
folders (bool): return FolderItem
|
||||
footages (bool): return FootageItem
|
||||
|
||||
Returns:
|
||||
(list) of namedtuples
|
||||
"""
|
||||
res = self.websocketserver.call(
|
||||
self.client.call('AfterEffects.get_items',
|
||||
comps=comps,
|
||||
folders=folders,
|
||||
footages=footages)
|
||||
)
|
||||
return self._to_records(self._handle_return(res))
|
||||
|
||||
def select_items(self, items):
|
||||
"""
|
||||
Select items in Project list
|
||||
Args:
|
||||
items (list): of int item ids
|
||||
"""
|
||||
self.websocketserver.call(
|
||||
self.client.call('AfterEffects.select_items', items=items))
|
||||
|
||||
|
||||
def get_selected_items(self, comps, folders=False, footages=False):
|
||||
"""
|
||||
Same as get_items but using selected items only
|
||||
Args:
|
||||
comps (bool): return CompItems
|
||||
folders (bool): return FolderItem
|
||||
footages (bool): return FootageItem
|
||||
|
||||
Returns:
|
||||
(list) of namedtuples
|
||||
|
||||
"""
|
||||
res = self.websocketserver.call(self.client.call
|
||||
('AfterEffects.get_selected_items',
|
||||
comps=comps,
|
||||
folders=folders,
|
||||
footages=footages)
|
||||
)
|
||||
return self._to_records(self._handle_return(res))
|
||||
|
||||
def add_item(self, name, item_type):
|
||||
"""
|
||||
Adds either composition or folder to project item list.
|
||||
|
||||
Args:
|
||||
name (str)
|
||||
item_type (str): COMP|FOLDER
|
||||
"""
|
||||
res = self.websocketserver.call(self.client.call
|
||||
('AfterEffects.add_item',
|
||||
name=name,
|
||||
item_type=item_type))
|
||||
|
||||
return self._handle_return(res)
|
||||
|
||||
def get_item(self, item_id):
|
||||
"""
|
||||
Returns metadata for particular 'item_id' or None
|
||||
|
||||
Args:
|
||||
item_id (int, or string)
|
||||
"""
|
||||
for item in self.get_items(True, True, True):
|
||||
if str(item.id) == str(item_id):
|
||||
return item
|
||||
|
||||
return None
|
||||
|
||||
def import_file(self, path, item_name, import_options=None):
|
||||
"""
|
||||
Imports file as a FootageItem. Used in Loader
|
||||
Args:
|
||||
path (string): absolute path for asset file
|
||||
item_name (string): label for created FootageItem
|
||||
import_options (dict): different files (img vs psd) need different
|
||||
config
|
||||
|
||||
"""
|
||||
res = self.websocketserver.call(
|
||||
self.client.call('AfterEffects.import_file',
|
||||
path=path,
|
||||
item_name=item_name,
|
||||
import_options=import_options)
|
||||
)
|
||||
records = self._to_records(self._handle_return(res))
|
||||
if records:
|
||||
return records.pop()
|
||||
|
||||
def replace_item(self, item_id, path, item_name):
|
||||
""" Replace FootageItem with new file
|
||||
|
||||
Args:
|
||||
item_id (int):
|
||||
path (string):absolute path
|
||||
item_name (string): label on item in Project list
|
||||
|
||||
"""
|
||||
res = self.websocketserver.call(self.client.call
|
||||
('AfterEffects.replace_item',
|
||||
item_id=item_id,
|
||||
path=path, item_name=item_name))
|
||||
|
||||
return self._handle_return(res)
|
||||
|
||||
def rename_item(self, item_id, item_name):
|
||||
""" Replace item with item_name
|
||||
|
||||
Args:
|
||||
item_id (int):
|
||||
item_name (string): label on item in Project list
|
||||
|
||||
"""
|
||||
res = self.websocketserver.call(self.client.call
|
||||
('AfterEffects.rename_item',
|
||||
item_id=item_id,
|
||||
item_name=item_name))
|
||||
|
||||
return self._handle_return(res)
|
||||
|
||||
def delete_item(self, item_id):
|
||||
""" Deletes *Item in a file
|
||||
Args:
|
||||
item_id (int):
|
||||
|
||||
"""
|
||||
res = self.websocketserver.call(self.client.call
|
||||
('AfterEffects.delete_item',
|
||||
item_id=item_id))
|
||||
|
||||
return self._handle_return(res)
|
||||
|
||||
def remove_instance(self, instance_id, metadata=None):
|
||||
"""
|
||||
Removes instance with 'instance_id' from file's metadata and
|
||||
saves them.
|
||||
|
||||
Keep matching item in file though.
|
||||
|
||||
Args:
|
||||
instance_id(string): instance id
|
||||
"""
|
||||
cleaned_data = []
|
||||
|
||||
if metadata is None:
|
||||
metadata = self.get_metadata()
|
||||
|
||||
for instance in metadata:
|
||||
inst_id = instance.get("instance_id") or instance.get("uuid")
|
||||
if inst_id != instance_id:
|
||||
cleaned_data.append(instance)
|
||||
|
||||
payload = json.dumps(cleaned_data, indent=4)
|
||||
res = self.websocketserver.call(self.client.call
|
||||
('AfterEffects.imprint',
|
||||
payload=payload))
|
||||
|
||||
return self._handle_return(res)
|
||||
|
||||
def is_saved(self):
|
||||
# TODO
|
||||
return True
|
||||
|
||||
def set_label_color(self, item_id, color_idx):
|
||||
"""
|
||||
Used for highlight additional information in Project panel.
|
||||
Green color is loaded asset, blue is created asset
|
||||
Args:
|
||||
item_id (int):
|
||||
color_idx (int): 0-16 Label colors from AE Project view
|
||||
"""
|
||||
res = self.websocketserver.call(self.client.call
|
||||
('AfterEffects.set_label_color',
|
||||
item_id=item_id,
|
||||
color_idx=color_idx))
|
||||
|
||||
return self._handle_return(res)
|
||||
|
||||
def get_comp_properties(self, comp_id):
|
||||
""" Get composition information for render purposes
|
||||
|
||||
Returns startFrame, frameDuration, fps, width, height.
|
||||
|
||||
Args:
|
||||
comp_id (int):
|
||||
|
||||
Returns:
|
||||
(AEItem)
|
||||
|
||||
"""
|
||||
res = self.websocketserver.call(self.client.call
|
||||
('AfterEffects.get_comp_properties',
|
||||
item_id=comp_id
|
||||
))
|
||||
|
||||
records = self._to_records(self._handle_return(res))
|
||||
if records:
|
||||
return records.pop()
|
||||
|
||||
def set_comp_properties(self, comp_id, start, duration, frame_rate,
|
||||
width, height):
|
||||
"""
|
||||
Set work area to predefined values (from Ftrack).
|
||||
Work area directs what gets rendered.
|
||||
Beware of rounding, AE expects seconds, not frames directly.
|
||||
|
||||
Args:
|
||||
comp_id (int):
|
||||
start (int): workAreaStart in frames
|
||||
duration (int): in frames
|
||||
frame_rate (float): frames per second
|
||||
width (int): resolution width
|
||||
height (int): resolution height
|
||||
"""
|
||||
res = self.websocketserver.call(self.client.call
|
||||
('AfterEffects.set_comp_properties',
|
||||
item_id=comp_id,
|
||||
start=start,
|
||||
duration=duration,
|
||||
frame_rate=frame_rate,
|
||||
width=width,
|
||||
height=height))
|
||||
return self._handle_return(res)
|
||||
|
||||
def save(self):
|
||||
"""
|
||||
Saves active document
|
||||
Returns: None
|
||||
"""
|
||||
res = self.websocketserver.call(self.client.call
|
||||
('AfterEffects.save'))
|
||||
|
||||
return self._handle_return(res)
|
||||
|
||||
def saveAs(self, project_path, as_copy):
|
||||
"""
|
||||
Saves active project to aep (copy) or png or jpg
|
||||
Args:
|
||||
project_path(string): full local path
|
||||
as_copy: <boolean>
|
||||
Returns: None
|
||||
"""
|
||||
res = self.websocketserver.call(self.client.call
|
||||
('AfterEffects.saveAs',
|
||||
image_path=project_path,
|
||||
as_copy=as_copy))
|
||||
|
||||
return self._handle_return(res)
|
||||
|
||||
def get_render_info(self, comp_id):
|
||||
""" Get render queue info for render purposes
|
||||
|
||||
Returns:
|
||||
(list) of (AEItem): with 'file_name' field
|
||||
"""
|
||||
res = self.websocketserver.call(self.client.call
|
||||
('AfterEffects.get_render_info',
|
||||
comp_id=comp_id))
|
||||
|
||||
records = self._to_records(self._handle_return(res))
|
||||
return records
|
||||
|
||||
def get_audio_url(self, item_id):
|
||||
""" Get audio layer absolute url for comp
|
||||
|
||||
Args:
|
||||
item_id (int): composition id
|
||||
Returns:
|
||||
(str): absolute path url
|
||||
"""
|
||||
res = self.websocketserver.call(self.client.call
|
||||
('AfterEffects.get_audio_url',
|
||||
item_id=item_id))
|
||||
|
||||
return self._handle_return(res)
|
||||
|
||||
def import_background(self, comp_id, comp_name, files):
|
||||
"""
|
||||
Imports backgrounds images to existing or new composition.
|
||||
|
||||
If comp_id is not provided, new composition is created, basic
|
||||
values (width, height, frameRatio) are taken from the first imported
|
||||
image.
|
||||
|
||||
All images from background json are imported as a FootageItem and
|
||||
separate layer is created for each of them under composition.
|
||||
|
||||
Order of imported 'files' is important.
|
||||
|
||||
Args:
|
||||
comp_id (int): id of existing composition (null if new)
|
||||
comp_name (str): used when new composition
|
||||
files (list): list of absolute paths to import and
|
||||
add as layers
|
||||
|
||||
Returns:
|
||||
(AEItem): object with id of created folder, all imported images
|
||||
"""
|
||||
res = self.websocketserver.call(self.client.call
|
||||
('AfterEffects.import_background',
|
||||
comp_id=comp_id,
|
||||
comp_name=comp_name,
|
||||
files=files))
|
||||
|
||||
records = self._to_records(self._handle_return(res))
|
||||
if records:
|
||||
return records.pop()
|
||||
|
||||
def reload_background(self, comp_id, comp_name, files):
|
||||
"""
|
||||
Reloads backgrounds images to existing composition.
|
||||
|
||||
It actually deletes complete folder with imported images and
|
||||
created composition for safety.
|
||||
|
||||
Args:
|
||||
comp_id (int): id of existing composition to be overwritten
|
||||
comp_name (str): new name of composition (could be same as old
|
||||
if version up only)
|
||||
files (list): list of absolute paths to import and
|
||||
add as layers
|
||||
Returns:
|
||||
(AEItem): object with id of created folder, all imported images
|
||||
"""
|
||||
res = self.websocketserver.call(self.client.call
|
||||
('AfterEffects.reload_background',
|
||||
comp_id=comp_id,
|
||||
comp_name=comp_name,
|
||||
files=files))
|
||||
|
||||
records = self._to_records(self._handle_return(res))
|
||||
if records:
|
||||
return records.pop()
|
||||
|
||||
def add_item_as_layer(self, comp_id, item_id):
|
||||
"""
|
||||
Adds already imported FootageItem ('item_id') as a new
|
||||
layer to composition ('comp_id').
|
||||
|
||||
Args:
|
||||
comp_id (int): id of target composition
|
||||
item_id (int): FootageItem.id
|
||||
comp already found previously
|
||||
"""
|
||||
res = self.websocketserver.call(self.client.call
|
||||
('AfterEffects.add_item_as_layer',
|
||||
comp_id=comp_id,
|
||||
item_id=item_id))
|
||||
|
||||
records = self._to_records(self._handle_return(res))
|
||||
if records:
|
||||
return records.pop()
|
||||
|
||||
def add_item_instead_placeholder(self, placeholder_item_id, item_id):
|
||||
"""
|
||||
Adds item_id to layers where placeholder_item_id is present.
|
||||
|
||||
1 placeholder could result in multiple loaded containers (eg items)
|
||||
|
||||
Args:
|
||||
placeholder_item_id (int): id of placeholder item
|
||||
item_id (int): loaded FootageItem id
|
||||
"""
|
||||
res = self.websocketserver.call(self.client.call
|
||||
('AfterEffects.add_item_instead_placeholder', # noqa
|
||||
placeholder_item_id=placeholder_item_id, # noqa
|
||||
item_id=item_id))
|
||||
|
||||
return self._handle_return(res)
|
||||
|
||||
def add_placeholder(self, name, width, height, fps, duration):
|
||||
"""
|
||||
Adds new FootageItem as a placeholder for workfile builder
|
||||
|
||||
Placeholder requires width etc, currently probably only hardcoded
|
||||
values.
|
||||
|
||||
Args:
|
||||
name (str)
|
||||
width (int)
|
||||
height (int)
|
||||
fps (float)
|
||||
duration (int)
|
||||
"""
|
||||
res = self.websocketserver.call(self.client.call
|
||||
('AfterEffects.add_placeholder',
|
||||
name=name,
|
||||
width=width,
|
||||
height=height,
|
||||
fps=fps,
|
||||
duration=duration))
|
||||
|
||||
return self._handle_return(res)
|
||||
|
||||
def render(self, folder_url, comp_id):
|
||||
"""
|
||||
Render all renderqueueitem to 'folder_url'
|
||||
Args:
|
||||
folder_url(string): local folder path for collecting
|
||||
Returns: None
|
||||
"""
|
||||
res = self.websocketserver.call(self.client.call
|
||||
('AfterEffects.render',
|
||||
folder_url=folder_url,
|
||||
comp_id=comp_id))
|
||||
return self._handle_return(res)
|
||||
|
||||
def get_extension_version(self):
|
||||
"""Returns version number of installed extension."""
|
||||
res = self.websocketserver.call(self.client.call(
|
||||
'AfterEffects.get_extension_version'))
|
||||
|
||||
return self._handle_return(res)
|
||||
|
||||
def get_app_version(self):
|
||||
"""Returns version number of installed application (17.5...)."""
|
||||
res = self.websocketserver.call(self.client.call(
|
||||
'AfterEffects.get_app_version'))
|
||||
|
||||
return self._handle_return(res)
|
||||
|
||||
def close(self):
|
||||
res = self.websocketserver.call(self.client.call('AfterEffects.close'))
|
||||
|
||||
return self._handle_return(res)
|
||||
|
||||
def print_msg(self, msg):
|
||||
"""Triggers Javascript alert dialog."""
|
||||
self.websocketserver.call(self.client.call
|
||||
('AfterEffects.print_msg',
|
||||
msg=msg))
|
||||
|
||||
def _handle_return(self, res):
|
||||
"""Wraps return, throws ValueError if 'error' key is present."""
|
||||
if res and isinstance(res, str) and res != "undefined":
|
||||
try:
|
||||
parsed = json.loads(res)
|
||||
except json.decoder.JSONDecodeError:
|
||||
raise ValueError("Received broken JSON {}".format(res))
|
||||
|
||||
if not parsed: # empty list
|
||||
return parsed
|
||||
|
||||
first_item = parsed
|
||||
if isinstance(parsed, list):
|
||||
first_item = parsed[0]
|
||||
|
||||
if first_item:
|
||||
if first_item.get("error"):
|
||||
raise ValueError(first_item["error"])
|
||||
# singular values (file name etc)
|
||||
if first_item.get("result") is not None:
|
||||
return first_item["result"]
|
||||
return parsed # parsed
|
||||
return res
|
||||
|
||||
def _to_records(self, payload):
|
||||
"""
|
||||
Converts string json representation into a list of AEItem objects for
|
||||
dot notation access to work.
|
||||
Returns: <list of AEItem>
|
||||
payload(dict): - dictionary from json representation, expected to
|
||||
come from _handle_return
|
||||
"""
|
||||
if not payload:
|
||||
return []
|
||||
|
||||
if isinstance(payload, str): # safety fallback
|
||||
try:
|
||||
payload = json.loads(payload)
|
||||
except json.decoder.JSONDecodeError:
|
||||
raise ValueError("Received broken JSON {}".format(payload))
|
||||
|
||||
if isinstance(payload, dict):
|
||||
payload = [payload]
|
||||
|
||||
ret = []
|
||||
# convert to AEItem to use dot notation
|
||||
for d in payload:
|
||||
if not d:
|
||||
continue
|
||||
# currently implemented and expected fields
|
||||
item = AEItem(d.get('id'),
|
||||
d.get('name'),
|
||||
d.get('type'),
|
||||
d.get('members'),
|
||||
d.get('frameStart'),
|
||||
d.get('framesDuration'),
|
||||
d.get('frameRate'),
|
||||
d.get('file_name'),
|
||||
d.get("instance_id"),
|
||||
d.get("width"),
|
||||
d.get("height"),
|
||||
d.get("is_placeholder"),
|
||||
d.get("uuid"),
|
||||
d.get("path"),
|
||||
d.get("containing_comps"),)
|
||||
|
||||
ret.append(item)
|
||||
return ret
|
||||
|
||||
|
||||
def get_stub():
|
||||
"""
|
||||
Convenience function to get server RPC stub to call methods directed
|
||||
for host (AfterEffects).
|
||||
It expects already created connection, started from client.
|
||||
Currently created when panel is opened (AE: Window>Extensions>Avalon)
|
||||
:return: <AfterEffectsServerStub> where functions could be called from
|
||||
"""
|
||||
ae_stub = AfterEffectsServerStub()
|
||||
if not ae_stub.client:
|
||||
raise ConnectionNotEstablishedYet("Connection is not created yet")
|
||||
|
||||
return ae_stub
|
||||
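All stub methods above go through the same websocket round trip and the `_handle_return`/`_to_records` unwrapping. A short usage sketch; it assumes the AE extension has already connected to the websocket server (otherwise `get_stub()` raises `ConnectionNotEstablishedYet`):

```python
from ayon_core.hosts.aftereffects.api.ws_stub import (
    ConnectionNotEstablishedYet,
    get_stub,
)

try:
    stub = get_stub()
except ConnectionNotEstablishedYet:
    stub = None

if stub:
    for comp in stub.get_items(comps=True):
        props = stub.get_comp_properties(comp.id)
        print(comp.name, props.width, props.height, props.frameRate)
```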
0
client/ayon_core/hosts/aftereffects/plugins/__init__.py
Normal file
|
|
@ -0,0 +1,244 @@
|
|||
import re
|
||||
|
||||
from ayon_core import resources
|
||||
from ayon_core.lib import BoolDef, UISeparatorDef
|
||||
from ayon_core.hosts.aftereffects import api
|
||||
from ayon_core.pipeline import (
|
||||
Creator,
|
||||
CreatedInstance,
|
||||
CreatorError
|
||||
)
|
||||
from ayon_core.hosts.aftereffects.api.pipeline import cache_and_get_instances
|
||||
from ayon_core.hosts.aftereffects.api.lib import set_settings
|
||||
from ayon_core.lib import prepare_template_data
|
||||
from ayon_core.pipeline.create import SUBSET_NAME_ALLOWED_SYMBOLS
|
||||
|
||||
|
||||
class RenderCreator(Creator):
|
||||
"""Creates 'render' instance for publishing.
|
||||
|
||||
Result of 'render' instance is video or sequence of images for particular
|
||||
composition based on configuration in its RenderQueue.
|
||||
"""
|
||||
identifier = "render"
|
||||
label = "Render"
|
||||
family = "render"
|
||||
description = "Render creator"
|
||||
|
||||
create_allow_context_change = True
|
||||
|
||||
# Settings
|
||||
mark_for_review = True
|
||||
force_setting_values = True
|
||||
|
||||
def create(self, subset_name_from_ui, data, pre_create_data):
|
||||
stub = api.get_stub() # only after After Effects is up
|
||||
|
||||
try:
|
||||
_ = stub.get_active_document_full_name()
|
||||
except ValueError:
|
||||
raise CreatorError(
|
||||
"Please save workfile via Workfile app first!"
|
||||
)
|
||||
|
||||
if pre_create_data.get("use_selection"):
|
||||
comps = stub.get_selected_items(
|
||||
comps=True, folders=False, footages=False
|
||||
)
|
||||
else:
|
||||
comps = stub.get_items(comps=True, folders=False, footages=False)
|
||||
|
||||
if not comps:
|
||||
raise CreatorError(
|
||||
"Nothing to create. Select composition in Project Bin if "
|
||||
"'Use selection' is toggled or create at least "
|
||||
"one composition."
|
||||
)
|
||||
use_composition_name = (pre_create_data.get("use_composition_name") or
|
||||
len(comps) > 1)
|
||||
for comp in comps:
|
||||
composition_name = re.sub(
|
||||
"[^{}]+".format(SUBSET_NAME_ALLOWED_SYMBOLS),
|
||||
"",
|
||||
comp.name
|
||||
)
|
||||
if use_composition_name:
|
||||
if "{composition}" not in subset_name_from_ui.lower():
|
||||
subset_name_from_ui += "{Composition}"
|
||||
|
||||
dynamic_fill = prepare_template_data({"composition":
|
||||
composition_name})
|
||||
subset_name = subset_name_from_ui.format(**dynamic_fill)
|
||||
data["composition_name"] = composition_name
|
||||
else:
|
||||
subset_name = subset_name_from_ui
|
||||
subset_name = re.sub(r"\{composition\}", '', subset_name,
|
||||
flags=re.IGNORECASE)
|
||||
|
||||
for inst in self.create_context.instances:
|
||||
if subset_name == inst.subset_name:
|
||||
raise CreatorError("{} already exists".format(
|
||||
inst.subset_name))
|
||||
|
||||
data["members"] = [comp.id]
|
||||
data["orig_comp_name"] = composition_name
|
||||
|
||||
new_instance = CreatedInstance(self.family, subset_name, data,
|
||||
self)
|
||||
if "farm" in pre_create_data:
|
||||
use_farm = pre_create_data["farm"]
|
||||
new_instance.creator_attributes["farm"] = use_farm
|
||||
|
||||
review = pre_create_data["mark_for_review"]
|
||||
new_instance. creator_attributes["mark_for_review"] = review
|
||||
|
||||
api.get_stub().imprint(new_instance.id,
|
||||
new_instance.data_to_store())
|
||||
self._add_instance_to_context(new_instance)
|
||||
|
||||
stub.rename_item(comp.id, subset_name)
|
||||
if self.force_setting_values:
|
||||
set_settings(True, True, [comp.id], print_msg=False)
|
||||
|
||||
def get_pre_create_attr_defs(self):
|
||||
output = [
|
||||
BoolDef("use_selection",
|
||||
tooltip="Composition for publishable instance should be "
|
||||
"selected by default.",
|
||||
default=True, label="Use selection"),
|
||||
BoolDef("use_composition_name",
|
||||
label="Use composition name in subset"),
|
||||
UISeparatorDef(),
|
||||
BoolDef("farm", label="Render on farm"),
|
||||
BoolDef(
|
||||
"mark_for_review",
|
||||
label="Review",
|
||||
default=self.mark_for_review
|
||||
)
|
||||
]
|
||||
return output
|
||||
|
||||
def get_instance_attr_defs(self):
|
||||
return [
|
||||
BoolDef("farm", label="Render on farm"),
|
||||
BoolDef(
|
||||
"mark_for_review",
|
||||
label="Review",
|
||||
default=False
|
||||
)
|
||||
]
|
||||
|
||||
def get_icon(self):
|
||||
return resources.get_openpype_splash_filepath()
|
||||
|
||||
def collect_instances(self):
|
||||
for instance_data in cache_and_get_instances(self):
|
||||
# legacy instances have family=='render' or 'renderLocal', use them
|
||||
creator_id = (instance_data.get("creator_identifier") or
|
||||
instance_data.get("family", '').replace("Local", ''))
|
||||
if creator_id == self.identifier:
|
||||
instance_data = self._handle_legacy(instance_data)
|
||||
instance = CreatedInstance.from_existing(
|
||||
instance_data, self
|
||||
)
|
||||
self._add_instance_to_context(instance)
|
||||
|
||||
def update_instances(self, update_list):
|
||||
for created_inst, _changes in update_list:
|
||||
api.get_stub().imprint(created_inst.get("instance_id"),
|
||||
created_inst.data_to_store())
|
||||
subset_change = _changes.get("subset")
|
||||
if subset_change:
|
||||
api.get_stub().rename_item(created_inst.data["members"][0],
|
||||
subset_change.new_value)
|
||||
|
||||
def remove_instances(self, instances):
|
||||
"""Removes metadata and renames to original comp name if available."""
|
||||
for instance in instances:
|
||||
self._remove_instance_from_context(instance)
|
||||
self.host.remove_instance(instance)
|
||||
|
||||
comp_id = instance.data["members"][0]
|
||||
comp = api.get_stub().get_item(comp_id)
|
||||
orig_comp_name = instance.data.get("orig_comp_name")
|
||||
if comp:
|
||||
if orig_comp_name:
|
||||
new_comp_name = orig_comp_name
|
||||
else:
|
||||
new_comp_name = "dummyCompName"
|
||||
api.get_stub().rename_item(comp_id,
|
||||
new_comp_name)
|
||||
|
||||
def apply_settings(self, project_settings):
|
||||
plugin_settings = (
|
||||
project_settings["aftereffects"]["create"]["RenderCreator"]
|
||||
)
|
||||
|
||||
self.mark_for_review = plugin_settings["mark_for_review"]
|
||||
self.default_variants = plugin_settings.get(
|
||||
"default_variants",
|
||||
plugin_settings.get("defaults") or []
|
||||
)
|
||||
|
||||
def get_detail_description(self):
|
||||
return """Creator for Render instances
|
||||
|
||||
Main publishable item in AfterEffects will be of `render` family.
|
||||
Result of this item (instance) is picture sequence or video that could
|
||||
be a final delivery product or loaded and used in another DCCs.
|
||||
|
||||
Select single composition and create instance of 'render' family or
|
||||
turn off 'Use selection' to create instance for all compositions.
|
||||
|
||||
'Use composition name in subset' allows to explicitly add composition
|
||||
name into created subset name.
|
||||
|
||||
Position of composition name could be set in
|
||||
`project_settings/global/tools/creator/subset_name_profiles` with some
|
||||
form of '{composition}' placeholder.
|
||||
|
||||
Composition name will be used implicitly if multiple composition should
|
||||
be handled at same time.
|
||||
|
||||
If {composition} placeholder is not us 'subset_name_profiles'
|
||||
composition name will be capitalized and set at the end of subset name
|
||||
if necessary.
|
||||
|
||||
If composition name should be used, it will be cleaned up of characters
|
||||
that would cause an issue in published file names.
|
||||
"""
|
||||
|
||||
def get_dynamic_data(self, variant, task_name, asset_doc,
|
||||
project_name, host_name, instance):
|
||||
dynamic_data = {}
|
||||
if instance is not None:
|
||||
composition_name = instance.get("composition_name")
|
||||
if composition_name:
|
||||
dynamic_data["composition"] = composition_name
|
||||
else:
|
||||
dynamic_data["composition"] = "{composition}"
|
||||
|
||||
return dynamic_data
|
||||
|
||||
def _handle_legacy(self, instance_data):
|
||||
"""Converts old instances to new format."""
|
||||
if not instance_data.get("members"):
|
||||
instance_data["members"] = [instance_data.get("uuid")]
|
||||
|
||||
if instance_data.get("uuid"):
|
||||
# uuid not needed, replaced with unique instance_id
|
||||
api.get_stub().remove_instance(instance_data.get("uuid"))
|
||||
instance_data.pop("uuid")
|
||||
|
||||
if not instance_data.get("task"):
|
||||
instance_data["task"] = self.create_context.get_current_task_name()
|
||||
|
||||
if not instance_data.get("creator_attributes"):
|
||||
is_old_farm = instance_data["family"] != "renderLocal"
|
||||
instance_data["creator_attributes"] = {"farm": is_old_farm}
|
||||
instance_data["family"] = self.family
|
||||
|
||||
if instance_data["creator_attributes"].get("mark_for_review") is None:
|
||||
instance_data["creator_attributes"]["mark_for_review"] = True
|
||||
|
||||
return instance_data
|
||||
|
|
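# --- Worked example (editor's addition): how the '{composition}' placeholder
# in 'create' above resolves. 'prepare_template_data' is assumed to return
# fill keys in several capitalizations (e.g. "composition"/"Composition"),
# which is why "{Composition}" can be appended to the subset name. The
# composition name "mainComp" is hypothetical.
from ayon_core.lib import prepare_template_data

fill = prepare_template_data({"composition": "mainComp"})
subset_name = "render{Composition}".format(**fill)
# -> e.g. "renderMainComp" (capitalized variant of the composition name)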
@ -0,0 +1,85 @@
import ayon_core.hosts.aftereffects.api as api
from ayon_core.client import get_asset_by_name
from ayon_core.pipeline import (
    AutoCreator,
    CreatedInstance
)
from ayon_core.hosts.aftereffects.api.pipeline import cache_and_get_instances


class AEWorkfileCreator(AutoCreator):
    identifier = "workfile"
    family = "workfile"

    default_variant = "Main"

    def get_instance_attr_defs(self):
        return []

    def collect_instances(self):
        for instance_data in cache_and_get_instances(self):
            creator_id = instance_data.get("creator_identifier")
            if creator_id == self.identifier:
                subset_name = instance_data["subset"]
                instance = CreatedInstance(
                    self.family, subset_name, instance_data, self
                )
                self._add_instance_to_context(instance)

    def update_instances(self, update_list):
        # nothing to change on workfiles
        pass

    def create(self, options=None):
        existing_instance = None
        for instance in self.create_context.instances:
            if instance.family == self.family:
                existing_instance = instance
                break

        context = self.create_context
        project_name = context.get_current_project_name()
        asset_name = context.get_current_asset_name()
        task_name = context.get_current_task_name()
        host_name = context.host_name

        existing_asset_name = None
        if existing_instance is not None:
            existing_asset_name = existing_instance.get("folderPath")

        if existing_instance is None:
            asset_doc = get_asset_by_name(project_name, asset_name)
            subset_name = self.get_subset_name(
                self.default_variant, task_name, asset_doc,
                project_name, host_name
            )
            data = {
                "folderPath": asset_name,
                "task": task_name,
                "variant": self.default_variant,
            }
            data.update(self.get_dynamic_data(
                self.default_variant, task_name, asset_doc,
                project_name, host_name, None
            ))

            new_instance = CreatedInstance(
                self.family, subset_name, data, self
            )
            self._add_instance_to_context(new_instance)

            api.get_stub().imprint(new_instance.get("instance_id"),
                                   new_instance.data_to_store())

        elif (
            existing_asset_name != asset_name
            or existing_instance["task"] != task_name
        ):
            asset_doc = get_asset_by_name(project_name, asset_name)
            subset_name = self.get_subset_name(
                self.default_variant, task_name, asset_doc,
                project_name, host_name
            )
            existing_instance["folderPath"] = asset_name
            existing_instance["task"] = task_name
            existing_instance["subset"] = subset_name
@ -0,0 +1,108 @@
import re

from ayon_core.pipeline import get_representation_path
from ayon_core.hosts.aftereffects import api

from ayon_core.hosts.aftereffects.api.lib import (
    get_background_layers,
    get_unique_layer_name,
)


class BackgroundLoader(api.AfterEffectsLoader):
    """
    Load images from the Background family.

    Creates a separate folder for each background with all images imported
    from the background json, AND an automatically created composition with
    layers, one layer per image.

    For each load a container is created and stored in the project (.aep)
    metadata.
    """
    label = "Load JSON Background"
    families = ["background"]
    representations = ["json"]

    def load(self, context, name=None, namespace=None, data=None):
        stub = self.get_stub()
        items = stub.get_items(comps=True)
        existing_items = [layer.name.replace(stub.LOADED_ICON, '')
                          for layer in items]

        comp_name = get_unique_layer_name(
            existing_items,
            "{}_{}".format(context["asset"]["name"], name))

        path = self.filepath_from_context(context)
        layers = get_background_layers(path)
        if not layers:
            raise ValueError("No layers found in {}".format(path))

        comp = stub.import_background(None, stub.LOADED_ICON + comp_name,
                                      layers)

        if not comp:
            raise ValueError("Import background failed. "
                             "Please contact support")

        self[:] = [comp]
        namespace = namespace or comp_name

        return api.containerise(
            name,
            namespace,
            comp,
            context,
            self.__class__.__name__
        )

    def update(self, container, representation):
        """Switch asset or change version."""
        stub = self.get_stub()
        context = representation.get("context", {})
        _ = container.pop("layer")

        # without iterator number (_001, _002...)
        namespace_from_container = re.sub(r'_\d{3}$', '',
                                          container["namespace"])
        comp_name = "{}_{}".format(context["asset"], context["subset"])

        # switching assets
        if namespace_from_container != comp_name:
            items = stub.get_items(comps=True)
            existing_items = [layer.name for layer in items]
            comp_name = get_unique_layer_name(
                existing_items,
                "{}_{}".format(context["asset"], context["subset"]))
        else:  # switching version - keep same name
            comp_name = container["namespace"]

        path = get_representation_path(representation)

        layers = get_background_layers(path)
        comp = stub.reload_background(container["members"][1],
                                      stub.LOADED_ICON + comp_name,
                                      layers)

        # update container
        container["representation"] = str(representation["_id"])
        container["name"] = context["subset"]
        container["namespace"] = comp_name
        container["members"] = comp.members

        stub.imprint(comp.id, container)

    def remove(self, container):
        """
        Removes element from scene: deletes the layer and removes it from
        the file metadata.

        Args:
            container (dict): container to be removed - used to get layer_id
        """
        stub = self.get_stub()
        layer = container.pop("layer")
        stub.imprint(layer.id, {})
        stub.delete_item(layer.id)

    def switch(self, container, representation):
        self.update(container, representation)
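# --- Worked example (editor's addition): the namespace trimming used in
# 'update' above (and in FileLoader below). Loaded containers get an
# "_001"-style suffix for uniqueness; stripping it recovers the
# "{asset}_{subset}" base name. The sample value is hypothetical.
import re

namespace = "Robot_imageMain_001"
base = re.sub(r'_\d{3}$', '', namespace)
assert base == "Robot_imageMain"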
107
client/ayon_core/hosts/aftereffects/plugins/load/load_file.py
Normal file
@ -0,0 +1,107 @@
import re

from ayon_core.pipeline import get_representation_path
from ayon_core.hosts.aftereffects import api
from ayon_core.hosts.aftereffects.api.lib import get_unique_layer_name


class FileLoader(api.AfterEffectsLoader):
    """Load images

    Stores the imported asset in a container named after the asset.
    """
    label = "Load file"

    families = ["image",
                "plate",
                "render",
                "prerender",
                "review",
                "audio"]
    representations = ["*"]

    def load(self, context, name=None, namespace=None, data=None):
        stub = self.get_stub()
        layers = stub.get_items(comps=True, folders=True, footages=True)
        existing_layers = [layer.name for layer in layers]
        comp_name = get_unique_layer_name(
            existing_layers, "{}_{}".format(context["asset"]["name"], name))

        import_options = {}

        path = self.filepath_from_context(context)

        if len(context["representation"]["files"]) > 1:
            import_options['sequence'] = True

        if not path:
            repr_id = context["representation"]["_id"]
            self.log.warning(
                "Representation id `{}` is failing to load".format(repr_id))
            return

        path = path.replace("\\", "/")
        if '.psd' in path:
            import_options['ImportAsType'] = 'ImportAsType.COMP'

        comp = stub.import_file(path, stub.LOADED_ICON + comp_name,
                                import_options)

        if not comp:
            self.log.warning(
                "Representation `{}` is failing to load".format(path))
            self.log.warning("Check host app for alert error.")
            return

        self[:] = [comp]
        namespace = namespace or comp_name

        return api.containerise(
            name,
            namespace,
            comp,
            context,
            self.__class__.__name__
        )

    def update(self, container, representation):
        """Switch asset or change version."""
        stub = self.get_stub()
        layer = container.pop("layer")

        context = representation.get("context", {})

        # without iterator number (_001, _002...)
        namespace_from_container = re.sub(r'_\d{3}$', '',
                                          container["namespace"])
        layer_name = "{}_{}".format(context["asset"], context["subset"])
        # switching assets
        if namespace_from_container != layer_name:
            layers = stub.get_items(comps=True)
            existing_layers = [layer.name for layer in layers]
            layer_name = get_unique_layer_name(
                existing_layers,
                "{}_{}".format(context["asset"], context["subset"]))
        else:  # switching version - keep same name
            layer_name = container["namespace"]
        path = get_representation_path(representation)
        # with aftereffects.maintained_selection():  # TODO
        stub.replace_item(layer.id, path, stub.LOADED_ICON + layer_name)
        stub.imprint(
            layer.id, {"representation": str(representation["_id"]),
                       "name": context["subset"],
                       "namespace": layer_name}
        )

    def remove(self, container):
        """
        Removes element from scene: deletes the layer and removes it from
        the workfile metadata.

        Args:
            container (dict): container to be removed - used to get layer_id
        """
        stub = self.get_stub()
        layer = container.pop("layer")
        stub.imprint(layer.id, {})
        stub.delete_item(layer.id)

    def switch(self, container, representation):
        self.update(container, representation)
@ -0,0 +1,21 @@
import pyblish.api

from ayon_core.hosts.aftereffects.api import get_stub


class AddPublishHighlight(pyblish.api.InstancePlugin):
    """
    Revert the rendered comp name back and add the publish highlight icon.
    """

    label = "Add render highlight"
    order = pyblish.api.IntegratorOrder + 8.0
    hosts = ["aftereffects"]
    families = ["render.farm"]
    optional = True

    def process(self, instance):
        stub = get_stub()
        item = instance.data
        # comp name contains highlight icon
        stub.rename_item(item["comp_id"], item["comp_name"])
@ -0,0 +1,27 @@
# -*- coding: utf-8 -*-
"""Close AE after publish. For Webpublishing only."""
import pyblish.api

from ayon_core.hosts.aftereffects.api import get_stub


class CloseAE(pyblish.api.ContextPlugin):
    """Close AE after publish. For Webpublishing only."""

    order = pyblish.api.IntegratorOrder + 14
    label = "Close AE"
    optional = True
    active = True

    hosts = ["aftereffects"]
    targets = ["automated"]

    def process(self, context):
        self.log.info("CloseAE")

        stub = get_stub()
        self.log.info("Shutting down AE")
        stub.save()
        stub.close()
        self.log.info("AE closed")
@ -0,0 +1,27 @@
import os

import pyblish.api

from ayon_core.hosts.aftereffects.api import get_stub


class CollectAudio(pyblish.api.ContextPlugin):
    """Inject audio file url for rendered composition into context.

    Needs to run AFTER 'collect_render'. Uses the collected comp_id to
    check if there is an AVLayer in this composition.
    """

    order = pyblish.api.CollectorOrder + 0.499
    label = "Collect Audio"
    hosts = ["aftereffects"]

    def process(self, context):
        for instance in context:
            if 'render.farm' in instance.data.get("families", []):
                comp_id = instance.data["comp_id"]
                if not comp_id:
                    self.log.debug("No comp_id filled in instance")
                    continue
                context.data["audioFile"] = os.path.normpath(
                    get_stub().get_audio_url(comp_id)
                ).replace("\\", "/")
@ -0,0 +1,18 @@
import os

import pyblish.api

from ayon_core.hosts.aftereffects.api import get_stub


class CollectCurrentFile(pyblish.api.ContextPlugin):
    """Inject the current working file into context."""

    order = pyblish.api.CollectorOrder - 0.49
    label = "Current File"
    hosts = ["aftereffects"]

    def process(self, context):
        context.data["currentFile"] = os.path.normpath(
            get_stub().get_active_document_full_name()
        ).replace("\\", "/")
@ -0,0 +1,58 @@
import os
import re
import pyblish.api

from ayon_core.hosts.aftereffects.api import (
    get_stub,
    get_extension_manifest_path
)


class CollectExtensionVersion(pyblish.api.ContextPlugin):
    """Pulls and compares the version of the installed extension.

    It is recommended to use the same extension as provided with the
    Openpype code.

    Please use Anastasiy’s Extension Manager or ZXPInstaller to update the
    extension in case of an error.

    You can locate extension.zxp in your installed Openpype code in
    `repos/avalon-core/avalon/aftereffects`
    """
    # This technically should be a validator, but other collectors might be
    # impacted by usage of an obsolete extension, so a collector that runs
    # first was chosen
    order = pyblish.api.CollectorOrder - 0.5
    label = "Collect extension version"
    hosts = ["aftereffects"]

    optional = True
    active = True

    def process(self, context):
        installed_version = get_stub().get_extension_version()

        if not installed_version:
            raise ValueError("Unknown version, probably old extension")

        manifest_url = get_extension_manifest_path()

        if not os.path.exists(manifest_url):
            self.log.debug("Unable to locate extension manifest, not checking")
            return

        expected_version = None
        with open(manifest_url) as fp:
            content = fp.read()
            found = re.findall(r'(ExtensionBundleVersion=")([0-9\.]+)(")',
                               content)
            if found:
                expected_version = found[0][1]

        if expected_version != installed_version:
            msg = (
                "Expected version '{}' found '{}'\n Please update"
                " your installed extension, it might not work properly."
            ).format(expected_version, installed_version)

            raise ValueError(msg)
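# --- Worked example (editor's addition): what the manifest regex above
# matches. The manifest snippet is hypothetical.
import re

content = '<ExtensionManifest ExtensionBundleVersion="1.0.2">'
found = re.findall(r'(ExtensionBundleVersion=")([0-9\.]+)(")', content)
# re.findall with groups returns tuples; the version is the middle group
assert found[0][1] == "1.0.2"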
@ -0,0 +1,225 @@
import os
import re
import tempfile
import attr

import pyblish.api

from ayon_core.settings import get_project_settings
from ayon_core.pipeline import publish
from ayon_core.pipeline.publish import RenderInstance

from ayon_core.hosts.aftereffects.api import get_stub


@attr.s
class AERenderInstance(RenderInstance):
    # extend generic, composition name is needed
    comp_name = attr.ib(default=None)
    comp_id = attr.ib(default=None)
    fps = attr.ib(default=None)
    projectEntity = attr.ib(default=None)
    stagingDir = attr.ib(default=None)
    app_version = attr.ib(default=None)
    publish_attributes = attr.ib(default={})
    file_names = attr.ib(default=[])


class CollectAERender(publish.AbstractCollectRender):

    order = pyblish.api.CollectorOrder + 0.405
    label = "Collect After Effects Render Layers"
    hosts = ["aftereffects"]

    padding_width = 6
    rendered_extension = 'png'

    _stub = None

    @classmethod
    def get_stub(cls):
        if not cls._stub:
            cls._stub = get_stub()
        return cls._stub

    def get_instances(self, context):
        instances = []
        instances_to_remove = []

        app_version = CollectAERender.get_stub().get_app_version()
        app_version = app_version[0:4]

        current_file = context.data["currentFile"]
        version = context.data["version"]

        project_entity = context.data["projectEntity"]

        compositions = CollectAERender.get_stub().get_items(True)
        compositions_by_id = {item.id: item for item in compositions}
        for inst in context:
            if not inst.data.get("active", True):
                continue

            family = inst.data["family"]
            if family not in ["render", "renderLocal"]:  # legacy
                continue

            comp_id = int(inst.data["members"][0])

            comp_info = CollectAERender.get_stub().get_comp_properties(
                comp_id)

            if not comp_info:
                self.log.warning("Orphaned instance, deleting metadata")
                inst_id = inst.data.get("instance_id") or str(comp_id)
                CollectAERender.get_stub().remove_instance(inst_id)
                continue

            frame_start = comp_info.frameStart
            frame_end = round(comp_info.frameStart +
                              comp_info.framesDuration) - 1
            fps = comp_info.frameRate
            # TODO add resolution when supported by extension

            task_name = inst.data.get("task")  # legacy

            render_q = CollectAERender.get_stub().get_render_info(comp_id)
            if not render_q:
                raise ValueError("No file extension set in Render Queue")
            render_item = render_q[0]

            instance_families = inst.data.get("families", [])
            subset_name = inst.data["subset"]
            instance = AERenderInstance(
                family="render",
                families=instance_families,
                version=version,
                time="",
                source=current_file,
                label="{} - {}".format(subset_name, family),
                subset=subset_name,
                asset=inst.data["asset"],
                task=task_name,
                attachTo=False,
                setMembers='',
                publish=True,
                name=subset_name,
                resolutionWidth=render_item.width,
                resolutionHeight=render_item.height,
                pixelAspect=1,
                tileRendering=False,
                tilesX=0,
                tilesY=0,
                review="review" in instance_families,
                frameStart=frame_start,
                frameEnd=frame_end,
                frameStep=1,
                fps=fps,
                app_version=app_version,
                publish_attributes=inst.data.get("publish_attributes", {}),
                file_names=[item.file_name for item in render_q]
            )

            comp = compositions_by_id.get(comp_id)
            if not comp:
                raise ValueError("There is no composition for item {}".
                                 format(comp_id))
            instance.outputDir = self._get_output_dir(instance)
            instance.comp_name = comp.name
            instance.comp_id = comp_id

            is_local = "renderLocal" in inst.data["family"]  # legacy
            if inst.data.get("creator_attributes"):
                is_local = not inst.data["creator_attributes"].get("farm")
            if is_local:
                # for local renders
                instance = self._update_for_local(instance, project_entity)
            else:
                fam = "render.farm"
                if fam not in instance.families:
                    instance.families.append(fam)
                instance.renderer = "aerender"
                instance.farm = True  # to skip integrate
                if "review" in instance.families:
                    # to skip ExtractReview locally
                    instance.families.remove("review")

            instances.append(instance)
            instances_to_remove.append(inst)

        for instance in instances_to_remove:
            context.remove(instance)
        return instances

    def get_expected_files(self, render_instance):
        """
        Returns list of rendered files that should be created by
        Deadline. These are not published directly, they are the source
        for the later 'submit_publish_job'.

        Args:
            render_instance (RenderInstance): to pull anatomy and parts used
                in url

        Returns:
            (list) of absolute urls to rendered files
        """
        start = render_instance.frameStart
        end = render_instance.frameEnd

        base_dir = self._get_output_dir(render_instance)
        expected_files = []
        for file_name in render_instance.file_names:
            _, ext = os.path.splitext(os.path.basename(file_name))
            ext = ext.replace('.', '')
            version_str = "v{:03d}".format(render_instance.version)
            if "#" not in file_name:  # single frame (mov)
                path = os.path.join(base_dir, "{}_{}_{}.{}".format(
                    render_instance.asset,
                    render_instance.subset,
                    version_str,
                    ext
                ))
                expected_files.append(path)
            else:
                for frame in range(start, end + 1):
                    path = os.path.join(base_dir, "{}_{}_{}.{}.{}".format(
                        render_instance.asset,
                        render_instance.subset,
                        version_str,
                        str(frame).zfill(self.padding_width),
                        ext
                    ))
                    expected_files.append(path)
        return expected_files

    def _get_output_dir(self, render_instance):
        """
        Returns dir path of rendered files, used in submit_publish_job
        for metadata.json location. Should be in a separate folder inside
        the work area.

        Args:
            render_instance (RenderInstance):

        Returns:
            (str): absolute path to rendered files
        """
        # render to folder of workfile
        base_dir = os.path.dirname(render_instance.source)
        file_name, _ = os.path.splitext(
            os.path.basename(render_instance.source))
        base_dir = os.path.join(base_dir, 'renders', 'aftereffects', file_name)

        # for submit_publish_job
        return base_dir

    def _update_for_local(self, instance, project_entity):
        """Update old saved instances to the current publishing format."""
        instance.stagingDir = tempfile.mkdtemp()
        instance.projectEntity = project_entity
        fam = "render.local"
        if fam not in instance.families:
            instance.families.append(fam)

        return instance
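# --- Worked example (editor's addition): one file name produced by
# 'get_expected_files' above for hypothetical values (asset "sh010",
# subset "renderMain", version 1, frame 1, sequence output with "#" marks
# in the Render Queue file name, padding_width=6). The path is joined
# under the '<workfile dir>/renders/aftereffects/<workfile name>' base dir.
version_str = "v{:03d}".format(1)
file_name = "{}_{}_{}.{}.{}".format(
    "sh010", "renderMain", version_str, str(1).zfill(6), "png")
assert file_name == "sh010_renderMain_v001.000001.png"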
@ -0,0 +1,25 @@
"""
Requires:
    None

Provides:
    instance -> family ("review")
"""
import pyblish.api


class CollectReview(pyblish.api.ContextPlugin):
    """Add review to families if instance was created with the
    'mark_for_review' flag.
    """
    label = "Collect Review"
    hosts = ["aftereffects"]
    order = pyblish.api.CollectorOrder + 0.1

    def process(self, context):
        for instance in context:
            creator_attributes = instance.data.get("creator_attributes") or {}
            if (
                creator_attributes.get("mark_for_review")
                and "review" not in instance.data["families"]
            ):
                instance.data["families"].append("review")
@ -0,0 +1,100 @@
import os

import pyblish.api

from ayon_core.client import get_asset_name_identifier
from ayon_core.pipeline.create import get_subset_name


class CollectWorkfile(pyblish.api.ContextPlugin):
    """Adds the AE workfile instance."""

    label = "Collect After Effects Workfile Instance"
    order = pyblish.api.CollectorOrder + 0.1

    default_variant = "Main"

    def process(self, context):
        existing_instance = None
        for instance in context:
            if instance.data["family"] == "workfile":
                self.log.debug("Workfile instance found, won't create new")
                existing_instance = instance
                break

        current_file = context.data["currentFile"]
        staging_dir = os.path.dirname(current_file)
        scene_file = os.path.basename(current_file)
        if existing_instance is None:  # old publish
            instance = self._get_new_instance(context, scene_file)
        else:
            instance = existing_instance

        # creating representation
        representation = {
            'name': 'aep',
            'ext': 'aep',
            'files': scene_file,
            "stagingDir": staging_dir,
        }

        if not instance.data.get("representations"):
            instance.data["representations"] = []
        instance.data["representations"].append(representation)

        instance.data["publish"] = instance.data["active"]  # for DL

    def _get_new_instance(self, context, scene_file):
        task = context.data["task"]
        version = context.data["version"]
        asset_entity = context.data["assetEntity"]
        project_entity = context.data["projectEntity"]

        asset_name = get_asset_name_identifier(asset_entity)

        instance_data = {
            "active": True,
            "asset": asset_name,
            "task": task,
            "frameStart": context.data['frameStart'],
            "frameEnd": context.data['frameEnd'],
            "handleStart": context.data['handleStart'],
            "handleEnd": context.data['handleEnd'],
            "fps": asset_entity["data"]["fps"],
            "resolutionWidth": asset_entity["data"].get(
                "resolutionWidth",
                project_entity["data"]["resolutionWidth"]),
            "resolutionHeight": asset_entity["data"].get(
                "resolutionHeight",
                project_entity["data"]["resolutionHeight"]),
            "pixelAspect": 1,
            "step": 1,
            "version": version
        }

        # workfile instance
        family = "workfile"
        subset = get_subset_name(
            family,
            self.default_variant,
            context.data["anatomyData"]["task"]["name"],
            context.data["assetEntity"],
            context.data["anatomyData"]["project"]["name"],
            host_name=context.data["hostName"],
            project_settings=context.data["project_settings"]
        )
        # Create instance
        instance = context.create_instance(subset)

        # creating instance data
        instance.data.update({
            "subset": subset,
            "label": scene_file,
            "family": family,
            "families": [family],
            "representations": list()
        })

        instance.data.update(instance_data)

        return instance
@ -0,0 +1,69 @@
import os

from ayon_core.pipeline import publish
from ayon_core.hosts.aftereffects.api import get_stub


class ExtractLocalRender(publish.Extractor):
    """Render RenderQueue locally."""

    order = publish.Extractor.order - 0.47
    label = "Extract Local Render"
    hosts = ["aftereffects"]
    families = ["renderLocal", "render.local"]

    def process(self, instance):
        stub = get_stub()
        staging_dir = instance.data["stagingDir"]
        self.log.debug("staging_dir::{}".format(staging_dir))

        # pull file name collected value from Render Queue Output module
        if not instance.data["file_names"]:
            raise ValueError("No file extension set in Render Queue")

        comp_id = instance.data['comp_id']
        stub.render(staging_dir, comp_id)

        representations = []
        for file_name in instance.data["file_names"]:
            _, ext = os.path.splitext(os.path.basename(file_name))
            ext = ext[1:]

            first_file_path = None
            files = []
            for found_file_name in os.listdir(staging_dir):
                if not found_file_name.endswith(ext):
                    continue

                files.append(found_file_name)
                if first_file_path is None:
                    first_file_path = os.path.join(staging_dir,
                                                   found_file_name)

            if not files:
                self.log.info("no files")
                return

            # single file cannot be wrapped in array
            resulting_files = files
            if len(files) == 1:
                resulting_files = files[0]

            repre_data = {
                "frameStart": instance.data["frameStart"],
                "frameEnd": instance.data["frameEnd"],
                "name": ext,
                "ext": ext,
                "files": resulting_files,
                "stagingDir": staging_dir
            }
            first_repre = not representations
            if instance.data["review"] and first_repre:
                repre_data["tags"] = ["review"]
                # TODO return back when Extract from source same as regular
                # thumbnail_path = os.path.join(staging_dir, files[0])
                # instance.data["thumbnailSource"] = thumbnail_path

            representations.append(repre_data)

        instance.data["representations"] = representations
@ -0,0 +1,16 @@
import pyblish.api

from ayon_core.pipeline import publish
from ayon_core.hosts.aftereffects.api import get_stub


class ExtractSaveScene(pyblish.api.ContextPlugin):
    """Save scene before extraction."""

    order = publish.Extractor.order - 0.48
    label = "Extract Save Scene"
    hosts = ["aftereffects"]

    def process(self, context):
        stub = get_stub()
        stub.save()
@ -0,0 +1,14 @@
<?xml version="1.0" encoding="UTF-8"?>
<root>
    <error id="main">
        <title>Footage item missing</title>
        <description>
## Footage item missing

FootageItem `{name}` contains missing `{path}`. Render will not produce any frames and AE will stop reacting to the integration.

### How to repair?

Remove `{name}` or provide the missing file.
        </description>
    </error>
</root>
@ -0,0 +1,21 @@
<?xml version="1.0" encoding="UTF-8"?>
<root>
    <error id="main">
        <title>Subset context</title>
        <description>
## Invalid subset context

Context of the given subset doesn't match your current scene.

### How to repair?

You can fix this with the "repair" button on the right and then refresh Publish at the bottom right.
        </description>
        <detail>
### __Detailed Info__ (optional)

This might happen if you reuse an old workfile and open it in a different context.
(Eg. you created subset "renderCompositingDefault" from asset "Robot" in "your_project_Robot_compositing.aep", now you opened this workfile in a context "Sloth" but the existing subset for the "Robot" asset stayed in the workfile.)
        </detail>
    </error>
</root>
@ -0,0 +1,37 @@
<?xml version="1.0" encoding="UTF-8"?>
<root>
    <error id="main">
        <title>Scene setting</title>
        <description>
## Invalid scene setting found

One of the settings in the scene doesn't match the asset settings in the database.

{invalid_setting_str}

### How to repair?

Change values for {invalid_keys_str} in the scene OR change them in the asset database if they are wrong there.

In the scene, right mouse click on the published composition > `Composition Settings`.
        </description>
        <detail>
### __Detailed Info__ (optional)

This error is shown when, for example, the resolution in the scene doesn't match the resolution set on the asset in the database.
Either the value in the database or the value in the scene is wrong.
        </detail>
    </error>
    <error id="file_not_found">
        <title>Scene file doesn't exist</title>
        <description>
## Scene file doesn't exist

Collected scene {scene_url} doesn't exist.

### How to repair?

Re-save the file and start publishing from the beginning again.
        </description>
    </error>
</root>
@ -0,0 +1,30 @@
import pyblish.api
from ayon_core.lib import version_up
from ayon_core.pipeline.publish import get_errored_plugins_from_context

from ayon_core.hosts.aftereffects.api import get_stub


class IncrementWorkfile(pyblish.api.InstancePlugin):
    """Increment the current workfile.

    Saves the current scene with an increased version number.
    """

    label = "Increment Workfile"
    order = pyblish.api.IntegratorOrder + 9.0
    hosts = ["aftereffects"]
    families = ["workfile"]
    optional = True

    def process(self, instance):
        errored_plugins = get_errored_plugins_from_context(instance.context)
        if errored_plugins:
            raise RuntimeError(
                "Skipping incrementing current file because publishing failed."
            )

        scene_path = version_up(instance.context.data["currentFile"])
        get_stub().saveAs(scene_path, True)

        self.log.info("Incremented workfile to: {}".format(scene_path))
@ -0,0 +1,54 @@
import json
import pyblish.api
from ayon_core.hosts.aftereffects.api import AfterEffectsHost


class PreCollectRender(pyblish.api.ContextPlugin):
    """
    Checks if a render instance is of the old type and adds families so
    both existing collectors work the same way.

    Could be removed in the future when no one uses the old publish.
    """

    label = "PreCollect Render"
    order = pyblish.api.CollectorOrder + 0.400
    hosts = ["aftereffects"]

    family_remapping = {
        "render": ("render.farm", "farm"),  # (family, label)
        "renderLocal": ("render.local", "local")
    }

    def process(self, context):
        if context.data.get("newPublishing"):
            self.log.debug("Not applicable for New Publisher, skip")
            return

        for inst in AfterEffectsHost().list_instances():
            if inst.get("creator_attributes"):
                raise ValueError("Instance created in New publisher, "
                                 "cannot be published in Pyblish.\n"
                                 "Please publish in New Publisher "
                                 "or recreate instances with legacy Creators")

            if inst["family"] not in self.family_remapping.keys():
                continue

            if not inst["members"]:
                raise ValueError("Couldn't find id, unable to publish. " +
                                 "Please recreate instance.")

            instance = context.create_instance(inst["subset"])
            inst["families"] = [self.family_remapping[inst["family"]][0]]
            instance.data.update(inst)

            self._debug_log(instance)

    def _debug_log(self, instance):
        def _default_json(value):
            return str(value)

        self.log.info(
            json.dumps(instance.data, indent=4, default=_default_json)
        )
@ -0,0 +1,24 @@
from ayon_core.pipeline import publish
from ayon_core.hosts.aftereffects.api import get_stub


class RemovePublishHighlight(publish.Extractor):
    """Clean utf characters which are not working in DL.

    Published compositions are marked with a unicode icon which causes
    problems on specific render environments. Clean it first, send to
    rendering, and add it back later to avoid confusion.
    """

    order = publish.Extractor.order - 0.49  # just before save
    label = "Clean render comp"
    hosts = ["aftereffects"]
    families = ["render.farm"]

    def process(self, instance):
        stub = get_stub()
        self.log.debug("instance::{}".format(instance.data))
        item = instance.data
        comp_name = item["comp_name"].replace(stub.PUBLISH_ICON, '')
        stub.rename_item(item["comp_id"], comp_name)
        instance.data["comp_name"] = comp_name
@ -0,0 +1,49 @@
# -*- coding: utf-8 -*-
"""Validate presence of footage items in composition.

Requires:
"""
import os

import pyblish.api

from ayon_core.pipeline import (
    PublishXmlValidationError
)
from ayon_core.hosts.aftereffects.api import get_stub


class ValidateFootageItems(pyblish.api.InstancePlugin):
    """
    Validates that FootageItems contained in a composition exist.

    AE fails silently and doesn't render anything if a footage item file is
    missing. This results in a nonresponsive AE UI as it expects a reaction
    from the user, but it will not provide a dialog.
    This validator tries to check the existence of the files.
    It will not protect from a missing frame in multi-frame footage though
    (the AE api doesn't provide this information and there is no easy way to
    tell how many frames should be there). A missing frame is replaced by a
    placeholder.
    """

    order = pyblish.api.ValidatorOrder
    label = "Validate Footage Items"
    families = ["render.farm", "render.local", "render"]
    hosts = ["aftereffects"]
    optional = True

    def process(self, instance):
        """Plugin entry point."""

        comp_id = instance.data["comp_id"]
        for footage_item in get_stub().get_items(comps=False, folders=False,
                                                 footages=True):
            self.log.info(footage_item)
            if comp_id not in footage_item.containing_comps:
                continue

            path = footage_item.path
            if path and not os.path.exists(path):
                msg = f"File {path} not found."
                formatting = {"name": footage_item.name, "path": path}
                raise PublishXmlValidationError(self, msg,
                                                formatting_data=formatting)
@ -0,0 +1,64 @@
import pyblish.api

from ayon_core.pipeline import get_current_asset_name
from ayon_core.pipeline.publish import (
    ValidateContentsOrder,
    PublishXmlValidationError,
)
from ayon_core.hosts.aftereffects.api import get_stub


class ValidateInstanceAssetRepair(pyblish.api.Action):
    """Repair the instance asset with value from Context."""

    label = "Repair"
    icon = "wrench"
    on = "failed"

    def process(self, context, plugin):

        # Get the errored instances
        failed = []
        for result in context.data["results"]:
            if (result["error"] is not None and result["instance"] is not None
                    and result["instance"] not in failed):
                failed.append(result["instance"])

        # Apply pyblish.logic to get the instances for the plug-in
        instances = pyblish.api.instances_by_plugin(failed, plugin)
        stub = get_stub()
        for instance in instances:
            data = stub.read(instance[0])

            data["asset"] = get_current_asset_name()
            stub.imprint(instance[0].instance_id, data)


class ValidateInstanceAsset(pyblish.api.InstancePlugin):
    """Validate the instance asset is the currently selected context asset.

    As multiple workfiles might be opened at the same time, switching
    between them could mess up the selected context (from Launcher or
    Ftrack).

    In that case outputs might be written under the wrong asset!

    Repair action will use the Context asset value (from Workfiles or
    Launcher). Closing and reopening with Workfiles will refresh the
    Context value.
    """

    label = "Validate Instance Asset"
    hosts = ["aftereffects"]
    actions = [ValidateInstanceAssetRepair]
    order = ValidateContentsOrder

    def process(self, instance):
        instance_asset = instance.data["asset"]
        current_asset = get_current_asset_name()
        msg = (
            f"Instance asset {instance_asset} is not the same "
            f"as current context {current_asset}."
        )

        if instance_asset != current_asset:
            raise PublishXmlValidationError(self, msg)
@ -0,0 +1,161 @@
# -*- coding: utf-8 -*-
"""Validate scene settings.
Requires:
    instance -> assetEntity
    instance -> anatomyData
"""
import os
import re

import pyblish.api

from ayon_core.pipeline import (
    PublishXmlValidationError,
    OptionalPyblishPluginMixin
)
from ayon_core.hosts.aftereffects.api import get_asset_settings


class ValidateSceneSettings(OptionalPyblishPluginMixin,
                            pyblish.api.InstancePlugin):
    """
    Ensures that Composition Settings (right mouse on comp) are the same as
    on the task in FTrack.

    By default checks only duration - how many frames should be rendered.
    Compares:
        Frame end - Frame start + 1 from FTrack
    against
        Duration in Composition Settings.

    If this complains:
        Check the error message for where the discrepancy is.
        Check the FTrack task 'pype' section of task attributes for
        expected values.
        Check/modify the rendered Composition Settings.

    If you know what you are doing, run publishing again and uncheck this
    validation before the Validation phase.
    """

    """
    Dev docu:
    Could be configured by 'presets/plugins/aftereffects/publish'

    skip_timelines_check - fill task name for which to skip validation of
        frameStart
        frameEnd
        fps
        handleStart
        handleEnd
    skip_resolution_check - fill entity type ('asset') to skip validation of
        resolutionWidth
        resolutionHeight
        TODO support in extension is missing for now

    By default validates duration (how many frames should be published).
    """

    order = pyblish.api.ValidatorOrder
    label = "Validate Scene Settings"
    families = ["render.farm", "render.local", "render"]
    hosts = ["aftereffects"]
    optional = True

    skip_timelines_check = [".*"]  # * >> skip for all
    skip_resolution_check = [".*"]

    def process(self, instance):
        """Plugin entry point."""
        # Skip the instance if it is not active by data on the instance
        if not self.is_active(instance.data):
            return

        asset_doc = instance.data["assetEntity"]
        expected_settings = get_asset_settings(asset_doc)
        self.log.info("config from DB::{}".format(expected_settings))

        task_name = instance.data["anatomyData"]["task"]["name"]
        if any(re.search(pattern, task_name)
               for pattern in self.skip_resolution_check):
            expected_settings.pop("resolutionWidth")
            expected_settings.pop("resolutionHeight")

        if any(re.search(pattern, task_name)
               for pattern in self.skip_timelines_check):
            expected_settings.pop('fps', None)
            expected_settings.pop('frameStart', None)
            expected_settings.pop('frameEnd', None)
            expected_settings.pop('handleStart', None)
            expected_settings.pop('handleEnd', None)

        # handle case where ftrack uses only two decimal places
        # 23.976023976023978 vs. 23.98
        fps = instance.data.get("fps")
        if fps:
            if isinstance(fps, float):
                fps = float(
                    "{:.2f}".format(fps))
            expected_settings["fps"] = fps

        duration = instance.data.get("frameEndHandle") - \
            instance.data.get("frameStartHandle") + 1

        self.log.debug("validated items::{}".format(expected_settings))

        current_settings = {
            "fps": fps,
            "frameStart": instance.data.get("frameStart"),
            "frameEnd": instance.data.get("frameEnd"),
            "handleStart": instance.data.get("handleStart"),
            "handleEnd": instance.data.get("handleEnd"),
            "frameStartHandle": instance.data.get("frameStartHandle"),
            "frameEndHandle": instance.data.get("frameEndHandle"),
            "resolutionWidth": instance.data.get("resolutionWidth"),
            "resolutionHeight": instance.data.get("resolutionHeight"),
            "duration": duration
        }
        self.log.info("current_settings:: {}".format(current_settings))

        invalid_settings = []
        invalid_keys = set()
        for key, value in expected_settings.items():
            if value != current_settings[key]:
                msg = "'{}' expected: '{}' found: '{}'".format(
                    key, value, current_settings[key])

                if key == "duration" and expected_settings.get("handleStart"):
                    msg += " Handles are included in the calculation. " \
                           "Remove handles in DB or extend the frame " \
                           "range in Composition Settings."

                invalid_settings.append(msg)
                invalid_keys.add(key)

        if invalid_settings:
            msg = "Found invalid settings:\n{}".format(
                "\n".join(invalid_settings)
            )

            invalid_keys_str = ",".join(invalid_keys)
            break_str = "<br/>"
            invalid_setting_str = "<b>Found invalid settings:</b><br/>{}".\
                format(break_str.join(invalid_settings))

            formatting_data = {
                "invalid_setting_str": invalid_setting_str,
                "invalid_keys_str": invalid_keys_str
            }
            raise PublishXmlValidationError(self, msg,
                                            formatting_data=formatting_data)

        if not os.path.exists(instance.data.get("source")):
            scene_url = instance.data.get("source")
            msg = "Scene file {} not found (saved under wrong name)".format(
                scene_url
            )
            formatting_data = {
                "scene_url": scene_url
            }
            raise PublishXmlValidationError(self, msg, key="file_not_found",
                                            formatting_data=formatting_data)
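# --- Worked example (editor's addition): the fps normalization above.
# Ftrack may store only two decimal places while AE reports full precision.
fps = 23.976023976023978
fps = float("{:.2f}".format(fps))
assert fps == 23.98  # now comparable with the value stored on the asset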
BIN
client/ayon_core/hosts/aftereffects/resources/template.aep
Normal file
6
client/ayon_core/hosts/blender/__init__.py
Normal file
@ -0,0 +1,6 @@
from .addon import BlenderAddon


__all__ = (
    "BlenderAddon",
)
72
client/ayon_core/hosts/blender/addon.py
Normal file
@ -0,0 +1,72 @@
import os
from ayon_core.modules import OpenPypeModule, IHostAddon

BLENDER_ROOT_DIR = os.path.dirname(os.path.abspath(__file__))


class BlenderAddon(OpenPypeModule, IHostAddon):
    name = "blender"
    host_name = "blender"

    def initialize(self, module_settings):
        self.enabled = True

    def add_implementation_envs(self, env, _app):
        """Modify environments to contain all required for implementation."""
        # Prepare path to implementation script
        implementation_user_script_path = os.path.join(
            BLENDER_ROOT_DIR,
            "blender_addon"
        )

        # Add blender implementation script path to PYTHONPATH
        python_path = env.get("PYTHONPATH") or ""
        python_path_parts = [
            path
            for path in python_path.split(os.pathsep)
            if path
        ]
        python_path_parts.insert(0, implementation_user_script_path)
        env["PYTHONPATH"] = os.pathsep.join(python_path_parts)

        # Modify Blender user scripts path
        previous_user_scripts = set()
        # Implementation path is added to the set for easier path checks
        # inside loops - will be removed at the end
        previous_user_scripts.add(implementation_user_script_path)

        ayon_blender_user_scripts = (
            env.get("AYON_BLENDER_USER_SCRIPTS") or ""
        )
        for path in ayon_blender_user_scripts.split(os.pathsep):
            if path:
                previous_user_scripts.add(os.path.normpath(path))

        blender_user_scripts = env.get("BLENDER_USER_SCRIPTS") or ""
        for path in blender_user_scripts.split(os.pathsep):
            if path:
                previous_user_scripts.add(os.path.normpath(path))

        # Remove implementation path from user script paths as it is set to
        # `BLENDER_USER_SCRIPTS`
        previous_user_scripts.remove(implementation_user_script_path)
        env["BLENDER_USER_SCRIPTS"] = implementation_user_script_path

        # Set custom user scripts env
        env["AYON_BLENDER_USER_SCRIPTS"] = os.pathsep.join(
            previous_user_scripts
        )

        # Define Qt binding if not defined
        if not env.get("QT_PREFERRED_BINDING"):
            env["QT_PREFERRED_BINDING"] = "PySide2"

    def get_launch_hook_paths(self, app):
        if app.host_name != self.host_name:
            return []
        return [
            os.path.join(BLENDER_ROOT_DIR, "hooks")
        ]

    def get_workfile_extensions(self):
        return [".blend"]
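# --- Worked example (editor's addition): the effect of
# 'add_implementation_envs' above on a hypothetical environment (path
# separator shown as ':' for Linux).
#
# env = {"BLENDER_USER_SCRIPTS": "/studio/scripts"}
# addon.add_implementation_envs(env, app)
#
# afterwards:
#   env["BLENDER_USER_SCRIPTS"]      == "<BLENDER_ROOT_DIR>/blender_addon"
#   env["AYON_BLENDER_USER_SCRIPTS"] == "/studio/scripts"
#   env["PYTHONPATH"] starts with       "<BLENDER_ROOT_DIR>/blender_addon"
#   env["QT_PREFERRED_BINDING"]      == "PySide2"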
74
client/ayon_core/hosts/blender/api/__init__.py
Normal file
@ -0,0 +1,74 @@
"""Public API

Anything that isn't defined here is INTERNAL and unreliable for external use.

"""

from .pipeline import (
    install,
    uninstall,
    ls,
    publish,
    containerise,
    BlenderHost,
)

from .plugin import (
    Creator,
    Loader,
)

from .workio import (
    open_file,
    save_file,
    current_file,
    has_unsaved_changes,
    file_extensions,
    work_root,
)

from .lib import (
    lsattr,
    lsattrs,
    read,
    maintained_selection,
    maintained_time,
    get_selection,
    # unique_name,
)

from .capture import capture

from .render_lib import prepare_rendering


__all__ = [
    "install",
    "uninstall",
    "ls",
    "publish",
    "containerise",
    "BlenderHost",

    "Creator",
    "Loader",

    # Workfiles API
    "open_file",
    "save_file",
    "current_file",
    "has_unsaved_changes",
    "file_extensions",
    "work_root",

    # Utility functions
    "maintained_selection",
    "maintained_time",
    "lsattr",
    "lsattrs",
    "read",
    "get_selection",
    "capture",
    # "unique_name",
    "prepare_rendering",
]
||||
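Since anything not re-exported here is internal, downstream code should import from this package root. A minimal usage sketch (assumes it runs inside Blender with the AYON scripts on PYTHONPATH; the expected values in the comments are assumptions):

from ayon_core.hosts.blender import api

print(api.file_extensions())      # expected: [".blend"]
with api.maintained_selection():
    # any selection changes in this block are reverted on exit
    ...
print(len(api.get_selection()))   # number of currently selected objects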
47
client/ayon_core/hosts/blender/api/action.py
Normal file
@@ -0,0 +1,47 @@
import bpy

import pyblish.api

from ayon_core.pipeline.publish import get_errored_instances_from_context


class SelectInvalidAction(pyblish.api.Action):
    """Select invalid objects in Blender when a publish plug-in failed."""
    label = "Select Invalid"
    on = "failed"
    icon = "search"

    def process(self, context, plugin):
        errored_instances = get_errored_instances_from_context(context,
                                                               plugin=plugin)

        # Get the invalid nodes for the plug-ins
        self.log.info("Finding invalid nodes...")
        invalid = list()
        for instance in errored_instances:
            invalid_nodes = plugin.get_invalid(instance)
            if invalid_nodes:
                if isinstance(invalid_nodes, (list, tuple)):
                    invalid.extend(invalid_nodes)
                else:
                    self.log.warning(
                        "Failed plug-in doesn't have any selectable objects."
                    )

        bpy.ops.object.select_all(action='DESELECT')

        # Make sure every node is only processed once
        invalid = list(set(invalid))
        if not invalid:
            self.log.info("No invalid nodes found.")
            return

        invalid_names = [obj.name for obj in invalid]
        self.log.info(
            "Selecting invalid objects: %s", ", ".join(invalid_names)
        )
        # Select the objects and also make the last one the active object.
        for obj in invalid:
            obj.select_set(True)

        bpy.context.view_layer.objects.active = invalid[-1]
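SelectInvalidAction assumes the failed plug-in lists it in `actions` and exposes a `get_invalid(instance)` classmethod returning the offending objects. A hypothetical validator sketch (class name and the naming rule are illustrative only):

import pyblish.api

from ayon_core.hosts.blender.api.action import SelectInvalidAction


class ValidateMeshNames(pyblish.api.InstancePlugin):
    """Example only: fail when mesh objects still have default names."""
    order = pyblish.api.ValidatorOrder
    label = "Validate Mesh Names (example)"
    actions = [SelectInvalidAction]

    @classmethod
    def get_invalid(cls, instance):
        # Objects the action will select when this validator fails
        return [
            obj for obj in instance
            if obj.type == "MESH" and obj.name.startswith("Cube")
        ]

    def process(self, instance):
        invalid = self.get_invalid(instance)
        if invalid:
            raise RuntimeError(
                "Meshes with default names: {}".format(
                    [obj.name for obj in invalid]))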
282
client/ayon_core/hosts/blender/api/capture.py
Normal file
@@ -0,0 +1,282 @@
"""Blender Capture

Playblasting with independent viewport, camera and display options.
"""
import contextlib
import bpy

from .lib import maintained_time
from .plugin import deselect_all, create_blender_context


def capture(
    camera=None,
    width=None,
    height=None,
    filename=None,
    start_frame=None,
    end_frame=None,
    step_frame=None,
    sound=None,
    isolate=None,
    maintain_aspect_ratio=True,
    overwrite=False,
    image_settings=None,
    display_options=None
):
    """Playblast in an independent window.

    Arguments:
        camera (str, optional): Name of camera, defaults to "Camera"
        width (int, optional): Width of output in pixels
        height (int, optional): Height of output in pixels
        filename (str, optional): Name of output file path. Defaults to
            current render output path.
        start_frame (int, optional): Defaults to current start frame.
        end_frame (int, optional): Defaults to current end frame.
        step_frame (int, optional): Defaults to 1.
        sound (str, optional): Specify the sound node to be used during
            playblast. When None (default) no sound will be used.
        isolate (list): List of nodes to isolate upon capturing
        maintain_aspect_ratio (bool, optional): Modify height in order to
            maintain aspect ratio.
        overwrite (bool, optional): Whether or not to overwrite if the file
            already exists. If disabled and the file exists, an error will
            be raised.
        image_settings (dict, optional): Supplied image settings for render,
            using `ImageSettings`
        display_options (dict, optional): Supplied display options for render
    """

    scene = bpy.context.scene
    camera = camera or "Camera"

    # Ensure camera exists.
    if camera not in scene.objects and camera != "AUTO":
        raise RuntimeError("Camera does not exist: {0}".format(camera))

    # Ensure resolution.
    if width and height:
        maintain_aspect_ratio = False
    width = width or scene.render.resolution_x
    height = height or scene.render.resolution_y
    if maintain_aspect_ratio:
        ratio = scene.render.resolution_x / scene.render.resolution_y
        height = round(width / ratio)

    # Get frame range.
    if start_frame is None:
        start_frame = scene.frame_start
    if end_frame is None:
        end_frame = scene.frame_end
    if step_frame is None:
        step_frame = 1
    frame_range = (start_frame, end_frame, step_frame)

    if filename is None:
        filename = scene.render.filepath

    render_options = {
        "filepath": "{}.".format(filename.rstrip(".")),
        "resolution_x": width,
        "resolution_y": height,
        "use_overwrite": overwrite,
    }

    with _independent_window() as window:

        applied_view(window, camera, isolate, options=display_options)

        with contextlib.ExitStack() as stack:
            stack.enter_context(maintain_camera(window, camera))
            stack.enter_context(applied_frame_range(window, *frame_range))
            stack.enter_context(applied_render_options(window, render_options))
            stack.enter_context(applied_image_settings(window, image_settings))
            stack.enter_context(maintained_time())

            bpy.ops.render.opengl(
                animation=True,
                render_keyed_only=False,
                sequencer=False,
                write_still=False,
                view_context=True
            )

    return filename
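A usage sketch (runs only inside Blender; the camera name, frame range and output path are made-up examples, and omitting image_settings falls back to the H.264 QuickTime defaults in ImageSettings defined just below):

from ayon_core.hosts.blender.api.capture import capture

output = capture(
    camera="Camera",
    width=1920,
    height=1080,
    filename="/tmp/review/playblast",
    start_frame=1,
    end_frame=50,
    overwrite=True,
    display_options={
        # nested dicts are applied recursively onto the 3D view space
        "shading": {"type": "SOLID"},
        "overlay": {"show_overlays": False},
    },
)
print("Playblast written to:", output)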
ImageSettings = {
    "file_format": "FFMPEG",
    "color_mode": "RGB",
    "ffmpeg": {
        "format": "QUICKTIME",
        "use_autosplit": False,
        "codec": "H264",
        "constant_rate_factor": "MEDIUM",
        "gopsize": 18,
        "use_max_b_frames": False,
    },
}


def isolate_objects(window, objects):
    """Isolate selection"""
    deselect_all()

    for obj in objects:
        obj.select_set(True)

    context = create_blender_context(selected=objects, window=window)

    with bpy.context.temp_override(**context):
        bpy.ops.view3d.view_axis(type="FRONT")
        bpy.ops.view3d.localview()

    deselect_all()


def _apply_options(entity, options):
    for option, value in options.items():
        if isinstance(value, dict):
            _apply_options(getattr(entity, option), value)
        else:
            setattr(entity, option, value)


def applied_view(window, camera, isolate=None, options=None):
    """Apply view options to window."""
    area = window.screen.areas[0]
    space = area.spaces[0]

    area.ui_type = "VIEW_3D"

    types = {"MESH", "GPENCIL"}
    objects = [obj for obj in window.scene.objects if obj.type in types]

    if camera == "AUTO":
        space.region_3d.view_perspective = "ORTHO"
        isolate_objects(window, isolate or objects)
    else:
        isolate_objects(window, isolate or objects)
        space.camera = window.scene.objects.get(camera)
        space.region_3d.view_perspective = "CAMERA"

    if isinstance(options, dict):
        _apply_options(space, options)
    else:
        space.shading.type = "SOLID"
        space.shading.color_type = "MATERIAL"
        space.show_gizmo = False
        space.overlay.show_overlays = False


@contextlib.contextmanager
def applied_frame_range(window, start, end, step):
    """Context manager for setting frame range."""
    # Store current frame range
    current_frame_start = window.scene.frame_start
    current_frame_end = window.scene.frame_end
    current_frame_step = window.scene.frame_step
    # Apply frame range
    window.scene.frame_start = start
    window.scene.frame_end = end
    window.scene.frame_step = step
    try:
        yield
    finally:
        # Restore frame range
        window.scene.frame_start = current_frame_start
        window.scene.frame_end = current_frame_end
        window.scene.frame_step = current_frame_step


@contextlib.contextmanager
def applied_render_options(window, options):
    """Context manager for setting render options."""
    render = window.scene.render

    # Store current settings
    original = {}
    for opt in options.copy():
        try:
            original[opt] = getattr(render, opt)
        except ValueError:
            options.pop(opt)

    # Apply settings
    _apply_options(render, options)

    try:
        yield
    finally:
        # Restore previous settings
        _apply_options(render, original)


@contextlib.contextmanager
def applied_image_settings(window, options):
    """Context manager to override image settings."""

    options = options or ImageSettings.copy()
    ffmpeg = options.pop("ffmpeg", {})
    render = window.scene.render

    # Store current image settings
    original = {}
    for opt in options.copy():
        try:
            original[opt] = getattr(render.image_settings, opt)
        except ValueError:
            options.pop(opt)

    # Store current ffmpeg settings
    original_ffmpeg = {}
    for opt in ffmpeg.copy():
        try:
            original_ffmpeg[opt] = getattr(render.ffmpeg, opt)
        except ValueError:
            ffmpeg.pop(opt)

    # Apply image settings
    for opt, value in options.items():
        setattr(render.image_settings, opt, value)

    # Apply ffmpeg settings
    for opt, value in ffmpeg.items():
        setattr(render.ffmpeg, opt, value)

    try:
        yield
    finally:
        # Restore previous settings
        for opt, value in original.items():
            setattr(render.image_settings, opt, value)
        for opt, value in original_ffmpeg.items():
            setattr(render.ffmpeg, opt, value)


@contextlib.contextmanager
def maintain_camera(window, camera):
    """Context manager to override camera."""
    current_camera = window.scene.camera
    if camera in window.scene.objects:
        window.scene.camera = window.scene.objects.get(camera)
    try:
        yield
    finally:
        window.scene.camera = current_camera


@contextlib.contextmanager
def _independent_window():
    """Create capture-window context."""
    context = create_blender_context()
    current_windows = set(bpy.context.window_manager.windows)
    with bpy.context.temp_override(**context):
        bpy.ops.wm.window_new()
        window = list(
            set(bpy.context.window_manager.windows) - current_windows)[0]
        context["window"] = window
        try:
            yield window
        finally:
            bpy.ops.wm.window_close()
51
client/ayon_core/hosts/blender/api/colorspace.py
Normal file
@@ -0,0 +1,51 @@
import attr

import bpy


@attr.s
class LayerMetadata(object):
    """Data class for Render Layer metadata."""
    frameStart = attr.ib()
    frameEnd = attr.ib()


@attr.s
class RenderProduct(object):
    """Colorspace data for a specific render product, used when submitting
    the publish job.
    """
    colorspace = attr.ib()  # colorspace
    view = attr.ib()  # OCIO view transform
    productName = attr.ib(default=None)


class ARenderProduct(object):
    def __init__(self):
        """Constructor."""
        # Initialize
        self.layer_data = self._get_layer_data()
        self.layer_data.products = self.get_render_products()

    def _get_layer_data(self):
        scene = bpy.context.scene

        return LayerMetadata(
            frameStart=int(scene.frame_start),
            frameEnd=int(scene.frame_end),
        )

    def get_render_products(self):
        """To be implemented by renderer class.

        This should return a list of RenderProducts.

        Returns:
            list: List of RenderProduct
        """
        return [
            RenderProduct(
                colorspace="sRGB",
                view="ACES 1.0",
                productName=""
            )
        ]
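`get_render_products` is a default meant to be overridden per renderer. A hypothetical subclass sketch (the product names and colorspace/view values are illustrative, not an actual AYON implementation):

class CyclesRenderProduct(ARenderProduct):
    def get_render_products(self):
        # One RenderProduct per output the renderer will publish
        return [
            RenderProduct(
                colorspace="ACEScg",
                view="Filmic",
                productName="beauty",
            ),
            RenderProduct(
                colorspace="Raw",
                view="Standard",
                productName="cryptomatte",
            ),
        ]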
BIN
client/ayon_core/hosts/blender/api/icons/pyblish-32x32.png
Normal file
After Width: | Height: | Size: 632 B |
367
client/ayon_core/hosts/blender/api/lib.py
Normal file
@@ -0,0 +1,367 @@
import os
import traceback
import importlib
import contextlib
from typing import Dict, List, Union

import bpy
import addon_utils
from ayon_core.lib import Logger

from . import pipeline

log = Logger.get_logger(__name__)


def load_scripts(paths):
    """Copy of `load_scripts` from Blender's implementation.

    This function may change in a future Blender release, in which case its
    usage will have to be based on the Blender version.
    """
    import bpy_types

    loaded_modules = set()

    previous_classes = [
        cls
        for cls in bpy.types.bpy_struct.__subclasses__()
    ]

    def register_module_call(mod):
        register = getattr(mod, "register", None)
        if register:
            try:
                register()
            except:
                traceback.print_exc()
        else:
            print("\nWarning! '%s' has no register function, "
                  "this is now a requirement for registerable scripts" %
                  mod.__file__)

    def unregister_module_call(mod):
        unregister = getattr(mod, "unregister", None)
        if unregister:
            try:
                unregister()
            except:
                traceback.print_exc()

    def test_reload(mod):
        # Reloading this causes internal errors because the classes from
        # this module are stored internally. Possibly we could refresh
        # internal references too, but for now, best not to.
        if mod == bpy_types:
            return mod

        try:
            return importlib.reload(mod)
        except:
            traceback.print_exc()

    def test_register(mod):
        if mod:
            register_module_call(mod)
            bpy.utils._global_loaded_modules.append(mod.__name__)

    from bpy_restrict_state import RestrictBlend

    with RestrictBlend():
        for base_path in paths:
            for path_subdir in bpy.utils._script_module_dirs:
                path = os.path.join(base_path, path_subdir)
                if not os.path.isdir(path):
                    continue

                bpy.utils._sys_path_ensure_prepend(path)

                # Only register modules from the 'startup' subdirectory.
                if path_subdir != "startup":
                    continue
                for mod in bpy.utils.modules_from_path(path, loaded_modules):
                    test_register(mod)

    addons_paths = []
    for base_path in paths:
        addons_path = os.path.join(base_path, "addons")
        if not os.path.exists(addons_path):
            continue
        addons_paths.append(addons_path)
        addons_module_path = os.path.join(addons_path, "modules")
        if os.path.exists(addons_module_path):
            bpy.utils._sys_path_ensure_prepend(addons_module_path)

    if addons_paths:
        # Fake addons
        origin_paths = addon_utils.paths

        def new_paths():
            paths = origin_paths() + addons_paths
            return paths

        addon_utils.paths = new_paths
        addon_utils.modules_refresh()

    # Load template (if set)
    if any(bpy.utils.app_template_paths()):
        import bl_app_template_utils
        bl_app_template_utils.reset(reload_scripts=False)
        del bl_app_template_utils

    for cls in bpy.types.bpy_struct.__subclasses__():
        if cls in previous_classes:
            continue
        if not getattr(cls, "is_registered", False):
            continue
        for subcls in cls.__subclasses__():
            if not subcls.is_registered:
                print(
                    "Warning, unregistered class: %s(%s)" %
                    (subcls.__name__, cls.__name__)
                )


def append_user_scripts():
    user_scripts = os.environ.get("AYON_BLENDER_USER_SCRIPTS")
    if not user_scripts:
        return

    try:
        load_scripts(user_scripts.split(os.pathsep))
    except Exception:
        print("Couldn't load user scripts \"{}\"".format(user_scripts))
        traceback.print_exc()
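A wiring sketch for the two functions above (hypothetical paths; this must run inside Blender, since loading scripts touches bpy). AYON sets AYON_BLENDER_USER_SCRIPTS at launch, and a bundled startup script can call append_user_scripts() so studio scripts are registered alongside the bundled ones:

import os

from ayon_core.hosts.blender.api.lib import append_user_scripts

os.environ["AYON_BLENDER_USER_SCRIPTS"] = os.pathsep.join([
    "/studio/blender_scripts",      # hypothetical studio-wide scripts
    "/project/blender_scripts",     # hypothetical per-project scripts
])
# Registers "startup" modules and fakes "addons" paths from both locations
append_user_scripts()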
def set_app_templates_path():
    # Blender requires the app templates to be in `BLENDER_USER_SCRIPTS`.
    # After running Blender, we set that variable to our custom path, so
    # that the user can use their custom app templates.

    # We look among the scripts paths for one of the paths that contains
    # the app templates. The path must contain the subfolder
    # `startup/bl_app_templates_user`.
    paths = (
        os.environ.get("AYON_BLENDER_USER_SCRIPTS") or ""
    ).split(os.pathsep)

    app_templates_path = None
    for path in paths:
        if os.path.isdir(
                os.path.join(path, "startup", "bl_app_templates_user")):
            app_templates_path = path
            break

    if app_templates_path and os.path.isdir(app_templates_path):
        os.environ["BLENDER_USER_SCRIPTS"] = app_templates_path


def imprint(node: bpy.types.bpy_struct_meta_idprop, data: Dict):
    r"""Write `data` to `node` as userDefined attributes

    Arguments:
        node: Node on which to write the data
        data: Dictionary of key/value pairs

    Example:
        >>> import bpy
        >>> def compute():
        ...     return 6
        ...
        >>> bpy.ops.mesh.primitive_cube_add()
        >>> cube = bpy.context.view_layer.objects.active
        >>> imprint(cube, {
        ...     "regularString": "myFamily",
        ...     "computedValue": lambda: compute()
        ... })
        ...
        >>> cube['avalon']['computedValue']
        6
    """

    imprint_data = dict()

    for key, value in data.items():
        if value is None:
            continue

        if callable(value):
            # Support values evaluated at imprint
            value = value()

        if not isinstance(value, (int, float, bool, str, list, dict)):
            raise TypeError(f"Unsupported type: {type(value)}")

        imprint_data[key] = value

    pipeline.metadata_update(node, imprint_data)


def lsattr(attr: str,
           value: Union[str, int, bool, List, Dict, None] = None) -> List:
    r"""Return nodes matching `attr` and `value`

    Arguments:
        attr: Name of Blender property
        value: Value of attribute. If none
            is provided, return all nodes with this attribute.

    Example:
        >>> lsattr("id", "myId")
        ... [bpy.data.objects["myNode"]]
        >>> lsattr("id")
        ... [bpy.data.objects["myNode"], bpy.data.objects["myOtherNode"]]

    Returns:
        list
    """

    return lsattrs({attr: value})


def lsattrs(attrs: Dict) -> List:
    r"""Return nodes with the given attribute(s).

    Arguments:
        attrs: Name and value pairs of expected matches

    Example:
        >>> lsattrs({"age": 5})  # Return nodes with an `age` of 5
        # Return nodes with both `age` and `color` of 5 and blue
        >>> lsattrs({"age": 5, "color": "blue"})

    Returns:
        list
    """

    # For now return all objects, not filtered by scene/collection/view_layer.
    matches = set()
    for coll in dir(bpy.data):
        if not isinstance(
                getattr(bpy.data, coll),
                bpy.types.bpy_prop_collection,
        ):
            continue
        for node in getattr(bpy.data, coll):
            for attr, value in attrs.items():
                avalon_prop = node.get(pipeline.AVALON_PROPERTY)
                if not avalon_prop:
                    continue
                if (avalon_prop.get(attr)
                        and (value is None or avalon_prop.get(attr) == value)):
                    matches.add(node)
    return list(matches)


def read(node: bpy.types.bpy_struct_meta_idprop):
    """Return user-defined attributes from `node`"""

    data = dict(node.get(pipeline.AVALON_PROPERTY, {}))

    # Ignore hidden/internal data
    data = {
        key: value
        for key, value in data.items() if not key.startswith("_")
    }

    return data


def get_selected_collections():
    """Return a list of the currently selected collections in the outliner.

    Raises:
        RuntimeError: If the outliner cannot be found in the main Blender
            window.

    Returns:
        list: A list of `bpy.types.Collection` objects that are currently
            selected in the outliner.
    """
    window = bpy.context.window or bpy.context.window_manager.windows[0]

    try:
        area = next(
            area for area in window.screen.areas
            if area.type == 'OUTLINER')
        region = next(
            region for region in area.regions
            if region.type == 'WINDOW')
    except StopIteration as e:
        raise RuntimeError("Could not find outliner. An outliner space "
                           "must be in the main Blender window.") from e

    with bpy.context.temp_override(
        window=window,
        area=area,
        region=region,
        screen=window.screen
    ):
        ids = bpy.context.selected_ids

    return [id for id in ids if isinstance(id, bpy.types.Collection)]


def get_selection(include_collections: bool = False) -> List[bpy.types.Object]:
    """Return a list of selected objects in the current Blender scene.

    Args:
        include_collections (bool, optional): Whether to include selected
            collections in the result. Defaults to False.

    Returns:
        List[bpy.types.Object]: A list of selected objects.
    """
    selection = [obj for obj in bpy.context.scene.objects if obj.select_get()]

    if include_collections:
        selection.extend(get_selected_collections())

    return selection


@contextlib.contextmanager
def maintained_selection():
    r"""Maintain selection during context

    Example:
        >>> with maintained_selection():
        ...     # Modify selection
        ...     bpy.ops.object.select_all(action='DESELECT')
        >>> # Selection restored
    """

    previous_selection = get_selection()
    previous_active = bpy.context.view_layer.objects.active
    try:
        yield
    finally:
        # Clear the selection
        for node in get_selection():
            node.select_set(state=False)
        if previous_selection:
            for node in previous_selection:
                try:
                    node.select_set(state=True)
                except ReferenceError:
                    # This could happen if a selected node was deleted during
                    # the context.
                    log.exception("Failed to reselect")
                    continue
        try:
            bpy.context.view_layer.objects.active = previous_active
        except ReferenceError:
            # This could happen if the active node was deleted during the
            # context.
            log.exception("Failed to set active object.")


@contextlib.contextmanager
def maintained_time():
    """Maintain current frame during context."""
    current_time = bpy.context.scene.frame_current
    try:
        yield
    finally:
        bpy.context.scene.frame_current = current_time
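A roundtrip sketch for the metadata helpers above (runs inside Blender; the key/value pairs are illustrative, and the expected outputs in comments assume `pipeline.metadata_update` writes under the 'avalon' property as in the imprint docstring):

import bpy

from ayon_core.hosts.blender.api.lib import imprint, lsattr, lsattrs, read

bpy.ops.mesh.primitive_cube_add()
cube = bpy.context.view_layer.objects.active

imprint(cube, {"id": "myId", "family": "model"})
print(read(cube))                     # expected: {'id': 'myId', 'family': 'model'}
print(cube in lsattr("id", "myId"))   # expected: True (filtered by value)
print(cube in lsattrs({"family": "model"}))  # expected: True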