Merge branch 'develop' of https://github.com/ynput/ayon-core into feature/AY-745_Deadline-webservice-password

Petr Kalis 2024-04-18 13:06:38 +02:00
commit 1525443c94
238 changed files with 1901 additions and 826 deletions

View file

@@ -15,6 +15,7 @@ from abc import ABCMeta, abstractmethod
import six
import appdirs
import ayon_api
from semver import VersionInfo
from ayon_core import AYON_CORE_ROOT
from ayon_core.lib import Logger, is_dev_mode_enabled
@@ -46,6 +47,11 @@ IGNORED_HOSTS_IN_AYON = {
}
IGNORED_MODULES_IN_AYON = set()
# Milestone versions of addons that were moved out of the ayon-core codebase
# - used to log a message about the missing or outdated addon
MOVED_ADDON_MILESTONE_VERSIONS = {
"applications": VersionInfo(0, 2, 0),
}
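# Illustrative sketch only (not used by the loading logic below): how a
# bundle's addon version is compared against the milestone version.
# e.g. _is_before_milestone("applications", "0.1.3") -> True for the
# hypothetical version string "0.1.3", so the bundled client code is used.
def _is_before_milestone(addon_name, addon_version):
    milestone = MOVED_ADDON_MILESTONE_VERSIONS.get(addon_name)
    return (
        milestone is not None
        and VersionInfo.parse(addon_version) < milestone
    )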
# Inherit from `object` for Python 2 hosts
class _ModuleClass(object):
@@ -192,6 +198,45 @@ def _get_ayon_addons_information(bundle_info):
return output
def _handle_moved_addons(addon_name, milestone_version, log):
"""Log message that addon version is not compatible with current core.
The function can return path to addon client code, but that can happen
only if ayon-core is used from code (for development), but still
logs a warning.
Args:
addon_name (str): Addon name.
milestone_version (VersionInfo): Milestone addon version.
log (logging.Logger): Logger object.
Returns:
Union[str, None]: Addon dir or None.
"""
# Handle addons which were moved out of ayon-core
# - Try to fix it by loading it directly from server addons dir in
# ayon-core repository. But that will work only if ayon-core is
# used from code.
addon_dir = os.path.join(
os.path.dirname(os.path.dirname(AYON_CORE_ROOT)),
"server_addon",
addon_name,
"client",
)
if not os.path.exists(addon_dir):
log.error((
"Addon '{}' is not be available."
" Please update applications addon to '{}' or higher."
).format(addon_name, milestone_version))
return None
log.warning((
"Please update '{}' addon to '{}' or higher."
" Using client code from ayon-core repository."
).format(addon_name, milestone_version))
return addon_dir
def _load_ayon_addons(openpype_modules, modules_key, log):
"""Load AYON addons based on information from server.
@@ -249,6 +294,7 @@ def _load_ayon_addons(openpype_modules, modules_key, log):
use_dev_path = dev_addon_info.get("enabled", False)
addon_dir = None
milestone_version = MOVED_ADDON_MILESTONE_VERSIONS.get(addon_name)
if use_dev_path:
addon_dir = dev_addon_info["path"]
if not addon_dir or not os.path.exists(addon_dir):
@@ -257,6 +303,16 @@ def _load_ayon_addons(openpype_modules, modules_key, log):
).format(addon_name, addon_version, addon_dir))
continue
elif (
milestone_version is not None
and VersionInfo.parse(addon_version) < milestone_version
):
addon_dir = _handle_moved_addons(
addon_name, milestone_version, log
)
if not addon_dir:
continue
elif addons_dir_exists:
folder_name = "{}_{}".format(addon_name, addon_version)
addon_dir = os.path.join(addons_dir, folder_name)
@@ -336,66 +392,9 @@ def _load_ayon_addons(openpype_modules, modules_key, log):
return addons_to_skip_in_core
def _load_ayon_core_addons_dir(
ignore_addon_names, openpype_modules, modules_key, log
):
addons_dir = os.path.join(AYON_CORE_ROOT, "addons")
if not os.path.exists(addons_dir):
return
imported_modules = []
# Make sure that addons which already have client code are not loaded
# from core again, with older code
filtered_paths = []
for name in os.listdir(addons_dir):
if name in ignore_addon_names:
continue
path = os.path.join(addons_dir, name)
if os.path.isdir(path):
filtered_paths.append(path)
for path in filtered_paths:
while path in sys.path:
sys.path.remove(path)
sys.path.insert(0, path)
for name in os.listdir(path):
fullpath = os.path.join(path, name)
if os.path.isfile(fullpath):
basename, ext = os.path.splitext(name)
if ext != ".py":
continue
else:
basename = name
try:
module = __import__(basename, fromlist=("",))
for attr_name in dir(module):
attr = getattr(module, attr_name)
if (
inspect.isclass(attr)
and issubclass(attr, AYONAddon)
):
new_import_str = "{}.{}".format(modules_key, basename)
sys.modules[new_import_str] = module
setattr(openpype_modules, basename, module)
imported_modules.append(module)
break
except Exception:
log.error(
"Failed to import addon '{}'.".format(fullpath),
exc_info=True
)
return imported_modules
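# Illustrative sketch of the dynamic import pattern used above (not part of
# the loading logic): put an addon directory on sys.path, import a module
# found inside it by name, and alias it under the dynamic 'modules_key'
# namespace. Both argument values would be hypothetical examples.
def _example_import_client_module(addon_dir, module_name, modules_key):
    import importlib

    if addon_dir not in sys.path:
        sys.path.insert(0, addon_dir)
    module = importlib.import_module(module_name)
    sys.modules["{}.{}".format(modules_key, module_name)] = module
    return module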
def _load_addons_in_core(
ignore_addon_names, openpype_modules, modules_key, log
):
_load_ayon_core_addons_dir(
ignore_addon_names, openpype_modules, modules_key, log
)
# Add current directory at first place
# - has small differences in import logic
hosts_dir = os.path.join(AYON_CORE_ROOT, "hosts")

View file

@@ -1,58 +0,0 @@
from .constants import (
APPLICATIONS_ADDON_ROOT,
DEFAULT_ENV_SUBGROUP,
PLATFORM_NAMES,
)
from .exceptions import (
ApplicationNotFound,
ApplicationExecutableNotFound,
ApplicationLaunchFailed,
MissingRequiredKey,
)
from .defs import (
LaunchTypes,
ApplicationExecutable,
UndefinedApplicationExecutable,
ApplicationGroup,
Application,
EnvironmentToolGroup,
EnvironmentTool,
)
from .hooks import (
LaunchHook,
PreLaunchHook,
PostLaunchHook,
)
from .manager import (
ApplicationManager,
ApplicationLaunchContext,
)
from .addon import ApplicationsAddon
__all__ = (
"DEFAULT_ENV_SUBGROUP",
"PLATFORM_NAMES",
"ApplicationNotFound",
"ApplicationExecutableNotFound",
"ApplicationLaunchFailed",
"MissingRequiredKey",
"LaunchTypes",
"ApplicationExecutable",
"UndefinedApplicationExecutable",
"ApplicationGroup",
"Application",
"EnvironmentToolGroup",
"EnvironmentTool",
"LaunchHook",
"PreLaunchHook",
"PostLaunchHook",
"ApplicationManager",
"ApplicationLaunchContext",
"ApplicationsAddon",
)

View file

@@ -1,173 +0,0 @@
import os
import json
from ayon_core.addon import AYONAddon, IPluginPaths, click_wrap
from .constants import APPLICATIONS_ADDON_ROOT
from .defs import LaunchTypes
from .manager import ApplicationManager
class ApplicationsAddon(AYONAddon, IPluginPaths):
name = "applications"
def initialize(self, settings):
# TODO remove when addon is removed from ayon-core
self.enabled = self.name in settings
def get_app_environments_for_context(
self,
project_name,
folder_path,
task_name,
full_app_name,
env_group=None,
launch_type=None,
env=None,
):
"""Calculate environment variables for launch context.
Args:
project_name (str): Project name.
folder_path (str): Folder path.
task_name (str): Task name.
full_app_name (str): Full application name.
env_group (Optional[str]): Environment group.
launch_type (Optional[str]): Launch type.
env (Optional[dict[str, str]]): Environment variables to update.
Returns:
dict[str, str]: Environment variables for context.
"""
from ayon_applications.utils import get_app_environments_for_context
if not full_app_name:
return {}
return get_app_environments_for_context(
project_name,
folder_path,
task_name,
full_app_name,
env_group=env_group,
launch_type=launch_type,
env=env,
addons_manager=self.manager
)
def get_farm_publish_environment_variables(
self,
project_name,
folder_path,
task_name,
full_app_name=None,
env_group=None,
):
"""Calculate environment variables for farm publish.
Args:
project_name (str): Project name.
folder_path (str): Folder path.
task_name (str): Task name.
env_group (Optional[str]): Environment group.
full_app_name (Optional[str]): Full application name. Value from
environment variable 'AYON_APP_NAME' is used if 'None' is
passed.
Returns:
dict[str, str]: Environment variables for farm publish.
"""
if full_app_name is None:
full_app_name = os.getenv("AYON_APP_NAME")
return self.get_app_environments_for_context(
project_name,
folder_path,
task_name,
full_app_name,
env_group=env_group,
launch_type=LaunchTypes.farm_publish
)
def get_applications_manager(self, settings=None):
"""Get applications manager.
Args:
settings (Optional[dict]): Studio/project settings.
Returns:
ApplicationManager: Applications manager.
"""
return ApplicationManager(settings)
def get_plugin_paths(self):
return {
"publish": [
os.path.join(APPLICATIONS_ADDON_ROOT, "plugins", "publish")
]
}
# --- CLI ---
def cli(self, addon_click_group):
main_group = click_wrap.group(
self._cli_main, name=self.name, help="Applications addon"
)
(
main_group.command(
self._cli_extract_environments,
name="extractenvironments",
help=(
"Extract environment variables for context into json file"
)
)
.argument("output_json_path")
.option("--project", help="Project name", default=None)
.option("--folder", help="Folder path", default=None)
.option("--task", help="Task name", default=None)
.option("--app", help="Application name", default=None)
.option(
"--envgroup",
help="Environment group (e.g. \"farm\")",
default=None
)
)
# Convert main command to click object and add it to parent group
addon_click_group.add_command(
main_group.to_click_obj()
)
def _cli_main(self):
pass
def _cli_extract_environments(
self, output_json_path, project, folder, task, app, envgroup
):
"""Produces json file with environment based on project and app.
Called by farm integration to propagate environment into farm jobs.
Args:
output_json_path (str): Output json file path.
project (str): Project name.
folder (str): Folder path.
task (str): Task name.
app (str): Full application name e.g. 'maya/2024'.
envgroup (str): Environment group.
"""
if all((project, folder, task, app)):
env = self.get_farm_publish_environment_variables(
project, folder, task, app, env_group=envgroup,
)
else:
env = os.environ.copy()
output_dir = os.path.dirname(output_json_path)
if not os.path.exists(output_dir):
os.makedirs(output_dir)
with open(output_json_path, "w") as file_stream:
json.dump(env, file_stream, indent=4)
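# Illustrative consumer-side sketch (the file path is a hypothetical
# example): a farm job can read the JSON produced by 'extractenvironments'
# and apply it before publishing starts.
def _example_apply_extracted_environments(json_path="/tmp/ayon_env.json"):
    with open(json_path, "r") as stream:
        env = json.load(stream)
    os.environ.update(env)
    return env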

View file

@@ -1,6 +0,0 @@
import os
APPLICATIONS_ADDON_ROOT = os.path.dirname(os.path.abspath(__file__))
PLATFORM_NAMES = {"windows", "linux", "darwin"}
DEFAULT_ENV_SUBGROUP = "standard"

View file

@@ -1,404 +0,0 @@
import os
import platform
import json
import copy
from ayon_core.lib import find_executable
class LaunchTypes:
"""Launch types are filters for pre/post-launch hooks.
Please use these variables instead of string literals, in case their
values change.
"""
# Local launch - application is launched on local machine
local = "local"
# Farm render job - application is on farm
farm_render = "farm-render"
# Farm publish job - integration post-render job
farm_publish = "farm-publish"
# Remote launch - application is launched on a remote machine from which
# publishing can be started
remote = "remote"
# Automated launch - application is launched with automated publishing
automated = "automated"
class ApplicationExecutable:
"""Representation of executable loaded from settings."""
def __init__(self, executable):
# Try to format executable with environments
try:
executable = executable.format(**os.environ)
except Exception:
pass
# On macOS check if the path to the executable exists when ".app" is appended
# - it is common that the settings path leads to "/Applications/Blender" but
# the real path is "/Applications/Blender.app"
if platform.system().lower() == "darwin":
executable = self.macos_executable_prep(executable)
self.executable_path = executable
def __str__(self):
return self.executable_path
def __repr__(self):
return "<{}> {}".format(self.__class__.__name__, self.executable_path)
@staticmethod
def macos_executable_prep(executable):
"""Try to find full path to executable file.
Real executable is stored in '*.app/Contents/MacOS/<executable>'.
Having path to '*.app' gives ability to read it's plist info and
use "CFBundleExecutable" key from plist to know what is "executable."
Plist is stored in '*.app/Contents/Info.plist'.
This is because some '*.app' directories don't have same permissions
as real executable.
"""
# Try to find if there is `.app` file
if not os.path.exists(executable):
_executable = executable + ".app"
if os.path.exists(_executable):
executable = _executable
# Try to find real executable if executable has `Contents` subfolder
contents_dir = os.path.join(executable, "Contents")
if os.path.exists(contents_dir):
executable_filename = None
# Load plist file and check for bundle executable
plist_filepath = os.path.join(contents_dir, "Info.plist")
if os.path.exists(plist_filepath):
import plistlib
if hasattr(plistlib, "load"):
with open(plist_filepath, "rb") as stream:
parsed_plist = plistlib.load(stream)
else:
parsed_plist = plistlib.readPlist(plist_filepath)
executable_filename = parsed_plist.get("CFBundleExecutable")
if executable_filename:
executable = os.path.join(
contents_dir, "MacOS", executable_filename
)
return executable
def as_args(self):
return [self.executable_path]
def _realpath(self):
"""Check if path is valid executable path."""
# Check for executable in PATH
result = find_executable(self.executable_path)
if result is not None:
return result
# This is not 100% validation but it is better than removing the ability
# to launch .bat, .sh or extensionless files
if os.path.exists(self.executable_path):
return self.executable_path
return None
def exists(self):
if not self.executable_path:
return False
return bool(self._realpath())
class UndefinedApplicationExecutable(ApplicationExecutable):
"""Some applications do not require executable path from settings.
In that case this class is used to "fake" existing executable.
"""
def __init__(self):
pass
def __str__(self):
return self.__class__.__name__
def __repr__(self):
return "<{}>".format(self.__class__.__name__)
def as_args(self):
return []
def exists(self):
return True
class ApplicationGroup:
"""Hold information about application group.
Application group wraps different versions(variants) of application.
e.g. "maya" is group and "maya_2020" is variant.
Group hold `host_name` which is implementation name used in AYON. Also
holds `enabled` if whole app group is enabled or `icon` for application
icon path in resources.
Group has also `environment` which hold same environments for all variants.
Args:
name (str): Groups' name.
data (dict): Group defying data loaded from settings.
manager (ApplicationManager): Manager that created the group.
"""
def __init__(self, name, data, manager):
self.name = name
self.manager = manager
self._data = data
self.enabled = data["enabled"]
self.label = data["label"] or None
self.icon = data["icon"] or None
env = {}
try:
env = json.loads(data["environment"])
except Exception:
pass
self._environment = env
host_name = data["host_name"] or None
self.is_host = host_name is not None
self.host_name = host_name
settings_variants = data["variants"]
variants = {}
for variant_data in settings_variants:
app_variant = Application(variant_data, self)
variants[app_variant.name] = app_variant
self.variants = variants
def __repr__(self):
return "<{}> - {}".format(self.__class__.__name__, self.name)
def __iter__(self):
for variant in self.variants.values():
yield variant
@property
def environment(self):
return copy.deepcopy(self._environment)
class Application:
"""Hold information about application.
Object by itself does nothing special.
Args:
data (dict): Data for the version containing information about
executables, variant label or if is enabled.
Only required key is `executables`.
group (ApplicationGroup): App group object that created the application
and under which application belongs.
"""
def __init__(self, data, group):
self._data = data
name = data["name"]
label = data["label"] or name
enabled = False
if group.enabled:
enabled = data.get("enabled", True)
if group.label:
full_label = " ".join((group.label, label))
else:
full_label = label
env = {}
try:
env = json.loads(data["environment"])
except Exception:
pass
arguments = data["arguments"]
if isinstance(arguments, dict):
arguments = arguments.get(platform.system().lower())
if not arguments:
arguments = []
_executables = data["executables"].get(platform.system().lower(), [])
executables = [
ApplicationExecutable(executable)
for executable in _executables
]
self.group = group
self.name = name
self.label = label
self.enabled = enabled
self.use_python_2 = data.get("use_python_2", False)
self.full_name = "/".join((group.name, name))
self.full_label = full_label
self.arguments = arguments
self.executables = executables
self._environment = env
def __repr__(self):
return "<{}> - {}".format(self.__class__.__name__, self.full_name)
@property
def environment(self):
return copy.deepcopy(self._environment)
@property
def manager(self):
return self.group.manager
@property
def host_name(self):
return self.group.host_name
@property
def icon(self):
return self.group.icon
@property
def is_host(self):
return self.group.is_host
def find_executable(self):
"""Try to find existing executable for application.
Returns (str): Path to an existing executable from `executables`, or
None if none exists.
"""
for executable in self.executables:
if executable.exists():
return executable
return None
def launch(self, *args, **kwargs):
"""Launch the application.
The manager's launch method is used for this purpose to keep the logic
in one place.
Arguments must match the manager's launch method, which is why *args and
**kwargs are used.
Returns:
subprocess.Popen: Return executed process as Popen object.
"""
return self.manager.launch(self.full_name, *args, **kwargs)
class EnvironmentToolGroup:
"""Hold information about environment tool group.
An environment tool group may hold different variants of the same tool and
set environments that are shared by all of them.
e.g. "mtoa" may have different versions, but all environment variables
except one are the same.
Args:
data (dict): Group information with variants.
manager (ApplicationManager): Manager that creates the group.
"""
def __init__(self, data, manager):
name = data["name"]
label = data["label"]
self.name = name
self.label = label
self._data = data
self.manager = manager
environment = {}
try:
environment = json.loads(data["environment"])
except Exception:
pass
self._environment = environment
variants = data.get("variants") or []
variants_by_name = {}
for variant_data in variants:
tool = EnvironmentTool(variant_data, self)
variants_by_name[tool.name] = tool
self.variants = variants_by_name
def __repr__(self):
return "<{}> - {}".format(self.__class__.__name__, self.name)
def __iter__(self):
for variant in self.variants.values():
yield variant
@property
def environment(self):
return copy.deepcopy(self._environment)
class EnvironmentTool:
"""Hold information about application tool.
Structure of tool information.
Args:
variant_data (dict): Variant data with environments and
host and app variant filters.
group (EnvironmentToolGroup): Group which wraps the tool.
"""
def __init__(self, variant_data, group):
# Backwards compatibility 3.9.1 - 3.9.2
# - 'variant_data' used to contain only environments but now also contains
# host and application variant filters
name = variant_data["name"]
label = variant_data["label"]
host_names = variant_data["host_names"]
app_variants = variant_data["app_variants"]
environment = {}
try:
environment = json.loads(variant_data["environment"])
except Exception:
pass
self.host_names = host_names
self.app_variants = app_variants
self.name = name
self.variant_label = label
self.label = " ".join((group.label, label))
self.group = group
self._environment = environment
self.full_name = "/".join((group.name, name))
def __repr__(self):
return "<{}> - {}".format(self.__class__.__name__, self.full_name)
@property
def environment(self):
return copy.deepcopy(self._environment)
def is_valid_for_app(self, app):
"""Is tool valid for application.
Args:
app (Application): Application for which environments are prepared.
"""
if self.app_variants and app.full_name not in self.app_variants:
return False
if self.host_names and app.host_name not in self.host_names:
return False
return True
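# Standalone illustrative sketch of the ".app" resolution described in
# 'ApplicationExecutable.macos_executable_prep'; the example path below is
# hypothetical. e.g. "/Applications/Blender" may resolve to
# "/Applications/Blender.app/Contents/MacOS/Blender" on macOS.
def _example_resolve_macos_bundle(executable):
    import plistlib

    if not os.path.exists(executable) and os.path.exists(executable + ".app"):
        executable += ".app"
    contents_dir = os.path.join(executable, "Contents")
    plist_filepath = os.path.join(contents_dir, "Info.plist")
    if not os.path.exists(plist_filepath):
        return executable
    with open(plist_filepath, "rb") as stream:
        parsed_plist = plistlib.load(stream)
    executable_filename = parsed_plist.get("CFBundleExecutable")
    if executable_filename:
        return os.path.join(contents_dir, "MacOS", executable_filename)
    return executable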

View file

@@ -1,50 +0,0 @@
class ApplicationNotFound(Exception):
"""Application was not found in ApplicationManager by name."""
def __init__(self, app_name):
self.app_name = app_name
super(ApplicationNotFound, self).__init__(
"Application \"{}\" was not found.".format(app_name)
)
class ApplicationExecutableNotFound(Exception):
"""Defined executable paths are not available on the machine."""
def __init__(self, application):
self.application = application
details = None
if not application.executables:
msg = (
"Executable paths for application \"{}\"({}) are not set."
)
else:
msg = (
"Defined executable paths for application \"{}\"({})"
" are not available on this machine."
)
details = "Defined paths:"
for executable in application.executables:
details += "\n- " + executable.executable_path
self.msg = msg.format(application.full_label, application.full_name)
self.details = details
exc_msg = str(self.msg)
if details:
# Is it a good idea to pass a newline symbol to the exception message?
exc_msg += "\n" + details
self.exc_msg = exc_msg
super(ApplicationExecutableNotFound, self).__init__(exc_msg)
class ApplicationLaunchFailed(Exception):
"""Application launch failed due to known reason.
Message should be self explanatory as traceback won't be shown.
"""
pass
class MissingRequiredKey(KeyError):
pass

View file

@@ -1,150 +0,0 @@
import platform
from abc import ABCMeta, abstractmethod
import six
from ayon_core.lib import Logger
from .defs import LaunchTypes
@six.add_metaclass(ABCMeta)
class LaunchHook:
"""Abstract base class of launch hook."""
# Order of the launch hook; it will be executed last if set to None.
order = None
# List of host implementations, skipped if empty.
hosts = set()
# Set of application groups
app_groups = set()
# Set of specific application names
app_names = set()
# Set of platform availability
platforms = set()
# Set of launch types for which the hook is available
# - if empty then the hook is available for all launch types
# - by default contains 'local' which is the most common reason for launch hooks
launch_types = {LaunchTypes.local}
def __init__(self, launch_context):
"""Constructor of launch hook.
Should always be called.
"""
self.log = Logger.get_logger(self.__class__.__name__)
self.launch_context = launch_context
is_valid = self.class_validation(launch_context)
if is_valid:
is_valid = self.validate()
self.is_valid = is_valid
@classmethod
def class_validation(cls, launch_context):
"""Validation of class attributes by launch context.
Args:
launch_context (ApplicationLaunchContext): Context of launching
application.
Returns:
bool: Is launch hook valid for the context by class attributes.
"""
if cls.platforms:
low_platforms = tuple(
_platform.lower()
for _platform in cls.platforms
)
if platform.system().lower() not in low_platforms:
return False
if cls.hosts:
if launch_context.host_name not in cls.hosts:
return False
if cls.app_groups:
if launch_context.app_group.name not in cls.app_groups:
return False
if cls.app_names:
if launch_context.app_name not in cls.app_names:
return False
if cls.launch_types:
if launch_context.launch_type not in cls.launch_types:
return False
return True
@property
def data(self):
return self.launch_context.data
@property
def application(self):
return getattr(self.launch_context, "application", None)
@property
def manager(self):
return getattr(self.application, "manager", None)
@property
def host_name(self):
return getattr(self.application, "host_name", None)
@property
def app_group(self):
return getattr(self.application, "group", None)
@property
def app_name(self):
return getattr(self.application, "full_name", None)
@property
def addons_manager(self):
return getattr(self.launch_context, "addons_manager", None)
@property
def modules_manager(self):
"""
Deprecated:
Use 'addons_manager' instead.
"""
return self.addons_manager
def validate(self):
"""Optional validation of launch hook on initialization.
Returns:
bool: Hook is valid (True) or invalid (False).
"""
# QUESTION Not sure if this method has any usable potential.
# - maybe result can be based on settings
return True
@abstractmethod
def execute(self, *args, **kwargs):
"""Abstract execute method where logic of hook is."""
pass
class PreLaunchHook(LaunchHook):
"""Abstract class of prelaunch hook.
This launch hook will be processed before the application is launched.
If any exception happens during processing, the application won't be
launched.
"""
class PostLaunchHook(LaunchHook):
"""Abstract class of postlaunch hook.
This launch hook will be processed after the application is launched.
Exceptions raised during processing are ignored, and processing of other
postlaunch hooks won't stop either.
"""

View file

@@ -1,676 +0,0 @@
import os
import sys
import copy
import json
import tempfile
import platform
import inspect
import subprocess
import six
from ayon_core import AYON_CORE_ROOT
from ayon_core.settings import get_studio_settings
from ayon_core.lib import (
Logger,
modules_from_path,
classes_from_module,
get_linux_launcher_args,
)
from ayon_core.addon import AddonsManager
from .constants import DEFAULT_ENV_SUBGROUP
from .exceptions import (
ApplicationNotFound,
ApplicationExecutableNotFound,
)
from .hooks import PostLaunchHook, PreLaunchHook
from .defs import EnvironmentToolGroup, ApplicationGroup, LaunchTypes
class ApplicationManager:
"""Load applications and tools and store them by their full name.
Args:
studio_settings (dict): Preloaded studio settings. When passed, the
manager will always use these values. This gives the ability to create
a manager using different settings.
"""
def __init__(self, studio_settings=None):
self.log = Logger.get_logger(self.__class__.__name__)
self.app_groups = {}
self.applications = {}
self.tool_groups = {}
self.tools = {}
self._studio_settings = studio_settings
self.refresh()
def set_studio_settings(self, studio_settings):
"""Ability to change init system settings.
This will trigger refresh of manager.
"""
self._studio_settings = studio_settings
self.refresh()
def refresh(self):
"""Refresh applications from settings."""
self.app_groups.clear()
self.applications.clear()
self.tool_groups.clear()
self.tools.clear()
if self._studio_settings is not None:
settings = copy.deepcopy(self._studio_settings)
else:
settings = get_studio_settings(
clear_metadata=False, exclude_locals=False
)
applications_addon_settings = settings["applications"]
# Prepare known applications
app_defs = applications_addon_settings["applications"]
additional_apps = app_defs.pop("additional_apps")
for additional_app in additional_apps:
app_name = additional_app.pop("name")
if app_name in app_defs:
self.log.warning((
"Additional application '{}' is already"
" in built-in applications."
).format(app_name))
app_defs[app_name] = additional_app
for group_name, variant_defs in app_defs.items():
group = ApplicationGroup(group_name, variant_defs, self)
self.app_groups[group_name] = group
for app in group:
self.applications[app.full_name] = app
tools_definitions = applications_addon_settings["tool_groups"]
for tool_group_data in tools_definitions:
group = EnvironmentToolGroup(tool_group_data, self)
self.tool_groups[group.name] = group
for tool in group:
self.tools[tool.full_name] = tool
def find_latest_available_variant_for_group(self, group_name):
group = self.app_groups.get(group_name)
if group is None or not group.enabled:
return None
output = None
for _, variant in reversed(sorted(group.variants.items())):
executable = variant.find_executable()
if executable:
output = variant
break
return output
def create_launch_context(self, app_name, **data):
"""Prepare launch context for application.
Args:
app_name (str): Name of application that should be launched.
**data (Any): Any additional data. Data may be used during
preparation to store objects usable in multiple places.
Returns:
ApplicationLaunchContext: Launch context for application.
Raises:
ApplicationNotFound: Application was not found by entered name.
"""
app = self.applications.get(app_name)
if not app:
raise ApplicationNotFound(app_name)
executable = app.find_executable()
return ApplicationLaunchContext(
app, executable, **data
)
def launch_with_context(self, launch_context):
"""Launch application using existing launch context.
Args:
launch_context (ApplicationLaunchContext): Prepared launch
context.
"""
if not launch_context.executable:
raise ApplicationExecutableNotFound(launch_context.application)
return launch_context.launch()
def launch(self, app_name, **data):
"""Launch procedure.
For host applications the data is expected to contain "project_name",
"folder_path" and "task_name".
Args:
app_name (str): Name of application that should be launched.
**data (dict): Any additional data. Data may be used during
preparation to store objects usable in multiple places.
Raises:
ApplicationNotFound: Application was not found by entered
argument `app_name`.
ApplicationExecutableNotFound: Executables in application definition
were not found on this machine.
ApplicationLaunchFailed: Something important for application launch
failed. Exception should contain explanation message,
traceback should not be needed.
"""
context = self.create_launch_context(app_name, **data)
return self.launch_with_context(context)
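# Illustrative usage sketch only; the application name and context values
# below are hypothetical.
def _example_launch_host():
    manager = ApplicationManager()
    return manager.launch(
        "maya/2024",
        project_name="demo_project",
        folder_path="/shots/sh010",
        task_name="animation",
    )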
class ApplicationLaunchContext:
"""Context of launching application.
Main purpose of context is to prepare launch arguments and keyword
arguments for new process. Most important part of keyword arguments
preparations are environment variables.
During the whole process is possible to use `data` attribute to store
object usable in multiple places.
Launch arguments are strings in list. It is possible to "chain" argument
when order of them matters. That is possible to do with adding list where
order is right and should not change.
NOTE: This is recommendation, not requirement.
e.g.: `["nuke.exe", "--NukeX"]` -> In this case any part of process may
insert argument between `nuke.exe` and `--NukeX`. To keep them together
it is better to wrap them in another list: `[["nuke.exe", "--NukeX"]]`.
Notes:
It is possible to use launch context only to prepare environment
variables. In that case `executable` may be None and can be used
'run_prelaunch_hooks' method to run prelaunch hooks which prepare
them.
Args:
application (Application): Application definition.
executable (ApplicationExecutable): Object with path to executable.
env_group (Optional[str]): Environment variable group. If not set
'DEFAULT_ENV_SUBGROUP' is used.
launch_type (Optional[str]): Launch type. If not set 'local' is used.
**data (dict): Any additional data. Data may be used during
preparation to store objects usable in multiple places.
"""
def __init__(
self,
application,
executable,
env_group=None,
launch_type=None,
**data
):
# Application object
self.application = application
self.addons_manager = AddonsManager()
# Logger
logger_name = "{}-{}".format(self.__class__.__name__,
self.application.full_name)
self.log = Logger.get_logger(logger_name)
self.executable = executable
if launch_type is None:
launch_type = LaunchTypes.local
self.launch_type = launch_type
if env_group is None:
env_group = DEFAULT_ENV_SUBGROUP
self.env_group = env_group
self.data = dict(data)
launch_args = []
if executable is not None:
launch_args = executable.as_args()
# subprocess.Popen launch arguments (first argument in constructor)
self.launch_args = launch_args
self.launch_args.extend(application.arguments)
if self.data.get("app_args"):
self.launch_args.extend(self.data.pop("app_args"))
# Handle launch environments
src_env = self.data.pop("env", None)
if src_env is not None and not isinstance(src_env, dict):
self.log.warning((
"Passed `env` kwarg has invalid type: {}. Expected: `dict`."
" Using `os.environ` instead."
).format(str(type(src_env))))
src_env = None
if src_env is None:
src_env = os.environ
ignored_env = {"QT_API", }
env = {
key: str(value)
for key, value in src_env.items()
if key not in ignored_env
}
# subprocess.Popen keyword arguments
self.kwargs = {"env": env}
if platform.system().lower() == "windows":
# Detach new process from currently running process on Windows
flags = (
subprocess.CREATE_NEW_PROCESS_GROUP
| subprocess.DETACHED_PROCESS
)
self.kwargs["creationflags"] = flags
if not sys.stdout:
self.kwargs["stdout"] = subprocess.DEVNULL
self.kwargs["stderr"] = subprocess.DEVNULL
self.prelaunch_hooks = None
self.postlaunch_hooks = None
self.process = None
self._prelaunch_hooks_executed = False
@property
def env(self):
if (
"env" not in self.kwargs
or self.kwargs["env"] is None
):
self.kwargs["env"] = {}
return self.kwargs["env"]
@env.setter
def env(self, value):
if not isinstance(value, dict):
raise ValueError(
"'env' attribute expect 'dict' object. Got: {}".format(
str(type(value))
)
)
self.kwargs["env"] = value
@property
def modules_manager(self):
"""
Deprecated:
Use 'addons_manager' instead.
"""
return self.addons_manager
def _collect_addons_launch_hook_paths(self):
"""Helper to collect application launch hooks from addons.
A module has to have the 'get_launch_hook_paths' method implemented,
which may expect an application as an argument or no argument at all.
Returns:
List[str]: Paths to launch hook directories.
"""
expected_types = (list, tuple, set)
output = []
for module in self.addons_manager.get_enabled_addons():
# Skip module if it does not implement 'get_launch_hook_paths'
func = getattr(module, "get_launch_hook_paths", None)
if func is None:
continue
func = module.get_launch_hook_paths
if hasattr(inspect, "signature"):
sig = inspect.signature(func)
expect_args = len(sig.parameters) > 0
else:
expect_args = len(inspect.getargspec(func)[0]) > 0
# Pass application argument if the method expects it.
try:
if expect_args:
hook_paths = func(self.application)
else:
hook_paths = func()
except Exception:
self.log.warning(
"Failed to call 'get_launch_hook_paths'",
exc_info=True
)
continue
if not hook_paths:
continue
# Convert string to list
if isinstance(hook_paths, six.string_types):
hook_paths = [hook_paths]
# Skip invalid types
if not isinstance(hook_paths, expected_types):
self.log.warning((
"Result of `get_launch_hook_paths`"
" has invalid type {}. Expected {}"
).format(type(hook_paths), expected_types))
continue
output.extend(hook_paths)
return output
def paths_to_launch_hooks(self):
"""Directory paths where to look for launch hooks."""
# This method has potential to be part of application manager (maybe).
paths = []
# TODO load additional studio paths from settings
global_hooks_dir = os.path.join(AYON_CORE_ROOT, "hooks")
hooks_dirs = [
global_hooks_dir
]
if self.host_name:
# If the host requires launch hooks and is a module then launch hooks
# should be collected using '_collect_addons_launch_hook_paths'
# - the module has to implement 'get_launch_hook_paths'
host_module = self.addons_manager.get_host_addon(self.host_name)
if not host_module:
hooks_dirs.append(os.path.join(
AYON_CORE_ROOT, "hosts", self.host_name, "hooks"
))
for path in hooks_dirs:
if (
os.path.exists(path)
and os.path.isdir(path)
and path not in paths
):
paths.append(path)
# Load modules paths
paths.extend(self._collect_addons_launch_hook_paths())
return paths
def discover_launch_hooks(self, force=False):
"""Load and prepare launch hooks."""
if (
self.prelaunch_hooks is not None
or self.postlaunch_hooks is not None
):
if not force:
self.log.info("Launch hooks were already discovered.")
return
self.prelaunch_hooks.clear()
self.postlaunch_hooks.clear()
self.log.debug("Discovery of launch hooks started.")
paths = self.paths_to_launch_hooks()
self.log.debug("Paths searched for launch hooks:\n{}".format(
"\n".join("- {}".format(path) for path in paths)
))
all_classes = {
"pre": [],
"post": []
}
for path in paths:
if not os.path.exists(path):
self.log.info(
"Path to launch hooks does not exist: \"{}\"".format(path)
)
continue
modules, _crashed = modules_from_path(path)
for _filepath, module in modules:
all_classes["pre"].extend(
classes_from_module(PreLaunchHook, module)
)
all_classes["post"].extend(
classes_from_module(PostLaunchHook, module)
)
for launch_type, classes in all_classes.items():
hooks_with_order = []
hooks_without_order = []
for klass in classes:
try:
hook = klass(self)
if not hook.is_valid:
self.log.debug(
"Skipped hook invalid for current launch context: "
"{}".format(klass.__name__)
)
continue
if inspect.isabstract(hook):
self.log.debug("Skipped abstract hook: {}".format(
klass.__name__
))
continue
# Separate hooks by pre/post class
if hook.order is None:
hooks_without_order.append(hook)
else:
hooks_with_order.append(hook)
except Exception:
self.log.warning(
"Initialization of hook failed: "
"{}".format(klass.__name__),
exc_info=True
)
# Sort hooks with order by order
ordered_hooks = list(sorted(
hooks_with_order, key=lambda obj: obj.order
))
# Extend ordered hooks with hooks without defined order
ordered_hooks.extend(hooks_without_order)
if launch_type == "pre":
self.prelaunch_hooks = ordered_hooks
else:
self.postlaunch_hooks = ordered_hooks
self.log.debug("Found {} prelaunch and {} postlaunch hooks.".format(
len(self.prelaunch_hooks), len(self.postlaunch_hooks)
))
@property
def app_name(self):
return self.application.name
@property
def host_name(self):
return self.application.host_name
@property
def app_group(self):
return self.application.group
@property
def manager(self):
return self.application.manager
def _run_process(self):
# Windows and MacOS have easier process start
low_platform = platform.system().lower()
if low_platform in ("windows", "darwin"):
return subprocess.Popen(self.launch_args, **self.kwargs)
# Linux uses a mid process
# - it is possible that the mid process executable is not
# available for this version of AYON; in that case use a standard
# launch
launch_args = get_linux_launcher_args()
if launch_args is None:
return subprocess.Popen(self.launch_args, **self.kwargs)
# Prepare data that will be passed to midprocess
# - store arguments to a json and pass path to json as last argument
# - pass environments to set
app_env = self.kwargs.pop("env", {})
json_data = {
"args": self.launch_args,
"env": app_env
}
if app_env:
# Filter environments of subprocess
self.kwargs["env"] = {
key: value
for key, value in os.environ.items()
if key in app_env
}
# Create temp file
json_temp = tempfile.NamedTemporaryFile(
mode="w", prefix="op_app_args", suffix=".json", delete=False
)
json_temp.close()
json_temp_filepath = json_temp.name
with open(json_temp_filepath, "w") as stream:
json.dump(json_data, stream)
launch_args.append(json_temp_filepath)
# Create mid-process which will launch application
process = subprocess.Popen(launch_args, **self.kwargs)
# Wait until the process finishes
# - This is important! The process would stay in "open" state otherwise.
process.wait()
# Remove the temp file
os.remove(json_temp_filepath)
# Return process which is already terminated
return process
def run_prelaunch_hooks(self):
"""Run prelaunch hooks.
This method will be executed only once; any future calls will skip
the processing.
"""
if self._prelaunch_hooks_executed:
self.log.warning("Prelaunch hooks were already executed.")
return
# Discover launch hooks
self.discover_launch_hooks()
# Execute prelaunch hooks
for prelaunch_hook in self.prelaunch_hooks:
self.log.debug("Executing prelaunch hook: {}".format(
str(prelaunch_hook.__class__.__name__)
))
prelaunch_hook.execute()
self._prelaunch_hooks_executed = True
def launch(self):
"""Collect data for new process and then create it.
This method must not be executed more than once.
Returns:
subprocess.Popen: Created process as Popen object.
"""
if self.process is not None:
self.log.warning("Application was already launched.")
return
if not self._prelaunch_hooks_executed:
self.run_prelaunch_hooks()
self.log.debug("All prelaunch hook executed. Starting new process.")
# Prepare subprocess args
args_len_str = ""
if isinstance(self.launch_args, str):
args = self.launch_args
else:
args = self.clear_launch_args(self.launch_args)
args_len_str = " ({})".format(len(args))
self.log.info(
"Launching \"{}\" with args{}: {}".format(
self.application.full_name, args_len_str, args
)
)
self.launch_args = args
# Run process
self.process = self._run_process()
# Process post launch hooks
for postlaunch_hook in self.postlaunch_hooks:
self.log.debug("Executing postlaunch hook: {}".format(
str(postlaunch_hook.__class__.__name__)
))
# TODO how to handle errors?
# - store them in a variable to keep them accessible?
try:
postlaunch_hook.execute()
except Exception:
self.log.warning(
"After launch procedures were not successful.",
exc_info=True
)
self.log.debug("Launch of {} finished.".format(
self.application.full_name
))
return self.process
@staticmethod
def clear_launch_args(args):
"""Collect launch arguments to final order.
Launch arguments should be a list that may contain other lists; this
function will unpack inner lists and keep the ordering.
```
# source
[ [ arg1, [ arg2, arg3 ] ], arg4, [arg5, arg6]]
# result
[ arg1, arg2, arg3, arg4, arg5, arg6]
```
Args:
args (list): Source arguments in a list that may contain inner lists.
Return:
list: Unpacked arguments.
"""
if isinstance(args, str):
return args
all_cleared = False
while not all_cleared:
all_cleared = True
new_args = []
for arg in args:
if isinstance(arg, (list, tuple, set)):
all_cleared = False
for _arg in arg:
new_args.append(_arg)
else:
new_args.append(arg)
args = new_args
return args
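# Illustrative example of 'clear_launch_args' (hypothetical arguments):
# nested launch arguments are flattened while keeping their order.
def _example_clear_launch_args():
    args = [["nuke.exe", "--NukeX"], "scene.nk"]
    # -> ["nuke.exe", "--NukeX", "scene.nk"]
    return ApplicationLaunchContext.clear_launch_args(args)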

View file

@@ -1,48 +0,0 @@
"""
Run after global plugin 'CollectHostName' in ayon_core.
Requires:
None
Provides:
context -> hostName (str)
context -> appName (str)
context -> appLabel (str)
"""
import os
import pyblish.api
from ayon_applications import ApplicationManager
class CollectAppName(pyblish.api.ContextPlugin):
"""Collect avalon host name to context."""
label = "Collect App Name"
order = pyblish.api.CollectorOrder - 0.499999
def process(self, context):
host_name = context.data.get("hostName")
app_name = context.data.get("appName")
app_label = context.data.get("appLabel")
# Don't override value if it is already set
if host_name and app_name and app_label:
return
# Use AYON_APP_NAME to get full app name
if not app_name:
app_name = os.environ.get("AYON_APP_NAME")
# Fill missing values based on app full name
if (not host_name or not app_label) and app_name:
app_manager = ApplicationManager()
app = app_manager.applications.get(app_name)
if app:
if not host_name:
host_name = app.host_name
if not app_label:
app_label = app.full_label
context.data["hostName"] = host_name
context.data["appName"] = app_name
context.data["appLabel"] = app_label

View file

@@ -1,609 +0,0 @@
import os
import copy
import json
import platform
import collections
import six
import acre
from ayon_core import AYON_CORE_ROOT
from ayon_core.settings import get_project_settings
from ayon_core.lib import Logger, get_ayon_username
from ayon_core.addon import AddonsManager
from ayon_core.pipeline import HOST_WORKFILE_EXTENSIONS
from ayon_core.pipeline.template_data import get_template_data
from ayon_core.pipeline.workfile import (
get_workfile_template_key,
get_workdir_with_workdir_data,
get_last_workfile,
should_use_last_workfile_on_launch,
should_open_workfiles_tool_on_launch,
)
from .constants import PLATFORM_NAMES, DEFAULT_ENV_SUBGROUP
from .exceptions import MissingRequiredKey, ApplicationLaunchFailed
from .manager import ApplicationManager
def parse_environments(env_data, env_group=None, platform_name=None):
"""Parse environment values from settings byt group and platform.
Data may contain up to 2 hierarchical levels of dictionaries. At the end
of the last level must be string or list. List is joined using platform
specific joiner (';' for windows and ':' for linux and mac).
Hierarchical levels can contain keys for subgroups and platform name.
Platform specific values must be always last level of dictionary. Platform
names are "windows" (MS Windows), "linux" (any linux distribution) and
"darwin" (any MacOS distribution).
Subgroups are helpers added mainly for standard and on farm usage. Farm
may require different environments for e.g. licence related values or
plugins. Default subgroup is "standard".
Examples:
```
{
# Unchanged value
"ENV_KEY1": "value",
# Empty values are kept (unset environment variable)
"ENV_KEY2": "",
# Join list values with ':' or ';'
"ENV_KEY3": ["value1", "value2"],
# Environment groups
"ENV_KEY4": {
"standard": "DEMO_SERVER_URL",
"farm": "LICENCE_SERVER_URL"
},
# Platform specific (and only for windows and mac)
"ENV_KEY5": {
"windows": "windows value",
"darwin": ["value 1", "value 2"]
},
# Environment groups and platform combination
"ENV_KEY6": {
"farm": "FARM_VALUE",
"standard": {
"windows": ["value1", "value2"],
"linux": "value1",
"darwin": ""
}
}
}
```
"""
output = {}
if not env_data:
return output
if not env_group:
env_group = DEFAULT_ENV_SUBGROUP
if not platform_name:
platform_name = platform.system().lower()
for key, value in env_data.items():
if isinstance(value, dict):
# Look if any key is platform key
# - if it does not contain platform keys, expect that it represents
# an environment group
if not PLATFORM_NAMES.intersection(set(value.keys())):
# Skip the key if group is not available
if env_group not in value:
continue
value = value[env_group]
# Check again if value is dictionary
# - this time there should be only platform keys
if isinstance(value, dict):
value = value.get(platform_name)
# Check if value is list and join its values
# QUESTION Should empty values be skipped?
if isinstance(value, (list, tuple)):
value = os.pathsep.join(value)
# Set key to output if value is string
if isinstance(value, six.string_types):
output[key] = value
return output
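# Illustrative call only; key names and values below are hypothetical.
# With env_group "standard" and platform_name "linux" the fragment parses to
# {"MTOA_PATH": "/opt/mtoa", "PATH_EXTRA": "/opt/a:/opt/b"}
# (list values are joined with the local os.pathsep).
def _example_parse_environments():
    env_data = {
        "MTOA_PATH": {"standard": "/opt/mtoa", "farm": "/farm/mtoa"},
        "PATH_EXTRA": ["/opt/a", "/opt/b"],
    }
    return parse_environments(
        env_data, env_group="standard", platform_name="linux"
    )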
class EnvironmentPrepData(dict):
"""Helper dictionary for storin temp data during environment prep.
Args:
data (dict): Data must contain required keys.
"""
required_keys = (
"project_entity", "folder_entity", "task_entity", "app", "anatomy"
)
def __init__(self, data):
for key in self.required_keys:
if key not in data:
raise MissingRequiredKey(key)
if not data.get("log"):
data["log"] = Logger.get_logger("EnvironmentPrepData")
if data.get("env") is None:
data["env"] = os.environ.copy()
project_name = data["project_entity"]["name"]
if "project_settings" not in data:
data["project_settings"] = get_project_settings(project_name)
super(EnvironmentPrepData, self).__init__(data)
def get_app_environments_for_context(
project_name,
folder_path,
task_name,
app_name,
env_group=None,
launch_type=None,
env=None,
addons_manager=None
):
"""Prepare environment variables by context.
Args:
project_name (str): Name of project.
folder_path (str): Folder path.
task_name (str): Name of task.
app_name (str): Name of application that is launched and can be found
by ApplicationManager.
env_group (Optional[str]): Name of environment group. If not passed
default group is used.
launch_type (Optional[str]): Type for which prelaunch hooks are
executed.
env (Optional[dict[str, str]]): Initial environment variables.
`os.environ` is used when not passed.
addons_manager (Optional[AddonsManager]): Initialized modules
manager.
Returns:
dict: Environments for passed context and application.
"""
# Prepare app object which can be obtained only from ApplicationManager
app_manager = ApplicationManager()
context = app_manager.create_launch_context(
app_name,
project_name=project_name,
folder_path=folder_path,
task_name=task_name,
env_group=env_group,
launch_type=launch_type,
env=env,
addons_manager=addons_manager,
modules_manager=addons_manager,
)
context.run_prelaunch_hooks()
return context.env
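# Illustrative usage sketch only; all context values are hypothetical. The
# local import assumes 'LaunchTypes' lives in the sibling 'defs' module as it
# does elsewhere in this addon.
def _example_farm_publish_environments():
    from .defs import LaunchTypes

    return get_app_environments_for_context(
        "demo_project",
        "/shots/sh010",
        "animation",
        "maya/2024",
        launch_type=LaunchTypes.farm_publish,
    )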
def _merge_env(env, current_env):
"""Modified function(merge) from acre module."""
result = current_env.copy()
for key, value in env.items():
# Keep missing keys by not filling `missing` kwarg
value = acre.lib.partial_format(value, data=current_env)
result[key] = value
return result
def _add_python_version_paths(app, env, logger, addons_manager):
"""Add vendor packages specific for a Python version."""
for addon in addons_manager.get_enabled_addons():
addon.modify_application_launch_arguments(app, env)
# Skip adding if host name is not set
if not app.host_name:
return
# Add Python 2/3 modules
python_vendor_dir = os.path.join(
AYON_CORE_ROOT,
"vendor",
"python"
)
if app.use_python_2:
pythonpath = os.path.join(python_vendor_dir, "python_2")
else:
pythonpath = os.path.join(python_vendor_dir, "python_3")
if not os.path.exists(pythonpath):
return
logger.debug("Adding Python version specific paths to PYTHONPATH")
python_paths = [pythonpath]
# Load PYTHONPATH from current launch context
python_path = env.get("PYTHONPATH")
if python_path:
python_paths.append(python_path)
# Set new PYTHONPATH to launch context environments
env["PYTHONPATH"] = os.pathsep.join(python_paths)
def prepare_app_environments(
data, env_group=None, implementation_envs=True, addons_manager=None
):
"""Modify launch environments based on launched app and context.
Args:
data (EnvironmentPrepData): Dictionary where result and intermediate
result will be stored.
"""
app = data["app"]
log = data["log"]
source_env = data["env"].copy()
if addons_manager is None:
addons_manager = AddonsManager()
_add_python_version_paths(app, source_env, log, addons_manager)
# Use environments from local settings
filtered_local_envs = {}
# NOTE Overrides for environment variables are not implemented in AYON.
# project_settings = data["project_settings"]
# whitelist_envs = project_settings["general"].get("local_env_white_list")
# if whitelist_envs:
# local_settings = get_local_settings()
# local_envs = local_settings.get("environments") or {}
# filtered_local_envs = {
# key: value
# for key, value in local_envs.items()
# if key in whitelist_envs
# }
# Apply local environment variables for already existing values
for key, value in filtered_local_envs.items():
if key in source_env:
source_env[key] = value
# `app_and_tool_labels` is only for debug purposes
app_and_tool_labels = [app.full_name]
# Environments for application
environments = [
app.group.environment,
app.environment
]
folder_entity = data.get("folder_entity")
# Add tools environments
groups_by_name = {}
tool_by_group_name = collections.defaultdict(dict)
if folder_entity:
# Make sure each tool group can be added only once
for key in folder_entity["attrib"].get("tools") or []:
tool = app.manager.tools.get(key)
if not tool or not tool.is_valid_for_app(app):
continue
groups_by_name[tool.group.name] = tool.group
tool_by_group_name[tool.group.name][tool.name] = tool
for group_name in sorted(groups_by_name.keys()):
group = groups_by_name[group_name]
environments.append(group.environment)
for tool_name in sorted(tool_by_group_name[group_name].keys()):
tool = tool_by_group_name[group_name][tool_name]
environments.append(tool.environment)
app_and_tool_labels.append(tool.full_name)
log.debug(
"Will add environments for apps and tools: {}".format(
", ".join(app_and_tool_labels)
)
)
env_values = {}
for _env_values in environments:
if not _env_values:
continue
# Choose right platform
tool_env = parse_environments(_env_values, env_group)
# Apply local environment variables
# - must happen between all values because they may be used during
# merge
for key, value in filtered_local_envs.items():
if key in tool_env:
tool_env[key] = value
# Merge dictionaries
env_values = _merge_env(tool_env, env_values)
merged_env = _merge_env(env_values, source_env)
loaded_env = acre.compute(merged_env, cleanup=False)
final_env = None
# Add host specific environments
if app.host_name and implementation_envs:
host_addon = addons_manager.get_host_addon(app.host_name)
add_implementation_envs = None
if host_addon:
add_implementation_envs = getattr(
host_addon, "add_implementation_envs", None
)
if add_implementation_envs:
# Function may only modify passed dict without returning value
final_env = add_implementation_envs(loaded_env, app)
if final_env is None:
final_env = loaded_env
keys_to_remove = set(source_env.keys()) - set(final_env.keys())
# Update env
data["env"].update(final_env)
for key in keys_to_remove:
data["env"].pop(key, None)
def apply_project_environments_value(
project_name, env, project_settings=None, env_group=None
):
"""Apply project specific environments on passed environments.
The environments are applied on passed `env` argument value so it is not
required to apply changes back.
Args:
project_name (str): Name of project for which environments should be
received.
env (dict): Environment values on which project specific environments
will be applied.
project_settings (dict): Project settings for passed project name.
Optional if project settings are already prepared.
Returns:
dict: Passed env values with applied project environments.
Raises:
KeyError: If project settings do not contain keys for project specific
environments.
"""
if project_settings is None:
project_settings = get_project_settings(project_name)
env_value = project_settings["core"]["project_environments"]
if env_value:
env_value = json.loads(env_value)
parsed_value = parse_environments(env_value, env_group)
env.update(acre.compute(
_merge_env(parsed_value, env),
cleanup=False
))
return env
def prepare_context_environments(data, env_group=None, addons_manager=None):
"""Modify launch environments with context data for launched host.
Args:
data (EnvironmentPrepData): Dictionary where result and intermediate
result will be stored.
"""
# Context environments
log = data["log"]
project_entity = data["project_entity"]
folder_entity = data["folder_entity"]
task_entity = data["task_entity"]
if not project_entity:
log.info(
"Skipping context environments preparation."
" Launch context does not contain required data."
)
return
# Load project specific environments
project_name = project_entity["name"]
project_settings = get_project_settings(project_name)
data["project_settings"] = project_settings
app = data["app"]
context_env = {
"AYON_PROJECT_NAME": project_entity["name"],
"AYON_APP_NAME": app.full_name
}
if folder_entity:
folder_path = folder_entity["path"]
context_env["AYON_FOLDER_PATH"] = folder_path
if task_entity:
context_env["AYON_TASK_NAME"] = task_entity["name"]
log.debug(
"Context environments set:\n{}".format(
json.dumps(context_env, indent=4)
)
)
data["env"].update(context_env)
# Apply project specific environments on current env value
# - apply them once the context environments are set
apply_project_environments_value(
project_name, data["env"], project_settings, env_group
)
if not app.is_host:
return
data["env"]["AYON_HOST_NAME"] = app.host_name
if not folder_entity or not task_entity:
# QUESTION replace with log.info and skip workfile discovery?
# - technically it should be possible to launch host without context
raise ApplicationLaunchFailed(
"Host launch require folder and task context."
)
workdir_data = get_template_data(
project_entity,
folder_entity,
task_entity,
app.host_name,
project_settings
)
data["workdir_data"] = workdir_data
anatomy = data["anatomy"]
task_type = workdir_data["task"]["type"]
# Temp solution how to pass task type to `_prepare_last_workfile`
data["task_type"] = task_type
try:
workdir = get_workdir_with_workdir_data(
workdir_data,
anatomy.project_name,
anatomy,
project_settings=project_settings
)
except Exception as exc:
raise ApplicationLaunchFailed(
"Error in anatomy.format: {}".format(str(exc))
)
if not os.path.exists(workdir):
log.debug(
"Creating workdir folder: \"{}\"".format(workdir)
)
try:
os.makedirs(workdir)
except Exception as exc:
raise ApplicationLaunchFailed(
"Couldn't create workdir because: {}".format(str(exc))
)
data["env"]["AYON_WORKDIR"] = workdir
_prepare_last_workfile(data, workdir, addons_manager)
def _prepare_last_workfile(data, workdir, addons_manager):
"""last workfile workflow preparation.
Function check if should care about last workfile workflow and tries
to find the last workfile. Both information are stored to `data` and
environments.
Last workfile is filled always (with version 1) even if any workfile
exists yet.
Args:
data (EnvironmentPrepData): Dictionary where result and intermediate
result will be stored.
workdir (str): Path to folder where workfiles should be stored.
"""
if not addons_manager:
addons_manager = AddonsManager()
log = data["log"]
_workdir_data = data.get("workdir_data")
if not _workdir_data:
log.info(
"Skipping last workfile preparation."
" Key `workdir_data` not filled."
)
return
app = data["app"]
workdir_data = copy.deepcopy(_workdir_data)
project_name = data["project_name"]
task_name = data["task_name"]
task_type = data["task_type"]
start_last_workfile = data.get("start_last_workfile")
if start_last_workfile is None:
start_last_workfile = should_use_last_workfile_on_launch(
project_name, app.host_name, task_name, task_type
)
else:
log.info("Opening of last workfile was disabled by user")
data["start_last_workfile"] = start_last_workfile
workfile_startup = should_open_workfiles_tool_on_launch(
project_name, app.host_name, task_name, task_type
)
data["workfile_startup"] = workfile_startup
# Store boolean as "0"(False) or "1"(True)
data["env"]["AVALON_OPEN_LAST_WORKFILE"] = (
str(int(bool(start_last_workfile)))
)
data["env"]["AYON_WORKFILE_TOOL_ON_START"] = (
str(int(bool(workfile_startup)))
)
_sub_msg = "" if start_last_workfile else " not"
log.debug(
"Last workfile should{} be opened on start.".format(_sub_msg)
)
# Last workfile path
last_workfile_path = data.get("last_workfile_path") or ""
if not last_workfile_path:
host_addon = addons_manager.get_host_addon(app.host_name)
if host_addon:
extensions = host_addon.get_workfile_extensions()
else:
extensions = HOST_WORKFILE_EXTENSIONS.get(app.host_name)
if extensions:
anatomy = data["anatomy"]
project_settings = data["project_settings"]
task_type = workdir_data["task"]["type"]
template_key = get_workfile_template_key(
project_name,
task_type,
app.host_name,
project_settings=project_settings
)
# Find last workfile
file_template = anatomy.get_template_item(
"work", template_key, "file"
).template
workdir_data.update({
"version": 1,
"user": get_ayon_username(),
"ext": extensions[0]
})
last_workfile_path = get_last_workfile(
workdir, file_template, workdir_data, extensions, True
)
    if not os.path.exists(last_workfile_path):
        log.debug((
            "Workfile for launch context does not exist"
            " yet but path will be set."
))
log.debug(
"Setting last workfile path: {}".format(last_workfile_path)
)
data["env"]["AYON_LAST_WORKFILE"] = last_workfile_path
data["last_workfile_path"] = last_workfile_path

View file

@ -176,9 +176,9 @@ def run(script):
# future versions might remove it.
first_arg = sys.argv[0]
if is_running_from_build():
comp_path = os.path.join(os.environ["AYON_ROOT"], "start.py")
else:
comp_path = os.getenv("AYON_EXECUTABLE")
else:
comp_path = os.path.join(os.environ["AYON_ROOT"], "start.py")
# Compare paths and remove first argument if it is the same as AYON
if Path(first_arg).resolve() == Path(comp_path).resolve():
sys.argv.pop(0)
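The comparison relies on Path.resolve() normalizing both sides before testing equality. A tiny standalone sketch with hypothetical paths (not the real AYON_ROOT/AYON_EXECUTABLE values):
from pathlib import Path
comp_path = "/opt/ayon/start.py"          # hypothetical AYON entry point
first_arg = "/opt/ayon/../ayon/start.py"  # hypothetical sys.argv[0]
# resolve() removes ".." segments and follows symlinks, so two spellings of
# the same file compare equal and the duplicate argument can be dropped.
if Path(first_arg).resolve() == Path(comp_path).resolve():
    print("argv[0] is AYON itself and would be popped")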

View file

@ -21,7 +21,7 @@ class BackgroundLoader(api.AfterEffectsLoader):
"""
label = "Load JSON Background"
product_types = {"background"}
representations = ["json"]
representations = {"json"}
def load(self, context, name=None, namespace=None, data=None):
stub = self.get_stub()

View file

@ -20,7 +20,7 @@ class FileLoader(api.AfterEffectsLoader):
"review",
"audio",
}
representations = ["*"]
representations = {"*"}
def load(self, context, name=None, namespace=None, data=None):
stub = self.get_stub()

View file

@ -41,7 +41,6 @@ class CollectAERender(publish.AbstractCollectRender):
def get_instances(self, context):
instances = []
instances_to_remove = []
app_version = CollectAERender.get_stub().get_app_version()
app_version = app_version[0:4]
@ -117,7 +116,10 @@ class CollectAERender(publish.AbstractCollectRender):
fps=fps,
app_version=app_version,
publish_attributes=inst.data.get("publish_attributes", {}),
file_names=[item.file_name for item in render_q]
file_names=[item.file_name for item in render_q],
# The source instance this render instance replaces
source_instance=inst
)
comp = compositions_by_id.get(comp_id)
@ -146,10 +148,7 @@ class CollectAERender(publish.AbstractCollectRender):
instance.deadline = inst.data.get("deadline")
instances.append(instance)
instances_to_remove.append(inst)
for instance in instances_to_remove:
context.remove(instance)
return instances
def get_expected_files(self, render_instance):

View file

@ -55,8 +55,7 @@ class BlenderAddon(AYONAddon, IHostAddon):
)
# Define Qt binding if not defined
if not env.get("QT_PREFERRED_BINDING"):
env["QT_PREFERRED_BINDING"] = "PySide2"
env.pop("QT_PREFERRED_BINDING", None)
def get_launch_hook_paths(self, app):
if app.host_name != self.host_name:

View file

@ -31,7 +31,7 @@ class InstallPySideToBlender(PreLaunchHook):
def inner_execute(self):
# Get blender's python directory
version_regex = re.compile(r"^[2-4]\.[0-9]+$")
version_regex = re.compile(r"^([2-4])\.[0-9]+$")
platform = system().lower()
executable = self.launch_context.executable.executable_path
@ -42,7 +42,8 @@ class InstallPySideToBlender(PreLaunchHook):
if os.path.basename(executable).lower() != expected_executable:
self.log.info((
f"Executable does not lead to {expected_executable} file."
"Can't determine blender's python to check/install PySide2."
"Can't determine blender's python to check/install"
" Qt binding."
))
return
@ -73,6 +74,15 @@ class InstallPySideToBlender(PreLaunchHook):
return
version_subfolder = version_subfolders[0]
before_blender_4 = False
if int(version_regex.match(version_subfolder).group(1)) < 4:
before_blender_4 = True
# Blender 4 has Python 3.11 which does not support 'PySide2'
# QUESTION could we always install PySide6?
qt_binding = "PySide2" if before_blender_4 else "PySide6"
# Use PySide6 6.6.3 because 6.7.0 had a bug
# - 'QTextEdit' can't be added to 'QBoxLayout'
qt_binding_version = None if before_blender_4 else "6.6.3"
python_dir = os.path.join(versions_dir, version_subfolder, "python")
python_lib = os.path.join(python_dir, "lib")
@ -116,22 +126,41 @@ class InstallPySideToBlender(PreLaunchHook):
return
# Check if PySide2 is installed and skip if yes
if self.is_pyside_installed(python_executable):
if self.is_pyside_installed(python_executable, qt_binding):
self.log.debug("Blender has already installed PySide2.")
return
# Install PySide2 in blender's python
if platform == "windows":
result = self.install_pyside_windows(python_executable)
result = self.install_pyside_windows(
python_executable,
qt_binding,
qt_binding_version,
before_blender_4,
)
else:
result = self.install_pyside(python_executable)
result = self.install_pyside(
python_executable,
qt_binding,
qt_binding_version,
)
if result:
self.log.info("Successfully installed PySide2 module to blender.")
self.log.info(
f"Successfully installed {qt_binding} module to blender."
)
else:
self.log.warning("Failed to install PySide2 module to blender.")
self.log.warning(
f"Failed to install {qt_binding} module to blender."
)
def install_pyside_windows(self, python_executable):
def install_pyside_windows(
self,
python_executable,
qt_binding,
qt_binding_version,
before_blender_4,
):
"""Install PySide2 python module to blender's python.
Installation requires administration rights that's why it is required
@ -139,7 +168,6 @@ class InstallPySideToBlender(PreLaunchHook):
administration rights.
"""
try:
import win32api
import win32con
import win32process
import win32event
@ -150,12 +178,37 @@ class InstallPySideToBlender(PreLaunchHook):
self.log.warning("Couldn't import \"pywin32\" modules")
return
if qt_binding_version:
qt_binding = f"{qt_binding}=={qt_binding_version}"
try:
# Parameters
# - use "-m pip" as module pip to install PySide2 and argument
# "--ignore-installed" is to force install module to blender's
# site-packages and make sure it is binary compatible
parameters = "-m pip install --ignore-installed PySide2"
fake_exe = "fake.exe"
site_packages_prefix = os.path.dirname(
os.path.dirname(python_executable)
)
args = [
fake_exe,
"-m",
"pip",
"install",
"--ignore-installed",
qt_binding,
]
if not before_blender_4:
# Define prefix for site package
# Python in blender 4.x is installing packages in AppData and
# not in blender's directory.
args.extend(["--prefix", site_packages_prefix])
parameters = (
subprocess.list2cmdline(args)
.lstrip(fake_exe)
.lstrip(" ")
)
# Execute command and ask for administrator's rights
process_info = ShellExecuteEx(
@ -173,20 +226,29 @@ class InstallPySideToBlender(PreLaunchHook):
except pywintypes.error:
pass
def install_pyside(self, python_executable):
"""Install PySide2 python module to blender's python."""
def install_pyside(
self,
python_executable,
qt_binding,
qt_binding_version,
):
"""Install Qt binding python module to blender's python."""
if qt_binding_version:
qt_binding = f"{qt_binding}=={qt_binding_version}"
try:
# Parameters
# - use "-m pip" as module pip to install PySide2 and argument
# - use "-m pip" as module pip to install qt binding and argument
# "--ignore-installed" is to force install module to blender's
# site-packages and make sure it is binary compatible
# TODO find out if blender 4.x on linux/darwin does install
# qt binding to correct place.
args = [
python_executable,
"-m",
"pip",
"install",
"--ignore-installed",
"PySide2",
qt_binding,
]
process = subprocess.Popen(
args, stdout=subprocess.PIPE, universal_newlines=True
@ -203,13 +265,15 @@ class InstallPySideToBlender(PreLaunchHook):
except subprocess.SubprocessError:
pass
def is_pyside_installed(self, python_executable):
def is_pyside_installed(self, python_executable, qt_binding):
"""Check if PySide2 module is in blender's pip list.
Check that PySide2 is installed directly in blender's site-packages.
It is possible that it is installed in user's site-packages but that
may be incompatible with blender's python.
"""
qt_binding_low = qt_binding.lower()
# Get pip list from blender's python executable
args = [python_executable, "-m", "pip", "list"]
process = subprocess.Popen(args, stdout=subprocess.PIPE)
@ -226,6 +290,6 @@ class InstallPySideToBlender(PreLaunchHook):
if not line:
continue
package_name = line[0:package_len].strip()
if package_name.lower() == "pyside2":
if package_name.lower() == qt_binding_low:
return True
return False
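The "fake.exe" placeholder above exists only to get proper per-argument quoting from subprocess.list2cmdline before handing a single parameters string to ShellExecuteEx. A standalone sketch of that idea, with a hypothetical prefix path and no pywin32 required:
import subprocess
python_prefix = r"C:\Blender\4.1\python"  # hypothetical site-packages prefix
fake_exe = "fake.exe"
args = [
    fake_exe, "-m", "pip", "install", "--ignore-installed",
    "PySide6==6.6.3", "--prefix", python_prefix,
]
# list2cmdline quotes every argument for Windows, then the placeholder
# executable name is stripped so only the parameters remain.
parameters = subprocess.list2cmdline(args)[len(fake_exe):].lstrip(" ")
print(parameters)
# -m pip install --ignore-installed PySide6==6.6.3 --prefix C:\Blender\4.1\python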

View file

@ -43,7 +43,7 @@ class AppendBlendLoader(plugin.AssetLoader):
so you could also use it as a new base.
"""
representations = ["blend"]
representations = {"blend"}
product_types = {"workfile"}
label = "Append Workfile"
@ -68,7 +68,7 @@ class ImportBlendLoader(plugin.AssetLoader):
so you could also use it as a new base.
"""
representations = ["blend"]
representations = {"blend"}
product_types = {"workfile"}
label = "Import Workfile"

View file

@ -27,7 +27,7 @@ class CacheModelLoader(plugin.AssetLoader):
At least for now it only supports Alembic files.
"""
product_types = {"model", "pointcache", "animation"}
representations = ["abc"]
representations = {"abc"}
label = "Load Alembic"
icon = "code-fork"

View file

@ -25,7 +25,7 @@ class BlendActionLoader(plugin.AssetLoader):
"""
product_types = {"action"}
representations = ["blend"]
representations = {"blend"}
label = "Link Action"
icon = "code-fork"

View file

@ -17,7 +17,7 @@ class BlendAnimationLoader(plugin.AssetLoader):
"""
product_types = {"animation"}
representations = ["blend"]
representations = {"blend"}
label = "Link Animation"
icon = "code-fork"

View file

@ -21,7 +21,7 @@ class AudioLoader(plugin.AssetLoader):
"""Load audio in Blender."""
product_types = {"audio"}
representations = ["wav"]
representations = {"wav"}
label = "Load Audio"
icon = "volume-up"

View file

@ -21,7 +21,7 @@ class BlendLoader(plugin.AssetLoader):
"""Load assets from a .blend file."""
product_types = {"model", "rig", "layout", "camera"}
representations = ["blend"]
representations = {"blend"}
label = "Append Blend"
icon = "code-fork"

View file

@ -19,7 +19,7 @@ class BlendSceneLoader(plugin.AssetLoader):
"""Load assets from a .blend file."""
product_types = {"blendScene"}
representations = ["blend"]
representations = {"blend"}
label = "Append Blend"
icon = "code-fork"

View file

@ -24,7 +24,7 @@ class AbcCameraLoader(plugin.AssetLoader):
"""
product_types = {"camera"}
representations = ["abc"]
representations = {"abc"}
label = "Load Camera (ABC)"
icon = "code-fork"

View file

@ -24,7 +24,7 @@ class FbxCameraLoader(plugin.AssetLoader):
"""
product_types = {"camera"}
representations = ["fbx"]
representations = {"fbx"}
label = "Load Camera (FBX)"
icon = "code-fork"

View file

@ -24,7 +24,7 @@ class FbxModelLoader(plugin.AssetLoader):
"""
product_types = {"model", "rig"}
representations = ["fbx"]
representations = {"fbx"}
label = "Load FBX"
icon = "code-fork"

View file

@ -27,7 +27,7 @@ class JsonLayoutLoader(plugin.AssetLoader):
"""Load layout published from Unreal."""
product_types = {"layout"}
representations = ["json"]
representations = {"json"}
label = "Load Layout"
icon = "code-fork"
@ -167,7 +167,7 @@ class JsonLayoutLoader(plugin.AssetLoader):
asset_group.empty_display_type = 'SINGLE_ARROW'
avalon_container.objects.link(asset_group)
self._process(libpath, asset, asset_group, None)
self._process(libpath, asset_name, asset_group, None)
bpy.context.scene.collection.objects.link(asset_group)

View file

@ -24,7 +24,7 @@ class BlendLookLoader(plugin.AssetLoader):
"""
product_types = {"look"}
representations = ["json"]
representations = {"json"}
label = "Load Look"
icon = "code-fork"

View file

@ -2,6 +2,7 @@ import os
import bpy
from ayon_core.lib import BoolDef
from ayon_core.pipeline import publish
from ayon_core.hosts.blender.api import plugin
@ -17,6 +18,8 @@ class ExtractABC(publish.Extractor, publish.OptionalPyblishPluginMixin):
if not self.is_active(instance.data):
return
attr_values = self.get_attr_values_from_data(instance.data)
# Define extract output file path
stagingdir = self.staging_dir(instance)
folder_name = instance.data["folderEntity"]["name"]
@ -46,7 +49,8 @@ class ExtractABC(publish.Extractor, publish.OptionalPyblishPluginMixin):
bpy.ops.wm.alembic_export(
filepath=filepath,
selected=True,
flatten=False
flatten=False,
subdiv_schema=attr_values.get("subdiv_schema", False)
)
plugin.deselect_all()
@ -65,6 +69,21 @@ class ExtractABC(publish.Extractor, publish.OptionalPyblishPluginMixin):
self.log.debug("Extracted instance '%s' to: %s",
instance.name, representation)
@classmethod
def get_attribute_defs(cls):
return [
BoolDef(
"subdiv_schema",
label="Alembic Mesh Subdiv Schema",
tooltip="Export Meshes using Alembic's subdivision schema.\n"
"Enabling this includes creases with the export but "
"excludes the mesh's normals.\n"
"Enabling this usually result in smaller file size "
"due to lack of normals.",
default=False
)
]
class ExtractModelABC(ExtractABC):
"""Extract model as ABC."""

View file

@ -18,7 +18,7 @@ class LoadClip(opfapi.ClipLoader):
"""
product_types = {"render2d", "source", "plate", "render", "review"}
representations = ["*"]
representations = {"*"}
extensions = set(
ext.lstrip(".") for ext in IMAGE_EXTENSIONS.union(VIDEO_EXTENSIONS)
)

View file

@ -17,7 +17,7 @@ class LoadClipBatch(opfapi.ClipLoader):
"""
product_types = {"render2d", "source", "plate", "render", "review"}
representations = ["*"]
representations = {"*"}
extensions = set(
ext.lstrip(".") for ext in IMAGE_EXTENSIONS.union(VIDEO_EXTENSIONS)
)

View file

@ -3,8 +3,8 @@ import sys
import re
import contextlib
from ayon_core.lib import Logger
from ayon_core.lib import Logger, BoolDef, UILabelDef
from ayon_core.style import load_stylesheet
from ayon_core.pipeline import registered_host
from ayon_core.pipeline.create import CreateContext
from ayon_core.pipeline.context_tools import get_current_folder_entity
@ -181,7 +181,6 @@ def validate_comp_prefs(comp=None, force_repair=False):
from . import menu
from ayon_core.tools.utils import SimplePopup
from ayon_core.style import load_stylesheet
dialog = SimplePopup(parent=menu.menu)
dialog.setWindowTitle("Fusion comp has invalid configuration")
@ -340,9 +339,7 @@ def prompt_reset_context():
from ayon_core.tools.attribute_defs.dialog import (
AttributeDefinitionsDialog
)
from ayon_core.style import load_stylesheet
from ayon_core.lib import BoolDef, UILabelDef
from qtpy import QtWidgets, QtCore
from qtpy import QtCore
definitions = [
UILabelDef(

View file

@ -16,6 +16,12 @@ from ayon_core.pipeline import (
AVALON_INSTANCE_ID,
AYON_INSTANCE_ID,
)
from ayon_core.pipeline.workfile import get_workdir
from ayon_api import (
get_project,
get_folder_by_path,
get_task_by_name
)
class GenericCreateSaver(Creator):
@ -125,6 +131,8 @@ class GenericCreateSaver(Creator):
product_name = data["productName"]
if (
original_product_name != product_name
or tool.GetData("openpype.task") != data["task"]
or tool.GetData("openpype.folderPath") != data["folderPath"]
or original_format != data["creator_attributes"]["image_format"]
):
self._configure_saver_tool(data, tool, product_name)
@ -145,7 +153,30 @@ class GenericCreateSaver(Creator):
folder_path = formatting_data["folderPath"]
folder_name = folder_path.rsplit("/", 1)[-1]
workdir = os.path.normpath(os.getenv("AYON_WORKDIR"))
# If the folder path and task do not match the current context then the
# workdir is not just the `AYON_WORKDIR`. Hence, we need to actually
# compute the resulting workdir
if (
data["folderPath"] == self.create_context.get_current_folder_path()
and data["task"] == self.create_context.get_current_task_name()
):
workdir = os.path.normpath(os.getenv("AYON_WORKDIR"))
else:
# TODO: Optimize this logic
project_name = self.create_context.get_current_project_name()
project_entity = get_project(project_name)
folder_entity = get_folder_by_path(project_name,
data["folderPath"])
task_entity = get_task_by_name(project_name,
folder_id=folder_entity["id"],
task_name=data["task"])
workdir = get_workdir(
project_entity=project_entity,
folder_entity=folder_entity,
task_entity=task_entity,
host_name=self.create_context.host_name,
)
formatting_data.update({
"workdir": workdir,
"frame": "0" * frame_padding,

View file

@ -0,0 +1,36 @@
import os
from ayon_applications import PreLaunchHook
from ayon_core.hosts.fusion import FUSION_HOST_DIR
class FusionLaunchMenuHook(PreLaunchHook):
"""Launch AYON menu on start of Fusion"""
app_groups = ["fusion"]
order = 9
def execute(self):
# Prelaunch hook is optional
settings = self.data["project_settings"][self.host_name]
if not settings["hooks"]["FusionLaunchMenuHook"]["enabled"]:
return
variant = self.application.name
if variant.isnumeric():
version = int(variant)
if version < 18:
print("Skipping launch of OpenPype menu on Fusion start "
"because Fusion version below 18.0 does not support "
"/execute argument on launch. "
f"Version detected: {version}")
return
else:
print(f"Application variant is not numeric: {variant}. "
"Validation for Fusion version 18+ for /execute "
"prelaunch argument skipped.")
path = os.path.join(FUSION_HOST_DIR,
"deploy",
"MenuScripts",
"launch_menu.py").replace("\\", "/")
script = f"fusion:RunScript('{path}')"
self.launch_context.launch_args.extend(["/execute", script])

View file

@ -85,7 +85,6 @@ class InstallPySideToFusion(PreLaunchHook):
administration rights.
"""
try:
import win32api
import win32con
import win32process
import win32event

View file

@ -17,7 +17,7 @@ class FusionSetFrameRangeLoader(load.LoaderPlugin):
"pointcache",
"render",
}
representations = ["*"]
representations = {"*"}
extensions = {"*"}
label = "Set frame range"
@ -54,7 +54,7 @@ class FusionSetFrameRangeWithHandlesLoader(load.LoaderPlugin):
"pointcache",
"render",
}
representations = ["*"]
representations = {"*"}
label = "Set frame range (with handles)"
order = 12

View file

@ -13,7 +13,7 @@ class FusionLoadAlembicMesh(load.LoaderPlugin):
"""Load Alembic mesh into Fusion"""
product_types = {"pointcache", "model"}
representations = ["*"]
representations = {"*"}
extensions = {"abc"}
label = "Load alembic mesh"

View file

@ -13,7 +13,7 @@ class FusionLoadFBXMesh(load.LoaderPlugin):
"""Load FBX mesh into Fusion"""
product_types = {"*"}
representations = ["*"]
representations = {"*"}
extensions = {
"3ds",
"amc",

View file

@ -137,7 +137,7 @@ class FusionLoadSequence(load.LoaderPlugin):
"image",
"online",
}
representations = ["*"]
representations = {"*"}
extensions = set(
ext.lstrip(".") for ext in IMAGE_EXTENSIONS.union(VIDEO_EXTENSIONS)
)

View file

@ -17,7 +17,7 @@ class FusionLoadUSD(load.LoaderPlugin):
"""
product_types = {"*"}
representations = ["*"]
representations = {"*"}
extensions = {"usd", "usda", "usdz"}
label = "Load USD"

View file

@ -15,7 +15,7 @@ class FusionLoadWorkfile(load.LoaderPlugin):
"""Load the content of a workfile into Fusion"""
product_types = {"workfile"}
representations = ["*"]
representations = {"*"}
extensions = {"comp"}
label = "Load Workfile"

View file

@ -37,14 +37,13 @@ class CollectFusionRender(
aspect_x = comp_frame_format_prefs["AspectX"]
aspect_y = comp_frame_format_prefs["AspectY"]
instances = []
instances_to_remove = []
current_file = context.data["currentFile"]
version = context.data["version"]
project_entity = context.data["projectEntity"]
instances = []
for inst in context:
if not inst.data.get("active", True):
continue
@ -91,7 +90,10 @@ class CollectFusionRender(
frameStep=1,
fps=comp_frame_format_prefs.get("Rate"),
app_version=comp.GetApp().Version,
publish_attributes=inst.data.get("publish_attributes", {})
publish_attributes=inst.data.get("publish_attributes", {}),
# The source instance this render instance replaces
source_instance=inst
)
render_target = inst.data["creator_attributes"]["render_target"]
@ -115,13 +117,7 @@ class CollectFusionRender(
instance.families.remove("review")
instance.deadline = inst.data.get("deadline")
# add new instance to the list and remove the original
# instance since it is not needed anymore
instances.append(instance)
instances_to_remove.append(inst)
for instance in instances_to_remove:
context.remove(instance)
return instances

View file

@ -590,7 +590,7 @@ class ImageSequenceLoader(load.LoaderPlugin):
"reference",
"review",
}
representations = ["*"]
representations = {"*"}
extensions = {"jpeg", "png", "jpg"}
def load(self, context, name=None, namespace=None, data=None):

View file

@ -36,7 +36,7 @@ class ImportAudioLoader(load.LoaderPlugin):
"""Import audio."""
product_types = {"shot", "audio"}
representations = ["wav"]
representations = {"wav"}
label = "Import Audio"
def load(self, context, name=None, namespace=None, data=None):

View file

@ -234,7 +234,7 @@ class BackgroundLoader(load.LoaderPlugin):
Stores the imported asset in a container named after the asset.
"""
product_types = {"background"}
representations = ["json"]
representations = {"json"}
def load(self, context, name=None, namespace=None, data=None):

View file

@ -28,7 +28,7 @@ class ImageSequenceLoader(load.LoaderPlugin):
"reference",
"review",
}
representations = ["*"]
representations = {"*"}
extensions = {"jpeg", "png", "jpg"}
def load(self, context, name=None, namespace=None, data=None):

View file

@ -12,7 +12,7 @@ class ImportPaletteLoader(load.LoaderPlugin):
"""Import palettes."""
product_types = {"palette", "harmony.palette"}
representations = ["plt"]
representations = {"plt"}
label = "Import Palette"
def load(self, context, name=None, namespace=None, data=None):

View file

@ -24,7 +24,7 @@ class TemplateLoader(load.LoaderPlugin):
"""
product_types = {"template", "workfile"}
representations = ["*"]
representations = {"*"}
label = "Load Template"
icon = "gift"

View file

@ -14,7 +14,7 @@ class ImportTemplateLoader(load.LoaderPlugin):
"""Import templates."""
product_types = {"harmony.template", "workfile"}
representations = ["*"]
representations = {"*"}
label = "Import Template"
def load(self, context, name=None, namespace=None, data=None):
@ -61,5 +61,5 @@ class ImportWorkfileLoader(ImportTemplateLoader):
"""Import workfiles."""
product_types = {"workfile"}
representations = ["zip"]
representations = {"zip"}
label = "Import Workfile"

View file

@ -137,7 +137,7 @@ class CreateShotClip(phiero.Creator):
"value": ["<track_name>", "main", "bg", "fg", "bg",
"animatic"],
"type": "QComboBox",
"label": "pRODUCT Name",
"label": "Product Name",
"target": "ui",
"toolTip": "chose product name pattern, if <track_name> is selected, name of track layer will be used", # noqa
"order": 0},
@ -159,7 +159,7 @@ class CreateShotClip(phiero.Creator):
"type": "QCheckBox",
"label": "Include audio",
"target": "tag",
"toolTip": "Process productS with corresponding audio", # noqa
"toolTip": "Process products with corresponding audio", # noqa
"order": 3},
"sourceResolution": {
"value": False,

View file

@ -16,7 +16,7 @@ class LoadClip(phiero.SequenceLoader):
"""
product_types = {"render2d", "source", "plate", "render", "review"}
representations = ["*"]
representations = {"*"}
extensions = set(
ext.lstrip(".") for ext in IMAGE_EXTENSIONS.union(VIDEO_EXTENSIONS)
)

View file

@ -15,7 +15,7 @@ class LoadEffects(load.LoaderPlugin):
"""Loading colorspace soft effect exported from nukestudio"""
product_types = {"effect"}
representations = ["*"]
representations = {"*"}
extension = {"json"}
label = "Load Effects"

View file

@ -4,12 +4,12 @@ import pyblish.api
from ayon_core.pipeline import publish
class ExtractThumnail(publish.Extractor):
class ExtractThumbnail(publish.Extractor):
"""
Extractor for track item's tumnails
    Extractor for track item's thumbnails
"""
label = "Extract Thumnail"
label = "Extract Thumbnail"
order = pyblish.api.ExtractorOrder
families = ["plate", "take"]
hosts = ["hiero"]
@ -48,7 +48,7 @@ class ExtractThumnail(publish.Extractor):
self.log.debug(
"__ thumb_path: `{}`, frame: `{}`".format(thumbnail, thumb_frame))
self.log.info("Thumnail was generated to: {}".format(thumb_path))
self.log.info("Thumbnail was generated to: {}".format(thumb_path))
thumb_representation = {
'files': thumb_file,
'stagingDir': staging_dir,

View file

@ -92,10 +92,6 @@ class PrecollectInstances(pyblish.api.ContextPlugin):
folder_path, folder_name = self._get_folder_data(tag_data)
product_name = tag_data.get("productName")
if product_name is None:
product_name = tag_data["subset"]
families = [str(f) for f in tag_data["families"]]
# TODO: remove backward compatibility
@ -293,7 +289,7 @@ class PrecollectInstances(pyblish.api.ContextPlugin):
label += " {}".format(product_name)
data.update({
"name": "{}_{}".format(folder_path, subset),
"name": "{}_{}".format(folder_path, product_name),
"label": label,
"productName": product_name,
"productType": product_type,

View file

@ -1001,6 +1001,82 @@ def add_self_publish_button(node):
node.setParmTemplateGroup(template)
def get_scene_viewer():
"""
Return an instance of a visible viewport.
    There may be many scene viewers; some could be closed, and any
    visible one may be the current tab.
Returns:
Optional[hou.SceneViewer]: A scene viewer, if any.
"""
panes = hou.ui.paneTabs()
panes = [x for x in panes if x.type() == hou.paneTabType.SceneViewer]
panes = sorted(panes, key=lambda x: x.isCurrentTab())
if panes:
return panes[-1]
return None
def sceneview_snapshot(
sceneview,
filepath="$HIP/thumbnails/$HIPNAME.$F4.jpg",
frame_start=None,
frame_end=None):
"""Take a snapshot of your scene view.
    It takes a snapshot of your scene view for the given frame range,
    so it is capable of generating a snapshot image sequence.
    It works in different Houdini contexts, e.g. Objects and Solaris.
Example:
This is how the function can be used::
from ayon_core.hosts.houdini.api import lib
sceneview = hou.ui.paneTabOfType(hou.paneTabType.SceneViewer)
lib.sceneview_snapshot(sceneview)
Notes:
.png output will render poorly, so use .jpg.
How it works:
Get the current sceneviewer (may be more than one or hidden)
and screengrab the perspective viewport to a file in the
publish location to be picked up with the publish.
Credits:
https://www.sidefx.com/forum/topic/42808/?page=1#post-354796
Args:
sceneview (hou.SceneViewer): The scene view pane from which you want
to take a snapshot.
        filepath (str): Thumbnail filepath. It expects the `$F4` token
            when frame_end is bigger than frame_start, otherwise
            each frame will overwrite its predecessor.
        frame_start (int): The frame at which the snapshot starts.
        frame_end (int): The frame at which the snapshot ends.
"""
if frame_start is None:
frame_start = hou.frame()
if frame_end is None:
frame_end = frame_start
if not isinstance(sceneview, hou.SceneViewer):
log.debug("Wrong Input. {} is not of type hou.SceneViewer."
.format(sceneview))
return
viewport = sceneview.curViewport()
flip_settings = sceneview.flipbookSettings().stash()
flip_settings.frameRange((frame_start, frame_end))
flip_settings.output(filepath)
flip_settings.outputToMPlay(False)
sceneview.flipbook(viewport, flip_settings)
log.debug("A snapshot of sceneview has been saved to: {}".format(filepath))
def update_content_on_context_change():
"""Update all Creator instances to current asset"""
host = registered_host()

View file

@ -1,7 +1,7 @@
# -*- coding: utf-8 -*-
"""Creator plugin for creating alembic camera products."""
from ayon_core.hosts.houdini.api import plugin
from ayon_core.pipeline import CreatedInstance, CreatorError
from ayon_core.pipeline import CreatorError
import hou
@ -23,7 +23,7 @@ class CreateAlembicCamera(plugin.HoudiniCreator):
instance = super(CreateAlembicCamera, self).create(
product_name,
instance_data,
pre_create_data) # type: CreatedInstance
pre_create_data)
instance_node = hou.node(instance.get("instance_node"))
parms = {

View file

@ -29,7 +29,7 @@ class CreateArnoldAss(plugin.HoudiniCreator):
instance = super(CreateArnoldAss, self).create(
product_name,
instance_data,
pre_create_data) # type: plugin.CreatedInstance
pre_create_data)
instance_node = hou.node(instance.get("instance_node"))

View file

@ -31,7 +31,7 @@ class CreateArnoldRop(plugin.HoudiniCreator):
instance = super(CreateArnoldRop, self).create(
product_name,
instance_data,
pre_create_data) # type: plugin.CreatedInstance
pre_create_data)
instance_node = hou.node(instance.get("instance_node"))

View file

@ -1,7 +1,7 @@
# -*- coding: utf-8 -*-
"""Creator plugin for creating pointcache bgeo files."""
from ayon_core.hosts.houdini.api import plugin
from ayon_core.pipeline import CreatedInstance, CreatorError
from ayon_core.pipeline import CreatorError
import hou
from ayon_core.lib import EnumDef, BoolDef
@ -25,7 +25,7 @@ class CreateBGEO(plugin.HoudiniCreator):
instance = super(CreateBGEO, self).create(
product_name,
instance_data,
pre_create_data) # type: CreatedInstance
pre_create_data)
instance_node = hou.node(instance.get("instance_node"))

View file

@ -1,7 +1,7 @@
# -*- coding: utf-8 -*-
"""Creator plugin for creating composite sequences."""
from ayon_core.hosts.houdini.api import plugin
from ayon_core.pipeline import CreatedInstance, CreatorError
from ayon_core.pipeline import CreatorError
import hou
@ -25,7 +25,7 @@ class CreateCompositeSequence(plugin.HoudiniCreator):
instance = super(CreateCompositeSequence, self).create(
product_name,
instance_data,
pre_create_data) # type: CreatedInstance
pre_create_data)
instance_node = hou.node(instance.get("instance_node"))
filepath = "{}{}".format(

View file

@ -78,7 +78,7 @@ class CreateHDA(plugin.HoudiniCreator):
instance = super(CreateHDA, self).create(
product_name,
instance_data,
pre_create_data) # type: plugin.CreatedInstance
pre_create_data)
return instance

View file

@ -1,7 +1,6 @@
# -*- coding: utf-8 -*-
"""Creator plugin to create Karma ROP."""
from ayon_core.hosts.houdini.api import plugin
from ayon_core.pipeline import CreatedInstance
from ayon_core.lib import BoolDef, EnumDef, NumberDef
@ -25,7 +24,7 @@ class CreateKarmaROP(plugin.HoudiniCreator):
instance = super(CreateKarmaROP, self).create(
product_name,
instance_data,
pre_create_data) # type: CreatedInstance
pre_create_data)
instance_node = hou.node(instance.get("instance_node"))

View file

@ -1,7 +1,6 @@
# -*- coding: utf-8 -*-
"""Creator plugin for creating pointcache alembics."""
from ayon_core.hosts.houdini.api import plugin
from ayon_core.pipeline import CreatedInstance
from ayon_core.lib import BoolDef
@ -22,7 +21,7 @@ class CreateMantraIFD(plugin.HoudiniCreator):
instance = super(CreateMantraIFD, self).create(
product_name,
instance_data,
pre_create_data) # type: CreatedInstance
pre_create_data)
instance_node = hou.node(instance.get("instance_node"))

View file

@ -1,7 +1,6 @@
# -*- coding: utf-8 -*-
"""Creator plugin to create Mantra ROP."""
from ayon_core.hosts.houdini.api import plugin
from ayon_core.pipeline import CreatedInstance
from ayon_core.lib import EnumDef, BoolDef
@ -28,7 +27,7 @@ class CreateMantraROP(plugin.HoudiniCreator):
instance = super(CreateMantraROP, self).create(
product_name,
instance_data,
pre_create_data) # type: CreatedInstance
pre_create_data)
instance_node = hou.node(instance.get("instance_node"))

View file

@ -1,7 +1,6 @@
# -*- coding: utf-8 -*-
"""Creator plugin for creating USDs."""
from ayon_core.hosts.houdini.api import plugin
from ayon_core.pipeline import CreatedInstance
import hou
@ -22,7 +21,7 @@ class CreateUSD(plugin.HoudiniCreator):
instance = super(CreateUSD, self).create(
product_name,
instance_data,
pre_create_data) # type: CreatedInstance
pre_create_data)
instance_node = hou.node(instance.get("instance_node"))

View file

@ -1,7 +1,6 @@
# -*- coding: utf-8 -*-
"""Creator plugin for creating USD renders."""
from ayon_core.hosts.houdini.api import plugin
from ayon_core.pipeline import CreatedInstance
class CreateUSDRender(plugin.HoudiniCreator):
@ -23,7 +22,7 @@ class CreateUSDRender(plugin.HoudiniCreator):
instance = super(CreateUSDRender, self).create(
product_name,
instance_data,
pre_create_data) # type: CreatedInstance
pre_create_data)
instance_node = hou.node(instance.get("instance_node"))

View file

@ -1,7 +1,6 @@
# -*- coding: utf-8 -*-
"""Creator plugin for creating VDB Caches."""
from ayon_core.hosts.houdini.api import plugin
from ayon_core.pipeline import CreatedInstance
from ayon_core.lib import BoolDef
import hou
@ -26,7 +25,7 @@ class CreateVDBCache(plugin.HoudiniCreator):
instance = super(CreateVDBCache, self).create(
product_name,
instance_data,
pre_create_data) # type: CreatedInstance
pre_create_data)
instance_node = hou.node(instance.get("instance_node"))
file_path = "{}{}".format(

View file

@ -3,7 +3,7 @@
import hou
from ayon_core.hosts.houdini.api import plugin
from ayon_core.pipeline import CreatedInstance, CreatorError
from ayon_core.pipeline import CreatorError
from ayon_core.lib import EnumDef, BoolDef
@ -31,7 +31,7 @@ class CreateVrayROP(plugin.HoudiniCreator):
instance = super(CreateVrayROP, self).create(
product_name,
instance_data,
pre_create_data) # type: CreatedInstance
pre_create_data)
instance_node = hou.node(instance.get("instance_node"))

View file

@ -15,7 +15,7 @@ class SetFrameRangeLoader(load.LoaderPlugin):
"vdbcache",
"usd",
}
representations = ["abc", "vdb", "usd"]
representations = {"abc", "vdb", "usd"}
label = "Set frame range"
order = 11
@ -52,7 +52,7 @@ class SetFrameRangeWithHandlesLoader(load.LoaderPlugin):
"vdbcache",
"usd",
}
representations = ["abc", "vdb", "usd"]
representations = {"abc", "vdb", "usd"}
label = "Set frame range (with handles)"
order = 12

View file

@ -11,7 +11,7 @@ class AbcLoader(load.LoaderPlugin):
product_types = {"model", "animation", "pointcache", "gpuCache"}
label = "Load Alembic"
representations = ["*"]
representations = {"*"}
extensions = {"abc"}
order = -10
icon = "code-fork"

View file

@ -11,7 +11,7 @@ class AbcArchiveLoader(load.LoaderPlugin):
product_types = {"model", "animation", "pointcache", "gpuCache"}
label = "Load Alembic as Archive"
representations = ["*"]
representations = {"*"}
extensions = {"abc"}
order = -5
icon = "code-fork"

View file

@ -13,7 +13,7 @@ class AssLoader(load.LoaderPlugin):
product_types = {"ass"}
label = "Load Arnold Procedural"
representations = ["ass"]
representations = {"ass"}
order = -10
icon = "code-fork"
color = "orange"

View file

@ -14,9 +14,9 @@ class BgeoLoader(load.LoaderPlugin):
label = "Load bgeo"
product_types = {"model", "pointcache", "bgeo"}
representations = [
representations = {
"bgeo", "bgeosc", "bgeogz",
"bgeo.sc", "bgeo.gz", "bgeo.lzma", "bgeo.bz2"]
"bgeo.sc", "bgeo.gz", "bgeo.lzma", "bgeo.bz2"}
order = -10
icon = "code-fork"
color = "orange"

View file

@ -89,7 +89,7 @@ class CameraLoader(load.LoaderPlugin):
product_types = {"camera"}
label = "Load Camera (abc)"
representations = ["abc"]
representations = {"abc"}
order = -10
icon = "code-fork"

View file

@ -17,7 +17,7 @@ class FbxLoader(load.LoaderPlugin):
order = -10
product_types = {"*"}
representations = ["*"]
representations = {"*"}
extensions = {"fbx"}
def load(self, context, name=None, namespace=None, data=None):

View file

@ -22,7 +22,7 @@ class FilePathLoader(load.LoaderPlugin):
icon = "link"
color = "white"
product_types = {"*"}
representations = ["*"]
representations = {"*"}
def load(self, context, name=None, namespace=None, data=None):

View file

@ -12,7 +12,7 @@ class HdaLoader(load.LoaderPlugin):
product_types = {"hda"}
label = "Load Hda"
representations = ["hda"]
representations = {"hda"}
order = -10
icon = "code-fork"
color = "orange"

View file

@ -54,7 +54,7 @@ class ImageLoader(load.LoaderPlugin):
"online",
}
label = "Load Image (COP2)"
representations = ["*"]
representations = {"*"}
order = -10
icon = "code-fork"

View file

@ -15,7 +15,7 @@ class RedshiftProxyLoader(load.LoaderPlugin):
product_types = {"redshiftproxy"}
label = "Load Redshift Proxy"
representations = ["rs"]
representations = {"rs"}
order = -10
icon = "code-fork"
color = "orange"

View file

@ -14,7 +14,7 @@ class USDSublayerLoader(load.LoaderPlugin):
"usdCamera",
}
label = "Sublayer USD"
representations = ["usd", "usda", "usdlc", "usdnc", "abc"]
representations = {"usd", "usda", "usdlc", "usdnc", "abc"}
order = 1
icon = "code-fork"

View file

@ -14,7 +14,7 @@ class USDReferenceLoader(load.LoaderPlugin):
"usdCamera",
}
label = "Reference USD"
representations = ["usd", "usda", "usdlc", "usdnc", "abc"]
representations = {"usd", "usda", "usdlc", "usdnc", "abc"}
order = -8
icon = "code-fork"

View file

@ -9,7 +9,7 @@ class SopUsdImportLoader(load.LoaderPlugin):
label = "Load USD to SOPs"
product_types = {"*"}
representations = ["usd"]
representations = {"usd"}
order = -6
icon = "code-fork"
color = "orange"

View file

@ -13,7 +13,7 @@ class VdbLoader(load.LoaderPlugin):
product_types = {"vdbcache"}
label = "Load VDB"
representations = ["vdb"]
representations = {"vdb"}
order = -10
icon = "code-fork"
color = "orange"

View file

@ -10,7 +10,7 @@ class ShowInUsdview(load.LoaderPlugin):
"""Open USD file in usdview"""
label = "Show in usdview"
representations = ["*"]
representations = {"*"}
product_types = {"*"}
extensions = {"usd", "usda", "usdlc", "usdnc", "abc"}
order = 15

View file

@ -1,9 +1,21 @@
from collections import deque
import pyblish.api
from ayon_core.pipeline import registered_host
def collect_input_containers(nodes):
def get_container_members(container):
node = container["node"]
# Usually the loaded containers don't have any complex references
# and the contained children should be all we need. So we disregard
# checking for .references() on the nodes.
members = set(node.allSubChildren())
members.add(node) # include the node itself
return members
def collect_input_containers(containers, nodes):
"""Collect containers that contain any of the node in `nodes`.
This will return any loaded Avalon container that contains at least one of
@ -11,30 +23,13 @@ def collect_input_containers(nodes):
there are member nodes of that container.
Returns:
list: Input avalon containers
list: Loaded containers that contain the `nodes`
"""
# Lookup by node ids
lookup = frozenset(nodes)
containers = []
host = registered_host()
for container in host.ls():
node = container["node"]
# Usually the loaded containers don't have any complex references
# and the contained children should be all we need. So we disregard
# checking for .references() on the nodes.
members = set(node.allSubChildren())
members.add(node) # include the node itself
# If there's an intersection
if not lookup.isdisjoint(members):
containers.append(container)
return containers
# Assume the containers have collected their cached '_members' data
# in the collector.
return [container for container in containers
if any(node in container["_members"] for node in nodes)]
def iter_upstream(node):
@ -54,7 +49,7 @@ def iter_upstream(node):
)
# Initialize process queue with the node's ancestors itself
queue = list(upstream)
queue = deque(upstream)
collected = set(upstream)
# Traverse upstream references for all nodes and yield them as we
@ -72,6 +67,10 @@ def iter_upstream(node):
# Include the references' ancestors that have not been collected yet.
for reference in references:
if reference in collected:
# Might have been collected in previous iteration
continue
ancestors = reference.inputAncestors(
include_ref_inputs=True, follow_subnets=True
)
@ -108,13 +107,32 @@ class CollectUpstreamInputs(pyblish.api.InstancePlugin):
)
return
# Collect all upstream parents
nodes = list(iter_upstream(output))
nodes.append(output)
# For large scenes the querying of "host.ls()" can be relatively slow
# e.g. up to a second. Many instances calling it easily slows this
# down. As such, we cache it so we trigger it only once.
# todo: Instead of hidden cache make "CollectContainers" plug-in
cache_key = "__cache_containers"
scene_containers = instance.context.data.get(cache_key, None)
if scene_containers is None:
# Query the scenes' containers if there's no cache yet
host = registered_host()
scene_containers = list(host.ls())
for container in scene_containers:
# Embed the members into the container dictionary
container_members = set(get_container_members(container))
container["_members"] = container_members
instance.context.data[cache_key] = scene_containers
# Collect containers for the given set of nodes
containers = collect_input_containers(nodes)
inputs = []
if scene_containers:
# Collect all upstream parents
nodes = list(iter_upstream(output))
nodes.append(output)
# Collect containers for the given set of nodes
containers = collect_input_containers(scene_containers, nodes)
inputs = [c["representation"] for c in containers]
inputs = [c["representation"] for c in containers]
instance.data["inputRepresentations"] = inputs
self.log.debug("Collected inputs: %s" % inputs)

View file

@ -0,0 +1,55 @@
import pyblish.api
import tempfile
from ayon_core.pipeline import publish
from ayon_core.hosts.houdini.api import lib
from ayon_core.hosts.houdini.api.pipeline import IS_HEADLESS
class ExtractActiveViewThumbnail(publish.Extractor):
"""Set instance thumbnail to a screengrab of current active viewport.
    If an instance does not have a thumbnail set yet, it will get a
    thumbnail of the currently active view at the time of publishing
    as a fallback.
"""
order = pyblish.api.ExtractorOrder + 0.49
label = "Extract Active View Thumbnail"
families = ["workfile"]
hosts = ["houdini"]
def process(self, instance):
if IS_HEADLESS:
self.log.debug(
"Skip extraction of active view thumbnail, due to being in"
"headless mode."
)
return
thumbnail = instance.data.get("thumbnailPath")
if thumbnail:
# A thumbnail was already set for this instance
return
view_thumbnail = self.get_view_thumbnail(instance)
if not view_thumbnail:
return
self.log.debug("Setting instance thumbnail path to: {}"
.format(view_thumbnail)
)
instance.data["thumbnailPath"] = view_thumbnail
def get_view_thumbnail(self, instance):
sceneview = lib.get_scene_viewer()
if sceneview is None:
self.log.debug("Skipping Extract Active View Thumbnail"
" because no scene view was detected.")
return
with tempfile.NamedTemporaryFile("w", suffix=".jpg", delete=False) as tmp:
lib.sceneview_snapshot(sceneview, tmp.name)
thumbnail_path = tmp.name
instance.context.data["cleanupFullPaths"].append(thumbnail_path)
return thumbnail_path
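A minimal sketch of the temporary-file pattern used above: delete=False keeps the file on disk after the handle is closed, so the snapshot can write to that path and cleanup is deferred to the publish cleanup step.
import os
import tempfile
with tempfile.NamedTemporaryFile("w", suffix=".jpg", delete=False) as tmp:
    thumbnail_path = tmp.name
# The handle is closed here but the empty file still exists, so another
# call (the viewport snapshot) can overwrite it at the same path.
print(os.path.exists(thumbnail_path))  # True
os.remove(thumbnail_path)  # the plug-in defers this to "cleanupFullPaths"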

View file

@ -3,7 +3,6 @@ import pyblish.api
from ayon_core.lib import version_up
from ayon_core.pipeline import registered_host
from ayon_core.pipeline.publish import get_errored_plugins_from_context
from ayon_core.hosts.houdini.api import HoudiniHost
from ayon_core.pipeline.publish import KnownPublishError
@ -39,7 +38,7 @@ class IncrementCurrentFile(pyblish.api.ContextPlugin):
)
# Filename must not have changed since collecting
host = registered_host() # type: HoudiniHost
host = registered_host()
current_file = host.current_file()
if context.data["currentFile"] != current_file:
raise KnownPublishError(

View file

@ -8,10 +8,15 @@ from typing import Any, Dict, Union
import six
import ayon_api
from ayon_core.pipeline import get_current_project_name, colorspace
from ayon_core.pipeline import (
get_current_project_name,
get_current_folder_path,
get_current_task_name,
colorspace
)
from ayon_core.settings import get_project_settings
from ayon_core.pipeline.context_tools import (
get_current_folder_entity,
get_current_task_entity
)
from ayon_core.style import load_stylesheet
from pymxs import runtime as rt
@ -221,41 +226,30 @@ def reset_scene_resolution():
scene resolution can be overwritten by a folder if the folder.attrib
contains any information regarding scene resolution.
"""
folder_entity = get_current_folder_entity(
fields={"attrib.resolutionWidth", "attrib.resolutionHeight"}
)
folder_attributes = folder_entity["attrib"]
width = int(folder_attributes["resolutionWidth"])
height = int(folder_attributes["resolutionHeight"])
task_attributes = get_current_task_entity(fields={"attrib"})["attrib"]
width = int(task_attributes["resolutionWidth"])
height = int(task_attributes["resolutionHeight"])
set_scene_resolution(width, height)
def get_frame_range(folder_entiy=None) -> Union[Dict[str, Any], None]:
"""Get the current folder frame range and handles.
def get_frame_range(task_entity=None) -> Union[Dict[str, Any], None]:
"""Get the current task frame range and handles
Args:
folder_entiy (dict): Folder eneity.
task_entity (dict): Task Entity.
Returns:
dict: with frame start, frame end, handle start, handle end.
"""
# Set frame start/end
if folder_entiy is None:
folder_entiy = get_current_folder_entity()
folder_attributes = folder_entiy["attrib"]
frame_start = folder_attributes.get("frameStart")
frame_end = folder_attributes.get("frameEnd")
if frame_start is None or frame_end is None:
return {}
frame_start = int(frame_start)
frame_end = int(frame_end)
handle_start = int(folder_attributes.get("handleStart", 0))
handle_end = int(folder_attributes.get("handleEnd", 0))
if task_entity is None:
task_entity = get_current_task_entity(fields={"attrib"})
task_attributes = task_entity["attrib"]
frame_start = int(task_attributes["frameStart"])
frame_end = int(task_attributes["frameEnd"])
handle_start = int(task_attributes["handleStart"])
handle_end = int(task_attributes["handleEnd"])
frame_start_handle = frame_start - handle_start
frame_end_handle = frame_end + handle_end
@ -281,9 +275,9 @@ def reset_frame_range(fps: bool = True):
scene frame rate in frames-per-second.
"""
if fps:
project_name = get_current_project_name()
project_entity = ayon_api.get_project(project_name)
fps_number = float(project_entity["attrib"].get("fps"))
task_entity = get_current_task_entity()
task_attributes = task_entity["attrib"]
fps_number = float(task_attributes["fps"])
rt.frameRate = fps_number
frame_range = get_frame_range()
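A tiny worked example of the handle arithmetic above, using hypothetical task attributes:
# Hypothetical task attributes, mirroring the keys read above.
task_attributes = {
    "frameStart": 1001,
    "frameEnd": 1050,
    "handleStart": 5,
    "handleEnd": 5,
}
frame_start = int(task_attributes["frameStart"])
frame_end = int(task_attributes["frameEnd"])
handle_start = int(task_attributes["handleStart"])
handle_end = int(task_attributes["handleEnd"])
frame_start_handle = frame_start - handle_start  # 996
frame_end_handle = frame_end + handle_end        # 1055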

View file

@ -19,7 +19,7 @@ class FbxLoader(load.LoaderPlugin):
"""Fbx Loader."""
product_types = {"camera"}
representations = ["fbx"]
representations = {"fbx"}
order = -9
icon = "code-fork"
color = "white"

View file

@ -78,7 +78,7 @@ class MaxSceneLoader(load.LoaderPlugin):
"model",
}
representations = ["max"]
representations = {"max"}
order = -8
icon = "code-fork"
color = "green"

View file

@ -16,7 +16,7 @@ class ModelAbcLoader(load.LoaderPlugin):
product_types = {"model"}
label = "Load Model with Alembic"
representations = ["abc"]
representations = {"abc"}
order = -10
icon = "code-fork"
color = "orange"

View file

@ -18,7 +18,7 @@ class FbxModelLoader(load.LoaderPlugin):
"""Fbx Model Loader."""
product_types = {"model"}
representations = ["fbx"]
representations = {"fbx"}
order = -9
icon = "code-fork"
color = "white"

View file

@ -20,7 +20,7 @@ class ObjLoader(load.LoaderPlugin):
"""Obj Loader."""
product_types = {"model"}
representations = ["obj"]
representations = {"obj"}
order = -9
icon = "code-fork"
color = "white"

View file

@ -24,7 +24,7 @@ class ModelUSDLoader(load.LoaderPlugin):
product_types = {"model"}
label = "Load Model(USD)"
representations = ["usda"]
representations = {"usda"}
order = -10
icon = "code-fork"
color = "orange"

View file

@ -20,7 +20,7 @@ class AbcLoader(load.LoaderPlugin):
product_types = {"camera", "animation", "pointcache"}
label = "Load Alembic"
representations = ["abc"]
representations = {"abc"}
order = -10
icon = "code-fork"
color = "orange"

View file

@ -23,7 +23,7 @@ class OxAbcLoader(load.LoaderPlugin):
product_types = {"camera", "animation", "pointcache"}
label = "Load Alembic with Ornatrix"
representations = ["abc"]
representations = {"abc"}
order = -10
icon = "code-fork"
color = "orange"

View file

@ -18,7 +18,7 @@ class PointCloudLoader(load.LoaderPlugin):
"""Point Cloud Loader."""
product_types = {"pointcloud"}
representations = ["prt"]
representations = {"prt"}
order = -8
icon = "code-fork"
color = "green"

View file

@ -24,7 +24,7 @@ class RedshiftProxyLoader(load.LoaderPlugin):
label = "Load Redshift Proxy"
product_types = {"redshiftproxy"}
representations = ["rs"]
representations = {"rs"}
order = -9
icon = "code-fork"
color = "white"

View file

@ -17,7 +17,7 @@ class TyCacheLoader(load.LoaderPlugin):
"""TyCache Loader."""
product_types = {"tycache"}
representations = ["tyc"]
representations = {"tyc"}
order = -8
icon = "code-fork"
color = "green"

View file

@ -42,7 +42,7 @@ class ValidateFrameRange(pyblish.api.InstancePlugin,
return
frame_range = get_frame_range(
instance.data["folderEntity"])
instance.data["taskEntity"])
inst_frame_start = instance.data.get("frameStartHandle")
inst_frame_end = instance.data.get("frameEndHandle")

Some files were not shown because too many files have changed in this diff.