mirror of
https://github.com/ynput/ayon-core.git
synced 2025-12-25 21:32:15 +01:00
Merge branch 'develop' into feature/maya_testing
This commit is contained in:
commit
b4757e1c0e
118 changed files with 2812 additions and 2637 deletions
|
|
@ -16,6 +16,7 @@ import six
|
|||
import appdirs
|
||||
import ayon_api
|
||||
|
||||
from ayon_core import AYON_CORE_ROOT
|
||||
from ayon_core.lib import Logger, is_dev_mode_enabled
|
||||
from ayon_core.settings import get_studio_settings
|
||||
|
||||
|
|
@ -335,14 +336,70 @@ def _load_ayon_addons(openpype_modules, modules_key, log):
|
|||
return addons_to_skip_in_core
|
||||
|
||||
|
||||
def _load_ayon_core_addons_dir(
    ignore_addon_names, openpype_modules, modules_key, log
):
    """Import addon modules bundled in ayon-core's 'addons' directory.

    Each subdirectory of '<AYON_CORE_ROOT>/addons' is put on 'sys.path' and
    its top-level python files/packages are imported. A module is registered
    only when it defines at least one 'AYONAddon' subclass.

    Args:
        ignore_addon_names (Iterable[str]): Directory names to skip. Per the
            comment below these are addons that already have standalone
            client code, so the (older) core copy must not be loaded.
        openpype_modules: Namespace object on which imported modules are set
            as attributes.
        modules_key (str): Prefix under which imported modules are registered
            into 'sys.modules' (as '<modules_key>.<basename>').
        log (logging.Logger): Logger used for import error reporting.

    Returns:
        Union[list, None]: Imported addon modules, or None when the bundled
            'addons' directory does not exist.
    """

    addons_dir = os.path.join(AYON_CORE_ROOT, "addons")
    # Nothing to load when core does not bundle an addons directory
    if not os.path.exists(addons_dir):
        return

    imported_modules = []

    # Make sure that addons which already have client code are not loaded
    # from core again, with older code
    filtered_paths = []
    for name in os.listdir(addons_dir):
        if name in ignore_addon_names:
            continue
        path = os.path.join(addons_dir, name)
        if os.path.isdir(path):
            filtered_paths.append(path)

    for path in filtered_paths:
        # Put the addon directory at the very front of 'sys.path' so its
        # content wins over any same-named module elsewhere on the path
        while path in sys.path:
            sys.path.remove(path)
        sys.path.insert(0, path)

        for name in os.listdir(path):
            fullpath = os.path.join(path, name)
            if os.path.isfile(fullpath):
                basename, ext = os.path.splitext(name)
                # Only '.py' files are importable as addon modules
                if ext != ".py":
                    continue
            else:
                # Directories are treated as packages and imported by name
                basename = name
            try:
                module = __import__(basename, fromlist=("",))
                # Register the module only if it defines an AYONAddon
                # subclass; first match is enough
                for attr_name in dir(module):
                    attr = getattr(module, attr_name)
                    if (
                        inspect.isclass(attr)
                        and issubclass(attr, AYONAddon)
                    ):
                        new_import_str = "{}.{}".format(modules_key, basename)
                        sys.modules[new_import_str] = module
                        setattr(openpype_modules, basename, module)
                        imported_modules.append(module)
                        break

            except Exception:
                # Keep loading remaining addons even if one of them fails
                log.error(
                    "Failed to import addon '{}'.".format(fullpath),
                    exc_info=True
                )
    return imported_modules
|
||||
|
||||
|
||||
def _load_addons_in_core(
|
||||
ignore_addon_names, openpype_modules, modules_key, log
|
||||
):
|
||||
_load_ayon_core_addons_dir(
|
||||
ignore_addon_names, openpype_modules, modules_key, log
|
||||
)
|
||||
# Add current directory at first place
|
||||
# - has small differences in import logic
|
||||
current_dir = os.path.abspath(os.path.dirname(__file__))
|
||||
hosts_dir = os.path.join(os.path.dirname(current_dir), "hosts")
|
||||
modules_dir = os.path.join(os.path.dirname(current_dir), "modules")
|
||||
hosts_dir = os.path.join(AYON_CORE_ROOT, "hosts")
|
||||
modules_dir = os.path.join(AYON_CORE_ROOT, "modules")
|
||||
|
||||
ignored_host_names = set(IGNORED_HOSTS_IN_AYON)
|
||||
ignored_module_dir_filenames = (
|
||||
|
|
|
|||
|
|
@ -0,0 +1,58 @@
|
|||
from .constants import (
|
||||
APPLICATIONS_ADDON_ROOT,
|
||||
DEFAULT_ENV_SUBGROUP,
|
||||
PLATFORM_NAMES,
|
||||
)
|
||||
from .exceptions import (
|
||||
ApplicationNotFound,
|
||||
ApplicationExecutableNotFound,
|
||||
ApplicationLaunchFailed,
|
||||
MissingRequiredKey,
|
||||
)
|
||||
from .defs import (
|
||||
LaunchTypes,
|
||||
ApplicationExecutable,
|
||||
UndefinedApplicationExecutable,
|
||||
ApplicationGroup,
|
||||
Application,
|
||||
EnvironmentToolGroup,
|
||||
EnvironmentTool,
|
||||
)
|
||||
from .hooks import (
|
||||
LaunchHook,
|
||||
PreLaunchHook,
|
||||
PostLaunchHook,
|
||||
)
|
||||
from .manager import (
|
||||
ApplicationManager,
|
||||
ApplicationLaunchContext,
|
||||
)
|
||||
from .addon import ApplicationsAddon
|
||||
|
||||
|
||||
__all__ = (
|
||||
"DEFAULT_ENV_SUBGROUP",
|
||||
"PLATFORM_NAMES",
|
||||
|
||||
"ApplicationNotFound",
|
||||
"ApplicationExecutableNotFound",
|
||||
"ApplicationLaunchFailed",
|
||||
"MissingRequiredKey",
|
||||
|
||||
"LaunchTypes",
|
||||
"ApplicationExecutable",
|
||||
"UndefinedApplicationExecutable",
|
||||
"ApplicationGroup",
|
||||
"Application",
|
||||
"EnvironmentToolGroup",
|
||||
"EnvironmentTool",
|
||||
|
||||
"LaunchHook",
|
||||
"PreLaunchHook",
|
||||
"PostLaunchHook",
|
||||
|
||||
"ApplicationManager",
|
||||
"ApplicationLaunchContext",
|
||||
|
||||
"ApplicationsAddon",
|
||||
)
|
||||
173
client/ayon_core/addons/applications/ayon_applications/addon.py
Normal file
173
client/ayon_core/addons/applications/ayon_applications/addon.py
Normal file
|
|
@ -0,0 +1,173 @@
|
|||
import os
|
||||
import json
|
||||
|
||||
from ayon_core.addon import AYONAddon, IPluginPaths, click_wrap
|
||||
|
||||
from .constants import APPLICATIONS_ADDON_ROOT
|
||||
from .defs import LaunchTypes
|
||||
from .manager import ApplicationManager
|
||||
|
||||
|
||||
class ApplicationsAddon(AYONAddon, IPluginPaths):
    """Addon providing application definitions, launching and CLI tools."""

    name = "applications"

    def initialize(self, settings):
        """Initialize addon from studio settings.

        Args:
            settings (dict): Studio settings.
        """
        # TODO remove when addon is removed from ayon-core
        # Addon is enabled only when its settings are present
        self.enabled = self.name in settings

    def get_app_environments_for_context(
        self,
        project_name,
        folder_path,
        task_name,
        full_app_name,
        env_group=None,
        launch_type=None,
        env=None,
    ):
        """Calculate environment variables for launch context.

        Args:
            project_name (str): Project name.
            folder_path (str): Folder path.
            task_name (str): Task name.
            full_app_name (str): Full application name.
            env_group (Optional[str]): Environment group.
            launch_type (Optional[str]): Launch type.
            env (Optional[dict[str, str]]): Environment variables to update.

        Returns:
            dict[str, str]: Environment variables for context. Empty dict
                when 'full_app_name' is not set.

        """
        # Local import to avoid a circular import at module load time
        from ayon_applications.utils import get_app_environments_for_context

        if not full_app_name:
            return {}

        return get_app_environments_for_context(
            project_name,
            folder_path,
            task_name,
            full_app_name,
            env_group=env_group,
            launch_type=launch_type,
            env=env,
            addons_manager=self.manager
        )

    def get_farm_publish_environment_variables(
        self,
        project_name,
        folder_path,
        task_name,
        full_app_name=None,
        env_group=None,
    ):
        """Calculate environment variables for farm publish.

        Args:
            project_name (str): Project name.
            folder_path (str): Folder path.
            task_name (str): Task name.
            env_group (Optional[str]): Environment group.
            full_app_name (Optional[str]): Full application name. Value from
                environment variable 'AYON_APP_NAME' is used if 'None' is
                passed.

        Returns:
            dict[str, str]: Environment variables for farm publish.

        """
        if full_app_name is None:
            full_app_name = os.getenv("AYON_APP_NAME")

        return self.get_app_environments_for_context(
            project_name,
            folder_path,
            task_name,
            full_app_name,
            env_group=env_group,
            launch_type=LaunchTypes.farm_publish
        )

    def get_applications_manager(self, settings=None):
        """Get applications manager.

        Args:
            settings (Optional[dict]): Studio/project settings.

        Returns:
            ApplicationManager: Applications manager.

        """
        return ApplicationManager(settings)

    def get_plugin_paths(self):
        """Plugin paths provided by this addon (IPluginPaths interface).

        Returns:
            dict[str, list[str]]: Publish plugin directories.
        """
        return {
            "publish": [
                os.path.join(APPLICATIONS_ADDON_ROOT, "plugins", "publish")
            ]
        }

    # --- CLI ---
    def cli(self, addon_click_group):
        """Register addon CLI commands under parent click group."""
        main_group = click_wrap.group(
            self._cli_main, name=self.name, help="Applications addon"
        )
        (
            main_group.command(
                self._cli_extract_environments,
                name="extractenvironments",
                help=(
                    "Extract environment variables for context into json file"
                )
            )
            .argument("output_json_path")
            .option("--project", help="Project name", default=None)
            .option("--folder", help="Folder path", default=None)
            .option("--task", help="Task name", default=None)
            .option("--app", help="Application name", default=None)
            .option(
                "--envgroup",
                help="Environment group (e.g. \"farm\")",
                default=None
            )
        )
        # Convert main command to click object and add it to parent group
        addon_click_group.add_command(
            main_group.to_click_obj()
        )

    def _cli_main(self):
        """Entry point of addon CLI group - has no behavior on its own."""
        pass

    def _cli_extract_environments(
        self, output_json_path, project, folder, task, app, envgroup
    ):
        """Produces json file with environment based on project and app.

        Called by farm integration to propagate environment into farm jobs.

        Args:
            output_json_path (str): Output json file path.
            project (str): Project name.
            folder (str): Folder path.
            task (str): Task name.
            app (str): Full application name e.g. 'maya/2024'.
            envgroup (str): Environment group.

        """
        if all((project, folder, task, app)):
            env = self.get_farm_publish_environment_variables(
                project, folder, task, app, env_group=envgroup,
            )
        else:
            # Without full context fall back to current process environment
            env = os.environ.copy()

        # BUGFIX: 'os.path.dirname' returns "" for a bare filename which
        #   previously crashed in 'os.makedirs("")'; guard against it and
        #   use 'exist_ok' to avoid a race between the check and creation.
        output_dir = os.path.dirname(output_json_path)
        if output_dir:
            os.makedirs(output_dir, exist_ok=True)

        with open(output_json_path, "w") as file_stream:
            json.dump(env, file_stream, indent=4)
|
||||
|
|
@ -0,0 +1,6 @@
|
|||
import os
|
||||
|
||||
# Root directory of the 'ayon_applications' package (directory of this file);
# used to resolve bundled resources such as publish plugins.
APPLICATIONS_ADDON_ROOT = os.path.dirname(os.path.abspath(__file__))

# Platform identifiers as produced by 'platform.system().lower()'
PLATFORM_NAMES = {"windows", "linux", "darwin"}
# Environment group used when no explicit group is requested
DEFAULT_ENV_SUBGROUP = "standard"
|
||||
404
client/ayon_core/addons/applications/ayon_applications/defs.py
Normal file
404
client/ayon_core/addons/applications/ayon_applications/defs.py
Normal file
|
|
@ -0,0 +1,404 @@
|
|||
import os
|
||||
import platform
|
||||
import json
|
||||
import copy
|
||||
|
||||
from ayon_core.lib import find_executable
|
||||
|
||||
|
||||
class LaunchTypes:
    """Launch types are filters for pre/post-launch hooks.

    Values are used to match hooks' 'launch_types' sets against the launch
    context. Please use these variables in case they'll change values.
    """

    # Local launch - application is launched on local machine
    local = "local"
    # Farm render job - application is on farm
    farm_render = "farm-render"
    # Farm publish job - integration post-render job
    farm_publish = "farm-publish"
    # Remote launch - application is launched on remote machine from which
    #   can be started publishing
    remote = "remote"
    # Automated launch - application is launched with automated publishing
    automated = "automated"
|
||||
|
||||
|
||||
class ApplicationExecutable:
    """Representation of executable loaded from settings.

    Args:
        executable (str): Executable path as stored in settings. May contain
            '{ENV_VAR}' placeholders filled from 'os.environ'.
    """

    def __init__(self, executable):
        # Try to format executable with environments; ignore failures so
        # unmatched placeholders keep the raw value
        try:
            executable = executable.format(**os.environ)
        except Exception:
            pass

        # On MacOS check if exists path to executable when ends with `.app`
        # - it is common that path will lead to "/Applications/Blender" but
        #   real path is "/Applications/Blender.app"
        if platform.system().lower() == "darwin":
            executable = self.macos_executable_prep(executable)

        self.executable_path = executable

    def __str__(self):
        return self.executable_path

    def __repr__(self):
        return "<{}> {}".format(self.__class__.__name__, self.executable_path)

    @staticmethod
    def macos_executable_prep(executable):
        """Try to find full path to executable file.

        Real executable is stored in '*.app/Contents/MacOS/<executable>'.

        Having path to '*.app' gives ability to read its plist info and
        use "CFBundleExecutable" key from plist to know what is "executable."

        Plist is stored in '*.app/Contents/Info.plist'.

        This is because some '*.app' directories don't have same permissions
        as real executable.
        """
        # Try to find if there is `.app` file
        if not os.path.exists(executable):
            _executable = executable + ".app"
            if os.path.exists(_executable):
                executable = _executable

        # Try to find real executable if executable has `Contents` subfolder
        contents_dir = os.path.join(executable, "Contents")
        if os.path.exists(contents_dir):
            executable_filename = None
            # Load plist file and check for bundle executable
            plist_filepath = os.path.join(contents_dir, "Info.plist")
            if os.path.exists(plist_filepath):
                import plistlib

                # 'plistlib.load' is not available on very old Pythons;
                # fall back to the legacy 'readPlist' there
                if hasattr(plistlib, "load"):
                    with open(plist_filepath, "rb") as stream:
                        parsed_plist = plistlib.load(stream)
                else:
                    parsed_plist = plistlib.readPlist(plist_filepath)
                executable_filename = parsed_plist.get("CFBundleExecutable")

            if executable_filename:
                executable = os.path.join(
                    contents_dir, "MacOS", executable_filename
                )

        return executable

    def as_args(self):
        """Executable as subprocess arguments.

        Returns:
            list[str]: Single-item list with the executable path.
        """
        return [self.executable_path]

    def _realpath(self):
        """Check if path is valid executable path.

        Returns:
            Union[str, None]: Resolved path, or None when not available.
        """
        # Check for executable in PATH
        result = find_executable(self.executable_path)
        if result is not None:
            return result

        # This is not 100% validation but it is better than remove ability to
        #   launch .bat, .sh or extensionless files
        if os.path.exists(self.executable_path):
            return self.executable_path
        return None

    def exists(self):
        """Check if the executable is available on this machine.

        Returns:
            bool: Executable path is set and resolvable.
        """
        if not self.executable_path:
            return False
        return bool(self._realpath())
|
||||
|
||||
|
||||
class UndefinedApplicationExecutable(ApplicationExecutable):
    """Placeholder executable for applications without a settings path.

    Some applications do not require executable path from settings. This
    class "fakes" an existing executable so launch logic can treat them
    uniformly.
    """

    def __init__(self):
        # Deliberately skip parent constructor - there is no path to prepare
        pass

    def __str__(self):
        return self.__class__.__name__

    def __repr__(self):
        return "<{}>".format(self.__class__.__name__)

    def as_args(self):
        # Contributes nothing to the launch arguments
        return []

    def exists(self):
        # Considered always available by definition
        return True
|
||||
|
||||
|
||||
class ApplicationGroup:
    """Hold information about application group.

    Application group wraps different versions(variants) of application.
    e.g. "maya" is group and "maya_2020" is variant.

    Group hold `host_name` which is implementation name used in AYON. Also
    holds `enabled` if whole app group is enabled or `icon` for application
    icon path in resources.

    Group has also `environment` which hold same environments for all
    variants.

    Args:
        name (str): Groups' name.
        data (dict): Group defying data loaded from settings.
        manager (ApplicationManager): Manager that created the group.
    """

    def __init__(self, name, data, manager):
        self.name = name
        self.manager = manager
        self._data = data

        self.enabled = data["enabled"]
        self.label = data["label"] or None
        self.icon = data["icon"] or None
        # Environment is stored as a json string in settings; fall back to
        # empty environment when it cannot be parsed
        env = {}
        try:
            env = json.loads(data["environment"])
        except Exception:
            pass
        self._environment = env

        host_name = data["host_name"] or None
        # Group is a "host" when a host implementation name is set
        self.is_host = host_name is not None
        self.host_name = host_name

        settings_variants = data["variants"]
        variants = {}
        for variant_data in settings_variants:
            app_variant = Application(variant_data, self)
            variants[app_variant.name] = app_variant

        self.variants = variants

    def __repr__(self):
        return "<{}> - {}".format(self.__class__.__name__, self.name)

    def __iter__(self):
        # Iterate over variant applications
        for variant in self.variants.values():
            yield variant

    @property
    def environment(self):
        """dict: Deep copy of group environment (callers may mutate it)."""
        return copy.deepcopy(self._environment)
|
||||
|
||||
|
||||
class Application:
    """Hold information about application.

    Object by itself does nothing special.

    Args:
        data (dict): Data for the version containing information about
            executables, variant label or if is enabled.
            Only required key is `executables`.
        group (ApplicationGroup): App group object that created the
            application and under which application belongs.

    """
    def __init__(self, data, group):
        self._data = data
        name = data["name"]
        label = data["label"] or name
        # Variant can be enabled only when the whole group is enabled
        enabled = False
        if group.enabled:
            enabled = data.get("enabled", True)

        # Full label combines group and variant label (e.g. "Maya 2024")
        if group.label:
            full_label = " ".join((group.label, label))
        else:
            full_label = label
        # Environment is stored as a json string in settings
        env = {}
        try:
            env = json.loads(data["environment"])
        except Exception:
            pass

        arguments = data["arguments"]
        # Arguments may be platform specific (dict keyed by platform name)
        if isinstance(arguments, dict):
            arguments = arguments.get(platform.system().lower())

        if not arguments:
            arguments = []

        # Only executables for the current platform are relevant
        _executables = data["executables"].get(platform.system().lower(), [])
        executables = [
            ApplicationExecutable(executable)
            for executable in _executables
        ]

        self.group = group

        self.name = name
        self.label = label
        self.enabled = enabled
        self.use_python_2 = data.get("use_python_2", False)

        self.full_name = "/".join((group.name, name))
        self.full_label = full_label
        self.arguments = arguments
        self.executables = executables
        self._environment = env

    def __repr__(self):
        return "<{}> - {}".format(self.__class__.__name__, self.full_name)

    @property
    def environment(self):
        """dict: Deep copy of application environment."""
        return copy.deepcopy(self._environment)

    @property
    def manager(self):
        """ApplicationManager: Manager of the parent group."""
        return self.group.manager

    @property
    def host_name(self):
        """Union[str, None]: Host implementation name of parent group."""
        return self.group.host_name

    @property
    def icon(self):
        """Union[str, None]: Icon of parent group."""
        return self.group.icon

    @property
    def is_host(self):
        """bool: Parent group has a host implementation."""
        return self.group.is_host

    def find_executable(self):
        """Try to find existing executable for application.

        Returns (str): Path to executable from `executables` or None if any
            exists.
        """
        for executable in self.executables:
            if executable.exists():
                return executable
        return None

    def launch(self, *args, **kwargs):
        """Launch the application.

        For this purpose is used manager's launch method to keep logic at one
        place.

        Arguments must match with manager's launch method. That's why *args
        **kwargs are used.

        Returns:
            subprocess.Popen: Return executed process as Popen object.
        """
        return self.manager.launch(self.full_name, *args, **kwargs)
|
||||
|
||||
|
||||
class EnvironmentToolGroup:
    """Hold information about environment tool group.

    Environment tool group may hold different variants of same tool and set
    environments that are same for all of them.

    e.g. "mtoa" may have different versions but all environments except one
    are same.

    Args:
        data (dict): Group information with variants.
        manager (ApplicationManager): Manager that creates the group.
    """

    def __init__(self, data, manager):
        name = data["name"]
        label = data["label"]

        self.name = name
        self.label = label
        self._data = data
        self.manager = manager

        # Environment is stored as a json string in settings
        environment = {}
        try:
            environment = json.loads(data["environment"])
        except Exception:
            pass
        self._environment = environment

        variants = data.get("variants") or []
        variants_by_name = {}
        for variant_data in variants:
            tool = EnvironmentTool(variant_data, self)
            variants_by_name[tool.name] = tool
        self.variants = variants_by_name

    def __repr__(self):
        return "<{}> - {}".format(self.__class__.__name__, self.name)

    def __iter__(self):
        # Iterate over tool variants
        for variant in self.variants.values():
            yield variant

    @property
    def environment(self):
        """dict: Deep copy of group environment."""
        return copy.deepcopy(self._environment)
|
||||
|
||||
|
||||
class EnvironmentTool:
    """Hold information about application tool.

    Structure of tool information.

    Args:
        variant_data (dict): Variant data with environments and
            host and app variant filters.
        group (EnvironmentToolGroup): Group which wraps the tool.
    """

    def __init__(self, variant_data, group):
        # Backwards compatibility 3.9.1 - 3.9.2
        # - in those versions 'variant_data' contained only environments,
        #   now it also contains host and application variant filters
        name = variant_data["name"]
        label = variant_data["label"]
        host_names = variant_data["host_names"]
        app_variants = variant_data["app_variants"]

        # Environment is stored as a json string in settings
        environment = {}
        try:
            environment = json.loads(variant_data["environment"])
        except Exception:
            pass

        self.host_names = host_names
        self.app_variants = app_variants
        self.name = name
        self.variant_label = label
        self.label = " ".join((group.label, label))
        self.group = group

        self._environment = environment
        self.full_name = "/".join((group.name, name))

    def __repr__(self):
        return "<{}> - {}".format(self.__class__.__name__, self.full_name)

    @property
    def environment(self):
        """dict: Deep copy of tool environment."""
        return copy.deepcopy(self._environment)

    def is_valid_for_app(self, app):
        """Is tool valid for application.

        Empty filters mean "valid for all".

        Args:
            app (Application): Application for which are prepared
                environments.
        """
        if self.app_variants and app.full_name not in self.app_variants:
            return False

        if self.host_names and app.host_name not in self.host_names:
            return False
        return True
|
||||
|
|
@ -0,0 +1,50 @@
|
|||
class ApplicationNotFound(Exception):
    """Raised when ApplicationManager has no application under given name."""

    def __init__(self, app_name):
        # Keep the requested name available to callers handling the error
        self.app_name = app_name
        message = "Application \"{}\" was not found.".format(app_name)
        super(ApplicationNotFound, self).__init__(message)
|
||||
|
||||
|
||||
class ApplicationExecutableNotFound(Exception):
    """Defined executable paths are not available on the machine.

    Args:
        application (Application): Application whose executables could not
            be resolved. Must expose 'executables', 'full_label' and
            'full_name'.
    """

    def __init__(self, application):
        self.application = application

        # Distinguish "nothing is configured" from "configured but missing"
        if application.executables:
            msg = (
                "Defined executable paths for application \"{}\"({})"
                " are not available on this machine."
            )
            detail_lines = ["Defined paths:"]
            for item in application.executables:
                detail_lines.append("- " + item.executable_path)
            details = "\n".join(detail_lines)
        else:
            msg = (
                "Executable paths for application \"{}\"({}) are not set."
            )
            details = None

        self.msg = msg.format(application.full_label, application.full_name)
        self.details = details

        exc_msg = str(self.msg)
        if details:
            # Is good idea to pass new line symbol to exception message?
            exc_msg += "\n" + details
        self.exc_msg = exc_msg
        super(ApplicationExecutableNotFound, self).__init__(exc_msg)
|
||||
|
||||
|
||||
class ApplicationLaunchFailed(Exception):
    """Application launch failed for a known reason.

    The message should be self explanatory as traceback won't be shown.
    """
|
||||
|
||||
|
||||
class MissingRequiredKey(KeyError):
    """A key required for application launch preparation is missing."""
|
||||
150
client/ayon_core/addons/applications/ayon_applications/hooks.py
Normal file
150
client/ayon_core/addons/applications/ayon_applications/hooks.py
Normal file
|
|
@ -0,0 +1,150 @@
|
|||
import platform
|
||||
from abc import ABCMeta, abstractmethod
|
||||
|
||||
import six
|
||||
|
||||
from ayon_core.lib import Logger
|
||||
|
||||
from .defs import LaunchTypes
|
||||
|
||||
|
||||
@six.add_metaclass(ABCMeta)
class LaunchHook:
    """Abstract base class of launch hook.

    A hook is instantiated per launch context; class-level filters decide
    whether it applies, then 'validate' can veto at instance level and
    'execute' performs the actual work.
    """
    # Order of prelaunch hook, will be executed as last if set to None.
    order = None
    # List of host implementations, skipped if empty.
    hosts = set()
    # Set of application groups
    app_groups = set()
    # Set of specific application names
    app_names = set()
    # Set of platform availability
    platforms = set()
    # Set of launch types for which is available
    # - if empty then is available for all launch types
    # - by default has 'local' which is most common reason for launch hooks
    launch_types = {LaunchTypes.local}

    def __init__(self, launch_context):
        """Constructor of launch hook.

        Always should be called

        Args:
            launch_context (ApplicationLaunchContext): Context of launching
                application.
        """
        self.log = Logger.get_logger(self.__class__.__name__)

        self.launch_context = launch_context

        # Class filters first, then instance-level 'validate'
        is_valid = self.class_validation(launch_context)
        if is_valid:
            is_valid = self.validate()

        self.is_valid = is_valid

    @classmethod
    def class_validation(cls, launch_context):
        """Validation of class attributes by launch context.

        Each non-empty class filter must match the launch context; empty
        filters are skipped (treated as "matches everything").

        Args:
            launch_context (ApplicationLaunchContext): Context of launching
                application.

        Returns:
            bool: Is launch hook valid for the context by class attributes.
        """
        if cls.platforms:
            # Normalize to lowercase for comparison with platform.system()
            low_platforms = tuple(
                _platform.lower()
                for _platform in cls.platforms
            )
            if platform.system().lower() not in low_platforms:
                return False

        if cls.hosts:
            if launch_context.host_name not in cls.hosts:
                return False

        if cls.app_groups:
            if launch_context.app_group.name not in cls.app_groups:
                return False

        if cls.app_names:
            if launch_context.app_name not in cls.app_names:
                return False

        if cls.launch_types:
            if launch_context.launch_type not in cls.launch_types:
                return False

        return True

    @property
    def data(self):
        """dict: Shared data of the launch context."""
        return self.launch_context.data

    @property
    def application(self):
        """Application launched by the context (None when not set)."""
        return getattr(self.launch_context, "application", None)

    @property
    def manager(self):
        """ApplicationManager of the application (None when not set)."""
        return getattr(self.application, "manager", None)

    @property
    def host_name(self):
        """Host implementation name (None when not set)."""
        return getattr(self.application, "host_name", None)

    @property
    def app_group(self):
        """ApplicationGroup of the application (None when not set)."""
        return getattr(self.application, "group", None)

    @property
    def app_name(self):
        """Full application name (None when not set)."""
        return getattr(self.application, "full_name", None)

    @property
    def addons_manager(self):
        """AddonsManager from the launch context (None when not set)."""
        return getattr(self.launch_context, "addons_manager", None)

    @property
    def modules_manager(self):
        """
        Deprecated:
            Use 'addons_manager' instead.
        """
        return self.addons_manager

    def validate(self):
        """Optional validation of launch hook on initialization.

        Returns:
            bool: Hook is valid (True) or invalid (False).
        """
        # QUESTION Not sure if this method has any usable potential.
        # - maybe result can be based on settings
        return True

    @abstractmethod
    def execute(self, *args, **kwargs):
        """Abstract execute method where logic of hook is."""
        pass
|
||||
|
||||
|
||||
class PreLaunchHook(LaunchHook):
    """Abstract class of prelaunch hook.

    This launch hook will be processed before application is launched.

    If any exception will happen during processing the application won't be
    launched.
    """
|
||||
|
||||
|
||||
class PostLaunchHook(LaunchHook):
    """Abstract class of postlaunch hook.

    This launch hook will be processed after application is launched.

    Nothing will happen if any exception will happen during processing. And
    processing of other postlaunch hooks won't stop either.
    """
|
||||
|
|
@ -0,0 +1,676 @@
|
|||
import os
|
||||
import sys
|
||||
import copy
|
||||
import json
|
||||
import tempfile
|
||||
import platform
|
||||
import inspect
|
||||
import subprocess
|
||||
|
||||
import six
|
||||
|
||||
from ayon_core import AYON_CORE_ROOT
|
||||
from ayon_core.settings import get_studio_settings
|
||||
from ayon_core.lib import (
|
||||
Logger,
|
||||
modules_from_path,
|
||||
classes_from_module,
|
||||
get_linux_launcher_args,
|
||||
)
|
||||
from ayon_core.addon import AddonsManager
|
||||
|
||||
from .constants import DEFAULT_ENV_SUBGROUP
|
||||
from .exceptions import (
|
||||
ApplicationNotFound,
|
||||
ApplicationExecutableNotFound,
|
||||
)
|
||||
from .hooks import PostLaunchHook, PreLaunchHook
|
||||
from .defs import EnvironmentToolGroup, ApplicationGroup, LaunchTypes
|
||||
|
||||
|
||||
class ApplicationManager:
|
||||
"""Load applications and tools and store them by their full name.
|
||||
|
||||
Args:
|
||||
studio_settings (dict): Preloaded studio settings. When passed manager
|
||||
will always use these values. Gives ability to create manager
|
||||
using different settings.
|
||||
"""
|
||||
|
||||
def __init__(self, studio_settings=None):
|
||||
self.log = Logger.get_logger(self.__class__.__name__)
|
||||
|
||||
self.app_groups = {}
|
||||
self.applications = {}
|
||||
self.tool_groups = {}
|
||||
self.tools = {}
|
||||
|
||||
self._studio_settings = studio_settings
|
||||
|
||||
self.refresh()
|
||||
|
||||
def set_studio_settings(self, studio_settings):
|
||||
"""Ability to change init system settings.
|
||||
|
||||
This will trigger refresh of manager.
|
||||
"""
|
||||
self._studio_settings = studio_settings
|
||||
|
||||
self.refresh()
|
||||
|
||||
def refresh(self):
|
||||
"""Refresh applications from settings."""
|
||||
self.app_groups.clear()
|
||||
self.applications.clear()
|
||||
self.tool_groups.clear()
|
||||
self.tools.clear()
|
||||
|
||||
if self._studio_settings is not None:
|
||||
settings = copy.deepcopy(self._studio_settings)
|
||||
else:
|
||||
settings = get_studio_settings(
|
||||
clear_metadata=False, exclude_locals=False
|
||||
)
|
||||
|
||||
applications_addon_settings = settings["applications"]
|
||||
|
||||
# Prepare known applications
|
||||
app_defs = applications_addon_settings["applications"]
|
||||
additional_apps = app_defs.pop("additional_apps")
|
||||
for additional_app in additional_apps:
|
||||
app_name = additional_app.pop("name")
|
||||
if app_name in app_defs:
|
||||
self.log.warning((
|
||||
"Additional application '{}' is already"
|
||||
" in built-in applications."
|
||||
).format(app_name))
|
||||
app_defs[app_name] = additional_app
|
||||
|
||||
for group_name, variant_defs in app_defs.items():
|
||||
group = ApplicationGroup(group_name, variant_defs, self)
|
||||
self.app_groups[group_name] = group
|
||||
for app in group:
|
||||
self.applications[app.full_name] = app
|
||||
|
||||
tools_definitions = applications_addon_settings["tool_groups"]
|
||||
for tool_group_data in tools_definitions:
|
||||
group = EnvironmentToolGroup(tool_group_data, self)
|
||||
self.tool_groups[group.name] = group
|
||||
for tool in group:
|
||||
self.tools[tool.full_name] = tool
|
||||
|
||||
def find_latest_available_variant_for_group(self, group_name):
    """Find the newest variant of a group with an available executable.

    Variants are checked from the highest variant key down; the first
    variant whose executable exists on this machine wins.

    Args:
        group_name (str): Name of application group.

    Returns:
        Union[Application, None]: Matching variant, or None when the
            group is unknown, disabled, or no executable was found.
    """
    group = self.app_groups.get(group_name)
    if group is None or not group.enabled:
        return None

    for _, variant in sorted(group.variants.items(), reverse=True):
        if variant.find_executable():
            return variant
    return None
|
||||
|
||||
def create_launch_context(self, app_name, **data):
    """Prepare launch context for application.

    Args:
        app_name (str): Name of application that should be launched.
        **data (Any): Any additional data passed through to the context.

    Returns:
        ApplicationLaunchContext: Launch context for application.

    Raises:
        ApplicationNotFound: Application was not found by entered name.
    """
    app = self.applications.get(app_name)
    if not app:
        raise ApplicationNotFound(app_name)

    # Executable may be None; the context can still be used to prepare
    #   environment variables in that case
    return ApplicationLaunchContext(
        app, app.find_executable(), **data
    )
|
||||
|
||||
def launch_with_context(self, launch_context):
    """Launch application using existing launch context.

    Args:
        launch_context (ApplicationLaunchContext): Prepared launch
            context.

    Returns:
        Result of the context's 'launch' method.

    Raises:
        ApplicationExecutableNotFound: Context does not have an
            executable set.
    """
    if not launch_context.executable:
        raise ApplicationExecutableNotFound(launch_context.application)
    return launch_context.launch()
|
||||
|
||||
def launch(self, app_name, **data):
    """Launch procedure.

    For host application it's expected to contain "project_name",
    "folder_path" and "task_name".

    Args:
        app_name (str): Name of application that should be launched.
        **data (dict): Any additional data. Data may be used during
            preparation to store objects usable in multiple places.

    Raises:
        ApplicationNotFound: Application was not found by entered
            argument `app_name`.
        ApplicationExecutableNotFound: Executables in application
            definition were not found on this machine.
        ApplicationLaunchFailed: Something important for application
            launch failed. Exception should contain explanation message,
            traceback should not be needed.
    """
    return self.launch_with_context(
        self.create_launch_context(app_name, **data)
    )
|
||||
|
||||
|
||||
class ApplicationLaunchContext:
|
||||
"""Context of launching application.
|
||||
|
||||
Main purpose of context is to prepare launch arguments and keyword
|
||||
arguments for new process. Most important part of keyword arguments
|
||||
preparations are environment variables.
|
||||
|
||||
During the whole process is possible to use `data` attribute to store
|
||||
object usable in multiple places.
|
||||
|
||||
Launch arguments are strings in list. It is possible to "chain" argument
|
||||
when order of them matters. That is possible to do with adding list where
|
||||
order is right and should not change.
|
||||
NOTE: This is recommendation, not requirement.
|
||||
e.g.: `["nuke.exe", "--NukeX"]` -> In this case any part of process may
|
||||
insert argument between `nuke.exe` and `--NukeX`. To keep them together
|
||||
it is better to wrap them in another list: `[["nuke.exe", "--NukeX"]]`.
|
||||
|
||||
Notes:
|
||||
It is possible to use launch context only to prepare environment
|
||||
variables. In that case `executable` may be None and can be used
|
||||
'run_prelaunch_hooks' method to run prelaunch hooks which prepare
|
||||
them.
|
||||
|
||||
Args:
|
||||
application (Application): Application definition.
|
||||
executable (ApplicationExecutable): Object with path to executable.
|
||||
env_group (Optional[str]): Environment variable group. If not set
|
||||
'DEFAULT_ENV_SUBGROUP' is used.
|
||||
launch_type (Optional[str]): Launch type. If not set 'local' is used.
|
||||
**data (dict): Any additional data. Data may be used during
|
||||
preparation to store objects usable in multiple places.
|
||||
"""
|
||||
|
||||
def __init__(
    self,
    application,
    executable,
    env_group=None,
    launch_type=None,
    **data
):
    # Application object
    self.application = application

    self.addons_manager = AddonsManager()

    # Logger named per launched application for traceable log output
    logger_name = "{}-{}".format(self.__class__.__name__,
                                 self.application.full_name)
    self.log = Logger.get_logger(logger_name)

    # May be None when context is used only to prepare environments
    self.executable = executable

    if launch_type is None:
        launch_type = LaunchTypes.local
    self.launch_type = launch_type

    if env_group is None:
        env_group = DEFAULT_ENV_SUBGROUP

    self.env_group = env_group

    # Shared storage usable by hooks during the whole launch process
    self.data = dict(data)

    launch_args = []
    if executable is not None:
        launch_args = executable.as_args()
    # subprocess.Popen launch arguments (first argument in constructor)
    self.launch_args = launch_args
    self.launch_args.extend(application.arguments)
    if self.data.get("app_args"):
        self.launch_args.extend(self.data.pop("app_args"))

    # Handle launch environments
    # - explicit 'env' kwarg wins; falls back to current process env
    src_env = self.data.pop("env", None)
    if src_env is not None and not isinstance(src_env, dict):
        self.log.warning((
            "Passed `env` kwarg has invalid type: {}. Expected: `dict`."
            " Using `os.environ` instead."
        ).format(str(type(src_env))))
        src_env = None

    if src_env is None:
        src_env = os.environ

    # Stringify values and drop keys that would break the subprocess
    ignored_env = {"QT_API", }
    env = {
        key: str(value)
        for key, value in src_env.items()
        if key not in ignored_env
    }
    # subprocess.Popen keyword arguments
    self.kwargs = {"env": env}

    if platform.system().lower() == "windows":
        # Detach new process from currently running process on Windows
        flags = (
            subprocess.CREATE_NEW_PROCESS_GROUP
            | subprocess.DETACHED_PROCESS
        )
        self.kwargs["creationflags"] = flags

        # No console attached (e.g. GUI launcher) -> discard output so
        #   the child does not fail writing to invalid std handles
        if not sys.stdout:
            self.kwargs["stdout"] = subprocess.DEVNULL
            self.kwargs["stderr"] = subprocess.DEVNULL

    # Filled by 'discover_launch_hooks'
    self.prelaunch_hooks = None
    self.postlaunch_hooks = None

    # Filled by 'launch'
    self.process = None
    self._prelaunch_hooks_executed = False
|
||||
|
||||
@property
def env(self):
    """dict: Environment variables passed to the launched process.

    Lazily ensures the 'env' key exists in Popen kwargs so callers can
    always mutate the returned dict in place.
    """
    if (
        "env" not in self.kwargs
        or self.kwargs["env"] is None
    ):
        self.kwargs["env"] = {}
    return self.kwargs["env"]

@env.setter
def env(self, value):
    # Replace environments wholesale; only dict is a valid value
    if not isinstance(value, dict):
        raise ValueError(
            "'env' attribute expect 'dict' object. Got: {}".format(
                str(type(value))
            )
        )
    self.kwargs["env"] = value
|
||||
|
||||
@property
def modules_manager(self):
    """AddonsManager: Backwards compatible alias of 'addons_manager'.

    Deprecated:
        Use 'addons_manager' instead.

    """
    return self.addons_manager
|
||||
|
||||
def _collect_addons_launch_hook_paths(self):
|
||||
"""Helper to collect application launch hooks from addons.
|
||||
|
||||
Module have to have implemented 'get_launch_hook_paths' method which
|
||||
can expect application as argument or nothing.
|
||||
|
||||
Returns:
|
||||
List[str]: Paths to launch hook directories.
|
||||
"""
|
||||
|
||||
expected_types = (list, tuple, set)
|
||||
|
||||
output = []
|
||||
for module in self.addons_manager.get_enabled_addons():
|
||||
# Skip module if does not have implemented 'get_launch_hook_paths'
|
||||
func = getattr(module, "get_launch_hook_paths", None)
|
||||
if func is None:
|
||||
continue
|
||||
|
||||
func = module.get_launch_hook_paths
|
||||
if hasattr(inspect, "signature"):
|
||||
sig = inspect.signature(func)
|
||||
expect_args = len(sig.parameters) > 0
|
||||
else:
|
||||
expect_args = len(inspect.getargspec(func)[0]) > 0
|
||||
|
||||
# Pass application argument if method expect it.
|
||||
try:
|
||||
if expect_args:
|
||||
hook_paths = func(self.application)
|
||||
else:
|
||||
hook_paths = func()
|
||||
except Exception:
|
||||
self.log.warning(
|
||||
"Failed to call 'get_launch_hook_paths'",
|
||||
exc_info=True
|
||||
)
|
||||
continue
|
||||
|
||||
if not hook_paths:
|
||||
continue
|
||||
|
||||
# Convert string to list
|
||||
if isinstance(hook_paths, six.string_types):
|
||||
hook_paths = [hook_paths]
|
||||
|
||||
# Skip invalid types
|
||||
if not isinstance(hook_paths, expected_types):
|
||||
self.log.warning((
|
||||
"Result of `get_launch_hook_paths`"
|
||||
" has invalid type {}. Expected {}"
|
||||
).format(type(hook_paths), expected_types))
|
||||
continue
|
||||
|
||||
output.extend(hook_paths)
|
||||
return output
|
||||
|
||||
def paths_to_launch_hooks(self):
    """Directory paths where to look for launch hooks.

    Returns:
        List[str]: Existing, deduplicated hook directory paths from
            ayon-core plus all enabled addons.
    """
    # This method has potential to be part of application manager (maybe).
    paths = []

    # TODO load additional studio paths from settings
    global_hooks_dir = os.path.join(AYON_CORE_ROOT, "hooks")

    hooks_dirs = [
        global_hooks_dir
    ]
    if self.host_name:
        # If host requires launch hooks and is module then launch hooks
        # should be collected using 'collect_launch_hook_paths'
        # - module have to implement 'get_launch_hook_paths'
        host_module = self.addons_manager.get_host_addon(self.host_name)
        if not host_module:
            # Fall back to in-core host hooks only when no host addon
            #   provides them
            hooks_dirs.append(os.path.join(
                AYON_CORE_ROOT, "hosts", self.host_name, "hooks"
            ))

    # Keep only existing directories, skipping duplicates
    for path in hooks_dirs:
        if (
            os.path.exists(path)
            and os.path.isdir(path)
            and path not in paths
        ):
            paths.append(path)

    # Load modules paths
    paths.extend(self._collect_addons_launch_hook_paths())

    return paths
|
||||
|
||||
def discover_launch_hooks(self, force=False):
    """Load and prepare launch hooks.

    Fills 'prelaunch_hooks' and 'postlaunch_hooks' with initialized,
    valid hook instances sorted by their 'order' attribute.

    Args:
        force (bool): Re-run discovery even when hooks were already
            discovered; previously found hooks are dropped.
    """
    if (
        self.prelaunch_hooks is not None
        or self.postlaunch_hooks is not None
    ):
        if not force:
            self.log.info("Launch hooks were already discovered.")
            return

        # Forced rediscovery drops previously found hooks
        self.prelaunch_hooks.clear()
        self.postlaunch_hooks.clear()

    self.log.debug("Discovery of launch hooks started.")

    paths = self.paths_to_launch_hooks()
    self.log.debug("Paths searched for launch hooks:\n{}".format(
        "\n".join("- {}".format(path) for path in paths)
    ))

    # Collect hook classes from python modules found on the paths
    all_classes = {
        "pre": [],
        "post": []
    }
    for path in paths:
        if not os.path.exists(path):
            self.log.info(
                "Path to launch hooks does not exist: \"{}\"".format(path)
            )
            continue

        modules, _crashed = modules_from_path(path)
        for _filepath, module in modules:
            all_classes["pre"].extend(
                classes_from_module(PreLaunchHook, module)
            )
            all_classes["post"].extend(
                classes_from_module(PostLaunchHook, module)
            )

    # Initialize hooks and split them by presence of 'order'
    for launch_type, classes in all_classes.items():
        hooks_with_order = []
        hooks_without_order = []
        for klass in classes:
            try:
                hook = klass(self)
                # Hook may opt out for this particular launch context
                if not hook.is_valid:
                    self.log.debug(
                        "Skipped hook invalid for current launch context: "
                        "{}".format(klass.__name__)
                    )
                    continue

                if inspect.isabstract(hook):
                    self.log.debug("Skipped abstract hook: {}".format(
                        klass.__name__
                    ))
                    continue

                # Separate hooks by pre/post class
                if hook.order is None:
                    hooks_without_order.append(hook)
                else:
                    hooks_with_order.append(hook)

            except Exception:
                # Broken hook must not break the whole launch
                self.log.warning(
                    "Initialization of hook failed: "
                    "{}".format(klass.__name__),
                    exc_info=True
                )

        # Sort hooks with order by order
        ordered_hooks = list(sorted(
            hooks_with_order, key=lambda obj: obj.order
        ))
        # Extend ordered hooks with hooks without defined order
        ordered_hooks.extend(hooks_without_order)

        if launch_type == "pre":
            self.prelaunch_hooks = ordered_hooks
        else:
            self.postlaunch_hooks = ordered_hooks

    self.log.debug("Found {} prelaunch and {} postlaunch hooks.".format(
        len(self.prelaunch_hooks), len(self.postlaunch_hooks)
    ))
|
||||
|
||||
@property
def app_name(self):
    """Shortcut to name of launched application."""
    return self.application.name

@property
def host_name(self):
    """Shortcut to host name of launched application."""
    return self.application.host_name

@property
def app_group(self):
    """Shortcut to group of launched application."""
    return self.application.group

@property
def manager(self):
    """Shortcut to manager of launched application."""
    return self.application.manager
|
||||
|
||||
def _run_process(self):
    """Create the application process.

    Returns:
        subprocess.Popen: Started process. On Linux, when a mid-process
            launcher is used, the returned process has already finished.
    """
    # Windows and MacOS have easier process start
    low_platform = platform.system().lower()
    if low_platform in ("windows", "darwin"):
        return subprocess.Popen(self.launch_args, **self.kwargs)

    # Linux uses mid process
    # - it is possible that the mid process executable is not
    #   available for this version of AYON in that case use standard
    #   launch
    launch_args = get_linux_launcher_args()
    if launch_args is None:
        return subprocess.Popen(self.launch_args, **self.kwargs)

    # Prepare data that will be passed to midprocess
    # - store arguments to a json and pass path to json as last argument
    # - pass environments to set
    app_env = self.kwargs.pop("env", {})
    json_data = {
        "args": self.launch_args,
        "env": app_env
    }
    if app_env:
        # Filter environments of subprocess
        # - mid process only receives the current-process values for
        #   keys the application also defines
        self.kwargs["env"] = {
            key: value
            for key, value in os.environ.items()
            if key in app_env
        }

    # Create temp file
    json_temp = tempfile.NamedTemporaryFile(
        mode="w", prefix="op_app_args", suffix=".json", delete=False
    )
    json_temp.close()
    json_temp_filpath = json_temp.name
    with open(json_temp_filpath, "w") as stream:
        json.dump(json_data, stream)

    launch_args.append(json_temp_filpath)

    # Create mid-process which will launch application
    process = subprocess.Popen(launch_args, **self.kwargs)
    # Wait until the process finishes
    # - This is important! The process would stay in "open" state.
    process.wait()
    # Remove the temp file
    os.remove(json_temp_filpath)
    # Return process which is already terminated
    return process
|
||||
|
||||
def run_prelaunch_hooks(self):
|
||||
"""Run prelaunch hooks.
|
||||
|
||||
This method will be executed only once, any future calls will skip
|
||||
the processing.
|
||||
"""
|
||||
|
||||
if self._prelaunch_hooks_executed:
|
||||
self.log.warning("Prelaunch hooks were already executed.")
|
||||
return
|
||||
# Discover launch hooks
|
||||
self.discover_launch_hooks()
|
||||
|
||||
# Execute prelaunch hooks
|
||||
for prelaunch_hook in self.prelaunch_hooks:
|
||||
self.log.debug("Executing prelaunch hook: {}".format(
|
||||
str(prelaunch_hook.__class__.__name__)
|
||||
))
|
||||
prelaunch_hook.execute()
|
||||
self._prelaunch_hooks_executed = True
|
||||
|
||||
def launch(self):
    """Collect data for new process and then create it.

    This method must not be executed more than once.

    Returns:
        subprocess.Popen: Created process as Popen object. None when
            application was launched already.
    """
    if self.process is not None:
        self.log.warning("Application was already launched.")
        return

    # Prelaunch hooks run at most once; skip when executed externally
    if not self._prelaunch_hooks_executed:
        self.run_prelaunch_hooks()

    self.log.debug("All prelaunch hook executed. Starting new process.")

    # Prepare subprocess args
    args_len_str = ""
    if isinstance(self.launch_args, str):
        args = self.launch_args
    else:
        # Flatten nested arg lists hooks may have produced
        args = self.clear_launch_args(self.launch_args)
        args_len_str = " ({})".format(len(args))
    self.log.info(
        "Launching \"{}\" with args{}: {}".format(
            self.application.full_name, args_len_str, args
        )
    )
    self.launch_args = args

    # Run process
    self.process = self._run_process()

    # Process post launch hooks
    for postlaunch_hook in self.postlaunch_hooks:
        self.log.debug("Executing postlaunch hook: {}".format(
            str(postlaunch_hook.__class__.__name__)
        ))

        # TODO how to handle errors?
        # - store to variable to let them accessible?
        try:
            postlaunch_hook.execute()

        except Exception:
            # Postlaunch hook failure must not fail the launch itself
            self.log.warning(
                "After launch procedures were not successful.",
                exc_info=True
            )

    self.log.debug("Launch of {} finished.".format(
        self.application.full_name
    ))

    return self.process
|
||||
|
||||
@staticmethod
|
||||
def clear_launch_args(args):
|
||||
"""Collect launch arguments to final order.
|
||||
|
||||
Launch argument should be list that may contain another lists this
|
||||
function will upack inner lists and keep ordering.
|
||||
|
||||
```
|
||||
# source
|
||||
[ [ arg1, [ arg2, arg3 ] ], arg4, [arg5, arg6]]
|
||||
# result
|
||||
[ arg1, arg2, arg3, arg4, arg5, arg6]
|
||||
|
||||
Args:
|
||||
args (list): Source arguments in list may contain inner lists.
|
||||
|
||||
Return:
|
||||
list: Unpacked arguments.
|
||||
"""
|
||||
if isinstance(args, str):
|
||||
return args
|
||||
all_cleared = False
|
||||
while not all_cleared:
|
||||
all_cleared = True
|
||||
new_args = []
|
||||
for arg in args:
|
||||
if isinstance(arg, (list, tuple, set)):
|
||||
all_cleared = False
|
||||
for _arg in arg:
|
||||
new_args.append(_arg)
|
||||
else:
|
||||
new_args.append(arg)
|
||||
args = new_args
|
||||
|
||||
return args
|
||||
|
||||
|
|
@ -0,0 +1,48 @@
|
|||
"""
|
||||
Run after global plugin 'CollectHostName' in ayon_core.
|
||||
|
||||
Requires:
|
||||
None
|
||||
|
||||
Provides:
|
||||
context -> hostName (str)
|
||||
context -> appName (str)
|
||||
context -> appLabel (str)
|
||||
"""
|
||||
import os
|
||||
import pyblish.api
|
||||
|
||||
from ayon_applications import ApplicationManager
|
||||
|
||||
|
||||
class CollectAppName(pyblish.api.ContextPlugin):
    """Collect avalon host name to context."""

    label = "Collect App Name"
    order = pyblish.api.CollectorOrder - 0.499999

    def process(self, context):
        host_name = context.data.get("hostName")
        app_name = context.data.get("appName")
        app_label = context.data.get("appLabel")
        # Everything is already known, keep the existing values
        if host_name and app_name and app_label:
            return

        # Use AYON_APP_NAME to get full app name
        app_name = app_name or os.environ.get("AYON_APP_NAME")

        # Fill missing values based on app full name
        if app_name and not (host_name and app_label):
            app = ApplicationManager().applications.get(app_name)
            if app:
                host_name = host_name or app.host_name
                app_label = app_label or app.full_label

        context.data["hostName"] = host_name
        context.data["appName"] = app_name
        context.data["appLabel"] = app_label
|
||||
609
client/ayon_core/addons/applications/ayon_applications/utils.py
Normal file
609
client/ayon_core/addons/applications/ayon_applications/utils.py
Normal file
|
|
@ -0,0 +1,609 @@
|
|||
import os
|
||||
import copy
|
||||
import json
|
||||
import platform
|
||||
import collections
|
||||
|
||||
import six
|
||||
import acre
|
||||
|
||||
from ayon_core import AYON_CORE_ROOT
|
||||
from ayon_core.settings import get_project_settings
|
||||
from ayon_core.lib import Logger, get_ayon_username
|
||||
from ayon_core.addon import AddonsManager
|
||||
from ayon_core.pipeline import HOST_WORKFILE_EXTENSIONS
|
||||
from ayon_core.pipeline.template_data import get_template_data
|
||||
from ayon_core.pipeline.workfile import (
|
||||
get_workfile_template_key,
|
||||
get_workdir_with_workdir_data,
|
||||
get_last_workfile,
|
||||
should_use_last_workfile_on_launch,
|
||||
should_open_workfiles_tool_on_launch,
|
||||
)
|
||||
|
||||
from .constants import PLATFORM_NAMES, DEFAULT_ENV_SUBGROUP
|
||||
from .exceptions import MissingRequiredKey, ApplicationLaunchFailed
|
||||
from .manager import ApplicationManager
|
||||
|
||||
|
||||
def parse_environments(env_data, env_group=None, platform_name=None):
    """Parse environment values from settings by group and platform.

    Data may contain up to 2 hierarchical levels of dictionaries. At the end
    of the last level must be string or list. List is joined using platform
    specific joiner (';' for windows and ':' for linux and mac).

    Hierarchical levels can contain keys for subgroups and platform name.
    Platform specific values must be always last level of dictionary. Platform
    names are "windows" (MS Windows), "linux" (any linux distribution) and
    "darwin" (any MacOS distribution).

    Subgroups are helpers added mainly for standard and on farm usage. Farm
    may require different environments for e.g. licence related values or
    plugins. Default subgroup is "standard".

    Examples:
        ```
        {
            # Unchanged value
            "ENV_KEY1": "value",
            # Empty values are kept (unset environment variable)
            "ENV_KEY2": "",

            # Join list values with ':' or ';'
            "ENV_KEY3": ["value1", "value2"],

            # Environment groups
            "ENV_KEY4": {
                "standard": "DEMO_SERVER_URL",
                "farm": "LICENCE_SERVER_URL"
            },

            # Platform specific (and only for windows and mac)
            "ENV_KEY5": {
                "windows": "windows value",
                "darwin": ["value 1", "value 2"]
            },

            # Environment groups and platform combination
            "ENV_KEY6": {
                "farm": "FARM_VALUE",
                "standard": {
                    "windows": ["value1", "value2"],
                    "linux": "value1",
                    "darwin": ""
                }
            }
        }
        ```

    Args:
        env_data (dict): Source environment definitions from settings.
        env_group (Optional[str]): Environment subgroup name. Default
            subgroup is used when not passed.
        platform_name (Optional[str]): Platform to pick values for.
            Current platform is used when not passed.

    Returns:
        dict[str, str]: Flattened environment values.
    """
    output = {}
    if not env_data:
        return output

    if not env_group:
        env_group = DEFAULT_ENV_SUBGROUP

    if not platform_name:
        platform_name = platform.system().lower()

    for key, value in env_data.items():
        if isinstance(value, dict):
            # Look if any key is platform key
            # - expect that represents environment group if does not contain
            #   platform keys
            if not PLATFORM_NAMES.intersection(set(value.keys())):
                # Skip the key if group is not available
                if env_group not in value:
                    continue
                value = value[env_group]

            # Check again if value is dictionary
            # - this time there should be only platform keys
            if isinstance(value, dict):
                value = value.get(platform_name)

        # Check if value is list and join it's values
        # QUESTION Should empty values be skipped?
        if isinstance(value, (list, tuple)):
            value = os.pathsep.join(value)

        # Set key to output if value is string
        # - on Python 3 'six.string_types' is just (str,); the shim is
        #   unnecessary
        if isinstance(value, str):
            output[key] = value
    return output
|
||||
|
||||
|
||||
class EnvironmentPrepData(dict):
    """Helper dictionary for storing temp data during environment prep.

    Validates presence of required keys and fills sensible defaults for
    'log', 'env' and 'project_settings' before behaving as a plain dict.

    Args:
        data (dict): Data must contain required keys.

    Raises:
        MissingRequiredKey: When any of 'required_keys' is missing.
    """
    # Keys that must be present in 'data' before preparation starts
    required_keys = (
        "project_entity", "folder_entity", "task_entity", "app", "anatomy"
    )

    def __init__(self, data):
        for key in self.required_keys:
            if key not in data:
                raise MissingRequiredKey(key)

        if not data.get("log"):
            data["log"] = Logger.get_logger("EnvironmentPrepData")

        # Default to a copy of current process environments
        if data.get("env") is None:
            data["env"] = os.environ.copy()

        project_name = data["project_entity"]["name"]
        if "project_settings" not in data:
            data["project_settings"] = get_project_settings(project_name)

        super(EnvironmentPrepData, self).__init__(data)
|
||||
|
||||
|
||||
def get_app_environments_for_context(
    project_name,
    folder_path,
    task_name,
    app_name,
    env_group=None,
    launch_type=None,
    env=None,
    addons_manager=None
):
    """Prepare environment variables by context.

    Args:
        project_name (str): Name of project.
        folder_path (str): Folder path.
        task_name (str): Name of task.
        app_name (str): Name of application that is launched and can be found
            by ApplicationManager.
        env_group (Optional[str]): Name of environment group. If not passed
            default group is used.
        launch_type (Optional[str]): Type for which prelaunch hooks are
            executed.
        env (Optional[dict[str, str]]): Initial environment variables.
            `os.environ` is used when not passed.
        addons_manager (Optional[AddonsManager]): Initialized modules
            manager.

    Returns:
        dict: Environments for passed context and application.
    """
    # Prepare app object which can be obtained only from ApplicationManager
    app_manager = ApplicationManager()
    context = app_manager.create_launch_context(
        app_name,
        project_name=project_name,
        folder_path=folder_path,
        task_name=task_name,
        env_group=env_group,
        launch_type=launch_type,
        env=env,
        addons_manager=addons_manager,
        # NOTE(review): manager passed under old 'modules_manager' name
        #   too — presumably for backward compatibility of hooks; confirm
        modules_manager=addons_manager,
    )
    # Prelaunch hooks fill the environments without launching a process
    context.run_prelaunch_hooks()
    return context.env
|
||||
|
||||
|
||||
def _merge_env(env, current_env):
    """Modified function(merge) from acre module.

    Args:
        env (dict): Environment values to apply on top.
        current_env (dict): Base environments used both as merge target
            and as formatting data.

    Returns:
        dict: Copy of 'current_env' with formatted values of 'env'.
    """
    result = current_env.copy()
    for key, value in env.items():
        # Keep missing keys by not filling `missing` kwarg
        value = acre.lib.partial_format(value, data=current_env)
        result[key] = value
    return result
|
||||
|
||||
|
||||
def _add_python_version_paths(app, env, logger, addons_manager):
    """Add vendor packages specific for a Python version.

    NOTE(review): despite the name this also gives every enabled addon a
    chance to modify launch arguments/environments first.

    Args:
        app: Application definition (uses 'host_name', 'use_python_2').
        env (dict): Environments to modify in place.
        logger: Logger used for debug messages.
        addons_manager: Manager providing enabled addons.
    """
    for addon in addons_manager.get_enabled_addons():
        addon.modify_application_launch_arguments(app, env)

    # Skip adding if host name is not set
    if not app.host_name:
        return

    # Add Python 2/3 modules
    python_vendor_dir = os.path.join(
        AYON_CORE_ROOT,
        "vendor",
        "python"
    )
    if app.use_python_2:
        pythonpath = os.path.join(python_vendor_dir, "python_2")
    else:
        pythonpath = os.path.join(python_vendor_dir, "python_3")

    if not os.path.exists(pythonpath):
        return

    logger.debug("Adding Python version specific paths to PYTHONPATH")
    python_paths = [pythonpath]

    # Load PYTHONPATH from current launch context
    python_path = env.get("PYTHONPATH")
    if python_path:
        python_paths.append(python_path)

    # Set new PYTHONPATH to launch context environments
    env["PYTHONPATH"] = os.pathsep.join(python_paths)
|
||||
|
||||
|
||||
def prepare_app_environments(
    data, env_group=None, implementation_envs=True, addons_manager=None
):
    """Modify launch environments based on launched app and context.

    Merge order matters: addon modifications, then application group and
    variant environments, then folder tool environments (sorted by group
    and tool name), finally host implementation environments.

    Args:
        data (EnvironmentPrepData): Dictionary where result and intermediate
            result will be stored.
        env_group (Optional[str]): Environment subgroup to parse.
        implementation_envs (bool): Apply host specific environments.
        addons_manager (Optional[AddonsManager]): Reused manager, new one
            is created when not passed.
    """
    app = data["app"]
    log = data["log"]
    source_env = data["env"].copy()

    if addons_manager is None:
        addons_manager = AddonsManager()

    _add_python_version_paths(app, source_env, log, addons_manager)

    # Use environments from local settings
    filtered_local_envs = {}
    # NOTE Overrides for environment variables are not implemented in AYON.
    # project_settings = data["project_settings"]
    # whitelist_envs = project_settings["general"].get("local_env_white_list")
    # if whitelist_envs:
    #     local_settings = get_local_settings()
    #     local_envs = local_settings.get("environments") or {}
    #     filtered_local_envs = {
    #         key: value
    #         for key, value in local_envs.items()
    #         if key in whitelist_envs
    #     }

    # Apply local environment variables for already existing values
    for key, value in filtered_local_envs.items():
        if key in source_env:
            source_env[key] = value

    # `app_and_tool_labels` has debug purpose
    app_and_tool_labels = [app.full_name]
    # Environments for application
    environments = [
        app.group.environment,
        app.environment
    ]

    folder_entity = data.get("folder_entity")
    # Add tools environments
    groups_by_name = {}
    tool_by_group_name = collections.defaultdict(dict)
    if folder_entity:
        # Make sure each tool group can be added only once
        for key in folder_entity["attrib"].get("tools") or []:
            tool = app.manager.tools.get(key)
            if not tool or not tool.is_valid_for_app(app):
                continue
            groups_by_name[tool.group.name] = tool.group
            tool_by_group_name[tool.group.name][tool.name] = tool

        # Deterministic order: groups and tools sorted by name
        for group_name in sorted(groups_by_name.keys()):
            group = groups_by_name[group_name]
            environments.append(group.environment)
            for tool_name in sorted(tool_by_group_name[group_name].keys()):
                tool = tool_by_group_name[group_name][tool_name]
                environments.append(tool.environment)
                app_and_tool_labels.append(tool.full_name)

    log.debug(
        "Will add environments for apps and tools: {}".format(
            ", ".join(app_and_tool_labels)
        )
    )

    env_values = {}
    for _env_values in environments:
        if not _env_values:
            continue

        # Choose right platform
        tool_env = parse_environments(_env_values, env_group)

        # Apply local environment variables
        # - must happen between all values because they may be used during
        #   merge
        for key, value in filtered_local_envs.items():
            if key in tool_env:
                tool_env[key] = value

        # Merge dictionaries
        env_values = _merge_env(tool_env, env_values)

    merged_env = _merge_env(env_values, source_env)

    # Resolve cross-references between environment values
    loaded_env = acre.compute(merged_env, cleanup=False)

    final_env = None
    # Add host specific environments
    if app.host_name and implementation_envs:
        host_addon = addons_manager.get_host_addon(app.host_name)
        add_implementation_envs = None
        if host_addon:
            add_implementation_envs = getattr(
                host_addon, "add_implementation_envs", None
            )
        if add_implementation_envs:
            # Function may only modify passed dict without returning value
            final_env = add_implementation_envs(loaded_env, app)

    if final_env is None:
        final_env = loaded_env

    # Keys present in source but removed during preparation must be
    #   unset in the result as well
    keys_to_remove = set(source_env.keys()) - set(final_env.keys())

    # Update env
    data["env"].update(final_env)
    for key in keys_to_remove:
        data["env"].pop(key, None)
|
||||
|
||||
|
||||
def apply_project_environments_value(
|
||||
project_name, env, project_settings=None, env_group=None
|
||||
):
|
||||
"""Apply project specific environments on passed environments.
|
||||
|
||||
The environments are applied on passed `env` argument value so it is not
|
||||
required to apply changes back.
|
||||
|
||||
Args:
|
||||
project_name (str): Name of project for which environments should be
|
||||
received.
|
||||
env (dict): Environment values on which project specific environments
|
||||
will be applied.
|
||||
project_settings (dict): Project settings for passed project name.
|
||||
Optional if project settings are already prepared.
|
||||
|
||||
Returns:
|
||||
dict: Passed env values with applied project environments.
|
||||
|
||||
Raises:
|
||||
KeyError: If project settings do not contain keys for project specific
|
||||
environments.
|
||||
|
||||
"""
|
||||
if project_settings is None:
|
||||
project_settings = get_project_settings(project_name)
|
||||
|
||||
env_value = project_settings["core"]["project_environments"]
|
||||
if env_value:
|
||||
env_value = json.loads(env_value)
|
||||
parsed_value = parse_environments(env_value, env_group)
|
||||
env.update(acre.compute(
|
||||
_merge_env(parsed_value, env),
|
||||
cleanup=False
|
||||
))
|
||||
return env
|
||||
|
||||
|
||||
def prepare_context_environments(data, env_group=None, addons_manager=None):
|
||||
"""Modify launch environments with context data for launched host.
|
||||
|
||||
Args:
|
||||
data (EnvironmentPrepData): Dictionary where result and intermediate
|
||||
result will be stored.
|
||||
|
||||
"""
|
||||
# Context environments
|
||||
log = data["log"]
|
||||
|
||||
project_entity = data["project_entity"]
|
||||
folder_entity = data["folder_entity"]
|
||||
task_entity = data["task_entity"]
|
||||
if not project_entity:
|
||||
log.info(
|
||||
"Skipping context environments preparation."
|
||||
" Launch context does not contain required data."
|
||||
)
|
||||
return
|
||||
|
||||
# Load project specific environments
|
||||
project_name = project_entity["name"]
|
||||
project_settings = get_project_settings(project_name)
|
||||
data["project_settings"] = project_settings
|
||||
|
||||
app = data["app"]
|
||||
context_env = {
|
||||
"AYON_PROJECT_NAME": project_entity["name"],
|
||||
"AYON_APP_NAME": app.full_name
|
||||
}
|
||||
if folder_entity:
|
||||
folder_path = folder_entity["path"]
|
||||
context_env["AYON_FOLDER_PATH"] = folder_path
|
||||
|
||||
if task_entity:
|
||||
context_env["AYON_TASK_NAME"] = task_entity["name"]
|
||||
|
||||
log.debug(
|
||||
"Context environments set:\n{}".format(
|
||||
json.dumps(context_env, indent=4)
|
||||
)
|
||||
)
|
||||
data["env"].update(context_env)
|
||||
|
||||
# Apply project specific environments on current env value
|
||||
# - apply them once the context environments are set
|
||||
apply_project_environments_value(
|
||||
project_name, data["env"], project_settings, env_group
|
||||
)
|
||||
|
||||
if not app.is_host:
|
||||
return
|
||||
|
||||
data["env"]["AYON_HOST_NAME"] = app.host_name
|
||||
|
||||
if not folder_entity or not task_entity:
|
||||
# QUESTION replace with log.info and skip workfile discovery?
|
||||
# - technically it should be possible to launch host without context
|
||||
raise ApplicationLaunchFailed(
|
||||
"Host launch require folder and task context."
|
||||
)
|
||||
|
||||
workdir_data = get_template_data(
|
||||
project_entity,
|
||||
folder_entity,
|
||||
task_entity,
|
||||
app.host_name,
|
||||
project_settings
|
||||
)
|
||||
data["workdir_data"] = workdir_data
|
||||
|
||||
anatomy = data["anatomy"]
|
||||
|
||||
task_type = workdir_data["task"]["type"]
|
||||
# Temp solution how to pass task type to `_prepare_last_workfile`
|
||||
data["task_type"] = task_type
|
||||
|
||||
try:
|
||||
workdir = get_workdir_with_workdir_data(
|
||||
workdir_data,
|
||||
anatomy.project_name,
|
||||
anatomy,
|
||||
project_settings=project_settings
|
||||
)
|
||||
|
||||
except Exception as exc:
|
||||
raise ApplicationLaunchFailed(
|
||||
"Error in anatomy.format: {}".format(str(exc))
|
||||
)
|
||||
|
||||
if not os.path.exists(workdir):
|
||||
log.debug(
|
||||
"Creating workdir folder: \"{}\"".format(workdir)
|
||||
)
|
||||
try:
|
||||
os.makedirs(workdir)
|
||||
except Exception as exc:
|
||||
raise ApplicationLaunchFailed(
|
||||
"Couldn't create workdir because: {}".format(str(exc))
|
||||
)
|
||||
|
||||
data["env"]["AYON_WORKDIR"] = workdir
|
||||
|
||||
_prepare_last_workfile(data, workdir, addons_manager)
|
||||
|
||||
|
||||
def _prepare_last_workfile(data, workdir, addons_manager):
|
||||
"""last workfile workflow preparation.
|
||||
|
||||
Function check if should care about last workfile workflow and tries
|
||||
to find the last workfile. Both information are stored to `data` and
|
||||
environments.
|
||||
|
||||
Last workfile is filled always (with version 1) even if any workfile
|
||||
exists yet.
|
||||
|
||||
Args:
|
||||
data (EnvironmentPrepData): Dictionary where result and intermediate
|
||||
result will be stored.
|
||||
workdir (str): Path to folder where workfiles should be stored.
|
||||
|
||||
"""
|
||||
if not addons_manager:
|
||||
addons_manager = AddonsManager()
|
||||
|
||||
log = data["log"]
|
||||
|
||||
_workdir_data = data.get("workdir_data")
|
||||
if not _workdir_data:
|
||||
log.info(
|
||||
"Skipping last workfile preparation."
|
||||
" Key `workdir_data` not filled."
|
||||
)
|
||||
return
|
||||
|
||||
app = data["app"]
|
||||
workdir_data = copy.deepcopy(_workdir_data)
|
||||
project_name = data["project_name"]
|
||||
task_name = data["task_name"]
|
||||
task_type = data["task_type"]
|
||||
|
||||
start_last_workfile = data.get("start_last_workfile")
|
||||
if start_last_workfile is None:
|
||||
start_last_workfile = should_use_last_workfile_on_launch(
|
||||
project_name, app.host_name, task_name, task_type
|
||||
)
|
||||
else:
|
||||
log.info("Opening of last workfile was disabled by user")
|
||||
|
||||
data["start_last_workfile"] = start_last_workfile
|
||||
|
||||
workfile_startup = should_open_workfiles_tool_on_launch(
|
||||
project_name, app.host_name, task_name, task_type
|
||||
)
|
||||
data["workfile_startup"] = workfile_startup
|
||||
|
||||
# Store boolean as "0"(False) or "1"(True)
|
||||
data["env"]["AVALON_OPEN_LAST_WORKFILE"] = (
|
||||
str(int(bool(start_last_workfile)))
|
||||
)
|
||||
data["env"]["AYON_WORKFILE_TOOL_ON_START"] = (
|
||||
str(int(bool(workfile_startup)))
|
||||
)
|
||||
|
||||
_sub_msg = "" if start_last_workfile else " not"
|
||||
log.debug(
|
||||
"Last workfile should{} be opened on start.".format(_sub_msg)
|
||||
)
|
||||
|
||||
# Last workfile path
|
||||
last_workfile_path = data.get("last_workfile_path") or ""
|
||||
if not last_workfile_path:
|
||||
host_addon = addons_manager.get_host_addon(app.host_name)
|
||||
if host_addon:
|
||||
extensions = host_addon.get_workfile_extensions()
|
||||
else:
|
||||
extensions = HOST_WORKFILE_EXTENSIONS.get(app.host_name)
|
||||
|
||||
if extensions:
|
||||
anatomy = data["anatomy"]
|
||||
project_settings = data["project_settings"]
|
||||
task_type = workdir_data["task"]["type"]
|
||||
template_key = get_workfile_template_key(
|
||||
project_name,
|
||||
task_type,
|
||||
app.host_name,
|
||||
project_settings=project_settings
|
||||
)
|
||||
# Find last workfile
|
||||
file_template = anatomy.get_template_item(
|
||||
"work", template_key, "file"
|
||||
).template
|
||||
|
||||
workdir_data.update({
|
||||
"version": 1,
|
||||
"user": get_ayon_username(),
|
||||
"ext": extensions[0]
|
||||
})
|
||||
|
||||
last_workfile_path = get_last_workfile(
|
||||
workdir, file_template, workdir_data, extensions, True
|
||||
)
|
||||
|
||||
if os.path.exists(last_workfile_path):
|
||||
log.debug((
|
||||
"Workfiles for launch context does not exists"
|
||||
" yet but path will be set."
|
||||
))
|
||||
log.debug(
|
||||
"Setting last workfile path: {}".format(last_workfile_path)
|
||||
)
|
||||
|
||||
data["env"]["AYON_LAST_WORKFILE"] = last_workfile_path
|
||||
data["last_workfile_path"] = last_workfile_path
|
||||
|
|
@ -81,7 +81,7 @@ main_cli.set_alias("addon", "module")
|
|||
@main_cli.command()
|
||||
@click.argument("output_json_path")
|
||||
@click.option("--project", help="Project name", default=None)
|
||||
@click.option("--asset", help="Asset name", default=None)
|
||||
@click.option("--asset", help="Folder path", default=None)
|
||||
@click.option("--task", help="Task name", default=None)
|
||||
@click.option("--app", help="Application name", default=None)
|
||||
@click.option(
|
||||
|
|
@ -96,6 +96,10 @@ def extractenvironments(output_json_path, project, asset, task, app, envgroup):
|
|||
environments will be extracted.
|
||||
|
||||
Context options are "project", "asset", "task", "app"
|
||||
|
||||
Deprecated:
|
||||
This function is deprecated and will be removed in future. Please use
|
||||
'addon applications extractenvironments ...' instead.
|
||||
"""
|
||||
Commands.extractenvironments(
|
||||
output_json_path, project, asset, task, app, envgroup
|
||||
|
|
@ -127,7 +131,7 @@ def publish_report_viewer():
|
|||
@main_cli.command()
|
||||
@click.argument("output_path")
|
||||
@click.option("--project", help="Define project context")
|
||||
@click.option("--asset", help="Define asset in project (project must be set)")
|
||||
@click.option("--folder", help="Define folder in project (project must be set)")
|
||||
@click.option(
|
||||
"--strict",
|
||||
is_flag=True,
|
||||
|
|
@ -136,18 +140,18 @@ def publish_report_viewer():
|
|||
def contextselection(
|
||||
output_path,
|
||||
project,
|
||||
asset,
|
||||
folder,
|
||||
strict
|
||||
):
|
||||
"""Show Qt dialog to select context.
|
||||
|
||||
Context is project name, asset name and task name. The result is stored
|
||||
Context is project name, folder path and task name. The result is stored
|
||||
into json file which path is passed in first argument.
|
||||
"""
|
||||
Commands.contextselection(
|
||||
output_path,
|
||||
project,
|
||||
asset,
|
||||
folder,
|
||||
strict
|
||||
)
|
||||
|
||||
|
|
|
|||
|
|
@ -2,7 +2,7 @@
|
|||
"""Implementation of AYON commands."""
|
||||
import os
|
||||
import sys
|
||||
import json
|
||||
import warnings
|
||||
|
||||
|
||||
class Commands:
|
||||
|
|
@ -57,10 +57,7 @@ class Commands:
|
|||
|
||||
"""
|
||||
from ayon_core.lib import Logger
|
||||
from ayon_core.lib.applications import (
|
||||
get_app_environments_for_context,
|
||||
LaunchTypes,
|
||||
)
|
||||
|
||||
from ayon_core.addon import AddonsManager
|
||||
from ayon_core.pipeline import (
|
||||
install_ayon_plugins,
|
||||
|
|
@ -68,7 +65,6 @@ class Commands:
|
|||
)
|
||||
|
||||
# Register target and host
|
||||
import pyblish.api
|
||||
import pyblish.util
|
||||
|
||||
if not isinstance(path, str):
|
||||
|
|
@ -99,15 +95,13 @@ class Commands:
|
|||
for plugin_path in publish_paths:
|
||||
pyblish.api.register_plugin_path(plugin_path)
|
||||
|
||||
app_full_name = os.getenv("AYON_APP_NAME")
|
||||
if app_full_name:
|
||||
applications_addon = manager.get_enabled_addon("applications")
|
||||
if applications_addon is not None:
|
||||
context = get_global_context()
|
||||
env = get_app_environments_for_context(
|
||||
env = applications_addon.get_farm_publish_environment_variables(
|
||||
context["project_name"],
|
||||
context["folder_path"],
|
||||
context["task_name"],
|
||||
app_full_name,
|
||||
launch_type=LaunchTypes.farm_publish,
|
||||
)
|
||||
os.environ.update(env)
|
||||
|
||||
|
|
@ -149,36 +143,36 @@ class Commands:
|
|||
log.info("Publish finished.")
|
||||
|
||||
@staticmethod
|
||||
def extractenvironments(output_json_path, project, asset, task, app,
|
||||
env_group):
|
||||
def extractenvironments(
|
||||
output_json_path, project, asset, task, app, env_group
|
||||
):
|
||||
"""Produces json file with environment based on project and app.
|
||||
|
||||
Called by Deadline plugin to propagate environment into render jobs.
|
||||
"""
|
||||
|
||||
from ayon_core.lib.applications import (
|
||||
get_app_environments_for_context,
|
||||
LaunchTypes,
|
||||
from ayon_core.addon import AddonsManager
|
||||
|
||||
warnings.warn(
|
||||
(
|
||||
"Command 'extractenvironments' is deprecated and will be"
|
||||
" removed in future. Please use "
|
||||
"'addon applications extractenvironments ...' instead."
|
||||
),
|
||||
DeprecationWarning
|
||||
)
|
||||
|
||||
if all((project, asset, task, app)):
|
||||
env = get_app_environments_for_context(
|
||||
project,
|
||||
asset,
|
||||
task,
|
||||
app,
|
||||
env_group=env_group,
|
||||
launch_type=LaunchTypes.farm_render
|
||||
addons_manager = AddonsManager()
|
||||
applications_addon = addons_manager.get_enabled_addon("applications")
|
||||
if applications_addon is None:
|
||||
raise RuntimeError(
|
||||
"Applications addon is not available or enabled."
|
||||
)
|
||||
else:
|
||||
env = os.environ.copy()
|
||||
|
||||
output_dir = os.path.dirname(output_json_path)
|
||||
if not os.path.exists(output_dir):
|
||||
os.makedirs(output_dir)
|
||||
|
||||
with open(output_json_path, "w") as file_stream:
|
||||
json.dump(env, file_stream, indent=4)
|
||||
# Please ignore the fact this is using private method
|
||||
applications_addon._cli_extract_environments(
|
||||
output_json_path, project, asset, task, app, env_group
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def contextselection(output_path, project_name, folder_path, strict):
|
||||
|
|
|
|||
|
|
@ -1,6 +1,6 @@
|
|||
import os
|
||||
|
||||
from ayon_core.lib.applications import PreLaunchHook, LaunchTypes
|
||||
from ayon_applications import PreLaunchHook, LaunchTypes
|
||||
|
||||
|
||||
class AddLastWorkfileToLaunchArgs(PreLaunchHook):
|
||||
|
|
|
|||
|
|
@ -1,7 +1,7 @@
|
|||
import os
|
||||
import shutil
|
||||
from ayon_core.settings import get_project_settings
|
||||
from ayon_core.lib.applications import PreLaunchHook, LaunchTypes
|
||||
from ayon_applications import PreLaunchHook, LaunchTypes
|
||||
from ayon_core.pipeline.workfile import (
|
||||
get_custom_workfile_template,
|
||||
get_custom_workfile_template_by_string_context
|
||||
|
|
|
|||
|
|
@ -1,5 +1,5 @@
|
|||
import os
|
||||
from ayon_core.lib.applications import PreLaunchHook, LaunchTypes
|
||||
from ayon_applications import PreLaunchHook, LaunchTypes
|
||||
from ayon_core.pipeline.workfile import create_workdir_extra_folders
|
||||
|
||||
|
||||
|
|
|
|||
|
|
@ -1,7 +1,7 @@
|
|||
from ayon_api import get_project, get_folder_by_path, get_task_by_name
|
||||
|
||||
from ayon_core.lib.applications import (
|
||||
PreLaunchHook,
|
||||
from ayon_applications import PreLaunchHook
|
||||
from ayon_applications.utils import (
|
||||
EnvironmentPrepData,
|
||||
prepare_app_environments,
|
||||
prepare_context_environments
|
||||
|
|
|
|||
|
|
@ -1,5 +1,5 @@
|
|||
import os
|
||||
from ayon_core.lib.applications import PreLaunchHook, LaunchTypes
|
||||
from ayon_applications import PreLaunchHook, LaunchTypes
|
||||
|
||||
|
||||
class LaunchWithTerminal(PreLaunchHook):
|
||||
|
|
|
|||
|
|
@ -1,5 +1,5 @@
|
|||
import subprocess
|
||||
from ayon_core.lib.applications import PreLaunchHook, LaunchTypes
|
||||
from ayon_applications import PreLaunchHook, LaunchTypes
|
||||
|
||||
|
||||
class LaunchNewConsoleApps(PreLaunchHook):
|
||||
|
|
|
|||
|
|
@ -1,4 +1,4 @@
|
|||
from ayon_core.lib.applications import PreLaunchHook
|
||||
from ayon_applications import PreLaunchHook
|
||||
|
||||
from ayon_core.pipeline.colorspace import get_imageio_config
|
||||
from ayon_core.pipeline.template_data import get_template_data_with_names
|
||||
|
|
|
|||
|
|
@ -6,10 +6,7 @@ from ayon_core.lib import (
|
|||
get_ayon_launcher_args,
|
||||
is_using_ayon_console,
|
||||
)
|
||||
from ayon_core.lib.applications import (
|
||||
PreLaunchHook,
|
||||
LaunchTypes,
|
||||
)
|
||||
from ayon_applications import PreLaunchHook, LaunchTypes
|
||||
from ayon_core.hosts.aftereffects import get_launch_script_path
|
||||
|
||||
|
||||
|
|
|
|||
|
|
@ -1,6 +1,6 @@
|
|||
from pathlib import Path
|
||||
|
||||
from ayon_core.lib.applications import PreLaunchHook, LaunchTypes
|
||||
from ayon_applications import PreLaunchHook, LaunchTypes
|
||||
|
||||
|
||||
class AddPythonScriptToLaunchArgs(PreLaunchHook):
|
||||
|
|
|
|||
|
|
@ -2,7 +2,7 @@ import os
|
|||
import re
|
||||
import subprocess
|
||||
from platform import system
|
||||
from ayon_core.lib.applications import PreLaunchHook, LaunchTypes
|
||||
from ayon_applications import PreLaunchHook, LaunchTypes
|
||||
|
||||
|
||||
class InstallPySideToBlender(PreLaunchHook):
|
||||
|
|
|
|||
|
|
@ -1,5 +1,5 @@
|
|||
import subprocess
|
||||
from ayon_core.lib.applications import PreLaunchHook, LaunchTypes
|
||||
from ayon_applications import PreLaunchHook, LaunchTypes
|
||||
|
||||
|
||||
class BlenderConsoleWindows(PreLaunchHook):
|
||||
|
|
|
|||
|
|
@ -3,7 +3,7 @@ import shutil
|
|||
import winreg
|
||||
import subprocess
|
||||
from ayon_core.lib import get_ayon_launcher_args
|
||||
from ayon_core.lib.applications import PreLaunchHook, LaunchTypes
|
||||
from ayon_applications import PreLaunchHook, LaunchTypes
|
||||
from ayon_core.hosts.celaction import CELACTION_ROOT_DIR
|
||||
|
||||
|
||||
|
|
|
|||
|
|
@ -9,7 +9,7 @@ from ayon_core.lib import (
|
|||
get_ayon_username,
|
||||
run_subprocess,
|
||||
)
|
||||
from ayon_core.lib.applications import PreLaunchHook, LaunchTypes
|
||||
from ayon_applications import PreLaunchHook, LaunchTypes
|
||||
from ayon_core.hosts import flame as opflame
|
||||
|
||||
|
||||
|
|
|
|||
|
|
@ -7,7 +7,7 @@ from ayon_core.lib import Logger
|
|||
|
||||
from ayon_core.pipeline import registered_host
|
||||
from ayon_core.pipeline.create import CreateContext
|
||||
from ayon_core.pipeline.context_tools import get_current_project_folder
|
||||
from ayon_core.pipeline.context_tools import get_current_folder_entity
|
||||
|
||||
self = sys.modules[__name__]
|
||||
self._project = None
|
||||
|
|
@ -57,7 +57,7 @@ def update_frame_range(start, end, comp=None, set_render_range=True,
|
|||
def set_current_context_framerange(folder_entity=None):
|
||||
"""Set Comp's frame range based on current folder."""
|
||||
if folder_entity is None:
|
||||
folder_entity = get_current_project_folder(
|
||||
folder_entity = get_current_folder_entity(
|
||||
fields={"attrib.frameStart",
|
||||
"attrib.frameEnd",
|
||||
"attrib.handleStart",
|
||||
|
|
@ -76,7 +76,7 @@ def set_current_context_framerange(folder_entity=None):
|
|||
def set_current_context_fps(folder_entity=None):
|
||||
"""Set Comp's frame rate (FPS) to based on current asset"""
|
||||
if folder_entity is None:
|
||||
folder_entity = get_current_project_folder(fields={"attrib.fps"})
|
||||
folder_entity = get_current_folder_entity(fields={"attrib.fps"})
|
||||
|
||||
fps = float(folder_entity["attrib"].get("fps", 24.0))
|
||||
comp = get_current_comp()
|
||||
|
|
@ -88,7 +88,7 @@ def set_current_context_fps(folder_entity=None):
|
|||
def set_current_context_resolution(folder_entity=None):
|
||||
"""Set Comp's resolution width x height default based on current folder"""
|
||||
if folder_entity is None:
|
||||
folder_entity = get_current_project_folder(
|
||||
folder_entity = get_current_folder_entity(
|
||||
fields={"attrib.resolutionWidth", "attrib.resolutionHeight"})
|
||||
|
||||
folder_attributes = folder_entity["attrib"]
|
||||
|
|
@ -124,7 +124,7 @@ def validate_comp_prefs(comp=None, force_repair=False):
|
|||
"attrib.resolutionHeight",
|
||||
"attrib.pixelAspect",
|
||||
}
|
||||
folder_entity = get_current_project_folder(fields=fields)
|
||||
folder_entity = get_current_folder_entity(fields=fields)
|
||||
folder_path = folder_entity["path"]
|
||||
folder_attributes = folder_entity["attrib"]
|
||||
|
||||
|
|
@ -389,7 +389,7 @@ def prompt_reset_context():
|
|||
return None
|
||||
|
||||
options = dialog.get_values()
|
||||
folder_entity = get_current_project_folder()
|
||||
folder_entity = get_current_folder_entity()
|
||||
if options["frame_range"]:
|
||||
set_current_context_framerange(folder_entity)
|
||||
|
||||
|
|
|
|||
|
|
@ -43,7 +43,7 @@ CREATE_PATH = os.path.join(PLUGINS_DIR, "create")
|
|||
INVENTORY_PATH = os.path.join(PLUGINS_DIR, "inventory")
|
||||
|
||||
# Track whether the workfile tool is about to save
|
||||
ABOUT_TO_SAVE = False
|
||||
_about_to_save = False
|
||||
|
||||
|
||||
class FusionLogHandler(logging.Handler):
|
||||
|
|
@ -176,15 +176,15 @@ def on_save(event):
|
|||
validate_comp_prefs(comp)
|
||||
|
||||
# We are now starting the actual save directly
|
||||
global ABOUT_TO_SAVE
|
||||
ABOUT_TO_SAVE = False
|
||||
global _about_to_save
|
||||
_about_to_save = False
|
||||
|
||||
|
||||
def on_task_changed():
|
||||
global ABOUT_TO_SAVE
|
||||
print(f"Task changed: {ABOUT_TO_SAVE}")
|
||||
global _about_to_save
|
||||
print(f"Task changed: {_about_to_save}")
|
||||
# TODO: Only do this if not headless
|
||||
if ABOUT_TO_SAVE:
|
||||
if _about_to_save:
|
||||
# Let's prompt the user to update the context settings or not
|
||||
prompt_reset_context()
|
||||
|
||||
|
|
@ -228,7 +228,7 @@ def before_workfile_save(event):
|
|||
# have been shut down, and restarted - which will restart it to the
|
||||
# environment Fusion started with; not necessarily where the artist
|
||||
# is currently working.
|
||||
# The `ABOUT_TO_SAVE` var is used to detect context changes when
|
||||
# The `_about_to_save` var is used to detect context changes when
|
||||
# saving into another asset. If we keep it False it will be ignored
|
||||
# as context change. As such, before we change tasks we will only
|
||||
# consider it the current filepath is within the currently known
|
||||
|
|
@ -239,8 +239,8 @@ def before_workfile_save(event):
|
|||
filepath = comp.GetAttrs()["COMPS_FileName"]
|
||||
workdir = os.environ.get("AYON_WORKDIR")
|
||||
if Path(workdir) in Path(filepath).parents:
|
||||
global ABOUT_TO_SAVE
|
||||
ABOUT_TO_SAVE = True
|
||||
global _about_to_save
|
||||
_about_to_save = True
|
||||
|
||||
|
||||
def ls():
|
||||
|
|
|
|||
|
|
@ -7,7 +7,7 @@ from ayon_core.hosts.fusion import (
|
|||
FUSION_VERSIONS_DICT,
|
||||
get_fusion_version,
|
||||
)
|
||||
from ayon_core.lib.applications import (
|
||||
from ayon_applications import (
|
||||
PreLaunchHook,
|
||||
LaunchTypes,
|
||||
ApplicationLaunchFailed,
|
||||
|
|
|
|||
|
|
@ -1,5 +1,5 @@
|
|||
import os
|
||||
from ayon_core.lib.applications import (
|
||||
from ayon_applications import (
|
||||
PreLaunchHook,
|
||||
LaunchTypes,
|
||||
ApplicationLaunchFailed,
|
||||
|
|
|
|||
|
|
@ -3,7 +3,7 @@ import subprocess
|
|||
import platform
|
||||
import uuid
|
||||
|
||||
from ayon_core.lib.applications import PreLaunchHook, LaunchTypes
|
||||
from ayon_applications import PreLaunchHook, LaunchTypes
|
||||
|
||||
|
||||
class InstallPySideToFusion(PreLaunchHook):
|
||||
|
|
|
|||
|
|
@ -13,7 +13,7 @@ from ayon_core.pipeline import (
|
|||
AVALON_CONTAINER_ID,
|
||||
)
|
||||
from ayon_core.pipeline.load import get_outdated_containers
|
||||
from ayon_core.pipeline.context_tools import get_current_project_folder
|
||||
from ayon_core.pipeline.context_tools import get_current_folder_entity
|
||||
|
||||
from ayon_core.hosts.harmony import HARMONY_ADDON_ROOT
|
||||
import ayon_core.hosts.harmony.api as harmony
|
||||
|
|
@ -50,7 +50,7 @@ def get_current_context_settings():
|
|||
|
||||
"""
|
||||
|
||||
folder_entity = get_current_project_folder()
|
||||
folder_entity = get_current_folder_entity()
|
||||
folder_attributes = folder_entity["attrib"]
|
||||
|
||||
fps = folder_attributes.get("fps")
|
||||
|
|
|
|||
|
|
@ -6,10 +6,7 @@ from ayon_core.lib import (
|
|||
get_ayon_launcher_args,
|
||||
is_using_ayon_console,
|
||||
)
|
||||
from ayon_core.lib.applications import (
|
||||
PreLaunchHook,
|
||||
LaunchTypes,
|
||||
)
|
||||
from ayon_applications import PreLaunchHook, LaunchTypes
|
||||
from ayon_core.hosts.harmony import get_launch_script_path
|
||||
|
||||
|
||||
|
|
|
|||
|
|
@ -248,8 +248,12 @@ def get_track_items(
|
|||
# collect all available active sequence track items
|
||||
if not return_list:
|
||||
sequence = get_current_sequence(name=sequence_name)
|
||||
# get all available tracks from sequence
|
||||
tracks = list(sequence.audioTracks()) + list(sequence.videoTracks())
|
||||
tracks = []
|
||||
if sequence is not None:
|
||||
# get all available tracks from sequence
|
||||
tracks.extend(sequence.audioTracks())
|
||||
tracks.extend(sequence.videoTracks())
|
||||
|
||||
# loop all tracks
|
||||
for track in tracks:
|
||||
if check_locked and track.isLocked():
|
||||
|
|
|
|||
|
|
@ -90,7 +90,7 @@ class PrecollectInstances(pyblish.api.ContextPlugin):
|
|||
if "entity_type" in parent:
|
||||
parent["folder_type"] = parent.pop("entity_type")
|
||||
|
||||
asset, asset_name = self._get_folder_data(tag_data)
|
||||
folder_path, folder_name = self._get_folder_data(tag_data)
|
||||
|
||||
product_name = tag_data.get("productName")
|
||||
if product_name is None:
|
||||
|
|
@ -98,12 +98,6 @@ class PrecollectInstances(pyblish.api.ContextPlugin):
|
|||
|
||||
families = [str(f) for f in tag_data["families"]]
|
||||
|
||||
# form label
|
||||
label = "{} -".format(asset)
|
||||
if asset_name != clip_name:
|
||||
label += " ({})".format(clip_name)
|
||||
label += " {}".format(product_name)
|
||||
|
||||
# TODO: remove backward compatibility
|
||||
product_name = tag_data.get("productName")
|
||||
if product_name is None:
|
||||
|
|
@ -113,7 +107,7 @@ class PrecollectInstances(pyblish.api.ContextPlugin):
|
|||
# backward compatibility: product_name should not be missing
|
||||
if not product_name:
|
||||
self.log.error(
|
||||
"Product name is not defined for: {}".format(asset))
|
||||
"Product name is not defined for: {}".format(folder_path))
|
||||
|
||||
# TODO: remove backward compatibility
|
||||
product_type = tag_data.get("productType")
|
||||
|
|
@ -124,15 +118,21 @@ class PrecollectInstances(pyblish.api.ContextPlugin):
|
|||
# backward compatibility: product_type should not be missing
|
||||
if not product_type:
|
||||
self.log.error(
|
||||
"Product type is not defined for: {}".format(asset))
|
||||
"Product type is not defined for: {}".format(folder_path))
|
||||
|
||||
# form label
|
||||
label = "{} -".format(folder_path)
|
||||
if folder_name != clip_name:
|
||||
label += " ({})".format(clip_name)
|
||||
label += " {}".format(product_name)
|
||||
|
||||
data.update({
|
||||
"name": "{}_{}".format(asset, product_name),
|
||||
"name": "{}_{}".format(folder_path, product_name),
|
||||
"label": label,
|
||||
"folderPath": asset,
|
||||
"asset_name": asset_name,
|
||||
"productName": product_name,
|
||||
"productType": product_type,
|
||||
"folderPath": folder_path,
|
||||
"asset_name": folder_name,
|
||||
"item": track_item,
|
||||
"families": families,
|
||||
"publish": tag_data["publish"],
|
||||
|
|
@ -222,19 +222,19 @@ class PrecollectInstances(pyblish.api.ContextPlugin):
|
|||
if not hierarchy_data:
|
||||
return
|
||||
|
||||
asset = data["folderPath"]
|
||||
asset_name = data["asset_name"]
|
||||
folder_path = data["folderPath"]
|
||||
folder_name = data["asset_name"]
|
||||
|
||||
product_type = "shot"
|
||||
|
||||
# form label
|
||||
label = "{} -".format(asset)
|
||||
if asset_name != clip_name:
|
||||
label = "{} -".format(folder_path)
|
||||
if folder_name != clip_name:
|
||||
label += " ({}) ".format(clip_name)
|
||||
label += " {}".format(product_name)
|
||||
|
||||
data.update({
|
||||
"name": "{}_{}".format(asset, product_name),
|
||||
"name": "{}_{}".format(folder_path, product_name),
|
||||
"label": label,
|
||||
"productName": product_name,
|
||||
"productType": product_type,
|
||||
|
|
@ -281,19 +281,19 @@ class PrecollectInstances(pyblish.api.ContextPlugin):
|
|||
if not self.test_any_audio(item):
|
||||
return
|
||||
|
||||
asset = data["folderPath"]
|
||||
folder_path = data["folderPath"]
|
||||
asset_name = data["asset_name"]
|
||||
|
||||
product_type = "audio"
|
||||
|
||||
# form label
|
||||
label = "{} -".format(asset)
|
||||
label = "{} -".format(folder_path)
|
||||
if asset_name != clip_name:
|
||||
label += " ({}) ".format(clip_name)
|
||||
label += " {}".format(product_name)
|
||||
|
||||
data.update({
|
||||
"name": "{}_{}".format(asset, product_name),
|
||||
"name": "{}_{}".format(folder_path, subset),
|
||||
"label": label,
|
||||
"productName": product_name,
|
||||
"productType": product_type,
|
||||
|
|
|
|||
|
|
@ -17,8 +17,8 @@ class PrecollectWorkfile(pyblish.api.ContextPlugin):
|
|||
order = pyblish.api.CollectorOrder - 0.491
|
||||
|
||||
def process(self, context):
|
||||
asset = context.data["folderPath"]
|
||||
asset_name = asset.split("/")[-1]
|
||||
folder_path = context.data["folderPath"]
|
||||
folder_name = folder_path.split("/")[-1]
|
||||
|
||||
active_timeline = hiero.ui.activeSequence()
|
||||
project = active_timeline.project()
|
||||
|
|
@ -62,12 +62,12 @@ class PrecollectWorkfile(pyblish.api.ContextPlugin):
|
|||
product_type = "workfile"
|
||||
instance_data = {
|
||||
"label": "{} - {}Main".format(
|
||||
asset, product_type),
|
||||
"name": "{}_{}".format(asset_name, product_type),
|
||||
"folderPath": context.data["folderPath"],
|
||||
folder_path, product_type),
|
||||
"name": "{}_{}".format(folder_name, product_type),
|
||||
"folderPath": folder_path,
|
||||
# TODO use 'get_product_name'
|
||||
"productName": "{}{}Main".format(
|
||||
asset_name, product_type.capitalize()
|
||||
folder_name, product_type.capitalize()
|
||||
),
|
||||
"item": project,
|
||||
"productType": product_type,
|
||||
|
|
|
|||
|
|
@ -22,7 +22,7 @@ from ayon_core.pipeline import (
|
|||
)
|
||||
from ayon_core.pipeline.create import CreateContext
|
||||
from ayon_core.pipeline.template_data import get_template_data
|
||||
from ayon_core.pipeline.context_tools import get_current_project_folder
|
||||
from ayon_core.pipeline.context_tools import get_current_folder_entity
|
||||
from ayon_core.tools.utils import PopupUpdateKeys, SimplePopup
|
||||
from ayon_core.tools.utils.host_tools import get_tool_by_name
|
||||
|
||||
|
|
@ -39,7 +39,7 @@ def get_folder_fps(folder_entity=None):
|
|||
"""Return current folder fps."""
|
||||
|
||||
if folder_entity is None:
|
||||
folder_entity = get_current_project_folder(fields=["attrib.fps"])
|
||||
folder_entity = get_current_folder_entity(fields=["attrib.fps"])
|
||||
return folder_entity["attrib"]["fps"]
|
||||
|
||||
|
||||
|
|
@ -243,7 +243,10 @@ def render_rop(ropnode):
|
|||
try:
|
||||
ropnode.render(verbose=verbose,
|
||||
# Allow Deadline to capture completion percentage
|
||||
output_progress=verbose)
|
||||
output_progress=verbose,
|
||||
# Render only this node
|
||||
# (do not render any of its dependencies)
|
||||
ignore_inputs=True)
|
||||
except hou.Error as exc:
|
||||
# The hou.Error is not inherited from a Python Exception class,
|
||||
# so we explicitly capture the houdini error, otherwise pyblish
|
||||
|
|
@ -738,7 +741,7 @@ def set_camera_resolution(camera, folder_entity=None):
|
|||
"""Apply resolution to camera from folder entity of the publish"""
|
||||
|
||||
if not folder_entity:
|
||||
folder_entity = get_current_project_folder()
|
||||
folder_entity = get_current_folder_entity()
|
||||
|
||||
resolution = get_resolution_from_folder(folder_entity)
|
||||
|
||||
|
|
@ -948,7 +951,7 @@ def self_publish():
|
|||
|
||||
Firstly, it gets the node and its dependencies.
|
||||
Then, it deactivates all other ROPs
|
||||
And finaly, it triggers the publishing action.
|
||||
And finally, it triggers the publishing action.
|
||||
"""
|
||||
|
||||
result, comment = hou.ui.readInput(
|
||||
|
|
@ -1076,4 +1079,4 @@ def prompt_reset_context():
|
|||
if options["instances"]:
|
||||
update_content_on_context_change()
|
||||
|
||||
dialog.deleteLater()
|
||||
dialog.deleteLater()
|
||||
|
|
|
|||
|
|
@ -39,7 +39,7 @@ CREATE_PATH = os.path.join(PLUGINS_DIR, "create")
|
|||
INVENTORY_PATH = os.path.join(PLUGINS_DIR, "inventory")
|
||||
|
||||
# Track whether the workfile tool is about to save
|
||||
ABOUT_TO_SAVE = False
|
||||
_about_to_save = False
|
||||
|
||||
|
||||
class HoudiniHost(HostBase, IWorkfileHost, ILoadHost, IPublishHost):
|
||||
|
|
@ -292,8 +292,8 @@ def ls():
|
|||
|
||||
|
||||
def before_workfile_save(event):
|
||||
global ABOUT_TO_SAVE
|
||||
ABOUT_TO_SAVE = True
|
||||
global _about_to_save
|
||||
_about_to_save = True
|
||||
|
||||
|
||||
def before_save():
|
||||
|
|
@ -307,18 +307,14 @@ def on_save():
|
|||
# update houdini vars
|
||||
lib.update_houdini_vars_context_dialog()
|
||||
|
||||
nodes = lib.get_id_required_nodes()
|
||||
for node, new_id in lib.generate_ids(nodes):
|
||||
lib.set_id(node, new_id, overwrite=False)
|
||||
|
||||
# We are now starting the actual save directly
|
||||
global ABOUT_TO_SAVE
|
||||
ABOUT_TO_SAVE = False
|
||||
global _about_to_save
|
||||
_about_to_save = False
|
||||
|
||||
|
||||
def on_task_changed():
|
||||
global ABOUT_TO_SAVE
|
||||
if not IS_HEADLESS and ABOUT_TO_SAVE:
|
||||
global _about_to_save
|
||||
if not IS_HEADLESS and _about_to_save:
|
||||
# Let's prompt the user to update the context settings or not
|
||||
lib.prompt_reset_context()
|
||||
|
||||
|
|
|
|||
|
|
@ -1,4 +1,4 @@
|
|||
from ayon_core.lib.applications import PreLaunchHook, LaunchTypes
|
||||
from ayon_applications import PreLaunchHook, LaunchTypes
|
||||
|
||||
|
||||
class SetPath(PreLaunchHook):
|
||||
|
|
|
|||
|
|
@ -3,7 +3,7 @@ from ayon_core.hosts.houdini.api.lib import (
|
|||
get_camera_from_container,
|
||||
set_camera_resolution
|
||||
)
|
||||
from ayon_core.pipeline.context_tools import get_current_project_folder
|
||||
from ayon_core.pipeline.context_tools import get_current_folder_entity
|
||||
|
||||
|
||||
class SetCameraResolution(InventoryAction):
|
||||
|
|
@ -19,7 +19,7 @@ class SetCameraResolution(InventoryAction):
|
|||
)
|
||||
|
||||
def process(self, containers):
|
||||
folder_entity = get_current_project_folder()
|
||||
folder_entity = get_current_folder_entity()
|
||||
for container in containers:
|
||||
node = container["node"]
|
||||
camera = get_camera_from_container(node)
|
||||
|
|
|
|||
|
|
@ -2,7 +2,7 @@ import os
|
|||
import re
|
||||
|
||||
from ayon_core.pipeline import load
|
||||
from openpype.hosts.houdini.api import pipeline
|
||||
from ayon_core.hosts.houdini.api import pipeline
|
||||
|
||||
import hou
|
||||
|
||||
|
|
@ -103,8 +103,8 @@ class FilePathLoader(load.LoaderPlugin):
|
|||
parm)
|
||||
node.setParmTemplateGroup(parm_template_group)
|
||||
|
||||
def switch(self, container, representation):
|
||||
self.update(container, representation)
|
||||
def switch(self, container, context):
|
||||
self.update(container, context)
|
||||
|
||||
def remove(self, container):
|
||||
|
||||
|
|
|
|||
|
|
@ -11,7 +11,7 @@ import ayon_api
|
|||
from ayon_core.pipeline import get_current_project_name, colorspace
|
||||
from ayon_core.settings import get_project_settings
|
||||
from ayon_core.pipeline.context_tools import (
|
||||
get_current_project_folder,
|
||||
get_current_folder_entity,
|
||||
)
|
||||
from ayon_core.style import load_stylesheet
|
||||
from pymxs import runtime as rt
|
||||
|
|
@ -222,7 +222,7 @@ def reset_scene_resolution():
|
|||
contains any information regarding scene resolution.
|
||||
"""
|
||||
|
||||
folder_entity = get_current_project_folder(
|
||||
folder_entity = get_current_folder_entity(
|
||||
fields={"attrib.resolutionWidth", "attrib.resolutionHeight"}
|
||||
)
|
||||
folder_attributes = folder_entity["attrib"]
|
||||
|
|
@ -243,7 +243,7 @@ def get_frame_range(folder_entiy=None) -> Union[Dict[str, Any], None]:
|
|||
"""
|
||||
# Set frame start/end
|
||||
if folder_entiy is None:
|
||||
folder_entiy = get_current_project_folder()
|
||||
folder_entiy = get_current_folder_entity()
|
||||
|
||||
folder_attributes = folder_entiy["attrib"]
|
||||
frame_start = folder_attributes.get("frameStart")
|
||||
|
|
|
|||
|
|
@ -3,7 +3,7 @@ from pymxs import runtime as rt
|
|||
from ayon_core.lib import Logger
|
||||
from ayon_core.settings import get_project_settings
|
||||
from ayon_core.pipeline import get_current_project_name
|
||||
from ayon_core.pipeline.context_tools import get_current_project_folder
|
||||
from ayon_core.pipeline.context_tools import get_current_folder_entity
|
||||
|
||||
from ayon_core.hosts.max.api.lib import (
|
||||
set_render_frame_range,
|
||||
|
|
@ -57,7 +57,7 @@ class RenderSettings(object):
|
|||
if not os.path.exists(output_dir):
|
||||
os.makedirs(output_dir)
|
||||
# hard-coded, should be customized in the setting
|
||||
folder_attributes = get_current_project_folder()["attrib"]
|
||||
folder_attributes = get_current_folder_entity()["attrib"]
|
||||
|
||||
# get project resolution
|
||||
width = folder_attributes.get("resolutionWidth")
|
||||
|
|
|
|||
|
|
@ -240,10 +240,10 @@ def get_previous_loaded_object(container: str):
|
|||
node_list(list): list of nodes which are previously loaded
|
||||
"""
|
||||
node_list = []
|
||||
sel_list = rt.getProperty(container.modifiers[0].openPypeData, "sel_list")
|
||||
for obj in rt.Objects:
|
||||
if str(obj) in sel_list:
|
||||
node_list.append(obj)
|
||||
node_transform_monitor_list = rt.getProperty(
|
||||
container.modifiers[0].openPypeData, "all_handles")
|
||||
for node_transform_monitor in node_transform_monitor_list:
|
||||
node_list.append(node_transform_monitor.node)
|
||||
return node_list
|
||||
|
||||
|
||||
|
|
|
|||
|
|
@ -2,7 +2,7 @@
|
|||
"""Pre-launch to force 3ds max startup script."""
|
||||
import os
|
||||
from ayon_core.hosts.max import MAX_HOST_DIR
|
||||
from ayon_core.lib.applications import PreLaunchHook, LaunchTypes
|
||||
from ayon_applications import PreLaunchHook, LaunchTypes
|
||||
|
||||
|
||||
class ForceStartupScript(PreLaunchHook):
|
||||
|
|
|
|||
|
|
@ -1,7 +1,7 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""Pre-launch hook to inject python environment."""
|
||||
import os
|
||||
from ayon_core.lib.applications import PreLaunchHook, LaunchTypes
|
||||
from ayon_applications import PreLaunchHook, LaunchTypes
|
||||
|
||||
|
||||
class InjectPythonPath(PreLaunchHook):
|
||||
|
|
|
|||
|
|
@ -1,4 +1,4 @@
|
|||
from ayon_core.lib.applications import PreLaunchHook, LaunchTypes
|
||||
from ayon_applications import PreLaunchHook, LaunchTypes
|
||||
|
||||
|
||||
class SetPath(PreLaunchHook):
|
||||
|
|
|
|||
|
|
@ -4,7 +4,10 @@ from __future__ import absolute_import
|
|||
import pyblish.api
|
||||
import ayon_api
|
||||
|
||||
from ayon_core.pipeline.publish import get_errored_instances_from_context
|
||||
from ayon_core.pipeline.publish import (
|
||||
get_errored_instances_from_context,
|
||||
get_errored_plugins_from_context
|
||||
)
|
||||
|
||||
|
||||
class GenerateUUIDsOnInvalidAction(pyblish.api.Action):
|
||||
|
|
@ -112,20 +115,25 @@ class SelectInvalidAction(pyblish.api.Action):
|
|||
except ImportError:
|
||||
raise ImportError("Current host is not Maya")
|
||||
|
||||
errored_instances = get_errored_instances_from_context(context,
|
||||
plugin=plugin)
|
||||
|
||||
# Get the invalid nodes for the plug-ins
|
||||
self.log.info("Finding invalid nodes..")
|
||||
invalid = list()
|
||||
for instance in errored_instances:
|
||||
invalid_nodes = plugin.get_invalid(instance)
|
||||
if invalid_nodes:
|
||||
if isinstance(invalid_nodes, (list, tuple)):
|
||||
invalid.extend(invalid_nodes)
|
||||
else:
|
||||
self.log.warning("Plug-in returned to be invalid, "
|
||||
"but has no selectable nodes.")
|
||||
if issubclass(plugin, pyblish.api.ContextPlugin):
|
||||
errored_plugins = get_errored_plugins_from_context(context)
|
||||
if plugin in errored_plugins:
|
||||
invalid = plugin.get_invalid(context)
|
||||
else:
|
||||
errored_instances = get_errored_instances_from_context(
|
||||
context, plugin=plugin
|
||||
)
|
||||
for instance in errored_instances:
|
||||
invalid_nodes = plugin.get_invalid(instance)
|
||||
if invalid_nodes:
|
||||
if isinstance(invalid_nodes, (list, tuple)):
|
||||
invalid.extend(invalid_nodes)
|
||||
else:
|
||||
self.log.warning("Plug-in returned to be invalid, "
|
||||
"but has no selectable nodes.")
|
||||
|
||||
# Ensure unique (process each node only once)
|
||||
invalid = list(set(invalid))
|
||||
|
|
|
|||
|
|
@ -37,7 +37,7 @@ from ayon_core.pipeline import (
|
|||
AYON_CONTAINER_ID,
|
||||
)
|
||||
from ayon_core.lib import NumberDef
|
||||
from ayon_core.pipeline.context_tools import get_current_project_folder
|
||||
from ayon_core.pipeline.context_tools import get_current_folder_entity
|
||||
from ayon_core.pipeline.create import CreateContext
|
||||
from ayon_core.lib.profiles_filtering import filter_profiles
|
||||
|
||||
|
|
@ -1876,18 +1876,9 @@ def list_looks(project_name, folder_id):
|
|||
list[dict[str, Any]]: List of look products.
|
||||
|
||||
"""
|
||||
# # get all products with look leading in
|
||||
# the name associated with the asset
|
||||
# TODO this should probably look for product type 'look' instead of
|
||||
# checking product name that can not start with product type
|
||||
product_entities = ayon_api.get_products(
|
||||
project_name, folder_ids=[folder_id]
|
||||
)
|
||||
return [
|
||||
product_entity
|
||||
for product_entity in product_entities
|
||||
if product_entity["name"].startswith("look")
|
||||
]
|
||||
return list(ayon_api.get_products(
|
||||
project_name, folder_ids=[folder_id], product_types={"look"}
|
||||
))
|
||||
|
||||
|
||||
def assign_look_by_version(nodes, version_id):
|
||||
|
|
@ -1906,12 +1897,15 @@ def assign_look_by_version(nodes, version_id):
|
|||
project_name = get_current_project_name()
|
||||
|
||||
# Get representations of shader file and relationships
|
||||
look_representation = ayon_api.get_representation_by_name(
|
||||
project_name, "ma", version_id
|
||||
)
|
||||
json_representation = ayon_api.get_representation_by_name(
|
||||
project_name, "json", version_id
|
||||
)
|
||||
representations = list(ayon_api.get_representations(
|
||||
project_name=project_name,
|
||||
representation_names={"ma", "json"},
|
||||
version_ids=[version_id]
|
||||
))
|
||||
look_representation = next(
|
||||
repre for repre in representations if repre["name"] == "ma")
|
||||
json_representation = next(
|
||||
repre for repre in representations if repre["name"] == "json")
|
||||
|
||||
# See if representation is already loaded, if so reuse it.
|
||||
host = registered_host()
|
||||
|
|
@ -1948,7 +1942,7 @@ def assign_look_by_version(nodes, version_id):
|
|||
apply_shaders(relationships, shader_nodes, nodes)
|
||||
|
||||
|
||||
def assign_look(nodes, product_name="lookDefault"):
|
||||
def assign_look(nodes, product_name="lookMain"):
|
||||
"""Assigns a look to a node.
|
||||
|
||||
Optimizes the nodes by grouping by folder id and finding
|
||||
|
|
@ -1981,14 +1975,10 @@ def assign_look(nodes, product_name="lookDefault"):
|
|||
product_entity["id"]
|
||||
for product_entity in product_entities_by_folder_id.values()
|
||||
}
|
||||
last_version_entities = ayon_api.get_last_versions(
|
||||
last_version_entities_by_product_id = ayon_api.get_last_versions(
|
||||
project_name,
|
||||
product_ids
|
||||
)
|
||||
last_version_entities_by_product_id = {
|
||||
last_version_entity["productId"]: last_version_entity
|
||||
for last_version_entity in last_version_entities
|
||||
}
|
||||
|
||||
for folder_id, asset_nodes in grouped.items():
|
||||
product_entity = product_entities_by_folder_id.get(folder_id)
|
||||
|
|
@ -2444,12 +2434,10 @@ def set_scene_fps(fps, update=True):
|
|||
cmds.currentUnit(time=unit, updateAnimation=update)
|
||||
|
||||
# Set time slider data back to previous state
|
||||
cmds.playbackOptions(edit=True, minTime=start_frame)
|
||||
cmds.playbackOptions(edit=True, maxTime=end_frame)
|
||||
|
||||
# Set animation data
|
||||
cmds.playbackOptions(edit=True, animationStartTime=animation_start)
|
||||
cmds.playbackOptions(edit=True, animationEndTime=animation_end)
|
||||
cmds.playbackOptions(minTime=start_frame,
|
||||
maxTime=end_frame,
|
||||
animationStartTime=animation_start,
|
||||
animationEndTime=animation_end)
|
||||
|
||||
cmds.currentTime(current_frame, edit=True, update=True)
|
||||
|
||||
|
|
@ -2525,7 +2513,7 @@ def get_fps_for_current_context():
|
|||
|
||||
|
||||
def get_frame_range(include_animation_range=False):
|
||||
"""Get the current folder frame range and handles.
|
||||
"""Get the current task frame range and handles.
|
||||
|
||||
Args:
|
||||
include_animation_range (bool, optional): Whether to include
|
||||
|
|
@ -2533,25 +2521,34 @@ def get_frame_range(include_animation_range=False):
|
|||
range of the timeline. It is excluded by default.
|
||||
|
||||
Returns:
|
||||
dict: Folder's expected frame range values.
|
||||
dict: Task's expected frame range values.
|
||||
|
||||
"""
|
||||
|
||||
# Set frame start/end
|
||||
project_name = get_current_project_name()
|
||||
folder_path = get_current_folder_path()
|
||||
folder_entity = ayon_api.get_folder_by_path(project_name, folder_path)
|
||||
folder_attributes = folder_entity["attrib"]
|
||||
task_name = get_current_task_name()
|
||||
|
||||
frame_start = folder_attributes.get("frameStart")
|
||||
frame_end = folder_attributes.get("frameEnd")
|
||||
folder_entity = ayon_api.get_folder_by_path(
|
||||
project_name,
|
||||
folder_path,
|
||||
fields={"id"})
|
||||
task_entity = ayon_api.get_task_by_name(
|
||||
project_name, folder_entity["id"], task_name
|
||||
)
|
||||
|
||||
task_attributes = task_entity["attrib"]
|
||||
|
||||
frame_start = task_attributes.get("frameStart")
|
||||
frame_end = task_attributes.get("frameEnd")
|
||||
|
||||
if frame_start is None or frame_end is None:
|
||||
cmds.warning("No edit information found for '{}'".format(folder_path))
|
||||
return
|
||||
|
||||
handle_start = folder_attributes.get("handleStart") or 0
|
||||
handle_end = folder_attributes.get("handleEnd") or 0
|
||||
handle_start = task_attributes.get("handleStart") or 0
|
||||
handle_end = task_attributes.get("handleEnd") or 0
|
||||
|
||||
frame_range = {
|
||||
"frameStart": frame_start,
|
||||
|
|
@ -2565,14 +2562,10 @@ def get_frame_range(include_animation_range=False):
|
|||
# Some usages of this function use the full dictionary to define
|
||||
# instance attributes for which we want to exclude the animation
|
||||
# keys. That is why these are excluded by default.
|
||||
task_name = get_current_task_name()
|
||||
|
||||
settings = get_project_settings(project_name)
|
||||
task_entity = ayon_api.get_task_by_name(
|
||||
project_name, folder_entity["id"], task_name
|
||||
)
|
||||
task_type = None
|
||||
if task_entity:
|
||||
task_type = task_entity["taskType"]
|
||||
|
||||
task_type = task_entity["taskType"]
|
||||
|
||||
include_handles_settings = settings["maya"]["include_handles"]
|
||||
|
||||
|
|
@ -2641,7 +2634,7 @@ def reset_scene_resolution():
|
|||
None
|
||||
"""
|
||||
|
||||
folder_attributes = get_current_project_folder()["attrib"]
|
||||
folder_attributes = get_current_folder_entity()["attrib"]
|
||||
|
||||
# Set resolution
|
||||
width = folder_attributes.get("resolutionWidth", 1920)
|
||||
|
|
@ -3250,7 +3243,7 @@ def update_content_on_context_change():
|
|||
This will update scene content to match new folder on context change
|
||||
"""
|
||||
scene_sets = cmds.listSets(allSets=True)
|
||||
folder_entity = get_current_project_folder()
|
||||
folder_entity = get_current_folder_entity()
|
||||
folder_attributes = folder_entity["attrib"]
|
||||
new_folder_path = folder_entity["path"]
|
||||
for s in scene_sets:
|
||||
|
|
|
|||
|
|
@ -7,7 +7,7 @@ from ayon_core.lib import Logger
|
|||
from ayon_core.settings import get_project_settings
|
||||
|
||||
from ayon_core.pipeline import CreatorError, get_current_project_name
|
||||
from ayon_core.pipeline.context_tools import get_current_project_folder
|
||||
from ayon_core.pipeline.context_tools import get_current_folder_entity
|
||||
from ayon_core.hosts.maya.api.lib import reset_frame_range
|
||||
|
||||
|
||||
|
|
@ -77,7 +77,7 @@ class RenderSettings(object):
|
|||
renderer = cmds.getAttr(
|
||||
'defaultRenderGlobals.currentRenderer').lower()
|
||||
|
||||
folder_entity = get_current_project_folder()
|
||||
folder_entity = get_current_folder_entity()
|
||||
folder_attributes = folder_entity["attrib"]
|
||||
# project_settings/maya/create/CreateRender/aov_separator
|
||||
try:
|
||||
|
|
|
|||
|
|
@ -68,7 +68,7 @@ INVENTORY_PATH = os.path.join(PLUGINS_DIR, "inventory")
|
|||
AVALON_CONTAINERS = ":AVALON_CONTAINERS"
|
||||
|
||||
# Track whether the workfile tool is about to save
|
||||
ABOUT_TO_SAVE = False
|
||||
_about_to_save = False
|
||||
|
||||
|
||||
class MayaHost(HostBase, IWorkfileHost, ILoadHost, IPublishHost):
|
||||
|
|
@ -585,8 +585,8 @@ def on_save():
|
|||
lib.set_id(node, new_id, overwrite=False)
|
||||
|
||||
# We are now starting the actual save directly
|
||||
global ABOUT_TO_SAVE
|
||||
ABOUT_TO_SAVE = False
|
||||
global _about_to_save
|
||||
_about_to_save = False
|
||||
|
||||
|
||||
def on_open():
|
||||
|
|
@ -657,8 +657,8 @@ def on_task_changed():
|
|||
lib.set_context_settings()
|
||||
lib.update_content_on_context_change()
|
||||
|
||||
global ABOUT_TO_SAVE
|
||||
if not lib.IS_HEADLESS and ABOUT_TO_SAVE:
|
||||
global _about_to_save
|
||||
if not lib.IS_HEADLESS and _about_to_save:
|
||||
# Let's prompt the user to update the context settings or not
|
||||
lib.prompt_reset_context()
|
||||
|
||||
|
|
@ -676,8 +676,8 @@ def before_workfile_save(event):
|
|||
if workdir_path:
|
||||
create_workspace_mel(workdir_path, project_name)
|
||||
|
||||
global ABOUT_TO_SAVE
|
||||
ABOUT_TO_SAVE = True
|
||||
global _about_to_save
|
||||
_about_to_save = True
|
||||
|
||||
|
||||
def workfile_save_before_xgen(event):
|
||||
|
|
|
|||
|
|
@ -1,4 +1,4 @@
|
|||
from ayon_core.lib.applications import PreLaunchHook, LaunchTypes
|
||||
from ayon_applications import PreLaunchHook, LaunchTypes
|
||||
|
||||
|
||||
class MayaPreAutoLoadPlugins(PreLaunchHook):
|
||||
|
|
|
|||
|
|
@ -1,4 +1,4 @@
|
|||
from ayon_core.lib.applications import PreLaunchHook, LaunchTypes
|
||||
from ayon_applications import PreLaunchHook, LaunchTypes
|
||||
from ayon_core.hosts.maya.lib import create_workspace_mel
|
||||
|
||||
|
||||
|
|
|
|||
|
|
@ -1,4 +1,4 @@
|
|||
from ayon_core.lib.applications import PreLaunchHook, LaunchTypes
|
||||
from ayon_applications import PreLaunchHook, LaunchTypes
|
||||
|
||||
|
||||
class MayaPreOpenWorkfilePostInitialization(PreLaunchHook):
|
||||
|
|
|
|||
|
|
@ -1,24 +1,19 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""Collect render data.
|
||||
|
||||
This collector will go through render layers in maya and prepare all data
|
||||
needed to create instances and their representations for submission and
|
||||
publishing on farm.
|
||||
This collector will go through renderlayer instances and prepare all data
|
||||
needed to detect the expected rendered files for a layer, with resolution,
|
||||
frame ranges and collects the data needed for publishing on the farm.
|
||||
|
||||
Requires:
|
||||
instance -> families
|
||||
instance -> setMembers
|
||||
instance -> folderPath
|
||||
|
||||
context -> currentFile
|
||||
context -> workspaceDir
|
||||
context -> user
|
||||
|
||||
Optional:
|
||||
|
||||
Provides:
|
||||
instance -> label
|
||||
instance -> productName
|
||||
instance -> subset
|
||||
instance -> attachTo
|
||||
instance -> setMembers
|
||||
instance -> publish
|
||||
|
|
@ -26,6 +21,8 @@ Provides:
|
|||
instance -> frameEnd
|
||||
instance -> byFrameStep
|
||||
instance -> renderer
|
||||
instance -> family
|
||||
instance -> asset
|
||||
instance -> time
|
||||
instance -> author
|
||||
instance -> source
|
||||
|
|
@ -71,8 +68,6 @@ class CollectMayaRender(pyblish.api.InstancePlugin):
|
|||
|
||||
# TODO: Re-add force enable of workfile instance?
|
||||
# TODO: Re-add legacy layer support with LAYER_ prefix but in Creator
|
||||
# TODO: Set and collect active state of RenderLayer in Creator using
|
||||
# renderlayer.isRenderable()
|
||||
context = instance.context
|
||||
|
||||
layer = instance.data["transientData"]["layer"]
|
||||
|
|
@ -112,7 +107,13 @@ class CollectMayaRender(pyblish.api.InstancePlugin):
|
|||
except UnsupportedRendererException as exc:
|
||||
raise KnownPublishError(exc)
|
||||
render_products = layer_render_products.layer_data.products
|
||||
assert render_products, "no render products generated"
|
||||
if not render_products:
|
||||
self.log.error(
|
||||
"No render products generated for '%s'. You might not have "
|
||||
"any render camera in the renderlayer or render end frame is "
|
||||
"lower than start frame.",
|
||||
instance.name
|
||||
)
|
||||
expected_files = []
|
||||
multipart = False
|
||||
for product in render_products:
|
||||
|
|
@ -130,16 +131,21 @@ class CollectMayaRender(pyblish.api.InstancePlugin):
|
|||
})
|
||||
|
||||
has_cameras = any(product.camera for product in render_products)
|
||||
assert has_cameras, "No render cameras found."
|
||||
|
||||
self.log.debug("multipart: {}".format(
|
||||
multipart))
|
||||
assert expected_files, "no file names were generated, this is a bug"
|
||||
self.log.debug(
|
||||
"expected files: {}".format(
|
||||
json.dumps(expected_files, indent=4, sort_keys=True)
|
||||
if render_products and not has_cameras:
|
||||
self.log.error(
|
||||
"No render cameras found for: %s",
|
||||
instance
|
||||
)
|
||||
)
|
||||
if not expected_files:
|
||||
self.log.warning(
|
||||
"No file names were generated, this is a bug.")
|
||||
|
||||
for render_product in render_products:
|
||||
self.log.debug(render_product)
|
||||
self.log.debug("multipart: {}".format(multipart))
|
||||
self.log.debug("expected files: {}".format(
|
||||
json.dumps(expected_files, indent=4, sort_keys=True)
|
||||
))
|
||||
|
||||
# if we want to attach render to product, check if we have AOV's
|
||||
# in expectedFiles. If so, raise error as we cannot attach AOV
|
||||
|
|
@ -151,14 +157,14 @@ class CollectMayaRender(pyblish.api.InstancePlugin):
|
|||
)
|
||||
|
||||
# append full path
|
||||
aov_dict = {}
|
||||
image_directory = os.path.join(
|
||||
cmds.workspace(query=True, rootDirectory=True),
|
||||
cmds.workspace(fileRuleEntry="images")
|
||||
)
|
||||
# replace relative paths with absolute. Render products are
|
||||
# returned as list of dictionaries.
|
||||
publish_meta_path = None
|
||||
publish_meta_path = "NOT-SET"
|
||||
aov_dict = {}
|
||||
for aov in expected_files:
|
||||
full_paths = []
|
||||
aov_first_key = list(aov.keys())[0]
|
||||
|
|
@ -169,14 +175,6 @@ class CollectMayaRender(pyblish.api.InstancePlugin):
|
|||
publish_meta_path = os.path.dirname(full_path)
|
||||
aov_dict[aov_first_key] = full_paths
|
||||
full_exp_files = [aov_dict]
|
||||
self.log.debug(full_exp_files)
|
||||
|
||||
if publish_meta_path is None:
|
||||
raise KnownPublishError("Unable to detect any expected output "
|
||||
"images for: {}. Make sure you have a "
|
||||
"renderable camera and a valid frame "
|
||||
"range set for your renderlayer."
|
||||
"".format(instance.name))
|
||||
|
||||
frame_start_render = int(self.get_render_attribute(
|
||||
"startFrame", layer=layer_name))
|
||||
|
|
@ -222,7 +220,8 @@ class CollectMayaRender(pyblish.api.InstancePlugin):
|
|||
common_publish_meta_path = "/" + common_publish_meta_path
|
||||
|
||||
self.log.debug(
|
||||
"Publish meta path: {}".format(common_publish_meta_path))
|
||||
"Publish meta path: {}".format(common_publish_meta_path)
|
||||
)
|
||||
|
||||
# Get layer specific settings, might be overrides
|
||||
colorspace_data = lib.get_color_management_preferences()
|
||||
|
|
|
|||
|
|
@ -5,7 +5,8 @@ from maya import cmds
|
|||
from ayon_core.pipeline import publish
|
||||
|
||||
|
||||
class ExtractGPUCache(publish.Extractor):
|
||||
class ExtractGPUCache(publish.Extractor,
|
||||
publish.OptionalPyblishPluginMixin):
|
||||
"""Extract the content of the instance to a GPU cache file."""
|
||||
|
||||
label = "GPU Cache"
|
||||
|
|
@ -20,6 +21,9 @@ class ExtractGPUCache(publish.Extractor):
|
|||
useBaseTessellation = True
|
||||
|
||||
def process(self, instance):
|
||||
if not self.is_active(instance.data):
|
||||
return
|
||||
|
||||
cmds.loadPlugin("gpuCache", quiet=True)
|
||||
|
||||
staging_dir = self.staging_dir(instance)
|
||||
|
|
|
|||
|
|
@ -47,10 +47,18 @@ class ValidateShadingEngine(pyblish.api.InstancePlugin,
|
|||
shape, destination=True, type="shadingEngine"
|
||||
) or []
|
||||
for shading_engine in shading_engines:
|
||||
name = (
|
||||
cmds.listConnections(shading_engine + ".surfaceShader")[0]
|
||||
+ "SG"
|
||||
materials = cmds.listConnections(
|
||||
shading_engine + ".surfaceShader",
|
||||
source=True, destination=False
|
||||
)
|
||||
if not materials:
|
||||
cls.log.warning(
|
||||
"Shading engine '{}' has no material connected to its "
|
||||
".surfaceShader attribute.".format(shading_engine))
|
||||
continue
|
||||
|
||||
material = materials[0] # there should only ever be one input
|
||||
name = material + "SG"
|
||||
if shading_engine != name:
|
||||
invalid.append(shading_engine)
|
||||
|
||||
|
|
|
|||
|
|
@ -3,7 +3,6 @@ import maya.cmds as cmds
|
|||
import pyblish.api
|
||||
|
||||
import ayon_core.hosts.maya.api.lib as mayalib
|
||||
from ayon_core.pipeline.context_tools import get_current_project_folder
|
||||
from ayon_core.pipeline.publish import (
|
||||
RepairContextAction,
|
||||
ValidateSceneOrder,
|
||||
|
|
@ -131,6 +130,5 @@ class ValidateMayaUnits(pyblish.api.ContextPlugin,
|
|||
cls.log.debug(current_linear)
|
||||
|
||||
cls.log.info("Setting time unit to match project")
|
||||
# TODO replace query with using 'context.data["folderEntity"]'
|
||||
folder_entity = get_current_project_folder()
|
||||
folder_entity = context.data["folderEntity"]
|
||||
mayalib.set_scene_fps(folder_entity["attrib"]["fps"])
|
||||
|
|
|
|||
|
|
@ -1,4 +1,5 @@
|
|||
import re
|
||||
import inspect
|
||||
|
||||
import pyblish.api
|
||||
from maya import cmds
|
||||
|
|
@ -36,7 +37,10 @@ class ValidateRenderSingleCamera(pyblish.api.InstancePlugin,
|
|||
return
|
||||
invalid = self.get_invalid(instance)
|
||||
if invalid:
|
||||
raise PublishValidationError("Invalid cameras for render.")
|
||||
raise PublishValidationError(
|
||||
"Invalid render cameras.",
|
||||
description=self.get_description()
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def get_invalid(cls, instance):
|
||||
|
|
@ -51,17 +55,30 @@ class ValidateRenderSingleCamera(pyblish.api.InstancePlugin,
|
|||
RenderSettings.get_image_prefix_attr(renderer)
|
||||
)
|
||||
|
||||
|
||||
renderlayer = instance.data["renderlayer"]
|
||||
if len(cameras) > 1:
|
||||
if re.search(cls.R_CAMERA_TOKEN, file_prefix):
|
||||
# if there is <Camera> token in prefix and we have more then
|
||||
# 1 camera, all is ok.
|
||||
return
|
||||
cls.log.error("Multiple renderable cameras found for %s: %s " %
|
||||
(instance.data["setMembers"], cameras))
|
||||
return [instance.data["setMembers"]] + cameras
|
||||
cls.log.error(
|
||||
"Multiple renderable cameras found for %s: %s ",
|
||||
renderlayer, ", ".join(cameras))
|
||||
return [renderlayer] + cameras
|
||||
|
||||
elif len(cameras) < 1:
|
||||
cls.log.error("No renderable cameras found for %s " %
|
||||
instance.data["setMembers"])
|
||||
return [instance.data["setMembers"]]
|
||||
cls.log.error("No renderable cameras found for %s ", renderlayer)
|
||||
return [renderlayer]
|
||||
|
||||
def get_description(self):
|
||||
return inspect.cleandoc(
|
||||
"""### Render Cameras Invalid
|
||||
|
||||
Your render cameras are misconfigured. You may have no render
|
||||
camera set or have multiple cameras with a render filename
|
||||
prefix that does not include the `<Camera>` token.
|
||||
|
||||
See the logs for more details about the cameras.
|
||||
|
||||
"""
|
||||
)
|
||||
|
|
|
|||
|
|
@ -51,7 +51,7 @@ def assign_vrayproxy_shaders(vrayproxy, assignments):
|
|||
index += 1
|
||||
|
||||
|
||||
def vrayproxy_assign_look(vrayproxy, product_name="lookDefault"):
|
||||
def vrayproxy_assign_look(vrayproxy, product_name="lookMain"):
|
||||
# type: (str, str) -> None
|
||||
"""Assign look to vray proxy.
|
||||
|
||||
|
|
|
|||
|
|
@ -904,7 +904,7 @@ class ExporterReviewMov(ExporterReview):
|
|||
node, product_name, "Reposition node... `{}`"
|
||||
)
|
||||
# append reformatted tag
|
||||
add_tags.append("reformated")
|
||||
add_tags.append("reformatted")
|
||||
|
||||
# only create colorspace baking if toggled on
|
||||
if bake_viewer_process:
|
||||
|
|
|
|||
|
|
@ -1,4 +1,4 @@
|
|||
from ayon_core.lib.applications import PreLaunchHook
|
||||
from ayon_applications import PreLaunchHook
|
||||
|
||||
|
||||
class PrelaunchNukeAssistHook(PreLaunchHook):
|
||||
|
|
|
|||
|
|
@ -6,10 +6,7 @@ from ayon_core.lib import (
|
|||
get_ayon_launcher_args,
|
||||
is_using_ayon_console,
|
||||
)
|
||||
from ayon_core.lib.applications import (
|
||||
PreLaunchHook,
|
||||
LaunchTypes,
|
||||
)
|
||||
from ayon_applications import PreLaunchHook, LaunchTypes
|
||||
from ayon_core.hosts.photoshop import get_launch_script_path
|
||||
|
||||
|
||||
|
|
|
|||
|
|
@ -1,5 +1,5 @@
|
|||
import os
|
||||
from ayon_core.lib.applications import PreLaunchHook, LaunchTypes
|
||||
from ayon_applications import PreLaunchHook, LaunchTypes
|
||||
|
||||
|
||||
class PreLaunchResolveLastWorkfile(PreLaunchHook):
|
||||
|
|
|
|||
|
|
@ -1,7 +1,7 @@
|
|||
import os
|
||||
from pathlib import Path
|
||||
import platform
|
||||
from ayon_core.lib.applications import PreLaunchHook, LaunchTypes
|
||||
from ayon_applications import PreLaunchHook, LaunchTypes
|
||||
from ayon_core.hosts.resolve.utils import setup
|
||||
|
||||
|
||||
|
|
|
|||
|
|
@ -1,6 +1,6 @@
|
|||
import os
|
||||
|
||||
from ayon_core.lib.applications import PreLaunchHook, LaunchTypes
|
||||
from ayon_applications import PreLaunchHook, LaunchTypes
|
||||
import ayon_core.hosts.resolve
|
||||
|
||||
|
||||
|
|
|
|||
|
|
@ -18,7 +18,7 @@ class SubstanceLoadProjectMesh(load.LoaderPlugin):
|
|||
"""Load mesh for project"""
|
||||
|
||||
product_types = {"*"}
|
||||
representations = ["abc", "fbx", "obj", "gltf"]
|
||||
representations = ["abc", "fbx", "obj", "gltf", "usd", "usda", "usdc"]
|
||||
|
||||
label = "Load mesh"
|
||||
order = -10
|
||||
|
|
|
|||
|
|
@ -1,5 +1,5 @@
|
|||
from ayon_core.lib import get_ayon_launcher_args
|
||||
from ayon_core.lib.applications import PreLaunchHook, LaunchTypes
|
||||
from ayon_applications import PreLaunchHook, LaunchTypes
|
||||
|
||||
|
||||
class TvpaintPrelaunchHook(PreLaunchHook):
|
||||
|
|
|
|||
|
|
@ -9,7 +9,7 @@ from pathlib import Path
|
|||
from qtpy import QtCore
|
||||
|
||||
from ayon_core import resources
|
||||
from ayon_core.lib.applications import (
|
||||
from ayon_applications import (
|
||||
PreLaunchHook,
|
||||
ApplicationLaunchFailed,
|
||||
LaunchTypes,
|
||||
|
|
|
|||
|
|
@ -8,7 +8,7 @@ from unreal import EditorAssetLibrary
|
|||
from unreal import MovieSceneSkeletalAnimationTrack
|
||||
from unreal import MovieSceneSkeletalAnimationSection
|
||||
|
||||
from ayon_core.pipeline.context_tools import get_current_project_folder
|
||||
from ayon_core.pipeline.context_tools import get_current_folder_entity
|
||||
from ayon_core.pipeline import (
|
||||
get_representation_path,
|
||||
AYON_CONTAINER_ID
|
||||
|
|
@ -53,7 +53,7 @@ class AnimationFBXLoader(plugin.Loader):
|
|||
if not actor:
|
||||
return None
|
||||
|
||||
folder_entity = get_current_project_folder(fields=["attrib.fps"])
|
||||
folder_entity = get_current_folder_entity(fields=["attrib.fps"])
|
||||
|
||||
task.set_editor_property('filename', path)
|
||||
task.set_editor_property('destination_path', asset_dir)
|
||||
|
|
@ -256,7 +256,7 @@ class AnimationFBXLoader(plugin.Loader):
|
|||
repre_entity = context["representation"]
|
||||
folder_name = container["asset_name"]
|
||||
source_path = get_representation_path(repre_entity)
|
||||
folder_entity = get_current_project_folder(fields=["attrib.fps"])
|
||||
folder_entity = get_current_folder_entity(fields=["attrib.fps"])
|
||||
destination_path = container["namespace"]
|
||||
|
||||
task = unreal.AssetImportTask()
|
||||
|
|
|
|||
|
|
@ -25,7 +25,7 @@ from ayon_core.pipeline import (
|
|||
AYON_CONTAINER_ID,
|
||||
get_current_project_name,
|
||||
)
|
||||
from ayon_core.pipeline.context_tools import get_current_project_folder
|
||||
from ayon_core.pipeline.context_tools import get_current_folder_entity
|
||||
from ayon_core.settings import get_current_project_settings
|
||||
from ayon_core.hosts.unreal.api import plugin
|
||||
from ayon_core.hosts.unreal.api.pipeline import (
|
||||
|
|
@ -169,7 +169,7 @@ class LayoutLoader(plugin.Loader):
|
|||
|
||||
anim_path = f"{asset_dir}/animations/{anim_file_name}"
|
||||
|
||||
folder_entity = get_current_project_folder()
|
||||
folder_entity = get_current_folder_entity()
|
||||
# Import animation
|
||||
task = unreal.AssetImportTask()
|
||||
task.options = unreal.FbxImportUI()
|
||||
|
|
|
|||
|
|
@ -120,22 +120,6 @@ from .transcoding import (
|
|||
get_rescaled_command_arguments,
|
||||
)
|
||||
|
||||
from .applications import (
|
||||
ApplicationLaunchFailed,
|
||||
ApplictionExecutableNotFound,
|
||||
ApplicationNotFound,
|
||||
ApplicationManager,
|
||||
|
||||
PreLaunchHook,
|
||||
PostLaunchHook,
|
||||
|
||||
EnvironmentPrepData,
|
||||
prepare_app_environments,
|
||||
prepare_context_environments,
|
||||
get_app_environments_for_context,
|
||||
apply_project_environments_value
|
||||
)
|
||||
|
||||
from .plugin_tools import (
|
||||
prepare_template_data,
|
||||
source_hash,
|
||||
|
|
@ -231,18 +215,6 @@ __all__ = [
|
|||
"convert_ffprobe_fps_to_float",
|
||||
"get_rescaled_command_arguments",
|
||||
|
||||
"ApplicationLaunchFailed",
|
||||
"ApplictionExecutableNotFound",
|
||||
"ApplicationNotFound",
|
||||
"ApplicationManager",
|
||||
"PreLaunchHook",
|
||||
"PostLaunchHook",
|
||||
"EnvironmentPrepData",
|
||||
"prepare_app_environments",
|
||||
"prepare_context_environments",
|
||||
"get_app_environments_for_context",
|
||||
"apply_project_environments_value",
|
||||
|
||||
"compile_list_of_regexes",
|
||||
|
||||
"filter_profiles",
|
||||
|
|
|
|||
File diff suppressed because it is too large
Load diff
|
|
@ -1,15 +1,5 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""Package helping with colorizing and formatting terminal output."""
|
||||
# ::
|
||||
# //. ... .. ///. //.
|
||||
# ///\\\ \\\ \\ ///\\\ ///
|
||||
# /// \\ \\\ \\ /// \\ /// //
|
||||
# \\\ // \\\ // \\\ // \\\// ./
|
||||
# \\\// \\\// \\\// \\\' //
|
||||
# \\\ \\\ \\\ \\\//
|
||||
# ''' ''' ''' '''
|
||||
# ..---===[[ PyP3 Setup ]]===---...
|
||||
#
|
||||
import re
|
||||
import time
|
||||
import threading
|
||||
|
|
|
|||
|
|
@ -45,7 +45,7 @@ ARRAY_TYPE_REGEX = re.compile(r"^(int|float|string)\[\d+\]$")
|
|||
|
||||
IMAGE_EXTENSIONS = {
|
||||
".ani", ".anim", ".apng", ".art", ".bmp", ".bpg", ".bsave",
|
||||
".cal", ".cin", ".cpc", ".cpt", ".dds", ".dpx", ".ecw", ".exr",
|
||||
".cal", ".cin", ".cpc", ".cpt", ".dds", ".dng", ".dpx", ".ecw", ".exr",
|
||||
".fits", ".flic", ".flif", ".fpx", ".gif", ".hdri", ".hevc",
|
||||
".icer", ".icns", ".ico", ".cur", ".ics", ".ilbm", ".jbig", ".jbig2",
|
||||
".jng", ".jpeg", ".jpeg-ls", ".jpeg-hdr", ".2000", ".jpg",
|
||||
|
|
|
|||
|
|
@ -80,6 +80,8 @@ class AfterEffectsSubmitDeadline(
|
|||
"FTRACK_API_KEY",
|
||||
"FTRACK_API_USER",
|
||||
"FTRACK_SERVER",
|
||||
"AYON_BUNDLE_NAME",
|
||||
"AYON_DEFAULT_SETTINGS_VARIANT",
|
||||
"AYON_PROJECT_NAME",
|
||||
"AYON_FOLDER_PATH",
|
||||
"AYON_TASK_NAME",
|
||||
|
|
|
|||
|
|
@ -102,6 +102,8 @@ class BlenderSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline,
|
|||
"FTRACK_API_USER",
|
||||
"FTRACK_SERVER",
|
||||
"OPENPYPE_SG_USER",
|
||||
"AYON_BUNDLE_NAME",
|
||||
"AYON_DEFAULT_SETTINGS_VARIANT",
|
||||
"AYON_PROJECT_NAME",
|
||||
"AYON_FOLDER_PATH",
|
||||
"AYON_TASK_NAME",
|
||||
|
|
|
|||
|
|
@ -225,6 +225,8 @@ class FusionSubmitDeadline(
|
|||
"FTRACK_API_KEY",
|
||||
"FTRACK_API_USER",
|
||||
"FTRACK_SERVER",
|
||||
"AYON_BUNDLE_NAME",
|
||||
"AYON_DEFAULT_SETTINGS_VARIANT",
|
||||
"AYON_PROJECT_NAME",
|
||||
"AYON_FOLDER_PATH",
|
||||
"AYON_TASK_NAME",
|
||||
|
|
|
|||
|
|
@ -273,6 +273,8 @@ class HarmonySubmitDeadline(
|
|||
"FTRACK_API_KEY",
|
||||
"FTRACK_API_USER",
|
||||
"FTRACK_SERVER",
|
||||
"AYON_BUNDLE_NAME",
|
||||
"AYON_DEFAULT_SETTINGS_VARIANT",
|
||||
"AYON_PROJECT_NAME",
|
||||
"AYON_FOLDER_PATH",
|
||||
"AYON_TASK_NAME",
|
||||
|
|
|
|||
|
|
@ -106,12 +106,14 @@ class MaxSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline,
|
|||
"FTRACK_API_USER",
|
||||
"FTRACK_SERVER",
|
||||
"OPENPYPE_SG_USER",
|
||||
"AYON_BUNDLE_NAME",
|
||||
"AYON_DEFAULT_SETTINGS_VARIANT",
|
||||
"AYON_PROJECT_NAME",
|
||||
"AYON_FOLDER_PATH",
|
||||
"AYON_TASK_NAME",
|
||||
"AYON_WORKDIR",
|
||||
"AYON_APP_NAME",
|
||||
"IS_TEST"
|
||||
"IS_TEST",
|
||||
]
|
||||
|
||||
environment = {
|
||||
|
|
|
|||
|
|
@ -207,6 +207,8 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline,
|
|||
"FTRACK_API_USER",
|
||||
"FTRACK_SERVER",
|
||||
"OPENPYPE_SG_USER",
|
||||
"AYON_BUNDLE_NAME",
|
||||
"AYON_DEFAULT_SETTINGS_VARIANT",
|
||||
"AYON_PROJECT_NAME",
|
||||
"AYON_FOLDER_PATH",
|
||||
"AYON_TASK_NAME",
|
||||
|
|
|
|||
|
|
@ -376,6 +376,8 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin,
|
|||
keys = [
|
||||
"PYTHONPATH",
|
||||
"PATH",
|
||||
"AYON_BUNDLE_NAME",
|
||||
"AYON_DEFAULT_SETTINGS_VARIANT",
|
||||
"AYON_PROJECT_NAME",
|
||||
"AYON_FOLDER_PATH",
|
||||
"AYON_TASK_NAME",
|
||||
|
|
@ -388,7 +390,6 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin,
|
|||
"TOOL_ENV",
|
||||
"FOUNDRY_LICENSE",
|
||||
"OPENPYPE_SG_USER",
|
||||
"AYON_BUNDLE_NAME",
|
||||
]
|
||||
|
||||
# add allowed keys from preset if any
|
||||
|
|
|
|||
|
|
@ -133,6 +133,9 @@ class ProcessSubmittedCacheJobOnFarm(pyblish.api.InstancePlugin,
|
|||
"AYON_RENDER_JOB": "0",
|
||||
"AYON_REMOTE_PUBLISH": "0",
|
||||
"AYON_BUNDLE_NAME": os.environ["AYON_BUNDLE_NAME"],
|
||||
"AYON_DEFAULT_SETTINGS_VARIANT": (
|
||||
os.environ["AYON_DEFAULT_SETTINGS_VARIANT"]
|
||||
),
|
||||
}
|
||||
|
||||
# add environments from self.environ_keys
|
||||
|
|
|
|||
|
|
@ -210,6 +210,9 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin,
|
|||
"AYON_RENDER_JOB": "0",
|
||||
"AYON_REMOTE_PUBLISH": "0",
|
||||
"AYON_BUNDLE_NAME": os.environ["AYON_BUNDLE_NAME"],
|
||||
"AYON_DEFAULT_SETTINGS_VARIANT": (
|
||||
os.environ["AYON_DEFAULT_SETTINGS_VARIANT"]
|
||||
),
|
||||
}
|
||||
|
||||
# add environments from self.environ_keys
|
||||
|
|
|
|||
|
|
@ -13,7 +13,7 @@ from Deadline.Scripting import (
|
|||
FileUtils,
|
||||
DirectoryUtils,
|
||||
)
|
||||
__version__ = "1.0.1"
|
||||
__version__ = "1.1.0"
|
||||
VERSION_REGEX = re.compile(
|
||||
r"(?P<major>0|[1-9]\d*)"
|
||||
r"\.(?P<minor>0|[1-9]\d*)"
|
||||
|
|
@ -463,19 +463,13 @@ def inject_ayon_environment(deadlinePlugin):
|
|||
export_url = os.path.join(tempfile.gettempdir(), temp_file_name)
|
||||
print(">>> Temporary path: {}".format(export_url))
|
||||
|
||||
args = [
|
||||
"--headless",
|
||||
"extractenvironments",
|
||||
export_url
|
||||
]
|
||||
|
||||
add_kwargs = {
|
||||
"envgroup": "farm",
|
||||
}
|
||||
# Support backwards compatible keys
|
||||
for key, env_keys in (
|
||||
("project", ["AYON_PROJECT_NAME", "AVALON_PROJECT"]),
|
||||
("asset", ["AYON_FOLDER_PATH", "AVALON_ASSET"]),
|
||||
("folder", ["AYON_FOLDER_PATH", "AVALON_ASSET"]),
|
||||
("task", ["AYON_TASK_NAME", "AVALON_TASK"]),
|
||||
("app", ["AYON_APP_NAME", "AVALON_APP_NAME"]),
|
||||
):
|
||||
|
|
@ -486,18 +480,37 @@ def inject_ayon_environment(deadlinePlugin):
|
|||
break
|
||||
add_kwargs[key] = value
|
||||
|
||||
if job.GetJobEnvironmentKeyValue("IS_TEST"):
|
||||
args.append("--automatic-tests")
|
||||
|
||||
if all(add_kwargs.values()):
|
||||
for key, value in add_kwargs.items():
|
||||
args.extend(["--{}".format(key), value])
|
||||
else:
|
||||
if not all(add_kwargs.values()):
|
||||
raise RuntimeError((
|
||||
"Missing required env vars: AYON_PROJECT_NAME,"
|
||||
" AYON_FOLDER_PATH, AYON_TASK_NAME, AYON_APP_NAME"
|
||||
))
|
||||
|
||||
# Use applications addon arguments
|
||||
# TODO validate if applications addon should be used
|
||||
args = [
|
||||
"--headless",
|
||||
"addon",
|
||||
"applications",
|
||||
"extractenvironments",
|
||||
export_url
|
||||
]
|
||||
# Backwards compatibility for older versions
|
||||
legacy_args = [
|
||||
"--headless",
|
||||
"extractenvironments",
|
||||
export_url
|
||||
]
|
||||
if job.GetJobEnvironmentKeyValue("IS_TEST"):
|
||||
args.append("--automatic-tests")
|
||||
|
||||
for key, value in add_kwargs.items():
|
||||
args.extend(["--{}".format(key), value])
|
||||
# Legacy arguments expect '--asset' instead of '--folder'
|
||||
if key == "folder":
|
||||
key = "asset"
|
||||
legacy_args.extend(["--{}".format(key), value])
|
||||
|
||||
environment = {
|
||||
"AYON_SERVER_URL": ayon_server_url,
|
||||
"AYON_API_KEY": ayon_api_key,
|
||||
|
|
@ -516,9 +529,18 @@ def inject_ayon_environment(deadlinePlugin):
|
|||
)
|
||||
|
||||
if process_exitcode != 0:
|
||||
raise RuntimeError(
|
||||
"Failed to run Ayon process to extract environments."
|
||||
print(
|
||||
"Failed to run AYON process to extract environments. Trying"
|
||||
" to use legacy arguments."
|
||||
)
|
||||
legacy_args_str = subprocess.list2cmdline(legacy_args)
|
||||
process_exitcode = deadlinePlugin.RunProcess(
|
||||
exe, legacy_args_str, os.path.dirname(exe), -1
|
||||
)
|
||||
if process_exitcode != 0:
|
||||
raise RuntimeError(
|
||||
"Failed to run AYON process to extract environments."
|
||||
)
|
||||
|
||||
print(">>> Loading file ...")
|
||||
with open(export_url) as fp:
|
||||
|
|
|
|||
|
|
@ -168,7 +168,7 @@ class JobQueueAddon(AYONAddon):
|
|||
@classmethod
|
||||
def start_worker(cls, app_name, server_url=None):
|
||||
import requests
|
||||
from ayon_core.lib import ApplicationManager
|
||||
from ayon_applications import ApplicationManager
|
||||
|
||||
if not server_url:
|
||||
server_url = cls.get_server_url_from_settings()
|
||||
|
|
|
|||
|
|
@ -308,31 +308,45 @@ class BaseCreateRoyalRenderJob(pyblish.api.InstancePlugin,
|
|||
export_url = os.path.join(tempfile.gettempdir(), temp_file_name)
|
||||
print(">>> Temporary path: {}".format(export_url))
|
||||
|
||||
args = [
|
||||
"--headless",
|
||||
"extractenvironments",
|
||||
export_url
|
||||
]
|
||||
|
||||
anatomy_data = instance.context.data["anatomyData"]
|
||||
addons_manager = instance.context.data["ayonAddonsManager"]
|
||||
applications_addon = addons_manager.get_enabled_addon("applications")
|
||||
|
||||
folder_key = "folder"
|
||||
if applications_addon is None:
|
||||
# Use 'asset' when applications addon command is not used
|
||||
folder_key = "asset"
|
||||
|
||||
add_kwargs = {
|
||||
"project": anatomy_data["project"]["name"],
|
||||
"asset": instance.context.data["folderPath"],
|
||||
folder_key: instance.context.data["folderPath"],
|
||||
"task": anatomy_data["task"]["name"],
|
||||
"app": instance.context.data.get("appName"),
|
||||
"envgroup": "farm"
|
||||
}
|
||||
|
||||
if os.getenv('IS_TEST'):
|
||||
args.append("--automatic-tests")
|
||||
|
||||
if not all(add_kwargs.values()):
|
||||
raise RuntimeError((
|
||||
"Missing required env vars: AYON_PROJECT_NAME, AYON_FOLDER_PATH,"
|
||||
" AYON_TASK_NAME, AYON_APP_NAME"
|
||||
))
|
||||
|
||||
args = ["--headless"]
|
||||
# Use applications addon to extract environments
|
||||
# NOTE this is for backwards compatibility, the global command
|
||||
# will be removed in future and only applications addon command
|
||||
# should be used.
|
||||
if applications_addon is not None:
|
||||
args.extend(["addon", "applications"])
|
||||
|
||||
args.extend([
|
||||
"extractenvironments",
|
||||
export_url
|
||||
])
|
||||
|
||||
if os.getenv('IS_TEST'):
|
||||
args.append("--automatic-tests")
|
||||
|
||||
for key, value in add_kwargs.items():
|
||||
args.extend([f"--{key}", value])
|
||||
self.log.debug("Executing: {}".format(" ".join(args)))
|
||||
|
|
|
|||
|
|
@ -1,4 +1,4 @@
|
|||
from ayon_core.lib.applications import PostLaunchHook, LaunchTypes
|
||||
from ayon_applications import PostLaunchHook, LaunchTypes
|
||||
|
||||
|
||||
class PostStartTimerHook(PostLaunchHook):
|
||||
|
|
|
|||
|
|
@ -97,8 +97,8 @@ def install_host(host):
|
|||
"""Install `host` into the running Python session.
|
||||
|
||||
Args:
|
||||
host (module): A Python module containing the Avalon
|
||||
avalon host-interface.
|
||||
host (HostBase): A host interface object.
|
||||
|
||||
"""
|
||||
global _is_installed
|
||||
|
||||
|
|
@ -154,6 +154,13 @@ def install_host(host):
|
|||
|
||||
|
||||
def install_ayon_plugins(project_name=None, host_name=None):
|
||||
"""Install AYON core plugins and make sure the core is initialized.
|
||||
|
||||
Args:
|
||||
project_name (Optional[str]): Name of project to install plugins for.
|
||||
host_name (Optional[str]): Name of host to install plugins for.
|
||||
|
||||
"""
|
||||
# Make sure global AYON connection has set site id and version
|
||||
# - this is necessary if 'install_host' is not called
|
||||
initialize_ayon_connection()
|
||||
|
|
@ -223,6 +230,12 @@ def install_ayon_plugins(project_name=None, host_name=None):
|
|||
|
||||
|
||||
def install_openpype_plugins(project_name=None, host_name=None):
|
||||
"""Install AYON core plugins and make sure the core is initialized.
|
||||
|
||||
Deprecated:
|
||||
Use `install_ayon_plugins` instead.
|
||||
|
||||
"""
|
||||
install_ayon_plugins(project_name, host_name)
|
||||
|
||||
|
||||
|
|
@ -281,47 +294,6 @@ def deregister_host():
|
|||
_registered_host["_"] = None
|
||||
|
||||
|
||||
def debug_host():
|
||||
"""A debug host, useful to debugging features that depend on a host"""
|
||||
|
||||
host = types.ModuleType("debugHost")
|
||||
|
||||
def ls():
|
||||
containers = [
|
||||
{
|
||||
"representation": "ee-ft-a-uuid1",
|
||||
"schema": "openpype:container-1.0",
|
||||
"name": "Bruce01",
|
||||
"objectName": "Bruce01_node",
|
||||
"namespace": "_bruce01_",
|
||||
"version": 3,
|
||||
},
|
||||
{
|
||||
"representation": "aa-bc-s-uuid2",
|
||||
"schema": "openpype:container-1.0",
|
||||
"name": "Bruce02",
|
||||
"objectName": "Bruce01_node",
|
||||
"namespace": "_bruce02_",
|
||||
"version": 2,
|
||||
}
|
||||
]
|
||||
|
||||
for container in containers:
|
||||
yield container
|
||||
|
||||
host.__dict__.update({
|
||||
"ls": ls,
|
||||
"open_file": lambda fname: None,
|
||||
"save_file": lambda fname: None,
|
||||
"current_file": lambda: os.path.expanduser("~/temp.txt"),
|
||||
"has_unsaved_changes": lambda: False,
|
||||
"work_root": lambda: os.path.expanduser("~/temp"),
|
||||
"file_extensions": lambda: ["txt"],
|
||||
})
|
||||
|
||||
return host
|
||||
|
||||
|
||||
def get_current_host_name():
|
||||
"""Current host name.
|
||||
|
||||
|
|
@ -347,7 +319,8 @@ def get_global_context():
|
|||
Use 'get_current_context' to make sure you'll get current host integration
|
||||
context info.
|
||||
|
||||
Example:
|
||||
Example::
|
||||
|
||||
{
|
||||
"project_name": "Commercial",
|
||||
"folder_path": "Bunny",
|
||||
|
|
@ -411,42 +384,67 @@ def get_current_project_entity(fields=None):
|
|||
return ayon_api.get_project(project_name, fields=fields)
|
||||
|
||||
|
||||
def get_current_project_folder(folder_path=None, folder_id=None, fields=None):
|
||||
def get_current_folder_entity(fields=None):
|
||||
"""Helper function to get folder entity based on current context.
|
||||
|
||||
This function should be called only in process where host is installed.
|
||||
|
||||
Folder is found out based on passed folder path or id (not both). Folder
|
||||
path is not used for filtering if folder id is passed. When both
|
||||
folder path and id are missing then current folder path is used.
|
||||
Folder is based on current context project name and folder path.
|
||||
|
||||
Args:
|
||||
folder_path (Union[str, None]): Folder path used for filter.
|
||||
folder_id (Union[str, None]): Folder id. If entered then
|
||||
is used as only filter.
|
||||
fields (Optional[Iterable[str]]): Limit returned data of folder entity
|
||||
to specific keys.
|
||||
|
||||
Returns:
|
||||
Union[dict[str, Any], None]: Fodler entity or None.
|
||||
Union[dict[str, Any], None]: Folder entity or None.
|
||||
|
||||
"""
|
||||
context = get_current_context()
|
||||
project_name = context["project_name"]
|
||||
folder_path = context["folder_path"]
|
||||
|
||||
project_name = get_current_project_name()
|
||||
if folder_id:
|
||||
return ayon_api.get_folder_by_id(
|
||||
project_name, folder_id, fields=fields
|
||||
)
|
||||
|
||||
if not folder_path:
|
||||
folder_path = get_current_folder_path()
|
||||
# Skip if is not set even on context
|
||||
if not folder_path:
|
||||
return None
|
||||
# Skip if is not set even on context
|
||||
if not project_name or not folder_path:
|
||||
return None
|
||||
return ayon_api.get_folder_by_path(
|
||||
project_name, folder_path, fields=fields
|
||||
)
|
||||
|
||||
|
||||
def get_current_task_entity(fields=None):
|
||||
"""Helper function to get task entity based on current context.
|
||||
|
||||
This function should be called only in process where host is installed.
|
||||
|
||||
Task is based on current context project name, folder path
|
||||
and task name.
|
||||
|
||||
Args:
|
||||
fields (Optional[Iterable[str]]): Limit returned data of task entity
|
||||
to specific keys.
|
||||
|
||||
Returns:
|
||||
Union[dict[str, Any], None]: Task entity or None.
|
||||
|
||||
"""
|
||||
context = get_current_context()
|
||||
project_name = context["project_name"]
|
||||
folder_path = context["folder_path"]
|
||||
task_name = context["task_name"]
|
||||
|
||||
# Skip if is not set even on context
|
||||
if not project_name or not folder_path or not task_name:
|
||||
return None
|
||||
folder_entity = ayon_api.get_folder_by_path(
|
||||
project_name, folder_path, fields={"id"}
|
||||
)
|
||||
if not folder_entity:
|
||||
return None
|
||||
return ayon_api.get_task_by_name(
|
||||
project_name, folder_entity["id"], task_name, fields=fields
|
||||
)
|
||||
|
||||
|
||||
def is_representation_from_latest(representation):
|
||||
"""Return whether the representation is from latest version
|
||||
|
||||
|
|
@ -515,88 +513,13 @@ def get_current_context_template_data(settings=None):
|
|||
)
|
||||
|
||||
|
||||
def get_workdir_from_session(session=None, template_key=None):
|
||||
"""Template data for template fill from session keys.
|
||||
|
||||
Args:
|
||||
session (Union[Dict[str, str], None]): The Session to use. If not
|
||||
provided use the currently active global Session.
|
||||
template_key (str): Prepared template key from which workdir is
|
||||
calculated.
|
||||
|
||||
Returns:
|
||||
str: Workdir path.
|
||||
"""
|
||||
|
||||
if session is not None:
|
||||
project_name = session["AYON_PROJECT_NAME"]
|
||||
host_name = session["AYON_HOST_NAME"]
|
||||
else:
|
||||
project_name = get_current_project_name()
|
||||
host_name = get_current_host_name()
|
||||
template_data = get_template_data_from_session(session)
|
||||
|
||||
if not template_key:
|
||||
task_type = template_data["task"]["type"]
|
||||
template_key = get_workfile_template_key(
|
||||
project_name,
|
||||
task_type,
|
||||
host_name,
|
||||
)
|
||||
|
||||
anatomy = Anatomy(project_name)
|
||||
template_obj = anatomy.get_template_item("work", template_key, "directory")
|
||||
path = template_obj.format_strict(template_data)
|
||||
if path:
|
||||
path = os.path.normpath(path)
|
||||
return path
|
||||
|
||||
|
||||
def get_custom_workfile_template_from_session(
|
||||
session=None, project_settings=None
|
||||
):
|
||||
"""Filter and fill workfile template profiles by current context.
|
||||
|
||||
This function cab be used only inside host where context is set.
|
||||
|
||||
Args:
|
||||
session (Optional[Dict[str, str]]): Session from which are taken
|
||||
data.
|
||||
project_settings(Optional[Dict[str, Any]]): Project settings.
|
||||
|
||||
Returns:
|
||||
str: Path to template or None if none of profiles match current
|
||||
context. (Existence of formatted path is not validated.)
|
||||
"""
|
||||
|
||||
if session is not None:
|
||||
project_name = session["AYON_PROJECT_NAME"]
|
||||
folder_path = session["AYON_FOLDER_PATH"]
|
||||
task_name = session["AYON_TASK_NAME"]
|
||||
host_name = session["AYON_HOST_NAME"]
|
||||
else:
|
||||
context = get_current_context()
|
||||
project_name = context["project_name"]
|
||||
folder_path = context["folder_path"]
|
||||
task_name = context["task_name"]
|
||||
host_name = get_current_host_name()
|
||||
|
||||
return get_custom_workfile_template_by_string_context(
|
||||
project_name,
|
||||
folder_path,
|
||||
task_name,
|
||||
host_name,
|
||||
project_settings=project_settings
|
||||
)
|
||||
|
||||
|
||||
def get_current_context_custom_workfile_template(project_settings=None):
|
||||
"""Filter and fill workfile template profiles by current context.
|
||||
|
||||
This function can be used only inside host where context is set.
|
||||
This function can be used only inside host where current context is set.
|
||||
|
||||
Args:
|
||||
project_settings(Optional[Dict[str, Any]]): Project settings.
|
||||
project_settings (Optional[dict[str, Any]]): Project settings
|
||||
|
||||
Returns:
|
||||
str: Path to template or None if none of profiles match current
|
||||
|
|
|
|||
|
|
@ -8,7 +8,7 @@ Discovers Creator plugins to be able create new instances and convert existing i
|
|||
|
||||
Publish plugins are loaded because they can also define attributes definitions. These are less product type specific To be able define attributes Publish plugin must inherit from `AYONPyblishPluginMixin` and must override `get_attribute_defs` class method which must return list of attribute definitions. Values of publish plugin definitions are stored per plugin name under `publish_attributes`. Also can override `convert_attribute_values` class method which gives ability to modify values on instance before are used in CreatedInstance. Method `convert_attribute_values` can be also used without `get_attribute_defs` to modify values when changing compatibility (remove metadata from instance because are irrelevant).
|
||||
|
||||
Possible attribute definitions can be found in `openpype/pipeline/lib/attribute_definitions.py`.
|
||||
Possible attribute definitions can be found in `ayon_core/lib/attribute_definitions.py`.
|
||||
|
||||
Except creating and removing instances are all changes not automatically propagated to host context (scene/workfile/...) to propagate changes call `save_changes` which trigger update of all instances in context using Creators implementation.
|
||||
|
||||
|
|
|
|||
|
|
@ -1,36 +0,0 @@
|
|||
import logging
|
||||
from ayon_core.pipeline import get_current_project_name
|
||||
|
||||
Session = {}
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
log.warning(
|
||||
"DEPRECATION WARNING: 'legacy_io' is deprecated and will be removed in"
|
||||
" future versions of ayon-core addon."
|
||||
"\nReading from Session won't give you updated information and changing"
|
||||
" values won't affect global state of a process."
|
||||
)
|
||||
|
||||
|
||||
def session_data_from_environment(context_keys=False):
|
||||
return {}
|
||||
|
||||
|
||||
def is_installed():
|
||||
return False
|
||||
|
||||
|
||||
def install():
|
||||
pass
|
||||
|
||||
|
||||
def uninstall():
|
||||
pass
|
||||
|
||||
|
||||
def active_project(*args, **kwargs):
|
||||
return get_current_project_name()
|
||||
|
||||
|
||||
def current_project(*args, **kwargs):
|
||||
return get_current_project_name()
|
||||
|
|
@ -1,64 +0,0 @@
|
|||
import os
|
||||
from ayon_core.lib import ApplicationManager
|
||||
from ayon_core.pipeline import load
|
||||
|
||||
|
||||
def existing_djv_path():
|
||||
app_manager = ApplicationManager()
|
||||
djv_list = []
|
||||
|
||||
for app_name, app in app_manager.applications.items():
|
||||
if 'djv' in app_name and app.find_executable():
|
||||
djv_list.append(app_name)
|
||||
|
||||
return djv_list
|
||||
|
||||
|
||||
class OpenInDJV(load.LoaderPlugin):
|
||||
"""Open Image Sequence with system default"""
|
||||
|
||||
djv_list = existing_djv_path()
|
||||
product_types = {"*"} if djv_list else []
|
||||
representations = ["*"]
|
||||
extensions = {
|
||||
"cin", "dpx", "avi", "dv", "gif", "flv", "mkv", "mov", "mpg", "mpeg",
|
||||
"mp4", "m4v", "mxf", "iff", "z", "ifl", "jpeg", "jpg", "jfif", "lut",
|
||||
"1dl", "exr", "pic", "png", "ppm", "pnm", "pgm", "pbm", "rla", "rpf",
|
||||
"sgi", "rgba", "rgb", "bw", "tga", "tiff", "tif", "img", "h264",
|
||||
}
|
||||
|
||||
label = "Open in DJV"
|
||||
order = -10
|
||||
icon = "play-circle"
|
||||
color = "orange"
|
||||
|
||||
def load(self, context, name, namespace, data):
|
||||
import clique
|
||||
|
||||
path = self.filepath_from_context(context)
|
||||
directory = os.path.dirname(path)
|
||||
|
||||
pattern = clique.PATTERNS["frames"]
|
||||
files = os.listdir(directory)
|
||||
collections, remainder = clique.assemble(
|
||||
files,
|
||||
patterns=[pattern],
|
||||
minimum_items=1
|
||||
)
|
||||
|
||||
if not remainder:
|
||||
sequence = collections[0]
|
||||
first_image = list(sequence)[0]
|
||||
else:
|
||||
first_image = path
|
||||
filepath = os.path.normpath(os.path.join(directory, first_image))
|
||||
|
||||
self.log.info("Opening : {}".format(filepath))
|
||||
|
||||
last_djv_version = sorted(self.djv_list)[-1]
|
||||
|
||||
app_manager = ApplicationManager()
|
||||
djv = app_manager.applications.get(last_djv_version)
|
||||
djv.arguments.append(filepath)
|
||||
|
||||
app_manager.launch(last_djv_version)
|
||||
|
|
@ -1,14 +1,13 @@
|
|||
"""
|
||||
Requires:
|
||||
None
|
||||
|
||||
Provides:
|
||||
context -> host (str)
|
||||
context -> hostName (str)
|
||||
"""
|
||||
import os
|
||||
import pyblish.api
|
||||
|
||||
from ayon_core.lib import ApplicationManager
|
||||
|
||||
|
||||
class CollectHostName(pyblish.api.ContextPlugin):
|
||||
"""Collect avalon host name to context."""
|
||||
|
|
@ -18,30 +17,8 @@ class CollectHostName(pyblish.api.ContextPlugin):
|
|||
|
||||
def process(self, context):
|
||||
host_name = context.data.get("hostName")
|
||||
app_name = context.data.get("appName")
|
||||
app_label = context.data.get("appLabel")
|
||||
# Don't override value if is already set
|
||||
if host_name and app_name and app_label:
|
||||
if host_name:
|
||||
return
|
||||
|
||||
# Use AYON_HOST_NAME to get host name if available
|
||||
if not host_name:
|
||||
host_name = os.environ.get("AYON_HOST_NAME")
|
||||
|
||||
# Use AYON_APP_NAME to get full app name
|
||||
if not app_name:
|
||||
app_name = os.environ.get("AYON_APP_NAME")
|
||||
|
||||
# Fill missing values based on app full name
|
||||
if (not host_name or not app_label) and app_name:
|
||||
app_manager = ApplicationManager()
|
||||
app = app_manager.applications.get(app_name)
|
||||
if app:
|
||||
if not host_name:
|
||||
host_name = app.host_name
|
||||
if not app_label:
|
||||
app_label = app.full_label
|
||||
|
||||
context.data["hostName"] = host_name
|
||||
context.data["appName"] = app_name
|
||||
context.data["appLabel"] = app_label
|
||||
context.data["hostName"] = os.environ.get("AYON_HOST_NAME")
|
||||
|
|
|
|||
|
|
@ -1,5 +1,5 @@
|
|||
from pyblish import api
|
||||
from ayon_core.settings import get_current_project_settings
|
||||
from ayon_core.settings import get_project_settings
|
||||
|
||||
|
||||
class CollectSettings(api.ContextPlugin):
|
||||
|
|
@ -9,4 +9,9 @@ class CollectSettings(api.ContextPlugin):
|
|||
label = "Collect Settings"
|
||||
|
||||
def process(self, context):
|
||||
context.data["project_settings"] = get_current_project_settings()
|
||||
project_name = context.data["projectName"]
|
||||
self.log.debug(
|
||||
"Collecting settings for project: {}".format(project_name)
|
||||
)
|
||||
project_settings = get_project_settings(project_name)
|
||||
context.data["project_settings"] = project_settings
|
||||
|
|
|
|||
|
|
@ -27,7 +27,7 @@ class ExtractBurnin(publish.Extractor):
|
|||
Extractor to create video with pre-defined burnins from
|
||||
existing extracted video representation.
|
||||
|
||||
It will work only on represenations having `burnin = True` or
|
||||
It will work only on representations having `burnin = True` or
|
||||
`tags` including `burnin`
|
||||
"""
|
||||
|
||||
|
|
@ -125,7 +125,7 @@ class ExtractBurnin(publish.Extractor):
|
|||
|
||||
burnin_defs = copy.deepcopy(src_burnin_defs)
|
||||
|
||||
# Filter output definition by `burnin` represetation key
|
||||
# Filter output definition by `burnin` representation key
|
||||
repre_linked_burnins = [
|
||||
burnin_def
|
||||
for burnin_def in burnin_defs
|
||||
|
|
@ -194,6 +194,16 @@ class ExtractBurnin(publish.Extractor):
|
|||
).format(host_name, product_type, task_name, profile))
|
||||
return
|
||||
|
||||
burnins_per_repres = self._get_burnins_per_representations(
|
||||
instance, burnin_defs
|
||||
)
|
||||
if not burnins_per_repres:
|
||||
self.log.debug(
|
||||
"Skipped instance. No representations found matching a burnin"
|
||||
"definition in: %s", burnin_defs
|
||||
)
|
||||
return
|
||||
|
||||
burnin_options = self._get_burnin_options()
|
||||
|
||||
# Prepare basic data for processing
|
||||
|
|
@ -204,9 +214,6 @@ class ExtractBurnin(publish.Extractor):
|
|||
|
||||
# Args that will execute the script
|
||||
executable_args = ["run", scriptpath]
|
||||
burnins_per_repres = self._get_burnins_per_representations(
|
||||
instance, burnin_defs
|
||||
)
|
||||
for repre, repre_burnin_defs in burnins_per_repres:
|
||||
# Create copy of `_burnin_data` and `_temp_data` for repre.
|
||||
burnin_data = copy.deepcopy(_burnin_data)
|
||||
|
|
@ -371,6 +378,7 @@ class ExtractBurnin(publish.Extractor):
|
|||
# Prepare subprocess arguments
|
||||
args = list(executable_args)
|
||||
args.append(temporary_json_filepath)
|
||||
args.append("--headless")
|
||||
self.log.debug("Executing: {}".format(" ".join(args)))
|
||||
|
||||
# Run burnin script
|
||||
|
|
@ -540,7 +548,7 @@ class ExtractBurnin(publish.Extractor):
|
|||
return burnin_data, temp_data
|
||||
|
||||
def repres_is_valid(self, repre):
|
||||
"""Validation if representaion should be processed.
|
||||
"""Validation if representation should be processed.
|
||||
|
||||
Args:
|
||||
repre (dict): Representation which should be checked.
|
||||
|
|
@ -572,7 +580,7 @@ class ExtractBurnin(publish.Extractor):
|
|||
tags (list): Tags of processed representation.
|
||||
|
||||
Returns:
|
||||
list: Containg all burnin definitions matching entered tags.
|
||||
list: Contain all burnin definitions matching entered tags.
|
||||
|
||||
"""
|
||||
filtered_burnins = []
|
||||
|
|
@ -597,7 +605,7 @@ class ExtractBurnin(publish.Extractor):
|
|||
|
||||
Store data to `temp_data` for keys "full_input_path" which is full path
|
||||
to source files optionally with sequence formatting,
|
||||
"full_output_path" full path to otput with optionally with sequence
|
||||
"full_output_path" full path to output with optionally with sequence
|
||||
formatting, "full_input_paths" list of all source files which will be
|
||||
deleted when burnin script ends, "repre_files" list of output
|
||||
filenames.
|
||||
|
|
@ -747,7 +755,7 @@ class ExtractBurnin(publish.Extractor):
|
|||
profile (dict): Profile from presets matching current context.
|
||||
|
||||
Returns:
|
||||
list: Containg all valid output definitions.
|
||||
list: Contain all valid output definitions.
|
||||
"""
|
||||
filtered_burnin_defs = []
|
||||
|
||||
|
|
@ -768,7 +776,7 @@ class ExtractBurnin(publish.Extractor):
|
|||
):
|
||||
self.log.debug((
|
||||
"Skipped burnin definition \"{}\". Family"
|
||||
" fiters ({}) does not match current instance families: {}"
|
||||
" filters ({}) does not match current instance families: {}"
|
||||
).format(
|
||||
filename_suffix, str(families_filters), str(families)
|
||||
))
|
||||
|
|
|
|||
|
|
@ -619,7 +619,7 @@ class ExtractReview(pyblish.api.InstancePlugin):
|
|||
# Prepare input and output filepaths
|
||||
self.input_output_paths(new_repre, output_def, temp_data)
|
||||
|
||||
# Set output frames len to 1 when ouput is single image
|
||||
# Set output frames len to 1 when output is single image
|
||||
if (
|
||||
temp_data["output_ext_is_image"]
|
||||
and not temp_data["output_is_sequence"]
|
||||
|
|
@ -955,7 +955,7 @@ class ExtractReview(pyblish.api.InstancePlugin):
|
|||
|
||||
self.log.debug("New representation ext: `{}`".format(output_ext))
|
||||
|
||||
# Output is image file sequence witht frames
|
||||
# Output is image file sequence with frames
|
||||
output_ext_is_image = bool(output_ext in self.image_exts)
|
||||
output_is_sequence = bool(
|
||||
output_ext_is_image
|
||||
|
|
@ -967,7 +967,7 @@ class ExtractReview(pyblish.api.InstancePlugin):
|
|||
frame_end = temp_data["output_frame_end"]
|
||||
|
||||
filename_base = "{}_{}".format(filename, filename_suffix)
|
||||
# Temporary tempalte for frame filling. Example output:
|
||||
# Temporary template for frame filling. Example output:
|
||||
# "basename.%04d.exr" when `frame_end` == 1001
|
||||
repr_file = "{}.%{:0>2}d.{}".format(
|
||||
filename_base, len(str(frame_end)), output_ext
|
||||
|
|
@ -997,7 +997,7 @@ class ExtractReview(pyblish.api.InstancePlugin):
|
|||
self.log.debug("Creating dir: {}".format(dst_staging_dir))
|
||||
os.makedirs(dst_staging_dir)
|
||||
|
||||
# Store stagingDir to representaion
|
||||
# Store stagingDir to representation
|
||||
new_repre["stagingDir"] = dst_staging_dir
|
||||
|
||||
# Store paths to temp data
|
||||
|
|
@ -1225,19 +1225,13 @@ class ExtractReview(pyblish.api.InstancePlugin):
|
|||
filters = []
|
||||
|
||||
# if reformat input video file is already reforamted from upstream
|
||||
reformat_in_baking = bool("reformated" in new_repre["tags"])
|
||||
reformat_in_baking = (
|
||||
"reformatted" in new_repre["tags"]
|
||||
# Backwards compatibility
|
||||
or "reformated" in new_repre["tags"]
|
||||
)
|
||||
self.log.debug("reformat_in_baking: `{}`".format(reformat_in_baking))
|
||||
|
||||
# Get instance data
|
||||
pixel_aspect = temp_data["pixel_aspect"]
|
||||
|
||||
if reformat_in_baking:
|
||||
self.log.debug((
|
||||
"Using resolution from input. It is already "
|
||||
"reformated from upstream process"
|
||||
))
|
||||
pixel_aspect = 1
|
||||
|
||||
# NOTE Skipped using instance's resolution
|
||||
full_input_path_single_file = temp_data["full_input_path_single_file"]
|
||||
try:
|
||||
|
|
@ -1268,7 +1262,7 @@ class ExtractReview(pyblish.api.InstancePlugin):
|
|||
if reformat_in_baking:
|
||||
self.log.debug((
|
||||
"Using resolution from input. It is already "
|
||||
"reformated from upstream process"
|
||||
"reformatted from upstream process"
|
||||
))
|
||||
pixel_aspect = 1
|
||||
output_width = input_width
|
||||
|
|
@ -1374,7 +1368,7 @@ class ExtractReview(pyblish.api.InstancePlugin):
|
|||
# Make sure output width and height is not an odd number
|
||||
# When this can happen:
|
||||
# - if output definition has set width and height with odd number
|
||||
# - `instance.data` contain width and height with odd numbeer
|
||||
# - `instance.data` contain width and height with odd number
|
||||
if output_width % 2 != 0:
|
||||
self.log.warning((
|
||||
"Converting output width from odd to even number. {} -> {}"
|
||||
|
|
@ -1555,7 +1549,7 @@ class ExtractReview(pyblish.api.InstancePlugin):
|
|||
custom_tags (list): Custom Tags of processed representation.
|
||||
|
||||
Returns:
|
||||
list: Containg all output definitions matching entered tags.
|
||||
list: Containing all output definitions matching entered tags.
|
||||
"""
|
||||
|
||||
filtered_outputs = []
|
||||
|
|
@ -1820,8 +1814,8 @@ class OverscanCrop:
|
|||
"""
|
||||
# crop=width:height:x:y - explicit start x, y position
|
||||
# crop=width:height - x, y are related to center by width/height
|
||||
# pad=width:heigth:x:y - explicit start x, y position
|
||||
# pad=width:heigth - x, y are set to 0 by default
|
||||
# pad=width:height:x:y - explicit start x, y position
|
||||
# pad=width:height - x, y are set to 0 by default
|
||||
|
||||
width = self.width()
|
||||
height = self.height()
|
||||
|
|
@ -1869,7 +1863,7 @@ class OverscanCrop:
|
|||
# Replace "px" (and spaces before) with single space
|
||||
string_value = re.sub(r"([ ]+)?px", " ", string_value)
|
||||
string_value = re.sub(r"([ ]+)%", "%", string_value)
|
||||
# Make sure +/- sign at the beggining of string is next to number
|
||||
# Make sure +/- sign at the beginning of string is next to number
|
||||
string_value = re.sub(r"^([\+\-])[ ]+", "\g<1>", string_value)
|
||||
# Make sure +/- sign in the middle has zero spaces before number under
|
||||
# which belongs
|
||||
|
|
|
|||
|
|
@ -90,6 +90,9 @@ class IntegrateHeroVersion(
|
|||
# *but all other plugins must be sucessfully completed
|
||||
|
||||
def process(self, instance):
|
||||
if not self.is_active(instance.data):
|
||||
return
|
||||
|
||||
self.log.debug(
|
||||
"--- Integration of Hero version for product `{}` begins.".format(
|
||||
instance.data["productName"]
|
||||
|
|
|
|||
|
|
@ -201,7 +201,7 @@ def get_current_project_settings():
|
|||
Project name should be stored in environment variable `AYON_PROJECT_NAME`.
|
||||
This function should be used only in host context where environment
|
||||
variable must be set and should not happen that any part of process will
|
||||
change the value of the enviornment variable.
|
||||
change the value of the environment variable.
|
||||
"""
|
||||
project_name = os.environ.get("AYON_PROJECT_NAME")
|
||||
if not project_name:
|
||||
|
|
@ -209,6 +209,3 @@ def get_current_project_settings():
|
|||
"Missing context project in environemt variable `AYON_PROJECT_NAME`."
|
||||
)
|
||||
return get_project_settings(project_name)
|
||||
|
||||
|
||||
|
||||
|
|
|
|||
|
|
@ -2,6 +2,7 @@ import os
|
|||
|
||||
from ayon_core import resources
|
||||
from ayon_core.lib import Logger, AYONSettingsRegistry
|
||||
from ayon_core.addon import AddonsManager
|
||||
from ayon_core.pipeline.actions import (
|
||||
discover_launcher_actions,
|
||||
LauncherAction,
|
||||
|
|
@ -109,8 +110,8 @@ class ApplicationAction(LauncherAction):
|
|||
def process(self, selection, **kwargs):
|
||||
"""Process the full Application action"""
|
||||
|
||||
from ayon_core.lib import (
|
||||
ApplictionExecutableNotFound,
|
||||
from ayon_applications import (
|
||||
ApplicationExecutableNotFound,
|
||||
ApplicationLaunchFailed,
|
||||
)
|
||||
|
||||
|
|
@ -122,7 +123,7 @@ class ApplicationAction(LauncherAction):
|
|||
**self.data
|
||||
)
|
||||
|
||||
except ApplictionExecutableNotFound as exc:
|
||||
except ApplicationExecutableNotFound as exc:
|
||||
details = exc.details
|
||||
msg = exc.msg
|
||||
log_msg = str(msg)
|
||||
|
|
@ -270,6 +271,8 @@ class ActionsModel:
|
|||
|
||||
self._launcher_tool_reg = AYONSettingsRegistry("launcher_tool")
|
||||
|
||||
self._addons_manager = None
|
||||
|
||||
@property
|
||||
def log(self):
|
||||
if self._log is None:
|
||||
|
|
@ -410,6 +413,11 @@ class ActionsModel:
|
|||
}
|
||||
)
|
||||
|
||||
def _get_addons_manager(self):
|
||||
if self._addons_manager is None:
|
||||
self._addons_manager = AddonsManager()
|
||||
return self._addons_manager
|
||||
|
||||
def _get_no_last_workfile_reg_data(self):
|
||||
try:
|
||||
no_workfile_reg_data = self._launcher_tool_reg.get_item(
|
||||
|
|
@ -489,19 +497,16 @@ class ActionsModel:
|
|||
return action_items
|
||||
|
||||
def _get_applications_action_classes(self):
|
||||
from ayon_core.lib.applications import (
|
||||
CUSTOM_LAUNCH_APP_GROUPS,
|
||||
ApplicationManager,
|
||||
)
|
||||
|
||||
actions = []
|
||||
|
||||
manager = ApplicationManager()
|
||||
addons_manager = self._get_addons_manager()
|
||||
applications_addon = addons_manager.get_enabled_addon("applications")
|
||||
if applications_addon is None:
|
||||
return actions
|
||||
|
||||
manager = applications_addon.get_applications_manager()
|
||||
for full_name, application in manager.applications.items():
|
||||
if (
|
||||
application.group.name in CUSTOM_LAUNCH_APP_GROUPS
|
||||
or not application.enabled
|
||||
):
|
||||
if not application.enabled:
|
||||
continue
|
||||
|
||||
action = type(
|
||||
|
|
|
|||
|
|
@ -1,33 +0,0 @@
|
|||
# TODO remove - kept for kitsu addon which imported it
|
||||
from qtpy import QtWidgets, QtCore, QtGui
|
||||
|
||||
|
||||
class PressHoverButton(QtWidgets.QPushButton):
|
||||
"""
|
||||
Deprecated:
|
||||
Use `openpype.tools.utils.PressHoverButton` instead.
|
||||
"""
|
||||
_mouse_pressed = False
|
||||
_mouse_hovered = False
|
||||
change_state = QtCore.Signal(bool)
|
||||
|
||||
def mousePressEvent(self, event):
|
||||
self._mouse_pressed = True
|
||||
self._mouse_hovered = True
|
||||
self.change_state.emit(self._mouse_hovered)
|
||||
super(PressHoverButton, self).mousePressEvent(event)
|
||||
|
||||
def mouseReleaseEvent(self, event):
|
||||
self._mouse_pressed = False
|
||||
self._mouse_hovered = False
|
||||
self.change_state.emit(self._mouse_hovered)
|
||||
super(PressHoverButton, self).mouseReleaseEvent(event)
|
||||
|
||||
def mouseMoveEvent(self, event):
|
||||
mouse_pos = self.mapFromGlobal(QtGui.QCursor.pos())
|
||||
under_mouse = self.rect().contains(mouse_pos)
|
||||
if under_mouse != self._mouse_hovered:
|
||||
self._mouse_hovered = under_mouse
|
||||
self.change_state.emit(self._mouse_hovered)
|
||||
|
||||
super(PressHoverButton, self).mouseMoveEvent(event)
|
||||
Some files were not shown because too many files have changed in this diff Show more
Loading…
Add table
Add a link
Reference in a new issue