Merge branch 'develop' into bugfix/validate_mesh_no_faces

commit 44e9dfeb6f
Author: Toke Jepsen
Date:   2024-04-15 08:46:54 +01:00 (committed by GitHub)

62 changed files with 541 additions and 247 deletions


@ -15,6 +15,7 @@ from abc import ABCMeta, abstractmethod
import six
import appdirs
import ayon_api
from semver import VersionInfo
from ayon_core import AYON_CORE_ROOT
from ayon_core.lib import Logger, is_dev_mode_enabled
@ -46,6 +47,11 @@ IGNORED_HOSTS_IN_AYON = {
}
IGNORED_MODULES_IN_AYON = set()
# When addon was moved from ayon-core codebase
# - this is used to log the missing addon
MOVED_ADDON_MILESTONE_VERSIONS = {
"applications": VersionInfo(2, 0, 0),
}
# Inherit from `object` for Python 2 hosts
class _ModuleClass(object):
@ -192,6 +198,45 @@ def _get_ayon_addons_information(bundle_info):
return output
def _handle_moved_addons(addon_name, milestone_version, log):
"""Log message that addon version is not compatible with current core.
The function can return a path to addon client code, but only when
ayon-core is used from code (for development); even then a warning
is logged.
Args:
addon_name (str): Addon name.
milestone_version (str): Milestone addon version.
log (logging.Logger): Logger object.
Returns:
Union[str, None]: Addon dir or None.
"""
# Handle addons which were moved out of ayon-core
# - Try to fix it by loading it directly from server addons dir in
# ayon-core repository. But that will work only if ayon-core is
# used from code.
addon_dir = os.path.join(
os.path.dirname(os.path.dirname(AYON_CORE_ROOT)),
"server_addon",
addon_name,
"client",
)
if not os.path.exists(addon_dir):
log.error((
"Addon '{}' is not be available."
" Please update applications addon to '{}' or higher."
).format(addon_name, milestone_version))
return None
log.warning((
"Please update '{}' addon to '{}' or higher."
" Using client code from ayon-core repository."
).format(addon_name, milestone_version))
return addon_dir
def _load_ayon_addons(openpype_modules, modules_key, log):
"""Load AYON addons based on information from server.
@ -249,6 +294,7 @@ def _load_ayon_addons(openpype_modules, modules_key, log):
use_dev_path = dev_addon_info.get("enabled", False)
addon_dir = None
milestone_version = MOVED_ADDON_MILESTONE_VERSIONS.get(addon_name)
if use_dev_path:
addon_dir = dev_addon_info["path"]
if not addon_dir or not os.path.exists(addon_dir):
@ -257,6 +303,16 @@ def _load_ayon_addons(openpype_modules, modules_key, log):
).format(addon_name, addon_version, addon_dir))
continue
elif (
milestone_version is not None
and VersionInfo.parse(addon_version) < milestone_version
):
addon_dir = _handle_moved_addons(
addon_name, milestone_version, log
)
if not addon_dir:
continue
elif addons_dir_exists:
folder_name = "{}_{}".format(addon_name, addon_version)
addon_dir = os.path.join(addons_dir, folder_name)
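
For illustration, a minimal sketch of the milestone gate above, assuming a bundle that still pins a pre-move 'applications' version (the version string is made up):

from semver import VersionInfo

milestone = VersionInfo(2, 0, 0)        # MOVED_ADDON_MILESTONE_VERSIONS["applications"]
addon_version = "1.5.3"                 # hypothetical version from the bundle

if VersionInfo.parse(addon_version) < milestone:
    # Older than the milestone: _handle_moved_addons() either returns the client
    # code path from a source checkout (server_addon/applications/client)
    # or logs an error and the addon is skipped.
    ...
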
@ -336,66 +392,9 @@ def _load_ayon_addons(openpype_modules, modules_key, log):
return addons_to_skip_in_core
def _load_ayon_core_addons_dir(
ignore_addon_names, openpype_modules, modules_key, log
):
addons_dir = os.path.join(AYON_CORE_ROOT, "addons")
if not os.path.exists(addons_dir):
return
imported_modules = []
# Make sure that addons which already have client code are not loaded
# from core again, with older code
filtered_paths = []
for name in os.listdir(addons_dir):
if name in ignore_addon_names:
continue
path = os.path.join(addons_dir, name)
if os.path.isdir(path):
filtered_paths.append(path)
for path in filtered_paths:
while path in sys.path:
sys.path.remove(path)
sys.path.insert(0, path)
for name in os.listdir(path):
fullpath = os.path.join(path, name)
if os.path.isfile(fullpath):
basename, ext = os.path.splitext(name)
if ext != ".py":
continue
else:
basename = name
try:
module = __import__(basename, fromlist=("",))
for attr_name in dir(module):
attr = getattr(module, attr_name)
if (
inspect.isclass(attr)
and issubclass(attr, AYONAddon)
):
new_import_str = "{}.{}".format(modules_key, basename)
sys.modules[new_import_str] = module
setattr(openpype_modules, basename, module)
imported_modules.append(module)
break
except Exception:
log.error(
"Failed to import addon '{}'.".format(fullpath),
exc_info=True
)
return imported_modules
def _load_addons_in_core(
ignore_addon_names, openpype_modules, modules_key, log
):
_load_ayon_core_addons_dir(
ignore_addon_names, openpype_modules, modules_key, log
)
# Add current directory at first place
# - has small differences in import logic
hosts_dir = os.path.join(AYON_CORE_ROOT, "hosts")


@ -41,7 +41,6 @@ class CollectAERender(publish.AbstractCollectRender):
def get_instances(self, context):
instances = []
instances_to_remove = []
app_version = CollectAERender.get_stub().get_app_version()
app_version = app_version[0:4]
@ -117,7 +116,10 @@ class CollectAERender(publish.AbstractCollectRender):
fps=fps,
app_version=app_version,
publish_attributes=inst.data.get("publish_attributes", {}),
file_names=[item.file_name for item in render_q]
file_names=[item.file_name for item in render_q],
# The source instance this render instance replaces
source_instance=inst
)
comp = compositions_by_id.get(comp_id)
@ -145,10 +147,7 @@ class CollectAERender(publish.AbstractCollectRender):
instance.families.remove("review")
instances.append(instance)
instances_to_remove.append(inst)
for instance in instances_to_remove:
context.remove(instance)
return instances
def get_expected_files(self, render_instance):


@ -55,8 +55,7 @@ class BlenderAddon(AYONAddon, IHostAddon):
)
# Define Qt binding if not defined
if not env.get("QT_PREFERRED_BINDING"):
env["QT_PREFERRED_BINDING"] = "PySide2"
env.pop("QT_PREFERRED_BINDING", None)
def get_launch_hook_paths(self, app):
if app.host_name != self.host_name:


@ -31,7 +31,7 @@ class InstallPySideToBlender(PreLaunchHook):
def inner_execute(self):
# Get blender's python directory
version_regex = re.compile(r"^[2-4]\.[0-9]+$")
version_regex = re.compile(r"^([2-4])\.[0-9]+$")
platform = system().lower()
executable = self.launch_context.executable.executable_path
@ -42,7 +42,8 @@ class InstallPySideToBlender(PreLaunchHook):
if os.path.basename(executable).lower() != expected_executable:
self.log.info((
f"Executable does not lead to {expected_executable} file."
"Can't determine blender's python to check/install PySide2."
"Can't determine blender's python to check/install"
" Qt binding."
))
return
@ -73,6 +74,15 @@ class InstallPySideToBlender(PreLaunchHook):
return
version_subfolder = version_subfolders[0]
before_blender_4 = False
if int(version_regex.match(version_subfolder).group(1)) < 4:
before_blender_4 = True
# Blender 4 has Python 3.11 which does not support 'PySide2'
# QUESTION could we always install PySide6?
qt_binding = "PySide2" if before_blender_4 else "PySide6"
# Use PySide6 6.6.3 because 6.7.0 had a bug
# - 'QTextEdit' can't be added to 'QBoxLayout'
qt_binding_version = None if before_blender_4 else "6.6.3"
python_dir = os.path.join(versions_dir, version_subfolder, "python")
python_lib = os.path.join(python_dir, "lib")
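
For illustration, with a hypothetical Blender 4.1 installation the selection above resolves like this:

import re

version_regex = re.compile(r"^([2-4])\.[0-9]+$")
version_subfolder = "4.1"   # hypothetical versions folder found next to the executable

before_blender_4 = int(version_regex.match(version_subfolder).group(1)) < 4   # False
qt_binding = "PySide2" if before_blender_4 else "PySide6"                     # "PySide6"
qt_binding_version = None if before_blender_4 else "6.6.3"                    # "6.6.3"
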
@ -116,22 +126,41 @@ class InstallPySideToBlender(PreLaunchHook):
return
# Check if PySide2 is installed and skip if yes
if self.is_pyside_installed(python_executable):
if self.is_pyside_installed(python_executable, qt_binding):
self.log.debug("Blender has already installed PySide2.")
return
# Install PySide2 in blender's python
if platform == "windows":
result = self.install_pyside_windows(python_executable)
result = self.install_pyside_windows(
python_executable,
qt_binding,
qt_binding_version,
before_blender_4,
)
else:
result = self.install_pyside(python_executable)
result = self.install_pyside(
python_executable,
qt_binding,
qt_binding_version,
)
if result:
self.log.info("Successfully installed PySide2 module to blender.")
self.log.info(
f"Successfully installed {qt_binding} module to blender."
)
else:
self.log.warning("Failed to install PySide2 module to blender.")
self.log.warning(
f"Failed to install {qt_binding} module to blender."
)
def install_pyside_windows(self, python_executable):
def install_pyside_windows(
self,
python_executable,
qt_binding,
qt_binding_version,
before_blender_4,
):
"""Install PySide2 python module to blender's python.
Installation requires administration rights that's why it is required
@ -139,7 +168,6 @@ class InstallPySideToBlender(PreLaunchHook):
administration rights.
"""
try:
import win32api
import win32con
import win32process
import win32event
@ -150,12 +178,37 @@ class InstallPySideToBlender(PreLaunchHook):
self.log.warning("Couldn't import \"pywin32\" modules")
return
if qt_binding_version:
qt_binding = f"{qt_binding}=={qt_binding_version}"
try:
# Parameters
# - use "-m pip" as module pip to install PySide2 and argument
# "--ignore-installed" is to force install module to blender's
# site-packages and make sure it is binary compatible
parameters = "-m pip install --ignore-installed PySide2"
fake_exe = "fake.exe"
site_packages_prefix = os.path.dirname(
os.path.dirname(python_executable)
)
args = [
fake_exe,
"-m",
"pip",
"install",
"--ignore-installed",
qt_binding,
]
if not before_blender_4:
# Define prefix for site package
# Python in blender 4.x is installing packages in AppData and
# not in blender's directory.
args.extend(["--prefix", site_packages_prefix])
parameters = (
subprocess.list2cmdline(args)
.lstrip(fake_exe)
.lstrip(" ")
)
# Execute command and ask for administrator's rights
process_info = ShellExecuteEx(
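
A small sketch of the string that ends up in `parameters` above and is handed to ShellExecuteEx for the elevated install (the Blender path is made up):

import os
import subprocess

python_executable = r"C:\Blender\4.1\python\bin\python.exe"   # hypothetical
site_packages_prefix = os.path.dirname(os.path.dirname(python_executable))

args = ["fake.exe", "-m", "pip", "install", "--ignore-installed",
        "PySide6==6.6.3", "--prefix", site_packages_prefix]
parameters = subprocess.list2cmdline(args).lstrip("fake.exe").lstrip(" ")
# -> '-m pip install --ignore-installed PySide6==6.6.3 --prefix C:\Blender\4.1\python'
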
@ -173,20 +226,29 @@ class InstallPySideToBlender(PreLaunchHook):
except pywintypes.error:
pass
def install_pyside(self, python_executable):
"""Install PySide2 python module to blender's python."""
def install_pyside(
self,
python_executable,
qt_binding,
qt_binding_version,
):
"""Install Qt binding python module to blender's python."""
if qt_binding_version:
qt_binding = f"{qt_binding}=={qt_binding_version}"
try:
# Parameters
# - use "-m pip" as module pip to install PySide2 and argument
# - use "-m pip" as module pip to install qt binding and argument
# "--ignore-installed" is to force install module to blender's
# site-packages and make sure it is binary compatible
# TODO find out if blender 4.x on linux/darwin does install
# qt binding to correct place.
args = [
python_executable,
"-m",
"pip",
"install",
"--ignore-installed",
"PySide2",
qt_binding,
]
process = subprocess.Popen(
args, stdout=subprocess.PIPE, universal_newlines=True
@ -203,13 +265,15 @@ class InstallPySideToBlender(PreLaunchHook):
except subprocess.SubprocessError:
pass
def is_pyside_installed(self, python_executable):
def is_pyside_installed(self, python_executable, qt_binding):
"""Check if PySide2 module is in blender's pip list.
Check that PySide2 is installed directly in blender's site-packages.
It is possible that it is installed in user's site-packages but that
may be incompatible with blender's python.
"""
qt_binding_low = qt_binding.lower()
# Get pip list from blender's python executable
args = [python_executable, "-m", "pip", "list"]
process = subprocess.Popen(args, stdout=subprocess.PIPE)
@ -226,6 +290,6 @@ class InstallPySideToBlender(PreLaunchHook):
if not line:
continue
package_name = line[0:package_len].strip()
if package_name.lower() == "pyside2":
if package_name.lower() == qt_binding_low:
return True
return False
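
For reference, the check above parses plain `pip list` output and compares the first column case-insensitively, roughly like this (versions are illustrative):

# python -m pip list
#
#     Package    Version
#     ---------- -------
#     pip        23.2.1
#     PySide6    6.6.3
#
# The first column of each row is compared case-insensitively, so
# "PySide6".lower() == qt_binding.lower() short-circuits the installation.
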


@ -167,7 +167,7 @@ class JsonLayoutLoader(plugin.AssetLoader):
asset_group.empty_display_type = 'SINGLE_ARROW'
avalon_container.objects.link(asset_group)
self._process(libpath, asset, asset_group, None)
self._process(libpath, asset_name, asset_group, None)
bpy.context.scene.collection.objects.link(asset_group)


@ -3,8 +3,8 @@ import sys
import re
import contextlib
from ayon_core.lib import Logger
from ayon_core.lib import Logger, BoolDef, UILabelDef
from ayon_core.style import load_stylesheet
from ayon_core.pipeline import registered_host
from ayon_core.pipeline.create import CreateContext
from ayon_core.pipeline.context_tools import get_current_folder_entity
@ -181,7 +181,6 @@ def validate_comp_prefs(comp=None, force_repair=False):
from . import menu
from ayon_core.tools.utils import SimplePopup
from ayon_core.style import load_stylesheet
dialog = SimplePopup(parent=menu.menu)
dialog.setWindowTitle("Fusion comp has invalid configuration")
@ -340,9 +339,7 @@ def prompt_reset_context():
from ayon_core.tools.attribute_defs.dialog import (
AttributeDefinitionsDialog
)
from ayon_core.style import load_stylesheet
from ayon_core.lib import BoolDef, UILabelDef
from qtpy import QtWidgets, QtCore
from qtpy import QtCore
definitions = [
UILabelDef(


@ -85,7 +85,6 @@ class InstallPySideToFusion(PreLaunchHook):
administration rights.
"""
try:
import win32api
import win32con
import win32process
import win32event


@ -37,14 +37,13 @@ class CollectFusionRender(
aspect_x = comp_frame_format_prefs["AspectX"]
aspect_y = comp_frame_format_prefs["AspectY"]
instances = []
instances_to_remove = []
current_file = context.data["currentFile"]
version = context.data["version"]
project_entity = context.data["projectEntity"]
instances = []
for inst in context:
if not inst.data.get("active", True):
continue
@ -91,7 +90,10 @@ class CollectFusionRender(
frameStep=1,
fps=comp_frame_format_prefs.get("Rate"),
app_version=comp.GetApp().Version,
publish_attributes=inst.data.get("publish_attributes", {})
publish_attributes=inst.data.get("publish_attributes", {}),
# The source instance this render instance replaces
source_instance=inst
)
render_target = inst.data["creator_attributes"]["render_target"]
@ -114,13 +116,7 @@ class CollectFusionRender(
# to skip ExtractReview locally
instance.families.remove("review")
# add new instance to the list and remove the original
# instance since it is not needed anymore
instances.append(instance)
instances_to_remove.append(inst)
for instance in instances_to_remove:
context.remove(instance)
return instances


@ -92,10 +92,6 @@ class PrecollectInstances(pyblish.api.ContextPlugin):
folder_path, folder_name = self._get_folder_data(tag_data)
product_name = tag_data.get("productName")
if product_name is None:
product_name = tag_data["subset"]
families = [str(f) for f in tag_data["families"]]
# TODO: remove backward compatibility
@ -293,7 +289,7 @@ class PrecollectInstances(pyblish.api.ContextPlugin):
label += " {}".format(product_name)
data.update({
"name": "{}_{}".format(folder_path, subset),
"name": "{}_{}".format(folder_path, product_name),
"label": label,
"productName": product_name,
"productType": product_type,


@ -1,7 +1,7 @@
# -*- coding: utf-8 -*-
"""Creator plugin for creating alembic camera products."""
from ayon_core.hosts.houdini.api import plugin
from ayon_core.pipeline import CreatedInstance, CreatorError
from ayon_core.pipeline import CreatorError
import hou
@ -23,7 +23,7 @@ class CreateAlembicCamera(plugin.HoudiniCreator):
instance = super(CreateAlembicCamera, self).create(
product_name,
instance_data,
pre_create_data) # type: CreatedInstance
pre_create_data)
instance_node = hou.node(instance.get("instance_node"))
parms = {


@ -29,7 +29,7 @@ class CreateArnoldAss(plugin.HoudiniCreator):
instance = super(CreateArnoldAss, self).create(
product_name,
instance_data,
pre_create_data) # type: plugin.CreatedInstance
pre_create_data)
instance_node = hou.node(instance.get("instance_node"))


@ -31,7 +31,7 @@ class CreateArnoldRop(plugin.HoudiniCreator):
instance = super(CreateArnoldRop, self).create(
product_name,
instance_data,
pre_create_data) # type: plugin.CreatedInstance
pre_create_data)
instance_node = hou.node(instance.get("instance_node"))


@ -1,7 +1,7 @@
# -*- coding: utf-8 -*-
"""Creator plugin for creating pointcache bgeo files."""
from ayon_core.hosts.houdini.api import plugin
from ayon_core.pipeline import CreatedInstance, CreatorError
from ayon_core.pipeline import CreatorError
import hou
from ayon_core.lib import EnumDef, BoolDef
@ -25,7 +25,7 @@ class CreateBGEO(plugin.HoudiniCreator):
instance = super(CreateBGEO, self).create(
product_name,
instance_data,
pre_create_data) # type: CreatedInstance
pre_create_data)
instance_node = hou.node(instance.get("instance_node"))


@ -1,7 +1,7 @@
# -*- coding: utf-8 -*-
"""Creator plugin for creating composite sequences."""
from ayon_core.hosts.houdini.api import plugin
from ayon_core.pipeline import CreatedInstance, CreatorError
from ayon_core.pipeline import CreatorError
import hou
@ -25,7 +25,7 @@ class CreateCompositeSequence(plugin.HoudiniCreator):
instance = super(CreateCompositeSequence, self).create(
product_name,
instance_data,
pre_create_data) # type: CreatedInstance
pre_create_data)
instance_node = hou.node(instance.get("instance_node"))
filepath = "{}{}".format(


@ -78,7 +78,7 @@ class CreateHDA(plugin.HoudiniCreator):
instance = super(CreateHDA, self).create(
product_name,
instance_data,
pre_create_data) # type: plugin.CreatedInstance
pre_create_data)
return instance


@ -1,7 +1,6 @@
# -*- coding: utf-8 -*-
"""Creator plugin to create Karma ROP."""
from ayon_core.hosts.houdini.api import plugin
from ayon_core.pipeline import CreatedInstance
from ayon_core.lib import BoolDef, EnumDef, NumberDef
@ -25,7 +24,7 @@ class CreateKarmaROP(plugin.HoudiniCreator):
instance = super(CreateKarmaROP, self).create(
product_name,
instance_data,
pre_create_data) # type: CreatedInstance
pre_create_data)
instance_node = hou.node(instance.get("instance_node"))


@ -1,7 +1,6 @@
# -*- coding: utf-8 -*-
"""Creator plugin for creating pointcache alembics."""
from ayon_core.hosts.houdini.api import plugin
from ayon_core.pipeline import CreatedInstance
from ayon_core.lib import BoolDef
@ -22,7 +21,7 @@ class CreateMantraIFD(plugin.HoudiniCreator):
instance = super(CreateMantraIFD, self).create(
product_name,
instance_data,
pre_create_data) # type: CreatedInstance
pre_create_data)
instance_node = hou.node(instance.get("instance_node"))


@ -1,7 +1,6 @@
# -*- coding: utf-8 -*-
"""Creator plugin to create Mantra ROP."""
from ayon_core.hosts.houdini.api import plugin
from ayon_core.pipeline import CreatedInstance
from ayon_core.lib import EnumDef, BoolDef
@ -28,7 +27,7 @@ class CreateMantraROP(plugin.HoudiniCreator):
instance = super(CreateMantraROP, self).create(
product_name,
instance_data,
pre_create_data) # type: CreatedInstance
pre_create_data)
instance_node = hou.node(instance.get("instance_node"))


@ -1,7 +1,6 @@
# -*- coding: utf-8 -*-
"""Creator plugin for creating USDs."""
from ayon_core.hosts.houdini.api import plugin
from ayon_core.pipeline import CreatedInstance
import hou
@ -22,7 +21,7 @@ class CreateUSD(plugin.HoudiniCreator):
instance = super(CreateUSD, self).create(
product_name,
instance_data,
pre_create_data) # type: CreatedInstance
pre_create_data)
instance_node = hou.node(instance.get("instance_node"))


@ -1,7 +1,6 @@
# -*- coding: utf-8 -*-
"""Creator plugin for creating USD renders."""
from ayon_core.hosts.houdini.api import plugin
from ayon_core.pipeline import CreatedInstance
class CreateUSDRender(plugin.HoudiniCreator):
@ -23,7 +22,7 @@ class CreateUSDRender(plugin.HoudiniCreator):
instance = super(CreateUSDRender, self).create(
product_name,
instance_data,
pre_create_data) # type: CreatedInstance
pre_create_data)
instance_node = hou.node(instance.get("instance_node"))


@ -1,7 +1,6 @@
# -*- coding: utf-8 -*-
"""Creator plugin for creating VDB Caches."""
from ayon_core.hosts.houdini.api import plugin
from ayon_core.pipeline import CreatedInstance
from ayon_core.lib import BoolDef
import hou
@ -26,7 +25,7 @@ class CreateVDBCache(plugin.HoudiniCreator):
instance = super(CreateVDBCache, self).create(
product_name,
instance_data,
pre_create_data) # type: CreatedInstance
pre_create_data)
instance_node = hou.node(instance.get("instance_node"))
file_path = "{}{}".format(


@ -3,7 +3,7 @@
import hou
from ayon_core.hosts.houdini.api import plugin
from ayon_core.pipeline import CreatedInstance, CreatorError
from ayon_core.pipeline import CreatorError
from ayon_core.lib import EnumDef, BoolDef
@ -31,7 +31,7 @@ class CreateVrayROP(plugin.HoudiniCreator):
instance = super(CreateVrayROP, self).create(
product_name,
instance_data,
pre_create_data) # type: CreatedInstance
pre_create_data)
instance_node = hou.node(instance.get("instance_node"))


@ -1,9 +1,21 @@
from collections import deque
import pyblish.api
from ayon_core.pipeline import registered_host
def collect_input_containers(nodes):
def get_container_members(container):
node = container["node"]
# Usually the loaded containers don't have any complex references
# and the contained children should be all we need. So we disregard
# checking for .references() on the nodes.
members = set(node.allSubChildren())
members.add(node) # include the node itself
return members
def collect_input_containers(containers, nodes):
"""Collect containers that contain any of the node in `nodes`.
This will return any loaded Avalon container that contains at least one of
@ -11,30 +23,13 @@ def collect_input_containers(nodes):
there are member nodes of that container.
Returns:
list: Input avalon containers
list: Loaded containers that contain the `nodes`
"""
# Lookup by node ids
lookup = frozenset(nodes)
containers = []
host = registered_host()
for container in host.ls():
node = container["node"]
# Usually the loaded containers don't have any complex references
# and the contained children should be all we need. So we disregard
# checking for .references() on the nodes.
members = set(node.allSubChildren())
members.add(node) # include the node itself
# If there's an intersection
if not lookup.isdisjoint(members):
containers.append(container)
return containers
# Assume the containers have collected their cached '_members' data
# in the collector.
return [container for container in containers
if any(node in container["_members"] for node in nodes)]
def iter_upstream(node):
@ -54,7 +49,7 @@ def iter_upstream(node):
)
# Initialize process queue with the node's ancestors itself
queue = list(upstream)
queue = deque(upstream)
collected = set(upstream)
# Traverse upstream references for all nodes and yield them as we
@ -72,6 +67,10 @@ def iter_upstream(node):
# Include the references' ancestors that have not been collected yet.
for reference in references:
if reference in collected:
# Might have been collected in previous iteration
continue
ancestors = reference.inputAncestors(
include_ref_inputs=True, follow_subnets=True
)
@ -108,13 +107,32 @@ class CollectUpstreamInputs(pyblish.api.InstancePlugin):
)
return
# Collect all upstream parents
nodes = list(iter_upstream(output))
nodes.append(output)
# For large scenes the querying of "host.ls()" can be relatively slow
# e.g. up to a second. Many instances calling it easily slows this
# down. As such, we cache it so we trigger it only once.
# todo: Instead of hidden cache make "CollectContainers" plug-in
cache_key = "__cache_containers"
scene_containers = instance.context.data.get(cache_key, None)
if scene_containers is None:
# Query the scenes' containers if there's no cache yet
host = registered_host()
scene_containers = list(host.ls())
for container in scene_containers:
# Embed the members into the container dictionary
container_members = set(get_container_members(container))
container["_members"] = container_members
instance.context.data[cache_key] = scene_containers
# Collect containers for the given set of nodes
containers = collect_input_containers(nodes)
inputs = []
if scene_containers:
# Collect all upstream parents
nodes = list(iter_upstream(output))
nodes.append(output)
# Collect containers for the given set of nodes
containers = collect_input_containers(scene_containers, nodes)
inputs = [c["representation"] for c in containers]
inputs = [c["representation"] for c in containers]
instance.data["inputRepresentations"] = inputs
self.log.debug("Collected inputs: %s" % inputs)
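
A rough sketch of what gets cached on the publish context by the code above (container keys besides "node" and "representation" are whatever the host's ls() provides):

# instance.context.data["__cache_containers"] = [
#     {
#         "node": <hou.Node at /obj/char_rig_01>,        # loaded container node
#         "representation": "<representation id>",
#         ...,
#         "_members": {<node and all of its sub children>},
#     },
#     ...
# ]
# Each instance then only does cheap set-membership checks against "_members"
# instead of re-querying the scene with host.ls().
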


@ -3,7 +3,6 @@ import pyblish.api
from ayon_core.lib import version_up
from ayon_core.pipeline import registered_host
from ayon_core.pipeline.publish import get_errored_plugins_from_context
from ayon_core.hosts.houdini.api import HoudiniHost
from ayon_core.pipeline.publish import KnownPublishError
@ -39,7 +38,7 @@ class IncrementCurrentFile(pyblish.api.ContextPlugin):
)
# Filename must not have changed since collecting
host = registered_host() # type: HoudiniHost
host = registered_host()
current_file = host.current_file()
if context.data["currentFile"] != current_file:
raise KnownPublishError(


@ -2,8 +2,6 @@
"""Tools to work with FBX."""
import logging
from pyblish.api import Instance
from maya import cmds # noqa
import maya.mel as mel # noqa
from ayon_core.hosts.maya.api.lib import maintained_selection
@ -146,7 +144,6 @@ class FBXExtractor:
return options
def set_options_from_instance(self, instance):
# type: (Instance) -> None
"""Sets FBX export options from data in the instance.
Args:


@ -1917,6 +1917,29 @@ def apply_attributes(attributes, nodes_by_id):
set_attribute(attr, value, node)
def is_valid_reference_node(reference_node):
"""Return whether Maya considers the reference node a valid reference.
Maya might report an error when using `maya.cmds.referenceQuery`:
Reference node 'reference_node' is not associated with a reference file.
Note that this does *not* check whether the reference node points to an
existing file. Instead it only returns whether maya considers it valid
and thus is not an unassociated reference node
Arguments:
reference_node (str): Reference node name
Returns:
bool: Whether reference node is a valid reference
"""
sel = OpenMaya.MSelectionList()
sel.add(reference_node)
depend_node = sel.getDependNode(0)
return OpenMaya.MFnReference(depend_node).isValidReference()
def get_container_members(container):
"""Returns the members of a container.
This includes the nodes from any loaded references in the container.
@ -1942,7 +1965,16 @@ def get_container_members(container):
if ref.rsplit(":", 1)[-1].startswith("_UNKNOWN_REF_NODE_"):
continue
reference_members = cmds.referenceQuery(ref, nodes=True, dagPath=True)
try:
reference_members = cmds.referenceQuery(ref,
nodes=True,
dagPath=True)
except RuntimeError:
# Ignore reference nodes that are not associated with a
# referenced file on which `referenceQuery` command fails
if not is_valid_reference_node(ref):
continue
raise
reference_members = cmds.ls(reference_members,
long=True,
objectsOnly=True)
@ -4238,6 +4270,9 @@ def get_reference_node(members, log=None):
if ref.rsplit(":", 1)[-1].startswith("_UNKNOWN_REF_NODE_"):
continue
if not is_valid_reference_node(ref):
continue
references.add(ref)
assert references, "No reference node found in container"
@ -4268,15 +4303,19 @@ def get_reference_node_parents(ref):
list: The upstream parent reference nodes.
"""
parent = cmds.referenceQuery(ref,
referenceNode=True,
parent=True)
def _get_parent(reference_node):
"""Return parent reference node, but ignore invalid reference nodes"""
if not is_valid_reference_node(reference_node):
return
return cmds.referenceQuery(reference_node,
referenceNode=True,
parent=True)
parent = _get_parent(ref)
parents = []
while parent:
parents.append(parent)
parent = cmds.referenceQuery(parent,
referenceNode=True,
parent=True)
parent = _get_parent(parent)
return parents


@ -299,4 +299,10 @@ def transfer_image_planes(source_cameras, target_cameras,
def _attach_image_plane(camera, image_plane):
cmds.imagePlane(image_plane, edit=True, detach=True)
# Attaching to a camera resets it to identity size, so we counter that
size_x = cmds.getAttr(f"{image_plane}.sizeX")
size_y = cmds.getAttr(f"{image_plane}.sizeY")
cmds.imagePlane(image_plane, edit=True, camera=camera)
cmds.setAttr(f"{image_plane}.sizeX", size_x)
cmds.setAttr(f"{image_plane}.sizeY", size_y)


@ -2,7 +2,6 @@ from maya import cmds
import pyblish.api
from ayon_core.pipeline.publish import (
ValidateContentsOrder,
RepairContextAction,
PublishValidationError
)


@ -1,17 +1,27 @@
import inspect
import uuid
from collections import defaultdict
import pyblish.api
import ayon_core.hosts.maya.api.action
from ayon_core.hosts.maya.api import lib
from ayon_core.pipeline.publish import (
OptionalPyblishPluginMixin, PublishValidationError, ValidatePipelineOrder)
from ayon_api import get_folders
def is_valid_uuid(value) -> bool:
"""Return whether value is a valid UUID"""
try:
uuid.UUID(value)
except ValueError:
return False
return True
class ValidateNodeIDsRelated(pyblish.api.InstancePlugin,
OptionalPyblishPluginMixin):
"""Validate nodes have a related Colorbleed Id to the
instance.data[folderPath]
"""
"""Validate nodes have a related `cbId` to the instance.data[folderPath]"""
order = ValidatePipelineOrder
label = 'Node Ids Related (ID)'
@ -39,21 +49,24 @@ class ValidateNodeIDsRelated(pyblish.api.InstancePlugin,
# Ensure all nodes have a cbId
invalid = self.get_invalid(instance)
if invalid:
invalid_list = "\n".join(f"- {node}" for node in sorted(invalid))
raise PublishValidationError((
"Nodes IDs found that are not related to folder '{}' : {}"
).format(
instance.data["folderPath"], invalid
))
"Nodes IDs found that are not related to folder '{}':\n{}"
).format(instance.data["folderPath"], invalid_list),
description=self.get_description()
)
@classmethod
def get_invalid(cls, instance):
"""Return the member nodes that are invalid"""
invalid = list()
folder_id = instance.data["folderEntity"]["id"]
# We do want to check the referenced nodes as we it might be
# We do want to check the referenced nodes as it might be
# part of the end product
invalid = list()
nodes_by_other_folder_ids = defaultdict(set)
for node in instance:
_id = lib.get_id(node)
if not _id:
@ -62,5 +75,48 @@ class ValidateNodeIDsRelated(pyblish.api.InstancePlugin,
node_folder_id = _id.split(":", 1)[0]
if node_folder_id != folder_id:
invalid.append(node)
nodes_by_other_folder_ids[node_folder_id].add(node)
# Log what other assets were found.
if nodes_by_other_folder_ids:
project_name = instance.context.data["projectName"]
other_folder_ids = set(nodes_by_other_folder_ids.keys())
# Remove folder ids that are not valid UUID identifiers, these
# may be legacy OpenPype ids
other_folder_ids = {folder_id for folder_id in other_folder_ids
if is_valid_uuid(folder_id)}
if not other_folder_ids:
return invalid
folder_entities = get_folders(project_name=project_name,
folder_ids=other_folder_ids,
fields=["path"])
if folder_entities:
# Log names of other assets detected
# We disregard logging nodes/ids for asset ids where no asset
# was found in the database because ValidateNodeIdsInDatabase
# takes care of that.
folder_paths = {entity["path"] for entity in folder_entities}
cls.log.error(
"Found nodes related to other folders:\n{}".format(
"\n".join(f"- {path}" for path in sorted(folder_paths))
)
)
return invalid
@staticmethod
def get_description():
return inspect.cleandoc("""### Node IDs must match folder id
The node ids must match the folder entity id you are publishing to.
Usually this mismatch occurs when you are re-using nodes from another
folder or project.
#### How to repair?
The repair action will regenerate new ids for
the invalid nodes to match the instance's folder.
""")


@ -46,6 +46,6 @@ class ValidateSceneSetWorkspace(pyblish.api.ContextPlugin):
raise PublishValidationError(
"Maya workspace is not set correctly.\n\n"
f"Current workfile `{scene_name}` is not inside the "
"current Maya project root directory `{root_dir}`.\n\n"
f"current Maya project root directory `{root_dir}`.\n\n"
"Please use Workfile app to re-save."
)


@ -5,7 +5,7 @@ import sys
import six
import random
import string
from collections import OrderedDict, defaultdict
from collections import defaultdict
from ayon_core.settings import get_current_project_settings
from ayon_core.lib import (


@ -144,7 +144,8 @@ class CreateTextures(Creator):
9: "512",
10: "1024",
11: "2048",
12: "4096"
12: "4096",
13: "8192"
},
default=None,
label="Size"),


@ -1,6 +1,5 @@
import os
from ayon_core.lib import StringTemplate
from ayon_core.pipeline import (
registered_host,
get_current_context,
@ -111,8 +110,6 @@ class LoadWorkfile(plugin.Loader):
data["version"] = version
filename = StringTemplate.format_strict_template(
file_template, data
)
filename = work_template["file"].format_strict(data)
path = os.path.join(work_root, filename)
host.save_workfile(path)


@ -94,8 +94,12 @@ def prepare_template_data(fill_pairs):
output = {}
for item in valid_items:
keys, value = item
upper_value = value.upper()
capitalized_value = _capitalize_value(value)
# Convert only string values
if isinstance(value, str):
upper_value = value.upper()
capitalized_value = _capitalize_value(value)
else:
upper_value = capitalized_value = value
first_key = keys.pop(0)
if not keys:
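
Assuming the function keeps building its usual lowercase/UPPERCASE/Capitalized key variants (an assumption based on the surrounding code), the change above lets non-string values pass through instead of failing on str.upper():

from ayon_core.lib import prepare_template_data

# Hypothetical fill pairs; the integer no longer raises AttributeError
prepare_template_data({"family": "render", "frame": 1001})
# roughly -> {"family": "render", "FAMILY": "RENDER", "Family": "Render",
#             "frame": 1001, "FRAME": 1001, "Frame": 1001}
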


@ -103,17 +103,17 @@ class FusionSubmitDeadline(
# Collect all saver instances in context that are to be rendered
saver_instances = []
for instance in context:
if instance.data["productType"] != "render":
for inst in context:
if inst.data["productType"] != "render":
# Allow only saver family instances
continue
if not instance.data.get("publish", True):
if not inst.data.get("publish", True):
# Skip inactive instances
continue
self.log.debug(instance.data["name"])
saver_instances.append(instance)
self.log.debug(inst.data["name"])
saver_instances.append(inst)
if not saver_instances:
raise RuntimeError("No instances found for Deadline submission")


@ -1,13 +1,14 @@
# -*- coding: utf-8 -*-
"""Wrapper around Royal Render API."""
import sys
import os
import sys
from ayon_core.lib.local_settings import AYONSettingsRegistry
from ayon_core.lib import Logger, run_subprocess
from .rr_job import RRJob, SubmitFile, SubmitterParameter
from ayon_core.lib import Logger, run_subprocess, AYONSettingsRegistry
from ayon_core.lib.vendor_bin_utils import find_tool_in_custom_paths
from .rr_job import SubmitFile
from .rr_job import RRJob, SubmitterParameter # noqa F401
class Api:


@ -3,7 +3,6 @@
import os
import attr
import json
import re
import pyblish.api


@ -549,7 +549,7 @@ class Anatomy(BaseAnatomy):
)
else:
# Ask sync server to get roots overrides
roots_overrides = sitesync.get_site_root_overrides(
roots_overrides = sitesync_addon.get_site_root_overrides(
project_name, site_name
)
site_cache.update_data(roots_overrides)


@ -14,7 +14,6 @@ from .exceptions import (
TemplateMissingKey,
AnatomyTemplateUnsolved,
)
from .roots import RootItem
_PLACEHOLDER = object()


@ -1,7 +1,6 @@
"""Core pipeline functionality"""
import os
import types
import logging
import platform
import uuid
@ -21,7 +20,6 @@ from .anatomy import Anatomy
from .template_data import get_template_data_with_names
from .workfile import (
get_workdir,
get_workfile_template_key,
get_custom_workfile_template_by_string_context,
)
from . import (


@ -6,13 +6,11 @@ from copy import deepcopy
import attr
import ayon_api
import pyblish.api
import clique
from ayon_core.pipeline import (
get_current_project_name,
get_representation_path,
Anatomy,
)
from ayon_core.lib import Logger
from ayon_core.pipeline.publish import KnownPublishError
@ -137,7 +135,7 @@ def get_transferable_representations(instance):
list of dicts: List of transferable representations.
"""
anatomy = instance.context.data["anatomy"] # type: Anatomy
anatomy = instance.context.data["anatomy"]
to_transfer = []
for representation in instance.data.get("representations", []):
@ -166,7 +164,6 @@ def get_transferable_representations(instance):
def create_skeleton_instance(
instance, families_transfer=None, instance_transfer=None):
# type: (pyblish.api.Instance, list, dict) -> dict
"""Create skeleton instance from original instance data.
This will create dictionary containing skeleton
@ -191,7 +188,7 @@ def create_skeleton_instance(
context = instance.context
data = instance.data.copy()
anatomy = instance.context.data["anatomy"] # type: Anatomy
anatomy = instance.context.data["anatomy"]
# get time related data from instance (or context)
time_data = get_time_data_from_instance_or_context(instance)
@ -620,15 +617,32 @@ def _create_instances_for_aov(instance, skeleton, aov_filter, additional_data,
aov_patterns = aov_filter
preview = match_aov_pattern(app, aov_patterns, render_file_name)
# toggle preview on if multipart is on
if instance.data.get("multipartExr"):
log.debug("Adding preview tag because its multipartExr")
preview = True
new_instance = deepcopy(skeleton)
new_instance["productName"] = product_name
new_instance["productGroup"] = group_name
# toggle preview on if multipart is on
# Because we can't query the multipartExr data member of each AOV we'll
# need to have hardcoded rule of excluding any renders with
# "cryptomatte" in the file name from being a multipart EXR. This issue
# happens with Redshift that forces Cryptomatte renders to be separate
# files even when the rest of the AOVs are merged into a single EXR.
# There might be an edge case where the main instance has cryptomatte
# in the name even though it's a multipart EXR.
if instance.data.get("renderer") == "redshift":
if (
instance.data.get("multipartExr") and
"cryptomatte" not in render_file_name.lower()
):
log.debug("Adding preview tag because it's multipartExr")
preview = True
else:
new_instance["multipartExr"] = False
elif instance.data.get("multipartExr"):
log.debug("Adding preview tag because its multipartExr")
preview = True
# explicitly disable review by user
preview = preview and not do_not_add_review
if preview:
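
A worked example of how the Redshift rule above plays out (file names are hypothetical, multipartExr is enabled on the source instance):

# "sh010_beauty.0001.exr"        -> no "cryptomatte" in the name, so the preview
#                                   tag is enabled for the multipart EXR
# "sh010_cryptomatte00.0001.exr" -> Redshift writes cryptomatte AOVs as separate
#                                   files, so this AOV instance gets
#                                   multipartExr = False and preview stays
#                                   whatever the AOV filter pattern decided
# Any other renderer keeps the previous behaviour: multipartExr alone enables
# the preview tag.
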
@ -751,7 +765,6 @@ def get_resources(project_name, version_entity, extension=None):
def create_skeleton_instance_cache(instance):
# type: (pyblish.api.Instance, list, dict) -> dict
"""Create skeleton instance from original instance data.
This will create dictionary containing skeleton
@ -771,7 +784,7 @@ def create_skeleton_instance_cache(instance):
context = instance.context
data = instance.data.copy()
anatomy = instance.context.data["anatomy"] # type: Anatomy
anatomy = instance.context.data["anatomy"]
# get time related data from instance (or context)
time_data = get_time_data_from_instance_or_context(instance)
@ -1005,7 +1018,7 @@ def copy_extend_frames(instance, representation):
start = instance.data.get("frameStart")
end = instance.data.get("frameEnd")
project_name = instance.context.data["project"]
anatomy = instance.context.data["anatomy"] # type: Anatomy
anatomy = instance.context.data["anatomy"]
folder_entity = ayon_api.get_folder_by_path(
project_name, instance.data.get("folderPath")


@ -81,6 +81,9 @@ class RenderInstance(object):
outputDir = attr.ib(default=None)
context = attr.ib(default=None)
# The source instance the data of this render instance should merge into
source_instance = attr.ib(default=None, type=pyblish.api.Instance)
@frameStart.validator
def check_frame_start(self, _, value):
"""Validate if frame start is not larger then end."""
@ -214,8 +217,11 @@ class AbstractCollectRender(pyblish.api.ContextPlugin):
data = self.add_additional_data(data)
render_instance_dict = attr.asdict(render_instance)
instance = context.create_instance(render_instance.name)
instance.data["label"] = render_instance.label
# Merge into source instance if provided, otherwise create instance
instance = render_instance_dict.pop("source_instance", None)
if instance is None:
instance = context.create_instance(render_instance.name)
instance.data.update(render_instance_dict)
instance.data.update(data)


@ -13,7 +13,6 @@ Resources:
"""
import os
import re
import json
import logging


@ -3,8 +3,6 @@ import platform
import subprocess
from string import Formatter
import ayon_api
from ayon_core.pipeline import (
Anatomy,
LauncherAction,


@ -284,7 +284,13 @@ class ProductsModel(QtGui.QStandardItemModel):
model_item.setData(label, QtCore.Qt.DisplayRole)
return model_item
def _set_version_data_to_product_item(self, model_item, version_item):
def _set_version_data_to_product_item(
self,
model_item,
version_item,
repre_count_by_version_id=None,
sync_availability_by_version_id=None,
):
"""
Args:
@ -292,6 +298,10 @@ class ProductsModel(QtGui.QStandardItemModel):
from version item.
version_item (VersionItem): Item from entities model with
information about version.
repre_count_by_version_id (Optional[str, int]): Mapping of
representation count by version id.
sync_availability_by_version_id (Optional[str, Tuple[int, int]]):
Mapping of sync availability by version id.
"""
model_item.setData(version_item.version_id, VERSION_ID_ROLE)
@ -312,12 +322,20 @@ class ProductsModel(QtGui.QStandardItemModel):
# TODO call site sync methods for all versions at once
project_name = self._last_project_name
version_id = version_item.version_id
repre_count = self._controller.get_versions_representation_count(
project_name, [version_id]
)[version_id]
active, remote = self._controller.get_version_sync_availability(
project_name, [version_id]
)[version_id]
if repre_count_by_version_id is None:
repre_count_by_version_id = (
self._controller.get_versions_representation_count(
project_name, [version_id]
)
)
if sync_availability_by_version_id is None:
sync_availability_by_version_id = (
self._controller.get_version_sync_availability(
project_name, [version_id]
)
)
repre_count = repre_count_by_version_id[version_id]
active, remote = sync_availability_by_version_id[version_id]
model_item.setData(repre_count, REPRESENTATIONS_COUNT_ROLE)
model_item.setData(active, SYNC_ACTIVE_SITE_AVAILABILITY)
@ -327,7 +345,9 @@ class ProductsModel(QtGui.QStandardItemModel):
self,
product_item,
active_site_icon,
remote_site_icon
remote_site_icon,
repre_count_by_version_id,
sync_availability_by_version_id,
):
model_item = self._items_by_id.get(product_item.product_id)
versions = list(product_item.version_items.values())
@ -357,7 +377,12 @@ class ProductsModel(QtGui.QStandardItemModel):
model_item.setData(active_site_icon, ACTIVE_SITE_ICON_ROLE)
model_item.setData(remote_site_icon, REMOTE_SITE_ICON_ROLE)
self._set_version_data_to_product_item(model_item, last_version)
self._set_version_data_to_product_item(
model_item,
last_version,
repre_count_by_version_id,
sync_availability_by_version_id,
)
return model_item
def get_last_project_name(self):
@ -387,6 +412,24 @@ class ProductsModel(QtGui.QStandardItemModel):
product_item.product_id: product_item
for product_item in product_items
}
last_version_id_by_product_id = {}
for product_item in product_items:
versions = list(product_item.version_items.values())
versions.sort()
last_version = versions[-1]
last_version_id_by_product_id[product_item.product_id] = (
last_version.version_id
)
version_ids = set(last_version_id_by_product_id.values())
repre_count_by_version_id = self._controller.get_versions_representation_count(
project_name, version_ids
)
sync_availability_by_version_id = (
self._controller.get_version_sync_availability(
project_name, version_ids
)
)
# Prepare product groups
product_name_matches_by_group = collections.defaultdict(dict)
@ -443,6 +486,8 @@ class ProductsModel(QtGui.QStandardItemModel):
product_item,
active_site_icon,
remote_site_icon,
repre_count_by_version_id,
sync_availability_by_version_id,
)
new_items.append(item)
@ -463,6 +508,8 @@ class ProductsModel(QtGui.QStandardItemModel):
product_item,
active_site_icon,
remote_site_icon,
repre_count_by_version_id,
sync_availability_by_version_id,
)
new_merged_items.append(item)
merged_product_types.add(product_item.product_type)


@ -0,0 +1,3 @@
name = "applications"
title = "Applications"
version = "0.2.0"


@ -6,7 +6,6 @@ from ayon_server.addons import BaseServerAddon, AddonLibrary
from ayon_server.entities.core import attribute_library
from ayon_server.lib.postgres import Postgres
from .version import __version__
from .settings import ApplicationsAddonSettings, DEFAULT_VALUES
try:
@ -87,9 +86,6 @@ def get_enum_items_from_groups(groups):
class ApplicationsAddon(BaseServerAddon):
name = "applications"
title = "Applications"
version = __version__
settings_model = ApplicationsAddonSettings
async def get_default_settings(self):


@ -1 +0,0 @@
__version__ = "0.1.9"


@ -4,6 +4,8 @@ import re
import shutil
import argparse
import zipfile
import types
import importlib
import platform
import collections
from pathlib import Path
@ -44,6 +46,11 @@ version = "{addon_version}"
plugin_for = ["ayon_server"]
"""
CLIENT_VERSION_CONTENT = '''# -*- coding: utf-8 -*-
"""Package declaring AYON core addon version."""
__version__ = "{}"
'''
class ZipFileLongPaths(zipfile.ZipFile):
"""Allows longer paths in zip files.
@ -175,13 +182,75 @@ def create_addon_zip(
shutil.rmtree(str(output_dir / addon_name))
def prepare_client_code(
addon_dir: Path,
addon_output_dir: Path,
addon_version: str
):
client_dir = addon_dir / "client"
if not client_dir.exists():
return
# Prepare private dir in output
private_dir = addon_output_dir / "private"
private_dir.mkdir(parents=True, exist_ok=True)
# Copy pyproject toml if available
pyproject_toml = client_dir / "pyproject.toml"
if pyproject_toml.exists():
shutil.copy(pyproject_toml, private_dir)
for subpath in client_dir.iterdir():
if subpath.name == "pyproject.toml":
continue
if subpath.is_file():
continue
# Update version.py with server version if 'version.py' is available
version_path = subpath / "version.py"
if version_path.exists():
with open(version_path, "w") as stream:
stream.write(CLIENT_VERSION_CONTENT.format(addon_version))
zip_filepath = private_dir / "client.zip"
with ZipFileLongPaths(zip_filepath, "w", zipfile.ZIP_DEFLATED) as zipf:
# Add client code content to zip
for path, sub_path in find_files_in_subdir(str(subpath)):
sub_path = os.path.join(subpath.name, sub_path)
zipf.write(path, sub_path)
def import_filepath(path: Path, module_name: Optional[str] = None):
if not module_name:
module_name = os.path.splitext(path.name)[0]
# Convert to string
path = str(path)
module = types.ModuleType(module_name)
module.__file__ = path
# Use loader so module has full specs
module_loader = importlib.machinery.SourceFileLoader(
module_name, path
)
module_loader.exec_module(module)
return module
def create_addon_package(
addon_dir: Path,
output_dir: Path,
create_zip: bool,
keep_source: bool,
):
addon_version = get_addon_version(addon_dir)
src_package_py = addon_dir / "package.py"
package = None
if src_package_py.exists():
package = import_filepath(src_package_py)
addon_version = package.version
else:
addon_version = get_addon_version(addon_dir)
addon_output_dir = output_dir / addon_dir.name / addon_version
if addon_output_dir.exists():
@ -189,22 +258,27 @@ def create_addon_package(
addon_output_dir.mkdir(parents=True)
# Copy server content
package_py = addon_output_dir / "package.py"
addon_name = addon_dir.name
if addon_name == "royal_render":
addon_name = "royalrender"
package_py_content = PACKAGE_PY_TEMPLATE.format(
addon_name=addon_name, addon_version=addon_version
)
dst_package_py = addon_output_dir / "package.py"
if package is not None:
shutil.copy(src_package_py, dst_package_py)
else:
addon_name = addon_dir.name
if addon_name == "royal_render":
addon_name = "royalrender"
package_py_content = PACKAGE_PY_TEMPLATE.format(
addon_name=addon_name, addon_version=addon_version
)
with open(package_py, "w+") as pkg_py:
pkg_py.write(package_py_content)
with open(dst_package_py, "w+") as pkg_py:
pkg_py.write(package_py_content)
server_dir = addon_dir / "server"
shutil.copytree(
server_dir, addon_output_dir / "server", dirs_exist_ok=True
)
prepare_client_code(addon_dir, addon_output_dir, addon_version)
if create_zip:
create_addon_zip(
output_dir, addon_dir.name, addon_version, keep_source
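
For orientation, the output produced by create_addon_package with the helpers above looks roughly like this (addon name and version are illustrative):

# <output_dir>/applications/0.2.0/
#     package.py          # copied from the addon, or generated from the template
#     server/             # copied from <addon_dir>/server
#     private/
#         pyproject.toml  # only when <addon_dir>/client/pyproject.toml exists
#         client.zip      # zipped client package, version.py rewritten to "0.2.0"
# (optionally packed into a single zip by create_addon_zip afterwards)
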


@ -1,3 +1,4 @@
from typing import TYPE_CHECKING
from pydantic import validator
from ayon_server.settings import (
@ -5,6 +6,8 @@ from ayon_server.settings import (
SettingsField,
ensure_unique_names,
)
if TYPE_CHECKING:
from ayon_server.addons import BaseServerAddon
from .publish_plugins import (
PublishPluginsModel,
@ -19,7 +22,7 @@ class ServerListSubmodel(BaseSettingsModel):
async def defined_deadline_ws_name_enum_resolver(
addon: "BaseServerAddon",
addon: BaseServerAddon,
settings_variant: str = "production",
project_name: str | None = None,
) -> list[str]:


@ -1,5 +1,5 @@
from ayon_server.settings import BaseSettingsModel, SettingsField
from ayon_server.types import ColorRGB_float, ColorRGBA_uint8
from ayon_server.types import ColorRGBA_uint8
class LoaderEnabledModel(BaseSettingsModel):


@ -6,7 +6,7 @@ from ayon_server.settings import (
ensure_unique_names,
task_types_enum,
)
from ayon_server.types import ColorRGBA_uint8, ColorRGB_float
from ayon_server.types import ColorRGBA_uint8
def hardware_falloff_enum():


@ -1,5 +1,5 @@
from ayon_server.settings import BaseSettingsModel, SettingsField
from ayon_server.types import ColorRGBA_uint8, ColorRGB_uint8
from ayon_server.types import ColorRGBA_uint8
class CollectRenderInstancesModel(BaseSettingsModel):