Mirror of https://github.com/ynput/ayon-core.git, synced 2025-12-24 21:04:40 +01:00

Commit 33207799a5: [Automated] Merged develop into main
40 changed files with 1861 additions and 195 deletions

.github/ISSUE_TEMPLATE/bug_report.yml (vendored), 2 changes
@@ -35,6 +35,7 @@ body:
      label: Version
      description: What version are you running? Look to OpenPype Tray
      options:
        - 3.17.3-nightly.2
        - 3.17.3-nightly.1
        - 3.17.2
        - 3.17.2-nightly.4

@@ -134,7 +135,6 @@ body:
        - 3.14.11-nightly.4
        - 3.14.11-nightly.3
        - 3.14.11-nightly.2
        - 3.14.11-nightly.1
    validations:
      required: true
  - type: dropdown
@@ -13,12 +13,19 @@ import six
from openpype.lib import StringTemplate
from openpype.client import get_asset_by_name
from openpype.settings import get_current_project_settings
from openpype.pipeline import get_current_project_name, get_current_asset_name
from openpype.pipeline import (
    get_current_project_name,
    get_current_asset_name,
    registered_host
)
from openpype.pipeline.context_tools import (
    get_current_context_template_data,
    get_current_project_asset
)
from openpype.widgets import popup
from openpype.tools.utils.host_tools import get_tool_by_name
from openpype.pipeline.create import CreateContext

import hou
@@ -325,52 +332,61 @@ def imprint(node, data, update=False):
        return

    current_parms = {p.name(): p for p in node.spareParms()}
    update_parms = []
    templates = []
    update_parm_templates = []
    new_parm_templates = []

    for key, value in data.items():
        if value is None:
            continue

        parm = get_template_from_value(key, value)
        parm_template = get_template_from_value(key, value)

        if key in current_parms:
            if node.evalParm(key) == data[key]:
            if node.evalParm(key) == value:
                continue
            if not update:
                log.debug(f"{key} already exists on {node}")
            else:
                log.debug(f"replacing {key}")
                update_parms.append(parm)
                update_parm_templates.append(parm_template)
            continue

        templates.append(parm)
        new_parm_templates.append(parm_template)

    parm_group = node.parmTemplateGroup()
    parm_folder = parm_group.findFolder("Extra")

    # if folder doesn't exist yet, create one and append to it,
    # else append to existing one
    if not parm_folder:
        parm_folder = hou.FolderParmTemplate("folder", "Extra")
        parm_folder.setParmTemplates(templates)
        parm_group.append(parm_folder)
    else:
        for template in templates:
            parm_group.appendToFolder(parm_folder, template)
            # this is needed because the pointer to folder
            # is for some reason lost every call to `appendToFolder()`
            parm_folder = parm_group.findFolder("Extra")

    node.setParmTemplateGroup(parm_group)

    # TODO: Updating is done here, by calling probably deprecated functions.
    # This needs to be addressed in the future.
    if not update_parms:
    if not new_parm_templates and not update_parm_templates:
        return

    for parm in update_parms:
        node.replaceSpareParmTuple(parm.name(), parm)
    parm_group = node.parmTemplateGroup()

    # Add new parm templates
    if new_parm_templates:
        parm_folder = parm_group.findFolder("Extra")

        # if folder doesn't exist yet, create one and append to it,
        # else append to existing one
        if not parm_folder:
            parm_folder = hou.FolderParmTemplate("folder", "Extra")
            parm_folder.setParmTemplates(new_parm_templates)
            parm_group.append(parm_folder)
        else:
            # Add to parm template folder instance then replace with updated
            # one in parm template group
            for template in new_parm_templates:
                parm_folder.addParmTemplate(template)
            parm_group.replace(parm_folder.name(), parm_folder)

    # Update existing parm templates
    for parm_template in update_parm_templates:
        parm_group.replace(parm_template.name(), parm_template)

        # When replacing a parm with a parm of the same name it preserves its
        # value if before the replacement the parm was not at the default,
        # because it has a value override set. Since we're trying to update the
        # parm by using the new value as `default` we enforce the parm is at
        # default state
        node.parm(parm_template.name()).revertToDefaults()

    node.setParmTemplateGroup(parm_group)


def lsattr(attr, value=None, root="/"):
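A minimal usage sketch of the reworked imprint(), assuming a running Houdini session; the node path and data keys are illustrative, only the imprint(node, data, update=False) signature comes from the diff above:

import hou

from openpype.hosts.houdini.api.lib import imprint

node = hou.node("/out/usd_rop1")  # illustrative ROP path

# First call creates the "Extra" spare-parm folder and adds new templates.
imprint(node, {"subset": "usdMain", "id": "example"})

# A later call with update=True replaces the matching parm templates in the
# template group and reverts the parms to their new defaults, as done above.
imprint(node, {"subset": "usdShot"}, update=True)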
@@ -847,3 +863,97 @@ def update_houdini_vars_context_dialog():
    dialog.on_clicked.connect(update_houdini_vars_context)

    dialog.show()


def publisher_show_and_publish(comment=None):
    """Open publisher window and trigger publishing action.

    Args:
        comment (Optional[str]): Comment to set in publisher window.
    """

    main_window = get_main_window()
    publisher_window = get_tool_by_name(
        tool_name="publisher",
        parent=main_window,
    )
    publisher_window.show_and_publish(comment)


def find_rop_input_dependencies(input_tuple):
    """Find the ROP node paths inside the input dependencies.

    Arguments:
        input_tuple (hou.RopNode.inputDependencies): can be nested tuples
            representing the input dependencies of the ROP node, consisting
            of ROPs and the frames that need to be rendered prior to
            rendering the ROP.

    Returns:
        list: the RopNode.path() values that can be found inside
            the input tuple.
    """

    out_list = []
    if isinstance(input_tuple[0], hou.RopNode):
        return input_tuple[0].path()

    if isinstance(input_tuple[0], tuple):
        for item in input_tuple:
            out_list.append(find_rop_input_dependencies(item))

    return out_list


def self_publish():
    """Self publish from ROP nodes.

    Firstly, it gets the node and its dependencies.
    Then, it deactivates all other ROPs.
    And finally, it triggers the publishing action.
    """

    result, comment = hou.ui.readInput(
        "Add Publish Comment",
        buttons=("Publish", "Cancel"),
        title="Publish comment",
        close_choice=1
    )

    if result:
        return

    current_node = hou.node(".")
    inputs_paths = find_rop_input_dependencies(
        current_node.inputDependencies()
    )
    inputs_paths.append(current_node.path())

    host = registered_host()
    context = CreateContext(host, reset=True)

    for instance in context.instances:
        node_path = instance.data.get("instance_node")
        instance["active"] = node_path and node_path in inputs_paths

    context.save_changes()

    publisher_show_and_publish(comment)


def add_self_publish_button(node):
    """Adds a self publish button to the rop node."""

    label = os.environ.get("AVALON_LABEL") or "OpenPype"

    button_parm = hou.ButtonParmTemplate(
        "ayon_self_publish",
        "{} Publish".format(label),
        script_callback="from openpype.hosts.houdini.api.lib import "
                        "self_publish; self_publish()",
        script_callback_language=hou.scriptLanguage.Python,
        join_with_next=True
    )

    template = node.parmTemplateGroup()
    template.insertBefore((0,), button_parm)
    node.setParmTemplateGroup(template)
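hou.RopNode.inputDependencies() returns nested tuples pairing a ROP with the frames it has to render first; the helper above flattens them into plain node paths. A hedged sketch of wiring the button onto an existing ROP, with an illustrative node path:

import hou

from openpype.hosts.houdini.api.lib import (
    add_self_publish_button,
    find_rop_input_dependencies,
)

rop = hou.node("/out/usd_rop1")  # illustrative ROP path
add_self_publish_button(rop)  # inserts the "<label> Publish" button at the top

# The button callback (self_publish) compares these paths against each
# publish instance's "instance_node" to decide which instances stay active.
paths = find_rop_input_dependencies(rop.inputDependencies())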
@@ -13,7 +13,7 @@ from openpype.pipeline import (
    CreatedInstance
)
from openpype.lib import BoolDef
from .lib import imprint, read, lsattr
from .lib import imprint, read, lsattr, add_self_publish_button


class OpenPypeCreatorError(CreatorError):

@@ -168,6 +168,7 @@ class HoudiniCreator(NewCreator, HoudiniCreatorBase):
    """Base class for most of the Houdini creator plugins."""
    selected_nodes = []
    settings_name = None
    add_publish_button = False

    def create(self, subset_name, instance_data, pre_create_data):
        try:

@@ -195,6 +196,10 @@ class HoudiniCreator(NewCreator, HoudiniCreatorBase):
                self)
            self._add_instance_to_context(instance)
            self.imprint(instance_node, instance.data_to_store())

            if self.add_publish_button:
                add_self_publish_button(instance_node)

            return instance

        except hou.Error as er:

@@ -245,6 +250,7 @@ class HoudiniCreator(NewCreator, HoudiniCreatorBase):
            key: changes[key].new_value
            for key in changes.changed_keys
        }
        # Update parm templates and values
        self.imprint(
            instance_node,
            new_values,

@@ -316,6 +322,12 @@ class HoudiniCreator(NewCreator, HoudiniCreatorBase):
    def apply_settings(self, project_settings):
        """Method called on initialization of plugin to apply settings."""

        # Apply General Settings
        houdini_general_settings = project_settings["houdini"]["general"]
        self.add_publish_button = houdini_general_settings.get(
            "add_self_publish_button", False)

        # Apply Creator Settings
        settings_name = self.settings_name
        if settings_name is None:
            settings_name = self.__class__.__name__
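apply_settings() only reads a single general flag from the Houdini project settings; a minimal sketch of the slice it consumes (the surrounding structure follows the Houdini defaults JSON further below):

project_settings = {
    "houdini": {
        "general": {
            "add_self_publish_button": True
        }
    }
}

# Inside HoudiniCreator.apply_settings() this resolves to:
add_publish_button = project_settings["houdini"]["general"].get(
    "add_self_publish_button", False)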
@@ -6,6 +6,9 @@ import platform
from openpype.settings import get_project_settings
from openpype.pipeline import get_current_project_name

from openpype.lib import StringTemplate
from openpype.pipeline.context_tools import get_current_context_template_data

import hou

log = logging.getLogger("openpype.hosts.houdini.shelves")

@@ -26,10 +29,16 @@ def generate_shelves():
        log.debug("No custom shelves found in project settings.")
        return

    # Get Template data
    template_data = get_current_context_template_data()

    for shelf_set_config in shelves_set_config:
        shelf_set_filepath = shelf_set_config.get('shelf_set_source_path')
        shelf_set_os_filepath = shelf_set_filepath[current_os]
        if shelf_set_os_filepath:
            shelf_set_os_filepath = get_path_using_template_data(
                shelf_set_os_filepath, template_data
            )
            if not os.path.isfile(shelf_set_os_filepath):
                log.error("Shelf path doesn't exist - "
                          "{}".format(shelf_set_os_filepath))

@@ -81,7 +90,9 @@ def generate_shelves():
                    "script path of the tool.")
                continue

            tool = get_or_create_tool(tool_definition, shelf)
            tool = get_or_create_tool(
                tool_definition, shelf, template_data
            )

            if not tool:
                continue

@@ -144,7 +155,7 @@ def get_or_create_shelf(shelf_label):
    return new_shelf


def get_or_create_tool(tool_definition, shelf):
def get_or_create_tool(tool_definition, shelf, template_data):
    """This function verifies if the tool exists and updates it. If not,
    creates a new one.

@@ -162,10 +173,16 @@ def get_or_create_tool(tool_definition, shelf):
        return

    script_path = tool_definition["script"]
    script_path = get_path_using_template_data(script_path, template_data)
    if not script_path or not os.path.exists(script_path):
        log.warning("This path doesn't exist - {}".format(script_path))
        return

    icon_path = tool_definition["icon"]
    if icon_path:
        icon_path = get_path_using_template_data(icon_path, template_data)
        tool_definition["icon"] = icon_path

    existing_tools = shelf.tools()
    existing_tool = next(
        (tool for tool in existing_tools if tool.label() == tool_label),

@@ -184,3 +201,10 @@ def get_or_create_tool(tool_definition, shelf):

    tool_name = re.sub(r"[^\w\d]+", "_", tool_label).lower()
    return hou.shelves.newTool(name=tool_name, **tool_definition)


def get_path_using_template_data(path, template_data):
    path = StringTemplate.format_template(path, template_data)
    path = path.replace("\\", "/")

    return path
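get_path_using_template_data() is a thin wrapper around StringTemplate; a minimal sketch, where the template key and paths are illustrative and the real keys come from get_current_context_template_data():

from openpype.lib import StringTemplate

template_data = {"root": "P:/studio/scripts"}  # illustrative key and value
path = "{root}/houdini/publish_tool.py"

resolved = StringTemplate.format_template(path, template_data)
resolved = resolved.replace("\\", "/")
# -> "P:/studio/scripts/houdini/publish_tool.py"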
@@ -1,4 +1,5 @@
import os
import platform
import subprocess

from openpype.lib.vendor_bin_utils import find_executable

@@ -8,17 +9,31 @@ from openpype.pipeline import load
class ShowInUsdview(load.LoaderPlugin):
    """Open USD file in usdview"""

    families = ["colorbleed.usd"]
    label = "Show in usdview"
    representations = ["usd", "usda", "usdlc", "usdnc"]
    order = 10
    representations = ["*"]
    families = ["*"]
    extensions = {"usd", "usda", "usdlc", "usdnc", "abc"}
    order = 15

    icon = "code-fork"
    color = "white"

    def load(self, context, name=None, namespace=None, data=None):
        from pathlib import Path

        usdview = find_executable("usdview")
        if platform.system() == "Windows":
            executable = "usdview.bat"
        else:
            executable = "usdview"

        usdview = find_executable(executable)
        if not usdview:
            raise RuntimeError("Unable to find usdview")

        # For some reason Windows can return the path like:
        # C:/PROGRA~1/SIDEEF~1/HOUDIN~1.435/bin/usdview
        # convert to resolved path so `subprocess` can take it
        usdview = str(Path(usdview).resolve().as_posix())

        filepath = self.filepath_from_context(context)
        filepath = os.path.normpath(filepath)

@@ -30,14 +45,4 @@ class ShowInUsdview(load.LoaderPlugin):

        self.log.info("Start houdini variant of usdview...")

        # For now avoid some pipeline environment variables that initialize
        # Avalon in Houdini as it is redundant for usdview and slows boot time
        env = os.environ.copy()
        env.pop("PYTHONPATH", None)
        env.pop("HOUDINI_SCRIPT_PATH", None)
        env.pop("HOUDINI_MENU_PATH", None)

        # Force string to avoid unicode issues
        env = {str(key): str(value) for key, value in env.items()}

        subprocess.Popen([usdview, filepath, "--renderer", "GL"], env=env)
        subprocess.Popen([usdview, filepath, "--renderer", "GL"])
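The pathlib round-trip above exists because Windows can hand back an 8.3 "short" path for the executable; a standalone sketch of the same normalization, using the example path from the comment above:

from pathlib import Path

short_path = "C:/PROGRA~1/SIDEEF~1/HOUDIN~1.435/bin/usdview.bat"
usdview = str(Path(short_path).resolve().as_posix())
# On an existing install this expands to the full long path with forward
# slashes, which subprocess.Popen accepts without further quoting.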
@@ -2217,7 +2217,6 @@ Reopening Nuke should synchronize these paths and resolve any discrepancies.
        """
        # replace path with env var if possible
        ocio_path = self._replace_ocio_path_with_env_var(config_data)
        ocio_path = ocio_path.replace("\\", "/")

        log.info("Setting OCIO config path to: `{}`".format(
            ocio_path))
openpype/hosts/nuke/plugins/load/load_ociolook.py (new file, 350 lines)
@ -0,0 +1,350 @@
|
|||
import os
|
||||
import json
|
||||
import secrets
|
||||
import nuke
|
||||
import six
|
||||
|
||||
from openpype.client import (
|
||||
get_version_by_id,
|
||||
get_last_version_by_subset_id
|
||||
)
|
||||
from openpype.pipeline import (
|
||||
load,
|
||||
get_current_project_name,
|
||||
get_representation_path,
|
||||
)
|
||||
from openpype.hosts.nuke.api import (
|
||||
containerise,
|
||||
viewer_update_and_undo_stop,
|
||||
update_container,
|
||||
)
|
||||
|
||||
|
||||
class LoadOcioLookNodes(load.LoaderPlugin):
|
||||
"""Loading Ocio look to the nuke.Node graph"""
|
||||
|
||||
families = ["ociolook"]
|
||||
representations = ["*"]
|
||||
extensions = {"json"}
|
||||
|
||||
label = "Load OcioLook [nodes]"
|
||||
order = 0
|
||||
icon = "cc"
|
||||
color = "white"
|
||||
ignore_attr = ["useLifetime"]
|
||||
|
||||
# plugin attributes
|
||||
current_node_color = "0x4ecd91ff"
|
||||
old_node_color = "0xd88467ff"
|
||||
|
||||
# json file variables
|
||||
schema_version = 1
|
||||
|
||||
def load(self, context, name, namespace, data):
|
||||
"""
|
||||
Loading function to get the soft effects to particular read node
|
||||
|
||||
Arguments:
|
||||
context (dict): context of version
|
||||
name (str): name of the version
|
||||
namespace (str): asset name
|
||||
data (dict): compulsory attribute > not used
|
||||
|
||||
Returns:
|
||||
nuke.Node: containerized nuke.Node object
|
||||
"""
|
||||
namespace = namespace or context['asset']['name']
|
||||
suffix = secrets.token_hex(nbytes=4)
|
||||
object_name = "{}_{}_{}".format(
|
||||
name, namespace, suffix)
|
||||
|
||||
# getting file path
|
||||
filepath = self.filepath_from_context(context)
|
||||
|
||||
json_f = self._load_json_data(filepath)
|
||||
|
||||
group_node = self._create_group_node(
|
||||
object_name, filepath, json_f["data"])
|
||||
|
||||
self._node_version_color(context["version"], group_node)
|
||||
|
||||
self.log.info(
|
||||
"Loaded lut setup: `{}`".format(group_node["name"].value()))
|
||||
|
||||
return containerise(
|
||||
node=group_node,
|
||||
name=name,
|
||||
namespace=namespace,
|
||||
context=context,
|
||||
loader=self.__class__.__name__,
|
||||
data={
|
||||
"objectName": object_name,
|
||||
}
|
||||
)
|
||||
|
||||
def _create_group_node(
|
||||
self,
|
||||
object_name,
|
||||
filepath,
|
||||
data
|
||||
):
|
||||
"""Creates group node with all the nodes inside.
|
||||
|
||||
Creating mainly `OCIOFileTransform` nodes with `OCIOColorSpace` nodes
|
||||
in between - in case those are needed.
|
||||
|
||||
Arguments:
|
||||
object_name (str): name of the group node
|
||||
filepath (str): path to json file
|
||||
data (dict): data from json file
|
||||
|
||||
Returns:
|
||||
nuke.Node: group node with all the nodes inside
|
||||
"""
|
||||
# get corresponding node
|
||||
|
||||
root_working_colorspace = nuke.root()["workingSpaceLUT"].value()
|
||||
|
||||
dir_path = os.path.dirname(filepath)
|
||||
all_files = os.listdir(dir_path)
|
||||
|
||||
ocio_working_colorspace = _colorspace_name_by_type(
|
||||
data["ocioLookWorkingSpace"])
|
||||
|
||||
# adding nodes to node graph
|
||||
# just in case we are in group lets jump out of it
|
||||
nuke.endGroup()
|
||||
|
||||
input_node = None
|
||||
output_node = None
|
||||
group_node = nuke.toNode(object_name)
|
||||
if group_node:
|
||||
# remove all nodes between Input and Output nodes
|
||||
for node in group_node.nodes():
|
||||
if node.Class() not in ["Input", "Output"]:
|
||||
nuke.delete(node)
|
||||
elif node.Class() == "Input":
|
||||
input_node = node
|
||||
elif node.Class() == "Output":
|
||||
output_node = node
|
||||
else:
|
||||
group_node = nuke.createNode(
|
||||
"Group",
|
||||
"name {}_1".format(object_name),
|
||||
inpanel=False
|
||||
)
|
||||
|
||||
# adding content to the group node
|
||||
with group_node:
|
||||
pre_colorspace = root_working_colorspace
|
||||
|
||||
# reusing input node if it exists during update
|
||||
if input_node:
|
||||
pre_node = input_node
|
||||
else:
|
||||
pre_node = nuke.createNode("Input")
|
||||
pre_node["name"].setValue("rgb")
|
||||
|
||||
# Compare script working colorspace with ocio working colorspace
|
||||
# found in json file and convert to json's if needed
|
||||
if pre_colorspace != ocio_working_colorspace:
|
||||
pre_node = _add_ocio_colorspace_node(
|
||||
pre_node,
|
||||
pre_colorspace,
|
||||
ocio_working_colorspace
|
||||
)
|
||||
pre_colorspace = ocio_working_colorspace
|
||||
|
||||
for ocio_item in data["ocioLookItems"]:
|
||||
input_space = _colorspace_name_by_type(
|
||||
ocio_item["input_colorspace"])
|
||||
output_space = _colorspace_name_by_type(
|
||||
ocio_item["output_colorspace"])
|
||||
|
||||
# making sure we are set to correct colorspace for otio item
|
||||
if pre_colorspace != input_space:
|
||||
pre_node = _add_ocio_colorspace_node(
|
||||
pre_node,
|
||||
pre_colorspace,
|
||||
input_space
|
||||
)
|
||||
|
||||
node = nuke.createNode("OCIOFileTransform")
|
||||
|
||||
# file path from lut representation
|
||||
extension = ocio_item["ext"]
|
||||
item_name = ocio_item["name"]
|
||||
|
||||
item_lut_file = next(
|
||||
(
|
||||
file for file in all_files
|
||||
if file.endswith(extension)
|
||||
),
|
||||
None
|
||||
)
|
||||
if not item_lut_file:
|
||||
raise ValueError(
|
||||
"File with extension '{}' not "
|
||||
"found in directory".format(extension)
|
||||
)
|
||||
|
||||
item_lut_path = os.path.join(
|
||||
dir_path, item_lut_file).replace("\\", "/")
|
||||
node["file"].setValue(item_lut_path)
|
||||
node["name"].setValue(item_name)
|
||||
node["direction"].setValue(ocio_item["direction"])
|
||||
node["interpolation"].setValue(ocio_item["interpolation"])
|
||||
node["working_space"].setValue(input_space)
|
||||
|
||||
pre_node.autoplace()
|
||||
node.setInput(0, pre_node)
|
||||
node.autoplace()
|
||||
# pass output space into pre_colorspace for next iteration
|
||||
# or for output node comparison
|
||||
pre_colorspace = output_space
|
||||
pre_node = node
|
||||
|
||||
# making sure we are back in script working colorspace
|
||||
if pre_colorspace != root_working_colorspace:
|
||||
pre_node = _add_ocio_colorspace_node(
|
||||
pre_node,
|
||||
pre_colorspace,
|
||||
root_working_colorspace
|
||||
)
|
||||
|
||||
# reusing output node if it exists during update
|
||||
if not output_node:
|
||||
output = nuke.createNode("Output")
|
||||
else:
|
||||
output = output_node
|
||||
|
||||
output.setInput(0, pre_node)
|
||||
|
||||
return group_node
|
||||
|
||||
def update(self, container, representation):
|
||||
|
||||
project_name = get_current_project_name()
|
||||
version_doc = get_version_by_id(project_name, representation["parent"])
|
||||
|
||||
object_name = container['objectName']
|
||||
|
||||
filepath = get_representation_path(representation)
|
||||
|
||||
json_f = self._load_json_data(filepath)
|
||||
|
||||
group_node = self._create_group_node(
|
||||
object_name,
|
||||
filepath,
|
||||
json_f["data"]
|
||||
)
|
||||
|
||||
self._node_version_color(version_doc, group_node)
|
||||
|
||||
self.log.info("Updated lut setup: `{}`".format(
|
||||
group_node["name"].value()))
|
||||
|
||||
return update_container(
|
||||
group_node, {"representation": str(representation["_id"])})
|
||||
|
||||
def _load_json_data(self, filepath):
|
||||
# getting data from json file with unicode conversion
|
||||
with open(filepath, "r") as _file:
|
||||
json_f = {self._bytify(key): self._bytify(value)
|
||||
for key, value in json.load(_file).items()}
|
||||
|
||||
# check if the version in json_f is the same as plugin version
|
||||
if json_f["version"] != self.schema_version:
|
||||
raise KeyError(
|
||||
"Version of json file is not the same as plugin version")
|
||||
|
||||
return json_f
|
||||
|
||||
def _bytify(self, input):
|
||||
"""
|
||||
Converts unicode strings to strings
|
||||
It goes through all dictionary
|
||||
|
||||
Arguments:
|
||||
input (dict/str): input
|
||||
|
||||
Returns:
|
||||
dict: with fixed values and keys
|
||||
|
||||
"""
|
||||
|
||||
if isinstance(input, dict):
|
||||
return {self._bytify(key): self._bytify(value)
|
||||
for key, value in input.items()}
|
||||
elif isinstance(input, list):
|
||||
return [self._bytify(element) for element in input]
|
||||
elif isinstance(input, six.text_type):
|
||||
return str(input)
|
||||
else:
|
||||
return input
|
||||
|
||||
def switch(self, container, representation):
|
||||
self.update(container, representation)
|
||||
|
||||
def remove(self, container):
|
||||
node = nuke.toNode(container['objectName'])
|
||||
with viewer_update_and_undo_stop():
|
||||
nuke.delete(node)
|
||||
|
||||
def _node_version_color(self, version, node):
|
||||
""" Coloring a node by correct color by actual version"""
|
||||
|
||||
project_name = get_current_project_name()
|
||||
last_version_doc = get_last_version_by_subset_id(
|
||||
project_name, version["parent"], fields=["_id"]
|
||||
)
|
||||
|
||||
# change color of node
|
||||
if version["_id"] == last_version_doc["_id"]:
|
||||
color_value = self.current_node_color
|
||||
else:
|
||||
color_value = self.old_node_color
|
||||
node["tile_color"].setValue(int(color_value, 16))
|
||||
|
||||
|
||||
def _colorspace_name_by_type(colorspace_data):
|
||||
"""
|
||||
Returns colorspace name by type
|
||||
|
||||
Arguments:
|
||||
colorspace_data (dict): colorspace data
|
||||
|
||||
Returns:
|
||||
str: colorspace name
|
||||
"""
|
||||
if colorspace_data["type"] == "colorspaces":
|
||||
return colorspace_data["name"]
|
||||
elif colorspace_data["type"] == "roles":
|
||||
return colorspace_data["colorspace"]
|
||||
else:
|
||||
raise KeyError("Unknown colorspace type: {}".format(
|
||||
colorspace_data["type"]))
|
||||
|
||||
|
||||
def _add_ocio_colorspace_node(pre_node, input_space, output_space):
|
||||
"""
|
||||
Adds OCIOColorSpace node to the node graph
|
||||
|
||||
Arguments:
|
||||
pre_node (nuke.Node): node to connect to
|
||||
input_space (str): input colorspace
|
||||
output_space (str): output colorspace
|
||||
|
||||
Returns:
|
||||
nuke.Node: node with OCIOColorSpace node
|
||||
"""
|
||||
node = nuke.createNode("OCIOColorSpace")
|
||||
node.setInput(0, pre_node)
|
||||
node["in_colorspace"].setValue(input_space)
|
||||
node["out_colorspace"].setValue(output_space)
|
||||
|
||||
pre_node.autoplace()
|
||||
node.setInput(0, pre_node)
|
||||
node.autoplace()
|
||||
|
||||
return node
|
||||
|
|
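_load_json_data() above expects the schema written by the tray publisher's ExtractColorspaceLook plugin further below; a hedged sketch of that payload, with illustrative colorspace names and file name:

# Hedged sketch of the version-1 JSON payload the loader consumes.
ociolook_file_content = {
    "version": 1,
    "data": {
        "ocioLookWorkingSpace": {"type": "colorspaces", "name": "ACEScg"},
        "ocioLookItems": [
            {
                "name": "LUTfile",
                "ext": "cube",
                "input_colorspace": {"type": "colorspaces", "name": "ACEScg"},
                "output_colorspace": {"type": "roles", "name": "color_picking",
                                      "colorspace": "sRGB"},
                "direction": "forward",
                "interpolation": "linear",
                "config_data": {"path": "/path/to/config.ocio"}
            }
        ]
    }
}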
@ -0,0 +1,173 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""Creator of colorspace look files.
|
||||
|
||||
This creator is used to publish colorspace look files thanks to
|
||||
production type `ociolook`. All files are published as representation.
|
||||
"""
|
||||
from pathlib import Path
|
||||
|
||||
from openpype.client import get_asset_by_name
|
||||
from openpype.lib.attribute_definitions import (
|
||||
FileDef, EnumDef, TextDef, UISeparatorDef
|
||||
)
|
||||
from openpype.pipeline import (
|
||||
CreatedInstance,
|
||||
CreatorError
|
||||
)
|
||||
from openpype.pipeline import colorspace
|
||||
from openpype.hosts.traypublisher.api.plugin import TrayPublishCreator
|
||||
|
||||
|
||||
class CreateColorspaceLook(TrayPublishCreator):
|
||||
"""Creates colorspace look files."""
|
||||
|
||||
identifier = "io.openpype.creators.traypublisher.colorspace_look"
|
||||
label = "Colorspace Look"
|
||||
family = "ociolook"
|
||||
description = "Publishes color space look file."
|
||||
extensions = [".cc", ".cube", ".3dl", ".spi1d", ".spi3d", ".csp", ".lut"]
|
||||
enabled = False
|
||||
|
||||
colorspace_items = [
|
||||
(None, "Not set")
|
||||
]
|
||||
colorspace_attr_show = False
|
||||
config_items = None
|
||||
config_data = None
|
||||
|
||||
def get_detail_description(self):
|
||||
return """# Colorspace Look
|
||||
|
||||
This creator publishes color space look file (LUT).
|
||||
"""
|
||||
|
||||
def get_icon(self):
|
||||
return "mdi.format-color-fill"
|
||||
|
||||
def create(self, subset_name, instance_data, pre_create_data):
|
||||
repr_file = pre_create_data.get("luts_file")
|
||||
if not repr_file:
|
||||
raise CreatorError("No files specified")
|
||||
|
||||
files = repr_file.get("filenames")
|
||||
if not files:
|
||||
# this should never happen
|
||||
raise CreatorError("Missing files from representation")
|
||||
|
||||
asset_doc = get_asset_by_name(
|
||||
self.project_name, instance_data["asset"])
|
||||
|
||||
subset_name = self.get_subset_name(
|
||||
variant=instance_data["variant"],
|
||||
task_name=instance_data["task"] or "Not set",
|
||||
project_name=self.project_name,
|
||||
asset_doc=asset_doc,
|
||||
)
|
||||
|
||||
instance_data["creator_attributes"] = {
|
||||
"abs_lut_path": (
|
||||
Path(repr_file["directory"]) / files[0]).as_posix()
|
||||
}
|
||||
|
||||
# Create new instance
|
||||
new_instance = CreatedInstance(self.family, subset_name,
|
||||
instance_data, self)
|
||||
new_instance.transient_data["config_items"] = self.config_items
|
||||
new_instance.transient_data["config_data"] = self.config_data
|
||||
|
||||
self._store_new_instance(new_instance)
|
||||
|
||||
def collect_instances(self):
|
||||
super().collect_instances()
|
||||
for instance in self.create_context.instances:
|
||||
if instance.creator_identifier == self.identifier:
|
||||
instance.transient_data["config_items"] = self.config_items
|
||||
instance.transient_data["config_data"] = self.config_data
|
||||
|
||||
def get_instance_attr_defs(self):
|
||||
return [
|
||||
EnumDef(
|
||||
"working_colorspace",
|
||||
self.colorspace_items,
|
||||
default="Not set",
|
||||
label="Working Colorspace",
|
||||
),
|
||||
UISeparatorDef(
|
||||
label="Advanced1"
|
||||
),
|
||||
TextDef(
|
||||
"abs_lut_path",
|
||||
label="LUT Path",
|
||||
),
|
||||
EnumDef(
|
||||
"input_colorspace",
|
||||
self.colorspace_items,
|
||||
default="Not set",
|
||||
label="Input Colorspace",
|
||||
),
|
||||
EnumDef(
|
||||
"direction",
|
||||
[
|
||||
(None, "Not set"),
|
||||
("forward", "Forward"),
|
||||
("inverse", "Inverse")
|
||||
],
|
||||
default="Not set",
|
||||
label="Direction"
|
||||
),
|
||||
EnumDef(
|
||||
"interpolation",
|
||||
[
|
||||
(None, "Not set"),
|
||||
("linear", "Linear"),
|
||||
("tetrahedral", "Tetrahedral"),
|
||||
("best", "Best"),
|
||||
("nearest", "Nearest")
|
||||
],
|
||||
default="Not set",
|
||||
label="Interpolation"
|
||||
),
|
||||
EnumDef(
|
||||
"output_colorspace",
|
||||
self.colorspace_items,
|
||||
default="Not set",
|
||||
label="Output Colorspace",
|
||||
),
|
||||
]
|
||||
|
||||
def get_pre_create_attr_defs(self):
|
||||
return [
|
||||
FileDef(
|
||||
"luts_file",
|
||||
folders=False,
|
||||
extensions=self.extensions,
|
||||
allow_sequences=False,
|
||||
single_item=True,
|
||||
label="Look Files",
|
||||
)
|
||||
]
|
||||
|
||||
def apply_settings(self, project_settings, system_settings):
|
||||
host = self.create_context.host
|
||||
host_name = host.name
|
||||
project_name = host.get_current_project_name()
|
||||
config_data = colorspace.get_imageio_config(
|
||||
project_name, host_name,
|
||||
project_settings=project_settings
|
||||
)
|
||||
|
||||
if not config_data:
|
||||
self.enabled = False
|
||||
return
|
||||
|
||||
filepath = config_data["path"]
|
||||
config_items = colorspace.get_ocio_config_colorspaces(filepath)
|
||||
labeled_colorspaces = colorspace.get_colorspaces_enumerator_items(
|
||||
config_items,
|
||||
include_aliases=True,
|
||||
include_roles=True
|
||||
)
|
||||
self.config_items = config_items
|
||||
self.config_data = config_data
|
||||
self.colorspace_items.extend(labeled_colorspaces)
|
||||
self.enabled = True
|
||||
|
|
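The EnumDef values built from get_colorspaces_enumerator_items() are "type::name" strings that convert_colorspace_enumerator_item() later resolves back to full colorspace data; an illustrative sketch, where the colorspace names are not from this diff:

# Illustrative enum items: the left value is stored on the instance,
# the right label is shown in the publisher UI.
colorspace_items = [
    ("roles::scene_linear", "[role] scene_linear (ACEScg)"),
    ("colorspaces::ACEScg", "[colorspace] ACEScg"),
    ("aliases::lin_ap1", "[alias] lin_ap1 (ACEScg)"),
]

# During collection the stored value is resolved back, e.g.
#   colorspace.convert_colorspace_enumerator_item(
#       "roles::scene_linear", config_items)
# returns a dict such as {"name": "scene_linear", "type": "roles",
# "colorspace": "ACEScg"} built from the parsed OCIO config.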
@@ -0,0 +1,86 @@
import os
from pprint import pformat
import pyblish.api
from openpype.pipeline import publish
from openpype.pipeline import colorspace


class CollectColorspaceLook(pyblish.api.InstancePlugin,
                            publish.OpenPypePyblishPluginMixin):
    """Collect OCIO colorspace look from LUT file
    """

    label = "Collect Colorspace Look"
    order = pyblish.api.CollectorOrder
    hosts = ["traypublisher"]
    families = ["ociolook"]

    def process(self, instance):
        creator_attrs = instance.data["creator_attributes"]

        lut_repre_name = "LUTfile"
        file_url = creator_attrs["abs_lut_path"]
        file_name = os.path.basename(file_url)
        base_name, ext = os.path.splitext(file_name)

        # set output name with base_name which was cleared
        # of all symbols and all parts were capitalized
        output_name = (base_name.replace("_", " ")
                                .replace(".", " ")
                                .replace("-", " ")
                                .title()
                                .replace(" ", ""))

        # get config items
        config_items = instance.data["transientData"]["config_items"]
        config_data = instance.data["transientData"]["config_data"]

        # get colorspace items
        converted_color_data = {}
        for colorspace_key in [
            "working_colorspace",
            "input_colorspace",
            "output_colorspace"
        ]:
            if creator_attrs[colorspace_key]:
                color_data = colorspace.convert_colorspace_enumerator_item(
                    creator_attrs[colorspace_key], config_items)
                converted_color_data[colorspace_key] = color_data
            else:
                converted_color_data[colorspace_key] = None

        # add colorspace to config data
        if converted_color_data["working_colorspace"]:
            config_data["colorspace"] = (
                converted_color_data["working_colorspace"]["name"]
            )

        # create lut representation data
        lut_repre = {
            "name": lut_repre_name,
            "output": output_name,
            "ext": ext.lstrip("."),
            "files": file_name,
            "stagingDir": os.path.dirname(file_url),
            "tags": []
        }
        instance.data.update({
            "representations": [lut_repre],
            "source": file_url,
            "ocioLookWorkingSpace": converted_color_data["working_colorspace"],
            "ocioLookItems": [
                {
                    "name": lut_repre_name,
                    "ext": ext.lstrip("."),
                    "input_colorspace": converted_color_data[
                        "input_colorspace"],
                    "output_colorspace": converted_color_data[
                        "output_colorspace"],
                    "direction": creator_attrs["direction"],
                    "interpolation": creator_attrs["interpolation"],
                    "config_data": config_data
                }
            ],
        })

        self.log.debug(pformat(instance.data))
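The output-name normalization above is easiest to follow with a concrete file name (illustrative):

import os

file_name = "show_grade-v001.cube"  # illustrative LUT file name
base_name, ext = os.path.splitext(file_name)
output_name = (base_name.replace("_", " ")
                        .replace(".", " ")
                        .replace("-", " ")
                        .title()
                        .replace(" ", ""))
print(output_name, ext.lstrip("."))  # -> ShowGradeV001 cube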
@@ -1,6 +1,8 @@
import pyblish.api
from openpype.pipeline import registered_host
from openpype.pipeline import publish
from openpype.pipeline import (
    publish,
    registered_host
)
from openpype.lib import EnumDef
from openpype.pipeline import colorspace

@@ -13,11 +15,14 @@ class CollectColorspace(pyblish.api.InstancePlugin,
    label = "Choose representation colorspace"
    order = pyblish.api.CollectorOrder + 0.49
    hosts = ["traypublisher"]
    families = ["render", "plate", "reference", "image", "online"]
    enabled = False

    colorspace_items = [
        (None, "Don't override")
    ]
    colorspace_attr_show = False
    config_items = None

    def process(self, instance):
        values = self.get_attr_values_from_data(instance.data)

@@ -48,10 +53,14 @@ class CollectColorspace(pyblish.api.InstancePlugin,
        if config_data:
            filepath = config_data["path"]
            config_items = colorspace.get_ocio_config_colorspaces(filepath)
            cls.colorspace_items.extend((
                (name, name) for name in config_items.keys()
            ))
            cls.colorspace_attr_show = True
            labeled_colorspaces = colorspace.get_colorspaces_enumerator_items(
                config_items,
                include_aliases=True,
                include_roles=True
            )
            cls.config_items = config_items
            cls.colorspace_items.extend(labeled_colorspaces)
            cls.enabled = True

    @classmethod
    def get_attribute_defs(cls):

@@ -60,7 +69,6 @@ class CollectColorspace(pyblish.api.InstancePlugin,
                "colorspace",
                cls.colorspace_items,
                default="Don't override",
                label="Override Colorspace",
                hidden=not cls.colorspace_attr_show
                label="Override Colorspace"
            )
        ]
@@ -0,0 +1,45 @@
import os
import json
import pyblish.api
from openpype.pipeline import publish


class ExtractColorspaceLook(publish.Extractor,
                            publish.OpenPypePyblishPluginMixin):
    """Extract OCIO colorspace look from LUT file
    """

    label = "Extract Colorspace Look"
    order = pyblish.api.ExtractorOrder
    hosts = ["traypublisher"]
    families = ["ociolook"]

    def process(self, instance):
        ociolook_items = instance.data["ocioLookItems"]
        ociolook_working_color = instance.data["ocioLookWorkingSpace"]
        staging_dir = self.staging_dir(instance)

        # create ociolook file attributes
        ociolook_file_name = "ocioLookFile.json"
        ociolook_file_content = {
            "version": 1,
            "data": {
                "ocioLookItems": ociolook_items,
                "ocioLookWorkingSpace": ociolook_working_color
            }
        }

        # write ociolook content into json file saved in staging dir
        file_url = os.path.join(staging_dir, ociolook_file_name)
        with open(file_url, "w") as f_:
            json.dump(ociolook_file_content, f_, indent=4)

        # create lut representation data
        ociolook_repre = {
            "name": "ocioLookFile",
            "ext": "json",
            "files": ociolook_file_name,
            "stagingDir": staging_dir,
            "tags": []
        }
        instance.data["representations"].append(ociolook_repre)
@@ -18,6 +18,7 @@ class ValidateColorspace(pyblish.api.InstancePlugin,
    label = "Validate representation colorspace"
    order = pyblish.api.ValidatorOrder
    hosts = ["traypublisher"]
    families = ["render", "plate", "reference", "image", "online"]

    def process(self, instance):
@ -0,0 +1,89 @@
|
|||
import pyblish.api
|
||||
|
||||
from openpype.pipeline import (
|
||||
publish,
|
||||
PublishValidationError
|
||||
)
|
||||
|
||||
|
||||
class ValidateColorspaceLook(pyblish.api.InstancePlugin,
|
||||
publish.OpenPypePyblishPluginMixin):
|
||||
"""Validate colorspace look attributes"""
|
||||
|
||||
label = "Validate colorspace look attributes"
|
||||
order = pyblish.api.ValidatorOrder
|
||||
hosts = ["traypublisher"]
|
||||
families = ["ociolook"]
|
||||
|
||||
def process(self, instance):
|
||||
create_context = instance.context.data["create_context"]
|
||||
created_instance = create_context.get_instance_by_id(
|
||||
instance.data["instance_id"])
|
||||
creator_defs = created_instance.creator_attribute_defs
|
||||
|
||||
ociolook_working_color = instance.data.get("ocioLookWorkingSpace")
|
||||
ociolook_items = instance.data.get("ocioLookItems", [])
|
||||
|
||||
creator_defs_by_key = {_def.key: _def.label for _def in creator_defs}
|
||||
|
||||
not_set_keys = {}
|
||||
if not ociolook_working_color:
|
||||
not_set_keys["working_colorspace"] = creator_defs_by_key[
|
||||
"working_colorspace"]
|
||||
|
||||
for ociolook_item in ociolook_items:
|
||||
item_not_set_keys = self.validate_colorspace_set_attrs(
|
||||
ociolook_item, creator_defs_by_key)
|
||||
if item_not_set_keys:
|
||||
not_set_keys[ociolook_item["name"]] = item_not_set_keys
|
||||
|
||||
if not_set_keys:
|
||||
message = (
|
||||
"Colorspace look attributes are not set: \n"
|
||||
)
|
||||
for key, value in not_set_keys.items():
|
||||
if isinstance(value, list):
|
||||
values_string = "\n\t- ".join(value)
|
||||
message += f"\n\t{key}:\n\t- {values_string}"
|
||||
else:
|
||||
message += f"\n\t{value}"
|
||||
|
||||
raise PublishValidationError(
|
||||
title="Colorspace Look attributes",
|
||||
message=message,
|
||||
description=message
|
||||
)
|
||||
|
||||
def validate_colorspace_set_attrs(
|
||||
self,
|
||||
ociolook_item,
|
||||
creator_defs_by_key
|
||||
):
|
||||
"""Validate colorspace look attributes"""
|
||||
|
||||
self.log.debug(f"Validate colorspace look attributes: {ociolook_item}")
|
||||
|
||||
check_keys = [
|
||||
"input_colorspace",
|
||||
"output_colorspace",
|
||||
"direction",
|
||||
"interpolation"
|
||||
]
|
||||
|
||||
not_set_keys = []
|
||||
for key in check_keys:
|
||||
if ociolook_item[key]:
|
||||
# key is set and it is correct
|
||||
continue
|
||||
|
||||
def_label = creator_defs_by_key.get(key)
|
||||
|
||||
if not def_label:
|
||||
# raise since key is not recognized by creator defs
|
||||
raise KeyError(
|
||||
f"Colorspace look attribute '{key}' is not "
|
||||
f"recognized by creator attributes: {creator_defs_by_key}"
|
||||
)
|
||||
not_set_keys.append(def_label)
|
||||
|
||||
return not_set_keys
|
||||
|
|
@ -31,13 +31,13 @@ from openpype.settings.lib import (
|
|||
get_studio_system_settings_overrides,
|
||||
load_json_file
|
||||
)
|
||||
from openpype.settings.ayon_settings import is_dev_mode_enabled
|
||||
|
||||
from openpype.lib import (
|
||||
Logger,
|
||||
import_filepath,
|
||||
import_module_from_dirpath,
|
||||
)
|
||||
from openpype.lib.openpype_version import is_staging_enabled
|
||||
|
||||
from .interfaces import (
|
||||
OpenPypeInterface,
|
||||
|
|
@ -317,21 +317,10 @@ def load_modules(force=False):
|
|||
time.sleep(0.1)
|
||||
|
||||
|
||||
def _get_ayon_addons_information():
|
||||
"""Receive information about addons to use from server.
|
||||
|
||||
Todos:
|
||||
Actually ask server for the information.
|
||||
Allow project name as optional argument to be able to query information
|
||||
about used addons for specific project.
|
||||
Returns:
|
||||
List[Dict[str, Any]]: List of addon information to use.
|
||||
"""
|
||||
|
||||
output = []
|
||||
def _get_ayon_bundle_data():
|
||||
bundle_name = os.getenv("AYON_BUNDLE_NAME")
|
||||
bundles = ayon_api.get_bundles()["bundles"]
|
||||
final_bundle = next(
|
||||
return next(
|
||||
(
|
||||
bundle
|
||||
for bundle in bundles
|
||||
|
|
@ -339,10 +328,22 @@ def _get_ayon_addons_information():
|
|||
),
|
||||
None
|
||||
)
|
||||
if final_bundle is None:
|
||||
return output
|
||||
|
||||
bundle_addons = final_bundle["addons"]
|
||||
|
||||
def _get_ayon_addons_information(bundle_info):
|
||||
"""Receive information about addons to use from server.
|
||||
|
||||
Todos:
|
||||
Actually ask server for the information.
|
||||
Allow project name as optional argument to be able to query information
|
||||
about used addons for specific project.
|
||||
|
||||
Returns:
|
||||
List[Dict[str, Any]]: List of addon information to use.
|
||||
"""
|
||||
|
||||
output = []
|
||||
bundle_addons = bundle_info["addons"]
|
||||
addons = ayon_api.get_addons_info()["addons"]
|
||||
for addon in addons:
|
||||
name = addon["name"]
|
||||
|
|
@ -378,38 +379,73 @@ def _load_ayon_addons(openpype_modules, modules_key, log):
|
|||
|
||||
v3_addons_to_skip = []
|
||||
|
||||
addons_info = _get_ayon_addons_information()
|
||||
bundle_info = _get_ayon_bundle_data()
|
||||
addons_info = _get_ayon_addons_information(bundle_info)
|
||||
if not addons_info:
|
||||
return v3_addons_to_skip
|
||||
|
||||
addons_dir = os.environ.get("AYON_ADDONS_DIR")
|
||||
if not addons_dir:
|
||||
addons_dir = os.path.join(
|
||||
appdirs.user_data_dir("AYON", "Ynput"),
|
||||
"addons"
|
||||
)
|
||||
if not os.path.exists(addons_dir):
|
||||
|
||||
dev_mode_enabled = is_dev_mode_enabled()
|
||||
dev_addons_info = {}
|
||||
if dev_mode_enabled:
|
||||
# Get dev addons info only when dev mode is enabled
|
||||
dev_addons_info = bundle_info.get("addonDevelopment", dev_addons_info)
|
||||
|
||||
addons_dir_exists = os.path.exists(addons_dir)
|
||||
if not addons_dir_exists:
|
||||
log.warning("Addons directory does not exists. Path \"{}\"".format(
|
||||
addons_dir
|
||||
))
|
||||
return v3_addons_to_skip
|
||||
|
||||
for addon_info in addons_info:
|
||||
addon_name = addon_info["name"]
|
||||
addon_version = addon_info["version"]
|
||||
|
||||
folder_name = "{}_{}".format(addon_name, addon_version)
|
||||
addon_dir = os.path.join(addons_dir, folder_name)
|
||||
if not os.path.exists(addon_dir):
|
||||
log.debug((
|
||||
"No localized client code found for addon {} {}."
|
||||
).format(addon_name, addon_version))
|
||||
dev_addon_info = dev_addons_info.get(addon_name, {})
|
||||
use_dev_path = dev_addon_info.get("enabled", False)
|
||||
|
||||
addon_dir = None
|
||||
if use_dev_path:
|
||||
addon_dir = dev_addon_info["path"]
|
||||
if not addon_dir or not os.path.exists(addon_dir):
|
||||
log.warning((
|
||||
"Dev addon {} {} path does not exists. Path \"{}\""
|
||||
).format(addon_name, addon_version, addon_dir))
|
||||
continue
|
||||
|
||||
elif addons_dir_exists:
|
||||
folder_name = "{}_{}".format(addon_name, addon_version)
|
||||
addon_dir = os.path.join(addons_dir, folder_name)
|
||||
if not os.path.exists(addon_dir):
|
||||
log.debug((
|
||||
"No localized client code found for addon {} {}."
|
||||
).format(addon_name, addon_version))
|
||||
continue
|
||||
|
||||
if not addon_dir:
|
||||
continue
|
||||
|
||||
sys.path.insert(0, addon_dir)
|
||||
imported_modules = []
|
||||
for name in os.listdir(addon_dir):
|
||||
# Ignore of files is implemented to be able to run code from code
|
||||
# where usually is more files than just the addon
|
||||
# Ignore start and setup scripts
|
||||
if name in ("setup.py", "start.py"):
|
||||
continue
|
||||
|
||||
path = os.path.join(addon_dir, name)
|
||||
basename, ext = os.path.splitext(name)
|
||||
# Ignore folders/files with dot in name
|
||||
# - dot names cannot be imported in Python
|
||||
if "." in basename:
|
||||
continue
|
||||
is_dir = os.path.isdir(path)
|
||||
is_py_file = ext.lower() == ".py"
|
||||
if not is_py_file and not is_dir:
|
||||
|
|
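_load_ayon_addons() above branches on per-addon development settings taken from the bundle; a hedged sketch of the bundle data it consumes, where only the keys the code reads ("addons", "addonDevelopment", "enabled", "path", "isDev", "activeUser") come from this diff and all values are illustrative:

bundle_info = {
    "name": "dev_john",
    "isDev": True,
    "activeUser": "john",
    "addons": {"core": "0.1.0", "houdini": "0.2.1"},
    "addonDevelopment": {
        "houdini": {
            "enabled": True,
            "path": "/home/john/dev/ayon-houdini/client",
        }
    },
}

# With dev mode enabled, the "houdini" addon would be imported from the
# development path instead of the "<name>_<version>" folder in AYON_ADDONS_DIR.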
|
|||
|
|
@ -1,4 +1,3 @@
|
|||
from copy import deepcopy
|
||||
import re
|
||||
import os
|
||||
import json
|
||||
|
|
@ -7,6 +6,7 @@ import functools
|
|||
import platform
|
||||
import tempfile
|
||||
import warnings
|
||||
from copy import deepcopy
|
||||
|
||||
from openpype import PACKAGE_DIR
|
||||
from openpype.settings import get_project_settings
|
||||
|
|
@ -356,7 +356,10 @@ def parse_colorspace_from_filepath(
|
|||
"Must provide `config_path` if `colorspaces` is not provided."
|
||||
)
|
||||
|
||||
colorspaces = colorspaces or get_ocio_config_colorspaces(config_path)
|
||||
colorspaces = (
|
||||
colorspaces
|
||||
or get_ocio_config_colorspaces(config_path)["colorspaces"]
|
||||
)
|
||||
underscored_colorspaces = {
|
||||
key.replace(" ", "_"): key for key in colorspaces
|
||||
if " " in key
|
||||
|
|
@ -393,7 +396,7 @@ def validate_imageio_colorspace_in_config(config_path, colorspace_name):
|
|||
Returns:
|
||||
bool: True if exists
|
||||
"""
|
||||
colorspaces = get_ocio_config_colorspaces(config_path)
|
||||
colorspaces = get_ocio_config_colorspaces(config_path)["colorspaces"]
|
||||
if colorspace_name not in colorspaces:
|
||||
raise KeyError(
|
||||
"Missing colorspace '{}' in config file '{}'".format(
|
||||
|
|
@ -530,6 +533,157 @@ def get_ocio_config_colorspaces(config_path):
|
|||
return CachedData.ocio_config_colorspaces[config_path]
|
||||
|
||||
|
||||
def convert_colorspace_enumerator_item(
|
||||
colorspace_enum_item,
|
||||
config_items
|
||||
):
|
||||
"""Convert colorspace enumerator item to dictionary
|
||||
|
||||
Args:
|
||||
colorspace_item (str): colorspace and family in couple
|
||||
config_items (dict[str,dict]): colorspace data
|
||||
|
||||
Returns:
|
||||
dict: colorspace data
|
||||
"""
|
||||
if "::" not in colorspace_enum_item:
|
||||
return None
|
||||
|
||||
# split string with `::` separator and set first as key and second as value
|
||||
item_type, item_name = colorspace_enum_item.split("::")
|
||||
|
||||
item_data = None
|
||||
if item_type == "aliases":
|
||||
# loop through all colorspaces and find matching alias
|
||||
for name, _data in config_items.get("colorspaces", {}).items():
|
||||
if item_name in _data.get("aliases", []):
|
||||
item_data = deepcopy(_data)
|
||||
item_data.update({
|
||||
"name": name,
|
||||
"type": "colorspace"
|
||||
})
|
||||
break
|
||||
else:
|
||||
# find matching colorspace item found in labeled_colorspaces
|
||||
item_data = config_items.get(item_type, {}).get(item_name)
|
||||
if item_data:
|
||||
item_data = deepcopy(item_data)
|
||||
item_data.update({
|
||||
"name": item_name,
|
||||
"type": item_type
|
||||
})
|
||||
|
||||
# raise exception if item is not found
|
||||
if not item_data:
|
||||
message_config_keys = ", ".join(
|
||||
"'{}':{}".format(
|
||||
key,
|
||||
set(config_items.get(key, {}).keys())
|
||||
) for key in config_items.keys()
|
||||
)
|
||||
raise KeyError(
|
||||
"Missing colorspace item '{}' in config data: [{}]".format(
|
||||
colorspace_enum_item, message_config_keys
|
||||
)
|
||||
)
|
||||
|
||||
return item_data
|
||||
|
||||
|
||||
def get_colorspaces_enumerator_items(
|
||||
config_items,
|
||||
include_aliases=False,
|
||||
include_looks=False,
|
||||
include_roles=False,
|
||||
include_display_views=False
|
||||
):
|
||||
"""Get all colorspace data with labels
|
||||
|
||||
Wrapper function for aggregating all names and its families.
|
||||
Families can be used for building menu and submenus in gui.
|
||||
|
||||
Args:
|
||||
config_items (dict[str,dict]): colorspace data coming from
|
||||
`get_ocio_config_colorspaces` function
|
||||
include_aliases (bool): include aliases in result
|
||||
include_looks (bool): include looks in result
|
||||
include_roles (bool): include roles in result
|
||||
|
||||
Returns:
|
||||
list[tuple[str,str]]: colorspace and family in couple
|
||||
"""
|
||||
labeled_colorspaces = []
|
||||
aliases = set()
|
||||
colorspaces = set()
|
||||
looks = set()
|
||||
roles = set()
|
||||
display_views = set()
|
||||
for items_type, colorspace_items in config_items.items():
|
||||
if items_type == "colorspaces":
|
||||
for color_name, color_data in colorspace_items.items():
|
||||
if color_data.get("aliases"):
|
||||
aliases.update([
|
||||
(
|
||||
"aliases::{}".format(alias_name),
|
||||
"[alias] {} ({})".format(alias_name, color_name)
|
||||
)
|
||||
for alias_name in color_data["aliases"]
|
||||
])
|
||||
colorspaces.add((
|
||||
"{}::{}".format(items_type, color_name),
|
||||
"[colorspace] {}".format(color_name)
|
||||
))
|
||||
|
||||
elif items_type == "looks":
|
||||
looks.update([
|
||||
(
|
||||
"{}::{}".format(items_type, name),
|
||||
"[look] {} ({})".format(name, role_data["process_space"])
|
||||
)
|
||||
for name, role_data in colorspace_items.items()
|
||||
])
|
||||
|
||||
elif items_type == "displays_views":
|
||||
display_views.update([
|
||||
(
|
||||
"{}::{}".format(items_type, name),
|
||||
"[view (display)] {}".format(name)
|
||||
)
|
||||
for name, _ in colorspace_items.items()
|
||||
])
|
||||
|
||||
elif items_type == "roles":
|
||||
roles.update([
|
||||
(
|
||||
"{}::{}".format(items_type, name),
|
||||
"[role] {} ({})".format(name, role_data["colorspace"])
|
||||
)
|
||||
for name, role_data in colorspace_items.items()
|
||||
])
|
||||
|
||||
if roles and include_roles:
|
||||
roles = sorted(roles, key=lambda x: x[0])
|
||||
labeled_colorspaces.extend(roles)
|
||||
|
||||
# add colorspaces as second so it is not first in menu
|
||||
colorspaces = sorted(colorspaces, key=lambda x: x[0])
|
||||
labeled_colorspaces.extend(colorspaces)
|
||||
|
||||
if aliases and include_aliases:
|
||||
aliases = sorted(aliases, key=lambda x: x[0])
|
||||
labeled_colorspaces.extend(aliases)
|
||||
|
||||
if looks and include_looks:
|
||||
looks = sorted(looks, key=lambda x: x[0])
|
||||
labeled_colorspaces.extend(looks)
|
||||
|
||||
if display_views and include_display_views:
|
||||
display_views = sorted(display_views, key=lambda x: x[0])
|
||||
labeled_colorspaces.extend(display_views)
|
||||
|
||||
return labeled_colorspaces
|
||||
|
||||
|
||||
# TODO: remove this in future - backward compatibility
|
||||
@deprecated("_get_wrapped_with_subprocess")
|
||||
def get_colorspace_data_subprocess(config_path):
|
||||
|
|
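get_ocio_config_colorspaces() now returns a dict keyed by item type instead of a flat name mapping; a hedged sketch of the structure the enumerator helpers above walk, with all names illustrative:

config_items = {
    "roles": {"scene_linear": {"colorspace": "ACEScg"}},
    "colorspaces": {
        "ACEScg": {
            "family": "ACES",
            "categories": [],
            "aliases": ["lin_ap1"],
            "equalitygroup": "",
        }
    },
    "displays_views": {
        "ACES 1.0 - SDR Video (sRGB - Display)": {
            "display": "sRGB - Display",
            "view": "ACES 1.0 - SDR Video",
        }
    },
    "looks": {"show_grade": {"process_space": "ACEScg"}},
}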
@@ -107,6 +107,7 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
        "rig",
        "plate",
        "look",
        "ociolook",
        "audio",
        "yetiRig",
        "yeticache",
@@ -55,6 +55,9 @@ def get_openpype_staging_icon_filepath():


def get_openpype_icon_filepath(staging=None):
    if AYON_SERVER_ENABLED and os.getenv("AYON_USE_DEV") == "1":
        return get_resource("icons", "AYON_icon_dev.png")

    if staging is None:
        staging = is_running_staging()

@@ -68,7 +71,9 @@ def get_openpype_splash_filepath(staging=None):
        staging = is_running_staging()

    if AYON_SERVER_ENABLED:
        if staging:
        if os.getenv("AYON_USE_DEV") == "1":
            splash_file_name = "AYON_splash_dev.png"
        elif staging:
            splash_file_name = "AYON_splash_staging.png"
        else:
            splash_file_name = "AYON_splash.png"
openpype/resources/icons/AYON_icon_dev.png (new binary file, 16 KiB, not shown)
openpype/resources/icons/AYON_splash_dev.png (new binary file, 21 KiB, not shown)
@@ -106,11 +106,47 @@ def _get_colorspace_data(config_path):

    config = ocio.Config().CreateFromFile(str(config_path))

    return {
        c_.getName(): c_.getFamily()
        for c_ in config.getColorSpaces()
    colorspace_data = {
        "roles": {},
        "colorspaces": {
            color.getName(): {
                "family": color.getFamily(),
                "categories": list(color.getCategories()),
                "aliases": list(color.getAliases()),
                "equalitygroup": color.getEqualityGroup(),
            }
            for color in config.getColorSpaces()
        },
        "displays_views": {
            f"{view} ({display})": {
                "display": display,
                "view": view
            }
            for display in config.getDisplays()
            for view in config.getViews(display)
        },
        "looks": {}
    }

    # add looks
    looks = config.getLooks()
    if looks:
        colorspace_data["looks"] = {
            look.getName(): {"process_space": look.getProcessSpace()}
            for look in looks
        }

    # add roles
    roles = config.getRoles()
    if roles:
        colorspace_data["roles"] = {
            role: {"colorspace": colorspace}
            for (role, colorspace) in roles
        }

    return colorspace_data


@config.command(
    name="get_views",
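For context, a hedged sketch of how the "displays_views" keys above are formed with the same OCIO calls; the PyOpenColorIO import name and the config path are assumptions, the calls themselves mirror the code above:

import PyOpenColorIO as ocio  # assumed import behind the "ocio" alias

config = ocio.Config().CreateFromFile("/path/to/config.ocio")
display_views = {
    f"{view} ({display})": {"display": display, "view": view}
    for display in config.getDisplays()
    for view in config.getViews(display)
}
# e.g. {"ACES 1.0 - SDR Video (sRGB - Display)": {...}, ...}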
@@ -290,6 +290,16 @@ def _convert_modules_system(
        modules_settings[key] = value


def is_dev_mode_enabled():
    """Check if dev mode is enabled in AYON.

    Returns:
        bool: True if dev mode is enabled.
    """

    return os.getenv("AYON_USE_DEV") == "1"


def convert_system_settings(ayon_settings, default_settings, addon_versions):
    default_settings = copy.deepcopy(default_settings)
    output = {

@@ -1400,15 +1410,39 @@ class _AyonSettingsCache:
        if _AyonSettingsCache.variant is None:
            from openpype.lib.openpype_version import is_staging_enabled

            _AyonSettingsCache.variant = (
                "staging" if is_staging_enabled() else "production"
            )
            variant = "production"
            if is_dev_mode_enabled():
                variant = cls._get_dev_mode_settings_variant()
            elif is_staging_enabled():
                variant = "staging"
            _AyonSettingsCache.variant = variant
        return _AyonSettingsCache.variant

    @classmethod
    def _get_bundle_name(cls):
        return os.environ["AYON_BUNDLE_NAME"]

    @classmethod
    def _get_dev_mode_settings_variant(cls):
        """Develop mode settings variant.

        Returns:
            str: Name of settings variant.
        """

        bundles = ayon_api.get_bundles()
        user = ayon_api.get_user()
        username = user["name"]
        for bundle in bundles:
            if (
                bundle.get("isDev")
                and bundle.get("activeUser") == username
            ):
                return bundle["name"]
        # Return a fake variant - distribution logic will tell the user that
        # no dev bundle is set for them
        return "dev"

    @classmethod
    def get_value_by_project(cls, project_name):
        cache_item = _AyonSettingsCache.cache_by_project_name[project_name]
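The variant resolution above boils down to a short decision chain; a hedged sketch with the bundle data shape simplified to the keys the code reads:

import os

def resolve_settings_variant(staging_enabled, bundles, username):
    """Sketch of the order used by _AyonSettingsCache above."""
    if os.getenv("AYON_USE_DEV") == "1":
        for bundle in bundles:
            if bundle.get("isDev") and bundle.get("activeUser") == username:
                return bundle["name"]
        return "dev"  # fallback when no dev bundle is assigned to the user
    if staging_enabled:
        return "staging"
    return "production"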
@@ -1,5 +1,6 @@
{
    "general": {
        "add_self_publish_button": false,
        "update_houdini_var_context": {
            "enabled": true,
            "houdini_vars": [

@@ -5,6 +5,11 @@
    "collapsible": true,
    "is_group": true,
    "children": [
        {
            "type": "boolean",
            "key": "add_self_publish_button",
            "label": "Add Self Publish Button"
        },
        {
            "type": "dict",
            "collapsible": true,
@@ -251,6 +251,30 @@ class LabelAttrWidget(_BaseAttrDefWidget):
        self.main_layout.addWidget(input_widget, 0)


class ClickableLineEdit(QtWidgets.QLineEdit):
    clicked = QtCore.Signal()

    def __init__(self, text, parent):
        super(ClickableLineEdit, self).__init__(parent)
        self.setText(text)
        self.setReadOnly(True)

        self._mouse_pressed = False

    def mousePressEvent(self, event):
        if event.button() == QtCore.Qt.LeftButton:
            self._mouse_pressed = True
        super(ClickableLineEdit, self).mousePressEvent(event)

    def mouseReleaseEvent(self, event):
        if self._mouse_pressed:
            self._mouse_pressed = False
            if self.rect().contains(event.pos()):
                self.clicked.emit()

        super(ClickableLineEdit, self).mouseReleaseEvent(event)


class NumberAttrWidget(_BaseAttrDefWidget):
    def _ui_init(self):
        decimals = self.attr_def.decimals
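`ClickableLineEdit` is a read-only `QLineEdit` that emits its `clicked` signal only when the left button is pressed and released inside the widget; the publisher uses it as the `< Multiselection >` placeholder. A hedged, standalone usage sketch (the `qtpy` import and the connected slot are illustrative assumptions; it assumes it runs in the module above where `ClickableLineEdit` is defined):

```python
from qtpy import QtWidgets  # assumed Qt binding; the widgets above use the same Qt APIs

app = QtWidgets.QApplication([])
placeholder = ClickableLineEdit("< Multiselection >", None)
placeholder.clicked.connect(lambda: print("placeholder clicked, switch back to the spin box"))
placeholder.show()
app.exec_()
```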
@@ -270,20 +294,37 @@ class NumberAttrWidget(_BaseAttrDefWidget):
        input_widget.setButtonSymbols(
            QtWidgets.QAbstractSpinBox.ButtonSymbols.NoButtons
        )
        input_line_edit = input_widget.lineEdit()
        input_widget.installEventFilter(self)

        multisel_widget = ClickableLineEdit("< Multiselection >", self)

        input_widget.valueChanged.connect(self._on_value_change)
        multisel_widget.clicked.connect(self._on_multi_click)

        self._input_widget = input_widget
        self._input_line_edit = input_line_edit
        self._multisel_widget = multisel_widget
        self._last_multivalue = None
        self._multivalue = False

        self.main_layout.addWidget(input_widget, 0)
        self.main_layout.addWidget(multisel_widget, 0)

    def _on_value_change(self, new_value):
        self.value_changed.emit(new_value, self.attr_def.id)

    def eventFilter(self, obj, event):
        if (
            self._multivalue
            and obj is self._input_widget
            and event.type() == QtCore.QEvent.FocusOut
        ):
            self._set_multiselection_visible(True)
        return False

    def current_value(self):
        return self._input_widget.value()

    def set_value(self, value, multivalue=False):
        self._last_multivalue = None
        if multivalue:
            set_value = set(value)
            if None in set_value:
@@ -291,13 +332,47 @@ class NumberAttrWidget(_BaseAttrDefWidget):
                set_value.add(self.attr_def.default)

            if len(set_value) > 1:
                self._input_widget.setSpecialValueText("Multiselection")
                self._last_multivalue = next(iter(set_value), None)
                self._set_multiselection_visible(True)
                self._multivalue = True
                return
            value = tuple(set_value)[0]

        self._multivalue = False
        self._set_multiselection_visible(False)

        if self.current_value != value:
            self._input_widget.setValue(value)

    def _on_value_change(self, new_value):
        self._multivalue = False
        self.value_changed.emit(new_value, self.attr_def.id)

    def _on_multi_click(self):
        self._set_multiselection_visible(False, True)

    def _set_multiselection_visible(self, visible, change_focus=False):
        self._input_widget.setVisible(not visible)
        self._multisel_widget.setVisible(visible)
        if visible:
            return

        # Change value once user clicked on the input field
        if self._last_multivalue is None:
            value = self.attr_def.default
        else:
            value = self._last_multivalue
        self._input_widget.blockSignals(True)
        self._input_widget.setValue(value)
        self._input_widget.blockSignals(False)
        if not change_focus:
            return
        # Change focus to input field and move cursor to the end
        self._input_widget.setFocus(QtCore.Qt.MouseFocusReason)
        self._input_line_edit.setCursorPosition(
            len(self._input_line_edit.text())
        )


class TextAttrWidget(_BaseAttrDefWidget):
    def _ui_init(self):
@@ -447,11 +447,12 @@ class LoaderActionsModel:
            project_doc["code"] = project_doc["data"]["code"]

        for version_doc in version_docs:
            version_id = version_doc["_id"]
            product_id = version_doc["parent"]
            product_doc = product_docs_by_id[product_id]
            folder_id = product_doc["parent"]
            folder_doc = folder_docs_by_id[folder_id]
            version_context_by_id[product_id] = {
            version_context_by_id[version_id] = {
                "project": project_doc,
                "asset": folder_doc,
                "subset": product_doc,
@@ -797,6 +797,7 @@ class InstanceCardView(AbstractInstanceView):
                widget.set_active(value)
        else:
            self._select_item_clear(instance_id, group_name, instance_widget)
        self.selection_changed.emit()
        self.active_changed.emit()

    def _on_widget_selection(self, instance_id, group_name, selection_type):
@@ -388,6 +388,45 @@ class PublisherWindow(QtWidgets.QDialog):
    def controller(self):
        return self._controller

    def show_and_publish(self, comment=None):
        """Show the window and start publishing.

        The method will reset controller and start the publishing afterwards.

        Todos:
            Move validations from '_on_publish_clicked' and change of
                'comment' value in controller to controller so it can be
                simplified.

        Args:
            comment (Optional[str]): Comment to be set to publish.
                If is set to 'None' a comment is not changed at all.
        """

        self._reset_on_show = False
        self._reset_on_first_show = False

        if comment is not None:
            self.set_comment(comment)
        self.make_sure_is_visible()
        # Reset controller
        self._controller.reset()
        # Fake publish click to trigger save validation and propagate
        # comment to controller
        self._on_publish_clicked()

    def set_comment(self, comment):
        """Change comment text.

        Todos:
            Be able to set the comment via controller.

        Args:
            comment (str): Comment text.
        """

        self._comment_input.setText(comment)

    def make_sure_is_visible(self):
        if self._window_is_visible:
            self.setWindowState(QtCore.Qt.WindowActive)
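`show_and_publish` gives host integrations a single call that resets the publish controller, optionally fills the comment field and presses publish. A hedged sketch of how a host-side script might drive it; the `"publisher"` tool name is an assumption, while `get_tool_by_name` is the helper this commit already imports in the Houdini lib:

```python
from openpype.tools.utils.host_tools import get_tool_by_name

# Assumed tool name; whichever name HostToolsHelper registers the publisher under.
publisher_window = get_tool_by_name("publisher")
publisher_window.show_and_publish(comment="Automated publish triggered from the host")
```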
@@ -296,7 +296,8 @@ class HostToolsHelper:
            ILoadHost.validate_load_methods(host)

        publisher_window = PublisherWindow(
            controller=controller, parent=parent or self._parent
            controller=controller,
            parent=parent or self._parent
        )
        self._publisher_tool = publisher_window

14 openpype/vendor/python/common/ayon_api/_api.py vendored

@@ -602,12 +602,12 @@ def delete_installer(*args, **kwargs):

def download_installer(*args, **kwargs):
    con = get_server_api_connection()
    con.download_installer(*args, **kwargs)
    return con.download_installer(*args, **kwargs)


def upload_installer(*args, **kwargs):
    con = get_server_api_connection()
    con.upload_installer(*args, **kwargs)
    return con.upload_installer(*args, **kwargs)


# Dependency packages

@@ -753,12 +753,12 @@ def get_secrets(*args, **kwargs):

def get_secret(*args, **kwargs):
    con = get_server_api_connection()
    return con.delete_secret(*args, **kwargs)
    return con.get_secret(*args, **kwargs)


def save_secret(*args, **kwargs):
    con = get_server_api_connection()
    return con.delete_secret(*args, **kwargs)
    return con.save_secret(*args, **kwargs)


def delete_secret(*args, **kwargs):

@@ -978,12 +978,14 @@ def delete_project(project_name):

def get_thumbnail_by_id(project_name, thumbnail_id):
    con = get_server_api_connection()
    con.get_thumbnail_by_id(project_name, thumbnail_id)
    return con.get_thumbnail_by_id(project_name, thumbnail_id)


def get_thumbnail(project_name, entity_type, entity_id, thumbnail_id=None):
    con = get_server_api_connection()
    con.get_thumbnail(project_name, entity_type, entity_id, thumbnail_id)
    return con.get_thumbnail(
        project_name, entity_type, entity_id, thumbnail_id
    )


def get_folder_thumbnail(project_name, folder_id, thumbnail_id=None):
@@ -144,6 +144,7 @@ def product_types_query(fields):
        query_queue.append((k, v, field))
    return query


def project_product_types_query(fields):
    query = GraphQlQuery("ProjectProductTypes")
    project_query = query.add_field("project")

@@ -175,6 +176,8 @@ def folders_graphql_query(fields):
    parent_folder_ids_var = query.add_variable("parentFolderIds", "[String!]")
    folder_paths_var = query.add_variable("folderPaths", "[String!]")
    folder_names_var = query.add_variable("folderNames", "[String!]")
    folder_types_var = query.add_variable("folderTypes", "[String!]")
    statuses_var = query.add_variable("folderStatuses", "[String!]")
    has_products_var = query.add_variable("folderHasProducts", "Boolean!")

    project_field = query.add_field("project")

@@ -185,6 +188,8 @@ def folders_graphql_query(fields):
    folders_field.set_filter("parentIds", parent_folder_ids_var)
    folders_field.set_filter("names", folder_names_var)
    folders_field.set_filter("paths", folder_paths_var)
    folders_field.set_filter("folderTypes", folder_types_var)
    folders_field.set_filter("statuses", statuses_var)
    folders_field.set_filter("hasProducts", has_products_var)

    nested_fields = fields_to_dict(fields)
177 openpype/vendor/python/common/ayon_api/server_api.py vendored

@@ -75,6 +75,7 @@ from .utils import (
    TransferProgress,
    create_dependency_package_basename,
    ThumbnailContent,
    get_default_timeout,
)

PatternType = type(re.compile(""))

@@ -351,7 +352,6 @@ class ServerAPI(object):
        timeout (Optional[float]): Timeout for requests.
        max_retries (Optional[int]): Number of retries for requests.
    """
    _default_timeout = 10.0
    _default_max_retries = 3

    def __init__(

@@ -500,20 +500,13 @@ class ServerAPI(object):
    def get_default_timeout(cls):
        """Default value for requests timeout.

        First looks for environment variable SERVER_TIMEOUT_ENV_KEY which
        can affect timeout value. If not available then use class
        attribute '_default_timeout'.
        Utils function 'get_default_timeout' is used by default.

        Returns:
            float: Timeout value in seconds.
        """

        try:
            return float(os.environ.get(SERVER_TIMEOUT_ENV_KEY))
        except (ValueError, TypeError):
            pass

        return cls._default_timeout
        return get_default_timeout()

    @classmethod
    def get_default_max_retries(cls):
@@ -662,13 +655,10 @@ class ServerAPI(object):
            as default variant.

        Args:
            variant (Literal['production', 'staging']): Settings variant name.
            variant (str): Settings variant name. It is possible to use
                'production', 'staging' or name of dev bundle.
        """

        if variant not in ("production", "staging"):
            raise ValueError((
                "Invalid variant name {}. Expected 'production' or 'staging'"
            ).format(variant))
        self._default_settings_variant = variant

    default_settings_variant = property(
@@ -938,8 +928,8 @@ class ServerAPI(object):
                int(re_match.group("major")),
                int(re_match.group("minor")),
                int(re_match.group("patch")),
                re_match.group("prerelease"),
                re_match.group("buildmetadata")
                re_match.group("prerelease") or "",
                re_match.group("buildmetadata") or "",
            )
        return self._server_version_tuple

@@ -1140,31 +1130,41 @@ class ServerAPI(object):

        response = None
        new_response = None
        for _ in range(max_retries):
        for retry_idx in reversed(range(max_retries)):
            try:
                response = function(url, **kwargs)
                break

            except ConnectionRefusedError:
                if retry_idx == 0:
                    self.log.warning(
                        "Connection error happened.", exc_info=True
                    )

                # Server may be restarting
                new_response = RestApiResponse(
                    None,
                    {"detail": "Unable to connect the server. Connection refused"}
                )

            except requests.exceptions.Timeout:
                # Connection timed out
                new_response = RestApiResponse(
                    None,
                    {"detail": "Connection timed out."}
                )

            except requests.exceptions.ConnectionError:
                # Other connection error (ssl, etc) - does not make sense to
                # try call server again
                # Log warning only on last attempt
                if retry_idx == 0:
                    self.log.warning(
                        "Connection error happened.", exc_info=True
                    )

                new_response = RestApiResponse(
                    None,
                    {"detail": "Unable to connect the server. Connection error"}
                )
                break

            time.sleep(0.1)

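The retry loop now iterates `reversed(range(max_retries))` so `retry_idx == 0` marks the last attempt, which is the only one that logs the connection warning. A minimal sketch of the same "log only on the final retry" pattern, independent of `ServerAPI` (all names here are illustrative):

```python
import logging
import time

log = logging.getLogger(__name__)


def call_with_retries(func, max_retries=3):
    for retry_idx in reversed(range(max_retries)):
        try:
            return func()
        except ConnectionError:
            # Warn only once, on the last attempt, to avoid log spam.
            if retry_idx == 0:
                log.warning("Connection error happened.", exc_info=True)
        time.sleep(0.1)
    return None
```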
@@ -1349,7 +1349,9 @@ class ServerAPI(object):
        status=None,
        description=None,
        summary=None,
        payload=None
        payload=None,
        progress=None,
        retries=None
    ):
        kwargs = {
            key: value
@@ -1360,9 +1362,27 @@ class ServerAPI(object):
                ("description", description),
                ("summary", summary),
                ("payload", payload),
                ("progress", progress),
                ("retries", retries),
            )
            if value is not None
        }
        # 'progress' and 'retries' are available since 0.5.x server version
        major, minor, _, _, _ = self.server_version_tuple
        if (major, minor) < (0, 5):
            args = []
            if progress is not None:
                args.append("progress")
            if retries is not None:
                args.append("retries")
            fields = ", ".join("'{}'".format(f) for f in args)
            ending = "s" if len(args) > 1 else ""
            raise ValueError((
                "Your server version '{}' does not support update"
                " of {} field{} on event. The fields are supported since"
                " server version '0.5'."
            ).format(self.get_server_version(), fields, ending))

        response = self.patch(
            "events/{}".format(event_id),
            **kwargs
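`update_event` gains `progress` and `retries` arguments and refuses them on servers older than 0.5. A hedged example of a worker reporting progress; the event id, the status value, and the positional `event_id` argument are assumptions based on the hunk above:

```python
import ayon_api

con = ayon_api.get_server_api_connection()
con.update_event(
    "0123456789abcdef0123456789abcdef",  # made-up event id
    status="in_progress",                # assumed valid event status
    progress=42.0,                       # requires AYON server 0.5+
    retries=1,
)
```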
@@ -1434,6 +1454,7 @@ class ServerAPI(object):
        description=None,
        sequential=None,
        events_filter=None,
        max_retries=None,
    ):
        """Enroll job based on events.


@@ -1475,8 +1496,12 @@ class ServerAPI(object):
                in target event.
            sequential (Optional[bool]): The source topic must be processed
                in sequence.
            events_filter (Optional[ayon_server.sqlfilter.Filter]): A dict-like
                with conditions to filter the source event.
            events_filter (Optional[dict[str, Any]]): Filtering conditions
                to filter the source event. For more technical specifications
                look to server backed 'ayon_server.sqlfilter.Filter'.
                TODO: Add example of filters.
            max_retries (Optional[int]): How many times can be event retried.
                Default value is based on server (3 at the time of this PR).

        Returns:
            Union[None, dict[str, Any]]: None if there is no event matching

@@ -1487,6 +1512,7 @@ class ServerAPI(object):
            "sourceTopic": source_topic,
            "targetTopic": target_topic,
            "sender": sender,
            "maxRetries": max_retries,
        }
        if sequential is not None:
            kwargs["sequential"] = sequential
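`enroll_event_job` adds a `max_retries` argument that is forwarded to the server as `maxRetries`. A hedged example of a processing service enrolling a job; topic names and sender are placeholders, and `events_filter` is omitted because its exact payload shape is only documented on the server side:

```python
import ayon_api

con = ayon_api.get_server_api_connection()
job_event = con.enroll_event_job(
    source_topic="example.source",           # placeholder topic
    target_topic="example.source.process",   # placeholder topic
    sender="worker-01",
    description="Process one pending event",
    sequential=True,
    max_retries=5,
)
if job_event is None:
    print("No matching source event to process")
```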
@@ -2236,6 +2262,34 @@ class ServerAPI(object):
        response.raise_for_status("Failed to create/update dependency")
        return response.data

    def _get_dependency_package_route(
        self, filename=None, platform_name=None
    ):
        major, minor, patch, _, _ = self.server_version_tuple
        if (major, minor, patch) <= (0, 2, 0):
            # Backwards compatibility for AYON server 0.2.0 and lower
            self.log.warning((
                "Using deprecated dependency package route."
                " Please update your AYON server to version 0.2.1 or higher."
                " Backwards compatibility for this route will be removed"
                " in future releases of ayon-python-api."
            ))
            if platform_name is None:
                platform_name = platform.system().lower()
            base = "dependencies"
            if not filename:
                return base
            return "{}/{}/{}".format(base, filename, platform_name)

        if (major, minor) <= (0, 3):
            endpoint = "desktop/dependency_packages"
        else:
            endpoint = "desktop/dependencyPackages"

        if filename:
            return "{}/{}".format(endpoint, filename)
        return endpoint

    def get_dependency_packages(self):
        """Information about dependency packages on server.

@@ -2263,33 +2317,11 @@ class ServerAPI(object):
            server.
        """

        endpoint = "desktop/dependencyPackages"
        major, minor, _, _, _ = self.server_version_tuple
        if major == 0 and minor <= 3:
            endpoint = "desktop/dependency_packages"

        endpoint = self._get_dependency_package_route()
        result = self.get(endpoint)
        result.raise_for_status()
        return result.data

    def _get_dependency_package_route(
        self, filename=None, platform_name=None
    ):
        major, minor, patch, _, _ = self.server_version_tuple
        if major == 0 and (minor > 2 or (minor == 2 and patch >= 1)):
            base = "desktop/dependency_packages"
            if not filename:
                return base
            return "{}/{}".format(base, filename)

        # Backwards compatibility for AYON server 0.2.0 and lower
        if platform_name is None:
            platform_name = platform.system().lower()
        base = "dependencies"
        if not filename:
            return base
        return "{}/{}/{}".format(base, filename, platform_name)

    def create_dependency_package(
        self,
        filename,
@@ -3515,7 +3547,9 @@ class ServerAPI(object):
        folder_ids=None,
        folder_paths=None,
        folder_names=None,
        folder_types=None,
        parent_ids=None,
        statuses=None,
        active=True,
        fields=None,
        own_attributes=False

@@ -3536,8 +3570,12 @@ class ServerAPI(object):
                for filtering.
            folder_names (Optional[Iterable[str]]): Folder names used
                for filtering.
            folder_types (Optional[Iterable[str]]): Folder types used
                for filtering.
            parent_ids (Optional[Iterable[str]]): Ids of folder parents.
                Use 'None' if folder is direct child of project.
            statuses (Optional[Iterable[str]]): Folder statuses used
                for filtering.
            active (Optional[bool]): Filter active/inactive folders.
                Both are returned if is set to None.
            fields (Optional[Iterable[str]]): Fields to be queried for

@@ -3574,6 +3612,18 @@ class ServerAPI(object):
                return
            filters["folderNames"] = list(folder_names)

        if folder_types is not None:
            folder_types = set(folder_types)
            if not folder_types:
                return
            filters["folderTypes"] = list(folder_types)

        if statuses is not None:
            statuses = set(statuses)
            if not statuses:
                return
            filters["folderStatuses"] = list(statuses)

        if parent_ids is not None:
            parent_ids = set(parent_ids)
            if not parent_ids:
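The folder query now supports `folder_types` and `statuses` filters (plus `folderHasProducts` in the GraphQL query above). A hedged example of using them through the high-level getter, assuming these hunks belong to `ServerAPI.get_folders`, which the parameter names suggest; project, type and status values are placeholders:

```python
import ayon_api

con = ayon_api.get_server_api_connection()
shot_folders = list(con.get_folders(
    "my_project",                       # placeholder project name
    folder_types={"Shot"},              # placeholder folder type
    statuses={"In progress"},           # placeholder status name
    fields={"id", "path", "folderType"},
))
```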
@@ -4312,9 +4362,6 @@ class ServerAPI(object):
            fields.remove("attrib")
            fields |= self.get_attributes_fields_for_type("version")

        if active is not None:
            fields.add("active")

        # Make sure fields have minimum required fields
        fields |= {"id", "version"}

@@ -4323,6 +4370,9 @@ class ServerAPI(object):
            use_rest = True
            fields = {"id"}

        if active is not None:
            fields.add("active")

        if own_attributes:
            fields.add("ownAttrib")

@@ -5845,19 +5895,22 @@ class ServerAPI(object):
        """Helper method to get links from server for entity types.

        Example output:
            [
                {
                    "id": "59a212c0d2e211eda0e20242ac120002",
                    "linkType": "reference",
                    "description": "reference link between folders",
                    "projectName": "my_project",
                    "author": "frantadmin",
                    "entityId": "b1df109676db11ed8e8c6c9466b19aa8",
                    "entityType": "folder",
                    "direction": "out"
                },
            {
                "59a212c0d2e211eda0e20242ac120001": [
                    {
                        "id": "59a212c0d2e211eda0e20242ac120002",
                        "linkType": "reference",
                        "description": "reference link between folders",
                        "projectName": "my_project",
                        "author": "frantadmin",
                        "entityId": "b1df109676db11ed8e8c6c9466b19aa8",
                        "entityType": "folder",
                        "direction": "out"
                    },
                    ...
                ],
                ...
            ]
            }

        Args:
            project_name (str): Project where links are.
83 openpype/vendor/python/common/ayon_api/utils.py vendored

@@ -1,3 +1,4 @@
import os
import re
import datetime
import uuid

@@ -15,6 +16,7 @@ except ImportError:
import requests
import unidecode

from .constants import SERVER_TIMEOUT_ENV_KEY
from .exceptions import UrlError

REMOVED_VALUE = object()

@@ -27,6 +29,23 @@ RepresentationParents = collections.namedtuple(
)


def get_default_timeout():
    """Default value for requests timeout.

    First looks for environment variable SERVER_TIMEOUT_ENV_KEY which
    can affect timeout value. If not available then use 10.0 s.

    Returns:
        float: Timeout value in seconds.
    """

    try:
        return float(os.environ.get(SERVER_TIMEOUT_ENV_KEY))
    except (ValueError, TypeError):
        pass
    return 10.0


class ThumbnailContent:
    """Wrapper for thumbnail content.

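`get_default_timeout` moves into `ayon_api.utils` and keeps the environment override. A small sketch of overriding it; the concrete variable name lives in `ayon_api.constants` and is not shown in this diff, so it is referenced through the constant:

```python
import os

from ayon_api.constants import SERVER_TIMEOUT_ENV_KEY
from ayon_api.utils import get_default_timeout

os.environ[SERVER_TIMEOUT_ENV_KEY] = "30"
print(get_default_timeout())  # 30.0 instead of the 10.0 fallback
```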
@@ -231,30 +250,36 @@ def _try_parse_url(url):
        return None


def _try_connect_to_server(url):
def _try_connect_to_server(url, timeout=None):
    if timeout is None:
        timeout = get_default_timeout()
    try:
        # TODO add validation if the url lead to Ayon server
        # - thiw won't validate if the url lead to 'google.com'
        requests.get(url)
        # - this won't validate if the url lead to 'google.com'
        requests.get(url, timeout=timeout)

    except BaseException:
        return False
    return True


def login_to_server(url, username, password):
def login_to_server(url, username, password, timeout=None):
    """Use login to the server to receive token.

    Args:
        url (str): Server url.
        username (str): User's username.
        password (str): User's password.
        timeout (Optional[float]): Timeout for request. Value from
            'get_default_timeout' is used if not specified.

    Returns:
        Union[str, None]: User's token if login was successfull.
            Otherwise 'None'.
    """

    if timeout is None:
        timeout = get_default_timeout()
    headers = {"Content-Type": "application/json"}
    response = requests.post(
        "{}/api/auth/login".format(url),

@@ -262,7 +287,8 @@ def login_to_server(url, username, password):
        json={
            "name": username,
            "password": password
        }
        },
        timeout=timeout,
    )
    token = None
    # 200 - success
@@ -273,47 +299,67 @@ def login_to_server(url, username, password):
    return token


def logout_from_server(url, token):
def logout_from_server(url, token, timeout=None):
    """Logout from server and throw token away.

    Args:
        url (str): Url from which should be logged out.
        token (str): Token which should be used to log out.
        timeout (Optional[float]): Timeout for request. Value from
            'get_default_timeout' is used if not specified.
    """

    if timeout is None:
        timeout = get_default_timeout()
    headers = {
        "Content-Type": "application/json",
        "Authorization": "Bearer {}".format(token)
    }
    requests.post(
        url + "/api/auth/logout",
        headers=headers
        headers=headers,
        timeout=timeout,
    )


def is_token_valid(url, token):
def is_token_valid(url, token, timeout=None):
    """Check if token is valid.

    Token can be a user token or service api key.

    Args:
        url (str): Server url.
        token (str): User's token.
        timeout (Optional[float]): Timeout for request. Value from
            'get_default_timeout' is used if not specified.

    Returns:
        bool: True if token is valid.
    """

    headers = {
    if timeout is None:
        timeout = get_default_timeout()

    base_headers = {
        "Content-Type": "application/json",
        "Authorization": "Bearer {}".format(token)
    }
    response = requests.get(
        "{}/api/users/me".format(url),
        headers=headers
    )
    return response.status_code == 200
    for header_value in (
        {"Authorization": "Bearer {}".format(token)},
        {"X-Api-Key": token},
    ):
        headers = base_headers.copy()
        headers.update(header_value)
        response = requests.get(
            "{}/api/users/me".format(url),
            headers=headers,
            timeout=timeout,
        )
        if response.status_code == 200:
            return True
    return False


def validate_url(url):
def validate_url(url, timeout=None):
    """Validate url if is valid and server is available.

    Validation checks if can be parsed as url and contains scheme.

@@ -334,6 +380,7 @@ def validate_url(url):

    Args:
        url (str): Server url.
        timeout (Optional[int]): Timeout in seconds for connection to server.

    Returns:
        Url which was used to connect to server.

@@ -369,10 +416,10 @@ def validate_url(url):
    # - this will trigger UrlError if both will crash
    if not parsed_url.scheme:
        new_url = "https://" + modified_url
        if _try_connect_to_server(new_url):
        if _try_connect_to_server(new_url, timeout=timeout):
            return new_url

    if _try_connect_to_server(modified_url):
    if _try_connect_to_server(modified_url, timeout=timeout):
        return modified_url

    hints = []
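All of the connection helpers in `ayon_api.utils` now accept an optional `timeout`. A hedged end-to-end sketch of the login flow with explicit timeouts; the URL and credentials are placeholders, and `validate_url` raises `UrlError` when the server cannot be reached:

```python
from ayon_api.utils import (
    validate_url,
    login_to_server,
    is_token_valid,
    logout_from_server,
)

url = validate_url("ayon.example.com", timeout=5)             # placeholder server
token = login_to_server(url, "artist", "hunter2", timeout=5)  # placeholder credentials
if token and is_token_valid(url, token, timeout=5):
    print("Token accepted")
    logout_from_server(url, token, timeout=5)
```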
@@ -1,2 +1,2 @@
"""Package declaring Python API for Ayon server."""
__version__ = "0.4.1"
__version__ = "0.5.1"
@@ -69,7 +69,7 @@
            }
        ]
    },
    "maya": {
    "mayapy": {
        "enabled": true,
        "label": "Maya",
        "icon": "{}/app_icons/maya.png",
@@ -12,6 +12,27 @@ from .publish_plugins import PublishPuginsModel, DEFAULT_PUBLISH_VALUES
from .tools import GlobalToolsModel, DEFAULT_TOOLS_VALUES


class DiskMappingItemModel(BaseSettingsModel):
    _layout = "expanded"
    source: str = Field("", title="Source")
    destination: str = Field("", title="Destination")


class DiskMappingModel(BaseSettingsModel):
    windows: list[DiskMappingItemModel] = Field(
        title="Windows",
        default_factory=list,
    )
    linux: list[DiskMappingItemModel] = Field(
        title="Linux",
        default_factory=list,
    )
    darwin: list[DiskMappingItemModel] = Field(
        title="MacOS",
        default_factory=list,
    )


class ImageIOFileRuleModel(BaseSettingsModel):
    name: str = Field("", title="Rule name")
    pattern: str = Field("", title="Regex pattern")

@@ -97,6 +118,10 @@ class CoreSettings(BaseSettingsModel):
        widget="textarea",
        scope=["studio"],
    )
    disk_mapping: DiskMappingModel = Field(
        default_factory=DiskMappingModel,
        title="Disk mapping",
    )
    tools: GlobalToolsModel = Field(
        default_factory=GlobalToolsModel,
        title="Tools"
@@ -25,6 +25,10 @@ class UpdateHoudiniVarcontextModel(BaseSettingsModel):


class GeneralSettingsModel(BaseSettingsModel):
    add_self_publish_button: bool = Field(
        False,
        title="Add Self Publish Button"
    )
    update_houdini_var_context: UpdateHoudiniVarcontextModel = Field(
        default_factory=UpdateHoudiniVarcontextModel,
        title="Update Houdini Vars on context change"

@@ -32,6 +36,7 @@ class GeneralSettingsModel(BaseSettingsModel):


DEFAULT_GENERAL_SETTINGS = {
    "add_self_publish_button": False,
    "update_houdini_var_context": {
        "enabled": True,
        "houdini_vars": [
@@ -1 +1 @@
__version__ = "0.1.4"
__version__ = "0.1.5"
@@ -37,7 +37,7 @@ class TestPipelinePublishPlugins(TestPipeline):
    # files are the same as those used in `test_pipeline_colorspace`
    TEST_FILES = [
        (
            "1Lf-mFxev7xiwZCWfImlRcw7Fj8XgNQMh",
            "1csqimz8bbNcNgxtEXklLz6GRv91D3KgA",
            "test_pipeline_colorspace.zip",
            ""
        )

@@ -123,8 +123,7 @@ class TestPipelinePublishPlugins(TestPipeline):

    def test_get_colorspace_settings(self, context, config_path_asset):
        expected_config_template = (
            "{root[work]}/{project[name]}"
            "/{hierarchy}/{asset}/config/aces.ocio"
            "{root[work]}/{project[name]}/config/aces.ocio"
        )
        expected_file_rules = {
            "comp_review": {

@@ -177,16 +176,16 @@ class TestPipelinePublishPlugins(TestPipeline):
        # load plugin function for testing
        plugin = publish_plugins.ColormanagedPyblishPluginMixin()
        plugin.log = log
        context.data["imageioSettings"] = (config_data_nuke, file_rules_nuke)
        plugin.set_representation_colorspace(
            representation_nuke, context,
            colorspace_settings=(config_data_nuke, file_rules_nuke)
            representation_nuke, context
        )
        # load plugin function for testing
        plugin = publish_plugins.ColormanagedPyblishPluginMixin()
        plugin.log = log
        context.data["imageioSettings"] = (config_data_hiero, file_rules_hiero)
        plugin.set_representation_colorspace(
            representation_hiero, context,
            colorspace_settings=(config_data_hiero, file_rules_hiero)
            representation_hiero, context
        )

        colorspace_data_nuke = representation_nuke.get("colorspaceData")
@@ -0,0 +1,118 @@
import unittest
from openpype.pipeline.colorspace import convert_colorspace_enumerator_item


class TestConvertColorspaceEnumeratorItem(unittest.TestCase):
    def setUp(self):
        self.config_items = {
            "colorspaces": {
                "sRGB": {
                    "aliases": ["sRGB_1"],
                    "family": "colorspace",
                    "categories": ["colors"],
                    "equalitygroup": "equalitygroup",
                },
                "Rec.709": {
                    "aliases": ["rec709_1", "rec709_2"],
                },
            },
            "looks": {
                "sRGB_to_Rec.709": {
                    "process_space": "sRGB",
                },
            },
            "displays_views": {
                "sRGB (ACES)": {
                    "view": "sRGB",
                    "display": "ACES",
                },
                "Rec.709 (ACES)": {
                    "view": "Rec.709",
                    "display": "ACES",
                },
            },
            "roles": {
                "compositing_linear": {
                    "colorspace": "linear",
                },
            },
        }

    def test_valid_item(self):
        colorspace_item_data = convert_colorspace_enumerator_item(
            "colorspaces::sRGB", self.config_items)
        self.assertEqual(
            colorspace_item_data,
            {
                "name": "sRGB",
                "type": "colorspaces",
                "aliases": ["sRGB_1"],
                "family": "colorspace",
                "categories": ["colors"],
                "equalitygroup": "equalitygroup"
            }
        )

        alias_item_data = convert_colorspace_enumerator_item(
            "aliases::rec709_1", self.config_items)
        self.assertEqual(
            alias_item_data,
            {
                "aliases": ["rec709_1", "rec709_2"],
                "name": "Rec.709",
                "type": "colorspace"
            }
        )

        display_view_item_data = convert_colorspace_enumerator_item(
            "displays_views::sRGB (ACES)", self.config_items)
        self.assertEqual(
            display_view_item_data,
            {
                "type": "displays_views",
                "name": "sRGB (ACES)",
                "view": "sRGB",
                "display": "ACES"
            }
        )

        role_item_data = convert_colorspace_enumerator_item(
            "roles::compositing_linear", self.config_items)
        self.assertEqual(
            role_item_data,
            {
                "name": "compositing_linear",
                "type": "roles",
                "colorspace": "linear"
            }
        )

        look_item_data = convert_colorspace_enumerator_item(
            "looks::sRGB_to_Rec.709", self.config_items)
        self.assertEqual(
            look_item_data,
            {
                "type": "looks",
                "name": "sRGB_to_Rec.709",
                "process_space": "sRGB"
            }
        )

    def test_invalid_item(self):
        config_items = {
            "RGB": {
                "sRGB": {"red": 255, "green": 255, "blue": 255},
                "AdobeRGB": {"red": 255, "green": 255, "blue": 255},
            }
        }
        with self.assertRaises(KeyError):
            convert_colorspace_enumerator_item("RGB::invalid", config_items)

    def test_missing_config_data(self):
        config_items = {}
        with self.assertRaises(KeyError):
            convert_colorspace_enumerator_item("RGB::sRGB", config_items)


if __name__ == '__main__':
    unittest.main()
@@ -0,0 +1,121 @@
import unittest

from openpype.pipeline.colorspace import get_colorspaces_enumerator_items


class TestGetColorspacesEnumeratorItems(unittest.TestCase):
    def setUp(self):
        self.config_items = {
            "colorspaces": {
                "sRGB": {
                    "aliases": ["sRGB_1"],
                },
                "Rec.709": {
                    "aliases": ["rec709_1", "rec709_2"],
                },
            },
            "looks": {
                "sRGB_to_Rec.709": {
                    "process_space": "sRGB",
                },
            },
            "displays_views": {
                "sRGB (ACES)": {
                    "view": "sRGB",
                    "display": "ACES",
                },
                "Rec.709 (ACES)": {
                    "view": "Rec.709",
                    "display": "ACES",
                },
            },
            "roles": {
                "compositing_linear": {
                    "colorspace": "linear",
                },
            },
        }

    def test_colorspaces(self):
        result = get_colorspaces_enumerator_items(self.config_items)
        expected = [
            ("colorspaces::Rec.709", "[colorspace] Rec.709"),
            ("colorspaces::sRGB", "[colorspace] sRGB"),
        ]
        self.assertEqual(result, expected)

    def test_aliases(self):
        result = get_colorspaces_enumerator_items(
            self.config_items, include_aliases=True)
        expected = [
            ("colorspaces::Rec.709", "[colorspace] Rec.709"),
            ("colorspaces::sRGB", "[colorspace] sRGB"),
            ("aliases::rec709_1", "[alias] rec709_1 (Rec.709)"),
            ("aliases::rec709_2", "[alias] rec709_2 (Rec.709)"),
            ("aliases::sRGB_1", "[alias] sRGB_1 (sRGB)"),
        ]
        self.assertEqual(result, expected)

    def test_looks(self):
        result = get_colorspaces_enumerator_items(
            self.config_items, include_looks=True)
        expected = [
            ("colorspaces::Rec.709", "[colorspace] Rec.709"),
            ("colorspaces::sRGB", "[colorspace] sRGB"),
            ("looks::sRGB_to_Rec.709", "[look] sRGB_to_Rec.709 (sRGB)"),
        ]
        self.assertEqual(result, expected)

    def test_display_views(self):
        result = get_colorspaces_enumerator_items(
            self.config_items, include_display_views=True)
        expected = [
            ("colorspaces::Rec.709", "[colorspace] Rec.709"),
            ("colorspaces::sRGB", "[colorspace] sRGB"),
            ("displays_views::Rec.709 (ACES)", "[view (display)] Rec.709 (ACES)"),  # noqa: E501
            ("displays_views::sRGB (ACES)", "[view (display)] sRGB (ACES)"),
        ]
        self.assertEqual(result, expected)

    def test_roles(self):
        result = get_colorspaces_enumerator_items(
            self.config_items, include_roles=True)
        expected = [
            ("roles::compositing_linear", "[role] compositing_linear (linear)"),  # noqa: E501
            ("colorspaces::Rec.709", "[colorspace] Rec.709"),
            ("colorspaces::sRGB", "[colorspace] sRGB"),
        ]
        self.assertEqual(result, expected)

    def test_all(self):
        message_config_keys = ", ".join(
            "'{}':{}".format(
                key,
                set(self.config_items.get(key, {}).keys())
            ) for key in self.config_items.keys()
        )
        print("Testing with config: [{}]".format(message_config_keys))
        result = get_colorspaces_enumerator_items(
            self.config_items,
            include_aliases=True,
            include_looks=True,
            include_roles=True,
            include_display_views=True,
        )
        expected = [
            ("roles::compositing_linear", "[role] compositing_linear (linear)"),  # noqa: E501
            ("colorspaces::Rec.709", "[colorspace] Rec.709"),
            ("colorspaces::sRGB", "[colorspace] sRGB"),
            ("aliases::rec709_1", "[alias] rec709_1 (Rec.709)"),
            ("aliases::rec709_2", "[alias] rec709_2 (Rec.709)"),
            ("aliases::sRGB_1", "[alias] sRGB_1 (sRGB)"),
            ("looks::sRGB_to_Rec.709", "[look] sRGB_to_Rec.709 (sRGB)"),
            ("displays_views::Rec.709 (ACES)", "[view (display)] Rec.709 (ACES)"),  # noqa: E501
            ("displays_views::sRGB (ACES)", "[view (display)] sRGB (ACES)"),
        ]
        self.assertEqual(result, expected)


if __name__ == "__main__":
    unittest.main()