Mirror of https://github.com/ynput/ayon-core.git (synced 2025-12-25 05:14:40 +01:00)

Merge branch 'develop' into bugfix/OP-2803_nuke-farm-publishing-with-multiple-bake-profiles

Commit 138cec73a2: 11 changed files with 446 additions and 17 deletions

@@ -451,6 +451,8 @@ class ExporterReviewMov(ExporterReview):
    def generate_mov(self, farm=False, **kwargs):
        self.publish_on_farm = farm
        reformat_node_add = kwargs["reformat_node_add"]
        reformat_node_config = kwargs["reformat_node_config"]
        bake_viewer_process = kwargs["bake_viewer_process"]
        bake_viewer_input_process_node = kwargs[
            "bake_viewer_input_process"]

@@ -488,6 +490,30 @@ class ExporterReviewMov(ExporterReview):
        self.previous_node = r_node
        self.log.debug("Read... `{}`".format(self._temp_nodes[subset]))

        # add reformat node
        if reformat_node_add:
            # append reformated tag
            add_tags.append("reformated")

            rf_node = nuke.createNode("Reformat")
            for kn_conf in reformat_node_config:
                _type = kn_conf["type"]
                k_name = str(kn_conf["name"])
                k_value = kn_conf["value"]

                # to remove unicode as nuke doesn't like it
                if _type == "string":
                    k_value = str(kn_conf["value"])

                rf_node[k_name].setValue(k_value)

            # connect
            rf_node.setInput(0, self.previous_node)
            self._temp_nodes[subset].append(rf_node)
            self.previous_node = rf_node
            self.log.debug(
                "Reformat... `{}`".format(self._temp_nodes[subset]))

        # only create colorspace baking if toggled on
        if bake_viewer_process:
            if bake_viewer_input_process_node:

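For readers without Nuke at hand, here is a minimal, self-contained sketch of what the new `reformat_node_config` loop does: each entry names a knob and a value, and string values are passed through `str()` so Nuke does not receive unicode. `FakeNode` and `FakeKnob` are stand-ins for the `nuke.createNode("Reformat")` node and its knobs, and the entry values mirror the default baking profile added later in this diff.

# Illustrative stand-ins only; the real code above works on a Nuke Reformat node.
reformat_node_config = [
    {"type": "string", "name": "type", "value": "to format"},
    {"type": "string", "name": "format", "value": "HD_1080"},
    {"type": "string", "name": "filter", "value": "Lanczos6"},
    {"type": "bool", "name": "black_outside", "value": True},
    {"type": "bool", "name": "pbb", "value": False},
]


class FakeKnob(object):
    def __init__(self):
        self.value = None

    def setValue(self, value):
        self.value = value


class FakeNode(dict):
    # Create a knob on first access, like indexing a Nuke node by knob name.
    def __missing__(self, key):
        knob = self[key] = FakeKnob()
        return knob


rf_node = FakeNode()
for kn_conf in reformat_node_config:
    k_value = kn_conf["value"]
    if kn_conf["type"] == "string":
        k_value = str(k_value)
    rf_node[str(kn_conf["name"])].setValue(k_value)

print({name: knob.value for name, knob in rf_node.items()})
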
@@ -1,4 +1,5 @@
import os
import re
import pyblish.api
import openpype
from openpype.hosts.nuke.api import plugin

@@ -25,6 +26,7 @@ class ExtractReviewDataMov(openpype.api.Extractor):
    def process(self, instance):
        families = instance.data["families"]
        task_type = instance.context.data["taskType"]
        subset = instance.data["subset"]
        self.log.info("Creating staging dir...")

        if "representations" not in instance.data:

@@ -46,6 +48,7 @@ class ExtractReviewDataMov(openpype.api.Extractor):
        for o_name, o_data in self.outputs.items():
            f_families = o_data["filter"]["families"]
            f_task_types = o_data["filter"]["task_types"]
            f_subsets = o_data["filter"]["sebsets"]

            # test if family found in context
            test_families = any([

@@ -69,11 +72,25 @@ class ExtractReviewDataMov(openpype.api.Extractor):
                bool(not f_task_types)
            ])

            # test subsets from filter
            test_subsets = any([
                # check if any of subset filter inputs
                # converted to regex patern is not found in subset
                # we keep strict case sensitivity
                bool(next((
                    s for s in f_subsets
                    if re.search(re.compile(s), subset)
                ), None)),
                # but if no subsets were set then make this acuntable too
                bool(not f_subsets)
            ])

            # we need all filters to be positive for this
            # preset to be activated
            test_all = all([
                test_families,
                test_task_types
                test_task_types,
                test_subsets
            ])

            # if it is not positive then skip this preset

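The new subset filter follows the same rule as the existing family and task-type filters: a filter passes when it is empty or when at least one of its patterns matches. A small, self-contained sketch of that logic (the helper name `check_subset_filter` is illustrative, not part of the plugin):

import re


def check_subset_filter(subset, subset_patterns):
    """Return True when the filter is empty or any regex matches the subset.

    Mirrors the `test_subsets` expression above: patterns are compiled as-is,
    so matching stays case sensitive.
    """
    if not subset_patterns:
        return True
    return any(
        re.search(re.compile(pattern), subset) is not None
        for pattern in subset_patterns
    )


# Example: only activate baking presets whose filter matches the published subset.
print(check_subset_filter("renderMain_baking", ["baking"]))  # True
print(check_subset_filter("renderMain", ["baking"]))         # False
print(check_subset_filter("renderMain", []))                 # True (no filter set)
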
@@ -122,6 +139,13 @@ class ExtractReviewDataMov(openpype.api.Extractor):
        if generated_repres:
            # assign to representations
            instance.data["representations"] += generated_repres
        else:
            instance.data["families"].remove("review")
            self.log.info((
                "Removing `review` from families. "
                "Not available baking profile."
            ))
            self.log.debug(instance.data["families"])

        self.log.debug(
            "_ representations: {}".format(

@@ -20,11 +20,16 @@ from openpype_modules.ftrack.lib import (
    query_custom_attributes,
    CUST_ATTR_ID_KEY,
    CUST_ATTR_AUTO_SYNC,
    FPS_KEYS,

    avalon_sync,

    BaseEvent
)
from openpype_modules.ftrack.lib.avalon_sync import (
    convert_to_fps,
    InvalidFpsValue
)
from openpype.lib import CURRENT_DOC_SCHEMAS

@@ -1149,12 +1154,31 @@ class SyncToAvalonEvent(BaseEvent):
                "description": ftrack_ent["description"]
            }
        }
        invalid_fps_items = []
        cust_attrs = self.get_cust_attr_values(ftrack_ent)
        for key, val in cust_attrs.items():
            if key.startswith("avalon_"):
                continue

            if key in FPS_KEYS:
                try:
                    val = convert_to_fps(val)
                except InvalidFpsValue:
                    invalid_fps_items.append((ftrack_ent["id"], val))
                    continue

            final_entity["data"][key] = val

        if invalid_fps_items:
            fps_msg = (
                "These entities have invalid fps value in custom attributes"
            )
            items = []
            for entity_id, value in invalid_fps_items:
                ent_path = self.get_ent_path(entity_id)
                items.append("{} - \"{}\"".format(ent_path, value))
            self.report_items["error"][fps_msg] = items

        _mongo_id_str = cust_attrs.get(CUST_ATTR_ID_KEY)
        if _mongo_id_str:
            try:

@@ -2155,11 +2179,19 @@ class SyncToAvalonEvent(BaseEvent):
            )

            convert_types_by_id[attr_id] = convert_type
            default_value = attr["default"]
            if key in FPS_KEYS:
                try:
                    default_value = convert_to_fps(default_value)
                except InvalidFpsValue:
                    pass

            entities_dict[ftrack_project_id]["hier_attrs"][key] = (
                attr["default"]
            )

        # PREPARE DATA BEFORE THIS
        invalid_fps_items = []
        avalon_hier = []
        for item in values:
            value = item["value"]

@@ -2173,8 +2205,25 @@ class SyncToAvalonEvent(BaseEvent):

            if convert_type:
                value = convert_type(value)

            if key in FPS_KEYS:
                try:
                    value = convert_to_fps(value)
                except InvalidFpsValue:
                    invalid_fps_items.append((entity_id, value))
                    continue
            entities_dict[entity_id]["hier_attrs"][key] = value

        if invalid_fps_items:
            fps_msg = (
                "These entities have invalid fps value in custom attributes"
            )
            items = []
            for entity_id, value in invalid_fps_items:
                ent_path = self.get_ent_path(entity_id)
                items.append("{} - \"{}\"".format(ent_path, value))
            self.report_items["error"][fps_msg] = items

        # Get dictionary with not None hierarchical values to pull to childs
        project_values = {}
        for key, value in (

@@ -11,6 +11,7 @@ from openpype_modules.ftrack.lib import (
    CUST_ATTR_TOOLS,
    CUST_ATTR_APPLICATIONS,
    CUST_ATTR_INTENT,
    FPS_KEYS,

    default_custom_attributes_definition,
    app_definitions_from_app_manager,

@@ -519,20 +520,28 @@ class CustomAttributes(BaseAction):
        self.show_message(event, msg)

    def process_attribute(self, data):
        existing_attrs = self.session.query(
            "CustomAttributeConfiguration"
        ).all()
        existing_attrs = self.session.query((
            "select is_hierarchical, key, type, entity_type, object_type_id"
            " from CustomAttributeConfiguration"
        )).all()
        matching = []
        is_hierarchical = data.get("is_hierarchical", False)
        for attr in existing_attrs:
            if (
                attr["key"] != data["key"] or
                attr["type"]["name"] != data["type"]["name"]
                is_hierarchical != attr["is_hierarchical"]
                or attr["key"] != data["key"]
            ):
                continue

            if data.get("is_hierarchical") is True:
                if attr["is_hierarchical"] is True:
                    matching.append(attr)
            if attr["type"]["name"] != data["type"]["name"]:
                if data["key"] in FPS_KEYS and attr["type"]["name"] == "text":
                    self.log.info("Kept 'fps' as text custom attribute.")
                    return
                continue

            if is_hierarchical:
                matching.append(attr)

            elif "object_type_id" in data:
                if (
                    attr["entity_type"] == data["entity_type"] and

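The reworked matching loop boils down to a small decision table: mismatched hierarchy or key means skip, a type mismatch is tolerated only for an fps attribute already stored as text (which is then kept as-is), and everything else is a candidate match. A rough, self-contained restatement of that logic; the helper name `classify_existing_attr` is illustrative and not part of the action, and `FPS_KEYS` is inlined here from the constants added in this commit.

FPS_KEYS = {"fps", "fps_string"}


def classify_existing_attr(attr, data):
    """Illustrative decision table for the loop above.

    Returns "skip", "keep_text_fps" (stop and keep the existing text
    attribute), or "candidate" (same key, hierarchy and type).
    """
    is_hierarchical = data.get("is_hierarchical", False)
    if (
        is_hierarchical != attr["is_hierarchical"]
        or attr["key"] != data["key"]
    ):
        return "skip"

    if attr["type"]["name"] != data["type"]["name"]:
        if data["key"] in FPS_KEYS and attr["type"]["name"] == "text":
            return "keep_text_fps"
        return "skip"

    return "candidate"


# An existing hierarchical "fps" attribute stored as text is left untouched.
existing = {"key": "fps", "is_hierarchical": True, "type": {"name": "text"}}
wanted = {"key": "fps", "is_hierarchical": True, "type": {"name": "number"}}
print(classify_existing_attr(existing, wanted))  # keep_text_fps
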
@@ -4,7 +4,8 @@ from .constants import (
    CUST_ATTR_GROUP,
    CUST_ATTR_TOOLS,
    CUST_ATTR_APPLICATIONS,
    CUST_ATTR_INTENT
    CUST_ATTR_INTENT,
    FPS_KEYS
)
from .settings import (
    get_ftrack_event_mongo_info

@@ -30,6 +31,8 @@ __all__ = (
    "CUST_ATTR_GROUP",
    "CUST_ATTR_TOOLS",
    "CUST_ATTR_APPLICATIONS",
    "CUST_ATTR_INTENT",
    "FPS_KEYS",

    "get_ftrack_event_mongo_info",

@@ -2,6 +2,9 @@ import re
import json
import collections
import copy
import numbers

import six

from avalon.api import AvalonMongoDB

@@ -14,7 +17,7 @@ from openpype.api import (
)
from openpype.lib import ApplicationManager

from .constants import CUST_ATTR_ID_KEY
from .constants import CUST_ATTR_ID_KEY, FPS_KEYS
from .custom_attributes import get_openpype_attr, query_custom_attributes

from bson.objectid import ObjectId

@@ -33,6 +36,106 @@ CURRENT_DOC_SCHEMAS = {
}


class InvalidFpsValue(Exception):
    pass


def is_string_number(value):
    """Can string value be converted to number (float)."""
    if not isinstance(value, six.string_types):
        raise TypeError("Expected {} got {}".format(
            ", ".join(str(t) for t in six.string_types), str(type(value))
        ))
    if value == ".":
        return False

    if value.startswith("."):
        value = "0" + value
    elif value.endswith("."):
        value = value + "0"

    if re.match(r"^\d+(\.\d+)?$", value) is None:
        return False
    return True


def convert_to_fps(source_value):
    """Convert value into fps value.

    Non string values are kept untouched. String is tried to convert.
    Valid values:
        "1000"
        "1000.05"
        "1000,05"
        ",05"
        ".05"
        "1000,"
        "1000."
        "1000/1000"
        "1000.05/1000"
        "1000/1000.05"
        "1000.05/1000.05"
        "1000,05/1000"
        "1000/1000,05"
        "1000,05/1000,05"

    Invalid values:
        "/"
        "/1000"
        "1000/"
        ","
        "."
        ...any other string

    Returns:
        float: Converted value.

    Raises:
        InvalidFpsValue: When value can't be converted to float.
    """
    if not isinstance(source_value, six.string_types):
        if isinstance(source_value, numbers.Number):
            return float(source_value)
        return source_value

    value = source_value.strip().replace(",", ".")
    if not value:
        raise InvalidFpsValue("Got empty value")

    subs = value.split("/")
    if len(subs) == 1:
        str_value = subs[0]
        if not is_string_number(str_value):
            raise InvalidFpsValue(
                "Value \"{}\" can't be converted to number.".format(value)
            )
        return float(str_value)

    elif len(subs) == 2:
        divident, divisor = subs
        if not divident or not is_string_number(divident):
            raise InvalidFpsValue(
                "Divident value \"{}\" can't be converted to number".format(
                    divident
                )
            )

        if not divisor or not is_string_number(divisor):
            raise InvalidFpsValue(
                "Divisor value \"{}\" can't be converted to number".format(
                    divident
                )
            )
        divisor_float = float(divisor)
        if divisor_float == 0.0:
            raise InvalidFpsValue("Can't divide by zero")
        return float(divident) / divisor_float

    raise InvalidFpsValue(
        "Value can't be converted to number \"{}\"".format(source_value)
    )


def create_chunks(iterable, chunk_size=None):
    """Separate iterable into multiple chunks by size.

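Since convert_to_fps() becomes the single entry point for fps custom attributes across the sync code, a short usage sketch of the accepted formats and the failure path; the import assumes the OpenPype ftrack module is importable, using the same path the event handler above uses.

from openpype_modules.ftrack.lib.avalon_sync import convert_to_fps, InvalidFpsValue

print(convert_to_fps(25))            # 25.0 (numbers pass straight through as float)
print(convert_to_fps("23.976"))      # 23.976
print(convert_to_fps("25,0"))        # 25.0 (comma decimal separator is accepted)
print(convert_to_fps("24000/1001"))  # 23.976023976023978

try:
    convert_to_fps("film speed")
except InvalidFpsValue as exc:
    # The sync code collects these per entity and adds them to its error report.
    print("invalid fps value: {}".format(exc))
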
@@ -980,6 +1083,7 @@ class SyncEntitiesFactory:
            sync_ids
        )

        invalid_fps_items = []
        for item in items:
            entity_id = item["entity_id"]
            attr_id = item["configuration_id"]

@@ -992,8 +1096,24 @@ class SyncEntitiesFactory:
            value = item["value"]
            if convert_type:
                value = convert_type(value)

            if key in FPS_KEYS:
                try:
                    value = convert_to_fps(value)
                except InvalidFpsValue:
                    invalid_fps_items.append((entity_id, value))
            self.entities_dict[entity_id][store_key][key] = value

        if invalid_fps_items:
            fps_msg = (
                "These entities have invalid fps value in custom attributes"
            )
            items = []
            for entity_id, value in invalid_fps_items:
                ent_path = self.get_ent_path(entity_id)
                items.append("{} - \"{}\"".format(ent_path, value))
            self.report_items["error"][fps_msg] = items

        # process hierarchical attributes
        self.set_hierarchical_attribute(
            hier_attrs, sync_ids, cust_attr_type_name_by_id

@@ -1026,8 +1146,15 @@ class SyncEntitiesFactory:
            if key.startswith("avalon_"):
                store_key = "avalon_attrs"

            default_value = attr["default"]
            if key in FPS_KEYS:
                try:
                    default_value = convert_to_fps(default_value)
                except InvalidFpsValue:
                    pass

            self.entities_dict[self.ft_project_id][store_key][key] = (
                attr["default"]
                default_value
            )

        # Add attribute ids to entities dictionary

@@ -1069,6 +1196,7 @@ class SyncEntitiesFactory:
                True
            )

            invalid_fps_items = []
            avalon_hier = []
            for item in items:
                value = item["value"]

@@ -1088,6 +1216,13 @@ class SyncEntitiesFactory:
                entity_id = item["entity_id"]
                key = attribute_key_by_id[attr_id]
                if key in FPS_KEYS:
                    try:
                        value = convert_to_fps(value)
                    except InvalidFpsValue:
                        invalid_fps_items.append((entity_id, value))
                        continue

                if key.startswith("avalon_"):
                    store_key = "avalon_attrs"
                    avalon_hier.append(key)

@@ -1095,6 +1230,16 @@ class SyncEntitiesFactory:
                    store_key = "hier_attrs"
                self.entities_dict[entity_id][store_key][key] = value

            if invalid_fps_items:
                fps_msg = (
                    "These entities have invalid fps value in custom attributes"
                )
                items = []
                for entity_id, value in invalid_fps_items:
                    ent_path = self.get_ent_path(entity_id)
                    items.append("{} - \"{}\"".format(ent_path, value))
                self.report_items["error"][fps_msg] = items

            # Get dictionary with not None hierarchical values to pull to childs
            top_id = self.ft_project_id
            project_values = {}

@@ -12,3 +12,9 @@ CUST_ATTR_APPLICATIONS = "applications"
CUST_ATTR_TOOLS = "tools_env"
# Intent custom attribute name
CUST_ATTR_INTENT = "intent"

FPS_KEYS = {
    "fps",
    # For development purposes
    "fps_string"
}

@@ -1171,6 +1171,9 @@ class ExtractReview(pyblish.api.InstancePlugin):
        self.log.debug("input_width: `{}`".format(input_width))
        self.log.debug("input_height: `{}`".format(input_height))

        reformat_in_baking = bool("reformated" in new_repre["tags"])
        self.log.debug("reformat_in_baking: `{}`".format(reformat_in_baking))

        # Use instance resolution if output definition has not set it.
        if output_width is None or output_height is None:
            output_width = temp_data["resolution_width"]

@@ -1182,6 +1185,17 @@ class ExtractReview(pyblish.api.InstancePlugin):
            output_width = input_width
            output_height = input_height

        if reformat_in_baking:
            self.log.debug((
                "Using resolution from input. It is already "
                "reformated from baking process"
            ))
            output_width = input_width
            output_height = input_height
            pixel_aspect = 1
            new_repre["resolutionWidth"] = input_width
            new_repre["resolutionHeight"] = input_height

        output_width = int(output_width)
        output_height = int(output_height)

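The intent of the "reformated" tag handling is that a representation already reformatted during Nuke baking keeps its input resolution and a square pixel aspect, so ffmpeg does not rescale it a second time. A compact sketch of that override; the helper name and signature are illustrative, not part of the plugin.

def resolve_output_resolution(input_width, input_height, output_width,
                              output_height, pixel_aspect, tags):
    """Illustrative restatement of the override above."""
    if "reformated" in tags:
        # Already reformatted while baking: trust the baked resolution.
        return input_width, input_height, 1
    return (output_width or input_width,
            output_height or input_height,
            pixel_aspect)


# A baked, already-reformatted mov keeps 1920x1080 / pixel aspect 1
# regardless of the output profile resolution.
print(resolve_output_resolution(1920, 1080, 4096, 2160, 2.0,
                                ["review", "reformated"]))
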
@@ -116,13 +116,42 @@
            "baking": {
                "filter": {
                    "task_types": [],
                    "families": []
                    "families": [],
                    "sebsets": []
                },
                "extension": "mov",
                "viewer_process_override": "",
                "bake_viewer_process": true,
                "bake_viewer_input_process": true,
                "add_tags": []
                "add_tags": [],
                "reformat_node_add": false,
                "reformat_node_config": [
                    {
                        "type": "string",
                        "name": "type",
                        "value": "to format"
                    },
                    {
                        "type": "string",
                        "name": "format",
                        "value": "HD_1080"
                    },
                    {
                        "type": "string",
                        "name": "filter",
                        "value": "Lanczos6"
                    },
                    {
                        "type": "bool",
                        "name": "black_outside",
                        "value": true
                    },
                    {
                        "type": "bool",
                        "name": "pbb",
                        "value": false
                    }
                ]
            }
        }
    },

@@ -584,8 +584,9 @@ class DictConditionalEntity(ItemEntity):
        self.enum_entity.update_default_value(enum_value)
        for children_by_key in self.non_gui_children.values():
            value_copy = copy.deepcopy(value)
            for key, child_obj in children_by_key.items():
                child_value = value.get(key, NOT_SET)
                child_value = value_copy.get(key, NOT_SET)
                child_obj.update_default_value(child_value)

    def update_studio_value(self, value):

@@ -620,8 +621,9 @@ class DictConditionalEntity(ItemEntity):
        self.enum_entity.update_studio_value(enum_value)
        for children_by_key in self.non_gui_children.values():
            value_copy = copy.deepcopy(value)
            for key, child_obj in children_by_key.items():
                child_value = value.get(key, NOT_SET)
                child_value = value_copy.get(key, NOT_SET)
                child_obj.update_studio_value(child_value)

    def update_project_value(self, value):

@@ -656,8 +658,9 @@ class DictConditionalEntity(ItemEntity):
        self.enum_entity.update_project_value(enum_value)
        for children_by_key in self.non_gui_children.values():
            value_copy = copy.deepcopy(value)
            for key, child_obj in children_by_key.items():
                child_value = value.get(key, NOT_SET)
                child_value = value_copy.get(key, NOT_SET)
                child_obj.update_project_value(child_value)

    def _discard_changes(self, on_change_trigger):

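The copy.deepcopy(value) added to all three update_*_value methods hands each group of children its own copy of the incoming settings dict, so whatever one group mutates cannot leak into its siblings. A stripped-down illustration of that aliasing effect with plain dicts; the entity classes and the exact mutation they perform are not reproduced here.

import copy

value = {"reformat_node_config": [{"name": "type"}]}

# Without a copy, two consumers of the same key share one list: a pop() done
# by one consumer is visible to the next one.
shared = value.get("reformat_node_config")
shared.pop()
print(value["reformat_node_config"])  # [] -- the original was mutated

# With a deep copy per consumer, each works on its own data.
value = {"reformat_node_config": [{"name": "type"}]}
value_copy = copy.deepcopy(value)
value_copy["reformat_node_config"].pop()
print(value["reformat_node_config"])  # [{'name': 'type'}] -- original untouched
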
@@ -195,6 +195,12 @@
            "label": "Families",
            "type": "list",
            "object_type": "text"
        },
        {
            "key": "sebsets",
            "label": "Subsets",
            "type": "list",
            "object_type": "text"
        }
    ]
},

@@ -226,6 +232,121 @@
    "label": "Add additional tags to representations",
    "type": "list",
    "object_type": "text"
},
{
    "type": "separator"
},
{
    "type": "boolean",
    "key": "reformat_node_add",
    "label": "Add Reformat Node",
    "default": false
},
{
    "type": "collapsible-wrap",
    "label": "Reformat Node Knobs",
    "collapsible": true,
    "collapsed": false,
    "children": [
        {
            "type": "list",
            "key": "reformat_node_config",
            "object_type": {
                "type": "dict-conditional",
                "enum_key": "type",
                "enum_label": "Type",
                "enum_children": [
                    {
                        "key": "string",
                        "label": "String",
                        "children": [
                            {
                                "type": "text",
                                "key": "name",
                                "label": "Name"
                            },
                            {
                                "type": "text",
                                "key": "value",
                                "label": "Value"
                            }
                        ]
                    },
                    {
                        "key": "bool",
                        "label": "Boolean",
                        "children": [
                            {
                                "type": "text",
                                "key": "name",
                                "label": "Name"
                            },
                            {
                                "type": "boolean",
                                "key": "value",
                                "label": "Value"
                            }
                        ]
                    },
                    {
                        "key": "number",
                        "label": "Number",
                        "children": [
                            {
                                "type": "text",
                                "key": "name",
                                "label": "Name"
                            },
                            {
                                "type": "list-strict",
                                "key": "value",
                                "label": "Value",
                                "object_types": [
                                    {
                                        "type": "number",
                                        "key": "number",
                                        "default": 1,
                                        "decimal": 4
                                    }
                                ]
                            }
                        ]
                    },
                    {
                        "key": "list_numbers",
                        "label": "2 Numbers",
                        "children": [
                            {
                                "type": "text",
                                "key": "name",
                                "label": "Name"
                            },
                            {
                                "type": "list-strict",
                                "key": "value",
                                "label": "Value",
                                "object_types": [
                                    {
                                        "type": "number",
                                        "key": "x",
                                        "default": 1,
                                        "decimal": 4
                                    },
                                    {
                                        "type": "number",
                                        "key": "y",
                                        "default": 1,
                                        "decimal": 4
                                    }
                                ]
                            }
                        ]
                    }
                ]
            }
        }
    ]
}
]
}