Mirror of https://github.com/ynput/ayon-core.git
Synced 2025-12-24 21:04:40 +01:00

Commit e0e1812f97: [Automated] Merged develop into main
33 changed files with 131 additions and 302 deletions

.github/ISSUE_TEMPLATE/bug_report.yml (vendored, 2 changes)

@@ -35,6 +35,7 @@ body:
       label: Version
       description: What version are you running? Look to OpenPype Tray
       options:
+        - 3.18.2-nightly.2
         - 3.18.2-nightly.1
         - 3.18.1
         - 3.18.1-nightly.1

@@ -134,7 +135,6 @@ body:
         - 3.15.4
         - 3.15.4-nightly.3
         - 3.15.4-nightly.2
-        - 3.15.4-nightly.1
     validations:
       required: true
   - type: dropdown

@@ -121,62 +121,6 @@ def get_id_required_nodes():
     return list(nodes)
 
 
-def get_export_parameter(node):
-    """Return the export output parameter of the given node
-
-    Example:
-        root = hou.node("/obj")
-        my_alembic_node = root.createNode("alembic")
-        get_output_parameter(my_alembic_node)
-        # Result: "output"
-
-    Args:
-        node(hou.Node): node instance
-
-    Returns:
-        hou.Parm
-
-    """
-    node_type = node.type().description()
-
-    # Ensures the proper Take is selected for each ROP to retrieve the correct
-    # ifd
-    try:
-        rop_take = hou.takes.findTake(node.parm("take").eval())
-        if rop_take is not None:
-            hou.takes.setCurrentTake(rop_take)
-    except AttributeError:
-        # hou object doesn't always have the 'takes' attribute
-        pass
-
-    if node_type == "Mantra" and node.parm("soho_outputmode").eval():
-        return node.parm("soho_diskfile")
-    elif node_type == "Alfred":
-        return node.parm("alf_diskfile")
-    elif (node_type == "RenderMan" or node_type == "RenderMan RIS"):
-        pre_ris22 = node.parm("rib_outputmode") and \
-            node.parm("rib_outputmode").eval()
-        ris22 = node.parm("diskfile") and node.parm("diskfile").eval()
-        if pre_ris22 or ris22:
-            return node.parm("soho_diskfile")
-    elif node_type == "Redshift" and node.parm("RS_archive_enable").eval():
-        return node.parm("RS_archive_file")
-    elif node_type == "Wedge" and node.parm("driver").eval():
-        return get_export_parameter(node.node(node.parm("driver").eval()))
-    elif node_type == "Arnold":
-        return node.parm("ar_ass_file")
-    elif node_type == "Alembic" and node.parm("use_sop_path").eval():
-        return node.parm("sop_path")
-    elif node_type == "Shotgun Mantra" and node.parm("soho_outputmode").eval():
-        return node.parm("sgtk_soho_diskfile")
-    elif node_type == "Shotgun Alembic" and node.parm("use_sop_path").eval():
-        return node.parm("sop_path")
-    elif node.type().nameWithCategory() == "Driver/vray_renderer":
-        return node.parm("render_export_filepath")
-
-    raise TypeError("Node type '%s' not supported" % node_type)
-
-
 def get_output_parameter(node):
     """Return the render output parameter of the given node

@@ -184,41 +128,59 @@ def get_output_parameter(node):
         root = hou.node("/obj")
         my_alembic_node = root.createNode("alembic")
         get_output_parameter(my_alembic_node)
-        # Result: "output"
+        >>> "filename"
+
+    Notes:
+        I'm using node.type().name() to get on par with the creators,
+        Because the return value of `node.type().name()` is the
+            same string value used in creators
+            e.g. instance_data.update({"node_type": "alembic"})
+
+        Rop nodes in different network categories have
+            the same output parameter.
+            So, I took that into consideration as a hint for
+            future development.
 
     Args:
         node(hou.Node): node instance
 
     Returns:
         hou.Parm
 
     """
-    node_type = node.type().description()
-    category = node.type().category().name()
-
-    if node_type == "Geometry" or node_type == "Filmbox FBX" or \
-            (node_type == "ROP Output Driver" and category == "Sop"):
-        return node.parm("sopoutput")
-    elif node_type == "Composite":
-        return node.parm("copoutput")
-    elif node_type == "opengl":
-        return node.parm("picture")
-    elif node_type == "arnold":
-        if node.evalParm("ar_ass_export_enable"):
-            return node.parm("ar_ass_file")
-        return node.parm("ar_picture")
-    elif node_type == "Redshift_Proxy_Output":
-        return node.parm("RS_archive_file")
-    elif node_type == "ifd":
-        if node.evalParm("soho_outputmode"):
-            return node.parm("soho_diskfile")
-    elif node_type == "Octane":
-        return node.parm("HO_img_fileName")
-    elif node_type == "Fetch":
-        inner_node = node.node(node.parm("source").eval())
-        if inner_node:
-            return get_output_parameter(inner_node)
-    elif node.type().nameWithCategory() == "Driver/vray_renderer":
-        return node.parm("vm_picture")
+    node_type = node.type().name()
+
+    # Figure out which type of node is being rendered
+    if node_type in {"alembic", "rop_alembic"}:
+        return node.parm("filename")
+    elif node_type == "arnold":
+        if node_type.evalParm("ar_ass_export_enable"):
+            return node.parm("ar_ass_file")
+        return node.parm("ar_picture")
+    elif node_type in {
+        "geometry",
+        "rop_geometry",
+        "filmboxfbx",
+        "rop_fbx"
+    }:
+        return node.parm("sopoutput")
+    elif node_type == "comp":
+        return node.parm("copoutput")
+    elif node_type in {"karma", "opengl"}:
+        return node.parm("picture")
+    elif node_type == "ifd":  # Mantra
+        if node.evalParm("soho_outputmode"):
+            return node.parm("soho_diskfile")
+        return node.parm("vm_picture")
+    elif node_type == "Redshift_Proxy_Output":
+        return node.parm("RS_archive_file")
+    elif node_type == "Redshift_ROP":
+        return node.parm("RS_outputFileNamePrefix")
+    elif node_type in {"usd", "usd_rop", "usdexport"}:
+        return node.parm("lopoutput")
+    elif node_type in {"usdrender", "usdrender_rop"}:
+        return node.parm("outputimage")
+    elif node_type == "vray_renderer":
+        return node.parm("SettingsOutput_img_file_path")
 
     raise TypeError("Node type '%s' not supported" % node_type)

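For context: the rewritten helper keys on node.type().name() instead of the human-readable type description, so its branches match the same strings the creators store as "node_type". One caveat worth flagging: the arnold branch above calls evalParm on node_type, which is a string at that point; this reads like a typo for node.evalParm and would raise AttributeError once ASS export is enabled. A minimal usage sketch, assuming it runs inside a Houdini session (where hou is importable) with the helper above in scope:

    import hou

    # An Alembic ROP created in /out reports type().name() == "alembic",
    # so the helper resolves its "filename" parm.
    out = hou.node("/out")
    alembic_rop = out.createNode("alembic")

    parm = get_output_parameter(alembic_rop)
    print(parm.name())   # -> "filename"
    print(parm.eval())   # the currently configured output path
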
@@ -41,11 +41,11 @@ class CollectArnoldROPRenderProducts(pyblish.api.InstancePlugin):
         render_products = []
 
         # Store whether we are splitting the render job (export + render)
-        export_job = bool(rop.parm("ar_ass_export_enable").eval())
-        instance.data["exportJob"] = export_job
+        split_render = bool(rop.parm("ar_ass_export_enable").eval())
+        instance.data["splitRender"] = split_render
         export_prefix = None
         export_products = []
-        if export_job:
+        if split_render:
             export_prefix = evalParmNoFrame(
                 rop, "ar_ass_file", pad_character="0"
             )

@@ -45,11 +45,11 @@ class CollectMantraROPRenderProducts(pyblish.api.InstancePlugin):
         render_products = []
 
         # Store whether we are splitting the render job (export + render)
-        export_job = bool(rop.parm("soho_outputmode").eval())
-        instance.data["exportJob"] = export_job
+        split_render = bool(rop.parm("soho_outputmode").eval())
+        instance.data["splitRender"] = split_render
         export_prefix = None
         export_products = []
-        if export_job:
+        if split_render:
             export_prefix = evalParmNoFrame(
                 rop, "soho_diskfile", pad_character="0"
             )

@@ -46,11 +46,11 @@ class CollectVrayROPRenderProducts(pyblish.api.InstancePlugin):
         # TODO: add render elements if render element
 
         # Store whether we are splitting the render job in an export + render
-        export_job = rop.parm("render_export_mode").eval() == "2"
-        instance.data["exportJob"] = export_job
+        split_render = rop.parm("render_export_mode").eval() == "2"
+        instance.data["splitRender"] = split_render
        export_prefix = None
        export_products = []
-        if export_job:
+        if split_render:
             export_prefix = evalParmNoFrame(
                 rop, "render_export_filepath", pad_character="0"
             )

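The three collectors above make the same rename: the instance key "exportJob" becomes "splitRender". A minimal sketch of how a downstream plugin reads the renamed flag; using .get() keeps it safe on instances that never set the key:

    # `instance` is a pyblish instance populated by one of the collectors.
    if instance.data.get("splitRender"):
        print("Export and render will be submitted as separate jobs")
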
@@ -6,6 +6,7 @@ from maya import cmds
 import pyblish.api
 
 from openpype.hosts.maya.api import lib
+from openpype.pipeline.publish import KnownPublishError
 
 
 SETTINGS = {"renderDensity",

@@ -116,7 +117,6 @@ class CollectYetiRig(pyblish.api.InstancePlugin):
         resources = []
 
         image_search_paths = cmds.getAttr("{}.imageSearchPath".format(node))
-        texture_filenames = []
         if image_search_paths:
 
             # TODO: Somehow this uses OS environment path separator, `:` vs `;`

@@ -127,9 +127,16 @@ class CollectYetiRig(pyblish.api.InstancePlugin):
             # find all ${TOKEN} tokens and replace them with $TOKEN env. variable
             image_search_paths = self._replace_tokens(image_search_paths)
 
-        # List all related textures
-        texture_filenames = cmds.pgYetiCommand(node, listTextures=True)
-        self.log.debug("Found %i texture(s)" % len(texture_filenames))
+        # List all related textures
+        texture_nodes = cmds.pgYetiGraph(
+            node, listNodes=True, type="texture")
+        texture_filenames = [
+            cmds.pgYetiGraph(
+                node, node=texture_node,
+                param="file_name", getParamValue=True)
+            for texture_node in texture_nodes
+        ]
+        self.log.debug("Found %i texture(s)" % len(texture_filenames))
 
         # Get all reference nodes
         reference_nodes = cmds.pgYetiGraph(node,

@@ -137,11 +144,6 @@ class CollectYetiRig(pyblish.api.InstancePlugin):
                                            type="reference")
         self.log.debug("Found %i reference node(s)" % len(reference_nodes))
 
-        if texture_filenames and not image_search_paths:
-            raise ValueError("pgYetiMaya node '%s' is missing the path to the "
-                             "files in the 'imageSearchPath "
-                             "atttribute'" % node)
-
         # Collect all texture files
         # find all ${TOKEN} tokens and replace them with $TOKEN env. variable
         texture_filenames = self._replace_tokens(texture_filenames)

@@ -161,7 +163,7 @@ class CollectYetiRig(pyblish.api.InstancePlugin):
                     break
 
             if not files:
-                self.log.warning(
+                raise KnownPublishError(
                     "No texture found for: %s "
                     "(searched: %s)" % (texture, image_search_paths))

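The collector now walks the Yeti graph per texture node instead of asking for a flat texture list, so each entry comes straight from a node's "file_name" parameter. A minimal sketch of that query pattern, assuming a Maya session with the Yeti plugin loaded; the node name is illustrative:

    from maya import cmds

    yeti_node = "pgYetiMayaShape1"  # hypothetical pgYetiMaya shape

    # Every "texture" node in the graph exposes its path via "file_name",
    # so each entry can be inspected or remapped individually.
    texture_nodes = cmds.pgYetiGraph(
        yeti_node, listNodes=True, type="texture")
    texture_files = [
        cmds.pgYetiGraph(
            yeti_node, node=texture_node,
            param="file_name", getParamValue=True)
        for texture_node in texture_nodes
    ]
    print(texture_files)
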
@@ -6,9 +6,11 @@ from maya import cmds
 
 from openpype.hosts.maya.api.lib import maintained_selection
 from openpype.pipeline import AVALON_CONTAINER_ID, publish
+from openpype.pipeline.publish import OpenPypePyblishPluginMixin
+from openpype.lib import BoolDef
 
 
-class ExtractMayaSceneRaw(publish.Extractor):
+class ExtractMayaSceneRaw(publish.Extractor, OpenPypePyblishPluginMixin):
     """Extract as Maya Scene (raw).
 
     This will preserve all references, construction history, etc.

@@ -23,6 +25,22 @@ class ExtractMayaSceneRaw(publish.Extractor):
                 "camerarig"]
     scene_type = "ma"
 
+    @classmethod
+    def get_attribute_defs(cls):
+        return [
+            BoolDef(
+                "preserve_references",
+                label="Preserve References",
+                tooltip=(
+                    "When enabled references will still be references "
+                    "in the published file.\nWhen disabled the references "
+                    "are imported into the published file generating a "
+                    "file without references."
+                ),
+                default=True
+            )
+        ]
+
     def process(self, instance):
         """Plugin entry point."""
         ext_mapping = (

@@ -64,13 +82,18 @@ class ExtractMayaSceneRaw(publish.Extractor):
 
         # Perform extraction
         self.log.debug("Performing extraction ...")
+        attribute_values = self.get_attr_values_from_data(
+            instance.data
+        )
         with maintained_selection():
             cmds.select(selection, noExpand=True)
             cmds.file(path,
                       force=True,
                       typ="mayaAscii" if self.scene_type == "ma" else "mayaBinary",  # noqa: E501
                       exportSelected=True,
-                      preserveReferences=True,
+                      preserveReferences=attribute_values[
+                          "preserve_references"
+                      ],
                       constructionHistory=True,
                       shader=True,
                       constraints=True,

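The same attribute-definition pattern generalizes to any extractor: mix in OpenPypePyblishPluginMixin, declare the definitions, and read the artist's choices back in process(). A minimal sketch with an illustrative plugin and toggle name:

    from openpype.lib import BoolDef
    from openpype.pipeline import publish
    from openpype.pipeline.publish import OpenPypePyblishPluginMixin

    class ExtractSomething(publish.Extractor, OpenPypePyblishPluginMixin):
        """Hypothetical extractor exposing a checkbox in the publisher UI."""

        @classmethod
        def get_attribute_defs(cls):
            # Rendered as a checkbox; the value travels with instance data
            return [BoolDef("my_toggle", label="My Toggle", default=True)]

        def process(self, instance):
            values = self.get_attr_values_from_data(instance.data)
            if values["my_toggle"]:
                self.log.debug("Toggle enabled by the artist")
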
@@ -12,7 +12,7 @@ def set_context_favorites(favorites=None):
         favorites (dict): couples of {name:path}
     """
     favorites = favorites or {}
-    icon_path = resources.get_resource("icons", "folder-favorite3.png")
+    icon_path = resources.get_resource("icons", "folder-favorite.png")
     for name, path in favorites.items():
         nuke.addFavoriteDir(
             name,

@@ -15,7 +15,7 @@ class ValidateFilePath(pyblish.api.InstancePlugin):
     This is primarily created for Simple Creator instances.
     """
 
-    label = "Validate Workfile"
+    label = "Validate Filepaths"
     order = pyblish.api.ValidatorOrder - 0.49
 
     hosts = ["traypublisher"]

@@ -464,7 +464,7 @@ class AbstractSubmitDeadline(pyblish.api.InstancePlugin,
         self.log.info("Submitted job to Deadline: {}.".format(job_id))
 
         # TODO: Find a way that's more generic and not render type specific
-        if "exportJob" in instance.data:
+        if instance.data.get("splitRender"):
             self.log.info("Splitting export and render in two jobs")
             self.log.info("Export job id: %s", job_id)
             render_job_info = self.get_job_info(dependency_job_ids=[job_id])

@@ -124,7 +124,7 @@ class HoudiniSubmitDeadline(
 
         # Whether Deadline render submission is being split in two
         # (extract + render)
-        split_render_job = instance.data["exportJob"]
+        split_render_job = instance.data.get("splitRender")
 
         # If there's some dependency job ids we can assume this is a render job
         # and not an export job

@@ -132,18 +132,21 @@ class HoudiniSubmitDeadline(
         if dependency_job_ids:
             is_export_job = False
 
+        job_type = "[RENDER]"
         if split_render_job and not is_export_job:
             # Convert from family to Deadline plugin name
             # i.e., arnold_rop -> Arnold
             plugin = instance.data["family"].replace("_rop", "").capitalize()
         else:
             plugin = "Houdini"
+            if split_render_job:
+                job_type = "[EXPORT IFD]"
 
         job_info = DeadlineJobInfo(Plugin=plugin)
 
         filepath = context.data["currentFile"]
         filename = os.path.basename(filepath)
-        job_info.Name = "{} - {}".format(filename, instance.name)
+        job_info.Name = "{} - {} {}".format(filename, instance.name, job_type)
         job_info.BatchName = filename
 
         job_info.UserName = context.data.get(

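The family-to-plugin conversion above is a plain string transform; the family names below are the usual Houdini ROP families, shown for illustration:

    # "arnold_rop" -> "Arnold", "mantra_rop" -> "Mantra", ...
    for family in ("arnold_rop", "mantra_rop", "vray_rop"):
        print(family, "->", family.replace("_rop", "").capitalize())
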
@@ -89,7 +89,7 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin,
 
     """
 
-    label = "Submit image sequence jobs to Deadline or Muster"
+    label = "Submit Image Publishing job to Deadline"
     order = pyblish.api.IntegratorOrder + 0.2
     icon = "tractor"

@@ -582,16 +582,7 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin,
 
         '''
 
-        render_job = None
-        submission_type = ""
-        if instance.data.get("toBeRenderedOn") == "deadline":
-            render_job = instance.data.pop("deadlineSubmissionJob", None)
-            submission_type = "deadline"
-
-        if instance.data.get("toBeRenderedOn") == "muster":
-            render_job = instance.data.pop("musterSubmissionJob", None)
-            submission_type = "muster"
-
+        render_job = instance.data.pop("deadlineSubmissionJob", None)
         if not render_job and instance.data.get("tileRendering") is False:
             raise AssertionError(("Cannot continue without valid Deadline "
                                   "or Muster submission."))

@@ -624,21 +615,19 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin,
             "FTRACK_SERVER": os.environ.get("FTRACK_SERVER"),
         }
 
-        deadline_publish_job_id = None
-        if submission_type == "deadline":
-            # get default deadline webservice url from deadline module
-            self.deadline_url = instance.context.data["defaultDeadline"]
-            # if custom one is set in instance, use that
-            if instance.data.get("deadlineUrl"):
-                self.deadline_url = instance.data.get("deadlineUrl")
-            assert self.deadline_url, "Requires Deadline Webservice URL"
+        # get default deadline webservice url from deadline module
+        self.deadline_url = instance.context.data["defaultDeadline"]
+        # if custom one is set in instance, use that
+        if instance.data.get("deadlineUrl"):
+            self.deadline_url = instance.data.get("deadlineUrl")
+        assert self.deadline_url, "Requires Deadline Webservice URL"
 
-            deadline_publish_job_id = \
-                self._submit_deadline_post_job(instance, render_job, instances)
+        deadline_publish_job_id = \
+            self._submit_deadline_post_job(instance, render_job, instances)
 
-            # Inject deadline url to instances.
-            for inst in instances:
-                inst["deadlineUrl"] = self.deadline_url
+        # Inject deadline url to instances.
+        for inst in instances:
+            inst["deadlineUrl"] = self.deadline_url
 
         # publish job file
         publish_job = {

@@ -664,15 +653,6 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin,
         if audio_file and os.path.isfile(audio_file):
             publish_job.update({"audio": audio_file})
 
-        # pass Ftrack credentials in case of Muster
-        if submission_type == "muster":
-            ftrack = {
-                "FTRACK_API_USER": os.environ.get("FTRACK_API_USER"),
-                "FTRACK_API_KEY": os.environ.get("FTRACK_API_KEY"),
-                "FTRACK_SERVER": os.environ.get("FTRACK_SERVER"),
-            }
-            publish_job.update({"ftrack": ftrack})
-
         metadata_path, rootless_metadata_path = \
             create_metadata_path(instance, anatomy)

@@ -352,7 +352,8 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin):
 
             # add extended name if any
             if (
-                not self.keep_first_subset_name_for_review
+                multiple_reviewable
+                and not self.keep_first_subset_name_for_review
                 and extended_asset_name
             ):
                 other_item["asset_data"]["name"] = extended_asset_name

@@ -354,7 +354,7 @@ class PythonInterpreterWidget(QtWidgets.QWidget):
     default_width = 1000
     default_height = 600
 
-    def __init__(self, parent=None):
+    def __init__(self, allow_save_registry=True, parent=None):
         super(PythonInterpreterWidget, self).__init__(parent)
 
         self.setWindowTitle("{} Console".format(

@@ -414,6 +414,8 @@ class PythonInterpreterWidget(QtWidgets.QWidget):
 
         self._first_show = True
         self._splitter_size_ratio = None
+        self._allow_save_registry = allow_save_registry
+        self._registry_saved = True
 
         self._init_from_registry()

@@ -457,6 +459,11 @@ class PythonInterpreterWidget(QtWidgets.QWidget):
             pass
 
     def save_registry(self):
+        # Window was not showed
+        if not self._allow_save_registry or self._registry_saved:
+            return
+
+        self._registry_saved = True
         setting_registry = PythonInterpreterRegistry()
 
         setting_registry.set_item("width", self.width())

@@ -650,6 +657,7 @@ class PythonInterpreterWidget(QtWidgets.QWidget):
 
     def showEvent(self, event):
         self._line_check_timer.start()
+        self._registry_saved = False
         super(PythonInterpreterWidget, self).showEvent(event)
         # First show setup
         if self._first_show:

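A minimal usage sketch of the new constructor flag (instantiation is illustrative; in practice the widget is created by the tray tool inside a running Qt application):

    # With allow_save_registry=False, save_registry() returns early,
    # so an embedded console will not overwrite the stored geometry.
    widget = PythonInterpreterWidget(allow_save_registry=False)
    widget.show()
    widget.save_registry()  # no-op under this flag
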
Binary image file changed, not shown: 6.8 KiB -> 9.8 KiB
Binary image file removed, not shown: 22 KiB
Binary image file removed, not shown: 7.8 KiB

@@ -21,7 +21,7 @@ Providing functionality:
 
 import click
 import json
-from pathlib2 import Path
+from pathlib import Path
 import PyOpenColorIO as ocio

@@ -478,15 +478,6 @@ def _convert_maya_project_settings(ayon_settings, output):
         for item in ayon_maya["ext_mapping"]
     }
 
-    # Publish UI filters
-    new_filters = {}
-    for item in ayon_maya["filters"]:
-        new_filters[item["name"]] = {
-            subitem["name"]: subitem["value"]
-            for subitem in item["value"]
-        }
-    ayon_maya["filters"] = new_filters
-
     # Maya dirmap
     ayon_maya_dirmap = ayon_maya.pop("maya_dirmap")
     ayon_maya_dirmap_path = ayon_maya_dirmap["paths"]

@@ -743,16 +734,6 @@ def _convert_nuke_project_settings(ayon_settings, output):
         dirmap["paths"][dst_key] = dirmap["paths"].pop(src_key)
     ayon_nuke["nuke-dirmap"] = dirmap
 
-    # --- Filters ---
-    new_gui_filters = {}
-    for item in ayon_nuke.pop("filters"):
-        subvalue = {}
-        key = item["name"]
-        for subitem in item["value"]:
-            subvalue[subitem["name"]] = subitem["value"]
-        new_gui_filters[key] = subvalue
-    ayon_nuke["filters"] = new_gui_filters
-
     # --- Load ---
     ayon_load = ayon_nuke["load"]
     ayon_load["LoadClip"]["_representations"] = (

@@ -896,7 +877,7 @@ def _convert_hiero_project_settings(ayon_settings, output):
     _convert_host_imageio(ayon_hiero)
 
     new_gui_filters = {}
-    for item in ayon_hiero.pop("filters"):
+    for item in ayon_hiero.pop("filters", []):
         subvalue = {}
         key = item["name"]
         for subitem in item["value"]:

@@ -963,17 +944,6 @@ def _convert_tvpaint_project_settings(ayon_settings, output):
 
     _convert_host_imageio(ayon_tvpaint)
 
-    filters = {}
-    for item in ayon_tvpaint["filters"]:
-        value = item["value"]
-        try:
-            value = json.loads(value)
-        except ValueError:
-            value = {}
-        filters[item["name"]] = value
-    ayon_tvpaint["filters"] = filters
-
     ayon_publish_settings = ayon_tvpaint["publish"]
     for plugin_name in (
         "ValidateProjectSettings",

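Because the "filters" key no longer exists in AYON settings, the hiero conversion switched to dict.pop with a default so it degrades quietly instead of raising. A minimal sketch of the difference:

    ayon_hiero = {}                   # no "filters" key anymore
    ayon_hiero.pop("filters", [])     # -> [] instead of KeyError
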
@@ -1608,14 +1608,5 @@
     },
     "templated_workfile_build": {
         "profiles": []
-    },
-    "filters": {
-        "preset 1": {
-            "ValidateNoAnimation": false,
-            "ValidateShapeDefaultNames": false
-        },
-        "preset 2": {
-            "ValidateNoAnimation": false
-        }
-    }
+    }
 }

@@ -540,6 +540,5 @@
     },
     "templated_workfile_build": {
         "profiles": []
-    },
-    "filters": {}
+    }
 }

@@ -107,6 +107,5 @@
     "workfile_builder": {
         "create_first_version": false,
         "custom_templates": []
-    },
-    "filters": {}
+    }
 }

@@ -258,10 +258,6 @@
         {
             "type": "schema",
             "name": "schema_templated_workfile_build"
-        },
-        {
-            "type": "schema",
-            "name": "schema_publish_gui_filter"
-        }
+        }
     ]
 }

@@ -291,10 +291,6 @@
         {
             "type": "schema",
             "name": "schema_templated_workfile_build"
-        },
-        {
-            "type": "schema",
-            "name": "schema_publish_gui_filter"
-        }
+        }
     ]
 }

@@ -436,10 +436,6 @@
                 "workfile_builder/builder_on_start",
                 "workfile_builder/profiles"
             ]
-        },
-        {
-            "type": "schema",
-            "name": "schema_publish_gui_filter"
-        }
+        }
     ]
 }

@@ -23,23 +23,6 @@ class ExtMappingItemModel(BaseSettingsModel):
     value: str = Field(title="Extension")
 
 
-class PublishGUIFilterItemModel(BaseSettingsModel):
-    _layout = "compact"
-    name: str = Field(title="Name")
-    value: bool = Field(True, title="Active")
-
-
-class PublishGUIFiltersModel(BaseSettingsModel):
-    _layout = "compact"
-    name: str = Field(title="Name")
-    value: list[PublishGUIFilterItemModel] = Field(default_factory=list)
-
-    @validator("value")
-    def validate_unique_outputs(cls, value):
-        ensure_unique_names(value)
-        return value
-
-
 class MayaSettings(BaseSettingsModel):
     """Maya Project Settings."""

@@ -76,11 +59,8 @@ class MayaSettings(BaseSettingsModel):
     templated_workfile_build: TemplatedProfilesModel = Field(
         default_factory=TemplatedProfilesModel,
         title="Templated Workfile Build Settings")
-    filters: list[PublishGUIFiltersModel] = Field(
-        default_factory=list,
-        title="Publish GUI Filters")
 
-    @validator("filters", "ext_mapping")
+    @validator("ext_mapping")
     def validate_unique_outputs(cls, value):
         ensure_unique_names(value)
         return value

@@ -123,20 +103,5 @@ DEFAULT_MAYA_SETTING = {
     "publish": DEFAULT_PUBLISH_SETTINGS,
     "load": DEFAULT_LOADERS_SETTING,
     "workfile_build": DEFAULT_WORKFILE_SETTING,
-    "templated_workfile_build": DEFAULT_TEMPLATED_WORKFILE_SETTINGS,
-    "filters": [
-        {
-            "name": "preset 1",
-            "value": [
-                {"name": "ValidateNoAnimation", "value": False},
-                {"name": "ValidateShapeDefaultNames", "value": False},
-            ]
-        },
-        {
-            "name": "preset 2",
-            "value": [
-                {"name": "ValidateNoAnimation", "value": False},
-            ]
-        },
-    ]
+    "templated_workfile_build": DEFAULT_TEMPLATED_WORKFILE_SETTINGS
 }

@@ -1,19 +0,0 @@
-from pydantic import Field, validator
-from ayon_server.settings import BaseSettingsModel, ensure_unique_names
-
-
-class PublishGUIFilterItemModel(BaseSettingsModel):
-    _layout = "compact"
-    name: str = Field(title="Name")
-    value: bool = Field(True, title="Active")
-
-
-class PublishGUIFiltersModel(BaseSettingsModel):
-    _layout = "compact"
-    name: str = Field(title="Name")
-    value: list[PublishGUIFilterItemModel] = Field(default_factory=list)
-
-    @validator("value")
-    def validate_unique_outputs(cls, value):
-        ensure_unique_names(value)
-        return value

@@ -44,7 +44,6 @@ from .workfile_builder import (
 from .templated_workfile_build import (
     TemplatedWorkfileBuildModel
 )
-from .filters import PublishGUIFilterItemModel
 
 
 class NukeSettings(BaseSettingsModel):

@@ -98,16 +97,6 @@ class NukeSettings(BaseSettingsModel):
         default_factory=TemplatedWorkfileBuildModel
     )
 
-    filters: list[PublishGUIFilterItemModel] = Field(
-        default_factory=list
-    )
-
-    @validator("filters")
-    def ensure_unique_names(cls, value):
-        """Ensure name fields within the lists have unique names."""
-        ensure_unique_names(value)
-        return value
-
 
 DEFAULT_VALUES = {
     "general": DEFAULT_GENERAL_SETTINGS,

@@ -121,6 +110,5 @@ DEFAULT_VALUES = {
     "workfile_builder": DEFAULT_WORKFILE_BUILDER_SETTINGS,
     "templated_workfile_build": {
         "profiles": []
-    },
-    "filters": []
+    }
 }

@@ -1 +1 @@
-__version__ = "0.1.7"
+__version__ = "0.1.8"

@@ -8,17 +8,11 @@ aiohttp_json_rpc = "*" # TVPaint server
 aiohttp-middlewares = "^2.0.0"
 wsrpc_aiohttp = "^3.1.1" # websocket server
 clique = "1.6.*"
-gazu = "^0.9.3"
-google-api-python-client = "^1.12.8" # sync server google support (should be separate?)
-jsonschema = "^2.6.0"
-pymongo = "^3.11.2"
-log4mongo = "^1.7"
-pathlib2= "^2.3.5" # deadline submit publish job only (single place, maybe not needed?)
 pyblish-base = "^1.8.11"
-pynput = "^1.7.2" # Timers manager - TODO replace
+pynput = "^1.7.2" # Timers manager - TODO remove
 "Qt.py" = "^1.3.3"
 qtawesome = "0.7.3"
 speedcopy = "^2.1"
 slack-sdk = "^3.6.0"
-pysftp = "^0.2.9"
-dropbox = "^11.20.0"

@@ -1,4 +1,4 @@
-from pydantic import Field, validator
+from pydantic import Field
 from ayon_server.settings import (
     BaseSettingsModel,
     ensure_unique_names,

@@ -14,23 +14,6 @@ from .publish_plugins import (
 )
 
 
-class PublishGUIFilterItemModel(BaseSettingsModel):
-    _layout = "compact"
-    name: str = Field(title="Name")
-    value: bool = Field(True, title="Active")
-
-
-class PublishGUIFiltersModel(BaseSettingsModel):
-    _layout = "compact"
-    name: str = Field(title="Name")
-    value: list[PublishGUIFilterItemModel] = Field(default_factory=list)
-
-    @validator("value")
-    def validate_unique_outputs(cls, value):
-        ensure_unique_names(value)
-        return value
-
-
 class TvpaintSettings(BaseSettingsModel):
     imageio: TVPaintImageIOModel = Field(
         default_factory=TVPaintImageIOModel,

@@ -52,14 +35,6 @@ class TvpaintSettings(BaseSettingsModel):
         default_factory=WorkfileBuilderPlugin,
         title="Workfile Builder"
     )
-    filters: list[PublishGUIFiltersModel] = Field(
-        default_factory=list,
-        title="Publish GUI Filters")
-
-    @validator("filters")
-    def validate_unique_outputs(cls, value):
-        ensure_unique_names(value)
-        return value
 
 
 DEFAULT_VALUES = {

@@ -1 +1 @@
-__version__ = "0.1.0"
+__version__ = "0.1.1"

@@ -60,7 +60,7 @@ class TestPublishInAfterEffects(AELocalPublishTestClass):
                 name="renderTest_taskMain"))
 
         failures.append(
-            DBAssert.count_of_types(dbcon, "representation", 2))
+            DBAssert.count_of_types(dbcon, "representation", 3))
 
         additional_args = {"context.subset": "workfileTest_task",
                            "context.ext": "aep"}