diff --git a/.github/ISSUE_TEMPLATE/bug_report.yml b/.github/ISSUE_TEMPLATE/bug_report.yml
index b6a243bcfe..5826d99d38 100644
--- a/.github/ISSUE_TEMPLATE/bug_report.yml
+++ b/.github/ISSUE_TEMPLATE/bug_report.yml
@@ -35,6 +35,8 @@ body:
label: Version
description: What version are you running? Look to OpenPype Tray
options:
+ - 3.16.3-nightly.5
+ - 3.16.3-nightly.4
- 3.16.3-nightly.3
- 3.16.3-nightly.2
- 3.16.3-nightly.1
@@ -133,8 +135,6 @@ body:
- 3.14.7-nightly.6
- 3.14.7-nightly.5
- 3.14.7-nightly.4
- - 3.14.7-nightly.3
- - 3.14.7-nightly.2
validations:
required: true
- type: dropdown
diff --git a/openpype/cli.py b/openpype/cli.py
index 6d6a34b0fb..0df277fb0a 100644
--- a/openpype/cli.py
+++ b/openpype/cli.py
@@ -196,47 +196,6 @@ def publish(paths, targets, gui):
PypeCommands.publish(list(paths), targets, gui)
-@main.command()
-@click.argument("path")
-@click.option("-h", "--host", help="Host")
-@click.option("-u", "--user", help="User email address")
-@click.option("-p", "--project", help="Project")
-@click.option("-t", "--targets", help="Targets", default=None,
- multiple=True)
-def remotepublishfromapp(project, path, host, user=None, targets=None):
- """Start CLI publishing.
-
- Publish collects json from paths provided as an argument.
- More than one path is allowed.
- """
-
- if AYON_SERVER_ENABLED:
- raise RuntimeError(
- "AYON does not support 'remotepublishfromapp' command."
- )
- PypeCommands.remotepublishfromapp(
- project, path, host, user, targets=targets
- )
-
-
-@main.command()
-@click.argument("path")
-@click.option("-u", "--user", help="User email address")
-@click.option("-p", "--project", help="Project")
-@click.option("-t", "--targets", help="Targets", default=None,
- multiple=True)
-def remotepublish(project, path, user=None, targets=None):
- """Start CLI publishing.
-
- Publish collects json from paths provided as an argument.
- More than one path is allowed.
- """
-
- if AYON_SERVER_ENABLED:
- raise RuntimeError("AYON does not support 'remotepublish' command.")
- PypeCommands.remotepublish(project, path, user, targets=targets)
-
-
@main.command(context_settings={"ignore_unknown_options": True})
def projectmanager():
if AYON_SERVER_ENABLED:
@@ -338,12 +297,18 @@ def runtests(folder, mark, pyargs, test_data_folder, persist, app_variant,
persist, app_variant, timeout, setup_only)
-@main.command()
+@main.command(help="DEPRECATED - run sync server")
+@click.pass_context
@click.option("-a", "--active_site", required=True,
- help="Name of active stie")
-def syncserver(active_site):
+ help="Name of active site")
+def syncserver(ctx, active_site):
"""Run sync site server in background.
+ Deprecated:
+ This command is deprecated and will be removed in future versions.
+ Use '~/openpype_console module sync_server syncservice' instead.
+
+ Details:
Some Site Sync use cases need to expose site to another one.
For example if majority of artists work in studio, they are not using
SS at all, but if you want to expose published assets to 'studio' site
@@ -359,7 +324,10 @@ def syncserver(active_site):
if AYON_SERVER_ENABLED:
raise RuntimeError("AYON does not support 'syncserver' command.")
- PypeCommands().syncserver(active_site)
+
+ from openpype.modules.sync_server.sync_server_module import (
+ syncservice)
+ ctx.invoke(syncservice, active_site=active_site)
@main.command()
diff --git a/openpype/client/server/conversion_utils.py b/openpype/client/server/conversion_utils.py
index 24d4678095..42df337b6d 100644
--- a/openpype/client/server/conversion_utils.py
+++ b/openpype/client/server/conversion_utils.py
@@ -133,7 +133,6 @@ def _get_default_template_name(templates):
def _template_replacements_to_v3(template):
return (
template
- .replace("{folder[name]}", "{asset}")
.replace("{product[name]}", "{subset}")
.replace("{product[type]}", "{family}")
)
@@ -715,7 +714,6 @@ def convert_v4_representation_to_v3(representation):
if "template" in output_data:
output_data["template"] = (
output_data["template"]
- .replace("{folder[name]}", "{asset}")
.replace("{product[name]}", "{subset}")
.replace("{product[type]}", "{family}")
)
@@ -977,7 +975,6 @@ def convert_create_representation_to_v4(representation, con):
representation_data = representation["data"]
representation_data["template"] = (
representation_data["template"]
- .replace("{asset}", "{folder[name]}")
.replace("{subset}", "{product[name]}")
.replace("{family}", "{product[type]}")
)
@@ -1266,7 +1263,6 @@ def convert_update_representation_to_v4(
if "template" in attribs:
attribs["template"] = (
attribs["template"]
- .replace("{asset}", "{folder[name]}")
.replace("{family}", "{product[type]}")
.replace("{subset}", "{product[name]}")
)
diff --git a/openpype/hooks/pre_ocio_hook.py b/openpype/hooks/pre_ocio_hook.py
index 1ac305b635..1307ed9f76 100644
--- a/openpype/hooks/pre_ocio_hook.py
+++ b/openpype/hooks/pre_ocio_hook.py
@@ -38,7 +38,8 @@ class OCIOEnvHook(PreLaunchHook):
host_name=self.host_name,
project_settings=self.data["project_settings"],
anatomy_data=template_data,
- anatomy=self.data["anatomy"]
+ anatomy=self.data["anatomy"],
+ env=self.launch_context.env,
)
if config_data:
diff --git a/openpype/host/dirmap.py b/openpype/host/dirmap.py
index e77f06e9d6..96a98e808e 100644
--- a/openpype/host/dirmap.py
+++ b/openpype/host/dirmap.py
@@ -32,19 +32,26 @@ class HostDirmap(object):
"""
def __init__(
- self, host_name, project_name, project_settings=None, sync_module=None
+ self,
+ host_name,
+ project_name,
+ project_settings=None,
+ sync_module=None
):
self.host_name = host_name
self.project_name = project_name
self._project_settings = project_settings
- self._sync_module = sync_module # to limit reinit of Modules
+ self._sync_module = sync_module
+ # to limit reinit of Modules
+ self._sync_module_discovered = sync_module is not None
self._log = None
@property
def sync_module(self):
- if self._sync_module is None:
+ if not self._sync_module_discovered:
+ self._sync_module_discovered = True
manager = ModulesManager()
- self._sync_module = manager["sync_server"]
+ self._sync_module = manager.get("sync_server")
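+            # may remain None when the sync_server module is not available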
return self._sync_module
@property
@@ -151,21 +158,25 @@ class HostDirmap(object):
"""
project_name = self.project_name
+ sync_module = self.sync_module
mapping = {}
- if (not self.sync_module.enabled or
- project_name not in self.sync_module.get_enabled_projects()):
+ if (
+ sync_module is None
+ or not sync_module.enabled
+ or project_name not in sync_module.get_enabled_projects()
+ ):
return mapping
- active_site = self.sync_module.get_local_normalized_site(
- self.sync_module.get_active_site(project_name))
- remote_site = self.sync_module.get_local_normalized_site(
- self.sync_module.get_remote_site(project_name))
+ active_site = sync_module.get_local_normalized_site(
+ sync_module.get_active_site(project_name))
+ remote_site = sync_module.get_local_normalized_site(
+ sync_module.get_remote_site(project_name))
self.log.debug(
"active {} - remote {}".format(active_site, remote_site)
)
if active_site == "local" and active_site != remote_site:
- sync_settings = self.sync_module.get_sync_project_setting(
+ sync_settings = sync_module.get_sync_project_setting(
project_name,
exclude_locals=False,
cached=False)
@@ -179,7 +190,7 @@ class HostDirmap(object):
self.log.debug("remote overrides {}".format(remote_overrides))
current_platform = platform.system().lower()
- remote_provider = self.sync_module.get_provider_for_site(
+ remote_provider = sync_module.get_provider_for_site(
project_name, remote_site
)
# dirmap has sense only with regular disk provider, in the workfile
diff --git a/openpype/hosts/aftereffects/plugins/publish/closeAE.py b/openpype/hosts/aftereffects/plugins/publish/closeAE.py
index eff2573e8f..0be20d9f05 100644
--- a/openpype/hosts/aftereffects/plugins/publish/closeAE.py
+++ b/openpype/hosts/aftereffects/plugins/publish/closeAE.py
@@ -15,7 +15,7 @@ class CloseAE(pyblish.api.ContextPlugin):
active = True
hosts = ["aftereffects"]
- targets = ["remotepublish"]
+ targets = ["automated"]
def process(self, context):
self.log.info("CloseAE")
diff --git a/openpype/hosts/houdini/api/plugin.py b/openpype/hosts/houdini/api/plugin.py
index 05e52e2478..70c837205e 100644
--- a/openpype/hosts/houdini/api/plugin.py
+++ b/openpype/hosts/houdini/api/plugin.py
@@ -167,6 +167,7 @@ class HoudiniCreatorBase(object):
class HoudiniCreator(NewCreator, HoudiniCreatorBase):
"""Base class for most of the Houdini creator plugins."""
selected_nodes = []
+ settings_name = None
def create(self, subset_name, instance_data, pre_create_data):
try:
@@ -294,3 +295,21 @@ class HoudiniCreator(NewCreator, HoudiniCreatorBase):
"""
return [hou.ropNodeTypeCategory()]
+
+ def apply_settings(self, project_settings, system_settings):
+ """Method called on initialization of plugin to apply settings."""
+
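+        # Example layout (illustrative only; keys depend on project settings):
+        #   project_settings["houdini"]["create"]["CreateArnoldAss"] = {
+        #       "ext": ".ass.gz"}
+        # Every key found there becomes an attribute on this creator instance.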
+ settings_name = self.settings_name
+ if settings_name is None:
+ settings_name = self.__class__.__name__
+
+ settings = project_settings["houdini"]["create"]
+ settings = settings.get(settings_name)
+ if settings is None:
+ self.log.debug(
+ "No settings found for {}".format(self.__class__.__name__)
+ )
+ return
+
+ for key, value in settings.items():
+ setattr(self, key, value)
diff --git a/openpype/hosts/houdini/plugins/create/create_arnold_ass.py b/openpype/hosts/houdini/plugins/create/create_arnold_ass.py
index 8b310753d0..12d08f7d83 100644
--- a/openpype/hosts/houdini/plugins/create/create_arnold_ass.py
+++ b/openpype/hosts/houdini/plugins/create/create_arnold_ass.py
@@ -10,9 +10,10 @@ class CreateArnoldAss(plugin.HoudiniCreator):
label = "Arnold ASS"
family = "ass"
icon = "magic"
- defaults = ["Main"]
# Default extension: `.ass` or `.ass.gz`
+    # however, calling HoudiniCreator.create()
+    # will override it with the value from the project settings
ext = ".ass"
def create(self, subset_name, instance_data, pre_create_data):
diff --git a/openpype/hosts/houdini/plugins/create/create_arnold_rop.py b/openpype/hosts/houdini/plugins/create/create_arnold_rop.py
index ca516619f6..b58c377a20 100644
--- a/openpype/hosts/houdini/plugins/create/create_arnold_rop.py
+++ b/openpype/hosts/houdini/plugins/create/create_arnold_rop.py
@@ -9,7 +9,6 @@ class CreateArnoldRop(plugin.HoudiniCreator):
label = "Arnold ROP"
family = "arnold_rop"
icon = "magic"
- defaults = ["master"]
# Default extension
ext = "exr"
diff --git a/openpype/hosts/houdini/plugins/create/create_karma_rop.py b/openpype/hosts/houdini/plugins/create/create_karma_rop.py
index c7a9fe0968..4e1360ca45 100644
--- a/openpype/hosts/houdini/plugins/create/create_karma_rop.py
+++ b/openpype/hosts/houdini/plugins/create/create_karma_rop.py
@@ -11,7 +11,6 @@ class CreateKarmaROP(plugin.HoudiniCreator):
label = "Karma ROP"
family = "karma_rop"
icon = "magic"
- defaults = ["master"]
def create(self, subset_name, instance_data, pre_create_data):
import hou # noqa
diff --git a/openpype/hosts/houdini/plugins/create/create_mantra_rop.py b/openpype/hosts/houdini/plugins/create/create_mantra_rop.py
index 5c29adb33f..d2f0e735a8 100644
--- a/openpype/hosts/houdini/plugins/create/create_mantra_rop.py
+++ b/openpype/hosts/houdini/plugins/create/create_mantra_rop.py
@@ -11,7 +11,6 @@ class CreateMantraROP(plugin.HoudiniCreator):
label = "Mantra ROP"
family = "mantra_rop"
icon = "magic"
- defaults = ["master"]
def create(self, subset_name, instance_data, pre_create_data):
import hou # noqa
diff --git a/openpype/hosts/houdini/plugins/create/create_redshift_rop.py b/openpype/hosts/houdini/plugins/create/create_redshift_rop.py
index 8f4aa1327d..1b8826a932 100644
--- a/openpype/hosts/houdini/plugins/create/create_redshift_rop.py
+++ b/openpype/hosts/houdini/plugins/create/create_redshift_rop.py
@@ -13,7 +13,6 @@ class CreateRedshiftROP(plugin.HoudiniCreator):
label = "Redshift ROP"
family = "redshift_rop"
icon = "magic"
- defaults = ["master"]
ext = "exr"
def create(self, subset_name, instance_data, pre_create_data):
diff --git a/openpype/hosts/houdini/plugins/create/create_vray_rop.py b/openpype/hosts/houdini/plugins/create/create_vray_rop.py
index 58748d4c34..793a544fdf 100644
--- a/openpype/hosts/houdini/plugins/create/create_vray_rop.py
+++ b/openpype/hosts/houdini/plugins/create/create_vray_rop.py
@@ -14,8 +14,6 @@ class CreateVrayROP(plugin.HoudiniCreator):
label = "VRay ROP"
family = "vray_rop"
icon = "magic"
- defaults = ["master"]
-
ext = "exr"
def create(self, subset_name, instance_data, pre_create_data):
diff --git a/openpype/hosts/houdini/plugins/publish/validate_primitive_hierarchy_paths.py b/openpype/hosts/houdini/plugins/publish/validate_primitive_hierarchy_paths.py
index ca06617ab0..471fa5b6d1 100644
--- a/openpype/hosts/houdini/plugins/publish/validate_primitive_hierarchy_paths.py
+++ b/openpype/hosts/houdini/plugins/publish/validate_primitive_hierarchy_paths.py
@@ -32,8 +32,9 @@ class ValidatePrimitiveHierarchyPaths(pyblish.api.InstancePlugin):
def process(self, instance):
invalid = self.get_invalid(instance)
if invalid:
+ nodes = [n.path() for n in invalid]
raise PublishValidationError(
- "See log for details. " "Invalid nodes: {0}".format(invalid),
+                "See log for details. Invalid nodes: {0}".format(nodes),
title=self.label
)
diff --git a/openpype/hosts/houdini/plugins/publish/validate_workfile_paths.py b/openpype/hosts/houdini/plugins/publish/validate_workfile_paths.py
index 543c8e1407..afe05e3173 100644
--- a/openpype/hosts/houdini/plugins/publish/validate_workfile_paths.py
+++ b/openpype/hosts/houdini/plugins/publish/validate_workfile_paths.py
@@ -7,8 +7,6 @@ from openpype.pipeline import (
)
from openpype.pipeline.publish import RepairAction
-from openpype.pipeline.publish import RepairAction
-
class ValidateWorkfilePaths(
pyblish.api.InstancePlugin, OptionalPyblishPluginMixin):
diff --git a/openpype/hosts/max/api/plugin.py b/openpype/hosts/max/api/plugin.py
index d8db716e6d..3389447cb0 100644
--- a/openpype/hosts/max/api/plugin.py
+++ b/openpype/hosts/max/api/plugin.py
@@ -136,6 +136,7 @@ MS_CUSTOM_ATTRIB = """attributes "openPypeData"
temp_arr = #()
for x in all_handles do
(
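+            -- skip monitors whose node has been deleted (undefined)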
+ if x.node == undefined do continue
handle_name = node_to_name x.node
append temp_arr handle_name
)
@@ -185,7 +186,10 @@ class MaxCreatorBase(object):
node = rt.Container(name=node)
attrs = rt.Execute(MS_CUSTOM_ATTRIB)
- rt.custAttributes.add(node.baseObject, attrs)
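+        # keep OpenPype custom attributes on a dedicated "OP Data" modifier
+        # instead of the container's base object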
+ modifier = rt.EmptyModifier()
+ rt.addModifier(node, modifier)
+ node.modifiers[0].name = "OP Data"
+ rt.custAttributes.add(node.modifiers[0], attrs)
return node
@@ -209,13 +213,19 @@ class MaxCreator(Creator, MaxCreatorBase):
if pre_create_data.get("use_selection"):
node_list = []
+ sel_list = []
for i in self.selected_nodes:
node_ref = rt.NodeTransformMonitor(node=i)
node_list.append(node_ref)
+ sel_list.append(str(i))
# Setting the property
rt.setProperty(
- instance_node.openPypeData, "all_handles", node_list)
+ instance_node.modifiers[0].openPypeData,
+ "all_handles", node_list)
+ rt.setProperty(
+ instance_node.modifiers[0].openPypeData,
+ "sel_list", sel_list)
self._add_instance_to_context(instance)
imprint(instance_node.name, instance.data_to_store())
@@ -254,8 +264,8 @@ class MaxCreator(Creator, MaxCreatorBase):
instance_node = rt.GetNodeByName(
instance.data.get("instance_node"))
if instance_node:
- count = rt.custAttributes.count(instance_node)
- rt.custAttributes.delete(instance_node, count)
+ count = rt.custAttributes.count(instance_node.modifiers[0])
+ rt.custAttributes.delete(instance_node.modifiers[0], count)
rt.Delete(instance_node)
self._remove_instance_from_context(instance)
diff --git a/openpype/hosts/max/plugins/publish/collect_members.py b/openpype/hosts/max/plugins/publish/collect_members.py
index 812d82ff26..2970cf0e24 100644
--- a/openpype/hosts/max/plugins/publish/collect_members.py
+++ b/openpype/hosts/max/plugins/publish/collect_members.py
@@ -17,6 +17,6 @@ class CollectMembers(pyblish.api.InstancePlugin):
container = rt.GetNodeByName(instance.data["instance_node"])
instance.data["members"] = [
member.node for member
- in container.openPypeData.all_handles
+ in container.modifiers[0].openPypeData.all_handles
]
self.log.debug("{}".format(instance.data["members"]))
diff --git a/openpype/hosts/maya/api/plugin.py b/openpype/hosts/maya/api/plugin.py
index 0ee02d8485..4d467840dd 100644
--- a/openpype/hosts/maya/api/plugin.py
+++ b/openpype/hosts/maya/api/plugin.py
@@ -575,12 +575,15 @@ class ReferenceLoader(Loader):
raise LoadError("No namespace specified in "
"Maya ReferenceLoader settings")
elif not custom_naming['group_name']:
- raise LoadError("No group name specified in "
- "Maya ReferenceLoader settings")
+ self.log.debug("No custom group_name, no group will be created.")
+ options["attach_to_root"] = False
formatting_data = {
"asset_name": asset['name'],
"asset_type": asset['type'],
+ "folder": {
+ "name": asset["name"],
+ },
"subset": subset['name'],
"family": (
subset['data'].get('family') or
diff --git a/openpype/hosts/maya/plugins/load/load_reference.py b/openpype/hosts/maya/plugins/load/load_reference.py
index d339aff69c..c8d3b3128a 100644
--- a/openpype/hosts/maya/plugins/load/load_reference.py
+++ b/openpype/hosts/maya/plugins/load/load_reference.py
@@ -9,7 +9,8 @@ from openpype.hosts.maya.api.lib import (
maintained_selection,
get_container_members,
parent_nodes,
- create_rig_animation_instance
+ create_rig_animation_instance,
+ get_reference_node
)
@@ -123,6 +124,10 @@ class ReferenceLoader(openpype.hosts.maya.api.plugin.ReferenceLoader):
attach_to_root = options.get("attach_to_root", True)
group_name = options["group_name"]
+ # no group shall be created
+ if not attach_to_root:
+ group_name = namespace
+
path = self.filepath_from_context(context)
with maintained_selection():
cmds.loadPlugin("AbcImport.mll", quiet=True)
@@ -148,11 +153,10 @@ class ReferenceLoader(openpype.hosts.maya.api.plugin.ReferenceLoader):
if current_namespace != ":":
group_name = current_namespace + ":" + group_name
- group_name = "|" + group_name
-
self[:] = new_nodes
if attach_to_root:
+ group_name = "|" + group_name
roots = cmds.listRelatives(group_name,
children=True,
fullPath=True) or []
@@ -205,6 +209,11 @@ class ReferenceLoader(openpype.hosts.maya.api.plugin.ReferenceLoader):
self._post_process_rig(name, namespace, context, options)
else:
if "translate" in options:
+ if not attach_to_root and new_nodes:
+ root_nodes = cmds.ls(new_nodes, assemblies=True,
+ long=True)
+ # we assume only a single root is ever loaded
+ group_name = root_nodes[0]
cmds.setAttr("{}.translate".format(group_name),
*options["translate"])
return new_nodes
diff --git a/openpype/hosts/nuke/api/lib.py b/openpype/hosts/nuke/api/lib.py
index 364c8eeff4..fcf162c84f 100644
--- a/openpype/hosts/nuke/api/lib.py
+++ b/openpype/hosts/nuke/api/lib.py
@@ -424,10 +424,13 @@ def add_publish_knob(node):
return node
-@deprecated
+@deprecated("openpype.hosts.nuke.api.lib.set_node_data")
def set_avalon_knob_data(node, data=None, prefix="avalon:"):
"""[DEPRECATED] Sets data into nodes's avalon knob
+    This function is deprecated and will be removed in future versions.
+ Use `set_node_data` instead.
+
Arguments:
node (nuke.Node): Nuke node to imprint with data,
data (dict, optional): Data to be imprinted into AvalonTab
@@ -487,10 +490,13 @@ def set_avalon_knob_data(node, data=None, prefix="avalon:"):
return node
-@deprecated
+@deprecated("openpype.hosts.nuke.api.lib.get_node_data")
def get_avalon_knob_data(node, prefix="avalon:", create=True):
"""[DEPRECATED] Gets a data from nodes's avalon knob
+    This function is deprecated and will be removed in future versions.
+ Use `get_node_data` instead.
+
Arguments:
node (obj): Nuke node to search for data,
prefix (str, optional): filtering prefix
@@ -1699,7 +1705,7 @@ def create_write_node_legacy(
knob_value = float(knob_value)
if knob_type == "bool":
knob_value = bool(knob_value)
- if knob_type in ["2d_vector", "3d_vector"]:
+ if knob_type in ["2d_vector", "3d_vector", "color", "box"]:
knob_value = list(knob_value)
GN[knob_name].setValue(knob_value)
@@ -1715,7 +1721,7 @@ def set_node_knobs_from_settings(node, knob_settings, **kwargs):
Args:
node (nuke.Node): nuke node
knob_settings (list): list of dict. Keys are `type`, `name`, `value`
- kwargs (dict)[optional]: keys for formatable knob settings
+ kwargs (dict)[optional]: keys for formattable knob settings
"""
for knob in knob_settings:
log.debug("__ knob: {}".format(pformat(knob)))
@@ -1732,7 +1738,7 @@ def set_node_knobs_from_settings(node, knob_settings, **kwargs):
)
continue
- # first deal with formatable knob settings
+ # first deal with formattable knob settings
if knob_type == "formatable":
template = knob["template"]
to_type = knob["to_type"]
@@ -1741,8 +1747,8 @@ def set_node_knobs_from_settings(node, knob_settings, **kwargs):
**kwargs
)
except KeyError as msg:
- log.warning("__ msg: {}".format(msg))
- raise KeyError(msg)
+ raise KeyError(
+ "Not able to format expression: {}".format(msg))
# convert value to correct type
if to_type == "2d_vector":
@@ -1781,8 +1787,8 @@ def convert_knob_value_to_correct_type(knob_type, knob_value):
knob_value = knob_value
elif knob_type == "color_gui":
knob_value = color_gui_to_int(knob_value)
- elif knob_type in ["2d_vector", "3d_vector", "color"]:
- knob_value = [float(v) for v in knob_value]
+ elif knob_type in ["2d_vector", "3d_vector", "color", "box"]:
+ knob_value = [float(val_) for val_ in knob_value]
return knob_value
@@ -2204,7 +2210,6 @@ Reopening Nuke should synchronize these paths and resolve any discrepancies.
continue
preset_clrsp = input["colorspace"]
- log.debug(preset_clrsp)
if preset_clrsp is not None:
current = n["colorspace"].value()
future = str(preset_clrsp)
@@ -2686,7 +2691,15 @@ def _launch_workfile_app():
host_tools.show_workfiles(parent=None, on_top=True)
+@deprecated("openpype.hosts.nuke.api.lib.start_workfile_template_builder")
def process_workfile_builder():
+    """[DEPRECATED] Process workfile builder on Nuke start.
+
+ This function is deprecated and will be removed in future versions.
+    Use the `project_settings/nuke/templated_workfile_build` settings, which are
+    handled by `start_workfile_template_builder()`.
+ """
+
# to avoid looping of the callback, remove it!
nuke.removeOnCreate(process_workfile_builder, nodeClass="Root")
@@ -2695,11 +2708,6 @@ def process_workfile_builder():
workfile_builder = project_settings["nuke"].get(
"workfile_builder", {})
- # get all imortant settings
- openlv_on = env_value_to_bool(
- env_key="AVALON_OPEN_LAST_WORKFILE",
- default=None)
-
# get settings
createfv_on = workfile_builder.get("create_first_version") or None
builder_on = workfile_builder.get("builder_on_start") or None
@@ -2740,20 +2748,15 @@ def process_workfile_builder():
save_file(last_workfile_path)
return
- # skip opening of last version if it is not enabled
- if not openlv_on or not os.path.exists(last_workfile_path):
- return
-
- log.info("Opening last workfile...")
- # open workfile
- open_file(last_workfile_path)
-
def start_workfile_template_builder():
from .workfile_template_builder import (
build_workfile_template
)
+ # remove callback since it would be duplicating the workfile
+ nuke.removeOnCreate(start_workfile_template_builder, nodeClass="Root")
+
# to avoid looping of the callback, remove it!
log.info("Starting workfile template builder...")
try:
@@ -2761,8 +2764,6 @@ def start_workfile_template_builder():
except TemplateProfileNotFound:
log.warning("Template profile not found. Skipping...")
- # remove callback since it would be duplicating the workfile
- nuke.removeOnCreate(start_workfile_template_builder, nodeClass="Root")
@deprecated
def recreate_instance(origin_node, avalon_data=None):
@@ -2954,6 +2955,7 @@ class DirmapCache:
"""Caching class to get settings and sync_module easily and only once."""
_project_name = None
_project_settings = None
+ _sync_module_discovered = False
_sync_module = None
_mapping = None
@@ -2971,8 +2973,10 @@ class DirmapCache:
@classmethod
def sync_module(cls):
- if cls._sync_module is None:
- cls._sync_module = ModulesManager().modules_by_name["sync_server"]
+ if not cls._sync_module_discovered:
+ cls._sync_module_discovered = True
+ cls._sync_module = ModulesManager().modules_by_name.get(
+ "sync_server")
return cls._sync_module
@classmethod
diff --git a/openpype/hosts/nuke/api/pipeline.py b/openpype/hosts/nuke/api/pipeline.py
index 045f7ec85d..65b4b91323 100644
--- a/openpype/hosts/nuke/api/pipeline.py
+++ b/openpype/hosts/nuke/api/pipeline.py
@@ -34,6 +34,7 @@ from .lib import (
get_main_window,
add_publish_knob,
WorkfileSettings,
+    # TODO: remove this once workfile builder is removed
process_workfile_builder,
start_workfile_template_builder,
launch_workfiles_app,
@@ -155,11 +156,18 @@ def add_nuke_callbacks():
"""
nuke_settings = get_current_project_settings()["nuke"]
workfile_settings = WorkfileSettings()
+
# Set context settings.
nuke.addOnCreate(
workfile_settings.set_context_settings, nodeClass="Root")
+
+ # adding favorites to file browser
nuke.addOnCreate(workfile_settings.set_favorites, nodeClass="Root")
+
+ # template builder callbacks
nuke.addOnCreate(start_workfile_template_builder, nodeClass="Root")
+
+    # TODO: remove this callback once workfile builder is removed
nuke.addOnCreate(process_workfile_builder, nodeClass="Root")
# fix ffmpeg settings on script
@@ -169,11 +177,12 @@ def add_nuke_callbacks():
nuke.addOnScriptLoad(check_inventory_versions)
nuke.addOnScriptSave(check_inventory_versions)
- # # set apply all workfile settings on script load and save
+ # set apply all workfile settings on script load and save
nuke.addOnScriptLoad(WorkfileSettings().set_context_settings)
+
if nuke_settings["nuke-dirmap"]["enabled"]:
- log.info("Added Nuke's dirmaping callback ...")
+ log.info("Added Nuke's dir-mapping callback ...")
# Add dirmap for file paths.
nuke.addFilenameFilter(dirmap_file_name_filter)
diff --git a/openpype/hosts/nuke/api/workio.py b/openpype/hosts/nuke/api/workio.py
index 8d29e0441f..98e59eff71 100644
--- a/openpype/hosts/nuke/api/workio.py
+++ b/openpype/hosts/nuke/api/workio.py
@@ -1,6 +1,7 @@
"""Host API required Work Files tool"""
import os
import nuke
+import shutil
from .utils import is_headless
@@ -21,21 +22,37 @@ def save_file(filepath):
def open_file(filepath):
+
+ def read_script(nuke_script):
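+        # read the script contents and point root name / project directory
+        # at the workfile without spawning a new Nuke session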
+ nuke.scriptClear()
+ nuke.scriptReadFile(nuke_script)
+ nuke.Root()["name"].setValue(nuke_script)
+ nuke.Root()["project_directory"].setValue(os.path.dirname(nuke_script))
+ nuke.Root().setModified(False)
+
filepath = filepath.replace("\\", "/")
# To remain in the same window, we have to clear the script and read
# in the contents of the workfile.
- nuke.scriptClear()
+ # Nuke Preferences can be read after the script is read.
+ read_script(filepath)
+
if not is_headless():
autosave = nuke.toNode("preferences")["AutoSaveName"].evaluate()
- autosave_prmpt = "Autosave detected.\nWould you like to load the autosave file?" # noqa
+ autosave_prmpt = "Autosave detected.\n" \
+ "Would you like to load the autosave file?" # noqa
if os.path.isfile(autosave) and nuke.ask(autosave_prmpt):
- filepath = autosave
+ try:
+ # Overwrite the filepath with autosave
+ shutil.copy(autosave, filepath)
+ # Now read the (auto-saved) script again
+ read_script(filepath)
+ except shutil.Error as err:
+                nuke.message(
+                    "Detected autosave file could not be used.\n"
+                    "{}".format(err))
- nuke.scriptReadFile(filepath)
- nuke.Root()["name"].setValue(filepath)
- nuke.Root()["project_directory"].setValue(os.path.dirname(filepath))
- nuke.Root().setModified(False)
return True
diff --git a/openpype/hosts/photoshop/plugins/publish/closePS.py b/openpype/hosts/photoshop/plugins/publish/closePS.py
index b4ded96001..b4c3a4c966 100644
--- a/openpype/hosts/photoshop/plugins/publish/closePS.py
+++ b/openpype/hosts/photoshop/plugins/publish/closePS.py
@@ -17,7 +17,7 @@ class ClosePS(pyblish.api.ContextPlugin):
active = True
hosts = ["photoshop"]
- targets = ["remotepublish"]
+ targets = ["automated"]
def process(self, context):
self.log.info("ClosePS")
diff --git a/openpype/hosts/photoshop/plugins/publish/collect_auto_image.py b/openpype/hosts/photoshop/plugins/publish/collect_auto_image.py
index ce408f8d01..f1d8419608 100644
--- a/openpype/hosts/photoshop/plugins/publish/collect_auto_image.py
+++ b/openpype/hosts/photoshop/plugins/publish/collect_auto_image.py
@@ -6,8 +6,6 @@ from openpype.pipeline.create import get_subset_name
class CollectAutoImage(pyblish.api.ContextPlugin):
"""Creates auto image in non artist based publishes (Webpublisher).
-
- 'remotepublish' should be renamed to 'autopublish' or similar in the future
"""
label = "Collect Auto Image"
@@ -15,7 +13,7 @@ class CollectAutoImage(pyblish.api.ContextPlugin):
hosts = ["photoshop"]
order = pyblish.api.CollectorOrder + 0.2
- targets = ["remotepublish"]
+ targets = ["automated"]
def process(self, context):
family = "image"
diff --git a/openpype/hosts/photoshop/plugins/publish/collect_auto_review.py b/openpype/hosts/photoshop/plugins/publish/collect_auto_review.py
index 7de4adcaf4..82ba0ac09c 100644
--- a/openpype/hosts/photoshop/plugins/publish/collect_auto_review.py
+++ b/openpype/hosts/photoshop/plugins/publish/collect_auto_review.py
@@ -20,7 +20,7 @@ class CollectAutoReview(pyblish.api.ContextPlugin):
label = "Collect Auto Review"
hosts = ["photoshop"]
order = pyblish.api.CollectorOrder + 0.2
- targets = ["remotepublish"]
+ targets = ["automated"]
publish = True
diff --git a/openpype/hosts/photoshop/plugins/publish/collect_auto_workfile.py b/openpype/hosts/photoshop/plugins/publish/collect_auto_workfile.py
index d10cf62c67..01dc50af40 100644
--- a/openpype/hosts/photoshop/plugins/publish/collect_auto_workfile.py
+++ b/openpype/hosts/photoshop/plugins/publish/collect_auto_workfile.py
@@ -12,7 +12,7 @@ class CollectAutoWorkfile(pyblish.api.ContextPlugin):
label = "Collect Workfile"
hosts = ["photoshop"]
- targets = ["remotepublish"]
+ targets = ["automated"]
def process(self, context):
family = "workfile"
diff --git a/openpype/hosts/photoshop/plugins/publish/collect_batch_data.py b/openpype/hosts/photoshop/plugins/publish/collect_batch_data.py
index a5fea7ac7d..b13ff5e476 100644
--- a/openpype/hosts/photoshop/plugins/publish/collect_batch_data.py
+++ b/openpype/hosts/photoshop/plugins/publish/collect_batch_data.py
@@ -35,7 +35,7 @@ class CollectBatchData(pyblish.api.ContextPlugin):
order = pyblish.api.CollectorOrder - 0.495
label = "Collect batch data"
hosts = ["photoshop"]
- targets = ["remotepublish"]
+ targets = ["webpublish"]
def process(self, context):
self.log.info("CollectBatchData")
diff --git a/openpype/hosts/photoshop/plugins/publish/collect_color_coded_instances.py b/openpype/hosts/photoshop/plugins/publish/collect_color_coded_instances.py
index 90fca8398f..c16616bcb2 100644
--- a/openpype/hosts/photoshop/plugins/publish/collect_color_coded_instances.py
+++ b/openpype/hosts/photoshop/plugins/publish/collect_color_coded_instances.py
@@ -34,7 +34,7 @@ class CollectColorCodedInstances(pyblish.api.ContextPlugin):
label = "Instances"
order = pyblish.api.CollectorOrder
hosts = ["photoshop"]
- targets = ["remotepublish"]
+ targets = ["automated"]
# configurable by Settings
color_code_mapping = []
diff --git a/openpype/hosts/photoshop/plugins/publish/collect_published_version.py b/openpype/hosts/photoshop/plugins/publish/collect_published_version.py
index 2502689e4b..7371c0564f 100644
--- a/openpype/hosts/photoshop/plugins/publish/collect_published_version.py
+++ b/openpype/hosts/photoshop/plugins/publish/collect_published_version.py
@@ -26,7 +26,7 @@ class CollectPublishedVersion(pyblish.api.ContextPlugin):
order = pyblish.api.CollectorOrder + 0.190
label = "Collect published version"
hosts = ["photoshop"]
- targets = ["remotepublish"]
+ targets = ["automated"]
def process(self, context):
workfile_subset_name = None
diff --git a/openpype/hosts/webpublisher/README.md b/openpype/hosts/webpublisher/README.md
index 0826e44490..07a957fa7f 100644
--- a/openpype/hosts/webpublisher/README.md
+++ b/openpype/hosts/webpublisher/README.md
@@ -3,4 +3,4 @@ Webpublisher
Plugins meant for processing of Webpublisher.
-Gets triggered by calling openpype.cli.remotepublish with appropriate arguments.
\ No newline at end of file
+Gets triggered by calling `openpype_console modules webpublisher publish` with appropriate arguments.
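+
+A minimal invocation could look like this (the argument names below are an
+assumption; check `openpype_console modules webpublisher publish --help` for
+the exact interface):
+
+```
+openpype_console modules webpublisher publish <batch_path> --project <project_name> --user <user_email>
+```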
diff --git a/openpype/hosts/webpublisher/addon.py b/openpype/hosts/webpublisher/addon.py
index eb7fced2e6..4438775b03 100644
--- a/openpype/hosts/webpublisher/addon.py
+++ b/openpype/hosts/webpublisher/addon.py
@@ -20,11 +20,10 @@ class WebpublisherAddon(OpenPypeModule, IHostAddon):
Close Python process at the end.
"""
- from openpype.pipeline.publish.lib import remote_publish
- from .lib import get_webpublish_conn, publish_and_log
+ from .lib import get_webpublish_conn, publish_and_log, publish_in_test
if is_test:
- remote_publish(log, close_plugin_name)
+ publish_in_test(log, close_plugin_name)
return
dbcon = get_webpublish_conn()
diff --git a/openpype/hosts/webpublisher/lib.py b/openpype/hosts/webpublisher/lib.py
index b207f85b46..ecd28d2432 100644
--- a/openpype/hosts/webpublisher/lib.py
+++ b/openpype/hosts/webpublisher/lib.py
@@ -12,7 +12,6 @@ from openpype.client.mongo import OpenPypeMongoConnection
from openpype.settings import get_project_settings
from openpype.lib import Logger
from openpype.lib.profiles_filtering import filter_profiles
-from openpype.pipeline.publish.lib import find_close_plugin
ERROR_STATUS = "error"
IN_PROGRESS_STATUS = "in_progress"
@@ -68,6 +67,46 @@ def get_batch_asset_task_info(ctx):
return asset, task_name, task_type
+def find_close_plugin(close_plugin_name, log):
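+    """Return discovered pyblish plugin class by 'close_plugin_name'."""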
+ if close_plugin_name:
+ plugins = pyblish.api.discover()
+ for plugin in plugins:
+ if plugin.__name__ == close_plugin_name:
+ return plugin
+
+ log.debug("Close plugin not found, app might not close.")
+
+
+def publish_in_test(log, close_plugin_name=None):
+ """Loops through all plugins, logs to console. Used for tests.
+
+ Args:
+ log (Logger)
+ close_plugin_name (Optional[str]): Name of plugin with responsibility
+ to close application.
+ """
+
+ # Error exit as soon as any error occurs.
+ error_format = "Failed {plugin.__name__}: {error} -- {error.traceback}"
+
+ close_plugin = find_close_plugin(close_plugin_name, log)
+
+ for result in pyblish.util.publish_iter():
+ for record in result["records"]:
+ # Why do we log again? pyblish logger is logging to stdout...
+ log.info("{}: {}".format(result["plugin"].label, record.msg))
+
+ if not result["error"]:
+ continue
+
+ # QUESTION We don't break on error?
+ error_message = error_format.format(**result)
+ log.error(error_message)
+ if close_plugin: # close host app explicitly after error
+ context = pyblish.api.Context()
+ close_plugin().process(context)
+
+
def get_webpublish_conn():
"""Get connection to OP 'webpublishes' collection."""
mongo_client = OpenPypeMongoConnection.get_mongo_client()
@@ -231,7 +270,7 @@ def find_variant_key(application_manager, host):
def get_task_data(batch_dir):
"""Return parsed data from first task manifest.json
- Used for `remotepublishfromapp` command where batch contains only
+ Used for `publishfromapp` command where batch contains only
single task with publishable workfile.
Returns:
diff --git a/openpype/hosts/webpublisher/publish_functions.py b/openpype/hosts/webpublisher/publish_functions.py
index 41aab68cce..f5dc88f54d 100644
--- a/openpype/hosts/webpublisher/publish_functions.py
+++ b/openpype/hosts/webpublisher/publish_functions.py
@@ -34,7 +34,7 @@ def cli_publish(project_name, batch_path, user_email, targets):
Args:
project_name (str): project to publish (only single context is
- expected per call of remotepublish
+ expected per call of 'publish')
batch_path (str): Path batch folder. Contains subfolders with
resources (workfile, another subfolder 'renders' etc.)
user_email (string): email address for webpublisher - used to
@@ -49,8 +49,8 @@ def cli_publish(project_name, batch_path, user_email, targets):
if not batch_path:
raise RuntimeError("No publish paths specified")
- log = Logger.get_logger("remotepublish")
- log.info("remotepublish command")
+ log = Logger.get_logger("Webpublish")
+ log.info("Webpublish command")
# Register target and host
webpublisher_host = WebpublisherHost()
@@ -107,7 +107,7 @@ def cli_publish_from_app(
Args:
project_name (str): project to publish (only single context is
- expected per call of remotepublish
+            expected per call of 'publish')
batch_path (str): Path batch folder. Contains subfolders with
resources (workfile, another subfolder 'renders' etc.)
host_name (str): 'photoshop'
@@ -117,9 +117,9 @@ def cli_publish_from_app(
(to choose validator for example)
"""
- log = Logger.get_logger("RemotePublishFromApp")
+ log = Logger.get_logger("PublishFromApp")
- log.info("remotepublishphotoshop command")
+ log.info("Webpublish photoshop command")
task_data = get_task_data(batch_path)
diff --git a/openpype/hosts/webpublisher/webserver_service/webpublish_routes.py b/openpype/hosts/webpublisher/webserver_service/webpublish_routes.py
index 9fe4b4d3c1..e56f245d27 100644
--- a/openpype/hosts/webpublisher/webserver_service/webpublish_routes.py
+++ b/openpype/hosts/webpublisher/webserver_service/webpublish_routes.py
@@ -216,7 +216,7 @@ class BatchPublishEndpoint(WebpublishApiEndpoint):
"extensions": [".tvpp"],
"command": "publish",
"arguments": {
- "targets": ["tvpaint_worker"]
+ "targets": ["tvpaint_worker", "webpublish"]
},
"add_to_queue": False
},
@@ -230,7 +230,7 @@ class BatchPublishEndpoint(WebpublishApiEndpoint):
# Make sure targets are set to None for cases that default
# would change
# - targets argument is not used in 'publishfromapp'
- "targets": ["remotepublish"]
+ "targets": ["automated", "webpublish"]
},
# does publish need to be handled by a queue, eg. only
# single process running concurrently?
@@ -247,7 +247,7 @@ class BatchPublishEndpoint(WebpublishApiEndpoint):
"project": content["project_name"],
"user": content["user"],
- "targets": ["filespublish"]
+ "targets": ["filespublish", "webpublish"]
}
add_to_queue = False
diff --git a/openpype/hosts/webpublisher/webserver_service/webserver.py b/openpype/hosts/webpublisher/webserver_service/webserver.py
index 093b53d9d3..d7c2ea01b9 100644
--- a/openpype/hosts/webpublisher/webserver_service/webserver.py
+++ b/openpype/hosts/webpublisher/webserver_service/webserver.py
@@ -45,7 +45,7 @@ def run_webserver(executable, upload_dir, host=None, port=None):
server_manager = webserver_module.create_new_server_manager(port, host)
webserver_url = server_manager.url
- # queue for remotepublishfromapp tasks
+ # queue for publishfromapp tasks
studio_task_queue = collections.deque()
resource = RestApiResource(server_manager,
diff --git a/openpype/lib/usdlib.py b/openpype/lib/usdlib.py
index cb96a0c1d0..c166feb3a6 100644
--- a/openpype/lib/usdlib.py
+++ b/openpype/lib/usdlib.py
@@ -334,6 +334,9 @@ def get_usd_master_path(asset, subset, representation):
"name": project_name,
"code": project_doc.get("data", {}).get("code")
},
+ "folder": {
+ "name": asset_doc["name"],
+ },
"asset": asset_doc["name"],
"subset": subset,
"representation": representation,
diff --git a/openpype/modules/deadline/plugins/publish/submit_houdini_render_deadline.py b/openpype/modules/deadline/plugins/publish/submit_houdini_render_deadline.py
index 8c814bec95..108c377078 100644
--- a/openpype/modules/deadline/plugins/publish/submit_houdini_render_deadline.py
+++ b/openpype/modules/deadline/plugins/publish/submit_houdini_render_deadline.py
@@ -106,7 +106,7 @@ class HoudiniSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline):
job_info.EnvironmentKeyValue[key] = value
# to recognize render jobs
- job_info.add_render_job_env_var(job_info)
+ job_info.add_render_job_env_var()
for i, filepath in enumerate(instance.data["files"]):
dirname = os.path.dirname(filepath)
diff --git a/openpype/modules/deadline/plugins/publish/submit_max_deadline.py b/openpype/modules/deadline/plugins/publish/submit_max_deadline.py
index 2c1db1c880..8e05582962 100644
--- a/openpype/modules/deadline/plugins/publish/submit_max_deadline.py
+++ b/openpype/modules/deadline/plugins/publish/submit_max_deadline.py
@@ -132,7 +132,7 @@ class MaxSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline,
job_info.EnvironmentKeyValue[key] = value
# to recognize render jobs
- job_info.add_render_job_env_var(job_info)
+ job_info.add_render_job_env_var()
job_info.EnvironmentKeyValue["OPENPYPE_LOG_NO_COLORS"] = "1"
# Add list of expected files to job
diff --git a/openpype/modules/ftrack/plugins/publish/collect_username.py b/openpype/modules/ftrack/plugins/publish/collect_username.py
index 798f3960a8..0c7c0a57be 100644
--- a/openpype/modules/ftrack/plugins/publish/collect_username.py
+++ b/openpype/modules/ftrack/plugins/publish/collect_username.py
@@ -33,7 +33,7 @@ class CollectUsernameForWebpublish(pyblish.api.ContextPlugin):
order = pyblish.api.CollectorOrder + 0.0015
label = "Collect ftrack username"
hosts = ["webpublisher", "photoshop"]
- targets = ["remotepublish", "filespublish", "tvpaint_worker"]
+ targets = ["webpublish"]
def process(self, context):
self.log.info("{}".format(self.__class__.__name__))
diff --git a/openpype/modules/sync_server/sync_server_module.py b/openpype/modules/sync_server/sync_server_module.py
index 67856f0d8e..8a92697920 100644
--- a/openpype/modules/sync_server/sync_server_module.py
+++ b/openpype/modules/sync_server/sync_server_module.py
@@ -34,7 +34,12 @@ from openpype.settings.constants import (
from .providers.local_drive import LocalDriveHandler
from .providers import lib
-from .utils import time_function, SyncStatus, SiteAlreadyPresentError
+from .utils import (
+ time_function,
+ SyncStatus,
+ SiteAlreadyPresentError,
+ SYNC_SERVER_ROOT,
+)
log = Logger.get_logger("SyncServer")
@@ -138,9 +143,23 @@ class SyncServerModule(OpenPypeModule, ITrayModule, IPluginPaths):
def get_plugin_paths(self):
"""Deadline plugin paths."""
- current_dir = os.path.dirname(os.path.abspath(__file__))
return {
- "load": [os.path.join(current_dir, "plugins", "load")]
+ "load": [os.path.join(SYNC_SERVER_ROOT, "plugins", "load")]
+ }
+
+ def get_site_icons(self):
+ """Icons for sites.
+
+ Returns:
+            dict[str, str]: Icon path by provider name.
+ """
+
+ resource_path = os.path.join(
+ SYNC_SERVER_ROOT, "providers", "resources"
+ )
+ return {
+ provider: "{}/{}.png".format(resource_path, provider)
+ for provider in ["studio", "local_drive", "gdrive"]
}
""" Start of Public API """
@@ -904,10 +923,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule, IPluginPaths):
(str): full absolut path to directory with hooks for the module
"""
- return os.path.join(
- os.path.dirname(os.path.abspath(__file__)),
- "launch_hooks"
- )
+ return os.path.join(SYNC_SERVER_ROOT, "launch_hooks")
# Needs to be refactored after Settings are updated
# # Methods for Settings to get appriate values to fill forms
diff --git a/openpype/modules/sync_server/utils.py b/openpype/modules/sync_server/utils.py
index 4caa01e9d7..b2f855539f 100644
--- a/openpype/modules/sync_server/utils.py
+++ b/openpype/modules/sync_server/utils.py
@@ -1,9 +1,12 @@
+import os
import time
from openpype.lib import Logger
log = Logger.get_logger("SyncServer")
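+# Absolute path to the sync_server module root; used to build plugin,
+# resource and hook paths.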
+SYNC_SERVER_ROOT = os.path.dirname(os.path.abspath(__file__))
+
class ResumableError(Exception):
"""Error which could be temporary, skip current loop, try next time"""
diff --git a/openpype/pipeline/colorspace.py b/openpype/pipeline/colorspace.py
index caa0f6dcd7..731132911a 100644
--- a/openpype/pipeline/colorspace.py
+++ b/openpype/pipeline/colorspace.py
@@ -329,7 +329,8 @@ def get_imageio_config(
host_name,
project_settings=None,
anatomy_data=None,
- anatomy=None
+ anatomy=None,
+ env=None
):
"""Returns config data from settings
@@ -342,6 +343,7 @@ def get_imageio_config(
project_settings (Optional[dict]): Project settings.
anatomy_data (Optional[dict]): anatomy formatting data.
anatomy (Optional[Anatomy]): Anatomy object.
+ env (Optional[dict]): Environment variables.
Returns:
dict: config path data or empty dict
@@ -414,13 +416,13 @@ def get_imageio_config(
if override_global_config:
config_data = _get_config_data(
- host_ocio_config["filepath"], formatting_data
+ host_ocio_config["filepath"], formatting_data, env
)
else:
# get config path from global
config_global = imageio_global["ocio_config"]
config_data = _get_config_data(
- config_global["filepath"], formatting_data
+ config_global["filepath"], formatting_data, env
)
if not config_data:
@@ -432,7 +434,7 @@ def get_imageio_config(
return config_data
-def _get_config_data(path_list, anatomy_data):
+def _get_config_data(path_list, anatomy_data, env=None):
"""Return first existing path in path list.
If template is used in path inputs,
@@ -442,14 +444,17 @@ def _get_config_data(path_list, anatomy_data):
Args:
path_list (list[str]): list of abs paths
anatomy_data (dict): formatting data
+ env (Optional[dict]): Environment variables.
Returns:
dict: config data
"""
formatting_data = deepcopy(anatomy_data)
+ environment_vars = env or dict(**os.environ)
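+    # fall back to the current process environment when no explicit
+    # 'env' mapping is passed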
+
# format the path for potential env vars
- formatting_data.update(dict(**os.environ))
+ formatting_data.update(environment_vars)
# first try host config paths
for path_ in path_list:
diff --git a/openpype/pipeline/publish/lib.py b/openpype/pipeline/publish/lib.py
index c14b6d2445..ada12800a9 100644
--- a/openpype/pipeline/publish/lib.py
+++ b/openpype/pipeline/publish/lib.py
@@ -537,44 +537,24 @@ def filter_pyblish_plugins(plugins):
plugins.remove(plugin)
-def find_close_plugin(close_plugin_name, log):
- if close_plugin_name:
- plugins = pyblish.api.discover()
- for plugin in plugins:
- if plugin.__name__ == close_plugin_name:
- return plugin
-
- log.debug("Close plugin not found, app might not close.")
-
-
-def remote_publish(log, close_plugin_name=None, raise_error=False):
+def remote_publish(log):
"""Loops through all plugins, logs to console. Used for tests.
Args:
log (Logger)
- close_plugin_name (str): name of plugin with responsibility to
- close host app
"""
- # Error exit as soon as any error occurs.
- error_format = "Failed {plugin.__name__}: {error} -- {error.traceback}"
- close_plugin = find_close_plugin(close_plugin_name, log)
+ # Error exit as soon as any error occurs.
+ error_format = "Failed {plugin.__name__}: {error}\n{error.traceback}"
for result in pyblish.util.publish_iter():
- for record in result["records"]:
- log.info("{}: {}".format(
- result["plugin"].label, record.msg))
+ if not result["error"]:
+ continue
- if result["error"]:
- error_message = error_format.format(**result)
- log.error(error_message)
- if close_plugin: # close host app explicitly after error
- context = pyblish.api.Context()
- close_plugin().process(context)
- if raise_error:
- # Fatal Error is because of Deadline
- error_message = "Fatal Error: " + error_format.format(**result)
- raise RuntimeError(error_message)
+ error_message = error_format.format(**result)
+ log.error(error_message)
+        # 'Fatal Error: ' prefix is used so Deadline recognizes the failure
+ raise RuntimeError("Fatal Error: {}".format(error_message))
def get_errored_instances_from_context(context, plugin=None):
diff --git a/openpype/pipeline/template_data.py b/openpype/pipeline/template_data.py
index fd21930ecc..a48f0721b6 100644
--- a/openpype/pipeline/template_data.py
+++ b/openpype/pipeline/template_data.py
@@ -94,6 +94,9 @@ def get_asset_template_data(asset_doc, project_name):
return {
"asset": asset_doc["name"],
+ "folder": {
+ "name": asset_doc["name"]
+ },
"hierarchy": hierarchy,
"parent": parent_name
}
diff --git a/openpype/plugins/publish/extract_review_slate.py b/openpype/plugins/publish/extract_review_slate.py
index 8f31f10c42..886384fee6 100644
--- a/openpype/plugins/publish/extract_review_slate.py
+++ b/openpype/plugins/publish/extract_review_slate.py
@@ -85,8 +85,11 @@ class ExtractReviewSlate(publish.Extractor):
input_width,
input_height,
input_timecode,
- input_frame_rate
+ input_frame_rate,
+ input_pixel_aspect
) = self._get_video_metadata(streams)
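+        # prefer the pixel aspect probed from the input media when available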
+ if input_pixel_aspect:
+ pixel_aspect = input_pixel_aspect
# Raise exception of any stream didn't define input resolution
if input_width is None:
@@ -419,6 +422,7 @@ class ExtractReviewSlate(publish.Extractor):
input_width = None
input_height = None
input_frame_rate = None
+ input_pixel_aspect = None
for stream in streams:
if stream.get("codec_type") != "video":
continue
@@ -436,6 +440,16 @@ class ExtractReviewSlate(publish.Extractor):
input_width = width
input_height = height
+ input_pixel_aspect = stream.get("sample_aspect_ratio")
+ if input_pixel_aspect is not None:
+ try:
+ input_pixel_aspect = float(
+ eval(str(input_pixel_aspect).replace(':', '/')))
+ except Exception:
+ self.log.debug(
+                            "__Converting pixel aspect to float failed: "
+                            "{}".format(input_pixel_aspect))
+
tags = stream.get("tags") or {}
input_timecode = tags.get("timecode") or ""
@@ -446,7 +460,8 @@ class ExtractReviewSlate(publish.Extractor):
input_width,
input_height,
input_timecode,
- input_frame_rate
+ input_frame_rate,
+ input_pixel_aspect
)
def _get_audio_metadata(self, streams):
diff --git a/openpype/plugins/publish/integrate.py b/openpype/plugins/publish/integrate.py
index ffb9acf4a7..be07cffe72 100644
--- a/openpype/plugins/publish/integrate.py
+++ b/openpype/plugins/publish/integrate.py
@@ -2,9 +2,10 @@ import os
import logging
import sys
import copy
+import datetime
+
import clique
import six
-
from bson.objectid import ObjectId
import pyblish.api
@@ -320,10 +321,16 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
# Get the accessible sites for Site Sync
modules_by_name = instance.context.data["openPypeModules"]
- sync_server_module = modules_by_name["sync_server"]
- sites = sync_server_module.compute_resource_sync_sites(
- project_name=instance.data["projectEntity"]["name"]
- )
+ sync_server_module = modules_by_name.get("sync_server")
+ if sync_server_module is None:
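+            # Site Sync is not available, mark files as present only on the
+            # default 'studio' site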
+ sites = [{
+ "name": "studio",
+ "created_dt": datetime.datetime.now()
+ }]
+ else:
+ sites = sync_server_module.compute_resource_sync_sites(
+ project_name=instance.data["projectEntity"]["name"]
+ )
self.log.debug("Sync Server Sites: {}".format(sites))
# Compute the resource file infos once (files belonging to the
diff --git a/openpype/pype_commands.py b/openpype/pype_commands.py
index 4cb4b97707..7f1c3b01e2 100644
--- a/openpype/pype_commands.py
+++ b/openpype/pype_commands.py
@@ -165,74 +165,6 @@ class PypeCommands:
log.info("Publish finished.")
- @staticmethod
- def remotepublishfromapp(project_name, batch_path, host_name,
- user_email, targets=None):
- """Opens installed variant of 'host' and run remote publish there.
-
- Eventually should be yanked out to Webpublisher cli.
-
- Currently implemented and tested for Photoshop where customer
- wants to process uploaded .psd file and publish collected layers
- from there. Triggered by Webpublisher.
-
- Checks if no other batches are running (status =='in_progress). If
- so, it sleeps for SLEEP (this is separate process),
- waits for WAIT_FOR seconds altogether.
-
- Requires installed host application on the machine.
-
- Runs publish process as user would, in automatic fashion.
-
- Args:
- project_name (str): project to publish (only single context is
- expected per call of remotepublish
- batch_path (str): Path batch folder. Contains subfolders with
- resources (workfile, another subfolder 'renders' etc.)
- host_name (str): 'photoshop'
- user_email (string): email address for webpublisher - used to
- find Ftrack user with same email
- targets (list): Pyblish targets
- (to choose validator for example)
- """
-
- from openpype.hosts.webpublisher.publish_functions import (
- cli_publish_from_app
- )
-
- cli_publish_from_app(
- project_name, batch_path, host_name, user_email, targets
- )
-
- @staticmethod
- def remotepublish(project, batch_path, user_email, targets=None):
- """Start headless publishing.
-
- Used to publish rendered assets, workfiles etc via Webpublisher.
- Eventually should be yanked out to Webpublisher cli.
-
- Publish use json from passed paths argument.
-
- Args:
- project (str): project to publish (only single context is expected
- per call of remotepublish
- batch_path (str): Path batch folder. Contains subfolders with
- resources (workfile, another subfolder 'renders' etc.)
- user_email (string): email address for webpublisher - used to
- find Ftrack user with same email
- targets (list): Pyblish targets
- (to choose validator for example)
-
- Raises:
- RuntimeError: When there is no path to process.
- """
-
- from openpype.hosts.webpublisher.publish_functions import (
- cli_publish
- )
-
- cli_publish(project, batch_path, user_email, targets)
-
@staticmethod
def extractenvironments(output_json_path, project, asset, task, app,
env_group):
@@ -336,34 +268,6 @@ class PypeCommands:
import pytest
pytest.main(args)
- def syncserver(self, active_site):
- """Start running sync_server in background.
-
- This functionality is available in directly in module cli commands.
- `~/openpype_console module sync_server syncservice`
- """
-
- os.environ["OPENPYPE_LOCAL_ID"] = active_site
-
- def signal_handler(sig, frame):
- print("You pressed Ctrl+C. Process ended.")
- sync_server_module.server_exit()
- sys.exit(0)
-
- signal.signal(signal.SIGINT, signal_handler)
- signal.signal(signal.SIGTERM, signal_handler)
-
- from openpype.modules import ModulesManager
-
- manager = ModulesManager()
- sync_server_module = manager.modules_by_name["sync_server"]
-
- sync_server_module.server_init()
- sync_server_module.server_start()
-
- while True:
- time.sleep(1.0)
-
def repack_version(self, directory):
"""Repacking OpenPype version."""
from openpype.tools.repack_version import VersionRepacker
diff --git a/openpype/scripts/remote_publish.py b/openpype/scripts/remote_publish.py
index 37df35e36c..d362f7abdc 100644
--- a/openpype/scripts/remote_publish.py
+++ b/openpype/scripts/remote_publish.py
@@ -9,4 +9,4 @@ except ImportError as exc:
if __name__ == "__main__":
# Perform remote publish with thorough error checking
log = Logger.get_logger(__name__)
- remote_publish(log, raise_error=True)
+ remote_publish(log)
diff --git a/openpype/settings/ayon_settings.py b/openpype/settings/ayon_settings.py
index 904751e653..78eed359a3 100644
--- a/openpype/settings/ayon_settings.py
+++ b/openpype/settings/ayon_settings.py
@@ -599,7 +599,6 @@ def _convert_maya_project_settings(ayon_settings, output):
reference_loader = ayon_maya_load["reference_loader"]
reference_loader["namespace"] = (
reference_loader["namespace"]
- .replace("{folder[name]}", "{asset_name}")
.replace("{product[name]}", "{subset}")
)
@@ -645,6 +644,9 @@ def _convert_nuke_knobs(knobs):
elif knob_type == "vector_3d":
value = [value["x"], value["y"], value["z"]]
+ elif knob_type == "box":
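+            # Nuke box knobs store (x, y, r, t) == (left, bottom, right, top)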
+ value = [value["x"], value["y"], value["r"], value["t"]]
+
new_knob[value_key] = value
return new_knobs
diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_nuke.json b/openpype/settings/entities/schemas/projects_schema/schema_project_nuke.json
index 26c64e6219..6b516ddf4a 100644
--- a/openpype/settings/entities/schemas/projects_schema/schema_project_nuke.json
+++ b/openpype/settings/entities/schemas/projects_schema/schema_project_nuke.json
@@ -284,6 +284,10 @@
"type": "schema_template",
"name": "template_workfile_options"
},
+ {
+ "type": "label",
+        "label": "^ Settings for Workfile Builder are deprecated and will be removed soon. Please use Template Workfile Build Settings instead."
+ },
{
"type": "schema",
"name": "schema_templated_workfile_build"
diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/template_nuke_knob_inputs.json b/openpype/settings/entities/schemas/projects_schema/schemas/template_nuke_knob_inputs.json
index c9dee8681a..51c78ce8f0 100644
--- a/openpype/settings/entities/schemas/projects_schema/schemas/template_nuke_knob_inputs.json
+++ b/openpype/settings/entities/schemas/projects_schema/schemas/template_nuke_knob_inputs.json
@@ -213,7 +213,7 @@
},
{
"type": "number",
- "key": "y",
+ "key": "z",
"default": 1,
"decimal": 4,
"maximum": 99999999
@@ -238,29 +238,75 @@
"object_types": [
{
"type": "number",
- "key": "x",
+ "key": "r",
"default": 1,
"decimal": 4,
"maximum": 99999999
},
{
"type": "number",
- "key": "x",
+ "key": "g",
"default": 1,
"decimal": 4,
"maximum": 99999999
},
+ {
+ "type": "number",
+ "key": "b",
+ "default": 1,
+ "decimal": 4,
+ "maximum": 99999999
+ },
+ {
+ "type": "number",
+ "key": "a",
+ "default": 1,
+ "decimal": 4,
+ "maximum": 99999999
+ }
+ ]
+ }
+ ]
+ },
+ {
+ "key": "box",
+ "label": "Box",
+ "children": [
+ {
+ "type": "text",
+ "key": "name",
+ "label": "Name"
+ },
+ {
+ "type": "list-strict",
+ "key": "value",
+ "label": "Value",
+ "object_types": [
+ {
+ "type": "number",
+ "key": "x",
+ "default": 0,
+ "decimal": 4,
+ "maximum": 99999999
+ },
{
"type": "number",
"key": "y",
- "default": 1,
+ "default": 0,
"decimal": 4,
"maximum": 99999999
},
{
"type": "number",
- "key": "y",
- "default": 1,
+ "key": "r",
+ "default": 1920,
+ "decimal": 4,
+ "maximum": 99999999
+ },
+ {
+ "type": "number",
+ "key": "t",
+ "default": 1080,
"decimal": 4,
"maximum": 99999999
}
diff --git a/openpype/tools/libraryloader/app.py b/openpype/tools/libraryloader/app.py
index bd10595333..e68e9a5931 100644
--- a/openpype/tools/libraryloader/app.py
+++ b/openpype/tools/libraryloader/app.py
@@ -114,9 +114,10 @@ class LibraryLoaderWindow(QtWidgets.QDialog):
manager = ModulesManager()
sync_server = manager.modules_by_name.get("sync_server")
- sync_server_enabled = False
- if sync_server is not None:
- sync_server_enabled = sync_server.enabled
+ sync_server_enabled = (
+ sync_server is not None
+ and sync_server.enabled
+ )
repres_widget = None
if sync_server_enabled:
diff --git a/openpype/tools/loader/model.py b/openpype/tools/loader/model.py
index 5115f39a69..69b7e593b1 100644
--- a/openpype/tools/loader/model.py
+++ b/openpype/tools/loader/model.py
@@ -64,6 +64,7 @@ class BaseRepresentationModel(object):
"""Sets/Resets sync server vars after every change (refresh.)"""
repre_icons = {}
sync_server = None
+ sync_server_enabled = False
active_site = active_provider = None
remote_site = remote_provider = None
@@ -75,6 +76,7 @@ class BaseRepresentationModel(object):
if not project_name:
self.repre_icons = repre_icons
self.sync_server = sync_server
+ self.sync_server_enabled = sync_server_enabled
self.active_site = active_site
self.active_provider = active_provider
self.remote_site = remote_site
@@ -100,8 +102,13 @@ class BaseRepresentationModel(object):
self._modules_manager = ModulesManager()
self._last_manager_cache = now_time
- sync_server = self._modules_manager.modules_by_name["sync_server"]
- if sync_server.is_project_enabled(project_name, single=True):
+ sync_server = self._modules_manager.modules_by_name.get("sync_server")
+ if (
+ sync_server is not None
+ and sync_server.enabled
+ and sync_server.is_project_enabled(project_name, single=True)
+ ):
+ sync_server_enabled = True
active_site = sync_server.get_active_site(project_name)
active_provider = sync_server.get_provider_for_site(
project_name, active_site)
@@ -118,6 +125,7 @@ class BaseRepresentationModel(object):
self.repre_icons = repre_icons
self.sync_server = sync_server
+ self.sync_server_enabled = sync_server_enabled
self.active_site = active_site
self.active_provider = active_provider
self.remote_site = remote_site
@@ -213,6 +221,7 @@ class SubsetsModel(BaseRepresentationModel, TreeModel):
self.repre_icons = {}
self.sync_server = None
+ self.sync_server_enabled = False
self.active_site = self.active_provider = None
self.columns_index = dict(
@@ -282,7 +291,7 @@ class SubsetsModel(BaseRepresentationModel, TreeModel):
)
# update availability on active site when version changes
- if self.sync_server.enabled and version_doc:
+ if self.sync_server_enabled and version_doc:
repres_info = list(
self.sync_server.get_repre_info_for_versions(
project_name,
@@ -507,7 +516,7 @@ class SubsetsModel(BaseRepresentationModel, TreeModel):
return
repre_info_by_version_id = {}
- if self.sync_server.enabled:
+ if self.sync_server_enabled:
versions_by_id = {}
for _subset_id, doc in last_versions_by_subset_id.items():
versions_by_id[doc["_id"]] = doc
@@ -1033,12 +1042,16 @@ class RepresentationModel(TreeModel, BaseRepresentationModel):
self._version_ids = []
manager = ModulesManager()
- sync_server = active_site = remote_site = None
+ active_site = remote_site = None
active_provider = remote_provider = None
+ sync_server = manager.modules_by_name.get("sync_server")
+ sync_server_enabled = (
+ sync_server is not None
+ and sync_server.enabled
+ )
project_name = dbcon.current_project()
- if project_name:
- sync_server = manager.modules_by_name["sync_server"]
+ if sync_server_enabled and project_name:
active_site = sync_server.get_active_site(project_name)
remote_site = sync_server.get_remote_site(project_name)
@@ -1057,6 +1070,7 @@ class RepresentationModel(TreeModel, BaseRepresentationModel):
remote_provider = 'studio'
self.sync_server = sync_server
+ self.sync_server_enabled = sync_server_enabled
self.active_site = active_site
self.active_provider = active_provider
self.remote_site = remote_site
@@ -1174,9 +1188,15 @@ class RepresentationModel(TreeModel, BaseRepresentationModel):
repre_groups_items[doc["name"]] = 0
group = group_item
- progress = self.sync_server.get_progress_for_repre(
- doc,
- self.active_site, self.remote_site)
+ progress = {
+ self.active_site: 0,
+ self.remote_site: 0,
+ }
+ if self.sync_server_enabled:
+ progress = self.sync_server.get_progress_for_repre(
+ doc,
+ self.active_site,
+ self.remote_site)
active_site_icon = self._icons.get(self.active_provider)
remote_site_icon = self._icons.get(self.remote_provider)
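A condensed, illustrative sketch of the guarded lookup pattern these loader-model hunks introduce (the helper name and return shape are made up for illustration; modules_by_name.get, .enabled and is_project_enabled come from the hunks above): the sync server is only queried when the module exists, is enabled, and is enabled for the project.

    def resolve_sync_server(modules_manager, project_name):
        # Returns the module (or None) plus an explicit "enabled" flag, so UI
        # code can branch on the flag instead of touching the module directly.
        sync_server = modules_manager.modules_by_name.get("sync_server")
        enabled = (
            sync_server is not None
            and sync_server.enabled
            and sync_server.is_project_enabled(project_name, single=True)
        )
        return sync_server, enabled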
diff --git a/openpype/tools/sceneinventory/lib.py b/openpype/tools/sceneinventory/lib.py
index 4b1860342a..0ac7622d65 100644
--- a/openpype/tools/sceneinventory/lib.py
+++ b/openpype/tools/sceneinventory/lib.py
@@ -1,9 +1,3 @@
-import os
-from openpype_modules import sync_server
-
-from qtpy import QtGui
-
-
def walk_hierarchy(node):
"""Recursively yield group node."""
for child in node.children():
@@ -12,19 +6,3 @@ def walk_hierarchy(node):
for _child in walk_hierarchy(child):
yield _child
-
-
-def get_site_icons():
- resource_path = os.path.join(
- os.path.dirname(sync_server.sync_server_module.__file__),
- "providers",
- "resources"
- )
- icons = {}
- # TODO get from sync module
- for provider in ["studio", "local_drive", "gdrive"]:
- pix_url = "{}/{}.png".format(resource_path, provider)
- icons[provider] = QtGui.QIcon(pix_url)
-
- return icons
-
diff --git a/openpype/tools/sceneinventory/model.py b/openpype/tools/sceneinventory/model.py
index 1cfcd0d8c0..64c439712c 100644
--- a/openpype/tools/sceneinventory/model.py
+++ b/openpype/tools/sceneinventory/model.py
@@ -24,10 +24,7 @@ from openpype.style import get_default_entity_icon_color
from openpype.tools.utils.models import TreeModel, Item
from openpype.modules import ModulesManager
-from .lib import (
- get_site_icons,
- walk_hierarchy,
-)
+from .lib import walk_hierarchy
class InventoryModel(TreeModel):
@@ -53,8 +50,10 @@ class InventoryModel(TreeModel):
self._default_icon_color = get_default_entity_icon_color()
manager = ModulesManager()
- sync_server = manager.modules_by_name["sync_server"]
- self.sync_enabled = sync_server.enabled
+ sync_server = manager.modules_by_name.get("sync_server")
+ self.sync_enabled = (
+ sync_server is not None and sync_server.enabled
+ )
self._site_icons = {}
self.active_site = self.remote_site = None
self.active_provider = self.remote_provider = None
@@ -84,7 +83,10 @@ class InventoryModel(TreeModel):
self.active_provider = active_provider
self.remote_site = remote_site
self.remote_provider = remote_provider
- self._site_icons = get_site_icons()
+ self._site_icons = {
+ provider: QtGui.QIcon(icon_path)
+ for provider, icon_path in self.get_site_icons().items()
+ }
if "active_site" not in self.Columns:
self.Columns.append("active_site")
if "remote_site" not in self.Columns:
diff --git a/openpype/tools/sceneinventory/view.py b/openpype/tools/sceneinventory/view.py
index d22b2bdd0f..af463e4867 100644
--- a/openpype/tools/sceneinventory/view.py
+++ b/openpype/tools/sceneinventory/view.py
@@ -54,8 +54,11 @@ class SceneInventoryView(QtWidgets.QTreeView):
self._selected = None
manager = ModulesManager()
- self.sync_server = manager.modules_by_name["sync_server"]
- self.sync_enabled = self.sync_server.enabled
+ sync_server = manager.modules_by_name.get("sync_server")
+ sync_enabled = sync_server is not None and sync_server.enabled
+
+ self.sync_server = sync_server
+ self.sync_enabled = sync_enabled
def _set_hierarchy_view(self, enabled):
if enabled == self._hierarchy_view:
diff --git a/openpype/tools/settings/local_settings/projects_widget.py b/openpype/tools/settings/local_settings/projects_widget.py
index 4a4148d7cd..68e144f87b 100644
--- a/openpype/tools/settings/local_settings/projects_widget.py
+++ b/openpype/tools/settings/local_settings/projects_widget.py
@@ -267,19 +267,20 @@ class SitesWidget(QtWidgets.QWidget):
self.input_objects = {}
def _get_sites_inputs(self):
- sync_server_module = (
- self.modules_manager.modules_by_name["sync_server"]
- )
+ output = []
+ if self._project_name is None:
+ return output
+
+ sync_server_module = self.modules_manager.modules_by_name.get(
+ "sync_server")
+ if sync_server_module is None or not sync_server_module.enabled:
+ return output
site_configs = sync_server_module.get_all_site_configs(
self._project_name, local_editable_only=True)
- roots_entity = (
- self.project_settings[PROJECT_ANATOMY_KEY][LOCAL_ROOTS_KEY]
- )
site_names = [self.active_site_widget.current_text(),
self.remote_site_widget.current_text()]
- output = []
for site_name in site_names:
if not site_name:
continue
@@ -350,9 +351,6 @@ class SitesWidget(QtWidgets.QWidget):
def refresh(self):
self._clear_widgets()
- if self._project_name is None:
- return
-
# Site label
for site_name, site_inputs in self._get_sites_inputs():
site_widget = QtWidgets.QWidget(self.content_widget)
diff --git a/openpype/tools/utils/lib.py b/openpype/tools/utils/lib.py
index 82ca23c848..2df46c1eae 100644
--- a/openpype/tools/utils/lib.py
+++ b/openpype/tools/utils/lib.py
@@ -760,20 +760,23 @@ def create_qthread(func, *args, **kwargs):
def get_repre_icons():
"""Returns a dict {'provider_name': QIcon}"""
+ icons = {}
try:
from openpype_modules import sync_server
except Exception:
# Backwards compatibility
- from openpype.modules import sync_server
+ try:
+ from openpype.modules import sync_server
+ except Exception:
+ return icons
resource_path = os.path.join(
os.path.dirname(sync_server.sync_server_module.__file__),
"providers", "resources"
)
- icons = {}
if not os.path.exists(resource_path):
print("No icons for Site Sync found")
- return {}
+ return icons
for file_name in os.listdir(resource_path):
if file_name and not file_name.endswith("png"):
diff --git a/openpype/version.py b/openpype/version.py
index bbe452aeba..393074c773 100644
--- a/openpype/version.py
+++ b/openpype/version.py
@@ -1,3 +1,3 @@
# -*- coding: utf-8 -*-
"""Package declaring Pype version."""
-__version__ = "3.16.3-nightly.3"
+__version__ = "3.16.3-nightly.5"
diff --git a/server_addon/applications/server/__init__.py b/server_addon/applications/server/__init__.py
index fdec05006b..e782e8a591 100644
--- a/server_addon/applications/server/__init__.py
+++ b/server_addon/applications/server/__init__.py
@@ -2,12 +2,68 @@ import os
import json
import copy
-from ayon_server.addons import BaseServerAddon
+from ayon_server.addons import BaseServerAddon, AddonLibrary
from ayon_server.lib.postgres import Postgres
from .version import __version__
from .settings import ApplicationsAddonSettings, DEFAULT_VALUES
+try:
+ import semver
+except ImportError:
+ semver = None
+
+
+def sort_versions(addon_versions, reverse=False):
+ if semver is None:
+ for addon_version in sorted(addon_versions, reverse=reverse):
+ yield addon_version
+ return
+
+ version_objs = []
+ invalid_versions = []
+ for addon_version in addon_versions:
+ try:
+ version_objs.append(
+ (addon_version, semver.VersionInfo.parse(addon_version))
+ )
+ except ValueError:
+ invalid_versions.append(addon_version)
+
+ valid_versions = [
+ addon_version
+ for addon_version, _ in sorted(version_objs, key=lambda x: x[1])
+ ]
+ sorted_versions = list(sorted(invalid_versions)) + valid_versions
+ if reverse:
+ sorted_versions = reversed(sorted_versions)
+ for addon_version in sorted_versions:
+ yield addon_version
+
+
+def merge_groups(output, new_groups):
+ groups_by_name = {
+ o_group["name"]: o_group
+ for o_group in output
+ }
+ extend_groups = []
+ for new_group in new_groups:
+ group_name = new_group["name"]
+ if group_name not in groups_by_name:
+ extend_groups.append(new_group)
+ continue
+ existing_group = groups_by_name[group_name]
+ existing_variants = existing_group["variants"]
+ existing_variants_by_name = {
+ variant["name"]: variant
+ for variant in existing_variants
+ }
+ for new_variant in new_group["variants"]:
+ if new_variant["name"] not in existing_variants_by_name:
+ existing_variants.append(new_variant)
+
+ output.extend(extend_groups)
+
def get_enum_items_from_groups(groups):
label_by_name = {}
@@ -22,12 +78,11 @@ def get_enum_items_from_groups(groups):
full_name = f"{group_name}/{variant_name}"
full_label = f"{group_label} {variant_label}"
label_by_name[full_name] = full_label
- enum_items = []
- for full_name in sorted(label_by_name):
- enum_items.append(
- {"value": full_name, "label": label_by_name[full_name]}
- )
- return enum_items
+
+ return [
+ {"value": full_name, "label": label_by_name[full_name]}
+ for full_name in sorted(label_by_name)
+ ]
class ApplicationsAddon(BaseServerAddon):
@@ -48,6 +103,19 @@ class ApplicationsAddon(BaseServerAddon):
return self.get_settings_model()(**default_values)
+ async def pre_setup(self):
+ """Make sure older version of addon use the new way of attributes."""
+
+ instance = AddonLibrary.getinstance()
+ app_defs = instance.data.get(self.name)
+ old_addon = app_defs.versions.get("0.1.0")
+ if old_addon is not None:
+ # Override 'create_applications_attribute' for older versions
+ # - avoid infinite server restart loop
+ old_addon.create_applications_attribute = (
+ self.create_applications_attribute
+ )
+
async def setup(self):
need_restart = await self.create_applications_attribute()
if need_restart:
@@ -60,21 +128,32 @@ class ApplicationsAddon(BaseServerAddon):
bool: 'True' if an attribute was created or updated.
"""
- settings_model = await self.get_studio_settings()
- studio_settings = settings_model.dict()
- applications = studio_settings["applications"]
- _applications = applications.pop("additional_apps")
- for name, value in applications.items():
- value["name"] = name
- _applications.append(value)
+ instance = AddonLibrary.getinstance()
+ app_defs = instance.data.get(self.name)
+ all_applications = []
+ all_tools = []
+ for addon_version in sort_versions(
+ app_defs.versions.keys(), reverse=True
+ ):
+ addon = app_defs.versions[addon_version]
+ for variant in ("production", "staging"):
+ settings_model = await addon.get_studio_settings(variant)
+ studio_settings = settings_model.dict()
+ application_settings = studio_settings["applications"]
+ app_groups = application_settings.pop("additional_apps")
+ for group_name, value in application_settings.items():
+ value["name"] = group_name
+ app_groups.append(value)
+ merge_groups(all_applications, app_groups)
+ merge_groups(all_tools, studio_settings["tool_groups"])
query = "SELECT name, position, scope, data from public.attributes"
apps_attrib_name = "applications"
tools_attrib_name = "tools"
- apps_enum = get_enum_items_from_groups(_applications)
- tools_enum = get_enum_items_from_groups(studio_settings["tool_groups"])
+ apps_enum = get_enum_items_from_groups(all_applications)
+ tools_enum = get_enum_items_from_groups(all_tools)
apps_attribute_data = {
"type": "list_of_strings",
"title": "Applications",
diff --git a/server_addon/applications/server/version.py b/server_addon/applications/server/version.py
index 3dc1f76bc6..485f44ac21 100644
--- a/server_addon/applications/server/version.py
+++ b/server_addon/applications/server/version.py
@@ -1 +1 @@
-__version__ = "0.1.0"
+__version__ = "0.1.1"
diff --git a/server_addon/create_ayon_addons.py b/server_addon/create_ayon_addons.py
index 8be9baa983..61dbd5c8d9 100644
--- a/server_addon/create_ayon_addons.py
+++ b/server_addon/create_ayon_addons.py
@@ -203,8 +203,7 @@ def create_openpype_package(
ignored_modules = [
"ftrack",
"shotgrid",
- # Sync server is still expected at multiple places
- # "sync_server",
+ "sync_server",
"example_addons",
"slack"
]
diff --git a/server_addon/nuke/server/settings/common.py b/server_addon/nuke/server/settings/common.py
index f1bb46ff90..700f01f3dc 100644
--- a/server_addon/nuke/server/settings/common.py
+++ b/server_addon/nuke/server/settings/common.py
@@ -39,6 +39,15 @@ class Vector3d(BaseSettingsModel):
z: float = Field(1.0, title="Z")
+class Box(BaseSettingsModel):
+ _layout = "compact"
+
+ x: float = Field(1.0, title="X")
+ y: float = Field(1.0, title="Y")
+ r: float = Field(1.0, title="R")
+ t: float = Field(1.0, title="T")
+
+
def formatable_knob_type_enum():
return [
{"value": "text", "label": "Text"},
@@ -74,6 +83,7 @@ knob_types_enum = [
{"value": "vector_2d", "label": "2D vector"},
{"value": "vector_3d", "label": "3D vector"},
{"value": "color", "label": "Color"},
+ {"value": "box", "label": "Box"},
{"value": "expression", "label": "Expression"}
]
@@ -118,6 +128,10 @@ class KnobModel(BaseSettingsModel):
(0.0, 0.0, 1.0, 1.0),
title="RGBA Float"
)
+ box: Box = Field(
+ default_factory=Box,
+ title="Value"
+ )
formatable: Formatable = Field(
default_factory=Formatable,
title="Formatable"
diff --git a/server_addon/nuke/server/version.py b/server_addon/nuke/server/version.py
index 485f44ac21..b3f4756216 100644
--- a/server_addon/nuke/server/version.py
+++ b/server_addon/nuke/server/version.py
@@ -1 +1 @@
-__version__ = "0.1.1"
+__version__ = "0.1.2"
diff --git a/website/docs/admin_hosts_maya.md b/website/docs/admin_hosts_maya.md
index 700822843f..93acf316c2 100644
--- a/website/docs/admin_hosts_maya.md
+++ b/website/docs/admin_hosts_maya.md
@@ -113,7 +113,8 @@ This is useful to fix some specific renderer glitches and advanced hacking of Ma
#### Namespace and Group Name
Here you can create your own custom naming for the reference loader.
-The custom naming is split into two parts: namespace and group name. If you don't set the namespace or the group name, an error will occur.
+The custom naming is split into two parts: namespace and group name. If you don't set the namespace, an error will occur.
+The group name can be left empty; in that case no wrapping group will be created for the loaded item.
Here's the different variables you can use: