[Automated] Merged develop into main

ynbot 2023-08-09 05:23:48 +02:00 committed by GitHub
commit 11625495bf
55 changed files with 329 additions and 324 deletions

View file

@ -35,6 +35,7 @@ body:
label: Version
description: What version are you running? Look to OpenPype Tray
options:
- 3.16.3-nightly.4
- 3.16.3-nightly.3
- 3.16.3-nightly.2
- 3.16.3-nightly.1
@ -134,7 +135,6 @@ body:
- 3.14.7-nightly.5
- 3.14.7-nightly.4
- 3.14.7-nightly.3
- 3.14.7-nightly.2
validations:
required: true
- type: dropdown

View file

@ -196,47 +196,6 @@ def publish(paths, targets, gui):
PypeCommands.publish(list(paths), targets, gui)
@main.command()
@click.argument("path")
@click.option("-h", "--host", help="Host")
@click.option("-u", "--user", help="User email address")
@click.option("-p", "--project", help="Project")
@click.option("-t", "--targets", help="Targets", default=None,
multiple=True)
def remotepublishfromapp(project, path, host, user=None, targets=None):
"""Start CLI publishing.
Publish collects json from paths provided as an argument.
More than one path is allowed.
"""
if AYON_SERVER_ENABLED:
raise RuntimeError(
"AYON does not support 'remotepublishfromapp' command."
)
PypeCommands.remotepublishfromapp(
project, path, host, user, targets=targets
)
@main.command()
@click.argument("path")
@click.option("-u", "--user", help="User email address")
@click.option("-p", "--project", help="Project")
@click.option("-t", "--targets", help="Targets", default=None,
multiple=True)
def remotepublish(project, path, user=None, targets=None):
"""Start CLI publishing.
Publish collects json from paths provided as an argument.
More than one path is allowed.
"""
if AYON_SERVER_ENABLED:
raise RuntimeError("AYON does not support 'remotepublish' command.")
PypeCommands.remotepublish(project, path, user, targets=targets)
@main.command(context_settings={"ignore_unknown_options": True})
def projectmanager():
if AYON_SERVER_ENABLED:
@ -338,12 +297,18 @@ def runtests(folder, mark, pyargs, test_data_folder, persist, app_variant,
persist, app_variant, timeout, setup_only)
@main.command()
@main.command(help="DEPRECATED - run sync server")
@click.pass_context
@click.option("-a", "--active_site", required=True,
help="Name of active stie")
def syncserver(active_site):
help="Name of active site")
def syncserver(ctx, active_site):
"""Run sync site server in background.
Deprecated:
This command is deprecated and will be removed in future versions.
Use '~/openpype_console module sync_server syncservice' instead.
Details:
Some Site Sync use cases need to expose site to another one.
For example if majority of artists work in studio, they are not using
SS at all, but if you want to expose published assets to 'studio' site
@ -359,7 +324,10 @@ def syncserver(active_site):
if AYON_SERVER_ENABLED:
raise RuntimeError("AYON does not support 'syncserver' command.")
PypeCommands().syncserver(active_site)
from openpype.modules.sync_server.sync_server_module import (
syncservice)
ctx.invoke(syncservice, active_site=active_site)
@main.command()
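The hunk above turns the old `syncserver` entry point into a thin wrapper that forwards to the module command via click's context. A minimal, self-contained sketch of that forwarding pattern (standalone names, not OpenPype's actual CLI):

    import click

    @click.group()
    def main():
        pass

    @main.command()
    @click.option("-a", "--active_site", required=True, help="Name of active site")
    def syncservice(active_site):
        # The replacement command; the real one lives in
        # openpype.modules.sync_server.sync_server_module.
        click.echo("sync running for site: {}".format(active_site))

    @main.command(help="DEPRECATED - run sync server")
    @click.pass_context
    @click.option("-a", "--active_site", required=True, help="Name of active site")
    def syncserver(ctx, active_site):
        # Forward to the new command so both entry points share
        # a single implementation.
        ctx.invoke(syncservice, active_site=active_site)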

View file

@ -133,7 +133,6 @@ def _get_default_template_name(templates):
def _template_replacements_to_v3(template):
return (
template
.replace("{folder[name]}", "{asset}")
.replace("{product[name]}", "{subset}")
.replace("{product[type]}", "{family}")
)
@ -715,7 +714,6 @@ def convert_v4_representation_to_v3(representation):
if "template" in output_data:
output_data["template"] = (
output_data["template"]
.replace("{folder[name]}", "{asset}")
.replace("{product[name]}", "{subset}")
.replace("{product[type]}", "{family}")
)
@ -977,7 +975,6 @@ def convert_create_representation_to_v4(representation, con):
representation_data = representation["data"]
representation_data["template"] = (
representation_data["template"]
.replace("{asset}", "{folder[name]}")
.replace("{subset}", "{product[name]}")
.replace("{family}", "{product[type]}")
)
@ -1266,7 +1263,6 @@ def convert_update_representation_to_v4(
if "template" in attribs:
attribs["template"] = (
attribs["template"]
.replace("{asset}", "{folder[name]}")
.replace("{family}", "{product[type]}")
.replace("{subset}", "{product[name]}")
)
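All four hunks above perform the same kind of token translation between v3 and v4 template keys; note this commit drops the {asset} <-> {folder[name]} pair from these chains. A sketch of the substitution approach for the remaining tokens:

    V3_TO_V4 = {
        "{subset}": "{product[name]}",
        "{family}": "{product[type]}",
    }

    def template_to_v4(template):
        # Replace each v3 token with its v4 counterpart.
        for v3, v4 in V3_TO_V4.items():
            template = template.replace(v3, v4)
        return template

    def template_to_v3(template):
        # Inverse direction, as used when converting v4 data to v3.
        for v3, v4 in V3_TO_V4.items():
            template = template.replace(v4, v3)
        return template

    print(template_to_v3("{product[name]}_{product[type]}"))  # -> {subset}_{family}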

View file

@ -38,7 +38,8 @@ class OCIOEnvHook(PreLaunchHook):
host_name=self.host_name,
project_settings=self.data["project_settings"],
anatomy_data=template_data,
anatomy=self.data["anatomy"]
anatomy=self.data["anatomy"],
env=self.launch_context.env,
)
if config_data:

View file

@ -32,19 +32,26 @@ class HostDirmap(object):
"""
def __init__(
self, host_name, project_name, project_settings=None, sync_module=None
self,
host_name,
project_name,
project_settings=None,
sync_module=None
):
self.host_name = host_name
self.project_name = project_name
self._project_settings = project_settings
self._sync_module = sync_module # to limit reinit of Modules
self._sync_module = sync_module
# to limit reinit of Modules
self._sync_module_discovered = sync_module is not None
self._log = None
@property
def sync_module(self):
if self._sync_module is None:
if not self._sync_module_discovered:
self._sync_module_discovered = True
manager = ModulesManager()
self._sync_module = manager["sync_server"]
self._sync_module = manager.get("sync_server")
return self._sync_module
@property
@ -151,21 +158,25 @@ class HostDirmap(object):
"""
project_name = self.project_name
sync_module = self.sync_module
mapping = {}
if (not self.sync_module.enabled or
project_name not in self.sync_module.get_enabled_projects()):
if (
sync_module is None
or not sync_module.enabled
or project_name not in sync_module.get_enabled_projects()
):
return mapping
active_site = self.sync_module.get_local_normalized_site(
self.sync_module.get_active_site(project_name))
remote_site = self.sync_module.get_local_normalized_site(
self.sync_module.get_remote_site(project_name))
active_site = sync_module.get_local_normalized_site(
sync_module.get_active_site(project_name))
remote_site = sync_module.get_local_normalized_site(
sync_module.get_remote_site(project_name))
self.log.debug(
"active {} - remote {}".format(active_site, remote_site)
)
if active_site == "local" and active_site != remote_site:
sync_settings = self.sync_module.get_sync_project_setting(
sync_settings = sync_module.get_sync_project_setting(
project_name,
exclude_locals=False,
cached=False)
@ -179,7 +190,7 @@ class HostDirmap(object):
self.log.debug("remote overrides {}".format(remote_overrides))
current_platform = platform.system().lower()
remote_provider = self.sync_module.get_provider_for_site(
remote_provider = sync_module.get_provider_for_site(
project_name, remote_site
)
# dirmap has sense only with regular disk provider, in the workfile
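The rewrite above distinguishes "module not yet looked up" from "module looked up and absent": with manager.get("sync_server") possibly returning None, caching on `is None` alone would re-run discovery on every property access. A generic sketch of the discovered-flag pattern:

    class LazyModule(object):
        def __init__(self, module=None):
            self._module = module
            # A legitimate None result must not trigger another lookup,
            # so track discovery with a separate flag.
            self._discovered = module is not None

        @property
        def module(self):
            if not self._discovered:
                self._discovered = True
                self._module = self._lookup()  # may legitimately return None
            return self._module

        def _lookup(self):
            # Stand-in for ModulesManager().get("sync_server").
            return None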

View file

@ -15,7 +15,7 @@ class CloseAE(pyblish.api.ContextPlugin):
active = True
hosts = ["aftereffects"]
targets = ["remotepublish"]
targets = ["automated"]
def process(self, context):
self.log.info("CloseAE")

View file

@ -10,7 +10,6 @@ class CreateArnoldAss(plugin.HoudiniCreator):
label = "Arnold ASS"
family = "ass"
icon = "magic"
defaults = ["Main"]
# Default extension: `.ass` or `.ass.gz`
# however calling HoudiniCreator.create()

View file

@ -9,7 +9,6 @@ class CreateArnoldRop(plugin.HoudiniCreator):
label = "Arnold ROP"
family = "arnold_rop"
icon = "magic"
defaults = ["master"]
# Default extension
ext = "exr"

View file

@ -11,7 +11,6 @@ class CreateKarmaROP(plugin.HoudiniCreator):
label = "Karma ROP"
family = "karma_rop"
icon = "magic"
defaults = ["master"]
def create(self, subset_name, instance_data, pre_create_data):
import hou # noqa

View file

@ -11,7 +11,6 @@ class CreateMantraROP(plugin.HoudiniCreator):
label = "Mantra ROP"
family = "mantra_rop"
icon = "magic"
defaults = ["master"]
def create(self, subset_name, instance_data, pre_create_data):
import hou # noqa

View file

@ -13,7 +13,6 @@ class CreateRedshiftROP(plugin.HoudiniCreator):
label = "Redshift ROP"
family = "redshift_rop"
icon = "magic"
defaults = ["master"]
ext = "exr"
def create(self, subset_name, instance_data, pre_create_data):

View file

@ -14,8 +14,6 @@ class CreateVrayROP(plugin.HoudiniCreator):
label = "VRay ROP"
family = "vray_rop"
icon = "magic"
defaults = ["master"]
ext = "exr"
def create(self, subset_name, instance_data, pre_create_data):

View file

@ -32,8 +32,9 @@ class ValidatePrimitiveHierarchyPaths(pyblish.api.InstancePlugin):
def process(self, instance):
invalid = self.get_invalid(instance)
if invalid:
nodes = [n.path() for n in invalid]
raise PublishValidationError(
"See log for details. " "Invalid nodes: {0}".format(invalid),
"See log for details. " "Invalid nodes: {0}".format(nodes),
title=self.label
)

View file

@ -136,6 +136,7 @@ MS_CUSTOM_ATTRIB = """attributes "openPypeData"
temp_arr = #()
for x in all_handles do
(
if x.node == undefined do continue
handle_name = node_to_name x.node
append temp_arr handle_name
)
@ -185,7 +186,10 @@ class MaxCreatorBase(object):
node = rt.Container(name=node)
attrs = rt.Execute(MS_CUSTOM_ATTRIB)
rt.custAttributes.add(node.baseObject, attrs)
modifier = rt.EmptyModifier()
rt.addModifier(node, modifier)
node.modifiers[0].name = "OP Data"
rt.custAttributes.add(node.modifiers[0], attrs)
return node
@ -209,13 +213,19 @@ class MaxCreator(Creator, MaxCreatorBase):
if pre_create_data.get("use_selection"):
node_list = []
sel_list = []
for i in self.selected_nodes:
node_ref = rt.NodeTransformMonitor(node=i)
node_list.append(node_ref)
sel_list.append(str(i))
# Setting the property
rt.setProperty(
instance_node.openPypeData, "all_handles", node_list)
instance_node.modifiers[0].openPypeData,
"all_handles", node_list)
rt.setProperty(
instance_node.modifiers[0].openPypeData,
"sel_list", sel_list)
self._add_instance_to_context(instance)
imprint(instance_node.name, instance.data_to_store())
@ -254,8 +264,8 @@ class MaxCreator(Creator, MaxCreatorBase):
instance_node = rt.GetNodeByName(
instance.data.get("instance_node"))
if instance_node:
count = rt.custAttributes.count(instance_node)
rt.custAttributes.delete(instance_node, count)
count = rt.custAttributes.count(instance_node.modifiers[0])
rt.custAttributes.delete(instance_node.modifiers[0], count)
rt.Delete(instance_node)
self._remove_instance_from_context(instance)
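The creator now parks the openPypeData custom attributes on a dedicated "OP Data" EmptyModifier instead of the node's baseObject, and reads and deletes them from modifiers[0] accordingly. A hedged sketch of that flow with pymxs (runs only inside a 3ds Max session; `attrs` is the definition compiled from MS_CUSTOM_ATTRIB above):

    from pymxs import runtime as rt  # available only inside 3ds Max

    def add_op_data(node, attrs):
        # Attach attributes to a dedicated modifier rather than the
        # base object.
        modifier = rt.EmptyModifier()
        rt.addModifier(node, modifier)
        node.modifiers[0].name = "OP Data"
        rt.custAttributes.add(node.modifiers[0], attrs)

    def read_handles(node):
        # Mirrors the collector change: members now live on the modifier.
        return [m.node for m in node.modifiers[0].openPypeData.all_handles]

    def remove_op_data(node):
        # Mirrors the removal change: delete attributes from the modifier.
        count = rt.custAttributes.count(node.modifiers[0])
        rt.custAttributes.delete(node.modifiers[0], count)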

View file

@ -17,6 +17,6 @@ class CollectMembers(pyblish.api.InstancePlugin):
container = rt.GetNodeByName(instance.data["instance_node"])
instance.data["members"] = [
member.node for member
in container.openPypeData.all_handles
in container.modifiers[0].openPypeData.all_handles
]
self.log.debug("{}".format(instance.data["members"]))

View file

@ -575,12 +575,15 @@ class ReferenceLoader(Loader):
raise LoadError("No namespace specified in "
"Maya ReferenceLoader settings")
elif not custom_naming['group_name']:
raise LoadError("No group name specified in "
"Maya ReferenceLoader settings")
self.log.debug("No custom group_name, no group will be created.")
options["attach_to_root"] = False
formatting_data = {
"asset_name": asset['name'],
"asset_type": asset['type'],
"folder": {
"name": asset["name"],
},
"subset": subset['name'],
"family": (
subset['data'].get('family') or

View file

@ -9,7 +9,8 @@ from openpype.hosts.maya.api.lib import (
maintained_selection,
get_container_members,
parent_nodes,
create_rig_animation_instance
create_rig_animation_instance,
get_reference_node
)
@ -123,6 +124,10 @@ class ReferenceLoader(openpype.hosts.maya.api.plugin.ReferenceLoader):
attach_to_root = options.get("attach_to_root", True)
group_name = options["group_name"]
# no group shall be created
if not attach_to_root:
group_name = namespace
path = self.filepath_from_context(context)
with maintained_selection():
cmds.loadPlugin("AbcImport.mll", quiet=True)
@ -148,11 +153,10 @@ class ReferenceLoader(openpype.hosts.maya.api.plugin.ReferenceLoader):
if current_namespace != ":":
group_name = current_namespace + ":" + group_name
group_name = "|" + group_name
self[:] = new_nodes
if attach_to_root:
group_name = "|" + group_name
roots = cmds.listRelatives(group_name,
children=True,
fullPath=True) or []
@ -205,6 +209,11 @@ class ReferenceLoader(openpype.hosts.maya.api.plugin.ReferenceLoader):
self._post_process_rig(name, namespace, context, options)
else:
if "translate" in options:
if not attach_to_root and new_nodes:
root_nodes = cmds.ls(new_nodes, assemblies=True,
long=True)
# we assume only a single root is ever loaded
group_name = root_nodes[0]
cmds.setAttr("{}.translate".format(group_name),
*options["translate"])
return new_nodes

View file

@ -2955,6 +2955,7 @@ class DirmapCache:
"""Caching class to get settings and sync_module easily and only once."""
_project_name = None
_project_settings = None
_sync_module_discovered = False
_sync_module = None
_mapping = None
@ -2972,8 +2973,10 @@ class DirmapCache:
@classmethod
def sync_module(cls):
if cls._sync_module is None:
cls._sync_module = ModulesManager().modules_by_name["sync_server"]
if not cls._sync_module_discovered:
cls._sync_module_discovered = True
cls._sync_module = ModulesManager().modules_by_name.get(
"sync_server")
return cls._sync_module
@classmethod

View file

@ -1,6 +1,7 @@
"""Host API required Work Files tool"""
import os
import nuke
import shutil
from .utils import is_headless
@ -21,21 +22,37 @@ def save_file(filepath):
def open_file(filepath):
def read_script(nuke_script):
nuke.scriptClear()
nuke.scriptReadFile(nuke_script)
nuke.Root()["name"].setValue(nuke_script)
nuke.Root()["project_directory"].setValue(os.path.dirname(nuke_script))
nuke.Root().setModified(False)
filepath = filepath.replace("\\", "/")
# To remain in the same window, we have to clear the script and read
# in the contents of the workfile.
nuke.scriptClear()
# Nuke Preferences can be read after the script is read.
read_script(filepath)
if not is_headless():
autosave = nuke.toNode("preferences")["AutoSaveName"].evaluate()
autosave_prmpt = "Autosave detected.\nWould you like to load the autosave file?" # noqa
autosave_prmpt = "Autosave detected.\n" \
"Would you like to load the autosave file?" # noqa
if os.path.isfile(autosave) and nuke.ask(autosave_prmpt):
filepath = autosave
try:
# Overwrite the filepath with autosave
shutil.copy(autosave, filepath)
# Now read the (auto-saved) script again
read_script(filepath)
except shutil.Error as err:
nuke.message(
"Detected autosave file could not be used.\n{}"
.format(err))
nuke.scriptReadFile(filepath)
nuke.Root()["name"].setValue(filepath)
nuke.Root()["project_directory"].setValue(os.path.dirname(filepath))
nuke.Root().setModified(False)
return True

View file

@ -17,7 +17,7 @@ class ClosePS(pyblish.api.ContextPlugin):
active = True
hosts = ["photoshop"]
targets = ["remotepublish"]
targets = ["automated"]
def process(self, context):
self.log.info("ClosePS")

View file

@ -6,8 +6,6 @@ from openpype.pipeline.create import get_subset_name
class CollectAutoImage(pyblish.api.ContextPlugin):
"""Creates auto image in non artist based publishes (Webpublisher).
'remotepublish' should be renamed to 'autopublish' or similar in the future
"""
label = "Collect Auto Image"
@ -15,7 +13,7 @@ class CollectAutoImage(pyblish.api.ContextPlugin):
hosts = ["photoshop"]
order = pyblish.api.CollectorOrder + 0.2
targets = ["remotepublish"]
targets = ["automated"]
def process(self, context):
family = "image"

View file

@ -20,7 +20,7 @@ class CollectAutoReview(pyblish.api.ContextPlugin):
label = "Collect Auto Review"
hosts = ["photoshop"]
order = pyblish.api.CollectorOrder + 0.2
targets = ["remotepublish"]
targets = ["automated"]
publish = True

View file

@ -12,7 +12,7 @@ class CollectAutoWorkfile(pyblish.api.ContextPlugin):
label = "Collect Workfile"
hosts = ["photoshop"]
targets = ["remotepublish"]
targets = ["automated"]
def process(self, context):
family = "workfile"

View file

@ -35,7 +35,7 @@ class CollectBatchData(pyblish.api.ContextPlugin):
order = pyblish.api.CollectorOrder - 0.495
label = "Collect batch data"
hosts = ["photoshop"]
targets = ["remotepublish"]
targets = ["webpublish"]
def process(self, context):
self.log.info("CollectBatchData")

View file

@ -34,7 +34,7 @@ class CollectColorCodedInstances(pyblish.api.ContextPlugin):
label = "Instances"
order = pyblish.api.CollectorOrder
hosts = ["photoshop"]
targets = ["remotepublish"]
targets = ["automated"]
# configurable by Settings
color_code_mapping = []

View file

@ -26,7 +26,7 @@ class CollectPublishedVersion(pyblish.api.ContextPlugin):
order = pyblish.api.CollectorOrder + 0.190
label = "Collect published version"
hosts = ["photoshop"]
targets = ["remotepublish"]
targets = ["automated"]
def process(self, context):
workfile_subset_name = None

View file

@ -3,4 +3,4 @@ Webpublisher
Plugins meant for processing of Webpublisher.
Gets triggered by calling openpype.cli.remotepublish with appropriate arguments.
Gets triggered by calling `openpype_console modules webpublisher publish` with appropriate arguments.

View file

@ -20,11 +20,10 @@ class WebpublisherAddon(OpenPypeModule, IHostAddon):
Close Python process at the end.
"""
from openpype.pipeline.publish.lib import remote_publish
from .lib import get_webpublish_conn, publish_and_log
from .lib import get_webpublish_conn, publish_and_log, publish_in_test
if is_test:
remote_publish(log, close_plugin_name)
publish_in_test(log, close_plugin_name)
return
dbcon = get_webpublish_conn()

View file

@ -12,7 +12,6 @@ from openpype.client.mongo import OpenPypeMongoConnection
from openpype.settings import get_project_settings
from openpype.lib import Logger
from openpype.lib.profiles_filtering import filter_profiles
from openpype.pipeline.publish.lib import find_close_plugin
ERROR_STATUS = "error"
IN_PROGRESS_STATUS = "in_progress"
@ -68,6 +67,46 @@ def get_batch_asset_task_info(ctx):
return asset, task_name, task_type
def find_close_plugin(close_plugin_name, log):
if close_plugin_name:
plugins = pyblish.api.discover()
for plugin in plugins:
if plugin.__name__ == close_plugin_name:
return plugin
log.debug("Close plugin not found, app might not close.")
def publish_in_test(log, close_plugin_name=None):
"""Loops through all plugins, logs to console. Used for tests.
Args:
log (Logger)
close_plugin_name (Optional[str]): Name of plugin with responsibility
to close application.
"""
# Error exit as soon as any error occurs.
error_format = "Failed {plugin.__name__}: {error} -- {error.traceback}"
close_plugin = find_close_plugin(close_plugin_name, log)
for result in pyblish.util.publish_iter():
for record in result["records"]:
# Why do we log again? pyblish logger is logging to stdout...
log.info("{}: {}".format(result["plugin"].label, record.msg))
if not result["error"]:
continue
# QUESTION We don't break on error?
error_message = error_format.format(**result)
log.error(error_message)
if close_plugin: # close host app explicitly after error
context = pyblish.api.Context()
close_plugin().process(context)
def get_webpublish_conn():
"""Get connection to OP 'webpublishes' collection."""
mongo_client = OpenPypeMongoConnection.get_mongo_client()
@ -231,7 +270,7 @@ def find_variant_key(application_manager, host):
def get_task_data(batch_dir):
"""Return parsed data from first task manifest.json
Used for `remotepublishfromapp` command where batch contains only
Used for `publishfromapp` command where batch contains only
single task with publishable workfile.
Returns:

View file

@ -34,7 +34,7 @@ def cli_publish(project_name, batch_path, user_email, targets):
Args:
project_name (str): project to publish (only single context is
expected per call of remotepublish
expected per call of 'publish')
batch_path (str): Path batch folder. Contains subfolders with
resources (workfile, another subfolder 'renders' etc.)
user_email (string): email address for webpublisher - used to
@ -49,8 +49,8 @@ def cli_publish(project_name, batch_path, user_email, targets):
if not batch_path:
raise RuntimeError("No publish paths specified")
log = Logger.get_logger("remotepublish")
log.info("remotepublish command")
log = Logger.get_logger("Webpublish")
log.info("Webpublish command")
# Register target and host
webpublisher_host = WebpublisherHost()
@ -107,7 +107,7 @@ def cli_publish_from_app(
Args:
project_name (str): project to publish (only single context is
expected per call of remotepublish
expected per call of publish
batch_path (str): Path batch folder. Contains subfolders with
resources (workfile, another subfolder 'renders' etc.)
host_name (str): 'photoshop'
@ -117,9 +117,9 @@ def cli_publish_from_app(
(to choose validator for example)
"""
log = Logger.get_logger("RemotePublishFromApp")
log = Logger.get_logger("PublishFromApp")
log.info("remotepublishphotoshop command")
log.info("Webpublish photoshop command")
task_data = get_task_data(batch_path)

View file

@ -216,7 +216,7 @@ class BatchPublishEndpoint(WebpublishApiEndpoint):
"extensions": [".tvpp"],
"command": "publish",
"arguments": {
"targets": ["tvpaint_worker"]
"targets": ["tvpaint_worker", "webpublish"]
},
"add_to_queue": False
},
@ -230,7 +230,7 @@ class BatchPublishEndpoint(WebpublishApiEndpoint):
# Make sure targets are set to None for cases that default
# would change
# - targets argument is not used in 'publishfromapp'
"targets": ["remotepublish"]
"targets": ["automated", "webpublish"]
},
# does publish need to be handled by a queue, eg. only
# single process running concurrently?
@ -247,7 +247,7 @@ class BatchPublishEndpoint(WebpublishApiEndpoint):
"project": content["project_name"],
"user": content["user"],
"targets": ["filespublish"]
"targets": ["filespublish", "webpublish"]
}
add_to_queue = False

View file

@ -45,7 +45,7 @@ def run_webserver(executable, upload_dir, host=None, port=None):
server_manager = webserver_module.create_new_server_manager(port, host)
webserver_url = server_manager.url
# queue for remotepublishfromapp tasks
# queue for publishfromapp tasks
studio_task_queue = collections.deque()
resource = RestApiResource(server_manager,

View file

@ -334,6 +334,9 @@ def get_usd_master_path(asset, subset, representation):
"name": project_name,
"code": project_doc.get("data", {}).get("code")
},
"folder": {
"name": asset_doc["name"],
},
"asset": asset_doc["name"],
"subset": subset,
"representation": representation,

View file

@ -33,7 +33,7 @@ class CollectUsernameForWebpublish(pyblish.api.ContextPlugin):
order = pyblish.api.CollectorOrder + 0.0015
label = "Collect ftrack username"
hosts = ["webpublisher", "photoshop"]
targets = ["remotepublish", "filespublish", "tvpaint_worker"]
targets = ["webpublish"]
def process(self, context):
self.log.info("{}".format(self.__class__.__name__))

View file

@ -34,7 +34,12 @@ from openpype.settings.constants import (
from .providers.local_drive import LocalDriveHandler
from .providers import lib
from .utils import time_function, SyncStatus, SiteAlreadyPresentError
from .utils import (
time_function,
SyncStatus,
SiteAlreadyPresentError,
SYNC_SERVER_ROOT,
)
log = Logger.get_logger("SyncServer")
@ -138,9 +143,23 @@ class SyncServerModule(OpenPypeModule, ITrayModule, IPluginPaths):
def get_plugin_paths(self):
"""Deadline plugin paths."""
current_dir = os.path.dirname(os.path.abspath(__file__))
return {
"load": [os.path.join(current_dir, "plugins", "load")]
"load": [os.path.join(SYNC_SERVER_ROOT, "plugins", "load")]
}
def get_site_icons(self):
"""Icons for sites.
Returns:
dict[str, str]: Path to icon by site.
"""
resource_path = os.path.join(
SYNC_SERVER_ROOT, "providers", "resources"
)
return {
provider: "{}/{}.png".format(resource_path, provider)
for provider in ["studio", "local_drive", "gdrive"]
}
""" Start of Public API """
@ -904,10 +923,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule, IPluginPaths):
(str): full absolut path to directory with hooks for the module
"""
return os.path.join(
os.path.dirname(os.path.abspath(__file__)),
"launch_hooks"
)
return os.path.join(SYNC_SERVER_ROOT, "launch_hooks")
# Needs to be refactored after Settings are updated
# # Methods for Settings to get appriate values to fill forms
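With SYNC_SERVER_ROOT exported from utils, resource lookups no longer recompute the module directory, and get_site_icons() hands out plain paths so the module stays Qt-free; UI code wraps them in QIcon itself (see the inventory model further below). A small sketch of that split:

    import os

    SYNC_SERVER_ROOT = os.path.dirname(os.path.abspath(__file__))

    def get_site_icons():
        # Paths only; no Qt dependency on the module side.
        resource_path = os.path.join(SYNC_SERVER_ROOT, "providers", "resources")
        return {
            provider: "{}/{}.png".format(resource_path, provider)
            for provider in ["studio", "local_drive", "gdrive"]
        }

    # UI side (needs a running QApplication):
    # from qtpy import QtGui
    # icons = {name: QtGui.QIcon(path)
    #          for name, path in get_site_icons().items()}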

View file

@ -1,9 +1,12 @@
import os
import time
from openpype.lib import Logger
log = Logger.get_logger("SyncServer")
SYNC_SERVER_ROOT = os.path.dirname(os.path.abspath(__file__))
class ResumableError(Exception):
"""Error which could be temporary, skip current loop, try next time"""

View file

@ -329,7 +329,8 @@ def get_imageio_config(
host_name,
project_settings=None,
anatomy_data=None,
anatomy=None
anatomy=None,
env=None
):
"""Returns config data from settings
@ -342,6 +343,7 @@ def get_imageio_config(
project_settings (Optional[dict]): Project settings.
anatomy_data (Optional[dict]): anatomy formatting data.
anatomy (Optional[Anatomy]): Anatomy object.
env (Optional[dict]): Environment variables.
Returns:
dict: config path data or empty dict
@ -414,13 +416,13 @@ def get_imageio_config(
if override_global_config:
config_data = _get_config_data(
host_ocio_config["filepath"], formatting_data
host_ocio_config["filepath"], formatting_data, env
)
else:
# get config path from global
config_global = imageio_global["ocio_config"]
config_data = _get_config_data(
config_global["filepath"], formatting_data
config_global["filepath"], formatting_data, env
)
if not config_data:
@ -432,7 +434,7 @@ def get_imageio_config(
return config_data
def _get_config_data(path_list, anatomy_data):
def _get_config_data(path_list, anatomy_data, env=None):
"""Return first existing path in path list.
If template is used in path inputs,
@ -442,14 +444,17 @@ def _get_config_data(path_list, anatomy_data):
Args:
path_list (list[str]): list of abs paths
anatomy_data (dict): formatting data
env (Optional[dict]): Environment variables.
Returns:
dict: config data
"""
formatting_data = deepcopy(anatomy_data)
environment_vars = env or dict(**os.environ)
# format the path for potential env vars
formatting_data.update(dict(**os.environ))
formatting_data.update(environment_vars)
# first try host config paths
for path_ in path_list:
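The new `env` parameter lets callers format config-path templates against a captured launch environment instead of the live os.environ. A sketch of the formatting step with illustrative values:

    import os
    from copy import deepcopy

    def format_config_path(template, anatomy_data, env=None):
        formatting_data = deepcopy(anatomy_data)
        # Fall back to the current process environment when no
        # launch-context env was passed in.
        formatting_data.update(env or dict(**os.environ))
        return template.format(**formatting_data)

    data = {"project": {"name": "demo"}}
    env = {"OCIO_ROOT": "/mnt/configs"}  # hypothetical variable
    print(format_config_path("{OCIO_ROOT}/{project[name]}/config.ocio", data, env))
    # -> /mnt/configs/demo/config.ocio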

View file

@ -537,44 +537,24 @@ def filter_pyblish_plugins(plugins):
plugins.remove(plugin)
def find_close_plugin(close_plugin_name, log):
if close_plugin_name:
plugins = pyblish.api.discover()
for plugin in plugins:
if plugin.__name__ == close_plugin_name:
return plugin
log.debug("Close plugin not found, app might not close.")
def remote_publish(log, close_plugin_name=None, raise_error=False):
def remote_publish(log):
"""Loops through all plugins, logs to console. Used for tests.
Args:
log (Logger)
close_plugin_name (str): name of plugin with responsibility to
close host app
"""
# Error exit as soon as any error occurs.
error_format = "Failed {plugin.__name__}: {error} -- {error.traceback}"
close_plugin = find_close_plugin(close_plugin_name, log)
# Error exit as soon as any error occurs.
error_format = "Failed {plugin.__name__}: {error}\n{error.traceback}"
for result in pyblish.util.publish_iter():
for record in result["records"]:
log.info("{}: {}".format(
result["plugin"].label, record.msg))
if not result["error"]:
continue
if result["error"]:
error_message = error_format.format(**result)
log.error(error_message)
if close_plugin: # close host app explicitly after error
context = pyblish.api.Context()
close_plugin().process(context)
if raise_error:
# Fatal Error is because of Deadline
error_message = "Fatal Error: " + error_format.format(**result)
raise RuntimeError(error_message)
error_message = error_format.format(**result)
log.error(error_message)
# 'Fatal Error: ' is because of Deadline
raise RuntimeError("Fatal Error: {}".format(error_message))
def get_errored_instances_from_context(context, plugin=None):

View file

@ -94,6 +94,9 @@ def get_asset_template_data(asset_doc, project_name):
return {
"asset": asset_doc["name"],
"folder": {
"name": asset_doc["name"]
},
"hierarchy": hierarchy,
"parent": parent_name
}
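Template data now carries the folder name under both the v3 "asset" key and the v4 nested "folder" key, so templates written in either style resolve against the same data. For example:

    data = {
        "asset": "sh010",
        "folder": {"name": "sh010"},  # same value, v4-style access
        "hierarchy": "seq01",
    }
    print("{asset}".format(**data))         # -> sh010
    print("{folder[name]}".format(**data))  # -> sh010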

View file

@ -85,8 +85,11 @@ class ExtractReviewSlate(publish.Extractor):
input_width,
input_height,
input_timecode,
input_frame_rate
input_frame_rate,
input_pixel_aspect
) = self._get_video_metadata(streams)
if input_pixel_aspect:
pixel_aspect = input_pixel_aspect
# Raise exception of any stream didn't define input resolution
if input_width is None:
@ -419,6 +422,7 @@ class ExtractReviewSlate(publish.Extractor):
input_width = None
input_height = None
input_frame_rate = None
input_pixel_aspect = None
for stream in streams:
if stream.get("codec_type") != "video":
continue
@ -436,6 +440,16 @@ class ExtractReviewSlate(publish.Extractor):
input_width = width
input_height = height
input_pixel_aspect = stream.get("sample_aspect_ratio")
if input_pixel_aspect is not None:
try:
input_pixel_aspect = float(
eval(str(input_pixel_aspect).replace(':', '/')))
except Exception:
self.log.debug(
"__Converting pixel aspect to float failed: {}".format(
input_pixel_aspect))
tags = stream.get("tags") or {}
input_timecode = tags.get("timecode") or ""
@ -446,7 +460,8 @@ class ExtractReviewSlate(publish.Extractor):
input_width,
input_height,
input_timecode,
input_frame_rate
input_frame_rate,
input_pixel_aspect
)
def _get_audio_metadata(self, streams):
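The added branch converts ffprobe's sample_aspect_ratio string (e.g. "16:9") to a float by swapping ":" for "/" and evaluating it. An equivalent eval-free sketch using the standard library:

    from fractions import Fraction

    def parse_sample_aspect_ratio(value):
        # ffprobe reports e.g. "16:9"; Fraction("16/9") parses it safely.
        try:
            return float(Fraction(str(value).replace(":", "/")))
        except (ValueError, ZeroDivisionError):
            return None

    print(parse_sample_aspect_ratio("16:9"))  # -> 1.7777...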

View file

@ -2,9 +2,10 @@ import os
import logging
import sys
import copy
import datetime
import clique
import six
from bson.objectid import ObjectId
import pyblish.api
@ -320,10 +321,16 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
# Get the accessible sites for Site Sync
modules_by_name = instance.context.data["openPypeModules"]
sync_server_module = modules_by_name["sync_server"]
sites = sync_server_module.compute_resource_sync_sites(
project_name=instance.data["projectEntity"]["name"]
)
sync_server_module = modules_by_name.get("sync_server")
if sync_server_module is None:
sites = [{
"name": "studio",
"created_dt": datetime.datetime.now()
}]
else:
sites = sync_server_module.compute_resource_sync_sites(
project_name=instance.data["projectEntity"]["name"]
)
self.log.debug("Sync Server Sites: {}".format(sites))
# Compute the resource file infos once (files belonging to the

View file

@ -165,74 +165,6 @@ class PypeCommands:
log.info("Publish finished.")
@staticmethod
def remotepublishfromapp(project_name, batch_path, host_name,
user_email, targets=None):
"""Opens installed variant of 'host' and run remote publish there.
Eventually should be yanked out to Webpublisher cli.
Currently implemented and tested for Photoshop where customer
wants to process uploaded .psd file and publish collected layers
from there. Triggered by Webpublisher.
Checks if no other batches are running (status =='in_progress). If
so, it sleeps for SLEEP (this is separate process),
waits for WAIT_FOR seconds altogether.
Requires installed host application on the machine.
Runs publish process as user would, in automatic fashion.
Args:
project_name (str): project to publish (only single context is
expected per call of remotepublish
batch_path (str): Path batch folder. Contains subfolders with
resources (workfile, another subfolder 'renders' etc.)
host_name (str): 'photoshop'
user_email (string): email address for webpublisher - used to
find Ftrack user with same email
targets (list): Pyblish targets
(to choose validator for example)
"""
from openpype.hosts.webpublisher.publish_functions import (
cli_publish_from_app
)
cli_publish_from_app(
project_name, batch_path, host_name, user_email, targets
)
@staticmethod
def remotepublish(project, batch_path, user_email, targets=None):
"""Start headless publishing.
Used to publish rendered assets, workfiles etc via Webpublisher.
Eventually should be yanked out to Webpublisher cli.
Publish use json from passed paths argument.
Args:
project (str): project to publish (only single context is expected
per call of remotepublish
batch_path (str): Path batch folder. Contains subfolders with
resources (workfile, another subfolder 'renders' etc.)
user_email (string): email address for webpublisher - used to
find Ftrack user with same email
targets (list): Pyblish targets
(to choose validator for example)
Raises:
RuntimeError: When there is no path to process.
"""
from openpype.hosts.webpublisher.publish_functions import (
cli_publish
)
cli_publish(project, batch_path, user_email, targets)
@staticmethod
def extractenvironments(output_json_path, project, asset, task, app,
env_group):
@ -336,34 +268,6 @@ class PypeCommands:
import pytest
pytest.main(args)
def syncserver(self, active_site):
"""Start running sync_server in background.
This functionality is available in directly in module cli commands.
`~/openpype_console module sync_server syncservice`
"""
os.environ["OPENPYPE_LOCAL_ID"] = active_site
def signal_handler(sig, frame):
print("You pressed Ctrl+C. Process ended.")
sync_server_module.server_exit()
sys.exit(0)
signal.signal(signal.SIGINT, signal_handler)
signal.signal(signal.SIGTERM, signal_handler)
from openpype.modules import ModulesManager
manager = ModulesManager()
sync_server_module = manager.modules_by_name["sync_server"]
sync_server_module.server_init()
sync_server_module.server_start()
while True:
time.sleep(1.0)
def repack_version(self, directory):
"""Repacking OpenPype version."""
from openpype.tools.repack_version import VersionRepacker

View file

@ -9,4 +9,4 @@ except ImportError as exc:
if __name__ == "__main__":
# Perform remote publish with thorough error checking
log = Logger.get_logger(__name__)
remote_publish(log, raise_error=True)
remote_publish(log)

View file

@ -599,7 +599,6 @@ def _convert_maya_project_settings(ayon_settings, output):
reference_loader = ayon_maya_load["reference_loader"]
reference_loader["namespace"] = (
reference_loader["namespace"]
.replace("{folder[name]}", "{asset_name}")
.replace("{product[name]}", "{subset}")
)
@ -645,6 +644,9 @@ def _convert_nuke_knobs(knobs):
elif knob_type == "vector_3d":
value = [value["x"], value["y"], value["z"]]
elif knob_type == "box":
value = [value["x"], value["y"], value["r"], value["t"]]
new_knob[value_key] = value
return new_knobs
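The new "box" branch flattens the four named fields into the [x, y, r, t] list form used for Nuke box knobs. Illustrative input and output:

    value = {"x": 0.0, "y": 0.0, "r": 1920.0, "t": 1080.0}
    # Conversion applied for knob_type == "box":
    converted = [value["x"], value["y"], value["r"], value["t"]]
    print(converted)  # -> [0.0, 0.0, 1920.0, 1080.0]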

View file

@ -114,9 +114,10 @@ class LibraryLoaderWindow(QtWidgets.QDialog):
manager = ModulesManager()
sync_server = manager.modules_by_name.get("sync_server")
sync_server_enabled = False
if sync_server is not None:
sync_server_enabled = sync_server.enabled
sync_server_enabled = (
sync_server is not None
and sync_server.enabled
)
repres_widget = None
if sync_server_enabled:

View file

@ -64,6 +64,7 @@ class BaseRepresentationModel(object):
"""Sets/Resets sync server vars after every change (refresh.)"""
repre_icons = {}
sync_server = None
sync_server_enabled = False
active_site = active_provider = None
remote_site = remote_provider = None
@ -75,6 +76,7 @@ class BaseRepresentationModel(object):
if not project_name:
self.repre_icons = repre_icons
self.sync_server = sync_server
self.sync_server_enabled = sync_server_enabled
self.active_site = active_site
self.active_provider = active_provider
self.remote_site = remote_site
@ -100,8 +102,13 @@ class BaseRepresentationModel(object):
self._modules_manager = ModulesManager()
self._last_manager_cache = now_time
sync_server = self._modules_manager.modules_by_name["sync_server"]
if sync_server.is_project_enabled(project_name, single=True):
sync_server = self._modules_manager.modules_by_name.get("sync_server")
if (
sync_server is not None
and sync_server.enabled
and sync_server.is_project_enabled(project_name, single=True)
):
sync_server_enabled = True
active_site = sync_server.get_active_site(project_name)
active_provider = sync_server.get_provider_for_site(
project_name, active_site)
@ -118,6 +125,7 @@ class BaseRepresentationModel(object):
self.repre_icons = repre_icons
self.sync_server = sync_server
self.sync_server_enabled = sync_server_enabled
self.active_site = active_site
self.active_provider = active_provider
self.remote_site = remote_site
@ -213,6 +221,7 @@ class SubsetsModel(BaseRepresentationModel, TreeModel):
self.repre_icons = {}
self.sync_server = None
self.sync_server_enabled = False
self.active_site = self.active_provider = None
self.columns_index = dict(
@ -282,7 +291,7 @@ class SubsetsModel(BaseRepresentationModel, TreeModel):
)
# update availability on active site when version changes
if self.sync_server.enabled and version_doc:
if self.sync_server_enabled and version_doc:
repres_info = list(
self.sync_server.get_repre_info_for_versions(
project_name,
@ -507,7 +516,7 @@ class SubsetsModel(BaseRepresentationModel, TreeModel):
return
repre_info_by_version_id = {}
if self.sync_server.enabled:
if self.sync_server_enabled:
versions_by_id = {}
for _subset_id, doc in last_versions_by_subset_id.items():
versions_by_id[doc["_id"]] = doc
@ -1033,12 +1042,16 @@ class RepresentationModel(TreeModel, BaseRepresentationModel):
self._version_ids = []
manager = ModulesManager()
sync_server = active_site = remote_site = None
active_site = remote_site = None
active_provider = remote_provider = None
sync_server = manager.modules_by_name.get("sync_server")
sync_server_enabled = (
sync_server is not None
and sync_server.enabled
)
project_name = dbcon.current_project()
if project_name:
sync_server = manager.modules_by_name["sync_server"]
if sync_server_enabled and project_name:
active_site = sync_server.get_active_site(project_name)
remote_site = sync_server.get_remote_site(project_name)
@ -1057,6 +1070,7 @@ class RepresentationModel(TreeModel, BaseRepresentationModel):
remote_provider = 'studio'
self.sync_server = sync_server
self.sync_server_enabled = sync_server_enabled
self.active_site = active_site
self.active_provider = active_provider
self.remote_site = remote_site
@ -1174,9 +1188,15 @@ class RepresentationModel(TreeModel, BaseRepresentationModel):
repre_groups_items[doc["name"]] = 0
group = group_item
progress = self.sync_server.get_progress_for_repre(
doc,
self.active_site, self.remote_site)
progress = {
self.active_site: 0,
self.remote_site: 0,
}
if self.sync_server_enabled:
progress = self.sync_server.get_progress_for_repre(
doc,
self.active_site,
self.remote_site)
active_site_icon = self._icons.get(self.active_provider)
remote_site_icon = self._icons.get(self.remote_provider)

View file

@ -1,9 +1,3 @@
import os
from openpype_modules import sync_server
from qtpy import QtGui
def walk_hierarchy(node):
"""Recursively yield group node."""
for child in node.children():
@ -12,19 +6,3 @@ def walk_hierarchy(node):
for _child in walk_hierarchy(child):
yield _child
def get_site_icons():
resource_path = os.path.join(
os.path.dirname(sync_server.sync_server_module.__file__),
"providers",
"resources"
)
icons = {}
# TODO get from sync module
for provider in ["studio", "local_drive", "gdrive"]:
pix_url = "{}/{}.png".format(resource_path, provider)
icons[provider] = QtGui.QIcon(pix_url)
return icons

View file

@ -24,10 +24,7 @@ from openpype.style import get_default_entity_icon_color
from openpype.tools.utils.models import TreeModel, Item
from openpype.modules import ModulesManager
from .lib import (
get_site_icons,
walk_hierarchy,
)
from .lib import walk_hierarchy
class InventoryModel(TreeModel):
@ -53,8 +50,10 @@ class InventoryModel(TreeModel):
self._default_icon_color = get_default_entity_icon_color()
manager = ModulesManager()
sync_server = manager.modules_by_name["sync_server"]
self.sync_enabled = sync_server.enabled
sync_server = manager.modules_by_name.get("sync_server")
self.sync_enabled = (
sync_server is not None and sync_server.enabled
)
self._site_icons = {}
self.active_site = self.remote_site = None
self.active_provider = self.remote_provider = None
@ -84,7 +83,10 @@ class InventoryModel(TreeModel):
self.active_provider = active_provider
self.remote_site = remote_site
self.remote_provider = remote_provider
self._site_icons = get_site_icons()
self._site_icons = {
provider: QtGui.QIcon(icon_path)
for provider, icon_path in self.get_site_icons().items()
}
if "active_site" not in self.Columns:
self.Columns.append("active_site")
if "remote_site" not in self.Columns:

View file

@ -54,8 +54,11 @@ class SceneInventoryView(QtWidgets.QTreeView):
self._selected = None
manager = ModulesManager()
self.sync_server = manager.modules_by_name["sync_server"]
self.sync_enabled = self.sync_server.enabled
sync_server = manager.modules_by_name.get("sync_server")
sync_enabled = sync_server is not None and self.sync_server.enabled
self.sync_server = sync_server
self.sync_enabled = sync_enabled
def _set_hierarchy_view(self, enabled):
if enabled == self._hierarchy_view:

View file

@ -267,19 +267,20 @@ class SitesWidget(QtWidgets.QWidget):
self.input_objects = {}
def _get_sites_inputs(self):
sync_server_module = (
self.modules_manager.modules_by_name["sync_server"]
)
output = []
if self._project_name is None:
return output
sync_server_module = self.modules_manager.modules_by_name.get(
"sync_server")
if sync_server_module is None or not sync_server_module.enabled:
return output
site_configs = sync_server_module.get_all_site_configs(
self._project_name, local_editable_only=True)
roots_entity = (
self.project_settings[PROJECT_ANATOMY_KEY][LOCAL_ROOTS_KEY]
)
site_names = [self.active_site_widget.current_text(),
self.remote_site_widget.current_text()]
output = []
for site_name in site_names:
if not site_name:
continue
@ -350,9 +351,6 @@ class SitesWidget(QtWidgets.QWidget):
def refresh(self):
self._clear_widgets()
if self._project_name is None:
return
# Site label
for site_name, site_inputs in self._get_sites_inputs():
site_widget = QtWidgets.QWidget(self.content_widget)

View file

@ -760,20 +760,23 @@ def create_qthread(func, *args, **kwargs):
def get_repre_icons():
"""Returns a dict {'provider_name': QIcon}"""
icons = {}
try:
from openpype_modules import sync_server
except Exception:
# Backwards compatibility
from openpype.modules import sync_server
try:
from openpype.modules import sync_server
except Exception:
return icons
resource_path = os.path.join(
os.path.dirname(sync_server.sync_server_module.__file__),
"providers", "resources"
)
icons = {}
if not os.path.exists(resource_path):
print("No icons for Site Sync found")
return {}
return icons
for file_name in os.listdir(resource_path):
if file_name and not file_name.endswith("png"):

View file

@ -203,8 +203,7 @@ def create_openpype_package(
ignored_modules = [
"ftrack",
"shotgrid",
# Sync server is still expected at multiple places
# "sync_server",
"sync_server",
"example_addons",
"slack"
]

View file

@ -39,6 +39,15 @@ class Vector3d(BaseSettingsModel):
z: float = Field(1.0, title="Z")
class Box(BaseSettingsModel):
_layout = "compact"
x: float = Field(1.0, title="X")
y: float = Field(1.0, title="Y")
r: float = Field(1.0, title="R")
t: float = Field(1.0, title="T")
def formatable_knob_type_enum():
return [
{"value": "text", "label": "Text"},
@ -74,6 +83,7 @@ knob_types_enum = [
{"value": "vector_2d", "label": "2D vector"},
{"value": "vector_3d", "label": "3D vector"},
{"value": "color", "label": "Color"},
{"value": "box", "label": "Box"},
{"value": "expression", "label": "Expression"}
]
@ -118,6 +128,10 @@ class KnobModel(BaseSettingsModel):
(0.0, 0.0, 1.0, 1.0),
title="RGBA Float"
)
box: Box = Field(
default_factory=Box,
title="Value"
)
formatable: Formatable = Field(
default_factory=Formatable,
title="Formatable"

View file

@ -1 +1 @@
__version__ = "0.1.1"
__version__ = "0.1.2"

View file

@ -113,7 +113,8 @@ This is useful to fix some specific renderer glitches and advanced hacking of Ma
#### Namespace and Group Name
Here you can create your own custom naming for the reference loader.
The custom naming is split into two parts: namespace and group name. If you don't set the namespace or the group name, an error will occur.
The custom naming is split into two parts: namespace and group name. If you don't set the namespace, an error will occur.
The group name can be left empty; in that case no wrapping group will be created for the loaded item.
Here are the different variables you can use:
<div class="row markdown">