[Automated] Merged develop into main
commit 00f3ffebaa
69 changed files with 4707 additions and 2780 deletions

@@ -57,6 +57,17 @@ def tray(debug=False):
     PypeCommands().launch_tray(debug)


+@PypeCommands.add_modules
+@main.group(help="Run command line arguments of OpenPype modules")
+@click.pass_context
+def module(ctx):
+    """Module specific commands created dynamically.
+
+    These commands are generated dynamically by currently loaded addons/modules.
+    """
+    pass
+
+
 @main.command()
 @click.option("-d", "--debug", is_flag=True, help="Print debug messages")
 @click.option("--ftrack-url", envvar="FTRACK_SERVER",

@@ -147,7 +158,9 @@ def extractenvironments(output_json_path, project, asset, task, app):
 @click.option("-d", "--debug", is_flag=True, help="Print debug messages")
 @click.option("-t", "--targets", help="Targets module", default=None,
               multiple=True)
-def publish(debug, paths, targets):
+@click.option("-g", "--gui", is_flag=True,
+              help="Show Publish UI", default=False)
+def publish(debug, paths, targets, gui):
     """Start CLI publishing.

     Publish collects json from paths provided as an argument.

@@ -155,7 +168,7 @@ def publish(debug, paths, targets):
     """
     if debug:
         os.environ['OPENPYPE_DEBUG'] = '3'
-    PypeCommands.publish(list(paths), targets)
+    PypeCommands.publish(list(paths), targets, gui)


 @main.command()

@@ -166,7 +179,7 @@ def publish(debug, paths, targets):
 @click.option("-p", "--project", help="Project")
 @click.option("-t", "--targets", help="Targets", default=None,
               multiple=True)
-def remotepublishfromapp(debug, project, path, host, targets=None, user=None):
+def remotepublishfromapp(debug, project, path, host, user=None, targets=None):
     """Start CLI publishing.

     Publish collects json from paths provided as an argument.

@@ -174,18 +187,19 @@ def remotepublishfromapp(debug, project, path, host, targets=None, user=None):
     """
     if debug:
         os.environ['OPENPYPE_DEBUG'] = '3'
-    PypeCommands.remotepublishfromapp(project, path, host, user,
-                                      targets=targets)
+    PypeCommands.remotepublishfromapp(
+        project, path, host, user, targets=targets
+    )


 @main.command()
 @click.argument("path")
 @click.option("-d", "--debug", is_flag=True, help="Print debug messages")
 @click.option("-h", "--host", help="Host")
 @click.option("-u", "--user", help="User email address")
 @click.option("-p", "--project", help="Project")
 @click.option("-t", "--targets", help="Targets", default=None,
               multiple=True)
-def remotepublish(debug, project, path, host, targets=None, user=None):
+def remotepublish(debug, project, path, user=None, targets=None):
     """Start CLI publishing.

     Publish collects json from paths provided as an argument.

@@ -193,7 +207,7 @@ def remotepublish(debug, project, path, host, targets=None, user=None):
     """
     if debug:
         os.environ['OPENPYPE_DEBUG'] = '3'
-    PypeCommands.remotepublish(project, path, host, user, targets=targets)
+    PypeCommands.remotepublish(project, path, user, targets=targets)


 @main.command()

@@ -345,3 +359,28 @@ def run(script):
 def runtests(folder, mark, pyargs):
     """Run all automatic tests after proper initialization via start.py"""
     PypeCommands().run_tests(folder, mark, pyargs)
+
+
+@main.command()
+@click.option("-d", "--debug",
+              is_flag=True, help=("Run process in debug mode"))
+@click.option("-a", "--active_site", required=True,
+              help="Name of active site")
+def syncserver(debug, active_site):
+    """Run sync site server in background.
+
+    Some Site Sync use cases need to expose one site to another.
+    For example, if the majority of artists work in the studio they do not
+    use Site Sync at all, but to expose published assets from the 'studio'
+    site to SFTP for only a couple of artists, some background process must
+    mark published assets as living on multiple sites (they might be
+    physically in the same location - a mounted shared disk).
+
+    The process mimics OP Tray with a specific 'active_site' name; all
+    configuration for this "dummy" user comes from Settings or Local
+    Settings (configured by starting OP Tray with the env
+    var OPENPYPE_LOCAL_ID set to 'active_site').
+    """
+    if debug:
+        os.environ['OPENPYPE_DEBUG'] = '3'
+    PypeCommands().syncserver(active_site)

@@ -13,6 +13,7 @@ from pyblish import api as pyblish
 from openpype.lib import any_outdated
 import openpype.hosts.maya
 from openpype.hosts.maya.lib import copy_workspace_mel
+from openpype.lib.path_tools import HostDirmap
 from . import menu, lib

 log = logging.getLogger("openpype.hosts.maya")

@@ -30,7 +31,8 @@ def install():

     project_settings = get_project_settings(os.getenv("AVALON_PROJECT"))
     # process path mapping
-    process_dirmap(project_settings)
+    dirmap_processor = MayaDirmap("maya", project_settings)
+    dirmap_processor.process_dirmap()

     pyblish.register_plugin_path(PUBLISH_PATH)
     avalon.register_plugin_path(avalon.Loader, LOAD_PATH)

@@ -60,110 +62,6 @@ def install():
     avalon.data["familiesStateToggled"] = ["imagesequence"]


-def process_dirmap(project_settings):
-    # type: (dict) -> None
-    """Go through all paths in Settings and set them using `dirmap`.
-
-    If artists has Site Sync enabled, take dirmap mapping directly from
-    Local Settings when artist is syncing workfile locally.
-
-    Args:
-        project_settings (dict): Settings for current project.
-
-    """
-    local_mapping = _get_local_sync_dirmap(project_settings)
-    if not project_settings["maya"].get("maya-dirmap") and not local_mapping:
-        return
-
-    mapping = local_mapping or \
-        project_settings["maya"]["maya-dirmap"]["paths"] \
-        or {}
-    mapping_enabled = project_settings["maya"]["maya-dirmap"]["enabled"] \
-        or bool(local_mapping)
-
-    if not mapping or not mapping_enabled:
-        return
-    if mapping.get("source-path") and mapping_enabled is True:
-        log.info("Processing directory mapping ...")
-        cmds.dirmap(en=True)
-    for k, sp in enumerate(mapping["source-path"]):
-        try:
-            print("{} -> {}".format(sp, mapping["destination-path"][k]))
-            cmds.dirmap(m=(sp, mapping["destination-path"][k]))
-            cmds.dirmap(m=(mapping["destination-path"][k], sp))
-        except IndexError:
-            # missing corresponding destination path
-            log.error(("invalid dirmap mapping, missing corresponding"
-                       " destination directory."))
-            break
-        except RuntimeError:
-            log.error("invalid path {} -> {}, mapping not registered".format(
-                sp, mapping["destination-path"][k]
-            ))
-            continue
-
-
-def _get_local_sync_dirmap(project_settings):
-    """
-    Returns dirmap if synch to local project is enabled.
-
-    Only valid mapping is from roots of remote site to local site set in
-    Local Settings.
-
-    Args:
-        project_settings (dict)
-    Returns:
-        dict : { "source-path": [XXX], "destination-path": [YYYY]}
-    """
-    import json
-    mapping = {}
-
-    if not project_settings["global"]["sync_server"]["enabled"]:
-        log.debug("Site Sync not enabled")
-        return mapping
-
-    from openpype.settings.lib import get_site_local_overrides
-    from openpype.modules import ModulesManager
-
-    manager = ModulesManager()
-    sync_module = manager.modules_by_name["sync_server"]
-
-    project_name = os.getenv("AVALON_PROJECT")
-    sync_settings = sync_module.get_sync_project_setting(
-        os.getenv("AVALON_PROJECT"), exclude_locals=False, cached=False)
-    log.debug(json.dumps(sync_settings, indent=4))
-
-    active_site = sync_module.get_local_normalized_site(
-        sync_module.get_active_site(project_name))
-    remote_site = sync_module.get_local_normalized_site(
-        sync_module.get_remote_site(project_name))
-    log.debug("active {} - remote {}".format(active_site, remote_site))
-
-    if active_site == "local" \
-            and project_name in sync_module.get_enabled_projects()\
-            and active_site != remote_site:
-        overrides = get_site_local_overrides(os.getenv("AVALON_PROJECT"),
-                                             active_site)
-        for root_name, value in overrides.items():
-            if os.path.isdir(value):
-                try:
-                    mapping["destination-path"] = [value]
-                    mapping["source-path"] = [sync_settings["sites"]
-                                              [remote_site]
-                                              ["root"]
-                                              [root_name]]
-                except IndexError:
-                    # missing corresponding destination path
-                    log.debug("overrides".format(overrides))
-                    log.error(
-                        ("invalid dirmap mapping, missing corresponding"
-                         " destination directory."))
-                    break
-
-    log.debug("local sync mapping:: {}".format(mapping))
-    return mapping
-
-
 def uninstall():
     pyblish.deregister_plugin_path(PUBLISH_PATH)
     avalon.deregister_plugin_path(avalon.Loader, LOAD_PATH)

@@ -326,3 +224,12 @@ def before_workfile_save(workfile_path):

     workdir = os.path.dirname(workfile_path)
     copy_workspace_mel(workdir)
+
+
+class MayaDirmap(HostDirmap):
+    def on_enable_dirmap(self):
+        cmds.dirmap(en=True)
+
+    def dirmap_routine(self, source_path, destination_path):
+        cmds.dirmap(m=(source_path, destination_path))
+        cmds.dirmap(m=(destination_path, source_path))

@@ -45,9 +45,12 @@ class ExtractPlayblast(openpype.api.Extractor):
         # get cameras
         camera = instance.data['review_camera']

+        override_viewport_options = (
+            self.capture_preset['Viewport Options']
+            ['override_viewport_options']
+        )
         preset = lib.load_capture_preset(data=self.capture_preset)

-
         preset['camera'] = camera
         preset['start_frame'] = start
         preset['end_frame'] = end

@@ -92,6 +95,12 @@ class ExtractPlayblast(openpype.api.Extractor):

         self.log.info('using viewport preset: {}'.format(preset))

+        # Update preset with current panel setting
+        # if override_viewport_options is turned off
+        if not override_viewport_options:
+            panel_preset = capture.parse_active_view()
+            preset.update(panel_preset)
+
         path = capture.capture(**preset)

         self.log.debug("playblast path {}".format(path))

@@ -32,6 +32,9 @@ class ExtractThumbnail(openpype.api.Extractor):
         capture_preset = (
             instance.context.data["project_settings"]['maya']['publish']['ExtractPlayblast']['capture_preset']
         )
+        override_viewport_options = (
+            capture_preset['Viewport Options']['override_viewport_options']
+        )

         try:
             preset = lib.load_capture_preset(data=capture_preset)

@@ -86,6 +89,12 @@ class ExtractThumbnail(openpype.api.Extractor):
         # playblast and viewer
         preset['viewer'] = False

+        # Update preset with current panel setting
+        # if override_viewport_options is turned off
+        if not override_viewport_options:
+            panel_preset = capture.parse_active_view()
+            preset.update(panel_preset)
+
         path = capture.capture(**preset)
         playblast = self._fix_playblast_output_path(path)

@@ -24,6 +24,10 @@ from openpype.api import (
     ApplicationManager
 )
 from openpype.tools.utils import host_tools
+from openpype.lib.path_tools import HostDirmap
+from openpype.settings import get_project_settings
+from openpype.modules import ModulesManager

 import nuke

 from .utils import set_context_favorites

@@ -1795,3 +1799,69 @@ def recreate_instance(origin_node, avalon_data=None):
         dn.setInput(0, new_node)

     return new_node
+
+
+class NukeDirmap(HostDirmap):
+    def __init__(self, host_name, project_settings, sync_module, file_name):
+        """
+        Args:
+            host_name (str): Nuke
+            project_settings (dict): settings of current project
+            sync_module (SyncServerModule): to limit reinitialization
+            file_name (str): full path of referenced file from workfiles
+        """
+        self.host_name = host_name
+        self.project_settings = project_settings
+        self.file_name = file_name
+        self.sync_module = sync_module
+
+        self._mapping = None  # cache mapping
+
+    def on_enable_dirmap(self):
+        pass
+
+    def dirmap_routine(self, source_path, destination_path):
+        log.debug("{}: {}->{}".format(self.file_name,
+                                      source_path, destination_path))
+        source_path = source_path.lower().replace(os.sep, '/')
+        destination_path = destination_path.lower().replace(os.sep, '/')
+        if platform.system().lower() == "windows":
+            self.file_name = self.file_name.lower().replace(
+                source_path, destination_path)
+        else:
+            self.file_name = self.file_name.replace(
+                source_path, destination_path)
+
+
+class DirmapCache:
+    """Caching class to get settings and sync_module easily and only once."""
+    _project_settings = None
+    _sync_module = None
+
+    @classmethod
+    def project_settings(cls):
+        if cls._project_settings is None:
+            cls._project_settings = get_project_settings(
+                os.getenv("AVALON_PROJECT"))
+        return cls._project_settings
+
+    @classmethod
+    def sync_module(cls):
+        if cls._sync_module is None:
+            cls._sync_module = ModulesManager().modules_by_name["sync_server"]
+        return cls._sync_module
+
+
+def dirmap_file_name_filter(file_name):
+    """Nuke callback function with single full path argument.
+
+    Checks project settings for potential mapping from source to dest.
+    """
+    dirmap_processor = NukeDirmap("nuke",
+                                  DirmapCache.project_settings(),
+                                  DirmapCache.sync_module(),
+                                  file_name)
+    dirmap_processor.process_dirmap()
+    if os.path.exists(dirmap_processor.file_name):
+        return dirmap_processor.file_name
+    return file_name

@@ -6,10 +6,10 @@ from openpype.hosts.nuke.api.lib import (

 import nuke
 from openpype.api import Logger
+from openpype.hosts.nuke.api.lib import dirmap_file_name_filter

 log = Logger().get_logger(__name__)


 # fix ffmpeg settings on script
 nuke.addOnScriptLoad(on_script_load)

@@ -20,4 +20,6 @@ nuke.addOnScriptSave(check_inventory_versions)
 # # set apply all workfile settings on script load and save
 nuke.addOnScriptLoad(WorkfileSettings().set_context_settings)

+nuke.addFilenameFilter(dirmap_file_name_filter)
+
 log.info('Automatic syncing of write file knob to script version')

@@ -0,0 +1,84 @@
"""Loads batch context from json and continues in publish process.

Provides:
    context -> Loaded batch file.
"""

import os

import pyblish.api
from avalon import io
from openpype.lib.plugin_tools import (
    parse_json,
    get_batch_asset_task_info
)
from openpype.lib.remote_publish import get_webpublish_conn


class CollectBatchData(pyblish.api.ContextPlugin):
    """Collect batch data from json stored in 'OPENPYPE_PUBLISH_DATA' env dir.

    The directory must contain 'manifest.json' file where batch data should be
    stored.
    """
    # must be really early, context values are only in json file
    order = pyblish.api.CollectorOrder - 0.495
    label = "Collect batch data"
    host = ["webpublisher"]

    def process(self, context):
        batch_dir = os.environ.get("OPENPYPE_PUBLISH_DATA")

        assert batch_dir, (
            "Missing `OPENPYPE_PUBLISH_DATA`")

        assert os.path.exists(batch_dir), \
            "Folder {} doesn't exist".format(batch_dir)

        project_name = os.environ.get("AVALON_PROJECT")
        if project_name is None:
            raise AssertionError(
                "Environment `AVALON_PROJECT` was not found."
                "Could not set project `root` which may cause issues."
            )

        batch_data = parse_json(os.path.join(batch_dir, "manifest.json"))

        context.data["batchDir"] = batch_dir
        context.data["batchData"] = batch_data

        asset_name, task_name, task_type = get_batch_asset_task_info(
            batch_data["context"]
        )

        os.environ["AVALON_ASSET"] = asset_name
        io.Session["AVALON_ASSET"] = asset_name
        os.environ["AVALON_TASK"] = task_name
        io.Session["AVALON_TASK"] = task_name

        context.data["asset"] = asset_name
        context.data["task"] = task_name
        context.data["taskType"] = task_type

        self._set_ctx_path(batch_data)

    def _set_ctx_path(self, batch_data):
        dbcon = get_webpublish_conn()

        batch_id = batch_data["batch"]
        ctx_path = batch_data["context"]["path"]
        self.log.info("ctx_path: {}".format(ctx_path))
        self.log.info("batch_id: {}".format(batch_id))
        if ctx_path and batch_id:
            self.log.info("Updating log record")
            dbcon.update_one(
                {
                    "batch_id": batch_id,
                    "status": "in_progress"
                },
                {
                    "$set": {
                        "path": ctx_path
                    }
                }
            )

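For orientation, a minimal batch manifest consistent with the keys this
collector actually reads could look like the following sketch. Only "batch",
"context" and "context"/"path" are accessed above; every concrete value and
any extra field here is hypothetical, expressed as a Python dict:

    # hypothetical manifest.json content, shown as a Python dict
    batch_data = {
        "batch": "batch_id_123",  # id used to pair the webpublishes DB record
        "context": {
            "path": "project/assets/chair/modeling",  # ctx path stored to DB
            # fields consumed by get_batch_asset_task_info() also live here
        },
    }
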
@@ -20,9 +20,8 @@ class CollectFPS(pyblish.api.InstancePlugin):
     hosts = ["webpublisher"]

     def process(self, instance):
-        fps = instance.context.data["fps"]
+        instance_fps = instance.data.get("fps")
+        if instance_fps is None:
+            instance.data["fps"] = instance.context.data["fps"]

-        instance.data.update({
-            "fps": fps
-        })
         self.log.debug(f"instance.data: {pformat(instance.data)}")

@@ -1,21 +1,19 @@
-"""Loads publishing context from json and continues in publish process.
+"""Creates instances from batch data and continues in publish process.

 Requires:
-    anatomy -> context["anatomy"] *(pyblish.api.CollectorOrder - 0.11)
+    CollectBatchData

 Provides:
     context, instances -> All data from previous publishing process.
 """

 import os
-import json
 import clique
 import tempfile

-import pyblish.api
-from avalon import io
+import pyblish.api
 from openpype.lib import prepare_template_data
-from openpype.lib.plugin_tools import parse_json, get_batch_asset_task_info
+from openpype.lib.plugin_tools import parse_json


 class CollectPublishedFiles(pyblish.api.ContextPlugin):

@@ -28,28 +26,28 @@ class CollectPublishedFiles(pyblish.api.ContextPlugin):
     order = pyblish.api.CollectorOrder - 0.490
     label = "Collect rendered frames"
     host = ["webpublisher"]
-
-    _context = None
+    targets = ["filespublish"]

     # from Settings
     task_type_to_family = {}

-    def _process_batch(self, dir_url):
-        task_subfolders = [
-            os.path.join(dir_url, o)
-            for o in os.listdir(dir_url)
-            if os.path.isdir(os.path.join(dir_url, o))]
+    def process(self, context):
+        batch_dir = context.data["batchDir"]
+        task_subfolders = []
+        for folder_name in os.listdir(batch_dir):
+            full_path = os.path.join(batch_dir, folder_name)
+            if os.path.isdir(full_path):
+                task_subfolders.append(full_path)
+
         self.log.info("task_sub:: {}".format(task_subfolders))
+
+        asset_name = context.data["asset"]
+        task_name = context.data["task"]
+        task_type = context.data["taskType"]
         for task_dir in task_subfolders:
             task_data = parse_json(os.path.join(task_dir,
                                                 "manifest.json"))
             self.log.info("task_data:: {}".format(task_data))
             ctx = task_data["context"]
-
-            asset, task_name, task_type = get_batch_asset_task_info(ctx)
-
-            if task_name:
-                os.environ["AVALON_TASK"] = task_name

             is_sequence = len(task_data["files"]) > 1

@@ -60,26 +58,20 @@ class CollectPublishedFiles(pyblish.api.ContextPlugin):
                 is_sequence,
                 extension.replace(".", ''))

-            subset = self._get_subset_name(family, subset_template, task_name,
-                                           task_data["variant"])
+            subset = self._get_subset_name(
+                family, subset_template, task_name, task_data["variant"]
+            )
+            version = self._get_last_version(asset_name, subset) + 1

-            os.environ["AVALON_ASSET"] = asset
-            io.Session["AVALON_ASSET"] = asset
-
-            instance = self._context.create_instance(subset)
-            instance.data["asset"] = asset
+            instance = context.create_instance(subset)
+            instance.data["asset"] = asset_name
             instance.data["subset"] = subset
             instance.data["family"] = family
             instance.data["families"] = families
-            instance.data["version"] = \
-                self._get_last_version(asset, subset) + 1
+            instance.data["version"] = version
             instance.data["stagingDir"] = tempfile.mkdtemp()
             instance.data["source"] = "webpublisher"

             # to store logging info into DB openpype.webpublishes
             instance.data["ctx_path"] = ctx["path"]
             instance.data["batch_id"] = task_data["batch"]

             # to convert from email provided into Ftrack username
             instance.data["user_email"] = task_data["user"]

@@ -230,23 +222,3 @@ class CollectPublishedFiles(pyblish.api.ContextPlugin):
             return version[0].get("version") or 0
         else:
             return 0
-
-    def process(self, context):
-        self._context = context
-
-        batch_dir = os.environ.get("OPENPYPE_PUBLISH_DATA")
-
-        assert batch_dir, (
-            "Missing `OPENPYPE_PUBLISH_DATA`")
-
-        assert os.path.exists(batch_dir), \
-            "Folder {} doesn't exist".format(batch_dir)
-
-        project_name = os.environ.get("AVALON_PROJECT")
-        if project_name is None:
-            raise AssertionError(
-                "Environment `AVALON_PROJECT` was not found."
-                "Could not set project `root` which may cause issues."
-            )
-
-        self._process_batch(batch_dir)

@@ -1,38 +0,0 @@
import os

import pyblish.api
from openpype.lib import OpenPypeMongoConnection


class IntegrateContextToLog(pyblish.api.ContextPlugin):
    """Adds context information to log document for displaying in front end"""

    label = "Integrate Context to Log"
    order = pyblish.api.IntegratorOrder - 0.1
    hosts = ["webpublisher"]

    def process(self, context):
        self.log.info("Integrate Context to Log")

        mongo_client = OpenPypeMongoConnection.get_mongo_client()
        database_name = os.environ["OPENPYPE_DATABASE_NAME"]
        dbcon = mongo_client[database_name]["webpublishes"]

        for instance in context:
            self.log.info("ctx_path: {}".format(instance.data.get("ctx_path")))
            self.log.info("batch_id: {}".format(instance.data.get("batch_id")))
            if instance.data.get("ctx_path") and instance.data.get("batch_id"):
                self.log.info("Updating log record")
                dbcon.update_one(
                    {
                        "batch_id": instance.data.get("batch_id"),
                        "status": "in_progress"
                    },
                    {"$set":
                        {
                            "path": instance.data.get("ctx_path")
                        }}
                )

                return

@@ -20,11 +20,16 @@ log = PypeLogger.get_logger("WebServer")

 class RestApiResource:
     """Resource carrying needed info and Avalon DB connection for publish."""
-    def __init__(self, server_manager, executable, upload_dir):
+    def __init__(self, server_manager, executable, upload_dir,
+                 studio_task_queue=None):
         self.server_manager = server_manager
         self.upload_dir = upload_dir
         self.executable = executable

+        if studio_task_queue is None:
+            studio_task_queue = collections.deque()
+        self.studio_task_queue = studio_task_queue
+
         self.dbcon = AvalonMongoDB()
         self.dbcon.install()

@@ -176,71 +181,104 @@ class TaskNode(Node):
 class WebpublisherBatchPublishEndpoint(_RestApiEndpoint):
     """Triggers headless publishing of batch."""
     async def post(self, request) -> Response:
-        # for postprocessing in host, currently only PS
-        host_map = {"photoshop": [".psd", ".psb"]}
-        # Validate existence of openpype executable
-        openpype_app = self.resource.executable
-        if not openpype_app or not os.path.exists(openpype_app):
-            msg = "Non existent OpenPype executable {}".format(openpype_app)
-            raise RuntimeError(msg)
-
         output = {}
         log.info("WebpublisherBatchPublishEndpoint called")
         content = await request.json()

-        batch_path = os.path.join(self.resource.upload_dir,
-                                  content["batch"])
+        # Each filter has extensions which are checked on first task item
+        # - first filter with extensions that are on first task is used
+        # - filter defines command and can extend arguments dictionary
+        # This is used only if 'studio_processing' is enabled on batch
+        studio_processing_filters = [
+            # Photoshop filter
+            {
+                "extensions": [".psd", ".psb"],
+                "command": "remotepublishfromapp",
+                "arguments": {
+                    # Command 'remotepublishfromapp' requires --host argument
+                    "host": "photoshop",
+                    # Make sure targets are set to None for cases that default
+                    # would change
+                    # - targets argument is not used in 'remotepublishfromapp'
+                    "targets": None
+                },
+                # does publish need to be handled by a queue, eg. only
+                # single process running concurrently?
+                "add_to_queue": True
+            }
+        ]
+
+        batch_path = os.path.join(self.resource.upload_dir, content["batch"])
+
+        # Default command and arguments
+        command = "remotepublish"
         add_args = {
-            "host": "webpublisher",
+            # All commands need 'project' and 'user'
             "project": content["project_name"],
-            "user": content["user"]
+            "user": content["user"],
+
+            "targets": ["filespublish"]
         }

-        command = "remotepublish"
-
+        add_to_queue = False
         if content.get("studio_processing"):
             log.info("Post processing called")

             batch_data = parse_json(os.path.join(batch_path, "manifest.json"))
             if not batch_data:
                 raise ValueError(
-                    "Cannot parse batch meta in {} folder".format(batch_path))
+                    "Cannot parse batch manifest in {}".format(batch_path))
             task_dir_name = batch_data["tasks"][0]
             task_data = parse_json(os.path.join(batch_path, task_dir_name,
                                                 "manifest.json"))
             if not task_data:
                 raise ValueError(
-                    "Cannot parse batch meta in {} folder".format(task_data))
+                    "Cannot parse task manifest in {}".format(task_data))

-            command = "remotepublishfromapp"
-            for host, extensions in host_map.items():
-                for ext in extensions:
-                    for file_name in task_data["files"]:
-                        if ext in file_name:
-                            add_args["host"] = host
-                            break
+            for process_filter in studio_processing_filters:
+                filter_extensions = process_filter.get("extensions") or []
+                for file_name in task_data["files"]:
+                    file_ext = os.path.splitext(file_name)[-1].lower()
+                    if file_ext in filter_extensions:
+                        # Change command
+                        command = process_filter["command"]
+                        # Update arguments
+                        add_args.update(
+                            process_filter.get("arguments") or {}
+                        )
+                        add_to_queue = process_filter["add_to_queue"]
+                        break

             if not add_args.get("host"):
                 raise ValueError(
                     "Couldn't discern host from {}".format(task_data["files"]))

+        openpype_app = self.resource.executable
         args = [
             openpype_app,
             command,
             batch_path
         ]

+        if not openpype_app or not os.path.exists(openpype_app):
+            msg = "Non existent OpenPype executable {}".format(openpype_app)
+            raise RuntimeError(msg)
+
         for key, value in add_args.items():
-            args.append("--{}".format(key))
-            args.append(value)
+            # Skip key values where value is None
+            if value is not None:
+                args.append("--{}".format(key))
+                # Extend list into arguments (targets can be a list)
+                if isinstance(value, (tuple, list)):
+                    args.extend(value)
+                else:
+                    args.append(value)

         log.info("args:: {}".format(args))
+        if add_to_queue:
+            log.debug("Adding to queue")
+            self.resource.studio_task_queue.append(args)
+        else:
+            subprocess.call(args)
-        subprocess.call(args)

         return Response(
             status=200,
             body=self.resource.encode(output),
             content_type="application/json"
         )

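As a quick standalone illustration of the argument expansion in the loop
above (not part of the diff; the dict values are made up), a default argument
dict flattens into a CLI argument list with None values dropped and lists
spread out:

    # minimal sketch of the expansion loop
    add_args = {
        "project": "demo",            # hypothetical project name
        "user": "artist@example.com", # hypothetical user
        "targets": ["filespublish"],
        "host": None,                 # skipped entirely
    }
    args = ["openpype", "remotepublish", "/uploads/batch_xyz"]
    for key, value in add_args.items():
        if value is not None:
            args.append("--{}".format(key))
            if isinstance(value, (tuple, list)):
                args.extend(value)
            else:
                args.append(value)
    # args -> [..., "--project", "demo", "--user", "artist@example.com",
    #          "--targets", "filespublish"]
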
@@ -1,8 +1,10 @@
+import collections
 import time
 import os
 from datetime import datetime
 import requests
 import json
+import subprocess

 from openpype.lib import PypeLogger

@@ -31,10 +33,13 @@ def run_webserver(*args, **kwargs):
     port = kwargs.get("port") or 8079
     server_manager = webserver_module.create_new_server_manager(port, host)
     webserver_url = server_manager.url
+    # queue for remotepublishfromapp tasks
+    studio_task_queue = collections.deque()

     resource = RestApiResource(server_manager,
                                upload_dir=kwargs["upload_dir"],
-                               executable=kwargs["executable"])
+                               executable=kwargs["executable"],
+                               studio_task_queue=studio_task_queue)
     projects_endpoint = WebpublisherProjectsEndpoint(resource)
     server_manager.add_route(
         "GET",

@@ -88,6 +93,10 @@ def run_webserver(*args, **kwargs):
         if time.time() - last_reprocessed > 20:
             reprocess_failed(kwargs["upload_dir"], webserver_url)
             last_reprocessed = time.time()
+        if studio_task_queue:
+            args = studio_task_queue.popleft()
+            subprocess.call(args)  # blocking call
+
         time.sleep(1.0)

@@ -522,6 +522,11 @@ def get_local_site_id():

     Identifier is created if it does not exist yet.
     """
+    # override local id from environment
+    # used for background syncing
+    if os.environ.get("OPENPYPE_LOCAL_ID"):
+        return os.environ["OPENPYPE_LOCAL_ID"]
+
     registry = OpenPypeSettingsRegistry()
     try:
         return registry.get_item("localId")

@@ -2,6 +2,8 @@ import json
 import logging
 import os
 import re
+import abc
+import six

 from .anatomy import Anatomy

@@ -196,3 +198,159 @@ def get_project_basic_paths(project_name):
     if isinstance(folder_structure, str):
         folder_structure = json.loads(folder_structure)
     return _list_path_items(folder_structure)
+
+
+@six.add_metaclass(abc.ABCMeta)
+class HostDirmap:
+    """
+    Abstract class for running dirmap on a workfile in a host.
+
+    Dirmap is used to translate paths inside of host workfile from one
+    OS to another. (Eg. artist created workfile on Win, different artist
+    opens same file on Linux.)
+
+    Expects methods to be implemented inside of host:
+        on_enable_dirmap: run host code for enabling dirmap
+        dirmap_routine: run host code to do actual remapping
+    """
+    def __init__(self, host_name, project_settings, sync_module=None):
+        self.host_name = host_name
+        self.project_settings = project_settings
+        self.sync_module = sync_module  # to limit reinit of Modules
+
+        self._mapping = None  # cache mapping
+
+    @abc.abstractmethod
+    def on_enable_dirmap(self):
+        """
+        Run host dependent operation for enabling dirmap if necessary.
+        """
+
+    @abc.abstractmethod
+    def dirmap_routine(self, source_path, destination_path):
+        """
+        Run host dependent remapping from source_path to destination_path.
+        """
+
+    def process_dirmap(self):
+        # type: () -> None
+        """Go through all paths in Settings and set them using `dirmap`.
+
+        If artist has Site Sync enabled, take dirmap mapping directly from
+        Local Settings when artist is syncing workfile locally.
+        """
+        if not self._mapping:
+            self._mapping = self.get_mappings(self.project_settings)
+        if not self._mapping:
+            return
+
+        log.info("Processing directory mapping ...")
+        self.on_enable_dirmap()
+        log.info("mapping:: {}".format(self._mapping))
+
+        for k, sp in enumerate(self._mapping["source-path"]):
+            try:
+                print("{} -> {}".format(sp,
+                                        self._mapping["destination-path"][k]))
+                self.dirmap_routine(sp,
+                                    self._mapping["destination-path"][k])
+            except IndexError:
+                # missing corresponding destination path
+                log.error(("invalid dirmap mapping, missing corresponding"
+                           " destination directory."))
+                break
+            except RuntimeError:
+                log.error("invalid path {} -> {}, mapping not registered".format(  # noqa: E501
+                    sp, self._mapping["destination-path"][k]
+                ))
+                continue
+
+    def get_mappings(self, project_settings):
+        """Get translation from source-path to destination-path.
+
+        It checks if Site Sync is enabled and user chose to use local
+        site; in that case configuration in Local Settings takes precedence.
+        """
+        local_mapping = self._get_local_sync_dirmap(project_settings)
+        dirmap_label = "{}-dirmap".format(self.host_name)
+        if not self.project_settings[self.host_name].get(dirmap_label) and \
+                not local_mapping:
+            return []
+        mapping = local_mapping or \
+            self.project_settings[self.host_name][dirmap_label]["paths"] or {}
+        enabled = self.project_settings[self.host_name][dirmap_label]["enabled"]
+        mapping_enabled = enabled or bool(local_mapping)
+
+        if not mapping or not mapping_enabled or \
+                not mapping.get("destination-path") or \
+                not mapping.get("source-path"):
+            return []
+        return mapping
+
+    def _get_local_sync_dirmap(self, project_settings):
+        """
+        Returns dirmap if sync to local project is enabled.
+
+        Only valid mapping is from roots of remote site to local site set
+        in Local Settings.
+
+        Args:
+            project_settings (dict)
+        Returns:
+            dict : { "source-path": [XXX], "destination-path": [YYYY]}
+        """
+        mapping = {}
+
+        if not project_settings["global"]["sync_server"]["enabled"]:
+            log.debug("Site Sync not enabled")
+            return mapping
+
+        from openpype.settings.lib import get_site_local_overrides
+
+        if not self.sync_module:
+            from openpype.modules import ModulesManager
+            manager = ModulesManager()
+            self.sync_module = manager.modules_by_name["sync_server"]
+
+        project_name = os.getenv("AVALON_PROJECT")
+
+        active_site = self.sync_module.get_local_normalized_site(
+            self.sync_module.get_active_site(project_name))
+        remote_site = self.sync_module.get_local_normalized_site(
+            self.sync_module.get_remote_site(project_name))
+        log.debug("active {} - remote {}".format(active_site, remote_site))
+
+        if active_site == "local" \
+                and project_name in self.sync_module.get_enabled_projects()\
+                and active_site != remote_site:
+
+            sync_settings = self.sync_module.get_sync_project_setting(
+                os.getenv("AVALON_PROJECT"), exclude_locals=False,
+                cached=False)
+
+            active_overrides = get_site_local_overrides(
+                os.getenv("AVALON_PROJECT"), active_site)
+            remote_overrides = get_site_local_overrides(
+                os.getenv("AVALON_PROJECT"), remote_site)
+
+            log.debug("local overrides {}".format(active_overrides))
+            log.debug("remote overrides {}".format(remote_overrides))
+            for root_name, active_site_dir in active_overrides.items():
+                remote_site_dir = remote_overrides.get(root_name) or \
+                    sync_settings["sites"][remote_site]["root"][root_name]
+                if os.path.isdir(active_site_dir):
+                    if not mapping.get("destination-path"):
+                        mapping["destination-path"] = []
+                    mapping["destination-path"].append(active_site_dir)
+
+                    if not mapping.get("source-path"):
+                        mapping["source-path"] = []
+                    mapping["source-path"].append(remote_site_dir)
+
+        log.debug("local sync mapping:: {}".format(mapping))
+        return mapping

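A toy subclass (illustration only, not part of the diff) shows the contract
the base class expects: it pulls the mapping from "<host>-dirmap" in project
settings and drives the two host hooks. The host name "myhost" and the
hand-made settings dict below are assumptions for the sketch:

    from openpype.lib.path_tools import HostDirmap

    class LoggingDirmap(HostDirmap):
        def on_enable_dirmap(self):
            # a real host would switch its dirmap feature on here
            print("dirmap enabled")

        def dirmap_routine(self, source_path, destination_path):
            # a real host would register the remapping here
            print("{} -> {}".format(source_path, destination_path))

    fake_settings = {
        "global": {"sync_server": {"enabled": False}},
        "myhost": {
            "myhost-dirmap": {
                "enabled": True,
                "paths": {
                    "source-path": ["P:/projects"],
                    "destination-path": ["/mnt/projects"],
                },
            }
        },
    }
    # prints "dirmap enabled" then "P:/projects -> /mnt/projects"
    LoggingDirmap("myhost", fake_settings).process_dirmap()
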
@@ -22,6 +22,9 @@ def import_filepath(filepath, module_name=None):
     if module_name is None:
         module_name = os.path.splitext(os.path.basename(filepath))[0]

+    # Make sure it is not 'unicode' in Python 2
+    module_name = str(module_name)
+
     # Prepare module object where content of file will be parsed
     module = types.ModuleType(module_name)

@@ -22,6 +22,10 @@ OpenPype modules should contain separated logic of specific kind of implementation
 - `__init__` should not be overridden and `initialize` should not do time consuming part but only prepare base data about module
     - also keep in mind that they may be initialized in headless mode
 - connection with other modules is made with help of interfaces
+- `cli` method - adds cli commands specific for the module
+    - command line arguments are handled using `click` python module
+    - `cli` method should expect a single argument, which is the click group on which any group specific methods can be called (e.g. `add_command` to add another click group as children, see `ExampleAddon`)
+    - it is possible to trigger the module's cli commands using `./openpype_console module <module_name> <command> *args` (a minimal sketch follows below)

 ## Addon class `OpenPypeAddOn`
 - inherits from `OpenPypeModule` but is enabled by default and doesn't have to implement `initialize` and `connect_with_modules` methods

@@ -140,4 +144,4 @@ class ClockifyModule(
 ### TrayModulesManager
 - inherits from `ModulesManager`
-- has specific implementation for Pype Tray tool and handle `ITrayModule` methods
+- has specific implementation for Pype Tray tool and handles `ITrayModule` methods

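A minimal sketch of the `cli` hook described above. The addon name "example"
and the import of `OpenPypeAddOn` from `openpype.modules` are assumptions for
illustration; the pattern mirrors `FtrackModule.cli` further down in this
diff:

    import click
    from openpype.modules import OpenPypeAddOn

    class ExampleAddon(OpenPypeAddOn):
        name = "example"

        def cli(self, module_click_group):
            # hang this addon's click group under `openpype module`
            module_click_group.add_command(cli_main)

    @click.group(ExampleAddon.name, help="Example addon commands.")
    def cli_main():
        pass

    @cli_main.command()
    def hello():
        # invoked as: ./openpype_console module example hello
        print("hello from example addon")
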
@@ -107,12 +107,9 @@ class _InterfacesClass(_ModuleClass):
         if attr_name in ("__path__", "__file__"):
             return None

-        # Fake Interface if is not missing
-        self.__attributes__[attr_name] = type(
-            attr_name,
-            (MissingInteface, ),
-            {}
-        )
+        raise ImportError((
+            "cannot import name '{}' from 'openpype_interfaces'"
+        ).format(attr_name))

         return self.__attributes__[attr_name]

@@ -212,54 +209,17 @@ def _load_interfaces():
         _InterfacesClass(modules_key)
     )

-    log = PypeLogger.get_logger("InterfacesLoader")
+    from . import interfaces

-    dirpaths = get_module_dirs()
-
-    interface_paths = []
-    interface_paths.append(
-        os.path.join(get_default_modules_dir(), "interfaces.py")
-    )
-    for dirpath in dirpaths:
-        if not os.path.exists(dirpath):
-            continue
-
-        for filename in os.listdir(dirpath):
-            if filename in ("__pycache__", ):
-                continue
-
-            full_path = os.path.join(dirpath, filename)
-            if not os.path.isdir(full_path):
-                continue
-
-            interfaces_path = os.path.join(full_path, "interfaces.py")
-            if os.path.exists(interfaces_path):
-                interface_paths.append(interfaces_path)
-
-    for full_path in interface_paths:
-        if not os.path.exists(full_path):
-            continue
-
-        try:
-            # Prepare module object where content of file will be parsed
-            module = import_filepath(full_path)
-
-        except Exception:
-            log.warning(
-                "Failed to load path: \"{0}\"".format(full_path),
-                exc_info=True
-            )
-            continue
-
-        for attr_name in dir(module):
-            attr = getattr(module, attr_name)
-            if (
-                not inspect.isclass(attr)
-                or attr is OpenPypeInterface
-                or not issubclass(attr, OpenPypeInterface)
-            ):
-                continue
-            setattr(openpype_interfaces, attr_name, attr)
+    for attr_name in dir(interfaces):
+        attr = getattr(interfaces, attr_name)
+        if (
+            not inspect.isclass(attr)
+            or attr is OpenPypeInterface
+            or not issubclass(attr, OpenPypeInterface)
+        ):
+            continue
+        setattr(openpype_interfaces, attr_name, attr)


 def load_modules(force=False):

@@ -333,6 +293,15 @@ def _load_modules():
             # - check manifest and content of manifest
             try:
                 if os.path.isdir(fullpath):
+                    # Module without init file can't be used as OpenPype module
+                    # because the module class could not be imported
+                    init_file = os.path.join(fullpath, "__init__.py")
+                    if not os.path.exists(init_file):
+                        log.info((
+                            "Skipping module directory because of"
+                            " missing \"__init__.py\" file. \"{}\""
+                        ).format(fullpath))
+                        continue
                     import_module_from_dirpath(dirpath, filename, modules_key)

                 elif ext in (".py", ):

@@ -369,14 +338,6 @@ class OpenPypeInterface:
     pass


-class MissingInteface(OpenPypeInterface):
-    """Class representing missing interface class.
-
-    Used when interface is not available from currently registered paths.
-    """
-    pass
-
-
 @six.add_metaclass(ABCMeta)
 class OpenPypeModule:
     """Base class of pype module.

@@ -431,6 +392,28 @@ class OpenPypeModule:
         """
         return {}

+    def cli(self, module_click_group):
+        """Add commands to click group.
+
+        The best practice is to create a click group for the whole module,
+        which is used to separate commands.
+
+        class MyPlugin(OpenPypeModule):
+            ...
+            def cli(self, module_click_group):
+                module_click_group.add_command(cli_main)
+
+
+        @click.group(<module name>, help="<Any help shown in cmd>")
+        def cli_main():
+            pass
+
+        @cli_main.command()
+        def mycommand():
+            print("my_command")
+        """
+        pass
+

 class OpenPypeAddOn(OpenPypeModule):
     # Enable Addon by default

@@ -1,8 +1,10 @@
 import os
 import json
 import collections
-from openpype.modules import OpenPypeModule

+import click
+
+from openpype.modules import OpenPypeModule
 from openpype_interfaces import (
     ITrayModule,
     IPluginPaths,

@@ -409,3 +411,62 @@ class FtrackModule(
             return 0
         hours_logged = (task_entity["time_logged"] / 60) / 60
         return hours_logged
+
+    def get_credentials(self):
+        # type: () -> tuple
+        """Get local Ftrack credentials."""
+        from .lib import credentials
+
+        cred = credentials.get_credentials(self.ftrack_url)
+        return cred.get("username"), cred.get("api_key")
+
+    def cli(self, click_group):
+        click_group.add_command(cli_main)
+
+
+@click.group(FtrackModule.name, help="Ftrack module related commands.")
+def cli_main():
+    pass
+
+
+@cli_main.command()
+@click.option("-d", "--debug", is_flag=True, help="Print debug messages")
+@click.option("--ftrack-url", envvar="FTRACK_SERVER",
+              help="Ftrack server url")
+@click.option("--ftrack-user", envvar="FTRACK_API_USER",
+              help="Ftrack api user")
+@click.option("--ftrack-api-key", envvar="FTRACK_API_KEY",
+              help="Ftrack api key")
+@click.option("--legacy", is_flag=True,
+              help="run event server without mongo storing")
+@click.option("--clockify-api-key", envvar="CLOCKIFY_API_KEY",
+              help="Clockify API key.")
+@click.option("--clockify-workspace", envvar="CLOCKIFY_WORKSPACE",
+              help="Clockify workspace")
+def eventserver(
+    debug,
+    ftrack_url,
+    ftrack_user,
+    ftrack_api_key,
+    legacy,
+    clockify_api_key,
+    clockify_workspace
+):
+    """Launch ftrack event server.
+
+    This should ideally be run by a system service (such as systemd or
+    upstart on Linux, or a Windows service).
+    """
+    if debug:
+        os.environ["OPENPYPE_DEBUG"] = "3"
+
+    from .ftrack_server.event_server_cli import run_event_server
+
+    return run_event_server(
+        ftrack_url,
+        ftrack_user,
+        ftrack_api_key,
+        legacy,
+        clockify_api_key,
+        clockify_workspace
+    )

@@ -570,9 +570,15 @@ class BaseHandler(object):

         if low_entity_type == "assetversion":
             asset = entity["asset"]
-            parent = asset["parent"]
-            if parent:
+            parent = None
+            if asset:
+                parent = asset["parent"]
+
+            if parent:
                 if parent.entity_type.lower() == "project":
                     return parent

                 if "project" in parent:
                     return parent["project"]

         project_data = entity["link"][0]

@@ -0,0 +1,23 @@
# -*- coding: utf-8 -*-
"""Collect local Ftrack credentials."""
import pyblish.api
import os


class CollectLocalFtrackCreds(pyblish.api.ContextPlugin):
    """Collect local Ftrack credentials."""

    order = pyblish.api.CollectorOrder + 0.01
    label = "Collect local ftrack credentials"
    targets = ["rr_control"]

    def process(self, context):
        if os.getenv("FTRACK_API_USER") and os.getenv("FTRACK_API_KEY") and \
                os.getenv("FTRACK_SERVER"):
            return
        ftrack_module = context.data["openPypeModules"]["ftrack"]
        if ftrack_module.enabled:
            creds = ftrack_module.get_credentials()

            os.environ["FTRACK_API_USER"] = creds[0]
            os.environ["FTRACK_API_KEY"] = creds[1]
            os.environ["FTRACK_SERVER"] = ftrack_module.ftrack_url

@@ -0,0 +1,6 @@
from .royal_render_module import RoyalRenderModule


__all__ = (
    "RoyalRenderModule",
)

openpype/modules/default_modules/royal_render/api.py (new file, 199 lines)

@@ -0,0 +1,199 @@
# -*- coding: utf-8 -*-
"""Wrapper around Royal Render API."""
import sys
import os

from openpype.settings import get_project_settings
from openpype.lib.local_settings import OpenPypeSettingsRegistry
from openpype.lib import PypeLogger, run_subprocess
from .rr_job import RRJob, SubmitFile, SubmitterParameter


log = PypeLogger.get_logger("RoyalRender")


class Api:

    _settings = None
    RR_SUBMIT_CONSOLE = 1
    RR_SUBMIT_API = 2

    def __init__(self, settings, project=None):
        self._settings = settings
        self._initialize_rr(project)

    def _initialize_rr(self, project=None):
        # type: (str) -> None
        """Initialize RR Path.

        Args:
            project (str, Optional): Project name to set RR api in
                context.

        """
        if project:
            project_settings = get_project_settings(project)
            rr_path = (
                project_settings
                ["royalrender"]
                ["rr_paths"]
            )
        else:
            rr_path = (
                self._settings
                ["modules"]
                ["royalrender"]
                ["rr_path"]
                ["default"]
            )
        os.environ["RR_ROOT"] = rr_path
        self._rr_path = rr_path

    def _get_rr_bin_path(self, rr_root=None):
        # type: (str) -> str
        """Get path to RR bin folder."""
        rr_root = rr_root or self._rr_path
        is_64bit_python = sys.maxsize > 2 ** 32

        rr_bin_path = ""
        if sys.platform.lower() == "win32":
            rr_bin_path = "/bin/win64"
            if not is_64bit_python:
                # 32bit python needs the 32bit binaries
                rr_bin_path = "/bin/win"
            rr_bin_path = rr_bin_path.replace(
                "/", os.path.sep
            )

        if sys.platform.lower() == "darwin":
            rr_bin_path = "/bin/mac64"
            if not is_64bit_python:
                rr_bin_path = "/bin/mac"

        if sys.platform.lower() == "linux":
            rr_bin_path = "/bin/lx64"

        return os.path.join(rr_root, rr_bin_path)

    def _initialize_module_path(self):
        # type: () -> None
        """Set RR modules for Python."""
        # default for linux
        rr_bin = self._get_rr_bin_path()
        rr_module_path = os.path.join(rr_bin, "lx64/lib")

        if sys.platform.lower() == "win32":
            rr_module_path = rr_bin
            rr_module_path = rr_module_path.replace(
                "/", os.path.sep
            )

        if sys.platform.lower() == "darwin":
            rr_module_path = os.path.join(rr_bin, "lib/python/27")

        sys.path.append(os.path.join(self._rr_path, rr_module_path))

    def create_submission(self, jobs, submitter_attributes, file_name=None):
        # type: (list[RRJob], list[SubmitterParameter], str) -> SubmitFile
        """Create jobs submission file.

        Args:
            jobs (list): List of :class:`RRJob`
            submitter_attributes (list): List of submitter attributes
                :class:`SubmitterParameter` for whole submission batch.
            file_name (str, optional): File path to write data to.

        Returns:
            str: XML data of job submission files.

        """
        raise NotImplementedError

    def submit_file(self, file, mode=RR_SUBMIT_CONSOLE):
        # type: (SubmitFile, int) -> None
        if mode == self.RR_SUBMIT_CONSOLE:
            self._submit_using_console(file)
            return

        # RR v7 supports only Python 2.7 so we bail out in fear
        # until there is support for Python 3 😰
        raise NotImplementedError(
            "Submission via RoyalRender API is not supported yet")
        # self._submit_using_api(file)

    def _submit_using_console(self, file):
        # type: (SubmitFile) -> bool
        rr_console = os.path.join(
            self._get_rr_bin_path(),
            "rrSubmitterconsole"
        )

        if sys.platform.lower() == "darwin":
            if "/bin/mac64" in rr_console:
                rr_console = rr_console.replace("/bin/mac64", "/bin/mac")

        if sys.platform.lower() == "win32":
            if "/bin/win64" in rr_console:
                rr_console = rr_console.replace("/bin/win64", "/bin/win")
            rr_console += ".exe"

        args = [rr_console, file]
        run_subprocess(" ".join(args), logger=log)

    def _submit_using_api(self, file):
        # type: (SubmitFile) -> None
        """Use RR API to submit jobs.

        Args:
            file (SubmitFile): Submit jobs definition.

        Throws:
            RoyalRenderException: When something fails.

        """
        self._initialize_module_path()
        import libpyRR2 as rrLib  # noqa
        from rrJob import getClass_JobBasics  # noqa
        import libpyRR2 as _RenderAppBasic  # noqa

        tcp = rrLib._rrTCP("")  # noqa
        rr_server = tcp.getRRServer()

        if len(rr_server) == 0:
            log.info("Got RR IP address {}".format(rr_server))

        # TODO: Port is hardcoded in RR? If not, move it to Settings
        if not tcp.setServer(rr_server, 7773):
            log.error(
                "Can not set RR server: {}".format(tcp.errorMessage()))
            raise RoyalRenderException(tcp.errorMessage())

        # TODO: This need UI and better handling of username/password.
        # We can't store password in keychain as it is pulled multiple
        # times and users on linux must enter keychain password every time.
        # Probably best way until we setup our own user management would be
        # to encrypt password and save it to json locally. Not bulletproof
        # but at least it is not stored in plaintext.
        reg = OpenPypeSettingsRegistry()
        try:
            rr_user = reg.get_item("rr_username")
            rr_password = reg.get_item("rr_password")
        except ValueError:
            # user has no rr credentials set
            pass
        else:
            # login to RR
            tcp.setLogin(rr_user, rr_password)

        job = getClass_JobBasics()
        renderer = _RenderAppBasic()

        # iterate over SubmitFile, set _JobBasic (job) and renderer
        # and feed it to jobSubmitNew()
        # not implemented yet
        job.renderer = renderer
        tcp.jobSubmitNew(job)


class RoyalRenderException(Exception):
    """Exception used in various error states coming from RR."""
    pass

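A hedged usage sketch for the wrapper above. The settings dict shape follows
what `_initialize_rr` reads when no project is given; the paths and values
are made up for illustration:

    # hypothetical system settings carrying the default RR root
    system_settings = {
        "modules": {"royalrender": {"rr_path": {"default": "/opt/rr"}}}
    }
    rr_api = Api(system_settings)  # sets RR_ROOT to /opt/rr
    # Only console submission is wired up so far; a SubmitFile would come
    # from create_submission() once that is implemented:
    # rr_api.submit_file(submit_file, mode=Api.RR_SUBMIT_CONSOLE)
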
@@ -0,0 +1,23 @@
# -*- coding: utf-8 -*-
"""Collect default Royal Render path."""
import pyblish.api


class CollectDefaultRRPath(pyblish.api.ContextPlugin):
    """Collect default Royal Render path."""

    order = pyblish.api.CollectorOrder + 0.01
    label = "Default Royal Render Path"

    def process(self, context):
        try:
            rr_module = context.data.get(
                "openPypeModules")["royalrender"]
        except AttributeError:
            msg = "Cannot get OpenPype Royal Render module."
            self.log.error(msg)
            raise AssertionError(msg)

        # get default Royal Render path from the royalrender module
        self.log.debug(rr_module.rr_paths)
        context.data["defaultRRPath"] = rr_module.rr_paths["default"]  # noqa: E501

@@ -0,0 +1,49 @@
# -*- coding: utf-8 -*-
import pyblish.api


class CollectRRPathFromInstance(pyblish.api.InstancePlugin):
    """Collect RR Path from instance."""

    order = pyblish.api.CollectorOrder
    label = "Royal Render Path from the Instance"
    families = ["rendering"]

    def process(self, instance):
        instance.data["rrPath"] = self._collect_rr_path(instance)
        self.log.info(
            "Using {} for submission.".format(instance.data["rrPath"]))

    @staticmethod
    def _collect_rr_path(render_instance):
        # type: (pyblish.api.Instance) -> str
        """Get Royal Render path from render instance."""
        rr_settings = (
            render_instance.context.data
            ["system_settings"]
            ["modules"]
            ["royalrender"]
        )
        try:
            default_servers = rr_settings["rr_paths"]
            project_servers = (
                render_instance.context.data
                ["project_settings"]
                ["royalrender"]
                ["rr_paths"]
            )
            rr_servers = {
                k: default_servers[k]
                for k in project_servers
                if k in default_servers
            }

        except AttributeError:
            # Handle situation where we had only one path for Royal Render.
            return render_instance.context.data["defaultRRPath"]

        return rr_servers[
            list(rr_servers.keys())[
                int(render_instance.data.get("rrPaths"))
            ]
        ]

@ -0,0 +1,209 @@
# -*- coding: utf-8 -*-
"""Collect sequences from Royal Render Job."""
import os
import re
import copy
import json
from pprint import pformat

import pyblish.api
from avalon import api


def collect(root,
            regex=None,
            exclude_regex=None,
            frame_start=None,
            frame_end=None):
    """Collect sequence collections in root"""

    from avalon.vendor import clique

    files = []
    for filename in os.listdir(root):

        # Must have extension
        ext = os.path.splitext(filename)[1]
        if not ext:
            continue

        # Only files
        if not os.path.isfile(os.path.join(root, filename)):
            continue

        # Include and exclude regex
        if regex and not re.search(regex, filename):
            continue
        if exclude_regex and re.search(exclude_regex, filename):
            continue

        files.append(filename)

    # Match collections
    # Support filenames like: projectX_shot01_0010.tiff with this regex
    pattern = r"(?P<index>(?P<padding>0*)\d+)\.\D+\d?$"
    collections, remainder = clique.assemble(files,
                                             patterns=[pattern],
                                             minimum_items=1)

    # Ignore any remainders
    if remainder:
        print("Skipping remainder {}".format(remainder))

    # Exclude any frames outside start and end frame.
    for collection in collections:
        for index in list(collection.indexes):
            if frame_start is not None and index < frame_start:
                collection.indexes.discard(index)
                continue
            if frame_end is not None and index > frame_end:
                collection.indexes.discard(index)
                continue

    # Keep only collections that have at least a single frame
    collections = [c for c in collections if c.indexes]

    return collections


class CollectSequencesFromJob(pyblish.api.ContextPlugin):
    """Gather file sequences from job directory.

    When "OPENPYPE_PUBLISH_DATA" environment variable is set these paths
    (folders or .json files) are parsed for image sequences. Otherwise the
    current working directory is searched for file sequences.

    """
    order = pyblish.api.CollectorOrder
    targets = ["rr_control"]
    label = "Collect Rendered Frames"

    def process(self, context):
        if os.environ.get("OPENPYPE_PUBLISH_DATA"):
            self.log.debug(os.environ.get("OPENPYPE_PUBLISH_DATA"))
            paths = os.environ["OPENPYPE_PUBLISH_DATA"].split(os.pathsep)
            self.log.info("Collecting paths: {}".format(paths))
        else:
            # context.data is the dict, the context itself is list-like
            cwd = context.data.get("workspaceDir", os.getcwd())
            paths = [cwd]

        for path in paths:

            self.log.info("Loading: {}".format(path))

            if path.endswith(".json"):
                # Search using .json configuration
                with open(path, "r") as f:
                    try:
                        data = json.load(f)
                    except Exception as exc:
                        self.log.error("Error loading json: "
                                       "{} - Exception: {}".format(path, exc))
                        raise

                cwd = os.path.dirname(path)
                root_override = data.get("root")
                if root_override:
                    if os.path.isabs(root_override):
                        root = root_override
                    else:
                        root = os.path.join(cwd, root_override)
                else:
                    root = cwd

                metadata = data.get("metadata")
                if metadata:
                    session = metadata.get("session")
                    if session:
                        self.log.info("setting session using metadata")
                        api.Session.update(session)
                        os.environ.update(session)

            else:
                # Search in directory
                data = {}
                root = path

            self.log.info("Collecting: {}".format(root))
            regex = data.get("regex")
            if regex:
                self.log.info("Using regex: {}".format(regex))

            collections = collect(root=root,
                                  regex=regex,
                                  exclude_regex=data.get("exclude_regex"),
                                  frame_start=data.get("frameStart"),
                                  frame_end=data.get("frameEnd"))

            self.log.info("Found collections: {}".format(collections))

            if data.get("subset") and len(collections) > 1:
                self.log.error("Forced subset can only work with a single "
                               "found sequence")
                raise RuntimeError("Invalid sequence")

            fps = data.get("fps", 25)

            # Get family from the data
            families = data.get("families", ["render"])
            if "render" not in families:
                families.append("render")
            if "ftrack" not in families:
                families.append("ftrack")
            if "review" not in families:
                families.append("review")

            for collection in collections:
                instance = context.create_instance(str(collection))
                self.log.info("Collection: %s" % list(collection))

                # Ensure each instance gets a unique reference to the data
                data = copy.deepcopy(data)

                # If no subset provided, get it from collection's head
                subset = data.get("subset", collection.head.rstrip("_. "))

                # If no start or end frame provided, get it from collection
                indices = list(collection.indexes)
                start = data.get("frameStart", indices[0])
                end = data.get("frameEnd", indices[-1])

                # root = os.path.normpath(root)
                # self.log.info("Source: {}".format(data.get("source", "")))

                ext = list(collection)[0].split('.')[-1]

                instance.data.update({
                    "name": str(collection),
                    "family": families[0],  # backwards compatibility / pyblish
                    "families": list(families),
                    "subset": subset,
                    "asset": data.get("asset", api.Session["AVALON_ASSET"]),
                    "stagingDir": root,
                    "frameStart": start,
                    "frameEnd": end,
                    "fps": fps,
                    "source": data.get('source', '')
                })
                instance.append(collection)
                instance.context.data['fps'] = fps

                if "representations" not in instance.data:
                    instance.data["representations"] = []

                representation = {
                    'name': ext,
                    'ext': '{}'.format(ext),
                    'files': list(collection),
                    "stagingDir": root,
                    "anatomy_template": "render",
                    "fps": fps,
                    "tags": ['review']
                }
                instance.data["representations"].append(representation)

                if data.get('user'):
                    context.data["user"] = data['user']

                self.log.debug("Collected instance:\n"
                               "{}".format(pformat(instance.data)))

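For reference, a minimal sketch of how the `collect()` helper above could be exercised on a render output folder. The directory path, regex and frame range are illustrative only, and it assumes `clique` is importable the same way the plugin imports it:

# Hypothetical usage of collect(); path and frame range are made up.
collections = collect(
    root="/tmp/renders/shot010",   # assumed render output directory
    regex=r"beauty.*\.exr$",       # keep only the beauty pass
    frame_start=1001,
    frame_end=1100,
)
for col in collections:
    # e.g. "beauty.%04d.exr [1001-1100]"
    print(col, sorted(col.indexes)[:3], "...")
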
@ -0,0 +1,46 @@
# -*- coding: utf-8 -*-
"""Module providing support for Royal Render."""
import os
import openpype.modules
from openpype.modules import OpenPypeModule
from openpype_interfaces import IPluginPaths


class RoyalRenderModule(OpenPypeModule, IPluginPaths):
    """Class providing basic Royal Render implementation logic."""
    name = "royalrender"

    @property
    def api(self):
        if not self._api:
            # import royal render modules
            from . import api as rr_api
            self._api = rr_api.Api(self.settings)

        return self._api

    def __init__(self, manager, settings):
        # type: (openpype.modules.base.ModulesManager, dict) -> None
        self.rr_paths = {}
        self._api = None
        self.settings = settings
        super(RoyalRenderModule, self).__init__(manager, settings)

    def initialize(self, module_settings):
        # type: (dict) -> None
        rr_settings = module_settings[self.name]
        self.enabled = rr_settings["enabled"]
        self.rr_paths = rr_settings.get("rr_paths")

    @staticmethod
    def get_plugin_paths():
        # type: () -> dict
        """Royal Render plugin paths.

        Returns:
            dict: Dictionary of plugin paths for RR.
        """
        current_dir = os.path.dirname(os.path.abspath(__file__))
        return {
            "publish": [os.path.join(current_dir, "plugins", "publish")]
        }

256
openpype/modules/default_modules/royal_render/rr_job.py
Normal file

@ -0,0 +1,256 @@
# -*- coding: utf-8 -*-
"""Python wrapper for RoyalRender XML job file."""
from xml.dom import minidom as md
import attr
from collections import namedtuple, OrderedDict


CustomAttribute = namedtuple("CustomAttribute", ["name", "value"])


@attr.s
class RRJob:
    """Mapping of Royal Render job file to a data class."""

    # Required
    # --------

    # Name of your render application. Same as in the render config file.
    # (Maya, Softimage)
    Software = attr.ib()  # type: str

    # The OS the scene was created on; all texture paths are set on
    # that OS. Possible values are windows, linux, osx
    SceneOS = attr.ib()  # type: str

    # Renderer you use. Same as in the render config file
    # (VRay, Mental Ray, Arnold)
    Renderer = attr.ib()  # type: str

    # Version you want to render with. (5.11, 2010, 12)
    Version = attr.ib()  # type: str

    # Name of the scene file with full path.
    SceneName = attr.ib()  # type: str

    # Is the job enabled for submission?
    # enabled by default
    IsActive = attr.ib()  # type: str

    # Sequence settings of this job
    SeqStart = attr.ib()  # type: int
    SeqEnd = attr.ib()  # type: int
    SeqStep = attr.ib()  # type: int
    SeqFileOffset = attr.ib()  # type: int

    # If you specify ImageDir, then ImageFilename has no path. If you do
    # NOT specify ImageDir, then ImageFilename has to include the path.
    # Same for ImageExtension.
    # Important: Do not forget any _ or . in front of or after the frame
    # numbering. Usually ImageExtension always starts with a . (.tga, .exr)
    ImageDir = attr.ib()  # type: str
    ImageFilename = attr.ib()  # type: str
    ImageExtension = attr.ib()  # type: str

    # Some applications always add a . or _ in front of the frame number.
    # Set this variable to that character. The user can then change
    # the filename at the rrSubmitter and the submitter keeps
    # track of this character.
    ImagePreNumberLetter = attr.ib()  # type: str

    # If you render a single file, e.g. Quicktime or Avi, then you have to
    # set this value. Videos have to be rendered at once on one client.
    ImageSingleOutputFile = attr.ib(default="false")  # type: str

    # Semi-Required (required for some render applications)
    # -----------------------------------------------------

    # The database of your scene file. In Maya and XSI called "project",
    # in Lightwave "content dir"
    SceneDatabaseDir = attr.ib(default=None)  # type: str

    # Required if you want to split frames on multiple clients
    ImageWidth = attr.ib(default=None)  # type: int
    ImageHeight = attr.ib(default=None)  # type: int
    Camera = attr.ib(default=None)  # type: str
    Layer = attr.ib(default=None)  # type: str
    Channel = attr.ib(default=None)  # type: str

    # Optional
    # --------

    # Used for the RR render license function.
    # E.g. if you render with mentalRay, then add mentalRay. If you render
    # with Nuke and you use Furnace plugins in your comp, add Furnace.
    # TODO: determine how this works for multiple plugins
    RequiredPlugins = attr.ib(default=None)  # type: str

    # Frame padding of the frame number in the rendered filename.
    # Some render config files are setting the padding at render time.
    ImageFramePadding = attr.ib(default=None)  # type: str

    # Some render applications support overriding the image format at
    # the render commandline.
    OverrideImageFormat = attr.ib(default=None)  # type: str

    # rrControl can display the name of additional channels that are
    # rendered. Each channel requires these two values. ChannelFilename
    # contains the full path.
    ChannelFilename = attr.ib(default=None)  # type: str
    ChannelExtension = attr.ib(default=None)  # type: str

    # A value between 0 and 255. Each job gets the Pre ID attached as small
    # letter to the main ID. A new main ID is generated for every machine
    # for every 5/1000s.
    PreID = attr.ib(default=None)  # type: int

    # When the job is received by the server, the server checks for other
    # jobs sent from this machine. If a job with the PreID was found, then
    # this job waits for the other job. Note: This flag can be used multiple
    # times to wait for multiple jobs.
    WaitForPreID = attr.ib(default=None)  # type: int

    # List of submitter options per job
    # list item must be of `SubmitterParameter` type
    SubmitterParameters = attr.ib(factory=list)  # type: list

    # List of custom job attributes
    # Royal Render supports custom attributes in format <CustomFoo> or
    # <CustomSomeOtherAttr>
    # list item must be of `CustomAttribute` named tuple
    CustomAttributes = attr.ib(factory=list)  # type: list

    # Additional information for subsequent publish script and
    # for better display in rrControl
    UserName = attr.ib(default=None)  # type: str
    CustomSeQName = attr.ib(default=None)  # type: str
    CustomSHotName = attr.ib(default=None)  # type: str
    CustomVersionName = attr.ib(default=None)  # type: str
    CustomUserInfo = attr.ib(default=None)  # type: str
    SubmitMachine = attr.ib(default=None)  # type: str
    Color_ID = attr.ib(default=2)  # type: int

    RequiredLicenses = attr.ib(default=None)  # type: str

    # Additional frame info
    Priority = attr.ib(default=50)  # type: int
    TotalFrames = attr.ib(default=None)  # type: int
    Tiled = attr.ib(default=None)  # type: str


class SubmitterParameter:
    """Wrapper for Submitter Parameters."""
    def __init__(self, parameter, *args):
        # type: (str, list) -> None
        self._parameter = parameter
        self._values = args

    def serialize(self):
        # type: () -> str
        """Serialize submitter parameter as a string value.

        This can later be used as a text node in the job xml file.

        Returns:
            str: concatenated string of parameter values.

        """
        return '"{param}={val}"'.format(
            param=self._parameter, val="~".join(self._values))


@attr.s
class SubmitFile:
    """Class wrapping Royal Render submission XML file."""

    # Syntax version of the submission file.
    syntax_version = attr.ib(default="6.0")  # type: str

    # Delete submission file after processing
    DeleteXML = attr.ib(default=1)  # type: int

    # List of submitter options per job
    # list item must be of `SubmitterParameter` type
    SubmitterParameters = attr.ib(factory=list)  # type: list

    # List of jobs in submission batch.
    # list item must be of type `RRJob`
    Jobs = attr.ib(factory=list)  # type: list

    @staticmethod
    def _process_submitter_parameters(parameters, dom, append_to):
        # type: (list[SubmitterParameter], md.Document, md.Element) -> None
        """Take list of :class:`SubmitterParameter` and process it as XML.

        This will take :class:`SubmitterParameter`, create XML element
        for them and convert value to Royal Render compatible string
        (options and values separated by ~)

        Args:
            parameters (list of SubmitterParameter): List of parameters.
            dom (xml.dom.minidom.Document): XML Document
            append_to (xml.dom.minidom.Element): Element to append to.

        """
        for param in parameters:
            if not isinstance(param, SubmitterParameter):
                raise AttributeError(
                    "{} is not of type `SubmitterParameter`".format(param))
            xml_parameter = dom.createElement("SubmitterParameter")
            xml_parameter.appendChild(dom.createTextNode(param.serialize()))
            append_to.appendChild(xml_parameter)

    def serialize(self):
        # type: () -> str
        """Return all data serialized as XML.

        Returns:
            str: XML data as string.

        """
        def filter_data(a, v):
            """Skip private attributes."""
            if a.name.startswith("_"):
                return False
            if v is None:
                return False
            return True

        root = md.Document()
        # root element: <RR_Job_File syntax_version="6.0">
        job_file = root.createElement('RR_Job_File')
        job_file.setAttribute("syntax_version", self.syntax_version)
        # attach the root element to the document so it is serialized
        root.appendChild(job_file)

        # handle Submitter Parameters for batch
        # <SubmitterParameter>foo=bar~baz~goo</SubmitterParameter>
        self._process_submitter_parameters(
            self.SubmitterParameters, root, job_file)

        for job in self.Jobs:  # type: RRJob
            if not isinstance(job, RRJob):
                raise AttributeError(
                    "{} is not of type `RRJob`".format(job))
            xml_job = root.createElement("Job")
            # handle Submitter Parameters for job
            self._process_submitter_parameters(
                job.SubmitterParameters, root, xml_job
            )
            job_custom_attributes = job.CustomAttributes

            serialized_job = attr.asdict(
                job, dict_factory=OrderedDict, filter=filter_data)
            serialized_job.pop("CustomAttributes")
            serialized_job.pop("SubmitterParameters")

            for custom_attr in job_custom_attributes:  # type: CustomAttribute
                serialized_job["Custom{}".format(
                    custom_attr.name)] = custom_attr.value

            for item, value in serialized_job.items():
                xml_attr = root.createElement(item)
                xml_attr.appendChild(
                    # minidom text nodes must be strings
                    root.createTextNode(str(value))
                )
                xml_job.appendChild(xml_attr)
            # attach the job element so it ends up in the output
            job_file.appendChild(xml_job)

        return root.toprettyxml(indent="\t")

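To make the wrapper's intent concrete, a minimal, illustrative sketch of building and serializing a submission; every field value below is a placeholder, not a default RR expects:

# Hypothetical example; all values are placeholders.
job = RRJob(
    Software="Maya", SceneOS="windows", Renderer="Arnold",
    Version="2020", SceneName="C:/projects/shot010/scene.ma",
    IsActive="true", SeqStart=1001, SeqEnd=1100, SeqStep=1,
    SeqFileOffset=0, ImageDir="C:/renders/shot010",
    ImageFilename="beauty.", ImageExtension=".exr",
    ImagePreNumberLetter=".",
    CustomAttributes=[CustomAttribute("SHotName", "shot010")],
)
xml_text = SubmitFile(Jobs=[job]).serialize()
print(xml_text)  # <RR_Job_File syntax_version="6.0"> ... </RR_Job_File>
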
@ -0,0 +1,5 @@
## OpenPype RoyalRender integration plugins

### Installation

Copy the content of this folder to your `RR_ROOT` (the location of the studio-wide RoyalRender installation).

@ -0,0 +1,170 @@
# -*- coding: utf-8 -*-
"""This is an RR control plugin that runs on a job by user interaction.

It asks the user for the context to publish, getting it from OpenPype. In
order to run it needs `OPENPYPE_ROOT` to be set to know where to execute
OpenPype.

"""
import rr  # noqa
import rrGlobal  # noqa
import subprocess
import os
import glob
import platform
import tempfile
import json


class OpenPypeContextSelector:
    """Class to handle publishing context determination in RR."""

    def __init__(self):
        self.job = rr.getJob()
        self.context = None

        self.openpype_executable = "openpype_gui"
        if platform.system().lower() == "windows":
            self.openpype_executable = "{}.exe".format(
                self.openpype_executable)

        op_path = os.environ.get("OPENPYPE_ROOT")
        print("initializing ... {}".format(op_path))
        if not op_path:
            print("Warning: OpenPype root is not found.")

            if platform.system().lower() == "windows":
                print(" * trying to find OpenPype on local computer.")
                op_path = os.path.join(
                    os.environ.get("PROGRAMFILES"),
                    "OpenPype", "openpype_console.exe"
                )
                if os.path.exists(op_path):
                    print(" - found OpenPype installation {}".format(op_path))
                else:
                    # try to find in user local context
                    op_path = os.path.join(
                        os.environ.get("LOCALAPPDATA"),
                        "Programs",
                        "OpenPype", "openpype_console.exe"
                    )
                    if os.path.exists(op_path):
                        print(
                            " - found OpenPype installation {}".format(
                                op_path))
                    else:
                        raise Exception("Error: OpenPype was not found.")

        self.openpype_root = op_path

        # TODO: this should try to find the metadata file. Either using
        #       the job's custom attributes, an environment variable
        #       or just the plain existence of the file.
        # self.context = self._process_metadata_file()

    def _process_metadata_file(self):
        """Find and process metadata file.

        Try to find metadata json file in job folder to get context from.

        Returns:
            dict: Context from metadata json file.

        """
        image_dir = self.job.imageDir
        metadata_files = glob.glob(
            "{}{}*_metadata.json".format(image_dir, os.path.sep))
        if not metadata_files:
            return {}

        raise NotImplementedError(
            "Processing existing metadata not implemented yet.")

    def process_job(self):
        """Process selected job.

        This should process the selected job. If the context can be
        determined automatically, no UI will be shown and publishing will
        proceed directly.
        """
        if not self.context:
            self.show()

        self.context["user"] = self.job.userName
        self.run_publish()

    def show(self):
        """Show UI for context selection.

        Because of RR UI limitations, this must be done using OpenPype
        itself.

        """
        # NamedTemporaryFile so the file has a usable name after closing
        tf = tempfile.NamedTemporaryFile(delete=False)
        context_file = tf.name
        op_args = [os.path.join(self.openpype_root, self.openpype_executable),
                   "contextselection", tf.name]

        tf.close()
        print(">>> running {}".format(" ".join(op_args)))

        subprocess.call(op_args)

        with open(context_file, "r") as cf:
            self.context = json.load(cf)

        os.unlink(context_file)
        print(">>> context: {}".format(self.context))

        if not self.context or \
                not self.context.get("project") or \
                not self.context.get("asset") or \
                not self.context.get("task"):
            self._show_rr_warning("Context selection failed.")
            return

        # self.context["app_name"] = self.job.renderer.name
        self.context["app_name"] = "maya/2020"

    @staticmethod
    def _show_rr_warning(text):
        warning_dialog = rrGlobal.getGenericUI()
        warning_dialog.addItem(rrGlobal.genUIType.label, "infoLabel", "")
        warning_dialog.setText("infoLabel", text)
        warning_dialog.addItem(
            rrGlobal.genUIType.layoutH, "btnLayout", "")
        warning_dialog.addItem(
            rrGlobal.genUIType.closeButton, "Ok", "btnLayout")
        warning_dialog.execute()
        del warning_dialog

    def run_publish(self):
        """Run publish process."""
        env = {'AVALON_PROJECT': str(self.context.get("project")),
               "AVALON_ASSET": str(self.context.get("asset")),
               "AVALON_TASK": str(self.context.get("task")),
               "AVALON_APP_NAME": str(self.context.get("app_name"))}

        print(">>> setting environment:")
        for k, v in env.items():
            print("    {}: {}".format(k, v))

        args = [os.path.join(self.openpype_root, self.openpype_executable),
                'publish', '-t', "rr_control", "--gui",
                os.path.join(self.job.imageDir,
                             os.path.dirname(self.job.imageFileName))
                ]

        print(">>> running {}".format(" ".join(args)))
        orig = os.environ.copy()
        orig.update(env)
        try:
            # check_call (unlike call) raises CalledProcessError on
            # a non-zero exit code, so the except branch can trigger
            subprocess.check_call(args, env=orig)
        except subprocess.CalledProcessError as e:
            self._show_rr_warning(" Publish failed [ {} ]".format(
                e.returncode
            ))


print("running selector")
selector = OpenPypeContextSelector()
selector.process_job()

@ -1,30 +0,0 @@
from abc import abstractmethod
from openpype.modules import OpenPypeInterface


class ISettingsChangeListener(OpenPypeInterface):
    """Module has plugin paths to return.

    Expected result is dictionary with keys "publish", "create", "load" or
    "actions" and values as list or string.
    {
        "publish": ["path/to/publish_plugins"]
    }
    """
    @abstractmethod
    def on_system_settings_save(
        self, old_value, new_value, changes, new_value_metadata
    ):
        pass

    @abstractmethod
    def on_project_settings_save(
        self, old_value, new_value, changes, project_name, new_value_metadata
    ):
        pass

    @abstractmethod
    def on_project_anatomy_save(
        self, old_value, new_value, changes, project_name, new_value_metadata
    ):
        pass

@ -24,25 +24,19 @@ class DropboxHandler(AbstractProvider):
            )
            return

        provider_presets = self.presets.get(self.CODE)
        if not provider_presets:
            msg = "Sync Server: No provider presets for {}".format(self.CODE)
            log.info(msg)
            return

        token = self.presets[self.CODE].get("token", "")
        token = self.presets.get("token", "")
        if not token:
            msg = "Sync Server: No access token for dropbox provider"
            log.info(msg)
            return

        team_folder_name = self.presets[self.CODE].get("team_folder_name", "")
        team_folder_name = self.presets.get("team_folder_name", "")
        if not team_folder_name:
            msg = "Sync Server: No team folder name for dropbox provider"
            log.info(msg)
            return

        acting_as_member = self.presets[self.CODE].get("acting_as_member", "")
        acting_as_member = self.presets.get("acting_as_member", "")
        if not acting_as_member:
            msg = (
                "Sync Server: No acting member for dropbox provider"

@ -51,13 +45,15 @@ class DropboxHandler(AbstractProvider):
            return

        self.dbx = None
        try:
            self.dbx = self._get_service(
                token, acting_as_member, team_folder_name
            )
        except Exception as e:
            log.info("Could not establish dropbox object: {}".format(e))
            return

        if self.presets["enabled"]:
            try:
                self.dbx = self._get_service(
                    token, acting_as_member, team_folder_name
                )
            except Exception as e:
                log.info("Could not establish dropbox object: {}".format(e))
                return

        super(AbstractProvider, self).__init__()


@ -106,7 +102,7 @@ class DropboxHandler(AbstractProvider):
                "type": "dict-roots",
                "object_type": {
                    "type": "path",
                    "multiplatform": True,
                    "multiplatform": False,
                    "multipath": False
                }
            }

@ -169,7 +165,7 @@ class DropboxHandler(AbstractProvider):
        Returns:
            (boolean)
        """
        return self.dbx is not None
        return self.presets["enabled"] and self.dbx is not None

    @classmethod
    def get_configurable_items(cls):

@ -393,7 +389,7 @@ class DropboxHandler(AbstractProvider):
            {"root": {"root_ONE": "value", "root_TWO": "value"}}
        Format is important for usage of python's format ** approach
        """
        return self.presets['root']
        return self.presets['roots']

    def resolve_path(self, path, root_config=None, anatomy=None):
        """

@ -73,13 +73,7 @@ class GDriveHandler(AbstractProvider):
                     format(site_name))
            return

        provider_presets = self.presets.get(self.CODE)
        if not provider_presets:
            msg = "Sync Server: No provider presets for {}".format(self.CODE)
            log.info(msg)
            return

        cred_path = self.presets[self.CODE].get("credentials_url", {}).\
        cred_path = self.presets.get("credentials_url", {}).\
            get(platform.system().lower()) or ''
        if not os.path.exists(cred_path):
            msg = "Sync Server: No credentials for gdrive provider " + \

@ -87,10 +81,12 @@ class GDriveHandler(AbstractProvider):
            log.info(msg)
            return

        self.service = self._get_gd_service(cred_path)
        self.service = None
        if self.presets["enabled"]:
            self.service = self._get_gd_service(cred_path)

        self._tree = tree
        self.active = True
        self._tree = tree
        self.active = True

    def is_active(self):
        """

@ -98,7 +94,7 @@ class GDriveHandler(AbstractProvider):
        Returns:
            (boolean)
        """
        return self.service is not None
        return self.presets["enabled"] and self.service is not None

    @classmethod
    def get_system_settings_schema(cls):

@ -125,9 +121,11 @@ class GDriveHandler(AbstractProvider):
        editable = [
            # credentials could be overridden on Project or User level
            {
                'key': "credentials_url",
                'label': "Credentials url",
                'type': 'text'
                "type": "path",
                "key": "credentials_url",
                "label": "Credentials url",
                "multiplatform": True,
                "placeholder": "Credentials url"
            },
            # roots could be overridden only on Project level, User cannot
            {

@ -136,7 +134,7 @@ class GDriveHandler(AbstractProvider):
                "type": "dict-roots",
                "object_type": {
                    "type": "path",
                    "multiplatform": True,
                    "multiplatform": False,
                    "multipath": False
                }
            }

@ -176,7 +174,7 @@ class GDriveHandler(AbstractProvider):
        Format is important for usage of python's format ** approach
        """
        # GDrive roots cannot be locally overridden
        return self.presets['root']
        return self.presets['roots']

    def get_tree(self):
        """

@ -89,6 +89,7 @@ class LocalDriveHandler(AbstractProvider):
        if not os.path.isfile(source_path):
            raise FileNotFoundError("Source file {} doesn't exist."
                                    .format(source_path))

        if overwrite:
            thread = threading.Thread(target=self._copy,
                                      args=(source_path, target_path))

@ -181,7 +182,10 @@ class LocalDriveHandler(AbstractProvider):

    def _copy(self, source_path, target_path):
        print("copying {}->{}".format(source_path, target_path))
        shutil.copy(source_path, target_path)
        try:
            shutil.copy(source_path, target_path)
        except shutil.SameFileError:
            print("same files, skipping")

    def _mark_progress(self, collection, file, representation, server, site,
                       source_path, target_path, direction):

@ -1,8 +1,6 @@
import os
import os.path
import time
import sys
import six
import threading
import platform


@ -14,6 +12,7 @@ log = Logger().get_logger("SyncServer")
pysftp = None
try:
    import pysftp
    import paramiko
except (ImportError, SyntaxError):
    pass


@ -37,7 +36,6 @@ class SFTPHandler(AbstractProvider):

    def __init__(self, project_name, site_name, tree=None, presets=None):
        self.presets = None
        self.active = False
        self.project_name = project_name
        self.site_name = site_name
        self.root = None

@ -49,22 +47,15 @@ class SFTPHandler(AbstractProvider):
                     format(site_name))
            return

        provider_presets = self.presets.get(self.CODE)
        if not provider_presets:
            msg = "Sync Server: No provider presets for {}".format(self.CODE)
            log.warning(msg)
            return

        # store to instance for reconnect
        self.sftp_host = provider_presets["sftp_host"]
        self.sftp_port = provider_presets["sftp_port"]
        self.sftp_user = provider_presets["sftp_user"]
        self.sftp_pass = provider_presets["sftp_pass"]
        self.sftp_key = provider_presets["sftp_key"]
        self.sftp_key_pass = provider_presets["sftp_key_pass"]
        self.sftp_host = presets["sftp_host"]
        self.sftp_port = presets["sftp_port"]
        self.sftp_user = presets["sftp_user"]
        self.sftp_pass = presets["sftp_pass"]
        self.sftp_key = presets["sftp_key"]
        self.sftp_key_pass = presets["sftp_key_pass"]

        self._tree = None
        self.active = True

    @property
    def conn(self):

@ -80,7 +71,7 @@ class SFTPHandler(AbstractProvider):
        Returns:
            (boolean)
        """
        return self.conn is not None
        return self.presets["enabled"] and self.conn is not None

    @classmethod
    def get_system_settings_schema(cls):

@ -108,7 +99,7 @@ class SFTPHandler(AbstractProvider):
        editable = [
            # credentials could be overridden on Project or User level
            {
                'key': "sftp_server",
                'key': "sftp_host",
                'label': "SFTP host name",
                'type': 'text'
            },

@ -130,7 +121,8 @@ class SFTPHandler(AbstractProvider):
            {
                'key': "sftp_key",
                'label': "SFTP user ssh key",
                'type': 'path'
                'type': 'path',
                "multiplatform": True
            },
            {
                'key': "sftp_key_pass",

@ -144,7 +136,7 @@ class SFTPHandler(AbstractProvider):
                "type": "dict-roots",
                "object_type": {
                    "type": "path",
                    "multiplatform": True,
                    "multiplatform": False,
                    "multipath": False
                }
            }

@ -176,7 +168,8 @@ class SFTPHandler(AbstractProvider):
            {
                'key': "sftp_key",
                'label': "SFTP user ssh key",
                'type': 'path'
                'type': 'path',
                "multiplatform": True
            },
            {
                'key': "sftp_key_pass",

@ -199,7 +192,7 @@ class SFTPHandler(AbstractProvider):
        Format is important for usage of python's format ** approach
        """
        # roots cannot be locally overridden
        return self.presets['root']
        return self.presets['roots']

    def get_tree(self):
        """

@ -426,7 +419,10 @@ class SFTPHandler(AbstractProvider):
        if self.sftp_key_pass:
            conn_params['private_key_pass'] = self.sftp_key_pass

        return pysftp.Connection(**conn_params)
        try:
            return pysftp.Connection(**conn_params)
        except paramiko.ssh_exception.SSHException:
            log.warning("Couldn't connect", exc_info=True)

    def _mark_progress(self, collection, file, representation, server, site,
                       source_path, target_path, direction):

@ -80,6 +80,10 @@ async def upload(module, collection, file, representation, provider_name,
        remote_site_name,
        True
    )

    module.handle_alternate_site(collection, representation, remote_site_name,
                                 file["_id"], file_id)

    return file_id


@ -131,6 +135,10 @@ async def download(module, collection, file, representation, provider_name,
        local_site,
        True
    )

    module.handle_alternate_site(collection, representation, local_site,
                                 file["_id"], file_id)

    return file_id


@ -246,6 +254,7 @@ class SyncServerThread(threading.Thread):

            asyncio.ensure_future(self.check_shutdown(), loop=self.loop)
            asyncio.ensure_future(self.sync_loop(), loop=self.loop)
            log.info("Sync Server Started")
            self.loop.run_forever()
        except Exception:
            log.warning(

@ -109,6 +109,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule):

        # some parts of code need to run sequentially, not in async
        self.lock = None
        self._sync_system_settings = None
        # settings for all enabled projects for sync
        self._sync_project_settings = None
        self.sync_server_thread = None  # asyncio requires new thread

@ -152,9 +153,9 @@ class SyncServerModule(OpenPypeModule, ITrayModule):
        if not site_name:
            site_name = self.DEFAULT_SITE

        self.reset_provider_for_file(collection,
                                     representation_id,
                                     site_name=site_name, force=force)
        self.reset_site_on_representation(collection,
                                          representation_id,
                                          site_name=site_name, force=force)

    # public facing API
    def remove_site(self, collection, representation_id, site_name,

@ -176,10 +177,10 @@ class SyncServerModule(OpenPypeModule, ITrayModule):
        if not self.get_sync_project_setting(collection):
            raise ValueError("Project not configured")

        self.reset_provider_for_file(collection,
                                     representation_id,
                                     site_name=site_name,
                                     remove=True)
        self.reset_site_on_representation(collection,
                                          representation_id,
                                          site_name=site_name,
                                          remove=True)
        if remove_local_files:
            self._remove_local_file(collection, representation_id, site_name)


@ -314,8 +315,8 @@ class SyncServerModule(OpenPypeModule, ITrayModule):
        """
        log.info("Pausing SyncServer for {}".format(representation_id))
        self._paused_representations.add(representation_id)
        self.reset_provider_for_file(collection, representation_id,
                                     site_name=site_name, pause=True)
        self.reset_site_on_representation(collection, representation_id,
                                          site_name=site_name, pause=True)

    def unpause_representation(self, collection, representation_id, site_name):
        """

@ -334,8 +335,8 @@ class SyncServerModule(OpenPypeModule, ITrayModule):
        except KeyError:
            pass
        # self.paused_representations is not persistent
        self.reset_provider_for_file(collection, representation_id,
                                     site_name=site_name, pause=False)
        self.reset_site_on_representation(collection, representation_id,
                                          site_name=site_name, pause=False)

    def is_representation_paused(self, representation_id,
                                 check_parents=False, project_name=None):

@ -769,6 +770,58 @@ class SyncServerModule(OpenPypeModule, ITrayModule):
            enabled_projects.append(project_name)

        return enabled_projects

    def handle_alternate_site(self, collection, representation, processed_site,
                              file_id, synced_file_id):
        """
        For special use cases where one site vendors another.

        Current use case is an sftp site vendoring (exposing) the same data
        as a regular site (studio). Each site is accessible to a different
        audience: 'studio' for artists in a studio, 'sftp' for externals.

        A change of file status on one site actually means the same change
        on the 'alternate' site. (Eg. artists publish to 'studio', 'sftp'
        uses the same location, so the file is accessible on the 'sftp'
        site right away.)

        Args:
            collection (str): name of project
            representation (dict)
            processed_site (str): real site_name of published/uploaded file
            file_id (ObjectId): DB id of file handled
            synced_file_id (str): id of the created file returned
                by provider
        """
        sites = self.sync_system_settings.get("sites", {})
        sites[self.DEFAULT_SITE] = {"provider": "local_drive",
                                    "alternative_sites": []}

        alternate_sites = []
        for site_name, site_info in sites.items():
            conf_alternative_sites = site_info.get("alternative_sites", [])
            if processed_site in conf_alternative_sites:
                alternate_sites.append(site_name)
                continue
            if processed_site == site_name and conf_alternative_sites:
                alternate_sites.extend(conf_alternative_sites)
                continue

        alternate_sites = set(alternate_sites)

        for alt_site in alternate_sites:
            query = {
                "_id": representation["_id"]
            }
            elem = {"name": alt_site,
                    "created_dt": datetime.now(),
                    "id": synced_file_id}

            self.log.debug("Adding alternate {} to {}".format(
                alt_site, representation["_id"]))
            self._add_site(collection, query,
                           [representation], elem,
                           alt_site, file_id=file_id, force=True)

    """ End of Public API """

    def get_local_file_path(self, collection, site_name, file_path):

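To illustrate the resolution rule in `handle_alternate_site` above, a standalone sketch with an invented sites config; the site names and config shape are assumptions for the example only:

# Invented config mirroring the "alternative_sites" shape used above.
sites = {
    "studio": {"provider": "local_drive", "alternative_sites": ["sftp"]},
    "sftp": {"provider": "sftp", "alternative_sites": []},
}

def resolve_alternates(processed_site):
    alternates = []
    for name, info in sites.items():
        conf = info.get("alternative_sites", [])
        if processed_site in conf:
            alternates.append(name)      # this site vendors the processed one
        elif processed_site == name and conf:
            alternates.extend(conf)      # the processed site vendors others
    return set(alternates)

print(resolve_alternates("studio"))  # {'sftp'}
print(resolve_alternates("sftp"))    # {'studio'}
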
@ -799,12 +852,19 @@ class SyncServerModule(OpenPypeModule, ITrayModule):

    def tray_init(self):
        """
        Actual initialization of Sync Server.
        Actual initialization of Sync Server for Tray.

        Called when tray is initialized, it checks if module should be
        enabled. If not, no initialization necessary.
        """
        # import only in tray, because of Python2 hosts
        self.server_init()

        from .tray.app import SyncServerWindow
        self.widget = SyncServerWindow(self)

    def server_init(self):
        """Actual initialization of Sync Server."""
        # import only in tray or Python3, because of Python2 hosts
        from .sync_server import SyncServerThread

        if not self.enabled:

@ -816,6 +876,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule):
            return

        self.lock = threading.Lock()

        self.sync_server_thread = SyncServerThread(self)

    def tray_start(self):

@ -829,6 +890,9 @@ class SyncServerModule(OpenPypeModule, ITrayModule):
        Returns:
            None
        """
        self.server_start()

    def server_start(self):
        if self.sync_project_settings and self.enabled:
            self.sync_server_thread.start()
        else:

@ -841,6 +905,9 @@ class SyncServerModule(OpenPypeModule, ITrayModule):

        Called from Module Manager
        """
        self.server_exit()

    def server_exit(self):
        if not self.sync_server_thread:
            return


@ -850,6 +917,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule):
            log.info("Stopping sync server")
            self.sync_server_thread.is_running = False
            self.sync_server_thread.stop()
            log.info("Sync server stopped")
        except Exception:
            log.warning(
                "Error happened while stopping sync server",

@ -892,6 +960,14 @@ class SyncServerModule(OpenPypeModule, ITrayModule):

        return self._connection

    @property
    def sync_system_settings(self):
        if self._sync_system_settings is None:
            self._sync_system_settings = get_system_settings()["modules"].\
                get("sync_server")

        return self._sync_system_settings

    @property
    def sync_project_settings(self):
        if self._sync_project_settings is None:

@ -977,9 +1053,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule):
            (dict): {'studio': {'provider':'local_drive'...},
                     'MY_LOCAL': {'provider':....}}
        """
        sys_sett = get_system_settings()
        sync_sett = sys_sett["modules"].get("sync_server")

        sync_sett = self.sync_system_settings
        project_enabled = True
        if project_name:
            project_enabled = project_name in self.get_enabled_projects()

@ -1037,10 +1111,9 @@ class SyncServerModule(OpenPypeModule, ITrayModule):
        if provider:
            return provider

        sys_sett = get_system_settings()
        sync_sett = sys_sett["modules"].get("sync_server")
        for site, detail in sync_sett.get("sites", {}).items():
            sites[site] = detail.get("provider")
        sync_sett = self.sync_system_settings
        for conf_site, detail in sync_sett.get("sites", {}).items():
            sites[conf_site] = detail.get("provider")

        return sites.get(site, 'N/A')


@ -1319,9 +1392,9 @@ class SyncServerModule(OpenPypeModule, ITrayModule):

        return -1, None

    def reset_provider_for_file(self, collection, representation_id,
                                side=None, file_id=None, site_name=None,
                                remove=False, pause=None, force=False):
    def reset_site_on_representation(self, collection, representation_id,
                                     side=None, file_id=None, site_name=None,
                                     remove=False, pause=None, force=False):
        """
        Reset information about synchronization for particular 'file_id'
        and provider.

@ -1407,9 +1480,12 @@ class SyncServerModule(OpenPypeModule, ITrayModule):
        update = {
            "$set": {"files.$[f].sites.$[s]": elem}
        }
        if not isinstance(file_id, ObjectId):
            file_id = ObjectId(file_id)

        arr_filter = [
            {'s.name': site_name},
            {'f._id': ObjectId(file_id)}
            {'f._id': file_id}
        ]

        self._update_site(collection, query, update, arr_filter)

@ -154,7 +154,7 @@ class SyncProjectListWidget(QtWidgets.QWidget):
                selected_index.isValid() and \
                not self._selection_changed:
            mode = QtCore.QItemSelectionModel.Select | \
                   QtCore.QItemSelectionModel.Rows
                QtCore.QItemSelectionModel.Rows
            self.project_list.selectionModel().select(selected_index, mode)

        if self.current_project:

@ -489,7 +489,7 @@ class _SyncRepresentationWidget(QtWidgets.QWidget):
                               format(check_progress))
                continue

            self.sync_server.reset_provider_for_file(
            self.sync_server.reset_site_on_representation(
                self.model.project,
                representation_id,
                site_name=site_name,

@ -872,7 +872,7 @@ class SyncRepresentationDetailWidget(_SyncRepresentationWidget):
                               format(check_progress))
                continue

            self.sync_server.reset_provider_for_file(
            self.sync_server.reset_site_on_representation(
                self.model.project,
                self.representation_id,
                site_name=site_name,

@ -1,10 +1,7 @@
import os
import platform
from openpype.modules import OpenPypeModule
from openpype_interfaces import (
    ITimersManager,
    ITrayService
)
from openpype_interfaces import ITrayService
from avalon.api import AvalonMongoDB


@ -8,14 +8,15 @@ in global space here until are required or used.
"""

import os
import click

from openpype.modules import (
    JsonFilesSettingsDef,
    OpenPypeAddOn
    OpenPypeAddOn,
    ModulesManager
)
# Import interface defined by this addon to be able find other addons using it
from openpype_interfaces import (
    IExampleInterface,
    IPluginPaths,
    ITrayAction
)

@ -75,19 +76,6 @@ class ExampleAddon(OpenPypeAddOn, IPluginPaths, ITrayAction):

        self._create_dialog()

    def connect_with_modules(self, enabled_modules):
        """Method where you should find connected modules.

        It is triggered by OpenPype modules manager at the best possible time.
        Some addons and modules may required to connect with other modules
        before their main logic is executed so changes would require to restart
        whole process.
        """
        self._connected_modules = []
        for module in enabled_modules:
            if isinstance(module, IExampleInterface):
                self._connected_modules.append(module)

    def _create_dialog(self):
        # Don't recreate dialog if already exists
        if self._dialog is not None:

@ -106,8 +94,6 @@ class ExampleAddon(OpenPypeAddOn, IPluginPaths, ITrayAction):
        """
        # Make sure dialog is created
        self._create_dialog()
        # Change value of dialog by current state
        self._dialog.set_connected_modules(self.get_connected_modules())
        # Show dialog
        self._dialog.open()


@ -130,3 +116,32 @@ class ExampleAddon(OpenPypeAddOn, IPluginPaths, ITrayAction):
        return {
            "publish": [os.path.join(current_dir, "plugins", "publish")]
        }

    def cli(self, click_group):
        click_group.add_command(cli_main)


@click.group(ExampleAddon.name, help="Example addon dynamic cli commands.")
def cli_main():
    pass


@cli_main.command()
def nothing():
    """Does nothing but print a message."""
    print("You've triggered \"nothing\" command.")


@cli_main.command()
def show_dialog():
    """Show ExampleAddon dialog.

    We don't have access to the addon directly through cli so we have to
    create it again.
    """
    from openpype.tools.utils.lib import qt_app_context

    manager = ModulesManager()
    example_addon = manager.modules_by_name[ExampleAddon.name]
    with qt_app_context():
        example_addon.show_dialog()

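A quick way to sanity-check dynamically registered commands like the ones above, without launching the full application, is click's test runner; this is an illustrative sketch only and assumes the example addon's `cli_main` group is importable:

# Illustrative only: exercise the addon's click group in isolation.
from click.testing import CliRunner

runner = CliRunner()
result = runner.invoke(cli_main, ["nothing"])
print(result.output)     # You've triggered "nothing" command.
print(result.exit_code)  # 0 on success
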
@ -1,28 +0,0 @@
""" Using interfaces is one way of connecting multiple OpenPype Addons/Modules.

Interfaces must be in `interfaces.py` file (or folder). Interfaces should not
import module logic or other module in global namespace. That is because
all of them must be imported before all OpenPype AddOns and Modules.

Ideally they should just define abstract and helper methods. If interface
require any logic or connection it should be defined in module.

Keep in mind that attributes and methods will be added to other addon
attributes and methods so they should be unique and ideally contain
addon name in it's name.
"""

from abc import abstractmethod
from openpype.modules import OpenPypeInterface


class IExampleInterface(OpenPypeInterface):
    """Example interface of addon."""
    _example_module = None

    def get_example_module(self):
        return self._example_module

    @abstractmethod
    def example_method_of_example_interface(self):
        pass

@ -9,7 +9,8 @@ class MyExampleDialog(QtWidgets.QDialog):

        self.setWindowTitle("Connected modules")

        label_widget = QtWidgets.QLabel(self)
        msg = "This is example dialog of example addon."
        label_widget = QtWidgets.QLabel(msg, self)

        ok_btn = QtWidgets.QPushButton("OK", self)
        btns_layout = QtWidgets.QHBoxLayout()

@ -28,12 +29,3 @@ class MyExampleDialog(QtWidgets.QDialog):

    def _on_ok_clicked(self):
        self.done(1)

    def set_connected_modules(self, connected_modules):
        if connected_modules:
            message = "\n".join(connected_modules)
        else:
            message = (
                "Other enabled modules/addons are not using my interface."
            )
        self._label_widget.setText(message)

@ -263,3 +263,31 @@ class ITrayService(ITrayModule):
        """Change icon of an QAction to orange circle."""
        if self.menu_action:
            self.menu_action.setIcon(self.get_icon_idle())


class ISettingsChangeListener(OpenPypeInterface):
    """Module wants to be notified about settings changes.

    Implementing module gets callbacks when system settings, project
    settings or project anatomy are saved.
    """
    @abstractmethod
    def on_system_settings_save(
        self, old_value, new_value, changes, new_value_metadata
    ):
        pass

    @abstractmethod
    def on_project_settings_save(
        self, old_value, new_value, changes, project_name, new_value_metadata
    ):
        pass

    @abstractmethod
    def on_project_anatomy_save(
        self, old_value, new_value, changes, project_name, new_value_metadata
    ):
        pass

@ -38,6 +38,8 @@ class CollectAnatomyInstanceData(pyblish.api.ContextPlugin):
    order = pyblish.api.CollectorOrder + 0.49
    label = "Collect Anatomy Instance data"

    follow_workfile_version = False

    def process(self, context):
        self.log.info("Collecting anatomy data for all instances.")


@ -213,7 +215,10 @@ class CollectAnatomyInstanceData(pyblish.api.ContextPlugin):
        context_asset_doc = context.data["assetEntity"]

        for instance in context:
            version_number = instance.data.get("version")
            if self.follow_workfile_version:
                # context.data is a dict, not callable
                version_number = context.data.get("version")
            else:
                version_number = instance.data.get("version")
            # If version is not specified for instance or context
            if version_number is None:
                # TODO we should be able to change default version by studio

@ -649,6 +649,8 @@ class ExtractReview(pyblish.api.InstancePlugin):
            AssertionError: if more than one collection is obtained.

        """
        start_frame = int(start_frame)
        end_frame = int(end_frame)
        collections = clique.assemble(files)[0]
        assert len(collections) == 1, "Multiple collections found."
        col = collections[0]

@ -1029,29 +1029,8 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
|
|||
"""
|
||||
local_site = 'studio' # default
|
||||
remote_site = None
|
||||
sync_server_presets = None
|
||||
|
||||
if (instance.context.data["system_settings"]
|
||||
["modules"]
|
||||
["sync_server"]
|
||||
["enabled"]):
|
||||
sync_server_presets = (instance.context.data["project_settings"]
|
||||
["global"]
|
||||
["sync_server"])
|
||||
|
||||
local_site_id = openpype.api.get_local_site_id()
|
||||
if sync_server_presets["enabled"]:
|
||||
local_site = sync_server_presets["config"].\
|
||||
get("active_site", "studio").strip()
|
||||
        if local_site == 'local':
            local_site = local_site_id

        remote_site = sync_server_presets["config"].get("remote_site")
        if remote_site == local_site:
            remote_site = None

        if remote_site == 'local':
            remote_site = local_site_id
        always_accesible = []
        sync_project_presets = None

        rec = {
            "_id": io.ObjectId(),

@@ -1066,12 +1045,93 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
        if sites:
            rec["sites"] = sites
        else:
            system_sync_server_presets = (
                instance.context.data["system_settings"]
                                     ["modules"]
                                     ["sync_server"])
            log.debug("system_sett:: {}".format(system_sync_server_presets))

            if system_sync_server_presets["enabled"]:
                sync_project_presets = (
                    instance.context.data["project_settings"]
                                         ["global"]
                                         ["sync_server"])

            if sync_project_presets and sync_project_presets["enabled"]:
                local_site, remote_site = self._get_sites(sync_project_presets)

                always_accesible = sync_project_presets["config"]. \
                    get("always_accessible_on", [])

            already_attached_sites = {}
            meta = {"name": local_site, "created_dt": datetime.now()}
            rec["sites"] = [meta]
            already_attached_sites[meta["name"]] = meta["created_dt"]

            if remote_site:
                if sync_project_presets and sync_project_presets["enabled"]:
                    # add remote
                    meta = {"name": remote_site.strip()}
                    rec["sites"].append(meta)
                    already_attached_sites[meta["name"]] = None

                    # add skeleton for site where it should be always synced to
                    for always_on_site in always_accesible:
                        if always_on_site not in already_attached_sites.keys():
                            meta = {"name": always_on_site.strip()}
                            rec["sites"].append(meta)
                            already_attached_sites[meta["name"]] = None

                    # add alternative sites
                    rec = self._add_alternative_sites(system_sync_server_presets,
                                                      already_attached_sites,
                                                      rec)

        log.debug("final sites:: {}".format(rec["sites"]))

        return rec

    def _get_sites(self, sync_project_presets):
        """Returns tuple (local_site, remote_site)"""
        local_site_id = openpype.api.get_local_site_id()
        local_site = sync_project_presets["config"]. \
            get("active_site", "studio").strip()

        if local_site == 'local':
            local_site = local_site_id

        remote_site = sync_project_presets["config"].get("remote_site")
        if remote_site == local_site:
            remote_site = None

        if remote_site == 'local':
            remote_site = local_site_id

        return local_site, remote_site

    def _add_alternative_sites(self,
                               system_sync_server_presets,
                               already_attached_sites,
                               rec):
        """Loop through all configured sites and add alternatives.

        See SyncServerModule.handle_alternate_site
        """
        conf_sites = system_sync_server_presets.get("sites", {})

        for site_name, site_info in conf_sites.items():
            alt_sites = set(site_info.get("alternative_sites", []))
            already_attached_keys = list(already_attached_sites.keys())
            for added_site in already_attached_keys:
                if added_site in alt_sites:
                    if site_name in already_attached_keys:
                        continue
                    meta = {"name": site_name}
                    real_created = already_attached_sites[added_site]
                    # alt site inherits state of 'created_dt'
                    if real_created:
                        meta["created_dt"] = real_created
                    rec["sites"].append(meta)
                    already_attached_sites[meta["name"]] = real_created

        return rec
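For orientation, here is a minimal standalone sketch (hypothetical site names and data, no pyblish or database involved) of what `_add_alternative_sites` does to a representation record: an alternative site inherits the `created_dt` of the site it mirrors, so it is immediately treated as already synced.

```python
from datetime import datetime

# Hypothetical system-settings sites: "sftp" exposes the same data as "studio".
conf_sites = {"sftp": {"alternative_sites": ["studio"]}}

rec = {"sites": [{"name": "studio", "created_dt": datetime.now()}]}
already_attached_sites = {m["name"]: m.get("created_dt") for m in rec["sites"]}

for site_name, site_info in conf_sites.items():
    alt_sites = set(site_info.get("alternative_sites", []))
    for added_site in list(already_attached_sites):
        if added_site in alt_sites and site_name not in already_attached_sites:
            meta = {"name": site_name}
            real_created = already_attached_sites[added_site]
            if real_created:  # alt site inherits the 'created_dt' state
                meta["created_dt"] = real_created
            rec["sites"].append(meta)
            already_attached_sites[site_name] = real_created

print(rec["sites"])  # "sftp" carries the same created_dt as "studio"
```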

@@ -21,8 +21,9 @@ class ValidateVersion(pyblish.api.InstancePlugin):

        if latest_version is not None:
            msg = (
-                "Version `{0}` that you are trying to publish, already exists"
-                " in the database. Version in database: `{1}`. Please version "
-                "up your workfile to a higher version number than: `{1}`."
-            ).format(version, latest_version)
+                "Version `{0}` from instance `{1}` that you are trying to"
+                " publish, already exists in the database. Version in"
+                " database: `{2}`. Please version up your workfile to a higher"
+                " version number than: `{2}`."
+            ).format(version, instance.data["name"], latest_version)
            assert (int(version) > int(latest_version)), msg
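To see what the reworded assertion message looks like, here is a quick render with hypothetical values (version 3 of an instance named `renderMain`, while version 5 already exists in the database):

```python
version, instance_name, latest_version = 3, "renderMain", 5  # hypothetical
msg = (
    "Version `{0}` from instance `{1}` that you are trying to"
    " publish, already exists in the database. Version in"
    " database: `{2}`. Please version up your workfile to a higher"
    " version number than: `{2}`."
).format(version, instance_name, latest_version)
# This assert fails and raises AssertionError with the message above.
assert int(version) > int(latest_version), msg
```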

@@ -41,6 +41,25 @@ class PypeCommands:
        user_role = "manager"
        settings.main(user_role)

+    @staticmethod
+    def add_modules(click_func):
+        """Modules/Addons can add their cli commands dynamically."""
+        from openpype.modules import ModulesManager
+
+        manager = ModulesManager()
+        log = PypeLogger.get_logger("AddModulesCLI")
+        for module in manager.modules:
+            try:
+                module.cli(click_func)
+
+            except Exception:
+                log.warning(
+                    "Failed to add cli command for module \"{}\"".format(
+                        module.name
+                    )
+                )
+        return click_func
+
    @staticmethod
    def launch_eventservercli(*args):
        from openpype_modules.ftrack.ftrack_server.event_server_cli import (
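For context, a sketch of the hook this expects on the module side; the module and command names here are hypothetical, but the pattern is what `add_modules` drives when it calls `module.cli(click_func)` with the dynamically created `module` click group:

```python
import click


class ExampleModule(object):
    """Hypothetical addon that exposes its own CLI subcommands."""
    name = "example"

    def cli(self, click_group):
        # click_group is the "module" group created in the main CLI
        click_group.add_command(example_cli)


@click.group("example", help="Commands of a hypothetical example module")
def example_cli():
    pass


@example_cli.command()
def status():
    """Would be invoked as: openpype_console module example status"""
    click.echo("example module is alive")
```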

@@ -60,7 +79,7 @@ class PypeCommands:
        standalonepublish.main()

    @staticmethod
-    def publish(paths, targets=None):
+    def publish(paths, targets=None, gui=False):
        """Start headless publishing.

        Publish use json from passed paths argument.

@@ -69,20 +88,35 @@ class PypeCommands:
            paths (list): Paths to jsons.
            targets (string): What module should be targeted
                (to choose validator for example)
+            gui (bool): Show publish UI.

        Raises:
            RuntimeError: When there is no path to process.
        """
-        if not any(paths):
-            raise RuntimeError("No publish paths specified")
-
+        from openpype.modules import ModulesManager
        from openpype import install, uninstall
        from openpype.api import Logger
+        from openpype.tools.utils.host_tools import show_publish
+        from openpype.tools.utils.lib import qt_app_context

        # Register target and host
        import pyblish.api
        import pyblish.util

+        log = Logger.get_logger()
+
+        install()
+
+        manager = ModulesManager()
+
+        publish_paths = manager.collect_plugin_paths()["publish"]
+
+        for path in publish_paths:
+            pyblish.api.register_plugin_path(path)
+
+        if not any(paths):
+            raise RuntimeError("No publish paths specified")
+
        env = get_app_environments_for_context(
            os.environ["AVALON_PROJECT"],
            os.environ["AVALON_ASSET"],

@@ -91,32 +125,39 @@ class PypeCommands:
        )
        os.environ.update(env)

-        log = Logger.get_logger()
-
-        install()
-
-        pyblish.api.register_target("filesequence")
        pyblish.api.register_host("shell")

+        if targets:
+            for target in targets:
+                print(f"setting target: {target}")
+                pyblish.api.register_target(target)
+        else:
+            pyblish.api.register_target("filesequence")
+
        os.environ["OPENPYPE_PUBLISH_DATA"] = os.pathsep.join(paths)

        log.info("Running publish ...")

-        # Error exit as soon as any error occurs.
-        error_format = "Failed {plugin.__name__}: {error} -- {error.traceback}"
        plugins = pyblish.api.discover()
        print("Using plugins:")
        for plugin in plugins:
            print(plugin)

-        for result in pyblish.util.publish_iter():
-            if result["error"]:
-                log.error(error_format.format(**result))
-                uninstall()
-                sys.exit(1)
+        if gui:
+            with qt_app_context():
+                show_publish()
+        else:
+            # Error exit as soon as any error occurs.
+            error_format = ("Failed {plugin.__name__}: "
+                            "{error} -- {error.traceback}")
+
+            for result in pyblish.util.publish_iter():
+                if result["error"]:
+                    log.error(error_format.format(**result))
+                    # uninstall()
+                    sys.exit(1)

        log.info("Publish finished.")
        uninstall()

    @staticmethod
    def remotepublishfromapp(project, batch_dir, host, user, targets=None):
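The target-registration and error-exit pattern above can be exercised on its own; a minimal sketch (requires only `pyblish-base`; plugin paths are assumed to be registered already, and the target list is hypothetical):

```python
import sys

import pyblish.api
import pyblish.util

targets = ["deadline"]  # hypothetical targets passed from the CLI
if targets:
    for target in targets:
        print(f"setting target: {target}")
        pyblish.api.register_target(target)
else:
    # No explicit target: fall back to the headless file-sequence target.
    pyblish.api.register_target("filesequence")

error_format = "Failed {plugin.__name__}: {error} -- {error.traceback}"
for result in pyblish.util.publish_iter():
    if result["error"]:
        # Exit as soon as any plugin reports an error.
        print(error_format.format(**result))
        sys.exit(1)
```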

@@ -137,16 +178,13 @@ class PypeCommands:
        SLEEP = 5  # seconds for another loop check for concurrently runs
        WAIT_FOR = 300  # seconds to wait for conc. runs

-        from openpype import install, uninstall
-        from openpype.api import Logger
-        from openpype.lib import ApplicationManager
-
        log = Logger.get_logger()

        log.info("remotepublishphotoshop command")

        install()

+        from openpype.lib import ApplicationManager
        application_manager = ApplicationManager()

        found_variant_key = find_variant_key(application_manager, host)

@@ -220,10 +258,8 @@ class PypeCommands:
        while launched_app.poll() is None:
            time.sleep(0.5)

-        uninstall()
-
    @staticmethod
-    def remotepublish(project, batch_path, host, user, targets=None):
+    def remotepublish(project, batch_path, user, targets=None):
        """Start headless publishing.

        Used to publish rendered assets, workfiles etc.

@@ -235,10 +271,9 @@ class PypeCommands:
                per call of remotepublish
            batch_path (str): Path batch folder. Contains subfolders with
                resources (workfile, another subfolder 'renders' etc.)
-            targets (string): What module should be targeted
-                (to choose validator for example)
-            host (string)
            user (string): email address for webpublisher
+            targets (list): Pyblish targets
+                (to choose validator for example)

        Raises:
            RuntimeError: When there is no path to process.

@@ -246,21 +281,22 @@ class PypeCommands:
        if not batch_path:
            raise RuntimeError("No publish paths specified")

-        from openpype import install, uninstall
-        from openpype.api import Logger
-
        # Register target and host
        import pyblish.api
        import pyblish.util
+        import avalon.api
+        from openpype.hosts.webpublisher import api as webpublisher

-        log = Logger.get_logger()
+        log = PypeLogger.get_logger()

        log.info("remotepublish command")

-        install()
+        host_name = "webpublisher"
+        os.environ["OPENPYPE_PUBLISH_DATA"] = batch_path
+        os.environ["AVALON_PROJECT"] = project
+        os.environ["AVALON_APP"] = host_name

-        if host:
-            pyblish.api.register_host(host)
+        pyblish.api.register_host(host_name)

        if targets:
            if isinstance(targets, str):

@@ -268,13 +304,6 @@ class PypeCommands:
            for target in targets:
                pyblish.api.register_target(target)

-        os.environ["OPENPYPE_PUBLISH_DATA"] = batch_path
-        os.environ["AVALON_PROJECT"] = project
-        os.environ["AVALON_APP"] = host
-
-        import avalon.api
-        from openpype.hosts.webpublisher import api as webpublisher
-
        avalon.api.install(webpublisher)

        log.info("Running publish ...")

@@ -286,7 +315,6 @@ class PypeCommands:
        publish_and_log(dbcon, _id, log)

        log.info("Publish finished.")
-        uninstall()

    @staticmethod
    def extractenvironments(output_json_path, project, asset, task, app):

@@ -352,3 +380,28 @@ class PypeCommands:
        cmd = "pytest {} {} {}".format(folder, mark_str, pyargs_str)
        print("Running {}".format(cmd))
        subprocess.run(cmd)
+
+    def syncserver(self, active_site):
+        """Start running sync_server in background."""
+        import signal
+        os.environ["OPENPYPE_LOCAL_ID"] = active_site
+
+        def signal_handler(sig, frame):
+            print("You pressed Ctrl+C. Process ended.")
+            sync_server_module.server_exit()
+            sys.exit(0)
+
+        signal.signal(signal.SIGINT, signal_handler)
+        signal.signal(signal.SIGTERM, signal_handler)
+
+        from openpype.modules import ModulesManager
+
+        manager = ModulesManager()
+        sync_server_module = manager.modules_by_name["sync_server"]
+
+        sync_server_module.server_init()
+        sync_server_module.server_start()
+
+        import time
+        while True:
+            time.sleep(1.0)

@@ -1,5 +1,8 @@
{
    "publish": {
+        "CollectAnatomyInstanceData": {
+            "follow_workfile_version": false
+        },
        "ValidateEditorialAssetName": {
            "enabled": true,
            "optional": false

@@ -315,10 +318,11 @@
    },
    "project_folder_structure": "{\"__project_root__\": {\"prod\": {}, \"resources\": {\"footage\": {\"plates\": {}, \"offline\": {}}, \"audio\": {}, \"art_dept\": {}}, \"editorial\": {}, \"assets[ftrack.Library]\": {\"characters[ftrack]\": {}, \"locations[ftrack]\": {}}, \"shots[ftrack.Sequence]\": {\"scripts\": {}, \"editorial[ftrack.Folder]\": {}}}}",
    "sync_server": {
-        "enabled": true,
+        "enabled": false,
        "config": {
            "retry_cnt": "3",
            "loop_delay": "60",
+            "always_accessible_on": [],
            "active_site": "studio",
            "remote_site": "studio"
        },
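These defaults are what the integrate plugin earlier in this diff reads at publish time; a hedged sketch of that lookup, with a plain dict standing in for `project_settings["global"]["sync_server"]`:

```python
# Stand-in for project_settings["global"]["sync_server"] (defaults above).
sync_project_presets = {
    "enabled": False,
    "config": {
        "retry_cnt": "3",
        "loop_delay": "60",
        "always_accessible_on": [],
        "active_site": "studio",
        "remote_site": "studio",
    },
}

if sync_project_presets["enabled"]:
    config = sync_project_presets["config"]
    local_site = config.get("active_site", "studio").strip()
    remote_site = config.get("remote_site")
    if remote_site == local_site:
        remote_site = None  # identical sites mean there is nothing to sync
    always_accessible = config.get("always_accessible_on", [])
```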

@@ -8,16 +8,10 @@
        "yetiRig": "ma"
    },
    "maya-dirmap": {
-        "enabled": true,
+        "enabled": false,
        "paths": {
-            "source-path": [
-                "foo1",
-                "foo2"
-            ],
-            "destination-path": [
-                "bar1",
-                "bar2"
-            ]
+            "source-path": [],
+            "destination-path": []
        }
    },
    "scriptsmenu": {
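The settings now ship empty path pairs; how a host consumes them is roughly a prefix replacement. A standalone sketch (not the actual Maya implementation; the paths are hypothetical):

```python
dirmap = {
    "enabled": True,  # hypothetical; the shipped default above is false
    "paths": {
        "source-path": ["P:/projects"],
        "destination-path": ["/mnt/projects"],
    },
}


def remap(path, settings=dirmap):
    """Replace the first matching source prefix with its destination."""
    if not settings["enabled"]:
        return path
    pairs = zip(settings["paths"]["source-path"],
                settings["paths"]["destination-path"])
    for source, destination in pairs:
        if source and path.startswith(source):
            return destination + path[len(source):]
    return path


print(remap("P:/projects/show/scene.ma"))  # /mnt/projects/show/scene.ma
```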

@@ -8,6 +8,13 @@
        "build_workfile": "ctrl+alt+b"
    }
},
+"nuke-dirmap": {
+    "enabled": false,
+    "paths": {
+        "source-path": [],
+        "destination-path": []
+    }
+},
"create": {
    "CreateWriteRender": {
        "fpath_template": "{work}/renders/nuke/{subset}/{subset}.{frame}.{ext}",

@@ -130,8 +137,7 @@
    },
    "LoadClip": {
        "enabled": true,
-        "_representations": [
-        ],
+        "_representations": [],
        "node_name_template": "{class_name}_{ext}"
    }
},

@@ -30,8 +30,7 @@
    },
    "ExtractReview": {
        "jpg_options": {
-            "tags": [
-            ]
+            "tags": []
        },
        "mov_options": {
            "tags": [

@@ -167,6 +167,16 @@
        "ffmpeg": 48
    }
},
+"royalrender": {
+    "enabled": false,
+    "rr_paths": {
+        "default": {
+            "windows": "",
+            "darwin": "",
+            "linux": ""
+        }
+    }
+},
"log_viewer": {
    "enabled": true
},
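A quick sketch of resolving the platform-specific Royal Render root from such a multiplatform path value (the keys mirror the structure above; the paths themselves are hypothetical):

```python
import platform

rr_paths = {
    "default": {"windows": "//rr/root", "darwin": "", "linux": "/mnt/rr"}
}

platform_key = {"Windows": "windows", "Darwin": "darwin", "Linux": "linux"}
rr_root = rr_paths["default"][platform_key[platform.system()]]
print(rr_root or "Royal Render root not configured for this platform")
```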

@@ -762,6 +762,17 @@ class SyncServerProviders(DictConditionalEntity):

        enum_children = []
        for provider_code, configurables in system_settings_schema.items():
+            # any site could be exposed or vendorized by different site
+            # eg studio site content could be mapped on sftp site, single file
+            # accessible via 2 different protocols (sites)
+            configurables.append(
+                {
+                    "type": "list",
+                    "key": "alternative_sites",
+                    "label": "Alternative sites",
+                    "object_type": "text"
+                }
+            )
            label = provider_code_to_label.get(provider_code) or provider_code

            enum_children.append({

@@ -46,6 +46,39 @@
            }
        ]
    },
+    {
+        "type": "dict",
+        "collapsible": true,
+        "checkbox_key": "enabled",
+        "key": "nuke-dirmap",
+        "label": "Nuke Directory Mapping",
+        "is_group": true,
+        "children": [
+            {
+                "type": "boolean",
+                "key": "enabled",
+                "label": "Enabled"
+            },
+            {
+                "type": "dict",
+                "key": "paths",
+                "children": [
+                    {
+                        "type": "list",
+                        "object_type": "text",
+                        "key": "source-path",
+                        "label": "Source Path"
+                    },
+                    {
+                        "type": "list",
+                        "object_type": "text",
+                        "key": "destination-path",
+                        "label": "Destination Path"
+                    }
+                ]
+            }
+        ]
+    },
    {
        "type": "dict",
        "collapsible": true,

@@ -67,7 +67,17 @@
-                    "type": "list",
                    "key": "color_code",
                    "label": "Color codes for layers",
-                    "object_type": "text"
+                    "type": "enum",
+                    "multiselection": true,
+                    "enum_items": [
+                        { "red": "red" },
+                        { "orange": "orange" },
+                        { "yellowColor": "yellow" },
+                        { "grain": "green" },
+                        { "blue": "blue" },
+                        { "violet": "violet" },
+                        { "gray": "gray" }
+                    ]
                },
                {
                    "type": "list",
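With the field switched from free text to a multiselection enum, the stored settings value becomes a list of the selected codes; a hedged sketch of how such a value could drive layer filtering (the layer data here is entirely hypothetical):

```python
color_codes = ["red", "orange"]  # what the multiselection enum stores

layers = [
    {"name": "BG", "color_code": "gray"},   # hypothetical PSD layer info
    {"name": "CHAR", "color_code": "red"},
]
publishable = [layer for layer in layers if layer["color_code"] in color_codes]
print([layer["name"] for layer in publishable])  # ['CHAR']
```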

@@ -26,113 +26,33 @@
                "key": "loop_delay",
                "label": "Loop Delay"
            },
+            {
+                "type": "list",
+                "key": "always_accessible_on",
+                "label": "Always accessible on sites",
+                "object_type": "text"
+            },
+            {
+                "type": "splitter"
+            },
            {
                "type": "text",
                "key": "active_site",
-                "label": "Active Site"
+                "label": "User Default Active Site"
            },
            {
                "type": "text",
                "key": "remote_site",
-                "label": "Remote Site"
+                "label": "User Default Remote Site"
            }
        ]
    },
    {
-        "type": "dict-modifiable",
+        "type": "sync-server-sites",
        "collapsible": true,
        "key": "sites",
        "label": "Sites",
-        "collapsible_key": false,
-        "object_type": {
-            "type": "dict",
-            "children": [
-                {
-                    "type": "dict",
-                    "key": "gdrive",
-                    "label": "Google Drive",
-                    "collapsible": true,
-                    "children": [
-                        {
-                            "type": "path",
-                            "key": "credentials_url",
-                            "label": "Credentials url",
-                            "multiplatform": true
-                        }
-                    ]
-                },
-                {
-                    "type": "dict",
-                    "key": "dropbox",
-                    "label": "Dropbox",
-                    "collapsible": true,
-                    "children": [
-                        {
-                            "type": "text",
-                            "key": "token",
-                            "label": "Access Token"
-                        },
-                        {
-                            "type": "text",
-                            "key": "team_folder_name",
-                            "label": "Team Folder Name"
-                        },
-                        {
-                            "type": "text",
-                            "key": "acting_as_member",
-                            "label": "Acting As Member"
-                        }
-                    ]
-                },
-                {
-                    "type": "dict",
-                    "key": "sftp",
-                    "label": "SFTP",
-                    "collapsible": true,
-                    "children": [
-                        {
-                            "type": "text",
-                            "key": "sftp_host",
-                            "label": "SFTP host"
-                        },
-                        {
-                            "type": "number",
-                            "key": "sftp_port",
-                            "label": "SFTP port"
-                        },
-                        {
-                            "type": "text",
-                            "key": "sftp_user",
-                            "label": "SFTP user"
-                        },
-                        {
-                            "type": "text",
-                            "key": "sftp_pass",
-                            "label": "SFTP pass"
-                        },
-                        {
-                            "type": "path",
-                            "key": "sftp_key",
-                            "label": "SFTP user ssh key",
-                            "multiplatform": true
-                        },
-                        {
-                            "type": "text",
-                            "key": "sftp_key_pass",
-                            "label": "SFTP user ssh key password"
-                        }
-                    ]
-                },
-                {
-                    "type": "dict-modifiable",
-                    "key": "root",
-                    "label": "Roots",
-                    "collapsable": false,
-                    "collapsable_key": false,
-                    "object_type": "text"
-                }
-            ]
-        }
+        "collapsible_key": false
    }
]
}

@@ -4,6 +4,20 @@
    "key": "publish",
    "label": "Publish plugins",
    "children": [
+        {
+            "type": "dict",
+            "collapsible": true,
+            "key": "CollectAnatomyInstanceData",
+            "label": "Collect Anatomy Instance Data",
+            "is_group": true,
+            "children": [
+                {
+                    "type": "boolean",
+                    "key": "follow_workfile_version",
+                    "label": "Follow workfile version"
+                }
+            ]
+        },
        {
            "type": "dict",
            "collapsible": true,

@@ -180,6 +180,31 @@
            }
        ]
    },
+    {
+        "type": "dict",
+        "key": "royalrender",
+        "label": "Royal Render",
+        "require_restart": true,
+        "collapsible": true,
+        "checkbox_key": "enabled",
+        "children": [
+            {
+                "type": "boolean",
+                "key": "enabled",
+                "label": "Enabled"
+            },
+            {
+                "type": "dict-modifiable",
+                "object_type": {
+                    "type": "path",
+                    "multiplatform": true
+                },
+                "key": "rr_paths",
+                "required_keys": ["default"],
+                "label": "Royal Render Root Paths"
+            }
+        ]
+    },
    {
        "type": "dict",
        "key": "log_viewer",

Binary file not shown (image changed: 69 B before, 1.4 KiB after).
File diff suppressed because it is too large
File diff suppressed because it is too large

@@ -104,8 +104,8 @@ class TestPerformance():
        "name": "mb",
        "parent": {"oid": '{}'.format(id)},
        "data": {
-            "path": "C:\\projects\\test_performance\\Assets\\Cylinder\\publish\\workfile\\workfileLookdev\\{}\\{}".format(version_str, file_name), # noqa
-            "template": "{root[work]}\\{project[name]}\\{hierarchy}\\{asset}\\publish\\{family}\\{subset}\\v{version:0>3}\\{project[code]}_{asset}_{subset}_v{version:0>3}<_{output}><.{frame:0>4}>.{representation}" # noqa
+            "path": "C:\\projects\\test_performance\\Assets\\Cylinder\\publish\\workfile\\workfileLookdev\\{}\\{}".format(version_str, file_name), # noqa: E501
+            "template": "{root[work]}\\{project[name]}\\{hierarchy}\\{asset}\\publish\\{family}\\{subset}\\v{version:0>3}\\{project[code]}_{asset}_{subset}_v{version:0>3}<_{output}><.{frame:0>4}>.{representation}" # noqa: E501
        },
        "type": "representation",
        "schema": "openpype:representation-2.0"

@@ -188,21 +188,21 @@ class TestPerformance():
                       create_files=False):
        ret = [
            {
-                "path": "{root[work]}" + "{root[work]}/test_performance/Assets/Cylinder/publish/workfile/workfileLookdev/v{:03d}/test_Cylinder_A_workfileLookdev_v{:03d}.dat".format(i, i), #noqa
+                "path": "{root[work]}" + "{root[work]}/test_performance/Assets/Cylinder/publish/workfile/workfileLookdev/v{:03d}/test_Cylinder_A_workfileLookdev_v{:03d}.dat".format(i, i), # noqa: E501
                "_id": '{}'.format(file_id),
                "hash": "temphash",
                "sites": self.get_sites(self.MAX_NUMBER_OF_SITES),
                "size": random.randint(0, self.MAX_FILE_SIZE_B)
            },
            {
-                "path": "{root[work]}" + "/test_performance/Assets/Cylinder/publish/workfile/workfileLookdev/v{:03d}/test_Cylinder_B_workfileLookdev_v{:03d}.dat".format(i, i), #noqa
+                "path": "{root[work]}" + "/test_performance/Assets/Cylinder/publish/workfile/workfileLookdev/v{:03d}/test_Cylinder_B_workfileLookdev_v{:03d}.dat".format(i, i), # noqa: E501
                "_id": '{}'.format(file_id2),
                "hash": "temphash",
                "sites": self.get_sites(self.MAX_NUMBER_OF_SITES),
                "size": random.randint(0, self.MAX_FILE_SIZE_B)
            },
            {
-                "path": "{root[work]}" + "/test_performance/Assets/Cylinder/publish/workfile/workfileLookdev/v{:03d}/test_Cylinder_C_workfileLookdev_v{:03d}.dat".format(i, i), #noqa
+                "path": "{root[work]}" + "/test_performance/Assets/Cylinder/publish/workfile/workfileLookdev/v{:03d}/test_Cylinder_C_workfileLookdev_v{:03d}.dat".format(i, i), # noqa: E501
                "_id": '{}'.format(file_id3),
                "hash": "temphash",
                "sites": self.get_sites(self.MAX_NUMBER_OF_SITES),

@@ -223,8 +223,8 @@ class TestPerformance():
        ret = {}
        ret['{}'.format(file_id)] = {
            "path": "{root[work]}" +
-                    "/test_performance/Assets/Cylinder/publish/workfile/workfileLookdev/" #noqa
-                    "v{:03d}/test_CylinderA_workfileLookdev_v{:03d}.mb".format(i, i), # noqa
+                    "/test_performance/Assets/Cylinder/publish/workfile/workfileLookdev/" # noqa: E501
+                    "v{:03d}/test_CylinderA_workfileLookdev_v{:03d}.mb".format(i, i), # noqa: E501
            "hash": "temphash",
            "sites": ["studio"],
            "size": 87236

@@ -232,16 +232,16 @@ class TestPerformance():

        ret['{}'.format(file_id2)] = {
            "path": "{root[work]}" +
-                    "/test_performance/Assets/Cylinder/publish/workfile/workfileLookdev/" #noqa
-                    "v{:03d}/test_CylinderB_workfileLookdev_v{:03d}.mb".format(i, i), # noqa
+                    "/test_performance/Assets/Cylinder/publish/workfile/workfileLookdev/" # noqa: E501
+                    "v{:03d}/test_CylinderB_workfileLookdev_v{:03d}.mb".format(i, i), # noqa: E501
            "hash": "temphash",
            "sites": ["studio"],
            "size": 87236
        }
        ret['{}'.format(file_id3)] = {
            "path": "{root[work]}" +
-                    "/test_performance/Assets/Cylinder/publish/workfile/workfileLookdev/" #noqa
-                    "v{:03d}/test_CylinderC_workfileLookdev_v{:03d}.mb".format(i, i), # noqa
+                    "/test_performance/Assets/Cylinder/publish/workfile/workfileLookdev/" # noqa: E501
+                    "v{:03d}/test_CylinderC_workfileLookdev_v{:03d}.mb".format(i, i), # noqa: E501
            "hash": "temphash",
            "sites": ["studio"],
            "size": 87236

@@ -190,7 +190,9 @@ class Controller(QtCore.QObject):

        plugins = pyblish.api.discover()

-        targets = pyblish.logic.registered_targets() or ["default"]
+        targets = set(pyblish.logic.registered_targets())
+        targets.add("default")
+        targets = list(targets)
        plugins_by_targets = pyblish.logic.plugins_by_targets(plugins, targets)

        _plugins = []
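The change guarantees that plugins targeted at `default` always stay discoverable, even when extra targets are registered; a quick comparison of the two expressions (requires `pyblish-base`; the extra target is hypothetical):

```python
import pyblish.api
import pyblish.logic

pyblish.api.register_target("deadline")  # hypothetical extra target

# Old behavior: "default" was used only when *no* target was registered.
old_targets = pyblish.logic.registered_targets() or ["default"]

# New behavior: "default" is always present, without duplicates.
new_targets = set(pyblish.logic.registered_targets())
new_targets.add("default")
new_targets = list(new_targets)

print(old_targets)          # ['deadline']
print(sorted(new_targets))  # ['deadline', 'default']
```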

openpype/vendor/python/common/capture.py (vendored, 2 changed lines)

@@ -116,6 +116,8 @@ def capture(camera=None,
    if not cmds.objExists(camera):
        raise RuntimeError("Camera does not exist: {0}".format(camera))

+    if width and height :
+        maintain_aspect_ratio = False
    width = width or cmds.getAttr("defaultResolution.width")
    height = height or cmds.getAttr("defaultResolution.height")
    if maintain_aspect_ratio:
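The fix means an explicitly passed width *and* height now win over aspect-ratio maintenance; a standalone sketch of that sizing decision (default resolution values are hypothetical and replace the Maya attribute queries):

```python
def resolve_size(width, height, maintain_aspect_ratio=True,
                 default_width=1920, default_height=1080):
    """Sketch of the capture() sizing logic shown above."""
    if width and height:
        maintain_aspect_ratio = False  # explicit size wins
    width = width or default_width
    height = height or default_height
    return width, height, maintain_aspect_ratio


print(resolve_size(960, 540))   # (960, 540, False)
print(resolve_size(960, None))  # (960, 1080, True) -> ratio handling follows
```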

poetry.lock (generated, 1172 changed lines): diff suppressed because it is too large

@@ -0,0 +1,10 @@
+# -*- coding: utf-8 -*-
+"""Test suite for User Settings."""
+# import pytest
+# from openpype.modules import ModulesManager
+
+
+def test_rr_job():
+    # manager = ModulesManager()
+    # rr_module = manager.modules_by_name["royalrender"]
+    ...
BIN website/docs/assets/site_sync_always_on.png (new file, 27 KiB; binary not shown)
BIN website/docs/assets/site_sync_system_sites.png (new file, 13 KiB; binary not shown)

@@ -27,6 +27,38 @@ To use synchronization, *Site Sync* needs to be enabled globally in **OpenPype S

![]()

+### Sites
+
+By default there are two sites created for each OpenPype installation:
+- **studio** - default site - usually a centralized mounted disk accessible to all artists. Studio site is used if Site Sync is disabled.
+- **local** - each workstation or server running OpenPype Tray receives its own site with a unique name. A workstation refers to itself as "local"; all other sites see it under its unique ID.
+
+Artists can find their site ID by opening the OpenPype Info tool, available by clicking on the version number in the tray app.
+
+Many different sites can be created and configured on the system level, and some or all can be assigned to each project.
+
+Each OpenPype Tray app works with two sites at one time. (Sites can be the same, and no syncing is done in this setup.)
+
+Sites can be configured differently on a per-project basis.
+
+Each new site needs to be created first in `System Settings`. The most important property of a site is its Provider; select one of the already prepared Providers.
+
+#### Alternative sites
+
+This attribute is meant for special use cases only.
+
+One of the use cases is an sftp site vendoring (exposing) the same data as a regular site (studio). Each site is accessible to a different audience: 'studio' for artists in a studio via a shared disk, 'sftp' for externals via an sftp server with a mounted 'studio' drive.
+
+A change of file status on one site means the same change occurred on the 'alternate' site too (e.g. artists publish to 'studio'; 'sftp' uses the same location, so the file is accessible on the 'sftp' site right away and there is no need to sync it).
+
+##### Example
+![]()
+An admin created a new `sftp` site which is handled by the `SFTP` provider. Somewhere in the studio an SFTP server is deployed on a machine that has access to the `studio` drive.
+
+Alternative sites work both ways:
+- everything published to `studio` is accessible on the `sftp` site too
+- everything published to `sftp` (most probably via an artist's local disk: the artist publishes locally, the representation is marked to be synced to `sftp`, and immediately after it is synced it is marked as available on `studio` too, for artists in the studio to use)
+
## Project Settings


@@ -45,21 +77,6 @@ Artists can also override which site they use as active and remote if need be.
![]()


-## Sites
-
-By default there are two sites created for each OpenPype installation:
-- **studio** - default site - usually a centralized mounted disk accessible to all artists. Studio site is used if Site Sync is disabled.
-- **local** - each workstation or server running OpenPype Tray receives its own with unique site name. Workstation refers to itself as "local"however all other sites will see it under it's unique ID.
-
-Artists can explore their site ID by opening OpenPype Info tool by clicking on a version number in the tray app.
-
-Many different sites can be created and configured on the system level, and some or all can be assigned to each project.
-
-Each OpenPype Tray app works with two sites at one time. (Sites can be the same, and no synching is done in this setup).
-
-Sites could be configured differently per project basis.
-
-
## Providers

Each site implements a so called `provider` which handles most common operations (list files, copy files etc.) and provides interface with a particular type of storage. (disk, gdrive, aws, etc.)

@@ -140,3 +157,42 @@ Beware that ssh key expects OpenSSH format (`.pem`) not a Putty format (`.ppk`)!

If a studio needs to use other services for cloud storage, or want to implement totally different storage providers, they can do so by writing their own provider plugin. We're working on a developer documentation, however, for now we recommend looking at `abstract_provider.py`and `gdrive.py` inside `openpype/modules/sync_server/providers` and using it as a template.
+
+### Running Site Sync in background
+
+By default, the Site Sync server synchronizes newly published files from the artist machine into the configured remote location.
+
+There might be a use case where you need to synchronize between "non-artist" sites, for example between a studio site and the cloud. In this case
+you need to run Site Sync as a background process from a command line (via a service etc.) 24/7.
+
+To configure the sites to which all published files should eventually be synced, you first need to set the `project_settings/global/sync_server/config/always_accessible_on` property in Settings (per project).
+
+![]()
+
+This is an example of:
+- Site Sync is enabled for the project
+- default active and remote sites are set to `studio` - e.g. the standard process: everyone works in the studio, publishing to a shared location etc.
+- (but this also allows any artist to work remotely; they would change their active site in their own Local Settings to `local` and configure a local root.
+  Everything the artist publishes is then saved first into their local folder AND eventually synchronized to the `studio` site.)
+- everything exported must also eventually be uploaded to the `sftp` site
+
+This eventual synchronization between the `studio` and `sftp` sites must be physically handled by a background process.
+
+As the current implementation relies heavily on Settings and Local Settings, the background process for a specific site ('studio' for example) must first be configured via Tray for the `syncserver` command to work.
+
+To do this:
+
+- run OP `Tray` with the environment variable OPENPYPE_LOCAL_ID set to the name of the active (source) site. In most use cases it would be studio (e.g. for backing up everything published to the studio site to a different cloud site)
+- start `Tray`
+- check `Local ID` in the information dialog after clicking on the version number in the Tray
+- open `Local Settings` in the `Tray`
+- configure the necessary active site and remote site for each project
+- close `Tray`
+- run OP from a command line with the `syncserver` and `--active_site` arguments
+
+This is an example of how to trigger the background syncing process where the active (source) site is `studio`.
+(It is expected that OpenPype is installed on the machine and `openpype_console` is on PATH; if not, use the full path to the executable.)
+```shell
+openpype_console syncserver --active_site studio
+```