Merge branch 'develop' of github.com:pypeclub/OpenPype into feature/OP-1978_Use-new-site-sync-entity-in-settings-and-modify-their-loading

This commit is contained in:
Petr Kalis 2021-11-12 12:08:55 +01:00
commit ccfeb16b7e
18 changed files with 2242 additions and 1869 deletions

View file

@ -166,7 +166,7 @@ def publish(debug, paths, targets):
@click.option("-p", "--project", help="Project")
@click.option("-t", "--targets", help="Targets", default=None,
multiple=True)
def remotepublishfromapp(debug, project, path, host, targets=None, user=None):
def remotepublishfromapp(debug, project, path, host, user=None, targets=None):
"""Start CLI publishing.
Publish collects json from paths provided as an argument.
@ -174,18 +174,19 @@ def remotepublishfromapp(debug, project, path, host, targets=None, user=None):
"""
if debug:
os.environ['OPENPYPE_DEBUG'] = '3'
PypeCommands.remotepublishfromapp(project, path, host, user,
targets=targets)
PypeCommands.remotepublishfromapp(
project, path, host, user, targets=targets
)
@main.command()
@click.argument("path")
@click.option("-d", "--debug", is_flag=True, help="Print debug messages")
@click.option("-h", "--host", help="Host")
@click.option("-u", "--user", help="User email address")
@click.option("-p", "--project", help="Project")
@click.option("-t", "--targets", help="Targets", default=None,
multiple=True)
def remotepublish(debug, project, path, host, targets=None, user=None):
def remotepublish(debug, project, path, user=None, targets=None):
"""Start CLI publishing.
Publish collects json from paths provided as an argument.
@ -193,7 +194,7 @@ def remotepublish(debug, project, path, host, targets=None, user=None):
"""
if debug:
os.environ['OPENPYPE_DEBUG'] = '3'
PypeCommands.remotepublish(project, path, host, user, targets=targets)
PypeCommands.remotepublish(project, path, user, targets=targets)
@main.command()

View file

@ -13,6 +13,7 @@ from pyblish import api as pyblish
from openpype.lib import any_outdated
import openpype.hosts.maya
from openpype.hosts.maya.lib import copy_workspace_mel
from openpype.lib.path_tools import HostDirmap
from . import menu, lib
log = logging.getLogger("openpype.hosts.maya")
@ -30,7 +31,8 @@ def install():
project_settings = get_project_settings(os.getenv("AVALON_PROJECT"))
# process path mapping
process_dirmap(project_settings)
dirmap_processor = MayaDirmap("maya", project_settings)
dirmap_processor.process_dirmap()
pyblish.register_plugin_path(PUBLISH_PATH)
avalon.register_plugin_path(avalon.Loader, LOAD_PATH)
@ -60,110 +62,6 @@ def install():
avalon.data["familiesStateToggled"] = ["imagesequence"]
def process_dirmap(project_settings):
    # type: (dict) -> None
    """Go through all paths in Settings and set them using `dirmap`.

    If the artist has Site Sync enabled, take the dirmap mapping directly
    from Local Settings when the artist is syncing the workfile locally.

    Args:
        project_settings (dict): Settings for current project.
    """
    # Local Site Sync mapping takes precedence over project-wide settings.
    local_mapping = _get_local_sync_dirmap(project_settings)
    if not project_settings["maya"].get("maya-dirmap") and not local_mapping:
        return
    mapping = local_mapping or \
        project_settings["maya"]["maya-dirmap"]["paths"] \
        or {}
    # Dirmap counts as enabled when either the project setting is on or a
    # local Site Sync mapping exists.
    mapping_enabled = project_settings["maya"]["maya-dirmap"]["enabled"] \
        or bool(local_mapping)
    if not mapping or not mapping_enabled:
        return
    if mapping.get("source-path") and mapping_enabled is True:
        log.info("Processing directory mapping ...")
        # Turn Maya's dirmap feature on before registering mappings.
        cmds.dirmap(en=True)
    # "source-path" and "destination-path" are parallel lists; register both
    # directions so paths resolve regardless of which site saved the file.
    for k, sp in enumerate(mapping["source-path"]):
        try:
            print("{} -> {}".format(sp, mapping["destination-path"][k]))
            cmds.dirmap(m=(sp, mapping["destination-path"][k]))
            cmds.dirmap(m=(mapping["destination-path"][k], sp))
        except IndexError:
            # missing corresponding destination path
            log.error(("invalid dirmap mapping, missing corresponding"
                       " destination directory."))
            break
        except RuntimeError:
            # Maya refused the mapping (invalid path); skip this pair.
            log.error("invalid path {} -> {}, mapping not registered".format(
                sp, mapping["destination-path"][k]
            ))
            continue
def _get_local_sync_dirmap(project_settings):
    """
    Returns dirmap if sync to local project is enabled.

    Only valid mapping is from roots of remote site to local site set in
    Local Settings.

    Args:
        project_settings (dict)
    Returns:
        dict : { "source-path": [XXX], "destination-path": [YYYY]}
    """
    # Local import keeps module import light; json is only used for logging.
    import json
    mapping = {}
    if not project_settings["global"]["sync_server"]["enabled"]:
        log.debug("Site Sync not enabled")
        return mapping
    # Imported lazily - only needed when Site Sync is enabled.
    from openpype.settings.lib import get_site_local_overrides
    from openpype.modules import ModulesManager
    manager = ModulesManager()
    sync_module = manager.modules_by_name["sync_server"]
    project_name = os.getenv("AVALON_PROJECT")
    sync_settings = sync_module.get_sync_project_setting(
        os.getenv("AVALON_PROJECT"), exclude_locals=False, cached=False)
    log.debug(json.dumps(sync_settings, indent=4))
    # Normalize site names so artist-local drives compare equal ("local").
    active_site = sync_module.get_local_normalized_site(
        sync_module.get_active_site(project_name))
    remote_site = sync_module.get_local_normalized_site(
        sync_module.get_remote_site(project_name))
    log.debug("active {} - remote {}".format(active_site, remote_site))
    # Mapping only applies when the artist works on the local site, the
    # project has Site Sync enabled, and local differs from remote.
    if active_site == "local" \
            and project_name in sync_module.get_enabled_projects()\
            and active_site != remote_site:
        overrides = get_site_local_overrides(os.getenv("AVALON_PROJECT"),
                                             active_site)
        for root_name, value in overrides.items():
            if os.path.isdir(value):
                try:
                    mapping["destination-path"] = [value]
                    mapping["source-path"] = [sync_settings["sites"]\
                                              [remote_site]\
                                              ["root"]\
                                              [root_name]]
                except IndexError:
                    # missing corresponding destination path
                    # NOTE(review): "overrides".format(overrides) has no
                    # placeholder, so this logs only the literal word
                    # "overrides" - left untouched in this doc-only pass.
                    log.debug("overrides".format(overrides))
                    log.error(
                        ("invalid dirmap mapping, missing corresponding"
                         " destination directory."))
                    break
    log.debug("local sync mapping:: {}".format(mapping))
    return mapping
def uninstall():
pyblish.deregister_plugin_path(PUBLISH_PATH)
avalon.deregister_plugin_path(avalon.Loader, LOAD_PATH)
@ -326,3 +224,12 @@ def before_workfile_save(workfile_path):
workdir = os.path.dirname(workfile_path)
copy_workspace_mel(workdir)
class MayaDirmap(HostDirmap):
    """Maya implementation of dirmap hooks.

    Delegates to Maya's built-in ``dirmap`` command to register path
    translations.
    """

    def on_enable_dirmap(self):
        """Switch Maya's dirmap feature on before mappings are added."""
        cmds.dirmap(en=True)

    def dirmap_routine(self, source_path, destination_path):
        """Register mapping in both directions so either site resolves."""
        for src, dst in (
            (source_path, destination_path),
            (destination_path, source_path),
        ):
            cmds.dirmap(m=(src, dst))

View file

@ -24,6 +24,10 @@ from openpype.api import (
ApplicationManager
)
from openpype.tools.utils import host_tools
from openpype.lib.path_tools import HostDirmap
from openpype.settings import get_project_settings
from openpype.modules import ModulesManager
import nuke
from .utils import set_context_favorites
@ -1795,3 +1799,69 @@ def recreate_instance(origin_node, avalon_data=None):
dn.setInput(0, new_node)
return new_node
class NukeDirmap(HostDirmap):
    """Nuke implementation of dirmap - remaps a single referenced file path."""

    def __init__(self, host_name, project_settings, sync_module, file_name):
        """
        Args:
            host_name (str): Nuke
            project_settings (dict): settings of current project
            sync_module (SyncServerModule): to limit reinitialization
            file_name (str): full path of referenced file from workfiles
        """
        self.host_name = host_name
        self.project_settings = project_settings
        self.file_name = file_name
        self.sync_module = sync_module

        self._mapping = None  # cache mapping

    def on_enable_dirmap(self):
        # Nuke needs no global switch - remapping happens per file name.
        pass

    def dirmap_routine(self, source_path, destination_path):
        """Replace ``source_path`` prefix inside ``self.file_name``.

        Mutates ``self.file_name`` in place; the caller reads the result
        from the attribute instead of a return value.
        """
        log.debug("{}: {}->{}".format(self.file_name,
                                      source_path, destination_path))
        # Normalize separators to forward slashes before comparison.
        source_path = source_path.lower().replace(os.sep, '/')
        destination_path = destination_path.lower().replace(os.sep, '/')
        if platform.system().lower() == "windows":
            # Windows paths are case-insensitive, so lowercase before the
            # substitution.
            # NOTE(review): this also lowercases the final stored path,
            # which may change its casing - confirm this is intended.
            self.file_name = self.file_name.lower().replace(
                source_path, destination_path)
        else:
            self.file_name = self.file_name.replace(
                source_path, destination_path)
class DirmapCache:
    """Caching class to get settings and sync_module easily and only once."""

    # Lazily populated class-level caches shared by all callbacks.
    _project_settings = None
    _sync_module = None

    @classmethod
    def project_settings(cls):
        """Return (and memoize) settings of the current project."""
        if cls._project_settings is not None:
            return cls._project_settings
        cls._project_settings = get_project_settings(
            os.getenv("AVALON_PROJECT"))
        return cls._project_settings

    @classmethod
    def sync_module(cls):
        """Return (and memoize) the Site Sync server module."""
        if cls._sync_module is not None:
            return cls._sync_module
        cls._sync_module = ModulesManager().modules_by_name["sync_server"]
        return cls._sync_module
def dirmap_file_name_filter(file_name):
    """Nuke callback function with single full path argument.

    Checks project settings for potential mapping from source to dest.
    The remapped path is used only when it exists on disk; otherwise the
    original ``file_name`` is returned unchanged.
    """
    processor = NukeDirmap(
        "nuke",
        DirmapCache.project_settings(),
        DirmapCache.sync_module(),
        file_name,
    )
    processor.process_dirmap()

    remapped = processor.file_name
    return remapped if os.path.exists(remapped) else file_name

View file

@ -6,10 +6,10 @@ from openpype.hosts.nuke.api.lib import (
import nuke
from openpype.api import Logger
from openpype.hosts.nuke.api.lib import dirmap_file_name_filter
log = Logger().get_logger(__name__)
# fix ffmpeg settings on script
nuke.addOnScriptLoad(on_script_load)
@ -20,4 +20,6 @@ nuke.addOnScriptSave(check_inventory_versions)
# # set apply all workfile settings on script load and save
nuke.addOnScriptLoad(WorkfileSettings().set_context_settings)
nuke.addFilenameFilter(dirmap_file_name_filter)
log.info('Automatic syncing of write file knob to script version')

View file

@ -0,0 +1,84 @@
"""Loads batch context from json and continues in publish process.
Provides:
context -> Loaded batch file.
"""
import os
import pyblish.api
from avalon import io
from openpype.lib.plugin_tools import (
parse_json,
get_batch_asset_task_info
)
from openpype.lib.remote_publish import get_webpublish_conn
class CollectBatchData(pyblish.api.ContextPlugin):
    """Collect batch data from json stored in 'OPENPYPE_PUBLISH_DATA' env dir.

    The directory must contain 'manifest.json' file where batch data should be
    stored.
    """
    # must be really early, context values are only in json file
    order = pyblish.api.CollectorOrder - 0.495
    label = "Collect batch data"
    host = ["webpublisher"]

    def process(self, context):
        # type: (pyblish.api.Context) -> None
        batch_dir = os.environ.get("OPENPYPE_PUBLISH_DATA")

        assert batch_dir, (
            "Missing `OPENPYPE_PUBLISH_DATA`")
        assert os.path.exists(batch_dir), \
            "Folder {} doesn't exist".format(batch_dir)

        project_name = os.environ.get("AVALON_PROJECT")
        if project_name is None:
            raise AssertionError(
                "Environment `AVALON_PROJECT` was not found."
                "Could not set project `root` which may cause issues."
            )

        batch_data = parse_json(os.path.join(batch_dir, "manifest.json"))

        context.data["batchDir"] = batch_dir
        context.data["batchData"] = batch_data

        asset_name, task_name, task_type = get_batch_asset_task_info(
            batch_data["context"]
        )

        # Propagate context into both environment and avalon session so
        # later plugins and host integrations pick it up.
        os.environ["AVALON_ASSET"] = asset_name
        io.Session["AVALON_ASSET"] = asset_name
        os.environ["AVALON_TASK"] = task_name
        io.Session["AVALON_TASK"] = task_name

        context.data["asset"] = asset_name
        context.data["task"] = task_name
        context.data["taskType"] = task_type

        self._set_ctx_path(batch_data)

    def _set_ctx_path(self, batch_data):
        # Store the context path on the webpublish log record so the front
        # end can show what was published for this batch.
        dbcon = get_webpublish_conn()

        batch_id = batch_data["batch"]
        ctx_path = batch_data["context"]["path"]
        self.log.info("ctx_path: {}".format(ctx_path))
        self.log.info("batch_id: {}".format(batch_id))
        if ctx_path and batch_id:
            self.log.info("Updating log record")
            dbcon.update_one(
                {
                    "batch_id": batch_id,
                    "status": "in_progress"
                },
                {
                    "$set": {
                        "path": ctx_path
                    }
                }
            )

View file

@ -20,9 +20,8 @@ class CollectFPS(pyblish.api.InstancePlugin):
hosts = ["webpublisher"]
def process(self, instance):
fps = instance.context.data["fps"]
instance_fps = instance.data.get("fps")
if instance_fps is None:
instance.data["fps"] = instance.context.data["fps"]
instance.data.update({
"fps": fps
})
self.log.debug(f"instance.data: {pformat(instance.data)}")

View file

@ -1,21 +1,19 @@
"""Loads publishing context from json and continues in publish process.
"""Create instances from batch data and continues in publish process.
Requires:
anatomy -> context["anatomy"] *(pyblish.api.CollectorOrder - 0.11)
CollectBatchData
Provides:
context, instances -> All data from previous publishing process.
"""
import os
import json
import clique
import tempfile
import pyblish.api
from avalon import io
import pyblish.api
from openpype.lib import prepare_template_data
from openpype.lib.plugin_tools import parse_json, get_batch_asset_task_info
from openpype.lib.plugin_tools import parse_json
class CollectPublishedFiles(pyblish.api.ContextPlugin):
@ -28,28 +26,28 @@ class CollectPublishedFiles(pyblish.api.ContextPlugin):
order = pyblish.api.CollectorOrder - 0.490
label = "Collect rendered frames"
host = ["webpublisher"]
_context = None
targets = ["filespublish"]
# from Settings
task_type_to_family = {}
def _process_batch(self, dir_url):
task_subfolders = [
os.path.join(dir_url, o)
for o in os.listdir(dir_url)
if os.path.isdir(os.path.join(dir_url, o))]
def process(self, context):
batch_dir = context.data["batchDir"]
task_subfolders = []
for folder_name in os.listdir(batch_dir):
full_path = os.path.join(batch_dir, folder_name)
if os.path.isdir(full_path):
task_subfolders.append(full_path)
self.log.info("task_sub:: {}".format(task_subfolders))
asset_name = context.data["asset"]
task_name = context.data["task"]
task_type = context.data["taskType"]
for task_dir in task_subfolders:
task_data = parse_json(os.path.join(task_dir,
"manifest.json"))
self.log.info("task_data:: {}".format(task_data))
ctx = task_data["context"]
asset, task_name, task_type = get_batch_asset_task_info(ctx)
if task_name:
os.environ["AVALON_TASK"] = task_name
is_sequence = len(task_data["files"]) > 1
@ -60,26 +58,20 @@ class CollectPublishedFiles(pyblish.api.ContextPlugin):
is_sequence,
extension.replace(".", ''))
subset = self._get_subset_name(family, subset_template, task_name,
task_data["variant"])
subset = self._get_subset_name(
family, subset_template, task_name, task_data["variant"]
)
version = self._get_last_version(asset_name, subset) + 1
os.environ["AVALON_ASSET"] = asset
io.Session["AVALON_ASSET"] = asset
instance = self._context.create_instance(subset)
instance.data["asset"] = asset
instance = context.create_instance(subset)
instance.data["asset"] = asset_name
instance.data["subset"] = subset
instance.data["family"] = family
instance.data["families"] = families
instance.data["version"] = \
self._get_last_version(asset, subset) + 1
instance.data["version"] = version
instance.data["stagingDir"] = tempfile.mkdtemp()
instance.data["source"] = "webpublisher"
# to store logging info into DB openpype.webpublishes
instance.data["ctx_path"] = ctx["path"]
instance.data["batch_id"] = task_data["batch"]
# to convert from email provided into Ftrack username
instance.data["user_email"] = task_data["user"]
@ -230,23 +222,3 @@ class CollectPublishedFiles(pyblish.api.ContextPlugin):
return version[0].get("version") or 0
else:
return 0
def process(self, context):
self._context = context
batch_dir = os.environ.get("OPENPYPE_PUBLISH_DATA")
assert batch_dir, (
"Missing `OPENPYPE_PUBLISH_DATA`")
assert os.path.exists(batch_dir), \
"Folder {} doesn't exist".format(batch_dir)
project_name = os.environ.get("AVALON_PROJECT")
if project_name is None:
raise AssertionError(
"Environment `AVALON_PROJECT` was not found."
"Could not set project `root` which may cause issues."
)
self._process_batch(batch_dir)

View file

@ -1,38 +0,0 @@
import os
import pyblish.api
from openpype.lib import OpenPypeMongoConnection
class IntegrateContextToLog(pyblish.api.ContextPlugin):
    """ Adds context information to log document for displaying in front end"""

    label = "Integrate Context to Log"
    order = pyblish.api.IntegratorOrder - 0.1
    hosts = ["webpublisher"]

    def process(self, context):
        self.log.info("Integrate Context to Log")

        # Direct connection to the 'webpublishes' log collection.
        mongo_client = OpenPypeMongoConnection.get_mongo_client()
        database_name = os.environ["OPENPYPE_DATABASE_NAME"]
        dbcon = mongo_client[database_name]["webpublishes"]
        for instance in context:
            self.log.info("ctx_path: {}".format(instance.data.get("ctx_path")))
            self.log.info("batch_id: {}".format(instance.data.get("batch_id")))
            if instance.data.get("ctx_path") and instance.data.get("batch_id"):
                self.log.info("Updating log record")
                dbcon.update_one(
                    {
                        "batch_id": instance.data.get("batch_id"),
                        "status": "in_progress"
                    },
                    {"$set":
                        {
                            "path": instance.data.get("ctx_path")
                        }}
                )
                # Only the first matching instance updates the record;
                # all instances of a batch share the same ctx_path.
                return

View file

@ -176,23 +176,48 @@ class TaskNode(Node):
class WebpublisherBatchPublishEndpoint(_RestApiEndpoint):
"""Triggers headless publishing of batch."""
async def post(self, request) -> Response:
# for postprocessing in host, currently only PS
host_map = {"photoshop": [".psd", ".psb"]}
# Validate existence of openpype executable
openpype_app = self.resource.executable
if not openpype_app or not os.path.exists(openpype_app):
msg = "Non existent OpenPype executable {}".format(openpype_app)
raise RuntimeError(msg)
# for postprocessing in host, currently only PS
output = {}
log.info("WebpublisherBatchPublishEndpoint called")
content = await request.json()
batch_path = os.path.join(self.resource.upload_dir,
content["batch"])
# Each filter have extensions which are checked on first task item
# - first filter with extensions that are on first task is used
# - filter defines command and can extend arguments dictionary
# This is used only if 'studio_processing' is enabled on batch
studio_processing_filters = [
# Photoshop filter
{
"extensions": [".psd", ".psb"],
"command": "remotepublishfromapp",
"arguments": {
# Command 'remotepublishfromapp' requires --host argument
"host": "photoshop",
# Make sure targets are set to None for cases that default
# would change
# - targets argument is not used in 'remotepublishfromapp'
"targets": None
}
}
]
add_args = {
"host": "webpublisher",
"project": content["project_name"],
"user": content["user"]
}
batch_path = os.path.join(self.resource.upload_dir, content["batch"])
# Default command and arguments
command = "remotepublish"
add_args = {
# All commands need 'project' and 'user'
"project": content["project_name"],
"user": content["user"],
"targets": ["filespublish"]
}
if content.get("studio_processing"):
log.info("Post processing called")
@ -208,32 +233,34 @@ class WebpublisherBatchPublishEndpoint(_RestApiEndpoint):
raise ValueError(
"Cannot parse batch meta in {} folder".format(task_data))
command = "remotepublishfromapp"
for host, extensions in host_map.items():
for ext in extensions:
for file_name in task_data["files"]:
if ext in file_name:
add_args["host"] = host
break
for process_filter in studio_processing_filters:
filter_extensions = process_filter.get("extensions") or []
for file_name in task_data["files"]:
file_ext = os.path.splitext(file_name)[-1].lower()
if file_ext in filter_extensions:
# Change command
command = process_filter["command"]
# Update arguments
add_args.update(
process_filter.get("arguments") or {}
)
break
if not add_args.get("host"):
raise ValueError(
"Couldn't discern host from {}".format(task_data["files"]))
openpype_app = self.resource.executable
args = [
openpype_app,
command,
batch_path
]
if not openpype_app or not os.path.exists(openpype_app):
msg = "Non existent OpenPype executable {}".format(openpype_app)
raise RuntimeError(msg)
for key, value in add_args.items():
args.append("--{}".format(key))
args.append(value)
# Skip key values where value is None
if value is not None:
args.append("--{}".format(key))
# Extend list into arguments (targets can be a list)
if isinstance(value, (tuple, list)):
args.extend(value)
else:
args.append(value)
log.info("args:: {}".format(args))

View file

@ -2,6 +2,8 @@ import json
import logging
import os
import re
import abc
import six
from .anatomy import Anatomy
@ -196,3 +198,159 @@ def get_project_basic_paths(project_name):
if isinstance(folder_structure, str):
folder_structure = json.loads(folder_structure)
return _list_path_items(folder_structure)
@six.add_metaclass(abc.ABCMeta)
class HostDirmap:
    """
    Abstract class for running dirmap on a workfile in a host.

    Dirmap is used to translate paths inside of host workfile from one
    OS to another. (Eg. artist created workfile on Win, different artist
    opens same file on Linux.)

    Expects methods to be implemented inside of host:
        on_enable_dirmap: run host code for enabling dirmap
        dirmap_routine: run host code to do actual remapping
    """

    def __init__(self, host_name, project_settings, sync_module=None):
        """
        Args:
            host_name (str): name of settings group of host (eg. "maya")
            project_settings (dict): settings of current project
            sync_module (SyncServerModule, optional): pass existing module
                to limit re-initialization of ModulesManager
        """
        self.host_name = host_name
        self.project_settings = project_settings
        self.sync_module = sync_module  # to limit reinit of Modules

        self._mapping = None  # cache mapping

    @abc.abstractmethod
    def on_enable_dirmap(self):
        """
        Run host dependent operation for enabling dirmap if necessary.
        """

    @abc.abstractmethod
    def dirmap_routine(self, source_path, destination_path):
        """
        Run host dependent remapping from source_path to destination_path
        """

    def process_dirmap(self):
        # type: () -> None
        """Go through all mapped paths and register them in the host.

        If artists has Site Sync enabled, take dirmap mapping directly from
        Local Settings when artist is syncing workfile locally.
        """
        if not self._mapping:
            self._mapping = self.get_mappings(self.project_settings)
        if not self._mapping:
            return

        log.info("Processing directory mapping ...")
        self.on_enable_dirmap()
        log.info("mapping:: {}".format(self._mapping))

        # "source-path" and "destination-path" are parallel lists.
        for k, sp in enumerate(self._mapping["source-path"]):
            try:
                print("{} -> {}".format(
                    sp, self._mapping["destination-path"][k]))
                self.dirmap_routine(
                    sp, self._mapping["destination-path"][k])
            except IndexError:
                # missing corresponding destination path
                log.error(("invalid dirmap mapping, missing corresponding"
                           " destination directory."))
                break
            except RuntimeError:
                # host refused the mapping (invalid path); skip this pair
                log.error("invalid path {} -> {}, mapping not registered".format(  # noqa: E501
                    sp, self._mapping["destination-path"][k]
                ))
                continue

    def get_mappings(self, project_settings):
        """Get translation from source-path to destination-path.

        It checks if Site Sync is enabled and user chose to use local
        site, in that case configuration in Local Settings takes precedence.

        Args:
            project_settings (dict): settings of current project

        Returns:
            dict: {"source-path": [...], "destination-path": [...]} or
                empty dict when nothing should be mapped.
        """
        local_mapping = self._get_local_sync_dirmap(project_settings)
        dirmap_label = "{}-dirmap".format(self.host_name)
        if not project_settings[self.host_name].get(dirmap_label) and \
                not local_mapping:
            return {}
        mapping = local_mapping or \
            project_settings[self.host_name][dirmap_label]["paths"] or {}

        # Dirmap counts as enabled when either the project setting is on
        # or a local Site Sync mapping exists.
        enabled = project_settings[self.host_name][dirmap_label]["enabled"]
        mapping_enabled = enabled or bool(local_mapping)

        if not mapping or not mapping_enabled or \
                not mapping.get("destination-path") or \
                not mapping.get("source-path"):
            return {}
        return mapping

    def _get_local_sync_dirmap(self, project_settings):
        """
        Returns dirmap if sync to local project is enabled.

        Only valid mapping is from roots of remote site to local site set
        in Local Settings.

        Args:
            project_settings (dict)
        Returns:
            dict : { "source-path": [XXX], "destination-path": [YYYY]}
        """
        mapping = {}

        if not project_settings["global"]["sync_server"]["enabled"]:
            log.debug("Site Sync not enabled")
            return mapping

        # Imported lazily - only needed when Site Sync is enabled.
        from openpype.settings.lib import get_site_local_overrides
        if not self.sync_module:
            from openpype.modules import ModulesManager
            manager = ModulesManager()
            self.sync_module = manager.modules_by_name["sync_server"]

        project_name = os.getenv("AVALON_PROJECT")

        # Normalize site names so artist-local drives compare equal.
        active_site = self.sync_module.get_local_normalized_site(
            self.sync_module.get_active_site(project_name))
        remote_site = self.sync_module.get_local_normalized_site(
            self.sync_module.get_remote_site(project_name))
        log.debug("active {} - remote {}".format(active_site, remote_site))

        # Mapping is only meaningful when artist works on local site that
        # differs from remote and project has Site Sync enabled.
        if active_site == "local" \
                and project_name in self.sync_module.get_enabled_projects()\
                and active_site != remote_site:
            sync_settings = self.sync_module.get_sync_project_setting(
                project_name, exclude_locals=False,
                cached=False)

            active_overrides = get_site_local_overrides(
                project_name, active_site)
            remote_overrides = get_site_local_overrides(
                project_name, remote_site)

            # Fixed: original called "...".format(x) without a placeholder,
            # so the override values were never logged.
            log.debug("local overrides {}".format(active_overrides))
            log.debug("remote overrides {}".format(remote_overrides))

            for root_name, active_site_dir in active_overrides.items():
                # Remote root comes from Local Settings override when set,
                # otherwise from the project sync settings.
                remote_site_dir = remote_overrides.get(root_name) or \
                    sync_settings["sites"][remote_site]["root"][root_name]
                if os.path.isdir(active_site_dir):
                    mapping.setdefault("destination-path", []).append(
                        active_site_dir)
                    mapping.setdefault("source-path", []).append(
                        remote_site_dir)

        log.debug("local sync mapping:: {}".format(mapping))
        return mapping

View file

@ -137,16 +137,13 @@ class PypeCommands:
SLEEP = 5 # seconds for another loop check for concurrently runs
WAIT_FOR = 300 # seconds to wait for conc. runs
from openpype import install, uninstall
from openpype.api import Logger
from openpype.lib import ApplicationManager
log = Logger.get_logger()
log.info("remotepublishphotoshop command")
install()
from openpype.lib import ApplicationManager
application_manager = ApplicationManager()
found_variant_key = find_variant_key(application_manager, host)
@ -220,10 +217,8 @@ class PypeCommands:
while launched_app.poll() is None:
time.sleep(0.5)
uninstall()
@staticmethod
def remotepublish(project, batch_path, host, user, targets=None):
def remotepublish(project, batch_path, user, targets=None):
"""Start headless publishing.
Used to publish rendered assets, workfiles etc.
@ -235,10 +230,9 @@ class PypeCommands:
per call of remotepublish
batch_path (str): Path batch folder. Contains subfolders with
resources (workfile, another subfolder 'renders' etc.)
targets (string): What module should be targeted
(to choose validator for example)
host (string)
user (string): email address for webpublisher
targets (list): Pyblish targets
(to choose validator for example)
Raises:
RuntimeError: When there is no path to process.
@ -246,21 +240,22 @@ class PypeCommands:
if not batch_path:
raise RuntimeError("No publish paths specified")
from openpype import install, uninstall
from openpype.api import Logger
# Register target and host
import pyblish.api
import pyblish.util
import avalon.api
from openpype.hosts.webpublisher import api as webpublisher
log = Logger.get_logger()
log = PypeLogger.get_logger()
log.info("remotepublish command")
install()
host_name = "webpublisher"
os.environ["OPENPYPE_PUBLISH_DATA"] = batch_path
os.environ["AVALON_PROJECT"] = project
os.environ["AVALON_APP"] = host_name
if host:
pyblish.api.register_host(host)
pyblish.api.register_host(host_name)
if targets:
if isinstance(targets, str):
@ -268,13 +263,6 @@ class PypeCommands:
for target in targets:
pyblish.api.register_target(target)
os.environ["OPENPYPE_PUBLISH_DATA"] = batch_path
os.environ["AVALON_PROJECT"] = project
os.environ["AVALON_APP"] = host
import avalon.api
from openpype.hosts.webpublisher import api as webpublisher
avalon.api.install(webpublisher)
log.info("Running publish ...")
@ -286,7 +274,6 @@ class PypeCommands:
publish_and_log(dbcon, _id, log)
log.info("Publish finished.")
uninstall()
@staticmethod
def extractenvironments(output_json_path, project, asset, task, app):

View file

@ -8,16 +8,10 @@
"yetiRig": "ma"
},
"maya-dirmap": {
"enabled": true,
"enabled": false,
"paths": {
"source-path": [
"foo1",
"foo2"
],
"destination-path": [
"bar1",
"bar2"
]
"source-path": [],
"destination-path": []
}
},
"scriptsmenu": {

View file

@ -8,6 +8,13 @@
"build_workfile": "ctrl+alt+b"
}
},
"nuke-dirmap": {
"enabled": false,
"paths": {
"source-path": [],
"destination-path": []
}
},
"create": {
"CreateWriteRender": {
"fpath_template": "{work}/renders/nuke/{subset}/{subset}.{frame}.{ext}",
@ -130,8 +137,7 @@
},
"LoadClip": {
"enabled": true,
"_representations": [
],
"_representations": [],
"node_name_template": "{class_name}_{ext}"
}
},

View file

@ -46,6 +46,39 @@
}
]
},
{
"type": "dict",
"collapsible": true,
"checkbox_key": "enabled",
"key": "nuke-dirmap",
"label": "Nuke Directory Mapping",
"is_group": true,
"children": [
{
"type": "boolean",
"key": "enabled",
"label": "Enabled"
},
{
"type": "dict",
"key": "paths",
"children": [
{
"type": "list",
"object_type": "text",
"key": "source-path",
"label": "Source Path"
},
{
"type": "list",
"object_type": "text",
"key": "destination-path",
"label": "Destination Path"
}
]
}
]
},
{
"type": "dict",
"collapsible": true,

Binary file not shown.

Before

Width:  |  Height:  |  Size: 69 B

After

Width:  |  Height:  |  Size: 1.4 KiB

Before After
Before After

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

View file

@ -190,7 +190,9 @@ class Controller(QtCore.QObject):
plugins = pyblish.api.discover()
targets = pyblish.logic.registered_targets() or ["default"]
targets = set(pyblish.logic.registered_targets())
targets.add("default")
targets = list(targets)
plugins_by_targets = pyblish.logic.plugins_by_targets(plugins, targets)
_plugins = []