Mirror of https://github.com/ynput/ayon-core.git (synced 2025-12-25 05:14:40 +01:00)
Merge remote-tracking branch 'origin/feature/publish_cli_command' into feature/1004-deadline-better-handling-of-pype

Commit c6fd07be85: 35 changed files with 2,537 additions and 464 deletions

@@ -110,9 +110,8 @@ def eventserver(debug,

 @main.command()
 @click.argument("paths", nargs=-1)
-@click.option("-g", "--gui", is_flag=True, help="Run pyblish GUI")
 @click.option("-d", "--debug", is_flag=True, help="Print debug messages")
-def publish(gui, debug, paths):
+def publish(debug, paths):
     """Start CLI publishing.

     Publish collects json files from the paths provided as arguments.

@@ -120,7 +119,7 @@ def publish(debug, paths):
     """
     if debug:
         os.environ['PYPE_DEBUG'] = '3'
-    PypeCommands().publish(gui, list(paths))
+    PypeCommands.publish(list(paths))


 @main.command()
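
The change removes the `--gui` flag, so CLI publishing now takes only paths and an optional debug switch, and `publish` is called as a classmethod. A minimal click sketch mirroring the new signature (the standalone module is illustrative, not part of the commit):

import click

@click.command()
@click.argument("paths", nargs=-1)
@click.option("-d", "--debug", is_flag=True, help="Print debug messages")
def publish(debug, paths):
    """Mirror of the new signature: the gui flag and parameter are gone."""
    click.echo("debug={} paths={}".format(debug, list(paths)))

if __name__ == "__main__":
    publish()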

@@ -43,17 +43,17 @@ SHAPE_ATTRS = {"castsShadows",
                "opposite"}

 RENDER_ATTRS = {"vray": {
        "node": "vraySettings",
        "prefix": "fileNamePrefix",
        "padding": "fileNamePadding",
        "ext": "imageFormatStr"
    },
    "default": {
        "node": "defaultRenderGlobals",
        "prefix": "imageFilePrefix",
        "padding": "extensionPadding"
    }
 }


 DEFAULT_MATRIX = [1.0, 0.0, 0.0, 0.0,

@@ -95,6 +95,8 @@ _alembic_options = {
 INT_FPS = {15, 24, 25, 30, 48, 50, 60, 44100, 48000}
 FLOAT_FPS = {23.98, 23.976, 29.97, 47.952, 59.94}

+RENDERLIKE_INSTANCE_FAMILIES = ["rendering", "vrayscene"]
+

 def _get_mel_global(name):
     """Return the value of a mel global variable"""

@@ -114,7 +116,9 @@ def matrix_equals(a, b, tolerance=1e-10):
         bool : True or False

     """
-    return all(abs(x - y) < tolerance for x, y in zip(a, b))
+    if not all(abs(x - y) < tolerance for x, y in zip(a, b)):
+        return False
+    return True


 def float_round(num, places=0, direction=ceil):
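
A quick check of the rewritten comparison with the default tolerance (standalone snippet; `matrix_equals` is the function above):

identity = [1.0, 0.0, 0.0, 0.0,
            0.0, 1.0, 0.0, 0.0,
            0.0, 0.0, 1.0, 0.0,
            0.0, 0.0, 0.0, 1.0]
nudged = list(identity)
nudged[0] += 1e-12  # well below the 1e-10 tolerance

assert matrix_equals(identity, nudged)                    # within tolerance
assert not matrix_equals(identity, [2.0] + identity[1:])  # element differs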

@@ -2466,12 +2470,21 @@ class shelf():
         cmds.shelfLayout(self.name, p="ShelfLayout")


-def _get_render_instance():
+def _get_render_instances():
+    """Return all 'render-like' instances.
+
+    This returns a list of instance sets that need to receive information
+    about render layer changes.
+
+    Returns:
+        list: list of instances
+
+    """
     objectset = cmds.ls("*.id", long=True, type="objectSet",
                         recursive=True, objectsOnly=True)

+    instances = []
     for objset in objectset:

         if not cmds.attributeQuery("id", node=objset, exists=True):
             continue

@@ -2485,16 +2498,18 @@ def _get_render_instance():
         if not has_family:
             continue

-        if cmds.getAttr("{}.family".format(objset)) == 'rendering':
-            return objset
+        if cmds.getAttr(
+                "{}.family".format(objset)) in RENDERLIKE_INSTANCE_FAMILIES:
+            instances.append(objset)

-    return None
+    return instances


 renderItemObserverList = []


 class RenderSetupListObserver:
     """Observer to catch changes in render setup layers."""

     def listItemAdded(self, item):
         print("--- adding ...")

@@ -2505,56 +2520,95 @@ class RenderSetupListObserver:
         self._remove_render_layer(item.name())

     def _add_render_layer(self, item):
-        render_set = _get_render_instance()
+        render_sets = _get_render_instances()
         layer_name = item.name()

-        if not render_set:
-            return
-
-        members = cmds.sets(render_set, query=True) or []
-        if not "LAYER_{}".format(layer_name) in members:
-            print(" - creating set for {}".format(layer_name))
-            set = cmds.sets(n="LAYER_{}".format(layer_name), empty=True)
-            cmds.sets(set, forceElement=render_set)
-            rio = RenderSetupItemObserver(item)
-            print("- adding observer for {}".format(item.name()))
-            item.addItemObserver(rio.itemChanged)
-            renderItemObserverList.append(rio)
+        for render_set in render_sets:
+            members = cmds.sets(render_set, query=True) or []
+
+            if members:
+                # if the set already has namespaced members, use the same
+                # namespace as the others.
+                namespace = members[0].rpartition(":")[0]
+            else:
+                index = 1
+                namespace_name = "_{}".format(render_set)
+                try:
+                    cmds.namespace(rm=namespace_name)
+                except RuntimeError:
+                    # namespace is not empty, so we leave it untouched
+                    pass
+                original_namespace_name = namespace_name
+                while cmds.namespace(exists=namespace_name):
+                    namespace_name = "{}{}".format(
+                        original_namespace_name, index)
+                    index += 1
+
+                namespace = cmds.namespace(add=namespace_name)
+
+            render_layer_set_name = "{}:{}".format(namespace, layer_name)
+            if render_layer_set_name in members:
+                continue
+            print(" - creating set for {}".format(layer_name))
+            maya_set = cmds.sets(n=render_layer_set_name, empty=True)
+            cmds.sets(maya_set, forceElement=render_set)
+            rio = RenderSetupItemObserver(item)
+            print("- adding observer for {}".format(item.name()))
+            item.addItemObserver(rio.itemChanged)
+            renderItemObserverList.append(rio)

     def _remove_render_layer(self, layer_name):
-        render_set = _get_render_instance()
-
-        if not render_set:
-            return
-
-        members = cmds.sets(render_set, query=True)
-        if "LAYER_{}".format(layer_name) in members:
-            print(" - removing set for {}".format(layer_name))
-            cmds.delete("LAYER_{}".format(layer_name))
+        render_sets = _get_render_instances()
+
+        for render_set in render_sets:
+            members = cmds.sets(render_set, query=True)
+            if not members:
+                continue
+
+            # all sets under the set should have the same namespace
+            namespace = members[0].rpartition(":")[0]
+            render_layer_set_name = "{}:{}".format(namespace, layer_name)
+
+            if render_layer_set_name in members:
+                print(" - removing set for {}".format(layer_name))
+                cmds.delete(render_layer_set_name)


 class RenderSetupItemObserver():
     """Handle changes in render setup items."""

     def __init__(self, item):
         self.item = item
         self.original_name = item.name()

     def itemChanged(self, *args, **kwargs):
         """Item changed callback."""
         if self.item.name() == self.original_name:
             return

-        render_set = _get_render_instance()
-
-        if not render_set:
-            return
-
-        members = cmds.sets(render_set, query=True)
-        if "LAYER_{}".format(self.original_name) in members:
-            print(" <> renaming {} to {}".format(self.original_name,
-                                                 self.item.name()))
-            cmds.rename("LAYER_{}".format(self.original_name),
-                        "LAYER_{}".format(self.item.name()))
-        self.original_name = self.item.name()
+        render_sets = _get_render_instances()
+
+        for render_set in render_sets:
+            members = cmds.sets(render_set, query=True)
+            if not members:
+                continue
+
+            # all sets under the set should have the same namespace
+            namespace = members[0].rpartition(":")[0]
+            render_layer_set_name = "{}:{}".format(
+                namespace, self.original_name)
+
+            if render_layer_set_name in members:
+                print(" <> renaming {} to {}".format(self.original_name,
+                                                     self.item.name()))
+                cmds.rename(render_layer_set_name,
+                            "{}:{}".format(
+                                namespace, self.item.name()))
+            self.original_name = self.item.name()


 renderListObserver = RenderSetupListObserver()

@@ -2564,14 +2618,19 @@ def add_render_layer_change_observer():
     import maya.app.renderSetup.model.renderSetup as renderSetup

     rs = renderSetup.instance()
-    render_set = _get_render_instance()
-    if not render_set:
-        return
+    render_sets = _get_render_instances()

-    members = cmds.sets(render_set, query=True)
     layers = rs.getRenderLayers()
-    for layer in layers:
-        if "LAYER_{}".format(layer.name()) in members:
+    for render_set in render_sets:
+        members = cmds.sets(render_set, query=True)
+        if not members:
+            continue
+        # all sets under the set should have the same namespace
+        namespace = members[0].rpartition(":")[0]
+        for layer in layers:
+            render_layer_set_name = "{}:{}".format(namespace, layer.name())
+            if render_layer_set_name not in members:
+                continue
             rio = RenderSetupItemObserver(layer)
             print("- adding observer for {}".format(layer.name()))
             layer.addItemObserver(rio.itemChanged)

pype/hosts/maya/api/render_setup_tools.py (new file, 128 lines)
@@ -0,0 +1,128 @@
# -*- coding: utf-8 -*-
"""Export stuff in render setup layer context.

Export Maya nodes from a Render Setup layer as if flattened in that layer,
instead of exporting the defaultRenderLayer as Maya forces by default.

Credits: Roy Nieterau (BigRoy) / Colorbleed
Modified for use in Pype

"""

import os
import contextlib

from maya import cmds
from maya.app.renderSetup.model import renderSetup

# from colorbleed.maya import lib
from .lib import pairwise


@contextlib.contextmanager
def _allow_export_from_render_setup_layer():
    """Context manager to override Maya settings to allow RS layer export."""
    try:
        rs = renderSetup.instance()

        # Exclude Render Setup nodes from the export
        rs._setAllRSNodesDoNotWrite(True)

        # Disable Render Setup forcing the switch to master layer
        os.environ["MAYA_BATCH_RENDER_EXPORT"] = "1"

        yield

    finally:
        # Reset original state
        rs._setAllRSNodesDoNotWrite(False)
        os.environ.pop("MAYA_BATCH_RENDER_EXPORT", None)


def export_in_rs_layer(path, nodes, export=None):
    """Export nodes from a Render Setup layer.

    When exporting from a Render Setup layer Maya by default
    forces a switch to the defaultRenderLayer, making
    it impossible to export the contents of a Render Setup
    layer. Maya presents this warning message:
    # Warning: Exporting Render Setup master layer content #

    This function however avoids the renderlayer switch and
    exports from the Render Setup layer as if the edits were
    'flattened' in the master layer.

    It does so by:
    - Allowing export from a Render Setup layer
    - Enforcing that Render Setup nodes are NOT written on export
    - Disconnecting connections from any `applyOverride` nodes
      to flatten the values (so they are written correctly)*

    *Connection overrides like Shader Override and Material
    Overrides export correctly out of the box since they don't
    create an intermediate connection to an 'applyOverride' node.
    However, any scalar override (absolute or relative override)
    will get input connections in the layer, so we break those
    to 'store' the values on the attribute itself and write the
    values out instead.

    Args:
        path (str): File path to export to.
        nodes (list): Maya nodes to export.
        export (callable, optional): Callback to be used for exporting. If
            not specified, a default export to `.ma` will be called.

    Returns:
        None

    Raises:
        AssertionError: When not in a Render Setup layer an
            AssertionError is raised. This command assumes
            you are currently in a Render Setup layer.

    """
    rs = renderSetup.instance()
    assert rs.getVisibleRenderLayer().name() != "defaultRenderLayer", \
        ("Export in Render Setup layer is only supported when in "
         "Render Setup layer")

    # Break connections to any value overrides
    history = cmds.listHistory(nodes) or []
    nodes_all = list(
        set(cmds.ls(nodes + history, long=True, objectsOnly=True)))
    overrides = cmds.listConnections(nodes_all,
                                     source=True,
                                     destination=False,
                                     type="applyOverride",
                                     plugs=True,
                                     connections=True) or []
    for dest, src in pairwise(overrides):
        # Even after disconnecting, the values should be
        # preserved as they were.
        # Note: animated overrides would be lost for export
        cmds.disconnectAttr(src, dest)

    # Export Selected
    with _allow_export_from_render_setup_layer():
        cmds.select(nodes, noExpand=True)
        if export:
            export()
        else:
            cmds.file(path,
                      force=True,
                      typ="mayaAscii",
                      exportSelected=True,
                      preserveReferences=False,
                      channels=True,
                      constraints=True,
                      expressions=True,
                      constructionHistory=True)

    if overrides:
        # If we have broken override connections then Maya
        # is unaware that the Render Setup layer is in an
        # invalid state. So 'hard reset' the state by going
        # to the default render layer and switching back.
        layer = rs.getVisibleRenderLayer()
        rs.switchToLayer(None)
        rs.switchToLayer(layer)
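
The `connections=True, plugs=True` query above returns a flat list alternating destination and source plugs, which `pairwise` (imported from `.lib`) groups into (dest, src) tuples. A minimal sketch of such a helper, assuming the conventional non-overlapping-pairs recipe:

def pairwise(iterable):
    """Yield consecutive non-overlapping pairs: s -> (s0, s1), (s2, s3), ..."""
    iterator = iter(iterable)
    return zip(iterator, iterator)

# Shape of a listConnections(..., connections=True, plugs=True) result:
flat = ["nodeA.attr", "override1.out", "nodeB.attr", "override2.out"]
assert list(pairwise(flat)) == [
    ("nodeA.attr", "override1.out"),
    ("nodeB.attr", "override2.out"),
]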

@@ -10,6 +10,7 @@ import maya.app.renderSetup.model.renderSetup as renderSetup

 from pype.hosts.maya.api import lib
+from pype.api import get_system_settings

 import avalon.maya

@@ -86,12 +87,28 @@ class CreateRender(avalon.maya.Creator):
         """Entry point."""
         exists = cmds.ls(self.name)
         if exists:
-            return cmds.warning("%s already exists." % exists[0])
+            cmds.warning("%s already exists." % exists[0])
+            return

         use_selection = self.options.get("useSelection")
         with lib.undo_chunk():
             self._create_render_settings()
             instance = super(CreateRender, self).process()
+            # create a namespace with the instance
+            index = 1
+            namespace_name = "_{}".format(str(instance))
+            try:
+                cmds.namespace(rm=namespace_name)
+            except RuntimeError:
+                # namespace is not empty, so we leave it untouched
+                pass
+
+            while cmds.namespace(exists=namespace_name):
+                namespace_name = "_{}{}".format(str(instance), index)
+                index += 1
+
+            namespace = cmds.namespace(add=namespace_name)
+
             cmds.setAttr("{}.machineList".format(instance), lock=True)
             self._rs = renderSetup.instance()
             layers = self._rs.getRenderLayers()
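
The block above derives a unique namespace from the instance name by first trying to remove an empty leftover namespace and then appending an increasing index. The same naming logic isolated as plain Python (hypothetical helper; the `existing` set stands in for `cmds.namespace(exists=...)`):

def unique_namespace_name(base, existing):
    """Append an increasing index to 'base' until the name is unused."""
    index = 1
    name = base
    while name in existing:
        name = "{}{}".format(base, index)
        index += 1
    return name

assert unique_namespace_name("_renderMain", set()) == "_renderMain"
assert unique_namespace_name("_renderMain",
                             {"_renderMain", "_renderMain1"}) == "_renderMain2"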

@@ -99,17 +116,19 @@ class CreateRender(avalon.maya.Creator):
                 print(">>> processing existing layers")
                 sets = []
                 for layer in layers:
-                    print(" - creating set for {}".format(layer.name()))
-                    render_set = cmds.sets(n="LAYER_{}".format(layer.name()))
+                    print(" - creating set for {}:{}".format(
+                        namespace, layer.name()))
+                    render_set = cmds.sets(
+                        n="{}:{}".format(namespace, layer.name()))
                     sets.append(render_set)
                 cmds.sets(sets, forceElement=instance)

             # if no render layers are present, create a default one with
             # an asterisk selector
             if not layers:
-                rl = self._rs.createRenderLayer('Main')
-                cl = rl.createCollection("defaultCollection")
-                cl.getSelector().setPattern('*')
+                render_layer = self._rs.createRenderLayer('Main')
+                collection = render_layer.createCollection("defaultCollection")
+                collection.getSelector().setPattern('*')

             renderer = cmds.getAttr(
                 'defaultRenderGlobals.currentRenderer').lower()

@@ -186,7 +205,6 @@ class CreateRender(avalon.maya.Creator):
         self.data["whitelist"] = False
         self.data["machineList"] = ""
         self.data["useMayaBatch"] = False
-        self.data["vrayScene"] = False
         self.data["tileRendering"] = False
         self.data["tilesX"] = 2
         self.data["tilesY"] = 2

@@ -1,27 +1,236 @@
# -*- coding: utf-8 -*-
"""Create instance of vrayscene."""
import os
import json
import appdirs
import requests

from maya import cmds
import maya.app.renderSetup.model.renderSetup as renderSetup

from pype.hosts.maya.api import lib
from pype.api import get_system_settings

import avalon.maya


class CreateVRayScene(avalon.maya.Creator):
    """Create Vray Scene."""

    label = "VRay Scene"
    family = "vrayscene"
    icon = "cubes"

    def __init__(self, *args, **kwargs):
        """Entry."""
        super(CreateVRayScene, self).__init__(*args, **kwargs)
        self._rs = renderSetup.instance()
        self.data["exportOnFarm"] = False

        # We don't need subset or asset attributes
        self.data.pop("subset", None)
        self.data.pop("asset", None)
        self.data.pop("active", None)

    def process(self):
        """Entry point."""
        exists = cmds.ls(self.name)
        if exists:
            return cmds.warning("%s already exists." % exists[0])

        self.data.update({
            "id": "avalon.vrayscene",  # We won't be publishing this one
            "suspendRenderJob": False,
            "suspendPublishJob": False,
            "extendFrames": False,
            "pools": "",
            "framesPerTask": 1
        })
        use_selection = self.options.get("useSelection")
        with lib.undo_chunk():
            self._create_vray_instance_settings()
            instance = super(CreateVRayScene, self).process()

            # create a namespace with the instance
            index = 1
            namespace_name = "_{}".format(str(instance))
            try:
                cmds.namespace(rm=namespace_name)
            except RuntimeError:
                # namespace is not empty, so we leave it untouched
                pass

            while cmds.namespace(exists=namespace_name):
                namespace_name = "_{}{}".format(str(instance), index)
                index += 1

            namespace = cmds.namespace(add=namespace_name)
            layers = self._rs.getRenderLayers()
            if use_selection:
                print(">>> processing existing layers")
                sets = []
                for layer in layers:
                    print(" - creating set for {}".format(layer.name()))
                    render_set = cmds.sets(
                        n="{}:{}".format(namespace, layer.name()))
                    sets.append(render_set)
                cmds.sets(sets, forceElement=instance)

            # if no render layers are present, create a default one with
            # an asterisk selector
            if not layers:
                render_layer = self._rs.createRenderLayer('Main')
                collection = render_layer.createCollection("defaultCollection")
                collection.getSelector().setPattern('*')

    def _create_vray_instance_settings(self):
        # get pools
        pools = []

        system_settings = get_system_settings()["modules"]

        deadline_enabled = system_settings["deadline"]["enabled"]
        muster_enabled = system_settings["muster"]["enabled"]
        deadline_url = system_settings["deadline"]["DEADLINE_REST_URL"]
        muster_url = system_settings["muster"]["MUSTER_REST_URL"]

        if deadline_enabled and muster_enabled:
            self.log.error(
                "Both Deadline and Muster are enabled. Cannot support both."
            )
            raise RuntimeError("Both Deadline and Muster are enabled")

        if deadline_enabled:
            argument = "{}/api/pools?NamesOnly=true".format(deadline_url)
            try:
                response = self._requests_get(argument)
            except requests.exceptions.ConnectionError as e:
                msg = 'Cannot connect to deadline web service'
                self.log.error(msg)
                raise RuntimeError('{} - {}'.format(msg, e))
            if not response.ok:
                self.log.warning("No pools retrieved")
            else:
                pools = response.json()
                self.data["primaryPool"] = pools
                # We add a string "-" to allow the user to not
                # set any secondary pools
                self.data["secondaryPool"] = ["-"] + pools

        if muster_enabled:
            self.log.info(">>> Loading Muster credentials ...")
            self._load_credentials()
            self.log.info(">>> Getting pools ...")
            try:
                pools = self._get_muster_pools()
            except requests.exceptions.HTTPError as e:
                if str(e).startswith("401"):
                    self.log.warning("access token expired")
                    self._show_login()
                    raise RuntimeError("Access token expired")
            except requests.exceptions.ConnectionError:
                self.log.error("Cannot connect to Muster API endpoint.")
                raise RuntimeError("Cannot connect to {}".format(muster_url))
            pool_names = []
            for pool in pools:
                self.log.info(" - pool: {}".format(pool["name"]))
                pool_names.append(pool["name"])

            self.data["primaryPool"] = pool_names

        self.data["suspendPublishJob"] = False
        self.data["priority"] = 50
        self.data["whitelist"] = False
        self.data["machineList"] = ""
        self.data["vraySceneMultipleFiles"] = False
        self.options = {"useSelection": False}  # Force no content

    def _load_credentials(self):
        """Load Muster credentials.

        Load the Muster authentication token from file; ``MUSTER_REST_URL``
        is loaded from the environment.

        Raises:
            RuntimeError: If the loaded credentials are invalid.
            AttributeError: If ``MUSTER_REST_URL`` is not set.

        """
        app_dir = os.path.normpath(appdirs.user_data_dir("pype-app", "pype"))
        file_name = "muster_cred.json"
        fpath = os.path.join(app_dir, file_name)
        file = open(fpath, "r")
        muster_json = json.load(file)
        self._token = muster_json.get("token", None)
        if not self._token:
            self._show_login()
            raise RuntimeError("Invalid access token for Muster")
        file.close()
        self.MUSTER_REST_URL = os.environ.get("MUSTER_REST_URL")
        if not self.MUSTER_REST_URL:
            raise AttributeError("Muster REST API url not set")

    def _get_muster_pools(self):
        """Get render pools from Muster.

        Raises:
            Exception: If the pool list cannot be obtained from Muster.

        """
        params = {"authToken": self._token}
        api_entry = "/api/pools/list"
        response = self._requests_get(self.MUSTER_REST_URL + api_entry,
                                      params=params)
        if response.status_code != 200:
            if response.status_code == 401:
                self.log.warning("Authentication token expired.")
                self._show_login()
            else:
                self.log.error(
                    ("Cannot get pools from "
                     "Muster: {}").format(response.status_code)
                )
                raise Exception("Cannot get pools from Muster")
        try:
            pools = response.json()["ResponseData"]["pools"]
        except ValueError as e:
            self.log.error("Invalid response from Muster server {}".format(e))
            raise Exception("Invalid response from Muster server")

        return pools

    def _show_login(self):
        # The authentication token expired, so we need to log in to Muster
        # again to get a new one. We use a Pype API call to show the login
        # window.
        api_url = "{}/muster/show_login".format(
            os.environ["PYPE_REST_API_URL"])
        self.log.debug(api_url)
        login_response = self._requests_post(api_url, timeout=1)
        if login_response.status_code != 200:
            self.log.error("Cannot show login form to Muster")
            raise Exception("Cannot show login form to Muster")

    def _requests_post(self, *args, **kwargs):
        """Wrap the requests post method.

        Disables SSL certificate validation if the ``PYPE_DONT_VERIFY_SSL``
        environment variable is found. This is useful when a Deadline or
        Muster server is running with a self-signed certificate and its
        certificate is not added to the trusted certificates on client
        machines.

        Warning:
            Disabling SSL certificate validation defeats one line
            of defense SSL is providing and is not recommended.

        """
        if "verify" not in kwargs:
            kwargs["verify"] = not os.getenv("PYPE_DONT_VERIFY_SSL")
        return requests.post(*args, **kwargs)

    def _requests_get(self, *args, **kwargs):
        """Wrap the requests get method.

        Disables SSL certificate validation if the ``PYPE_DONT_VERIFY_SSL``
        environment variable is found. This is useful when a Deadline or
        Muster server is running with a self-signed certificate and its
        certificate is not added to the trusted certificates on client
        machines.

        Warning:
            Disabling SSL certificate validation defeats one line
            of defense SSL is providing and is not recommended.

        """
        if "verify" not in kwargs:
            kwargs["verify"] = not os.getenv("PYPE_DONT_VERIFY_SSL")
        return requests.get(*args, **kwargs)
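
A tiny standalone check of the intended ``verify`` behaviour of those request wrappers (the helper name here is illustrative, not part of the commit):

import os

def _verify_flag():
    """Mirror the wrappers' decision: verify unless the env var is set."""
    return not os.getenv("PYPE_DONT_VERIFY_SSL")

os.environ.pop("PYPE_DONT_VERIFY_SSL", None)
assert _verify_flag() is True   # default: verify certificates

os.environ["PYPE_DONT_VERIFY_SSL"] = "1"
assert _verify_flag() is False  # opt-out: self-signed certificates accepted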

pype/hosts/maya/plugins/load/load_vrayscene.py (new file, 145 lines)
@@ -0,0 +1,145 @@
from avalon.maya import lib
from avalon import api
from pype.api import config
import os
import maya.cmds as cmds


class VRaySceneLoader(api.Loader):
    """Load Vray scene"""

    families = ["vrayscene_layer"]
    representations = ["vrscene"]

    label = "Import VRay Scene"
    order = -10
    icon = "code-fork"
    color = "orange"

    def load(self, context, name, namespace, data):

        from avalon.maya.pipeline import containerise
        from pype.hosts.maya.lib import namespaced

        try:
            family = context["representation"]["context"]["family"]
        except (KeyError, ValueError):
            family = "vrayscene_layer"

        asset_name = context['asset']["name"]
        namespace = namespace or lib.unique_namespace(
            asset_name + "_",
            prefix="_" if asset_name[0].isdigit() else "",
            suffix="_",
        )

        # Ensure V-Ray for Maya is loaded.
        cmds.loadPlugin("vrayformaya", quiet=True)

        with lib.maintained_selection():
            cmds.namespace(addNamespace=namespace)
            with namespaced(namespace, new=False):
                nodes, group_node = self.create_vray_scene(
                    name, filename=self.fname)

        self[:] = nodes
        if not nodes:
            return

        # colour the group node
        presets = config.get_presets(project=os.environ['AVALON_PROJECT'])
        colors = presets['plugins']['maya']['load']['colors']
        c = colors.get(family)
        if c is not None:
            cmds.setAttr("{0}.useOutlinerColor".format(group_node), 1)
            cmds.setAttr("{0}.outlinerColor".format(group_node),
                         c[0], c[1], c[2])

        return containerise(
            name=name,
            namespace=namespace,
            nodes=nodes,
            context=context,
            loader=self.__class__.__name__)

    def update(self, container, representation):

        node = container['objectName']
        assert cmds.objExists(node), "Missing container"

        members = cmds.sets(node, query=True) or []
        vraymeshes = cmds.ls(members, type="VRayScene")
        assert vraymeshes, "Cannot find VRayScene in container"

        filename = api.get_representation_path(representation)

        for vray_mesh in vraymeshes:
            cmds.setAttr("{}.FilePath".format(vray_mesh),
                         filename,
                         type="string")

        # Update metadata
        cmds.setAttr("{}.representation".format(node),
                     str(representation["_id"]),
                     type="string")

    def remove(self, container):

        # Delete container and its contents
        if cmds.objExists(container['objectName']):
            members = cmds.sets(container['objectName'], query=True) or []
            cmds.delete([container['objectName']] + members)

        # Remove the namespace, if empty
        namespace = container['namespace']
        if cmds.namespace(exists=namespace):
            members = cmds.namespaceInfo(namespace, listNamespace=True)
            if not members:
                cmds.namespace(removeNamespace=namespace)
            else:
                self.log.warning("Namespace not deleted because it "
                                 "still has members: %s", namespace)

    def switch(self, container, representation):
        self.update(container, representation)

    def create_vray_scene(self, name, filename):
        """Re-create the structure created by VRay to support vrscenes.

        Args:
            name (str): name of the asset.
            filename (str): path to the vrscene file.

        Returns:
            tuple: (list of created nodes, group node)

        """
        # Create nodes
        mesh_node_name = "VRayScene_{}".format(name)

        trans = cmds.createNode(
            "transform", name="{}".format(mesh_node_name))
        mesh = cmds.createNode(
            "mesh", name="{}_Shape".format(mesh_node_name), parent=trans)
        vray_scene = cmds.createNode(
            "VRayScene", name="{}_VRSCN".format(mesh_node_name), parent=trans)

        cmds.connectAttr(
            "{}.outMesh".format(vray_scene), "{}.inMesh".format(mesh))

        cmds.setAttr("{}.FilePath".format(vray_scene), filename, type="string")

        # Create important connections
        cmds.connectAttr("time1.outTime",
                         "{0}.inputTime".format(trans))

        # Connect mesh to initialShadingGroup
        cmds.sets([mesh], forceElement="initialShadingGroup")

        group_node = cmds.group(empty=True, name="{}_GRP".format(name))
        cmds.parent(trans, group_node)
        nodes = [trans, vray_scene, mesh, group_node]

        # Fix: Force refresh so the mesh shows correctly after creation
        cmds.refresh()

        return nodes, group_node

@@ -95,9 +95,17 @@ class CollectMayaRender(pyblish.api.ContextPlugin):
         self.maya_layers = maya_render_layers

         for layer in collected_render_layers:
             try:
-                # every layer in the set should start with `LAYER_` prefix
-                expected_layer_name = re.search(r"^LAYER_(.*)", layer).group(1)
+                if layer.startswith("LAYER_"):
+                    # support for legacy mode where render layers
+                    # started with the `LAYER_` prefix.
+                    expected_layer_name = re.search(
+                        r"^LAYER_(.*)", layer).group(1)
+                else:
+                    # the new way is to prefix the render layer name with
+                    # the instance namespace.
+                    expected_layer_name = re.search(
+                        r"^.+:(.*)", layer).group(1)
             except (IndexError, AttributeError):
                 msg = "Invalid layer name in set [ {} ]".format(layer)
                 self.log.warning(msg)
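
Both branches reduce a set-member name to the bare render layer name; a quick standalone check of the two patterns (example names are hypothetical):

import re

# legacy mode: "LAYER_" prefix
assert re.search(r"^LAYER_(.*)", "LAYER_beauty").group(1) == "beauty"
# new mode: instance-namespace prefix
assert re.search(r"^.+:(.*)", "_renderMain:beauty").group(1) == "beauty"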

@@ -277,10 +285,10 @@ class CollectMayaRender(pyblish.api.ContextPlugin):

         # handle standalone renderers
         if render_instance.data.get("vrayScene") is True:
-            data["families"].append("vrayscene")
+            data["families"].append("vrayscene_render")

         if render_instance.data.get("assScene") is True:
-            data["families"].append("assscene")
+            data["families"].append("assscene_render")

         # Include (optional) global settings
         # Get global overrides and translate to Deadline values

@@ -12,11 +12,15 @@ class CollectRenderableCamera(pyblish.api.InstancePlugin):
     order = pyblish.api.CollectorOrder + 0.02
     label = "Collect Renderable Camera(s)"
     hosts = ["maya"]
-    families = ["vrayscene",
+    families = ["vrayscene_layer",
                 "renderlayer"]

     def process(self, instance):
-        layer = instance.data["setMembers"]
+        if "vrayscene_layer" in instance.data.get("families", []):
+            layer = instance.data.get("layer")
+        else:
+            layer = instance.data["setMembers"]

         self.log.info("layer: {}".format(layer))
         cameras = cmds.ls(type="camera", long=True)
         renderable = [c for c in cameras if

pype/hosts/maya/plugins/publish/collect_vrayscene.py (new file, 155 lines)
@@ -0,0 +1,155 @@
# -*- coding: utf-8 -*-
"""Collect Vray Scene and prepare it for extraction and publishing."""
import re

import maya.app.renderSetup.model.renderSetup as renderSetup
from maya import cmds

import pyblish.api
from avalon import api
from pype.hosts.maya import lib


class CollectVrayScene(pyblish.api.InstancePlugin):
    """Collect Vray Scene.

    If export on farm is checked, a job is created to export it.
    """

    order = pyblish.api.CollectorOrder + 0.01
    label = "Collect Vray Scene"
    families = ["vrayscene"]

    def process(self, instance):
        """Collector entry point."""
        collected_render_layers = instance.data["setMembers"]
        instance.data["remove"] = True
        context = instance.context

        _rs = renderSetup.instance()
        # current_layer = _rs.getVisibleRenderLayer()

        # collect all frames we are expecting to be rendered
        renderer = cmds.getAttr(
            "defaultRenderGlobals.currentRenderer"
        ).lower()

        if renderer != "vray":
            raise AssertionError("Vray is not enabled.")

        maya_render_layers = {
            layer.name(): layer for layer in _rs.getRenderLayers()
        }

        layer_list = []
        for layer in collected_render_layers:
            # every layer name in the set is prefixed with the instance
            # namespace
            try:
                expected_layer_name = re.search(r"^.+:(.*)", layer).group(1)
            except (IndexError, AttributeError):
                msg = "Invalid layer name in set [ {} ]".format(layer)
                self.log.warning(msg)
                continue

            self.log.info("processing %s" % layer)
            # check if layer is part of renderSetup
            if expected_layer_name not in maya_render_layers:
                msg = "Render layer [ {} ] is not in Render Setup".format(
                    expected_layer_name
                )
                self.log.warning(msg)
                continue

            # check if layer is renderable
            if not maya_render_layers[expected_layer_name].isRenderable():
                msg = "Render layer [ {} ] is not renderable".format(
                    expected_layer_name
                )
                self.log.warning(msg)
                continue

            layer_name = "rs_{}".format(expected_layer_name)

            self.log.debug(expected_layer_name)
            layer_list.append(expected_layer_name)

            frame_start_render = int(self.get_render_attribute(
                "startFrame", layer=layer_name))
            frame_end_render = int(self.get_render_attribute(
                "endFrame", layer=layer_name))

            if (int(context.data['frameStartHandle']) == frame_start_render
                    and int(context.data['frameEndHandle']) == frame_end_render):  # noqa: W503, E501

                handle_start = context.data['handleStart']
                handle_end = context.data['handleEnd']
                frame_start = context.data['frameStart']
                frame_end = context.data['frameEnd']
                frame_start_handle = context.data['frameStartHandle']
                frame_end_handle = context.data['frameEndHandle']
            else:
                handle_start = 0
                handle_end = 0
                frame_start = frame_start_render
                frame_end = frame_end_render
                frame_start_handle = frame_start_render
                frame_end_handle = frame_end_render

            # Get layer specific settings, might be overrides
            data = {
                "subset": expected_layer_name,
                "layer": layer_name,
                "setMembers": cmds.sets(layer, q=True) or ["*"],
                "review": False,
                "publish": True,
                "handleStart": handle_start,
                "handleEnd": handle_end,
                "frameStart": frame_start,
                "frameEnd": frame_end,
                "frameStartHandle": frame_start_handle,
                "frameEndHandle": frame_end_handle,
                "byFrameStep": int(
                    self.get_render_attribute("byFrameStep",
                                              layer=layer_name)),
                "renderer": self.get_render_attribute("currentRenderer",
                                                      layer=layer_name),
                # instance subset
                "family": "vrayscene_layer",
                "families": ["vrayscene_layer"],
                "asset": api.Session["AVALON_ASSET"],
                "time": api.time(),
                "author": context.data["user"],
                # Add source to allow tracing back to the scene from
                # which it was submitted originally
                "source": context.data["currentFile"].replace("\\", "/"),
                "resolutionWidth": cmds.getAttr("defaultResolution.width"),
                "resolutionHeight": cmds.getAttr("defaultResolution.height"),
                "pixelAspect": cmds.getAttr("defaultResolution.pixelAspect"),
                "priority": instance.data.get("priority"),
                "useMultipleSceneFiles": instance.data.get(
                    "vraySceneMultipleFiles")
            }

            # Define nice label
            label = "{0} ({1})".format(expected_layer_name, data["asset"])
            label += " [{0}-{1}]".format(
                int(data["frameStartHandle"]), int(data["frameEndHandle"])
            )

            instance = context.create_instance(expected_layer_name)
            instance.data["label"] = label
            instance.data.update(data)

    def get_render_attribute(self, attr, layer):
        """Get attribute from render options.

        Args:
            attr (str): name of the attribute to look up.
            layer (str): name of the render layer.

        Returns:
            Attribute value

        """
        return lib.get_attr_in_layer(
            "defaultRenderGlobals.{}".format(attr), layer=layer
        )
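
The frame-range handling above keeps the context's handles only when the layer renders exactly the handle-inclusive range; any layer override collapses the handles to zero. The decision isolated as plain Python (hypothetical values):

def resolve_handles(ctx, start_render, end_render):
    """Keep context handles only when the render range matches exactly."""
    if (ctx["frameStartHandle"] == start_render
            and ctx["frameEndHandle"] == end_render):
        return ctx["handleStart"], ctx["handleEnd"]
    return 0, 0

ctx = {"frameStartHandle": 991, "frameEndHandle": 1010,
       "handleStart": 10, "handleEnd": 10}
assert resolve_handles(ctx, 991, 1010) == (10, 10)  # ranges match
assert resolve_handles(ctx, 1001, 1010) == (0, 0)   # layer overrides the range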

pype/hosts/maya/plugins/publish/extract_vrayscene.py (new file, 140 lines)
@@ -0,0 +1,140 @@
# -*- coding: utf-8 -*-
"""Extract vrayscene from specified families."""
import os
import re

import avalon.maya
import pype.api
from pype.hosts.maya.render_setup_tools import export_in_rs_layer

from maya import cmds


class ExtractVrayscene(pype.api.Extractor):
    """Extractor for vrscene."""

    label = "VRay Scene (.vrscene)"
    hosts = ["maya"]
    families = ["vrayscene_layer"]

    def process(self, instance):
        """Plugin entry point."""
        if instance.data.get("exportOnFarm"):
            self.log.info("vrayscenes will be exported on farm.")
            raise NotImplementedError(
                "exporting vrayscenes is not implemented")

        # handle sequence
        if instance.data.get("vraySceneMultipleFiles"):
            self.log.info("vrayscenes will be exported on farm.")
            raise NotImplementedError(
                "exporting vrayscene sequences not implemented yet")

        vray_settings = cmds.ls(type="VRaySettingsNode")
        if not vray_settings:
            node = cmds.createNode("VRaySettingsNode")
        else:
            node = vray_settings[0]

        # setMembers on vrayscene_layer should contain the layer name.
        layer_name = instance.data.get("layer")

        staging_dir = self.staging_dir(instance)
        self.log.info("staging: {}".format(staging_dir))
        template = cmds.getAttr("{}.vrscene_filename".format(node))
        start_frame = instance.data.get(
            "frameStartHandle") if instance.data.get(
                "vraySceneMultipleFiles") else None
        formatted_name = self.format_vray_output_filename(
            os.path.basename(instance.data.get("source")),
            layer_name,
            template,
            start_frame
        )

        file_path = os.path.join(
            staging_dir, "vrayscene", *formatted_name.split("/"))

        # Write out vrscene file
        self.log.info("Writing: '%s'" % file_path)
        with avalon.maya.maintained_selection():
            if "*" not in instance.data["setMembers"]:
                self.log.info(
                    "Exporting: {}".format(instance.data["setMembers"]))
                set_members = instance.data["setMembers"]
                cmds.select(set_members, noExpand=True)
            else:
                self.log.info("Exporting all ...")
                set_members = cmds.ls(
                    long=True, objectsOnly=True,
                    geometry=True, lights=True, cameras=True)
                cmds.select(set_members, noExpand=True)

            self.log.info("Appending layer name {}".format(layer_name))
            set_members.append(layer_name)

            export_in_rs_layer(
                file_path,
                set_members,
                export=lambda: cmds.file(
                    file_path, type="V-Ray Scene",
                    pr=True, es=True, force=True))

        if "representations" not in instance.data:
            instance.data["representations"] = []

        files = file_path

        representation = {
            'name': 'vrscene',
            'ext': 'vrscene',
            'files': os.path.basename(files),
            "stagingDir": os.path.dirname(files),
        }
        instance.data["representations"].append(representation)

        self.log.info("Extracted instance '%s' to: %s"
                      % (instance.name, staging_dir))

    @staticmethod
    def format_vray_output_filename(
            filename, layer, template, start_frame=None):
        """Format the expected output file of the Export job.

        Example:
            filename: /mnt/projects/foo/shot010_v006.mb
            template: <Scene>/<Layer>/<Layer>
            result: "shot010_v006/CHARS/CHARS.vrscene"

        Args:
            filename (str): path to scene file.
            layer (str): layer name.
            template (str): token template.
            start_frame (int, optional): start frame - if set, we use
                multiple files export mode.

        Returns:
            str: formatted path.

        """
        # format template to match python's format specs
        template = re.sub(r"<(\w+?)>", r"{\1}", template.lower())

        # Ensure filename has no extension
        file_name, _ = os.path.splitext(filename)
        mapping = {
            "scene": file_name,
            "layer": layer
        }

        output_path = template.format(**mapping)

        if start_frame:
            filename_zero = "{}_{:04d}.vrscene".format(
                output_path, start_frame)
        else:
            filename_zero = "{}.vrscene".format(output_path)

        result = filename_zero.replace("\\", "/")

        return result
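
A worked example of the token substitution done by `format_vray_output_filename`, reproduced standalone (same steps as the static method above; the template value matches the validator's expected `vrscene_filename` minus its leading directory):

import os
import re

def format_vray_output_filename(filename, layer, template, start_frame=None):
    template = re.sub(r"<(\w+?)>", r"{\1}", template.lower())
    file_name, _ = os.path.splitext(filename)
    output_path = template.format(scene=file_name, layer=layer)
    if start_frame:
        return "{}_{:04d}.vrscene".format(output_path, start_frame)
    return "{}.vrscene".format(output_path)

assert format_vray_output_filename(
    "shot010_v006.mb", "CHARS", "<Scene>/<Layer>/<Layer>"
) == "shot010_v006/CHARS/CHARS.vrscene"
assert format_vray_output_filename(
    "shot010_v006.mb", "CHARS", "<Scene>/<Layer>/<Layer>", start_frame=1001
) == "shot010_v006/CHARS/CHARS_1001.vrscene"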

@@ -1,3 +1,5 @@
+# -*- coding: utf-8 -*-
+"""Validate VRay Translator settings."""
 import pyblish.api
 import pype.api
 from pype.plugin import contextplugin_should_run

@@ -6,14 +8,15 @@ from maya import cmds


 class ValidateVRayTranslatorEnabled(pyblish.api.ContextPlugin):
+    """Validate VRay Translator settings for extracting vrscenes."""

     order = pype.api.ValidateContentsOrder
     label = "VRay Translator Settings"
-    families = ["vrayscene"]
+    families = ["vrayscene_layer"]
     actions = [pype.api.RepairContextAction]

     def process(self, context):
-
+        """Plugin entry point."""
         # Workaround bug pyblish-base#250
         if not contextplugin_should_run(self, context):
             return

@@ -24,7 +27,7 @@ class ValidateVRayTranslatorEnabled(pyblish.api.ContextPlugin):

     @classmethod
     def get_invalid(cls, context):
-
+        """Get invalid instances."""
         invalid = False

         # Get vraySettings node

@@ -34,16 +37,26 @@ class ValidateVRayTranslatorEnabled(pyblish.api.ContextPlugin):
         node = vray_settings[0]

         if cmds.getAttr("{}.vrscene_render_on".format(node)):
-            cls.log.error("Render is enabled, this should be disabled")
+            cls.log.error(
+                "Render is enabled, for export it should be disabled")
             invalid = True

         if not cmds.getAttr("{}.vrscene_on".format(node)):
             cls.log.error("Export vrscene not enabled")
             invalid = True

-        if not cmds.getAttr("{}.misc_eachFrameInFile".format(node)):
-            cls.log.error("Each Frame in File not enabled")
-            invalid = True
+        for instance in context:
+            if "vrayscene_layer" not in instance.data.get("families"):
+                continue
+
+            if instance.data.get("vraySceneMultipleFiles"):
+                if not cmds.getAttr("{}.misc_eachFrameInFile".format(node)):
+                    cls.log.error("Each Frame in File not enabled")
+                    invalid = True
+            else:
+                if cmds.getAttr("{}.misc_eachFrameInFile".format(node)):
+                    cls.log.error("Each Frame in File is enabled")
+                    invalid = True

         vrscene_filename = cmds.getAttr("{}.vrscene_filename".format(node))
         if vrscene_filename != "vrayscene/<Scene>/<Layer>/<Layer>":

@@ -54,7 +67,7 @@ class ValidateVRayTranslatorEnabled(pyblish.api.ContextPlugin):

     @classmethod
     def repair(cls, context):
-
+        """Repair invalid settings."""
         vray_settings = cmds.ls(type="VRaySettingsNode")
         if not vray_settings:
             node = cmds.createNode("VRaySettingsNode")

@@ -63,7 +76,14 @@ class ValidateVRayTranslatorEnabled(pyblish.api.ContextPlugin):

         cmds.setAttr("{}.vrscene_render_on".format(node), False)
         cmds.setAttr("{}.vrscene_on".format(node), True)
-        cmds.setAttr("{}.misc_eachFrameInFile".format(node), True)
+        for instance in context:
+            if "vrayscene" not in instance.data.get("families"):
+                continue
+
+            if instance.data.get("vraySceneMultipleFiles"):
+                cmds.setAttr("{}.misc_eachFrameInFile".format(node), True)
+            else:
+                cmds.setAttr("{}.misc_eachFrameInFile".format(node), False)
         cmds.setAttr("{}.vrscene_filename".format(node),
                      "vrayscene/<Scene>/<Layer>/<Layer>",
                      type="string")

@@ -168,7 +168,7 @@ def writes_version_sync():
             each['file'].setValue(node_new_file)
             if not os.path.isdir(os.path.dirname(node_new_file)):
                 log.warning("Path does not exist! I am creating it.")
-                os.makedirs(os.path.dirname(node_new_file), 0o766)
+                os.makedirs(os.path.dirname(node_new_file))
     except Exception as e:
         log.warning(
             "Write node: `{}` has no version in path: {}".format(

@@ -350,7 +350,7 @@ def create_write_node(name, data, input=None, prenodes=None, review=True):
     # create directory
     if not os.path.isdir(os.path.dirname(fpath)):
         log.warning("Path does not exist! I am creating it.")
-        os.makedirs(os.path.dirname(fpath), 0o766)
+        os.makedirs(os.path.dirname(fpath))

     _data = OrderedDict({
         "file": fpath

@@ -52,7 +52,7 @@ class PreCollectNukeInstances(pyblish.api.ContextPlugin):

             # establish families
             family = avalon_knob_data["family"]
-            families_ak = avalon_knob_data.get("families")
+            families_ak = avalon_knob_data.get("families", [])
             families = list()

             if families_ak:

@@ -21,6 +21,7 @@ class ExtractSequence(pyblish.api.Extractor):
         "flc": ".fli",
         "gif": ".gif",
         "ilbm": ".iff",
+        "jpg": ".jpg",
         "jpeg": ".jpg",
         "pcx": ".pcx",
         "png": ".png",

@@ -36,6 +37,7 @@ class ExtractSequence(pyblish.api.Extractor):
         "bmp",
         "dpx",
         "ilbm",
+        "jpg",
         "jpeg",
         "png",
         "sun",

@@ -170,10 +172,14 @@ class ExtractSequence(pyblish.api.Extractor):
         if not thumbnail_fullpath:
             return

+        thumbnail_ext = os.path.splitext(
+            thumbnail_fullpath
+        )[1].replace(".", "")
         # Create thumbnail representation
         thumbnail_repre = {
             "name": "thumbnail",
-            "ext": ext,
+            "ext": thumbnail_ext,
             "outputName": "thumb",
             "files": os.path.basename(thumbnail_fullpath),
             "stagingDir": output_dir,
             "tags": ["thumbnail"]

@@ -306,11 +312,11 @@ class ExtractSequence(pyblish.api.Extractor):
         if thumbnail_filename:
             basename, ext = os.path.splitext(thumbnail_filename)
             if not ext:
-                ext = ".png"
+                ext = ".jpg"
             thumbnail_fullpath = "/".join([output_dir, basename + ext])
             all_output_files[thumbnail_filename] = thumbnail_fullpath
             # Force save mode to jpg for thumbnail
-            george_script_lines.append("tv_SaveMode \"PNG\"")
+            george_script_lines.append("tv_SaveMode \"JPG\"")
             # Go to frame
             george_script_lines.append("tv_layerImage {}".format(first_frame))
             # Store image to output

@@ -142,7 +142,7 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin):

         # define chunk and priority
         chunk_size = instance.data.get("deadlineChunkSize")
-        if chunk_size == 0:
+        if chunk_size == 0 and self.deadline_chunk_size:
             chunk_size = self.deadline_chunk_size

         priority = instance.data.get("deadlinePriority")

@@ -155,7 +155,7 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin):
             "BatchName": script_name,

             # Asset dependency to wait for at least the scene file to sync.
-            "AssetDependency0": script_path,
+            # "AssetDependency0": script_path,

             # Job name, as seen in Monitor
             "Name": jobname,

@@ -62,7 +62,7 @@ Needed configuration:
 - `"local_id": "local_0",` -- identifier of the user's pype
 - `"retry_cnt": 3,` -- how many times to retry syncing a file in case of error
 - `"loop_delay": 60,` -- how many seconds between sync loops
-- `"active_site": "studio",` -- which site the user currently uses, 'studio' by default,
+- `"publish_site": "studio",` -- which site the user currently uses, 'studio' by default,
     could be the same as 'local_id' if the user is working
     from home without connection to the studio
     infrastructure
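
A hypothetical local settings fragment consistent with the keys documented above (values are placeholders, not defaults shipped by the commit):

sync_server_config = {
    "local_id": "local_0",     # identifier of the user's pype
    "retry_cnt": 3,            # retries per file on error
    "loop_delay": 60,          # seconds between sync loops
    "publish_site": "studio",  # site the user publishes to
}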

@@ -71,7 +71,7 @@ Needed configuration:
     Used in IntegrateNew to prepare a skeleton for
     syncing in the representation record.
     Leave empty if no syncing is wanted.
-This is a general configuration; 'local_id', 'active_site' and 'remote_site'
+This is a general configuration; 'local_id', 'publish_site' and 'remote_site'
 will be set and changed by a GUI in the future.

 `pype/settings/defaults/project_settings/global.json`.`sync_server`.`sites`:

@@ -3,6 +3,13 @@ from abc import ABCMeta, abstractmethod

 class AbstractProvider(metaclass=ABCMeta):

+    def __init__(self, site_name, tree=None, presets=None):
+        self.presets = None
+        self.active = False
+        self.site_name = site_name
+
+        self.presets = presets
+
     @abstractmethod
     def is_active(self):
         """

@@ -27,13 +34,14 @@ class AbstractProvider(metaclass=ABCMeta):
         pass

     @abstractmethod
-    def download_file(self, source_path, local_path):
+    def download_file(self, source_path, local_path, overwrite=True):
         """
         Download file from provider into local system

         Args:
             source_path (string): absolute path on provider
             local_path (string): absolute path on local
+            overwrite (bool): default set to True
         Returns:
             None
         """

@@ -351,6 +351,10 @@ class GDriveHandler(AbstractProvider):
         last_tick = status = response = None
         status_val = 0
         while response is None:
+            if server.is_representation_paused(representation['_id'],
+                                               check_parents=True,
+                                               project_name=collection):
+                raise ValueError("Paused during process, please redo.")
             if status:
                 status_val = float(status.progress())
             if not last_tick or \

@@ -433,6 +437,10 @@ class GDriveHandler(AbstractProvider):
         last_tick = status = response = None
         status_val = 0
         while response is None:
+            if server.is_representation_paused(representation['_id'],
+                                               check_parents=True,
+                                               project_name=collection):
+                raise ValueError("Paused during process, please redo.")
             if status:
                 status_val = float(status.progress())
             if not last_tick or \
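
Both loops poll the same guard each iteration: before requesting the next chunk they ask the server whether the representation (or one of its parents) was paused. The cooperative-cancellation pattern isolated (all names here are illustrative):

def transfer_with_pause_check(chunks, is_paused):
    """Process chunks, aborting cleanly when a pause is requested."""
    done = []
    for chunk in chunks:
        if is_paused():
            raise ValueError("Paused during process, please redo.")
        done.append(chunk)  # stand-in for one next_chunk() transfer step
    return done

state = {"calls": 0}
def is_paused():
    state["calls"] += 1
    return state["calls"] > 2  # pause flag flips after the second chunk

try:
    transfer_with_pause_check([1, 2, 3, 4], is_paused)
except ValueError as exc:
    print(exc)  # Paused during process, please redo.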

@@ -1,10 +1,6 @@
-from enum import Enum
 from .gdrive import GDriveHandler
-
-
-class Providers(Enum):
-    LOCAL = 'studio'
-    GDRIVE = 'gdrive'
+from .local_drive import LocalDriveHandler


 class ProviderFactory:

@@ -94,3 +90,4 @@ factory = ProviderFactory()
 # 7 denotes the number of files that could be synced in a single loop -
 # learned by trial and error
 factory.register_provider('gdrive', GDriveHandler, 7)
+factory.register_provider('local_drive', LocalDriveHandler, 10)

pype/modules/sync_server/providers/local_drive.py (new file, 59 lines)
@@ -0,0 +1,59 @@
from __future__ import print_function
import os.path
import shutil

from pype.api import Logger
from .abstract_provider import AbstractProvider

log = Logger().get_logger("SyncServer")


class LocalDriveHandler(AbstractProvider):
    """Handles required operations on disks mounted by the OS."""

    def is_active(self):
        return True

    def upload_file(self, source_path, target_path, overwrite=True):
        """Copy a file from 'source_path' to 'target_path'."""
        if os.path.exists(source_path):
            if overwrite:
                shutil.copy(source_path, target_path)
            else:
                if os.path.exists(target_path):
                    raise ValueError("File {} exists, set overwrite".
                                     format(target_path))

    def download_file(self, source_path, local_path, overwrite=True):
        """Download a file from 'source_path' to 'local_path'."""
        if os.path.exists(source_path):
            if overwrite:
                shutil.copy(source_path, local_path)
            else:
                if os.path.exists(local_path):
                    raise ValueError("File {} exists, set overwrite".
                                     format(local_path))

    def delete_file(self, path):
        """Delete a file at 'path'."""
        if os.path.exists(path):
            os.remove(path)

    def list_folder(self, folder_path):
        """Return a list of files and subfolders in 'folder_path'.

        Note: os.walk makes this traversal recursive.
        """
        lst = []
        if os.path.isdir(folder_path):
            for (dir_path, dir_names, file_names) in os.walk(folder_path):
                for name in file_names:
                    lst.append(os.path.join(dir_path, name))
                for name in dir_names:
                    lst.append(os.path.join(dir_path, name))

        return lst

Binary files:
  pype/modules/sync_server/providers/resources/local_drive.png (new file, 766 B)
  (changed image resource: 557 B before, 1.2 KiB after)
  pype/modules/sync_server/resources/paused.png (new file, 692 B)
  pype/modules/sync_server/resources/synced.png (new file, 561 B)

Two file diffs suppressed because they are too large.
@ -298,6 +298,62 @@ class IntegrateMasterVersion(pyblish.api.InstancePlugin):
|
|||
repre["data"] = repre_data
|
||||
repre.pop("_id", None)
|
||||
|
||||
# Prepare paths of source and destination files
|
||||
if len(published_files) == 1:
|
||||
src_to_dst_file_paths.append(
|
||||
(published_files[0], template_filled)
|
||||
)
|
||||
else:
|
||||
collections, remainders = clique.assemble(published_files)
|
||||
if remainders or not collections or len(collections) > 1:
|
||||
raise Exception((
|
||||
"Integrity error. Files of published representation "
|
||||
"is combination of frame collections and single files."
|
||||
"Collections: `{}` Single files: `{}`"
|
||||
).format(str(collections),
|
||||
str(remainders)))
|
||||
|
||||
src_col = collections[0]
|
||||
|
||||
# Get head and tail for collection
|
||||
frame_splitter = "_-_FRAME_SPLIT_-_"
|
||||
anatomy_data["frame"] = frame_splitter
|
||||
_anatomy_filled = anatomy.format(anatomy_data)
|
||||
_template_filled = _anatomy_filled["master"]["path"]
|
||||
head, tail = _template_filled.split(frame_splitter)
|
||||
padding = int(
|
||||
anatomy.templates["render"].get(
|
||||
"frame_padding",
|
||||
anatomy.templates["render"].get("padding")
|
||||
)
|
||||
)
|
||||
|
||||
dst_col = clique.Collection(
|
||||
head=head, padding=padding, tail=tail
|
||||
)
|
||||
dst_col.indexes.clear()
|
||||
dst_col.indexes.update(src_col.indexes)
|
||||
for src_file, dst_file in zip(src_col, dst_col):
|
||||
src_to_dst_file_paths.append(
|
||||
(src_file, dst_file)
|
||||
)
|
||||
|
||||
# replace original file name with master name in repre doc
|
||||
for index in range(len(repre.get("files"))):
|
||||
file = repre.get("files")[index]
|
||||
file_name = os.path.basename(file.get('path'))
|
||||
for src_file, dst_file in src_to_dst_file_paths:
|
||||
src_file_name = os.path.basename(src_file)
|
||||
if src_file_name == file_name:
|
||||
repre["files"][index]["path"] = self._update_path(
|
||||
anatomy, repre["files"][index]["path"],
|
||||
src_file, dst_file)
|
||||
|
||||
repre["files"][index]["hash"] = self._update_hash(
|
||||
repre["files"][index]["hash"],
|
||||
src_file_name, dst_file
|
||||
)
|
||||
|
||||
schema.validate(repre)
|
||||
|
||||
repre_name_low = repre["name"].lower()
|
||||
|
|
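As a side note, the frame-splitter technique above is worth seeing in isolation. A minimal sketch, assuming a made-up template path and frame range (only 'clique.Collection' itself is the real API):

    import clique

    # A filled template where the frame token was replaced by a sentinel.
    frame_splitter = "_-_FRAME_SPLIT_-_"
    template_filled = "/proj/master/beauty." + frame_splitter + ".exr"
    head, tail = template_filled.split(frame_splitter)

    # Destination collection mirrors the source frame indexes.
    dst_col = clique.Collection(head=head, padding=4, tail=tail)
    dst_col.indexes.update({1001, 1002, 1003})
    print(list(dst_col))
    # ['/proj/master/beauty.1001.exr', '/proj/master/beauty.1002.exr', ...]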
@@ -333,46 +389,6 @@ class IntegrateMasterVersion(pyblish.api.InstancePlugin):
                InsertOne(repre)
            )

            # Prepare paths of source and destination files
            if len(published_files) == 1:
                src_to_dst_file_paths.append(
                    (published_files[0], template_filled)
                )
                continue

            collections, remainders = clique.assemble(published_files)
            if remainders or not collections or len(collections) > 1:
                raise Exception((
                    "Integrity error. Files of published representation "
                    "is combination of frame collections and single files."
                    "Collections: `{}` Single files: `{}`"
                ).format(str(collections), str(remainders)))

            src_col = collections[0]

            # Get head and tail for collection
            frame_splitter = "_-_FRAME_SPLIT_-_"
            anatomy_data["frame"] = frame_splitter
            _anatomy_filled = anatomy.format(anatomy_data)
            _template_filled = _anatomy_filled["master"]["path"]
            head, tail = _template_filled.split(frame_splitter)
            padding = int(
                anatomy.templates["render"].get(
                    "frame_padding",
                    anatomy.templates["render"].get("padding")
                )
            )

            dst_col = clique.Collection(
                head=head, padding=padding, tail=tail
            )
            dst_col.indexes.clear()
            dst_col.indexes.update(src_col.indexes)
            for src_file, dst_file in zip(src_col, dst_col):
                src_to_dst_file_paths.append(
                    (src_file, dst_file)
                )

        self.path_checks = []

        # Copy(hardlink) paths of source and destination files
@@ -533,3 +549,39 @@ class IntegrateMasterVersion(pyblish.api.InstancePlugin):
                "type": "representation"
            }))
        return (master_version, master_repres)

    def _update_path(self, anatomy, path, src_file, dst_file):
        """Replaces source path with new master path.

        'path' contains the original path with version; it must be replaced
        with the 'master' path (with the 'master' label and without version).

        Args:
            anatomy (Anatomy): used to get rootless style of path
            path (str): path from DB
            src_file (str): original file path
            dst_file (str): master file path
        """
        _, rootless = anatomy.find_root_template_from_path(
            dst_file
        )
        _, rtls_src = anatomy.find_root_template_from_path(
            src_file
        )
        return path.replace(rtls_src, rootless)

    def _update_hash(self, hash, src_file_name, dst_file):
        """Updates hash value with proper master name."""
        src_file_name = self._get_name_without_ext(src_file_name)
        master_file_name = self._get_name_without_ext(dst_file)
        return hash.replace(src_file_name, master_file_name)

    def _get_name_without_ext(self, value):
        file_name = os.path.basename(value)
        file_name, _ = os.path.splitext(file_name)
        return file_name
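To make '_update_hash' concrete, a toy walk-through; the file names and hash format are invented for illustration:

    import os

    # Hypothetical representation hash that embeds the versioned file stem.
    hash_value = "renderMain_v003|e3b0c442"
    src_file_name = "renderMain_v003.exr"
    dst_file = "/roots/work/master/renderMain_master.exr"

    # Mirrors _update_hash: swap the versioned stem for the master stem.
    src_stem = os.path.splitext(os.path.basename(src_file_name))[0]
    dst_stem = os.path.splitext(os.path.basename(dst_file))[0]
    print(hash_value.replace(src_stem, dst_stem))
    # -> "renderMain_master|e3b0c442"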
@@ -66,6 +66,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
        "vdbcache",
        "scene",
        "vrayproxy",
        "vrayscene_layer",
        "render",
        "prerender",
        "imagesequence",
@@ -701,7 +702,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
            'type': 'subset',
            '_id': io.ObjectId(subset["_id"])
        }, {'$set': {'data.subsetGroup':
            instance.data.get('subsetGroup')}}
            instance.data.get('subsetGroup')}}
        )

        # Update families on subset.
@@ -878,9 +879,9 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
                path = rootless_path
            else:
                self.log.warning((
                    "Could not find root path for remapping \"{}\"."
                    " This may cause issues on farm."
                ).format(path))
                    "Could not find root path for remapping \"{}\"."
                    " This may cause issues on farm."
                ).format(path))
        return path

    def get_files_info(self, instance, integrated_file_sizes):
@@ -40,8 +40,50 @@ class PypeCommands:
        from pype.tools import standalonepublish
        standalonepublish.main()

    def publish(self, gui, paths):
        pass
    @staticmethod
    def publish(paths):
        """Start headless publishing.

        Publish uses json files from the passed paths argument.

        Args:
            paths (list): Paths to jsons.

        Raises:
            RuntimeError: When there is no path to process.
        """
        if not any(paths):
            raise RuntimeError("No publish paths specified")

        from pype import install, uninstall
        from pype.api import Logger

        # Register target and host
        import pyblish.api
        import pyblish.util

        log = Logger.get_logger()

        install()

        pyblish.api.register_target("filesequence")
        pyblish.api.register_host("shell")

        os.environ["PYPE_PUBLISH_DATA"] = os.pathsep.join(paths)

        log.info("Running publish ...")

        # Error exit as soon as any error occurs.
        error_format = "Failed {plugin.__name__}: {error} -- {error.traceback}"

        for result in pyblish.util.publish_iter():
            if result["error"]:
                log.error(error_format.format(**result))
                uninstall()
                sys.exit(1)

        log.info("Publish finished.")
        uninstall()

    def texture_copy(self, project, asset, path):
        pass
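The method backs the CLI's 'publish' subcommand, so it can also be exercised directly from Python. A minimal sketch; the module path of PypeCommands and the json location are assumptions:

    from pype.pype_commands import PypeCommands  # import path assumed

    # Json payloads produced by a prior extraction step (placeholder path).
    PypeCommands.publish(["/tmp/publish_payload.json"])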
@@ -180,7 +180,7 @@
        }
    },
    "sync_server": {
        "enabled": false,
        "enabled": true,
        "config": {
            "local_id": "local_0",
            "retry_cnt": "3",
@@ -192,7 +192,23 @@
            "gdrive": {
                "provider": "gdrive",
                "credentials_url": "",
                "root": "/sync_testing/test"
                "root": {
                    "work": ""
                }
            },
            "studio": {
                "provider": "local_drive",
                "credentials_url": "",
                "root": {
                    "work": ""
                }
            },
            "local_0": {
                "provider": "local_drive",
                "credentials_url": "",
                "root": {
                    "work": ""
                }
            }
        }
    }
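The per-site "root" value changes here from a single string to a named mapping, so each site can expose several roots. A hedged sketch of how a consumer might resolve a path against such a mapping; the site data and join logic are illustrative only:

    sites = {
        "studio": {
            "provider": "local_drive",
            "root": {"work": "/mnt/studio/work"},
        }
    }

    def resolve(site, root_key, relative):
        # Look up the configured root for a site and join a relative path.
        base = sites[site]["root"][root_key]
        return "{}/{}".format(base.rstrip("/"), relative)

    print(resolve("studio", "work", "projectA/sceneB.ma"))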
@@ -56,12 +56,20 @@
        },
        "vray": {
            "enabled": true,
            "environment": {},
            "environment": {
                "__environment_keys__": {
                    "vray": []
                }
            },
            "variants": {}
        },
        "yeti": {
            "enabled": true,
            "environment": {},
            "environment": {
                "__environment_keys__": {
                    "yeti": []
                }
            },
            "variants": {}
        },
        "other": {
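Judging from the schema change below, "__environment_keys__" lists which keys of the block belong to a named environment group. A speculative sketch of that data shape only (the key name and value are invented; the real consumption logic lives in pype's settings code):

    environment = {
        "__environment_keys__": {"vray": ["VRAY_EXAMPLE_VAR"]},
        "VRAY_EXAMPLE_VAR": "example-value",  # hypothetical entry
    }

    # Collect only the keys declared for the "vray" group.
    grouped = {
        key: environment[key]
        for key in environment["__environment_keys__"]["vray"]
    }
    print(grouped)  # {'VRAY_EXAMPLE_VAR': 'example-value'}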
@@ -494,7 +494,7 @@ class SystemSettings(RootEntity):

        Implementation of abstract method.
        """
        return DEFAULTS_DIR
        return os.path.join(DEFAULTS_DIR, SYSTEM_SETTINGS_KEY)

    def _save_studio_values(self):
        settings_value = self.settings_value()
@@ -66,10 +66,14 @@
            "label": "Credentials url"
        },
        {
            "type": "text",
            "type": "dict-modifiable",
            "key": "root",
            "label": "Root"
        }]
            "label": "Roots",
            "collapsable": false,
            "collapsable_key": false,
            "object_type": "text"
        }
    ]
}
}
]
@@ -13,7 +13,8 @@
        {
            "key": "environment",
            "label": "Environment",
            "type": "raw-json"
            "type": "raw-json",
            "env_group_key": "vray"
        },
        {
            "type": "schema_template",
@@ -13,7 +13,8 @@
        {
            "key": "environment",
            "label": "Environment",
            "type": "raw-json"
            "type": "raw-json",
            "env_group_key": "yeti"
        },
        {
            "type": "schema_template",