Mirror of https://github.com/ynput/ayon-core.git (synced 2025-12-25 21:32:15 +01:00)

Merge branch 'fusion_integration' of github.com:BigRoy/colorbleed-config into fusion_integration

Commit 3ebe8a567c
10 changed files with 321 additions and 147 deletions
@@ -1,5 +1,101 @@
-from avalon import api
+import os
+import contextlib
+
+from avalon import api
+import avalon.io as io
+
+
+@contextlib.contextmanager
+def preserve_inputs(tool, inputs):
+    """Preserve the tool's inputs after the context"""
+
+    comp = tool.Comp()
+
+    values = {}
+    for name in inputs:
+        tool_input = getattr(tool, name)
+        value = tool_input[comp.TIME_UNDEFINED]
+        values[name] = value
+
+    try:
+        yield
+    finally:
+        for name, value in values.items():
+            tool_input = getattr(tool, name)
+            tool_input[comp.TIME_UNDEFINED] = value
+
+
+@contextlib.contextmanager
+def preserve_trim(loader, log=None):
+    """Preserve the relative trim of the Loader tool.
+
+    This tries to preserve the loader's trim (trim in and trim out) after
+    the context by reapplying the "amount" it trims on the clip's length at
+    start and end.
+
+    """
+
+    # Get original trim as amount of "trimming" from length
+    time = loader.Comp().TIME_UNDEFINED
+    length = loader.GetAttrs()["TOOLIT_Clip_Length"][1] - 1
+    trim_from_start = loader["ClipTimeStart"][time]
+    trim_from_end = length - loader["ClipTimeEnd"][time]
+
+    try:
+        yield
+    finally:
+        length = loader.GetAttrs()["TOOLIT_Clip_Length"][1] - 1
+        if trim_from_start > length:
+            trim_from_start = length
+            if log:
+                log.warning("Reducing trim in to %d "
+                            "(because of fewer frames)" % trim_from_start)
+
+        remainder = length - trim_from_start
+        if trim_from_end > remainder:
+            trim_from_end = remainder
+            if log:
+                log.warning("Reducing trim out to %d "
+                            "(because of fewer frames)" % trim_from_end)
+
+        loader["ClipTimeStart"][time] = trim_from_start
+        loader["ClipTimeEnd"][time] = length - trim_from_end
+
+
+def loader_shift(loader, frame, relative=True):
+    """Shift the loader's global in time by `frame`, preserving duration.
+
+    This moves the loader by `frame` frames while preserving the global
+    duration. When `relative` is False it will instead shift the global in
+    to the given start frame.
+
+    Args:
+        loader (tool): The Fusion Loader tool.
+        frame (int): The amount of frames to move.
+        relative (bool): When True the shift is relative, else the shift
+            will change the global in to `frame`.
+
+    Returns:
+        int: The resulting relative frame change (how much it moved)
+
+    """
+    comp = loader.Comp()
+    time = comp.TIME_UNDEFINED
+
+    if not relative:
+        start = loader["GlobalIn"][time]
+        frame -= start
+
+    # Shifting global in will try to automatically compensate for the change
+    # in the "ClipTimeStart" and "HoldFirstFrame" inputs, so we preserve
+    # those input values to "just shift" the clip
+    with preserve_inputs(loader, inputs=["ClipTimeStart",
+                                         "ClipTimeEnd",
+                                         "HoldFirstFrame",
+                                         "HoldLastFrame"]):
+        loader["GlobalIn"][time] = loader["GlobalIn"][time] + frame
+
+    return int(frame)
+
+
 class FusionLoadSequence(api.Loader):
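The helpers above are plain module-level utilities, so they can also be exercised outside the loader. A minimal usage sketch (illustrative only; it assumes a running Fusion session exposing the scripting globals and an existing Loader tool named "Loader1"):

    # Hypothetical usage of the helpers added in this commit
    comp = fusion.GetCurrentComp()        # `fusion` is the scripting global
    loader = comp.FindTool("Loader1")     # assumes this Loader exists

    # Swap the clip path without losing trim or hold-frame settings
    with preserve_trim(loader):
        with preserve_inputs(loader, inputs=("HoldFirstFrame",
                                             "HoldLastFrame")):
            loader["Clip"] = "/path/to/new_sequence.0001.exr"

    # Move the clip so its global in sits at frame 1001
    moved = loader_shift(loader, 1001, relative=False)
    print("Shifted loader by %d frames" % moved)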
@@ -36,46 +132,97 @@ class FusionLoadSequence(api.Loader):
             tool = comp.AddTool("Loader", *args)
             tool["Clip"] = path
 
+            # Set global in point to start frame (if in version.data)
+            start = context["version"]["data"].get("startFrame", None)
+            if start is not None:
+                loader_shift(tool, start, relative=False)
+
             imprint_container(tool,
                               name=name,
                               namespace=namespace,
                               context=context,
                               loader=self.__class__.__name__)
 
-    def _get_first_image(self, root):
-        """Get first file in representation root"""
-        files = sorted(os.listdir(root))
-        return os.path.join(root, files[0])
-
     def update(self, container, representation):
         """Update the Loader's path
 
         Fusion automatically tries to reset some variables when changing
         the loader's path to a new file. These automatic changes are to its
         inputs:
 
-            - ClipTimeStart (if duration changes)
-            - ClipTimeEnd (if duration changes)
-            - GlobalIn (if duration changes)
-            - GlobalEnd (if duration changes)
-            - Reverse (sometimes?)
-            - Loop (sometimes?)
-            - Depth (always resets to "Format")
-            - KeyCode (always resets to "")
-            - TimeCodeOffset (always resets to 0)
+            - ClipTimeStart: Fusion resets to 0 if duration changes
+                - We keep the trim in as close as possible to the previous
+                  value. When there are fewer frames than the amount of trim
+                  we reduce it accordingly.
+
+            - ClipTimeEnd: Fusion resets to 0 if duration changes
+                - We keep the trim out as close as possible to the previous
+                  value within the new amount of frames after trim in
+                  (ClipTimeStart) has been set.
+
+            - GlobalIn: Fusion resets to the comp's global in if duration
+              changes
+                - We change it to the "startFrame"
+
+            - GlobalEnd: Fusion resets to globalIn + length if duration
+              changes
+                - We do the same as Fusion - allow Fusion to take control.
+
+            - HoldFirstFrame: Fusion resets this to 0
+                - We preserve the value.
+
+            - HoldLastFrame: Fusion resets this to 0
+                - We preserve the value.
+
+            - Reverse: Fusion resets to disabled if "Loop" is not enabled.
+                - We preserve the value.
+
+            - Depth: Fusion resets to "Format"
+                - We preserve the value.
+
+            - KeyCode: Fusion resets to ""
+                - We preserve the value.
+
+            - TimeCodeOffset: Fusion resets to 0
+                - We preserve the value.
+
         """
 
         from avalon.fusion import comp_lock_and_undo_chunk
 
-        root = api.get_representation_path(representation)
-        path = self._get_first_image(root)
-
         tool = container["_tool"]
         assert tool.ID == "Loader", "Must be Loader"
         comp = tool.Comp()
 
+        root = api.get_representation_path(representation)
+        path = self._get_first_image(root)
+
+        # Get start frame from version data
+        version = io.find_one({"type": "version",
+                               "_id": representation["parent"]})
+        start = version["data"].get("startFrame")
+        if start is None:
+            self.log.warning("Missing start frame for updated version, "
+                             "assuming it starts at frame 0 for: "
+                             "{} ({})".format(tool.Name, representation))
+            start = 0
+
         with comp_lock_and_undo_chunk(comp, "Update Loader"):
-            tool["Clip"] = path
+
+            # Update the loader's path whilst preserving some values
+            with preserve_trim(tool, log=self.log):
+                with preserve_inputs(tool,
+                                     inputs=("HoldFirstFrame",
+                                             "HoldLastFrame",
+                                             "Reverse",
+                                             "Depth",
+                                             "KeyCode",
+                                             "TimeCodeOffset")):
+                    tool["Clip"] = path
+
+            # Set the global in to the start frame of the sequence
+            global_in_changed = loader_shift(tool, start, relative=False)
+            if global_in_changed:
+                # Log this change to the user
+                self.log.debug("Changed '%s' global in: %d" % (tool.Name,
+                                                               start))
 
             # Update the imprinted representation
             tool.SetData("avalon.representation", str(representation["_id"]))
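The preserve_trim logic above clamps the stored trim amounts against the new clip length, so an updated clip with fewer frames never ends up with an invalid trim range. A small worked example with made-up numbers:

    # Illustrative numbers only: the old clip had 100 frames (length = 99),
    # trimmed 10 frames from the start and 20 from the end. The updated
    # clip has only 25 frames (length = 24).
    length = 24
    trim_from_start = 10                    # still fits, stays 10
    trim_from_end = 20

    remainder = length - trim_from_start    # 24 - 10 = 14
    if trim_from_end > remainder:
        trim_from_end = remainder           # clamped from 20 down to 14

    clip_time_start = trim_from_start       # ClipTimeStart becomes 10
    clip_time_end = length - trim_from_end  # ClipTimeEnd becomes 24 - 14 = 10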
@@ -87,5 +234,11 @@ class FusionLoadSequence(api.Loader):
         tool = container["_tool"]
         assert tool.ID == "Loader", "Must be Loader"
         comp = tool.Comp()
 
         with comp_lock_and_undo_chunk(comp, "Remove Loader"):
             tool.Delete()
+
+    def _get_first_image(self, root):
+        """Get first file in representation root"""
+        files = sorted(os.listdir(root))
+        return os.path.join(root, files[0])
@@ -20,9 +20,5 @@ class CollectCurrentCompFusion(pyblish.api.ContextPlugin):
         context.data["currentComp"] = current_comp
 
         # Store path to current file
-        attrs = current_comp.GetAttrs()
-        filepath = attrs.get("COMPS_FileName", "")
+        filepath = current_comp.GetAttrs().get("COMPS_FileName", "")
         context.data['currentFile'] = filepath
-
-        # Labelize the context
-        context.data["label"] = os.path.basename(filepath)
@@ -0,0 +1,35 @@
+import pyblish.api
+
+
+class FusionIncrementCurrentFile(pyblish.api.ContextPlugin):
+    """Increment the current file.
+
+    Saves the current file with an increased version number.
+
+    """
+
+    label = "Increment current file"
+    order = pyblish.api.IntegratorOrder + 9.0
+    hosts = ["fusion"]
+    families = ["colorbleed.saver"]
+    optional = True
+
+    def process(self, context):
+
+        import os
+        from colorbleed.lib import version_up
+        from colorbleed.action import get_errored_plugins_from_data
+
+        errored_plugins = get_errored_plugins_from_data(context)
+        if any(plugin.__name__ == "FusionSubmitDeadline"
+               for plugin in errored_plugins):
+            raise RuntimeError("Skipping incrementing current file because "
+                               "submission to deadline failed.")
+
+        comp = context.data.get("currentComp")
+        assert comp, "Must have comp"
+
+        current_filepath = context.data["currentFile"]
+        new_filepath = version_up(current_filepath)
+
+        comp.Save(new_filepath)
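The increment relies on colorbleed.lib.version_up to compute the next file name; that helper is not part of this diff, but a rough stand-in that bumps a trailing "_v###" token (illustrative only, the real implementation may use a different versioning scheme) could look like this:

    import os
    import re

    def version_up_sketch(filepath):
        """Illustrative stand-in: bump a trailing _v### token by one."""
        dirname, basename = os.path.split(filepath)
        name, ext = os.path.splitext(basename)

        match = re.search(r"_v(\d+)$", name)
        if not match:
            # No version token found; start at v001 (assumption)
            return os.path.join(dirname, "{}_v001{}".format(name, ext))

        padding = len(match.group(1))
        new_version = int(match.group(1)) + 1
        new_name = "{}_v{:0{}d}".format(name[:match.start()],
                                        new_version, padding)
        return os.path.join(dirname, new_name + ext)

    # e.g. version_up_sketch("shot010_comp_v003.comp")
    #      -> "shot010_comp_v004.comp"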
colorbleed/plugins/fusion/publish/save_scene.py (new file, 21 lines)

@@ -0,0 +1,21 @@
+import pyblish.api
+
+
+class FusionSaveComp(pyblish.api.ContextPlugin):
+    """Save current comp"""
+
+    label = "Save current file"
+    order = pyblish.api.IntegratorOrder - 0.49
+    hosts = ["fusion"]
+    families = ["colorbleed.saver"]
+
+    def process(self, context):
+
+        comp = context.data.get("currentComp")
+        assert comp, "Must have comp"
+
+        current = comp.GetAttrs().get("COMPS_FileName", "")
+        assert context.data['currentFile'] == current
+
+        self.log.info("Saving current file..")
+        comp.Save()
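Both Fusion publish plugins above are ordinary pyblish plugins, so they are picked up by whatever plugin paths the pipeline registers. A minimal sketch of running them by hand (illustrative; the plugin path and host registration are assumptions, not part of this commit):

    import pyblish.api
    import pyblish.util

    # Hypothetical location of the plugins shown above
    pyblish.api.register_plugin_path(
        "/path/to/colorbleed/plugins/fusion/publish")
    pyblish.api.register_host("fusion")

    # Plugins run sorted by their `order` attribute, so FusionSaveComp
    # (IntegratorOrder - 0.49) runs before integration, while
    # FusionIncrementCurrentFile (IntegratorOrder + 9.0) runs at the very end.
    context = pyblish.util.publish()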
@@ -15,12 +15,8 @@ class CollectMayaRenderlayers(pyblish.api.ContextPlugin):
 
     def process(self, context):
 
-        registered_root = api.registered_root()
-        asset_name = api.Session["AVALON_ASSET"]
-
-        current_file = context.data["currentFile"]
-        relative_file = current_file.replace(registered_root, "{root}")
-        source_file = relative_file.replace("\\", "/")
+        asset = api.Session["AVALON_ASSET"]
+        filepath = context.data["currentFile"].replace("\\", "/")
 
         # Get render globals node
         try:
@@ -30,24 +26,24 @@ class CollectMayaRenderlayers(pyblish.api.ContextPlugin):
                               "renderGlobals node")
             return
 
-        default_layer = "{}.includeDefaultRenderLayer".format(render_globals)
-        use_defaultlayer = cmds.getAttr(default_layer)
-
         # Get render layers
         renderlayers = [i for i in cmds.ls(type="renderLayer") if
                         cmds.getAttr("{}.renderable".format(i)) and not
                         cmds.referenceQuery(i, isNodeReferenced=True)]
 
+        # Include/exclude default render layer
+        default_layer = "{}.includeDefaultRenderLayer".format(render_globals)
+        use_defaultlayer = cmds.getAttr(default_layer)
+        if not use_defaultlayer:
+            renderlayers = [i for i in renderlayers if
+                            not i.endswith("defaultRenderLayer")]
+
         # Sort by displayOrder
         def sort_by_display_order(layer):
            return cmds.getAttr("%s.displayOrder" % layer)
 
         renderlayers = sorted(renderlayers, key=sort_by_display_order)
 
-        if not use_defaultlayer:
-            renderlayers = [i for i in renderlayers if
-                            not i.endswith("defaultRenderLayer")]
-
         for layer in renderlayers:
             if layer.endswith("defaultRenderLayer"):
                 layername = "masterLayer"
@@ -68,10 +64,14 @@ class CollectMayaRenderlayers(pyblish.api.ContextPlugin):
                 # instance subset
                 "family": "Render Layers",
                 "families": ["colorbleed.renderlayer"],
-                "asset": asset_name,
+                "asset": asset,
                 "time": api.time(),
                 "author": context.data["user"],
-                "source": source_file}
+
+                # Add source to allow tracing back to the scene from
+                # which it was originally submitted
+                "source": filepath
+            }
 
             # Apply each user defined attribute as data
             for attr in cmds.listAttr(layer, userDefined=True) or list():
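That last loop copies every user-defined attribute on the render layer into the instance data. The same pattern in isolation (a hedged sketch, assuming a Maya scene with a layer node that carries custom attributes):

    from maya import cmds

    def collect_user_attributes(node):
        """Illustrative: gather user-defined attributes of `node`."""
        data = {}
        for attr in cmds.listAttr(node, userDefined=True) or []:
            try:
                data[attr] = cmds.getAttr("{}.{}".format(node, attr))
            except RuntimeError:
                # Some attribute types (e.g. message) cannot be read directly
                continue
        return data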
@@ -64,14 +64,7 @@ class ExtractColorbleedAlembic(colorbleed.api.Extractor):
         extract_alembic(file=path,
                         startFrame=start,
                         endFrame=end,
-                        **{"step": instance.data.get("step", 1.0),
-                           "attr": ["cbId"],
-                           "attrPrefix": ["vray"],
-                           "writeVisibility": True,
-                           "writeCreases": True,
-                           "writeColorSets": writeColorSets,
-                           "uvWrite": True,
-                           "selection": True})
+                        **options)
 
         if "files" not in instance.data:
             instance.data["files"] = list()
@@ -1,6 +1,5 @@
 import os
 import json
-import shutil
 import getpass
 
 from maya import cmds
@@ -30,14 +29,14 @@ def get_renderer_variables(renderlayer=None):
     renderer = lib.get_renderer(renderlayer or lib.get_current_renderlayer())
     render_attrs = lib.RENDER_ATTRS.get(renderer, lib.RENDER_ATTRS["default"])
 
-    filename_padding = cmds.getAttr("{}.{}".format(render_attrs["node"],
-                                                   render_attrs["padding"]))
+    padding = cmds.getAttr("{}.{}".format(render_attrs["node"],
+                                          render_attrs["padding"]))
 
     filename_0 = cmds.renderSettings(fullPath=True, firstImageName=True)[0]
 
     if renderer == "vray":
-        # Maya's renderSettings function does not resolved V-Ray extension
-        # Getting the extension for VRay settings node
+        # Maya's renderSettings function does not return V-Ray file extension
+        # so we get the extension from vraySettings
         extension = cmds.getAttr("vraySettings.imageFormatStr")
 
         # When V-Ray image format has not been switched once from default .png
@@ -56,10 +55,39 @@ def get_renderer_variables(renderlayer=None):
 
     return {"ext": extension,
             "filename_prefix": filename_prefix,
-            "padding": filename_padding,
+            "padding": padding,
             "filename_0": filename_0}
 
 
+def preview_fname(folder, scene, layer, padding, ext):
+    """Return output file path with #### for padding.
+
+    Deadline requires the path to be formatted with # in place of numbers.
+    For example `/path/to/render.####.png`
+
+    Args:
+        folder (str): The root output folder (image path)
+        scene (str): The scene name
+        layer (str): The layer name to be rendered
+        padding (int): The padding length
+        ext (str): The output file extension
+
+    Returns:
+        str
+
+    """
+
+    # Following hardcoded "<Scene>/<Scene>_<Layer>/<Layer>"
+    output = "{scene}/{scene}_{layer}/{layer}.{number}.{ext}".format(
+        scene=scene,
+        layer=layer,
+        number="#" * padding,
+        ext=ext
+    )
+
+    return os.path.join(folder, output)
+
+
 class MayaSubmitDeadline(pyblish.api.InstancePlugin):
     """Submit available render layers to Deadline
 
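The module-level preview_fname above replaces the preview_fname method on the plugin (removed further down) and is now called with keyword arguments. A quick usage sketch with made-up values:

    # Illustrative values only
    path = preview_fname(folder="/projects/test/work/maya/renders",
                         scene="shot010_lighting_v001",
                         layer="beauty",
                         padding=4,
                         ext="exr")
    # -> ".../renders/shot010_lighting_v001/shot010_lighting_v001_beauty/
    #     beauty.####.exr"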
@@ -81,44 +109,32 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin):
 
         context = instance.context
         workspace = context.data["workspaceDir"]
-        fpath = context.data["currentFile"]
-        fname = os.path.basename(fpath)
+        filepath = context.data["currentFile"]
+        filename = os.path.basename(filepath)
         comment = context.data.get("comment", "")
-        scene = os.path.splitext(fname)[0]
+        scene = os.path.splitext(filename)[0]
         dirname = os.path.join(workspace, "renders")
         renderlayer = instance.data['setMembers']          # rs_beauty
         renderlayer_name = instance.name                   # beauty
         renderlayer_globals = instance.data["renderGlobals"]
         legacy_layers = renderlayer_globals["UseLegacyRenderLayers"]
         deadline_user = context.data.get("deadlineUser", getpass.getuser())
-        jobname = "%s - %s" % (fname, instance.name)
+        jobname = "%s - %s" % (filename, instance.name)
 
         # Get the variables depending on the renderer
+        # Following hardcoded "renders/<Scene>/<Scene>_<Layer>/<Layer>"
         render_variables = get_renderer_variables(renderlayer)
-        output_filename_0 = self.preview_fname(scene,
-                                               renderlayer_name,
-                                               dirname,
-                                               render_variables["padding"],
-                                               render_variables["ext"])
-
-        # Get parent folder of render output
-        render_folder = os.path.dirname(output_filename_0)
+        output_filename_0 = preview_fname(folder=dirname,
+                                          scene=scene,
+                                          layer=renderlayer_name,
+                                          padding=render_variables["padding"],
+                                          ext=render_variables["ext"])
 
         try:
-            # Ensure folders exists
-            os.makedirs(render_folder)
+            # Ensure render folder exists
+            os.makedirs(dirname)
         except OSError:
             pass
 
-        # Get the folder name, this will be the name of the metadata file
-        json_fname = os.path.basename(render_folder)
-        json_fpath = os.path.join(os.path.dirname(render_folder),
-                                  "{}.json".format(json_fname))
-
-        # E.g. http://192.168.0.1:8082/api/jobs
-        url = "{}/api/jobs".format(AVALON_DEADLINE)
-
         # Documentation for keys available at:
         # https://docs.thinkboxsoftware.com
         #    /products/deadline/8.0/1_User%20Manual/manual
@@ -126,7 +142,7 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin):
         payload = {
             "JobInfo": {
                 # Top-level group name
-                "BatchName": fname,
+                "BatchName": filename,
 
                 # Job name, as seen in Monitor
                 "Name": jobname,
@@ -149,7 +165,7 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin):
             },
             "PluginInfo": {
                 # Input
-                "SceneFile": fpath,
+                "SceneFile": filepath,
 
                 # Output directory and filename
                 "OutputFilePath": dirname.replace("\\", "/"),
@@ -178,7 +194,7 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin):
             "AuxFiles": []
         }
 
-        # Include critical variables with submission
+        # Include critical environment variables with submission
         keys = [
             # This will trigger `userSetup.py` on the slave
             # such that proper initialisation happens the same
@@ -218,74 +234,12 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin):
         self.log.info("Submitting..")
         self.log.info(json.dumps(payload, indent=4, sort_keys=True))
 
+        # E.g. http://192.168.0.1:8082/api/jobs
+        url = "{}/api/jobs".format(AVALON_DEADLINE)
         response = requests.post(url, json=payload)
-        if response.ok:
-            # Write metadata for publish
-            render_job = response.json()
-            data = {
-                "submission": payload,
-                "session": api.Session,
-                "instance": instance.data,
-                "jobs": [render_job],
-            }
-
-            with open(json_fpath, "w") as f:
-                json.dump(data, f, indent=4, sort_keys=True)
-
-            self.log.info("Creating publish job")
-            state = instance.data["suspendPublishJob"]
-            publish_job = self.create_publish_job(fname,
-                                                  deadline_user,
-                                                  comment,
-                                                  jobname,
-                                                  render_job,
-                                                  json_fpath,
-                                                  state)
-            if not publish_job:
-                self.log.error("Could not submit publish job!")
-            else:
-                self.log.info(publish_job)
-
-        else:
-            try:
-                shutil.rmtree(dirname)
-            except OSError:
-                # This is nice-to-have, but not critical to the operation
-                pass
 
         if not response.ok:
             raise Exception(response.text)
 
-    def preview_fname(self, scene, layer, folder, padding, ext):
-        """Return outputted filename with #### for padding
-
-        Passing the absolute path to Deadline enables Deadline Monitor
-        to provide the user with a Job Output menu option.
-
-        Deadline requires the path to be formatted with # in place of numbers.
-
-        From
-            /path/to/render.0000.png
-        To
-            /path/to/render.####.png
-
-        Args:
-            layer: name of the current layer to be rendered
-            folder (str): folder to which will be written
-            padding (int): padding length
-            ext (str): file extension
-
-        Returns:
-            str
-
-        """
-
-        padded_basename = "{}.{}.{}".format(layer, "#" * padding, ext)
-        scene_layer_folder = "{}_{}".format(scene, layer)
-        preview_fname = os.path.join(folder, scene, scene_layer_folder,
-                                     padded_basename)
-
-        return preview_fname
-
     def preflight_check(self, instance):
         """Ensure the startFrame, endFrame and byFrameStep are integers"""
 
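With the metadata writing and dependent publish-job creation removed from MayaSubmitDeadline, the submission itself reduces to a single HTTP POST against the Deadline Web Service. A stripped-down sketch of that call (illustrative values; any field not shown in the diff above, such as "Plugin", is an assumption):

    import requests

    AVALON_DEADLINE = "http://192.168.0.1:8082"   # example address from above

    payload = {
        "JobInfo": {
            "BatchName": "shot010_lighting_v001.ma",        # made-up
            "Name": "shot010_lighting_v001.ma - beauty",    # made-up
            "Plugin": "MayaBatch",                          # assumption
        },
        "PluginInfo": {
            "SceneFile": "/projects/test/work/maya/scenes/"
                         "shot010_lighting_v001.ma",
        },
        "AuxFiles": [],
    }

    response = requests.post("{}/api/jobs".format(AVALON_DEADLINE),
                             json=payload)
    if not response.ok:
        raise Exception(response.text)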
@@ -17,6 +17,7 @@ class ValidateNodeIDsRelated(pyblish.api.InstancePlugin):
     families = ["colorbleed.model",
                 "colorbleed.look",
                 "colorbleed.rig"]
+    optional = True
 
     actions = [colorbleed.api.SelectInvalidAction,
                colorbleed.api.GenerateUUIDsOnInvalidAction]
colorbleed/plugins/publish/collect_context_label.py (new file, 22 lines)

@@ -0,0 +1,22 @@
+import os
+import pyblish.api
+
+
+class CollectCurrentShellFile(pyblish.api.ContextPlugin):
+    """Labelize context using the registered host and current file"""
+
+    order = pyblish.api.CollectorOrder + 0.25
+    label = "Context Label"
+
+    def process(self, context):
+
+        # Get last registered host
+        host = pyblish.api.registered_hosts()[-1]
+
+        # Get scene name from "currentFile"
+        path = context.data.get("currentFile") or "<Unsaved>"
+        base = os.path.basename(path)
+
+        # Set label
+        label = "{host} - {scene}".format(host=host.title(), scene=base)
+        context.data["label"] = label
@@ -54,9 +54,8 @@ class SubmitDependentImageSequenceJobDeadline(pyblish.api.InstancePlugin):
 
     label = "Submit image sequence jobs to Deadline"
     order = pyblish.api.IntegratorOrder + 0.1
-    hosts = ["fusion"]
-    families = ["fusion.deadline"]
-    targets = ["deadline"]
+    hosts = ["fusion", "maya"]
+    families = ["colorbleed.saver", "colorbleed.renderlayer"]
 
     def process(self, instance):
 