Mirror of https://github.com/ynput/ayon-core.git (synced 2026-01-01 08:24:53 +01:00)

Commit 7667674317: rename config folder
Parent: bb17ef7221
182 changed files with 272 additions and 272 deletions
pype/plugins/fusion/create/create_tiff_saver.py (new file, 46 lines)
@@ -0,0 +1,46 @@
import os

import avalon.api
from avalon import fusion


class CreateTiffSaver(avalon.api.Creator):

    name = "tiffDefault"
    label = "Create Tiff Saver"
    hosts = ["fusion"]
    family = "studio.saver"

    def process(self):

        file_format = "TiffFormat"

        comp = fusion.get_current_comp()

        # todo: improve method of getting current environment
        # todo: prefer avalon.Session over os.environ

        workdir = os.path.normpath(os.environ["AVALON_WORKDIR"])

        filename = "{}..tiff".format(self.name)
        filepath = os.path.join(workdir, "render", "preview", filename)

        with fusion.comp_lock_and_undo_chunk(comp):
            args = (-32768, -32768)  # Magical position numbers
            saver = comp.AddTool("Saver", *args)
            saver.SetAttrs({"TOOLS_Name": self.name})

            # Setting input attributes is different from basic attributes;
            # not to be confused with "MainInputAttributes"
            saver["Clip"] = filepath
            saver["OutputFormat"] = file_format

            # Set standard TIFF settings
            if saver[file_format] is None:
                raise RuntimeError("File format is not set to TiffFormat, "
                                   "this is a bug")

            # Set file format attributes
            saver[file_format]["Depth"] = 1  # int8 | int16 | float32 | other
            saver[file_format]["SaveAlpha"] = 0
pype/plugins/fusion/inventory/select_containers.py (new file, 25 lines)
@@ -0,0 +1,25 @@
from avalon import api


class FusionSelectContainers(api.InventoryAction):

    label = "Select Containers"
    icon = "mouse-pointer"
    color = "#d8d8d8"

    def process(self, containers):

        import avalon.fusion

        tools = [i["_tool"] for i in containers]

        comp = avalon.fusion.get_current_comp()
        flow = comp.CurrentFrame.FlowView

        with avalon.fusion.comp_lock_and_undo_chunk(comp, self.label):
            # Clear selection
            flow.Select()

            # Select tool
            for tool in tools:
                flow.Select(tool)
pype/plugins/fusion/inventory/set_tool_color.py (new file, 68 lines)
@@ -0,0 +1,68 @@
from avalon import api, style
from avalon.vendor.Qt import QtGui, QtWidgets

import avalon.fusion


class FusionSetToolColor(api.InventoryAction):
    """Update the color of the selected tools"""

    label = "Set Tool Color"
    icon = "plus"
    color = "#d8d8d8"
    _fallback_color = QtGui.QColor(1.0, 1.0, 1.0)

    def process(self, containers):
        """Color all selected tools the selected colors"""

        result = []
        comp = avalon.fusion.get_current_comp()

        # Get tool color
        first = containers[0]
        tool = first["_tool"]
        color = tool.TileColor

        if color is not None:
            qcolor = QtGui.QColor().fromRgbF(color["R"], color["G"], color["B"])
        else:
            qcolor = self._fallback_color

        # Launch pick color
        picked_color = self.get_color_picker(qcolor)
        if not picked_color:
            return

        with avalon.fusion.comp_lock_and_undo_chunk(comp):
            for container in containers:
                # Convert color to RGB 0-1 floats
                rgb_f = picked_color.getRgbF()
                rgb_f_table = {"R": rgb_f[0], "G": rgb_f[1], "B": rgb_f[2]}

                # Update tool
                tool = container["_tool"]
                tool.TileColor = rgb_f_table

                result.append(container)

        return result

    def get_color_picker(self, color):
        """Launch color picker and return chosen color

        Args:
            color(QtGui.QColor): Start color to display

        Returns:
            QtGui.QColor

        """

        color_dialog = QtWidgets.QColorDialog(color)
        color_dialog.setStyleSheet(style.load_stylesheet())

        accepted = color_dialog.exec_()
        if not accepted:
            return

        return color_dialog.selectedColor()
pype/plugins/fusion/load/actions.py (new file, 76 lines)
@@ -0,0 +1,76 @@
"""A module containing generic loader actions that will display in the Loader.

"""

from avalon import api


class FusionSetFrameRangeLoader(api.Loader):
    """Set the comp's frame range to the loaded version's start and end"""

    families = ["studio.animation",
                "studio.camera",
                "studio.imagesequence",
                "studio.yeticache",
                "studio.pointcache"]
    representations = ["*"]

    label = "Set frame range"
    order = 11
    icon = "clock-o"
    color = "white"

    def load(self, context, name, namespace, data):

        from pype.fusion import lib

        version = context['version']
        version_data = version.get("data", {})

        start = version_data.get("startFrame", None)
        end = version_data.get("endFrame", None)

        if start is None or end is None:
            print("Skipping setting frame range because start or "
                  "end frame data is missing..")
            return

        lib.update_frame_range(start, end)


class FusionSetFrameRangeWithHandlesLoader(api.Loader):
    """Set the comp's frame range to the loaded version's start and end,
    including handles"""

    families = ["studio.animation",
                "studio.camera",
                "studio.imagesequence",
                "studio.yeticache",
                "studio.pointcache"]
    representations = ["*"]

    label = "Set frame range (with handles)"
    order = 12
    icon = "clock-o"
    color = "white"

    def load(self, context, name, namespace, data):

        from pype.fusion import lib

        version = context['version']
        version_data = version.get("data", {})

        start = version_data.get("startFrame", None)
        end = version_data.get("endFrame", None)

        if start is None or end is None:
            print("Skipping setting frame range because start or "
                  "end frame data is missing..")
            return

        # Include handles
        handles = version_data.get("handles", 0)
        start -= handles
        end += handles

        lib.update_frame_range(start, end)
pype/plugins/fusion/load/load_sequence.py (new file, 259 lines)
@@ -0,0 +1,259 @@
import os
import contextlib

from avalon import api
import avalon.io as io


@contextlib.contextmanager
def preserve_inputs(tool, inputs):
    """Preserve the tool's inputs after context"""

    comp = tool.Comp()

    values = {}
    for name in inputs:
        tool_input = getattr(tool, name)
        value = tool_input[comp.TIME_UNDEFINED]
        values[name] = value

    try:
        yield
    finally:
        for name, value in values.items():
            tool_input = getattr(tool, name)
            tool_input[comp.TIME_UNDEFINED] = value


@contextlib.contextmanager
def preserve_trim(loader, log=None):
    """Preserve the relative trim of the Loader tool.

    This tries to preserve the loader's trim (trim in and trim out) after
    the context by reapplying the "amount" it trims on the clip's length at
    start and end.

    """

    # Get original trim as amount of "trimming" from length
    time = loader.Comp().TIME_UNDEFINED
    length = loader.GetAttrs()["TOOLIT_Clip_Length"][1] - 1
    trim_from_start = loader["ClipTimeStart"][time]
    trim_from_end = length - loader["ClipTimeEnd"][time]

    try:
        yield
    finally:

        length = loader.GetAttrs()["TOOLIT_Clip_Length"][1] - 1
        if trim_from_start > length:
            trim_from_start = length
            if log:
                log.warning("Reducing trim in to %d "
                            "(because of fewer frames)" % trim_from_start)

        remainder = length - trim_from_start
        if trim_from_end > remainder:
            trim_from_end = remainder
            if log:
                log.warning("Reducing trim out to %d "
                            "(because of fewer frames)" % trim_from_end)

        loader["ClipTimeStart"][time] = trim_from_start
        loader["ClipTimeEnd"][time] = length - trim_from_end


def loader_shift(loader, frame, relative=True):
    """Shift the loader's global in point by `frame` whilst preserving duration.

    This moves the loader by `frame` frames and preserves the global duration.
    When `relative` is False it will instead shift the global in to the given
    start frame.

    Args:
        loader (tool): The fusion loader tool.
        frame (int): The amount of frames to move.
        relative (bool): When True the shift is relative, else the shift will
            change the global in to frame.

    Returns:
        int: The resulting relative frame change (how much it moved)

    """
    comp = loader.Comp()
    time = comp.TIME_UNDEFINED

    old_in = loader["GlobalIn"][time]
    old_out = loader["GlobalOut"][time]

    if relative:
        shift = frame
    else:
        shift = frame - old_in

    # Shifting global in will try to automatically compensate for the change
    # in the "ClipTimeStart" and "HoldFirstFrame" inputs, so we preserve those
    # input values to "just shift" the clip
    with preserve_inputs(loader, inputs=["ClipTimeStart",
                                         "ClipTimeEnd",
                                         "HoldFirstFrame",
                                         "HoldLastFrame"]):

        # GlobalIn cannot be set past GlobalOut or vice versa
        # so we must apply them in the order of the shift.
        if shift > 0:
            loader["GlobalOut"][time] = old_out + shift
            loader["GlobalIn"][time] = old_in + shift
        else:
            loader["GlobalIn"][time] = old_in + shift
            loader["GlobalOut"][time] = old_out + shift

    return int(shift)


class FusionLoadSequence(api.Loader):
    """Load image sequence into Fusion"""

    families = ["studio.imagesequence"]
    representations = ["*"]

    label = "Load sequence"
    order = -10
    icon = "code-fork"
    color = "orange"

    def load(self, context, name, namespace, data):

        from avalon.fusion import (
            imprint_container,
            get_current_comp,
            comp_lock_and_undo_chunk
        )

        # Fallback to asset name when namespace is None
        if namespace is None:
            namespace = context['asset']['name']

        # Use the first file for now
        path = self._get_first_image(self.fname)

        # Create the Loader with the filename path set
        comp = get_current_comp()
        with comp_lock_and_undo_chunk(comp, "Create Loader"):

            args = (-32768, -32768)
            tool = comp.AddTool("Loader", *args)
            tool["Clip"] = path

            # Set global in point to start frame (if in version.data)
            start = context["version"]["data"].get("startFrame", None)
            if start is not None:
                loader_shift(tool, start, relative=False)

            imprint_container(tool,
                              name=name,
                              namespace=namespace,
                              context=context,
                              loader=self.__class__.__name__)

    def switch(self, container, representation):
        self.update(container, representation)

    def update(self, container, representation):
        """Update the Loader's path

        Fusion automatically tries to reset some variables when changing
        the loader's path to a new file. These automatic changes are to its
        inputs:

        - ClipTimeStart: Fusion resets to 0 if duration changes
            - We keep the trim in as close as possible to the previous value.
              When there are fewer frames than the amount of trim we reduce
              it accordingly.

        - ClipTimeEnd: Fusion resets to 0 if duration changes
            - We keep the trim out as close as possible to the previous value
              within the new amount of frames after trim in (ClipTimeStart)
              has been set.

        - GlobalIn: Fusion resets to comp's global in if duration changes
            - We change it to the "startFrame"

        - GlobalOut: Fusion resets to GlobalIn + length if duration changes
            - We do the same as Fusion - allow Fusion to take control.

        - HoldFirstFrame: Fusion resets this to 0
            - We preserve the value.

        - HoldLastFrame: Fusion resets this to 0
            - We preserve the value.

        - Reverse: Fusion resets to disabled if "Loop" is not enabled.
            - We preserve the value.

        - Depth: Fusion resets to "Format"
            - We preserve the value.

        - KeyCode: Fusion resets to ""
            - We preserve the value.

        - TimeCodeOffset: Fusion resets to 0
            - We preserve the value.

        """

        from avalon.fusion import comp_lock_and_undo_chunk

        tool = container["_tool"]
        assert tool.ID == "Loader", "Must be Loader"
        comp = tool.Comp()

        root = api.get_representation_path(representation)
        path = self._get_first_image(root)

        # Get start frame from version data
        version = io.find_one({"type": "version",
                               "_id": representation["parent"]})
        start = version["data"].get("startFrame")
        if start is None:
            self.log.warning("Missing start frame for updated version, "
                             "assuming it starts at frame 0 for: "
                             "{} ({})".format(tool.Name, representation))
            start = 0

        with comp_lock_and_undo_chunk(comp, "Update Loader"):

            # Update the loader's path whilst preserving some values
            with preserve_trim(tool, log=self.log):
                with preserve_inputs(tool,
                                     inputs=("HoldFirstFrame",
                                             "HoldLastFrame",
                                             "Reverse",
                                             "Depth",
                                             "KeyCode",
                                             "TimeCodeOffset")):
                    tool["Clip"] = path

            # Set the global in to the start frame of the sequence
            global_in_changed = loader_shift(tool, start, relative=False)
            if global_in_changed:
                # Log this change to the user
                self.log.debug("Changed '%s' global in: %d" % (tool.Name,
                                                               start))

            # Update the imprinted representation
            tool.SetData("avalon.representation", str(representation["_id"]))

    def remove(self, container):

        from avalon.fusion import comp_lock_and_undo_chunk

        tool = container["_tool"]
        assert tool.ID == "Loader", "Must be Loader"
        comp = tool.Comp()

        with comp_lock_and_undo_chunk(comp, "Remove Loader"):
            tool.Delete()

    def _get_first_image(self, root):
        """Get first file in representation root"""
        files = sorted(os.listdir(root))
        return os.path.join(root, files[0])
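A minimal usage sketch of loader_shift (illustrative only, not part of the committed file; `tool` stands for an existing Fusion Loader tool):

    # Relative: nudge the clip 5 frames later on the timeline
    moved = loader_shift(tool, 5, relative=True)       # returns 5

    # Absolute: pin the clip's GlobalIn to frame 1001, duration unchanged
    moved = loader_shift(tool, 1001, relative=False)   # returns the delta applied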
pype/plugins/fusion/publish/collect_comp.py (new file, 24 lines)
@@ -0,0 +1,24 @@
import os

import pyblish.api

from avalon import fusion


class CollectCurrentCompFusion(pyblish.api.ContextPlugin):
    """Collect current comp"""

    order = pyblish.api.CollectorOrder - 0.4
    label = "Collect Current Comp"
    hosts = ["fusion"]

    def process(self, context):
        """Collect the current comp and its file path"""

        current_comp = fusion.get_current_comp()
        assert current_comp, "Must have active Fusion composition"
        context.data["currentComp"] = current_comp

        # Store path to current file
        filepath = current_comp.GetAttrs().get("COMPS_FileName", "")
        context.data['currentFile'] = filepath
pype/plugins/fusion/publish/collect_fusion_version.py (new file, 22 lines)
@@ -0,0 +1,22 @@
import pyblish.api


class CollectFusionVersion(pyblish.api.ContextPlugin):
    """Collect the Fusion version of the current comp"""

    order = pyblish.api.CollectorOrder
    label = "Collect Fusion Version"
    hosts = ["fusion"]

    def process(self, context):
        """Collect the Fusion application version"""

        comp = context.data.get("currentComp")
        if not comp:
            raise RuntimeError("No comp previously collected, unable to "
                               "retrieve Fusion version.")

        version = comp.GetApp().Version
        context.data["fusionVersion"] = version

        self.log.info("Fusion version: %s" % version)
pype/plugins/fusion/publish/collect_instances.py (new file, 96 lines)
@@ -0,0 +1,96 @@
import os

import pyblish.api


def get_comp_render_range(comp):
    """Return comp's start and end render range."""
    comp_attrs = comp.GetAttrs()
    start = comp_attrs["COMPN_RenderStart"]
    end = comp_attrs["COMPN_RenderEnd"]

    # Whenever render ranges are undefined fall back
    # to the comp's global start and end
    if start == -1000000000:
        start = comp_attrs["COMPN_GlobalStart"]
    if end == -1000000000:
        end = comp_attrs["COMPN_GlobalEnd"]

    return start, end


class CollectInstances(pyblish.api.ContextPlugin):
    """Collect Fusion saver instances

    This additionally stores the Comp start and end render range in the
    current context's data as "startFrame" and "endFrame".

    """

    order = pyblish.api.CollectorOrder
    label = "Collect Instances"
    hosts = ["fusion"]

    def process(self, context):
        """Collect all Saver tools as instances"""

        from avalon.fusion.lib import get_frame_path

        comp = context.data["currentComp"]

        # Get all savers in the comp
        tools = comp.GetToolList(False).values()
        savers = [tool for tool in tools if tool.ID == "Saver"]

        start, end = get_comp_render_range(comp)
        context.data["startFrame"] = start
        context.data["endFrame"] = end

        for tool in savers:
            path = tool["Clip"][comp.TIME_UNDEFINED]

            tool_attrs = tool.GetAttrs()
            active = not tool_attrs["TOOLB_PassThrough"]

            if not path:
                self.log.warning("Skipping saver because it "
                                 "has no path set: {}".format(tool.Name))
                continue

            filename = os.path.basename(path)
            head, padding, tail = get_frame_path(filename)
            ext = os.path.splitext(path)[1]
            assert tail == ext, ("Tail does not match %s" % ext)
            subset = head.rstrip("_. ")  # subset is head of the filename

            # Include start and end render frame in label
            label = "{subset} ({start}-{end})".format(subset=subset,
                                                      start=int(start),
                                                      end=int(end))

            instance = context.create_instance(subset)
            instance.data.update({
                "asset": os.environ["AVALON_ASSET"],  # todo: not a constant
                "subset": subset,
                "path": path,
                "outputDir": os.path.dirname(path),
                "ext": ext,  # todo: should be redundant
                "label": label,
                "families": ["studio.saver"],
                "family": "studio.saver",
                "active": active,
                "publish": active  # backwards compatibility
            })

            instance.append(tool)

            self.log.info("Found: \"%s\" " % path)

        # Sort/grouped by family (preserving local index)
        context[:] = sorted(context, key=self.sort_by_family)

        return context

    def sort_by_family(self, instance):
        """Sort by family"""
        return instance.data.get("families", instance.data.get("family"))
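For orientation, an assumed example of what avalon.fusion.lib.get_frame_path returns, inferred from how its result is used above and in the Deadline submitter below (the exact split is an assumption, not verified against the library):

    # Assumed behaviour: head keeps the trailing separator, padding is the
    # frame digit count, tail is the extension.
    head, padding, tail = get_frame_path("renderMain.0001.exr")
    # head == "renderMain.", padding == 4, tail == ".exr"
    # subset = head.rstrip("_. ")  ->  "renderMain"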
pype/plugins/fusion/publish/collect_render_target.py (new file, 44 lines)
@@ -0,0 +1,44 @@
import pyblish.api


class CollectFusionRenderMode(pyblish.api.InstancePlugin):
    """Collect the current comp's render mode

    Options:
        renderlocal
        deadline

    Note that this value is set for each comp separately. When you save the
    comp this information will be stored in that file. If for some reason the
    available tool does not visualize which render mode is set for the
    current comp, please run the following line in the console (Py2):

        comp.GetData("studio.rendermode")

    This will return the name of the current render mode as seen above under
    Options.

    """

    order = pyblish.api.CollectorOrder + 0.4
    label = "Collect Render Mode"
    hosts = ["fusion"]
    families = ["studio.saver"]

    def process(self, instance):
        """Collect the render mode and append it as a family"""
        options = ["renderlocal", "deadline"]

        comp = instance.context.data.get("currentComp")
        if not comp:
            raise RuntimeError("No comp previously collected, unable to "
                               "retrieve render mode.")

        rendermode = comp.GetData("studio.rendermode") or "renderlocal"
        assert rendermode in options, "Must be supported render mode"

        self.log.info("Render mode: {0}".format(rendermode))

        # Append family
        family = "studio.saver.{0}".format(rendermode)
        instance.data["families"].append(family)
@@ -0,0 +1,34 @@
import pyblish.api


class FusionIncrementCurrentFile(pyblish.api.ContextPlugin):
    """Increment the current file.

    Saves the current file with an increased version number.

    """

    label = "Increment current file"
    order = pyblish.api.IntegratorOrder + 9.0
    hosts = ["fusion"]
    families = ["studio.saver.deadline"]
    optional = True

    def process(self, context):

        from pype.lib import version_up
        from pype.action import get_errored_plugins_from_data

        errored_plugins = get_errored_plugins_from_data(context)
        if any(plugin.__name__ == "FusionSubmitDeadline"
               for plugin in errored_plugins):
            raise RuntimeError("Skipping incrementing current file because "
                               "submission to deadline failed.")

        comp = context.data.get("currentComp")
        assert comp, "Must have comp"

        current_filepath = context.data["currentFile"]
        new_filepath = version_up(current_filepath)

        comp.Save(new_filepath)
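An illustrative expectation for pype.lib.version_up, assuming the usual versioned workfile naming; the actual pattern handling lives in pype.lib and is not part of this diff:

    version_up("/work/shots/sh010/compositing_v001.comp")
    # -> "/work/shots/sh010/compositing_v002.comp"  (assumed naming convention)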
pype/plugins/fusion/publish/publish_image_sequences.py (new file, 98 lines)
@@ -0,0 +1,98 @@
import re
import os
import json
import subprocess

import pyblish.api

from pype.action import get_errored_plugins_from_data


def _get_script():
    """Get path to the image sequence script"""

    # todo: use a more elegant way to get the python script

    try:
        from pype.scripts import publish_filesequence
    except Exception:
        raise RuntimeError("Expected module 'publish_filesequence' "
                           "to be available")

    module_path = publish_filesequence.__file__
    if module_path.endswith(".pyc"):
        module_path = module_path[:-len(".pyc")] + ".py"

    return module_path


class PublishImageSequence(pyblish.api.InstancePlugin):
    """Publish the generated local image sequences."""

    order = pyblish.api.IntegratorOrder
    label = "Publish Rendered Image Sequence(s)"
    hosts = ["fusion"]
    families = ["studio.saver.renderlocal"]

    def process(self, instance):

        # Skip this plug-in if the ExtractImageSequence failed
        errored_plugins = get_errored_plugins_from_data(instance.context)
        if any(plugin.__name__ == "FusionRenderLocal" for plugin in
               errored_plugins):
            raise RuntimeError("Fusion local render failed, "
                               "publishing images skipped.")

        subset = instance.data["subset"]
        ext = instance.data["ext"]

        # Regex to match resulting renders
        regex = "^{subset}.*[0-9]+{ext}+$".format(subset=re.escape(subset),
                                                  ext=re.escape(ext))

        # The instance has most of the information already stored
        metadata = {
            "regex": regex,
            "startFrame": instance.context.data["startFrame"],
            "endFrame": instance.context.data["endFrame"],
            "families": ["studio.imagesequence"],
        }

        # Write metadata and store the path in the instance
        output_directory = instance.data["outputDir"]
        path = os.path.join(output_directory,
                            "{}_metadata.json".format(subset))
        with open(path, "w") as f:
            json.dump(metadata, f)

        assert os.path.isfile(path), ("Stored path is not a file for %s"
                                      % instance.data["name"])

        # Suppress any subprocess console
        startupinfo = subprocess.STARTUPINFO()
        startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW
        startupinfo.wShowWindow = subprocess.SW_HIDE

        process = subprocess.Popen(["python", _get_script(),
                                    "--paths", path],
                                   bufsize=1,
                                   stdout=subprocess.PIPE,
                                   stderr=subprocess.STDOUT,
                                   startupinfo=startupinfo)

        while True:
            output = process.stdout.readline()
            # Break when there is no output or a return code has been given
            if output == '' and process.poll() is not None:
                process.stdout.close()
                break
            if output:
                line = output.strip()
                if line.startswith("ERROR"):
                    self.log.error(line)
                else:
                    self.log.info(line)

        if process.returncode != 0:
            raise RuntimeError("Process quit with non-zero "
                               "return code: {}".format(process.returncode))
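A quick check of what the render-matching regex built above accepts, for an assumed subset "renderMain" and extension ".exr":

    import re
    regex = "^{subset}.*[0-9]+{ext}+$".format(subset=re.escape("renderMain"),
                                              ext=re.escape(".exr"))
    assert re.match(regex, "renderMain.0001.exr")
    assert not re.match(regex, "otherSubset.0001.exr")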
pype/plugins/fusion/publish/render_local.py (new file, 42 lines)
@@ -0,0 +1,42 @@
import pyblish.api

import avalon.fusion as fusion


class FusionRenderLocal(pyblish.api.InstancePlugin):
    """Render the current Fusion composition locally.

    Extract the result of savers by starting a comp render
    This will run the local render of Fusion.

    """

    order = pyblish.api.ExtractorOrder
    label = "Render Local"
    hosts = ["fusion"]
    families = ["studio.saver.renderlocal"]

    def process(self, instance):

        # This should be a ContextPlugin, but this is a workaround
        # for a bug in pyblish to run once for a family: issue #250
        context = instance.context
        key = "__hasRun{}".format(self.__class__.__name__)
        if context.data.get(key, False):
            return
        else:
            context.data[key] = True

        current_comp = context.data["currentComp"]
        start_frame = current_comp.GetAttrs("COMPN_RenderStart")
        end_frame = current_comp.GetAttrs("COMPN_RenderEnd")

        self.log.info("Starting render")
        self.log.info("Start frame: {}".format(start_frame))
        self.log.info("End frame: {}".format(end_frame))

        with fusion.comp_lock_and_undo_chunk(current_comp):
            result = current_comp.Render()

        if not result:
            raise RuntimeError("Comp render failed")
pype/plugins/fusion/publish/save_scene.py (new file, 21 lines)
@@ -0,0 +1,21 @@
import pyblish.api


class FusionSaveComp(pyblish.api.ContextPlugin):
    """Save current comp"""

    label = "Save current file"
    order = pyblish.api.ExtractorOrder - 0.49
    hosts = ["fusion"]
    families = ["studio.saver"]

    def process(self, context):

        comp = context.data.get("currentComp")
        assert comp, "Must have comp"

        current = comp.GetAttrs().get("COMPS_FileName", "")
        assert context.data['currentFile'] == current

        self.log.info("Saving current file..")
        comp.Save()
pype/plugins/fusion/publish/submit_deadline.py (new file, 149 lines)
@@ -0,0 +1,149 @@
import os
import json
import getpass

from avalon import api
from avalon.vendor import requests

import pyblish.api


class FusionSubmitDeadline(pyblish.api.InstancePlugin):
    """Submit current Comp to Deadline

    Renders are submitted to a Deadline Web Service as
    supplied via the environment variable AVALON_DEADLINE

    """

    label = "Submit to Deadline"
    order = pyblish.api.IntegratorOrder
    hosts = ["fusion"]
    families = ["studio.saver.deadline"]

    def process(self, instance):

        context = instance.context

        key = "__hasRun{}".format(self.__class__.__name__)
        if context.data.get(key, False):
            return
        else:
            context.data[key] = True

        from avalon.fusion.lib import get_frame_path

        AVALON_DEADLINE = api.Session.get("AVALON_DEADLINE",
                                          "http://localhost:8082")
        assert AVALON_DEADLINE, "Requires AVALON_DEADLINE"

        # Collect all saver instances in context that are to be rendered
        saver_instances = []
        for instance in context[:]:
            if self.families[0] not in instance.data.get("families"):
                # Allow only saver family instances
                continue

            if not instance.data.get("publish", True):
                # Skip inactive instances
                continue
            self.log.debug(instance.data["name"])
            saver_instances.append(instance)

        if not saver_instances:
            raise RuntimeError("No instances found for Deadline submission")

        fusion_version = int(context.data["fusionVersion"])
        filepath = context.data["currentFile"]
        filename = os.path.basename(filepath)
        comment = context.data.get("comment", "")
        deadline_user = context.data.get("deadlineUser", getpass.getuser())

        # Documentation for keys available at:
        # https://docs.thinkboxsoftware.com
        #    /products/deadline/8.0/1_User%20Manual/manual
        #    /manual-submission.html#job-info-file-options
        payload = {
            "JobInfo": {
                # Top-level group name
                "BatchName": filename,

                # Job name, as seen in Monitor
                "Name": filename,

                # User, as seen in Monitor
                "UserName": deadline_user,

                # Use a default submission pool for Fusion
                "Pool": "fusion",

                "Plugin": "Fusion",
                "Frames": "{start}-{end}".format(
                    start=int(context.data["startFrame"]),
                    end=int(context.data["endFrame"])
                ),

                "Comment": comment,
            },
            "PluginInfo": {
                # Input
                "FlowFile": filepath,

                # Mandatory for Deadline
                "Version": str(fusion_version),

                # Render in high quality
                "HighQuality": True,

                # Whether saver output should be checked after rendering
                # is complete
                "CheckOutput": True,

                # Proxy: higher numbers smaller images for faster test renders
                # 1 = no proxy quality
                "Proxy": 1,
            },

            # Mandatory for Deadline, may be empty
            "AuxFiles": []
        }

        # Enable going to rendered frames from Deadline Monitor
        for index, instance in enumerate(saver_instances):
            head, padding, tail = get_frame_path(instance.data["path"])
            path = "{}{}{}".format(head, "#" * padding, tail)
            folder, filename = os.path.split(path)
            payload["JobInfo"]["OutputDirectory%d" % index] = folder
            payload["JobInfo"]["OutputFilename%d" % index] = filename

        # Include critical variables with submission
        keys = [
            # TODO: This won't work if the slaves don't have access to
            # these paths, such as if slaves are running Linux and the
            # submitter is on Windows.
            "PYTHONPATH",
            "OFX_PLUGIN_PATH",
            "FUSION9_MasterPrefs"
        ]
        environment = dict({key: os.environ[key] for key in keys
                            if key in os.environ}, **api.Session)

        payload["JobInfo"].update({
            "EnvironmentKeyValue%d" % index: "{key}={value}".format(
                key=key,
                value=environment[key]
            ) for index, key in enumerate(environment)
        })

        self.log.info("Submitting..")
        self.log.info(json.dumps(payload, indent=4, sort_keys=True))

        # E.g. http://192.168.0.1:8082/api/jobs
        url = "{}/api/jobs".format(AVALON_DEADLINE)
        response = requests.post(url, json=payload)
        if not response.ok:
            raise Exception(response.text)

        # Store the response for dependent job submission plug-ins
        for instance in saver_instances:
            instance.data["deadlineSubmissionJob"] = response.json()
pype/plugins/fusion/publish/validate_background_depth.py (new file, 40 lines)
@@ -0,0 +1,40 @@
import pyblish.api

from pype import action


class ValidateBackgroundDepth(pyblish.api.InstancePlugin):
    """Validate that all Background tools are set to float32 bit"""

    order = pyblish.api.ValidatorOrder
    label = "Validate Background Depth 32 bit"
    actions = [action.RepairAction]
    hosts = ["fusion"]
    families = ["studio.saver"]
    optional = True

    @classmethod
    def get_invalid(cls, instance):

        context = instance.context
        comp = context.data.get("currentComp")
        assert comp, "Must have Comp object"

        backgrounds = comp.GetToolList(False, "Background").values()
        if not backgrounds:
            return []

        return [i for i in backgrounds if i.GetInput("Depth") != 4.0]

    def process(self, instance):
        invalid = self.get_invalid(instance)
        if invalid:
            raise RuntimeError("Found %i nodes which are not set to float32"
                               % len(invalid))

    @classmethod
    def repair(cls, instance):
        comp = instance.context.data.get("currentComp")
        invalid = cls.get_invalid(instance)
        for i in invalid:
            i.SetInput("Depth", 4.0, comp.TIME_UNDEFINED)
pype/plugins/fusion/publish/validate_comp_saved.py (new file, 29 lines)
@@ -0,0 +1,29 @@
import os

import pyblish.api


class ValidateFusionCompSaved(pyblish.api.ContextPlugin):
    """Ensure current comp is saved"""

    order = pyblish.api.ValidatorOrder
    label = "Validate Comp Saved"
    families = ["studio.saver"]
    hosts = ["fusion"]

    def process(self, context):

        comp = context.data.get("currentComp")
        assert comp, "Must have Comp object"
        attrs = comp.GetAttrs()

        filename = attrs["COMPS_FileName"]
        if not filename:
            raise RuntimeError("Comp is not saved.")

        if not os.path.exists(filename):
            raise RuntimeError("Comp file does not exist: %s" % filename)

        if attrs["COMPB_Modified"]:
            self.log.warning("Comp is modified. Save your comp to ensure your "
                             "changes propagate correctly.")
@@ -0,0 +1,41 @@
import pyblish.api

from pype import action


class ValidateCreateFolderChecked(pyblish.api.InstancePlugin):
    """Validate that all savers have the input attribute CreateDir checked.

    This attribute ensures that the folders to which the saver will write
    will be created.
    """

    order = pyblish.api.ValidatorOrder
    actions = [action.RepairAction]
    label = "Validate Create Folder Checked"
    families = ["studio.saver"]
    hosts = ["fusion"]

    @classmethod
    def get_invalid(cls, instance):
        active = instance.data.get("active", instance.data.get("publish"))
        if not active:
            return []

        tool = instance[0]
        create_dir = tool.GetInput("CreateDir")
        if create_dir == 0.0:
            cls.log.error("%s has Create Folder turned off" % instance[0].Name)
            return [tool]

    def process(self, instance):
        invalid = self.get_invalid(instance)
        if invalid:
            raise RuntimeError("Found Saver with Create Folder During "
                               "Render checked off")

    @classmethod
    def repair(cls, instance):
        invalid = cls.get_invalid(instance)
        for tool in invalid:
            tool.SetInput("CreateDir", 1.0)
@@ -0,0 +1,36 @@
import os

import pyblish.api


class ValidateFilenameHasExtension(pyblish.api.InstancePlugin):
    """Ensure the Saver has an extension in the filename path

    This disallows files written as `filename` instead of `filename.frame.ext`.
    Fusion does not always set an extension for your filename when
    changing the file format of the saver.

    """

    order = pyblish.api.ValidatorOrder
    label = "Validate Filename Has Extension"
    families = ["studio.saver"]
    hosts = ["fusion"]

    def process(self, instance):
        invalid = self.get_invalid(instance)
        if invalid:
            raise RuntimeError("Found Saver without an extension")

    @classmethod
    def get_invalid(cls, instance):

        path = instance.data["path"]
        fname, ext = os.path.splitext(path)

        if not ext:
            tool = instance[0]
            cls.log.error("%s has no extension specified" % tool.Name)
            return [tool]

        return []
pype/plugins/fusion/publish/validate_saver_has_input.py (new file, 29 lines)
@@ -0,0 +1,29 @@
import pyblish.api


class ValidateSaverHasInput(pyblish.api.InstancePlugin):
    """Validate saver has incoming connection

    This ensures a Saver has at least an input connection.

    """

    order = pyblish.api.ValidatorOrder
    label = "Validate Saver Has Input"
    families = ["studio.saver"]
    hosts = ["fusion"]

    @classmethod
    def get_invalid(cls, instance):

        saver = instance[0]
        if not saver.Input.GetConnectedOutput():
            return [saver]

        return []

    def process(self, instance):
        invalid = self.get_invalid(instance)
        if invalid:
            raise RuntimeError("Saver has no incoming connection: "
                               "{} ({})".format(instance, invalid[0].Name))
pype/plugins/fusion/publish/validate_saver_passthrough.py (new file, 44 lines)
@@ -0,0 +1,44 @@
import pyblish.api


class ValidateSaverPassthrough(pyblish.api.ContextPlugin):
    """Validate that saver passthrough matches the Pyblish publish state"""

    order = pyblish.api.ValidatorOrder
    label = "Validate Saver Passthrough"
    families = ["studio.saver"]
    hosts = ["fusion"]

    def process(self, context):

        # Workaround for ContextPlugin always running, even if no instance
        # is present with the family
        instances = pyblish.api.instances_by_plugin(instances=list(context),
                                                    plugin=self)
        if not instances:
            self.log.debug("Ignoring plugin.. (bugfix)")

        invalid_instances = []
        for instance in instances:
            invalid = self.is_invalid(instance)
            if invalid:
                invalid_instances.append(instance)

        if invalid_instances:
            self.log.info("Reset pyblish to collect your current scene state, "
                          "that should fix this error.")
            raise RuntimeError("Invalid instances: "
                               "{0}".format(invalid_instances))

    def is_invalid(self, instance):

        saver = instance[0]
        attr = saver.GetAttrs()
        active = not attr["TOOLB_PassThrough"]

        if active != instance.data["publish"]:
            self.log.info("Saver has a different passthrough state than "
                          "Pyblish: {} ({})".format(instance, saver.Name))
            return [saver]

        return []
pype/plugins/fusion/publish/validate_unique_subsets.py (new file, 29 lines)
@@ -0,0 +1,29 @@
import pyblish.api


class ValidateUniqueSubsets(pyblish.api.InstancePlugin):
    """Ensure all instances have a unique subset name"""

    order = pyblish.api.ValidatorOrder
    label = "Validate Unique Subsets"
    families = ["studio.saver"]
    hosts = ["fusion"]

    @classmethod
    def get_invalid(cls, instance):

        context = instance.context
        subset = instance.data["subset"]
        for other_instance in context[:]:
            if other_instance == instance:
                continue

            if other_instance.data["subset"] == subset:
                return [instance]  # current instance is invalid

        return []

    def process(self, instance):
        invalid = self.get_invalid(instance)
        if invalid:
            raise RuntimeError("Subset name is not unique. See log.")