Merge remote-tracking branch 'BigRoy/fusion_new_publisher' into fusion-new-publisher

Merge BigRoy's PR with latest OP dev branch
Jacob Danell 2023-02-23 19:14:16 +01:00
commit 245e9944cd
21 changed files with 636 additions and 237 deletions

View file

@@ -1,20 +1,11 @@
from .pipeline import (
install,
uninstall,
FusionHost,
ls,
imprint_container,
parse_container
)
from .workio import (
open_file,
save_file,
current_file,
has_unsaved_changes,
file_extensions,
work_root
parse_container,
list_instances,
remove_instance
)
from .lib import (
@@ -30,21 +21,11 @@ from .menu import launch_openpype_menu
__all__ = [
# pipeline
"install",
"uninstall",
"ls",
"imprint_container",
"parse_container",
# workio
"open_file",
"save_file",
"current_file",
"has_unsaved_changes",
"file_extensions",
"work_root",
# lib
"maintained_selection",
"update_frame_range",

View file

@@ -0,0 +1,54 @@
import pyblish.api
from openpype.hosts.fusion.api.lib import get_current_comp
from openpype.pipeline.publish import get_errored_instances_from_context
class SelectInvalidAction(pyblish.api.Action):
"""Select invalid nodes in Maya when plug-in failed.
To retrieve the invalid nodes this assumes a static `get_invalid()`
method is available on the plugin.
"""
label = "Select invalid"
on = "failed" # This action is only available on a failed plug-in
icon = "search" # Icon from Awesome Icon
def process(self, context, plugin):
errored_instances = get_errored_instances_from_context(context)
# Apply pyblish.logic to get the instances for the plug-in
instances = pyblish.api.instances_by_plugin(errored_instances, plugin)
# Get the invalid nodes for the plug-ins
self.log.info("Finding invalid nodes..")
invalid = list()
for instance in instances:
invalid_nodes = plugin.get_invalid(instance)
if invalid_nodes:
if isinstance(invalid_nodes, (list, tuple)):
invalid.extend(invalid_nodes)
else:
self.log.warning("Plug-in returned to be invalid, "
"but has no selectable nodes.")
if not invalid:
# Assume relevant comp is current comp and clear selection
self.log.info("No invalid tools found.")
comp = get_current_comp()
flow = comp.CurrentFrame.FlowView
flow.Select() # No args equals clearing selection
return
# Assume a single comp
first_tool = invalid[0]
comp = first_tool.Comp()
flow = comp.CurrentFrame.FlowView
flow.Select() # No args equals clearing selection
names = set()
for tool in invalid:
flow.Select(tool, True)
names.add(tool.Name)
self.log.info("Selecting invalid tools: %s" % ", ".join(sorted(names)))

View file

@@ -53,7 +53,6 @@ class OpenPypeMenu(QtWidgets.QWidget):
asset_label.setAlignment(QtCore.Qt.AlignHCenter)
workfiles_btn = QtWidgets.QPushButton("Workfiles...", self)
create_btn = QtWidgets.QPushButton("Create...", self)
publish_btn = QtWidgets.QPushButton("Publish...", self)
load_btn = QtWidgets.QPushButton("Load...", self)
manager_btn = QtWidgets.QPushButton("Manage...", self)
@@ -76,7 +75,6 @@ class OpenPypeMenu(QtWidgets.QWidget):
layout.addSpacing(20)
layout.addWidget(create_btn)
layout.addWidget(load_btn)
layout.addWidget(publish_btn)
layout.addWidget(manager_btn)
@@ -101,7 +99,6 @@ class OpenPypeMenu(QtWidgets.QWidget):
self.asset_label = asset_label
workfiles_btn.clicked.connect(self.on_workfile_clicked)
create_btn.clicked.connect(self.on_create_clicked)
publish_btn.clicked.connect(self.on_publish_clicked)
load_btn.clicked.connect(self.on_load_clicked)
manager_btn.clicked.connect(self.on_manager_clicked)
@@ -145,11 +142,8 @@ class OpenPypeMenu(QtWidgets.QWidget):
def on_workfile_clicked(self):
host_tools.show_workfiles()
def on_create_clicked(self):
host_tools.show_creator()
def on_publish_clicked(self):
host_tools.show_publish()
host_tools.show_publisher()
def on_load_clicked(self):
host_tools.show_loader(use_context=True)

View file

@@ -4,6 +4,7 @@ Basic avalon integration
import os
import sys
import logging
import contextlib
import pyblish.api
from qtpy import QtCore
@@ -17,15 +18,14 @@ from openpype.pipeline import (
register_loader_plugin_path,
register_creator_plugin_path,
register_inventory_action_path,
deregister_loader_plugin_path,
deregister_creator_plugin_path,
deregister_inventory_action_path,
AVALON_CONTAINER_ID,
)
from openpype.pipeline.load import any_outdated_containers
from openpype.hosts.fusion import FUSION_HOST_DIR
from openpype.host import HostBase, IWorkfileHost, ILoadHost, IPublishHost
from openpype.tools.utils import host_tools
from .lib import (
get_current_comp,
comp_lock_and_undo_chunk,
@@ -66,71 +66,99 @@ class FusionLogHandler(logging.Handler):
self.print(entry)
def install():
"""Install fusion-specific functionality of OpenPype.
class FusionHost(HostBase, IWorkfileHost, ILoadHost, IPublishHost):
name = "fusion"
This is where you install menus and register families, data
and loaders into fusion.
def install(self):
"""Install fusion-specific functionality of OpenPype.
It is called automatically when installing via
`openpype.pipeline.install_host(openpype.hosts.fusion.api)`
This is where you install menus and register families, data
and loaders into fusion.
See the Maya equivalent for inspiration on how to implement this.
It is called automatically when installing via
`openpype.pipeline.install_host(openpype.hosts.fusion.api)`
"""
# Remove all handlers associated with the root logger object, because
# that one always logs as "warnings" incorrectly.
for handler in logging.root.handlers[:]:
logging.root.removeHandler(handler)
See the Maya equivalent for inspiration on how to implement this.
# Attach default logging handler that prints to active comp
logger = logging.getLogger()
formatter = logging.Formatter(fmt="%(message)s\n")
handler = FusionLogHandler()
handler.setFormatter(formatter)
logger.addHandler(handler)
logger.setLevel(logging.DEBUG)
"""
# Remove all handlers associated with the root logger object, because
# that one always logs as "warnings" incorrectly.
for handler in logging.root.handlers[:]:
logging.root.removeHandler(handler)
pyblish.api.register_host("fusion")
pyblish.api.register_plugin_path(PUBLISH_PATH)
log.info("Registering Fusion plug-ins..")
# Attach default logging handler that prints to active comp
logger = logging.getLogger()
formatter = logging.Formatter(fmt="%(message)s\n")
handler = FusionLogHandler()
handler.setFormatter(formatter)
logger.addHandler(handler)
logger.setLevel(logging.DEBUG)
register_loader_plugin_path(LOAD_PATH)
register_creator_plugin_path(CREATE_PATH)
register_inventory_action_path(INVENTORY_PATH)
pyblish.api.register_host("fusion")
pyblish.api.register_plugin_path(PUBLISH_PATH)
log.info("Registering Fusion plug-ins..")
pyblish.api.register_callback(
"instanceToggled", on_pyblish_instance_toggled
)
register_loader_plugin_path(LOAD_PATH)
register_creator_plugin_path(CREATE_PATH)
register_inventory_action_path(INVENTORY_PATH)
# Register events
register_event_callback("open", on_after_open)
register_event_callback("save", on_save)
register_event_callback("new", on_new)
pyblish.api.register_callback(
"instanceToggled", on_pyblish_instance_toggled)
# Register events
register_event_callback("open", on_after_open)
register_event_callback("save", on_save)
register_event_callback("new", on_new)
def uninstall():
"""Uninstall all that was installed
# region workfile io api
def has_unsaved_changes(self):
comp = get_current_comp()
return comp.GetAttrs()["COMPB_Modified"]
This is where you undo everything that was done in `install()`.
That means, removing menus, deregistering families and data
and everything. It should be as though `install()` was never run,
because odds are calling this function means the user is interested
in re-installing shortly afterwards. If, for example, he has been
modifying the menu or registered families.
def get_workfile_extensions(self):
return [".comp"]
"""
pyblish.api.deregister_host("fusion")
pyblish.api.deregister_plugin_path(PUBLISH_PATH)
log.info("Deregistering Fusion plug-ins..")
def save_workfile(self, dst_path=None):
comp = get_current_comp()
comp.Save(dst_path)
deregister_loader_plugin_path(LOAD_PATH)
deregister_creator_plugin_path(CREATE_PATH)
deregister_inventory_action_path(INVENTORY_PATH)
def open_workfile(self, filepath):
# Hack to get fusion, see
# openpype.hosts.fusion.api.pipeline.get_current_comp()
fusion = getattr(sys.modules["__main__"], "fusion", None)
pyblish.api.deregister_callback(
"instanceToggled", on_pyblish_instance_toggled
)
return fusion.LoadComp(filepath)
def get_current_workfile(self):
comp = get_current_comp()
current_filepath = comp.GetAttrs()["COMPS_FileName"]
if not current_filepath:
return None
return current_filepath
def work_root(self, session):
work_dir = session["AVALON_WORKDIR"]
scene_dir = session.get("AVALON_SCENEDIR")
if scene_dir:
return os.path.join(work_dir, scene_dir)
else:
return work_dir
# endregion
@contextlib.contextmanager
def maintained_selection(self):
from .lib import maintained_selection
return maintained_selection()
def get_containers(self):
return ls()
def update_context_data(self, data, changes):
print(data, changes)
def get_context_data(self):
return {}
def on_pyblish_instance_toggled(instance, old_value, new_value):
@@ -283,9 +311,51 @@ def parse_container(tool):
return container
# TODO: Function below is currently an unused prototype
def list_instances(creator_id=None):
"""Return created instances in current workfile which will be published.
Returns:
(list) of dictionaries matching instances format
"""
comp = get_current_comp()
tools = comp.GetToolList(False).values()
instance_signature = {
"id": "pyblish.avalon.instance",
"identifier": creator_id
}
instances = []
for tool in tools:
data = tool.GetData('openpype')
if not isinstance(data, dict):
continue
if data.get("id") != instance_signature["id"]:
continue
if creator_id and data.get("identifier") != creator_id:
continue
instances.append(tool)
return instances
# TODO: Function below is currently an unused prototype
def remove_instance(instance):
"""Remove instance from current workfile.
Args:
instance (dict): instance representation from subsetmanager model
"""
# Assume instance is a Fusion tool directly
instance["tool"].Delete()
class FusionEventThread(QtCore.QThread):
"""QThread which will periodically ping Fusion app for any events.
The fusion.UIManager must be set up to be notified of events before they'll
be reported by this thread, for example:
fusion.UIManager.AddNotify("Comp_Save", None)
@@ -393,4 +463,4 @@ class FusionEventHandler(QtCore.QObject):
# Comp Opened
elif what in {"Comp_Opened"}:
emit_event("open", data=event)
emit_event("open", data=event)

View file

@@ -1,45 +0,0 @@
"""Host API required Work Files tool"""
import sys
import os
from .lib import get_current_comp
def file_extensions():
return [".comp"]
def has_unsaved_changes():
comp = get_current_comp()
return comp.GetAttrs()["COMPB_Modified"]
def save_file(filepath):
comp = get_current_comp()
comp.Save(filepath)
def open_file(filepath):
# Hack to get fusion, see
# openpype.hosts.fusion.api.pipeline.get_current_comp()
fusion = getattr(sys.modules["__main__"], "fusion", None)
return fusion.LoadComp(filepath)
def current_file():
comp = get_current_comp()
current_filepath = comp.GetAttrs()["COMPS_FileName"]
if not current_filepath:
return None
return current_filepath
def work_root(session):
work_dir = session["AVALON_WORKDIR"]
scene_dir = session.get("AVALON_SCENEDIR")
if scene_dir:
return os.path.join(work_dir, scene_dir)
else:
return work_dir

View file

@@ -13,11 +13,11 @@ def main(env):
# However the contents of that folder can conflict with Qt library dlls
# so we make sure to move out of it to avoid DLL Load Failed errors.
os.chdir("..")
from openpype.hosts.fusion import api
from openpype.hosts.fusion.api import FusionHost
from openpype.hosts.fusion.api import menu
# activate resolve from pype
install_host(api)
install_host(FusionHost())
log = Logger.get_logger(__name__)
log.info(f"Registered host: {registered_host()}")

View file

@@ -1,49 +0,0 @@
import os
from openpype.pipeline import (
LegacyCreator,
legacy_io
)
from openpype.hosts.fusion.api import (
get_current_comp,
comp_lock_and_undo_chunk
)
class CreateOpenEXRSaver(LegacyCreator):
name = "openexrDefault"
label = "Create OpenEXR Saver"
hosts = ["fusion"]
family = "render"
defaults = ["Main"]
def process(self):
file_format = "OpenEXRFormat"
comp = get_current_comp()
workdir = os.path.normpath(legacy_io.Session["AVALON_WORKDIR"])
filename = "{}..exr".format(self.name)
filepath = os.path.join(workdir, "render", filename)
with comp_lock_and_undo_chunk(comp):
args = (-32768, -32768) # Magical position numbers
saver = comp.AddTool("Saver", *args)
saver.SetAttrs({"TOOLS_Name": self.name})
# Setting input attributes is different from basic attributes
# Not confused with "MainInputAttributes" which
saver["Clip"] = filepath
saver["OutputFormat"] = file_format
# Check file format settings are available
if saver[file_format] is None:
raise RuntimeError("File format is not set to {}, "
"this is a bug".format(file_format))
# Set file format attributes
saver[file_format]["Depth"] = 1 # int8 | int16 | float32 | other
saver[file_format]["SaveAlpha"] = 0

View file

@@ -0,0 +1,187 @@
import os
import qtawesome
from openpype.hosts.fusion.api import (
get_current_comp,
comp_lock_and_undo_chunk
)
from openpype.pipeline import (
legacy_io,
Creator,
CreatedInstance
)
class CreateSaver(Creator):
identifier = "io.openpype.creators.fusion.saver"
name = "saver"
label = "Saver"
family = "render"
default_variants = ["Main"]
description = "Fusion Saver to generate image sequence"
def create(self, subset_name, instance_data, pre_create_data):
# TODO: Add pre_create attributes to choose file format?
file_format = "OpenEXRFormat"
comp = get_current_comp()
with comp_lock_and_undo_chunk(comp):
args = (-32768, -32768) # Magical position numbers
saver = comp.AddTool("Saver", *args)
self._update_tool_with_data(saver, data=instance_data)
saver["OutputFormat"] = file_format
# Check file format settings are available
if saver[file_format] is None:
raise RuntimeError("File format is not set to {}, "
"this is a bug".format(file_format))
# Set file format attributes
saver[file_format]["Depth"] = 1 # int8 | int16 | float32 | other
saver[file_format]["SaveAlpha"] = 0
self._imprint(saver, instance_data)
# Register the CreatedInstance
instance = CreatedInstance(
family=self.family,
subset_name=subset_name,
data=instance_data,
creator=self)
# Insert the transient data
instance.transient_data["tool"] = saver
self._add_instance_to_context(instance)
return instance
def collect_instances(self):
comp = get_current_comp()
tools = comp.GetToolList(False, "Saver").values()
for tool in tools:
data = self.get_managed_tool_data(tool)
if not data:
data = self._collect_unmanaged_saver(tool)
# Add instance
created_instance = CreatedInstance.from_existing(data, self)
# Collect transient data
created_instance.transient_data["tool"] = tool
self._add_instance_to_context(created_instance)
def get_icon(self):
return qtawesome.icon("fa.eye", color="white")
def update_instances(self, update_list):
for update in update_list:
instance = update.instance
# Get the new values after the changes by key, ignore old value
new_data = {
key: new for key, (_old, new) in update.changes.items()
}
tool = instance.transient_data["tool"]
self._update_tool_with_data(tool, new_data)
self._imprint(tool, new_data)
def remove_instances(self, instances):
for instance in instances:
# Remove the tool from the scene
tool = instance.transient_data["tool"]
if tool:
tool.Delete()
# Remove the collected CreatedInstance to remove from UI directly
self._remove_instance_from_context(instance)
def _imprint(self, tool, data):
# Save all data in a "openpype.{key}" = value data
for key, value in data.items():
tool.SetData("openpype.{}".format(key), value)
def _update_tool_with_data(self, tool, data):
"""Update tool node name and output path based on subset data"""
if "subset" not in data:
return
original_subset = tool.GetData("openpype.subset")
subset = data["subset"]
if original_subset != subset:
# Subset change detected
# Update output filepath
workdir = os.path.normpath(legacy_io.Session["AVALON_WORKDIR"])
filename = "{}..exr".format(subset)
filepath = os.path.join(workdir, "render", subset, filename)
tool["Clip"] = filepath
# Rename tool
if tool.Name != subset:
print(f"Renaming {tool.Name} -> {subset}")
tool.SetAttrs({"TOOLS_Name": subset})
def _collect_unmanaged_saver(self, tool):
# TODO: this should not be done this way - this should actually
# get the data as stored on the tool explicitly (however)
# that would disallow any 'regular saver' to be collected
# unless the instance data is stored on it to begin with
print("Collecting unmanaged saver..")
comp = tool.Comp()
# Allow regular non-managed savers to also be picked up
project = legacy_io.Session["AVALON_PROJECT"]
asset = legacy_io.Session["AVALON_ASSET"]
task = legacy_io.Session["AVALON_TASK"]
path = tool["Clip"][comp.TIME_UNDEFINED]
fname = os.path.basename(path)
fname, _ext = os.path.splitext(fname)
subset = fname.rstrip(".")
attrs = tool.GetAttrs()
passthrough = attrs["TOOLB_PassThrough"]
variant = subset[len("render"):]
return {
# Required data
"project": project,
"asset": asset,
"subset": subset,
"task": task,
"variant": variant,
"active": not passthrough,
"family": self.family,
# Unique identifier for instance and this creator
"id": "pyblish.avalon.instance",
"creator_identifier": self.identifier
}
def get_managed_tool_data(self, tool):
"""Return data of the tool if it matches creator identifier"""
data = tool.GetData('openpype')
if not isinstance(data, dict):
return
required = {
"id": "pyblish.avalon.instance",
"creator_identifier": self.identifier
}
for key, value in required.items():
if key not in data or data[key] != value:
return
return data
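As a design note, _imprint() and get_managed_tool_data() round-trip the instance data through the Saver's tool data; roughly as below, with key values illustrative and assuming Fusion groups dotted SetData keys under the "openpype" table, which is what GetData("openpype") relies on:

from openpype.hosts.fusion.api import get_current_comp

comp = get_current_comp()
saver = comp.AddTool("Saver", -32768, -32768)

# What _imprint() writes, one SetData call per key:
saver.SetData("openpype.id", "pyblish.avalon.instance")
saver.SetData("openpype.creator_identifier", "io.openpype.creators.fusion.saver")
saver.SetData("openpype.subset", "renderMain")

# What get_managed_tool_data() reads back as a dict:
data = saver.GetData("openpype")
# e.g. {"id": "pyblish.avalon.instance",
#       "creator_identifier": "io.openpype.creators.fusion.saver",
#       "subset": "renderMain"}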

View file

@@ -0,0 +1,130 @@
import collections.abc
import qtawesome
import openpype.hosts.fusion.api as api
from openpype.client import get_asset_by_name
from openpype.pipeline import (
AutoCreator,
CreatedInstance,
legacy_io,
)
def flatten_dict(d, parent_key=None, separator="."):
items = []
for key, v in d.items():
new_key = parent_key + separator + key if parent_key else key
if isinstance(v, collections.abc.MutableMapping):
items.extend(flatten_dict(v, new_key, separator=separator).items())
else:
items.append((new_key, v))
return dict(items)
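A quick worked example of what flatten_dict produces on nested instance data (the keys and values here are made up):

nested = {"family": "workfile", "creator_attributes": {"farm": False}}
print(flatten_dict(nested))
# {'family': 'workfile', 'creator_attributes.farm': False}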
class FusionWorkfileCreator(AutoCreator):
identifier = "workfile"
family = "workfile"
label = "Workfile"
default_variant = "Main"
create_allow_context_change = False
data_key = "openpype.workfile"
def collect_instances(self):
comp = api.get_current_comp()
data = comp.GetData(self.data_key)
if not data:
return
instance = CreatedInstance(
family=self.family,
subset_name=data["subset"],
data=data,
creator=self
)
instance.transient_data["comp"] = comp
self._add_instance_to_context(instance)
def update_instances(self, update_list):
for update in update_list:
instance = update.instance
comp = instance.transient_data["comp"]
if not hasattr(comp, "SetData"):
# Comp is not alive anymore, likely closed by the user
self.log.error("Workfile comp not found for existing instance."
" Comp might have been closed in the meantime.")
continue
# TODO: It appears sometimes this could be 'nested'
# Get the new values after the changes by key, ignore old value
new_data = {
key: new for key, (_old, new) in update.changes.items()
}
self._imprint(comp, new_data)
def create(self, options=None):
comp = api.get_current_comp()
if not comp:
self.log.error("Unable to find current comp")
return
# TODO: Is this really necessary?
# Force kill any existing "workfile" instances
for instance in self.create_context.instances:
if instance.family == self.family:
self.log.debug(f"Removing instance: {instance}")
self._remove_instance_from_context(instance)
project_name = legacy_io.Session["AVALON_PROJECT"]
asset_name = legacy_io.Session["AVALON_ASSET"]
task_name = legacy_io.Session["AVALON_TASK"]
host_name = legacy_io.Session["AVALON_APP"]
asset_doc = get_asset_by_name(project_name, asset_name)
subset_name = self.get_subset_name(
self.default_variant, task_name, asset_doc,
project_name, host_name
)
data = {
"asset": asset_name,
"task": task_name,
"variant": self.default_variant
}
data.update(self.get_dynamic_data(
self.default_variant, task_name, asset_doc,
project_name, host_name
))
instance = CreatedInstance(
family=self.family,
subset_name=subset_name,
data=data,
creator=self
)
instance.transient_data["comp"] = comp
self._add_instance_to_context(instance)
self._imprint(comp, data)
def get_icon(self):
return qtawesome.icon("fa.file-o", color="white")
def _imprint(self, comp, data):
# TODO: Should these keys persist or not? I'd prefer not
# Do not persist the current context for the Workfile
for key in ["variant", "subset", "asset", "task"]:
data.pop(key, None)
# Flatten any potential nested dicts
data = flatten_dict(data, separator=".")
# Prefix with data key openpype.workfile
data = {f"{self.data_key}.{key}" for key, value in data.items()}
comp.SetData(data)
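For reference, the comp-level round trip that collect_instances() depends on then looks roughly like this (keys and values are illustrative, assuming Fusion nests the dotted keys under "openpype.workfile"):

comp = api.get_current_comp()

# Roughly what _imprint() ends up writing:
comp.SetData("openpype.workfile.family", "workfile")
comp.SetData("openpype.workfile.creator_identifier", "workfile")

# What collect_instances() reads back via self.data_key:
data = comp.GetData("openpype.workfile")
# e.g. {"family": "workfile", "creator_identifier": "workfile"}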

View file

@@ -97,10 +97,15 @@ class CollectUpstreamInputs(pyblish.api.InstancePlugin):
label = "Collect Inputs"
order = pyblish.api.CollectorOrder + 0.2
hosts = ["fusion"]
families = ["render"]
def process(self, instance):
# Get all upstream and include itself
if not any(instance[:]):
self.log.debug("No tool found in instance, skipping..")
return
tool = instance[0]
nodes = list(iter_upstream(tool))
nodes.append(tool)

View file

@@ -30,7 +30,7 @@ class CollectInstances(pyblish.api.ContextPlugin):
"""
order = pyblish.api.CollectorOrder
label = "Collect Instances"
label = "Collect Instances Data"
hosts = ["fusion"]
def process(self, context):
@@ -39,68 +39,53 @@ class CollectInstances(pyblish.api.ContextPlugin):
from openpype.hosts.fusion.api.lib import get_frame_path
comp = context.data["currentComp"]
# Get all savers in the comp
tools = comp.GetToolList(False).values()
savers = [tool for tool in tools if tool.ID == "Saver"]
start, end, global_start, global_end = get_comp_render_range(comp)
context.data["frameStart"] = int(start)
context.data["frameEnd"] = int(end)
context.data["frameStartHandle"] = int(global_start)
context.data["frameEndHandle"] = int(global_end)
for tool in savers:
path = tool["Clip"][comp.TIME_UNDEFINED]
tool_attrs = tool.GetAttrs()
active = not tool_attrs["TOOLB_PassThrough"]
if not path:
self.log.warning("Skipping saver because it "
"has no path set: {}".format(tool.Name))
continue
filename = os.path.basename(path)
head, padding, tail = get_frame_path(filename)
ext = os.path.splitext(path)[1]
assert tail == ext, ("Tail does not match %s" % ext)
subset = head.rstrip("_. ") # subset is head of the filename
for instance in context:
# Include start and end render frame in label
subset = instance.data["subset"]
label = "{subset} ({start}-{end})".format(subset=subset,
start=int(start),
end=int(end))
instance = context.create_instance(subset)
instance.data.update({
"asset": os.environ["AVALON_ASSET"], # todo: not a constant
"subset": subset,
"path": path,
"outputDir": os.path.dirname(path),
"ext": ext, # todo: should be redundant
"label": label,
# todo: Allow custom frame range per instance
"task": context.data["task"],
"frameStart": context.data["frameStart"],
"frameEnd": context.data["frameEnd"],
"frameStartHandle": context.data["frameStartHandle"],
"frameEndHandle": context.data["frameStartHandle"],
"fps": context.data["fps"],
"families": ["render", "review"],
"family": "render",
"active": active,
"publish": active # backwards compatibility
})
instance.append(tool)
if instance.data["family"] == "render":
# TODO: This should probably move into a collector of
# its own for the "render" family
# This is only the case for savers currently but not
# for workfile instances. So we assume saver here.
tool = instance.data["transientData"]["tool"]
path = tool["Clip"][comp.TIME_UNDEFINED]
self.log.info("Found: \"%s\" " % path)
filename = os.path.basename(path)
head, padding, tail = get_frame_path(filename)
ext = os.path.splitext(path)[1]
assert tail == ext, ("Tail does not match %s" % ext)
# Sort/grouped by family (preserving local index)
context[:] = sorted(context, key=self.sort_by_family)
instance.data.update({
"path": path,
"outputDir": os.path.dirname(path),
"ext": ext, # todo: should be redundant?
return context
"families": ["render", "review"],
"family": "render",
def sort_by_family(self, instance):
"""Sort by family"""
return instance.data.get("families", instance.data.get("family"))
# Backwards compatibility: embed tool in instance.data
"tool": tool
})
# Add tool itself as member
instance.append(tool)
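For reference, the head/padding/tail split used above behaves roughly like this for a typical Saver clip name (exact return values depend on get_frame_path; the filename is made up):

from openpype.hosts.fusion.api.lib import get_frame_path

head, padding, tail = get_frame_path("renderMain.0001.exr")
# head ~ "renderMain.", padding ~ 4, tail ~ ".exr"  (tail must match the path's extension)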

View file

@@ -11,7 +11,7 @@ class FusionIncrementCurrentFile(pyblish.api.ContextPlugin):
label = "Increment current file"
order = pyblish.api.IntegratorOrder + 9.0
hosts = ["fusion"]
families = ["render.farm"]
families = ["workfile"]
optional = True
def process(self, context):

View file

@@ -7,7 +7,7 @@ class FusionSaveComp(pyblish.api.ContextPlugin):
label = "Save current file"
order = pyblish.api.ExtractorOrder - 0.49
hosts = ["fusion"]
families = ["render"]
families = ["render", "workfile"]
def process(self, context):

View file

@@ -1,6 +1,9 @@
import pyblish.api
from openpype.pipeline.publish import RepairAction
from openpype.pipeline import PublishValidationError
from openpype.hosts.fusion.api.action import SelectInvalidAction
class ValidateBackgroundDepth(pyblish.api.InstancePlugin):
@@ -13,6 +16,8 @@ class ValidateBackgroundDepth(pyblish.api.InstancePlugin):
families = ["render"]
optional = True
actions = [SelectInvalidAction]
@classmethod
def get_invalid(cls, instance):
@@ -29,8 +34,10 @@ class ValidateBackgroundDepth(pyblish.api.InstancePlugin):
def process(self, instance):
invalid = self.get_invalid(instance)
if invalid:
raise RuntimeError("Found %i nodes which are not set to float32"
% len(invalid))
raise PublishValidationError(
"Found {} Backgrounds tools which"
" are not set to float32".format(len(invalid)),
title=self.label)
@classmethod
def repair(cls, instance):

View file

@@ -1,6 +1,7 @@
import os
import pyblish.api
from openpype.pipeline import PublishValidationError
class ValidateFusionCompSaved(pyblish.api.ContextPlugin):
@@ -19,10 +20,12 @@ class ValidateFusionCompSaved(pyblish.api.ContextPlugin):
filename = attrs["COMPS_FileName"]
if not filename:
raise RuntimeError("Comp is not saved.")
raise PublishValidationError("Comp is not saved.",
title=self.label)
if not os.path.exists(filename):
raise RuntimeError("Comp file does not exist: %s" % filename)
raise PublishValidationError(
"Comp file does not exist: %s" % filename, title=self.label)
if attrs["COMPB_Modified"]:
self.log.warning("Comp is modified. Save your comp to ensure your "

View file

@@ -1,6 +1,9 @@
import pyblish.api
from openpype.pipeline.publish import RepairAction
from openpype.pipeline import PublishValidationError
from openpype.hosts.fusion.api.action import SelectInvalidAction
class ValidateCreateFolderChecked(pyblish.api.InstancePlugin):
@@ -15,6 +18,7 @@ class ValidateCreateFolderChecked(pyblish.api.InstancePlugin):
label = "Validate Create Folder Checked"
families = ["render"]
hosts = ["fusion"]
actions = [SelectInvalidAction]
@classmethod
def get_invalid(cls, instance):
@@ -31,8 +35,9 @@ class ValidateCreateFolderChecked(pyblish.api.InstancePlugin):
def process(self, instance):
invalid = self.get_invalid(instance)
if invalid:
raise RuntimeError("Found Saver with Create Folder During "
"Render checked off")
raise PublishValidationError(
"Found Saver with Create Folder During Render checked off",
title=self.label)
@classmethod
def repair(cls, instance):

View file

@@ -1,6 +1,9 @@
import os
import pyblish.api
from openpype.pipeline import PublishValidationError
from openpype.hosts.fusion.api.action import SelectInvalidAction
class ValidateFilenameHasExtension(pyblish.api.InstancePlugin):
@@ -16,11 +19,13 @@ class ValidateFilenameHasExtension(pyblish.api.InstancePlugin):
label = "Validate Filename Has Extension"
families = ["render"]
hosts = ["fusion"]
actions = [SelectInvalidAction]
def process(self, instance):
invalid = self.get_invalid(instance)
if invalid:
raise RuntimeError("Found Saver without an extension")
raise PublishValidationError("Found Saver without an extension",
title=self.label)
@classmethod
def get_invalid(cls, instance):

View file

@@ -1,4 +1,7 @@
import pyblish.api
from openpype.pipeline import PublishValidationError
from openpype.hosts.fusion.api.action import SelectInvalidAction
class ValidateSaverHasInput(pyblish.api.InstancePlugin):
@@ -12,6 +15,7 @@ class ValidateSaverHasInput(pyblish.api.InstancePlugin):
label = "Validate Saver Has Input"
families = ["render"]
hosts = ["fusion"]
actions = [SelectInvalidAction]
@classmethod
def get_invalid(cls, instance):
@@ -25,5 +29,8 @@ class ValidateSaverHasInput(pyblish.api.InstancePlugin):
def process(self, instance):
invalid = self.get_invalid(instance)
if invalid:
raise RuntimeError("Saver has no incoming connection: "
"{} ({})".format(instance, invalid[0].Name))
saver_name = invalid[0].Name
raise PublishValidationError(
"Saver has no incoming connection: {} ({})".format(instance,
saver_name),
title=self.label)

View file

@@ -1,4 +1,7 @@
import pyblish.api
from openpype.pipeline import PublishValidationError
from openpype.hosts.fusion.api.action import SelectInvalidAction
class ValidateSaverPassthrough(pyblish.api.ContextPlugin):
@@ -8,6 +11,7 @@ class ValidateSaverPassthrough(pyblish.api.ContextPlugin):
label = "Validate Saver Passthrough"
families = ["render"]
hosts = ["fusion"]
actions = [SelectInvalidAction]
def process(self, context):
@@ -27,8 +31,9 @@ class ValidateSaverPassthrough(pyblish.api.ContextPlugin):
if invalid_instances:
self.log.info("Reset pyblish to collect your current scene state, "
"that should fix error.")
raise RuntimeError("Invalid instances: "
"{0}".format(invalid_instances))
raise PublishValidationError(
"Invalid instances: {0}".format(invalid_instances),
title=self.label)
def is_invalid(self, instance):
@@ -36,7 +41,7 @@ class ValidateSaverPassthrough(pyblish.api.ContextPlugin):
attr = saver.GetAttrs()
active = not attr["TOOLB_PassThrough"]
if active != instance.data["publish"]:
if active != instance.data.get("publish", True):
self.log.info("Saver has different passthrough state than "
"Pyblish: {} ({})".format(instance, saver.Name))
return [saver]

View file

@@ -0,0 +1,55 @@
from collections import defaultdict
import pyblish.api
from openpype.pipeline import PublishValidationError
from openpype.hosts.fusion.api.action import SelectInvalidAction
class ValidateUniqueSubsets(pyblish.api.ContextPlugin):
"""Ensure all instances have a unique subset name"""
order = pyblish.api.ValidatorOrder
label = "Validate Unique Subsets"
families = ["render"]
hosts = ["fusion"]
actions = [SelectInvalidAction]
@classmethod
def get_invalid(cls, context):
# Collect instances per subset per asset
instances_per_subset_asset = defaultdict(lambda: defaultdict(list))
for instance in context:
asset = instance.data.get("asset", context.data.get("asset"))
subset = instance.data.get("subset", context.data.get("subset"))
instances_per_subset_asset[asset][subset].append(instance)
# Find which asset + subset combination has more than one instance
# Those are considered invalid because they'd integrate to the same
# destination.
invalid = []
for asset, instances_per_subset in instances_per_subset_asset.items():
for subset, instances in instances_per_subset.items():
if len(instances) > 1:
cls.log.warning(
"{asset} > {subset} used by more than "
"one instance: {instances}".format(
asset=asset,
subset=subset,
instances=instances
)
)
invalid.extend(instances)
# Return tools for the invalid instances so they can be selected
invalid = [instance.data["tool"] for instance in invalid]
return invalid
def process(self, context):
invalid = self.get_invalid(context)
if invalid:
raise PublishValidationError("Multiple instances are set to "
"the same asset > subset.",
title=self.label)