Mirror of https://github.com/ynput/ayon-core.git (synced 2025-12-24 21:04:40 +01:00)

Commit 1fb305e7c8: Merge branch '2.x/develop' into develop
169 changed files with 2737 additions and 576 deletions
pype/hooks/fusion/prelaunch.py (new file, 61 lines)

@@ -0,0 +1,61 @@
import os
import traceback
import importlib
from pype.lib import PypeHook
from pypeapp import Logger
from pype.hosts.fusion import utils


class FusionPrelaunch(PypeHook):
    """
    This hook checks that the current workfile path contains a Fusion
    project.
    """
    def __init__(self, logger=None):
        if not logger:
            self.log = Logger().get_logger(self.__class__.__name__)
        else:
            self.log = logger

        self.signature = "( {} )".format(self.__class__.__name__)

    def execute(self, *args, env: dict = None) -> bool:

        if not env:
            env = os.environ

        # make sure Python 3.6 is installed at the provided path
        py36_dir = os.path.normpath(env.get("PYTHON36", ""))
        assert os.path.isdir(py36_dir), (
            "Python 3.6 is not installed at the provided folder path. Either "
            "make sure `environments\\resolve.json` has `PYTHON36` set "
            "correctly or make sure Python 3.6 is installed "
            f"in the given path. \nPYTHON36: `{py36_dir}`"
        )
        self.log.info(f"Path to Fusion Python folder: `{py36_dir}`...")
        env["PYTHON36"] = py36_dir

        # set the utility scripts dir for script syncing
        us_dir = os.path.normpath(env.get("FUSION_UTILITY_SCRIPTS_DIR", ""))
        assert os.path.isdir(us_dir), (
            "Fusion utility script dir does not exist. Either make sure "
            "`environments\\fusion.json` has `FUSION_UTILITY_SCRIPTS_DIR` "
            "set correctly or reinstall DaVinci Resolve. \n"
            f"FUSION_UTILITY_SCRIPTS_DIR: `{us_dir}`"
        )

        try:
            __import__("avalon.fusion")
            __import__("pyblish")

        except ImportError as e:
            print(traceback.format_exc())
            print("pyblish: Could not load integration: %s " % e)

        else:
            # Fusion setup integration
            importlib.reload(utils)
            utils.setup(env)

        return True
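The prelaunch hook above only needs an environment mapping; a minimal sketch of driving it by hand, assuming `PYTHON36` and `FUSION_UTILITY_SCRIPTS_DIR` point at folders that actually exist (the paths below are placeholders, not part of this commit):

    # Illustrative only: run FusionPrelaunch with an explicit env dict.
    import os

    from pype.hooks.fusion.prelaunch import FusionPrelaunch

    env = dict(os.environ)
    # Placeholder locations; the hook asserts that both directories exist.
    env.setdefault("PYTHON36", "C:/Python36")
    env.setdefault(
        "FUSION_UTILITY_SCRIPTS_DIR",
        "C:/ProgramData/Blackmagic Design/Fusion/Scripts/Comp")

    hook = FusionPrelaunch()
    if hook.execute(env=env):
        print("Fusion prelaunch finished and utility scripts were synced.")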
@@ -1,63 +1,38 @@
import os
from .pipeline import (
    install,
    uninstall,
    publish,
    launch_workfiles_app
)

from avalon import api as avalon
from pyblish import api as pyblish
from pype import PLUGINS_DIR

PUBLISH_PATH = os.path.join(PLUGINS_DIR, "fusion", "publish")
LOAD_PATH = os.path.join(PLUGINS_DIR, "fusion", "load")
CREATE_PATH = os.path.join(PLUGINS_DIR, "fusion", "create")
INVENTORY_PATH = os.path.join(PLUGINS_DIR, "fusion", "inventory")
from .utils import (
    setup
)


def install():
    print("Registering Fusion plug-ins..")
    pyblish.register_plugin_path(PUBLISH_PATH)
    avalon.register_plugin_path(avalon.Loader, LOAD_PATH)
    avalon.register_plugin_path(avalon.Creator, CREATE_PATH)
    avalon.register_plugin_path(avalon.InventoryAction, INVENTORY_PATH)
from .lib import (
    get_additional_data,
    update_frame_range
)

    pyblish.register_callback("instanceToggled", on_pyblish_instance_toggled)

    # Disable all families except for the ones we explicitly want to see
    family_states = ["imagesequence",
                     "camera",
                     "pointcache"]

    avalon.data["familiesStateDefault"] = False
    avalon.data["familiesStateToggled"] = family_states
from .menu import launch_pype_menu


def uninstall():
    print("Deregistering Fusion plug-ins..")
    pyblish.deregister_plugin_path(PUBLISH_PATH)
    avalon.deregister_plugin_path(avalon.Loader, LOAD_PATH)
    avalon.deregister_plugin_path(avalon.Creator, CREATE_PATH)
__all__ = [
    # pipeline
    "install",
    "uninstall",
    "publish",
    "launch_workfiles_app",

    pyblish.deregister_callback("instanceToggled", on_pyblish_instance_toggled)
    # utils
    "setup",
    "get_resolve_module",

    # lib
    "get_additional_data",
    "update_frame_range",

def on_pyblish_instance_toggled(instance, new_value, old_value):
    """Toggle saver tool passthrough states on instance toggles."""

    from avalon.fusion import comp_lock_and_undo_chunk

    comp = instance.context.data.get("currentComp")
    if not comp:
        return

    savers = [tool for tool in instance if
              getattr(tool, "ID", None) == "Saver"]
    if not savers:
        return

    # Whether instances should be passthrough based on new value
    passthrough = not new_value
    with comp_lock_and_undo_chunk(comp,
                                  undo_queue_name="Change instance "
                                                  "active state"):
        for tool in savers:
            attrs = tool.GetAttrs()
            current = attrs["TOOLB_PassThrough"]
            if current != passthrough:
                tool.SetAttrs({"TOOLB_PassThrough": passthrough})
    # menu
    "launch_pype_menu",
]
pype/hosts/fusion/menu.py (new file, 170 lines)

@@ -0,0 +1,170 @@
import os
import sys

from Qt import QtWidgets, QtCore

from .pipeline import (
    publish,
    launch_workfiles_app
)

from avalon.tools import (
    creator,
    loader,
    sceneinventory,
    libraryloader
)

from .scripts import (
    set_rendermode,
    duplicate_with_inputs
)


def load_stylesheet():
    path = os.path.join(os.path.dirname(__file__), "menu_style.qss")
    if not os.path.exists(path):
        print("Unable to load stylesheet, file not found in resources")
        return ""

    with open(path, "r") as file_stream:
        stylesheet = file_stream.read()
    return stylesheet


class Spacer(QtWidgets.QWidget):
    def __init__(self, height, *args, **kwargs):
        super(self.__class__, self).__init__(*args, **kwargs)

        self.setFixedHeight(height)

        real_spacer = QtWidgets.QWidget(self)
        real_spacer.setObjectName("Spacer")
        real_spacer.setFixedHeight(height)

        layout = QtWidgets.QVBoxLayout(self)
        layout.setContentsMargins(0, 0, 0, 0)
        layout.addWidget(real_spacer)

        self.setLayout(layout)


class PypeMenu(QtWidgets.QWidget):
    def __init__(self, *args, **kwargs):
        super(self.__class__, self).__init__(*args, **kwargs)

        self.setObjectName("PypeMenu")

        self.setWindowFlags(
            QtCore.Qt.Window
            | QtCore.Qt.CustomizeWindowHint
            | QtCore.Qt.WindowTitleHint
            | QtCore.Qt.WindowCloseButtonHint
            | QtCore.Qt.WindowStaysOnTopHint
        )
        self.render_mode_widget = None
        self.setWindowTitle("Pype")
        workfiles_btn = QtWidgets.QPushButton("Workfiles", self)
        create_btn = QtWidgets.QPushButton("Create", self)
        publish_btn = QtWidgets.QPushButton("Publish", self)
        load_btn = QtWidgets.QPushButton("Load", self)
        inventory_btn = QtWidgets.QPushButton("Inventory", self)
        libload_btn = QtWidgets.QPushButton("Library", self)
        rendermode_btn = QtWidgets.QPushButton("Set render mode", self)
        duplicate_with_inputs_btn = QtWidgets.QPushButton(
            "Duplicate with input connections", self
        )
        reset_resolution_btn = QtWidgets.QPushButton(
            "Reset Resolution from project", self
        )

        layout = QtWidgets.QVBoxLayout(self)
        layout.setContentsMargins(10, 20, 10, 20)

        layout.addWidget(workfiles_btn)
        layout.addWidget(create_btn)
        layout.addWidget(publish_btn)
        layout.addWidget(load_btn)
        layout.addWidget(inventory_btn)

        layout.addWidget(Spacer(15, self))

        layout.addWidget(libload_btn)

        layout.addWidget(Spacer(15, self))

        layout.addWidget(rendermode_btn)

        layout.addWidget(Spacer(15, self))

        layout.addWidget(duplicate_with_inputs_btn)
        layout.addWidget(reset_resolution_btn)

        self.setLayout(layout)

        workfiles_btn.clicked.connect(self.on_workfile_clicked)
        create_btn.clicked.connect(self.on_create_clicked)
        publish_btn.clicked.connect(self.on_publish_clicked)
        load_btn.clicked.connect(self.on_load_clicked)
        inventory_btn.clicked.connect(self.on_inventory_clicked)
        libload_btn.clicked.connect(self.on_libload_clicked)
        rendermode_btn.clicked.connect(self.on_rendernode_clicked)
        duplicate_with_inputs_btn.clicked.connect(
            self.on_duplicate_with_inputs_clicked)
        reset_resolution_btn.clicked.connect(self.on_reset_resolution_clicked)

    def on_workfile_clicked(self):
        print("Clicked Workfile")
        launch_workfiles_app()

    def on_create_clicked(self):
        print("Clicked Create")
        creator.show()

    def on_publish_clicked(self):
        print("Clicked Publish")
        publish(None)

    def on_load_clicked(self):
        print("Clicked Load")
        loader.show(use_context=True)

    def on_inventory_clicked(self):
        print("Clicked Inventory")
        sceneinventory.show()

    def on_libload_clicked(self):
        print("Clicked Library")
        libraryloader.show()

    def on_rendernode_clicked(self):
        from avalon import style
        print("Clicked Set Render Mode")
        if self.render_mode_widget is None:
            window = set_rendermode.SetRenderMode()
            window.setStyleSheet(style.load_stylesheet())
            window.show()
            self.render_mode_widget = window
        else:
            self.render_mode_widget.show()

    def on_duplicate_with_inputs_clicked(self):
        duplicate_with_inputs.duplicate_with_input_connections()
        print("Clicked Duplicate with input connections")

    def on_reset_resolution_clicked(self):
        print("Clicked Reset Resolution")


def launch_pype_menu():
    app = QtWidgets.QApplication(sys.argv)
    app.setQuitOnLastWindowClosed(False)

    pype_menu = PypeMenu()

    stylesheet = load_stylesheet()
    pype_menu.setStyleSheet(stylesheet)

    pype_menu.show()

    sys.exit(app.exec_())
pype/hosts/fusion/menu_style.qss (new file, 29 lines)

@@ -0,0 +1,29 @@
QWidget {
    background-color: #282828;
    border-radius: 3;
}

QPushButton {
    border: 1px solid #090909;
    background-color: #201f1f;
    color: #ffffff;
    padding: 5;
}

QPushButton:focus {
    background-color: "#171717";
    color: #d0d0d0;
}

QPushButton:hover {
    background-color: "#171717";
    color: #e64b3d;
}

#PypeMenu {
    border: 1px solid #fef9ef;
}

#Spacer {
    background-color: #282828;
}
pype/hosts/fusion/pipeline.py (new file, 114 lines)

@@ -0,0 +1,114 @@
"""
Basic avalon integration
"""
import os

from avalon.tools import workfiles
from avalon import api as avalon
from pyblish import api as pyblish
from pypeapp import Logger
from pype import PLUGINS_DIR

log = Logger().get_logger(__name__, "fusion")


AVALON_CONFIG = os.environ["AVALON_CONFIG"]

LOAD_PATH = os.path.join(PLUGINS_DIR, "fusion", "load")
CREATE_PATH = os.path.join(PLUGINS_DIR, "fusion", "create")
INVENTORY_PATH = os.path.join(PLUGINS_DIR, "fusion", "inventory")

PUBLISH_PATH = os.path.join(
    PLUGINS_DIR, "fusion", "publish"
).replace("\\", "/")


def install():
    """Install fusion-specific functionality of avalon-core.

    This is where you install menus and register families, data
    and loaders into fusion.

    It is called automatically when installing via `api.install(avalon.fusion)`.

    See the Maya equivalent for inspiration on how to implement this.

    """

    # Disable all families except for the ones we explicitly want to see
    family_states = ["imagesequence",
                     "camera",
                     "pointcache"]
    avalon.data["familiesStateDefault"] = False
    avalon.data["familiesStateToggled"] = family_states

    log.info("pype.hosts.fusion installed")

    pyblish.register_host("fusion")
    pyblish.register_plugin_path(PUBLISH_PATH)
    log.info("Registering Fusion plug-ins..")

    avalon.register_plugin_path(avalon.Loader, LOAD_PATH)
    avalon.register_plugin_path(avalon.Creator, CREATE_PATH)
    avalon.register_plugin_path(avalon.InventoryAction, INVENTORY_PATH)

    pyblish.register_callback("instanceToggled", on_pyblish_instance_toggled)


def uninstall():
    """Uninstall everything that was installed.

    This is where you undo everything that was done in `install()`.
    That means removing menus, deregistering families and data, and
    everything else. It should be as though `install()` was never run,
    because odds are calling this function means the user is interested
    in re-installing shortly afterwards, for example after modifying the
    menu or registered families.

    """
    pyblish.deregister_host("fusion")
    pyblish.deregister_plugin_path(PUBLISH_PATH)
    log.info("Deregistering Fusion plug-ins..")

    avalon.deregister_plugin_path(avalon.Loader, LOAD_PATH)
    avalon.deregister_plugin_path(avalon.Creator, CREATE_PATH)
    avalon.deregister_plugin_path(avalon.InventoryAction, INVENTORY_PATH)

    pyblish.deregister_callback("instanceToggled", on_pyblish_instance_toggled)


def on_pyblish_instance_toggled(instance, new_value, old_value):
    """Toggle saver tool passthrough states on instance toggles."""

    from avalon.fusion import comp_lock_and_undo_chunk

    comp = instance.context.data.get("currentComp")
    if not comp:
        return

    savers = [tool for tool in instance if
              getattr(tool, "ID", None) == "Saver"]
    if not savers:
        return

    # Whether instances should be passthrough based on new value
    passthrough = not new_value
    with comp_lock_and_undo_chunk(comp,
                                  undo_queue_name="Change instance "
                                                  "active state"):
        for tool in savers:
            attrs = tool.GetAttrs()
            current = attrs["TOOLB_PassThrough"]
            if current != passthrough:
                tool.SetAttrs({"TOOLB_PassThrough": passthrough})


def launch_workfiles_app(*args):
    workdir = os.environ["AVALON_WORKDIR"]
    workfiles.show(workdir)


def publish(parent):
    """Shorthand to publish from within host"""
    from avalon.tools import publish
    return publish.show(parent)
pype/hosts/fusion/scripts/duplicate_with_inputs.py (new file, 42 lines)

@@ -0,0 +1,42 @@
from avalon import fusion


def is_connected(input):
    """Return whether an input has incoming connection"""
    return input.GetAttrs()["INPB_Connected"]


def duplicate_with_input_connections():
    """Duplicate selected tools with incoming connections."""

    comp = fusion.get_current_comp()
    original_tools = comp.GetToolList(True).values()
    if not original_tools:
        return  # nothing selected

    with fusion.comp_lock_and_undo_chunk(
            comp, "Duplicate With Input Connections"):

        # Generate duplicates
        comp.Copy()
        comp.SetActiveTool()
        comp.Paste()
        duplicate_tools = comp.GetToolList(True).values()

        # Copy connections
        for original, new in zip(original_tools, duplicate_tools):

            original_inputs = original.GetInputList().values()
            new_inputs = new.GetInputList().values()
            assert len(original_inputs) == len(new_inputs)

            for original_input, new_input in zip(original_inputs, new_inputs):

                if is_connected(original_input):

                    if is_connected(new_input):
                        # Already connected if it is between the copied tools
                        continue

                    new_input.ConnectTo(original_input.GetConnectedOutput())
                    assert is_connected(new_input), "Must be connected now"
@@ -32,7 +32,7 @@ def _format_version_folder(folder):

    new_version = 1
    if os.path.isdir(folder):
        re_version = re.compile("v\d+$")
        re_version = re.compile(r"v\d+$")
        versions = [i for i in os.listdir(folder) if os.path.isdir(i)
                    and re_version.match(i)]
        if versions:
@@ -87,7 +87,7 @@ def _format_filepath(session):

    # Create new unique filepath
    if os.path.exists(new_filepath):
        new_filepath = studio.version_up(new_filepath)
        new_filepath = pype.version_up(new_filepath)

    return new_filepath

@@ -95,6 +95,15 @@ def _format_filepath(session):
def _update_savers(comp, session):
    """Update all savers of the current comp to ensure the output is correct

    This will refactor the Saver file outputs to the renders of the new
    session that is provided.

    In case the original saver path was set relative to a /fusion/ folder,
    that relative path will be matched, except that all "version"
    (e.g. v010) references will be reset to v001. Otherwise only a version
    folder will be computed in the new session's work "render" folder to
    dump the files in, keeping the original filenames.

    Args:
        comp (object): current comp instance
        session (dict): the current Avalon session

@@ -114,8 +123,36 @@ def _update_savers(comp, session):
    savers = comp.GetToolList(False, "Saver").values()
    for saver in savers:
        filepath = saver.GetAttrs("TOOLST_Clip_Name")[1.0]
        filename = os.path.basename(filepath)
        new_path = os.path.join(renders_version, filename)

        # Get the old path relative to the "fusion" app folder so we can
        # apply the same relative path afterwards. If not found, fall back
        # to using just a version folder with the filename in it.
        # todo: can we make this less magical?
        relpath = filepath.replace("\\", "/").rsplit("/fusion/", 1)[-1]

        if os.path.isabs(relpath):
            # If not relative to a "/fusion/" folder then just use filename
            filename = os.path.basename(filepath)
            log.warning("Can't parse relative path, refactoring to only "
                        "filename in a version folder: %s" % filename)
            new_path = os.path.join(renders_version, filename)

        else:
            # Else reuse the relative path.
            # Reset version in folder and filename in the relative path
            # to v001. The version is only detected when prefixed
            # with either `_v` (underscore) or `/v` (folder).
            version_pattern = r"(/|_)v[0-9]+"
            if re.search(version_pattern, relpath):
                new_relpath = re.sub(version_pattern,
                                     r"\1v001",
                                     relpath)
                log.info("Resetting version folders to v001: "
                         "%s -> %s" % (relpath, new_relpath))
                relpath = new_relpath

            new_path = os.path.join(new_work, relpath)

        saver["Clip"] = new_path


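The version reset above comes down to a single `re.sub` call; a short illustrative check (the sample paths are invented):

    import re

    version_pattern = r"(/|_)v[0-9]+"

    # Folder-style and underscore-style version tokens are both reset to v001.
    print(re.sub(version_pattern, r"\1v001", "renders/v010/beauty.exr"))
    # -> renders/v001/beauty.exr
    print(re.sub(version_pattern, r"\1v001", "sh010_comp_v023.0001.exr"))
    # -> sh010_comp_v001.0001.exr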
@@ -138,6 +175,13 @@ def update_frame_range(comp, representations):
    versions = io.find({"type": "version", "_id": {"$in": version_ids}})
    versions = list(versions)

    versions = [v for v in versions
                if v["data"].get("startFrame", None) is not None]

    if not versions:
        log.warning("No versions loaded to match frame range to.\n")
        return

    start = min(v["data"]["frameStart"] for v in versions)
    end = max(v["data"]["frameEnd"] for v in versions)


@@ -180,7 +224,8 @@ def switch(asset_name, filepath=None, new=True):
    else:
        fusion = _get_fusion_instance()
        current_comp = fusion.LoadComp(filepath, quiet=True)
    assert current_comp is not None, "Fusion could not load '%s'" % filepath
    assert current_comp is not None, (
        "Fusion could not load '{}'").format(filepath)

    host = api.registered_host()
    containers = list(host.ls())

@@ -189,8 +234,9 @@ def switch(asset_name, filepath=None, new=True):
    representations = []
    for container in containers:
        try:
            representation = pype.switch_item(container,
                                              asset_name=asset_name)
            representation = pype.switch_item(
                container,
                asset_name=asset_name)
            representations.append(representation)
        except Exception as e:
            current_comp.Print("Error in switching! %s\n" % e.message)

@@ -223,6 +269,8 @@ def switch(asset_name, filepath=None, new=True):

if __name__ == '__main__':

    # QUESTION: can we convert this to a gui rather than a standalone script?
    # TODO: convert to gui tool
    import argparse

    parser = argparse.ArgumentParser(description="Switch to a shot within an"

@@ -1,87 +0,0 @@
"""This module is used for command line publishing of image sequences."""

import os
import sys
import logging

handler = logging.basicConfig()
log = logging.getLogger("Publish Image Sequences")
log.setLevel(logging.DEBUG)

error_format = "Failed {plugin.__name__}: {error} -- {error.traceback}"


def publish(paths, gui=False):
    """Publish rendered image sequences based on the job data

    Args:
        paths (list): a list of paths where to publish from
        gui (bool, Optional): Choose to show Pyblish GUI, default is False

    Returns:
        None

    """

    assert isinstance(paths, (list, tuple)), "Must be list of paths"
    log.info(paths)
    assert any(paths), "No paths found in the list"
    # Set the paths to publish for the collector if any provided
    if paths:
        os.environ["FILESEQUENCE"] = os.pathsep.join(paths)

    # Install Avalon with shell as current host
    from avalon import api, shell
    api.install(shell)

    # Register target and host
    import pyblish.api
    pyblish.api.register_target("filesequence")
    pyblish.api.register_host("shell")

    # Publish items
    if gui:
        import pyblish_qml
        pyblish_qml.show(modal=True)
    else:

        import pyblish.util
        context = pyblish.util.publish()

        if not context:
            log.warning("Nothing collected.")
            sys.exit(1)

        # Collect errors, {plugin name: error}
        error_results = [r for r in context.data["results"] if r["error"]]

        if error_results:
            log.error(" Errors occurred ...")
            for result in error_results:
                log.error(error_format.format(**result))
            sys.exit(2)


def __main__():
    import argparse
    parser = argparse.ArgumentParser()
    parser.add_argument("--paths",
                        nargs="*",
                        default=[],
                        help="The filepaths to publish. This can be a "
                             "directory or a path to a .json publish "
                             "configuration.")
    parser.add_argument("--gui",
                        default=False,
                        action="store_true",
                        help="Whether to run Pyblish in GUI mode.")

    kwargs, args = parser.parse_known_args()

    print("Running publish imagesequence...")
    print("Paths: {}".format(kwargs.paths or [os.getcwd()]))
    publish(kwargs.paths, gui=kwargs.gui)


if __name__ == '__main__':
    __main__()
pype/hosts/fusion/scripts/set_rendermode.py (new file, 112 lines)

@@ -0,0 +1,112 @@
from avalon.vendor.Qt import QtWidgets
from avalon.vendor import qtawesome
import avalon.fusion as avalon


_help = {"local": "Render the comp on your own machine and publish "
                  "it from the destination folder",
         "farm": "Submit a Fusion render job to a render farm to use all "
                 "other computers and add a publish job"}


class SetRenderMode(QtWidgets.QWidget):

    def __init__(self, parent=None):
        QtWidgets.QWidget.__init__(self, parent)

        self._comp = avalon.get_current_comp()
        self._comp_name = self._get_comp_name()

        self.setWindowTitle("Set Render Mode")
        self.setFixedSize(300, 175)

        layout = QtWidgets.QVBoxLayout()

        # region comp info
        comp_info_layout = QtWidgets.QHBoxLayout()

        update_btn = QtWidgets.QPushButton(qtawesome.icon("fa.refresh",
                                                          color="white"), "")
        update_btn.setFixedWidth(25)
        update_btn.setFixedHeight(25)

        comp_information = QtWidgets.QLineEdit()
        comp_information.setEnabled(False)

        comp_info_layout.addWidget(comp_information)
        comp_info_layout.addWidget(update_btn)
        # endregion comp info

        # region modes
        mode_options = QtWidgets.QComboBox()
        mode_options.addItems(_help.keys())

        mode_information = QtWidgets.QTextEdit()
        mode_information.setReadOnly(True)
        # endregion modes

        accept_btn = QtWidgets.QPushButton("Accept")

        layout.addLayout(comp_info_layout)
        layout.addWidget(mode_options)
        layout.addWidget(mode_information)
        layout.addWidget(accept_btn)

        self.setLayout(layout)

        self.comp_information = comp_information
        self.update_btn = update_btn

        self.mode_options = mode_options
        self.mode_information = mode_information

        self.accept_btn = accept_btn

        self.connections()
        self.update()

        # Force updated render mode help text
        self._update_rendermode_info()

    def connections(self):
        """Build connections between code and buttons"""

        self.update_btn.clicked.connect(self.update)
        self.accept_btn.clicked.connect(self._set_comp_rendermode)
        self.mode_options.currentIndexChanged.connect(
            self._update_rendermode_info)

    def update(self):
        """Update all information in the UI"""

        self._comp = avalon.get_current_comp()
        self._comp_name = self._get_comp_name()
        self.comp_information.setText(self._comp_name)

        # Update current comp settings
        mode = self._get_comp_rendermode()
        index = self.mode_options.findText(mode)
        self.mode_options.setCurrentIndex(index)

    def _update_rendermode_info(self):
        rendermode = self.mode_options.currentText()
        self.mode_information.setText(_help[rendermode])

    def _get_comp_name(self):
        return self._comp.GetAttrs("COMPS_Name")

    def _get_comp_rendermode(self):
        return self._comp.GetData("pype.rendermode") or "local"

    def _set_comp_rendermode(self):
        rendermode = self.mode_options.currentText()
        self._comp.SetData("pype.rendermode", rendermode)

        self._comp.Print("Updated render mode to '%s'\n" % rendermode)
        self.hide()

    def _validation(self):
        ui_mode = self.mode_options.currentText()
        comp_mode = self._get_comp_rendermode()

        return comp_mode == ui_mode
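`SetRenderMode` persists the chosen mode as comp data; a hedged sketch of how another script could read it back (the consumer below is hypothetical, only `GetData` and the `pype.rendermode` key come from the code above):

    import avalon.fusion as avalon

    comp = avalon.get_current_comp()
    rendermode = comp.GetData("pype.rendermode") or "local"

    if rendermode == "farm":
        print("Comp is flagged for farm rendering.")
    else:
        print("Comp renders locally.")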
@@ -0,0 +1,15 @@
from avalon.fusion import comp_lock_and_undo_chunk

from avalon import fusion
comp = fusion.get_current_comp()


def main():
    """Set all selected backgrounds to 32 bit"""
    with comp_lock_and_undo_chunk(comp, 'Selected Backgrounds to 32bit'):
        tools = comp.GetToolList(True, "Background").values()
        for tool in tools:
            tool.Depth = 5


main()
@@ -0,0 +1,14 @@
from avalon.fusion import comp_lock_and_undo_chunk
from avalon import fusion
comp = fusion.get_current_comp()


def main():
    """Set all backgrounds to 32 bit"""
    with comp_lock_and_undo_chunk(comp, 'Backgrounds to 32bit'):
        tools = comp.GetToolList(False, "Background").values()
        for tool in tools:
            tool.Depth = 5


main()
@@ -0,0 +1,14 @@
from avalon.fusion import comp_lock_and_undo_chunk
from avalon import fusion
comp = fusion.get_current_comp()


def main():
    """Set all selected loaders to 32 bit"""
    with comp_lock_and_undo_chunk(comp, 'Selected Loaders to 32bit'):
        tools = comp.GetToolList(True, "Loader").values()
        for tool in tools:
            tool.Depth = 5


main()
pype/hosts/fusion/utility_scripts/32bit/loaders_to32bit.py (new file, 14 lines)

@@ -0,0 +1,14 @@
from avalon.fusion import comp_lock_and_undo_chunk
from avalon import fusion
comp = fusion.get_current_comp()


def main():
    """Set all loaders to 32 bit"""
    with comp_lock_and_undo_chunk(comp, 'Loaders to 32bit'):
        tools = comp.GetToolList(False, "Loader").values()
        for tool in tools:
            tool.Depth = 5


main()
pype/hosts/fusion/utility_scripts/Pype_menu.py (new file, 26 lines)

@@ -0,0 +1,26 @@
import os
import sys
import pype

from pypeapp import Logger

log = Logger().get_logger(__name__)


def main(env):
    from pype.hosts.fusion import menu
    import avalon.fusion
    # Register pype's global pyblish plugins
    pype.install()

    # activate fusion from pype
    avalon.api.install(avalon.fusion)

    log.info(f"Avalon registered hosts: {avalon.api.registered_host()}")

    menu.launch_pype_menu()


if __name__ == "__main__":
    result = main(os.environ)
    sys.exit(not bool(result))
pype/hosts/fusion/utility_scripts/switch_ui.py (new file, 201 lines)

@@ -0,0 +1,201 @@
import os
import glob
import logging

import avalon.io as io
import avalon.api as api
import avalon.pipeline as pipeline
import avalon.fusion
import avalon.style as style
from avalon.vendor.Qt import QtWidgets, QtCore
from avalon.vendor import qtawesome as qta


log = logging.getLogger("Fusion Switch Shot")


class App(QtWidgets.QWidget):

    def __init__(self, parent=None):

        ################################################
        # |---------------------|  |------------------| #
        # | Comp                |  | Asset            | #
        # | [..][             v]|  | [              v]| #
        # |---------------------|  |------------------| #
        # |        Update existing comp [ ]            | #
        # |--------------------------------------------| #
        # |                  Switch                    | #
        # |--------------------------------------------| #
        ################################################

        QtWidgets.QWidget.__init__(self, parent)

        layout = QtWidgets.QVBoxLayout()

        # Comp related input
        comp_hlayout = QtWidgets.QHBoxLayout()
        comp_label = QtWidgets.QLabel("Comp file")
        comp_label.setFixedWidth(50)
        comp_box = QtWidgets.QComboBox()

        button_icon = qta.icon("fa.folder", color="white")
        open_from_dir = QtWidgets.QPushButton()
        open_from_dir.setIcon(button_icon)

        comp_box.setFixedHeight(25)
        open_from_dir.setFixedWidth(25)
        open_from_dir.setFixedHeight(25)

        comp_hlayout.addWidget(comp_label)
        comp_hlayout.addWidget(comp_box)
        comp_hlayout.addWidget(open_from_dir)

        # Asset related input
        asset_hlayout = QtWidgets.QHBoxLayout()
        asset_label = QtWidgets.QLabel("Shot")
        asset_label.setFixedWidth(50)

        asset_box = QtWidgets.QComboBox()
        asset_box.setLineEdit(QtWidgets.QLineEdit())
        asset_box.setFixedHeight(25)

        refresh_icon = qta.icon("fa.refresh", color="white")
        refresh_btn = QtWidgets.QPushButton()
        refresh_btn.setIcon(refresh_icon)

        asset_box.setFixedHeight(25)
        refresh_btn.setFixedWidth(25)
        refresh_btn.setFixedHeight(25)

        asset_hlayout.addWidget(asset_label)
        asset_hlayout.addWidget(asset_box)
        asset_hlayout.addWidget(refresh_btn)

        # Options
        options = QtWidgets.QHBoxLayout()
        options.setAlignment(QtCore.Qt.AlignLeft)

        current_comp_check = QtWidgets.QCheckBox()
        current_comp_check.setChecked(True)
        current_comp_label = QtWidgets.QLabel("Use current comp")

        options.addWidget(current_comp_label)
        options.addWidget(current_comp_check)

        accept_btn = QtWidgets.QPushButton("Switch")

        layout.addLayout(options)
        layout.addLayout(comp_hlayout)
        layout.addLayout(asset_hlayout)
        layout.addWidget(accept_btn)

        self._open_from_dir = open_from_dir
        self._comps = comp_box
        self._assets = asset_box
        self._use_current = current_comp_check
        self._accept_btn = accept_btn
        self._refresh_btn = refresh_btn

        self.setWindowTitle("Fusion Switch Shot")
        self.setLayout(layout)

        self.resize(260, 140)
        self.setMinimumWidth(260)
        self.setFixedHeight(140)

        self.connections()

        # Update ui to correct state
        self._on_use_current_comp()
        self._refresh()

    def connections(self):
        self._use_current.clicked.connect(self._on_use_current_comp)
        self._open_from_dir.clicked.connect(self._on_open_from_dir)
        self._refresh_btn.clicked.connect(self._refresh)
        self._accept_btn.clicked.connect(self._on_switch)

    def _on_use_current_comp(self):
        state = self._use_current.isChecked()
        self._open_from_dir.setEnabled(not state)
        self._comps.setEnabled(not state)

    def _on_open_from_dir(self):

        start_dir = self._get_context_directory()
        comp_file, _ = QtWidgets.QFileDialog.getOpenFileName(
            self, "Choose comp", start_dir)

        if not comp_file:
            return

        # Create completer
        self.populate_comp_box([comp_file])
        self._refresh()

    def _refresh(self):
        # Clear any existing items
        self._assets.clear()

        asset_names = [a["name"] for a in self.collect_assets()]
        completer = QtWidgets.QCompleter(asset_names)

        self._assets.setCompleter(completer)
        self._assets.addItems(asset_names)

    def _on_switch(self):

        if not self._use_current.isChecked():
            file_name = self._comps.itemData(self._comps.currentIndex())
        else:
            comp = avalon.fusion.get_current_comp()
            file_name = comp.GetAttrs("COMPS_FileName")

        asset = self._assets.currentText()

        import colorbleed.scripts.fusion_switch_shot as switch_shot
        switch_shot.switch(asset_name=asset, filepath=file_name, new=True)

    def _get_context_directory(self):

        project = io.find_one({"type": "project",
                               "name": api.Session["AVALON_PROJECT"]},
                              projection={"config": True})

        template = project["config"]["template"]["work"]
        dir = pipeline._format_work_template(template, api.Session)

        return dir

    def collect_slap_comps(self, directory):
        items = glob.glob("{}/*.comp".format(directory))
        return items

    def collect_assets(self):
        return list(io.find({"type": "asset", "silo": "film"}))

    def populate_comp_box(self, files):
        """Ensure we display the filename only but the path is stored as well

        Args:
            files (list): list of full file path [path/to/item/item.ext,]

        Returns:
            None
        """

        for f in files:
            filename = os.path.basename(f)
            self._comps.addItem(filename, userData=f)


if __name__ == '__main__':
    import sys
    api.install(avalon.fusion)

    app = QtWidgets.QApplication(sys.argv)
    window = App()
    window.setStyleSheet(style.load_stylesheet())
    window.show()
    sys.exit(app.exec_())
pype/hosts/fusion/utility_scripts/update_loader_ranges.py (new file, 37 lines)

@@ -0,0 +1,37 @@
"""Forces Fusion to 'retrigger' the Loader to update.

Warning:
    This might change settings like 'Reverse', 'Loop', trims and other
    settings of the Loader. So use this at your own risk.

"""
from avalon import fusion


def update_loader_ranges():
    comp = fusion.get_current_comp()
    with fusion.comp_lock_and_undo_chunk(comp, "Reload clip time ranges"):
        tools = comp.GetToolList(True, "Loader").values()
        for tool in tools:

            # Get tool attributes
            tool_a = tool.GetAttrs()
            clipTable = tool_a['TOOLST_Clip_Name']
            altclipTable = tool_a['TOOLST_AltClip_Name']
            startTime = tool_a['TOOLNT_Clip_Start']
            old_global_in = tool.GlobalIn[comp.CurrentTime]

            # Reapply
            for index, _ in clipTable.items():
                time = startTime[index]
                tool.Clip[time] = tool.Clip[time]

            for index, _ in altclipTable.items():
                time = startTime[index]
                tool.ProxyFilename[time] = tool.ProxyFilename[time]

            tool.GlobalIn[comp.CurrentTime] = old_global_in


if __name__ == '__main__':
    update_loader_ranges()
pype/hosts/fusion/utils.py (new file, 85 lines)

@@ -0,0 +1,85 @@
#! python3

"""
Fusion tools for setting up the environment
"""

import os
import shutil

from pypeapp import Logger

log = Logger().get_logger(__name__, "fusion")


def _sync_utility_scripts(env=None):
    """Synchronize basic utility scripts for Fusion.

    To be able to run scripts from the `Fusion/Workspace/Scripts` menu,
    all scripts have to be accessible from the defined folder.
    """
    if not env:
        env = os.environ

    # initiate inputs
    scripts = {}
    us_env = env.get("FUSION_UTILITY_SCRIPTS_SOURCE_DIR")
    us_dir = env.get("FUSION_UTILITY_SCRIPTS_DIR", "")
    us_paths = [os.path.join(
        os.path.dirname(__file__),
        "utility_scripts"
    )]

    # collect script dirs
    if us_env:
        log.info(f"Utility Scripts Env: `{us_env}`")
        us_paths = us_env.split(
            os.pathsep) + us_paths

    # collect scripts from dirs
    for path in us_paths:
        scripts.update({path: os.listdir(path)})

    log.info(f"Utility Scripts Dir: `{us_paths}`")
    log.info(f"Utility Scripts: `{scripts}`")

    # make sure no stale script file is left in the target folder
    if next((s for s in os.listdir(us_dir)), None):
        for s in os.listdir(us_dir):
            path = os.path.normpath(
                os.path.join(us_dir, s))
            log.info(f"Removing `{path}`...")

            # remove file or directory if not in our folders
            if not os.path.isdir(path):
                os.remove(path)
            else:
                shutil.rmtree(path)

    # copy scripts into Fusion's utility scripts dir
    for d, sl in scripts.items():
        # directory and scripts list
        for s in sl:
            # script in script list
            src = os.path.normpath(os.path.join(d, s))
            dst = os.path.normpath(os.path.join(us_dir, s))

            log.info(f"Copying `{src}` to `{dst}`...")

            # copy file or directory from our folders to Fusion's folder
            if not os.path.isdir(src):
                shutil.copy2(src, dst)
            else:
                shutil.copytree(src, dst)


def setup(env=None):
    """Wrapper installer started from pype.hooks.fusion.FusionPrelaunch()"""
    if not env:
        env = os.environ

    # synchronize Fusion utility scripts
    _sync_utility_scripts(env)

    log.info("Fusion Pype wrapper has been installed")
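A short sketch of how the synchronisation above is meant to be driven, assuming the two environment variables are already provided by the launcher (the example paths are placeholders):

    import os

    from pype.hosts.fusion import utils

    env = dict(os.environ)
    # Placeholder locations; _sync_utility_scripts() clears the target dir
    # and then copies every script from the source dir(s) into it.
    env["FUSION_UTILITY_SCRIPTS_SOURCE_DIR"] = "/studio/fusion/scripts"
    env["FUSION_UTILITY_SCRIPTS_DIR"] = (
        "/home/user/.fusion/BlackmagicDesign/Fusion/Scripts/Comp")

    utils.setup(env)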
@@ -31,17 +31,17 @@ __all__ = [
]

# get logger
log = Logger().get_logger(__name__, "nukestudio")
log = Logger().get_logger(__name__, "hiero")


''' Creating all important host related variables '''
AVALON_CONFIG = os.getenv("AVALON_CONFIG", "pype")

# plugin root path
PUBLISH_PATH = os.path.join(PLUGINS_DIR, "nukestudio", "publish")
LOAD_PATH = os.path.join(PLUGINS_DIR, "nukestudio", "load")
CREATE_PATH = os.path.join(PLUGINS_DIR, "nukestudio", "create")
INVENTORY_PATH = os.path.join(PLUGINS_DIR, "nukestudio", "inventory")
PUBLISH_PATH = os.path.join(PLUGINS_DIR, "hiero", "publish")
LOAD_PATH = os.path.join(PLUGINS_DIR, "hiero", "load")
CREATE_PATH = os.path.join(PLUGINS_DIR, "hiero", "create")
INVENTORY_PATH = os.path.join(PLUGINS_DIR, "hiero", "inventory")

# registering a particular pyblish gui, but `lite` is recommended!!
if os.getenv("PYBLISH_GUI", None):
@@ -50,7 +50,7 @@ if os.getenv("PYBLISH_GUI", None):

def install():
    """
    Installing Nukestudio integration for avalon
    Installing Hiero integration for avalon

    Args:
        config (obj): avalon config module `pype` in our case, it is not
@@ -61,8 +61,8 @@ def install():
    # adding all events
    _register_events()

    log.info("Registering NukeStudio plug-ins..")
    pyblish.register_host("nukestudio")
    log.info("Registering Hiero plug-ins..")
    pyblish.register_host("hiero")
    pyblish.register_plugin_path(PUBLISH_PATH)
    avalon.register_plugin_path(avalon.Loader, LOAD_PATH)
    avalon.register_plugin_path(avalon.Creator, CREATE_PATH)
@@ -87,11 +87,11 @@ def install():

def uninstall():
    """
    Uninstalling Nukestudio integration for avalon
    Uninstalling Hiero integration for avalon

    """
    log.info("Deregistering NukeStudio plug-ins..")
    pyblish.deregister_host("nukestudio")
    log.info("Deregistering Hiero plug-ins..")
    pyblish.deregister_host("hiero")
    pyblish.deregister_plugin_path(PUBLISH_PATH)
    avalon.deregister_plugin_path(avalon.Loader, LOAD_PATH)
    avalon.deregister_plugin_path(avalon.Creator, CREATE_PATH)
@@ -102,7 +102,7 @@ def _register_events():
    Adding all callbacks.
    """

    # if the task changed, then change the context of nukestudio
    # if the task changed, then change the context of hiero
    avalon.on("taskChanged", _update_menu_task_label)
    log.info("Installed event callback for 'taskChanged'..")

@@ -4,7 +4,7 @@ from pype.api import Logger
from .lib import sync_avalon_data_to_workfile, launch_workfiles_app
from .tags import add_tags_from_presets

log = Logger().get_logger(__name__, "nukestudio")
log = Logger().get_logger(__name__, "hiero")


def startupCompleted(event):
@@ -8,7 +8,7 @@ from avalon.vendor.Qt import (QtWidgets, QtGui)
import pype.api as pype
from pype.api import Logger, Anatomy

log = Logger().get_logger(__name__, "nukestudio")
log = Logger().get_logger(__name__, "hiero")

cached_process = None

@@ -82,7 +82,7 @@ def sync_avalon_data_to_workfile():

def launch_workfiles_app(event):
    """
    Event for launching workfiles after nukestudio start
    Event for launching workfiles after hiero start

    Args:
        event (obj): required but unused
@@ -109,9 +109,9 @@ def reload_config():
        "pypeapp",
        "{}.api".format(AVALON_CONFIG),
        "{}.templates".format(AVALON_CONFIG),
        "{}.hosts.nukestudio.lib".format(AVALON_CONFIG),
        "{}.hosts.nukestudio.menu".format(AVALON_CONFIG),
        "{}.hosts.nukestudio.tags".format(AVALON_CONFIG)
        "{}.hosts.hiero.lib".format(AVALON_CONFIG),
        "{}.hosts.hiero.menu".format(AVALON_CONFIG),
        "{}.hosts.hiero.tags".format(AVALON_CONFIG)
    ):
        log.info("Reloading module: {}...".format(module))
        try:
@@ -12,7 +12,7 @@ from .lib import (
    set_workfiles
)

log = Logger().get_logger(__name__, "nukestudio")
log = Logger().get_logger(__name__, "hiero")

self = sys.modules[__name__]
self._change_context_menu = None
@@ -38,7 +38,7 @@ def _update_menu_task_label(*args):

def install():
    """
    Installing menu into Nukestudio
    Installing menu into Hiero

    """

@@ -3,10 +3,12 @@ import os
import json
import hiero

from pprint import pformat

from pype.api import Logger
from avalon import io

log = Logger().get_logger(__name__, "nukestudio")
log = Logger().get_logger(__name__, "hiero")


def tag_data():
@@ -65,21 +67,23 @@ def add_tags_from_presets():

    log.debug("Setting default tags on project: {}".format(project.name()))

    # get nukestudio tags.json
    # get hiero tags.json
    nks_pres_tags = tag_data()

    # Get project task types.
    tasks = io.find_one({"type": "project"})["config"]["tasks"]
    nks_pres_tags["[Tasks]"] = {}
    for task in tasks:
        nks_pres_tags["[Tasks]"][task["name"]] = {
    log.debug("__ tasks: {}".format(pformat(tasks)))
    for task_type in tasks.keys():
        nks_pres_tags["[Tasks]"][task_type.lower()] = {
            "editable": "1",
            "note": "",
            "icon": {
                "path": "icons:TagGood.png"
            },
            "metadata": {
                "family": "task"
                "family": "task",
                "type": task_type
            }
        }

@@ -4,11 +4,11 @@ from avalon import api
from pype.api import Logger


log = Logger().get_logger(__name__, "nukestudio")
log = Logger().get_logger(__name__, "hiero")


def file_extensions():
    return api.HOST_WORKFILE_EXTENSIONS["nukestudio"]
    return api.HOST_WORKFILE_EXTENSIONS["hiero"]


def has_unsaved_changes():
@@ -32,8 +32,19 @@ def deferred():
            command=lambda *args: BuildWorkfile().process()
        )

    def add_look_assigner_item():
        import mayalookassigner
        cmds.menuItem(
            "Look assigner",
            parent=pipeline._menu,
            command=lambda *args: mayalookassigner.show()
        )

    log.info("Attempting to install scripts menu..")

    add_build_workfiles_item()
    add_look_assigner_item()

    try:
        import scriptsmenu.launchformaya as launchformaya
        import scriptsmenu.scriptsmenu as scriptsmenu
@@ -42,7 +53,6 @@ def deferred():
            "Skipping studio.menu install, because "
            "'scriptsmenu' module seems unavailable."
        )
        add_build_workfiles_item()
        return

    # load configuration of custom menu
@@ -30,7 +30,7 @@ class ClockifySync(api.Action):

        projects_info = {}
        for project in projects_to_sync:
            task_types = [task['name'] for task in project['config']['tasks']]
            task_types = project['config']['tasks'].keys()
            projects_info[project['name']] = task_types

        clockify_projects = self.clockapi.get_projects()

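For context on this hunk and the task-related changes further down: project['config']['tasks'] moves from a list of {'name': ...} entries to a dictionary keyed by task type. A hedged sketch of the two shapes (the sample values are invented):

    # Old shape (list of dicts) vs. new shape (dict keyed by task type).
    old_config_tasks = [{"name": "Animation"}, {"name": "Compositing"}]
    new_config_tasks = {
        "Animation": {"short_name": "anim"},
        "Compositing": {"short_name": "comp"},
    }

    # Both yield the same iterable of task type names:
    assert [task["name"] for task in old_config_tasks] == \
        list(new_config_tasks.keys())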
@@ -16,6 +16,7 @@ from bson.objectid import ObjectId
from bson.errors import InvalidId
from pymongo import UpdateOne
import ftrack_api
from pype.api import config


log = Logger().get_logger(__name__)
@@ -23,9 +24,9 @@ log = Logger().get_logger(__name__)

# Current schemas for avalon types
EntitySchemas = {
    "project": "avalon-core:project-2.0",
    "project": "avalon-core:project-2.1",
    "asset": "avalon-core:asset-3.0",
    "config": "avalon-core:config-1.0"
    "config": "avalon-core:config-1.1"
}

# Group name of custom attributes
@@ -50,7 +51,7 @@ def check_regex(name, entity_type, in_schema=None, schema_patterns=None):
    if in_schema:
        schema_name = in_schema
    elif entity_type == "project":
        schema_name = "project-2.0"
        schema_name = "project-2.1"
    elif entity_type == "task":
        schema_name = "task"

@@ -103,6 +104,14 @@ def get_pype_attr(session, split_hierarchical=True):


def from_dict_to_set(data):
    """
    Converts 'data' into the $set part of a MongoDB update command.
    Args:
        data: (dictionary) - up-to-date data from Ftrack

    Returns:
        (dictionary) - { "$set" : "{..}"}
    """
    result = {"$set": {}}
    dict_queue = queue.Queue()
    dict_queue.put((None, data))
@@ -114,7 +123,8 @@ def from_dict_to_set(data):
        if _key is not None:
            new_key = "{}.{}".format(_key, key)

        if not isinstance(value, dict):
        if not isinstance(value, dict) or \
                (isinstance(value, dict) and not bool(value)):  # empty dict
            result["$set"][new_key] = value
            continue
        dict_queue.put((new_key, value))
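A small worked example of what `from_dict_to_set()` is expected to return after this change, including the empty-dict case the new condition covers (the sample document is invented):

    changes = {
        "name": "sh010",
        "data": {"fps": 25, "tools_env": {}},
    }

    # Nested keys are flattened into dotted paths; the empty dict is written
    # through as a value instead of being queued for further traversal.
    assert from_dict_to_set(changes) == {
        "$set": {
            "name": "sh010",
            "data.fps": 25,
            "data.tools_env": {},
        }
    }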
@@ -123,6 +133,8 @@ def from_dict_to_set(data):


def get_avalon_project_template(project_name):
    """Get avalon template
    Args:
        project_name: (string)
    Returns:
        dictionary with templates
    """

@@ -135,6 +147,16 @@ def get_avalon_project_template(project_name):


def get_project_apps(in_app_list):
    """
    Returns metadata information about apps in 'in_app_list' enhanced
    from toml files.
    Args:
        in_app_list: (list) - names of applications

    Returns:
        tuple (list, dictionary) - list of dictionaries about apps,
            dictionary of warnings
    """
    apps = []
    # TODO report
    missing_toml_msg = "Missing config file for application"
@@ -239,6 +261,28 @@ def get_hierarchical_attributes(session, entity, attr_names, attr_defaults={}):
    return hier_values


def get_task_short_name(task_type):
    """
    Returns the short name (code) for 'task_type'. Short names are stored
    in the metadata dictionary in project.config per each 'task_type'.
    Could be used in anatomy, paths etc.
    If no appropriate short name is found in the mapping, 'task_type' is
    returned back unchanged.

    Currently stores data in:
        'pype-config/presets/ftrack/project_defaults.json'
    Args:
        task_type: (string) - Animation | Modeling ...

    Returns:
        (string) - anim | model ...
    """
    presets = config.get_presets()['ftrack']['project_defaults']\
        .get("task_short_names")

    return presets.get(task_type, task_type)


class SyncEntitiesFactory:
    dbcon = AvalonMongoDB()

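A hedged usage sketch for `get_task_short_name()`, assuming the 'task_short_names' preset maps "Animation" to "anim" (the mapping values are examples, not taken from this commit):

    # With a preset of {"Animation": "anim"}:
    assert get_task_short_name("Animation") == "anim"

    # Unknown task types fall back to the input unchanged.
    assert get_task_short_name("Layout") == "Layout"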
@@ -378,7 +422,7 @@ class SyncEntitiesFactory:
            "custom_attributes": {},
            "hier_attrs": {},
            "avalon_attrs": {},
            "tasks": []
            "tasks": {}
        })

        for entity in all_project_entities:
@@ -389,7 +433,9 @@ class SyncEntitiesFactory:
                continue

            elif entity_type_low == "task":
                entities_dict[parent_id]["tasks"].append(entity["name"])
                # enrich task info with additional metadata
                task = {"type": entity["type"]["name"]}
                entities_dict[parent_id]["tasks"][entity["name"]] = task
                continue

            entity_id = entity["id"]
@@ -416,6 +462,13 @@ class SyncEntitiesFactory:

    @property
    def avalon_ents_by_id(self):
        """
        Returns dictionary of avalon tracked entities (assets stored in
        MongoDB) accessible by their '_id'
        (mongo internal ID - example ObjectId("5f48de5830a9467b34b69798"))
        Returns:
            (dictionary) - {"(_id)": whole entity asset}
        """
        if self._avalon_ents_by_id is None:
            self._avalon_ents_by_id = {}
            for entity in self.avalon_entities:
@@ -425,6 +478,14 @@ class SyncEntitiesFactory:

    @property
    def avalon_ents_by_ftrack_id(self):
        """
        Returns dictionary of Mongo ids of avalon tracked entities
        (assets stored in MongoDB) accessible by its 'ftrackId'
        (id from ftrack)
        (example '431ee3f2-e91a-11ea-bfa4-92591a5b5e3e')
        Returns:
            (dictionary) - {"(ftrackId)": "_id"}
        """
        if self._avalon_ents_by_ftrack_id is None:
            self._avalon_ents_by_ftrack_id = {}
            for entity in self.avalon_entities:
@@ -437,6 +498,13 @@ class SyncEntitiesFactory:

    @property
    def avalon_ents_by_name(self):
        """
        Returns dictionary of Mongo ids of avalon tracked entities
        (assets stored in MongoDB) accessible by its 'name'
        (example 'Hero')
        Returns:
            (dictionary) - {"(name)": "_id"}
        """
        if self._avalon_ents_by_name is None:
            self._avalon_ents_by_name = {}
            for entity in self.avalon_entities:
@@ -446,6 +514,15 @@ class SyncEntitiesFactory:

    @property
    def avalon_ents_by_parent_id(self):
        """
        Returns dictionary of avalon tracked entities
        (assets stored in MongoDB) accessible by its 'visualParent'
        (example ObjectId("5f48de5830a9467b34b69798"))

        Fills 'self._avalon_ents_by_parent_id' for performance
        Returns:
            (dictionary) - {"(_id)": whole entity}
        """
        if self._avalon_ents_by_parent_id is None:
            self._avalon_ents_by_parent_id = collections.defaultdict(list)
            for entity in self.avalon_entities:
@@ -458,6 +535,14 @@ class SyncEntitiesFactory:

    @property
    def avalon_archived_ents(self):
        """
        Returns list of archived assets from DB
        (their "type" == 'archived_asset')

        Fills 'self._avalon_archived_ents' for performance
        Returns:
            (list) of assets
        """
        if self._avalon_archived_ents is None:
            self._avalon_archived_ents = [
                ent for ent in self.dbcon.find({"type": "archived_asset"})
@@ -466,6 +551,14 @@ class SyncEntitiesFactory:

    @property
    def avalon_archived_by_name(self):
        """
        Returns list of archived assets from DB
        (their "type" == 'archived_asset')

        Fills 'self._avalon_archived_by_name' for performance
        Returns:
            (dictionary of lists) of assets accessible by asset name
        """
        if self._avalon_archived_by_name is None:
            self._avalon_archived_by_name = collections.defaultdict(list)
            for ent in self.avalon_archived_ents:
@@ -474,6 +567,14 @@ class SyncEntitiesFactory:

    @property
    def avalon_archived_by_id(self):
        """
        Returns dictionary of archived assets from DB
        (their "type" == 'archived_asset')

        Fills 'self._avalon_archived_by_id' for performance
        Returns:
            (dictionary) of assets accessible by asset mongo _id
        """
        if self._avalon_archived_by_id is None:
            self._avalon_archived_by_id = {
                str(ent["_id"]): ent for ent in self.avalon_archived_ents
@@ -482,6 +583,15 @@ class SyncEntitiesFactory:

    @property
    def avalon_archived_by_parent_id(self):
        """
        Returns dictionary of archived assets from DB grouped by their parent
        (their "type" == 'archived_asset')

        Fills 'self._avalon_archived_by_parent_id' for performance
        Returns:
            (dictionary of lists) of assets accessible by asset parent
            mongo _id
        """
        if self._avalon_archived_by_parent_id is None:
            self._avalon_archived_by_parent_id = collections.defaultdict(list)
            for entity in self.avalon_archived_ents:
@ -494,6 +604,14 @@ class SyncEntitiesFactory:
|
|||
|
||||
@property
|
||||
def subsets_by_parent_id(self):
|
||||
"""
|
||||
Returns dictionary of subsets from Mongo ("type": "subset")
|
||||
grouped by their parent.
|
||||
|
||||
Fills 'self._subsets_by_parent_id' for performance
|
||||
Returns:
|
||||
(dictionary of lists)
|
||||
"""
|
||||
if self._subsets_by_parent_id is None:
|
||||
self._subsets_by_parent_id = collections.defaultdict(list)
|
||||
for subset in self.dbcon.find({"type": "subset"}):
|
||||
|
|
@ -515,6 +633,11 @@ class SyncEntitiesFactory:
|
|||
|
||||
@property
|
||||
def all_ftrack_names(self):
|
||||
"""
|
||||
Returns list of names of all entities in Ftrack
|
||||
Returns:
|
||||
(list)
|
||||
"""
|
||||
return [
|
||||
ent_dict["name"] for ent_dict in self.entities_dict.values() if (
|
||||
ent_dict.get("name")
|
||||
|
|
@ -534,8 +657,9 @@ class SyncEntitiesFactory:
|
|||
name = entity_dict["name"]
|
||||
entity_type = entity_dict["entity_type"]
|
||||
# Tasks must be checked too
|
||||
for task_name in entity_dict["tasks"]:
|
||||
passed = task_names.get(task_name)
|
||||
for task in entity_dict["tasks"].items():
|
||||
task_name, task = task
|
||||
passed = task_name
|
||||
if passed is None:
|
||||
passed = check_regex(
|
||||
task_name, "task", schema_patterns=_schema_patterns
|
||||
|
|
@@ -1014,9 +1138,13 @@ class SyncEntitiesFactory:
                if not msg or not items:
                    continue
                self.report_items["warning"][msg] = items

            tasks = {}
            for tt in task_types:
                tasks[tt["name"]] = {
                    "short_name": get_task_short_name(tt["name"])
                }
            self.entities_dict[id]["final_entity"]["config"] = {
                "tasks": [{"name": tt["name"]} for tt in task_types],
                "tasks": tasks,
                "apps": proj_apps
            }
            continue

@@ -1029,7 +1157,7 @@ class SyncEntitiesFactory:

            data["parents"] = parents
            data["hierarchy"] = hierarchy
            data["tasks"] = self.entities_dict[id].pop("tasks", [])
            data["tasks"] = self.entities_dict[id].pop("tasks", {})
            self.entities_dict[id]["final_entity"]["data"] = data
            self.entities_dict[id]["final_entity"]["type"] = "asset"
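The two hunks above (together with the ftrack and Avalon publish plugins changed later in this diff) migrate the "tasks" value from a plain list of task names to a dictionary keyed by task name. A minimal sketch of the two shapes, with illustrative values only:

# Old shape: a flat list of task names.
tasks_old = ["Compositing", "Animation"]

# New shape: a dict keyed by task name, carrying per-task metadata
# such as "short_name" (project config) or "type" (ftrack plugins).
tasks_new = {
    "Compositing": {"short_name": "comp"},
    "Animation": {"short_name": "anim"},
}

# Iterating over task names keeps working with either shape:
for task_name in tasks_new:
    print(task_name, tasks_new[task_name].get("short_name"))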
@ -1904,10 +2032,10 @@ class SyncEntitiesFactory:
|
|||
filter = {"_id": ObjectId(mongo_id)}
|
||||
change_data = from_dict_to_set(changes)
|
||||
mongo_changes_bulk.append(UpdateOne(filter, change_data))
|
||||
|
||||
if not mongo_changes_bulk:
|
||||
# TODO LOG
|
||||
return
|
||||
log.debug("mongo_changes_bulk:: {}".format(mongo_changes_bulk))
|
||||
self.dbcon.bulk_write(mongo_changes_bulk)
|
||||
|
||||
def reload_parents(self, hierarchy_changing_ids):
|
||||
|
|
|
|||
|
|
@ -143,15 +143,17 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin):
|
|||
# existing_tasks.append(child['type']['name'])
|
||||
|
||||
for task in tasks:
|
||||
if task.lower() in existing_tasks:
|
||||
task_name = next(iter(task))
|
||||
task_type = task[task_name]["type"]
|
||||
if task_name.lower() in existing_tasks:
|
||||
print("Task {} already exists".format(task))
|
||||
continue
|
||||
tasks_to_create.append(task)
|
||||
tasks_to_create.append((task_name, task_type))
|
||||
|
||||
for task in tasks_to_create:
|
||||
for task_name, task_type in tasks_to_create:
|
||||
self.create_task(
|
||||
name=task,
|
||||
task_type=task,
|
||||
name=task_name,
|
||||
task_type=task_type,
|
||||
parent=entity
|
||||
)
|
||||
try:
|
||||
|
|
|
|||
|
|
@ -4,16 +4,16 @@ import avalon.api
|
|||
from avalon import fusion
|
||||
|
||||
|
||||
class CreateTiffSaver(avalon.api.Creator):
|
||||
class CreateOpenEXRSaver(avalon.api.Creator):
|
||||
|
||||
name = "tiffDefault"
|
||||
label = "Create Tiff Saver"
|
||||
name = "openexrDefault"
|
||||
label = "Create OpenEXR Saver"
|
||||
hosts = ["fusion"]
|
||||
family = "saver"
|
||||
family = "render"
|
||||
|
||||
def process(self):
|
||||
|
||||
file_format = "TiffFormat"
|
||||
file_format = "OpenEXRFormat"
|
||||
|
||||
comp = fusion.get_current_comp()
|
||||
|
||||
|
|
@ -23,7 +23,7 @@ class CreateTiffSaver(avalon.api.Creator):
|
|||
workdir = os.path.normpath(os.environ["AVALON_WORKDIR"])
|
||||
|
||||
filename = "{}..tiff".format(self.name)
|
||||
filepath = os.path.join(workdir, "render", "preview", filename)
|
||||
filepath = os.path.join(workdir, "render", filename)
|
||||
|
||||
with fusion.comp_lock_and_undo_chunk(comp):
|
||||
args = (-32768, -32768) # Magical position numbers
|
||||
|
|
@ -43,4 +43,3 @@ class CreateTiffSaver(avalon.api.Creator):
|
|||
# Set file format attributes
|
||||
saver[file_format]["Depth"] = 1 # int8 | int16 | float32 | other
|
||||
saver[file_format]["SaveAlpha"] = 0
|
||||
|
||||
|
|
@ -4,6 +4,10 @@ import contextlib
|
|||
from avalon import api
|
||||
import avalon.io as io
|
||||
|
||||
from avalon import fusion
|
||||
|
||||
comp = fusion.get_current_comp()
|
||||
|
||||
|
||||
@contextlib.contextmanager
|
||||
def preserve_inputs(tool, inputs):
|
||||
|
|
@ -113,7 +117,7 @@ def loader_shift(loader, frame, relative=True):
|
|||
class FusionLoadSequence(api.Loader):
|
||||
"""Load image sequence into Fusion"""
|
||||
|
||||
families = ["imagesequence"]
|
||||
families = ["imagesequence", "review"]
|
||||
representations = ["*"]
|
||||
|
||||
label = "Load sequence"
|
||||
|
|
@ -134,7 +138,7 @@ class FusionLoadSequence(api.Loader):
|
|||
namespace = context['asset']['name']
|
||||
|
||||
# Use the first file for now
|
||||
path = self._get_first_image(self.fname)
|
||||
path = self._get_first_image(os.path.dirname(self.fname))
|
||||
|
||||
# Create the Loader with the filename path set
|
||||
comp = get_current_comp()
|
||||
|
|
|
|||
|
|
@ -43,8 +43,8 @@ class CollectInstances(pyblish.api.ContextPlugin):
|
|||
savers = [tool for tool in tools if tool.ID == "Saver"]
|
||||
|
||||
start, end = get_comp_render_range(comp)
|
||||
context.data["frameStart"] = start
|
||||
context.data["frameEnd"] = end
|
||||
context.data["frameStart"] = int(start)
|
||||
context.data["frameEnd"] = int(end)
|
||||
|
||||
for tool in savers:
|
||||
path = tool["Clip"][comp.TIME_UNDEFINED]
|
||||
|
|
@ -76,8 +76,11 @@ class CollectInstances(pyblish.api.ContextPlugin):
|
|||
"outputDir": os.path.dirname(path),
|
||||
"ext": ext, # todo: should be redundant
|
||||
"label": label,
|
||||
"families": ["saver"],
|
||||
"family": "saver",
|
||||
"frameStart": context.data["frameStart"],
|
||||
"frameEnd": context.data["frameEnd"],
|
||||
"fps": context.data["fps"],
|
||||
"families": ["render", "review", "ftrack"],
|
||||
"family": "render",
|
||||
"active": active,
|
||||
"publish": active # backwards compatibility
|
||||
})
|
||||
|
|
|
|||
|
|
@@ -5,8 +5,8 @@ class CollectFusionRenderMode(pyblish.api.InstancePlugin):
    """Collect current comp's render Mode

    Options:
        renderlocal
        deadline
        local
        farm

    Note that this value is set for each comp separately. When you save the
    comp this information will be stored in that file. If for some reason the

@@ -23,22 +23,22 @@ class CollectFusionRenderMode(pyblish.api.InstancePlugin):
    order = pyblish.api.CollectorOrder + 0.4
    label = "Collect Render Mode"
    hosts = ["fusion"]
    families = ["saver"]
    families = ["render"]

    def process(self, instance):
        """Collect all image sequence tools"""
        options = ["renderlocal", "deadline"]
        options = ["local", "farm"]

        comp = instance.context.data.get("currentComp")
        if not comp:
            raise RuntimeError("No comp previously collected, unable to "
                               "retrieve Fusion version.")

        rendermode = comp.GetData("pype.rendermode") or "renderlocal"
        rendermode = comp.GetData("pype.rendermode") or "local"
        assert rendermode in options, "Must be supported render mode"

        self.log.info("Render mode: {0}".format(rendermode))

        # Append family
        family = "saver.{0}".format(rendermode)
        family = "render.{0}".format(rendermode)
        instance.data["families"].append(family)
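The change above renames the per-comp render mode values from "renderlocal"/"deadline" to "local"/"farm", and the derived pyblish family from "saver.<mode>" to "render.<mode>". A small illustrative sketch of how the family string is built and which plugins in this diff key off it (not the plugin code itself):

# rendermode would come from comp.GetData("pype.rendermode") in the collector.
rendermode = "local"
family = "render.{0}".format(rendermode)

# Plugins further down in this diff then filter on these families:
#   FusionRenderLocal          -> families = ["render.local"]
#   FusionSubmitDeadline       -> families = ["render.farm"]
#   FusionIncrementCurrentFile -> families = ["render.farm"]
assert family in ("render.local", "render.farm")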
|
|||
|
|
@ -11,7 +11,7 @@ class FusionIncrementCurrentFile(pyblish.api.ContextPlugin):
|
|||
label = "Increment current file"
|
||||
order = pyblish.api.IntegratorOrder + 9.0
|
||||
hosts = ["fusion"]
|
||||
families = ["saver.deadline"]
|
||||
families = ["render.farm"]
|
||||
optional = True
|
||||
|
||||
def process(self, context):
|
||||
|
|
@ -23,7 +23,7 @@ class FusionIncrementCurrentFile(pyblish.api.ContextPlugin):
|
|||
if any(plugin.__name__ == "FusionSubmitDeadline"
|
||||
for plugin in errored_plugins):
|
||||
raise RuntimeError("Skipping incrementing current file because "
|
||||
"submission to deadline failed.")
|
||||
"submission to render farm failed.")
|
||||
|
||||
comp = context.data.get("currentComp")
|
||||
assert comp, "Must have comp"
|
||||
|
|
|
|||
|
|
@ -1,98 +0,0 @@
|
|||
import re
|
||||
import os
|
||||
import json
|
||||
import subprocess
|
||||
|
||||
import pyblish.api
|
||||
|
||||
from pype.action import get_errored_plugins_from_data
|
||||
|
||||
|
||||
def _get_script():
|
||||
"""Get path to the image sequence script"""
|
||||
|
||||
# todo: use a more elegant way to get the python script
|
||||
|
||||
try:
|
||||
from pype.scripts import publish_filesequence
|
||||
except Exception:
|
||||
raise RuntimeError("Expected module 'publish_imagesequence'"
|
||||
"to be available")
|
||||
|
||||
module_path = publish_filesequence.__file__
|
||||
if module_path.endswith(".pyc"):
|
||||
module_path = module_path[:-len(".pyc")] + ".py"
|
||||
|
||||
return module_path
|
||||
|
||||
|
||||
class PublishImageSequence(pyblish.api.InstancePlugin):
|
||||
"""Publish the generated local image sequences."""
|
||||
|
||||
order = pyblish.api.IntegratorOrder
|
||||
label = "Publish Rendered Image Sequence(s)"
|
||||
hosts = ["fusion"]
|
||||
families = ["saver.renderlocal"]
|
||||
|
||||
def process(self, instance):
|
||||
|
||||
# Skip this plug-in if the ExtractImageSequence failed
|
||||
errored_plugins = get_errored_plugins_from_data(instance.context)
|
||||
if any(plugin.__name__ == "FusionRenderLocal" for plugin in
|
||||
errored_plugins):
|
||||
raise RuntimeError("Fusion local render failed, "
|
||||
"publishing images skipped.")
|
||||
|
||||
subset = instance.data["subset"]
|
||||
ext = instance.data["ext"]
|
||||
|
||||
# Regex to match resulting renders
|
||||
regex = "^{subset}.*[0-9]+{ext}+$".format(subset=re.escape(subset),
|
||||
ext=re.escape(ext))
|
||||
|
||||
# The instance has most of the information already stored
|
||||
metadata = {
|
||||
"regex": regex,
|
||||
"frameStart": instance.context.data["frameStart"],
|
||||
"frameEnd": instance.context.data["frameEnd"],
|
||||
"families": ["imagesequence"],
|
||||
}
|
||||
|
||||
# Write metadata and store the path in the instance
|
||||
output_directory = instance.data["outputDir"]
|
||||
path = os.path.join(output_directory,
|
||||
"{}_metadata.json".format(subset))
|
||||
with open(path, "w") as f:
|
||||
json.dump(metadata, f)
|
||||
|
||||
assert os.path.isfile(path), ("Stored path is not a file for %s"
|
||||
% instance.data["name"])
|
||||
|
||||
# Suppress any subprocess console
|
||||
startupinfo = subprocess.STARTUPINFO()
|
||||
startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW
|
||||
startupinfo.wShowWindow = subprocess.SW_HIDE
|
||||
|
||||
process = subprocess.Popen(["python", _get_script(),
|
||||
"--paths", path],
|
||||
bufsize=1,
|
||||
stdout=subprocess.PIPE,
|
||||
stderr=subprocess.STDOUT,
|
||||
startupinfo=startupinfo)
|
||||
|
||||
while True:
|
||||
output = process.stdout.readline()
|
||||
# Break when there is no output or a return code has been given
|
||||
if output == '' and process.poll() is not None:
|
||||
process.stdout.close()
|
||||
break
|
||||
if output:
|
||||
line = output.strip()
|
||||
if line.startswith("ERROR"):
|
||||
self.log.error(line)
|
||||
else:
|
||||
self.log.info(line)
|
||||
|
||||
if process.returncode != 0:
|
||||
raise RuntimeError("Process quit with non-zero "
|
||||
"return code: {}".format(process.returncode))
|
||||
|
|
@ -1,9 +1,11 @@
|
|||
import os
|
||||
import pyblish.api
|
||||
|
||||
import avalon.fusion as fusion
|
||||
from pprint import pformat
|
||||
|
||||
|
||||
class FusionRenderLocal(pyblish.api.InstancePlugin):
|
||||
class Fusionlocal(pyblish.api.InstancePlugin):
|
||||
"""Render the current Fusion composition locally.
|
||||
|
||||
Extract the result of savers by starting a comp render
|
||||
|
|
@ -11,15 +13,13 @@ class FusionRenderLocal(pyblish.api.InstancePlugin):
|
|||
|
||||
"""
|
||||
|
||||
order = pyblish.api.ExtractorOrder
|
||||
order = pyblish.api.ExtractorOrder - 0.1
|
||||
label = "Render Local"
|
||||
hosts = ["fusion"]
|
||||
families = ["saver.renderlocal"]
|
||||
families = ["render.local"]
|
||||
|
||||
def process(self, instance):
|
||||
|
||||
# This should be a ContextPlugin, but this is a workaround
|
||||
# for a bug in pyblish to run once for a family: issue #250
|
||||
context = instance.context
|
||||
key = "__hasRun{}".format(self.__class__.__name__)
|
||||
if context.data.get(key, False):
|
||||
|
|
@ -28,15 +28,40 @@ class FusionRenderLocal(pyblish.api.InstancePlugin):
|
|||
context.data[key] = True
|
||||
|
||||
current_comp = context.data["currentComp"]
|
||||
start_frame = current_comp.GetAttrs("COMPN_RenderStart")
|
||||
end_frame = current_comp.GetAttrs("COMPN_RenderEnd")
|
||||
frame_start = current_comp.GetAttrs("COMPN_RenderStart")
|
||||
frame_end = current_comp.GetAttrs("COMPN_RenderEnd")
|
||||
path = instance.data["path"]
|
||||
output_dir = instance.data["outputDir"]
|
||||
|
||||
ext = os.path.splitext(os.path.basename(path))[-1]
|
||||
|
||||
self.log.info("Starting render")
|
||||
self.log.info("Start frame: {}".format(start_frame))
|
||||
self.log.info("End frame: {}".format(end_frame))
|
||||
self.log.info("Start frame: {}".format(frame_start))
|
||||
self.log.info("End frame: {}".format(frame_end))
|
||||
|
||||
with fusion.comp_lock_and_undo_chunk(current_comp):
|
||||
result = current_comp.Render()
|
||||
|
||||
if "representations" not in instance.data:
|
||||
instance.data["representations"] = []
|
||||
|
||||
collected_frames = os.listdir(output_dir)
|
||||
repre = {
|
||||
'name': ext[1:],
|
||||
'ext': ext[1:],
|
||||
'frameStart': "%0{}d".format(len(str(frame_end))) % frame_start,
|
||||
'files': collected_frames,
|
||||
"stagingDir": output_dir,
|
||||
}
|
||||
instance.data["representations"].append(repre)
|
||||
|
||||
# review representation
|
||||
repre_preview = repre.copy()
|
||||
repre_preview["name"] = repre_preview["ext"] = "mp4"
|
||||
repre_preview["tags"] = ["review", "preview", "ftrackreview", "delete"]
|
||||
instance.data["representations"].append(repre_preview)
|
||||
|
||||
self.log.debug(f"_ instance.data: {pformat(instance.data)}")
|
||||
|
||||
if not result:
|
||||
raise RuntimeError("Comp render failed")
|
||||
|
|
|
|||
|
|
@ -7,7 +7,7 @@ class FusionSaveComp(pyblish.api.ContextPlugin):
|
|||
label = "Save current file"
|
||||
order = pyblish.api.ExtractorOrder - 0.49
|
||||
hosts = ["fusion"]
|
||||
families = ["saver"]
|
||||
families = ["render"]
|
||||
|
||||
def process(self, context):
|
||||
|
||||
|
|
|
|||
|
|
@ -19,10 +19,9 @@ class FusionSubmitDeadline(pyblish.api.InstancePlugin):
|
|||
label = "Submit to Deadline"
|
||||
order = pyblish.api.IntegratorOrder
|
||||
hosts = ["fusion"]
|
||||
families = ["saver.deadline"]
|
||||
families = ["render.farm"]
|
||||
|
||||
def process(self, instance):
|
||||
instance.data["toBeRenderedOn"] = "deadline"
|
||||
context = instance.context
|
||||
|
||||
key = "__hasRun{}".format(self.__class__.__name__)
|
||||
|
|
|
|||
|
|
@ -10,7 +10,7 @@ class ValidateBackgroundDepth(pyblish.api.InstancePlugin):
|
|||
label = "Validate Background Depth 32 bit"
|
||||
actions = [action.RepairAction]
|
||||
hosts = ["fusion"]
|
||||
families = ["saver"]
|
||||
families = ["render"]
|
||||
optional = True
|
||||
|
||||
@classmethod
|
||||
|
|
|
|||
|
|
@ -8,7 +8,7 @@ class ValidateFusionCompSaved(pyblish.api.ContextPlugin):
|
|||
|
||||
order = pyblish.api.ValidatorOrder
|
||||
label = "Validate Comp Saved"
|
||||
families = ["saver"]
|
||||
families = ["render"]
|
||||
hosts = ["fusion"]
|
||||
|
||||
def process(self, context):
|
||||
|
|
|
|||
|
|
@ -13,7 +13,7 @@ class ValidateCreateFolderChecked(pyblish.api.InstancePlugin):
|
|||
order = pyblish.api.ValidatorOrder
|
||||
actions = [action.RepairAction]
|
||||
label = "Validate Create Folder Checked"
|
||||
families = ["saver"]
|
||||
families = ["render"]
|
||||
hosts = ["fusion"]
|
||||
|
||||
@classmethod
|
||||
|
|
|
|||
|
|
@ -14,7 +14,7 @@ class ValidateFilenameHasExtension(pyblish.api.InstancePlugin):
|
|||
|
||||
order = pyblish.api.ValidatorOrder
|
||||
label = "Validate Filename Has Extension"
|
||||
families = ["saver"]
|
||||
families = ["render"]
|
||||
hosts = ["fusion"]
|
||||
|
||||
def process(self, instance):
|
||||
|
|
|
|||
|
|
@ -10,7 +10,7 @@ class ValidateSaverHasInput(pyblish.api.InstancePlugin):
|
|||
|
||||
order = pyblish.api.ValidatorOrder
|
||||
label = "Validate Saver Has Input"
|
||||
families = ["saver"]
|
||||
families = ["render"]
|
||||
hosts = ["fusion"]
|
||||
|
||||
@classmethod
|
||||
|
|
|
|||
|
|
@ -6,7 +6,7 @@ class ValidateSaverPassthrough(pyblish.api.ContextPlugin):
|
|||
|
||||
order = pyblish.api.ValidatorOrder
|
||||
label = "Validate Saver Passthrough"
|
||||
families = ["saver"]
|
||||
families = ["render"]
|
||||
hosts = ["fusion"]
|
||||
|
||||
def process(self, context):
|
||||
|
|
|
|||
|
|
@ -6,7 +6,7 @@ class ValidateUniqueSubsets(pyblish.api.InstancePlugin):
|
|||
|
||||
order = pyblish.api.ValidatorOrder
|
||||
label = "Validate Unique Subsets"
|
||||
families = ["saver"]
|
||||
families = ["render"]
|
||||
hosts = ["fusion"]
|
||||
|
||||
@classmethod
|
||||
|
|
@ -14,7 +14,7 @@ class ValidateUniqueSubsets(pyblish.api.InstancePlugin):
|
|||
|
||||
context = instance.context
|
||||
subset = instance.data["subset"]
|
||||
for other_instance in context[:]:
|
||||
for other_instance in context:
|
||||
if other_instance == instance:
|
||||
continue
|
||||
|
||||
|
|
|
|||
|
|
@ -86,3 +86,5 @@ class CollectAvalonEntities(pyblish.api.ContextPlugin):
|
|||
frame_end_h = frame_end + context.data["handleEnd"]
|
||||
context.data["frameStartHandle"] = frame_start_h
|
||||
context.data["frameEndHandle"] = frame_end_h
|
||||
|
||||
context.data["fps"] = data["fps"]
|
||||
|
|
|
|||
|
|
@ -13,7 +13,7 @@ class CollectCurrentUserPype(pyblish.api.ContextPlugin):
|
|||
def process(self, context):
|
||||
user = os.getenv("PYPE_USERNAME", "").strip()
|
||||
if not user:
|
||||
return
|
||||
user = context.data.get("user", getpass.getuser())
|
||||
|
||||
context.data["user"] = user
|
||||
self.log.debug("Pype user is \"{}\"".format(user))
|
||||
self.log.debug("Colected user \"{}\"".format(user))
|
||||
|
|
|
|||
|
|
@ -23,10 +23,11 @@ class ExtractBurnin(pype.api.Extractor):
|
|||
"nuke",
|
||||
"maya",
|
||||
"shell",
|
||||
"nukestudio",
|
||||
"hiero",
|
||||
"premiere",
|
||||
"standalonepublisher",
|
||||
"harmony"
|
||||
"fusion"
|
||||
]
|
||||
optional = True
|
||||
|
||||
|
|
@ -314,12 +315,15 @@ class ExtractBurnin(pype.api.Extractor):
|
|||
"comment": context.data.get("comment") or ""
|
||||
})
|
||||
|
||||
intent_label = context.data.get("intent")
|
||||
intent_label = context.data.get("intent") or ""
|
||||
if intent_label and isinstance(intent_label, dict):
|
||||
intent_label = intent_label.get("label")
|
||||
value = intent_label.get("value")
|
||||
if value:
|
||||
intent_label = intent_label["label"]
|
||||
else:
|
||||
intent_label = ""
|
||||
|
||||
if intent_label:
|
||||
burnin_data["intent"] = intent_label
|
||||
burnin_data["intent"] = intent_label
|
||||
|
||||
temp_data = {
|
||||
"frame_start": frame_start,
|
||||
|
|
|
|||
|
|
@ -59,7 +59,7 @@ class ExtractHierarchyToAvalon(pyblish.api.ContextPlugin):
|
|||
data["inputs"] = entity_data.get("inputs", [])
|
||||
|
||||
# Tasks.
|
||||
tasks = entity_data.get("tasks", [])
|
||||
tasks = entity_data.get("tasks", {})
|
||||
if tasks is not None or len(tasks) > 0:
|
||||
data["tasks"] = tasks
|
||||
parents = []
|
||||
|
|
@ -99,11 +99,14 @@ class ExtractHierarchyToAvalon(pyblish.api.ContextPlugin):
|
|||
if entity:
|
||||
# Do not override data, only update
|
||||
cur_entity_data = entity.get("data") or {}
|
||||
new_tasks = data.pop("tasks", [])
|
||||
if "tasks" in cur_entity_data and new_tasks:
|
||||
for task_name in new_tasks:
|
||||
if task_name not in cur_entity_data["tasks"]:
|
||||
cur_entity_data["tasks"].append(task_name)
|
||||
new_tasks = data.pop("tasks", {})
|
||||
if "tasks" not in cur_entity_data and not new_tasks:
|
||||
continue
|
||||
for task in new_tasks:
|
||||
task_name = next(iter(task))
|
||||
if task_name in cur_entity_data["tasks"].keys():
|
||||
continue
|
||||
cur_entity_data["tasks"][task_name] = task[task_name]
|
||||
cur_entity_data.update(data)
|
||||
data = cur_entity_data
|
||||
else:
|
||||
|
|
|
|||
|
|
@ -9,7 +9,7 @@ class ExtractJpegEXR(pyblish.api.InstancePlugin):
|
|||
"""Create jpg thumbnail from sequence using ffmpeg"""
|
||||
|
||||
label = "Extract Jpeg EXR"
|
||||
hosts = ["shell"]
|
||||
hosts = ["shell", "fusion"]
|
||||
order = pyblish.api.ExtractorOrder
|
||||
families = ["imagesequence", "render", "render2d", "source"]
|
||||
enabled = False
|
||||
|
|
|
|||
|
|
@ -26,10 +26,11 @@ class ExtractReview(pyblish.api.InstancePlugin):
|
|||
"nuke",
|
||||
"maya",
|
||||
"shell",
|
||||
"nukestudio",
|
||||
"hiero",
|
||||
"premiere",
|
||||
"harmony",
|
||||
"standalonepublisher"
|
||||
"standalonepublisher",
|
||||
"fusion"
|
||||
]
|
||||
|
||||
# Supported extensions
|
||||
|
|
|
|||
89
pype/plugins/global/publish/extract_scanline_exr.py
Normal file
|
|
@ -0,0 +1,89 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""Convert exrs in representation to tiled exrs usin oiio tools."""
|
||||
import os
|
||||
import shutil
|
||||
|
||||
import pyblish.api
|
||||
import pype.api
|
||||
import pype.lib
|
||||
|
||||
|
||||
class ExtractScanlineExr(pyblish.api.InstancePlugin):
|
||||
"""Convert tiled EXRs to scanline using OIIO tool."""
|
||||
|
||||
label = "Extract Scanline EXR"
|
||||
hosts = ["shell"]
|
||||
order = pyblish.api.ExtractorOrder
|
||||
families = ["imagesequence", "render", "render2d", "source"]
|
||||
|
||||
def process(self, instance):
|
||||
"""Plugin entry point."""
|
||||
# get representation and loop them
|
||||
representations = instance.data["representations"]
|
||||
|
||||
representations_new = []
|
||||
|
||||
for repre in representations:
|
||||
self.log.info(
|
||||
"Processing representation {}".format(repre.get("name")))
|
||||
tags = repre.get("tags", [])
|
||||
if "toScanline" not in tags:
|
||||
self.log.info(" - missing toScanline tag")
|
||||
continue
|
||||
|
||||
# run only on exrs
|
||||
if repre.get("ext") != "exr":
|
||||
self.log.info("- not EXR files")
|
||||
continue
|
||||
|
||||
if not isinstance(repre['files'], (list, tuple)):
|
||||
input_files = [repre['files']]
|
||||
self.log.info("We have a single frame")
|
||||
else:
|
||||
input_files = repre['files']
|
||||
self.log.info("We have a sequence")
|
||||
|
||||
stagingdir = os.path.normpath(repre.get("stagingDir"))
|
||||
|
||||
oiio_tool_path = os.getenv("PYPE_OIIO_PATH", "")
|
||||
|
||||
for file in input_files:
|
||||
|
||||
original_name = os.path.join(stagingdir, file)
|
||||
temp_name = os.path.join(stagingdir, "__{}".format(file))
|
||||
# move original render to temp location
|
||||
shutil.move(original_name, temp_name)
|
||||
oiio_cmd = []
|
||||
oiio_cmd.append(oiio_tool_path)
|
||||
oiio_cmd.append(
|
||||
os.path.join(stagingdir, temp_name)
|
||||
)
|
||||
oiio_cmd.append("--scanline")
|
||||
oiio_cmd.append("-o")
|
||||
oiio_cmd.append(os.path.join(stagingdir, original_name))
|
||||
|
||||
subprocess_exr = " ".join(oiio_cmd)
|
||||
self.log.info(f"running: {subprocess_exr}")
|
||||
pype.api.subprocess(subprocess_exr)
|
||||
|
||||
# raise error if there is no output
|
||||
if not os.path.exists(os.path.join(stagingdir, original_name)):
|
||||
self.log.error(
|
||||
("File {} was not converted "
|
||||
"by oiio tool!").format(original_name))
|
||||
raise AssertionError("OIIO tool conversion failed")
|
||||
else:
|
||||
try:
|
||||
os.remove(temp_name)
|
||||
except OSError as e:
|
||||
self.log.warning("Unable to delete temp file")
|
||||
self.log.warning(e)
|
||||
|
||||
repre['name'] = 'exr'
|
||||
try:
|
||||
repre['tags'].remove('toScanline')
|
||||
except ValueError:
|
||||
# no `toScanline` tag present
|
||||
pass
|
||||
|
||||
instance.data["representations"] += representations_new
|
||||
|
|
@ -521,8 +521,8 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
|
|||
# get 'files' info for representation and all attached resources
|
||||
self.log.debug("Preparing files information ...")
|
||||
representation["files"] = self.get_files_info(
|
||||
instance,
|
||||
self.integrated_file_sizes)
|
||||
instance,
|
||||
self.integrated_file_sizes)
|
||||
|
||||
self.log.debug("__ representation: {}".format(representation))
|
||||
destination_list.append(dst)
|
||||
|
|
@ -543,10 +543,9 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
|
|||
repre_ids_to_remove.append(repre["_id"])
|
||||
io.delete_many({"_id": {"$in": repre_ids_to_remove}})
|
||||
|
||||
self.log.debug("__ representations: {}".format(representations))
|
||||
for rep in instance.data["representations"]:
|
||||
self.log.debug("__ represNAME: {}".format(rep['name']))
|
||||
self.log.debug("__ represPATH: {}".format(rep['published_path']))
|
||||
self.log.debug("__ rep: {}".format(rep))
|
||||
|
||||
io.insert_many(representations)
|
||||
instance.data["published_representations"] = (
|
||||
published_representations
|
||||
|
|
|
|||
|
|
@ -174,7 +174,8 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
|
|||
"FTRACK_SERVER",
|
||||
"PYPE_METADATA_FILE",
|
||||
"AVALON_PROJECT",
|
||||
"PYPE_LOG_NO_COLORS"
|
||||
"PYPE_LOG_NO_COLORS",
|
||||
"PYPE_USERNAME"
|
||||
]
|
||||
|
||||
# custom deadline atributes
|
||||
|
|
@ -193,7 +194,8 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
|
|||
"slate": ["slateFrame"],
|
||||
"review": ["lutPath"],
|
||||
"render2d": ["bakeScriptPath", "bakeRenderPath",
|
||||
"bakeWriteNodeName", "version"]
|
||||
"bakeWriteNodeName", "version"],
|
||||
"renderlayer": ["convertToScanline"]
|
||||
}
|
||||
|
||||
# list of family names to transfer to new family if present
|
||||
|
|
@ -297,6 +299,7 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
|
|||
environment["PYPE_METADATA_FILE"] = roothless_metadata_path
|
||||
environment["AVALON_PROJECT"] = io.Session["AVALON_PROJECT"]
|
||||
environment["PYPE_LOG_NO_COLORS"] = "1"
|
||||
environment["PYPE_USERNAME"] = instance.context.data["user"]
|
||||
try:
|
||||
environment["PYPE_PYTHON_EXE"] = os.environ["PYPE_PYTHON_EXE"]
|
||||
except KeyError:
|
||||
|
|
@ -491,6 +494,11 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
|
|||
"tags": ["review"] if preview else []
|
||||
}
|
||||
|
||||
# support conversion from tiled to scanline
|
||||
if instance_data.get("convertToScanline"):
|
||||
self.log.info("Adding scanline conversion.")
|
||||
rep["tags"].append("toScanline")
|
||||
|
||||
# poor man exclusion
|
||||
if ext in self.skip_integration_repre_list:
|
||||
rep["tags"].append("delete")
|
||||
|
|
@ -581,6 +589,11 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
|
|||
if instance.get("multipartExr", False):
|
||||
rep["tags"].append("multipartExr")
|
||||
|
||||
# support conversion from tiled to scanline
|
||||
if instance.get("convertToScanline"):
|
||||
self.log.info("Adding scanline conversion.")
|
||||
rep["tags"].append("toScanline")
|
||||
|
||||
representations.append(rep)
|
||||
|
||||
self._solve_families(instance, preview)
|
||||
|
|
@ -727,6 +740,11 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
|
|||
"family": "prerender",
|
||||
"families": []})
|
||||
|
||||
# skip locking version if we are creating v01
|
||||
instance_version = instance.data.get("version")
|
||||
if instance_version != 1:
|
||||
instance_skeleton_data["version"] = instance_version
|
||||
|
||||
# transfer specific families from original instance to new render
|
||||
for item in self.families_transfer:
|
||||
if item in instance.data.get("families", []):
|
||||
|
|
|
|||
|
|
@ -46,7 +46,7 @@ class ValidateFtrackAttributes(pyblish.api.InstancePlugin):
|
|||
"houdini",
|
||||
"maya",
|
||||
"nuke",
|
||||
"nukestudio",
|
||||
"hiero",
|
||||
"photoshop",
|
||||
"premiere",
|
||||
"resolve",
|
||||
|
|
|
|||
|
|
@ -6,7 +6,7 @@ class CollectClipMetadata(api.InstancePlugin):
|
|||
|
||||
order = api.CollectorOrder + 0.01
|
||||
label = "Collect Metadata"
|
||||
hosts = ["nukestudio"]
|
||||
hosts = ["hiero"]
|
||||
|
||||
def process(self, instance):
|
||||
item = instance.data["item"]
|
||||
|
|
@ -10,7 +10,7 @@ class CollectClipTimecodes(pyblish.api.InstancePlugin):
|
|||
|
||||
order = pyblish.api.CollectorOrder + 0.101
|
||||
label = "Collect Timecodes"
|
||||
hosts = ["nukestudio"]
|
||||
hosts = ["hiero"]
|
||||
|
||||
def process(self, instance):
|
||||
|
||||
|
|
@ -8,7 +8,7 @@ class ExtractPlateCheck(api.ContextPlugin):
|
|||
|
||||
order = api.ExtractorOrder + 0.01
|
||||
label = "Plates Export Waiting"
|
||||
hosts = ["nukestudio"]
|
||||
hosts = ["hiero"]
|
||||
families = ["encode"]
|
||||
|
||||
def process(self, context):
|
||||
|
|
@ -6,7 +6,7 @@ class ExtractTasks(api.InstancePlugin):
|
|||
|
||||
order = api.ExtractorOrder
|
||||
label = "Tasks"
|
||||
hosts = ["nukestudio"]
|
||||
hosts = ["hiero"]
|
||||
families = ["clip"]
|
||||
optional = True
|
||||
|
||||
|
|
@ -22,7 +22,7 @@ class ValidateProjectRoot(api.ContextPlugin):
|
|||
|
||||
order = api.ValidatorOrder
|
||||
label = "Project Root"
|
||||
hosts = ["nukestudio"]
|
||||
hosts = ["hiero"]
|
||||
actions = [RepairProjectRoot]
|
||||
|
||||
def process(self, context):
|
||||
|
|
@ -5,7 +5,7 @@ class ValidateResolvedPaths(api.ContextPlugin):
|
|||
|
||||
order = api.ValidatorOrder
|
||||
label = "Resolved Paths"
|
||||
hosts = ["nukestudio"]
|
||||
hosts = ["hiero"]
|
||||
|
||||
def process(self, context):
|
||||
import os
|
||||
|
|
@ -13,7 +13,7 @@ class ValidateOutputRange(api.InstancePlugin):
|
|||
order = api.ValidatorOrder
|
||||
families = ["trackItem.task"]
|
||||
label = "Output Range"
|
||||
hosts = ["nukestudio"]
|
||||
hosts = ["hiero"]
|
||||
optional = True
|
||||
|
||||
def process(self, instance):
|
||||
|
|
@ -43,7 +43,7 @@ class ValidateImageSequence(api.InstancePlugin):
|
|||
families = ["trackItem.task", "img"]
|
||||
match = api.Subset
|
||||
label = "Image Sequence"
|
||||
hosts = ["nukestudio"]
|
||||
hosts = ["hiero"]
|
||||
optional = True
|
||||
|
||||
def process(self, instance):
|
||||
|
|
@ -10,7 +10,7 @@ class ValidateClip(api.InstancePlugin):
|
|||
families = ["clip"]
|
||||
# match = api.Exact
|
||||
label = "Validate Track Item"
|
||||
hosts = ["nukestudio"]
|
||||
hosts = ["hiero"]
|
||||
optional = True
|
||||
|
||||
def process(self, instance):
|
||||
|
|
@ -2,11 +2,11 @@ from pyblish import api
|
|||
|
||||
|
||||
class ValidateViewerLut(api.ContextPlugin):
|
||||
"""Validate viewer lut in NukeStudio is the same as in Nuke."""
|
||||
"""Validate viewer lut in Hiero is the same as in Nuke."""
|
||||
|
||||
order = api.ValidatorOrder
|
||||
label = "Viewer LUT"
|
||||
hosts = ["nukestudio"]
|
||||
hosts = ["hiero"]
|
||||
optional = True
|
||||
|
||||
def process(self, context):
|
||||
|
|
@ -14,8 +14,8 @@ class ValidateViewerLut(api.ContextPlugin):
|
|||
import hiero
|
||||
|
||||
# nuke_lut = nuke.ViewerProcess.node()["current"].value()
|
||||
nukestudio_lut = context.data["activeProject"].lutSettingViewer()
|
||||
self.log.info("__ nukestudio_lut: {}".format(nukestudio_lut))
|
||||
hiero_lut = context.data["activeProject"].lutSettingViewer()
|
||||
self.log.info("__ hiero_lut: {}".format(hiero_lut))
|
||||
|
||||
msg = "Viewer LUT can only be RGB"
|
||||
assert "RGB" in nukestudio_lut, msg
|
||||
assert "RGB" in hiero_lut, msg
|
||||
|
|
@ -1,6 +1,6 @@
|
|||
from avalon import api
|
||||
import hiero
|
||||
from pype.hosts.nukestudio import lib
|
||||
from pype.hosts.hiero import lib
|
||||
reload(lib)
|
||||
|
||||
|
||||
|
|
@ -14,7 +14,7 @@ class CollectAssetBuilds(api.ContextPlugin):
|
|||
# Run just after CollectClip
|
||||
order = api.CollectorOrder + 0.02
|
||||
label = "Collect AssetBuilds"
|
||||
hosts = ["nukestudio"]
|
||||
hosts = ["hiero"]
|
||||
|
||||
def process(self, context):
|
||||
asset_builds = {}
|
||||
|
|
@ -14,7 +14,7 @@ class CollectAudio(api.InstancePlugin):
|
|||
# Run just before CollectSubsets
|
||||
order = api.CollectorOrder + 0.1021
|
||||
label = "Collect Audio"
|
||||
hosts = ["nukestudio"]
|
||||
hosts = ["hiero"]
|
||||
families = ["clip"]
|
||||
|
||||
def process(self, instance):
|
||||
|
|
@ -8,7 +8,7 @@ class CollectCalculateRetime(api.InstancePlugin):
|
|||
|
||||
order = api.CollectorOrder + 0.02
|
||||
label = "Collect Calculate Retiming"
|
||||
hosts = ["nukestudio"]
|
||||
hosts = ["hiero"]
|
||||
families = ['retime']
|
||||
|
||||
def process(self, instance):
|
||||
|
|
@ -6,7 +6,7 @@ class CollectClipResolution(pyblish.api.InstancePlugin):
|
|||
|
||||
order = pyblish.api.CollectorOrder + 0.101
|
||||
label = "Collect Clip Resoluton"
|
||||
hosts = ["nukestudio"]
|
||||
hosts = ["hiero"]
|
||||
|
||||
def process(self, instance):
|
||||
sequence = instance.context.data['activeSequence']
|
||||
|
|
@ -9,7 +9,7 @@ class CollectClips(api.ContextPlugin):
|
|||
|
||||
order = api.CollectorOrder + 0.01
|
||||
label = "Collect Clips"
|
||||
hosts = ["nukestudio"]
|
||||
hosts = ["hiero"]
|
||||
|
||||
def process(self, context):
|
||||
# create asset_names conversion table
|
||||
|
|
@ -6,7 +6,7 @@ class CollectClipFrameRanges(pyblish.api.InstancePlugin):
|
|||
|
||||
order = pyblish.api.CollectorOrder + 0.101
|
||||
label = "Collect Frame Ranges"
|
||||
hosts = ["nukestudio"]
|
||||
hosts = ["hiero"]
|
||||
|
||||
def process(self, instance):
|
||||
|
||||
|
|
@ -6,7 +6,7 @@ class CollectFramerate(api.ContextPlugin):
|
|||
|
||||
order = api.CollectorOrder + 0.01
|
||||
label = "Collect Framerate"
|
||||
hosts = ["nukestudio"]
|
||||
hosts = ["hiero"]
|
||||
|
||||
def process(self, context):
|
||||
sequence = context.data["activeSequence"]
|
||||
|
|
@ -6,7 +6,7 @@ class CollectClipHandles(api.ContextPlugin):
|
|||
|
||||
order = api.CollectorOrder + 0.0121
|
||||
label = "Collect Handles"
|
||||
hosts = ["nukestudio"]
|
||||
hosts = ["hiero"]
|
||||
|
||||
def process(self, context):
|
||||
assets_shared = context.data.get("assetsShared")
|
||||
|
|
@ -13,7 +13,7 @@ class CollectHierarchyInstance(pyblish.api.ContextPlugin):
|
|||
"""
|
||||
|
||||
label = "Collect Hierarchy Clip"
|
||||
order = pyblish.api.CollectorOrder + 0.101
|
||||
order = pyblish.api.CollectorOrder + 0.102
|
||||
families = ["clip"]
|
||||
|
||||
def convert_to_entity(self, key, value):
|
||||
|
|
@ -46,7 +46,7 @@ class CollectHierarchyInstance(pyblish.api.ContextPlugin):
|
|||
clip_out = instance.data["clipOut"]
|
||||
fps = context.data["fps"]
|
||||
|
||||
# build data for inner nukestudio project property
|
||||
# build data for inner hiero project property
|
||||
data = {
|
||||
"sequence": (
|
||||
context.data['activeSequence'].name().replace(' ', '_')
|
||||
|
|
@ -6,7 +6,7 @@ class CollectLeaderClip(api.InstancePlugin):
|
|||
|
||||
order = api.CollectorOrder + 0.0111
|
||||
label = "Collect Leader Clip"
|
||||
hosts = ["nukestudio"]
|
||||
hosts = ["hiero"]
|
||||
families = ['clip']
|
||||
|
||||
def process(self, instance):
|
||||
|
|
@ -16,7 +16,7 @@ class CollectPlates(api.InstancePlugin):
|
|||
# Run just before CollectSubsets
|
||||
order = api.CollectorOrder + 0.1021
|
||||
label = "Collect Plates"
|
||||
hosts = ["nukestudio"]
|
||||
hosts = ["hiero"]
|
||||
families = ["clip"]
|
||||
|
||||
def process(self, instance):
|
||||
|
|
@ -85,7 +85,7 @@ class CollectPlatesData(api.InstancePlugin):
|
|||
|
||||
order = api.CollectorOrder + 0.48
|
||||
label = "Collect Plates Data"
|
||||
hosts = ["nukestudio"]
|
||||
hosts = ["hiero"]
|
||||
families = ["plate"]
|
||||
|
||||
def process(self, instance):
|
||||
|
|
@ -192,16 +192,17 @@ class CollectPlatesData(api.InstancePlugin):
|
|||
instance.data["representations"].append(
|
||||
plates_mov_representation)
|
||||
|
||||
thumb_file = head + ".png"
|
||||
thumb_frame = instance.data["clipInH"] + (
|
||||
(instance.data["clipOutH"] - instance.data["clipInH"]) / 2)
|
||||
thumb_file = "{}_{}{}".format(head, thumb_frame, ".png")
|
||||
thumb_path = os.path.join(staging_dir, thumb_file)
|
||||
thumb_frame = instance.data["sourceIn"] + ((instance.data["sourceOut"] - instance.data["sourceIn"])/2)
|
||||
|
||||
thumbnail = item.thumbnail(thumb_frame).save(
|
||||
thumb_path,
|
||||
format='png'
|
||||
)
|
||||
self.log.debug("__ sourceIn: `{}`".format(instance.data["sourceIn"]))
|
||||
self.log.debug("__ thumbnail: `{}`, frame: `{}`".format(thumbnail, thumb_frame))
|
||||
self.log.debug("__ thumbnail: `{}`, frame: `{}`".format(
|
||||
thumbnail, thumb_frame))
|
||||
|
||||
thumb_representation = {
|
||||
'files': thumb_file,
|
||||
|
|
@ -5,7 +5,7 @@ class CollectClipSubsets(api.InstancePlugin):
|
|||
|
||||
order = api.CollectorOrder + 0.103
|
||||
label = "Collect Remove Clip Instaces"
|
||||
hosts = ["nukestudio"]
|
||||
hosts = ["hiero"]
|
||||
families = ['clip']
|
||||
|
||||
def process(self, instance):
|
||||
|
|
@ -15,7 +15,7 @@ class CollectReviews(api.InstancePlugin):
|
|||
# Run just before CollectSubsets
|
||||
order = api.CollectorOrder + 0.1022
|
||||
label = "Collect Reviews"
|
||||
hosts = ["nukestudio"]
|
||||
hosts = ["hiero"]
|
||||
families = ["plate"]
|
||||
|
||||
def process(self, instance):
|
||||
|
|
@ -142,12 +142,12 @@ class CollectReviews(api.InstancePlugin):
|
|||
staging_dir = os.path.dirname(
|
||||
source_path)
|
||||
|
||||
thumb_file = head + ".png"
|
||||
thumb_frame = instance.data["clipInH"] + (
|
||||
(instance.data["clipOutH"] - instance.data["clipInH"]) / 2)
|
||||
thumb_file = "{}_{}{}".format(head, thumb_frame, ".png")
|
||||
thumb_path = os.path.join(staging_dir, thumb_file)
|
||||
self.log.debug("__ thumb_path: {}".format(thumb_path))
|
||||
|
||||
thumb_frame = instance.data["sourceIn"] + (
|
||||
(instance.data["sourceOut"] - instance.data["sourceIn"]) / 2)
|
||||
self.log.debug("__ thumb_frame: {}".format(thumb_frame))
|
||||
thumbnail = item.thumbnail(thumb_frame).save(
|
||||
thumb_path,
|
||||
|
|
@ -7,7 +7,7 @@ class CollectSequence(api.ContextPlugin):
|
|||
|
||||
order = api.CollectorOrder - 0.01
|
||||
label = "Collect Sequence"
|
||||
hosts = ["nukestudio"]
|
||||
hosts = ["hiero"]
|
||||
|
||||
def process(self, context):
|
||||
context.data['activeSequence'] = hiero.ui.activeSequence()
|
||||
|
|
@ -7,7 +7,7 @@ class CollectShots(api.InstancePlugin):
|
|||
# Run just before CollectClipSubsets
|
||||
order = api.CollectorOrder + 0.1021
|
||||
label = "Collect Shots"
|
||||
hosts = ["nukestudio"]
|
||||
hosts = ["hiero"]
|
||||
families = ["clip"]
|
||||
|
||||
def process(self, instance):
|
||||
|
|
@ -40,10 +40,10 @@ class CollectShots(api.InstancePlugin):
|
|||
data["name"] = data["subset"] + "_" + data["asset"]
|
||||
|
||||
data["label"] = (
|
||||
"{} - {} - tasks: {} - assetbuilds: {} - comments: {}".format(
|
||||
"{} - {} - tasks:{} - assetbuilds:{} - comments:{}".format(
|
||||
data["asset"],
|
||||
data["subset"],
|
||||
data["tasks"],
|
||||
[task.keys()[0] for task in data["tasks"]],
|
||||
[x["name"] for x in data.get("assetbuilds", [])],
|
||||
len(data.get("comments", []))
|
||||
)
|
||||
|
|
@ -6,7 +6,7 @@ class CollectClipTagComments(api.InstancePlugin):
|
|||
|
||||
order = api.CollectorOrder + 0.013
|
||||
label = "Collect Comments"
|
||||
hosts = ["nukestudio"]
|
||||
hosts = ["hiero"]
|
||||
families = ["clip"]
|
||||
|
||||
def process(self, instance):
|
||||
|
|
@ -6,7 +6,7 @@ class CollectClipTagFrameStart(api.InstancePlugin):
|
|||
|
||||
order = api.CollectorOrder + 0.013
|
||||
label = "Collect Frame Start"
|
||||
hosts = ["nukestudio"]
|
||||
hosts = ["hiero"]
|
||||
families = ['clip']
|
||||
|
||||
def process(self, instance):
|
||||
|
|
@ -7,7 +7,7 @@ class CollectClipTagHandles(api.ContextPlugin):
|
|||
|
||||
order = api.CollectorOrder + 0.012
|
||||
label = "Collect Tag Handles"
|
||||
hosts = ["nukestudio"]
|
||||
hosts = ["hiero"]
|
||||
families = ['clip']
|
||||
|
||||
def process(self, context):
|
||||
|
|
@ -6,7 +6,7 @@ class CollectClipTagResolution(api.InstancePlugin):
|
|||
|
||||
order = api.CollectorOrder + 0.013
|
||||
label = "Collect Source Resolution"
|
||||
hosts = ["nukestudio"]
|
||||
hosts = ["hiero"]
|
||||
families = ['clip']
|
||||
|
||||
def process(self, instance):
|
||||
|
|
@ -6,7 +6,7 @@ class CollectTagRetime(api.InstancePlugin):
|
|||
|
||||
order = api.CollectorOrder + 0.014
|
||||
label = "Collect Retiming Tag"
|
||||
hosts = ["nukestudio"]
|
||||
hosts = ["hiero"]
|
||||
families = ['clip']
|
||||
|
||||
def process(self, instance):
|
||||
|
|
@ -6,7 +6,7 @@ class CollectClipSubsetsTags(api.InstancePlugin):
|
|||
|
||||
order = api.CollectorOrder + 0.012
|
||||
label = "Collect Tags Subsets"
|
||||
hosts = ["nukestudio"]
|
||||
hosts = ["hiero"]
|
||||
families = ['clip']
|
||||
|
||||
def process(self, instance):
|
||||
|
|
@ -6,7 +6,7 @@ class CollectClipTagTasks(api.InstancePlugin):
|
|||
|
||||
order = api.CollectorOrder + 0.012
|
||||
label = "Collect Tag Tasks"
|
||||
hosts = ["nukestudio"]
|
||||
hosts = ["hiero"]
|
||||
families = ['clip']
|
||||
|
||||
def process(self, instance):
|
||||
|
|
@ -20,8 +20,9 @@ class CollectClipTagTasks(api.InstancePlugin):
|
|||
|
||||
# gets only task family tags and collect labels
|
||||
if "task" in t_family:
|
||||
t_task = t_metadata.get("tag.label", "")
|
||||
tasks.append(t_task)
|
||||
t_task_name = t_metadata.get("tag.label", "")
|
||||
t_task_type = t_metadata.get("tag.type", "")
|
||||
tasks.append({t_task_name: {"type": t_task_type}})
|
||||
|
||||
instance.data["tasks"] = tasks
|
||||
|
||||
|
|
@ -6,7 +6,7 @@ class CollectClipTags(api.InstancePlugin):
|
|||
|
||||
order = api.CollectorOrder + 0.011
|
||||
label = "Collect Tags"
|
||||
hosts = ["nukestudio"]
|
||||
hosts = ["hiero"]
|
||||
families = ['clip']
|
||||
|
||||
def process(self, instance):
|
||||
|
|
@ -7,7 +7,7 @@ class ExtractAudioFile(pype.api.Extractor):
|
|||
|
||||
order = api.ExtractorOrder
|
||||
label = "Extract Subset Audio"
|
||||
hosts = ["nukestudio"]
|
||||
hosts = ["hiero"]
|
||||
families = ["clip", "audio"]
|
||||
match = api.Intersection
|
||||
|
||||
Some files were not shown because too many files have changed in this diff.