Mirror of https://github.com/ynput/ayon-core.git (synced 2025-12-24 12:54:40 +01:00)

Commit 386a3d8b5c: Merge remote-tracking branch 'origin/develop' into 3.0/refactoring

# Conflicts:
#	pype/hosts/harmony/__init__.py

185 changed files with 3083 additions and 836 deletions
.circleci/config.yml  (new file, 24 lines)

@@ -0,0 +1,24 @@
version: 2.1

jobs:
  deploy-website:
    docker:
      - image: circleci/node:10.16

    steps:
      - checkout
      - run:
          name: Deploying to GitHub Pages
          command: |
            git config --global user.email "mkolar@users.noreply.github.com"
            git config --global user.name "Website Deployment Script"
            echo "machine github.com login mkolar password $GITHUB_TOKEN" > ~/.netrc
            cd website && yarn install && GIT_USER=mkolar yarn run publish-gh-pages

workflows:
  build_and_deploy:
    jobs:
      - deploy-website:
          filters:
            branches:
              only: feature/move_documentation
pype/hooks/fusion/prelaunch.py  (new file, 63 lines)

@@ -0,0 +1,63 @@
import importlib
import os
import traceback

from pypeapp import Logger

from pype.hosts.fusion import utils
from pype.lib import PypeHook


class FusionPrelaunch(PypeHook):
    """
    This hook will check if current workfile path has Fusion
    project inside.
    """

    def __init__(self, logger=None):
        if not logger:
            self.log = Logger().get_logger(self.__class__.__name__)
        else:
            self.log = logger

        self.signature = "( {} )".format(self.__class__.__name__)

    def execute(self, *args, env: dict = None) -> bool:

        if not env:
            env = os.environ

        # make sure Python 3.6 is installed at the provided path
        py36_dir = os.path.normpath(env.get("PYTHON36", ""))
        assert os.path.isdir(py36_dir), (
            "Python 3.6 is not installed at the provided folder path. Either "
            "make sure `environments/resolve.json` has `PYTHON36` set "
            "correctly or make sure Python 3.6 is installed in the given "
            f"path. \nPYTHON36: `{py36_dir}`"
        )
        self.log.info(f"Path to Fusion Python folder: `{py36_dir}`...")
        env["PYTHON36"] = py36_dir

        # setting utility scripts dir for scripts syncing
        us_dir = os.path.normpath(env.get("FUSION_UTILITY_SCRIPTS_DIR", ""))
        assert os.path.isdir(us_dir), (
            "Fusion utility scripts dir does not exist. Either make sure "
            "`environments/fusion.json` has `FUSION_UTILITY_SCRIPTS_DIR` set "
            "correctly or reinstall DaVinci Resolve. \n"
            f"FUSION_UTILITY_SCRIPTS_DIR: `{us_dir}`"
        )

        try:
            __import__("avalon.fusion")
            __import__("pyblish")

        except ImportError as e:
            print(traceback.format_exc())
            print("pyblish: Could not load integration: %s " % e)

        else:
            # Fusion setup integration
            importlib.reload(utils)
            utils.setup(env)

        return True
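A minimal sketch of exercising the hook above outside the launcher; this is not part of the commit, and it assumes PYTHON36 and FUSION_UTILITY_SCRIPTS_DIR point at existing directories (the paths below are placeholders):

import os
from pype.hooks.fusion.prelaunch import FusionPrelaunch

env = dict(os.environ)
env.setdefault("PYTHON36", "C:/Python36")
env.setdefault("FUSION_UTILITY_SCRIPTS_DIR",
               "C:/ProgramData/Blackmagic Design/Fusion/Scripts/Comp")

hook = FusionPrelaunch()
# Validates both folders, then syncs utility scripts via utils.setup(env)
assert hook.execute(env=env)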
@@ -1,63 +1,33 @@
import os
from .lib import (
    get_additional_data,
    update_frame_range
)
from .menu import launch_pype_menu
from .pipeline import (
    install,
    uninstall,
    publish,
    launch_workfiles_app
)
from .utils import (
    setup
)

from avalon import api as avalon
from pyblish import api as pyblish
from pype import PLUGINS_DIR
__all__ = [
    # pipeline
    "install",
    "uninstall",
    "publish",
    "launch_workfiles_app",

PUBLISH_PATH = os.path.join(PLUGINS_DIR, "fusion", "publish")
LOAD_PATH = os.path.join(PLUGINS_DIR, "fusion", "load")
CREATE_PATH = os.path.join(PLUGINS_DIR, "fusion", "create")
INVENTORY_PATH = os.path.join(PLUGINS_DIR, "fusion", "inventory")
    # utils
    "setup",
    "get_resolve_module",

    # lib
    "get_additional_data",
    "update_frame_range",

def install():
    print("Registering Fusion plug-ins..")
    pyblish.register_plugin_path(PUBLISH_PATH)
    avalon.register_plugin_path(avalon.Loader, LOAD_PATH)
    avalon.register_plugin_path(avalon.Creator, CREATE_PATH)
    avalon.register_plugin_path(avalon.InventoryAction, INVENTORY_PATH)

    pyblish.register_callback("instanceToggled", on_pyblish_instance_toggled)

    # Disable all families except for the ones we explicitly want to see
    family_states = ["imagesequence",
                     "camera",
                     "pointcache"]

    avalon.data["familiesStateDefault"] = False
    avalon.data["familiesStateToggled"] = family_states


def uninstall():
    print("Deregistering Fusion plug-ins..")
    pyblish.deregister_plugin_path(PUBLISH_PATH)
    avalon.deregister_plugin_path(avalon.Loader, LOAD_PATH)
    avalon.deregister_plugin_path(avalon.Creator, CREATE_PATH)

    pyblish.deregister_callback("instanceToggled", on_pyblish_instance_toggled)


def on_pyblish_instance_toggled(instance, new_value, old_value):
    """Toggle saver tool passthrough states on instance toggles."""

    from avalon.fusion import comp_lock_and_undo_chunk

    comp = instance.context.data.get("currentComp")
    if not comp:
        return

    savers = [tool for tool in instance if
              getattr(tool, "ID", None) == "Saver"]
    if not savers:
        return

    # Whether instances should be passthrough based on new value
    passthrough = not new_value
    with comp_lock_and_undo_chunk(comp,
                                  undo_queue_name="Change instance "
                                                  "active state"):
        for tool in savers:
            attrs = tool.GetAttrs()
            current = attrs["TOOLB_PassThrough"]
            if current != passthrough:
                tool.SetAttrs({"TOOLB_PassThrough": passthrough})
    # menu
    "launch_pype_menu",
]
pype/hosts/fusion/menu.py  (new file, 168 lines)

@@ -0,0 +1,168 @@
import os
import sys

from Qt import QtWidgets, QtCore
from avalon.tools import (
    creator,
    loader,
    sceneinventory,
    libraryloader
)

from .pipeline import (
    publish,
    launch_workfiles_app
)
from .scripts import (
    set_rendermode,
    duplicate_with_inputs
)


def load_stylesheet():
    path = os.path.join(os.path.dirname(__file__), "menu_style.qss")
    if not os.path.exists(path):
        print("Unable to load stylesheet, file not found in resources")
        return ""

    with open(path, "r") as file_stream:
        stylesheet = file_stream.read()
    return stylesheet


class Spacer(QtWidgets.QWidget):
    def __init__(self, height, *args, **kwargs):
        super(self.__class__, self).__init__(*args, **kwargs)

        self.setFixedHeight(height)

        real_spacer = QtWidgets.QWidget(self)
        real_spacer.setObjectName("Spacer")
        real_spacer.setFixedHeight(height)

        layout = QtWidgets.QVBoxLayout(self)
        layout.setContentsMargins(0, 0, 0, 0)
        layout.addWidget(real_spacer)

        self.setLayout(layout)


class PypeMenu(QtWidgets.QWidget):
    def __init__(self, *args, **kwargs):
        super(self.__class__, self).__init__(*args, **kwargs)

        self.setObjectName("PypeMenu")

        self.setWindowFlags(
            QtCore.Qt.Window
            | QtCore.Qt.CustomizeWindowHint
            | QtCore.Qt.WindowTitleHint
            | QtCore.Qt.WindowCloseButtonHint
            | QtCore.Qt.WindowStaysOnTopHint
        )
        self.render_mode_widget = None
        self.setWindowTitle("Pype")
        workfiles_btn = QtWidgets.QPushButton("Workfiles", self)
        create_btn = QtWidgets.QPushButton("Create", self)
        publish_btn = QtWidgets.QPushButton("Publish", self)
        load_btn = QtWidgets.QPushButton("Load", self)
        inventory_btn = QtWidgets.QPushButton("Inventory", self)
        libload_btn = QtWidgets.QPushButton("Library", self)
        rendermode_btn = QtWidgets.QPushButton("Set render mode", self)
        duplicate_with_inputs_btn = QtWidgets.QPushButton(
            "Duplicate with input connections", self
        )
        reset_resolution_btn = QtWidgets.QPushButton(
            "Reset Resolution from project", self
        )

        layout = QtWidgets.QVBoxLayout(self)
        layout.setContentsMargins(10, 20, 10, 20)

        layout.addWidget(workfiles_btn)
        layout.addWidget(create_btn)
        layout.addWidget(publish_btn)
        layout.addWidget(load_btn)
        layout.addWidget(inventory_btn)

        layout.addWidget(Spacer(15, self))

        layout.addWidget(libload_btn)

        layout.addWidget(Spacer(15, self))

        layout.addWidget(rendermode_btn)

        layout.addWidget(Spacer(15, self))

        layout.addWidget(duplicate_with_inputs_btn)
        layout.addWidget(reset_resolution_btn)

        self.setLayout(layout)

        workfiles_btn.clicked.connect(self.on_workfile_clicked)
        create_btn.clicked.connect(self.on_create_clicked)
        publish_btn.clicked.connect(self.on_publish_clicked)
        load_btn.clicked.connect(self.on_load_clicked)
        inventory_btn.clicked.connect(self.on_inventory_clicked)
        libload_btn.clicked.connect(self.on_libload_clicked)
        rendermode_btn.clicked.connect(self.on_rendernode_clicked)
        duplicate_with_inputs_btn.clicked.connect(
            self.on_duplicate_with_inputs_clicked)
        reset_resolution_btn.clicked.connect(self.on_reset_resolution_clicked)

    def on_workfile_clicked(self):
        print("Clicked Workfile")
        launch_workfiles_app()

    def on_create_clicked(self):
        print("Clicked Create")
        creator.show()

    def on_publish_clicked(self):
        print("Clicked Publish")
        publish(None)

    def on_load_clicked(self):
        print("Clicked Load")
        loader.show(use_context=True)

    def on_inventory_clicked(self):
        print("Clicked Inventory")
        sceneinventory.show()

    def on_libload_clicked(self):
        print("Clicked Library")
        libraryloader.show()

    def on_rendernode_clicked(self):
        from avalon import style
        print("Clicked Set Render Mode")
        if self.render_mode_widget is None:
            window = set_rendermode.SetRenderMode()
            window.setStyleSheet(style.load_stylesheet())
            window.show()
            self.render_mode_widget = window
        else:
            self.render_mode_widget.show()

    def on_duplicate_with_inputs_clicked(self):
        duplicate_with_inputs.duplicate_with_input_connections()
        print("Clicked Duplicate with input connections")

    def on_reset_resolution_clicked(self):
        print("Clicked Reset Resolution")


def launch_pype_menu():
    app = QtWidgets.QApplication(sys.argv)
    app.setQuitOnLastWindowClosed(False)

    pype_menu = PypeMenu()

    stylesheet = load_stylesheet()
    pype_menu.setStyleSheet(stylesheet)

    pype_menu.show()

    sys.exit(app.exec_())
pype/hosts/fusion/menu_style.qss  (new file, 29 lines)

@@ -0,0 +1,29 @@
QWidget {
    background-color: #282828;
    border-radius: 3;
}

QPushButton {
    border: 1px solid #090909;
    background-color: #201f1f;
    color: #ffffff;
    padding: 5;
}

QPushButton:focus {
    background-color: #171717;
    color: #d0d0d0;
}

QPushButton:hover {
    background-color: #171717;
    color: #e64b3d;
}

#PypeMenu {
    border: 1px solid #fef9ef;
}

#Spacer {
    background-color: #282828;
}
pype/hosts/fusion/pipeline.py  (new file, 115 lines)

@@ -0,0 +1,115 @@
"""
Basic avalon integration
"""
import os

from avalon import api as avalon
from avalon.tools import workfiles
from pyblish import api as pyblish
from pypeapp import Logger

from pype import PLUGINS_DIR

log = Logger().get_logger(__name__, "fusion")


AVALON_CONFIG = os.environ["AVALON_CONFIG"]

LOAD_PATH = os.path.join(PLUGINS_DIR, "fusion", "load")
CREATE_PATH = os.path.join(PLUGINS_DIR, "fusion", "create")
INVENTORY_PATH = os.path.join(PLUGINS_DIR, "fusion", "inventory")

PUBLISH_PATH = os.path.join(
    PLUGINS_DIR, "fusion", "publish"
).replace("\\", "/")


def install():
    """Install fusion-specific functionality of avalon-core.

    This is where you install menus and register families, data
    and loaders into fusion.

    It is called automatically when installing via `api.install(avalon.fusion)`

    See the Maya equivalent for inspiration on how to implement this.

    """

    # Disable all families except for the ones we explicitly want to see
    family_states = ["imagesequence",
                     "camera",
                     "pointcache"]
    avalon.data["familiesStateDefault"] = False
    avalon.data["familiesStateToggled"] = family_states

    log.info("pype.hosts.fusion installed")

    pyblish.register_host("fusion")
    pyblish.register_plugin_path(PUBLISH_PATH)
    log.info("Registering Fusion plug-ins..")

    avalon.register_plugin_path(avalon.Loader, LOAD_PATH)
    avalon.register_plugin_path(avalon.Creator, CREATE_PATH)
    avalon.register_plugin_path(avalon.InventoryAction, INVENTORY_PATH)

    pyblish.register_callback("instanceToggled", on_pyblish_instance_toggled)


def uninstall():
    """Uninstall all that was installed

    This is where you undo everything that was done in `install()`.
    That means removing menus, deregistering families and data
    and everything else. It should be as though `install()` was never run,
    because odds are calling this function means the user is interested
    in re-installing shortly afterwards, for example after having
    modified the menu or registered families.

    """
    pyblish.deregister_host("fusion")
    pyblish.deregister_plugin_path(PUBLISH_PATH)
    log.info("Deregistering Fusion plug-ins..")

    avalon.deregister_plugin_path(avalon.Loader, LOAD_PATH)
    avalon.deregister_plugin_path(avalon.Creator, CREATE_PATH)
    avalon.deregister_plugin_path(avalon.InventoryAction, INVENTORY_PATH)

    pyblish.deregister_callback("instanceToggled", on_pyblish_instance_toggled)


def on_pyblish_instance_toggled(instance, new_value, old_value):
    """Toggle saver tool passthrough states on instance toggles."""

    from avalon.fusion import comp_lock_and_undo_chunk

    comp = instance.context.data.get("currentComp")
    if not comp:
        return

    savers = [tool for tool in instance if
              getattr(tool, "ID", None) == "Saver"]
    if not savers:
        return

    # Whether instances should be passthrough based on new value
    passthrough = not new_value
    with comp_lock_and_undo_chunk(comp,
                                  undo_queue_name="Change instance "
                                                  "active state"):
        for tool in savers:
            attrs = tool.GetAttrs()
            current = attrs["TOOLB_PassThrough"]
            if current != passthrough:
                tool.SetAttrs({"TOOLB_PassThrough": passthrough})


def launch_workfiles_app(*args):
    workdir = os.environ["AVALON_WORKDIR"]
    workfiles.show(workdir)


def publish(parent):
    """Shorthand to publish from within host"""
    from avalon.tools import publish
    return publish.show(parent)
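For context, a minimal sketch of how this integration is activated from a Fusion Python session. The docstring above states install() runs automatically on `api.install(avalon.fusion)`, and the Pype_menu.py utility script added later in this diff uses the same calls; this snippet itself is illustrative only:

import avalon.api
import avalon.fusion

import pype

pype.install()                     # registers pype's global pyblish plugins
avalon.api.install(avalon.fusion)  # triggers pype.hosts.fusion.pipeline.install()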
pype/hosts/fusion/scripts/duplicate_with_inputs.py  (new file, 42 lines)

@@ -0,0 +1,42 @@
from avalon import fusion


def is_connected(input):
    """Return whether an input has incoming connection"""
    return input.GetAttrs()["INPB_Connected"]


def duplicate_with_input_connections():
    """Duplicate selected tools with incoming connections."""

    comp = fusion.get_current_comp()
    original_tools = comp.GetToolList(True).values()
    if not original_tools:
        return  # nothing selected

    with fusion.comp_lock_and_undo_chunk(
            comp, "Duplicate With Input Connections"):

        # Generate duplicates
        comp.Copy()
        comp.SetActiveTool()
        comp.Paste()
        duplicate_tools = comp.GetToolList(True).values()

        # Copy connections
        for original, new in zip(original_tools, duplicate_tools):

            original_inputs = original.GetInputList().values()
            new_inputs = new.GetInputList().values()
            assert len(original_inputs) == len(new_inputs)

            for original_input, new_input in zip(original_inputs, new_inputs):

                if is_connected(original_input):

                    if is_connected(new_input):
                        # Already connected if it is between the copied tools
                        continue

                    new_input.ConnectTo(original_input.GetConnectedOutput())
                    assert is_connected(new_input), "Must be connected now"
@@ -1,15 +1,15 @@
import logging
import os
import re
import sys
import logging

import avalon.fusion
# Pipeline imports
from avalon import api, io, pipeline
import avalon.fusion

import pype.hosts.fusion.lib as fusion_lib
# Config imports
import pype.lib as pype
import pype.hosts.fusion.lib as fusion_lib

log = logging.getLogger("Update Slap Comp")


@@ -32,7 +32,7 @@ def _format_version_folder(folder):

    new_version = 1
    if os.path.isdir(folder):
        re_version = re.compile("v\d+$")
        re_version = re.compile(r"v\d+$")
        versions = [i for i in os.listdir(folder) if os.path.isdir(i)
                    and re_version.match(i)]
        if versions:

@@ -87,7 +87,7 @@ def _format_filepath(session):

    # Create new unique filepath
    if os.path.exists(new_filepath):
        new_filepath = studio.version_up(new_filepath)
        new_filepath = pype.version_up(new_filepath)

    return new_filepath


@@ -95,6 +95,15 @@ def _format_filepath(session):
def _update_savers(comp, session):
    """Update all savers of the current comp to ensure the output is correct

    This will refactor the Saver file outputs to the renders of the new
    session that is provided.

    If the original saver path was set relative to a /fusion/ folder, that
    relative path is preserved, except that all "version" references
    (e.g. v010) are reset to v001. Otherwise only a version folder is
    computed in the new session's work "render" folder to dump the files in,
    keeping the original filenames.

    Args:
        comp (object): current comp instance
        session (dict): the current Avalon session

@@ -114,8 +123,36 @@ def _update_savers(comp, session):
    savers = comp.GetToolList(False, "Saver").values()
    for saver in savers:
        filepath = saver.GetAttrs("TOOLST_Clip_Name")[1.0]
        filename = os.path.basename(filepath)
        new_path = os.path.join(renders_version, filename)

        # Get old relative path to the "fusion" app folder so we can apply
        # the same relative path afterwards. If not found fall back to
        # using just a version folder with the filename in it.
        # todo: can we make this less magical?
        relpath = filepath.replace("\\", "/").rsplit("/fusion/", 1)[-1]

        if os.path.isabs(relpath):
            # If not relative to a "/fusion/" folder then just use filename
            filename = os.path.basename(filepath)
            log.warning("Can't parse relative path, refactoring to only "
                        "filename in a version folder: %s" % filename)
            new_path = os.path.join(renders_version, filename)

        else:
            # Else reuse the relative path
            # Reset version in folder and filename in the relative path
            # to v001. The version is only detected when prefixed
            # with either `_v` (underscore) or `/v` (folder)
            version_pattern = r"(/|_)v[0-9]+"
            if re.search(version_pattern, relpath):
                new_relpath = re.sub(version_pattern,
                                     r"\1v001",
                                     relpath)
                log.info("Resetting version folders to v001: "
                         "%s -> %s" % (relpath, new_relpath))
                relpath = new_relpath

            new_path = os.path.join(new_work, relpath)

        saver["Clip"] = new_path


@@ -138,6 +175,13 @@ def update_frame_range(comp, representations):
    versions = io.find({"type": "version", "_id": {"$in": version_ids}})
    versions = list(versions)

    versions = [v for v in versions
                if v["data"].get("startFrame", None) is not None]

    if not versions:
        log.warning("No versions loaded to match frame range to.\n")
        return

    start = min(v["data"]["frameStart"] for v in versions)
    end = max(v["data"]["frameEnd"] for v in versions)


@@ -180,7 +224,8 @@ def switch(asset_name, filepath=None, new=True):
    else:
        fusion = _get_fusion_instance()
        current_comp = fusion.LoadComp(filepath, quiet=True)
    assert current_comp is not None, "Fusion could not load '%s'" % filepath
    assert current_comp is not None, (
        "Fusion could not load '{}'").format(filepath)

    host = api.registered_host()
    containers = list(host.ls())

@@ -189,8 +234,9 @@ def switch(asset_name, filepath=None, new=True):
    representations = []
    for container in containers:
        try:
            representation = pype.switch_item(container,
                                              asset_name=asset_name)
            representation = pype.switch_item(
                container,
                asset_name=asset_name)
            representations.append(representation)
        except Exception as e:
            current_comp.Print("Error in switching! %s\n" % e.message)

@@ -223,6 +269,8 @@ def switch(asset_name, filepath=None, new=True):

if __name__ == '__main__':

    # QUESTION: can we convert this to gui rather than standalone script?
    # TODO: convert to gui tool
    import argparse

    parser = argparse.ArgumentParser(description="Switch to a shot within an"
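As a small illustration of what the version-reset substitution in _update_savers does to a relative saver path (the sample path is made up, not from the repository):

import re

version_pattern = r"(/|_)v[0-9]+"
relpath = "render/v010/sh010_slapcomp_v010.exr"
print(re.sub(version_pattern, r"\1v001", relpath))
# -> render/v001/sh010_slapcomp_v001.exr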
@@ -1,87 +0,0 @@  (file removed)
"""This module is used for command line publishing of image sequences."""

import os
import sys
import logging

handler = logging.basicConfig()
log = logging.getLogger("Publish Image Sequences")
log.setLevel(logging.DEBUG)

error_format = "Failed {plugin.__name__}: {error} -- {error.traceback}"


def publish(paths, gui=False):
    """Publish rendered image sequences based on the job data

    Args:
        paths (list): a list of paths where to publish from
        gui (bool, Optional): Choose to show Pyblish GUI, default is False

    Returns:
        None

    """

    assert isinstance(paths, (list, tuple)), "Must be list of paths"
    log.info(paths)
    assert any(paths), "No paths found in the list"
    # Set the paths to publish for the collector if any provided
    if paths:
        os.environ["FILESEQUENCE"] = os.pathsep.join(paths)

    # Install Avalon with shell as current host
    from avalon import api, shell
    api.install(shell)

    # Register target and host
    import pyblish.api
    pyblish.api.register_target("filesequence")
    pyblish.api.register_host("shell")

    # Publish items
    if gui:
        import pyblish_qml
        pyblish_qml.show(modal=True)
    else:

        import pyblish.util
        context = pyblish.util.publish()

        if not context:
            log.warning("Nothing collected.")
            sys.exit(1)

        # Collect errors, {plugin name: error}
        error_results = [r for r in context.data["results"] if r["error"]]

        if error_results:
            log.error(" Errors occurred ...")
            for result in error_results:
                log.error(error_format.format(**result))
            sys.exit(2)


def __main__():
    import argparse
    parser = argparse.ArgumentParser()
    parser.add_argument("--paths",
                        nargs="*",
                        default=[],
                        help="The filepaths to publish. This can be a "
                             "directory or a path to a .json publish "
                             "configuration.")
    parser.add_argument("--gui",
                        default=False,
                        action="store_true",
                        help="Whether to run Pyblish in GUI mode.")

    kwargs, args = parser.parse_known_args()

    print("Running publish imagesequence...")
    print("Paths: {}".format(kwargs.paths or [os.getcwd()]))
    publish(kwargs.paths, gui=kwargs.gui)


if __name__ == '__main__':
    __main__()
pype/hosts/fusion/scripts/set_rendermode.py  (new file, 111 lines)

@@ -0,0 +1,111 @@
import avalon.fusion as avalon
from avalon.vendor import qtawesome
from avalon.vendor.Qt import QtWidgets

_help = {"local": "Render the comp on your own machine and publish "
                  "it from the destination folder",
         "farm": "Submit a Fusion render job to the render farm to use all "
                 "other computers and add a publish job"}


class SetRenderMode(QtWidgets.QWidget):

    def __init__(self, parent=None):
        QtWidgets.QWidget.__init__(self, parent)

        self._comp = avalon.get_current_comp()
        self._comp_name = self._get_comp_name()

        self.setWindowTitle("Set Render Mode")
        self.setFixedSize(300, 175)

        layout = QtWidgets.QVBoxLayout()

        # region comp info
        comp_info_layout = QtWidgets.QHBoxLayout()

        update_btn = QtWidgets.QPushButton(qtawesome.icon("fa.refresh",
                                                          color="white"), "")
        update_btn.setFixedWidth(25)
        update_btn.setFixedHeight(25)

        comp_information = QtWidgets.QLineEdit()
        comp_information.setEnabled(False)

        comp_info_layout.addWidget(comp_information)
        comp_info_layout.addWidget(update_btn)
        # endregion comp info

        # region modes
        mode_options = QtWidgets.QComboBox()
        mode_options.addItems(_help.keys())

        mode_information = QtWidgets.QTextEdit()
        mode_information.setReadOnly(True)
        # endregion modes

        accept_btn = QtWidgets.QPushButton("Accept")

        layout.addLayout(comp_info_layout)
        layout.addWidget(mode_options)
        layout.addWidget(mode_information)
        layout.addWidget(accept_btn)

        self.setLayout(layout)

        self.comp_information = comp_information
        self.update_btn = update_btn

        self.mode_options = mode_options
        self.mode_information = mode_information

        self.accept_btn = accept_btn

        self.connections()
        self.update()

        # Force updated render mode help text
        self._update_rendermode_info()

    def connections(self):
        """Build connections between code and buttons"""

        self.update_btn.clicked.connect(self.update)
        self.accept_btn.clicked.connect(self._set_comp_rendermode)
        self.mode_options.currentIndexChanged.connect(
            self._update_rendermode_info)

    def update(self):
        """Update all information in the UI"""

        self._comp = avalon.get_current_comp()
        self._comp_name = self._get_comp_name()
        self.comp_information.setText(self._comp_name)

        # Update current comp settings
        mode = self._get_comp_rendermode()
        index = self.mode_options.findText(mode)
        self.mode_options.setCurrentIndex(index)

    def _update_rendermode_info(self):
        rendermode = self.mode_options.currentText()
        self.mode_information.setText(_help[rendermode])

    def _get_comp_name(self):
        return self._comp.GetAttrs("COMPS_Name")

    def _get_comp_rendermode(self):
        return self._comp.GetData("pype.rendermode") or "local"

    def _set_comp_rendermode(self):
        rendermode = self.mode_options.currentText()
        self._comp.SetData("pype.rendermode", rendermode)

        self._comp.Print("Updated render mode to '%s'\n" % rendermode)
        self.hide()

    def _validation(self):
        ui_mode = self.mode_options.currentText()
        comp_mode = self._get_comp_rendermode()

        return comp_mode == ui_mode
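The chosen mode is stored as comp data under the "pype.rendermode" key; a minimal sketch of reading it back elsewhere (for example before a submission), assuming an open comp, and not part of this commit:

from avalon import fusion

comp = fusion.get_current_comp()
rendermode = comp.GetData("pype.rendermode") or "local"  # same key and default as above
if rendermode == "farm":
    print("comp is flagged for farm rendering")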
@@ -0,0 +1,15 @@
from avalon import fusion
from avalon.fusion import comp_lock_and_undo_chunk

comp = fusion.get_current_comp()


def main():
    """Set all selected backgrounds to 32 bit"""
    with comp_lock_and_undo_chunk(comp, 'Selected Backgrounds to 32bit'):
        tools = comp.GetToolList(True, "Background").values()
        for tool in tools:
            tool.Depth = 5


main()

@@ -0,0 +1,15 @@
from avalon import fusion
from avalon.fusion import comp_lock_and_undo_chunk

comp = fusion.get_current_comp()


def main():
    """Set all backgrounds to 32 bit"""
    with comp_lock_and_undo_chunk(comp, 'Backgrounds to 32bit'):
        tools = comp.GetToolList(False, "Background").values()
        for tool in tools:
            tool.Depth = 5


main()

@@ -0,0 +1,15 @@
from avalon import fusion
from avalon.fusion import comp_lock_and_undo_chunk

comp = fusion.get_current_comp()


def main():
    """Set all selected loaders to 32 bit"""
    with comp_lock_and_undo_chunk(comp, 'Selected Loaders to 32bit'):
        tools = comp.GetToolList(True, "Loader").values()
        for tool in tools:
            tool.Depth = 5


main()
pype/hosts/fusion/utility_scripts/32bit/loaders_to32bit.py  (new file, 15 lines)

@@ -0,0 +1,15 @@
from avalon import fusion
from avalon.fusion import comp_lock_and_undo_chunk

comp = fusion.get_current_comp()


def main():
    """Set all loaders to 32 bit"""
    with comp_lock_and_undo_chunk(comp, 'Loaders to 32bit'):
        tools = comp.GetToolList(False, "Loader").values()
        for tool in tools:
            tool.Depth = 5


main()
pype/hosts/fusion/utility_scripts/Pype_menu.py  (new file, 27 lines)

@@ -0,0 +1,27 @@
import os
import sys

from pypeapp import Logger

import pype

log = Logger().get_logger(__name__)


def main(env):
    from pype.hosts.fusion import menu
    import avalon.fusion
    # Registers pype's global pyblish plugins
    pype.install()

    # activate the Fusion host from pype
    avalon.api.install(avalon.fusion)

    log.info(f"Avalon registered hosts: {avalon.api.registered_host()}")

    menu.launch_pype_menu()


if __name__ == "__main__":
    result = main(os.environ)
    sys.exit(not bool(result))
pype/hosts/fusion/utility_scripts/switch_ui.py  (new file, 200 lines)

@@ -0,0 +1,200 @@
import glob
import logging
import os

import avalon.api as api
import avalon.fusion
import avalon.io as io
import avalon.pipeline as pipeline
import avalon.style as style
from avalon.vendor import qtawesome as qta
from avalon.vendor.Qt import QtWidgets, QtCore

log = logging.getLogger("Fusion Switch Shot")


class App(QtWidgets.QWidget):

    def __init__(self, parent=None):

        ################################################
        # |---------------------| |------------------| #
        # |Comp                 | |Asset             | #
        # |[..][              v]| |[               v]| #
        # |---------------------| |------------------| #
        # | Update existing comp [ ]                  | #
        # |------------------------------------------| #
        # |                  Switch                   | #
        # |------------------------------------------| #
        ################################################

        QtWidgets.QWidget.__init__(self, parent)

        layout = QtWidgets.QVBoxLayout()

        # Comp related input
        comp_hlayout = QtWidgets.QHBoxLayout()
        comp_label = QtWidgets.QLabel("Comp file")
        comp_label.setFixedWidth(50)
        comp_box = QtWidgets.QComboBox()

        button_icon = qta.icon("fa.folder", color="white")
        open_from_dir = QtWidgets.QPushButton()
        open_from_dir.setIcon(button_icon)

        comp_box.setFixedHeight(25)
        open_from_dir.setFixedWidth(25)
        open_from_dir.setFixedHeight(25)

        comp_hlayout.addWidget(comp_label)
        comp_hlayout.addWidget(comp_box)
        comp_hlayout.addWidget(open_from_dir)

        # Asset related input
        asset_hlayout = QtWidgets.QHBoxLayout()
        asset_label = QtWidgets.QLabel("Shot")
        asset_label.setFixedWidth(50)

        asset_box = QtWidgets.QComboBox()
        asset_box.setLineEdit(QtWidgets.QLineEdit())
        asset_box.setFixedHeight(25)

        refresh_icon = qta.icon("fa.refresh", color="white")
        refresh_btn = QtWidgets.QPushButton()
        refresh_btn.setIcon(refresh_icon)

        asset_box.setFixedHeight(25)
        refresh_btn.setFixedWidth(25)
        refresh_btn.setFixedHeight(25)

        asset_hlayout.addWidget(asset_label)
        asset_hlayout.addWidget(asset_box)
        asset_hlayout.addWidget(refresh_btn)

        # Options
        options = QtWidgets.QHBoxLayout()
        options.setAlignment(QtCore.Qt.AlignLeft)

        current_comp_check = QtWidgets.QCheckBox()
        current_comp_check.setChecked(True)
        current_comp_label = QtWidgets.QLabel("Use current comp")

        options.addWidget(current_comp_label)
        options.addWidget(current_comp_check)

        accept_btn = QtWidgets.QPushButton("Switch")

        layout.addLayout(options)
        layout.addLayout(comp_hlayout)
        layout.addLayout(asset_hlayout)
        layout.addWidget(accept_btn)

        self._open_from_dir = open_from_dir
        self._comps = comp_box
        self._assets = asset_box
        self._use_current = current_comp_check
        self._accept_btn = accept_btn
        self._refresh_btn = refresh_btn

        self.setWindowTitle("Fusion Switch Shot")
        self.setLayout(layout)

        self.resize(260, 140)
        self.setMinimumWidth(260)
        self.setFixedHeight(140)

        self.connections()

        # Update ui to correct state
        self._on_use_current_comp()
        self._refresh()

    def connections(self):
        self._use_current.clicked.connect(self._on_use_current_comp)
        self._open_from_dir.clicked.connect(self._on_open_from_dir)
        self._refresh_btn.clicked.connect(self._refresh)
        self._accept_btn.clicked.connect(self._on_switch)

    def _on_use_current_comp(self):
        state = self._use_current.isChecked()
        self._open_from_dir.setEnabled(not state)
        self._comps.setEnabled(not state)

    def _on_open_from_dir(self):

        start_dir = self._get_context_directory()
        comp_file, _ = QtWidgets.QFileDialog.getOpenFileName(
            self, "Choose comp", start_dir)

        if not comp_file:
            return

        # Create completer
        self.populate_comp_box([comp_file])
        self._refresh()

    def _refresh(self):
        # Clear any existing items
        self._assets.clear()

        asset_names = [a["name"] for a in self.collect_assets()]
        completer = QtWidgets.QCompleter(asset_names)

        self._assets.setCompleter(completer)
        self._assets.addItems(asset_names)

    def _on_switch(self):

        if not self._use_current.isChecked():
            file_name = self._comps.itemData(self._comps.currentIndex())
        else:
            comp = avalon.fusion.get_current_comp()
            file_name = comp.GetAttrs("COMPS_FileName")

        asset = self._assets.currentText()

        import colorbleed.scripts.fusion_switch_shot as switch_shot
        switch_shot.switch(asset_name=asset, filepath=file_name, new=True)

    def _get_context_directory(self):

        project = io.find_one({"type": "project",
                               "name": api.Session["AVALON_PROJECT"]},
                              projection={"config": True})

        template = project["config"]["template"]["work"]
        dir = pipeline._format_work_template(template, api.Session)

        return dir

    def collect_slap_comps(self, directory):
        items = glob.glob("{}/*.comp".format(directory))
        return items

    def collect_assets(self):
        return list(io.find({"type": "asset", "silo": "film"}))

    def populate_comp_box(self, files):
        """Ensure we display the filename only but the path is stored as well

        Args:
            files (list): list of full file path [path/to/item/item.ext,]

        Returns:
            None
        """

        for f in files:
            filename = os.path.basename(f)
            self._comps.addItem(filename, userData=f)


if __name__ == '__main__':
    import sys
    api.install(avalon.fusion)

    app = QtWidgets.QApplication(sys.argv)
    window = App()
    window.setStyleSheet(style.load_stylesheet())
    window.show()
    sys.exit(app.exec_())
pype/hosts/fusion/utility_scripts/update_loader_ranges.py  (new file, 37 lines)

@@ -0,0 +1,37 @@
"""Forces Fusion to 'retrigger' the Loader to update.

Warning:
    This might change settings like 'Reverse', 'Loop', trims and other
    settings of the Loader. So use this at your own risk.

"""
from avalon import fusion


def update_loader_ranges():
    comp = fusion.get_current_comp()
    with fusion.comp_lock_and_undo_chunk(comp, "Reload clip time ranges"):
        tools = comp.GetToolList(True, "Loader").values()
        for tool in tools:

            # Get tool attributes
            tool_a = tool.GetAttrs()
            clipTable = tool_a['TOOLST_Clip_Name']
            altclipTable = tool_a['TOOLST_AltClip_Name']
            startTime = tool_a['TOOLNT_Clip_Start']
            old_global_in = tool.GlobalIn[comp.CurrentTime]

            # Reapply
            for index, _ in clipTable.items():
                time = startTime[index]
                tool.Clip[time] = tool.Clip[time]

            for index, _ in altclipTable.items():
                time = startTime[index]
                tool.ProxyFilename[time] = tool.ProxyFilename[time]

            tool.GlobalIn[comp.CurrentTime] = old_global_in


if __name__ == '__main__':
    update_loader_ranges()
pype/hosts/fusion/utils.py  (new file, 85 lines)

@@ -0,0 +1,85 @@
#! python3

"""
Fusion tools for setting environment
"""

import os
import shutil

from pypeapp import Logger

log = Logger().get_logger(__name__, "fusion")


def _sync_utility_scripts(env=None):
    """Synchronize basic utility scripts for Fusion.

    To be able to run scripts from the `Fusion/Workspace/Scripts` menu,
    all scripts have to be accessible from the defined folder.
    """
    if not env:
        env = os.environ

    # initiate inputs
    scripts = {}
    us_env = env.get("FUSION_UTILITY_SCRIPTS_SOURCE_DIR")
    us_dir = env.get("FUSION_UTILITY_SCRIPTS_DIR", "")
    us_paths = [os.path.join(
        os.path.dirname(__file__),
        "utility_scripts"
    )]

    # collect script dirs
    if us_env:
        log.info(f"Utility Scripts Env: `{us_env}`")
        us_paths = us_env.split(
            os.pathsep) + us_paths

    # collect scripts from dirs
    for path in us_paths:
        scripts.update({path: os.listdir(path)})

    log.info(f"Utility Scripts Dir: `{us_paths}`")
    log.info(f"Utility Scripts: `{scripts}`")

    # make sure no script file is left in the target folder
    if next((s for s in os.listdir(us_dir)), None):
        for s in os.listdir(us_dir):
            path = os.path.normpath(
                os.path.join(us_dir, s))
            log.info(f"Removing `{path}`...")

            # remove file or directory if not in our folders
            if not os.path.isdir(path):
                os.remove(path)
            else:
                shutil.rmtree(path)

    # copy scripts into Fusion's utility scripts dir
    for d, sl in scripts.items():
        # directory and scripts list
        for s in sl:
            # script in script list
            src = os.path.normpath(os.path.join(d, s))
            dst = os.path.normpath(os.path.join(us_dir, s))

            log.info(f"Copying `{src}` to `{dst}`...")

            # copy file or directory from our folders to fusion's folder
            if not os.path.isdir(src):
                shutil.copy2(src, dst)
            else:
                shutil.copytree(src, dst)


def setup(env=None):
    """ Wrapper installer started from pype.hooks.fusion.FusionPrelaunch()
    """
    if not env:
        env = os.environ

    # synchronize fusion utility scripts
    _sync_utility_scripts(env)

    log.info("Fusion Pype wrapper has been installed")
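A minimal sketch of running the script sync by hand with an explicit environment; it assumes the FUSION_UTILITY_SCRIPTS_DIR folder exists, the path shown is a placeholder, and the snippet is not part of this commit:

import os

from pype.hosts.fusion import utils

env = dict(os.environ)
env["FUSION_UTILITY_SCRIPTS_DIR"] = "C:/ProgramData/Blackmagic Design/Fusion/Scripts/Comp"
# Wipes the target folder, then copies the bundled utility_scripts into it
utils.setup(env)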
@@ -1,20 +1,19 @@
import os
import sys
from uuid import uuid4

from avalon import api, io, harmony
from avalon.vendor import Qt
import avalon.tools.sceneinventory
import pyblish.api
from avalon import api, io, harmony
from avalon.vendor import Qt

from pype import lib
from pype.api import config


signature = str(uuid4())


def set_scene_settings(settings):
    func = """function %s_func(args)

    signature = harmony.signature("set_scene_settings")
    func = """function %s(args)
    {
        if (args[0]["fps"])
        {

@@ -41,7 +40,7 @@ def set_scene_settings(settings):
            )
        }
    }
    %s_func
    %s
    """ % (signature, signature)
    harmony.send({"function": func, "args": [settings]})


@@ -62,9 +61,12 @@ def get_asset_settings():
        "resolutionHeight": resolution_height
    }

    harmony_config = config.get_presets()["harmony"]["general"]
    try:
        skip_resolution_check = \
            config.get_presets()["harmony"]["general"]["skip_resolution_check"]
    except KeyError:
        skip_resolution_check = []

    skip_resolution_check = harmony_config.get("skip_resolution_check", [])
    if os.getenv('AVALON_TASK') in skip_resolution_check:
        scene_data.pop("resolutionWidth")
        scene_data.pop("resolutionHeight")

@@ -84,13 +86,15 @@ def ensure_scene_settings():
        valid_settings[key] = value

    # Warn about missing attributes.
    print("Starting new QApplication..")
    app = Qt.QtWidgets.QApplication(sys.argv)

    message_box = Qt.QtWidgets.QMessageBox()
    message_box.setIcon(Qt.QtWidgets.QMessageBox.Warning)
    msg = "Missing attributes:"
    if invalid_settings:
        print("Starting new QApplication..")
        app = Qt.QtWidgets.QApplication.instance()
        if not app:
            app = Qt.QtWidgets.QApplication(sys.argv)

        message_box = Qt.QtWidgets.QMessageBox()
        message_box.setIcon(Qt.QtWidgets.QMessageBox.Warning)
        msg = "Missing attributes:"
        for item in invalid_settings:
            msg += f"\n{item}"
        message_box.setText(msg)

@@ -121,15 +125,17 @@ def check_inventory():
            outdated_containers.append(container)

    # Colour nodes.
    func = """function %s_func(args){
    sig = harmony.signature("set_color")
    func = """function %s(args){

        for( var i =0; i <= args[0].length - 1; ++i)
        {
            var red_color = new ColorRGBA(255, 0, 0, 255);
            node.setColor(args[0][i], red_color);
        }
    }
    %s_func
    """ % (signature, signature)
    %s
    """ % (sig, sig)
    outdated_nodes = []
    for container in outdated_containers:
        if container["loader"] == "ImageSequenceLoader":

@@ -158,7 +164,9 @@ def application_launch():


def export_template(backdrops, nodes, filepath):
    func = """function %s_func(args)

    sig = harmony.signature("set_color")
    func = """function %s(args)
    {

        var temp_node = node.add("Top", "temp_note", "NOTE", 0, 0, 0);

@@ -193,8 +201,8 @@ def export_template(backdrops, nodes, filepath):
        Action.perform("onActionUpToParent()", "Node View");
        node.deleteNode(template_group, true, true);
    }
    %s_func
    """ % (signature, signature)
    %s
    """ % (sig, sig)
    harmony.send({
        "function": func,
        "args": [

@@ -235,12 +243,14 @@ def install():

def on_pyblish_instance_toggled(instance, old_value, new_value):
    """Toggle node enabling on instance toggles."""
    func = """function %s_func(args)

    sig = harmony.signature("enable_node")
    func = """function %s(args)
    {
        node.setEnable(args[0], args[1])
    }
    %s_func
    """ % (signature, signature)
    %s
    """ % (sig, sig)
    try:
        harmony.send(
            {"function": func, "args": [instance[0], new_value]}
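The recurring change in these hunks replaces the module-level uuid-based signature with per-call harmony.signature() names before the JavaScript snippet is sent to Harmony. A small illustration of how the wrapped snippet is assembled (the signature value below is made up, not a real return value):

sig = "enable_node_0f3a"  # stand-in for what harmony.signature("enable_node") returns
func = """function %s(args)
{
    node.setEnable(args[0], args[1])
}
%s
""" % (sig, sig)
# harmony.send({"function": func, "args": [node_name, True]}) then evaluates it in Harmony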
@@ -1,9 +1,15 @@
import os
from pype.api import Logger

from avalon import api as avalon
from pyblish import api as pyblish
from pype import PLUGINS_DIR

from pype import PLUGINS_DIR
from pype.api import Logger
from .events import register_hiero_events
from .menu import (
    install as menu_install,
    _update_menu_task_label
)
from .workio import (
    open_file,
    save_file,

@@ -13,13 +19,6 @@ from .workio import (
    work_root
)

from .menu import (
    install as menu_install,
    _update_menu_task_label
)

from .events import register_hiero_events

__all__ = [
    # Workfiles API
    "open_file",

@@ -31,17 +30,17 @@ __all__ = [
]

# get logger
log = Logger().get_logger(__name__, "nukestudio")
log = Logger().get_logger(__name__, "hiero")


''' Creating all important host related variables '''
AVALON_CONFIG = os.getenv("AVALON_CONFIG", "pype")

# plugin root path
PUBLISH_PATH = os.path.join(PLUGINS_DIR, "nukestudio", "publish")
LOAD_PATH = os.path.join(PLUGINS_DIR, "nukestudio", "load")
CREATE_PATH = os.path.join(PLUGINS_DIR, "nukestudio", "create")
INVENTORY_PATH = os.path.join(PLUGINS_DIR, "nukestudio", "inventory")
PUBLISH_PATH = os.path.join(PLUGINS_DIR, "hiero", "publish")
LOAD_PATH = os.path.join(PLUGINS_DIR, "hiero", "load")
CREATE_PATH = os.path.join(PLUGINS_DIR, "hiero", "create")
INVENTORY_PATH = os.path.join(PLUGINS_DIR, "hiero", "inventory")

# registering particular pyblish gui but `lite` is recommended!!
if os.getenv("PYBLISH_GUI", None):

@@ -50,7 +49,7 @@ if os.getenv("PYBLISH_GUI", None):

def install():
    """
    Installing Nukestudio integration for avalon
    Installing Hiero integration for avalon

    Args:
        config (obj): avalon config module `pype` in our case, it is not

@@ -61,8 +60,8 @@ def install():
    # adding all events
    _register_events()

    log.info("Registering NukeStudio plug-ins..")
    pyblish.register_host("nukestudio")
    log.info("Registering Hiero plug-ins..")
    pyblish.register_host("hiero")
    pyblish.register_plugin_path(PUBLISH_PATH)
    avalon.register_plugin_path(avalon.Loader, LOAD_PATH)
    avalon.register_plugin_path(avalon.Creator, CREATE_PATH)

@@ -87,11 +86,11 @@ def install():

def uninstall():
    """
    Uninstalling Nukestudio integration for avalon
    Uninstalling Hiero integration for avalon

    """
    log.info("Deregistering NukeStudio plug-ins..")
    pyblish.deregister_host("nukestudio")
    log.info("Deregistering Hiero plug-ins..")
    pyblish.deregister_host("hiero")
    pyblish.deregister_plugin_path(PUBLISH_PATH)
    avalon.deregister_plugin_path(avalon.Loader, LOAD_PATH)
    avalon.deregister_plugin_path(avalon.Creator, CREATE_PATH)

@@ -102,7 +101,7 @@ def _register_events():
    Adding all callbacks.
    """

    # if task changed then change notext of nukestudio
    # if task changed then change notext of hiero
    avalon.on("taskChanged", _update_menu_task_label)
    log.info("Installed event callback for 'taskChanged'..")
@@ -1,10 +1,12 @@
import os

import hiero.core.events

from pype.api import Logger
from .lib import sync_avalon_data_to_workfile, launch_workfiles_app
from .tags import add_tags_from_presets

log = Logger().get_logger(__name__, "nukestudio")
log = Logger().get_logger(__name__, "hiero")


def startupCompleted(event):
@@ -1,14 +1,16 @@
import os
import re
import sys

import avalon.api as avalon
import hiero
import pyblish.api
import avalon.api as avalon
from avalon.vendor.Qt import (QtWidgets, QtGui)

import pype.api as pype
from pype.api import Logger, Anatomy

log = Logger().get_logger(__name__, "nukestudio")
log = Logger().get_logger(__name__, "hiero")

cached_process = None

@@ -82,7 +84,7 @@ def sync_avalon_data_to_workfile():

def launch_workfiles_app(event):
    """
    Event for launching workfiles after nukestudio start
    Event for launching workfiles after hiero start

    Args:
        event (obj): required but unused

@@ -109,9 +111,9 @@ def reload_config():
        "pypeapp",
        "{}.api".format(AVALON_CONFIG),
        "{}.templates".format(AVALON_CONFIG),
        "{}.hosts.nukestudio.lib".format(AVALON_CONFIG),
        "{}.hosts.nukestudio.menu".format(AVALON_CONFIG),
        "{}.hosts.nukestudio.tags".format(AVALON_CONFIG)
        "{}.hosts.hiero.lib".format(AVALON_CONFIG),
        "{}.hosts.hiero.menu".format(AVALON_CONFIG),
        "{}.hosts.hiero.tags".format(AVALON_CONFIG)
    ):
        log.info("Reloading module: {}...".format(module))
        try:

@@ -331,7 +333,6 @@ def CreateNukeWorkfile(nodes=None,

    '''
    import hiero.core
    from avalon.nuke import imprint
    from pype.hosts.nuke import (
        lib as nklib
    )
@@ -1,18 +1,18 @@
import os
import sys

import hiero.core
from pype.api import Logger
from avalon.api import Session
from hiero.ui import findMenuAction

from .tags import add_tags_from_presets

from pype.api import Logger
from .lib import (
    reload_config,
    set_workfiles
)
from .tags import add_tags_from_presets

log = Logger().get_logger(__name__, "nukestudio")
log = Logger().get_logger(__name__, "hiero")

self = sys.modules[__name__]
self._change_context_menu = None

@@ -38,7 +38,7 @@ def _update_menu_task_label(*args):

def install():
    """
    Installing menu into Nukestudio
    Installing menu into Hiero

    """

@@ -1,12 +1,14 @@
import re
import os
import json
import hiero
import os
import re
from pprint import pformat

from pype.api import Logger
import hiero
from avalon import io

log = Logger().get_logger(__name__, "nukestudio")
from pype.api import Logger

log = Logger().get_logger(__name__, "hiero")


def tag_data():

@@ -65,21 +67,23 @@ def add_tags_from_presets():

    log.debug("Setting default tags on project: {}".format(project.name()))

    # get nukestudio tags.json
    # get hiero tags.json
    nks_pres_tags = tag_data()

    # Get project task types.
    tasks = io.find_one({"type": "project"})["config"]["tasks"]
    nks_pres_tags["[Tasks]"] = {}
    for task in tasks:
        nks_pres_tags["[Tasks]"][task["name"]] = {
    log.debug("__ tasks: {}".format(pformat(tasks)))
    for task_type in tasks.keys():
        nks_pres_tags["[Tasks]"][task_type.lower()] = {
            "editable": "1",
            "note": "",
            "icon": {
                "path": "icons:TagGood.png"
            },
            "metadata": {
                "family": "task"
                "family": "task",
                "type": task_type
            }
        }

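This hunk, like the Clockify hunk further below, follows the project config "tasks" structure changing from a list of dicts to a dict keyed by task type. A small made-up sample showing both iteration styles:

tasks = {"Animation": {"short_name": "anim"}, "Compositing": {"short_name": "comp"}}
task_types = tasks.keys()                       # new style, as in the added lines
# old style iterated a list of dicts: [task["name"] for task in tasks]
print([t.lower() for t in task_types])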
@@ -1,14 +1,15 @@
import os

import hiero
from avalon import api

from pype.api import Logger


log = Logger().get_logger(__name__, "nukestudio")
log = Logger().get_logger(__name__, "hiero")


def file_extensions():
    return api.HOST_WORKFILE_EXTENSIONS["nukestudio"]
    return api.HOST_WORKFILE_EXTENSIONS["hiero"]


def has_unsaved_changes():
@@ -1,11 +1,12 @@
import sys
import os
import logging
import os
import sys

from avalon.vendor.Qt import QtWidgets, QtGui
from avalon.maya import pipeline
from ...lib import BuildWorkfile
import maya.cmds as cmds
from avalon.maya import pipeline
from avalon.vendor.Qt import QtWidgets, QtGui

from ...lib import BuildWorkfile

self = sys.modules[__name__]
self._menu = os.environ['PYPE_STUDIO_NAME']

@@ -32,8 +33,19 @@ def deferred():
            command=lambda *args: BuildWorkfile().process()
        )

    def add_look_assigner_item():
        import mayalookassigner
        cmds.menuItem(
            "Look assigner",
            parent=pipeline._menu,
            command=lambda *args: mayalookassigner.show()
        )

    log.info("Attempting to install scripts menu..")

    add_build_workfiles_item()
    add_look_assigner_item()

    try:
        import scriptsmenu.launchformaya as launchformaya
        import scriptsmenu.scriptsmenu as scriptsmenu

@@ -42,7 +54,6 @@ def deferred():
            "Skipping studio.menu install, because "
            "'scriptsmenu' module seems unavailable."
        )
        add_build_workfiles_item()
        return

    # load configuration of custom menu
@@ -1,6 +1,8 @@
from avalon import api, io
from pype.modules.clockify.clockify_api import ClockifyAPI

from pype.api import Logger
from pype.modules.clockify.clockify_api import ClockifyAPI

log = Logger().get_logger(__name__, "clockify_sync")


@@ -30,7 +32,7 @@ class ClockifySync(api.Action):

        projects_info = {}
        for project in projects_to_sync:
            task_types = [task['name'] for task in project['config']['tasks']]
            task_types = project['config']['tasks'].keys()
            projects_info[project['name']] = task_types

        clockify_projects = self.clockapi.get_projects()
@@ -1,31 +1,30 @@
import os
import re
import queue
import json
import collections
import copy

from avalon.api import AvalonMongoDB
import json
import os
import queue
import re

import avalon
import avalon.api
from avalon.vendor import toml
from pype.api import Logger, Anatomy

from bson.objectid import ObjectId
from bson.errors import InvalidId
from pymongo import UpdateOne
import ftrack_api
from avalon.api import AvalonMongoDB
from avalon.vendor import toml
from bson.errors import InvalidId
from bson.objectid import ObjectId
from pymongo import UpdateOne

from pype.api import Logger, Anatomy
from pype.api import config

log = Logger().get_logger(__name__)


# Current schemas for avalon types
EntitySchemas = {
"project": "avalon-core:project-2.0",
"project": "avalon-core:project-2.1",
"asset": "avalon-core:asset-3.0",
"config": "avalon-core:config-1.0"
"config": "avalon-core:config-1.1"
}

# Group name of custom attributes

@@ -50,7 +49,7 @@ def check_regex(name, entity_type, in_schema=None, schema_patterns=None):
if in_schema:
schema_name = in_schema
elif entity_type == "project":
schema_name = "project-2.0"
schema_name = "project-2.1"
elif entity_type == "task":
schema_name = "task"
@@ -103,6 +102,14 @@ def get_pype_attr(session, split_hierarchical=True):


def from_dict_to_set(data):
"""
Converts 'data' into the $set part of a MongoDB update command.
Args:
data: (dictionary) - up-to-date data from Ftrack

Returns:
(dictionary) - {"$set": {..}}
"""
result = {"$set": {}}
dict_queue = queue.Queue()
dict_queue.put((None, data))

@@ -114,7 +121,8 @@ def from_dict_to_set(data):
if _key is not None:
new_key = "{}.{}".format(_key, key)

if not isinstance(value, dict):
if not isinstance(value, dict) or \
(isinstance(value, dict) and not bool(value)):  # empty dict
result["$set"][new_key] = value
continue
dict_queue.put((new_key, value))
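For illustration, a dotted-key "$set" document like the one built above can be produced by a small standalone helper. The sketch below is a simplified rewrite of the same flattening idea, not the code from this commit.

# Illustrative sketch only: nested dicts flattened to dotted keys
# suitable for a MongoDB "$set" update.
import queue


def flatten_to_set(data):
    result = {"$set": {}}
    pending = queue.Queue()
    pending.put((None, data))
    while not pending.empty():
        prefix, value = pending.get()
        for key, item in value.items():
            new_key = key if prefix is None else "{}.{}".format(prefix, key)
            if not isinstance(item, dict) or not item:
                # plain values and empty dicts are written as-is
                result["$set"][new_key] = item
                continue
            pending.put((new_key, item))
    return result


print(flatten_to_set({"data": {"fps": 25, "tasks": {}}}))
# {'$set': {'data.fps': 25, 'data.tasks': {}}}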
@@ -123,6 +131,8 @@ def from_dict_to_set(data):

def get_avalon_project_template(project_name):
"""Get avalon template
Args:
project_name: (string)
Returns:
dictionary with templates
"""

@@ -135,6 +145,16 @@ def get_avalon_project_template(project_name):


def get_project_apps(in_app_list):
"""
Returns metadata information about apps in 'in_app_list' enhanced
from toml files.
Args:
in_app_list: (list) - names of applications

Returns:
tuple (list, dictionary) - list of dictionaries about apps
dictionary of warnings
"""
apps = []
# TODO report
missing_toml_msg = "Missing config file for application"
@@ -239,6 +259,28 @@ def get_hierarchical_attributes(session, entity, attr_names, attr_defaults={}):
return hier_values


def get_task_short_name(task_type):
"""
Returns short name (code) for 'task_type'. Short name stored in
metadata dictionary in project.config per each 'task_type'.
Could be used in anatomy, paths etc.
If no appropriate short name is found in mapping, 'task_type' is
returned back unchanged.

Currently stores data in:
'pype-config/presets/ftrack/project_defaults.json'
Args:
task_type: (string) - Animation | Modeling ...

Returns:
(string) - anim | model ...
"""
presets = config.get_presets()['ftrack']['project_defaults']\
.get("task_short_names")

return presets.get(task_type, task_type)


class SyncEntitiesFactory:
dbcon = AvalonMongoDB()
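A minimal sketch of the lookup behaviour described in the docstring above, with the fall-back to the unchanged task type. The mapping values are made-up examples, not the actual studio preset file.

# Illustrative sketch only: short-name lookup with a pass-through default.
task_short_names = {"Animation": "anim", "Modeling": "model"}


def short_name(task_type, mapping=task_short_names):
    # unknown task types fall back to the unchanged name
    return mapping.get(task_type, task_type)


print(short_name("Animation"))  # anim
print(short_name("Lighting"))   # Lighting (no mapping entry)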
@@ -378,7 +420,7 @@ class SyncEntitiesFactory:
"custom_attributes": {},
"hier_attrs": {},
"avalon_attrs": {},
"tasks": []
"tasks": {}
})

for entity in all_project_entities:

@@ -389,7 +431,9 @@ class SyncEntitiesFactory:
continue

elif entity_type_low == "task":
entities_dict[parent_id]["tasks"].append(entity["name"])
# enrich task info with additional metadata
task = {"type": entity["type"]["name"]}
entities_dict[parent_id]["tasks"][entity["name"]] = task
continue

entity_id = entity["id"]

@@ -416,6 +460,13 @@ class SyncEntitiesFactory:

@property
def avalon_ents_by_id(self):
"""
Returns dictionary of avalon tracked entities (assets stored in
MongoDB) accessible by their '_id'
(mongo internal ID - example ObjectId("5f48de5830a9467b34b69798"))
Returns:
(dictionary) - {"(_id)": whole entity asset}
"""
if self._avalon_ents_by_id is None:
self._avalon_ents_by_id = {}
for entity in self.avalon_entities:

@@ -425,6 +476,14 @@ class SyncEntitiesFactory:

@property
def avalon_ents_by_ftrack_id(self):
"""
Returns dictionary of Mongo ids of avalon tracked entities
(assets stored in MongoDB) accessible by their 'ftrackId'
(id from ftrack)
(example '431ee3f2-e91a-11ea-bfa4-92591a5b5e3e')
Returns:
(dictionary) - {"(ftrackId)": "_id"}
"""
if self._avalon_ents_by_ftrack_id is None:
self._avalon_ents_by_ftrack_id = {}
for entity in self.avalon_entities:

@@ -437,6 +496,13 @@ class SyncEntitiesFactory:

@property
def avalon_ents_by_name(self):
"""
Returns dictionary of Mongo ids of avalon tracked entities
(assets stored in MongoDB) accessible by their 'name'
(example 'Hero')
Returns:
(dictionary) - {"(name)": "_id"}
"""
if self._avalon_ents_by_name is None:
self._avalon_ents_by_name = {}
for entity in self.avalon_entities:

@@ -446,6 +512,15 @@ class SyncEntitiesFactory:

@property
def avalon_ents_by_parent_id(self):
"""
Returns dictionary of avalon tracked entities
(assets stored in MongoDB) accessible by their 'visualParent'
(example ObjectId("5f48de5830a9467b34b69798"))

Fills 'self._avalon_ents_by_parent_id' for performance
Returns:
(dictionary) - {"(_id)": whole entity}
"""
if self._avalon_ents_by_parent_id is None:
self._avalon_ents_by_parent_id = collections.defaultdict(list)
for entity in self.avalon_entities:

@@ -458,6 +533,14 @@ class SyncEntitiesFactory:

@property
def avalon_archived_ents(self):
"""
Returns list of archived assets from DB
(their "type" == 'archived_asset')

Fills 'self._avalon_archived_ents' for performance
Returns:
(list) of assets
"""
if self._avalon_archived_ents is None:
self._avalon_archived_ents = [
ent for ent in self.dbcon.find({"type": "archived_asset"})

@@ -466,6 +549,14 @@ class SyncEntitiesFactory:

@property
def avalon_archived_by_name(self):
"""
Returns list of archived assets from DB
(their "type" == 'archived_asset')

Fills 'self._avalon_archived_by_name' for performance
Returns:
(dictionary of lists) of assets accessible by asset name
"""
if self._avalon_archived_by_name is None:
self._avalon_archived_by_name = collections.defaultdict(list)
for ent in self.avalon_archived_ents:

@@ -474,6 +565,14 @@ class SyncEntitiesFactory:

@property
def avalon_archived_by_id(self):
"""
Returns dictionary of archived assets from DB
(their "type" == 'archived_asset')

Fills 'self._avalon_archived_by_id' for performance
Returns:
(dictionary) of assets accessible by asset mongo _id
"""
if self._avalon_archived_by_id is None:
self._avalon_archived_by_id = {
str(ent["_id"]): ent for ent in self.avalon_archived_ents

@@ -482,6 +581,15 @@ class SyncEntitiesFactory:

@property
def avalon_archived_by_parent_id(self):
"""
Returns dictionary of archived assets from DB per their parent
(their "type" == 'archived_asset')

Fills 'self._avalon_archived_by_parent_id' for performance
Returns:
(dictionary of lists) of assets accessible by asset parent
mongo _id
"""
if self._avalon_archived_by_parent_id is None:
self._avalon_archived_by_parent_id = collections.defaultdict(list)
for entity in self.avalon_archived_ents:

@@ -494,6 +602,14 @@ class SyncEntitiesFactory:

@property
def subsets_by_parent_id(self):
"""
Returns dictionary of subsets from Mongo ("type": "subset")
grouped by their parent.

Fills 'self._subsets_by_parent_id' for performance
Returns:
(dictionary of lists)
"""
if self._subsets_by_parent_id is None:
self._subsets_by_parent_id = collections.defaultdict(list)
for subset in self.dbcon.find({"type": "subset"}):

@@ -515,6 +631,11 @@ class SyncEntitiesFactory:

@property
def all_ftrack_names(self):
"""
Returns list of names of all entities in Ftrack
Returns:
(list)
"""
return [
ent_dict["name"] for ent_dict in self.entities_dict.values() if (
ent_dict.get("name")

@@ -534,8 +655,9 @@ class SyncEntitiesFactory:
name = entity_dict["name"]
entity_type = entity_dict["entity_type"]
# Tasks must be checked too
for task_name in entity_dict["tasks"]:
passed = task_names.get(task_name)
for task in entity_dict["tasks"].items():
task_name, task = task
passed = task_name
if passed is None:
passed = check_regex(
task_name, "task", schema_patterns=_schema_patterns
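The properties above all follow the same lazily cached lookup-table pattern their docstrings describe (compute once on first access, reuse afterwards). A minimal, self-contained sketch of that pattern, with made-up stand-in data instead of the real Mongo documents:

# Illustrative sketch only: lazy, cached lookup table built on first access.
class EntityCache:
    def __init__(self, entities):
        self._entities = entities
        self._by_id = None

    @property
    def by_id(self):
        if self._by_id is None:
            # built once, then reused on every later access
            self._by_id = {ent["_id"]: ent for ent in self._entities}
        return self._by_id


cache = EntityCache([{"_id": "a1", "name": "Hero"}])
print(cache.by_id["a1"]["name"])  # Hero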
@@ -1014,9 +1136,13 @@ class SyncEntitiesFactory:
if not msg or not items:
continue
self.report_items["warning"][msg] = items

tasks = {}
for tt in task_types:
tasks[tt["name"]] = {
"short_name": get_task_short_name(tt["name"])
}
self.entities_dict[id]["final_entity"]["config"] = {
"tasks": [{"name": tt["name"]} for tt in task_types],
"tasks": tasks,
"apps": proj_apps
}
continue

@@ -1029,7 +1155,7 @@ class SyncEntitiesFactory:

data["parents"] = parents
data["hierarchy"] = hierarchy
data["tasks"] = self.entities_dict[id].pop("tasks", [])
data["tasks"] = self.entities_dict[id].pop("tasks", {})
self.entities_dict[id]["final_entity"]["data"] = data
self.entities_dict[id]["final_entity"]["type"] = "asset"
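These hunks switch the project config "tasks" entry from a list of names to a dictionary keyed by task name. An illustrative before/after of the data shape (the values are example data, not real presets):

# Illustrative sketch only: the "tasks" shape change made above.
tasks_before = [{"name": "Animation"}, {"name": "Modeling"}]

tasks_after = {
    "Animation": {"short_name": "anim"},
    "Modeling": {"short_name": "model"},
}

# consumers can now iterate names and metadata together
for task_name, task_info in tasks_after.items():
    print(task_name, task_info.get("short_name"))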
@@ -1904,10 +2030,10 @@ class SyncEntitiesFactory:
filter = {"_id": ObjectId(mongo_id)}
change_data = from_dict_to_set(changes)
mongo_changes_bulk.append(UpdateOne(filter, change_data))

if not mongo_changes_bulk:
# TODO LOG
return
log.debug("mongo_changes_bulk:: {}".format(mongo_changes_bulk))
self.dbcon.bulk_write(mongo_changes_bulk)

def reload_parents(self, hierarchy_changing_ids):
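A minimal sketch of the bulk-update flow used above, written against plain pymongo. It assumes a reachable MongoDB instance; the collection name and ids are placeholders.

# Illustrative sketch only: collect UpdateOne operations, flush once.
from bson.objectid import ObjectId
from pymongo import MongoClient, UpdateOne

collection = MongoClient()["avalon"]["project"]  # placeholder collection
changes_by_id = {
    "5f48de5830a9467b34b69798": {"$set": {"data.fps": 25}},
}

bulk = [
    UpdateOne({"_id": ObjectId(mongo_id)}, change)
    for mongo_id, change in changes_by_id.items()
]
if bulk:
    collection.bulk_write(bulk)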
@@ -1,6 +1,7 @@
import sys
import six

import pyblish.api
import six
from avalon import io

try:

@@ -143,15 +144,17 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin):
# existing_tasks.append(child['type']['name'])

for task in tasks:
if task.lower() in existing_tasks:
task_name = next(iter(task))
task_type = task[task_name]["type"]
if task_name.lower() in existing_tasks:
print("Task {} already exists".format(task))
continue
tasks_to_create.append(task)
tasks_to_create.append((task_name, task_type))

for task in tasks_to_create:
for task_name, task_type in tasks_to_create:
self.create_task(
name=task,
task_type=task,
name=task_name,
task_type=task_type,
parent=entity
)
try:
@@ -4,16 +4,16 @@ import avalon.api
from avalon import fusion


class CreateTiffSaver(avalon.api.Creator):
class CreateOpenEXRSaver(avalon.api.Creator):

name = "tiffDefault"
label = "Create Tiff Saver"
name = "openexrDefault"
label = "Create OpenEXR Saver"
hosts = ["fusion"]
family = "saver"
family = "render"

def process(self):

file_format = "TiffFormat"
file_format = "OpenEXRFormat"

comp = fusion.get_current_comp()

@@ -23,7 +23,7 @@ class CreateTiffSaver(avalon.api.Creator):
workdir = os.path.normpath(os.environ["AVALON_WORKDIR"])

filename = "{}..tiff".format(self.name)
filepath = os.path.join(workdir, "render", "preview", filename)
filepath = os.path.join(workdir, "render", filename)

with fusion.comp_lock_and_undo_chunk(comp):
args = (-32768, -32768)  # Magical position numbers

@@ -43,4 +43,3 @@ class CreateTiffSaver(avalon.api.Creator):
# Set file format attributes
saver[file_format]["Depth"] = 1  # int8 | int16 | float32 | other
saver[file_format]["SaveAlpha"] = 0
@@ -1,8 +1,11 @@
import os
import contextlib
import os

from avalon import api
import avalon.io as io
from avalon import api
from avalon import fusion

comp = fusion.get_current_comp()


@contextlib.contextmanager

@@ -113,7 +116,7 @@ def loader_shift(loader, frame, relative=True):
class FusionLoadSequence(api.Loader):
"""Load image sequence into Fusion"""

families = ["imagesequence"]
families = ["imagesequence", "review"]
representations = ["*"]

label = "Load sequence"

@@ -134,7 +137,7 @@ class FusionLoadSequence(api.Loader):
namespace = context['asset']['name']

# Use the first file for now
path = self._get_first_image(self.fname)
path = self._get_first_image(os.path.dirname(self.fname))

# Create the Loader with the filename path set
comp = get_current_comp()
@@ -43,8 +43,8 @@ class CollectInstances(pyblish.api.ContextPlugin):
savers = [tool for tool in tools if tool.ID == "Saver"]

start, end = get_comp_render_range(comp)
context.data["frameStart"] = start
context.data["frameEnd"] = end
context.data["frameStart"] = int(start)
context.data["frameEnd"] = int(end)

for tool in savers:
path = tool["Clip"][comp.TIME_UNDEFINED]

@@ -76,8 +76,11 @@ class CollectInstances(pyblish.api.ContextPlugin):
"outputDir": os.path.dirname(path),
"ext": ext,  # todo: should be redundant
"label": label,
"families": ["saver"],
"family": "saver",
"frameStart": context.data["frameStart"],
"frameEnd": context.data["frameEnd"],
"fps": context.data["fps"],
"families": ["render", "review", "ftrack"],
"family": "render",
"active": active,
"publish": active  # backwards compatibility
})
@@ -5,8 +5,8 @@ class CollectFusionRenderMode(pyblish.api.InstancePlugin):
"""Collect current comp's render Mode

Options:
renderlocal
deadline
local
farm

Note that this value is set for each comp separately. When you save the
comp this information will be stored in that file. If for some reason the

@@ -23,22 +23,22 @@ class CollectFusionRenderMode(pyblish.api.InstancePlugin):
order = pyblish.api.CollectorOrder + 0.4
label = "Collect Render Mode"
hosts = ["fusion"]
families = ["saver"]
families = ["render"]

def process(self, instance):
"""Collect all image sequence tools"""
options = ["renderlocal", "deadline"]
options = ["local", "farm"]

comp = instance.context.data.get("currentComp")
if not comp:
raise RuntimeError("No comp previously collected, unable to "
"retrieve Fusion version.")

rendermode = comp.GetData("pype.rendermode") or "renderlocal"
rendermode = comp.GetData("pype.rendermode") or "local"
assert rendermode in options, "Must be supported render mode"

self.log.info("Render mode: {0}".format(rendermode))

# Append family
family = "saver.{0}".format(rendermode)
family = "render.{0}".format(rendermode)
instance.data["families"].append(family)
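The docstring above notes the render mode is stored per comp and saved with the comp file. A minimal sketch of reading and writing that value from a live Fusion session; it assumes comp.SetData mirrors the comp.GetData call used by the collector.

# Illustrative sketch only (requires a running Fusion session).
from avalon import fusion

comp = fusion.get_current_comp()

# persist the chosen mode with the comp file; collector defaults to "local"
comp.SetData("pype.rendermode", "farm")
rendermode = comp.GetData("pype.rendermode") or "local"
print(rendermode)  # farm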
@@ -11,7 +11,7 @@ class FusionIncrementCurrentFile(pyblish.api.ContextPlugin):
label = "Increment current file"
order = pyblish.api.IntegratorOrder + 9.0
hosts = ["fusion"]
families = ["saver.deadline"]
families = ["render.farm"]
optional = True

def process(self, context):

@@ -23,7 +23,7 @@ class FusionIncrementCurrentFile(pyblish.api.ContextPlugin):
if any(plugin.__name__ == "FusionSubmitDeadline"
for plugin in errored_plugins):
raise RuntimeError("Skipping incrementing current file because "
"submission to deadline failed.")
"submission to render farm failed.")

comp = context.data.get("currentComp")
assert comp, "Must have comp"
@@ -1,98 +0,0 @@
import re
import os
import json
import subprocess

import pyblish.api

from pype.action import get_errored_plugins_from_data


def _get_script():
"""Get path to the image sequence script"""

# todo: use a more elegant way to get the python script

try:
from pype.scripts import publish_filesequence
except Exception:
raise RuntimeError("Expected module 'publish_imagesequence'"
"to be available")

module_path = publish_filesequence.__file__
if module_path.endswith(".pyc"):
module_path = module_path[:-len(".pyc")] + ".py"

return module_path


class PublishImageSequence(pyblish.api.InstancePlugin):
"""Publish the generated local image sequences."""

order = pyblish.api.IntegratorOrder
label = "Publish Rendered Image Sequence(s)"
hosts = ["fusion"]
families = ["saver.renderlocal"]

def process(self, instance):

# Skip this plug-in if the ExtractImageSequence failed
errored_plugins = get_errored_plugins_from_data(instance.context)
if any(plugin.__name__ == "FusionRenderLocal" for plugin in
errored_plugins):
raise RuntimeError("Fusion local render failed, "
"publishing images skipped.")

subset = instance.data["subset"]
ext = instance.data["ext"]

# Regex to match resulting renders
regex = "^{subset}.*[0-9]+{ext}+$".format(subset=re.escape(subset),
ext=re.escape(ext))

# The instance has most of the information already stored
metadata = {
"regex": regex,
"frameStart": instance.context.data["frameStart"],
"frameEnd": instance.context.data["frameEnd"],
"families": ["imagesequence"],
}

# Write metadata and store the path in the instance
output_directory = instance.data["outputDir"]
path = os.path.join(output_directory,
"{}_metadata.json".format(subset))
with open(path, "w") as f:
json.dump(metadata, f)

assert os.path.isfile(path), ("Stored path is not a file for %s"
% instance.data["name"])

# Suppress any subprocess console
startupinfo = subprocess.STARTUPINFO()
startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW
startupinfo.wShowWindow = subprocess.SW_HIDE

process = subprocess.Popen(["python", _get_script(),
"--paths", path],
bufsize=1,
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT,
startupinfo=startupinfo)

while True:
output = process.stdout.readline()
# Break when there is no output or a return code has been given
if output == '' and process.poll() is not None:
process.stdout.close()
break
if output:
line = output.strip()
if line.startswith("ERROR"):
self.log.error(line)
else:
self.log.info(line)

if process.returncode != 0:
raise RuntimeError("Process quit with non-zero "
"return code: {}".format(process.returncode))
@@ -1,9 +1,11 @@
import pyblish.api
import os
from pprint import pformat

import avalon.fusion as fusion
import pyblish.api


class FusionRenderLocal(pyblish.api.InstancePlugin):
class Fusionlocal(pyblish.api.InstancePlugin):
"""Render the current Fusion composition locally.

Extract the result of savers by starting a comp render

@@ -11,15 +13,13 @@ class FusionRenderLocal(pyblish.api.InstancePlugin):

"""

order = pyblish.api.ExtractorOrder
order = pyblish.api.ExtractorOrder - 0.1
label = "Render Local"
hosts = ["fusion"]
families = ["saver.renderlocal"]
families = ["render.local"]

def process(self, instance):

# This should be a ContextPlugin, but this is a workaround
# for a bug in pyblish to run once for a family: issue #250
context = instance.context
key = "__hasRun{}".format(self.__class__.__name__)
if context.data.get(key, False):

@@ -28,15 +28,40 @@ class FusionRenderLocal(pyblish.api.InstancePlugin):
context.data[key] = True

current_comp = context.data["currentComp"]
start_frame = current_comp.GetAttrs("COMPN_RenderStart")
end_frame = current_comp.GetAttrs("COMPN_RenderEnd")
frame_start = current_comp.GetAttrs("COMPN_RenderStart")
frame_end = current_comp.GetAttrs("COMPN_RenderEnd")
path = instance.data["path"]
output_dir = instance.data["outputDir"]

ext = os.path.splitext(os.path.basename(path))[-1]

self.log.info("Starting render")
self.log.info("Start frame: {}".format(start_frame))
self.log.info("End frame: {}".format(end_frame))
self.log.info("Start frame: {}".format(frame_start))
self.log.info("End frame: {}".format(frame_end))

with fusion.comp_lock_and_undo_chunk(current_comp):
result = current_comp.Render()

if "representations" not in instance.data:
instance.data["representations"] = []

collected_frames = os.listdir(output_dir)
repre = {
'name': ext[1:],
'ext': ext[1:],
'frameStart': "%0{}d".format(len(str(frame_end))) % frame_start,
'files': collected_frames,
"stagingDir": output_dir,
}
instance.data["representations"].append(repre)

# review representation
repre_preview = repre.copy()
repre_preview["name"] = repre_preview["ext"] = "mp4"
repre_preview["tags"] = ["review", "preview", "ftrackreview", "delete"]
instance.data["representations"].append(repre_preview)

self.log.debug(f"_ instance.data: {pformat(instance.data)}")

if not result:
raise RuntimeError("Comp render failed")
@@ -7,7 +7,7 @@ class FusionSaveComp(pyblish.api.ContextPlugin):
label = "Save current file"
order = pyblish.api.ExtractorOrder - 0.49
hosts = ["fusion"]
families = ["saver"]
families = ["render"]

def process(self, context):
@@ -1,11 +1,10 @@
import os
import json
import getpass

from avalon import api
from avalon.vendor import requests
import json
import os

import pyblish.api
from avalon import api
from avalon.vendor import requests


class FusionSubmitDeadline(pyblish.api.InstancePlugin):

@@ -19,10 +18,9 @@ class FusionSubmitDeadline(pyblish.api.InstancePlugin):
label = "Submit to Deadline"
order = pyblish.api.IntegratorOrder
hosts = ["fusion"]
families = ["saver.deadline"]
families = ["render.farm"]

def process(self, instance):
instance.data["toBeRenderedOn"] = "deadline"
context = instance.context

key = "__hasRun{}".format(self.__class__.__name__)
@@ -10,7 +10,7 @@ class ValidateBackgroundDepth(pyblish.api.InstancePlugin):
label = "Validate Background Depth 32 bit"
actions = [action.RepairAction]
hosts = ["fusion"]
families = ["saver"]
families = ["render"]
optional = True

@classmethod

@@ -8,7 +8,7 @@ class ValidateFusionCompSaved(pyblish.api.ContextPlugin):

order = pyblish.api.ValidatorOrder
label = "Validate Comp Saved"
families = ["saver"]
families = ["render"]
hosts = ["fusion"]

def process(self, context):

@@ -13,7 +13,7 @@ class ValidateCreateFolderChecked(pyblish.api.InstancePlugin):
order = pyblish.api.ValidatorOrder
actions = [action.RepairAction]
label = "Validate Create Folder Checked"
families = ["saver"]
families = ["render"]
hosts = ["fusion"]

@classmethod

@@ -14,7 +14,7 @@ class ValidateFilenameHasExtension(pyblish.api.InstancePlugin):

order = pyblish.api.ValidatorOrder
label = "Validate Filename Has Extension"
families = ["saver"]
families = ["render"]
hosts = ["fusion"]

def process(self, instance):

@@ -10,7 +10,7 @@ class ValidateSaverHasInput(pyblish.api.InstancePlugin):

order = pyblish.api.ValidatorOrder
label = "Validate Saver Has Input"
families = ["saver"]
families = ["render"]
hosts = ["fusion"]

@classmethod

@@ -6,7 +6,7 @@ class ValidateSaverPassthrough(pyblish.api.ContextPlugin):

order = pyblish.api.ValidatorOrder
label = "Validate Saver Passthrough"
families = ["saver"]
families = ["render"]
hosts = ["fusion"]

def process(self, context):

@@ -6,7 +6,7 @@ class ValidateUniqueSubsets(pyblish.api.InstancePlugin):

order = pyblish.api.ValidatorOrder
label = "Validate Unique Subsets"
families = ["saver"]
families = ["render"]
hosts = ["fusion"]

@classmethod

@@ -14,7 +14,7 @@ class ValidateUniqueSubsets(pyblish.api.InstancePlugin):

context = instance.context
subset = instance.data["subset"]
for other_instance in context[:]:
for other_instance in context:
if other_instance == instance:
continue
@@ -8,8 +8,8 @@ Provides:
context -> assetEntity - asset entity from database
"""

from avalon import io, api
import pyblish.api
from avalon import io, api


class CollectAvalonEntities(pyblish.api.ContextPlugin):

@@ -86,3 +86,5 @@ class CollectAvalonEntities(pyblish.api.ContextPlugin):
frame_end_h = frame_end + context.data["handleEnd"]
context.data["frameStartHandle"] = frame_start_h
context.data["frameEndHandle"] = frame_end_h

context.data["fps"] = data["fps"]

@@ -1,5 +1,6 @@
import os
import getpass
import os

import pyblish.api


@@ -13,7 +14,7 @@ class CollectCurrentUserPype(pyblish.api.ContextPlugin):
def process(self, context):
user = os.getenv("PYPE_USERNAME", "").strip()
if not user:
return
user = context.data.get("user", getpass.getuser())

context.data["user"] = user
self.log.debug("Pype user is \"{}\"".format(user))
self.log.debug("Collected user \"{}\"".format(user))
@@ -1,10 +1,11 @@
import copy
import json
import os
import re
import json
import copy

import pyblish

import pype.api
import pyblish


class ExtractBurnin(pype.api.Extractor):

@@ -23,10 +24,11 @@ class ExtractBurnin(pype.api.Extractor):
"nuke",
"maya",
"shell",
"nukestudio",
"hiero",
"premiere",
"standalonepublisher",
"harmony"
"fusion"
]
optional = True

@@ -314,12 +316,15 @@ class ExtractBurnin(pype.api.Extractor):
"comment": context.data.get("comment") or ""
})

intent_label = context.data.get("intent")
intent_label = context.data.get("intent") or ""
if intent_label and isinstance(intent_label, dict):
intent_label = intent_label.get("label")
value = intent_label.get("value")
if value:
intent_label = intent_label["label"]
else:
intent_label = ""

if intent_label:
burnin_data["intent"] = intent_label
burnin_data["intent"] = intent_label

temp_data = {
"frame_start": frame_start,
@@ -1,6 +1,8 @@
from copy import deepcopy

import pyblish.api
from avalon import io
from copy import deepcopy


class ExtractHierarchyToAvalon(pyblish.api.ContextPlugin):
"""Create entities in Avalon based on collected data."""

@@ -59,7 +61,7 @@ class ExtractHierarchyToAvalon(pyblish.api.ContextPlugin):
data["inputs"] = entity_data.get("inputs", [])

# Tasks.
tasks = entity_data.get("tasks", [])
tasks = entity_data.get("tasks", {})
if tasks is not None or len(tasks) > 0:
data["tasks"] = tasks
parents = []

@@ -99,11 +101,14 @@ class ExtractHierarchyToAvalon(pyblish.api.ContextPlugin):
if entity:
# Do not override data, only update
cur_entity_data = entity.get("data") or {}
new_tasks = data.pop("tasks", [])
if "tasks" in cur_entity_data and new_tasks:
for task_name in new_tasks:
if task_name not in cur_entity_data["tasks"]:
cur_entity_data["tasks"].append(task_name)
new_tasks = data.pop("tasks", {})
if "tasks" not in cur_entity_data and not new_tasks:
continue
for task in new_tasks:
task_name = next(iter(task))
if task_name in cur_entity_data["tasks"].keys():
continue
cur_entity_data["tasks"][task_name] = task[task_name]
cur_entity_data.update(data)
data = cur_entity_data
else:
@@ -1,6 +1,7 @@
import os

import pyblish.api

import pype.api
import pype.lib


@@ -9,7 +10,7 @@ class ExtractJpegEXR(pyblish.api.InstancePlugin):
"""Create jpg thumbnail from sequence using ffmpeg"""

label = "Extract Jpeg EXR"
hosts = ["shell"]
hosts = ["shell", "fusion"]
order = pyblish.api.ExtractorOrder
families = ["imagesequence", "render", "render2d", "source"]
enabled = False
@@ -1,9 +1,11 @@
import os
import re
import copy
import json
import pyblish.api
import os
import re

import clique
import pyblish.api

import pype.api
import pype.lib


@@ -26,10 +28,11 @@ class ExtractReview(pyblish.api.InstancePlugin):
"nuke",
"maya",
"shell",
"nukestudio",
"hiero",
"premiere",
"harmony",
"standalonepublisher"
"standalonepublisher",
"fusion"
]

# Supported extensions
90
pype/plugins/global/publish/extract_scanline_exr.py
Normal file

@@ -0,0 +1,90 @@
# -*- coding: utf-8 -*-
"""Convert EXRs in representation to scanline EXRs using OIIO tools."""
import os
import shutil

import pyblish.api

import pype.api
import pype.lib


class ExtractScanlineExr(pyblish.api.InstancePlugin):
"""Convert tiled EXRs to scanline using OIIO tool."""

label = "Extract Scanline EXR"
hosts = ["shell"]
order = pyblish.api.ExtractorOrder
families = ["imagesequence", "render", "render2d", "source"]

def process(self, instance):
"""Plugin entry point."""
# get representation and loop them
representations = instance.data["representations"]

representations_new = []

for repre in representations:
self.log.info(
"Processing representation {}".format(repre.get("name")))
tags = repre.get("tags", [])
if "toScanline" not in tags:
self.log.info(" - missing toScanline tag")
continue

# run only on exrs
if repre.get("ext") != "exr":
self.log.info("- not EXR files")
continue

if not isinstance(repre['files'], (list, tuple)):
input_files = [repre['files']]
self.log.info("We have a single frame")
else:
input_files = repre['files']
self.log.info("We have a sequence")

stagingdir = os.path.normpath(repre.get("stagingDir"))

oiio_tool_path = os.getenv("PYPE_OIIO_PATH", "")

for file in input_files:

original_name = os.path.join(stagingdir, file)
temp_name = os.path.join(stagingdir, "__{}".format(file))
# move original render to temp location
shutil.move(original_name, temp_name)
oiio_cmd = []
oiio_cmd.append(oiio_tool_path)
oiio_cmd.append(
os.path.join(stagingdir, temp_name)
)
oiio_cmd.append("--scanline")
oiio_cmd.append("-o")
oiio_cmd.append(os.path.join(stagingdir, original_name))

subprocess_exr = " ".join(oiio_cmd)
self.log.info(f"running: {subprocess_exr}")
pype.api.subprocess(subprocess_exr)

# raise error if there is no output
if not os.path.exists(os.path.join(stagingdir, original_name)):
self.log.error(
("File {} was not converted "
"by oiio tool!").format(original_name))
raise AssertionError("OIIO tool conversion failed")
else:
try:
os.remove(temp_name)
except OSError as e:
self.log.warning("Unable to delete temp file")
self.log.warning(e)

repre['name'] = 'exr'
try:
repre['tags'].remove('toScanline')
except ValueError:
# no `toScanline` tag present
pass

instance.data["representations"] += representations_new
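A minimal sketch of the conversion command the new plugin assembles (move the tiled EXR aside, rewrite it as scanline in place). The oiiotool location and file paths are placeholders.

# Illustrative sketch only: tiled-to-scanline conversion with oiiotool.
import subprocess

oiio_tool_path = "oiiotool"          # or the binary from PYPE_OIIO_PATH
src = "/tmp/render/__shot010.exr"    # tiled EXR moved aside first
dst = "/tmp/render/shot010.exr"      # scanline EXR written back in place

subprocess.check_call([oiio_tool_path, src, "--scanline", "-o", dst])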
@@ -1,20 +1,21 @@
import os
from os.path import getsize
import logging
import sys
import copy
import clique
import errno
import six
import logging
import os
import re
import shutil
import sys
from datetime import datetime
from os.path import getsize

from pymongo import DeleteOne, InsertOne
import clique
import pyblish.api
import six
from avalon import io
from avalon.vendor import filelink
from pymongo import DeleteOne, InsertOne

import pype.api
from datetime import datetime

# this is needed until speedcopy for linux is fixed
if sys.platform == "win32":

@@ -521,8 +522,8 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
# get 'files' info for representation and all attached resources
self.log.debug("Preparing files information ...")
representation["files"] = self.get_files_info(
instance,
self.integrated_file_sizes)
instance,
self.integrated_file_sizes)

self.log.debug("__ representation: {}".format(representation))
destination_list.append(dst)

@@ -543,10 +544,9 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
repre_ids_to_remove.append(repre["_id"])
io.delete_many({"_id": {"$in": repre_ids_to_remove}})

self.log.debug("__ representations: {}".format(representations))
for rep in instance.data["representations"]:
self.log.debug("__ represNAME: {}".format(rep['name']))
self.log.debug("__ represPATH: {}".format(rep['published_path']))
self.log.debug("__ rep: {}".format(rep))

io.insert_many(representations)
instance.data["published_representations"] = (
published_representations
@@ -1,16 +1,15 @@
# -*- coding: utf-8 -*-
"""Submit publishing job to farm."""

import os
import json
import os
import re
from copy import copy

import pyblish.api
from avalon import api, io
from avalon.vendor import requests, clique

import pyblish.api


def _get_script(path):

@@ -174,7 +173,8 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
"FTRACK_SERVER",
"PYPE_METADATA_FILE",
"AVALON_PROJECT",
"PYPE_LOG_NO_COLORS"
"PYPE_LOG_NO_COLORS",
"PYPE_USERNAME"
]

# custom deadline attributes

@@ -193,7 +193,8 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
"slate": ["slateFrame"],
"review": ["lutPath"],
"render2d": ["bakeScriptPath", "bakeRenderPath",
"bakeWriteNodeName", "version"]
"bakeWriteNodeName", "version"],
"renderlayer": ["convertToScanline"]
}

# list of family names to transfer to new family if present

@@ -297,6 +298,7 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
environment["PYPE_METADATA_FILE"] = roothless_metadata_path
environment["AVALON_PROJECT"] = io.Session["AVALON_PROJECT"]
environment["PYPE_LOG_NO_COLORS"] = "1"
environment["PYPE_USERNAME"] = instance.context.data["user"]
try:
environment["PYPE_PYTHON_EXE"] = os.environ["PYPE_PYTHON_EXE"]
except KeyError:

@@ -491,6 +493,11 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
"tags": ["review"] if preview else []
}

# support conversion from tiled to scanline
if instance_data.get("convertToScanline"):
self.log.info("Adding scanline conversion.")
rep["tags"].append("toScanline")

# poor man exclusion
if ext in self.skip_integration_repre_list:
rep["tags"].append("delete")

@@ -581,6 +588,11 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
if instance.get("multipartExr", False):
rep["tags"].append("multipartExr")

# support conversion from tiled to scanline
if instance.get("convertToScanline"):
self.log.info("Adding scanline conversion.")
rep["tags"].append("toScanline")

representations.append(rep)

self._solve_families(instance, preview)

@@ -727,6 +739,11 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
"family": "prerender",
"families": []})

# skip locking version if we are creating v01
instance_version = instance.data.get("version")
if instance_version != 1:
instance_skeleton_data["version"] = instance_version

# transfer specific families from original instance to new render
for item in self.families_transfer:
if item in instance.data.get("families", []):

@@ -1,4 +1,5 @@
import pyblish.api

import pype.api


@@ -46,7 +47,7 @@ class ValidateFtrackAttributes(pyblish.api.InstancePlugin):
"houdini",
"maya",
"nuke",
"nukestudio",
"hiero",
"photoshop",
"premiere",
"resolve",
@@ -13,13 +13,14 @@ class CreateRender(harmony.Creator):
super(CreateRender, self).__init__(*args, **kwargs)

def setup_node(self, node):
func = """function func(args)
sig = harmony.signature()
func = """function %s(args)
{
node.setTextAttr(args[0], "DRAWING_TYPE", 1, "PNG4");
node.setTextAttr(args[0], "DRAWING_NAME", 1, args[1]);
node.setTextAttr(args[0], "MOVIE_PATH", 1, args[1]);
}
func
"""
%s
""" % (sig, sig)
path = "{0}/{0}".format(node.split("/")[-1])
harmony.send({"function": func, "args": [node, path]})
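The Harmony hunks in this commit all switch from a hard-coded "func" name to a name supplied by harmony.signature() (from avalon.harmony, per this diff), substituted twice into the JavaScript snippet. A minimal sketch of just that string templating, with a placeholder standing in for the generated name:

# Illustrative sketch only: the %s-substitution pattern used above.
sig = "pype_func_42"  # stand-in for harmony.signature()

func = """function %s(args)
{
    node.setTextAttr(args[0], "DRAWING_NAME", 1, args[1]);
}
%s
""" % (sig, sig)

print(func)
# The snippet defines the uniquely named function and ends with that name,
# so evaluating it in Harmony yields the function that send() then calls.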
@@ -1,6 +1,6 @@
from avalon import api, harmony


sig = harmony.signature()
func = """
function getUniqueColumnName( column_prefix )
{

@@ -18,14 +18,14 @@ function getUniqueColumnName( column_prefix )
return column_name;
}

function func(args)
function %s(args)
{
var uniqueColumnName = getUniqueColumnName(args[0]);
column.add(uniqueColumnName , "SOUND");
column.importSound(uniqueColumnName, 1, args[1]);
}
func
"""
%s
""" % (sig, sig)


class ImportAudioLoader(api.Loader):
@@ -1,11 +1,9 @@
import json
import os
import uuid

import clique

from avalon import api, harmony

import pype.lib
import json

copy_files = """function copyFile(srcFilename, dstFilename)
{

@@ -256,7 +254,9 @@ class BackgroundLoader(api.Loader):
container_nodes = []

for layer in sorted(layers):
file_to_import = [os.path.join(bg_folder, layer).replace("\\", "/")]
file_to_import = [
os.path.join(bg_folder, layer).replace("\\", "/")
]

read_node = harmony.send(
{

@@ -301,8 +301,10 @@ class BackgroundLoader(api.Loader):
print(container)

for layer in sorted(layers):
file_to_import = [os.path.join(bg_folder, layer).replace("\\", "/")]
print(20*"#")
file_to_import = [
os.path.join(bg_folder, layer).replace("\\", "/")
]
print(20 * "#")
print(f"FILE TO REPLACE: {file_to_import}")
print(f"LAYER: {layer}")
node = harmony.find_node_by_name(layer, "READ")

@@ -324,9 +326,9 @@ class BackgroundLoader(api.Loader):
)["result"]
container['nodes'].append(read_node)


# Colour node.
func = """function func(args){
sig = harmony.signature("set_color")
func = """function %s(args){
for( var i =0; i <= args[0].length - 1; ++i)
{
var red_color = new ColorRGBA(255, 0, 0, 255);

@@ -339,8 +341,8 @@ class BackgroundLoader(api.Loader):
}
}
}
func
"""
%s
""" % (sig, sig)
if pype.lib.is_latest(representation):
harmony.send({"function": func, "args": [node, "green"]})
else:
@@ -2,8 +2,8 @@ import os
import uuid

import clique

from avalon import api, harmony

import pype.lib

copy_files = """function copyFile(srcFilename, dstFilename)
{

@@ -301,7 +301,8 @@ class ImageSequenceLoader(api.Loader):
)

# Colour node.
func = """function func(args){
sig = harmony.signature("copyFile")
func = """function %s(args){
for( var i =0; i <= args[0].length - 1; ++i)
{
var red_color = new ColorRGBA(255, 0, 0, 255);

@@ -314,8 +315,8 @@ class ImageSequenceLoader(api.Loader):
}
}
}
func
"""
%s
""" % (sig, sig)
if pype.lib.is_latest(representation):
harmony.send({"function": func, "args": [node, "green"]})
else:
@@ -1,7 +1,7 @@
import tempfile
import zipfile
import os
import shutil
import tempfile
import zipfile

from avalon import api, harmony


@@ -21,15 +21,16 @@ class ImportTemplateLoader(api.Loader):
with zipfile.ZipFile(zip_file, "r") as zip_ref:
zip_ref.extractall(template_path)

func = """function func(args)
sig = harmony.signature("paste")
func = """function %s(args)
{
var template_path = args[0];
var drag_object = copyPaste.pasteTemplateIntoGroup(
template_path, "Top", 1
);
}
func
"""
%s
""" % (sig, sig)

harmony.send({"function": func, "args": [template_path]})
@@ -13,15 +13,16 @@ class CollectCurrentFile(pyblish.api.ContextPlugin):

def process(self, context):
"""Inject the current working file"""
func = """function func()
sig = harmony.signature()
func = """function %s()
{
return (
scene.currentProjectPath() + "/" +
scene.currentVersionName() + ".xstage"
);
}
func
"""
%s
""" % (sig, sig)

current_file = harmony.send({"function": func})["result"]
context.data["currentFile"] = os.path.normpath(current_file)
@@ -1,5 +1,5 @@
import os
import json
import os

import pyblish.api
from avalon import harmony

@@ -13,7 +13,8 @@ class CollectPalettes(pyblish.api.ContextPlugin):
hosts = ["harmony"]

def process(self, context):
func = """function func()
sig = harmony.signature()
func = """function %s()
{
var palette_list = PaletteObjectManager.getScenePaletteList();


@@ -26,8 +27,8 @@ class CollectPalettes(pyblish.api.ContextPlugin):

return palettes;
}
func
"""
%s
""" % (sig, sig)
palettes = harmony.send({"function": func})["result"]

for name, id in palettes.items():
@@ -1,6 +1,7 @@
import os

from avalon import harmony

import pype.api
import pype.hosts.harmony


@@ -13,14 +14,15 @@ class ExtractPalette(pype.api.Extractor):
families = ["harmony.palette"]

def process(self, instance):
func = """function func(args)
sig = harmony.signature()
func = """function %s(args)
{
var palette_list = PaletteObjectManager.getScenePaletteList();
var palette = palette_list.getPaletteById(args[0]);
return (palette.getPath() + "/" + palette.getName() + ".plt");
}
func
"""
%s
""" % (sig, sig)
palette_file = harmony.send(
{"function": func, "args": [instance.data["id"]]}
)["result"]
@@ -1,12 +1,12 @@
import os
import tempfile
import subprocess

import pyblish.api
from avalon import harmony
import pype.lib
import tempfile

import clique
import pyblish.api
from avalon import harmony

import pype.lib


class ExtractRender(pyblish.api.InstancePlugin):

@@ -21,7 +21,8 @@ class ExtractRender(pyblish.api.InstancePlugin):

def process(self, instance):
# Collect scene data.
func = """function func(write_node)
sig = harmony.signature()
func = """function %s(write_node)
{
return [
about.getApplicationPath(),

@@ -33,8 +34,8 @@ class ExtractRender(pyblish.api.InstancePlugin):
sound.getSoundtrackAll().path()
]
}
func
"""
%s
""" % (sig, sig)
result = harmony.send(
{"function": func, "args": [instance[0]]}
)["result"]

@@ -44,18 +45,18 @@ class ExtractRender(pyblish.api.InstancePlugin):
frame_start = result[4]
frame_end = result[5]
audio_path = result[6]
if audio_path:
instance.data["audio"] = [{"filename": audio_path}]

instance.data["fps"] = frame_rate

# Set output path to temp folder.
path = tempfile.mkdtemp()
func = """function func(args)
sig = harmony.signature()
func = """function %s(args)
{
node.setTextAttr(args[0], "DRAWING_NAME", 1, args[1]);
}
func
"""
%s
""" % (sig, sig)
result = harmony.send(
{
"function": func,

@@ -89,7 +90,7 @@ class ExtractRender(pyblish.api.InstancePlugin):
if len(collections) > 1:
for col in collections:
if len(list(col)) > 1:
collection = col
collection = col
else:
collection = collections[0]


@@ -137,6 +138,9 @@ class ExtractRender(pyblish.api.InstancePlugin):
}
instance.data["representations"] = [representation, thumbnail]

if audio_path and os.path.exists(audio_path):
instance.data["audio"] = [{"filename": audio_path}]

# Required for extract_review plugin (L222 onwards).
instance.data["frameStart"] = frame_start
instance.data["frameEnd"] = frame_end
@@ -1,8 +1,9 @@
import os
import shutil

from avalon import harmony

import pype.api
import avalon.harmony
import pype.hosts.harmony


@@ -30,7 +31,7 @@ class ExtractTemplate(pype.api.Extractor):
unique_backdrops = [backdrops[x] for x in set(backdrops.keys())]

# Get non-connected nodes within backdrops.
all_nodes = avalon.harmony.send(
all_nodes = harmony.send(
{"function": "node.subNodes", "args": ["Top"]}
)["result"]
for node in [x for x in all_nodes if x not in dependencies]:

@@ -66,7 +67,8 @@ class ExtractTemplate(pype.api.Extractor):
instance.data["representations"] = [representation]

def get_backdrops(self, node):
func = """function func(probe_node)
sig = harmony.signature()
func = """function %s(probe_node)
{
var backdrops = Backdrop.backdrops("Top");
var valid_backdrops = [];

@@ -92,14 +94,15 @@ class ExtractTemplate(pype.api.Extractor):
}
return valid_backdrops;
}
func
"""
return avalon.harmony.send(
%s
""" % (sig, sig)
return harmony.send(
{"function": func, "args": [node]}
)["result"]

def get_dependencies(self, node, dependencies):
func = """function func(args)
sig = harmony.signature()
func = """function %s(args)
{
var target_node = args[0];
var numInput = node.numberOfInputPorts(target_node);

@@ -110,10 +113,10 @@ class ExtractTemplate(pype.api.Extractor):
}
return dependencies;
}
func
"""
%s
""" % (sig, sig)

current_dependencies = avalon.harmony.send(
current_dependencies = harmony.send(
{"function": func, "args": [node]}
)["result"]
@@ -1,8 +1,9 @@
import os
import shutil

from avalon import harmony

import pype.api
import avalon.harmony
import pype.hosts.harmony


@@ -15,10 +16,10 @@ class ExtractWorkfile(pype.api.Extractor):

def process(self, instance):
# Export template.
backdrops = avalon.harmony.send(
backdrops = harmony.send(
{"function": "Backdrop.backdrops", "args": ["Top"]}
)["result"]
nodes = avalon.harmony.send(
nodes = harmony.send(
{"function": "node.subNodes", "args": ["Top"]}
)["result"]
staging_dir = self.staging_dir(instance)
@@ -1,14 +1,16 @@
import json
import os

import pyblish.api

import avalon.harmony
import pype.hosts.harmony
from avalon import harmony


class ValidateAudio(pyblish.api.InstancePlugin):
"""Ensures that there is an audio file in the scene. If you are sure that you want to send render without audio, you can disable this validator before clicking on "publish" """
"""Ensures that there is an audio file in the scene.

If you are sure that you want to send render without audio, you can
disable this validator before clicking on "publish"

"""

order = pyblish.api.ValidatorOrder
label = "Validate Audio"

@@ -26,7 +28,7 @@ class ValidateAudio(pyblish.api.InstancePlugin):
}
func
"""
result = avalon.harmony.send(
result = harmony.send(
{"function": func, "args": [instance[0]]}
)["result"]
|
|||
import json
|
||||
|
||||
import pyblish.api
|
||||
from avalon import harmony
|
||||
|
||||
import avalon.harmony
|
||||
import pype.hosts.harmony
|
||||
|
||||
|
||||
|
|
@ -46,7 +46,8 @@ class ValidateSceneSettings(pyblish.api.InstancePlugin):
|
|||
for string in self.frame_check_filter):
|
||||
expected_settings.pop("frameEnd")
|
||||
|
||||
func = """function func()
|
||||
sig = harmony.signature()
|
||||
func = """function %s()
|
||||
{
|
||||
return {
|
||||
"fps": scene.getFrameRate(),
|
||||
|
|
@ -56,9 +57,9 @@ class ValidateSceneSettings(pyblish.api.InstancePlugin):
|
|||
"resolutionHeight": scene.defaultResolutionY()
|
||||
};
|
||||
}
|
||||
func
|
||||
"""
|
||||
current_settings = avalon.harmony.send({"function": func})["result"]
|
||||
%s
|
||||
""" % (sig, sig)
|
||||
current_settings = harmony.send({"function": func})["result"]
|
||||
|
||||
invalid_settings = []
|
||||
for key, value in expected_settings.items():
|
||||
|
|
|
|||
|
|
@ -6,7 +6,7 @@ class CollectClipMetadata(api.InstancePlugin):
|
|||
|
||||
order = api.CollectorOrder + 0.01
|
||||
label = "Collect Metadata"
|
||||
hosts = ["nukestudio"]
|
||||
hosts = ["hiero"]
|
||||
|
||||
def process(self, instance):
|
||||
item = instance.data["item"]
|
||||
|
|
@ -1,5 +1,5 @@
|
|||
import pyblish.api
|
||||
import opentimelineio.opentime as otio_ot
|
||||
import pyblish.api
|
||||
|
||||
|
||||
class CollectClipTimecodes(pyblish.api.InstancePlugin):
|
||||
|
|
@ -10,7 +10,7 @@ class CollectClipTimecodes(pyblish.api.InstancePlugin):
|
|||
|
||||
order = pyblish.api.CollectorOrder + 0.101
|
||||
label = "Collect Timecodes"
|
||||
hosts = ["nukestudio"]
|
||||
hosts = ["hiero"]
|
||||
|
||||
def process(self, instance):
|
||||
|
||||
|
|
@ -1,6 +1,8 @@
|
|||
import pyblish.api
|
||||
|
||||
import pype.api as pype
|
||||
|
||||
|
||||
class CollectWorkfileVersion(pyblish.api.ContextPlugin):
|
||||
"""Inject the current working file version into context"""
|
||||
|
||||
|
|
@@ -1,14 +1,15 @@
-from pyblish import api
 import os
 import time

+from pyblish import api
+

 class ExtractPlateCheck(api.ContextPlugin):
     """Collect all Track items selection."""

     order = api.ExtractorOrder + 0.01
     label = "Plates Export Waiting"
-    hosts = ["nukestudio"]
+    hosts = ["hiero"]
     families = ["encode"]

     def process(self, context):

@@ -6,7 +6,7 @@ class ExtractTasks(api.InstancePlugin):

     order = api.ExtractorOrder
     label = "Tasks"
-    hosts = ["nukestudio"]
+    hosts = ["hiero"]
     families = ["clip"]
     optional = True

@@ -22,7 +22,7 @@ class ValidateProjectRoot(api.ContextPlugin):

     order = api.ValidatorOrder
     label = "Project Root"
-    hosts = ["nukestudio"]
+    hosts = ["hiero"]
     actions = [RepairProjectRoot]

     def process(self, context):

@@ -5,7 +5,7 @@ class ValidateResolvedPaths(api.ContextPlugin):

     order = api.ValidatorOrder
     label = "Resolved Paths"
-    hosts = ["nukestudio"]
+    hosts = ["hiero"]

     def process(self, context):
         import os

@@ -13,7 +13,7 @@ class ValidateOutputRange(api.InstancePlugin):
     order = api.ValidatorOrder
     families = ["trackItem.task"]
     label = "Output Range"
-    hosts = ["nukestudio"]
+    hosts = ["hiero"]
     optional = True

     def process(self, instance):

@@ -43,7 +43,7 @@ class ValidateImageSequence(api.InstancePlugin):
     families = ["trackItem.task", "img"]
     match = api.Subset
     label = "Image Sequence"
-    hosts = ["nukestudio"]
+    hosts = ["hiero"]
     optional = True

     def process(self, instance):

@@ -10,7 +10,7 @@ class ValidateClip(api.InstancePlugin):
     families = ["clip"]
     # match = api.Exact
     label = "Validate Track Item"
-    hosts = ["nukestudio"]
+    hosts = ["hiero"]
     optional = True

     def process(self, instance):

18
pype/plugins/hiero/_unused/validate_viewer_lut.py
Normal file
@@ -0,0 +1,18 @@
+from pyblish import api
+
+
+class ValidateViewerLut(api.ContextPlugin):
+    """Validate viewer lut in Hiero is the same as in Nuke."""
+
+    order = api.ValidatorOrder
+    label = "Viewer LUT"
+    hosts = ["hiero"]
+    optional = True
+
+    def process(self, context):
+        # nuke_lut = nuke.ViewerProcess.node()["current"].value()
+        hiero_lut = context.data["activeProject"].lutSettingViewer()
+        self.log.info("__ hiero_lut: {}".format(hiero_lut))
+
+        msg = "Viewer LUT can only be RGB"
+        assert "RGB" in hiero_lut, msg

@@ -1,6 +1,8 @@
-from avalon import api
 import hiero
-from pype.hosts.nukestudio import lib
+from avalon import api
+
+from pype.hosts.hiero import lib

 reload(lib)

@@ -1,5 +1,5 @@
-from pyblish import api
 from avalon import io
+from pyblish import api


 class CollectAssetBuilds(api.ContextPlugin):

@@ -14,7 +14,7 @@ class CollectAssetBuilds(api.ContextPlugin):
     # Run just after CollectClip
     order = api.CollectorOrder + 0.02
     label = "Collect AssetBuilds"
-    hosts = ["nukestudio"]
+    hosts = ["hiero"]

     def process(self, context):
         asset_builds = {}

@@ -1,6 +1,8 @@
-from pyblish import api
+import os
+
+from pyblish import api


 class CollectAudio(api.InstancePlugin):
     """Collect audio from tags.

@@ -14,7 +16,7 @@ class CollectAudio(api.InstancePlugin):
     # Run just before CollectSubsets
     order = api.CollectorOrder + 0.1021
     label = "Collect Audio"
-    hosts = ["nukestudio"]
+    hosts = ["hiero"]
     families = ["clip"]

     def process(self, instance):

@@ -1,14 +1,15 @@
-from pyblish import api
-import hiero
 import math

+import hiero
+from pyblish import api


 class CollectCalculateRetime(api.InstancePlugin):
     """Calculate Retiming of selected track items."""

     order = api.CollectorOrder + 0.02
     label = "Collect Calculate Retiming"
-    hosts = ["nukestudio"]
+    hosts = ["hiero"]
     families = ['retime']

     def process(self, instance):

@@ -6,7 +6,7 @@ class CollectClipResolution(pyblish.api.InstancePlugin):

     order = pyblish.api.CollectorOrder + 0.101
     label = "Collect Clip Resoluton"
-    hosts = ["nukestudio"]
+    hosts = ["hiero"]

     def process(self, instance):
         sequence = instance.context.data['activeSequence']

@@ -1,15 +1,14 @@
 import os

-from pyblish import api
 import hiero
 import nuke
+from pyblish import api


 class CollectClips(api.ContextPlugin):
     """Collect all Track items selection."""

     order = api.CollectorOrder + 0.01
     label = "Collect Clips"
-    hosts = ["nukestudio"]
+    hosts = ["hiero"]

     def process(self, context):
         # create asset_names conversion table

@@ -7,8 +7,6 @@ class CollectProjectColorspace(pyblish.api.ContextPlugin):
     order = pyblish.api.CollectorOrder + 0.1
     label = "Project's color settings"
     def process(self, context):
-        import hiero
-
         project = context.data["activeProject"]
         colorspace = {}
         colorspace["useOCIOEnvironmentOverride"] = project.useOCIOEnvironmentOverride()

@@ -1,6 +1,7 @@
-import pyblish.api
+import re
+
+import pyblish.api


 class CollectVideoTracksLuts(pyblish.api.InstancePlugin):
     """Collect video tracks effects into context."""

@@ -6,7 +6,7 @@ class CollectClipFrameRanges(pyblish.api.InstancePlugin):

     order = pyblish.api.CollectorOrder + 0.101
     label = "Collect Frame Ranges"
-    hosts = ["nukestudio"]
+    hosts = ["hiero"]

     def process(self, instance):

@@ -6,7 +6,7 @@ class CollectFramerate(api.ContextPlugin):

     order = api.CollectorOrder + 0.01
     label = "Collect Framerate"
-    hosts = ["nukestudio"]
+    hosts = ["hiero"]

     def process(self, context):
         sequence = context.data["activeSequence"]

@@ -6,7 +6,7 @@ class CollectClipHandles(api.ContextPlugin):

     order = api.CollectorOrder + 0.0121
     label = "Collect Handles"
-    hosts = ["nukestudio"]
+    hosts = ["hiero"]

     def process(self, context):
         assets_shared = context.data.get("assetsShared")

@@ -1,7 +1,8 @@
-import pyblish.api
-import avalon.api as avalon
 import re
+
+import avalon.api as avalon
+import pyblish.api


 class CollectHierarchyInstance(pyblish.api.ContextPlugin):
     """Collecting hierarchy context from `parents` and `hierarchy` data

@@ -13,7 +14,7 @@ class CollectHierarchyInstance(pyblish.api.ContextPlugin):
     """

     label = "Collect Hierarchy Clip"
-    order = pyblish.api.CollectorOrder + 0.101
+    order = pyblish.api.CollectorOrder + 0.102
     families = ["clip"]

     def convert_to_entity(self, key, value):

@@ -46,7 +47,7 @@ class CollectHierarchyInstance(pyblish.api.ContextPlugin):
         clip_out = instance.data["clipOut"]
         fps = context.data["fps"]

-        # build data for inner nukestudio project property
+        # build data for inner hiero project property
         data = {
             "sequence": (
                 context.data['activeSequence'].name().replace(' ', '_')

@@ -6,7 +6,7 @@ class CollectLeaderClip(api.InstancePlugin):

     order = api.CollectorOrder + 0.0111
     label = "Collect Leader Clip"
-    hosts = ["nukestudio"]
+    hosts = ["hiero"]
     families = ['clip']

     def process(self, instance):

@@ -16,7 +16,7 @@ class CollectPlates(api.InstancePlugin):
     # Run just before CollectSubsets
     order = api.CollectorOrder + 0.1021
     label = "Collect Plates"
-    hosts = ["nukestudio"]
+    hosts = ["hiero"]
     families = ["clip"]

     def process(self, instance):

|
@ -85,7 +85,7 @@ class CollectPlatesData(api.InstancePlugin):
|
|||
|
||||
order = api.CollectorOrder + 0.48
|
||||
label = "Collect Plates Data"
|
||||
hosts = ["nukestudio"]
|
||||
hosts = ["hiero"]
|
||||
families = ["plate"]
|
||||
|
||||
def process(self, instance):
|
||||
|
|
@@ -192,16 +192,17 @@ class CollectPlatesData(api.InstancePlugin):
         instance.data["representations"].append(
             plates_mov_representation)

-        thumb_file = head + ".png"
+        thumb_frame = instance.data["clipInH"] + (
+            (instance.data["clipOutH"] - instance.data["clipInH"]) / 2)
+        thumb_file = "{}_{}{}".format(head, thumb_frame, ".png")
         thumb_path = os.path.join(staging_dir, thumb_file)
-        thumb_frame = instance.data["sourceIn"] + ((instance.data["sourceOut"] - instance.data["sourceIn"])/2)

         thumbnail = item.thumbnail(thumb_frame).save(
             thumb_path,
             format='png'
         )
         self.log.debug("__ sourceIn: `{}`".format(instance.data["sourceIn"]))
-        self.log.debug("__ thumbnail: `{}`, frame: `{}`".format(thumbnail, thumb_frame))
+        self.log.debug("__ thumbnail: `{}`, frame: `{}`".format(
+            thumbnail, thumb_frame))

         thumb_representation = {
             'files': thumb_file,

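The reworked block above moves the thumbnail frame from a source-range midpoint to a handle-inclusive clip midpoint and bakes that frame number into the thumbnail file name. The arithmetic is just a halfway point; a small hedged example with made-up values (the dict keys match the diff, the numbers and the `head` value are invented for illustration):

    # hypothetical instance data for illustration
    data = {"clipInH": 1001, "clipOutH": 1051}
    head = "plateMain_v001"

    # halfway between the handle-inclusive in and out points
    thumb_frame = data["clipInH"] + ((data["clipOutH"] - data["clipInH"]) / 2)
    thumb_file = "{}_{}{}".format(head, thumb_frame, ".png")
    # 1001 + 50 / 2 -> 1026 (1026.0 under Python 3), so the file name
    # becomes e.g. "plateMain_v001_1026.png"
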
Some files were not shown because too many files have changed in this diff.