Mirror of https://github.com/ynput/ayon-core.git — synced 2025-12-24 21:04:40 +01:00

Merge branch 'ynput:develop' into bugfix/OP-3022-Look-publishing-and-srgb-colorspace-in-Maya-2022

This change is contained in commit 2bd0b5cb6a.

87 changed files with 2697 additions and 859 deletions
15  .github/pull_request_template.md (vendored)
@@ -1,16 +1,9 @@
-## Brief description
-
-First sentence is brief description.
-
-## Description
-
-Next paragraph is more elaborate text with more info. This will be displayed, for example, in collapsed form under the first sentence in a changelog.
+## Changelog Description
+Paragraphs contain detailed information on the changes made to the product or service, providing an in-depth description of the updates and enhancements. They can be used to explain the reasoning behind the changes, or to highlight the importance of the new features. Paragraphs can often include links to further information or support documentation.
 
 ## Additional info
 The rest will be ignored in changelog and should contain any additional
 technical information.
 
 ## Documentation (add _"type: documentation"_ label)
 [feature_documentation](future_url_after_it_will_be_merged)
 Paragraphs of text giving context of additional technical information or code examples.
 
 ## Testing notes:
 1. start with this step
 2. follow this step
@@ -39,7 +39,6 @@ class HostDirmap(object):
         self._project_settings = project_settings
-        self._sync_module = sync_module  # to limit reinit of Modules
         self._log = None
         self._mapping = None  # cache mapping
 
     @property
     def sync_module(self):
@@ -70,29 +69,28 @@ class HostDirmap(object):
         """Run host dependent remapping from source_path to destination_path"""
         pass
 
-    def process_dirmap(self):
+    def process_dirmap(self, mapping=None):
         # type: (dict) -> None
         """Go through all paths in Settings and set them using `dirmap`.
 
         If artists has Site Sync enabled, take dirmap mapping directly from
        Local Settings when artist is syncing workfile locally.
 
        Args:
            project_settings (dict): Settings for current project.
        """
 
-        if not self._mapping:
-            self._mapping = self.get_mappings(self.project_settings)
-        if not self._mapping:
+        if not mapping:
+            mapping = self.get_mappings()
+        if not mapping:
             return
 
-        self.log.info("Processing directory mapping ...")
         self.on_enable_dirmap()
-        self.log.info("mapping:: {}".format(self._mapping))
 
-        for k, sp in enumerate(self._mapping["source-path"]):
-            dst = self._mapping["destination-path"][k]
+        for k, sp in enumerate(mapping["source-path"]):
+            dst = mapping["destination-path"][k]
             try:
                 # add trailing slash if missing
                 sp = os.path.join(sp, '')
                 dst = os.path.join(dst, '')
                 print("{} -> {}".format(sp, dst))
                 self.dirmap_routine(sp, dst)
             except IndexError:
@@ -110,28 +108,24 @@ class HostDirmap(object):
                 )
                 continue
 
-    def get_mappings(self, project_settings):
+    def get_mappings(self):
         """Get translation from source-path to destination-path.
 
        It checks if Site Sync is enabled and user chose to use local
        site, in that case configuration in Local Settings takes precedence
        """
 
-        local_mapping = self._get_local_sync_dirmap(project_settings)
         dirmap_label = "{}-dirmap".format(self.host_name)
-        if (
-            not self.project_settings[self.host_name].get(dirmap_label)
-            and not local_mapping
-        ):
-            return {}
-        mapping_settings = self.project_settings[self.host_name][dirmap_label]
-        mapping_enabled = mapping_settings["enabled"] or bool(local_mapping)
+        mapping_sett = self.project_settings[self.host_name].get(dirmap_label,
+                                                                 {})
+        local_mapping = self._get_local_sync_dirmap()
+        mapping_enabled = mapping_sett.get("enabled") or bool(local_mapping)
         if not mapping_enabled:
             return {}
 
         mapping = (
             local_mapping
-            or mapping_settings["paths"]
+            or mapping_sett["paths"]
             or {}
         )
 
@@ -141,28 +135,27 @@ class HostDirmap(object):
             or not mapping.get("source-path")
         ):
             return {}
+        self.log.info("Processing directory mapping ...")
+        self.log.info("mapping:: {}".format(mapping))
         return mapping
 
-    def _get_local_sync_dirmap(self, project_settings):
+    def _get_local_sync_dirmap(self):
         """
        Returns dirmap if synch to local project is enabled.
 
        Only valid mapping is from roots of remote site to local site set
        in Local Settings.
 
-        Args:
-            project_settings (dict)
        Returns:
            dict : { "source-path": [XXX], "destination-path": [YYYY]}
        """
+        project_name = os.getenv("AVALON_PROJECT")
 
         mapping = {}
-        if not project_settings["global"]["sync_server"]["enabled"]:
+        if (not self.sync_module.enabled or
+                project_name not in self.sync_module.get_enabled_projects()):
             return mapping
 
-        project_name = os.getenv("AVALON_PROJECT")
-
         active_site = self.sync_module.get_local_normalized_site(
             self.sync_module.get_active_site(project_name))
         remote_site = self.sync_module.get_local_normalized_site(
@@ -171,11 +164,7 @@ class HostDirmap(object):
             "active {} - remote {}".format(active_site, remote_site)
         )
 
-        if (
-            active_site == "local"
-            and project_name in self.sync_module.get_enabled_projects()
-            and active_site != remote_site
-        ):
+        if active_site == "local" and active_site != remote_site:
             sync_settings = self.sync_module.get_sync_project_setting(
                 project_name,
                 exclude_locals=False,
@@ -188,7 +177,15 @@ class HostDirmap(object):
 
             self.log.debug("local overrides {}".format(active_overrides))
             self.log.debug("remote overrides {}".format(remote_overrides))
 
+            current_platform = platform.system().lower()
+            remote_provider = self.sync_module.get_provider_for_site(
+                project_name, remote_site
+            )
+            # dirmap has sense only with regular disk provider, in the workfile
+            # wont be root on cloud or sftp provider
+            if remote_provider != "local_drive":
+                remote_site = "studio"
             for root_name, active_site_dir in active_overrides.items():
                 remote_site_dir = (
                     remote_overrides.get(root_name)
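For context on the refactor above: `process_dirmap()` now takes an optional `mapping` argument and only falls back to `get_mappings()` when none is given. A standalone sketch of what the loop does with the mapping shape documented in `_get_local_sync_dirmap()` (the paths are invented):

```python
import os

# Mapping shape per _get_local_sync_dirmap():
# {"source-path": [...], "destination-path": [...]}
mapping = {
    "source-path": ["P:/projects"],             # hypothetical studio root
    "destination-path": ["C:/local/projects"],  # hypothetical local root
}

for k, sp in enumerate(mapping["source-path"]):
    dst = mapping["destination-path"][k]
    # Trailing slash is enforced, mirroring the code above
    sp = os.path.join(sp, "")
    dst = os.path.join(dst, "")
    print("{} -> {}".format(sp, dst))
    # A host implementation would call self.dirmap_routine(sp, dst) here
```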
@@ -1,20 +1,11 @@
 from .pipeline import (
-    install,
-    uninstall,
+    FusionHost,
     ls,
     imprint_container,
-    parse_container
-)
-
-from .workio import (
-    open_file,
-    save_file,
-    current_file,
-    has_unsaved_changes,
-    file_extensions,
-    work_root
+    parse_container,
+    list_instances,
+    remove_instance
 )
 
 from .lib import (
@@ -30,21 +21,11 @@ from .menu import launch_openpype_menu
 
 __all__ = [
     # pipeline
-    "install",
-    "uninstall",
     "ls",
 
     "imprint_container",
     "parse_container",
 
-    # workio
-    "open_file",
-    "save_file",
-    "current_file",
-    "has_unsaved_changes",
-    "file_extensions",
-    "work_root",
-
     # lib
     "maintained_selection",
     "update_frame_range",
54  openpype/hosts/fusion/api/action.py (new file)
@@ -0,0 +1,54 @@
+import pyblish.api
+
+
+from openpype.hosts.fusion.api.lib import get_current_comp
+from openpype.pipeline.publish import get_errored_instances_from_context
+
+
+class SelectInvalidAction(pyblish.api.Action):
+    """Select invalid nodes in Fusion when plug-in failed.
+
+    To retrieve the invalid nodes this assumes a static `get_invalid()`
+    method is available on the plugin.
+
+    """
+    label = "Select invalid"
+    on = "failed"  # This action is only available on a failed plug-in
+    icon = "search"  # Icon from Awesome Icon
+
+    def process(self, context, plugin):
+        errored_instances = get_errored_instances_from_context(context)
+
+        # Apply pyblish.logic to get the instances for the plug-in
+        instances = pyblish.api.instances_by_plugin(errored_instances, plugin)
+
+        # Get the invalid nodes for the plug-ins
+        self.log.info("Finding invalid nodes..")
+        invalid = list()
+        for instance in instances:
+            invalid_nodes = plugin.get_invalid(instance)
+            if invalid_nodes:
+                if isinstance(invalid_nodes, (list, tuple)):
+                    invalid.extend(invalid_nodes)
+                else:
+                    self.log.warning("Plug-in returned to be invalid, "
+                                     "but has no selectable nodes.")
+
+        if not invalid:
+            # Assume relevant comp is current comp and clear selection
+            self.log.info("No invalid tools found.")
+            comp = get_current_comp()
+            flow = comp.CurrentFrame.FlowView
+            flow.Select()  # No args equals clearing selection
+            return
+
+        # Assume a single comp
+        first_tool = invalid[0]
+        comp = first_tool.Comp()
+        flow = comp.CurrentFrame.FlowView
+        flow.Select()  # No args equals clearing selection
+        names = set()
+        for tool in invalid:
+            flow.Select(tool, True)
+            names.add(tool.Name)
+        self.log.info("Selecting invalid tools: %s" % ", ".join(sorted(names)))
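`SelectInvalidAction` assumes a `get_invalid()` classmethod on the failing plug-in and selects whatever tools it returns. A minimal sketch of a plug-in satisfying that contract (the validator itself is illustrative, not part of this commit):

```python
import pyblish.api


class ValidateExampleTools(pyblish.api.InstancePlugin):
    """Illustrative plug-in shape that SelectInvalidAction works with."""

    order = pyblish.api.ValidatorOrder
    label = "Validate Example Tools"
    hosts = ["fusion"]
    # actions = [SelectInvalidAction]  # attach to enable selection on failure

    @classmethod
    def get_invalid(cls, instance):
        # Return the offending Fusion tools as a list; the action selects them
        return [tool for tool in instance
                if getattr(tool, "ID", None) == "Saver"]

    def process(self, instance):
        invalid = self.get_invalid(instance)
        if invalid:
            raise RuntimeError("Found %i invalid tools" % len(invalid))
```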
@@ -7,11 +7,11 @@ from openpype.style import load_stylesheet
 from openpype.lib import register_event_callback
 from openpype.hosts.fusion.scripts import (
     set_rendermode,
-    duplicate_with_inputs
+    duplicate_with_inputs,
 )
 from openpype.hosts.fusion.api.lib import (
     set_asset_framerange,
-    set_asset_resolution
+    set_asset_resolution,
 )
 from openpype.pipeline import legacy_io
 from openpype.resources import get_openpype_icon_filepath
@@ -45,17 +45,19 @@ class OpenPypeMenu(QtWidgets.QWidget):
         self.setWindowTitle("OpenPype")
 
         asset_label = QtWidgets.QLabel("Context", self)
-        asset_label.setStyleSheet("""QLabel {
+        asset_label.setStyleSheet(
+            """QLabel {
             font-size: 14px;
             font-weight: 600;
             color: #5f9fb8;
-        }""")
+            }"""
+        )
         asset_label.setAlignment(QtCore.Qt.AlignHCenter)
 
         workfiles_btn = QtWidgets.QPushButton("Workfiles...", self)
         create_btn = QtWidgets.QPushButton("Create...", self)
-        publish_btn = QtWidgets.QPushButton("Publish...", self)
         load_btn = QtWidgets.QPushButton("Load...", self)
+        publish_btn = QtWidgets.QPushButton("Publish...", self)
         manager_btn = QtWidgets.QPushButton("Manage...", self)
         libload_btn = QtWidgets.QPushButton("Library...", self)
         rendermode_btn = QtWidgets.QPushButton("Set render mode...", self)
@@ -108,7 +110,8 @@ class OpenPypeMenu(QtWidgets.QWidget):
         libload_btn.clicked.connect(self.on_libload_clicked)
         rendermode_btn.clicked.connect(self.on_rendermode_clicked)
         duplicate_with_inputs_btn.clicked.connect(
-            self.on_duplicate_with_inputs_clicked)
+            self.on_duplicate_with_inputs_clicked
+        )
         set_resolution_btn.clicked.connect(self.on_set_resolution_clicked)
         set_framerange_btn.clicked.connect(self.on_set_framerange_clicked)
 
@@ -130,7 +133,6 @@ class OpenPypeMenu(QtWidgets.QWidget):
         self.asset_label.setText(label)
 
     def register_callback(self, name, fn):
-
         # Create a wrapper callback that we only store
         # for as long as we want it to persist as callback
         def _callback(*args):
@@ -146,10 +148,10 @@ class OpenPypeMenu(QtWidgets.QWidget):
         host_tools.show_workfiles()
 
     def on_create_clicked(self):
-        host_tools.show_creator()
+        host_tools.show_publisher(tab="create")
 
     def on_publish_clicked(self):
-        host_tools.show_publish()
+        host_tools.show_publisher(tab="publish")
 
     def on_load_clicked(self):
         host_tools.show_loader(use_context=True)
@@ -4,6 +4,7 @@ Basic avalon integration
 import os
+import sys
 import logging
 import contextlib
 
 import pyblish.api
 from qtpy import QtCore
@@ -17,15 +18,14 @@ from openpype.pipeline import (
     register_loader_plugin_path,
     register_creator_plugin_path,
     register_inventory_action_path,
-    deregister_loader_plugin_path,
-    deregister_creator_plugin_path,
-    deregister_inventory_action_path,
     AVALON_CONTAINER_ID,
 )
 from openpype.pipeline.load import any_outdated_containers
 from openpype.hosts.fusion import FUSION_HOST_DIR
+from openpype.host import HostBase, IWorkfileHost, ILoadHost, IPublishHost
 from openpype.tools.utils import host_tools
 
 
 from .lib import (
     get_current_comp,
     comp_lock_and_undo_chunk,
@@ -66,94 +66,98 @@ class FusionLogHandler(logging.Handler):
         self.print(entry)
 
 
-def install():
-    """Install fusion-specific functionality of OpenPype.
+class FusionHost(HostBase, IWorkfileHost, ILoadHost, IPublishHost):
+    name = "fusion"
 
-    This is where you install menus and register families, data
-    and loaders into fusion.
+    def install(self):
+        """Install fusion-specific functionality of OpenPype.
 
-    It is called automatically when installing via
-    `openpype.pipeline.install_host(openpype.hosts.fusion.api)`
+        This is where you install menus and register families, data
+        and loaders into fusion.
 
-    See the Maya equivalent for inspiration on how to implement this.
+        It is called automatically when installing via
+        `openpype.pipeline.install_host(openpype.hosts.fusion.api)`
 
-    """
-    # Remove all handlers associated with the root logger object, because
-    # that one always logs as "warnings" incorrectly.
-    for handler in logging.root.handlers[:]:
-        logging.root.removeHandler(handler)
+        See the Maya equivalent for inspiration on how to implement this.
 
-    # Attach default logging handler that prints to active comp
-    logger = logging.getLogger()
-    formatter = logging.Formatter(fmt="%(message)s\n")
-    handler = FusionLogHandler()
-    handler.setFormatter(formatter)
-    logger.addHandler(handler)
-    logger.setLevel(logging.DEBUG)
+        """
+        # Remove all handlers associated with the root logger object, because
+        # that one always logs as "warnings" incorrectly.
+        for handler in logging.root.handlers[:]:
+            logging.root.removeHandler(handler)
 
-    pyblish.api.register_host("fusion")
-    pyblish.api.register_plugin_path(PUBLISH_PATH)
-    log.info("Registering Fusion plug-ins..")
+        # Attach default logging handler that prints to active comp
+        logger = logging.getLogger()
+        formatter = logging.Formatter(fmt="%(message)s\n")
+        handler = FusionLogHandler()
+        handler.setFormatter(formatter)
+        logger.addHandler(handler)
+        logger.setLevel(logging.DEBUG)
 
-    register_loader_plugin_path(LOAD_PATH)
-    register_creator_plugin_path(CREATE_PATH)
-    register_inventory_action_path(INVENTORY_PATH)
+        pyblish.api.register_host("fusion")
+        pyblish.api.register_plugin_path(PUBLISH_PATH)
+        log.info("Registering Fusion plug-ins..")
 
-    pyblish.api.register_callback(
-        "instanceToggled", on_pyblish_instance_toggled
-    )
+        register_loader_plugin_path(LOAD_PATH)
+        register_creator_plugin_path(CREATE_PATH)
+        register_inventory_action_path(INVENTORY_PATH)
 
-    # Register events
-    register_event_callback("open", on_after_open)
-    register_event_callback("save", on_save)
-    register_event_callback("new", on_new)
+        # Register events
+        register_event_callback("open", on_after_open)
+        register_event_callback("save", on_save)
+        register_event_callback("new", on_new)
 
+    # region workfile io api
+    def has_unsaved_changes(self):
+        comp = get_current_comp()
+        return comp.GetAttrs()["COMPB_Modified"]
 
-def uninstall():
-    """Uninstall all that was installed
+    def get_workfile_extensions(self):
+        return [".comp"]
 
-    This is where you undo everything that was done in `install()`.
-    That means, removing menus, deregistering families and data
-    and everything. It should be as though `install()` was never run,
-    because odds are calling this function means the user is interested
-    in re-installing shortly afterwards. If, for example, he has been
-    modifying the menu or registered families.
+    def save_workfile(self, dst_path=None):
+        comp = get_current_comp()
+        comp.Save(dst_path)
 
-    """
-    pyblish.api.deregister_host("fusion")
-    pyblish.api.deregister_plugin_path(PUBLISH_PATH)
-    log.info("Deregistering Fusion plug-ins..")
+    def open_workfile(self, filepath):
+        # Hack to get fusion, see
+        # openpype.hosts.fusion.api.pipeline.get_current_comp()
+        fusion = getattr(sys.modules["__main__"], "fusion", None)
 
-    deregister_loader_plugin_path(LOAD_PATH)
-    deregister_creator_plugin_path(CREATE_PATH)
-    deregister_inventory_action_path(INVENTORY_PATH)
+        return fusion.LoadComp(filepath)
 
-    pyblish.api.deregister_callback(
-        "instanceToggled", on_pyblish_instance_toggled
-    )
+    def get_current_workfile(self):
+        comp = get_current_comp()
+        current_filepath = comp.GetAttrs()["COMPS_FileName"]
+        if not current_filepath:
+            return None
 
+        return current_filepath
 
-def on_pyblish_instance_toggled(instance, old_value, new_value):
-    """Toggle saver tool passthrough states on instance toggles."""
-    comp = instance.context.data.get("currentComp")
-    if not comp:
-        return
+    def work_root(self, session):
+        work_dir = session["AVALON_WORKDIR"]
+        scene_dir = session.get("AVALON_SCENEDIR")
+        if scene_dir:
+            return os.path.join(work_dir, scene_dir)
+        else:
+            return work_dir
+    # endregion
 
-    savers = [tool for tool in instance if
-              getattr(tool, "ID", None) == "Saver"]
-    if not savers:
-        return
+    @contextlib.contextmanager
+    def maintained_selection(self):
+        from .lib import maintained_selection
+        return maintained_selection()
 
-    # Whether instances should be passthrough based on new value
-    passthrough = not new_value
-    with comp_lock_and_undo_chunk(comp,
-                                  undo_queue_name="Change instance "
-                                                  "active state"):
-        for tool in savers:
-            attrs = tool.GetAttrs()
-            current = attrs["TOOLB_PassThrough"]
-            if current != passthrough:
-                tool.SetAttrs({"TOOLB_PassThrough": passthrough})
+    def get_containers(self):
+        return ls()
 
+    def update_context_data(self, data, changes):
+        comp = get_current_comp()
+        comp.SetData("openpype", data)
+
+    def get_context_data(self):
+        comp = get_current_comp()
+        return comp.GetData("openpype") or {}
 
 
 def on_new(event):
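The net effect of this hunk: the module-level `install()`/`uninstall()` pair is replaced by a `FusionHost` class implementing the `HostBase` interfaces, so registration now happens by instantiating the host. A sketch of the new entry point, mirroring the launch-script change later in this commit (assumes a running OpenPype/Fusion environment):

```python
from openpype.pipeline import install_host, registered_host
from openpype.hosts.fusion.api import FusionHost

# Previously: install_host(openpype.hosts.fusion.api)
install_host(FusionHost())

host = registered_host()
print(host.get_workfile_extensions())  # [".comp"]
print(host.get_current_workfile())     # open comp's path, or None
```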
@@ -283,9 +287,51 @@ def parse_container(tool):
     return container
 
 
+# TODO: Function below is currently unused prototypes
+def list_instances(creator_id=None):
+    """Return created instances in current workfile which will be published.
+    Returns:
+        (list) of dictionaries matching instances format
+    """
+
+    comp = get_current_comp()
+    tools = comp.GetToolList(False).values()
+
+    instance_signature = {
+        "id": "pyblish.avalon.instance",
+        "identifier": creator_id
+    }
+    instances = []
+    for tool in tools:
+
+        data = tool.GetData('openpype')
+        if not isinstance(data, dict):
+            continue
+
+        if data.get("id") != instance_signature["id"]:
+            continue
+
+        if creator_id and data.get("identifier") != creator_id:
+            continue
+
+        instances.append(tool)
+
+    return instances
+
+
+# TODO: Function below is currently unused prototypes
+def remove_instance(instance):
+    """Remove instance from current workfile.
+
+    Args:
+        instance (dict): instance representation from subsetmanager model
+    """
+    # Assume instance is a Fusion tool directly
+    instance["tool"].Delete()
+
+
 class FusionEventThread(QtCore.QThread):
     """QThread which will periodically ping Fusion app for any events.
 
    The fusion.UIManager must be set up to be notified of events before they'll
    be reported by this thread, for example:
        fusion.UIManager.AddNotify("Comp_Save", None)
@@ -1,45 +0,0 @@
-"""Host API required Work Files tool"""
-import sys
-import os
-
-from .lib import get_current_comp
-
-
-def file_extensions():
-    return [".comp"]
-
-
-def has_unsaved_changes():
-    comp = get_current_comp()
-    return comp.GetAttrs()["COMPB_Modified"]
-
-
-def save_file(filepath):
-    comp = get_current_comp()
-    comp.Save(filepath)
-
-
-def open_file(filepath):
-    # Hack to get fusion, see
-    # openpype.hosts.fusion.api.pipeline.get_current_comp()
-    fusion = getattr(sys.modules["__main__"], "fusion", None)
-
-    return fusion.LoadComp(filepath)
-
-
-def current_file():
-    comp = get_current_comp()
-    current_filepath = comp.GetAttrs()["COMPS_FileName"]
-    if not current_filepath:
-        return None
-
-    return current_filepath
-
-
-def work_root(session):
-    work_dir = session["AVALON_WORKDIR"]
-    scene_dir = session.get("AVALON_SCENEDIR")
-    if scene_dir:
-        return os.path.join(work_dir, scene_dir)
-    else:
-        return work_dir
@@ -13,11 +13,11 @@ def main(env):
     # However the contents of that folder can conflict with Qt library dlls
     # so we make sure to move out of it to avoid DLL Load Failed errors.
     os.chdir("..")
-    from openpype.hosts.fusion import api
+    from openpype.hosts.fusion.api import FusionHost
     from openpype.hosts.fusion.api import menu
 
     # activate resolve from pype
-    install_host(api)
+    install_host(FusionHost())
 
     log = Logger.get_logger(__name__)
     log.info(f"Registered host: {registered_host()}")
@@ -1,7 +1,7 @@
 import os
 import platform
-from openpype.lib import PreLaunchHook
 
+from openpype.lib import PreLaunchHook, ApplicationLaunchFailed
+from openpype.pipeline.colorspace import get_imageio_config
+from openpype.pipeline.template_data import get_template_data_with_names
 
 
 class FusionPreLaunchOCIO(PreLaunchHook):
@@ -11,24 +11,22 @@ class FusionPreLaunchOCIO(PreLaunchHook):
     def execute(self):
         """Hook entry method."""
 
-        # get image io
-        project_settings = self.data["project_settings"]
+        template_data = get_template_data_with_names(
+            project_name=self.data["project_name"],
+            asset_name=self.data["asset_name"],
+            task_name=self.data["task_name"],
+            host_name=self.host_name,
+            system_settings=self.data["system_settings"]
+        )
 
-        # make sure anatomy settings are having flame key
-        imageio_fusion = project_settings["fusion"]["imageio"]
-
-        ocio = imageio_fusion.get("ocio")
-        enabled = ocio.get("enabled", False)
-        if not enabled:
-            return
-
-        platform_key = platform.system().lower()
-        ocio_path = ocio["configFilePath"][platform_key]
-        if not ocio_path:
-            raise ApplicationLaunchFailed(
-                "Fusion OCIO is enabled in project settings but no OCIO config"
-                f"path is set for your current platform: {platform_key}"
-            )
+        config_data = get_imageio_config(
+            project_name=self.data["project_name"],
+            host_name=self.host_name,
+            project_settings=self.data["project_settings"],
+            anatomy_data=template_data,
+            anatomy=self.data["anatomy"]
+        )
+        ocio_path = config_data["path"]
 
         self.log.info(f"Setting OCIO config path: {ocio_path}")
-        self.launch_context.env["OCIO"] = os.pathsep.join(ocio_path)
+        self.launch_context.env["OCIO"] = ocio_path
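The hook now resolves the OCIO config through the centralized imageio settings instead of a Fusion-specific per-platform path; only the `"path"` key of the returned config data is consumed. A rough sketch of the resulting behavior (the values are invented stand-ins for what `get_imageio_config` returns):

```python
# Invented stand-in for get_imageio_config()'s return value;
# the hook above reads only "path" from it.
config_data = {"path": "/configs/demo/config.ocio"}

env = {}
env["OCIO"] = config_data["path"]  # single resolved path, no os.pathsep.join
print(env["OCIO"])
```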
@@ -1,49 +0,0 @@
-import os
-
-from openpype.pipeline import (
-    LegacyCreator,
-    legacy_io
-)
-from openpype.hosts.fusion.api import (
-    get_current_comp,
-    comp_lock_and_undo_chunk
-)
-
-
-class CreateOpenEXRSaver(LegacyCreator):
-
-    name = "openexrDefault"
-    label = "Create OpenEXR Saver"
-    hosts = ["fusion"]
-    family = "render"
-    defaults = ["Main"]
-
-    def process(self):
-
-        file_format = "OpenEXRFormat"
-
-        comp = get_current_comp()
-
-        workdir = os.path.normpath(legacy_io.Session["AVALON_WORKDIR"])
-
-        filename = "{}..exr".format(self.name)
-        filepath = os.path.join(workdir, "render", filename)
-
-        with comp_lock_and_undo_chunk(comp):
-            args = (-32768, -32768)  # Magical position numbers
-            saver = comp.AddTool("Saver", *args)
-            saver.SetAttrs({"TOOLS_Name": self.name})
-
-            # Setting input attributes is different from basic attributes
-            # Not confused with "MainInputAttributes" which
-            saver["Clip"] = filepath
-            saver["OutputFormat"] = file_format
-
-            # Check file format settings are available
-            if saver[file_format] is None:
-                raise RuntimeError("File format is not set to {}, "
-                                   "this is a bug".format(file_format))
-
-            # Set file format attributes
-            saver[file_format]["Depth"] = 1  # int8 | int16 | float32 | other
-            saver[file_format]["SaveAlpha"] = 0
215  openpype/hosts/fusion/plugins/create/create_saver.py (new file)
@@ -0,0 +1,215 @@
+import os
+
+import qtawesome
+
+from openpype.hosts.fusion.api import (
+    get_current_comp,
+    comp_lock_and_undo_chunk
+)
+
+from openpype.lib import BoolDef
+from openpype.pipeline import (
+    legacy_io,
+    Creator,
+    CreatedInstance
+)
+from openpype.client import get_asset_by_name
+
+
+class CreateSaver(Creator):
+    identifier = "io.openpype.creators.fusion.saver"
+    name = "saver"
+    label = "Saver"
+    family = "render"
+    default_variants = ["Main"]
+
+    description = "Fusion Saver to generate image sequence"
+
+    def create(self, subset_name, instance_data, pre_create_data):
+
+        # TODO: Add pre_create attributes to choose file format?
+        file_format = "OpenEXRFormat"
+
+        comp = get_current_comp()
+        with comp_lock_and_undo_chunk(comp):
+            args = (-32768, -32768)  # Magical position numbers
+            saver = comp.AddTool("Saver", *args)
+
+            instance_data["subset"] = subset_name
+            self._update_tool_with_data(saver, data=instance_data)
+
+            saver["OutputFormat"] = file_format
+
+            # Check file format settings are available
+            if saver[file_format] is None:
+                raise RuntimeError(
+                    f"File format is not set to {file_format}, this is a bug"
+                )
+
+            # Set file format attributes
+            saver[file_format]["Depth"] = 0  # Auto | float16 | float32
+            # TODO Is this needed?
+            saver[file_format]["SaveAlpha"] = 1
+
+        self._imprint(saver, instance_data)
+
+        # Register the CreatedInstance
+        instance = CreatedInstance(
+            family=self.family,
+            subset_name=subset_name,
+            data=instance_data,
+            creator=self)
+
+        # Insert the transient data
+        instance.transient_data["tool"] = saver
+
+        self._add_instance_to_context(instance)
+
+        return instance
+
+    def collect_instances(self):
+
+        comp = get_current_comp()
+        tools = comp.GetToolList(False, "Saver").values()
+        for tool in tools:
+
+            data = self.get_managed_tool_data(tool)
+            if not data:
+                data = self._collect_unmanaged_saver(tool)
+
+            # Add instance
+            created_instance = CreatedInstance.from_existing(data, self)
+
+            # Collect transient data
+            created_instance.transient_data["tool"] = tool
+
+            self._add_instance_to_context(created_instance)
+
+    def get_icon(self):
+        return qtawesome.icon("fa.eye", color="white")
+
+    def update_instances(self, update_list):
+        for created_inst, _changes in update_list:
+
+            new_data = created_inst.data_to_store()
+            tool = created_inst.transient_data["tool"]
+            self._update_tool_with_data(tool, new_data)
+            self._imprint(tool, new_data)
+
+    def remove_instances(self, instances):
+        for instance in instances:
+            # Remove the tool from the scene
+
+            tool = instance.transient_data["tool"]
+            if tool:
+                tool.Delete()
+
+            # Remove the collected CreatedInstance to remove from UI directly
+            self._remove_instance_from_context(instance)
+
+    def _imprint(self, tool, data):
+        # Save all data in a "openpype.{key}" = value data
+
+        active = data.pop("active", None)
+        if active is not None:
+            # Use active value to set the passthrough state
+            tool.SetAttrs({"TOOLB_PassThrough": not active})
+
+        for key, value in data.items():
+            tool.SetData(f"openpype.{key}", value)
+
+    def _update_tool_with_data(self, tool, data):
+        """Update tool node name and output path based on subset data"""
+        if "subset" not in data:
+            return
+
+        original_subset = tool.GetData("openpype.subset")
+        subset = data["subset"]
+        if original_subset != subset:
+            # Subset change detected
+            # Update output filepath
+            workdir = os.path.normpath(legacy_io.Session["AVALON_WORKDIR"])
+            filename = f"{subset}..exr"
+            filepath = os.path.join(workdir, "render", subset, filename)
+            tool["Clip"] = filepath
+
+            # Rename tool
+            if tool.Name != subset:
+                print(f"Renaming {tool.Name} -> {subset}")
+                tool.SetAttrs({"TOOLS_Name": subset})
+
+    def _collect_unmanaged_saver(self, tool):
+
+        # TODO: this should not be done this way - this should actually
+        #       get the data as stored on the tool explicitly (however)
+        #       that would disallow any 'regular saver' to be collected
+        #       unless the instance data is stored on it to begin with
+
+        print("Collecting unmanaged saver..")
+        comp = tool.Comp()
+
+        # Allow regular non-managed savers to also be picked up
+        project = legacy_io.Session["AVALON_PROJECT"]
+        asset = legacy_io.Session["AVALON_ASSET"]
+        task = legacy_io.Session["AVALON_TASK"]
+
+        asset_doc = get_asset_by_name(project_name=project,
+                                      asset_name=asset)
+
+        path = tool["Clip"][comp.TIME_UNDEFINED]
+        fname = os.path.basename(path)
+        fname, _ext = os.path.splitext(fname)
+        variant = fname.rstrip(".")
+        subset = self.get_subset_name(
+            variant=variant,
+            task_name=task,
+            asset_doc=asset_doc,
+            project_name=project,
+        )
+
+        attrs = tool.GetAttrs()
+        passthrough = attrs["TOOLB_PassThrough"]
+        return {
+            # Required data
+            "project": project,
+            "asset": asset,
+            "subset": subset,
+            "task": task,
+            "variant": variant,
+            "active": not passthrough,
+            "family": self.family,
+
+            # Unique identifier for instance and this creator
+            "id": "pyblish.avalon.instance",
+            "creator_identifier": self.identifier
+        }
+
+    def get_managed_tool_data(self, tool):
+        """Return data of the tool if it matches creator identifier"""
+        data = tool.GetData('openpype')
+        if not isinstance(data, dict):
+            return
+
+        required = {
+            "id": "pyblish.avalon.instance",
+            "creator_identifier": self.identifier
+        }
+        for key, value in required.items():
+            if key not in data or data[key] != value:
+                return
+
+        # Get active state from the actual tool state
+        attrs = tool.GetAttrs()
+        passthrough = attrs["TOOLB_PassThrough"]
+        data["active"] = not passthrough
+
+        return data
+
+    def get_instance_attr_defs(self):
+        return [
+            BoolDef(
+                "review",
+                default=True,
+                label="Review"
+            )
+        ]
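`_imprint()` stores instance metadata on the Saver as flat `openpype.{key}` entries, and `get_managed_tool_data()` reads them back as one dict via `tool.GetData("openpype")`. A sketch of that round trip with a stand-in tool object, since Fusion's scripting API only exists inside Fusion (the stand-in assumes Fusion namespaces dotted keys this way):

```python
class FakeTool:
    """Stand-in mimicking the dotted-key namespacing of Fusion's SetData."""

    def __init__(self):
        self._data = {}

    def SetData(self, key, value):
        section, _, name = key.partition(".")
        self._data.setdefault(section, {})[name] = value

    def GetData(self, key):
        return self._data.get(key)


tool = FakeTool()
instance_data = {
    "id": "pyblish.avalon.instance",
    "creator_identifier": "io.openpype.creators.fusion.saver",
    "subset": "renderMain",  # illustrative subset name
}
for key, value in instance_data.items():
    tool.SetData(f"openpype.{key}", value)  # mirrors _imprint()

print(tool.GetData("openpype"))  # the dict get_managed_tool_data() inspects
```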
109  openpype/hosts/fusion/plugins/create/create_workfile.py (new file)
@@ -0,0 +1,109 @@
+import qtawesome
+
+from openpype.hosts.fusion.api import (
+    get_current_comp
+)
+from openpype.client import get_asset_by_name
+from openpype.pipeline import (
+    AutoCreator,
+    CreatedInstance,
+    legacy_io,
+)
+
+
+class FusionWorkfileCreator(AutoCreator):
+    identifier = "workfile"
+    family = "workfile"
+    label = "Workfile"
+
+    default_variant = "Main"
+
+    create_allow_context_change = False
+
+    data_key = "openpype_workfile"
+
+    def collect_instances(self):
+
+        comp = get_current_comp()
+        data = comp.GetData(self.data_key)
+        if not data:
+            return
+
+        instance = CreatedInstance(
+            family=self.family,
+            subset_name=data["subset"],
+            data=data,
+            creator=self
+        )
+        instance.transient_data["comp"] = comp
+
+        self._add_instance_to_context(instance)
+
+    def update_instances(self, update_list):
+        for created_inst, _changes in update_list:
+            comp = created_inst.transient_data["comp"]
+            if not hasattr(comp, "SetData"):
+                # Comp is not alive anymore, likely closed by the user
+                self.log.error("Workfile comp not found for existing instance."
+                               " Comp might have been closed in the meantime.")
+                continue
+
+            # Imprint data into the comp
+            data = created_inst.data_to_store()
+            comp.SetData(self.data_key, data)
+
+    def create(self, options=None):
+
+        comp = get_current_comp()
+        if not comp:
+            self.log.error("Unable to find current comp")
+            return
+
+        existing_instance = None
+        for instance in self.create_context.instances:
+            if instance.family == self.family:
+                existing_instance = instance
+                break
+
+        project_name = legacy_io.Session["AVALON_PROJECT"]
+        asset_name = legacy_io.Session["AVALON_ASSET"]
+        task_name = legacy_io.Session["AVALON_TASK"]
+        host_name = legacy_io.Session["AVALON_APP"]
+
+        if existing_instance is None:
+            asset_doc = get_asset_by_name(project_name, asset_name)
+            subset_name = self.get_subset_name(
+                self.default_variant, task_name, asset_doc,
+                project_name, host_name
+            )
+            data = {
+                "asset": asset_name,
+                "task": task_name,
+                "variant": self.default_variant
+            }
+            data.update(self.get_dynamic_data(
+                self.default_variant, task_name, asset_doc,
+                project_name, host_name, None
+            ))
+
+            new_instance = CreatedInstance(
+                self.family, subset_name, data, self
+            )
+            new_instance.transient_data["comp"] = comp
+            self._add_instance_to_context(new_instance)
+
+        elif (
+            existing_instance["asset"] != asset_name
+            or existing_instance["task"] != task_name
+        ):
+            asset_doc = get_asset_by_name(project_name, asset_name)
+            subset_name = self.get_subset_name(
+                self.default_variant, task_name, asset_doc,
+                project_name, host_name
+            )
+            existing_instance["asset"] = asset_name
+            existing_instance["task"] = task_name
+            existing_instance["subset"] = subset_name
+
+    def get_icon(self):
+        return qtawesome.icon("fa.file-o", color="white")
@@ -1,11 +1,9 @@
 import os
 import contextlib
 
-from openpype.client import get_version_by_id
-from openpype.pipeline import (
-    load,
-    legacy_io,
-    get_representation_path,
+import openpype.pipeline.load as load
+from openpype.pipeline.load import (
+    get_representation_context,
+    get_representation_path_from_context
 )
 from openpype.hosts.fusion.api import (
     imprint_container,
@@ -148,7 +146,7 @@ class FusionLoadSequence(load.LoaderPlugin):
         namespace = context['asset']['name']
 
         # Use the first file for now
-        path = self._get_first_image(os.path.dirname(self.fname))
+        path = get_representation_path_from_context(context)
 
         # Create the Loader with the filename path set
         comp = get_current_comp()
@@ -217,13 +215,11 @@ class FusionLoadSequence(load.LoaderPlugin):
         assert tool.ID == "Loader", "Must be Loader"
         comp = tool.Comp()
 
-        root = os.path.dirname(get_representation_path(representation))
-        path = self._get_first_image(root)
+        context = get_representation_context(representation)
+        path = get_representation_path_from_context(context)
 
         # Get start frame from version data
-        project_name = legacy_io.active_project()
-        version = get_version_by_id(project_name, representation["parent"])
-        start = self._get_start(version, tool)
+        start = self._get_start(context["version"], tool)
 
         with comp_lock_and_undo_chunk(comp, "Update Loader"):
@@ -256,11 +252,6 @@ class FusionLoadSequence(load.LoaderPlugin):
         with comp_lock_and_undo_chunk(comp, "Remove Loader"):
             tool.Delete()
 
-    def _get_first_image(self, root):
-        """Get first file in representation root"""
-        files = sorted(os.listdir(root))
-        return os.path.join(root, files[0])
-
     def _get_start(self, version_doc, tool):
         """Return real start frame of published files (incl. handles)"""
         data = version_doc["data"]
@@ -1,5 +1,3 @@
-import os
-
 import pyblish.api
 
 from openpype.hosts.fusion.api import get_current_comp
@@ -0,0 +1,43 @@
+import pyblish.api
+
+
+def get_comp_render_range(comp):
+    """Return comp's start-end render range and global start-end range."""
+    comp_attrs = comp.GetAttrs()
+    start = comp_attrs["COMPN_RenderStart"]
+    end = comp_attrs["COMPN_RenderEnd"]
+    global_start = comp_attrs["COMPN_GlobalStart"]
+    global_end = comp_attrs["COMPN_GlobalEnd"]
+
+    # Whenever render ranges are undefined fall back
+    # to the comp's global start and end
+    if start == -1000000000:
+        start = global_start
+    if end == -1000000000:
+        end = global_end
+
+    return start, end, global_start, global_end
+
+
+class CollectFusionCompFrameRanges(pyblish.api.ContextPlugin):
+    """Collect current comp"""
+
+    # We run this after CollectorOrder - 0.1 otherwise it gets
+    # overridden by global plug-in `CollectContextEntities`
+    order = pyblish.api.CollectorOrder - 0.05
+    label = "Collect Comp Frame Ranges"
+    hosts = ["fusion"]
+
+    def process(self, context):
+        """Collect all image sequence tools"""
+
+        comp = context.data["currentComp"]
+
+        # Store comp render ranges
+        start, end, global_start, global_end = get_comp_render_range(comp)
+        context.data["frameStart"] = int(start)
+        context.data["frameEnd"] = int(end)
+        context.data["frameStartHandle"] = int(global_start)
+        context.data["frameEndHandle"] = int(global_end)
+        context.data["handleStart"] = int(start) - int(global_start)
+        context.data["handleEnd"] = int(global_end) - int(end)
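A worked example of the collector's handle arithmetic: with an invented global range of 1001-1100 and render range 1009-1090, the handles come out as the distances between the two ranges.

```python
# Invented frame values illustrating the collector's math
global_start, global_end = 1001, 1100  # COMPN_GlobalStart / COMPN_GlobalEnd
start, end = 1009, 1090                # COMPN_RenderStart / COMPN_RenderEnd

handle_start = int(start) - int(global_start)  # 8
handle_end = int(global_end) - int(end)        # 10
print(handle_start, handle_end)
```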
@@ -1,5 +1,3 @@
-from bson.objectid import ObjectId
-
 import pyblish.api
 
 from openpype.pipeline import registered_host
@@ -97,10 +95,15 @@ class CollectUpstreamInputs(pyblish.api.InstancePlugin):
     label = "Collect Inputs"
     order = pyblish.api.CollectorOrder + 0.2
     hosts = ["fusion"]
+    families = ["render"]
 
     def process(self, instance):
 
         # Get all upstream and include itself
+        if not any(instance[:]):
+            self.log.debug("No tool found in instance, skipping..")
+            return
+
         tool = instance[0]
         nodes = list(iter_upstream(tool))
         nodes.append(tool)
@@ -108,7 +111,6 @@ class CollectUpstreamInputs(pyblish.api.InstancePlugin):
         # Collect containers for the given set of nodes
         containers = collect_input_containers(nodes)
 
-        inputs = [ObjectId(c["representation"]) for c in containers]
+        inputs = [c["representation"] for c in containers]
         instance.data["inputRepresentations"] = inputs
 
         self.log.info("Collected inputs: %s" % inputs)
@@ -3,25 +3,7 @@ import os
 import pyblish.api
 
 
-def get_comp_render_range(comp):
-    """Return comp's start-end render range and global start-end range."""
-    comp_attrs = comp.GetAttrs()
-    start = comp_attrs["COMPN_RenderStart"]
-    end = comp_attrs["COMPN_RenderEnd"]
-    global_start = comp_attrs["COMPN_GlobalStart"]
-    global_end = comp_attrs["COMPN_GlobalEnd"]
-
-    # Whenever render ranges are undefined fall back
-    # to the comp's global start and end
-    if start == -1000000000:
-        start = global_start
-    if end == -1000000000:
-        end = global_end
-
-    return start, end, global_start, global_end
-
-
-class CollectInstances(pyblish.api.ContextPlugin):
+class CollectInstanceData(pyblish.api.InstancePlugin):
     """Collect Fusion saver instances
 
    This additionally stores the Comp start and end render range in the
@@ -30,77 +12,68 @@ class CollectInstances(pyblish.api.ContextPlugin):
     """
 
     order = pyblish.api.CollectorOrder
-    label = "Collect Instances"
+    label = "Collect Instances Data"
     hosts = ["fusion"]
 
-    def process(self, context):
+    def process(self, instance):
         """Collect all image sequence tools"""
 
-        from openpype.hosts.fusion.api.lib import get_frame_path
+        context = instance.context
 
-        comp = context.data["currentComp"]
+        # Include creator attributes directly as instance data
+        creator_attributes = instance.data["creator_attributes"]
+        instance.data.update(creator_attributes)
 
-        # Get all savers in the comp
-        tools = comp.GetToolList(False).values()
-        savers = [tool for tool in tools if tool.ID == "Saver"]
+        # Include start and end render frame in label
+        subset = instance.data["subset"]
+        start = context.data["frameStart"]
+        end = context.data["frameEnd"]
+        label = "{subset} ({start}-{end})".format(subset=subset,
+                                                  start=int(start),
+                                                  end=int(end))
+        instance.data.update({
+            "label": label,
 
-        start, end, global_start, global_end = get_comp_render_range(comp)
-        context.data["frameStart"] = int(start)
-        context.data["frameEnd"] = int(end)
-        context.data["frameStartHandle"] = int(global_start)
-        context.data["frameEndHandle"] = int(global_end)
+            # todo: Allow custom frame range per instance
+            "frameStart": context.data["frameStart"],
+            "frameEnd": context.data["frameEnd"],
+            "frameStartHandle": context.data["frameStartHandle"],
+            "frameEndHandle": context.data["frameStartHandle"],
+            "handleStart": context.data["handleStart"],
+            "handleEnd": context.data["handleEnd"],
+            "fps": context.data["fps"],
+        })
 
-        for tool in savers:
+        # Add review family if the instance is marked as 'review'
+        # This could be done through a 'review' Creator attribute.
+        if instance.data.get("review", False):
+            self.log.info("Adding review family..")
+            instance.data["families"].append("review")
 
+        if instance.data["family"] == "render":
+            # TODO: This should probably move into a collector of
+            #       its own for the "render" family
+            from openpype.hosts.fusion.api.lib import get_frame_path
+            comp = context.data["currentComp"]
 
+            # This is only the case for savers currently but not
+            # for workfile instances. So we assume saver here.
+            tool = instance.data["transientData"]["tool"]
             path = tool["Clip"][comp.TIME_UNDEFINED]
 
-            tool_attrs = tool.GetAttrs()
-            active = not tool_attrs["TOOLB_PassThrough"]
-
-            if not path:
-                self.log.warning("Skipping saver because it "
-                                 "has no path set: {}".format(tool.Name))
-                continue
-
             filename = os.path.basename(path)
             head, padding, tail = get_frame_path(filename)
             ext = os.path.splitext(path)[1]
             assert tail == ext, ("Tail does not match %s" % ext)
-            subset = head.rstrip("_. ")  # subset is head of the filename
 
-            # Include start and end render frame in label
-            label = "{subset} ({start}-{end})".format(subset=subset,
-                                                      start=int(start),
-                                                      end=int(end))
-
-            instance = context.create_instance(subset)
             instance.data.update({
-                "asset": os.environ["AVALON_ASSET"],  # todo: not a constant
-                "subset": subset,
                 "path": path,
                 "outputDir": os.path.dirname(path),
-                "ext": ext,  # todo: should be redundant
-                "label": label,
-                "task": context.data["task"],
-                "frameStart": context.data["frameStart"],
-                "frameEnd": context.data["frameEnd"],
-                "frameStartHandle": context.data["frameStartHandle"],
-                "frameEndHandle": context.data["frameStartHandle"],
-                "fps": context.data["fps"],
-                "families": ["render", "review"],
-                "family": "render",
-                "active": active,
-                "publish": active  # backwards compatibility
+                "ext": ext,  # todo: should be redundant?
 
+                # Backwards compatibility: embed tool in instance.data
+                "tool": tool
             })
 
             # Add tool itself as member
             instance.append(tool)
 
-            self.log.info("Found: \"%s\" " % path)
-
-        # Sort/grouped by family (preserving local index)
-        context[:] = sorted(context, key=self.sort_by_family)
-
-        return context
-
-    def sort_by_family(self, instance):
-        """Sort by family"""
-        return instance.data.get("families", instance.data.get("family"))
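The key move in the refactor above is promoting `creator_attributes` directly into `instance.data`, so downstream plug-ins can read e.g. `review` without knowing about the new publisher's attribute storage. The promotion in isolation, with plain dicts standing in for a pyblish instance:

```python
# Plain-dict sketch of the creator_attributes promotion (illustrative data)
instance_data = {
    "subset": "renderMain",
    "family": "render",
    "families": ["render"],
    "creator_attributes": {"review": True},
}

instance_data.update(instance_data["creator_attributes"])

if instance_data.get("review", False):
    instance_data["families"].append("review")

print(instance_data["families"])  # ['render', 'review']
```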
26  openpype/hosts/fusion/plugins/publish/collect_workfile.py (new file)
@@ -0,0 +1,26 @@
+import os
+
+import pyblish.api
+
+
+class CollectFusionWorkfile(pyblish.api.InstancePlugin):
+    """Collect Fusion workfile representation."""
+
+    order = pyblish.api.CollectorOrder + 0.1
+    label = "Collect Workfile"
+    hosts = ["fusion"]
+    families = ["workfile"]
+
+    def process(self, instance):
+
+        current_file = instance.context.data["currentFile"]
+
+        folder, file = os.path.split(current_file)
+        filename, ext = os.path.splitext(file)
+
+        instance.data['representations'] = [{
+            'name': ext.lstrip("."),
+            'ext': ext.lstrip("."),
+            'files': file,
+            "stagingDir": folder,
+        }]
@@ -11,7 +11,7 @@ class FusionIncrementCurrentFile(pyblish.api.ContextPlugin):
     label = "Increment current file"
     order = pyblish.api.IntegratorOrder + 9.0
     hosts = ["fusion"]
-    families = ["render.farm"]
+    families = ["workfile"]
     optional = True
 
     def process(self, context):
@@ -1,9 +1,11 @@
 import os
 import pyblish.api
+from openpype.pipeline import publish
 from openpype.hosts.fusion.api import comp_lock_and_undo_chunk
 
 
-class Fusionlocal(pyblish.api.InstancePlugin):
+class Fusionlocal(pyblish.api.InstancePlugin,
+                  publish.ColormanagedPyblishPluginMixin):
     """Render the current Fusion composition locally.
 
    Extract the result of savers by starting a comp render
@@ -17,18 +19,20 @@
     families = ["render.local"]
 
     def process(self, instance):
-
-        # This plug-in runs only once and thus assumes all instances
-        # currently will render the same frame range
         context = instance.context
-        key = f"__hasRun{self.__class__.__name__}"
-        if context.data.get(key, False):
-            return
-
-        context.data[key] = True
 
         # Start render
         self.render_once(context)
 
+        # Log render status
+        self.log.info(
+            "Rendered '{nm}' for asset '{ast}' under the task '{tsk}'".format(
+                nm=instance.data["name"],
+                ast=instance.data["asset"],
+                tsk=instance.data["task"],
+            )
+        )
+
         frame_start = context.data["frameStartHandle"]
         frame_end = context.data["frameEndHandle"]
         path = instance.data["path"]
@@ -41,40 +45,56 @@
             for frame in range(frame_start, frame_end + 1)
         ]
         repre = {
-            'name': ext[1:],
-            'ext': ext[1:],
-            'frameStart': f"%0{len(str(frame_end))}d" % frame_start,
-            'files': files,
+            "name": ext[1:],
+            "ext": ext[1:],
+            "frameStart": f"%0{len(str(frame_end))}d" % frame_start,
+            "files": files,
             "stagingDir": output_dir,
         }
 
+        self.set_representation_colorspace(
+            representation=repre,
+            context=context,
+        )
+
         if "representations" not in instance.data:
             instance.data["representations"] = []
         instance.data["representations"].append(repre)
 
-        # review representation
-        repre_preview = repre.copy()
-        repre_preview["name"] = repre_preview["ext"] = "mp4"
-        repre_preview["tags"] = ["review", "ftrackreview", "delete"]
-        instance.data["representations"].append(repre_preview)
+        if instance.data.get("review", False):
+            repre["tags"] = ["review", "ftrackreview"]
 
     def render_once(self, context):
         """Render context comp only once, even with more render instances"""
 
-        current_comp = context.data["currentComp"]
-        frame_start = context.data["frameStartHandle"]
-        frame_end = context.data["frameEndHandle"]
+        # This plug-in assumes all render nodes get rendered at the same time
+        # to speed up the rendering. The check below makes sure that we only
+        # execute the rendering once and not for each instance.
+        key = f"__hasRun{self.__class__.__name__}"
+        if key not in context.data:
+            # We initialize as false to indicate it wasn't successful yet
+            # so we can keep track of whether Fusion succeeded
+            context.data[key] = False
 
-        self.log.info("Starting render")
-        self.log.info(f"Start frame: {frame_start}")
-        self.log.info(f"End frame: {frame_end}")
+            current_comp = context.data["currentComp"]
+            frame_start = context.data["frameStartHandle"]
+            frame_end = context.data["frameEndHandle"]
 
-        with comp_lock_and_undo_chunk(current_comp):
-            result = current_comp.Render({
-                "Start": frame_start,
-                "End": frame_end,
-                "Wait": True
-            })
+            self.log.info("Starting Fusion render")
+            self.log.info(f"Start frame: {frame_start}")
+            self.log.info(f"End frame: {frame_end}")
 
-        if not result:
+            with comp_lock_and_undo_chunk(current_comp):
+                result = current_comp.Render(
+                    {
+                        "Start": frame_start,
+                        "End": frame_end,
+                        "Wait": True,
+                    }
+                )
+
+            context.data[key] = bool(result)
+
+        if context.data[key] is False:
             raise RuntimeError("Comp render failed")
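`render_once()` keys a per-context flag so that one comp render serves every render instance; initializing the flag to `False` before rendering distinguishes "not yet run" from "ran and failed". The guard in isolation (a dict stands in for `context.data`):

```python
# The render-once guard in isolation; context_data stands in for context.data
context_data = {}
key = "__hasRunFusionlocal"  # f"__hasRun{self.__class__.__name__}" in the plug-in


def render_once(context_data, do_render):
    if key not in context_data:
        context_data[key] = False  # mark as attempted, not yet successful
        context_data[key] = bool(do_render())
    if context_data[key] is False:
        raise RuntimeError("Comp render failed")


render_once(context_data, lambda: True)   # first instance triggers the render
render_once(context_data, lambda: False)  # later instances reuse stored result
```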
@@ -7,7 +7,7 @@ class FusionSaveComp(pyblish.api.ContextPlugin):
     label = "Save current file"
     order = pyblish.api.ExtractorOrder - 0.49
     hosts = ["fusion"]
-    families = ["render"]
+    families = ["render", "workfile"]
 
     def process(self, context):
 
@@ -1,6 +1,9 @@
 import pyblish.api
 
 from openpype.pipeline.publish import RepairAction
+from openpype.pipeline import PublishValidationError
+
+from openpype.hosts.fusion.api.action import SelectInvalidAction
 
 
 class ValidateBackgroundDepth(pyblish.api.InstancePlugin):
@@ -8,11 +11,12 @@ class ValidateBackgroundDepth(pyblish.api.InstancePlugin):
 
     order = pyblish.api.ValidatorOrder
     label = "Validate Background Depth 32 bit"
-    actions = [RepairAction]
     hosts = ["fusion"]
     families = ["render"]
     optional = True
 
+    actions = [SelectInvalidAction, RepairAction]
+
     @classmethod
     def get_invalid(cls, instance):
 
@@ -29,8 +33,10 @@ class ValidateBackgroundDepth(pyblish.api.InstancePlugin):
     def process(self, instance):
         invalid = self.get_invalid(instance)
         if invalid:
-            raise RuntimeError("Found %i nodes which are not set to float32"
-                               % len(invalid))
+            raise PublishValidationError(
+                "Found {} Backgrounds tools which"
+                " are not set to float32".format(len(invalid)),
+                title=self.label)
 
     @classmethod
     def repair(cls, instance):
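The validators in the rest of this commit all follow the same migration: `RuntimeError` becomes `PublishValidationError` with a `title` (so the new publisher can present a titled report), and `SelectInvalidAction` is attached. The shared shape, condensed into one illustrative plug-in:

```python
import pyblish.api
from openpype.pipeline import PublishValidationError
from openpype.hosts.fusion.api.action import SelectInvalidAction


class ValidateSomething(pyblish.api.InstancePlugin):
    """Illustrative validator condensing the pattern used in this commit."""

    order = pyblish.api.ValidatorOrder
    label = "Validate Something"
    hosts = ["fusion"]
    families = ["render"]
    actions = [SelectInvalidAction]

    @classmethod
    def get_invalid(cls, instance):
        return []  # a real validator inspects Fusion tools here

    def process(self, instance):
        invalid = self.get_invalid(instance)
        if invalid:
            raise PublishValidationError(
                "Found {} invalid tools".format(len(invalid)),
                title=self.label)
```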
@@ -1,6 +1,7 @@
 import os
 
 import pyblish.api
+from openpype.pipeline import PublishValidationError
 
 
 class ValidateFusionCompSaved(pyblish.api.ContextPlugin):
@@ -19,10 +20,12 @@ class ValidateFusionCompSaved(pyblish.api.ContextPlugin):
 
         filename = attrs["COMPS_FileName"]
         if not filename:
-            raise RuntimeError("Comp is not saved.")
+            raise PublishValidationError("Comp is not saved.",
+                                         title=self.label)
 
         if not os.path.exists(filename):
-            raise RuntimeError("Comp file does not exist: %s" % filename)
+            raise PublishValidationError(
+                "Comp file does not exist: %s" % filename, title=self.label)
 
         if attrs["COMPB_Modified"]:
             self.log.warning("Comp is modified. Save your comp to ensure your "
@@ -1,6 +1,9 @@
 import pyblish.api
 
 from openpype.pipeline.publish import RepairAction
+from openpype.pipeline import PublishValidationError
+
+from openpype.hosts.fusion.api.action import SelectInvalidAction
 
 
 class ValidateCreateFolderChecked(pyblish.api.InstancePlugin):
@@ -15,6 +18,7 @@ class ValidateCreateFolderChecked(pyblish.api.InstancePlugin):
     label = "Validate Create Folder Checked"
     families = ["render"]
     hosts = ["fusion"]
+    actions = [SelectInvalidAction]
 
     @classmethod
     def get_invalid(cls, instance):
@@ -31,8 +35,9 @@ class ValidateCreateFolderChecked(pyblish.api.InstancePlugin):
     def process(self, instance):
         invalid = self.get_invalid(instance)
         if invalid:
-            raise RuntimeError("Found Saver with Create Folder During "
-                               "Render checked off")
+            raise PublishValidationError(
+                "Found Saver with Create Folder During Render checked off",
+                title=self.label)
 
     @classmethod
     def repair(cls, instance):
@@ -1,6 +1,9 @@
 import os

 import pyblish.api
+from openpype.pipeline import PublishValidationError
+
+from openpype.hosts.fusion.api.action import SelectInvalidAction


 class ValidateFilenameHasExtension(pyblish.api.InstancePlugin):

@@ -16,11 +19,13 @@ class ValidateFilenameHasExtension(pyblish.api.InstancePlugin):
     label = "Validate Filename Has Extension"
     families = ["render"]
     hosts = ["fusion"]
+    actions = [SelectInvalidAction]

     def process(self, instance):
         invalid = self.get_invalid(instance)
         if invalid:
-            raise RuntimeError("Found Saver without an extension")
+            raise PublishValidationError("Found Saver without an extension",
+                                         title=self.label)

     @classmethod
     def get_invalid(cls, instance):
@@ -1,4 +1,7 @@
 import pyblish.api
+from openpype.pipeline import PublishValidationError
+
+from openpype.hosts.fusion.api.action import SelectInvalidAction


 class ValidateSaverHasInput(pyblish.api.InstancePlugin):

@@ -12,6 +15,7 @@ class ValidateSaverHasInput(pyblish.api.InstancePlugin):
     label = "Validate Saver Has Input"
     families = ["render"]
     hosts = ["fusion"]
+    actions = [SelectInvalidAction]

     @classmethod
     def get_invalid(cls, instance):

@@ -25,5 +29,8 @@ class ValidateSaverHasInput(pyblish.api.InstancePlugin):
     def process(self, instance):
         invalid = self.get_invalid(instance)
         if invalid:
-            raise RuntimeError("Saver has no incoming connection: "
-                               "{} ({})".format(instance, invalid[0].Name))
+            saver_name = invalid[0].Name
+            raise PublishValidationError(
+                "Saver has no incoming connection: {} ({})".format(instance,
+                                                                   saver_name),
+                title=self.label)
@@ -1,4 +1,7 @@
 import pyblish.api
+from openpype.pipeline import PublishValidationError
+
+from openpype.hosts.fusion.api.action import SelectInvalidAction


 class ValidateSaverPassthrough(pyblish.api.ContextPlugin):

@@ -8,6 +11,7 @@ class ValidateSaverPassthrough(pyblish.api.ContextPlugin):
     label = "Validate Saver Passthrough"
     families = ["render"]
     hosts = ["fusion"]
+    actions = [SelectInvalidAction]

     def process(self, context):

@@ -27,8 +31,9 @@ class ValidateSaverPassthrough(pyblish.api.ContextPlugin):
         if invalid_instances:
             self.log.info("Reset pyblish to collect your current scene state, "
                           "that should fix error.")
-            raise RuntimeError("Invalid instances: "
-                               "{0}".format(invalid_instances))
+            raise PublishValidationError(
+                "Invalid instances: {0}".format(invalid_instances),
+                title=self.label)

     def is_invalid(self, instance):

@@ -36,7 +41,7 @@ class ValidateSaverPassthrough(pyblish.api.ContextPlugin):
         attr = saver.GetAttrs()
         active = not attr["TOOLB_PassThrough"]

-        if active != instance.data["publish"]:
+        if active != instance.data.get("publish", True):
             self.log.info("Saver has different passthrough state than "
                           "Pyblish: {} ({})".format(instance, saver.Name))
             return [saver]
@@ -0,0 +1,55 @@
from collections import defaultdict

import pyblish.api
from openpype.pipeline import PublishValidationError

from openpype.hosts.fusion.api.action import SelectInvalidAction


class ValidateUniqueSubsets(pyblish.api.ContextPlugin):
    """Ensure all instances have a unique subset name"""

    order = pyblish.api.ValidatorOrder
    label = "Validate Unique Subsets"
    families = ["render"]
    hosts = ["fusion"]
    actions = [SelectInvalidAction]

    @classmethod
    def get_invalid(cls, context):

        # Collect instances per subset per asset
        instances_per_subset_asset = defaultdict(lambda: defaultdict(list))
        for instance in context:
            asset = instance.data.get("asset", context.data.get("asset"))
            subset = instance.data.get("subset", context.data.get("subset"))
            instances_per_subset_asset[asset][subset].append(instance)

        # Find which asset + subset combination has more than one instance
        # Those are considered invalid because they'd integrate to the same
        # destination.
        invalid = []
        for asset, instances_per_subset in instances_per_subset_asset.items():
            for subset, instances in instances_per_subset.items():
                if len(instances) > 1:
                    cls.log.warning(
                        "{asset} > {subset} used by more than "
                        "one instance: {instances}".format(
                            asset=asset,
                            subset=subset,
                            instances=instances
                        )
                    )
                    invalid.extend(instances)

        # Return tools for the invalid instances so they can be selected
        invalid = [instance.data["tool"] for instance in invalid]

        return invalid

    def process(self, context):
        invalid = self.get_invalid(context)
        if invalid:
            raise PublishValidationError("Multiple instances are set to "
                                         "the same asset > subset.",
                                         title=self.label)
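The new validator groups instances by two keys (asset, then subset) before flagging any group with more than one member. The nested-`defaultdict` idiom it uses generalizes well; a small standalone sketch with made-up data:

```python
from collections import defaultdict

# Hypothetical (asset, subset) pairs as they might come from instances
pairs = [("shot010", "renderMain"), ("shot010", "renderMain"),
         ("shot020", "renderMain")]

grouped = defaultdict(lambda: defaultdict(list))
for asset, subset in pairs:
    grouped[asset][subset].append((asset, subset))

# Any (asset, subset) bucket with more than one entry is a duplicate
duplicates = [
    (asset, subset)
    for asset, subsets in grouped.items()
    for subset, items in subsets.items()
    if len(items) > 1
]
print(duplicates)  # [('shot010', 'renderMain')]
```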
@@ -1,5 +1,3 @@
-from bson.objectid import ObjectId
-
 import pyblish.api

 from openpype.pipeline import registered_host

@@ -117,7 +115,6 @@ class CollectUpstreamInputs(pyblish.api.InstancePlugin):
         # Collect containers for the given set of nodes
         containers = collect_input_containers(nodes)

-        inputs = [ObjectId(c["representation"]) for c in containers]
+        inputs = [c["representation"] for c in containers]
         instance.data["inputRepresentations"] = inputs

         self.log.info("Collected inputs: %s" % inputs)
@@ -336,7 +336,8 @@ class RenderSettings(object):
         )

         # Set render file format to exr
-        cmds.setAttr("{}.imageFormatStr".format(node), "exr", type="string")
+        ext = vray_render_presets["image_format"]
+        cmds.setAttr("{}.imageFormatStr".format(node), ext, type="string")

         # animType
         cmds.setAttr("{}.animType".format(node), 1)
@@ -22,6 +22,8 @@ PLACEHOLDER_SET = "PLACEHOLDERS_SET"
 class MayaTemplateBuilder(AbstractTemplateBuilder):
     """Concrete implementation of AbstractTemplateBuilder for maya"""

+    use_legacy_creators = True
+
     def import_template(self, path):
         """Import template into current scene.
         Block if a template is already loaded.
@@ -26,6 +26,7 @@ class ReferenceLoader(openpype.hosts.maya.api.plugin.ReferenceLoader):
             "rig",
             "camerarig",
             "staticMesh",
+            "skeletalMesh",
             "mvLook"]

     representations = ["ma", "abc", "fbx", "mb"]
@@ -1,5 +1,4 @@
 import copy
-from bson.objectid import ObjectId

 from maya import cmds
 import maya.api.OpenMaya as om

@@ -165,9 +164,8 @@ class CollectUpstreamInputs(pyblish.api.InstancePlugin):
             containers = collect_input_containers(scene_containers,
                                                   nodes)

-            inputs = [ObjectId(c["representation"]) for c in containers]
+            inputs = [c["representation"] for c in containers]
             instance.data["inputRepresentations"] = inputs

             self.log.info("Collected inputs: %s" % inputs)

     def _collect_renderlayer_inputs(self, scene_containers, instance):
@@ -48,7 +48,6 @@ from openpype.pipeline.colorspace import (
     get_imageio_config
 )
 from openpype.pipeline.workfile import BuildWorkfile
-
 from . import gizmo_menu
 from .constants import ASSIST

@@ -2678,6 +2677,18 @@ def process_workfile_builder():
         open_file(last_workfile_path)


+def start_workfile_template_builder():
+    from .workfile_template_builder import (
+        build_workfile_template
+    )
+
+    # to avoid looping of the callback, remove it!
+    log.info("Starting workfile template builder...")
+    build_workfile_template(workfile_creation_enabled=True)
+
+    # remove callback since it would be duplicating the workfile
+    nuke.removeOnCreate(start_workfile_template_builder, nodeClass="Root")
+
+
 @deprecated
 def recreate_instance(origin_node, avalon_data=None):
     """Recreate input instance to different data

@@ -2850,10 +2861,10 @@ class NukeDirmap(HostDirmap):
         pass

     def dirmap_routine(self, source_path, destination_path):
-        log.debug("{}: {}->{}".format(self.file_name,
-                                      source_path, destination_path))
         source_path = source_path.lower().replace(os.sep, '/')
         destination_path = destination_path.lower().replace(os.sep, '/')
+        log.debug("Map: {} with: {}->{}".format(self.file_name,
+                                                source_path, destination_path))
         if platform.system().lower() == "windows":
             self.file_name = self.file_name.lower().replace(
                 source_path, destination_path)

@@ -2867,6 +2878,7 @@ class DirmapCache:
     _project_name = None
     _project_settings = None
     _sync_module = None
+    _mapping = None

     @classmethod
     def project_name(cls):

@@ -2886,6 +2898,36 @@ class DirmapCache:
             cls._sync_module = ModulesManager().modules_by_name["sync_server"]
         return cls._sync_module

+    @classmethod
+    def mapping(cls):
+        return cls._mapping
+
+    @classmethod
+    def set_mapping(cls, mapping):
+        cls._mapping = mapping
+
+
+def dirmap_file_name_filter(file_name):
+    """Nuke callback function with single full path argument.
+
+        Checks project settings for potential mapping from source to dest.
+    """
+
+    dirmap_processor = NukeDirmap(
+        file_name,
+        "nuke",
+        DirmapCache.project_name(),
+        DirmapCache.project_settings(),
+        DirmapCache.sync_module(),
+    )
+    if not DirmapCache.mapping():
+        DirmapCache.set_mapping(dirmap_processor.get_mappings())
+
+    dirmap_processor.process_dirmap(DirmapCache.mapping())
+    if os.path.exists(dirmap_processor.file_name):
+        return dirmap_processor.file_name
+    return file_name
+
+
 @contextlib.contextmanager
 def node_tempfile():

@@ -2931,25 +2973,6 @@ def duplicate_node(node):
     return dupli_node


-def dirmap_file_name_filter(file_name):
-    """Nuke callback function with single full path argument.
-
-        Checks project settings for potential mapping from source to dest.
-    """
-
-    dirmap_processor = NukeDirmap(
-        file_name,
-        "nuke",
-        DirmapCache.project_name(),
-        DirmapCache.project_settings(),
-        DirmapCache.sync_module(),
-    )
-    dirmap_processor.process_dirmap()
-    if os.path.exists(dirmap_processor.file_name):
-        return dirmap_processor.file_name
-    return file_name
-
-
 def get_group_io_nodes(nodes):
     """Get the input and the output of a group of nodes."""
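The refactor above computes the dirmap mapping once and caches it on `DirmapCache`, because Nuke's filename filter callback fires for every file knob evaluation and re-reading project settings each time would be costly. A minimal sketch of the lazy class-level cache, with a stand-in for the (potentially slow) settings lookup:

```python
class MappingCache:
    _mapping = None

    @classmethod
    def mapping(cls):
        if cls._mapping is None:
            # Stand-in for the expensive settings / Site Sync lookup
            cls._mapping = {"source-path": ["P:/"],
                            "destination-path": ["/mnt/projects/"]}
        return cls._mapping


def filename_filter(file_name):
    # Called very often; must not re-read settings every time
    mapping = MappingCache.mapping()
    for src, dst in zip(mapping["source-path"],
                        mapping["destination-path"]):
        if file_name.startswith(src):
            return dst + file_name[len(src):]
    return file_name


print(filename_filter("P:/show/shot/plate.exr"))
# /mnt/projects/show/shot/plate.exr
```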
@@ -33,6 +33,7 @@ from .lib import (
     add_publish_knob,
     WorkfileSettings,
     process_workfile_builder,
+    start_workfile_template_builder,
     launch_workfiles_app,
     check_inventory_versions,
     set_avalon_knob_data,

@@ -48,7 +49,6 @@ from .workfile_template_builder import (
     NukePlaceholderLoadPlugin,
     NukePlaceholderCreatePlugin,
     build_workfile_template,
-    update_workfile_template,
     create_placeholder,
     update_placeholder,
 )

@@ -156,6 +156,7 @@ def add_nuke_callbacks():
     nuke.addOnCreate(
         workfile_settings.set_context_settings, nodeClass="Root")
     nuke.addOnCreate(workfile_settings.set_favorites, nodeClass="Root")
+    nuke.addOnCreate(start_workfile_template_builder, nodeClass="Root")
     nuke.addOnCreate(process_workfile_builder, nodeClass="Root")

     # fix ffmpeg settings on script
@@ -239,7 +239,11 @@ class NukeCreator(NewCreator):

     def get_pre_create_attr_defs(self):
         return [
-            BoolDef("use_selection", label="Use selection")
+            BoolDef(
+                "use_selection",
+                default=not self.create_context.headless,
+                label="Use selection"
+            )
         ]

     def get_creator_settings(self, project_settings, settings_key=None):
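The change above (repeated below for the write creators) makes the pre-create "Use selection" attribute default to the inverse of the headless state: on for interactive sessions, off when publishing without a UI (e.g. on a farm), where there is no node selection to use. A hedged sketch of the idea with hypothetical stand-ins for `BoolDef` and the create context:

```python
class BoolDef:
    # Hypothetical stand-in for the real attribute definition class
    def __init__(self, key, default=False, label=None):
        self.key, self.default, self.label = key, default, label


class Ctx:
    headless = True  # e.g. publishing from a render farm


attr = BoolDef("use_selection",
               default=not Ctx.headless,  # False when headless
               label="Use selection")
print(attr.default)  # False
```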
@@ -1,7 +1,5 @@
-import collections
-
 import nuke

 from openpype.pipeline import registered_host
 from openpype.pipeline.workfile.workfile_template_builder import (
     AbstractTemplateBuilder,

@@ -14,7 +12,6 @@ from openpype.pipeline.workfile.workfile_template_builder import (
 from openpype.tools.workfile_template_build import (
     WorkfileBuildPlaceholderDialog,
 )
-
 from .lib import (
     find_free_space_to_paste_nodes,
     get_extreme_positions,

@@ -45,7 +42,7 @@ class NukeTemplateBuilder(AbstractTemplateBuilder):
             get_template_preset implementation)

         Returns:
-            bool: Wether the template was succesfully imported or not
+            bool: Wether the template was successfully imported or not
        """

         # TODO check if the template is already imported

@@ -55,7 +52,6 @@ class NukeTemplateBuilder(AbstractTemplateBuilder):

         return True

-
 class NukePlaceholderPlugin(PlaceholderPlugin):
     node_color = 4278190335

@@ -947,9 +943,9 @@ class NukePlaceholderCreatePlugin(
             siblings_input.setInput(0, copy_output)


-def build_workfile_template(*args):
+def build_workfile_template(*args, **kwargs):
     builder = NukeTemplateBuilder(registered_host())
-    builder.build_template()
+    builder.build_template(*args, **kwargs)


 def update_workfile_template(*args):
@@ -13,7 +13,7 @@ def has_unsaved_changes():

 def save_file(filepath):
     path = filepath.replace("\\", "/")
-    nuke.scriptSaveAs(path)
+    nuke.scriptSaveAs(path, overwrite=1)
     nuke.Root()["name"].setValue(path)
     nuke.Root()["project_directory"].setValue(os.path.dirname(path))
     nuke.Root().setModified(False)
@@ -35,7 +35,7 @@ class CreateWriteImage(napi.NukeWriteCreator):
         attr_defs = [
             BoolDef(
                 "use_selection",
-                default=True,
+                default=not self.create_context.headless,
                 label="Use selection"
             ),
             self._get_render_target_enum(),

@@ -34,7 +34,7 @@ class CreateWritePrerender(napi.NukeWriteCreator):
         attr_defs = [
             BoolDef(
                 "use_selection",
-                default=True,
+                default=not self.create_context.headless,
                 label="Use selection"
             ),
             self._get_render_target_enum()

@@ -31,7 +31,7 @@ class CreateWriteRender(napi.NukeWriteCreator):
         attr_defs = [
             BoolDef(
                 "use_selection",
-                default=True,
+                default=not self.create_context.headless,
                 label="Use selection"
             ),
             self._get_render_target_enum()
@@ -222,18 +222,21 @@ class LoadClip(plugin.NukeLoader):
         """
         representation = deepcopy(representation)
         context = representation["context"]
-        template = representation["data"]["template"]
+
+        # Get the frame from the context and hash it
+        frame = context["frame"]
+        hashed_frame = "#" * len(str(frame))

+        # Replace the frame with the hash in the originalBasename
         if (
-            "{originalBasename}" in template
-            and "frame" in context
+            "{originalBasename}" in representation["data"]["template"]
         ):
-            frame = context["frame"]
-            hashed_frame = "#" * len(str(frame))
             origin_basename = context["originalBasename"]
             context["originalBasename"] = origin_basename.replace(
                 frame, hashed_frame
             )

+        # Replace the frame with the hash in the frame
         representation["context"]["frame"] = hashed_frame
         return representation
@@ -3,12 +3,14 @@ from pprint import pformat
 import nuke
 import pyblish.api
+from openpype.hosts.nuke import api as napi
+from openpype.pipeline import publish


-class CollectNukeWrites(pyblish.api.InstancePlugin):
+class CollectNukeWrites(pyblish.api.InstancePlugin,
+                        publish.ColormanagedPyblishPluginMixin):
     """Collect all write nodes."""

-    order = pyblish.api.CollectorOrder - 0.48
+    order = pyblish.api.CollectorOrder + 0.0021
     label = "Collect Writes"
     hosts = ["nuke", "nukeassist"]
     families = ["render", "prerender", "image"]

@@ -66,6 +68,9 @@ class CollectNukeWrites(pyblish.api.InstancePlugin):
         write_file_path = nuke.filename(write_node)
         output_dir = os.path.dirname(write_file_path)

+        # get colorspace and add to version data
+        colorspace = napi.get_colorspace_from_node(write_node)
+
         self.log.debug('output dir: {}'.format(output_dir))

         if render_target == "frames":

@@ -128,6 +133,12 @@ class CollectNukeWrites(pyblish.api.InstancePlugin):
             else:
                 representation['files'] = collected_frames

+            # inject colorspace data
+            self.set_representation_colorspace(
+                representation, instance.context,
+                colorspace=colorspace
+            )
+
             instance.data["representations"].append(representation)
             self.log.info("Publishing rendered frames ...")

@@ -145,8 +156,7 @@ class CollectNukeWrites(pyblish.api.InstancePlugin):
             instance.data["farm"] = True
             self.log.info("Farm rendering ON ...")

-        # get colorspace and add to version data
-        colorspace = napi.get_colorspace_from_node(write_node)
+        # TODO: remove this when we have proper colorspace support
         version_data = {
             "colorspace": colorspace
         }
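The collector now mixes in `publish.ColormanagedPyblishPluginMixin` and stamps each representation with colorspace metadata via `set_representation_colorspace`, as seen in the hunks above. A minimal sketch of the shape of such a plugin, assuming the mixin's method signature as used in the diff (the representation fields are illustrative, not a full schema):

```python
import pyblish.api
from openpype.pipeline import publish


class CollectSomething(pyblish.api.InstancePlugin,
                       publish.ColormanagedPyblishPluginMixin):
    order = pyblish.api.CollectorOrder + 0.0021
    label = "Collect Something"

    def process(self, instance):
        representation = {
            "name": "exr",
            "ext": "exr",
            "files": ["render.0001.exr", "render.0002.exr"],  # illustrative
        }
        # Inject colorspace metadata so downstream integrators can persist it
        self.set_representation_colorspace(
            representation, instance.context, colorspace="ACES - ACEScg"
        )
        instance.data.setdefault("representations", []).append(representation)
```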
@@ -4,12 +4,13 @@ import shutil
 import pyblish.api
 import clique
 import nuke

+from openpype.hosts.nuke import api as napi
 from openpype.pipeline import publish
 from openpype.lib import collect_frames


-class NukeRenderLocal(publish.ExtractorColormanaged):
+class NukeRenderLocal(publish.Extractor,
+                      publish.ColormanagedPyblishPluginMixin):
     """Render the current Nuke composition locally.

     Extract the result of savers by starting a comp render

@@ -85,7 +86,7 @@ class NukeRenderLocal(publish.Extractor,
         )

         ext = node["file_type"].value()
-        colorspace = node["colorspace"].value()
+        colorspace = napi.get_colorspace_from_node(node)

         if "representations" not in instance.data:
             instance.data["representations"] = []
@@ -10,10 +10,20 @@ from wsrpc_aiohttp import (

 from qtpy import QtCore

-from openpype.lib import Logger
-from openpype.pipeline import legacy_io
+from openpype.lib import Logger, StringTemplate
+from openpype.pipeline import (
+    registered_host,
+    Anatomy,
+)
+from openpype.pipeline.workfile import (
+    get_workfile_template_key_from_context,
+    get_last_workfile,
+)
+from openpype.pipeline.template_data import get_template_data_with_names
 from openpype.tools.utils import host_tools
 from openpype.tools.adobe_webserver.app import WebServerTool
+from openpype.pipeline.context_tools import change_current_context
+from openpype.client import get_asset_by_name

 from .ws_stub import PhotoshopServerStub

@@ -310,23 +320,28 @@ class PhotoshopRoute(WebSocketRoute):
     # client functions
     async def set_context(self, project, asset, task):
         """
-        Sets 'project' and 'asset' to envs, eg. setting context
+        Sets 'project' and 'asset' to envs, eg. setting context.

-        Args:
-            project (str)
-            asset (str)
+        Opens last workile from that context if exists.
+
+        Args:
+            project (str)
+            asset (str)
+            task (str
         """
         log.info("Setting context change")
-        log.info("project {} asset {} ".format(project, asset))
-        if project:
-            legacy_io.Session["AVALON_PROJECT"] = project
-            os.environ["AVALON_PROJECT"] = project
-        if asset:
-            legacy_io.Session["AVALON_ASSET"] = asset
-            os.environ["AVALON_ASSET"] = asset
-        if task:
-            legacy_io.Session["AVALON_TASK"] = task
-            os.environ["AVALON_TASK"] = task
+        log.info(f"project {project} asset {asset} task {task}")
+
+        asset_doc = get_asset_by_name(project, asset)
+        change_current_context(asset_doc, task)
+
+        last_workfile_path = self._get_last_workfile_path(project,
+                                                          asset,
+                                                          task)
+        if last_workfile_path and os.path.exists(last_workfile_path):
+            ProcessLauncher.execute_in_main_thread(
+                lambda: stub().open(last_workfile_path))

     async def read(self):
         log.debug("photoshop.read client calls server server calls "

@@ -356,3 +371,35 @@ class PhotoshopRoute(WebSocketRoute):

         # Required return statement.
         return "nothing"
+
+    def _get_last_workfile_path(self, project_name, asset_name, task_name):
+        """Returns last workfile path if exists"""
+        host = registered_host()
+        host_name = "photoshop"
+        template_key = get_workfile_template_key_from_context(
+            asset_name,
+            task_name,
+            host_name,
+            project_name=project_name
+        )
+        anatomy = Anatomy(project_name)
+
+        data = get_template_data_with_names(
+            project_name, asset_name, task_name, host_name
+        )
+        data["root"] = anatomy.roots
+
+        file_template = anatomy.templates[template_key]["file"]
+
+        # Define saving file extension
+        extensions = host.get_workfile_extensions()
+
+        folder_template = anatomy.templates[template_key]["folder"]
+        work_root = StringTemplate.format_strict_template(
+            folder_template, data
+        )
+        last_workfile_path = get_last_workfile(
+            work_root, file_template, data, extensions, True
+        )
+
+        return last_workfile_path
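The new `_get_last_workfile_path` resolves the work folder from anatomy templates, then asks `get_last_workfile` for the highest-versioned file matching the file template. A trimmed sketch of that resolution flow, assuming a hypothetical project/asset/task context and a `"work"` template key (the real key comes from `get_workfile_template_key_from_context`):

```python
from openpype.pipeline import Anatomy
from openpype.lib import StringTemplate
from openpype.pipeline.workfile import get_last_workfile
from openpype.pipeline.template_data import get_template_data_with_names

project, asset, task = "demo", "sh010", "comp"  # hypothetical context
anatomy = Anatomy(project)
data = get_template_data_with_names(project, asset, task, "photoshop")
data["root"] = anatomy.roots

# Fill the folder template to get the on-disk work directory
work_root = StringTemplate.format_strict_template(
    anatomy.templates["work"]["folder"], data)

# Scan that directory for the highest-versioned matching workfile
last = get_last_workfile(
    work_root, anatomy.templates["work"]["file"], data, [".psd"], True)
```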
@@ -3,7 +3,14 @@
 import os
 import copy
 from pathlib import Path
+from openpype.widgets.splash_screen import SplashScreen
+from qtpy import QtCore
+from openpype.hosts.unreal.ue_workers import (
+    UEProjectGenerationWorker,
+    UEPluginInstallWorker
+)

+from openpype import resources
 from openpype.lib import (
     PreLaunchHook,
     ApplicationLaunchFailed,

@@ -22,6 +29,7 @@ class UnrealPrelaunchHook(PreLaunchHook):
     shell script.

     """
+
     def __init__(self, *args, **kwargs):
         super().__init__(*args, **kwargs)

@@ -58,6 +66,78 @@ class UnrealPrelaunchHook(PreLaunchHook):
         # Return filename
         return filled_anatomy[workfile_template_key]["file"]

+    def exec_plugin_install(self, engine_path: Path, env: dict = None):
+        # set up the QThread and worker with necessary signals
+        env = env or os.environ
+        q_thread = QtCore.QThread()
+        ue_plugin_worker = UEPluginInstallWorker()
+
+        q_thread.started.connect(ue_plugin_worker.run)
+        ue_plugin_worker.setup(engine_path, env)
+        ue_plugin_worker.moveToThread(q_thread)
+
+        splash_screen = SplashScreen(
+            "Installing plugin",
+            resources.get_resource("app_icons", "ue4.png")
+        )
+
+        # set up the splash screen with necessary triggers
+        ue_plugin_worker.installing.connect(
+            splash_screen.update_top_label_text
+        )
+        ue_plugin_worker.progress.connect(splash_screen.update_progress)
+        ue_plugin_worker.log.connect(splash_screen.append_log)
+        ue_plugin_worker.finished.connect(splash_screen.quit_and_close)
+        ue_plugin_worker.failed.connect(splash_screen.fail)
+
+        splash_screen.start_thread(q_thread)
+        splash_screen.show_ui()
+
+        if not splash_screen.was_proc_successful():
+            raise ApplicationLaunchFailed("Couldn't run the application! "
+                                          "Plugin failed to install!")
+
+    def exec_ue_project_gen(self,
+                            engine_version: str,
+                            unreal_project_name: str,
+                            engine_path: Path,
+                            project_dir: Path):
+        self.log.info((
+            f"{self.signature} Creating unreal "
+            f"project [ {unreal_project_name} ]"
+        ))
+
+        q_thread = QtCore.QThread()
+        ue_project_worker = UEProjectGenerationWorker()
+        ue_project_worker.setup(
+            engine_version,
+            unreal_project_name,
+            engine_path,
+            project_dir
+        )
+        ue_project_worker.moveToThread(q_thread)
+        q_thread.started.connect(ue_project_worker.run)
+
+        splash_screen = SplashScreen(
+            "Initializing UE project",
+            resources.get_resource("app_icons", "ue4.png")
+        )
+
+        ue_project_worker.stage_begin.connect(
+            splash_screen.update_top_label_text
+        )
+        ue_project_worker.progress.connect(splash_screen.update_progress)
+        ue_project_worker.log.connect(splash_screen.append_log)
+        ue_project_worker.finished.connect(splash_screen.quit_and_close)
+        ue_project_worker.failed.connect(splash_screen.fail)
+
+        splash_screen.start_thread(q_thread)
+        splash_screen.show_ui()
+
+        if not splash_screen.was_proc_successful():
+            raise ApplicationLaunchFailed("Couldn't run the application! "
+                                          "Failed to generate the project!")
+
     def execute(self):
         """Hook entry method."""
         workdir = self.launch_context.env["AVALON_WORKDIR"]

@@ -137,23 +217,18 @@ class UnrealPrelaunchHook(PreLaunchHook):
             if self.launch_context.env.get(env_key):
                 os.environ[env_key] = self.launch_context.env[env_key]

-        engine_path = detected[engine_version]
+        engine_path: Path = Path(detected[engine_version])

-        unreal_lib.try_installing_plugin(Path(engine_path), os.environ)
+        if not unreal_lib.check_plugin_existence(engine_path):
+            self.exec_plugin_install(engine_path)

         project_file = project_path / unreal_project_filename
         if not project_file.is_file():
-            self.log.info((
-                f"{self.signature} creating unreal "
-                f"project [ {unreal_project_name} ]"
-            ))
-
-            unreal_lib.create_unreal_project(
-                unreal_project_name,
-                engine_version,
-                project_path,
-                engine_path=Path(engine_path)
-            )
+            self.exec_ue_project_gen(engine_version,
+                                     unreal_project_name,
+                                     engine_path,
+                                     project_path)

         self.launch_context.env["OPENPYPE_UNREAL_VERSION"] = engine_version
         # Append project file to launch arguments
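Both helpers follow the same Qt pattern: create a `QThread`, move the worker `QObject` onto it, wire the worker's signals to the UI, then start the thread. A minimal generic sketch of that pattern (the splash screen replaced with `print` so it runs headless):

```python
from qtpy import QtCore


class Worker(QtCore.QObject):
    finished = QtCore.Signal(str)
    progress = QtCore.Signal(int)

    def run(self):
        # Long-running job; emit progress as it goes.
        for pct in (25, 50, 75, 100):
            self.progress.emit(pct)
        self.finished.emit("done")


app = QtCore.QCoreApplication([])
thread = QtCore.QThread()
worker = Worker()
worker.moveToThread(thread)            # run() executes on the thread
thread.started.connect(worker.run)
worker.progress.connect(print)         # stand-in for a progress bar
worker.finished.connect(thread.quit)
thread.finished.connect(app.quit)
thread.start()
app.exec_()                            # event loop delivers the signals
```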
@@ -30,7 +30,7 @@ void UAssetContainer::OnAssetAdded(const FAssetData& AssetData)

 	// get asset path and class
 	FString assetPath = AssetData.GetFullName();
-	FString assetFName = AssetData.AssetClassPath.ToString();
+	FString assetFName = AssetData.ObjectPath.ToString();
 	UE_LOG(LogTemp, Log, TEXT("asset name %s"), *assetFName);
 	// split path
 	assetPath.ParseIntoArray(split, TEXT(" "), true);

@@ -60,7 +60,7 @@ void UAssetContainer::OnAssetRemoved(const FAssetData& AssetData)

 	// get asset path and class
 	FString assetPath = AssetData.GetFullName();
-	FString assetFName = AssetData.AssetClassPath.ToString();
+	FString assetFName = AssetData.ObjectPath.ToString();

 	// split path
 	assetPath.ParseIntoArray(split, TEXT(" "), true);

@@ -93,7 +93,7 @@ void UAssetContainer::OnAssetRenamed(const FAssetData& AssetData, const FString& str)

 	// get asset path and class
 	FString assetPath = AssetData.GetFullName();
-	FString assetFName = AssetData.AssetClassPath.ToString();
+	FString assetFName = AssetData.ObjectPath.ToString();

 	// split path
 	assetPath.ParseIntoArray(split, TEXT(" "), true);
@@ -252,7 +252,7 @@ def create_unreal_project(project_name: str,

     with open(project_file.as_posix(), mode="r+") as pf:
         pf_json = json.load(pf)
-        pf_json["EngineAssociation"] = _get_build_id(engine_path, ue_version)
+        pf_json["EngineAssociation"] = get_build_id(engine_path, ue_version)
         pf.seek(0)
         json.dump(pf_json, pf, indent=4)
         pf.truncate()

@@ -338,7 +338,7 @@ def get_path_to_ubt(engine_path: Path, ue_version: str) -> Path:
     return Path(u_build_tool_path)


-def _get_build_id(engine_path: Path, ue_version: str) -> str:
+def get_build_id(engine_path: Path, ue_version: str) -> str:
     ue_modules = Path()
     if platform.system().lower() == "windows":
         ue_modules_path = engine_path / "Engine/Binaries/Win64"

@@ -365,6 +365,26 @@ def get_build_id(engine_path: Path, ue_version: str) -> str:
     return "{" + loaded_modules.get("BuildId") + "}"


+def check_plugin_existence(engine_path: Path, env: dict = None) -> bool:
+    env = env or os.environ
+    integration_plugin_path: Path = Path(env.get("OPENPYPE_UNREAL_PLUGIN", ""))
+
+    if not os.path.isdir(integration_plugin_path):
+        raise RuntimeError("Path to the integration plugin is null!")
+
+    # Create a path to the plugin in the engine
+    op_plugin_path: Path = engine_path / "Engine/Plugins/Marketplace/OpenPype"
+
+    if not op_plugin_path.is_dir():
+        return False
+
+    if not (op_plugin_path / "Binaries").is_dir() \
+            or not (op_plugin_path / "Intermediate").is_dir():
+        return False
+
+    return True
+
+
 def try_installing_plugin(engine_path: Path, env: dict = None) -> None:
     env = env or os.environ

@@ -377,7 +397,6 @@ def try_installing_plugin(engine_path: Path, env: dict = None) -> None:
     op_plugin_path: Path = engine_path / "Engine/Plugins/Marketplace/OpenPype"

     if not op_plugin_path.is_dir():
-        print("--- OpenPype Plugin is not present. Installing ...")
         op_plugin_path.mkdir(parents=True, exist_ok=True)

         engine_plugin_config_path: Path = op_plugin_path / "Config"

@@ -387,7 +406,6 @@ def try_installing_plugin(engine_path: Path, env: dict = None) -> None:

     if not (op_plugin_path / "Binaries").is_dir() \
             or not (op_plugin_path / "Intermediate").is_dir():
-        print("--- Binaries are not present. Building the plugin ...")
         _build_and_move_plugin(engine_path, op_plugin_path, env)
@@ -1,6 +1,7 @@
 # -*- coding: utf-8 -*-
 """Loader for layouts."""
 import json
+import collections
 from pathlib import Path

 import unreal

@@ -12,9 +13,7 @@ from unreal import FBXImportType
 from unreal import MovieSceneLevelVisibilityTrack
 from unreal import MovieSceneSubTrack

-from bson.objectid import ObjectId
-
-from openpype.client import get_asset_by_name, get_assets
+from openpype.client import get_asset_by_name, get_assets, get_representations
 from openpype.pipeline import (
     discover_loader_plugins,
     loaders_from_representation,

@@ -410,6 +409,30 @@ class LayoutLoader(plugin.Loader):

         return sequence, (min_frame, max_frame)

+    def _get_repre_docs_by_version_id(self, data):
+        version_ids = {
+            element.get("version")
+            for element in data
+            if element.get("representation")
+        }
+        version_ids.discard(None)
+
+        output = collections.defaultdict(list)
+        if not version_ids:
+            return output
+
+        project_name = legacy_io.active_project()
+        repre_docs = get_representations(
+            project_name,
+            representation_names=["fbx", "abc"],
+            version_ids=version_ids,
+            fields=["_id", "parent", "name"]
+        )
+        for repre_doc in repre_docs:
+            version_id = str(repre_doc["parent"])
+            output[version_id].append(repre_doc)
+        return output
+
     def _process(self, lib_path, asset_dir, sequence, repr_loaded=None):
         ar = unreal.AssetRegistryHelpers.get_asset_registry()

@@ -429,31 +452,21 @@ class LayoutLoader(plugin.Loader):

         loaded_assets = []

+        repre_docs_by_version_id = self._get_repre_docs_by_version_id(data)
         for element in data:
             representation = None
             repr_format = None
             if element.get('representation'):
-                # representation = element.get('representation')
-
-                self.log.info(element.get("version"))
-
-                valid_formats = ['fbx', 'abc']
-
-                repr_data = legacy_io.find_one({
-                    "type": "representation",
-                    "parent": ObjectId(element.get("version")),
-                    "name": {"$in": valid_formats}
-                })
-                repr_format = repr_data.get('name')
-
-                if not repr_data:
+                repre_docs = repre_docs_by_version_id[element.get("version")]
+                if not repre_docs:
                     self.log.error(
                         f"No valid representation found for version "
                         f"{element.get('version')}")
                     continue
+                repre_doc = repre_docs[0]
+                representation = str(repre_doc["_id"])
+                repr_format = repre_doc["name"]

-                representation = str(repr_data.get('_id'))
-                print(representation)
             # This is to keep compatibility with old versions of the
             # json format.
             elif element.get('reference_fbx'):
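The loader change above replaces one database query per element with a single `get_representations` call whose results are grouped by version id up front. The same batching shape, sketched with a hypothetical query function standing in for the real client call:

```python
import collections


def fetch_docs(version_ids):
    # Hypothetical stand-in for get_representations(...)
    return [{"_id": "a", "parent": v, "name": "fbx"} for v in version_ids]


elements = [{"version": "v1"}, {"version": "v2"}, {"version": "v1"}]

version_ids = {e.get("version") for e in elements}
version_ids.discard(None)

by_version = collections.defaultdict(list)
for doc in fetch_docs(version_ids):       # one query, not N
    by_version[str(doc["parent"])].append(doc)

for element in elements:                  # cheap in-memory lookups
    docs = by_version[element["version"]]
    print(element["version"], len(docs))
```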
335
openpype/hosts/unreal/ue_workers.py
Normal file
@@ -0,0 +1,335 @@
import json
import os
import platform
import re
import subprocess
from distutils import dir_util
from pathlib import Path
from typing import List

import openpype.hosts.unreal.lib as ue_lib

from qtpy import QtCore


def parse_comp_progress(line: str, progress_signal: QtCore.Signal(int)) -> int:
    match = re.search('\[[1-9]+/[0-9]+\]', line)
    if match is not None:
        split: list[str] = match.group().split('/')
        curr: float = float(split[0][1:])
        total: float = float(split[1][:-1])
        progress_signal.emit(int((curr / total) * 100.0))


def parse_prj_progress(line: str, progress_signal: QtCore.Signal(int)) -> int:
    match = re.search('@progress', line)
    if match is not None:
        percent_match = re.search('\d{1,3}', line)
        progress_signal.emit(int(percent_match.group()))


class UEProjectGenerationWorker(QtCore.QObject):
    finished = QtCore.Signal(str)
    failed = QtCore.Signal(str)
    progress = QtCore.Signal(int)
    log = QtCore.Signal(str)
    stage_begin = QtCore.Signal(str)

    ue_version: str = None
    project_name: str = None
    env = None
    engine_path: Path = None
    project_dir: Path = None
    dev_mode = False

    def setup(self, ue_version: str,
              project_name,
              engine_path: Path,
              project_dir: Path,
              dev_mode: bool = False,
              env: dict = None):

        self.ue_version = ue_version
        self.project_dir = project_dir
        self.env = env or os.environ

        preset = ue_lib.get_project_settings(
            project_name
        )["unreal"]["project_setup"]

        if dev_mode or preset["dev_mode"]:
            self.dev_mode = True

        self.project_name = project_name
        self.engine_path = engine_path

    def run(self):
        # engine_path should be the location of UE_X.X folder

        ue_editor_exe = ue_lib.get_editor_exe_path(self.engine_path,
                                                   self.ue_version)
        cmdlet_project = ue_lib.get_path_to_cmdlet_project(self.ue_version)
        project_file = self.project_dir / f"{self.project_name}.uproject"

        print("--- Generating a new project ...")
        # 1st stage
        stage_count = 2
        if self.dev_mode:
            stage_count = 4

        self.stage_begin.emit(f'Generating a new UE project ... 1 out of '
                              f'{stage_count}')

        commandlet_cmd = [f'{ue_editor_exe.as_posix()}',
                          f'{cmdlet_project.as_posix()}',
                          f'-run=OPGenerateProject',
                          f'{project_file.resolve().as_posix()}']

        if self.dev_mode:
            commandlet_cmd.append('-GenerateCode')

        gen_process = subprocess.Popen(commandlet_cmd,
                                       stdout=subprocess.PIPE,
                                       stderr=subprocess.PIPE)

        for line in gen_process.stdout:
            decoded_line = line.decode(errors="replace")
            print(decoded_line, end='')
            self.log.emit(decoded_line)
        gen_process.stdout.close()
        return_code = gen_process.wait()

        if return_code and return_code != 0:
            msg = 'Failed to generate ' + self.project_name \
                  + f' project! Exited with return code {return_code}'
            self.failed.emit(msg, return_code)
            raise RuntimeError(msg)

        print("--- Project has been generated successfully.")
        self.stage_begin.emit(f'Writing the Engine ID of the build UE ... 1'
                              f' out of {stage_count}')

        if not project_file.is_file():
            msg = "Failed to write the Engine ID into .uproject file! Can " \
                  "not read!"
            self.failed.emit(msg)
            raise RuntimeError(msg)

        with open(project_file.as_posix(), mode="r+") as pf:
            pf_json = json.load(pf)
            pf_json["EngineAssociation"] = ue_lib.get_build_id(
                self.engine_path,
                self.ue_version
            )
            print(pf_json["EngineAssociation"])
            pf.seek(0)
            json.dump(pf_json, pf, indent=4)
            pf.truncate()
            print(f'--- Engine ID has been written into the project file')

        self.progress.emit(90)
        if self.dev_mode:
            # 2nd stage
            self.stage_begin.emit(f'Generating project files ... 2 out of '
                                  f'{stage_count}')

            self.progress.emit(0)
            ubt_path = ue_lib.get_path_to_ubt(self.engine_path,
                                              self.ue_version)

            arch = "Win64"
            if platform.system().lower() == "windows":
                arch = "Win64"
            elif platform.system().lower() == "linux":
                arch = "Linux"
            elif platform.system().lower() == "darwin":
                # we need to test this out
                arch = "Mac"

            gen_prj_files_cmd = [ubt_path.as_posix(),
                                 "-projectfiles",
                                 f"-project={project_file}",
                                 "-progress"]
            gen_proc = subprocess.Popen(gen_prj_files_cmd,
                                        stdout=subprocess.PIPE,
                                        stderr=subprocess.PIPE)
            for line in gen_proc.stdout:
                decoded_line: str = line.decode(errors='replace')
                print(decoded_line, end='')
                self.log.emit(decoded_line)
                parse_prj_progress(decoded_line, self.progress)

            gen_proc.stdout.close()
            return_code = gen_proc.wait()

            if return_code and return_code != 0:
                msg = 'Failed to generate project files! ' \
                      f'Exited with return code {return_code}'
                self.failed.emit(msg, return_code)
                raise RuntimeError(msg)

            self.stage_begin.emit(f'Building the project ... 3 out of '
                                  f'{stage_count}')
            self.progress.emit(0)
            # 3rd stage
            build_prj_cmd = [ubt_path.as_posix(),
                             f"-ModuleWithSuffix={self.project_name},3555",
                             arch,
                             "Development",
                             "-TargetType=Editor",
                             f'-Project={project_file}',
                             f'{project_file}',
                             "-IgnoreJunk"]

            build_prj_proc = subprocess.Popen(build_prj_cmd,
                                              stdout=subprocess.PIPE,
                                              stderr=subprocess.PIPE)
            for line in build_prj_proc.stdout:
                decoded_line: str = line.decode(errors='replace')
                print(decoded_line, end='')
                self.log.emit(decoded_line)
                parse_comp_progress(decoded_line, self.progress)

            build_prj_proc.stdout.close()
            return_code = build_prj_proc.wait()

            if return_code and return_code != 0:
                msg = 'Failed to build project! ' \
                      f'Exited with return code {return_code}'
                self.failed.emit(msg, return_code)
                raise RuntimeError(msg)

        # ensure we have PySide2 installed in engine

        self.progress.emit(0)
        self.stage_begin.emit(f'Checking PySide2 installation... {stage_count}'
                              f' out of {stage_count}')
        python_path = None
        if platform.system().lower() == "windows":
            python_path = self.engine_path / ("Engine/Binaries/ThirdParty/"
                                              "Python3/Win64/python.exe")

        if platform.system().lower() == "linux":
            python_path = self.engine_path / ("Engine/Binaries/ThirdParty/"
                                              "Python3/Linux/bin/python3")

        if platform.system().lower() == "darwin":
            python_path = self.engine_path / ("Engine/Binaries/ThirdParty/"
                                              "Python3/Mac/bin/python3")

        if not python_path:
            msg = "Unsupported platform"
            self.failed.emit(msg, 1)
            raise NotImplementedError(msg)
        if not python_path.exists():
            msg = f"Unreal Python not found at {python_path}"
            self.failed.emit(msg, 1)
            raise RuntimeError(msg)
        subprocess.check_call(
            [python_path.as_posix(), "-m", "pip", "install", "pyside2"]
        )
        self.progress.emit(100)
        self.finished.emit("Project successfully built!")


class UEPluginInstallWorker(QtCore.QObject):
    finished = QtCore.Signal(str)
    installing = QtCore.Signal(str)
    failed = QtCore.Signal(str, int)
    progress = QtCore.Signal(int)
    log = QtCore.Signal(str)

    engine_path: Path = None
    env = None

    def setup(self, engine_path: Path, env: dict = None, ):
        self.engine_path = engine_path
        self.env = env or os.environ

    def _build_and_move_plugin(self, plugin_build_path: Path):
        uat_path: Path = ue_lib.get_path_to_uat(self.engine_path)
        src_plugin_dir = Path(self.env.get("OPENPYPE_UNREAL_PLUGIN", ""))

        if not os.path.isdir(src_plugin_dir):
            msg = "Path to the integration plugin is null!"
            self.failed.emit(msg, 1)
            raise RuntimeError(msg)

        if not uat_path.is_file():
            msg = "Building failed! Path to UAT is invalid!"
            self.failed.emit(msg, 1)
            raise RuntimeError(msg)

        temp_dir: Path = src_plugin_dir.parent / "Temp"
        temp_dir.mkdir(exist_ok=True)
        uplugin_path: Path = src_plugin_dir / "OpenPype.uplugin"

        # in order to successfully build the plugin,
        # It must be built outside the Engine directory and then moved
        build_plugin_cmd: List[str] = [f'{uat_path.as_posix()}',
                                       'BuildPlugin',
                                       f'-Plugin={uplugin_path.as_posix()}',
                                       f'-Package={temp_dir.as_posix()}']

        build_proc = subprocess.Popen(build_plugin_cmd,
                                      stdout=subprocess.PIPE,
                                      stderr=subprocess.PIPE)
        for line in build_proc.stdout:
            decoded_line: str = line.decode(errors='replace')
            print(decoded_line, end='')
            self.log.emit(decoded_line)
            parse_comp_progress(decoded_line, self.progress)

        build_proc.stdout.close()
        return_code = build_proc.wait()

        if return_code and return_code != 0:
            msg = 'Failed to build plugin' \
                  f' project! Exited with return code {return_code}'
            self.failed.emit(msg, return_code)
            raise RuntimeError(msg)

        # Copy the contents of the 'Temp' dir into the
        # 'OpenPype' directory in the engine
        dir_util.copy_tree(temp_dir.as_posix(),
                           plugin_build_path.as_posix())

        # We need to also copy the config folder.
        # The UAT doesn't include the Config folder in the build
        plugin_install_config_path: Path = plugin_build_path / "Config"
        src_plugin_config_path = src_plugin_dir / "Config"

        dir_util.copy_tree(src_plugin_config_path.as_posix(),
                           plugin_install_config_path.as_posix())

        dir_util.remove_tree(temp_dir.as_posix())

    def run(self):
        src_plugin_dir = Path(self.env.get("OPENPYPE_UNREAL_PLUGIN", ""))

        if not os.path.isdir(src_plugin_dir):
            msg = "Path to the integration plugin is null!"
            self.failed.emit(msg, 1)
            raise RuntimeError(msg)

        # Create a path to the plugin in the engine
        op_plugin_path = self.engine_path / "Engine/Plugins/Marketplace" \
                                            "/OpenPype"

        if not op_plugin_path.is_dir():
            self.installing.emit("Installing and building the plugin ...")
            op_plugin_path.mkdir(parents=True, exist_ok=True)

            engine_plugin_config_path = op_plugin_path / "Config"
            engine_plugin_config_path.mkdir(exist_ok=True)

            dir_util._path_created = {}

        if not (op_plugin_path / "Binaries").is_dir() \
                or not (op_plugin_path / "Intermediate").is_dir():
            self.installing.emit("Building the plugin ...")
            print("--- Building the plugin...")

            self._build_and_move_plugin(op_plugin_path)

        self.finished.emit("Plugin successfully installed")
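`parse_comp_progress` in the new module pulls a `[current/total]` step counter out of UnrealBuildTool/UAT output and converts it to a percentage. The regex logic in isolation, runnable without Qt (the signal's `emit` replaced by a plain callback):

```python
import re


def parse_comp_progress(line, emit):
    # UBT prints compile steps as e.g. "[3/42] Compile Module.cpp"
    match = re.search(r'\[[1-9]+/[0-9]+\]', line)
    if match is not None:
        curr, total = match.group()[1:-1].split('/')
        emit(int(float(curr) / float(total) * 100.0))


parse_comp_progress("[3/42] Compile Module.cpp", print)  # prints 7
```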
@@ -23,36 +23,37 @@ class ShowInKitsu(LauncherAction):
         return True

     def process(self, session, **kwargs):

         # Context inputs
         project_name = session["AVALON_PROJECT"]
         asset_name = session.get("AVALON_ASSET", None)
         task_name = session.get("AVALON_TASK", None)

-        project = get_project(project_name=project_name,
-                              fields=["data.zou_id"])
+        project = get_project(
+            project_name=project_name, fields=["data.zou_id"]
+        )
         if not project:
-            raise RuntimeError(f"Project {project_name} not found.")
+            raise RuntimeError("Project {} not found.".format(project_name))

         project_zou_id = project["data"].get("zou_id")
         if not project_zou_id:
-            raise RuntimeError(f"Project {project_name} has no "
-                               f"connected kitsu id.")
+            raise RuntimeError(
+                "Project {} has no connected kitsu id.".format(project_name)
+            )

         asset_zou_name = None
         asset_zou_id = None
-        asset_zou_type = 'Assets'
+        asset_zou_type = "Assets"
         task_zou_id = None
-        zou_sub_type = ['AssetType', 'Sequence']
+        zou_sub_type = ["AssetType", "Sequence"]
         if asset_name:
             asset_zou_name = asset_name
             asset_fields = ["data.zou.id", "data.zou.type"]
             if task_name:
-                asset_fields.append(f"data.tasks.{task_name}.zou.id")
+                asset_fields.append("data.tasks.{}.zou.id".format(task_name))

-            asset = get_asset_by_name(project_name,
-                                      asset_name=asset_name,
-                                      fields=asset_fields)
+            asset = get_asset_by_name(
+                project_name, asset_name=asset_name, fields=asset_fields
+            )

             asset_zou_data = asset["data"].get("zou")

@@ -67,40 +68,47 @@ class ShowInKitsu(LauncherAction):
                 task_data = asset["data"]["tasks"][task_name]
                 task_zou_data = task_data.get("zou", {})
                 if not task_zou_data:
-                    self.log.debug(f"No zou task data for task: {task_name}")
+                    self.log.debug(
+                        "No zou task data for task: {}".format(task_name)
+                    )
                 task_zou_id = task_zou_data["id"]

         # Define URL
-        url = self.get_url(project_id=project_zou_id,
-                           asset_name=asset_zou_name,
-                           asset_id=asset_zou_id,
-                           asset_type=asset_zou_type,
-                           task_id=task_zou_id)
+        url = self.get_url(
+            project_id=project_zou_id,
+            asset_name=asset_zou_name,
+            asset_id=asset_zou_id,
+            asset_type=asset_zou_type,
+            task_id=task_zou_id,
+        )

         # Open URL in webbrowser
-        self.log.info(f"Opening URL: {url}")
-        webbrowser.open(url,
-                        # Try in new tab
-                        new=2)
+        self.log.info("Opening URL: {}".format(url))
+        webbrowser.open(
+            url,
+            # Try in new tab
+            new=2,
+        )

-    def get_url(self,
-                project_id,
-                asset_name=None,
-                asset_id=None,
-                asset_type=None,
-                task_id=None):
-
-        shots_url = {'Shots', 'Sequence', 'Shot'}
-        sub_type = {'AssetType', 'Sequence'}
+    def get_url(
+        self,
+        project_id,
+        asset_name=None,
+        asset_id=None,
+        asset_type=None,
+        task_id=None,
+    ):
+        shots_url = {"Shots", "Sequence", "Shot"}
+        sub_type = {"AssetType", "Sequence"}
         kitsu_module = self.get_kitsu_module()

         # Get kitsu url with /api stripped
         kitsu_url = kitsu_module.server_url
         if kitsu_url.endswith("/api"):
-            kitsu_url = kitsu_url[:-len("/api")]
+            kitsu_url = kitsu_url[: -len("/api")]

         sub_url = f"/productions/{project_id}"
-        asset_type_url = "Shots" if asset_type in shots_url else "Assets"
+        asset_type_url = "shots" if asset_type in shots_url else "assets"

         if task_id:
             # Go to task page

@@ -120,6 +128,6 @@ class ShowInKitsu(LauncherAction):
         # Add search method if is a sub_type
         sub_url += f"/{asset_type_url}"
         if asset_type in sub_type:
-            sub_url += f'?search={asset_name}'
+            sub_url += f"?search={asset_name}"

         return f"{kitsu_url}{sub_url}"
@@ -13,6 +13,5 @@ class CollectKitsuSession(pyblish.api.ContextPlugin):  # rename log in
     # families = ["kitsu"]

     def process(self, context):
-
         gazu.client.set_host(os.environ["KITSU_SERVER"])
         gazu.log_in(os.environ["KITSU_LOGIN"], os.environ["KITSU_PWD"])
@@ -1,6 +1,4 @@
 # -*- coding: utf-8 -*-
-import os
-
 import gazu
 import pyblish.api

@@ -12,62 +10,69 @@ class CollectKitsuEntities(pyblish.api.ContextPlugin):
     label = "Kitsu entities"

     def process(self, context):
-
-        asset_data = context.data["assetEntity"]["data"]
-        zou_asset_data = asset_data.get("zou")
-        if not zou_asset_data:
-            raise AssertionError("Zou asset data not found in OpenPype!")
-        self.log.debug("Collected zou asset data: {}".format(zou_asset_data))
-
-        zou_task_data = asset_data["tasks"][os.environ["AVALON_TASK"]].get(
-            "zou"
-        )
-        if not zou_task_data:
-            self.log.warning("Zou task data not found in OpenPype!")
-        self.log.debug("Collected zou task data: {}".format(zou_task_data))
-
-        kitsu_project = gazu.project.get_project(zou_asset_data["project_id"])
-        if not kitsu_project:
-            raise AssertionError("Project not found in kitsu!")
-        context.data["kitsu_project"] = kitsu_project
-        self.log.debug("Collect kitsu project: {}".format(kitsu_project))
-
-        entity_type = zou_asset_data["type"]
-        if entity_type == "Shot":
-            kitsu_entity = gazu.shot.get_shot(zou_asset_data["id"])
-        else:
-            kitsu_entity = gazu.asset.get_asset(zou_asset_data["id"])
-
-        if not kitsu_entity:
-            raise AssertionError("{} not found in kitsu!".format(entity_type))
-
-        context.data["kitsu_entity"] = kitsu_entity
-        self.log.debug(
-            "Collect kitsu {}: {}".format(entity_type, kitsu_entity)
-        )
-
-        if zou_task_data:
-            kitsu_task = gazu.task.get_task(zou_task_data["id"])
-            if not kitsu_task:
-                raise AssertionError("Task not found in kitsu!")
-            context.data["kitsu_task"] = kitsu_task
-            self.log.debug("Collect kitsu task: {}".format(kitsu_task))
-
-        else:
-            kitsu_task_type = gazu.task.get_task_type_by_name(
-                os.environ["AVALON_TASK"]
-            )
-            if not kitsu_task_type:
-                raise AssertionError(
-                    "Task type {} not found in Kitsu!".format(
-                        os.environ["AVALON_TASK"]
-                    )
-                )
-
-            kitsu_task = gazu.task.get_task_by_name(
-                kitsu_entity, kitsu_task_type
-            )
-            if not kitsu_task:
-                raise AssertionError("Task not found in kitsu!")
-            context.data["kitsu_task"] = kitsu_task
-            self.log.debug("Collect kitsu task: {}".format(kitsu_task))
+        kitsu_project = gazu.project.get_project_by_name(
+            context.data["projectName"]
+        )
+        if not kitsu_project:
+            raise ValueError("Project not found in kitsu!")
+        context.data["kitsu_project"] = kitsu_project
+        self.log.debug("Collect kitsu project: {}".format(kitsu_project))
+
+        kitsu_entities_by_id = {}
+        for instance in context:
+            asset_doc = instance.data.get("assetEntity")
+            if not asset_doc:
+                continue
+
+            zou_asset_data = asset_doc["data"].get("zou")
+            if not zou_asset_data:
+                raise ValueError("Zou asset data not found in OpenPype!")
+
+            task_name = instance.data.get("task")
+            if not task_name:
+                continue
+
+            zou_task_data = asset_doc["data"]["tasks"][task_name].get("zou")
+            self.log.debug(
+                "Collected zou task data: {}".format(zou_task_data)
+            )
+
+            entity_id = zou_asset_data["id"]
+            entity = kitsu_entities_by_id.get(entity_id)
+            if not entity:
+                entity = gazu.entity.get_entity(entity_id)
+                if not entity:
+                    raise ValueError(
+                        "{} was not found in kitsu!".format(
+                            zou_asset_data["name"]
+                        )
+                    )
+
+            kitsu_entities_by_id[entity_id] = entity
+            instance.data["entity"] = entity
+            self.log.debug(
+                "Collect kitsu {}: {}".format(zou_asset_data["type"], entity)
+            )
+
+            if zou_task_data:
+                kitsu_task_id = zou_task_data["id"]
+                kitsu_task = kitsu_entities_by_id.get(kitsu_task_id)
+                if not kitsu_task:
+                    kitsu_task = gazu.task.get_task(zou_task_data["id"])
+                    kitsu_entities_by_id[kitsu_task_id] = kitsu_task
+            else:
+                kitsu_task_type = gazu.task.get_task_type_by_name(task_name)
+                if not kitsu_task_type:
+                    raise ValueError(
+                        "Task type {} not found in Kitsu!".format(task_name)
+                    )
+
+                kitsu_task = gazu.task.get_task_by_name(
+                    entity, kitsu_task_type
+                )
+
+            if not kitsu_task:
+                raise ValueError("Task not found in kitsu!")
+            instance.data["kitsu_task"] = kitsu_task
+            self.log.debug("Collect kitsu task: {}".format(kitsu_task))
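The rewritten collector resolves Kitsu entities per instance and memoizes gazu lookups in `kitsu_entities_by_id`, so repeated assets and tasks hit the server only once. The memoization pattern in isolation, with a hypothetical fetch function standing in for `gazu.entity.get_entity`:

```python
def get_entity_cached(cache, entity_id, fetch):
    entity = cache.get(entity_id)
    if entity is None:
        entity = fetch(entity_id)  # e.g. gazu.entity.get_entity
        cache[entity_id] = entity
    return entity


cache = {}
calls = []
fetch = lambda eid: calls.append(eid) or {"id": eid}

get_entity_cached(cache, "abc", fetch)
get_entity_cached(cache, "abc", fetch)  # second call served from cache
assert calls == ["abc"]
```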
@@ -8,12 +8,11 @@ class IntegrateKitsuNote(pyblish.api.ContextPlugin):

     order = pyblish.api.IntegratorOrder
     label = "Kitsu Note and Status"
-    # families = ["kitsu"]
+    families = ["render", "kitsu"]
     set_status_note = False
     note_status_shortname = "wfa"

     def process(self, context):
-
         # Get comment text body
         publish_comment = context.data.get("comment")
         if not publish_comment:

@@ -21,30 +20,33 @@ class IntegrateKitsuNote(pyblish.api.ContextPlugin):

         self.log.debug("Comment is `{}`".format(publish_comment))

-        # Get note status, by default uses the task status for the note
-        # if it is not specified in the configuration
-        note_status = context.data["kitsu_task"]["task_status_id"]
-        if self.set_status_note:
-            kitsu_status = gazu.task.get_task_status_by_short_name(
-                self.note_status_shortname
-            )
-            if kitsu_status:
-                note_status = kitsu_status
-                self.log.info("Note Kitsu status: {}".format(note_status))
-            else:
-                self.log.info(
-                    "Cannot find {} status. The status will not be "
-                    "changed!".format(self.note_status_shortname)
-                )
-
-        # Add comment to kitsu task
-        self.log.debug(
-            "Add new note in taks id {}".format(
-                context.data["kitsu_task"]["id"]
-            )
-        )
-        kitsu_comment = gazu.task.add_comment(
-            context.data["kitsu_task"], note_status, comment=publish_comment
-        )
-
-        context.data["kitsu_comment"] = kitsu_comment
+        for instance in context:
+            kitsu_task = instance.data.get("kitsu_task")
+            if kitsu_task is None:
+                continue
+
+            # Get note status, by default uses the task status for the note
+            # if it is not specified in the configuration
+            note_status = kitsu_task["task_status"]["id"]
+
+            if self.set_status_note:
+                kitsu_status = gazu.task.get_task_status_by_short_name(
+                    self.note_status_shortname
+                )
+                if kitsu_status:
+                    note_status = kitsu_status
+                    self.log.info("Note Kitsu status: {}".format(note_status))
+                else:
+                    self.log.info(
+                        "Cannot find {} status. The status will not be "
+                        "changed!".format(self.note_status_shortname)
+                    )
+
+            # Add comment to kitsu task
+            task_id = kitsu_task["id"]
+            self.log.debug("Add new note in taks id {}".format(task_id))
+            kitsu_comment = gazu.task.add_comment(
+                task_id, note_status, comment=publish_comment
+            )
+
+            instance.data["kitsu_comment"] = kitsu_comment
@ -8,14 +8,12 @@ class IntegrateKitsuReview(pyblish.api.InstancePlugin):
|
|||
|
||||
order = pyblish.api.IntegratorOrder + 0.01
|
||||
label = "Kitsu Review"
|
||||
# families = ["kitsu"]
|
||||
families = ["render", "kitsu"]
|
||||
optional = True
|
||||
|
||||
def process(self, instance):
|
||||
|
||||
context = instance.context
|
||||
task = context.data["kitsu_task"]
|
||||
comment = context.data.get("kitsu_comment")
|
||||
task = instance.data["kitsu_task"]["id"]
|
||||
comment = instance.data["kitsu_comment"]["id"]
|
||||
|
||||
# Check comment has been created
|
||||
if not comment:
|
||||
|
|
@ -27,9 +25,8 @@ class IntegrateKitsuReview(pyblish.api.InstancePlugin):
|
|||
# Add review representations as preview of comment
|
||||
for representation in instance.data.get("representations", []):
|
||||
# Skip if not tagged as review
|
||||
if "review" not in representation.get("tags", []):
|
||||
if "kitsureview" not in representation.get("tags", []):
|
||||
continue
|
||||
|
||||
review_path = representation.get("published_path")
|
||||
self.log.debug("Found review at: {}".format(review_path))
|
||||
|
||||
|
|
|
|||
|
|
@ -54,7 +54,8 @@ def validate_host(kitsu_url: str) -> bool:
|
|||
if gazu.client.host_is_valid():
|
||||
return True
|
||||
else:
|
||||
raise gazu.exception.HostException(f"Host '{kitsu_url}' is invalid.")
|
||||
raise gazu.exception.HostException(
|
||||
"Host '{}' is invalid.".format(kitsu_url))
|
||||
|
||||
|
||||
def clear_credentials():
|
||||
|
|
|
|||
|
|
@ -1,3 +1,15 @@
|
|||
"""
|
||||
Bugs:
|
||||
* Error when adding task type to anything that isn't Shot or Assets
|
||||
* Assets don't get added under an episode if TV show
|
||||
* Assets added under Main Pack throws error. No Main Pack name in dict
|
||||
|
||||
Features ToDo:
|
||||
* Select in settings what types you wish to sync
|
||||
* Print what's updated on entity-update
|
||||
* Add listener for Edits
|
||||
"""
|
||||
|
||||
import os
|
||||
import threading
|
||||
|
||||
|
|
@ -5,6 +17,7 @@ import gazu
|
|||
|
||||
from openpype.client import get_project, get_assets, get_asset_by_name
|
||||
from openpype.pipeline import AvalonMongoDB
|
||||
from openpype.lib import Logger
|
||||
from .credentials import validate_credentials
|
||||
from .update_op_with_zou import (
|
||||
create_op_asset,
|
||||
|
|
@ -14,6 +27,8 @@ from .update_op_with_zou import (
|
|||
update_op_assets,
|
||||
)
|
||||
|
||||
log = Logger.get_logger(__name__)
|
||||
|
||||
|
||||
class Listener:
|
||||
"""Host Kitsu listener."""
|
||||
|
|
@ -38,7 +53,7 @@ class Listener:
|
|||
# Authenticate
|
||||
if not validate_credentials(login, password):
|
||||
raise gazu.exception.AuthFailedException(
|
||||
f"Kitsu authentication failed for login: '{login}'..."
|
||||
'Kitsu authentication failed for login: "{}"...'.format(login)
|
||||
)
|
||||
|
||||
gazu.set_event_host(
|
||||
|
|
@ -86,7 +101,9 @@ class Listener:
|
|||
self.event_client, "sequence:delete", self._delete_sequence
|
||||
)
|
||||
|
||||
gazu.events.add_listener(self.event_client, "shot:new", self._new_shot)
|
||||
gazu.events.add_listener(
|
||||
self.event_client, "shot:new", self._new_shot
|
||||
)
|
||||
gazu.events.add_listener(
|
||||
self.event_client, "shot:update", self._update_shot
|
||||
)
|
||||
|
|
@ -94,7 +111,9 @@ class Listener:
|
|||
self.event_client, "shot:delete", self._delete_shot
|
||||
)
|
||||
|
||||
gazu.events.add_listener(self.event_client, "task:new", self._new_task)
|
||||
gazu.events.add_listener(
|
||||
self.event_client, "task:new", self._new_task
|
||||
)
|
||||
gazu.events.add_listener(
|
||||
self.event_client, "task:update", self._update_task
|
||||
)
|
||||
|
|
@ -103,44 +122,62 @@ class Listener:
|
|||
)
|
||||
|
||||
def start(self):
|
||||
"""Start listening for events."""
|
||||
log.info("Listening to Kitsu events...")
|
||||
gazu.events.run_client(self.event_client)
|
||||
|
||||
def get_ep_dict(self, ep_id):
|
||||
if ep_id and ep_id != "":
|
||||
return gazu.entity.get_entity(ep_id)
|
||||
return
|
||||
|
||||
# == Project ==
|
||||
def _new_project(self, data):
|
||||
"""Create new project into OP DB."""
|
||||
|
||||
# Use update process to avoid duplicating code
|
||||
self._update_project(data)
|
||||
self._update_project(data, new_project=True)
|
||||
|
||||
def _update_project(self, data):
|
||||
def _update_project(self, data, new_project=False):
|
||||
"""Update project into OP DB."""
|
||||
# Get project entity
|
||||
project = gazu.project.get_project(data["project_id"])
|
||||
project_name = project["name"]
|
||||
|
||||
update_project = write_project_to_op(project, self.dbcon)
|
||||
|
||||
# Write into DB
|
||||
if update_project:
|
||||
self.dbcon.Session["AVALON_PROJECT"] = project_name
|
||||
self.dbcon.Session["AVALON_PROJECT"] = get_kitsu_project_name(
|
||||
data["project_id"]
|
||||
)
|
||||
self.dbcon.bulk_write([update_project])
|
||||
|
||||
if new_project:
|
||||
log.info("Project created: {}".format(project["name"]))
|
||||
|
||||
def _delete_project(self, data):
|
||||
"""Delete project."""
|
||||
|
||||
project_name = get_kitsu_project_name(data["project_id"])
|
||||
collections = self.dbcon.database.list_collection_names()
|
||||
for collection in collections:
|
||||
project = self.dbcon.database[collection].find_one(
|
||||
{"data.zou_id": data["project_id"]}
|
||||
)
|
||||
if project:
|
||||
# Delete project collection
|
||||
self.dbcon.database[project["name"]].drop()
|
||||
|
||||
# Delete project collection
|
||||
self.dbcon.database[project_name].drop()
|
||||
# Print message
|
||||
log.info("Project deleted: {}".format(project["name"]))
|
||||
return
|
||||
|
||||
# == Asset ==
|
||||
|
||||
def _new_asset(self, data):
|
||||
"""Create new asset into OP DB."""
|
||||
# Get project entity
|
||||
set_op_project(self.dbcon, data["project_id"])
|
||||
|
||||
# Get gazu entity
|
||||
# Get asset entity
|
||||
asset = gazu.asset.get_asset(data["asset_id"])
|
||||
|
||||
# Insert doc in DB
|
||||
|
|
@ -149,6 +186,21 @@ class Listener:
|
|||
# Update
|
||||
self._update_asset(data)
|
||||
|
||||
# Print message
|
||||
ep_id = asset.get("episode_id")
|
||||
ep = self.get_ep_dict(ep_id)
|
||||
|
||||
msg = (
|
||||
"Asset created: {proj_name} - {ep_name}"
|
||||
"{asset_type_name} - {asset_name}".format(
|
||||
proj_name=asset["project_name"],
|
||||
ep_name=ep["name"] + " - " if ep is not None else "",
|
||||
asset_type_name=asset["asset_type_name"],
|
||||
asset_name=asset["name"],
|
||||
)
|
||||
)
|
||||
log.info(msg)
|
||||
|
||||
def _update_asset(self, data):
|
||||
"""Update asset into OP DB."""
|
||||
set_op_project(self.dbcon, data["project_id"])
|
||||
|
|
@ -166,10 +218,15 @@ class Listener:
|
|||
if asset_doc["data"].get("zou", {}).get("id")
|
||||
}
|
||||
zou_ids_and_asset_docs[asset["project_id"]] = project_doc
|
||||
gazu_project = gazu.project.get_project(asset["project_id"])
|
||||
|
||||
# Update
|
||||
update_op_result = update_op_assets(
|
||||
self.dbcon, project_doc, [asset], zou_ids_and_asset_docs
|
||||
self.dbcon,
|
||||
gazu_project,
|
||||
project_doc,
|
||||
[asset],
|
||||
zou_ids_and_asset_docs,
|
||||
)
|
||||
if update_op_result:
|
||||
asset_doc_id, asset_update = update_op_result[0]
|
||||
|
|
@ -179,10 +236,27 @@ class Listener:
|
|||
"""Delete asset of OP DB."""
|
||||
set_op_project(self.dbcon, data["project_id"])
|
||||
|
||||
# Delete
|
||||
self.dbcon.delete_one(
|
||||
{"type": "asset", "data.zou.id": data["asset_id"]}
|
||||
)
|
||||
asset = self.dbcon.find_one({"data.zou.id": data["asset_id"]})
|
||||
if asset:
|
||||
# Delete
|
||||
self.dbcon.delete_one(
|
||||
{"type": "asset", "data.zou.id": data["asset_id"]}
|
||||
)
|
||||
|
||||
# Print message
|
||||
ep_id = asset["data"]["zou"].get("episode_id")
|
||||
ep = self.get_ep_dict(ep_id)
|
||||
|
||||
msg = (
|
||||
"Asset deleted: {proj_name} - {ep_name}"
|
||||
"{type_name} - {asset_name}".format(
|
||||
proj_name=asset["data"]["zou"]["project_name"],
|
||||
ep_name=ep["name"] + " - " if ep is not None else "",
|
||||
type_name=asset["data"]["zou"]["asset_type_name"],
|
||||
asset_name=asset["name"],
|
||||
)
|
||||
)
|
||||
log.info(msg)
|
||||
|
||||
# == Episode ==
|
||||
def _new_episode(self, data):
|
||||
|
|
@ -191,14 +265,20 @@ class Listener:
|
|||
set_op_project(self.dbcon, data["project_id"])
|
||||
|
||||
# Get gazu entity
|
||||
episode = gazu.shot.get_episode(data["episode_id"])
|
||||
ep = gazu.shot.get_episode(data["episode_id"])
|
||||
|
||||
# Insert doc in DB
|
||||
self.dbcon.insert_one(create_op_asset(episode))
|
||||
self.dbcon.insert_one(create_op_asset(ep))
|
||||
|
||||
# Update
|
||||
self._update_episode(data)
|
||||
|
||||
# Print message
|
||||
msg = "Episode created: {proj_name} - {ep_name}".format(
|
||||
proj_name=ep["project_name"], ep_name=ep["name"]
|
||||
)
|
||||
log.info(msg)
|
||||
|
||||
def _update_episode(self, data):
|
||||
"""Update episode into OP DB."""
|
||||
set_op_project(self.dbcon, data["project_id"])
|
||||
|
|
@ -206,7 +286,7 @@ class Listener:
|
|||
project_doc = get_project(project_name)
|
||||
|
||||
# Get gazu entity
|
||||
episode = gazu.shot.get_episode(data["episode_id"])
|
||||
ep = gazu.shot.get_episode(data["episode_id"])
|
||||
|
||||
# Find asset doc
|
||||
# Query all assets of the local project
|
||||
|
|
@ -215,11 +295,16 @@ class Listener:
|
|||
for asset_doc in get_assets(project_name)
|
||||
if asset_doc["data"].get("zou", {}).get("id")
|
||||
}
|
||||
zou_ids_and_asset_docs[episode["project_id"]] = project_doc
|
||||
zou_ids_and_asset_docs[ep["project_id"]] = project_doc
|
||||
gazu_project = gazu.project.get_project(ep["project_id"])
|
||||
|
||||
# Update
|
||||
update_op_result = update_op_assets(
|
||||
self.dbcon, project_doc, [episode], zou_ids_and_asset_docs
|
||||
self.dbcon,
|
||||
gazu_project,
|
||||
project_doc,
|
||||
[ep],
|
||||
zou_ids_and_asset_docs,
|
||||
)
|
||||
if update_op_result:
|
||||
asset_doc_id, asset_update = update_op_result[0]
|
||||
|
|
@ -228,12 +313,23 @@ class Listener:
|
|||
def _delete_episode(self, data):
|
||||
"""Delete shot of OP DB."""
|
||||
set_op_project(self.dbcon, data["project_id"])
|
||||
print("delete episode") # TODO check bugfix
|
||||
|
||||
# Delete
|
||||
self.dbcon.delete_one(
|
||||
{"type": "asset", "data.zou.id": data["episode_id"]}
|
||||
)
|
||||
ep = self.dbcon.find_one({"data.zou.id": data["episode_id"]})
|
||||
if ep:
|
||||
# Delete
|
||||
self.dbcon.delete_one(
|
||||
{"type": "asset", "data.zou.id": data["episode_id"]}
|
||||
)
|
||||
|
||||
# Print message
|
||||
project = gazu.project.get_project(
|
||||
ep["data"]["zou"]["project_id"]
|
||||
)
|
||||
|
||||
msg = "Episode deleted: {proj_name} - {ep_name}".format(
|
||||
proj_name=project["name"], ep_name=ep["name"]
|
||||
)
|
||||
log.info(msg)
|
||||
|
||||
# == Sequence ==
|
||||
def _new_sequence(self, data):
|
||||
|
|
@ -250,6 +346,20 @@ class Listener:
|
|||
# Update
|
||||
self._update_sequence(data)
|
||||
|
||||
# Print message
|
||||
ep_id = sequence.get("episode_id")
|
||||
ep = self.get_ep_dict(ep_id)
|
||||
|
||||
msg = (
|
||||
"Sequence created: {proj_name} - {ep_name}"
|
||||
"{sequence_name}".format(
|
||||
proj_name=sequence["project_name"],
|
||||
ep_name=ep["name"] + " - " if ep is not None else "",
|
||||
sequence_name=sequence["name"],
|
||||
)
|
||||
)
|
||||
log.info(msg)
|
||||
|
||||
def _update_sequence(self, data):
|
||||
"""Update sequence into OP DB."""
|
||||
set_op_project(self.dbcon, data["project_id"])
|
||||
|
|
@ -267,10 +377,15 @@ class Listener:
|
|||
if asset_doc["data"].get("zou", {}).get("id")
|
||||
}
|
||||
zou_ids_and_asset_docs[sequence["project_id"]] = project_doc
|
||||
gazu_project = gazu.project.get_project(sequence["project_id"])
|
||||
|
||||
# Update
|
||||
update_op_result = update_op_assets(
|
||||
self.dbcon, project_doc, [sequence], zou_ids_and_asset_docs
|
||||
self.dbcon,
|
||||
gazu_project,
|
||||
project_doc,
|
||||
[sequence],
|
||||
zou_ids_and_asset_docs,
|
||||
)
|
||||
if update_op_result:
|
||||
asset_doc_id, asset_update = update_op_result[0]
|
||||
|
|
@ -279,12 +394,30 @@ class Listener:
|
|||
def _delete_sequence(self, data):
|
||||
"""Delete sequence of OP DB."""
|
||||
set_op_project(self.dbcon, data["project_id"])
|
||||
print("delete sequence") # TODO check bugfix
|
||||
sequence = self.dbcon.find_one({"data.zou.id": data["sequence_id"]})
|
||||
if sequence:
|
||||
# Delete
|
||||
self.dbcon.delete_one(
|
||||
{"type": "asset", "data.zou.id": data["sequence_id"]}
|
||||
)
|
||||
|
||||
# Delete
|
||||
self.dbcon.delete_one(
|
||||
{"type": "asset", "data.zou.id": data["sequence_id"]}
|
||||
)
|
||||
# Print message
|
||||
ep_id = sequence["data"]["zou"].get("episode_id")
|
||||
ep = self.get_ep_dict(ep_id)
|
||||
|
||||
gazu_project = gazu.project.get_project(
|
||||
sequence["data"]["zou"]["project_id"]
|
||||
)
|
||||
|
||||
msg = (
|
||||
"Sequence deleted: {proj_name} - {ep_name}"
|
||||
"{sequence_name}".format(
|
||||
proj_name=gazu_project["name"],
|
||||
ep_name=ep["name"] + " - " if ep is not None else "",
|
||||
sequence_name=sequence["name"],
|
||||
)
|
||||
)
|
||||
log.info(msg)
|
||||
|
||||
# == Shot ==
|
||||
def _new_shot(self, data):
|
||||
|
|
@ -301,6 +434,21 @@ class Listener:
|
|||
# Update
|
||||
self._update_shot(data)
|
||||
|
||||
# Print message
|
||||
ep_id = shot["episode_id"]
|
||||
ep = self.get_ep_dict(ep_id)
|
||||
|
||||
msg = (
|
||||
"Shot created: {proj_name} - {ep_name}"
|
||||
"{sequence_name} - {shot_name}".format(
|
||||
proj_name=shot["project_name"],
|
||||
ep_name=ep["name"] + " - " if ep is not None else "",
|
||||
sequence_name=shot["sequence_name"],
|
||||
shot_name=shot["name"],
|
||||
)
|
||||
)
|
||||
log.info(msg)
|
||||
|
||||
def _update_shot(self, data):
|
||||
"""Update shot into OP DB."""
|
||||
set_op_project(self.dbcon, data["project_id"])
|
||||
|
|
@ -318,11 +466,17 @@ class Listener:
|
|||
if asset_doc["data"].get("zou", {}).get("id")
|
||||
}
|
||||
zou_ids_and_asset_docs[shot["project_id"]] = project_doc
|
||||
gazu_project = gazu.project.get_project(shot["project_id"])
|
||||
|
||||
# Update
|
||||
update_op_result = update_op_assets(
|
||||
self.dbcon, project_doc, [shot], zou_ids_and_asset_docs
|
||||
self.dbcon,
|
||||
gazu_project,
|
||||
project_doc,
|
||||
[shot],
|
||||
zou_ids_and_asset_docs,
|
||||
)
|
||||
|
||||
if update_op_result:
|
||||
asset_doc_id, asset_update = update_op_result[0]
|
||||
self.dbcon.update_one({"_id": asset_doc_id}, asset_update)
|
||||
|
|
@ -330,11 +484,28 @@ class Listener:
|
|||
def _delete_shot(self, data):
|
||||
"""Delete shot of OP DB."""
|
||||
set_op_project(self.dbcon, data["project_id"])
|
||||
shot = self.dbcon.find_one({"data.zou.id": data["shot_id"]})
|
||||
|
||||
# Delete
|
||||
self.dbcon.delete_one(
|
||||
{"type": "asset", "data.zou.id": data["shot_id"]}
|
||||
)
|
||||
if shot:
|
||||
# Delete
|
||||
self.dbcon.delete_one(
|
||||
{"type": "asset", "data.zou.id": data["shot_id"]}
|
||||
)
|
||||
|
||||
# Print message
|
||||
ep_id = shot["data"]["zou"].get("episode_id")
|
||||
ep = self.get_ep_dict(ep_id)
|
||||
|
||||
msg = (
|
||||
"Shot deleted: {proj_name} - {ep_name}"
|
||||
"{sequence_name} - {shot_name}".format(
|
||||
proj_name=shot["data"]["zou"]["project_name"],
|
||||
ep_name=ep["name"] + " - " if ep is not None else "",
|
||||
sequence_name=shot["data"]["zou"]["sequence_name"],
|
||||
shot_name=shot["name"],
|
||||
)
|
||||
)
|
||||
log.info(msg)
|
||||
|
||||
# == Task ==
|
||||
def _new_task(self, data):
|
||||
|
|
@ -346,23 +517,59 @@ class Listener:
|
|||
# Get gazu entity
|
||||
task = gazu.task.get_task(data["task_id"])
|
||||
|
||||
# Find asset doc
|
||||
parent_name = task["entity"]["name"]
|
||||
# Print message
|
||||
ep_id = task.get("episode_id")
|
||||
ep = self.get_ep_dict(ep_id)
|
||||
|
||||
asset_doc = get_asset_by_name(project_name, parent_name)
|
||||
parent_name = None
|
||||
asset_name = None
|
||||
ent_type = None
|
||||
|
||||
if task["task_type"]["for_entity"] == "Asset":
|
||||
parent_name = task["entity"]["name"]
|
||||
asset_name = task["entity"]["name"]
|
||||
ent_type = task["entity_type"]["name"]
|
||||
elif task["task_type"]["for_entity"] == "Shot":
|
||||
parent_name = "{ep_name}{sequence_name} - {shot_name}".format(
|
||||
ep_name=ep["name"] + " - " if ep is not None else "",
|
||||
sequence_name=task["sequence"]["name"],
|
||||
shot_name=task["entity"]["name"],
|
||||
)
|
||||
asset_name = "{ep_name}{sequence_name}_{shot_name}".format(
|
||||
ep_name=ep["name"] + "_" if ep is not None else "",
|
||||
sequence_name=task["sequence"]["name"],
|
||||
shot_name=task["entity"]["name"],
|
||||
)
|
||||
|
||||
# Update asset tasks with new one
|
||||
asset_tasks = asset_doc["data"].get("tasks")
|
||||
task_type_name = task["task_type"]["name"]
|
||||
asset_tasks[task_type_name] = {"type": task_type_name, "zou": task}
|
||||
self.dbcon.update_one(
|
||||
{"_id": asset_doc["_id"]}, {"$set": {"data.tasks": asset_tasks}}
|
||||
)
|
||||
asset_doc = get_asset_by_name(project_name, asset_name)
|
||||
if asset_doc:
|
||||
asset_tasks = asset_doc["data"].get("tasks")
|
||||
task_type_name = task["task_type"]["name"]
|
||||
asset_tasks[task_type_name] = {
|
||||
"type": task_type_name,
|
||||
"zou": task,
|
||||
}
|
||||
self.dbcon.update_one(
|
||||
{"_id": asset_doc["_id"]},
|
||||
{"$set": {"data.tasks": asset_tasks}},
|
||||
)
|
||||
|
||||
# Print message
|
||||
msg = (
|
||||
"Task created: {proj} - {ent_type}{parent}"
|
||||
" - {task}".format(
|
||||
proj=task["project"]["name"],
|
||||
ent_type=ent_type + " - " if ent_type is not None else "",
|
||||
parent=parent_name,
|
||||
task=task["task_type"]["name"],
|
||||
)
|
||||
)
|
||||
log.info(msg)
|
||||
|
||||
def _update_task(self, data):
|
||||
"""Update task into OP DB."""
|
||||
# TODO is it necessary?
|
||||
pass
|
||||
|
||||
def _delete_task(self, data):
|
||||
"""Delete task of OP DB."""
|
||||
|
|
@ -384,6 +591,31 @@ class Listener:
|
|||
{"_id": doc["_id"]},
|
||||
{"$set": {"data.tasks": asset_tasks}},
|
||||
)
|
||||
|
||||
# Print message
|
||||
entity = gazu.entity.get_entity(task["zou"]["entity_id"])
|
||||
ep = self.get_ep_dict(entity["source_id"])
|
||||
|
||||
if entity["type"] == "Asset":
|
||||
parent_name = "{ep}{entity_type} - {entity}".format(
|
||||
ep=ep["name"] + " - " if ep is not None else "",
|
||||
entity_type=task["zou"]["entity_type"]["name"],
|
||||
entity=task["zou"]["entity"]["name"],
|
||||
)
|
||||
elif entity["type"] == "Shot":
|
||||
parent_name = "{ep}{sequence} - {shot}".format(
|
||||
ep=ep["name"] + " - " if ep is not None else "",
|
||||
sequence=task["zou"]["sequence"]["name"],
|
||||
shot=task["zou"]["entity"]["name"],
|
||||
)
|
||||
|
||||
msg = "Task deleted: {proj} - {parent} - {task}".format(
|
||||
proj=task["zou"]["project"]["name"],
|
||||
parent=parent_name,
|
||||
task=name,
|
||||
)
|
||||
log.info(msg)
|
||||
|
||||
return
|
||||
|
||||
|
||||
|
|
@ -394,9 +626,10 @@ def start_listeners(login: str, password: str):
|
|||
login (str): Kitsu user login
|
||||
password (str): Kitsu user password
|
||||
"""
|
||||
|
||||
# Refresh token every week
|
||||
def refresh_token_every_week():
|
||||
print("Refreshing token...")
|
||||
log.info("Refreshing token...")
|
||||
gazu.refresh_token()
|
||||
threading.Timer(7 * 3600 * 24, refresh_token_every_week).start()
|
||||
|
||||
|
|
|
|||
|
|
@ -5,10 +5,6 @@ from typing import Dict, List
|
|||
|
||||
from pymongo import DeleteOne, UpdateOne
|
||||
import gazu
|
||||
from gazu.task import (
|
||||
all_tasks_for_asset,
|
||||
all_tasks_for_shot,
|
||||
)
|
||||
|
||||
from openpype.client import (
|
||||
get_project,
|
||||
|
|
@ -18,7 +14,6 @@ from openpype.client import (
|
|||
create_project,
|
||||
)
|
||||
from openpype.pipeline import AvalonMongoDB
|
||||
from openpype.settings import get_project_settings
|
||||
from openpype.modules.kitsu.utils.credentials import validate_credentials
|
||||
|
||||
from openpype.lib import Logger
|
||||
|
|
@ -69,6 +64,7 @@ def set_op_project(dbcon: AvalonMongoDB, project_id: str):
|
|||
|
||||
def update_op_assets(
|
||||
dbcon: AvalonMongoDB,
|
||||
gazu_project: dict,
|
||||
project_doc: dict,
|
||||
entities_list: List[dict],
|
||||
asset_doc_ids: Dict[str, dict],
|
||||
|
|
@ -78,14 +74,18 @@ def update_op_assets(
|
|||
|
||||
Args:
|
||||
dbcon (AvalonMongoDB): Connection to DB
|
||||
gazu_project (dict): Dict of gazu,
|
||||
project_doc (dict): Dict of project,
|
||||
entities_list (List[dict]): List of zou entities to update
|
||||
asset_doc_ids (Dict[str, dict]): Dicts of [{zou_id: asset_doc}, ...]
|
||||
|
||||
Returns:
|
||||
List[Dict[str, dict]]: List of (doc_id, update_dict) tuples
|
||||
"""
|
||||
if not project_doc:
|
||||
return
|
||||
|
||||
project_name = project_doc["name"]
|
||||
project_module_settings = get_project_settings(project_name)["kitsu"]
|
||||
|
||||
assets_with_update = []
|
||||
for item in entities_list:
|
||||
|
|
@ -94,7 +94,9 @@ def update_op_assets(
|
|||
if not item_doc: # Create asset
|
||||
op_asset = create_op_asset(item)
|
||||
insert_result = dbcon.insert_one(op_asset)
|
||||
item_doc = get_asset_by_id(project_name, insert_result.inserted_id)
|
||||
item_doc = get_asset_by_id(
|
||||
project_name, insert_result.inserted_id
|
||||
)
|
||||
|
||||
# Update asset
|
||||
item_data = deepcopy(item_doc["data"])
|
||||
|
|
@ -113,38 +115,79 @@ def update_op_assets(
|
|||
except (TypeError, ValueError):
|
||||
frame_in = 1001
|
||||
item_data["frameStart"] = frame_in
|
||||
# Frames duration, fallback on 0
|
||||
# Frames duration, fallback on 1
|
||||
try:
|
||||
# NOTE nb_frames is stored directly in item
|
||||
# because of zou's legacy design
|
||||
frames_duration = int(item.get("nb_frames", 0))
|
||||
frames_duration = int(item.get("nb_frames", 1))
|
||||
except (TypeError, ValueError):
|
||||
frames_duration = 0
|
||||
frames_duration = None
|
||||
# Frame out, fallback on frame_in + duration or project's value or 1001
|
||||
frame_out = item_data.pop("frame_out", None)
|
||||
if not frame_out:
|
||||
frame_out = frame_in + frames_duration
|
||||
try:
|
||||
frame_out = int(frame_out)
|
||||
except (TypeError, ValueError):
|
||||
frame_out = 1001
|
||||
if frames_duration:
|
||||
frame_out = frame_in + frames_duration - 1
|
||||
else:
|
||||
frame_out = project_doc["data"].get("frameEnd", frame_in)
|
||||
item_data["frameEnd"] = frame_out
|
||||
# Fps, fallback to project's value or default value (25.0)
|
||||
try:
|
||||
fps = float(item_data.get("fps", project_doc["data"].get("fps")))
|
||||
fps = float(item_data.get("fps"))
|
||||
except (TypeError, ValueError):
|
||||
fps = 25.0
|
||||
fps = float(
|
||||
gazu_project.get("fps", project_doc["data"].get("fps", 25))
|
||||
)
|
||||
item_data["fps"] = fps
|
||||
# Resolution, fall back to project default
|
||||
match_res = re.match(
|
||||
r"(\d+)x(\d+)",
|
||||
item_data.get("resolution", gazu_project.get("resolution")),
|
||||
)
|
||||
if match_res:
|
||||
item_data["resolutionWidth"] = int(match_res.group(1))
|
||||
item_data["resolutionHeight"] = int(match_res.group(2))
|
||||
else:
|
||||
item_data["resolutionWidth"] = project_doc["data"].get(
|
||||
"resolutionWidth"
|
||||
)
|
||||
item_data["resolutionHeight"] = project_doc["data"].get(
|
||||
"resolutionHeight"
|
||||
)
|
||||
# Properties that doesn't fully exist in Kitsu.
|
||||
# Guessing those property names below:
|
||||
# Pixel Aspect Ratio
|
||||
item_data["pixelAspect"] = item_data.get(
|
||||
"pixel_aspect", project_doc["data"].get("pixelAspect")
|
||||
)
|
||||
# Handle Start
|
||||
item_data["handleStart"] = item_data.get(
|
||||
"handle_start", project_doc["data"].get("handleStart")
|
||||
)
|
||||
# Handle End
|
||||
item_data["handleEnd"] = item_data.get(
|
||||
"handle_end", project_doc["data"].get("handleEnd")
|
||||
)
|
||||
# Clip In
|
||||
item_data["clipIn"] = item_data.get(
|
||||
"clip_in", project_doc["data"].get("clipIn")
|
||||
)
|
||||
# Clip Out
|
||||
item_data["clipOut"] = item_data.get(
|
||||
"clip_out", project_doc["data"].get("clipOut")
|
||||
)
|
||||
|
||||
# Tasks
|
||||
tasks_list = []
|
||||
item_type = item["type"]
|
||||
if item_type == "Asset":
|
||||
tasks_list = all_tasks_for_asset(item)
|
||||
tasks_list = gazu.task.all_tasks_for_asset(item)
|
||||
elif item_type == "Shot":
|
||||
tasks_list = all_tasks_for_shot(item)
|
||||
tasks_list = gazu.task.all_tasks_for_shot(item)
|
||||
item_data["tasks"] = {
|
||||
t["task_type_name"]: {"type": t["task_type_name"], "zou": t}
|
||||
t["task_type_name"]: {
|
||||
"type": t["task_type_name"],
|
||||
"zou": gazu.task.get_task(t["id"]),
|
||||
}
|
||||
for t in tasks_list
|
||||
}
|
||||
|
||||
|
|
@ -176,9 +219,16 @@ def update_op_assets(
|
|||
entity_root_asset_name = "Shots"
|
||||
|
||||
# Root parent folder if exist
|
||||
visual_parent_doc_id = (
|
||||
asset_doc_ids[parent_zou_id]["_id"] if parent_zou_id else None
|
||||
)
|
||||
visual_parent_doc_id = None
|
||||
if parent_zou_id is not None:
|
||||
parent_zou_id_dict = asset_doc_ids.get(parent_zou_id)
|
||||
if parent_zou_id_dict is not None:
|
||||
visual_parent_doc_id = (
|
||||
parent_zou_id_dict.get("_id")
|
||||
if parent_zou_id_dict
|
||||
else None
|
||||
)
|
||||
|
||||
if visual_parent_doc_id is None:
|
||||
# Find root folder doc ("Assets" or "Shots")
|
||||
root_folder_doc = get_asset_by_name(
|
||||
|
|
@ -197,12 +247,15 @@ def update_op_assets(
|
|||
item_data["parents"] = []
|
||||
ancestor_id = parent_zou_id
|
||||
while ancestor_id is not None:
|
||||
parent_doc = asset_doc_ids[ancestor_id]
|
||||
item_data["parents"].insert(0, parent_doc["name"])
|
||||
parent_doc = asset_doc_ids.get(ancestor_id)
|
||||
if parent_doc is not None:
|
||||
item_data["parents"].insert(0, parent_doc["name"])
|
||||
|
||||
# Get parent entity
|
||||
parent_entity = parent_doc["data"]["zou"]
|
||||
ancestor_id = parent_entity.get("parent_id")
|
||||
# Get parent entity
|
||||
parent_entity = parent_doc["data"]["zou"]
|
||||
ancestor_id = parent_entity.get("parent_id")
|
||||
else:
|
||||
ancestor_id = None
|
||||
|
||||
# Build OpenPype compatible name
|
||||
if item_type in ["Shot", "Sequence"] and parent_zou_id is not None:
|
||||
|
|
@ -250,13 +303,12 @@ def write_project_to_op(project: dict, dbcon: AvalonMongoDB) -> UpdateOne:
|
|||
UpdateOne: Update instance for the project
|
||||
"""
|
||||
project_name = project["name"]
|
||||
project_doc = get_project(project_name)
|
||||
if not project_doc:
|
||||
log.info(f"Creating project '{project_name}'")
|
||||
project_doc = create_project(project_name, project_name)
|
||||
project_dict = get_project(project_name)
|
||||
if not project_dict:
|
||||
project_dict = create_project(project_name, project_name)
|
||||
|
||||
# Project data and tasks
|
||||
project_data = project_doc["data"] or {}
|
||||
project_data = project_dict["data"] or {}
|
||||
|
||||
# Build project code and update Kitsu
|
||||
project_code = project.get("code")
|
||||
|
|
@ -287,7 +339,7 @@ def write_project_to_op(project: dict, dbcon: AvalonMongoDB) -> UpdateOne:
|
|||
)
|
||||
|
||||
return UpdateOne(
|
||||
{"_id": project_doc["_id"]},
|
||||
{"_id": project_dict["_id"]},
|
||||
{
|
||||
"$set": {
|
||||
"config.tasks": {
|
||||
|
|
@ -301,7 +353,9 @@ def write_project_to_op(project: dict, dbcon: AvalonMongoDB) -> UpdateOne:
|
|||
)
|
||||
|
||||
|
||||
def sync_all_projects(login: str, password: str, ignore_projects: list = None):
|
||||
def sync_all_projects(
|
||||
login: str, password: str, ignore_projects: list = None
|
||||
):
|
||||
"""Update all OP projects in DB with Zou data.
|
||||
|
||||
Args:
|
||||
|
|
@ -346,7 +400,7 @@ def sync_project_from_kitsu(dbcon: AvalonMongoDB, project: dict):
|
|||
if not project:
|
||||
project = gazu.project.get_project_by_name(project["name"])
|
||||
|
||||
log.info(f"Synchronizing {project['name']}...")
|
||||
log.info("Synchronizing {}...".format(project["name"]))
|
||||
|
||||
# Get all assets from zou
|
||||
all_assets = gazu.asset.all_assets_for_project(project)
|
||||
|
|
@ -365,12 +419,16 @@ def sync_project_from_kitsu(dbcon: AvalonMongoDB, project: dict):
|
|||
]
|
||||
|
||||
# Sync project. Create if doesn't exist
|
||||
project_name = project["name"]
|
||||
project_dict = get_project(project_name)
|
||||
if not project_dict:
|
||||
log.info("Project created: {}".format(project_name))
|
||||
bulk_writes.append(write_project_to_op(project, dbcon))
|
||||
|
||||
# Try to find project document
|
||||
project_name = project["name"]
|
||||
if not project_dict:
|
||||
project_dict = get_project(project_name)
|
||||
dbcon.Session["AVALON_PROJECT"] = project_name
|
||||
project_doc = get_project(project_name)
|
||||
|
||||
# Query all assets of the local project
|
||||
zou_ids_and_asset_docs = {
|
||||
|
|
@ -378,7 +436,7 @@ def sync_project_from_kitsu(dbcon: AvalonMongoDB, project: dict):
|
|||
for asset_doc in get_assets(project_name)
|
||||
if asset_doc["data"].get("zou", {}).get("id")
|
||||
}
|
||||
zou_ids_and_asset_docs[project["id"]] = project_doc
|
||||
zou_ids_and_asset_docs[project["id"]] = project_dict
|
||||
|
||||
# Create entities root folders
|
||||
to_insert = [
|
||||
|
|
@ -389,6 +447,8 @@ def sync_project_from_kitsu(dbcon: AvalonMongoDB, project: dict):
|
|||
"data": {
|
||||
"root_of": r,
|
||||
"tasks": {},
|
||||
"visualParent": None,
|
||||
"parents": [],
|
||||
},
|
||||
}
|
||||
for r in ["Assets", "Shots"]
|
||||
|
|
@ -423,7 +483,11 @@ def sync_project_from_kitsu(dbcon: AvalonMongoDB, project: dict):
|
|||
[
|
||||
UpdateOne({"_id": id}, update)
|
||||
for id, update in update_op_assets(
|
||||
dbcon, project_doc, all_entities, zou_ids_and_asset_docs
|
||||
dbcon,
|
||||
project,
|
||||
project_dict,
|
||||
all_entities,
|
||||
zou_ids_and_asset_docs,
|
||||
)
|
||||
]
|
||||
)
|
||||
|
|
|
|||
|
|
@ -61,7 +61,7 @@ def sync_zou_from_op_project(
|
|||
project_doc = get_project(project_name)
|
||||
|
||||
# Get all entities from zou
|
||||
print(f"Synchronizing {project_name}...")
|
||||
print("Synchronizing {}...".format(project_name))
|
||||
zou_project = gazu.project.get_project_by_name(project_name)
|
||||
|
||||
# Create project
|
||||
|
|
@ -82,7 +82,9 @@ def sync_zou_from_op_project(
|
|||
f"x{project_doc['data']['resolutionHeight']}",
|
||||
}
|
||||
)
|
||||
gazu.project.update_project_data(zou_project, data=project_doc["data"])
|
||||
gazu.project.update_project_data(
|
||||
zou_project, data=project_doc["data"]
|
||||
)
|
||||
gazu.project.update_project(zou_project)
|
||||
|
||||
asset_types = gazu.asset.all_asset_types()
|
||||
|
|
@ -98,8 +100,7 @@ def sync_zou_from_op_project(
|
|||
project_module_settings = get_project_settings(project_name)["kitsu"]
|
||||
dbcon.Session["AVALON_PROJECT"] = project_name
|
||||
asset_docs = {
|
||||
asset_doc["_id"]: asset_doc
|
||||
for asset_doc in get_assets(project_name)
|
||||
asset_doc["_id"]: asset_doc for asset_doc in get_assets(project_name)
|
||||
}
|
||||
|
||||
# Create new assets
|
||||
|
|
@ -174,7 +175,9 @@ def sync_zou_from_op_project(
|
|||
doc["name"],
|
||||
frame_in=doc["data"]["frameStart"],
|
||||
frame_out=doc["data"]["frameEnd"],
|
||||
nb_frames=doc["data"]["frameEnd"] - doc["data"]["frameStart"],
|
||||
nb_frames=(
|
||||
doc["data"]["frameEnd"] - doc["data"]["frameStart"] + 1
|
||||
),
|
||||
)
|
||||
|
||||
elif match.group(2): # Sequence
|
||||
|
|
@ -229,7 +232,7 @@ def sync_zou_from_op_project(
|
|||
"frame_in": frame_in,
|
||||
"frame_out": frame_out,
|
||||
},
|
||||
"nb_frames": frame_out - frame_in,
|
||||
"nb_frames": frame_out - frame_in + 1,
|
||||
}
|
||||
)
|
||||
entity = gazu.raw.update("entities", zou_id, entity_data)
|
||||
|
|
@ -258,7 +261,7 @@ def sync_zou_from_op_project(
|
|||
for asset_doc in asset_docs.values()
|
||||
}
|
||||
for entity_id in deleted_entities:
|
||||
gazu.raw.delete(f"data/entities/{entity_id}")
|
||||
gazu.raw.delete("data/entities/{}".format(entity_id))
|
||||
|
||||
# Write into DB
|
||||
if bulk_writes:
|
||||
|
|
|
|||
|
|
@ -1472,13 +1472,15 @@ class SyncServerModule(OpenPypeModule, ITrayModule):
|
|||
|
||||
return sync_settings
|
||||
|
||||
def get_all_site_configs(self, project_name=None):
|
||||
def get_all_site_configs(self, project_name=None,
|
||||
local_editable_only=False):
|
||||
"""
|
||||
Returns (dict) with all sites configured system wide.
|
||||
|
||||
Args:
|
||||
project_name (str)(optional): if present, check if not disabled
|
||||
|
||||
local_editable_only (bool)(opt): if True return only Local
|
||||
Setting configurable (for LS UI)
|
||||
Returns:
|
||||
(dict): {'studio': {'provider':'local_drive'...},
|
||||
'MY_LOCAL': {'provider':....}}
|
||||
|
|
@ -1499,9 +1501,21 @@ class SyncServerModule(OpenPypeModule, ITrayModule):
|
|||
if site_settings:
|
||||
detail.update(site_settings)
|
||||
system_sites[site] = detail
|
||||
|
||||
system_sites.update(self._get_default_site_configs(sync_enabled,
|
||||
project_name))
|
||||
if local_editable_only:
|
||||
local_schema = SyncServerModule.get_local_settings_schema()
|
||||
editable_keys = {}
|
||||
for provider_code, editables in local_schema.items():
|
||||
editable_keys[provider_code] = ["enabled", "provider"]
|
||||
for editable_item in editables:
|
||||
editable_keys[provider_code].append(editable_item["key"])
|
||||
|
||||
for _, site in system_sites.items():
|
||||
provider = site["provider"]
|
||||
for site_config_key in list(site.keys()):
|
||||
if site_config_key not in editable_keys[provider]:
|
||||
site.pop(site_config_key, None)
|
||||
|
||||
return system_sites
|
||||
|
||||
|
|
|
|||
|
|
@ -335,9 +335,10 @@ def get_imageio_config(
|
|||
get_template_data_from_session)
|
||||
anatomy_data = get_template_data_from_session()
|
||||
|
||||
formatting_data = deepcopy(anatomy_data)
|
||||
# add project roots to anatomy data
|
||||
anatomy_data["root"] = anatomy.roots
|
||||
anatomy_data["platform"] = platform.system().lower()
|
||||
formatting_data["root"] = anatomy.roots
|
||||
formatting_data["platform"] = platform.system().lower()
|
||||
|
||||
# get colorspace settings
|
||||
imageio_global, imageio_host = _get_imageio_settings(
|
||||
|
|
@ -347,7 +348,7 @@ def get_imageio_config(
|
|||
|
||||
if config_host.get("enabled"):
|
||||
config_data = _get_config_data(
|
||||
config_host["filepath"], anatomy_data
|
||||
config_host["filepath"], formatting_data
|
||||
)
|
||||
else:
|
||||
config_data = None
|
||||
|
|
@ -356,7 +357,7 @@ def get_imageio_config(
|
|||
# get config path from either global or host_name
|
||||
config_global = imageio_global["ocio_config"]
|
||||
config_data = _get_config_data(
|
||||
config_global["filepath"], anatomy_data
|
||||
config_global["filepath"], formatting_data
|
||||
)
|
||||
|
||||
if not config_data:
|
||||
|
|
@ -372,12 +373,12 @@ def _get_config_data(path_list, anatomy_data):
|
|||
"""Return first existing path in path list.
|
||||
|
||||
If template is used in path inputs,
|
||||
then it is formated by anatomy data
|
||||
then it is formatted by anatomy data
|
||||
and environment variables
|
||||
|
||||
Args:
|
||||
path_list (list[str]): list of abs paths
|
||||
anatomy_data (dict): formating data
|
||||
anatomy_data (dict): formatting data
|
||||
|
||||
Returns:
|
||||
dict: config data
|
||||
|
|
@ -389,30 +390,30 @@ def _get_config_data(path_list, anatomy_data):
|
|||
|
||||
# first try host config paths
|
||||
for path_ in path_list:
|
||||
formated_path = _format_path(path_, formatting_data)
|
||||
formatted_path = _format_path(path_, formatting_data)
|
||||
|
||||
if not os.path.exists(formated_path):
|
||||
if not os.path.exists(formatted_path):
|
||||
continue
|
||||
|
||||
return {
|
||||
"path": os.path.normpath(formated_path),
|
||||
"path": os.path.normpath(formatted_path),
|
||||
"template": path_
|
||||
}
|
||||
|
||||
|
||||
def _format_path(tempate_path, formatting_data):
|
||||
"""Single template path formating.
|
||||
def _format_path(template_path, formatting_data):
|
||||
"""Single template path formatting.
|
||||
|
||||
Args:
|
||||
tempate_path (str): template string
|
||||
template_path (str): template string
|
||||
formatting_data (dict): data to be used for
|
||||
template formating
|
||||
template formatting
|
||||
|
||||
Returns:
|
||||
str: absolute formated path
|
||||
str: absolute formatted path
|
||||
"""
|
||||
# format path for anatomy keys
|
||||
formatted_path = StringTemplate(tempate_path).format(
|
||||
formatted_path = StringTemplate(template_path).format(
|
||||
formatting_data)
|
||||
|
||||
return os.path.abspath(formatted_path)
|
||||
|
|
|
|||
|
|
@ -19,7 +19,7 @@ from .publish_plugins import (
|
|||
RepairContextAction,
|
||||
|
||||
Extractor,
|
||||
ExtractorColormanaged,
|
||||
ColormanagedPyblishPluginMixin
|
||||
)
|
||||
|
||||
from .lib import (
|
||||
|
|
@ -64,7 +64,7 @@ __all__ = (
|
|||
"RepairContextAction",
|
||||
|
||||
"Extractor",
|
||||
"ExtractorColormanaged",
|
||||
"ColormanagedPyblishPluginMixin",
|
||||
|
||||
"get_publish_template_name",
|
||||
|
||||
|
|
|
|||
|
|
@ -3,7 +3,7 @@ from abc import ABCMeta
|
|||
from pprint import pformat
|
||||
import pyblish.api
|
||||
from pyblish.plugin import MetaPlugin, ExplicitMetaPlugin
|
||||
|
||||
from openpype.lib.transcoding import VIDEO_EXTENSIONS, IMAGE_EXTENSIONS
|
||||
from openpype.lib import BoolDef
|
||||
|
||||
from .lib import (
|
||||
|
|
@ -288,28 +288,29 @@ class Extractor(pyblish.api.InstancePlugin):
|
|||
return get_instance_staging_dir(instance)
|
||||
|
||||
|
||||
class ExtractorColormanaged(Extractor):
|
||||
"""Extractor base for color managed image data.
|
||||
|
||||
Each Extractor intended to export pixel data representation
|
||||
should inherit from this class to allow color managed data.
|
||||
Class implements "get_colorspace_settings" and
|
||||
"set_representation_colorspace" functions used
|
||||
for injecting colorspace data to representation data for farther
|
||||
integration into db document.
|
||||
class ColormanagedPyblishPluginMixin(object):
|
||||
"""Mixin for colormanaged plugins.
|
||||
|
||||
This class is used to set colorspace data to a publishing
|
||||
representation. It contains a static method,
|
||||
get_colorspace_settings, which returns config and
|
||||
file rules data for the host context.
|
||||
It also contains a method, set_representation_colorspace,
|
||||
which sets colorspace data to the representation.
|
||||
The allowed file extensions are listed in the allowed_ext variable.
|
||||
The method first checks if the file extension is in
|
||||
the list of allowed extensions. If it is, it then gets the
|
||||
colorspace settings from the host context and gets a
|
||||
matching colorspace from rules. Finally, it infuses this
|
||||
data into the representation.
|
||||
"""
|
||||
|
||||
allowed_ext = [
|
||||
"cin", "dpx", "avi", "dv", "gif", "flv", "mkv", "mov", "mpg", "mpeg",
|
||||
"mp4", "m4v", "mxf", "iff", "z", "ifl", "jpeg", "jpg", "jfif", "lut",
|
||||
"1dl", "exr", "pic", "png", "ppm", "pnm", "pgm", "pbm", "rla", "rpf",
|
||||
"sgi", "rgba", "rgb", "bw", "tga", "tiff", "tif", "img"
|
||||
]
|
||||
allowed_ext = set(
|
||||
ext.lstrip(".") for ext in IMAGE_EXTENSIONS.union(VIDEO_EXTENSIONS)
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def get_colorspace_settings(context):
|
||||
"""Retuns solved settings for the host context.
|
||||
"""Returns solved settings for the host context.
|
||||
|
||||
Args:
|
||||
context (publish.Context): publishing context
|
||||
|
|
@ -375,7 +376,10 @@ class ExtractorColormanaged(Extractor):
|
|||
ext = representation["ext"]
|
||||
# check extension
|
||||
self.log.debug("__ ext: `{}`".format(ext))
|
||||
if ext.lower() not in self.allowed_ext:
|
||||
|
||||
# check if ext in lower case is in self.allowed_ext
|
||||
if ext.lstrip(".").lower() not in self.allowed_ext:
|
||||
self.log.debug("Extension is not in allowed extensions.")
|
||||
return
|
||||
|
||||
if colorspace_settings is None:
|
||||
|
|
|
|||
|
|
@ -28,6 +28,7 @@ from openpype.settings import (
|
|||
get_project_settings,
|
||||
get_system_settings,
|
||||
)
|
||||
from openpype.host import IWorkfileHost
|
||||
from openpype.host import HostBase
|
||||
from openpype.lib import (
|
||||
Logger,
|
||||
|
|
@ -43,7 +44,8 @@ from openpype.pipeline.load import (
|
|||
load_with_repre_context,
|
||||
)
|
||||
from openpype.pipeline.create import (
|
||||
discover_legacy_creator_plugins
|
||||
discover_legacy_creator_plugins,
|
||||
CreateContext,
|
||||
)
|
||||
|
||||
|
||||
|
|
@ -91,6 +93,7 @@ class AbstractTemplateBuilder(object):
|
|||
"""
|
||||
|
||||
_log = None
|
||||
use_legacy_creators = False
|
||||
|
||||
def __init__(self, host):
|
||||
# Get host name
|
||||
|
|
@ -110,6 +113,7 @@ class AbstractTemplateBuilder(object):
|
|||
self._placeholder_plugins = None
|
||||
self._loaders_by_name = None
|
||||
self._creators_by_name = None
|
||||
self._create_context = None
|
||||
|
||||
self._system_settings = None
|
||||
self._project_settings = None
|
||||
|
|
@ -171,6 +175,16 @@ class AbstractTemplateBuilder(object):
|
|||
.get("type")
|
||||
)
|
||||
|
||||
@property
|
||||
def create_context(self):
|
||||
if self._create_context is None:
|
||||
self._create_context = CreateContext(
|
||||
self.host,
|
||||
discover_publish_plugins=False,
|
||||
headless=True
|
||||
)
|
||||
return self._create_context
|
||||
|
||||
def get_placeholder_plugin_classes(self):
|
||||
"""Get placeholder plugin classes that can be used to build template.
|
||||
|
||||
|
|
@ -235,18 +249,29 @@ class AbstractTemplateBuilder(object):
|
|||
self._loaders_by_name = get_loaders_by_name()
|
||||
return self._loaders_by_name
|
||||
|
||||
def _collect_legacy_creators(self):
|
||||
creators_by_name = {}
|
||||
for creator in discover_legacy_creator_plugins():
|
||||
if not creator.enabled:
|
||||
continue
|
||||
creator_name = creator.__name__
|
||||
if creator_name in creators_by_name:
|
||||
raise KeyError(
|
||||
"Duplicated creator name {} !".format(creator_name)
|
||||
)
|
||||
creators_by_name[creator_name] = creator
|
||||
self._creators_by_name = creators_by_name
|
||||
|
||||
def _collect_creators(self):
|
||||
self._creators_by_name = dict(self.create_context.creators)
|
||||
|
||||
def get_creators_by_name(self):
|
||||
if self._creators_by_name is None:
|
||||
self._creators_by_name = {}
|
||||
for creator in discover_legacy_creator_plugins():
|
||||
if not creator.enabled:
|
||||
continue
|
||||
creator_name = creator.__name__
|
||||
if creator_name in self._creators_by_name:
|
||||
raise KeyError(
|
||||
"Duplicated creator name {} !".format(creator_name)
|
||||
)
|
||||
self._creators_by_name[creator_name] = creator
|
||||
if self.use_legacy_creators:
|
||||
self._collect_legacy_creators()
|
||||
else:
|
||||
self._collect_creators()
|
||||
|
||||
return self._creators_by_name
|
||||
|
||||
def get_shared_data(self, key):
|
||||
|
|
@ -416,7 +441,9 @@ class AbstractTemplateBuilder(object):
|
|||
self,
|
||||
template_path=None,
|
||||
level_limit=None,
|
||||
keep_placeholders=None
|
||||
keep_placeholders=None,
|
||||
create_first_version=None,
|
||||
workfile_creation_enabled=False
|
||||
):
|
||||
"""Main callback for building workfile from template path.
|
||||
|
||||
|
|
@ -433,6 +460,11 @@ class AbstractTemplateBuilder(object):
|
|||
keep_placeholders (bool): Add flag to placeholder data for
|
||||
hosts to decide if they want to remove
|
||||
placeholder after it is used.
|
||||
create_first_version (bool): create first version of a workfile
|
||||
workfile_creation_enabled (bool): If True, it might create
|
||||
first version but ignore
|
||||
process if version is created
|
||||
|
||||
"""
|
||||
template_preset = self.get_template_preset()
|
||||
|
||||
|
|
@ -441,6 +473,30 @@ class AbstractTemplateBuilder(object):
|
|||
|
||||
if keep_placeholders is None:
|
||||
keep_placeholders = template_preset["keep_placeholder"]
|
||||
if create_first_version is None:
|
||||
create_first_version = template_preset["create_first_version"]
|
||||
|
||||
# check if first version is created
|
||||
created_version_workfile = self.create_first_workfile_version()
|
||||
|
||||
# if first version is created, import template
|
||||
# and populate placeholders
|
||||
if (
|
||||
create_first_version
|
||||
and workfile_creation_enabled
|
||||
and created_version_workfile
|
||||
):
|
||||
self.import_template(template_path)
|
||||
self.populate_scene_placeholders(
|
||||
level_limit, keep_placeholders)
|
||||
|
||||
# save workfile after template is populated
|
||||
self.save_workfile(created_version_workfile)
|
||||
|
||||
# ignore process if first workfile is enabled
|
||||
# but a version is already created
|
||||
if workfile_creation_enabled:
|
||||
return
|
||||
|
||||
self.import_template(template_path)
|
||||
self.populate_scene_placeholders(
|
||||
|
|
@ -492,6 +548,39 @@ class AbstractTemplateBuilder(object):
|
|||
|
||||
pass
|
||||
|
||||
def create_first_workfile_version(self):
|
||||
"""
|
||||
Create first version of workfile.
|
||||
|
||||
Should load the content of template into scene so
|
||||
'populate_scene_placeholders' can be started.
|
||||
|
||||
Args:
|
||||
template_path (str): Fullpath for current task and
|
||||
host's template file.
|
||||
"""
|
||||
last_workfile_path = os.environ.get("AVALON_LAST_WORKFILE")
|
||||
self.log.info("__ last_workfile_path: {}".format(last_workfile_path))
|
||||
if os.path.exists(last_workfile_path):
|
||||
# ignore in case workfile existence
|
||||
self.log.info("Workfile already exists, skipping creation.")
|
||||
return False
|
||||
|
||||
# Create first version
|
||||
self.log.info("Creating first version of workfile.")
|
||||
self.save_workfile(last_workfile_path)
|
||||
|
||||
# Confirm creation of first version
|
||||
return last_workfile_path
|
||||
|
||||
def save_workfile(self, workfile_path):
|
||||
"""Save workfile in current host."""
|
||||
# Save current scene, continue to open file
|
||||
if isinstance(self.host, IWorkfileHost):
|
||||
self.host.save_workfile(workfile_path)
|
||||
else:
|
||||
self.host.save_file(workfile_path)
|
||||
|
||||
def _prepare_placeholders(self, placeholders):
|
||||
"""Run preparation part for placeholders on plugins.
|
||||
|
||||
|
|
@ -675,6 +764,8 @@ class AbstractTemplateBuilder(object):
|
|||
|
||||
# switch to remove placeholders after they are used
|
||||
keep_placeholder = profile.get("keep_placeholder")
|
||||
create_first_version = profile.get("create_first_version")
|
||||
|
||||
# backward compatibility, since default is True
|
||||
if keep_placeholder is None:
|
||||
keep_placeholder = True
|
||||
|
|
@ -708,7 +799,8 @@ class AbstractTemplateBuilder(object):
|
|||
self.log.info("Found template at: '{}'".format(path))
|
||||
return {
|
||||
"path": path,
|
||||
"keep_placeholder": keep_placeholder
|
||||
"keep_placeholder": keep_placeholder,
|
||||
"create_first_version": create_first_version
|
||||
}
|
||||
|
||||
solved_path = None
|
||||
|
|
@ -737,7 +829,8 @@ class AbstractTemplateBuilder(object):
|
|||
|
||||
return {
|
||||
"path": solved_path,
|
||||
"keep_placeholder": keep_placeholder
|
||||
"keep_placeholder": keep_placeholder,
|
||||
"create_first_version": create_first_version
|
||||
}
|
||||
|
||||
|
||||
|
|
@ -1579,6 +1672,8 @@ class PlaceholderCreateMixin(object):
|
|||
placeholder (PlaceholderItem): Placeholder item with information
|
||||
about requested publishable instance.
|
||||
"""
|
||||
|
||||
legacy_create = self.builder.use_legacy_creators
|
||||
creator_name = placeholder.data["creator"]
|
||||
create_variant = placeholder.data["create_variant"]
|
||||
|
||||
|
|
@ -1589,17 +1684,28 @@ class PlaceholderCreateMixin(object):
|
|||
task_name = legacy_io.Session["AVALON_TASK"]
|
||||
asset_name = legacy_io.Session["AVALON_ASSET"]
|
||||
|
||||
# get asset id
|
||||
asset_doc = get_asset_by_name(project_name, asset_name, fields=["_id"])
|
||||
assert asset_doc, "No current asset found in Session"
|
||||
asset_id = asset_doc['_id']
|
||||
if legacy_create:
|
||||
asset_doc = get_asset_by_name(
|
||||
project_name, asset_name, fields=["_id"]
|
||||
)
|
||||
assert asset_doc, "No current asset found in Session"
|
||||
subset_name = creator_plugin.get_subset_name(
|
||||
create_variant,
|
||||
task_name,
|
||||
asset_doc["_id"],
|
||||
project_name
|
||||
)
|
||||
|
||||
subset_name = creator_plugin.get_subset_name(
|
||||
create_variant,
|
||||
task_name,
|
||||
asset_id,
|
||||
project_name
|
||||
)
|
||||
else:
|
||||
asset_doc = get_asset_by_name(project_name, asset_name)
|
||||
assert asset_doc, "No current asset found in Session"
|
||||
subset_name = creator_plugin.get_subset_name(
|
||||
create_variant,
|
||||
task_name,
|
||||
asset_doc,
|
||||
project_name,
|
||||
self.builder.host_name
|
||||
)
|
||||
|
||||
creator_data = {
|
||||
"creator_name": creator_name,
|
||||
|
|
@ -1612,12 +1718,20 @@ class PlaceholderCreateMixin(object):
|
|||
|
||||
# compile subset name from variant
|
||||
try:
|
||||
creator_instance = creator_plugin(
|
||||
subset_name,
|
||||
asset_name
|
||||
).process()
|
||||
if legacy_create:
|
||||
creator_instance = creator_plugin(
|
||||
subset_name,
|
||||
asset_name
|
||||
).process()
|
||||
else:
|
||||
creator_instance = self.builder.create_context.create(
|
||||
creator_plugin.identifier,
|
||||
create_variant,
|
||||
asset_doc,
|
||||
task_name=task_name
|
||||
)
|
||||
|
||||
except Exception:
|
||||
except: # noqa: E722
|
||||
failed = True
|
||||
self.create_failed(placeholder, creator_data)
|
||||
|
||||
|
|
|
|||
|
|
@ -23,7 +23,8 @@ class CollectInputRepresentationsToVersions(pyblish.api.ContextPlugin):
|
|||
representations = set()
|
||||
for instance in context:
|
||||
inst_repre = instance.data.get("inputRepresentations", [])
|
||||
representations.update(inst_repre)
|
||||
if inst_repre:
|
||||
representations.update(inst_repre)
|
||||
|
||||
representations_docs = get_representations(
|
||||
project_name=context.data["projectEntity"]["name"],
|
||||
|
|
@ -31,7 +32,8 @@ class CollectInputRepresentationsToVersions(pyblish.api.ContextPlugin):
|
|||
fields=["_id", "parent"])
|
||||
|
||||
representation_id_to_version_id = {
|
||||
repre["_id"]: repre["parent"] for repre in representations_docs
|
||||
str(repre["_id"]): repre["parent"]
|
||||
for repre in representations_docs
|
||||
}
|
||||
|
||||
for instance in context:
|
||||
|
|
@ -39,9 +41,8 @@ class CollectInputRepresentationsToVersions(pyblish.api.ContextPlugin):
|
|||
if not inst_repre:
|
||||
continue
|
||||
|
||||
input_versions = instance.data.get("inputVersions", [])
|
||||
input_versions = instance.data.setdefault("inputVersions", [])
|
||||
for repre_id in inst_repre:
|
||||
repre_id = ObjectId(repre_id)
|
||||
version_id = representation_id_to_version_id[repre_id]
|
||||
input_versions.append(version_id)
|
||||
instance.data["inputVersions"] = input_versions
|
||||
version_id = representation_id_to_version_id.get(repre_id)
|
||||
if version_id:
|
||||
input_versions.append(version_id)
|
||||
|
|
|
|||
|
|
@ -2,7 +2,8 @@ import pyblish.api
|
|||
from openpype.pipeline import publish
|
||||
|
||||
|
||||
class ExtractColorspaceData(publish.ExtractorColormanaged):
|
||||
class ExtractColorspaceData(publish.Extractor,
|
||||
publish.ColormanagedPyblishPluginMixin):
|
||||
""" Inject Colorspace data to available representations.
|
||||
|
||||
Input data:
|
||||
|
|
|
|||
|
|
@ -135,6 +135,38 @@ class ExtractHierarchyToAvalon(pyblish.api.ContextPlugin):
|
|||
)
|
||||
return project_doc
|
||||
|
||||
def _prepare_new_tasks(self, asset_doc, entity_data):
|
||||
new_tasks = entity_data.get("tasks") or {}
|
||||
if not asset_doc:
|
||||
return new_tasks
|
||||
|
||||
old_tasks = asset_doc.get("data", {}).get("tasks")
|
||||
# Just use new tasks if old are not available
|
||||
if not old_tasks:
|
||||
return new_tasks
|
||||
|
||||
output = deepcopy(old_tasks)
|
||||
# Create mapping of lowered task names from old tasks
|
||||
cur_task_low_mapping = {
|
||||
task_name.lower(): task_name
|
||||
for task_name in old_tasks
|
||||
}
|
||||
# Add/update tasks from new entity data
|
||||
for task_name, task_info in new_tasks.items():
|
||||
task_info = deepcopy(task_info)
|
||||
task_name_low = task_name.lower()
|
||||
# Add new task
|
||||
if task_name_low not in cur_task_low_mapping:
|
||||
output[task_name] = task_info
|
||||
continue
|
||||
|
||||
# Update existing task with new info
|
||||
mapped_task_name = cur_task_low_mapping.pop(task_name_low)
|
||||
src_task_info = output.pop(mapped_task_name)
|
||||
src_task_info.update(task_info)
|
||||
output[task_name] = src_task_info
|
||||
return output
|
||||
|
||||
def sync_asset(
|
||||
self,
|
||||
asset_name,
|
||||
|
|
@ -170,11 +202,12 @@ class ExtractHierarchyToAvalon(pyblish.api.ContextPlugin):
|
|||
data["parents"] = parents
|
||||
|
||||
asset_doc = asset_docs_by_name.get(asset_name)
|
||||
|
||||
# Tasks
|
||||
data["tasks"] = self._prepare_new_tasks(asset_doc, entity_data)
|
||||
|
||||
# --- Create/Unarchive asset and end ---
|
||||
if not asset_doc:
|
||||
# Just use tasks from entity data as they are
|
||||
# - this is different from the case when tasks are updated
|
||||
data["tasks"] = entity_data.get("tasks") or {}
|
||||
archived_asset_doc = None
|
||||
for archived_entity in archived_asset_docs_by_name[asset_name]:
|
||||
archived_parents = (
|
||||
|
|
@ -201,19 +234,6 @@ class ExtractHierarchyToAvalon(pyblish.api.ContextPlugin):
|
|||
if "data" not in asset_doc:
|
||||
asset_doc["data"] = {}
|
||||
cur_entity_data = asset_doc["data"]
|
||||
cur_entity_tasks = cur_entity_data.get("tasks") or {}
|
||||
|
||||
# Tasks
|
||||
data["tasks"] = {}
|
||||
new_tasks = entity_data.get("tasks") or {}
|
||||
for task_name, task_info in new_tasks.items():
|
||||
task_info = deepcopy(task_info)
|
||||
if task_name in cur_entity_tasks:
|
||||
src_task_info = deepcopy(cur_entity_tasks[task_name])
|
||||
src_task_info.update(task_info)
|
||||
task_info = src_task_info
|
||||
|
||||
data["tasks"][task_name] = task_info
|
||||
|
||||
changes = {}
|
||||
for key, value in data.items():
|
||||
|
|
|
|||
|
|
@ -139,7 +139,8 @@
|
|||
"ext": "mp4",
|
||||
"tags": [
|
||||
"burnin",
|
||||
"ftrackreview"
|
||||
"ftrackreview",
|
||||
"kitsureview"
|
||||
],
|
||||
"burnins": [],
|
||||
"ffmpeg_args": {
|
||||
|
|
|
|||
|
|
@ -565,7 +565,17 @@
|
|||
]
|
||||
},
|
||||
"templated_workfile_build": {
|
||||
"profiles": []
|
||||
"profiles": [
|
||||
{
|
||||
"task_types": [
|
||||
"Compositing"
|
||||
],
|
||||
"task_names": [],
|
||||
"path": "{project[name]}/templates/comp.nk",
|
||||
"keep_placeholder": true,
|
||||
"create_first_version": true
|
||||
}
|
||||
]
|
||||
},
|
||||
"filters": {}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -10,7 +10,7 @@
|
|||
"type": "number",
|
||||
"key": "fps",
|
||||
"label": "Frame Rate",
|
||||
"decimal": 2,
|
||||
"decimal": 3,
|
||||
"minimum": 0
|
||||
},
|
||||
{
|
||||
|
|
|
|||
|
|
@ -16,6 +16,9 @@
|
|||
{
|
||||
"shotgridreview": "Add review to Shotgrid"
|
||||
},
|
||||
{
|
||||
"kitsureview": "Add review to Kitsu"
|
||||
},
|
||||
{
|
||||
"delete": "Delete output"
|
||||
},
|
||||
|
|
|
|||
|
|
@ -34,6 +34,12 @@
|
|||
"label": "Keep placeholders",
|
||||
"type": "boolean",
|
||||
"default": true
|
||||
},
|
||||
{
|
||||
"key": "create_first_version",
|
||||
"label": "Create first version",
|
||||
"type": "boolean",
|
||||
"default": true
|
||||
}
|
||||
]
|
||||
}
|
||||
|
|
|
|||
|
|
@ -186,7 +186,7 @@ class AttributeDefinitionsWidget(QtWidgets.QWidget):
|
|||
|
||||
class _BaseAttrDefWidget(QtWidgets.QWidget):
|
||||
# Type 'object' may not work with older PySide versions
|
||||
value_changed = QtCore.Signal(object, uuid.UUID)
|
||||
value_changed = QtCore.Signal(object, str)
|
||||
|
||||
def __init__(self, attr_def, parent):
|
||||
super(_BaseAttrDefWidget, self).__init__(parent)
|
||||
|
|
|
|||
|
|
@@ -295,10 +295,10 @@ class SubsetWidget(QtWidgets.QWidget):
        self.model.set_grouping(state)

    def _subset_changed(self, text):
        if hasattr(self.proxy, "setFilterRegularExpression"):
            self.proxy.setFilterRegularExpression(text)
        else:
        if hasattr(self.proxy, "setFilterRegExp"):
            self.proxy.setFilterRegExp(text)
        else:
            self.proxy.setFilterRegularExpression(text)
        self.view.expandAll()

    def set_loading_state(self, loading, empty):
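This `hasattr` guard recurs in several tools below: `setFilterRegExp` is the Qt 4/5 style API (removed in PyQt6/PySide6), while `setFilterRegularExpression` is its replacement available since Qt 5.12, so the code probes for the legacy call first. A condensed sketch of the pattern (the helper name is illustrative, not part of the commit):

```python
def set_filter_text(proxy, text):
    """Apply a text filter on a QSortFilterProxyModel across Qt bindings."""
    if hasattr(proxy, "setFilterRegExp"):
        # Qt 4/5 style API; removed in PyQt6/PySide6
        proxy.setFilterRegExp(text)
    else:
        # Replacement API available since Qt 5.12
        proxy.setFilterRegularExpression(text)
```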
@@ -83,15 +83,18 @@ class NumberDelegate(QtWidgets.QStyledItemDelegate):
        decimals(int): How many decimal points can be used. Float will be used
            as value if it is higher than 0.
    """
    def __init__(self, minimum, maximum, decimals, *args, **kwargs):
    def __init__(self, minimum, maximum, decimals, step, *args, **kwargs):
        super(NumberDelegate, self).__init__(*args, **kwargs)
        self.minimum = minimum
        self.maximum = maximum
        self.decimals = decimals
        self.step = step

    def createEditor(self, parent, option, index):
        if self.decimals > 0:
            editor = DoubleSpinBoxScrollFixed(parent)
            editor.setSingleStep(self.step)
            editor.setDecimals(self.decimals)
        else:
            editor = SpinBoxScrollFixed(parent)
@@ -26,10 +26,11 @@ class NameDef:


class NumberDef:
    def __init__(self, minimum=None, maximum=None, decimals=None):
    def __init__(self, minimum=None, maximum=None, decimals=None, step=None):
        self.minimum = 0 if minimum is None else minimum
        self.maximum = 999999999 if maximum is None else maximum
        self.decimals = 0 if decimals is None else decimals
        self.step = 1 if step is None else step


class TypeDef:
@@ -73,14 +74,14 @@ class HierarchyView(QtWidgets.QTreeView):
        "type": TypeDef(),
        "frameStart": NumberDef(1),
        "frameEnd": NumberDef(1),
        "fps": NumberDef(1, decimals=2),
        "fps": NumberDef(1, decimals=3, step=1),
        "resolutionWidth": NumberDef(0),
        "resolutionHeight": NumberDef(0),
        "handleStart": NumberDef(0),
        "handleEnd": NumberDef(0),
        "clipIn": NumberDef(1),
        "clipOut": NumberDef(1),
        "pixelAspect": NumberDef(0, decimals=2),
        "pixelAspect": NumberDef(0, decimals=2, step=0.01),
        "tools_env": ToolsDef()
    }

@@ -96,6 +97,10 @@ class HierarchyView(QtWidgets.QTreeView):
            "stretch": QtWidgets.QHeaderView.Interactive,
            "width": 140
        },
        "fps": {
            "stretch": QtWidgets.QHeaderView.Interactive,
            "width": 65
        },
        "tools_env": {
            "stretch": QtWidgets.QHeaderView.Interactive,
            "width": 200
@@ -148,7 +153,8 @@ class HierarchyView(QtWidgets.QTreeView):
            delegate = NumberDelegate(
                item_type.minimum,
                item_type.maximum,
                item_type.decimals
                item_type.decimals,
                item_type.step
            )

        elif isinstance(item_type, TypeDef):
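What the new `step` plumbing achieves, shown with stock Qt spin boxes (a sketch; `DoubleSpinBoxScrollFixed`/`SpinBoxScrollFixed` are OpenPype's scroll-safe subclasses, represented here by the plain widgets):

```python
from qtpy import QtWidgets


def make_number_editor(parent, minimum, maximum, decimals, step):
    if decimals > 0:
        # Float editor: fps gets decimals=3 with step=1,
        # pixelAspect steps in 0.01 increments
        editor = QtWidgets.QDoubleSpinBox(parent)
        editor.setDecimals(decimals)
        editor.setSingleStep(step)
    else:
        # Integer editor keeps the default single step of 1
        editor = QtWidgets.QSpinBox(parent)
    editor.setRange(minimum, maximum)
    return editor
```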
@@ -482,10 +482,10 @@ class FilterProxyModel(QtCore.QSortFilterProxyModel):
            return True

        # Filter by regex
        if hasattr(self, "filterRegularExpression"):
            regex = self.filterRegularExpression()
        else:
        if hasattr(self, "filterRegExp"):
            regex = self.filterRegExp()
        else:
            regex = self.filterRegularExpression()
        pattern = regex.pattern()
        if pattern:
            pattern = re.escape(pattern)
@@ -160,10 +160,10 @@ class SceneInventoryWindow(QtWidgets.QDialog):
        self._model.set_hierarchy_view(enabled)

    def _on_text_filter_change(self, text_filter):
        if hasattr(self._proxy, "setFilterRegularExpression"):
            self._proxy.setFilterRegularExpression(text_filter)
        else:
        if hasattr(self._proxy, "setFilterRegExp"):
            self._proxy.setFilterRegExp(text_filter)
        else:
            self._proxy.setFilterRegularExpression(text_filter)

    def _on_outdated_state_change(self):
        self._proxy.set_filter_outdated(
@@ -272,7 +272,7 @@ class SitesWidget(QtWidgets.QWidget):
        )

        site_configs = sync_server_module.get_all_site_configs(
            self._project_name)
            self._project_name, local_editable_only=True)

        roots_entity = (
            self.project_settings[PROJECT_ANATOMY_KEY][LOCAL_ROOTS_KEY]
@@ -27,10 +27,10 @@ class RecursiveSortFilterProxyModel(QtCore.QSortFilterProxyModel):
        if not parent.isValid():
            return False

        if hasattr(self, "filterRegularExpression"):
            regex = self.filterRegularExpression()
        else:
        if hasattr(self, "filterRegExp"):
            regex = self.filterRegExp()
        else:
            regex = self.filterRegularExpression()

        pattern = regex.pattern()
        if pattern and regex.isValid():
@@ -111,10 +111,10 @@ class SearchEntitiesDialog(QtWidgets.QDialog):

    def _on_filter_timer(self):
        text = self._filter_edit.text()
        if hasattr(self._proxy, "setFilterRegularExpression"):
            self._proxy.setFilterRegularExpression(text)
        else:
        if hasattr(self._proxy, "setFilterRegExp"):
            self._proxy.setFilterRegExp(text)
        else:
            self._proxy.setFilterRegularExpression(text)

        # WARNING This expanding and resizing is relatively slow.
        self._view.expandAll()
@@ -5,10 +5,10 @@ from qtpy import QtCore
class RecursiveSortFilterProxyModel(QtCore.QSortFilterProxyModel):
    """Filters to the regex if any of the children matches allow parent"""
    def filterAcceptsRow(self, row, parent):
        if hasattr(self, "filterRegularExpression"):
            regex = self.filterRegularExpression()
        else:
        if hasattr(self, "filterRegExp"):
            regex = self.filterRegExp()
        else:
            regex = self.filterRegularExpression()
        pattern = regex.pattern()
        if pattern:
            model = self.sourceModel()
@@ -202,11 +202,20 @@ class RecursiveSortFilterProxyModel(QtCore.QSortFilterProxyModel):
    Use case: Filtering by string - parent won't be filtered if it does not
    match the filter string, but first checks if any children do.
    """

    def __init__(self, *args, **kwargs):
        super(RecursiveSortFilterProxyModel, self).__init__(*args, **kwargs)
        recursive_enabled = False
        if hasattr(self, "setRecursiveFilteringEnabled"):
            self.setRecursiveFilteringEnabled(True)
            recursive_enabled = True
        self._recursive_enabled = recursive_enabled

    def filterAcceptsRow(self, row, parent_index):
        if hasattr(self, "filterRegularExpression"):
            regex = self.filterRegularExpression()
        else:
        if hasattr(self, "filterRegExp"):
            regex = self.filterRegExp()
        else:
            regex = self.filterRegularExpression()

        pattern = regex.pattern()
        if pattern:
@@ -219,8 +228,9 @@ class RecursiveSortFilterProxyModel(QtCore.QSortFilterProxyModel):

        # Check current index itself
        value = model.data(source_index, self.filterRole())
        if re.search(pattern, value, re.IGNORECASE):
            return True
        matched = bool(re.search(pattern, value, re.IGNORECASE))
        if matched or self._recursive_enabled:
            return matched

        rows = model.rowCount(source_index)
        for idx in range(rows):
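Taken together, the two hunks above implement recursive filtering with a manual fallback: Qt 5.10 added `setRecursiveFilteringEnabled`, and where it is missing the proxy walks the children itself. A condensed, self-contained sketch of the combined logic (class name illustrative):

```python
import re

from qtpy import QtCore


class RecursiveFilterSketch(QtCore.QSortFilterProxyModel):
    def __init__(self, *args, **kwargs):
        super(RecursiveFilterSketch, self).__init__(*args, **kwargs)
        # Qt >= 5.10 can recurse into children natively
        self._recursive_enabled = hasattr(
            self, "setRecursiveFilteringEnabled")
        if self._recursive_enabled:
            self.setRecursiveFilteringEnabled(True)

    def filterAcceptsRow(self, row, parent_index):
        if hasattr(self, "filterRegExp"):
            pattern = self.filterRegExp().pattern()
        else:
            pattern = self.filterRegularExpression().pattern()
        if not pattern:
            return True

        model = self.sourceModel()
        source_index = model.index(row, self.filterKeyColumn(), parent_index)
        value = model.data(source_index, self.filterRole()) or ""
        matched = bool(re.search(pattern, value, re.IGNORECASE))
        if matched or self._recursive_enabled:
            # With native recursion Qt accepts parents of matches itself
            return matched

        # Manual fallback: accept the row if any of its children matches
        return any(
            self.filterAcceptsRow(idx, source_index)
            for idx in range(model.rowCount(source_index))
        )
```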
@@ -1,3 +1,3 @@
# -*- coding: utf-8 -*-
"""Package declaring Pype version."""
__version__ = "3.15.2-nightly.3"
__version__ = "3.15.2-nightly.6"
102
openpype/widgets/README.md
Normal file
@@ -0,0 +1,102 @@
# Widgets

## Splash Screen

This widget is used for executing a process on a different thread and monitoring its progress.

To properly use this widget, certain preparation has to be done in order to correctly execute the process and show the
splash screen.

### Prerequisites

In order to run a function or an operation on another thread, a `QtCore.QObject` class needs to be created with the
desired code. The class has to have a method as an entry point for the thread to execute the code.

For utilizing the functionalities of the splash screen, certain signals need to be declared to let it know what is
happening in the thread and how it is progressing. It is also recommended to have a function to set up certain
variables which are needed in the worker's code.

For example:
```python
from qtpy import QtCore


class ExampleWorker(QtCore.QObject):

    finished = QtCore.Signal()
    failed = QtCore.Signal(str)
    progress = QtCore.Signal(int)
    log = QtCore.Signal(str)
    stage_begin = QtCore.Signal(str)

    foo = None
    bar = None

    def run(self):
        # The code goes here
        print("Hello world!")
        self.finished.emit()

    def setup(self,
              foo: str,
              bar: str):
        self.foo = foo
        self.bar = bar
```

### Creating the splash screen

```python
import os
from pathlib import Path

from qtpy import QtCore

from openpype.widgets.splash_screen import SplashScreen
from openpype import resources


def exec_plugin_install(engine_path: Path, env: dict = None):
    env = env or os.environ
    q_thread = QtCore.QThread()
    example_worker = ExampleWorker()

    q_thread.started.connect(example_worker.run)
    example_worker.setup(engine_path, env)
    example_worker.moveToThread(q_thread)

    splash_screen = SplashScreen("Executing process ...",
                                 resources.get_openpype_icon_filepath())

    # set up the splash screen with necessary events
    example_worker.stage_begin.connect(splash_screen.update_top_label_text)
    example_worker.progress.connect(splash_screen.update_progress)
    example_worker.log.connect(splash_screen.append_log)
    example_worker.finished.connect(splash_screen.quit_and_close)
    example_worker.failed.connect(splash_screen.fail)

    splash_screen.start_thread(q_thread)
    splash_screen.show_ui()
```

In this example code, before executing the process, the worker needs to be instantiated and moved onto a newly created
`QtCore.QThread` object. After this, the needed signals have to be connected to the desired slots to make full use of
the splash screen. Finally, `start_thread` and `show_ui` are called.

**Note that when the `show_ui` function is called, the calling thread is blocked until the splash screen quits
automatically or is closed by the user in case the process fails! The `start_thread` method must therefore be called
before showing the UI!**

The most important signals are
```python
q_thread.started.connect(example_worker.run)
```
and
```python
example_worker.finished.connect(splash_screen.quit_and_close)
```

These ensure that when the `start_thread` method is called (which takes the `QtCore.QThread` object as a parameter and
saves it as a reference), the `QThread` object starts and signals the worker to start executing its own code. Once the
worker is done and signals that it has finished, the `quit_and_close` slot quits the `QtCore.QThread` and the splash
screen closes itself.

It is highly recommended to also use the `fail` slot in case an exception or other error occurs during the execution of
the worker's code (in this case you would use the `failed` signal of the `ExampleWorker`).
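A failure path for the worker could look like the following sketch, an extended `run` for the `ExampleWorker` above (`do_work` is a hypothetical stand-in for the worker's actual job):

```python
    def run(self):
        try:
            do_work()  # hypothetical: the worker's actual job
        except Exception as exc:
            # `fail` keeps the splash screen open so the user can read the log
            self.failed.emit("Process failed: {}".format(exc))
            return
        self.finished.emit()
```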
@@ -98,15 +98,22 @@ class Popup(QtWidgets.QDialog):
        height = window.height()
        height = max(height, window.sizeHint().height())

        desktop_geometry = QtWidgets.QDesktopWidget().availableGeometry()
        screen_geometry = window.geometry()
        try:
            screen = window.screen()
            desktop_geometry = screen.availableGeometry()
        except AttributeError:
            # Backwards compatibility for older Qt versions
            # PySide6 removed QDesktopWidget
            desktop_geometry = QtWidgets.QDesktopWidget().availableGeometry()

        screen_width = screen_geometry.width()
        screen_height = screen_geometry.height()
        window_geometry = window.geometry()

        screen_width = window_geometry.width()
        screen_height = window_geometry.height()

        # Calculate width and height of system tray
        systray_width = screen_geometry.width() - desktop_geometry.width()
        systray_height = screen_geometry.height() - desktop_geometry.height()
        systray_width = window_geometry.width() - desktop_geometry.width()
        systray_height = window_geometry.height() - desktop_geometry.height()

        padding = 10
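The pattern above generalizes: `QWidget.screen()` exists since Qt 5.14, while `QDesktopWidget` was removed in PySide6/PyQt6, so the available geometry is resolved with a try/except. A minimal sketch (helper name illustrative):

```python
from qtpy import QtWidgets


def get_available_geometry(window):
    try:
        # Qt >= 5.14: resolve the screen the window actually sits on
        return window.screen().availableGeometry()
    except AttributeError:
        # Older bindings: fall back to QDesktopWidget (removed in Qt 6)
        return QtWidgets.QDesktopWidget().availableGeometry()
```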
258
openpype/widgets/splash_screen.py
Normal file
@@ -0,0 +1,258 @@
from qtpy import QtWidgets, QtCore, QtGui
from openpype import style, resources
from igniter.nice_progress_bar import NiceProgressBar


class SplashScreen(QtWidgets.QDialog):
    """Splash screen for executing a process on another thread. It is able
    to inform about the progress of the process and log given information.
    """

    splash_icon = None
    top_label = None
    show_log_btn: QtWidgets.QPushButton = None
    progress_bar = None
    log_text: QtWidgets.QLabel = None
    scroll_area: QtWidgets.QScrollArea = None
    close_btn: QtWidgets.QPushButton = None
    scroll_bar: QtWidgets.QScrollBar = None

    is_log_visible = False
    is_scroll_auto = True

    thread_return_code = None
    q_thread: QtCore.QThread = None

    def __init__(self,
                 window_title: str,
                 splash_icon=None,
                 window_icon=None):
        """
        Args:
            window_title (str): String which sets the window title
            splash_icon (str | bytes | None): A resource (pic) which is used
                for the splash icon
            window_icon (str | bytes | None): A resource (pic) which is used
                for the window's icon
        """
        super(SplashScreen, self).__init__()

        if splash_icon is None:
            splash_icon = resources.get_openpype_icon_filepath()

        if window_icon is None:
            window_icon = resources.get_openpype_icon_filepath()

        self.splash_icon = splash_icon
        self.setWindowIcon(QtGui.QIcon(window_icon))
        self.setWindowTitle(window_title)
        self.init_ui()

    def was_proc_successful(self) -> bool:
        if self.thread_return_code == 0:
            return True
        return False

    def start_thread(self, q_thread: QtCore.QThread):
        """Saves the reference to this thread and starts it.

        Args:
            q_thread (QtCore.QThread): A QThread containing a given worker
                (QtCore.QObject)

        Returns:
            None
        """
        if not q_thread:
            raise RuntimeError("Failed to run a worker thread! "
                               "The thread is null!")

        self.q_thread = q_thread
        self.q_thread.start()

    @QtCore.Slot()
    def quit_and_close(self):
        """Quits the thread and closes the splash screen. Note that this means
        the thread has exited with the return code 0!

        Returns:
            None
        """
        self.thread_return_code = 0
        self.q_thread.quit()
        self.close()

    @QtCore.Slot()
    def toggle_log(self):
        if self.is_log_visible:
            self.scroll_area.hide()
            width = self.width()
            self.adjustSize()
            self.resize(width, self.height())
        else:
            self.scroll_area.show()
            self.scroll_bar.setValue(self.scroll_bar.maximum())
            self.resize(self.width(), 300)

        self.is_log_visible = not self.is_log_visible

    def show_ui(self):
        """Shows the splash screen. BEWARE THAT THIS FUNCTION IS BLOCKING
        (The execution of code can not proceed further beyond this function
        until the splash screen is closed!)

        Returns:
            None
        """
        self.show()
        self.exec_()

    def init_ui(self):
        self.resize(450, 100)
        self.setMinimumWidth(250)
        self.setStyleSheet(style.load_stylesheet())

        # Top Section
        self.top_label = QtWidgets.QLabel(self)
        self.top_label.setText("Starting process ...")
        self.top_label.setWordWrap(True)

        icon = QtWidgets.QLabel(self)
        icon.setPixmap(QtGui.QPixmap(self.splash_icon))
        icon.setFixedHeight(45)
        icon.setFixedWidth(45)
        icon.setScaledContents(True)

        self.close_btn = QtWidgets.QPushButton(self)
        self.close_btn.setText("Quit")
        self.close_btn.clicked.connect(self.close)
        self.close_btn.setFixedWidth(80)
        self.close_btn.hide()

        self.show_log_btn = QtWidgets.QPushButton(self)
        self.show_log_btn.setText("Show log")
        self.show_log_btn.setFixedWidth(80)
        self.show_log_btn.clicked.connect(self.toggle_log)

        button_layout = QtWidgets.QVBoxLayout()
        button_layout.addWidget(self.show_log_btn)
        button_layout.addWidget(self.close_btn)

        # Progress Bar
        self.progress_bar = NiceProgressBar()
        self.progress_bar.setValue(0)
        self.progress_bar.setAlignment(QtCore.Qt.AlignTop)

        # Log Content
        self.scroll_area = QtWidgets.QScrollArea(self)
        self.scroll_area.hide()
        log_widget = QtWidgets.QWidget(self.scroll_area)
        self.scroll_area.setWidgetResizable(True)
        self.scroll_area.setHorizontalScrollBarPolicy(
            QtCore.Qt.ScrollBarAlwaysOn
        )
        self.scroll_area.setVerticalScrollBarPolicy(
            QtCore.Qt.ScrollBarAlwaysOn
        )
        self.scroll_area.setWidget(log_widget)

        self.scroll_bar = self.scroll_area.verticalScrollBar()
        self.scroll_bar.sliderMoved.connect(self.on_scroll)

        self.log_text = QtWidgets.QLabel(self)
        self.log_text.setText('')
        self.log_text.setAlignment(QtCore.Qt.AlignTop)

        log_layout = QtWidgets.QVBoxLayout(log_widget)
        log_layout.addWidget(self.log_text)

        top_layout = QtWidgets.QHBoxLayout()
        top_layout.setAlignment(QtCore.Qt.AlignTop)
        top_layout.addWidget(icon)
        top_layout.addSpacing(10)
        top_layout.addWidget(self.top_label)
        top_layout.addSpacing(10)
        top_layout.addLayout(button_layout)

        main_layout = QtWidgets.QVBoxLayout(self)
        main_layout.addLayout(top_layout)
        main_layout.addSpacing(10)
        main_layout.addWidget(self.progress_bar)
        main_layout.addSpacing(10)
        main_layout.addWidget(self.scroll_area)

        self.setWindowFlags(
            QtCore.Qt.Window
            | QtCore.Qt.CustomizeWindowHint
            | QtCore.Qt.WindowTitleHint
            | QtCore.Qt.WindowMinimizeButtonHint
        )

        desktop_rect = QtWidgets.QApplication.desktop().availableGeometry(self)
        center = desktop_rect.center()
        self.move(
            center.x() - (self.width() * 0.5),
            center.y() - (self.height() * 0.5)
        )

    @QtCore.Slot(int)
    def update_progress(self, value: int):
        self.progress_bar.setValue(value)

    @QtCore.Slot(str)
    def update_top_label_text(self, text: str):
        self.top_label.setText(text)

    @QtCore.Slot(str, str)
    def append_log(self, text: str, end: str = ''):
        """A slot used for receiving log info and appending it to the scroll
        area's content.

        Args:
            text (str): A log text that will be appended to the current one in
                the scroll area.
            end (str): End string which can be appended to the end of the
                given line (for ex. a line break).

        Returns:
            None
        """
        self.log_text.setText(self.log_text.text() + text + end)
        if self.is_scroll_auto:
            self.scroll_bar.setValue(self.scroll_bar.maximum())

    @QtCore.Slot(int)
    def on_scroll(self, position: int):
        """A slot for the vertical scroll bar's movement. This ensures the
        auto-scrolling feature of the scroll area when the scroll bar is at
        its maximum value.

        Args:
            position (int): Position value of the scroll bar.

        Returns:
            None
        """
        if self.scroll_bar.maximum() == position:
            self.is_scroll_auto = True
            return

        self.is_scroll_auto = False

    @QtCore.Slot(str, int)
    def fail(self, text: str, return_code: int = 1):
        """A slot used for signals which can emit when a worker (process) has
        failed. At this moment the splash screen doesn't close by itself;
        it has to be closed by the user.

        Args:
            text (str): A text which can be set to the top label.
            return_code (int): Return code of the thread's code.

        Returns:
            None
        """
        self.top_label.setText(text)
        self.close_btn.show()
        self.thread_return_code = return_code
        self.q_thread.exit(return_code)
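For completeness, a minimal caller sketch that checks `was_proc_successful` after the dialog closes (assumes a running `QApplication` and a worker like the README's hypothetical `ExampleWorker`):

```python
from qtpy import QtCore

from openpype.widgets.splash_screen import SplashScreen

splash_screen = SplashScreen("Running process ...")
worker = ExampleWorker()  # hypothetical worker from the README above
q_thread = QtCore.QThread()
worker.moveToThread(q_thread)

q_thread.started.connect(worker.run)
worker.finished.connect(splash_screen.quit_and_close)
worker.failed.connect(splash_screen.fail)

splash_screen.start_thread(q_thread)
splash_screen.show_ui()  # blocks until quit_and_close() or a manual close

if not splash_screen.was_proc_successful():
    print("Worker exited with code", splash_screen.thread_return_code)
```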