Mirror of https://github.com/ynput/ayon-core.git (synced 2025-12-24 21:04:40 +01:00)

Commit fd18973536: Merge branch 'develop' into enhancement/AY-1009_Houdini-Local-Rendering
133 changed files with 4590 additions and 2709 deletions

@@ -60,7 +60,7 @@ def main(*subprocess_args):
        )
    )
-   elif os.environ.get("AVALON_PHOTOSHOP_WORKFILES_ON_LAUNCH", True):
+   elif os.environ.get("AVALON_AFTEREFFECTS_WORKFILES_ON_LAUNCH", True):
        save = False
        if os.getenv("WORKFILES_SAVE_AS"):
            save = True

@@ -8,14 +8,11 @@ from ayon_core.lib import Logger, register_event_callback
 from ayon_core.pipeline import (
     register_loader_plugin_path,
     register_creator_plugin_path,
+    register_workfile_build_plugin_path,
     AVALON_CONTAINER_ID,
     AVALON_INSTANCE_ID,
     AYON_INSTANCE_ID,
 )
-from ayon_core.hosts.aftereffects.api.workfile_template_builder import (
-    AEPlaceholderLoadPlugin,
-    AEPlaceholderCreatePlugin
-)
 from ayon_core.pipeline.load import any_outdated_containers
 import ayon_core.hosts.aftereffects

@@ -40,6 +37,7 @@ PLUGINS_DIR = os.path.join(HOST_DIR, "plugins")
 PUBLISH_PATH = os.path.join(PLUGINS_DIR, "publish")
 LOAD_PATH = os.path.join(PLUGINS_DIR, "load")
 CREATE_PATH = os.path.join(PLUGINS_DIR, "create")
+WORKFILE_BUILD_PATH = os.path.join(PLUGINS_DIR, "workfile_build")


 class AfterEffectsHost(HostBase, IWorkfileHost, ILoadHost, IPublishHost):

@@ -76,6 +74,7 @@ class AfterEffectsHost(HostBase, IWorkfileHost, ILoadHost, IPublishHost):
         register_loader_plugin_path(LOAD_PATH)
         register_creator_plugin_path(CREATE_PATH)
+        register_workfile_build_plugin_path(WORKFILE_BUILD_PATH)

         register_event_callback("application.launched", application_launch)

@@ -118,12 +117,6 @@ class AfterEffectsHost(HostBase, IWorkfileHost, ILoadHost, IPublishHost):
         item["id"] = "publish_context"
         self.stub.imprint(item["id"], item)

-    def get_workfile_build_placeholder_plugins(self):
-        return [
-            AEPlaceholderLoadPlugin,
-            AEPlaceholderCreatePlugin
-        ]
-
     # created instances section
     def list_instances(self):
         """List all created instances from current workfile which

@@ -1,6 +1,7 @@
 import os.path
 import uuid
 import shutil
+from abc import abstractmethod

 from ayon_core.pipeline import registered_host
 from ayon_core.tools.workfile_template_build import (

@@ -9,13 +10,9 @@ from ayon_core.tools.workfile_template_build import (
 from ayon_core.pipeline.workfile.workfile_template_builder import (
     AbstractTemplateBuilder,
     PlaceholderPlugin,
-    LoadPlaceholderItem,
-    CreatePlaceholderItem,
-    PlaceholderLoadMixin,
-    PlaceholderCreateMixin
+    PlaceholderItem
 )
 from ayon_core.hosts.aftereffects.api import get_stub
-from ayon_core.hosts.aftereffects.api.lib import set_settings

 PLACEHOLDER_SET = "PLACEHOLDERS_SET"
 PLACEHOLDER_ID = "openpype.placeholder"

@@ -51,6 +48,10 @@ class AETemplateBuilder(AbstractTemplateBuilder):
 class AEPlaceholderPlugin(PlaceholderPlugin):
     """Contains generic methods for all PlaceholderPlugins."""

+    @abstractmethod
+    def _create_placeholder_item(self, item_data: dict) -> PlaceholderItem:
+        pass
+
     def collect_placeholders(self):
         """Collect info from file metadata about created placeholders.

@@ -63,17 +64,7 @@ class AEPlaceholderPlugin(PlaceholderPlugin):
             if item.get("plugin_identifier") != self.identifier:
                 continue

-            if isinstance(self, AEPlaceholderLoadPlugin):
-                item = LoadPlaceholderItem(item["uuid"],
-                                           item["data"],
-                                           self)
-            elif isinstance(self, AEPlaceholderCreatePlugin):
-                item = CreatePlaceholderItem(item["uuid"],
-                                             item["data"],
-                                             self)
-            else:
-                raise NotImplementedError(f"Not implemented for {type(self)}")
-
+            item = self._create_placeholder_item(item)
             output.append(item)

         return output

@@ -135,87 +126,6 @@ class AEPlaceholderPlugin(PlaceholderPlugin):
         stub.imprint(item_id, container_data)

-    (removed) class AEPlaceholderCreatePlugin(AEPlaceholderPlugin, PlaceholderCreateMixin)
-    (removed) class AEPlaceholderLoadPlugin(AEPlaceholderPlugin, PlaceholderLoadMixin)
     Both classes were moved out of this module; they now live, extended with the new
     _create_placeholder_item() hook, in the two new workfile_build plugin files shown below.


 def build_workfile_template(*args, **kwargs):
     builder = AETemplateBuilder(registered_host())
     builder.build_template(*args, **kwargs)

@@ -24,7 +24,7 @@ class AERenderInstance(RenderInstance):

 class CollectAERender(publish.AbstractCollectRender):

-    order = pyblish.api.CollectorOrder + 0.405
+    order = pyblish.api.CollectorOrder + 0.100
     label = "Collect After Effects Render Layers"
     hosts = ["aftereffects"]

@@ -145,6 +145,7 @@ class CollectAERender(publish.AbstractCollectRender):
         if "review" in instance.families:
             # to skip ExtractReview locally
             instance.families.remove("review")
+        instance.deadline = inst.data.get("deadline")

         instances.append(instance)

@@ -0,0 +1,49 @@
from ayon_core.pipeline.workfile.workfile_template_builder import (
    CreatePlaceholderItem,
    PlaceholderCreateMixin
)
from ayon_core.hosts.aftereffects.api import get_stub
from ayon_core.hosts.aftereffects.api.lib import set_settings
import ayon_core.hosts.aftereffects.api.workfile_template_builder as wtb


class AEPlaceholderCreatePlugin(wtb.AEPlaceholderPlugin,
                                PlaceholderCreateMixin):
    """Adds Create placeholder.

    This adds composition and runs Create
    """
    identifier = "aftereffects.create"
    label = "AfterEffects create"

    def _create_placeholder_item(self, item_data) -> CreatePlaceholderItem:
        return CreatePlaceholderItem(
            scene_identifier=item_data["uuid"],
            data=item_data["data"],
            plugin=self
        )

    def create_placeholder(self, placeholder_data):
        stub = get_stub()
        name = "CREATEPLACEHOLDER"
        item_id = stub.add_item(name, "COMP")

        self._imprint_item(item_id, name, placeholder_data, stub)

    def populate_placeholder(self, placeholder):
        """Replace 'placeholder' with publishable instance.

        Renames prepared composition name, creates publishable instance, sets
        frame/duration settings according to DB.
        """
        pre_create_data = {"use_selection": True}
        item_id, item = self._get_item(placeholder)
        get_stub().select_items([item_id])
        self.populate_create_placeholder(placeholder, pre_create_data)

        # apply settings for populated composition
        item_id, metadata_item = self._get_item(placeholder)
        set_settings(True, True, [item_id])

    def get_placeholder_options(self, options=None):
        return self.get_create_plugin_options(options)

@@ -0,0 +1,60 @@
from ayon_core.pipeline.workfile.workfile_template_builder import (
    LoadPlaceholderItem,
    PlaceholderLoadMixin
)
from ayon_core.hosts.aftereffects.api import get_stub
import ayon_core.hosts.aftereffects.api.workfile_template_builder as wtb


class AEPlaceholderLoadPlugin(wtb.AEPlaceholderPlugin, PlaceholderLoadMixin):
    identifier = "aftereffects.load"
    label = "AfterEffects load"

    def _create_placeholder_item(self, item_data) -> LoadPlaceholderItem:
        return LoadPlaceholderItem(
            scene_identifier=item_data["uuid"],
            data=item_data["data"],
            plugin=self
        )

    def create_placeholder(self, placeholder_data):
        """Creates AE's Placeholder item in Project items list.

        Sets dummy resolution/duration/fps settings, will be replaced when
        populated.
        """
        stub = get_stub()
        name = "LOADERPLACEHOLDER"
        item_id = stub.add_placeholder(name, 1920, 1060, 25, 10)

        self._imprint_item(item_id, name, placeholder_data, stub)

    def populate_placeholder(self, placeholder):
        """Use Openpype Loader from `placeholder` to create new FootageItems

        New FootageItems are created, files are imported.
        """
        self.populate_load_placeholder(placeholder)
        errors = placeholder.get_errors()
        stub = get_stub()
        if errors:
            stub.print_msg("\n".join(errors))
        else:
            if not placeholder.data["keep_placeholder"]:
                metadata = stub.get_metadata()
                for item in metadata:
                    if not item.get("is_placeholder"):
                        continue
                    scene_identifier = item.get("uuid")
                    if (scene_identifier and
                            scene_identifier == placeholder.scene_identifier):
                        stub.delete_item(item["members"][0])
                stub.remove_instance(placeholder.scene_identifier, metadata)

    def get_placeholder_options(self, options=None):
        return self.get_load_plugin_options(options)

    def load_succeed(self, placeholder, container):
        placeholder_item_id, _ = self._get_item(placeholder)
        item_id = container.id
        get_stub().add_item_instead_placeholder(placeholder_item_id, item_id)

@@ -33,7 +33,7 @@ def load_scripts(paths):
         if register:
             try:
                 register()
-            except:
+            except:  # noqa E722
                 traceback.print_exc()
         else:
             print("\nWarning! '%s' has no register function, "

@@ -45,7 +45,7 @@ def load_scripts(paths):
         if unregister:
             try:
                 unregister()
-            except:
+            except:  # noqa E722
                 traceback.print_exc()

     def test_reload(mod):

@@ -57,7 +57,7 @@ def load_scripts(paths):

         try:
             return importlib.reload(mod)
-        except:
+        except:  # noqa E722
             traceback.print_exc()

     def test_register(mod):

@@ -143,13 +143,19 @@ def deselect_all():
         if obj.mode != 'OBJECT':
             modes.append((obj, obj.mode))
-            bpy.context.view_layer.objects.active = obj
-            bpy.ops.object.mode_set(mode='OBJECT')
+            context_override = create_blender_context(active=obj)
+            with bpy.context.temp_override(**context_override):
+                bpy.ops.object.mode_set(mode='OBJECT')

-    bpy.ops.object.select_all(action='DESELECT')
+    context_override = create_blender_context()
+    with bpy.context.temp_override(**context_override):
+        bpy.ops.object.select_all(action='DESELECT')

     for p in modes:
-        bpy.context.view_layer.objects.active = p[0]
-        bpy.ops.object.mode_set(mode=p[1])
+        context_override = create_blender_context(active=p[0])
+        with bpy.context.temp_override(**context_override):
+            bpy.ops.object.mode_set(mode=p[1])

     bpy.context.view_layer.objects.active = active

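Newer Blender versions deprecate passing an override dict straight into operator calls in favour of bpy.context.temp_override() (added in Blender 3.2), which is what the hunk above switches to. A minimal sketch of that pattern, assuming it runs inside Blender; minimal_context_override() here is a simplified stand-in for the repository's create_blender_context() helper:

    import bpy

    def minimal_context_override(active=None):
        # Build a small override dict pointing at the first 3D viewport.
        window = bpy.context.window_manager.windows[0]
        area = next(a for a in window.screen.areas if a.type == "VIEW_3D")
        override = {"window": window, "screen": window.screen, "area": area}
        if active is not None:
            override["active_object"] = active
            override["selected_objects"] = [active]
        return override

    # Leave Edit/Pose/etc. mode on the active object without mutating bpy.context.
    obj = bpy.context.view_layer.objects.active
    if obj is not None and obj.mode != "OBJECT":
        with bpy.context.temp_override(**minimal_context_override(active=obj)):
            bpy.ops.object.mode_set(mode="OBJECT")
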
@@ -43,7 +43,10 @@ class AbcCameraLoader(plugin.AssetLoader):
     def _process(self, libpath, asset_group, group_name):
         plugin.deselect_all()

-        bpy.ops.wm.alembic_import(filepath=libpath)
+        # Force the creation of the transform cache even if the camera
+        # doesn't have an animation. We use the cache to update the camera.
+        bpy.ops.wm.alembic_import(
+            filepath=libpath, always_add_cache_reader=True)

         objects = lib.get_selection()

@@ -178,12 +181,33 @@ class AbcCameraLoader(plugin.AssetLoader):
             self.log.info("Library already loaded, not updating...")
             return

         mat = asset_group.matrix_basis.copy()
+        for obj in asset_group.children:
+            found = False
+            for constraint in obj.constraints:
+                if constraint.type == "TRANSFORM_CACHE":
+                    constraint.cache_file.filepath = libpath.as_posix()
+                    found = True
+                    break
+            if not found:
+                # This is to keep compatibility with cameras loaded with
+                # the old loader
+                # Create a new constraint for the cache file
+                constraint = obj.constraints.new("TRANSFORM_CACHE")
+                bpy.ops.cachefile.open(filepath=libpath.as_posix())
+                constraint.cache_file = bpy.data.cache_files[-1]
+                constraint.cache_file.scale = 1.0

-        self._remove(asset_group)
-        self._process(str(libpath), asset_group, object_name)
+                # This is a workaround to set the object path. Blender doesn't
+                # load the list of object paths until the object is evaluated.
+                # This is a hack to force the object to be evaluated.
+                # The modifier doesn't need to be removed because camera
+                # objects don't have modifiers.
+                obj.modifiers.new(
+                    name='MeshSequenceCache', type='MESH_SEQUENCE_CACHE')
+                bpy.context.evaluated_depsgraph_get()

         asset_group.matrix_basis = mat
+            constraint.object_path = (
+                constraint.cache_file.object_paths[0].path)

         metadata["libpath"] = str(libpath)
         metadata["representation"] = repre_entity["id"]

@@ -169,7 +169,7 @@ def validate_comp_prefs(comp=None, force_repair=False):
     def _on_repair():
         attributes = dict()
         for key, comp_key, _label in validations:
-            value = folder_value[key]
+            value = folder_attributes[key]
             comp_key_full = "Comp.FrameFormat.{}".format(comp_key)
             attributes[comp_key_full] = value
         comp.SetPrefs(attributes)

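For reference, Fusion's scripting API accepts a dict of preference names in a single Comp.SetPrefs() call, which is what the _on_repair() above relies on. A small sketch of that repair pattern, assuming it runs inside a Fusion scripting session where comp is the current composition; the values below are made up for illustration (the real ones come from the folder attributes):

    # Hypothetical frame-format repair with placeholder values.
    attributes = {
        "Comp.FrameFormat.Width": 1920,
        "Comp.FrameFormat.Height": 1080,
        "Comp.FrameFormat.Rate": 25.0,
    }
    comp.SetPrefs(attributes)
    print(comp.GetPrefs("Comp.FrameFormat.Width"))  # confirm the pref was applied
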
@@ -115,6 +115,7 @@ class CollectFusionRender(
         if "review" in instance.families:
             # to skip ExtractReview locally
             instance.families.remove("review")
+        instance.deadline = inst.data.get("deadline")

         instances.append(instance)

@@ -177,7 +177,10 @@ class CollectFarmRender(publish.AbstractCollectRender):
             outputFormat=info[1],
             outputStartFrame=info[3],
             leadingZeros=info[2],
-            ignoreFrameHandleCheck=True
+            ignoreFrameHandleCheck=True,
+            # todo: inst is not available, must be determined, fix when
+            # reworking to Publisher
+            # deadline=inst.data.get("deadline")
         )
         render_instance.context = context

@@ -8,6 +8,7 @@ from .lib import (
     sync_avalon_data_to_workfile,
     launch_workfiles_app,
     before_project_save,
+    apply_colorspace_project
 )
 from .tags import add_tags_to_workfile
 from .menu import update_menu_task_label

@@ -44,6 +45,8 @@ def afterNewProjectCreated(event):
     # reset workfiles startup not to open any more in session
     os.environ["WORKFILES_STARTUP"] = "0"

+    apply_colorspace_project()


 def beforeProjectLoad(event):
     log.info("before project load event...")

@@ -122,6 +125,7 @@ def register_hiero_events():
     except RuntimeError:
         pass


 def register_events():
     """
     Adding all callbacks.

@@ -11,7 +11,6 @@ import warnings
 import json
 import ast
 import secrets
 import shutil
 import hiero

 from qtpy import QtWidgets, QtCore

@@ -36,9 +35,6 @@ from .constants import (
     DEFAULT_SEQUENCE_NAME,
     DEFAULT_BIN_NAME
 )
-from ayon_core.pipeline.colorspace import (
-    get_imageio_config
-)


 class _CTX:

@@ -105,9 +101,9 @@ def flatten(list_):


 def get_current_project(remove_untitled=False):
-    projects = flatten(hiero.core.projects())
+    projects = hiero.core.projects()
     if not remove_untitled:
-        return next(iter(projects))
+        return projects[0]

     # if remove_untitled
     for proj in projects:

@ -1050,18 +1046,68 @@ def _set_hrox_project_knobs(doc, **knobs):
|
|||
|
||||
|
||||
def apply_colorspace_project():
|
||||
project_name = get_current_project_name()
|
||||
# get path to the active project
|
||||
project = get_current_project(remove_untitled=True)
|
||||
current_file = project.path()
|
||||
|
||||
# close the active project
|
||||
project.close()
|
||||
"""Apply colorspaces from settings.
|
||||
|
||||
Due to not being able to set the project settings through the Python API,
|
||||
we need to use some dubious code to find the widgets and set them. It is
|
||||
possible to set the project settings without traversing through the widgets
|
||||
but it involves reading the hrox files from disk with XML, so no in-memory
|
||||
support. See https://community.foundry.com/discuss/topic/137771/change-a-project-s-default-color-transform-with-python # noqa
|
||||
for more details.
|
||||
"""
|
||||
# get presets for hiero
|
||||
project_name = get_current_project_name()
|
||||
imageio = get_project_settings(project_name)["hiero"]["imageio"]
|
||||
presets = imageio.get("workfile")
|
||||
|
||||
# Open Project Settings UI.
|
||||
for act in hiero.ui.registeredActions():
|
||||
if act.objectName() == "foundry.project.settings":
|
||||
act.trigger()
|
||||
|
||||
# Find widgets from their sibling label.
|
||||
labels = {
|
||||
"Working Space:": "workingSpace",
|
||||
"Viewer:": "viewerLut",
|
||||
"Thumbnails:": "thumbnailLut",
|
||||
"Monitor Out:": "monitorOutLut",
|
||||
"8 Bit Files:": "eightBitLut",
|
||||
"16 Bit Files:": "sixteenBitLut",
|
||||
"Log Files:": "logLut",
|
||||
"Floating Point Files:": "floatLut"
|
||||
}
|
||||
widgets = {x: None for x in labels.values()}
|
||||
|
||||
def _recursive_children(widget, labels, widgets):
|
||||
children = widget.children()
|
||||
for count, child in enumerate(children):
|
||||
if isinstance(child, QtWidgets.QLabel):
|
||||
if child.text() in labels.keys():
|
||||
widgets[labels[child.text()]] = children[count + 1]
|
||||
_recursive_children(child, labels, widgets)
|
||||
|
||||
app = QtWidgets.QApplication.instance()
|
||||
title = "Project Settings"
|
||||
for widget in app.topLevelWidgets():
|
||||
if isinstance(widget, QtWidgets.QMainWindow):
|
||||
if widget.windowTitle() != title:
|
||||
continue
|
||||
_recursive_children(widget, labels, widgets)
|
||||
widget.close()
|
||||
|
||||
msg = "Setting value \"{}\" is not a valid option for \"{}\""
|
||||
for key, widget in widgets.items():
|
||||
options = [widget.itemText(i) for i in range(widget.count())]
|
||||
setting_value = presets[key]
|
||||
assert setting_value in options, msg.format(setting_value, key)
|
||||
widget.setCurrentText(presets[key])
|
||||
|
||||
# This code block is for setting up project colorspaces for files on disk.
|
||||
# Due to not having Python API access to set the project settings, the
|
||||
# Foundry recommended way is to modify the hrox files on disk with XML. See
|
||||
# this forum thread for more details;
|
||||
# https://community.foundry.com/discuss/topic/137771/change-a-project-s-default-color-transform-with-python # noqa
|
||||
'''
|
||||
# backward compatibility layer
|
||||
# TODO: remove this after some time
|
||||
config_data = get_imageio_config(
|
||||
|
|
@ -1074,6 +1120,13 @@ def apply_colorspace_project():
|
|||
"ocioConfigName": "custom"
|
||||
})
|
||||
|
||||
# get path to the active project
|
||||
project = get_current_project()
|
||||
current_file = project.path()
|
||||
|
||||
msg = "The project needs to be saved to disk to apply colorspace settings."
|
||||
assert current_file, msg
|
||||
|
||||
# save the workfile as subversion "comment:_colorspaceChange"
|
||||
split_current_file = os.path.splitext(current_file)
|
||||
copy_current_file = current_file
|
||||
|
|
@ -1116,6 +1169,7 @@ def apply_colorspace_project():
|
|||
|
||||
# open the file as current project
|
||||
hiero.core.openProject(copy_current_file)
|
||||
'''
|
||||
|
||||
|
||||
def apply_colorspace_clips():
|
||||
|
|
@@ -1125,10 +1179,8 @@ def apply_colorspace_clips():

     # get presets for hiero
     imageio = get_project_settings(project_name)["hiero"]["imageio"]
-    from pprint import pprint

     presets = imageio.get("regexInputs", {}).get("inputs", {})
-    pprint(presets)
     for clip in clips:
         clip_media_source_path = clip.mediaSource().firstpath()
         clip_name = clip.name()

@@ -144,7 +144,7 @@ def add_tags_to_workfile():
     # Get project task types.
     project_name = get_current_project_name()
     project_entity = ayon_api.get_project(project_name)
-    task_types = project_entity["taskType"]
+    task_types = project_entity["taskTypes"]
     nks_pres_tags["[Tasks]"] = {}
     log.debug("__ tasks: {}".format(task_types))
     for task_type in task_types:

@@ -51,13 +51,12 @@ def open_file(filepath):

     project = hiero.core.projects()[-1]

-    # open project file
-    hiero.core.openProject(filepath.replace(os.path.sep, "/"))
-
-    # close previous project
-    project.close()
-
+    # Close the previous project if it's different from the current project.
+    filepath = filepath.replace(os.path.sep, "/")
+    if project.path().replace(os.path.sep, "/") != filepath:
+        # open project file
+        hiero.core.openProject(filepath)
+        project.close()

     return True

@@ -24,8 +24,14 @@ class SetDefaultDisplayView(PreLaunchHook):
         if not OCIO:
             return

+        # workfile settings added in '0.2.13'
         houdini_color_settings = \
-            self.data["project_settings"]["houdini"]["imageio"]["workfile"]
+            self.data["project_settings"]["houdini"]["imageio"].get("workfile")
+
+        if not houdini_color_settings:
+            self.log.info("Hook 'SetDefaultDisplayView' requires Houdini "
+                          "addon version >= '0.2.13'")
+            return

         if not houdini_color_settings["enabled"]:
             self.log.info(

@@ -18,8 +18,11 @@ class CreateReview(plugin.HoudiniCreator):

     def apply_settings(self, project_settings):
         super(CreateReview, self).apply_settings(project_settings)
-        color_settings = project_settings["houdini"]["imageio"]["workfile"]
-        if color_settings["enabled"]:
+        # workfile settings added in '0.2.13'
+        color_settings = project_settings["houdini"]["imageio"].get(
+            "workfile", {}
+        )
+        if color_settings.get("enabled"):
             self.review_color_space = color_settings.get("review_color_space")

     def create(self, product_name, instance_data, pre_create_data):

@@ -7,7 +7,8 @@ from ayon_core.hosts.houdini.api import lib
 class CollectDataforCache(pyblish.api.InstancePlugin):
     """Collect data for caching to Deadline."""

-    order = pyblish.api.CollectorOrder + 0.04
+    # Run after Collect Frames
+    order = pyblish.api.CollectorOrder + 0.11
     families = ["ass", "pointcache",
                 "mantraifd", "redshiftproxy",
                 "vdbcache"]

@@ -17,7 +17,7 @@ class CollectFrames(pyblish.api.InstancePlugin):
     label = "Collect Frames"
     families = ["vdbcache", "imagesequence", "ass",
                 "mantraifd", "redshiftproxy", "review",
-                "bgeo"]
+                "pointcache"]

     def process(self, instance):

@@ -28,10 +28,15 @@ class ExtractAlembic(publish.Extractor):
         staging_dir = os.path.dirname(output)
         instance.data["stagingDir"] = staging_dir

-        file_name = os.path.basename(output)
+        if instance.data.get("frames"):
+            # list of files
+            files = instance.data["frames"]
+        else:
+            # single file
+            files = os.path.basename(output)

         # We run the render
-        self.log.info("Writing alembic '%s' to '%s'" % (file_name,
+        self.log.info("Writing alembic '%s' to '%s'" % (files,
                                                         staging_dir))

         render_rop(ropnode)

@@ -42,7 +47,7 @@ class ExtractAlembic(publish.Extractor):
         representation = {
             'name': 'abc',
             'ext': 'abc',
-            'files': file_name,
+            'files': files,
             "stagingDir": staging_dir,
         }
         instance.data["representations"].append(representation)

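With the change above, the representation's "files" entry is either a single file name or a list of per-frame file names, depending on whether a collector stored instance.data["frames"]. A reduced, standalone sketch of that branching; the plain dict stands in for a pyblish instance's data:

    import os

    def build_abc_representation(instance_data, output):
        # Prefer the collected frame list (sequence export), otherwise fall
        # back to the single output file produced by the ROP.
        staging_dir = os.path.dirname(output)
        files = instance_data.get("frames") or os.path.basename(output)
        return {
            "name": "abc",
            "ext": "abc",
            "files": files,  # str for one file, list for a sequence
            "stagingDir": staging_dir,
        }

    print(build_abc_representation({}, "/tmp/cache/sphere.abc"))
    print(build_abc_representation(
        {"frames": ["sphere.1001.abc", "sphere.1002.abc"]},
        "/tmp/cache/sphere.1001.abc"))
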
@ -1,7 +1,6 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
import sys
|
||||
import hou
|
||||
import pyblish.api
|
||||
import six
|
||||
|
||||
from ayon_core.pipeline import PublishValidationError
|
||||
|
||||
|
|
@ -26,28 +25,21 @@ class ValidateCopOutputNode(pyblish.api.InstancePlugin):
|
|||
invalid = self.get_invalid(instance)
|
||||
if invalid:
|
||||
raise PublishValidationError(
|
||||
("Output node(s) `{}` are incorrect. "
|
||||
"See plug-in log for details.").format(invalid),
|
||||
title=self.label
|
||||
"Output node '{}' is incorrect. "
|
||||
"See plug-in log for details.".format(invalid),
|
||||
title=self.label,
|
||||
description=(
|
||||
"### Invalid COP output node\n\n"
|
||||
"The output node path for the instance must be set to a "
|
||||
"valid COP node path.\n\nSee the log for more details."
|
||||
)
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def get_invalid(cls, instance):
|
||||
output_node = instance.data.get("output_node")
|
||||
|
||||
import hou
|
||||
|
||||
try:
|
||||
output_node = instance.data["output_node"]
|
||||
except KeyError:
|
||||
six.reraise(
|
||||
PublishValidationError,
|
||||
PublishValidationError(
|
||||
"Can't determine COP output node.",
|
||||
title=cls.__name__),
|
||||
sys.exc_info()[2]
|
||||
)
|
||||
|
||||
if output_node is None:
|
||||
if not output_node:
|
||||
node = hou.node(instance.data.get("instance_node"))
|
||||
cls.log.error(
|
||||
"COP Output node in '%s' does not exist. "
|
||||
|
|
@ -61,8 +53,8 @@ class ValidateCopOutputNode(pyblish.api.InstancePlugin):
|
|||
cls.log.error(
|
||||
"Output node %s is not a COP node. "
|
||||
"COP Path must point to a COP node, "
|
||||
"instead found category type: %s"
|
||||
% (output_node.path(), output_node.type().category().name())
|
||||
"instead found category type: %s",
|
||||
output_node.path(), output_node.type().category().name()
|
||||
)
|
||||
return [output_node.path()]
|
||||
|
||||
|
|
@ -70,9 +62,7 @@ class ValidateCopOutputNode(pyblish.api.InstancePlugin):
|
|||
# is Cop2 to avoid potential edge case scenarios even though
|
||||
# the isinstance check above should be stricter than this category
|
||||
if output_node.type().category().name() != "Cop2":
|
||||
raise PublishValidationError(
|
||||
(
|
||||
"Output node {} is not of category Cop2."
|
||||
" This is a bug..."
|
||||
).format(output_node.path()),
|
||||
title=cls.label)
|
||||
cls.log.error(
|
||||
"Output node %s is not of category Cop2.", output_node.path()
|
||||
)
|
||||
return [output_node.path()]
|
||||
|
|
|
|||
|
|
@@ -45,9 +45,12 @@ class ValidateReviewColorspace(pyblish.api.InstancePlugin,
             category="houdini")
         apply_plugin_settings_automatically(cls, settings, logger=cls.log)

+        # workfile settings added in '0.2.13'
+        color_settings = project_settings["houdini"]["imageio"].get(
+            "workfile", {}
+        )
         # Add review color settings
-        color_settings = project_settings["houdini"]["imageio"]["workfile"]
-        if color_settings["enabled"]:
+        if color_settings.get("enabled"):
             cls.review_color_space = color_settings.get("review_color_space")

@@ -6,12 +6,9 @@ import json
 from typing import Any, Dict, Union

-import six
 import ayon_api

 from ayon_core.pipeline import (
     get_current_project_name,
     get_current_folder_path,
     get_current_task_name,
     colorspace
 )
 from ayon_core.settings import get_project_settings

@@ -496,9 +493,9 @@ def object_transform_set(container_children):
     """
     transform_set = {}
     for node in container_children:
-        name = f"{node.name}.transform"
+        name = f"{node}.transform"
         transform_set[name] = node.pos
-        name = f"{node.name}.scale"
+        name = f"{node}.scale"
         transform_set[name] = node.scale
     return transform_set

@ -519,6 +516,36 @@ def get_plugins() -> list:
|
|||
return plugin_info_list
|
||||
|
||||
|
||||
def update_modifier_node_names(event, node):
|
||||
"""Update the name of the nodes after renaming
|
||||
|
||||
Args:
|
||||
event (pymxs.MXSWrapperBase): Event Name (
|
||||
Mandatory argument for rt.NodeEventCallback)
|
||||
node (list): Event Number (
|
||||
Mandatory argument for rt.NodeEventCallback)
|
||||
|
||||
"""
|
||||
containers = [
|
||||
obj
|
||||
for obj in rt.Objects
|
||||
if (
|
||||
rt.ClassOf(obj) == rt.Container
|
||||
and rt.getUserProp(obj, "id") == "pyblish.avalon.instance"
|
||||
and rt.getUserProp(obj, "productType") not in {
|
||||
"workfile", "tyflow"
|
||||
}
|
||||
)
|
||||
]
|
||||
if not containers:
|
||||
return
|
||||
for container in containers:
|
||||
ayon_data = container.modifiers[0].openPypeData
|
||||
updated_node_names = [str(node.node) for node
|
||||
in ayon_data.all_handles]
|
||||
rt.setProperty(ayon_data, "sel_list", updated_node_names)
|
||||
|
||||
|
||||
@contextlib.contextmanager
|
||||
def render_resolution(width, height):
|
||||
"""Set render resolution option during context
|
||||
|
|
|
|||
|
|
@ -63,6 +63,8 @@ class MaxHost(HostBase, IWorkfileHost, ILoadHost, IPublishHost):
|
|||
|
||||
rt.callbacks.addScript(rt.Name('postWorkspaceChange'),
|
||||
self._deferred_menu_creation)
|
||||
rt.NodeEventCallback(
|
||||
nameChanged=lib.update_modifier_node_names)
|
||||
|
||||
def workfile_has_unsaved_changes(self):
|
||||
return rt.getSaveRequired()
|
||||
|
|
|
|||
|
|
@ -117,7 +117,7 @@ class MaxSceneLoader(load.LoaderPlugin):
|
|||
)
|
||||
for max_obj, obj_name in zip(max_objects, max_object_names):
|
||||
max_obj.name = f"{namespace}:{obj_name}"
|
||||
max_container.append(rt.getNodeByName(max_obj.name))
|
||||
max_container.append(max_obj)
|
||||
return containerise(
|
||||
name, max_container, context,
|
||||
namespace, loader=self.__class__.__name__)
|
||||
|
|
@ -158,11 +158,11 @@ class MaxSceneLoader(load.LoaderPlugin):
|
|||
current_max_object_names):
|
||||
max_obj.name = f"{namespace}:{obj_name}"
|
||||
max_objects.append(max_obj)
|
||||
max_transform = f"{max_obj.name}.transform"
|
||||
max_transform = f"{max_obj}.transform"
|
||||
if max_transform in transform_data.keys():
|
||||
max_obj.pos = transform_data[max_transform] or 0
|
||||
max_obj.scale = transform_data[
|
||||
f"{max_obj.name}.scale"] or 0
|
||||
f"{max_obj}.scale"] or 0
|
||||
|
||||
update_custom_attribute_data(node, max_objects)
|
||||
lib.imprint(container["instance_node"], {
|
||||
|
|
|
|||
|
|
@ -76,11 +76,11 @@ class FbxModelLoader(load.LoaderPlugin):
|
|||
for fbx_object in current_fbx_objects:
|
||||
fbx_object.name = f"{namespace}:{fbx_object.name}"
|
||||
fbx_objects.append(fbx_object)
|
||||
fbx_transform = f"{fbx_object.name}.transform"
|
||||
fbx_transform = f"{fbx_object}.transform"
|
||||
if fbx_transform in transform_data.keys():
|
||||
fbx_object.pos = transform_data[fbx_transform] or 0
|
||||
fbx_object.scale = transform_data[
|
||||
f"{fbx_object.name}.scale"] or 0
|
||||
f"{fbx_object}.scale"] or 0
|
||||
|
||||
with maintained_selection():
|
||||
rt.Select(node)
|
||||
|
|
|
|||
|
|
@ -67,11 +67,11 @@ class ObjLoader(load.LoaderPlugin):
|
|||
selections = rt.GetCurrentSelection()
|
||||
for selection in selections:
|
||||
selection.name = f"{namespace}:{selection.name}"
|
||||
selection_transform = f"{selection.name}.transform"
|
||||
selection_transform = f"{selection}.transform"
|
||||
if selection_transform in transform_data.keys():
|
||||
selection.pos = transform_data[selection_transform] or 0
|
||||
selection.scale = transform_data[
|
||||
f"{selection.name}.scale"] or 0
|
||||
f"{selection}.scale"] or 0
|
||||
update_custom_attribute_data(node, selections)
|
||||
with maintained_selection():
|
||||
rt.Select(node)
|
||||
|
|
|
|||
|
|
@ -95,11 +95,11 @@ class ModelUSDLoader(load.LoaderPlugin):
|
|||
for children in asset.Children:
|
||||
children.name = f"{namespace}:{children.name}"
|
||||
usd_objects.append(children)
|
||||
children_transform = f"{children.name}.transform"
|
||||
children_transform = f"{children}.transform"
|
||||
if children_transform in transform_data.keys():
|
||||
children.pos = transform_data[children_transform] or 0
|
||||
children.scale = transform_data[
|
||||
f"{children.name}.scale"] or 0
|
||||
f"{children}.scale"] or 0
|
||||
|
||||
asset.name = f"{namespace}:{asset.name}"
|
||||
usd_objects.append(asset)
|
||||
|
|
|
|||
|
|
@ -92,10 +92,10 @@ class OxAbcLoader(load.LoaderPlugin):
|
|||
abc.Parent = container
|
||||
abc.name = f"{namespace}:{abc.name}"
|
||||
ox_abc_objects.append(abc)
|
||||
ox_transform = f"{abc.name}.transform"
|
||||
ox_transform = f"{abc}.transform"
|
||||
if ox_transform in transform_data.keys():
|
||||
abc.pos = transform_data[ox_transform] or 0
|
||||
abc.scale = transform_data[f"{abc.name}.scale"] or 0
|
||||
abc.scale = transform_data[f"{abc}.scale"] or 0
|
||||
update_custom_attribute_data(node, ox_abc_objects)
|
||||
lib.imprint(
|
||||
container["instance_node"],
|
||||
|
|
|
|||
|
|
@ -12,4 +12,4 @@
|
|||
max create mode
|
||||
|
||||
python.ExecuteFile startup
|
||||
)
|
||||
)
|
||||
|
|
|
|||
client/ayon_core/hosts/maya/api/alembic.py (new file, 350 lines)

@@ -0,0 +1,350 @@
import json
|
||||
import logging
|
||||
import os
|
||||
|
||||
from maya import cmds # noqa
|
||||
|
||||
from ayon_core.hosts.maya.api.lib import evaluation
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
# The maya alembic export types
|
||||
ALEMBIC_ARGS = {
|
||||
"attr": (list, tuple),
|
||||
"attrPrefix": (list, tuple),
|
||||
"autoSubd": bool,
|
||||
"dataFormat": str,
|
||||
"endFrame": float,
|
||||
"eulerFilter": bool,
|
||||
"frameRange": str, # "start end"; overrides startFrame & endFrame
|
||||
"frameRelativeSample": float,
|
||||
"melPerFrameCallback": str,
|
||||
"melPostJobCallback": str,
|
||||
"noNormals": bool,
|
||||
"preRoll": bool,
|
||||
"pythonPerFrameCallback": str,
|
||||
"pythonPostJobCallback": str,
|
||||
"renderableOnly": bool,
|
||||
"root": (list, tuple),
|
||||
"selection": bool,
|
||||
"startFrame": float,
|
||||
"step": float,
|
||||
"stripNamespaces": bool,
|
||||
"userAttr": (list, tuple),
|
||||
"userAttrPrefix": (list, tuple),
|
||||
"uvWrite": bool,
|
||||
"uvsOnly": bool,
|
||||
"verbose": bool,
|
||||
"wholeFrameGeo": bool,
|
||||
"worldSpace": bool,
|
||||
"writeColorSets": bool,
|
||||
"writeCreases": bool, # Maya 2015 Ext1+
|
||||
"writeFaceSets": bool,
|
||||
"writeUVSets": bool, # Maya 2017+
|
||||
"writeVisibility": bool,
|
||||
}
|
||||
|
||||
|
||||
def extract_alembic(
|
||||
file,
|
||||
attr=None,
|
||||
attrPrefix=None,
|
||||
dataFormat="ogawa",
|
||||
endFrame=None,
|
||||
eulerFilter=True,
|
||||
frameRange="",
|
||||
melPerFrameCallback=None,
|
||||
melPostJobCallback=None,
|
||||
noNormals=False,
|
||||
preRoll=False,
|
||||
preRollStartFrame=0,
|
||||
pythonPerFrameCallback=None,
|
||||
pythonPostJobCallback=None,
|
||||
renderableOnly=False,
|
||||
root=None,
|
||||
selection=True,
|
||||
startFrame=None,
|
||||
step=1.0,
|
||||
stripNamespaces=True,
|
||||
userAttr=None,
|
||||
userAttrPrefix=None,
|
||||
uvsOnly=False,
|
||||
uvWrite=True,
|
||||
verbose=False,
|
||||
wholeFrameGeo=False,
|
||||
worldSpace=False,
|
||||
writeColorSets=False,
|
||||
writeCreases=False,
|
||||
writeFaceSets=False,
|
||||
writeUVSets=False,
|
||||
writeVisibility=False
|
||||
):
|
||||
"""Extract a single Alembic Cache.
|
||||
|
||||
This extracts an Alembic cache using the `-selection` flag to minimize
|
||||
the extracted content to solely what was Collected into the instance.
|
||||
|
||||
Arguments:
|
||||
file (str): The filepath to write the alembic file to.
|
||||
|
||||
attr (list of str, optional): A specific geometric attribute to write
|
||||
out. Defaults to [].
|
||||
|
||||
attrPrefix (list of str, optional): Prefix filter for determining which
|
||||
geometric attributes to write out. Defaults to ["ABC_"].
|
||||
|
||||
dataFormat (str): The data format to use for the cache,
|
||||
defaults to "ogawa"
|
||||
|
||||
endFrame (float): End frame of output. Ignored if `frameRange`
|
||||
provided.
|
||||
|
||||
eulerFilter (bool): When on, X, Y, and Z rotation data is filtered with
|
||||
an Euler filter. Euler filtering helps resolve irregularities in
|
||||
rotations especially if X, Y, and Z rotations exceed 360 degrees.
|
||||
Defaults to True.
|
||||
|
||||
frameRange (tuple or str): Two-tuple with start and end frame or a
|
||||
string formatted as: "startFrame endFrame". This argument
|
||||
overrides `startFrame` and `endFrame` arguments.
|
||||
|
||||
melPerFrameCallback (Optional[str]): MEL callback run per frame.
|
||||
|
||||
melPostJobCallback (Optional[str]): MEL callback after last frame is
|
||||
written.
|
||||
|
||||
noNormals (bool): When on, normal data from the original polygon
|
||||
objects is not included in the exported Alembic cache file.
|
||||
|
||||
preRoll (bool): This frame range will not be sampled.
|
||||
Defaults to False.
|
||||
|
||||
preRollStartFrame (float): The frame to start scene
|
||||
evaluation at. This is used to set the starting frame for time
|
||||
dependent translations and can be used to evaluate run-up that
|
||||
isn't actually translated. Defaults to 0.
|
||||
|
||||
pythonPerFrameCallback (Optional[str]): Python callback run per frame.
|
||||
|
||||
pythonPostJobCallback (Optional[str]): Python callback after last frame
|
||||
is written.
|
||||
|
||||
renderableOnly (bool): When on, any non-renderable nodes or hierarchy,
|
||||
such as hidden objects, are not included in the Alembic file.
|
||||
Defaults to False.
|
||||
|
||||
root (list of str): Maya dag path which will be parented to
|
||||
the root of the Alembic file. Defaults to [], which means the
|
||||
entire scene will be written out.
|
||||
|
||||
selection (bool): Write out all all selected nodes from the
|
||||
active selection list that are descendents of the roots specified
|
||||
with -root. Defaults to False.
|
||||
|
||||
startFrame (float): Start frame of output. Ignored if `frameRange`
|
||||
provided.
|
||||
|
||||
step (float): The time interval (expressed in frames) at
|
||||
which the frame range is sampled. Additional samples around each
|
||||
frame can be specified with -frs. Defaults to 1.0.
|
||||
|
||||
stripNamespaces (bool): When on, any namespaces associated with the
|
||||
exported objects are removed from the Alembic file. For example, an
|
||||
object with the namespace taco:foo:bar appears as bar in the
|
||||
Alembic file.
|
||||
|
||||
userAttr (list of str, optional): A specific user defined attribute to
|
||||
write out. Defaults to [].
|
||||
|
||||
userAttrPrefix (list of str, optional): Prefix filter for determining
|
||||
which user defined attributes to write out. Defaults to [].
|
||||
|
||||
uvsOnly (bool): When on, only uv data for PolyMesh and SubD shapes
|
||||
will be written to the Alembic file.
|
||||
|
||||
uvWrite (bool): When on, UV data from polygon meshes and subdivision
|
||||
objects are written to the Alembic file. Only the current UV map is
|
||||
included.
|
||||
|
||||
verbose (bool): When on, outputs frame number information to the
|
||||
Script Editor or output window during extraction.
|
||||
|
||||
wholeFrameGeo (bool): Data for geometry will only be written
|
||||
out on whole frames. Defaults to False.
|
||||
|
||||
worldSpace (bool): When on, the top node in the node hierarchy is
|
||||
stored as world space. By default, these nodes are stored as local
|
||||
space. Defaults to False.
|
||||
|
||||
writeColorSets (bool): Write all color sets on MFnMeshes as
|
||||
color 3 or color 4 indexed geometry parameters with face varying
|
||||
scope. Defaults to False.
|
||||
|
||||
writeCreases (bool): If the mesh has crease edges or crease
|
||||
vertices, the mesh (OPolyMesh) would now be written out as an OSubD
|
||||
and crease info will be stored in the Alembic file. Otherwise,
|
||||
creases info won't be preserved in Alembic file unless a custom
|
||||
Boolean attribute SubDivisionMesh has been added to mesh node and
|
||||
its value is true. Defaults to False.
|
||||
|
||||
writeFaceSets (bool): Write all Face sets on MFnMeshes.
|
||||
Defaults to False.
|
||||
|
||||
writeUVSets (bool): Write all uv sets on MFnMeshes as vector
|
||||
2 indexed geometry parameters with face varying scope. Defaults to
|
||||
False.
|
||||
|
||||
writeVisibility (bool): Visibility state will be stored in
|
||||
the Alembic file. Otherwise everything written out is treated as
|
||||
visible. Defaults to False.
|
||||
"""
|
||||
|
||||
# Ensure alembic exporter is loaded
|
||||
cmds.loadPlugin('AbcExport', quiet=True)
|
||||
|
||||
# Alembic Exporter requires forward slashes
|
||||
file = file.replace('\\', '/')
|
||||
|
||||
# Ensure list arguments are valid.
|
||||
attr = attr or []
|
||||
attrPrefix = attrPrefix or []
|
||||
userAttr = userAttr or []
|
||||
userAttrPrefix = userAttrPrefix or []
|
||||
root = root or []
|
||||
|
||||
# Pass the start and end frame on as `frameRange` so that it
|
||||
# never conflicts with that argument
|
||||
if not frameRange:
|
||||
# Fallback to maya timeline if no start or end frame provided.
|
||||
if startFrame is None:
|
||||
startFrame = cmds.playbackOptions(query=True,
|
||||
animationStartTime=True)
|
||||
if endFrame is None:
|
||||
endFrame = cmds.playbackOptions(query=True,
|
||||
animationEndTime=True)
|
||||
|
||||
# Ensure valid types are converted to frame range
|
||||
assert isinstance(startFrame, ALEMBIC_ARGS["startFrame"])
|
||||
assert isinstance(endFrame, ALEMBIC_ARGS["endFrame"])
|
||||
frameRange = "{0} {1}".format(startFrame, endFrame)
|
||||
else:
|
||||
# Allow conversion from tuple for `frameRange`
|
||||
if isinstance(frameRange, (list, tuple)):
|
||||
assert len(frameRange) == 2
|
||||
frameRange = "{0} {1}".format(frameRange[0], frameRange[1])
|
||||
|
||||
# Assemble options
|
||||
options = {
|
||||
"selection": selection,
|
||||
"frameRange": frameRange,
|
||||
"eulerFilter": eulerFilter,
|
||||
"noNormals": noNormals,
|
||||
"preRoll": preRoll,
|
||||
"root": root,
|
||||
"renderableOnly": renderableOnly,
|
||||
"uvWrite": uvWrite,
|
||||
"uvsOnly": uvsOnly,
|
||||
"writeColorSets": writeColorSets,
|
||||
"writeFaceSets": writeFaceSets,
|
||||
"wholeFrameGeo": wholeFrameGeo,
|
||||
"worldSpace": worldSpace,
|
||||
"writeVisibility": writeVisibility,
|
||||
"writeUVSets": writeUVSets,
|
||||
"writeCreases": writeCreases,
|
||||
"dataFormat": dataFormat,
|
||||
"step": step,
|
||||
"attr": attr,
|
||||
"attrPrefix": attrPrefix,
|
||||
"userAttr": userAttr,
|
||||
"userAttrPrefix": userAttrPrefix,
|
||||
"stripNamespaces": stripNamespaces,
|
||||
"verbose": verbose
|
||||
}
|
||||
|
||||
# Validate options
|
||||
for key, value in options.copy().items():
|
||||
|
||||
# Discard unknown options
|
||||
if key not in ALEMBIC_ARGS:
|
||||
log.warning("extract_alembic() does not support option '%s'. "
|
||||
"Flag will be ignored..", key)
|
||||
options.pop(key)
|
||||
continue
|
||||
|
||||
# Validate value type
|
||||
valid_types = ALEMBIC_ARGS[key]
|
||||
if not isinstance(value, valid_types):
|
||||
raise TypeError("Alembic option unsupported type: "
|
||||
"{0} (expected {1})".format(value, valid_types))
|
||||
|
||||
# Ignore empty values, like an empty string, since they mess up how
|
||||
# job arguments are built
|
||||
if isinstance(value, (list, tuple)):
|
||||
value = [x for x in value if x.strip()]
|
||||
|
||||
# Ignore option completely if no values remaining
|
||||
if not value:
|
||||
options.pop(key)
|
||||
continue
|
||||
|
||||
options[key] = value
|
||||
|
||||
# The `writeCreases` argument was changed to `autoSubd` in Maya 2018+
|
||||
maya_version = int(cmds.about(version=True))
|
||||
if maya_version >= 2018:
|
||||
options['autoSubd'] = options.pop('writeCreases', False)
|
||||
|
||||
# Only add callbacks if they are set so that we're not passing `None`
|
||||
callbacks = {
|
||||
"melPerFrameCallback": melPerFrameCallback,
|
||||
"melPostJobCallback": melPostJobCallback,
|
||||
"pythonPerFrameCallback": pythonPerFrameCallback,
|
||||
"pythonPostJobCallback": pythonPostJobCallback,
|
||||
}
|
||||
for key, callback in callbacks.items():
|
||||
if callback:
|
||||
options[key] = str(callback)
|
||||
|
||||
# Format the job string from options
|
||||
job_args = list()
|
||||
for key, value in options.items():
|
||||
if isinstance(value, (list, tuple)):
|
||||
for entry in value:
|
||||
job_args.append("-{} {}".format(key, entry))
|
||||
elif isinstance(value, bool):
|
||||
# Add only when state is set to True
|
||||
if value:
|
||||
job_args.append("-{0}".format(key))
|
||||
else:
|
||||
job_args.append("-{0} {1}".format(key, value))
|
||||
|
||||
job_str = " ".join(job_args)
|
||||
job_str += ' -file "%s"' % file
|
||||
|
||||
# Ensure output directory exists
|
||||
parent_dir = os.path.dirname(file)
|
||||
if not os.path.exists(parent_dir):
|
||||
os.makedirs(parent_dir)
|
||||
|
||||
if verbose:
|
||||
log.debug("Preparing Alembic export with options: %s",
|
||||
json.dumps(options, indent=4))
|
||||
log.debug("Extracting Alembic with job arguments: %s", job_str)
|
||||
|
||||
# Perform extraction
|
||||
print("Alembic Job Arguments : {}".format(job_str))
|
||||
|
||||
# Disable the parallel evaluation temporarily to ensure no buggy
|
||||
# exports are made. (PLN-31)
|
||||
# TODO: Make sure this actually fixes the issues
|
||||
with evaluation("off"):
|
||||
cmds.AbcExport(
|
||||
j=job_str,
|
||||
verbose=verbose,
|
||||
preRollStartFrame=preRollStartFrame
|
||||
)
|
||||
|
||||
if verbose:
|
||||
log.debug("Extracted Alembic to: %s", file)
|
||||
|
||||
return file
|
||||
|
|
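A minimal usage sketch for the new extract_alembic() helper above, assuming it runs inside Maya with the AbcExport plug-in available; the node and output paths are made up for illustration:

    from maya import cmds
    from ayon_core.hosts.maya.api.alembic import extract_alembic

    # Select a hypothetical character group and cache it to a single Alembic file.
    cmds.select("|char_GRP", replace=True)
    extract_alembic(
        file="/tmp/caches/char_GRP.abc",  # backslashes are normalized internally
        root=["|char_GRP"],               # parented to the Alembic root
        frameRange="1001 1050",           # overrides startFrame/endFrame
        uvWrite=True,
        worldSpace=True,
        attrPrefix=["ABC_"],              # export custom attributes with this prefix
        verbose=True,
    )
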
@@ -70,37 +70,6 @@ DEFAULT_MATRIX = [1.0, 0.0, 0.0, 0.0,
                   0.0, 0.0, 1.0, 0.0,
                   0.0, 0.0, 0.0, 1.0]

-# The maya alembic export types
-_alembic_options = { ... }
 (The full option/type table that lived here was removed; the same table, extended
  with a few newer flags, is now ALEMBIC_ARGS in the new
  client/ayon_core/hosts/maya/api/alembic.py shown above.)

 INT_FPS = {15, 24, 25, 30, 48, 50, 60, 44100, 48000}
 FLOAT_FPS = {23.98, 23.976, 29.97, 47.952, 59.94}

@@ -1330,7 +1299,7 @@ def is_visible(node,
         override_enabled = cmds.getAttr('{}.overrideEnabled'.format(node))
         override_visibility = cmds.getAttr('{}.overrideVisibility'.format(
             node))
-        if override_enabled and override_visibility:
+        if override_enabled and not override_visibility:
             return False

     if parentHidden:

@ -1346,178 +1315,6 @@ def is_visible(node,
|
|||
|
||||
return True
|
||||
|
||||
|
||||
def extract_alembic(file,
|
||||
startFrame=None,
|
||||
endFrame=None,
|
||||
selection=True,
|
||||
uvWrite=True,
|
||||
eulerFilter=True,
|
||||
dataFormat="ogawa",
|
||||
verbose=False,
|
||||
**kwargs):
|
||||
"""Extract a single Alembic Cache.
|
||||
|
||||
This extracts an Alembic cache using the `-selection` flag to minimize
|
||||
the extracted content to solely what was Collected into the instance.
|
||||
|
||||
Arguments:
|
||||
|
||||
startFrame (float): Start frame of output. Ignored if `frameRange`
|
||||
provided.
|
||||
|
||||
endFrame (float): End frame of output. Ignored if `frameRange`
|
||||
provided.
|
||||
|
||||
frameRange (tuple or str): Two-tuple with start and end frame or a
|
||||
string formatted as: "startFrame endFrame". This argument
|
||||
overrides `startFrame` and `endFrame` arguments.
|
||||
|
||||
dataFormat (str): The data format to use for the cache,
|
||||
defaults to "ogawa"
|
||||
|
||||
verbose (bool): When on, outputs frame number information to the
|
||||
Script Editor or output window during extraction.
|
||||
|
||||
noNormals (bool): When on, normal data from the original polygon
|
||||
objects is not included in the exported Alembic cache file.
|
||||
|
||||
renderableOnly (bool): When on, any non-renderable nodes or hierarchy,
|
||||
such as hidden objects, are not included in the Alembic file.
|
||||
Defaults to False.
|
||||
|
||||
stripNamespaces (bool): When on, any namespaces associated with the
|
||||
exported objects are removed from the Alembic file. For example, an
|
||||
object with the namespace taco:foo:bar appears as bar in the
|
||||
Alembic file.
|
||||
|
||||
uvWrite (bool): When on, UV data from polygon meshes and subdivision
|
||||
objects are written to the Alembic file. Only the current UV map is
|
||||
included.
|
||||
|
||||
worldSpace (bool): When on, the top node in the node hierarchy is
|
||||
stored as world space. By default, these nodes are stored as local
|
||||
space. Defaults to False.
|
||||
|
||||
eulerFilter (bool): When on, X, Y, and Z rotation data is filtered with
|
||||
an Euler filter. Euler filtering helps resolve irregularities in
|
||||
rotations especially if X, Y, and Z rotations exceed 360 degrees.
|
||||
Defaults to True.
|
||||
|
||||
"""
|
||||
|
||||
# Ensure alembic exporter is loaded
|
||||
cmds.loadPlugin('AbcExport', quiet=True)
|
||||
|
||||
# Alembic Exporter requires forward slashes
|
||||
file = file.replace('\\', '/')
|
||||
|
||||
    # Pass the start and end frame on as `frameRange` so that it
    # never conflicts with that argument
    if "frameRange" not in kwargs:
        # Fallback to maya timeline if no start or end frame provided.
        if startFrame is None:
            startFrame = cmds.playbackOptions(query=True,
                                              animationStartTime=True)
        if endFrame is None:
            endFrame = cmds.playbackOptions(query=True,
                                            animationEndTime=True)

        # Ensure valid types are converted to frame range
        assert isinstance(startFrame, _alembic_options["startFrame"])
        assert isinstance(endFrame, _alembic_options["endFrame"])
        kwargs["frameRange"] = "{0} {1}".format(startFrame, endFrame)
    else:
        # Allow conversion from tuple for `frameRange`
        frame_range = kwargs["frameRange"]
        if isinstance(frame_range, (list, tuple)):
            assert len(frame_range) == 2
            kwargs["frameRange"] = "{0} {1}".format(frame_range[0],
                                                    frame_range[1])

    # Assemble options
    options = {
        "selection": selection,
        "uvWrite": uvWrite,
        "eulerFilter": eulerFilter,
        "dataFormat": dataFormat
    }
    options.update(kwargs)

    # Validate options
    for key, value in options.copy().items():

        # Discard unknown options
        if key not in _alembic_options:
            log.warning("extract_alembic() does not support option '%s'. "
                        "Flag will be ignored..", key)
            options.pop(key)
            continue

        # Validate value type
        valid_types = _alembic_options[key]
        if not isinstance(value, valid_types):
            raise TypeError("Alembic option unsupported type: "
                            "{0} (expected {1})".format(value, valid_types))

        # Ignore empty values, like an empty string, since they mess up how
        # job arguments are built
        if isinstance(value, (list, tuple)):
            value = [x for x in value if x.strip()]

            # Ignore option completely if no values remaining
            if not value:
                options.pop(key)
                continue

            options[key] = value

    # The `writeCreases` argument was changed to `autoSubd` in Maya 2018+
    maya_version = int(cmds.about(version=True))
    if maya_version >= 2018:
        options['autoSubd'] = options.pop('writeCreases', False)

    # Format the job string from options
    job_args = list()
    for key, value in options.items():
        if isinstance(value, (list, tuple)):
            for entry in value:
                job_args.append("-{} {}".format(key, entry))
        elif isinstance(value, bool):
            # Add only when state is set to True
            if value:
                job_args.append("-{0}".format(key))
        else:
            job_args.append("-{0} {1}".format(key, value))

    job_str = " ".join(job_args)
    job_str += ' -file "%s"' % file

    # Ensure output directory exists
    parent_dir = os.path.dirname(file)
    if not os.path.exists(parent_dir):
        os.makedirs(parent_dir)

    if verbose:
        log.debug("Preparing Alembic export with options: %s",
                  json.dumps(options, indent=4))
        log.debug("Extracting Alembic with job arguments: %s", job_str)

    # Perform extraction
    print("Alembic Job Arguments : {}".format(job_str))

    # Disable the parallel evaluation temporarily to ensure no buggy
    # exports are made. (PLN-31)
    # TODO: Make sure this actually fixes the issues
    with evaluation("off"):
        cmds.AbcExport(j=job_str, verbose=verbose)

    if verbose:
        log.debug("Extracted Alembic to: %s", file)

    return file

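For reference, a minimal usage sketch of the helper above; the selected node, output path and frame values are placeholders, and the import assumes the function's relocated module ayon_core.hosts.maya.api.alembic:

from maya import cmds
from ayon_core.hosts.maya.api.alembic import extract_alembic

cmds.select("pCube1", replace=True)  # hypothetical node
extract_alembic(
    file="/tmp/output.abc",
    frameRange=(1001, 1050),  # tuple gets formatted to the "1001 1050" flag
    uvWrite=True,
    worldSpace=True,
    verbose=True,
)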
# region ID
def get_id_required_nodes(referenced_nodes=False,
                          nodes=None,

@@ -2520,7 +2317,16 @@ def set_scene_fps(fps, update=True):
    """

    fps_mapping = {
        '2': '2fps',
        '3': '3fps',
        '4': '4fps',
        '5': '5fps',
        '6': '6fps',
        '8': '8fps',
        '10': '10fps',
        '12': '12fps',
        '15': 'game',
        '16': '16fps',
        '24': 'film',
        '25': 'pal',
        '30': 'ntsc',

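The mapping above pairs numeric rates with Maya's named time units; a small illustrative lookup (values hard-coded for the example):

from maya import cmds

unit = fps_mapping.get("25")  # -> "pal"
if unit:
    cmds.currentUnit(time=unit)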
@@ -2612,21 +2418,24 @@ def get_fps_for_current_context():
    Returns:
        Union[int, float]: FPS value.
    """

    project_name = get_current_project_name()
    folder_path = get_current_folder_path()
    folder_entity = ayon_api.get_folder_by_path(
        project_name, folder_path, fields={"attrib.fps"}
    ) or {}
    fps = folder_entity.get("attrib", {}).get("fps")
    task_entity = get_current_task_entity(fields={"attrib"})
    fps = task_entity.get("attrib", {}).get("fps")
    if not fps:
        project_entity = ayon_api.get_project(
            project_name, fields=["attrib.fps"]
        project_name = get_current_project_name()
        folder_path = get_current_folder_path()
        folder_entity = ayon_api.get_folder_by_path(
            project_name, folder_path, fields={"attrib.fps"}
        ) or {}
        fps = project_entity.get("attrib", {}).get("fps")

        fps = folder_entity.get("attrib", {}).get("fps")
        if not fps:
            fps = 25
            project_entity = ayon_api.get_project(
                project_name, fields=["attrib.fps"]
            ) or {}
            fps = project_entity.get("attrib", {}).get("fps")

            if not fps:
                fps = 25

    return convert_to_maya_fps(fps)

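The updated lookup tries the task attributes first and only then walks up; a condensed sketch of that fallback chain, with the entity queries reduced to plain dicts for illustration:

def _resolve_fps(task_attrib, folder_attrib, project_attrib):
    # Task attributes win, then folder, then project, then a hard default.
    return (
        task_attrib.get("fps")
        or folder_attrib.get("fps")
        or project_attrib.get("fps")
        or 25
    )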
@@ -4403,3 +4212,23 @@ def create_rig_animation_instance(
        variant=namespace,
        pre_create_data={"use_selection": True}
    )


def get_node_index_under_parent(node: str) -> int:
    """Return the index of a DAG node under its parent.

    Arguments:
        node (str): A DAG Node path.

    Returns:
        int: The DAG node's index under its parent or world.

    """
    node = cmds.ls(node, long=True)[0]  # enforce long names
    parent = node.rsplit("|", 1)[0]
    if not parent:
        return cmds.ls(assemblies=True, long=True).index(node)
    else:
        return cmds.listRelatives(parent,
                                  children=True,
                                  fullPath=True).index(node)

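A hypothetical usage sketch of the new helper; node names are placeholders:

from maya import cmds

cmds.group(empty=True, name="grp")
cmds.spaceLocator(name="locA")
cmds.spaceLocator(name="locB")
cmds.parent(["locA", "locB"], "grp")

# "locB" is the second child of "grp", so an index of 1 is expected.
print(get_node_index_under_parent("|grp|locB"))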
@@ -30,9 +30,11 @@ from ayon_core.pipeline import (
    register_loader_plugin_path,
    register_inventory_action_path,
    register_creator_plugin_path,
    register_workfile_build_plugin_path,
    deregister_loader_plugin_path,
    deregister_inventory_action_path,
    deregister_creator_plugin_path,
    deregister_workfile_build_plugin_path,
    AYON_CONTAINER_ID,
    AVALON_CONTAINER_ID,
)

@@ -47,7 +49,6 @@ from ayon_core.hosts.maya import MAYA_ROOT_DIR
from ayon_core.hosts.maya.lib import create_workspace_mel

from . import menu, lib
from .workfile_template_builder import MayaPlaceholderLoadPlugin
from .workio import (
    open_file,
    save_file,

@@ -64,6 +65,7 @@ PUBLISH_PATH = os.path.join(PLUGINS_DIR, "publish")
LOAD_PATH = os.path.join(PLUGINS_DIR, "load")
CREATE_PATH = os.path.join(PLUGINS_DIR, "create")
INVENTORY_PATH = os.path.join(PLUGINS_DIR, "inventory")
WORKFILE_BUILD_PATH = os.path.join(PLUGINS_DIR, "workfile_build")

AVALON_CONTAINERS = ":AVALON_CONTAINERS"


@@ -93,7 +95,7 @@ class MayaHost(HostBase, IWorkfileHost, ILoadHost, IPublishHost):
        register_loader_plugin_path(LOAD_PATH)
        register_creator_plugin_path(CREATE_PATH)
        register_inventory_action_path(INVENTORY_PATH)
        self.log.info(PUBLISH_PATH)
        register_workfile_build_plugin_path(WORKFILE_BUILD_PATH)

        self.log.info("Installing callbacks ... ")
        register_event_callback("init", on_init)

@@ -148,11 +150,6 @@ class MayaHost(HostBase, IWorkfileHost, ILoadHost, IPublishHost):
    def get_containers(self):
        return ls()

    def get_workfile_build_placeholder_plugins(self):
        return [
            MayaPlaceholderLoadPlugin
        ]

    @contextlib.contextmanager
    def maintained_selection(self):
        with lib.maintained_selection():

@@ -338,6 +335,7 @@ def uninstall():
    deregister_loader_plugin_path(LOAD_PATH)
    deregister_creator_plugin_path(CREATE_PATH)
    deregister_inventory_action_path(INVENTORY_PATH)
    deregister_workfile_build_plugin_path(WORKFILE_BUILD_PATH)

    menu.uninstall()

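With placeholder plugins now discovered from the registered workfile_build directory instead of being returned by the host, a module dropped into that folder would roughly take this shape; the class name and identifier are only illustrative, and a real plugin still implements the abstract placeholder methods:

from ayon_core.pipeline.workfile.workfile_template_builder import (
    PlaceholderPlugin,
)


class ExamplePlaceholderPlugin(PlaceholderPlugin):
    """Skeleton of a plugin discovered from WORKFILE_BUILD_PATH."""

    identifier = "example.placeholder"
    label = "Example placeholder"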
@@ -12,14 +12,13 @@ from ayon_core.pipeline.workfile.workfile_template_builder import (
    TemplateAlreadyImported,
    AbstractTemplateBuilder,
    PlaceholderPlugin,
    LoadPlaceholderItem,
    PlaceholderLoadMixin,
    PlaceholderItem,
)
from ayon_core.tools.workfile_template_build import (
    WorkfileBuildPlaceholderDialog,
)

from .lib import read, imprint, get_reference_node, get_main_window
from .lib import read, imprint, get_main_window

PLACEHOLDER_SET = "PLACEHOLDERS_SET"

@ -91,170 +90,102 @@ class MayaTemplateBuilder(AbstractTemplateBuilder):
|
|||
return True
|
||||
|
||||
|
||||
class MayaPlaceholderLoadPlugin(PlaceholderPlugin, PlaceholderLoadMixin):
|
||||
identifier = "maya.load"
|
||||
label = "Maya load"
|
||||
class MayaPlaceholderPlugin(PlaceholderPlugin):
|
||||
"""Base Placeholder Plugin for Maya with one unified cache.
|
||||
|
||||
def _collect_scene_placeholders(self):
|
||||
# Cache placeholder data to shared data
|
||||
placeholder_nodes = self.builder.get_shared_populate_data(
|
||||
"placeholder_nodes"
|
||||
)
|
||||
if placeholder_nodes is None:
|
||||
attributes = cmds.ls("*.plugin_identifier", long=True)
|
||||
placeholder_nodes = {}
|
||||
for attribute in attributes:
|
||||
node_name = attribute.rpartition(".")[0]
|
||||
placeholder_nodes[node_name] = (
|
||||
self._parse_placeholder_node_data(node_name)
|
||||
)
|
||||
Creates a locator as placeholder node, which during populate provide
|
||||
all of its attributes defined on the locator's transform in
|
||||
`placeholder.data` and where `placeholder.scene_identifier` is the
|
||||
full path to the node.
|
||||
|
||||
self.builder.set_shared_populate_data(
|
||||
"placeholder_nodes", placeholder_nodes
|
||||
)
|
||||
return placeholder_nodes
|
||||
Inherited classes must still implement `populate_placeholder`
|
||||
|
||||
def _parse_placeholder_node_data(self, node_name):
|
||||
placeholder_data = read(node_name)
|
||||
parent_name = (
|
||||
cmds.getAttr(node_name + ".parent", asString=True)
|
||||
or node_name.rpartition("|")[0]
|
||||
or ""
|
||||
)
|
||||
if parent_name:
|
||||
siblings = cmds.listRelatives(parent_name, children=True)
|
||||
else:
|
||||
siblings = cmds.ls(assemblies=True)
|
||||
node_shortname = node_name.rpartition("|")[2]
|
||||
current_index = cmds.getAttr(node_name + ".index", asString=True)
|
||||
if current_index < 0:
|
||||
current_index = siblings.index(node_shortname)
|
||||
"""
|
||||
|
||||
placeholder_data.update({
|
||||
"parent": parent_name,
|
||||
"index": current_index
|
||||
})
|
||||
return placeholder_data
|
||||
use_selection_as_parent = True
|
||||
item_class = PlaceholderItem
|
||||
|
||||
def _create_placeholder_name(self, placeholder_data):
|
||||
placeholder_name_parts = placeholder_data["builder_type"].split("_")
|
||||
return self.identifier.replace(".", "_")
|
||||
|
||||
pos = 1
|
||||
placeholder_product_type = placeholder_data.get("product_type")
|
||||
if placeholder_product_type is None:
|
||||
placeholder_product_type = placeholder_data.get("family")
|
||||
|
||||
if placeholder_product_type:
|
||||
placeholder_name_parts.insert(pos, placeholder_product_type)
|
||||
pos += 1
|
||||
|
||||
# add loader arguments if any
|
||||
loader_args = placeholder_data["loader_args"]
|
||||
if loader_args:
|
||||
loader_args = json.loads(loader_args.replace('\'', '\"'))
|
||||
values = [v for v in loader_args.values()]
|
||||
for value in values:
|
||||
placeholder_name_parts.insert(pos, value)
|
||||
pos += 1
|
||||
|
||||
placeholder_name = "_".join(placeholder_name_parts)
|
||||
|
||||
return placeholder_name.capitalize()
|
||||
|
||||
def _get_loaded_repre_ids(self):
|
||||
loaded_representation_ids = self.builder.get_shared_populate_data(
|
||||
"loaded_representation_ids"
|
||||
def _collect_scene_placeholders(self):
|
||||
nodes_by_identifier = self.builder.get_shared_populate_data(
|
||||
"placeholder_nodes"
|
||||
)
|
||||
if loaded_representation_ids is None:
|
||||
try:
|
||||
containers = cmds.sets("AVALON_CONTAINERS", q=True)
|
||||
except ValueError:
|
||||
containers = []
|
||||
if nodes_by_identifier is None:
|
||||
# Cache placeholder data to shared data
|
||||
nodes = cmds.ls("*.plugin_identifier", long=True, objectsOnly=True)
|
||||
|
||||
loaded_representation_ids = {
|
||||
cmds.getAttr(container + ".representation")
|
||||
for container in containers
|
||||
}
|
||||
nodes_by_identifier = {}
|
||||
for node in nodes:
|
||||
identifier = cmds.getAttr("{}.plugin_identifier".format(node))
|
||||
nodes_by_identifier.setdefault(identifier, []).append(node)
|
||||
|
||||
# Set the cache
|
||||
self.builder.set_shared_populate_data(
|
||||
"loaded_representation_ids", loaded_representation_ids
|
||||
"placeholder_nodes", nodes_by_identifier
|
||||
)
|
||||
return loaded_representation_ids
|
||||
|
||||
return nodes_by_identifier
|
||||
|
||||
def create_placeholder(self, placeholder_data):
|
||||
selection = cmds.ls(selection=True)
|
||||
if len(selection) > 1:
|
||||
raise ValueError("More then one item are selected")
|
||||
|
||||
parent = selection[0] if selection else None
|
||||
parent = None
|
||||
if self.use_selection_as_parent:
|
||||
selection = cmds.ls(selection=True)
|
||||
if len(selection) > 1:
|
||||
raise ValueError(
|
||||
"More than one node is selected. "
|
||||
"Please select only one to define the parent."
|
||||
)
|
||||
parent = selection[0] if selection else None
|
||||
|
||||
placeholder_data["plugin_identifier"] = self.identifier
|
||||
|
||||
placeholder_name = self._create_placeholder_name(placeholder_data)
|
||||
|
||||
placeholder = cmds.spaceLocator(name=placeholder_name)[0]
|
||||
if parent:
|
||||
placeholder = cmds.parent(placeholder, selection[0])[0]
|
||||
|
||||
imprint(placeholder, placeholder_data)
|
||||
|
||||
# Add helper attributes to keep placeholder info
|
||||
cmds.addAttr(
|
||||
placeholder,
|
||||
longName="parent",
|
||||
hidden=True,
|
||||
dataType="string"
|
||||
)
|
||||
cmds.addAttr(
|
||||
placeholder,
|
||||
longName="index",
|
||||
hidden=True,
|
||||
attributeType="short",
|
||||
defaultValue=-1
|
||||
)
|
||||
|
||||
cmds.setAttr(placeholder + ".parent", "", type="string")
|
||||
self.imprint(placeholder, placeholder_data)
|
||||
|
||||
def update_placeholder(self, placeholder_item, placeholder_data):
|
||||
node_name = placeholder_item.scene_identifier
|
||||
new_values = {}
|
||||
|
||||
changed_values = {}
|
||||
for key, value in placeholder_data.items():
|
||||
placeholder_value = placeholder_item.data.get(key)
|
||||
if value != placeholder_value:
|
||||
new_values[key] = value
|
||||
placeholder_item.data[key] = value
|
||||
if value != placeholder_item.data.get(key):
|
||||
changed_values[key] = value
|
||||
|
||||
for key in new_values.keys():
|
||||
cmds.deleteAttr(node_name + "." + key)
|
||||
# Delete attributes to ensure we imprint new data with correct type
|
||||
for key in changed_values.keys():
|
||||
placeholder_item.data[key] = value
|
||||
if cmds.attributeQuery(key, node=node_name, exists=True):
|
||||
attribute = "{}.{}".format(node_name, key)
|
||||
cmds.deleteAttr(attribute)
|
||||
|
||||
imprint(node_name, new_values)
|
||||
self.imprint(node_name, changed_values)
|
||||
|
||||
def collect_placeholders(self):
|
||||
output = []
|
||||
scene_placeholders = self._collect_scene_placeholders()
|
||||
for node_name, placeholder_data in scene_placeholders.items():
|
||||
if placeholder_data.get("plugin_identifier") != self.identifier:
|
||||
continue
|
||||
|
||||
placeholders = []
|
||||
nodes_by_identifier = self._collect_scene_placeholders()
|
||||
for node in nodes_by_identifier.get(self.identifier, []):
|
||||
# TODO do data validations and maybe upgrades if they are invalid
|
||||
output.append(
|
||||
LoadPlaceholderItem(node_name, placeholder_data, self)
|
||||
placeholder_data = self.read(node)
|
||||
placeholders.append(
|
||||
self.item_class(scene_identifier=node,
|
||||
data=placeholder_data,
|
||||
plugin=self)
|
||||
)
|
||||
|
||||
return output
|
||||
|
||||
def populate_placeholder(self, placeholder):
|
||||
self.populate_load_placeholder(placeholder)
|
||||
|
||||
def repopulate_placeholder(self, placeholder):
|
||||
repre_ids = self._get_loaded_repre_ids()
|
||||
self.populate_load_placeholder(placeholder, repre_ids)
|
||||
|
||||
def get_placeholder_options(self, options=None):
|
||||
return self.get_load_plugin_options(options)
|
||||
return placeholders
|
||||
|
||||
def post_placeholder_process(self, placeholder, failed):
|
||||
"""Cleanup placeholder after load of its corresponding representations.
|
||||
|
||||
Hide placeholder, add them to placeholder set.
|
||||
Used only by PlaceholderCreateMixin and PlaceholderLoadMixin
|
||||
|
||||
Args:
|
||||
placeholder (PlaceholderItem): Item which was just used to load
|
||||
representation.
|
||||
|
|
@ -263,82 +194,56 @@ class MayaPlaceholderLoadPlugin(PlaceholderPlugin, PlaceholderLoadMixin):
|
|||
# Hide placeholder and add them to placeholder set
|
||||
node = placeholder.scene_identifier
|
||||
|
||||
# If we just populate the placeholders from current scene, the
|
||||
# placeholder set will not be created so account for that.
|
||||
if not cmds.objExists(PLACEHOLDER_SET):
|
||||
cmds.sets(name=PLACEHOLDER_SET, empty=True)
|
||||
|
||||
cmds.sets(node, addElement=PLACEHOLDER_SET)
|
||||
cmds.hide(node)
|
||||
cmds.setAttr(node + ".hiddenInOutliner", True)
|
||||
cmds.setAttr("{}.hiddenInOutliner".format(node), True)
|
||||
|
||||
def delete_placeholder(self, placeholder):
|
||||
"""Remove placeholder if building was successful"""
|
||||
cmds.delete(placeholder.scene_identifier)
|
||||
"""Remove placeholder if building was successful
|
||||
|
||||
def load_succeed(self, placeholder, container):
|
||||
self._parent_in_hierarchy(placeholder, container)
|
||||
|
||||
def _parent_in_hierarchy(self, placeholder, container):
|
||||
"""Parent loaded container to placeholder's parent.
|
||||
|
||||
ie : Set loaded content as placeholder's sibling
|
||||
|
||||
Args:
|
||||
container (str): Placeholder loaded containers
|
||||
Used only by PlaceholderCreateMixin and PlaceholderLoadMixin.
|
||||
"""
|
||||
node = placeholder.scene_identifier
|
||||
|
||||
if not container:
|
||||
return
|
||||
# To avoid that deleting a placeholder node will have Maya delete
|
||||
# any objectSets the node was a member of we will first remove it
|
||||
# from any sets it was a member of. This way the `PLACEHOLDERS_SET`
|
||||
# will survive long enough
|
||||
sets = cmds.listSets(o=node) or []
|
||||
for object_set in sets:
|
||||
cmds.sets(node, remove=object_set)
|
||||
|
||||
roots = cmds.sets(container, q=True) or []
|
||||
ref_node = None
|
||||
try:
|
||||
ref_node = get_reference_node(roots)
|
||||
except AssertionError as e:
|
||||
self.log.info(e.args[0])
|
||||
cmds.delete(node)
|
||||
|
||||
nodes_to_parent = []
|
||||
for root in roots:
|
||||
if ref_node:
|
||||
ref_root = cmds.referenceQuery(root, nodes=True)[0]
|
||||
ref_root = (
|
||||
cmds.listRelatives(ref_root, parent=True, path=True) or
|
||||
[ref_root]
|
||||
)
|
||||
nodes_to_parent.extend(ref_root)
|
||||
continue
|
||||
if root.endswith("_RN"):
|
||||
# Backwards compatibility for hardcoded reference names.
|
||||
refRoot = cmds.referenceQuery(root, n=True)[0]
|
||||
refRoot = cmds.listRelatives(refRoot, parent=True) or [refRoot]
|
||||
nodes_to_parent.extend(refRoot)
|
||||
elif root not in cmds.listSets(allSets=True):
|
||||
nodes_to_parent.append(root)
|
||||
def imprint(self, node, data):
|
||||
"""Imprint call for placeholder node"""
|
||||
|
||||
elif not cmds.sets(root, q=True):
|
||||
return
|
||||
# Complicated data that can't be represented as flat maya attributes
|
||||
# we write to json strings, e.g. multiselection EnumDef
|
||||
for key, value in data.items():
|
||||
if isinstance(value, (list, tuple, dict)):
|
||||
data[key] = "JSON::{}".format(json.dumps(value))
|
||||
|
||||
# Move loaded nodes to correct index in outliner hierarchy
|
||||
placeholder_form = cmds.xform(
|
||||
placeholder.scene_identifier,
|
||||
q=True,
|
||||
matrix=True,
|
||||
worldSpace=True
|
||||
)
|
||||
scene_parent = cmds.listRelatives(
|
||||
placeholder.scene_identifier, parent=True, fullPath=True
|
||||
)
|
||||
for node in set(nodes_to_parent):
|
||||
cmds.reorder(node, front=True)
|
||||
cmds.reorder(node, relative=placeholder.data["index"])
|
||||
cmds.xform(node, matrix=placeholder_form, ws=True)
|
||||
if scene_parent:
|
||||
cmds.parent(node, scene_parent)
|
||||
else:
|
||||
if cmds.listRelatives(node, parent=True):
|
||||
cmds.parent(node, world=True)
|
||||
imprint(node, data)
|
||||
|
||||
holding_sets = cmds.listSets(object=placeholder.scene_identifier)
|
||||
if not holding_sets:
|
||||
return
|
||||
for holding_set in holding_sets:
|
||||
cmds.sets(roots, forceElement=holding_set)
|
||||
def read(self, node):
|
||||
"""Read call for placeholder node"""
|
||||
|
||||
data = read(node)
|
||||
|
||||
# Complicated data that can't be represented as flat maya attributes
|
||||
# we read from json strings, e.g. multiselection EnumDef
|
||||
for key, value in data.items():
|
||||
if isinstance(value, str) and value.startswith("JSON::"):
|
||||
value = value[len("JSON::"):] # strip of JSON:: prefix
|
||||
data[key] = json.loads(value)
|
||||
|
||||
return data
|
||||
|
||||
|
||||
def build_workfile_template(*args):
|
||||
|
|
|
|||
|
|
@ -1,89 +0,0 @@
|
|||
from ayon_core.hosts.maya.api import (
|
||||
lib,
|
||||
plugin
|
||||
)
|
||||
from ayon_core.lib import (
|
||||
BoolDef,
|
||||
TextDef
|
||||
)
|
||||
|
||||
|
||||
class CreateAnimation(plugin.MayaHiddenCreator):
|
||||
"""Animation output for character rigs
|
||||
|
||||
We hide the animation creator from the UI since the creation of it is
|
||||
automated upon loading a rig. There's an inventory action to recreate it
|
||||
for loaded rigs if by chance someone deleted the animation instance.
|
||||
"""
|
||||
identifier = "io.openpype.creators.maya.animation"
|
||||
name = "animationDefault"
|
||||
label = "Animation"
|
||||
product_type = "animation"
|
||||
icon = "male"
|
||||
|
||||
write_color_sets = False
|
||||
write_face_sets = False
|
||||
include_parent_hierarchy = False
|
||||
include_user_defined_attributes = False
|
||||
|
||||
def get_instance_attr_defs(self):
|
||||
|
||||
defs = lib.collect_animation_defs()
|
||||
|
||||
defs.extend([
|
||||
BoolDef("writeColorSets",
|
||||
label="Write vertex colors",
|
||||
tooltip="Write vertex colors with the geometry",
|
||||
default=self.write_color_sets),
|
||||
BoolDef("writeFaceSets",
|
||||
label="Write face sets",
|
||||
tooltip="Write face sets with the geometry",
|
||||
default=self.write_face_sets),
|
||||
BoolDef("writeNormals",
|
||||
label="Write normals",
|
||||
tooltip="Write normals with the deforming geometry",
|
||||
default=True),
|
||||
BoolDef("renderableOnly",
|
||||
label="Renderable Only",
|
||||
tooltip="Only export renderable visible shapes",
|
||||
default=False),
|
||||
BoolDef("visibleOnly",
|
||||
label="Visible Only",
|
||||
tooltip="Only export dag objects visible during "
|
||||
"frame range",
|
||||
default=False),
|
||||
BoolDef("includeParentHierarchy",
|
||||
label="Include Parent Hierarchy",
|
||||
tooltip="Whether to include parent hierarchy of nodes in "
|
||||
"the publish instance",
|
||||
default=self.include_parent_hierarchy),
|
||||
BoolDef("worldSpace",
|
||||
label="World-Space Export",
|
||||
default=True),
|
||||
BoolDef("includeUserDefinedAttributes",
|
||||
label="Include User Defined Attributes",
|
||||
default=self.include_user_defined_attributes),
|
||||
TextDef("attr",
|
||||
label="Custom Attributes",
|
||||
default="",
|
||||
placeholder="attr1, attr2"),
|
||||
TextDef("attrPrefix",
|
||||
label="Custom Attributes Prefix",
|
||||
placeholder="prefix1, prefix2")
|
||||
])
|
||||
|
||||
# TODO: Implement these on a Deadline plug-in instead?
|
||||
"""
|
||||
# Default to not send to farm.
|
||||
self.data["farm"] = False
|
||||
self.data["priority"] = 50
|
||||
"""
|
||||
|
||||
return defs
|
||||
|
||||
def apply_settings(self, project_settings):
|
||||
super(CreateAnimation, self).apply_settings(project_settings)
|
||||
# Hardcoding creator to be enabled due to existing settings would
|
||||
# disable the creator causing the creator plugin to not be
|
||||
# discoverable.
|
||||
self.enabled = True
|
||||
|
|
@ -0,0 +1,138 @@
|
|||
from maya import cmds
|
||||
|
||||
from ayon_core.hosts.maya.api import lib, plugin
|
||||
|
||||
from ayon_core.lib import (
|
||||
BoolDef,
|
||||
NumberDef,
|
||||
)
|
||||
|
||||
|
||||
def _get_animation_attr_defs(cls):
|
||||
"""Get Animation generic definitions."""
|
||||
defs = lib.collect_animation_defs()
|
||||
defs.extend(
|
||||
[
|
||||
BoolDef("farm", label="Submit to Farm"),
|
||||
NumberDef("priority", label="Farm job Priority", default=50),
|
||||
BoolDef("refresh", label="Refresh viewport during export"),
|
||||
BoolDef(
|
||||
"includeParentHierarchy",
|
||||
label="Include Parent Hierarchy",
|
||||
tooltip=(
|
||||
"Whether to include parent hierarchy of nodes in the "
|
||||
"publish instance."
|
||||
)
|
||||
),
|
||||
BoolDef(
|
||||
"includeUserDefinedAttributes",
|
||||
label="Include User Defined Attributes",
|
||||
tooltip=(
|
||||
"Whether to include all custom maya attributes found "
|
||||
"on nodes as attributes in the Alembic data."
|
||||
)
|
||||
),
|
||||
]
|
||||
)
|
||||
|
||||
return defs
|
||||
|
||||
|
||||
def convert_legacy_alembic_creator_attributes(node_data, class_name):
|
||||
"""This is a legacy transfer of creator attributes to publish attributes
|
||||
for ExtractAlembic/ExtractAnimation plugin.
|
||||
"""
|
||||
publish_attributes = node_data["publish_attributes"]
|
||||
|
||||
if class_name in publish_attributes:
|
||||
return node_data
|
||||
|
||||
attributes = [
|
||||
"attr",
|
||||
"attrPrefix",
|
||||
"visibleOnly",
|
||||
"writeColorSets",
|
||||
"writeFaceSets",
|
||||
"writeNormals",
|
||||
"renderableOnly",
|
||||
"visibleOnly",
|
||||
"worldSpace",
|
||||
"renderableOnly"
|
||||
]
|
||||
plugin_attributes = {}
|
||||
for attr in attributes:
|
||||
if attr not in node_data["creator_attributes"]:
|
||||
continue
|
||||
value = node_data["creator_attributes"].pop(attr)
|
||||
|
||||
plugin_attributes[attr] = value
|
||||
|
||||
publish_attributes[class_name] = plugin_attributes
|
||||
|
||||
return node_data
|
||||
|
||||
|
||||
class CreateAnimation(plugin.MayaHiddenCreator):
|
||||
"""Animation output for character rigs
|
||||
|
||||
We hide the animation creator from the UI since the creation of it is
|
||||
automated upon loading a rig. There's an inventory action to recreate it
|
||||
for loaded rigs if by chance someone deleted the animation instance.
|
||||
"""
|
||||
|
||||
identifier = "io.openpype.creators.maya.animation"
|
||||
name = "animationDefault"
|
||||
label = "Animation"
|
||||
product_type = "animation"
|
||||
icon = "male"
|
||||
|
||||
write_color_sets = False
|
||||
write_face_sets = False
|
||||
include_parent_hierarchy = False
|
||||
include_user_defined_attributes = False
|
||||
|
||||
def read_instance_node(self, node):
|
||||
node_data = super(CreateAnimation, self).read_instance_node(node)
|
||||
node_data = convert_legacy_alembic_creator_attributes(
|
||||
node_data, "ExtractAnimation"
|
||||
)
|
||||
return node_data
|
||||
|
||||
def get_instance_attr_defs(self):
|
||||
defs = super(CreateAnimation, self).get_instance_attr_defs()
|
||||
defs += _get_animation_attr_defs(self)
|
||||
return defs
|
||||
|
||||
|
||||
class CreatePointCache(plugin.MayaCreator):
|
||||
"""Alembic pointcache for animated data"""
|
||||
|
||||
identifier = "io.openpype.creators.maya.pointcache"
|
||||
label = "Pointcache"
|
||||
product_type = "pointcache"
|
||||
icon = "gears"
|
||||
write_color_sets = False
|
||||
write_face_sets = False
|
||||
include_user_defined_attributes = False
|
||||
|
||||
def read_instance_node(self, node):
|
||||
node_data = super(CreatePointCache, self).read_instance_node(node)
|
||||
node_data = convert_legacy_alembic_creator_attributes(
|
||||
node_data, "ExtractAlembic"
|
||||
)
|
||||
return node_data
|
||||
|
||||
def get_instance_attr_defs(self):
|
||||
defs = super(CreatePointCache, self).get_instance_attr_defs()
|
||||
defs += _get_animation_attr_defs(self)
|
||||
return defs
|
||||
|
||||
def create(self, product_name, instance_data, pre_create_data):
|
||||
instance = super(CreatePointCache, self).create(
|
||||
product_name, instance_data, pre_create_data
|
||||
)
|
||||
instance_node = instance.get("instance_node")
|
||||
|
||||
# For Arnold standin proxy
|
||||
proxy_set = cmds.sets(name=instance_node + "_proxy_SET", empty=True)
|
||||
cmds.sets(proxy_set, forceElement=instance_node)
|
||||
|
|
@@ -1,3 +1,5 @@
from maya import cmds

from ayon_core.hosts.maya.api import (
    lib,
    plugin

@@ -87,16 +89,24 @@ class CreateArnoldSceneSource(plugin.MayaCreator):

        return defs


class CreateArnoldSceneSourceProxy(CreateArnoldSceneSource):
    """Arnold Scene Source Proxy

    This product type facilitates working with proxy geometry in the viewport.
    """

    identifier = "io.openpype.creators.maya.assproxy"
    label = "Arnold Scene Source Proxy"
    product_type = "assProxy"
    icon = "cube"

    def create(self, product_name, instance_data, pre_create_data):

        from maya import cmds

        instance = super(CreateArnoldSceneSource, self).create(
            product_name, instance_data, pre_create_data
        )

        instance_node = instance.get("instance_node")

        content = cmds.sets(name=instance_node + "_content_SET", empty=True)
        proxy = cmds.sets(name=instance_node + "_proxy_SET", empty=True)
        cmds.sets([content, proxy], forceElement=instance_node)
        cmds.sets([proxy], forceElement=instance_node)

@ -1,88 +0,0 @@
|
|||
from maya import cmds
|
||||
|
||||
from ayon_core.hosts.maya.api import (
|
||||
lib,
|
||||
plugin
|
||||
)
|
||||
from ayon_core.lib import (
|
||||
BoolDef,
|
||||
TextDef
|
||||
)
|
||||
|
||||
|
||||
class CreatePointCache(plugin.MayaCreator):
|
||||
"""Alembic pointcache for animated data"""
|
||||
|
||||
identifier = "io.openpype.creators.maya.pointcache"
|
||||
label = "Pointcache"
|
||||
product_type = "pointcache"
|
||||
icon = "gears"
|
||||
write_color_sets = False
|
||||
write_face_sets = False
|
||||
include_user_defined_attributes = False
|
||||
|
||||
def get_instance_attr_defs(self):
|
||||
|
||||
defs = lib.collect_animation_defs()
|
||||
|
||||
defs.extend([
|
||||
BoolDef("writeColorSets",
|
||||
label="Write vertex colors",
|
||||
tooltip="Write vertex colors with the geometry",
|
||||
default=False),
|
||||
BoolDef("writeFaceSets",
|
||||
label="Write face sets",
|
||||
tooltip="Write face sets with the geometry",
|
||||
default=False),
|
||||
BoolDef("renderableOnly",
|
||||
label="Renderable Only",
|
||||
tooltip="Only export renderable visible shapes",
|
||||
default=False),
|
||||
BoolDef("visibleOnly",
|
||||
label="Visible Only",
|
||||
tooltip="Only export dag objects visible during "
|
||||
"frame range",
|
||||
default=False),
|
||||
BoolDef("includeParentHierarchy",
|
||||
label="Include Parent Hierarchy",
|
||||
tooltip="Whether to include parent hierarchy of nodes in "
|
||||
"the publish instance",
|
||||
default=False),
|
||||
BoolDef("worldSpace",
|
||||
label="World-Space Export",
|
||||
default=True),
|
||||
BoolDef("refresh",
|
||||
label="Refresh viewport during export",
|
||||
default=False),
|
||||
BoolDef("includeUserDefinedAttributes",
|
||||
label="Include User Defined Attributes",
|
||||
default=self.include_user_defined_attributes),
|
||||
TextDef("attr",
|
||||
label="Custom Attributes",
|
||||
default="",
|
||||
placeholder="attr1, attr2"),
|
||||
TextDef("attrPrefix",
|
||||
label="Custom Attributes Prefix",
|
||||
default="",
|
||||
placeholder="prefix1, prefix2")
|
||||
])
|
||||
|
||||
# TODO: Implement these on a Deadline plug-in instead?
|
||||
"""
|
||||
# Default to not send to farm.
|
||||
self.data["farm"] = False
|
||||
self.data["priority"] = 50
|
||||
"""
|
||||
|
||||
return defs
|
||||
|
||||
def create(self, product_name, instance_data, pre_create_data):
|
||||
|
||||
instance = super(CreatePointCache, self).create(
|
||||
product_name, instance_data, pre_create_data
|
||||
)
|
||||
instance_node = instance.get("instance_node")
|
||||
|
||||
# For Arnold standin proxy
|
||||
proxy_set = cmds.sets(name=instance_node + "_proxy_SET", empty=True)
|
||||
cmds.sets(proxy_set, forceElement=instance_node)
|
||||
|
|
@@ -12,6 +12,7 @@ from ayon_core.hosts.maya.api.lib import (
    unique_namespace,
    get_attribute_input,
    maintained_selection,
    get_fps_for_current_context
)
from ayon_core.hosts.maya.api.pipeline import containerise
from ayon_core.hosts.maya.api.plugin import get_load_color_for_product_type

@@ -29,7 +30,13 @@ class ArnoldStandinLoader(load.LoaderPlugin):
    """Load as Arnold standin"""

    product_types = {
        "ass", "animation", "model", "proxyAbc", "pointcache", "usd"
        "ass",
        "assProxy",
        "animation",
        "model",
        "proxyAbc",
        "pointcache",
        "usd"
    }
    representations = {"ass", "abc", "usda", "usdc", "usd"}

@@ -95,8 +102,10 @@ class ArnoldStandinLoader(load.LoaderPlugin):
        sequence = is_sequence(os.listdir(os.path.dirname(repre_path)))
        cmds.setAttr(standin_shape + ".useFrameExtension", sequence)

        fps = float(version_attributes.get("fps")) or 25
        cmds.setAttr(standin_shape + ".abcFPS", fps)
        fps = (
            version_attributes.get("fps") or get_fps_for_current_context()
        )
        cmds.setAttr(standin_shape + ".abcFPS", float(fps))

        nodes = [root, standin, standin_shape]
        if operator is not None:

@ -128,6 +137,18 @@ class ArnoldStandinLoader(load.LoaderPlugin):
|
|||
proxy_path = "/".join([os.path.dirname(path), proxy_basename])
|
||||
return proxy_basename, proxy_path
|
||||
|
||||
def _update_operators(self, string_replace_operator, proxy_basename, path):
|
||||
cmds.setAttr(
|
||||
string_replace_operator + ".match",
|
||||
proxy_basename.split(".")[0],
|
||||
type="string"
|
||||
)
|
||||
cmds.setAttr(
|
||||
string_replace_operator + ".replace",
|
||||
os.path.basename(path).split(".")[0],
|
||||
type="string"
|
||||
)
|
||||
|
||||
def _setup_proxy(self, shape, path, namespace):
|
||||
proxy_basename, proxy_path = self._get_proxy_path(path)
|
||||
|
||||
|
|
@ -150,16 +171,7 @@ class ArnoldStandinLoader(load.LoaderPlugin):
|
|||
"*.(@node=='{}')".format(node_type),
|
||||
type="string"
|
||||
)
|
||||
cmds.setAttr(
|
||||
string_replace_operator + ".match",
|
||||
proxy_basename,
|
||||
type="string"
|
||||
)
|
||||
cmds.setAttr(
|
||||
string_replace_operator + ".replace",
|
||||
os.path.basename(path),
|
||||
type="string"
|
||||
)
|
||||
self._update_operators(string_replace_operator, proxy_basename, path)
|
||||
|
||||
cmds.connectAttr(
|
||||
string_replace_operator + ".out",
|
||||
|
|
@ -194,18 +206,9 @@ class ArnoldStandinLoader(load.LoaderPlugin):
|
|||
path = get_representation_path(repre_entity)
|
||||
proxy_basename, proxy_path = self._get_proxy_path(path)
|
||||
|
||||
# Whether there is proxy or so, we still update the string operator.
|
||||
# Whether there is proxy or not, we still update the string operator.
|
||||
# If no proxy exists, the string operator won't replace anything.
|
||||
cmds.setAttr(
|
||||
string_replace_operator + ".match",
|
||||
proxy_basename,
|
||||
type="string"
|
||||
)
|
||||
cmds.setAttr(
|
||||
string_replace_operator + ".replace",
|
||||
os.path.basename(path),
|
||||
type="string"
|
||||
)
|
||||
self._update_operators(string_replace_operator, proxy_basename, path)
|
||||
|
||||
dso_path = path
|
||||
if os.path.exists(proxy_path):
|
||||
|
|
|
|||
|
|
@ -58,4 +58,3 @@ class CollectAnimationOutputGeometry(pyblish.api.InstancePlugin):
|
|||
|
||||
if instance.data.get("farm"):
|
||||
instance.data["families"].append("publish.farm")
|
||||
|
||||
|
|
|
|||
|
|
@ -10,21 +10,23 @@ class CollectArnoldSceneSource(pyblish.api.InstancePlugin):
|
|||
# Offset to be after renderable camera collection.
|
||||
order = pyblish.api.CollectorOrder + 0.2
|
||||
label = "Collect Arnold Scene Source"
|
||||
families = ["ass"]
|
||||
families = ["ass", "assProxy"]
|
||||
|
||||
def process(self, instance):
|
||||
objsets = instance.data["setMembers"]
|
||||
instance.data["members"] = []
|
||||
for set_member in instance.data["setMembers"]:
|
||||
if cmds.nodeType(set_member) != "objectSet":
|
||||
instance.data["members"].extend(self.get_hierarchy(set_member))
|
||||
continue
|
||||
|
||||
for objset in objsets:
|
||||
objset = str(objset)
|
||||
members = cmds.sets(objset, query=True)
|
||||
members = cmds.sets(set_member, query=True)
|
||||
members = cmds.ls(members, long=True)
|
||||
if members is None:
|
||||
self.log.warning("Skipped empty instance: \"%s\" " % objset)
|
||||
self.log.warning(
|
||||
"Skipped empty instance: \"%s\" " % set_member
|
||||
)
|
||||
continue
|
||||
if objset.endswith("content_SET"):
|
||||
instance.data["contentMembers"] = self.get_hierarchy(members)
|
||||
if objset.endswith("proxy_SET"):
|
||||
if set_member.endswith("proxy_SET"):
|
||||
instance.data["proxy"] = self.get_hierarchy(members)
|
||||
|
||||
# Use camera in object set if present else default to render globals
|
||||
|
|
@ -33,7 +35,7 @@ class CollectArnoldSceneSource(pyblish.api.InstancePlugin):
|
|||
renderable = [c for c in cameras if cmds.getAttr("%s.renderable" % c)]
|
||||
if renderable:
|
||||
camera = renderable[0]
|
||||
for node in instance.data["contentMembers"]:
|
||||
for node in instance.data["members"]:
|
||||
camera_shapes = cmds.listRelatives(
|
||||
node, shapes=True, type="camera"
|
||||
)
|
||||
|
|
@ -46,18 +48,11 @@ class CollectArnoldSceneSource(pyblish.api.InstancePlugin):
|
|||
self.log.debug("data: {}".format(instance.data))
|
||||
|
||||
def get_hierarchy(self, nodes):
|
||||
"""Return nodes with all their children.
|
||||
|
||||
Arguments:
|
||||
nodes (List[str]): List of nodes to collect children hierarchy for
|
||||
|
||||
Returns:
|
||||
list: Input nodes with their children hierarchy
|
||||
|
||||
"""
|
||||
"""Return nodes with all their children"""
|
||||
nodes = cmds.ls(nodes, long=True)
|
||||
if not nodes:
|
||||
return []
|
||||
|
||||
children = get_all_children(nodes, ignore_intermediate_objects=True)
|
||||
return list(children.union(nodes))
|
||||
children = get_all_children(nodes)
|
||||
# Make sure nodes merged with children only
|
||||
# contains unique entries
|
||||
return list(set(nodes + list(children)))
|
||||
|
|
|
|||
|
|
@@ -14,7 +14,9 @@ class CollectUserDefinedAttributes(pyblish.api.InstancePlugin):
    def process(self, instance):

        # Collect user defined attributes.
        if not instance.data.get("includeUserDefinedAttributes", False):
        if not instance.data["creator_attributes"].get(
            "includeUserDefinedAttributes"
        ):
            return

        if "out_hierarchy" in instance.data:

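The collector now reads the flag from the instance's creator attributes rather than the top level of instance.data; a minimal sketch of the data shape it expects (values are placeholders):

instance_data = {
    "creator_attributes": {
        "includeUserDefinedAttributes": True,
    },
}

# Mirrors the check above: only collect when the creator attribute is set.
if instance_data["creator_attributes"].get("includeUserDefinedAttributes"):
    print("collecting user defined attributes")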
@ -17,8 +17,7 @@ class ExtractArnoldSceneSource(publish.Extractor):
|
|||
families = ["ass"]
|
||||
asciiAss = False
|
||||
|
||||
def process(self, instance):
|
||||
staging_dir = self.staging_dir(instance)
|
||||
def _pre_process(self, instance, staging_dir):
|
||||
file_path = os.path.join(staging_dir, "{}.ass".format(instance.name))
|
||||
|
||||
# Mask
|
||||
|
|
@ -70,24 +69,38 @@ class ExtractArnoldSceneSource(publish.Extractor):
|
|||
"mask": mask
|
||||
}
|
||||
|
||||
filenames, nodes_by_id = self._extract(
|
||||
instance.data["contentMembers"], attribute_data, kwargs
|
||||
)
|
||||
|
||||
if "representations" not in instance.data:
|
||||
instance.data["representations"] = []
|
||||
|
||||
return attribute_data, kwargs
|
||||
|
||||
def process(self, instance):
|
||||
staging_dir = self.staging_dir(instance)
|
||||
attribute_data, kwargs = self._pre_process(instance, staging_dir)
|
||||
|
||||
filenames = self._extract(
|
||||
instance.data["members"], attribute_data, kwargs
|
||||
)
|
||||
|
||||
self._post_process(
|
||||
instance, filenames, staging_dir, kwargs["startFrame"]
|
||||
)
|
||||
|
||||
def _post_process(self, instance, filenames, staging_dir, frame_start):
|
||||
nodes_by_id = self._nodes_by_id(instance[:])
|
||||
representation = {
|
||||
"name": "ass",
|
||||
"ext": "ass",
|
||||
"files": filenames if len(filenames) > 1 else filenames[0],
|
||||
"stagingDir": staging_dir,
|
||||
"frameStart": kwargs["startFrame"]
|
||||
"frameStart": frame_start
|
||||
}
|
||||
|
||||
instance.data["representations"].append(representation)
|
||||
|
||||
json_path = os.path.join(staging_dir, "{}.json".format(instance.name))
|
||||
json_path = os.path.join(
|
||||
staging_dir, "{}.json".format(instance.name)
|
||||
)
|
||||
with open(json_path, "w") as f:
|
||||
json.dump(nodes_by_id, f)
|
||||
|
||||
|
|
@ -104,13 +117,68 @@ class ExtractArnoldSceneSource(publish.Extractor):
|
|||
"Extracted instance {} to: {}".format(instance.name, staging_dir)
|
||||
)
|
||||
|
||||
# Extract proxy.
|
||||
if not instance.data.get("proxy", []):
|
||||
return
|
||||
def _nodes_by_id(self, nodes):
|
||||
nodes_by_id = defaultdict(list)
|
||||
|
||||
kwargs["filename"] = file_path.replace(".ass", "_proxy.ass")
|
||||
for node in nodes:
|
||||
id = lib.get_id(node)
|
||||
|
||||
filenames, _ = self._extract(
|
||||
if id is None:
|
||||
continue
|
||||
|
||||
# Converting Maya hierarchy separator "|" to Arnold separator "/".
|
||||
nodes_by_id[id].append(node.replace("|", "/"))
|
||||
|
||||
return nodes_by_id
|
||||
|
||||
def _extract(self, nodes, attribute_data, kwargs):
|
||||
filenames = []
|
||||
with lib.attribute_values(attribute_data):
|
||||
with lib.maintained_selection():
|
||||
self.log.debug(
|
||||
"Writing: {}".format(nodes)
|
||||
)
|
||||
cmds.select(nodes, noExpand=True)
|
||||
|
||||
self.log.debug(
|
||||
"Extracting ass sequence with: {}".format(kwargs)
|
||||
)
|
||||
|
||||
exported_files = cmds.arnoldExportAss(**kwargs)
|
||||
|
||||
for file in exported_files:
|
||||
filenames.append(os.path.split(file)[1])
|
||||
|
||||
self.log.debug("Exported: {}".format(filenames))
|
||||
|
||||
return filenames
|
||||
|
||||
|
||||
class ExtractArnoldSceneSourceProxy(ExtractArnoldSceneSource):
|
||||
"""Extract the content of the instance to an Arnold Scene Source file."""
|
||||
|
||||
label = "Extract Arnold Scene Source Proxy"
|
||||
hosts = ["maya"]
|
||||
families = ["assProxy"]
|
||||
asciiAss = True
|
||||
|
||||
def process(self, instance):
|
||||
staging_dir = self.staging_dir(instance)
|
||||
attribute_data, kwargs = self._pre_process(instance, staging_dir)
|
||||
|
||||
filenames, _ = self._duplicate_extract(
|
||||
instance.data["members"], attribute_data, kwargs
|
||||
)
|
||||
|
||||
self._post_process(
|
||||
instance, filenames, staging_dir, kwargs["startFrame"]
|
||||
)
|
||||
|
||||
kwargs["filename"] = os.path.join(
|
||||
staging_dir, "{}_proxy.ass".format(instance.name)
|
||||
)
|
||||
|
||||
filenames, _ = self._duplicate_extract(
|
||||
instance.data["proxy"], attribute_data, kwargs
|
||||
)
|
||||
|
||||
|
|
@ -125,12 +193,11 @@ class ExtractArnoldSceneSource(publish.Extractor):
|
|||
|
||||
instance.data["representations"].append(representation)
|
||||
|
||||
def _extract(self, nodes, attribute_data, kwargs):
|
||||
def _duplicate_extract(self, nodes, attribute_data, kwargs):
|
||||
self.log.debug(
|
||||
"Writing {} with:\n{}".format(kwargs["filename"], kwargs)
|
||||
)
|
||||
filenames = []
|
||||
nodes_by_id = defaultdict(list)
|
||||
# Duplicating nodes so they are direct children of the world. This
|
||||
# makes the hierarchy of any exported ass file the same.
|
||||
with lib.delete_after() as delete_bin:
|
||||
|
|
@ -147,7 +214,9 @@ class ExtractArnoldSceneSource(publish.Extractor):
|
|||
if not shapes:
|
||||
continue
|
||||
|
||||
duplicate_transform = cmds.duplicate(node)[0]
|
||||
basename = cmds.duplicate(node)[0]
|
||||
parents = cmds.ls(node, long=True)[0].split("|")[:-1]
|
||||
duplicate_transform = "|".join(parents + [basename])
|
||||
|
||||
if cmds.listRelatives(duplicate_transform, parent=True):
|
||||
duplicate_transform = cmds.parent(
|
||||
|
|
@ -172,28 +241,7 @@ class ExtractArnoldSceneSource(publish.Extractor):
|
|||
duplicate_nodes.extend(shapes)
|
||||
delete_bin.append(duplicate_transform)
|
||||
|
||||
# Copy cbId to mtoa_constant.
|
||||
for node in duplicate_nodes:
|
||||
# Converting Maya hierarchy separator "|" to Arnold
|
||||
# separator "/".
|
||||
nodes_by_id[lib.get_id(node)].append(node.replace("|", "/"))
|
||||
|
||||
with lib.attribute_values(attribute_data):
|
||||
with lib.maintained_selection():
|
||||
self.log.debug(
|
||||
"Writing: {}".format(duplicate_nodes)
|
||||
)
|
||||
cmds.select(duplicate_nodes, noExpand=True)
|
||||
|
||||
self.log.debug(
|
||||
"Extracting ass sequence with: {}".format(kwargs)
|
||||
)
|
||||
|
||||
exported_files = cmds.arnoldExportAss(**kwargs)
|
||||
|
||||
for file in exported_files:
|
||||
filenames.append(os.path.split(file)[1])
|
||||
|
||||
self.log.debug("Exported: {}".format(filenames))
|
||||
nodes_by_id = self._nodes_by_id(duplicate_nodes)
|
||||
filenames = self._extract(duplicate_nodes, attribute_data, kwargs)
|
||||
|
||||
return filenames, nodes_by_id
|
||||
|
|
|
|||
|
|
@@ -2,7 +2,7 @@ import os
import json

from ayon_core.pipeline import publish
from ayon_core.hosts.maya.api.lib import extract_alembic
from ayon_core.hosts.maya.api.alembic import extract_alembic

from maya import cmds

@ -1,17 +1,29 @@
|
|||
import os
|
||||
from collections import OrderedDict
|
||||
|
||||
from maya import cmds
|
||||
|
||||
from ayon_core.pipeline import publish
|
||||
from ayon_core.hosts.maya.api.alembic import extract_alembic
|
||||
from ayon_core.hosts.maya.api.lib import (
|
||||
extract_alembic,
|
||||
get_all_children,
|
||||
suspended_refresh,
|
||||
maintained_selection,
|
||||
iter_visible_nodes_in_range
|
||||
)
|
||||
from ayon_core.lib import (
|
||||
BoolDef,
|
||||
TextDef,
|
||||
NumberDef,
|
||||
EnumDef,
|
||||
UISeparatorDef,
|
||||
UILabelDef,
|
||||
)
|
||||
from ayon_core.pipeline.publish import AYONPyblishPluginMixin
|
||||
from ayon_core.pipeline import KnownPublishError
|
||||
|
||||
|
||||
class ExtractAlembic(publish.Extractor):
|
||||
class ExtractAlembic(publish.Extractor, AYONPyblishPluginMixin):
|
||||
"""Produce an alembic of just point positions and normals.
|
||||
|
||||
Positions and normals, uvs, creases are preserved, but nothing more,
|
||||
|
|
@ -27,8 +39,35 @@ class ExtractAlembic(publish.Extractor):
|
|||
targets = ["local", "remote"]
|
||||
|
||||
# From settings
|
||||
attr = []
|
||||
attrPrefix = []
|
||||
bake_attributes = []
|
||||
bake_attribute_prefixes = []
|
||||
dataFormat = "ogawa"
|
||||
eulerFilter = False
|
||||
melPerFrameCallback = ""
|
||||
melPostJobCallback = ""
|
||||
overrides = []
|
||||
preRoll = False
|
||||
preRollStartFrame = 0
|
||||
pythonPerFrameCallback = ""
|
||||
pythonPostJobCallback = ""
|
||||
renderableOnly = False
|
||||
stripNamespaces = True
|
||||
uvsOnly = False
|
||||
uvWrite = False
|
||||
userAttr = ""
|
||||
userAttrPrefix = ""
|
||||
verbose = False
|
||||
visibleOnly = False
|
||||
wholeFrameGeo = False
|
||||
worldSpace = True
|
||||
writeColorSets = False
|
||||
writeCreases = False
|
||||
writeFaceSets = False
|
||||
writeNormals = True
|
||||
writeUVSets = False
|
||||
writeVisibility = False
|
||||
|
||||
def process(self, instance):
|
||||
if instance.data.get("farm"):
|
||||
|
|
@ -41,16 +80,38 @@ class ExtractAlembic(publish.Extractor):
|
|||
start = float(instance.data.get("frameStartHandle", 1))
|
||||
end = float(instance.data.get("frameEndHandle", 1))
|
||||
|
||||
attrs = instance.data.get("attr", "").split(";")
|
||||
attrs = [value for value in attrs if value.strip()]
|
||||
attribute_values = self.get_attr_values_from_data(
|
||||
instance.data
|
||||
)
|
||||
|
||||
attrs = [
|
||||
attr.strip()
|
||||
for attr in attribute_values.get("attr", "").split(";")
|
||||
if attr.strip()
|
||||
]
|
||||
attrs += instance.data.get("userDefinedAttributes", [])
|
||||
attrs += self.bake_attributes
|
||||
attrs += ["cbId"]
|
||||
|
||||
attr_prefixes = instance.data.get("attrPrefix", "").split(";")
|
||||
attr_prefixes = [value for value in attr_prefixes if value.strip()]
|
||||
attr_prefixes = [
|
||||
attr.strip()
|
||||
for attr in attribute_values.get("attrPrefix", "").split(";")
|
||||
if attr.strip()
|
||||
]
|
||||
attr_prefixes += self.bake_attribute_prefixes
|
||||
|
||||
user_attrs = [
|
||||
attr.strip()
|
||||
for attr in attribute_values.get("userAttr", "").split(";")
|
||||
if attr.strip()
|
||||
]
|
||||
|
||||
user_attr_prefixes = [
|
||||
attr.strip()
|
||||
for attr in attribute_values.get("userAttrPrefix", "").split(";")
|
||||
if attr.strip()
|
||||
]
|
||||
|
||||
self.log.debug("Extracting pointcache..")
|
||||
dirname = self.staging_dir(instance)
|
||||
|
||||
|
|
@ -58,28 +119,82 @@ class ExtractAlembic(publish.Extractor):
|
|||
filename = "{name}.abc".format(**instance.data)
|
||||
path = os.path.join(parent_dir, filename)
|
||||
|
||||
options = {
|
||||
"step": instance.data.get("step", 1.0),
|
||||
"attr": attrs,
|
||||
"attrPrefix": attr_prefixes,
|
||||
"writeVisibility": True,
|
||||
"writeCreases": True,
|
||||
"writeColorSets": instance.data.get("writeColorSets", False),
|
||||
"writeFaceSets": instance.data.get("writeFaceSets", False),
|
||||
"uvWrite": True,
|
||||
"selection": True,
|
||||
"worldSpace": instance.data.get("worldSpace", True)
|
||||
}
|
||||
|
||||
root = None
|
||||
if not instance.data.get("includeParentHierarchy", True):
|
||||
# Set the root nodes if we don't want to include parents
|
||||
# The roots are to be considered the ones that are the actual
|
||||
# direct members of the set
|
||||
options["root"] = roots
|
||||
root = roots
|
||||
|
||||
if int(cmds.about(version=True)) >= 2017:
|
||||
# Since Maya 2017 alembic supports multiple uv sets - write them.
|
||||
options["writeUVSets"] = True
|
||||
kwargs = {
|
||||
"file": path,
|
||||
"attr": attrs,
|
||||
"attrPrefix": attr_prefixes,
|
||||
"userAttr": user_attrs,
|
||||
"userAttrPrefix": user_attr_prefixes,
|
||||
"dataFormat": attribute_values.get("dataFormat", self.dataFormat),
|
||||
"endFrame": end,
|
||||
"eulerFilter": attribute_values.get(
|
||||
"eulerFilter", self.eulerFilter
|
||||
),
|
||||
"preRoll": attribute_values.get("preRoll", self.preRoll),
|
||||
"preRollStartFrame": attribute_values.get(
|
||||
"preRollStartFrame", self.preRollStartFrame
|
||||
),
|
||||
"renderableOnly": attribute_values.get(
|
||||
"renderableOnly", self.renderableOnly
|
||||
),
|
||||
"root": root,
|
||||
"selection": True,
|
||||
"startFrame": start,
|
||||
"step": instance.data.get(
|
||||
"creator_attributes", {}
|
||||
).get("step", 1.0),
|
||||
"stripNamespaces": attribute_values.get(
|
||||
"stripNamespaces", self.stripNamespaces
|
||||
),
|
||||
"uvWrite": attribute_values.get("uvWrite", self.uvWrite),
|
||||
"verbose": attribute_values.get("verbose", self.verbose),
|
||||
"wholeFrameGeo": attribute_values.get(
|
||||
"wholeFrameGeo", self.wholeFrameGeo
|
||||
),
|
||||
"worldSpace": attribute_values.get("worldSpace", self.worldSpace),
|
||||
"writeColorSets": attribute_values.get(
|
||||
"writeColorSets", self.writeColorSets
|
||||
),
|
||||
"writeCreases": attribute_values.get(
|
||||
"writeCreases", self.writeCreases
|
||||
),
|
||||
"writeFaceSets": attribute_values.get(
|
||||
"writeFaceSets", self.writeFaceSets
|
||||
),
|
||||
"writeUVSets": attribute_values.get(
|
||||
"writeUVSets", self.writeUVSets
|
||||
),
|
||||
"writeVisibility": attribute_values.get(
|
||||
"writeVisibility", self.writeVisibility
|
||||
),
|
||||
"uvsOnly": attribute_values.get(
|
||||
"uvsOnly", self.uvsOnly
|
||||
),
|
||||
"melPerFrameCallback": attribute_values.get(
|
||||
"melPerFrameCallback", self.melPerFrameCallback
|
||||
),
|
||||
"melPostJobCallback": attribute_values.get(
|
||||
"melPostJobCallback", self.melPostJobCallback
|
||||
),
|
||||
"pythonPerFrameCallback": attribute_values.get(
|
||||
"pythonPerFrameCallback", self.pythonPostJobCallback
|
||||
),
|
||||
"pythonPostJobCallback": attribute_values.get(
|
||||
"pythonPostJobCallback", self.pythonPostJobCallback
|
||||
),
|
||||
# Note that this converts `writeNormals` to `noNormals` for the
|
||||
# `AbcExport` equivalent in `extract_alembic`
|
||||
"noNormals": not attribute_values.get(
|
||||
"writeNormals", self.writeNormals
|
||||
),
|
||||
}
|
||||
|
||||
if instance.data.get("visibleOnly", False):
|
||||
# If we only want to include nodes that are visible in the frame
|
||||
|
|
@ -87,20 +202,19 @@ class ExtractAlembic(publish.Extractor):
|
|||
# flag does not filter out those that are only hidden on some
|
||||
# frames as it counts "animated" or "connected" visibilities as
|
||||
# if it's always visible.
|
||||
nodes = list(iter_visible_nodes_in_range(nodes,
|
||||
start=start,
|
||||
end=end))
|
||||
nodes = list(
|
||||
iter_visible_nodes_in_range(nodes, start=start, end=end)
|
||||
)
|
||||
|
||||
suspend = not instance.data.get("refresh", False)
|
||||
with suspended_refresh(suspend=suspend):
|
||||
with maintained_selection():
|
||||
cmds.select(nodes, noExpand=True)
|
||||
extract_alembic(
|
||||
file=path,
|
||||
startFrame=start,
|
||||
endFrame=end,
|
||||
**options
|
||||
self.log.debug(
|
||||
"Running `extract_alembic` with the keyword arguments: "
|
||||
"{}".format(kwargs)
|
||||
)
|
||||
extract_alembic(**kwargs)
|
||||
|
||||
if "representations" not in instance.data:
|
||||
instance.data["representations"] = []
|
||||
|
|
@ -124,22 +238,17 @@ class ExtractAlembic(publish.Extractor):
|
|||
return
|
||||
|
||||
path = path.replace(".abc", "_proxy.abc")
|
||||
kwargs["file"] = path
|
||||
if not instance.data.get("includeParentHierarchy", True):
|
||||
# Set the root nodes if we don't want to include parents
|
||||
# The roots are to be considered the ones that are the actual
|
||||
# direct members of the set
|
||||
options["root"] = instance.data["proxyRoots"]
|
||||
kwargs["root"] = instance.data["proxyRoots"]
|
||||
|
||||
with suspended_refresh(suspend=suspend):
|
||||
with maintained_selection():
|
||||
cmds.select(instance.data["proxy"])
|
||||
extract_alembic(
|
||||
file=path,
|
||||
startFrame=start,
|
||||
endFrame=end,
|
||||
**options
|
||||
)
|
||||
|
||||
extract_alembic(**kwargs)
|
||||
representation = {
|
||||
"name": "proxy",
|
||||
"ext": "abc",
|
||||
|
|
@ -152,24 +261,265 @@ class ExtractAlembic(publish.Extractor):
|
|||
def get_members_and_roots(self, instance):
|
||||
return instance[:], instance.data.get("setMembers")
|
||||
|
||||
@classmethod
|
||||
def get_attribute_defs(cls):
|
||||
if not cls.overrides:
|
||||
return []
|
||||
|
||||
override_defs = OrderedDict({
|
||||
"eulerFilter": BoolDef(
|
||||
"eulerFilter",
|
||||
label="Euler Filter",
|
||||
default=cls.eulerFilter,
|
||||
tooltip="Apply Euler filter while sampling rotations."
|
||||
),
|
||||
"renderableOnly": BoolDef(
|
||||
"renderableOnly",
|
||||
label="Renderable Only",
|
||||
default=cls.renderableOnly,
|
||||
tooltip="Only export renderable visible shapes."
|
||||
),
|
||||
"stripNamespaces": BoolDef(
|
||||
"stripNamespaces",
|
||||
label="Strip Namespaces",
|
||||
default=cls.stripNamespaces,
|
||||
tooltip=(
|
||||
"Namespaces will be stripped off of the node before being "
|
||||
"written to Alembic."
|
||||
)
|
||||
),
|
||||
"uvsOnly": BoolDef(
|
||||
"uvsOnly",
|
||||
label="UVs Only",
|
||||
default=cls.uvsOnly,
|
||||
tooltip=(
|
||||
"If this flag is present, only uv data for PolyMesh and "
|
||||
"SubD shapes will be written to the Alembic file."
|
||||
)
|
||||
),
|
||||
"uvWrite": BoolDef(
|
||||
"uvWrite",
|
||||
label="UV Write",
|
||||
default=cls.uvWrite,
|
||||
tooltip=(
|
||||
"Uv data for PolyMesh and SubD shapes will be written to "
|
||||
"the Alembic file."
|
||||
)
|
||||
),
|
||||
"verbose": BoolDef(
|
||||
"verbose",
|
||||
label="Verbose",
|
||||
default=cls.verbose,
|
||||
tooltip="Prints the current frame that is being evaluated."
|
||||
),
|
||||
"visibleOnly": BoolDef(
|
||||
"visibleOnly",
|
||||
label="Visible Only",
|
||||
default=cls.visibleOnly,
|
||||
tooltip="Only export dag objects visible during frame range."
|
||||
),
|
||||
"wholeFrameGeo": BoolDef(
|
||||
"wholeFrameGeo",
|
||||
label="Whole Frame Geo",
|
||||
default=cls.wholeFrameGeo,
|
||||
tooltip=(
|
||||
"Data for geometry will only be written out on whole "
|
||||
"frames."
|
||||
)
|
||||
),
|
||||
"worldSpace": BoolDef(
|
||||
"worldSpace",
|
||||
label="World Space",
|
||||
default=cls.worldSpace,
|
||||
tooltip="Any root nodes will be stored in world space."
|
||||
),
|
||||
"writeColorSets": BoolDef(
|
||||
"writeColorSets",
|
||||
label="Write Color Sets",
|
||||
default=cls.writeColorSets,
|
||||
tooltip="Write vertex colors with the geometry."
|
||||
),
|
||||
"writeCreases": BoolDef(
|
||||
"writeCreases",
|
||||
label="Write Creases",
|
||||
default=cls.writeCreases,
|
||||
tooltip="Write the geometry's edge and vertex crease "
|
||||
"information."
|
||||
),
|
||||
"writeFaceSets": BoolDef(
|
||||
"writeFaceSets",
|
||||
label="Write Face Sets",
|
||||
default=cls.writeFaceSets,
|
||||
tooltip="Write face sets with the geometry."
|
||||
),
|
||||
"writeNormals": BoolDef(
|
||||
"writeNormals",
|
||||
label="Write Normals",
|
||||
default=cls.writeNormals,
|
||||
tooltip="Write normals with the deforming geometry."
|
||||
),
|
||||
"writeUVSets": BoolDef(
|
||||
"writeUVSets",
|
||||
label="Write UV Sets",
|
||||
default=cls.writeUVSets,
|
||||
tooltip=(
|
||||
"Write all uv sets on MFnMeshes as vector 2 indexed "
|
||||
"geometry parameters with face varying scope."
|
||||
)
|
||||
),
|
||||
"writeVisibility": BoolDef(
|
||||
"writeVisibility",
|
||||
label="Write Visibility",
|
||||
default=cls.writeVisibility,
|
||||
tooltip=(
|
||||
"Visibility state will be stored in the Alembic file. "
|
||||
"Otherwise everything written out is treated as visible."
|
||||
)
|
||||
),
|
||||
"preRoll": BoolDef(
|
||||
"preRoll",
|
||||
label="Pre Roll",
|
||||
default=cls.preRoll,
|
||||
tooltip="This frame range will not be sampled."
|
||||
),
|
||||
"preRollStartFrame": NumberDef(
|
||||
"preRollStartFrame",
|
||||
label="Pre Roll Start Frame",
|
||||
tooltip=(
|
||||
"The frame to start scene evaluation at. This is used"
|
||||
" to set the starting frame for time dependent "
|
||||
"translations and can be used to evaluate run-up that"
|
||||
" isn't actually translated."
|
||||
),
|
||||
default=cls.preRollStartFrame
|
||||
),
|
||||
"dataFormat": EnumDef(
|
||||
"dataFormat",
|
||||
label="Data Format",
|
||||
items=["ogawa", "HDF"],
|
||||
default=cls.dataFormat,
|
||||
tooltip="The data format to use to write the file."
|
||||
),
|
||||
"attr": TextDef(
|
||||
"attr",
|
||||
label="Custom Attributes",
|
||||
placeholder="attr1; attr2; ...",
|
||||
default=cls.attr,
|
||||
tooltip=(
|
||||
"Attributes matching by name will be included in the "
|
||||
"Alembic export. Attributes should be separated by "
|
||||
"semi-colon `;`"
|
||||
)
|
||||
),
|
||||
"attrPrefix": TextDef(
|
||||
"attrPrefix",
|
||||
label="Custom Attributes Prefix",
|
||||
placeholder="prefix1; prefix2; ...",
|
||||
default=cls.attrPrefix,
|
||||
tooltip=(
|
||||
"Attributes starting with these prefixes will be included "
|
||||
"in the Alembic export. Attributes should be separated by "
|
||||
"semi-colon `;`"
|
||||
)
|
||||
),
|
||||
"userAttr": TextDef(
|
||||
"userAttr",
|
||||
label="User Attr",
|
||||
placeholder="attr1; attr2; ...",
|
||||
default=cls.userAttr,
|
||||
tooltip=(
|
||||
"Attributes matching by name will be included in the "
|
||||
"Alembic export. Attributes should be separated by "
|
||||
"semi-colon `;`"
|
||||
)
|
||||
),
|
||||
"userAttrPrefix": TextDef(
|
||||
"userAttrPrefix",
|
||||
label="User Attr Prefix",
|
||||
placeholder="prefix1; prefix2; ...",
|
||||
default=cls.userAttrPrefix,
|
||||
tooltip=(
|
||||
"Attributes starting with these prefixes will be included "
|
||||
"in the Alembic export. Attributes should be separated by "
|
||||
"semi-colon `;`"
|
||||
)
|
||||
),
|
||||
"melPerFrameCallback": TextDef(
|
||||
"melPerFrameCallback",
|
||||
label="Mel Per Frame Callback",
|
||||
default=cls.melPerFrameCallback,
|
||||
tooltip=(
|
||||
"When each frame (and the static frame) is evaluated the "
|
||||
"string specified is evaluated as a Mel command."
|
||||
)
|
||||
),
|
||||
"melPostJobCallback": TextDef(
|
||||
"melPostJobCallback",
|
||||
label="Mel Post Job Callback",
|
||||
default=cls.melPostJobCallback,
|
||||
tooltip=(
|
||||
"When the translation has finished the string specified "
|
||||
"is evaluated as a Mel command."
|
||||
)
|
||||
),
|
||||
"pythonPerFrameCallback": TextDef(
|
||||
"pythonPerFrameCallback",
|
||||
label="Python Per Frame Callback",
|
||||
default=cls.pythonPerFrameCallback,
|
||||
tooltip=(
|
||||
"When each frame (and the static frame) is evaluated the "
|
||||
"string specified is evaluated as a python command."
|
||||
)
|
||||
),
|
||||
"pythonPostJobCallback": TextDef(
|
||||
"pythonPostJobCallback",
|
||||
label="Python Post Frame Callback",
|
||||
default=cls.pythonPostJobCallback,
|
||||
tooltip=(
|
||||
"When the translation has finished the string specified "
|
||||
"is evaluated as a python command."
|
||||
)
|
||||
)
|
||||
})
|
||||
|
||||
defs = super(ExtractAlembic, cls).get_attribute_defs()
|
||||
|
||||
defs.extend([
|
||||
UISeparatorDef("sep_alembic_options"),
|
||||
UILabelDef("Alembic Options"),
|
||||
])
|
||||
|
||||
# The Arguments that can be modified by the Publisher
|
||||
overrides = set(cls.overrides)
|
||||
for key, value in override_defs.items():
|
||||
if key not in overrides:
|
||||
continue
|
||||
|
||||
defs.append(value)
|
||||
|
||||
defs.append(
|
||||
UISeparatorDef("sep_alembic_options_end")
|
||||
)
|
||||
|
||||
return defs
|
||||
|
||||
|
||||
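The loop above is the whole opt-in mechanism: a definition from `override_defs` is only appended to the publisher UI when its key is listed in `cls.overrides`, which comes from the project settings. A minimal, self-contained sketch of that filter with hypothetical values (placeholder strings stand in for the real BoolDef/EnumDef objects):

# Hypothetical values for illustration only; the real dict holds the
# BoolDef/NumberDef/EnumDef/TextDef instances shown above.
override_defs = {
    "worldSpace": "<BoolDef worldSpace>",
    "writeNormals": "<BoolDef writeNormals>",
    "dataFormat": "<EnumDef dataFormat>",
}
overrides = {"worldSpace", "dataFormat"}  # whitelist from project settings

exposed = [
    attr_def for key, attr_def in override_defs.items()
    if key in overrides
]
assert exposed == ["<BoolDef worldSpace>", "<EnumDef dataFormat>"]
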
class ExtractAnimation(ExtractAlembic):
label = "Extract Animation"
label = "Extract Animation (Alembic)"
families = ["animation"]

def get_members_and_roots(self, instance):

# Collect the out set nodes
out_sets = [node for node in instance if node.endswith("out_SET")]
if len(out_sets) != 1:
raise RuntimeError("Couldn't find exactly one out_SET: "
"{0}".format(out_sets))
raise KnownPublishError(
"Couldn't find exactly one out_SET: {0}".format(out_sets)
)
out_set = out_sets[0]
roots = cmds.sets(out_set, query=True)
roots = cmds.sets(out_set, query=True) or []

# Include all descendants
nodes = roots + cmds.listRelatives(roots,
allDescendents=True,
fullPath=True) or []
nodes = roots.copy()
nodes.extend(get_all_children(roots, ignore_intermediate_objects=True))

return nodes, roots

@ -3,8 +3,8 @@ import os
from maya import cmds

from ayon_core.pipeline import publish
from ayon_core.hosts.maya.api.alembic import extract_alembic
from ayon_core.hosts.maya.api.lib import (
extract_alembic,
suspended_refresh,
maintained_selection,
iter_visible_nodes_in_range

@ -5,8 +5,8 @@ import os
from maya import cmds  # noqa

from ayon_core.pipeline import publish
from ayon_core.hosts.maya.api.alembic import extract_alembic
from ayon_core.hosts.maya.api.lib import (
extract_alembic,
suspended_refresh,
maintained_selection
)

@ -5,7 +5,7 @@ import copy
from maya import cmds

import pyblish.api
from ayon_core.hosts.maya.api.lib import extract_alembic
from ayon_core.hosts.maya.api.alembic import extract_alembic
from ayon_core.pipeline import publish

@ -0,0 +1,130 @@
import inspect
import pyblish.api

from ayon_core.pipeline import OptionalPyblishPluginMixin
from ayon_core.pipeline.publish import RepairAction, PublishValidationError


class ValidateAlembicDefaultsPointcache(
pyblish.api.InstancePlugin, OptionalPyblishPluginMixin
):
"""Validate the attributes on the instance are defaults.

The defaults are defined in the project settings.
"""

order = pyblish.api.ValidatorOrder
families = ["pointcache"]
hosts = ["maya"]
label = "Validate Alembic Options Defaults"
actions = [RepairAction]
optional = True

plugin_name = "ExtractAlembic"

@classmethod
def _get_settings(cls, context):
maya_settings = context.data["project_settings"]["maya"]
settings = maya_settings["publish"]["ExtractAlembic"]
return settings

@classmethod
def _get_publish_attributes(cls, instance):
return instance.data["publish_attributes"][cls.plugin_name]

def process(self, instance):
if not self.is_active(instance.data):
return

settings = self._get_settings(instance.context)
attributes = self._get_publish_attributes(instance)

invalid = {}
for key, value in attributes.items():
if key not in settings:
# This may occur if attributes have changed over time and an
# existing instance has older legacy attributes that do not
# match the current settings definition.
self.log.warning(
"Publish attribute %s not found in Alembic Export "
"default settings. Ignoring validation for attribute.",
key
)
continue

default_value = settings[key]

# Lists are best to compared sorted since we cant rely on the order
# of the items.
if isinstance(value, list):
value = sorted(value)
default_value = sorted(default_value)

if value != default_value:
invalid[key] = value, default_value

if invalid:
non_defaults = "\n".join(
f"- {key}: {value} \t(default: {default_value})"
for key, (value, default_value) in invalid.items()
)

raise PublishValidationError(
"Alembic extract options differ from default values:\n"
f"{non_defaults}",
description=self.get_description()
)

@staticmethod
def get_description():
return inspect.cleandoc(
"""### Alembic Extract settings differ from defaults

The alembic export options differ from the project default values.

If this is intentional you can disable this validation by
disabling **Validate Alembic Options Default**.

If not you may use the "Repair" action to revert all the options to
their default values.

"""
)

@classmethod
def repair(cls, instance):
# Find create instance twin.
create_context = instance.context.data["create_context"]
create_instance = create_context.get_instance_by_id(
instance.data["instance_id"]
)

# Set the settings values on the create context then save to workfile.
settings = cls._get_settings(instance.context)
attributes = cls._get_publish_attributes(create_instance)
for key in attributes:
if key not in settings:
# This may occur if attributes have changed over time and an
# existing instance has older legacy attributes that do not
# match the current settings definition.
cls.log.warning(
"Publish attribute %s not found in Alembic Export "
"default settings. Ignoring repair for attribute.",
key
)
continue
attributes[key] = settings[key]

create_context.save_changes()


class ValidateAlembicDefaultsAnimation(
ValidateAlembicDefaultsPointcache
):
"""Validate the attributes on the instance are defaults.

The defaults are defined in the project settings.
"""
label = "Validate Alembic Options Defaults"
families = ["animation"]
plugin_name = "ExtractAnimation"

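The key comparison rule in the validator above is that list-valued attributes are sorted before comparison, so a reordered list does not count as a deviation from the project defaults. A minimal sketch of that rule with hypothetical settings and attribute values:

# Hypothetical settings/attribute values for illustration only.
settings = {"visibleOnly": False, "overrides": ["worldSpace", "writeNormals"]}
attributes = {"visibleOnly": False,
              "overrides": ["writeNormals", "worldSpace"],
              "legacyKey": 1}

invalid = {}
for key, value in attributes.items():
    if key not in settings:
        continue  # legacy attribute with no matching default: skipped, as above
    default_value = settings[key]
    if isinstance(value, list):
        value, default_value = sorted(value), sorted(default_value)
    if value != default_value:
        invalid[key] = (value, default_value)

assert not invalid  # list order alone is not reported as a deviation
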
@ -1,30 +1,56 @@
from maya import cmds

import pyblish.api

from ayon_core.pipeline.publish import (
ValidateContentsOrder, PublishValidationError
)
from ayon_core.hosts.maya.api.lib import is_visible


class ValidateArnoldSceneSource(pyblish.api.InstancePlugin):
"""Validate Arnold Scene Source.

We require at least 1 root node/parent for the meshes. This is to ensure we
can duplicate the nodes and preserve the names.
Ensure no nodes are hidden.
"""

If using proxies we need the nodes to share the same names and not be
order = ValidateContentsOrder
hosts = ["maya"]
families = ["ass", "assProxy"]
label = "Validate Arnold Scene Source"

def process(self, instance):
# Validate against having nodes hidden, which will result in the
# extraction to ignore the node.
nodes = instance.data["members"] + instance.data.get("proxy", [])
nodes = [x for x in nodes if cmds.objectType(x, isAType='dagNode')]
hidden_nodes = [
x for x in nodes if not is_visible(x, intermediateObject=False)
]
if hidden_nodes:
raise PublishValidationError(
"Found hidden nodes:\n\n{}\n\nPlease unhide for"
" publishing.".format("\n".join(hidden_nodes))
)


class ValidateArnoldSceneSourceProxy(pyblish.api.InstancePlugin):
"""Validate Arnold Scene Source Proxy.

When using proxies we need the nodes to share the same names and not be
parent to the world. This ends up needing at least two groups with content
nodes and proxy nodes in another.
"""

order = ValidateContentsOrder
hosts = ["maya"]
families = ["ass"]
label = "Validate Arnold Scene Source"
families = ["assProxy"]
label = "Validate Arnold Scene Source Proxy"

def _get_nodes_by_name(self, nodes):
ungrouped_nodes = []
nodes_by_name = {}
parents = []
same_named_nodes = {}
for node in nodes:
node_split = node.split("|")
if len(node_split) == 2:

@ -35,33 +61,16 @@ class ValidateArnoldSceneSource(pyblish.api.InstancePlugin):
parents.append(parent)

node_name = node.rsplit("|", 1)[-1].rsplit(":", 1)[-1]

# Check for same same nodes, which can happen in different
# hierarchies.
if node_name in nodes_by_name:
try:
same_named_nodes[node_name].append(node)
except KeyError:
same_named_nodes[node_name] = [
nodes_by_name[node_name], node
]

nodes_by_name[node_name] = node

if same_named_nodes:
message = "Found nodes with the same name:"
for name, nodes in same_named_nodes.items():
message += "\n\n\"{}\":\n{}".format(name, "\n".join(nodes))

raise PublishValidationError(message)

return ungrouped_nodes, nodes_by_name, parents

def process(self, instance):
# Validate against nodes directly parented to world.
ungrouped_nodes = []

nodes, content_nodes_by_name, content_parents = (
self._get_nodes_by_name(instance.data["contentMembers"])
self._get_nodes_by_name(instance.data["members"])
)
ungrouped_nodes.extend(nodes)

@ -70,24 +79,21 @@ class ValidateArnoldSceneSource(pyblish.api.InstancePlugin):
)
ungrouped_nodes.extend(nodes)

# Validate against nodes directly parented to world.
if ungrouped_nodes:
raise PublishValidationError(
"Found nodes parented to the world: {}\n"
"All nodes need to be grouped.".format(ungrouped_nodes)
)

# Proxy validation.
if not instance.data.get("proxy", []):
return

# Validate for content and proxy nodes amount being the same.
if len(instance.data["contentMembers"]) != len(instance.data["proxy"]):
if len(instance.data["members"]) != len(instance.data["proxy"]):
raise PublishValidationError(
"Amount of content nodes ({}) and proxy nodes ({}) needs to "
"be the same.".format(
len(instance.data["contentMembers"]),
len(instance.data["proxy"])
"be the same.\nContent nodes: {}\nProxy nodes:{}".format(
len(instance.data["members"]),
len(instance.data["proxy"]),
instance.data["members"],
instance.data["proxy"]
)
)

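The proxy pairing above relies on matching content and proxy nodes by their short name, stripping both the DAG path and any namespace. A minimal sketch of that normalization, using hypothetical node paths:

def short_name(node):
    # Strip the DAG path, then any namespace, keeping only the short name.
    return node.rsplit("|", 1)[-1].rsplit(":", 1)[-1]

assert short_name("|content_GRP|char:body_GEO") == "body_GEO"
assert short_name("|proxy_GRP|body_GEO") == "body_GEO"
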
@ -17,7 +17,7 @@ class ValidateArnoldSceneSourceCbid(pyblish.api.InstancePlugin,

order = ValidateContentsOrder
hosts = ["maya"]
families = ["ass"]
families = ["assProxy"]
label = "Validate Arnold Scene Source CBID"
actions = [RepairAction]
optional = False

@ -40,15 +40,11 @@ class ValidateArnoldSceneSourceCbid(pyblish.api.InstancePlugin,

@classmethod
def get_invalid_couples(cls, instance):
content_nodes_by_name = cls._get_nodes_by_name(
instance.data["contentMembers"]
)
proxy_nodes_by_name = cls._get_nodes_by_name(
instance.data.get("proxy", [])
)
nodes_by_name = cls._get_nodes_by_name(instance.data["members"])
proxy_nodes_by_name = cls._get_nodes_by_name(instance.data["proxy"])

invalid_couples = []
for content_name, content_node in content_nodes_by_name.items():
for content_name, content_node in nodes_by_name.items():
proxy_node = proxy_nodes_by_name.get(content_name, None)

if not proxy_node:

@ -70,7 +66,7 @@ class ValidateArnoldSceneSourceCbid(pyblish.api.InstancePlugin,
if not self.is_active(instance.data):
return
# Proxy validation.
if not instance.data.get("proxy", []):
if not instance.data["proxy"]:
return

# Validate for proxy nodes sharing the same cbId as content nodes.

@ -10,6 +10,7 @@ from ayon_core.pipeline.publish import (
RepairAction,
ValidateContentsOrder,
PublishValidationError,
OptionalPyblishPluginMixin
)
from ayon_core.hosts.maya.api import lib
from ayon_core.hosts.maya.api.lib_rendersettings import RenderSettings

@ -37,7 +38,8 @@ def get_redshift_image_format_labels():
return mel.eval("{0}={0}".format(var))


class ValidateRenderSettings(pyblish.api.InstancePlugin):
class ValidateRenderSettings(pyblish.api.InstancePlugin,
OptionalPyblishPluginMixin):
"""Validates the global render settings

* File Name Prefix must start with: `<Scene>`

@ -55,7 +57,7 @@ class ValidateRenderSettings(pyblish.api.InstancePlugin):
* Frame Padding must be:
* default: 4

* Animation must be toggle on, in Render Settings - Common tab:
* Animation must be toggled on, in Render Settings - Common tab:
* vray: Animation on standard of specific
* arnold: Frame / Animation ext: Any choice without "(Single Frame)"
* redshift: Animation toggled on

@ -67,10 +69,11 @@ class ValidateRenderSettings(pyblish.api.InstancePlugin):
"""

order = ValidateContentsOrder
label = "Render Settings"
label = "Validate Render Settings"
hosts = ["maya"]
families = ["renderlayer"]
actions = [RepairAction]
optional = True

ImagePrefixes = {
'mentalray': 'defaultRenderGlobals.imageFilePrefix',

@ -112,6 +115,8 @@ class ValidateRenderSettings(pyblish.api.InstancePlugin):
DEFAULT_PREFIX = "<Scene>/<RenderLayer>/<RenderLayer>_<RenderPass>"

def process(self, instance):
if not self.is_active(instance.data):
return

invalid = self.get_invalid(instance)
if invalid:

@ -0,0 +1,132 @@
from maya import cmds

from ayon_core.pipeline.workfile.workfile_template_builder import (
PlaceholderLoadMixin,
LoadPlaceholderItem
)
from ayon_core.hosts.maya.api.lib import (
get_container_transforms,
get_node_parent,
get_node_index_under_parent
)
from ayon_core.hosts.maya.api.workfile_template_builder import (
MayaPlaceholderPlugin,
)


class MayaPlaceholderLoadPlugin(MayaPlaceholderPlugin, PlaceholderLoadMixin):
identifier = "maya.load"
label = "Maya load"

item_class = LoadPlaceholderItem

def _create_placeholder_name(self, placeholder_data):

# Split builder type: context_assets, linked_assets, all_assets
prefix, suffix = placeholder_data["builder_type"].split("_", 1)
parts = [prefix]

# add family if any
placeholder_product_type = placeholder_data.get("product_type")
if placeholder_product_type is None:
placeholder_product_type = placeholder_data.get("family")

if placeholder_product_type:
parts.append(placeholder_product_type)

# add loader arguments if any
loader_args = placeholder_data["loader_args"]
if loader_args:
loader_args = eval(loader_args)
for value in loader_args.values():
parts.append(str(value))

parts.append(suffix)
placeholder_name = "_".join(parts)

return placeholder_name.capitalize()

def _get_loaded_repre_ids(self):
loaded_representation_ids = self.builder.get_shared_populate_data(
"loaded_representation_ids"
)
if loaded_representation_ids is None:
try:
containers = cmds.sets("AVALON_CONTAINERS", q=True)
except ValueError:
containers = []

loaded_representation_ids = {
cmds.getAttr(container + ".representation")
for container in containers
}
self.builder.set_shared_populate_data(
"loaded_representation_ids", loaded_representation_ids
)
return loaded_representation_ids

def populate_placeholder(self, placeholder):
self.populate_load_placeholder(placeholder)

def repopulate_placeholder(self, placeholder):
repre_ids = self._get_loaded_repre_ids()
self.populate_load_placeholder(placeholder, repre_ids)

def get_placeholder_options(self, options=None):
return self.get_load_plugin_options(options)

def load_succeed(self, placeholder, container):
self._parent_in_hierarchy(placeholder, container)

def _parent_in_hierarchy(self, placeholder, container):
"""Parent loaded container to placeholder's parent.

ie : Set loaded content as placeholder's sibling

Args:
container (str): Placeholder loaded containers
"""

if not container:
return

# TODO: This currently returns only a single root but a loaded scene
# could technically load more than a single root
container_root = get_container_transforms(container, root=True)

# Bugfix: The get_container_transforms does not recognize the load
# reference group currently
# TODO: Remove this when it does
parent = get_node_parent(container_root)
if parent:
container_root = parent
roots = [container_root]

# Add the loaded roots to the holding sets if they exist
holding_sets = cmds.listSets(object=placeholder.scene_identifier) or []
for holding_set in holding_sets:
cmds.sets(roots, forceElement=holding_set)

# Parent the roots to the place of the placeholder locator and match
# its matrix
placeholder_form = cmds.xform(
placeholder.scene_identifier,
query=True,
matrix=True,
worldSpace=True
)
scene_parent = get_node_parent(placeholder.scene_identifier)
for node in set(roots):
cmds.xform(node, matrix=placeholder_form, worldSpace=True)

if scene_parent != get_node_parent(node):
if scene_parent:
node = cmds.parent(node, scene_parent)[0]
else:
node = cmds.parent(node, world=True)[0]

# Move loaded nodes in index order next to their placeholder node
cmds.reorder(node, back=True)
index = get_node_index_under_parent(placeholder.scene_identifier)
cmds.reorder(node, front=True)
cmds.reorder(node, relative=index + 1)

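The `_get_loaded_repre_ids` method above uses the builder's shared populate data as a cache: the scene is only scanned for containers on the first call, and every later call reuses the stored set. A minimal sketch of that cache-on-first-use pattern, with a plain dict standing in for the builder's shared data store:

# A plain dict stands in for the builder's shared populate data (hypothetical).
shared_data = {}

def get_loaded_repre_ids(scan_scene):
    ids = shared_data.get("loaded_representation_ids")
    if ids is None:
        ids = set(scan_scene())  # the expensive scene scan runs only once
        shared_data["loaded_representation_ids"] = ids
    return ids

first = get_loaded_repre_ids(lambda: ["repre-a", "repre-b"])
second = get_loaded_repre_ids(lambda: ["never-called"])
assert first is second and first == {"repre-a", "repre-b"}
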
@ -0,0 +1,201 @@
from maya import cmds

from ayon_core.hosts.maya.api.workfile_template_builder import (
MayaPlaceholderPlugin
)
from ayon_core.lib import NumberDef, TextDef, EnumDef
from ayon_core.lib.events import weakref_partial


EXAMPLE_SCRIPT = """
# Access maya commands
from maya import cmds

# Access the placeholder node
placeholder_node = placeholder.scene_identifier

# Access the event callback
if event is None:
print(f"Populating {placeholder}")
else:
if event.topic == "template.depth_processed":
print(f"Processed depth: {event.get('depth')}")
elif event.topic == "template.finished":
print("Build finished.")
""".strip()


class MayaPlaceholderScriptPlugin(MayaPlaceholderPlugin):
"""Execute a script at the given `order` during workfile build.

This is a very low-level placeholder to run Python scripts at a given
point in time during the workfile template build.

It can create either a locator or an objectSet as placeholder node.
It defaults to an objectSet, since allowing to run on e.g. other
placeholder node members can be useful, e.g. using:

>>> members = cmds.sets(placeholder.scene_identifier, query=True)

"""

identifier = "maya.runscript"
label = "Run Python Script"

use_selection_as_parent = False

def get_placeholder_options(self, options=None):
options = options or {}
return [
NumberDef(
"order",
label="Order",
default=options.get("order") or 0,
decimals=0,
minimum=0,
maximum=999,
tooltip=(
"Order"
"\nOrder defines asset loading priority (0 to 999)"
"\nPriority rule is : \"lowest is first to load\"."
)
),
TextDef(
"prepare_script",
label="Run at\nprepare",
tooltip="Run before populate at prepare order",
multiline=True,
default=options.get("prepare_script", "")
),
TextDef(
"populate_script",
label="Run at\npopulate",
tooltip="Run script at populate node order<br>"
"This is the <b>default</b> behavior",
multiline=True,
default=options.get("populate_script", EXAMPLE_SCRIPT)
),
TextDef(
"depth_processed_script",
label="Run after\ndepth\niteration",
tooltip="Run script after every build depth iteration",
multiline=True,
default=options.get("depth_processed_script", "")
),
TextDef(
"finished_script",
label="Run after\nbuild",
tooltip=(
"Run script at build finished.<br>"
"<b>Note</b>: this even runs if other placeholders had "
"errors during the build"
),
multiline=True,
default=options.get("finished_script", "")
),
EnumDef(
"create_nodetype",
label="Nodetype",
items={
"spaceLocator": "Locator",
"objectSet": "ObjectSet"
},
tooltip=(
"The placeholder's node type to be created.<br>"
"<b>Note</b> this only works on create, not on update"
),
default=options.get("create_nodetype", "objectSet")
),
]

def create_placeholder(self, placeholder_data):
nodetype = placeholder_data.get("create_nodetype", "objectSet")

if nodetype == "spaceLocator":
super(MayaPlaceholderScriptPlugin, self).create_placeholder(
placeholder_data
)
elif nodetype == "objectSet":
placeholder_data["plugin_identifier"] = self.identifier

# Create maya objectSet on selection
selection = cmds.ls(selection=True, long=True)
name = self._create_placeholder_name(placeholder_data)
node = cmds.sets(selection, name=name)

self.imprint(node, placeholder_data)

def prepare_placeholders(self, placeholders):
super(MayaPlaceholderScriptPlugin, self).prepare_placeholders(
placeholders
)
for placeholder in placeholders:
prepare_script = placeholder.data.get("prepare_script")
if not prepare_script:
continue

self.run_script(placeholder, prepare_script)

def populate_placeholder(self, placeholder):

populate_script = placeholder.data.get("populate_script")
depth_script = placeholder.data.get("depth_processed_script")
finished_script = placeholder.data.get("finished_script")

# Run now
if populate_script:
self.run_script(placeholder, populate_script)

if not any([depth_script, finished_script]):
# No callback scripts to run
if not placeholder.data.get("keep_placeholder", True):
self.delete_placeholder(placeholder)
return

# Run at each depth processed
if depth_script:
callback = weakref_partial(
self.run_script, placeholder, depth_script)
self.builder.add_on_depth_processed_callback(
callback, order=placeholder.order)

# Run at build finish
if finished_script:
callback = weakref_partial(
self.run_script, placeholder, finished_script)
self.builder.add_on_finished_callback(
callback, order=placeholder.order)

# If placeholder should be deleted, delete it after finish so
# the scripts have access to it up to the last run
if not placeholder.data.get("keep_placeholder", True):
delete_callback = weakref_partial(
self.delete_placeholder, placeholder)
self.builder.add_on_finished_callback(
delete_callback, order=placeholder.order + 1)

def run_script(self, placeholder, script, event=None):
"""Run script

Even though `placeholder` is an unused arguments by exposing it as
an input argument it means it makes it available through
globals()/locals() in the `exec` call, giving the script access
to the placeholder.

For example:
>>> node = placeholder.scene_identifier

In the case the script is running at a callback level (not during
populate) then it has access to the `event` as well, otherwise the
value is None if it runs during `populate_placeholder` directly.

For example adding this as the callback script:
>>> if event is not None:
>>> if event.topic == "on_depth_processed":
>>> print(f"Processed depth: {event.get('depth')}")
>>> elif event.topic == "on_finished":
>>> print("Build finished.")

"""
self.log.debug(f"Running script at event: {event}")
exec(script, locals())

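The trick in `run_script` above is that `exec(script, locals())` hands the method's own locals to the executed source, so names like `placeholder` and `event` resolve inside the user script even though the method never touches them itself. A minimal standalone sketch of the same mechanism, with hypothetical stand-in objects instead of real placeholder items and build events:

class _FakeEvent:
    topic = "template.finished"

def run_script(placeholder, script, event=None):
    # Passing locals() as the globals mapping makes `placeholder`, `script`
    # and `event` visible as names inside the executed source.
    exec(script, locals())

user_script = (
    "if event is not None and event.topic == 'template.finished':\n"
    "    print('Build finished for', placeholder)\n"
)
run_script("PLACEHOLDER_NODE", user_script, event=_FakeEvent())
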
@ -7,7 +7,7 @@ from maya import cmds
import ayon_api

from ayon_core.pipeline import get_current_project_name
import ayon_core.hosts.maya.lib as maya_lib
import ayon_core.hosts.maya.api.lib as maya_lib
from . import lib
from .alembic import get_alembic_ids_cache

@ -1495,18 +1495,28 @@ class WorkfileSettings(object):

filter_knobs = [
"viewerProcess",
"wipe_position"
"wipe_position",
"monitorOutOutputTransform"
]

display, viewer = get_viewer_config_from_string(
viewer_dict["viewerProcess"]
)
viewer_process = create_viewer_profile_string(
viewer, display, path_like=False
)
display, viewer = get_viewer_config_from_string(
viewer_dict["output_transform"]
)
output_transform = create_viewer_profile_string(
viewer, display, path_like=False
)
erased_viewers = []
for v in nuke.allNodes(filter="Viewer"):
# set viewProcess to preset from settings
v["viewerProcess"].setValue(
str(viewer_dict["viewerProcess"])
)
v["viewerProcess"].setValue(viewer_process)

if str(viewer_dict["viewerProcess"]) \
not in v["viewerProcess"].value():
if viewer_process not in v["viewerProcess"].value():
copy_inputs = v.dependencies()
copy_knobs = {k: v[k].value() for k in v.knobs()
if k not in filter_knobs}

@ -1524,11 +1534,11 @@ class WorkfileSettings(object):

# set copied knobs
for k, v in copy_knobs.items():
print(k, v)
nv[k].setValue(v)

# set viewerProcess
nv["viewerProcess"].setValue(str(viewer_dict["viewerProcess"]))
nv["viewerProcess"].setValue(viewer_process)
nv["monitorOutOutputTransform"].setValue(output_transform)

if erased_viewers:
log.warning(

@ -1547,7 +1557,6 @@ class WorkfileSettings(object):
host_name="nuke"
)

viewer_process_settings = imageio_host["viewer"]["viewerProcess"]
workfile_settings = imageio_host["workfile"]
color_management = workfile_settings["color_management"]
native_ocio_config = workfile_settings["native_ocio_config"]

@ -1574,29 +1583,6 @@ class WorkfileSettings(object):
residual_path
))

# get monitor lut from settings respecting Nuke version differences
monitor_lut = workfile_settings["thumbnail_space"]
monitor_lut_data = self._get_monitor_settings(
viewer_process_settings, monitor_lut
)
monitor_lut_data["workingSpaceLUT"] = (
workfile_settings["working_space"]
)

# then set the rest
for knob, value_ in monitor_lut_data.items():
# skip unfilled ocio config path
# it will be dict in value
if isinstance(value_, dict):
continue
# skip empty values
if not value_:
continue
if self._root_node[knob].value() not in value_:
self._root_node[knob].setValue(str(value_))
log.debug("nuke.root()['{}'] changed to: {}".format(
knob, value_))

# set ocio config path
if config_data:
config_path = config_data["path"].replace("\\", "/")

@ -1611,6 +1597,31 @@ class WorkfileSettings(object):
if correct_settings:
self._set_ocio_config_path_to_workfile(config_data)

# get monitor lut from settings respecting Nuke version differences
monitor_lut_data = self._get_monitor_settings(
workfile_settings["monitor_out_lut"],
workfile_settings["monitor_lut"]
)
monitor_lut_data.update({
"workingSpaceLUT": workfile_settings["working_space"],
"int8Lut": workfile_settings["int_8_lut"],
"int16Lut": workfile_settings["int_16_lut"],
"logLut": workfile_settings["log_lut"],
"floatLut": workfile_settings["float_lut"]
})

# then set the rest
for knob, value_ in monitor_lut_data.items():
# skip unfilled ocio config path
# it will be dict in value
if isinstance(value_, dict):
continue
# skip empty values
if not value_:
continue
self._root_node[knob].setValue(str(value_))
log.debug("nuke.root()['{}'] changed to: {}".format(knob, value_))

def _get_monitor_settings(self, viewer_lut, monitor_lut):
""" Get monitor settings from viewer and monitor lut

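For context on the two helpers the hunk above switches to: `get_viewer_config_from_string` splits a Nuke viewer profile string into its display and view parts, and `create_viewer_profile_string` rebuilds one. The sketch below is a hypothetical re-implementation for illustration only; the real helpers live in ayon_core's Nuke integration and their exact string format is an assumption here:

# Hypothetical re-implementations, assuming the "view (display)" format.
def get_viewer_config_from_string(value):
    viewer, _, display = value.partition(" (")
    return display.rstrip(")") or None, viewer

def create_viewer_profile_string(viewer, display, path_like=False):
    if not display:
        return viewer
    return "{}/{}".format(display, viewer) if path_like \
        else "{} ({})".format(viewer, display)

display, viewer = get_viewer_config_from_string("sRGB (ACES)")
assert create_viewer_profile_string(viewer, display) == "sRGB (ACES)"
assert create_viewer_profile_string(viewer, display, path_like=True) == "ACES/sRGB"
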
@ -18,6 +18,7 @@ from ayon_core.pipeline import (
register_loader_plugin_path,
register_creator_plugin_path,
register_inventory_action_path,
register_workfile_build_plugin_path,
AYON_INSTANCE_ID,
AVALON_INSTANCE_ID,
AVALON_CONTAINER_ID,

@ -52,8 +53,6 @@ from .lib import (
MENU_LABEL,
)
from .workfile_template_builder import (
NukePlaceholderLoadPlugin,
NukePlaceholderCreatePlugin,
build_workfile_template,
create_placeholder,
update_placeholder,

@ -76,6 +75,7 @@ PUBLISH_PATH = os.path.join(PLUGINS_DIR, "publish")
LOAD_PATH = os.path.join(PLUGINS_DIR, "load")
CREATE_PATH = os.path.join(PLUGINS_DIR, "create")
INVENTORY_PATH = os.path.join(PLUGINS_DIR, "inventory")
WORKFILE_BUILD_PATH = os.path.join(PLUGINS_DIR, "workfile_build")

# registering pyblish gui regarding settings in presets
if os.getenv("PYBLISH_GUI", None):

@ -105,18 +105,11 @@ class NukeHost(
def get_workfile_extensions(self):
return file_extensions()

def get_workfile_build_placeholder_plugins(self):
return [
NukePlaceholderLoadPlugin,
NukePlaceholderCreatePlugin
]

def get_containers(self):
return ls()

def install(self):
''' Installing all requarements for Nuke host
'''
"""Installing all requirements for Nuke host"""

pyblish.api.register_host("nuke")

@ -125,6 +118,7 @@ class NukeHost(
register_loader_plugin_path(LOAD_PATH)
register_creator_plugin_path(CREATE_PATH)
register_inventory_action_path(INVENTORY_PATH)
register_workfile_build_plugin_path(WORKFILE_BUILD_PATH)

# Register AYON event for workfiles loading.
register_event_callback("workio.open_file", check_inventory_versions)

@ -178,7 +172,6 @@ def add_nuke_callbacks():
# set apply all workfile settings on script load and save
nuke.addOnScriptLoad(WorkfileSettings().set_context_settings)


if nuke_settings["dirmap"]["enabled"]:
log.info("Added Nuke's dir-mapping callback ...")
# Add dirmap for file paths.

@ -1151,7 +1151,6 @@ def _remove_old_knobs(node):
"OpenpypeDataGroup", "OpenpypeDataGroup_End", "deadlinePriority",
"deadlineChunkSize", "deadlineConcurrentTasks", "Deadline"
]
print(node.name())

# remove all old knobs
for knob in node.allKnobs():

@ -1,30 +1,17 @@
import collections
import nuke

from ayon_core.pipeline import registered_host
from ayon_core.pipeline.workfile.workfile_template_builder import (
AbstractTemplateBuilder,
PlaceholderPlugin,
LoadPlaceholderItem,
CreatePlaceholderItem,
PlaceholderLoadMixin,
PlaceholderCreateMixin,
)
from ayon_core.tools.workfile_template_build import (
WorkfileBuildPlaceholderDialog,
)
from .lib import (
find_free_space_to_paste_nodes,
get_extreme_positions,
get_group_io_nodes,
imprint,
refresh_node,
refresh_nodes,
reset_selection,
get_names_from_nodes,
get_nodes_by_names,
select_nodes,
duplicate_node,
node_tempfile,
get_main_window,
WorkfileSettings,
)

@ -54,6 +41,7 @@ class NukeTemplateBuilder(AbstractTemplateBuilder):

return True


class NukePlaceholderPlugin(PlaceholderPlugin):
node_color = 4278190335

@ -120,843 +108,6 @@ class NukePlaceholderPlugin(PlaceholderPlugin):
|
|||
nuke.delete(placeholder_node)
|
||||
|
||||
|
||||
class NukePlaceholderLoadPlugin(NukePlaceholderPlugin, PlaceholderLoadMixin):
|
||||
identifier = "nuke.load"
|
||||
label = "Nuke load"
|
||||
|
||||
def _parse_placeholder_node_data(self, node):
|
||||
placeholder_data = super(
|
||||
NukePlaceholderLoadPlugin, self
|
||||
)._parse_placeholder_node_data(node)
|
||||
|
||||
node_knobs = node.knobs()
|
||||
nb_children = 0
|
||||
if "nb_children" in node_knobs:
|
||||
nb_children = int(node_knobs["nb_children"].getValue())
|
||||
placeholder_data["nb_children"] = nb_children
|
||||
|
||||
siblings = []
|
||||
if "siblings" in node_knobs:
|
||||
siblings = node_knobs["siblings"].values()
|
||||
placeholder_data["siblings"] = siblings
|
||||
|
||||
node_full_name = node.fullName()
|
||||
placeholder_data["group_name"] = node_full_name.rpartition(".")[0]
|
||||
placeholder_data["last_loaded"] = []
|
||||
placeholder_data["delete"] = False
|
||||
return placeholder_data
|
||||
|
||||
def _get_loaded_repre_ids(self):
|
||||
loaded_representation_ids = self.builder.get_shared_populate_data(
|
||||
"loaded_representation_ids"
|
||||
)
|
||||
if loaded_representation_ids is None:
|
||||
loaded_representation_ids = set()
|
||||
for node in nuke.allNodes():
|
||||
if "repre_id" in node.knobs():
|
||||
loaded_representation_ids.add(
|
||||
node.knob("repre_id").getValue()
|
||||
)
|
||||
|
||||
self.builder.set_shared_populate_data(
|
||||
"loaded_representation_ids", loaded_representation_ids
|
||||
)
|
||||
return loaded_representation_ids
|
||||
|
||||
def _before_placeholder_load(self, placeholder):
|
||||
placeholder.data["nodes_init"] = nuke.allNodes()
|
||||
|
||||
def _before_repre_load(self, placeholder, representation):
|
||||
placeholder.data["last_repre_id"] = representation["id"]
|
||||
|
||||
def collect_placeholders(self):
|
||||
output = []
|
||||
scene_placeholders = self._collect_scene_placeholders()
|
||||
for node_name, node in scene_placeholders.items():
|
||||
plugin_identifier_knob = node.knob("plugin_identifier")
|
||||
if (
|
||||
plugin_identifier_knob is None
|
||||
or plugin_identifier_knob.getValue() != self.identifier
|
||||
):
|
||||
continue
|
||||
|
||||
placeholder_data = self._parse_placeholder_node_data(node)
|
||||
# TODO do data validations and maybe updgrades if are invalid
|
||||
output.append(
|
||||
LoadPlaceholderItem(node_name, placeholder_data, self)
|
||||
)
|
||||
|
||||
return output
|
||||
|
||||
def populate_placeholder(self, placeholder):
|
||||
self.populate_load_placeholder(placeholder)
|
||||
|
||||
def repopulate_placeholder(self, placeholder):
|
||||
repre_ids = self._get_loaded_repre_ids()
|
||||
self.populate_load_placeholder(placeholder, repre_ids)
|
||||
|
||||
def get_placeholder_options(self, options=None):
|
||||
return self.get_load_plugin_options(options)
|
||||
|
||||
def post_placeholder_process(self, placeholder, failed):
|
||||
"""Cleanup placeholder after load of its corresponding representations.
|
||||
|
||||
Args:
|
||||
placeholder (PlaceholderItem): Item which was just used to load
|
||||
representation.
|
||||
failed (bool): Loading of representation failed.
|
||||
"""
|
||||
# deselect all selected nodes
|
||||
placeholder_node = nuke.toNode(placeholder.scene_identifier)
|
||||
|
||||
# getting the latest nodes added
|
||||
# TODO get from shared populate data!
|
||||
nodes_init = placeholder.data["nodes_init"]
|
||||
nodes_loaded = list(set(nuke.allNodes()) - set(nodes_init))
|
||||
self.log.debug("Loaded nodes: {}".format(nodes_loaded))
|
||||
if not nodes_loaded:
|
||||
return
|
||||
|
||||
placeholder.data["delete"] = True
|
||||
|
||||
nodes_loaded = self._move_to_placeholder_group(
|
||||
placeholder, nodes_loaded
|
||||
)
|
||||
placeholder.data["last_loaded"] = nodes_loaded
|
||||
refresh_nodes(nodes_loaded)
|
||||
|
||||
# positioning of the loaded nodes
|
||||
min_x, min_y, _, _ = get_extreme_positions(nodes_loaded)
|
||||
for node in nodes_loaded:
|
||||
xpos = (node.xpos() - min_x) + placeholder_node.xpos()
|
||||
ypos = (node.ypos() - min_y) + placeholder_node.ypos()
|
||||
node.setXYpos(xpos, ypos)
|
||||
refresh_nodes(nodes_loaded)
|
||||
|
||||
# fix the problem of z_order for backdrops
|
||||
self._fix_z_order(placeholder)
|
||||
|
||||
if placeholder.data.get("keep_placeholder"):
|
||||
self._imprint_siblings(placeholder)
|
||||
|
||||
if placeholder.data["nb_children"] == 0:
|
||||
# save initial nodes positions and dimensions, update them
|
||||
# and set inputs and outputs of loaded nodes
|
||||
if placeholder.data.get("keep_placeholder"):
|
||||
self._imprint_inits()
|
||||
self._update_nodes(placeholder, nuke.allNodes(), nodes_loaded)
|
||||
|
||||
self._set_loaded_connections(placeholder)
|
||||
|
||||
elif placeholder.data["siblings"]:
|
||||
# create copies of placeholder siblings for the new loaded nodes,
|
||||
# set their inputs and outputs and update all nodes positions and
|
||||
# dimensions and siblings names
|
||||
|
||||
siblings = get_nodes_by_names(placeholder.data["siblings"])
|
||||
refresh_nodes(siblings)
|
||||
copies = self._create_sib_copies(placeholder)
|
||||
new_nodes = list(copies.values()) # copies nodes
|
||||
self._update_nodes(new_nodes, nodes_loaded)
|
||||
placeholder_node.removeKnob(placeholder_node.knob("siblings"))
|
||||
new_nodes_name = get_names_from_nodes(new_nodes)
|
||||
imprint(placeholder_node, {"siblings": new_nodes_name})
|
||||
self._set_copies_connections(placeholder, copies)
|
||||
|
||||
self._update_nodes(
|
||||
nuke.allNodes(),
|
||||
new_nodes + nodes_loaded,
|
||||
20
|
||||
)
|
||||
|
||||
new_siblings = get_names_from_nodes(new_nodes)
|
||||
placeholder.data["siblings"] = new_siblings
|
||||
|
||||
else:
|
||||
# if the placeholder doesn't have siblings, the loaded
|
||||
# nodes will be placed in a free space
|
||||
|
||||
xpointer, ypointer = find_free_space_to_paste_nodes(
|
||||
nodes_loaded, direction="bottom", offset=200
|
||||
)
|
||||
node = nuke.createNode("NoOp")
|
||||
reset_selection()
|
||||
nuke.delete(node)
|
||||
for node in nodes_loaded:
|
||||
xpos = (node.xpos() - min_x) + xpointer
|
||||
ypos = (node.ypos() - min_y) + ypointer
|
||||
node.setXYpos(xpos, ypos)
|
||||
|
||||
placeholder.data["nb_children"] += 1
|
||||
reset_selection()
|
||||
|
||||
# go back to root group
|
||||
nuke.root().begin()
|
||||
|
||||
def _move_to_placeholder_group(self, placeholder, nodes_loaded):
|
||||
"""
|
||||
opening the placeholder's group and copying loaded nodes in it.
|
||||
|
||||
Returns :
|
||||
nodes_loaded (list): the new list of pasted nodes
|
||||
"""
|
||||
|
||||
groups_name = placeholder.data["group_name"]
|
||||
reset_selection()
|
||||
select_nodes(nodes_loaded)
|
||||
if groups_name:
|
||||
with node_tempfile() as filepath:
|
||||
nuke.nodeCopy(filepath)
|
||||
for node in nuke.selectedNodes():
|
||||
nuke.delete(node)
|
||||
group = nuke.toNode(groups_name)
|
||||
group.begin()
|
||||
nuke.nodePaste(filepath)
|
||||
nodes_loaded = nuke.selectedNodes()
|
||||
return nodes_loaded
|
||||
|
||||
def _fix_z_order(self, placeholder):
|
||||
"""Fix the problem of z_order when a backdrop is loaded."""
|
||||
|
||||
nodes_loaded = placeholder.data["last_loaded"]
|
||||
loaded_backdrops = []
|
||||
bd_orders = set()
|
||||
for node in nodes_loaded:
|
||||
if isinstance(node, nuke.BackdropNode):
|
||||
loaded_backdrops.append(node)
|
||||
bd_orders.add(node.knob("z_order").getValue())
|
||||
|
||||
if not bd_orders:
|
||||
return
|
||||
|
||||
sib_orders = set()
|
||||
for node_name in placeholder.data["siblings"]:
|
||||
node = nuke.toNode(node_name)
|
||||
if isinstance(node, nuke.BackdropNode):
|
||||
sib_orders.add(node.knob("z_order").getValue())
|
||||
|
||||
if not sib_orders:
|
||||
return
|
||||
|
||||
min_order = min(bd_orders)
|
||||
max_order = max(sib_orders)
|
||||
for backdrop_node in loaded_backdrops:
|
||||
z_order = backdrop_node.knob("z_order").getValue()
|
||||
backdrop_node.knob("z_order").setValue(
|
||||
z_order + max_order - min_order + 1)
|
||||
|
||||
def _imprint_siblings(self, placeholder):
|
||||
"""
|
||||
- add siblings names to placeholder attributes (nodes loaded with it)
|
||||
- add Id to the attributes of all the other nodes
|
||||
"""
|
||||
|
||||
loaded_nodes = placeholder.data["last_loaded"]
|
||||
loaded_nodes_set = set(loaded_nodes)
|
||||
data = {"repre_id": str(placeholder.data["last_repre_id"])}
|
||||
|
||||
for node in loaded_nodes:
|
||||
node_knobs = node.knobs()
|
||||
if "builder_type" not in node_knobs:
|
||||
# save the id of representation for all imported nodes
|
||||
imprint(node, data)
|
||||
node.knob("repre_id").setVisible(False)
|
||||
refresh_node(node)
|
||||
continue
|
||||
|
||||
if (
|
||||
"is_placeholder" not in node_knobs
|
||||
or (
|
||||
"is_placeholder" in node_knobs
|
||||
and node.knob("is_placeholder").value()
|
||||
)
|
||||
):
|
||||
siblings = list(loaded_nodes_set - {node})
|
||||
siblings_name = get_names_from_nodes(siblings)
|
||||
siblings = {"siblings": siblings_name}
|
||||
imprint(node, siblings)
|
||||
|
||||
def _imprint_inits(self):
|
||||
"""Add initial positions and dimensions to the attributes"""
|
||||
|
||||
for node in nuke.allNodes():
|
||||
refresh_node(node)
|
||||
imprint(node, {"x_init": node.xpos(), "y_init": node.ypos()})
|
||||
node.knob("x_init").setVisible(False)
|
||||
node.knob("y_init").setVisible(False)
|
||||
width = node.screenWidth()
|
||||
height = node.screenHeight()
|
||||
if "bdwidth" in node.knobs():
|
||||
imprint(node, {"w_init": width, "h_init": height})
|
||||
node.knob("w_init").setVisible(False)
|
||||
node.knob("h_init").setVisible(False)
|
||||
refresh_node(node)
|
||||
|
||||
def _update_nodes(
|
||||
self, placeholder, nodes, considered_nodes, offset_y=None
|
||||
):
|
||||
"""Adjust backdrop nodes dimensions and positions.
|
||||
|
||||
Considering some nodes sizes.
|
||||
|
||||
Args:
|
||||
nodes (list): list of nodes to update
|
||||
considered_nodes (list): list of nodes to consider while updating
|
||||
positions and dimensions
|
||||
offset (int): distance between copies
|
||||
"""
|
||||
|
||||
placeholder_node = nuke.toNode(placeholder.scene_identifier)
|
||||
|
||||
min_x, min_y, max_x, max_y = get_extreme_positions(considered_nodes)
|
||||
|
||||
diff_x = diff_y = 0
|
||||
contained_nodes = [] # for backdrops
|
||||
|
||||
if offset_y is None:
|
||||
width_ph = placeholder_node.screenWidth()
|
||||
height_ph = placeholder_node.screenHeight()
|
||||
diff_y = max_y - min_y - height_ph
|
||||
diff_x = max_x - min_x - width_ph
|
||||
contained_nodes = [placeholder_node]
|
||||
min_x = placeholder_node.xpos()
|
||||
min_y = placeholder_node.ypos()
|
||||
else:
|
||||
siblings = get_nodes_by_names(placeholder.data["siblings"])
|
||||
minX, _, maxX, _ = get_extreme_positions(siblings)
|
||||
diff_y = max_y - min_y + 20
|
||||
diff_x = abs(max_x - min_x - maxX + minX)
|
||||
contained_nodes = considered_nodes
|
||||
|
||||
if diff_y <= 0 and diff_x <= 0:
|
||||
return
|
||||
|
||||
for node in nodes:
|
||||
refresh_node(node)
|
||||
|
||||
if (
|
||||
node == placeholder_node
|
||||
or node in considered_nodes
|
||||
):
|
||||
continue
|
||||
|
||||
if (
|
||||
not isinstance(node, nuke.BackdropNode)
|
||||
or (
|
||||
isinstance(node, nuke.BackdropNode)
|
||||
and not set(contained_nodes) <= set(node.getNodes())
|
||||
)
|
||||
):
|
||||
if offset_y is None and node.xpos() >= min_x:
|
||||
node.setXpos(node.xpos() + diff_x)
|
||||
|
||||
if node.ypos() >= min_y:
|
||||
node.setYpos(node.ypos() + diff_y)
|
||||
|
||||
else:
|
||||
width = node.screenWidth()
|
||||
height = node.screenHeight()
|
||||
node.knob("bdwidth").setValue(width + diff_x)
|
||||
node.knob("bdheight").setValue(height + diff_y)
|
||||
|
||||
refresh_node(node)
|
||||
|
||||
def _set_loaded_connections(self, placeholder):
|
||||
"""
|
||||
set inputs and outputs of loaded nodes"""
|
||||
|
||||
placeholder_node = nuke.toNode(placeholder.scene_identifier)
|
||||
input_node, output_node = get_group_io_nodes(
|
||||
placeholder.data["last_loaded"]
|
||||
)
|
||||
for node in placeholder_node.dependent():
|
||||
for idx in range(node.inputs()):
|
||||
if node.input(idx) == placeholder_node and output_node:
|
||||
node.setInput(idx, output_node)
|
||||
|
||||
for node in placeholder_node.dependencies():
|
||||
for idx in range(placeholder_node.inputs()):
|
||||
if placeholder_node.input(idx) == node and input_node:
|
||||
input_node.setInput(0, node)
|
||||
|
||||
def _create_sib_copies(self, placeholder):
|
||||
""" creating copies of the palce_holder siblings (the ones who were
|
||||
loaded with it) for the new nodes added
|
||||
|
||||
Returns :
|
||||
copies (dict) : with copied nodes names and their copies
|
||||
"""
|
||||
|
||||
copies = {}
|
||||
siblings = get_nodes_by_names(placeholder.data["siblings"])
|
||||
for node in siblings:
|
||||
new_node = duplicate_node(node)
|
||||
|
||||
x_init = int(new_node.knob("x_init").getValue())
|
||||
y_init = int(new_node.knob("y_init").getValue())
|
||||
new_node.setXYpos(x_init, y_init)
|
||||
if isinstance(new_node, nuke.BackdropNode):
|
||||
w_init = new_node.knob("w_init").getValue()
|
||||
h_init = new_node.knob("h_init").getValue()
|
||||
new_node.knob("bdwidth").setValue(w_init)
|
||||
new_node.knob("bdheight").setValue(h_init)
|
||||
refresh_node(node)
|
||||
|
||||
if "repre_id" in node.knobs().keys():
|
||||
node.removeKnob(node.knob("repre_id"))
|
||||
copies[node.name()] = new_node
|
||||
return copies
|
||||
|
||||
def _set_copies_connections(self, placeholder, copies):
|
||||
"""Set inputs and outputs of the copies.
|
||||
|
||||
Args:
|
||||
copies (dict): Copied nodes by their names.
|
||||
"""
|
||||
|
||||
last_input, last_output = get_group_io_nodes(
|
||||
placeholder.data["last_loaded"]
|
||||
)
|
||||
siblings = get_nodes_by_names(placeholder.data["siblings"])
|
||||
siblings_input, siblings_output = get_group_io_nodes(siblings)
|
||||
copy_input = copies[siblings_input.name()]
|
||||
copy_output = copies[siblings_output.name()]
|
||||
|
||||
for node_init in siblings:
|
||||
if node_init == siblings_output:
|
||||
continue
|
||||
|
||||
node_copy = copies[node_init.name()]
|
||||
for node in node_init.dependent():
|
||||
for idx in range(node.inputs()):
|
||||
if node.input(idx) != node_init:
|
||||
continue
|
||||
|
||||
if node in siblings:
|
||||
copies[node.name()].setInput(idx, node_copy)
|
||||
else:
|
||||
last_input.setInput(0, node_copy)
|
||||
|
||||
for node in node_init.dependencies():
|
||||
for idx in range(node_init.inputs()):
|
||||
if node_init.input(idx) != node:
|
||||
continue
|
||||
|
||||
if node_init == siblings_input:
|
||||
copy_input.setInput(idx, node)
|
||||
elif node in siblings:
|
||||
node_copy.setInput(idx, copies[node.name()])
|
||||
else:
|
||||
node_copy.setInput(idx, last_output)
|
||||
|
||||
siblings_input.setInput(0, copy_output)
|
||||
|
||||
|
||||
class NukePlaceholderCreatePlugin(
|
||||
NukePlaceholderPlugin, PlaceholderCreateMixin
|
||||
):
|
||||
identifier = "nuke.create"
|
||||
label = "Nuke create"
|
||||
|
||||
def _parse_placeholder_node_data(self, node):
|
||||
placeholder_data = super(
|
||||
NukePlaceholderCreatePlugin, self
|
||||
)._parse_placeholder_node_data(node)
|
||||
|
||||
node_knobs = node.knobs()
|
||||
nb_children = 0
|
||||
if "nb_children" in node_knobs:
|
||||
nb_children = int(node_knobs["nb_children"].getValue())
|
||||
placeholder_data["nb_children"] = nb_children
|
||||
|
||||
siblings = []
|
||||
if "siblings" in node_knobs:
|
||||
siblings = node_knobs["siblings"].values()
|
||||
placeholder_data["siblings"] = siblings
|
||||
|
||||
node_full_name = node.fullName()
|
||||
placeholder_data["group_name"] = node_full_name.rpartition(".")[0]
|
||||
placeholder_data["last_loaded"] = []
|
||||
placeholder_data["delete"] = False
|
||||
return placeholder_data
|
||||
|
||||
def _before_instance_create(self, placeholder):
|
||||
placeholder.data["nodes_init"] = nuke.allNodes()
|
||||
|
||||
def collect_placeholders(self):
|
||||
output = []
|
||||
scene_placeholders = self._collect_scene_placeholders()
|
||||
for node_name, node in scene_placeholders.items():
|
||||
plugin_identifier_knob = node.knob("plugin_identifier")
|
||||
if (
|
||||
plugin_identifier_knob is None
|
||||
or plugin_identifier_knob.getValue() != self.identifier
|
||||
):
|
||||
continue
|
||||
|
||||
placeholder_data = self._parse_placeholder_node_data(node)
|
||||
|
||||
output.append(
|
||||
CreatePlaceholderItem(node_name, placeholder_data, self)
|
||||
)
|
||||
|
||||
return output
|
||||
|
||||
def populate_placeholder(self, placeholder):
|
||||
self.populate_create_placeholder(placeholder)
|
||||
|
||||
def repopulate_placeholder(self, placeholder):
|
||||
self.populate_create_placeholder(placeholder)
|
||||
|
||||
def get_placeholder_options(self, options=None):
|
||||
return self.get_create_plugin_options(options)
|
||||
|
||||
def post_placeholder_process(self, placeholder, failed):
|
||||
"""Cleanup placeholder after load of its corresponding representations.
|
||||
|
||||
Args:
|
||||
placeholder (PlaceholderItem): Item which was just used to load
|
||||
representation.
|
||||
failed (bool): Loading of representation failed.
|
||||
"""
|
||||
# deselect all selected nodes
|
||||
placeholder_node = nuke.toNode(placeholder.scene_identifier)
|
||||
|
||||
# getting the latest nodes added
|
||||
nodes_init = placeholder.data["nodes_init"]
|
||||
nodes_created = list(set(nuke.allNodes()) - set(nodes_init))
|
||||
self.log.debug("Created nodes: {}".format(nodes_created))
|
||||
if not nodes_created:
|
||||
return
|
||||
|
||||
placeholder.data["delete"] = True
|
||||
|
||||
nodes_created = self._move_to_placeholder_group(
|
||||
placeholder, nodes_created
|
||||
)
|
||||
placeholder.data["last_created"] = nodes_created
|
||||
refresh_nodes(nodes_created)
|
||||
|
||||
# positioning of the created nodes
|
||||
min_x, min_y, _, _ = get_extreme_positions(nodes_created)
|
||||
for node in nodes_created:
|
||||
xpos = (node.xpos() - min_x) + placeholder_node.xpos()
|
||||
ypos = (node.ypos() - min_y) + placeholder_node.ypos()
|
||||
node.setXYpos(xpos, ypos)
|
||||
refresh_nodes(nodes_created)
|
||||
|
||||
# fix the problem of z_order for backdrops
|
||||
self._fix_z_order(placeholder)
|
||||
|
||||
if placeholder.data.get("keep_placeholder"):
|
||||
self._imprint_siblings(placeholder)
|
||||
|
||||
if placeholder.data["nb_children"] == 0:
|
||||
# save initial nodes positions and dimensions, update them
|
||||
# and set inputs and outputs of created nodes
|
||||
|
||||
if placeholder.data.get("keep_placeholder"):
|
||||
self._imprint_inits()
|
||||
self._update_nodes(placeholder, nuke.allNodes(), nodes_created)
|
||||
|
||||
self._set_created_connections(placeholder)
|
||||
|
||||
elif placeholder.data["siblings"]:
|
||||
# create copies of placeholder siblings for the new created nodes,
|
||||
# set their inputs and outputs and update all nodes positions and
|
||||
# dimensions and siblings names
|
||||
|
||||
siblings = get_nodes_by_names(placeholder.data["siblings"])
|
||||
refresh_nodes(siblings)
|
||||
copies = self._create_sib_copies(placeholder)
|
||||
new_nodes = list(copies.values()) # copies nodes
|
||||
self._update_nodes(new_nodes, nodes_created)
|
||||
placeholder_node.removeKnob(placeholder_node.knob("siblings"))
|
||||
new_nodes_name = get_names_from_nodes(new_nodes)
|
||||
imprint(placeholder_node, {"siblings": new_nodes_name})
|
||||
self._set_copies_connections(placeholder, copies)
|
||||
|
||||
self._update_nodes(
|
||||
nuke.allNodes(),
|
||||
new_nodes + nodes_created,
|
||||
20
|
||||
)
|
||||
|
||||
new_siblings = get_names_from_nodes(new_nodes)
|
||||
placeholder.data["siblings"] = new_siblings
|
||||
|
||||
else:
|
||||
# if the placeholder doesn't have siblings, the created
|
||||
# nodes will be placed in a free space
|
||||
|
||||
xpointer, ypointer = find_free_space_to_paste_nodes(
|
||||
nodes_created, direction="bottom", offset=200
|
||||
)
|
||||
node = nuke.createNode("NoOp")
|
||||
reset_selection()
|
||||
nuke.delete(node)
|
||||
for node in nodes_created:
|
||||
xpos = (node.xpos() - min_x) + xpointer
|
||||
ypos = (node.ypos() - min_y) + ypointer
|
||||
node.setXYpos(xpos, ypos)
|
||||
|
||||
placeholder.data["nb_children"] += 1
|
||||
reset_selection()
|
||||
|
||||
# go back to root group
|
||||
nuke.root().begin()
|
||||
|
||||
def _move_to_placeholder_group(self, placeholder, nodes_created):
|
||||
"""
|
||||
opening the placeholder's group and copying created nodes in it.
|
||||
|
||||
Returns :
|
||||
nodes_created (list): the new list of pasted nodes
|
||||
"""
|
||||
groups_name = placeholder.data["group_name"]
|
||||
reset_selection()
|
||||
select_nodes(nodes_created)
|
||||
if groups_name:
|
||||
with node_tempfile() as filepath:
|
||||
nuke.nodeCopy(filepath)
|
||||
for node in nuke.selectedNodes():
|
||||
nuke.delete(node)
|
||||
group = nuke.toNode(groups_name)
|
||||
group.begin()
|
||||
nuke.nodePaste(filepath)
|
||||
nodes_created = nuke.selectedNodes()
|
||||
return nodes_created
|
||||
|
||||
def _fix_z_order(self, placeholder):
|
||||
"""Fix the problem of z_order when a backdrop is create."""
|
||||
|
||||
nodes_created = placeholder.data["last_created"]
|
||||
created_backdrops = []
|
||||
bd_orders = set()
|
||||
for node in nodes_created:
|
||||
if isinstance(node, nuke.BackdropNode):
|
||||
created_backdrops.append(node)
|
||||
bd_orders.add(node.knob("z_order").getValue())
|
||||
|
||||
if not bd_orders:
|
||||
return
|
||||
|
||||
sib_orders = set()
|
||||
for node_name in placeholder.data["siblings"]:
|
||||
node = nuke.toNode(node_name)
|
||||
if isinstance(node, nuke.BackdropNode):
|
||||
sib_orders.add(node.knob("z_order").getValue())
|
||||
|
||||
if not sib_orders:
|
||||
return
|
||||
|
||||
min_order = min(bd_orders)
|
||||
max_order = max(sib_orders)
|
||||
for backdrop_node in created_backdrops:
|
||||
z_order = backdrop_node.knob("z_order").getValue()
|
||||
backdrop_node.knob("z_order").setValue(
|
||||
z_order + max_order - min_order + 1)
|
||||
|
||||
def _imprint_siblings(self, placeholder):
|
||||
"""
|
||||
- add siblings names to placeholder attributes (nodes created with it)
|
||||
- add Id to the attributes of all the other nodes
|
||||
"""
|
||||
|
||||
created_nodes = placeholder.data["last_created"]
|
||||
created_nodes_set = set(created_nodes)
|
||||
|
||||
for node in created_nodes:
|
||||
node_knobs = node.knobs()
|
||||
|
||||
if (
|
||||
"is_placeholder" not in node_knobs
|
||||
or (
|
||||
"is_placeholder" in node_knobs
|
||||
and node.knob("is_placeholder").value()
|
||||
)
|
||||
):
|
||||
siblings = list(created_nodes_set - {node})
|
||||
siblings_name = get_names_from_nodes(siblings)
|
||||
siblings = {"siblings": siblings_name}
|
||||
imprint(node, siblings)
|
||||
|
||||
def _imprint_inits(self):
|
||||
"""Add initial positions and dimensions to the attributes"""
|
||||
|
||||
for node in nuke.allNodes():
|
||||
refresh_node(node)
|
||||
imprint(node, {"x_init": node.xpos(), "y_init": node.ypos()})
|
||||
node.knob("x_init").setVisible(False)
|
||||
node.knob("y_init").setVisible(False)
|
||||
width = node.screenWidth()
|
||||
height = node.screenHeight()
|
||||
if "bdwidth" in node.knobs():
|
||||
imprint(node, {"w_init": width, "h_init": height})
|
||||
node.knob("w_init").setVisible(False)
|
||||
node.knob("h_init").setVisible(False)
|
||||
refresh_node(node)
|
||||
|
||||
def _update_nodes(
|
||||
self, placeholder, nodes, considered_nodes, offset_y=None
|
||||
):
|
||||
"""Adjust backdrop nodes dimensions and positions.
|
||||
|
||||
Considering some nodes sizes.
|
||||
|
||||
Args:
placeholder (PlaceholderItem): placeholder item being processed
nodes (list): list of nodes to update
considered_nodes (list): list of nodes to consider while updating
positions and dimensions
offset_y (int): distance between copies
"""
|
||||
|
||||
placeholder_node = nuke.toNode(placeholder.scene_identifier)
|
||||
|
||||
min_x, min_y, max_x, max_y = get_extreme_positions(considered_nodes)
|
||||
|
||||
diff_x = diff_y = 0
|
||||
contained_nodes = [] # for backdrops
|
||||
|
||||
if offset_y is None:
|
||||
width_ph = placeholder_node.screenWidth()
|
||||
height_ph = placeholder_node.screenHeight()
|
||||
diff_y = max_y - min_y - height_ph
|
||||
diff_x = max_x - min_x - width_ph
|
||||
contained_nodes = [placeholder_node]
|
||||
min_x = placeholder_node.xpos()
|
||||
min_y = placeholder_node.ypos()
|
||||
else:
|
||||
siblings = get_nodes_by_names(placeholder.data["siblings"])
|
||||
minX, _, maxX, _ = get_extreme_positions(siblings)
|
||||
diff_y = max_y - min_y + 20
|
||||
diff_x = abs(max_x - min_x - maxX + minX)
|
||||
contained_nodes = considered_nodes
|
||||
|
||||
if diff_y <= 0 and diff_x <= 0:
|
||||
return
|
||||
|
||||
for node in nodes:
|
||||
refresh_node(node)
|
||||
|
||||
if (
|
||||
node == placeholder_node
|
||||
or node in considered_nodes
|
||||
):
|
||||
continue
|
||||
|
||||
if (
|
||||
not isinstance(node, nuke.BackdropNode)
|
||||
or (
|
||||
isinstance(node, nuke.BackdropNode)
|
||||
and not set(contained_nodes) <= set(node.getNodes())
|
||||
)
|
||||
):
|
||||
if offset_y is None and node.xpos() >= min_x:
|
||||
node.setXpos(node.xpos() + diff_x)
|
||||
|
||||
if node.ypos() >= min_y:
|
||||
node.setYpos(node.ypos() + diff_y)
|
||||
|
||||
else:
|
||||
width = node.screenWidth()
|
||||
height = node.screenHeight()
|
||||
node.knob("bdwidth").setValue(width + diff_x)
|
||||
node.knob("bdheight").setValue(height + diff_y)
|
||||
|
||||
refresh_node(node)
|
||||
|
||||
def _set_created_connections(self, placeholder):
|
||||
"""
|
||||
set inputs and outputs of created nodes"""
|
||||
|
||||
placeholder_node = nuke.toNode(placeholder.scene_identifier)
|
||||
input_node, output_node = get_group_io_nodes(
|
||||
placeholder.data["last_created"]
|
||||
)
|
||||
for node in placeholder_node.dependent():
|
||||
for idx in range(node.inputs()):
|
||||
if node.input(idx) == placeholder_node and output_node:
|
||||
node.setInput(idx, output_node)
|
||||
|
||||
for node in placeholder_node.dependencies():
|
||||
for idx in range(placeholder_node.inputs()):
|
||||
if placeholder_node.input(idx) == node and input_node:
|
||||
input_node.setInput(0, node)
|
||||
|
||||
def _create_sib_copies(self, placeholder):
|
||||
""" creating copies of the palce_holder siblings (the ones who were
|
||||
created with it) for the new nodes added
|
||||
|
||||
Returns :
|
||||
copies (dict) : with copied nodes names and their copies
|
||||
"""
|
||||
|
||||
copies = {}
|
||||
siblings = get_nodes_by_names(placeholder.data["siblings"])
|
||||
for node in siblings:
|
||||
new_node = duplicate_node(node)
|
||||
|
||||
x_init = int(new_node.knob("x_init").getValue())
|
||||
y_init = int(new_node.knob("y_init").getValue())
|
||||
new_node.setXYpos(x_init, y_init)
|
||||
if isinstance(new_node, nuke.BackdropNode):
|
||||
w_init = new_node.knob("w_init").getValue()
|
||||
h_init = new_node.knob("h_init").getValue()
|
||||
new_node.knob("bdwidth").setValue(w_init)
|
||||
new_node.knob("bdheight").setValue(h_init)
|
||||
refresh_node(node)
|
||||
|
||||
if "repre_id" in node.knobs().keys():
|
||||
node.removeKnob(node.knob("repre_id"))
|
||||
copies[node.name()] = new_node
|
||||
return copies
|
||||
|
||||
def _set_copies_connections(self, placeholder, copies):
|
||||
"""Set inputs and outputs of the copies.
|
||||
|
||||
Args:
|
||||
copies (dict): Copied nodes by their names.
|
||||
"""
|
||||
|
||||
last_input, last_output = get_group_io_nodes(
|
||||
placeholder.data["last_created"]
|
||||
)
|
||||
siblings = get_nodes_by_names(placeholder.data["siblings"])
|
||||
siblings_input, siblings_output = get_group_io_nodes(siblings)
|
||||
copy_input = copies[siblings_input.name()]
|
||||
copy_output = copies[siblings_output.name()]
|
||||
|
||||
for node_init in siblings:
|
||||
if node_init == siblings_output:
|
||||
continue
|
||||
|
||||
node_copy = copies[node_init.name()]
|
||||
for node in node_init.dependent():
|
||||
for idx in range(node.inputs()):
|
||||
if node.input(idx) != node_init:
|
||||
continue
|
||||
|
||||
if node in siblings:
|
||||
copies[node.name()].setInput(idx, node_copy)
|
||||
else:
|
||||
last_input.setInput(0, node_copy)
|
||||
|
||||
for node in node_init.dependencies():
|
||||
for idx in range(node_init.inputs()):
|
||||
if node_init.input(idx) != node:
|
||||
continue
|
||||
|
||||
if node_init == siblings_input:
|
||||
copy_input.setInput(idx, node)
|
||||
elif node in siblings:
|
||||
node_copy.setInput(idx, copies[node.name()])
|
||||
else:
|
||||
node_copy.setInput(idx, last_output)
|
||||
|
||||
siblings_input.setInput(0, copy_output)
|
||||
|
||||
|
||||
def build_workfile_template(*args, **kwargs):
|
||||
builder = NukeTemplateBuilder(registered_host())
|
||||
builder.build_template(*args, **kwargs)
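Illustrative usage sketch (not part of the diff above): how this helper might be wired into a Nuke menu; the menu path and callback name are assumptions.

# Assumes a running Nuke session with the AYON Nuke host installed.
import nuke

from ayon_core.hosts.nuke.api.workfile_template_builder import (
    build_workfile_template,
)


def _build_template_callback():
    # Build the configured workfile template for the current context.
    build_workfile_template()


# Menu location is hypothetical, used only for illustration.
nuke.menu("Nuke").addCommand(
    "AYON Examples/Build Workfile Template", _build_template_callback
)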
|
||||
|
|
|
|||
|
|
@ -0,0 +1,428 @@
|
|||
import nuke
|
||||
|
||||
from ayon_core.pipeline.workfile.workfile_template_builder import (
|
||||
CreatePlaceholderItem,
|
||||
PlaceholderCreateMixin,
|
||||
)
|
||||
from ayon_core.hosts.nuke.api.lib import (
|
||||
find_free_space_to_paste_nodes,
|
||||
get_extreme_positions,
|
||||
get_group_io_nodes,
|
||||
imprint,
|
||||
refresh_node,
|
||||
refresh_nodes,
|
||||
reset_selection,
|
||||
get_names_from_nodes,
|
||||
get_nodes_by_names,
|
||||
select_nodes,
|
||||
duplicate_node,
|
||||
node_tempfile,
|
||||
)
|
||||
from ayon_core.hosts.nuke.api.workfile_template_builder import (
|
||||
NukePlaceholderPlugin
|
||||
)
|
||||
|
||||
|
||||
class NukePlaceholderCreatePlugin(
|
||||
NukePlaceholderPlugin, PlaceholderCreateMixin
|
||||
):
|
||||
identifier = "nuke.create"
|
||||
label = "Nuke create"
|
||||
|
||||
def _parse_placeholder_node_data(self, node):
|
||||
placeholder_data = super(
|
||||
NukePlaceholderCreatePlugin, self
|
||||
)._parse_placeholder_node_data(node)
|
||||
|
||||
node_knobs = node.knobs()
|
||||
nb_children = 0
|
||||
if "nb_children" in node_knobs:
|
||||
nb_children = int(node_knobs["nb_children"].getValue())
|
||||
placeholder_data["nb_children"] = nb_children
|
||||
|
||||
siblings = []
|
||||
if "siblings" in node_knobs:
|
||||
siblings = node_knobs["siblings"].values()
|
||||
placeholder_data["siblings"] = siblings
|
||||
|
||||
node_full_name = node.fullName()
|
||||
placeholder_data["group_name"] = node_full_name.rpartition(".")[0]
|
||||
placeholder_data["last_loaded"] = []
|
||||
placeholder_data["delete"] = False
|
||||
return placeholder_data
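For orientation, a sketch of the dictionary this parser is expected to return for a freshly created placeholder; the node name and values are illustrative, not taken from a real scene.

# Hypothetical parsed data for a placeholder node "Group1.PLACEHOLDER":
example_placeholder_data = {
    # keys parsed by the base NukePlaceholderPlugin are omitted here
    "nb_children": 0,        # nothing has been populated from it yet
    "siblings": [],          # no sibling nodes recorded yet
    "group_name": "Group1",  # parent group part of node.fullName()
    "last_loaded": [],
    "delete": False,
}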
|
||||
|
||||
def _before_instance_create(self, placeholder):
|
||||
placeholder.data["nodes_init"] = nuke.allNodes()
|
||||
|
||||
def collect_placeholders(self):
|
||||
output = []
|
||||
scene_placeholders = self._collect_scene_placeholders()
|
||||
for node_name, node in scene_placeholders.items():
|
||||
plugin_identifier_knob = node.knob("plugin_identifier")
|
||||
if (
|
||||
plugin_identifier_knob is None
|
||||
or plugin_identifier_knob.getValue() != self.identifier
|
||||
):
|
||||
continue
|
||||
|
||||
placeholder_data = self._parse_placeholder_node_data(node)
|
||||
|
||||
output.append(
|
||||
CreatePlaceholderItem(node_name, placeholder_data, self)
|
||||
)
|
||||
|
||||
return output
|
||||
|
||||
def populate_placeholder(self, placeholder):
|
||||
self.populate_create_placeholder(placeholder)
|
||||
|
||||
def repopulate_placeholder(self, placeholder):
|
||||
self.populate_create_placeholder(placeholder)
|
||||
|
||||
def get_placeholder_options(self, options=None):
|
||||
return self.get_create_plugin_options(options)
|
||||
|
||||
def post_placeholder_process(self, placeholder, failed):
|
||||
"""Cleanup placeholder after load of its corresponding representations.
|
||||
|
||||
Args:
|
||||
placeholder (PlaceholderItem): Item which was just used to load
|
||||
representation.
|
||||
failed (bool): Loading of representation failed.
|
||||
"""
|
||||
# deselect all selected nodes
|
||||
placeholder_node = nuke.toNode(placeholder.scene_identifier)
|
||||
|
||||
# getting the latest nodes added
|
||||
nodes_init = placeholder.data["nodes_init"]
|
||||
nodes_created = list(set(nuke.allNodes()) - set(nodes_init))
|
||||
self.log.debug("Created nodes: {}".format(nodes_created))
|
||||
if not nodes_created:
|
||||
return
|
||||
|
||||
placeholder.data["delete"] = True
|
||||
|
||||
nodes_created = self._move_to_placeholder_group(
|
||||
placeholder, nodes_created
|
||||
)
|
||||
placeholder.data["last_created"] = nodes_created
|
||||
refresh_nodes(nodes_created)
|
||||
|
||||
# positioning of the created nodes
|
||||
min_x, min_y, _, _ = get_extreme_positions(nodes_created)
|
||||
for node in nodes_created:
|
||||
xpos = (node.xpos() - min_x) + placeholder_node.xpos()
|
||||
ypos = (node.ypos() - min_y) + placeholder_node.ypos()
|
||||
node.setXYpos(xpos, ypos)
|
||||
refresh_nodes(nodes_created)
|
||||
|
||||
# fix the problem of z_order for backdrops
|
||||
self._fix_z_order(placeholder)
|
||||
|
||||
if placeholder.data.get("keep_placeholder"):
|
||||
self._imprint_siblings(placeholder)
|
||||
|
||||
if placeholder.data["nb_children"] == 0:
|
||||
# save initial nodes positions and dimensions, update them
|
||||
# and set inputs and outputs of created nodes
|
||||
|
||||
if placeholder.data.get("keep_placeholder"):
|
||||
self._imprint_inits()
|
||||
self._update_nodes(placeholder, nuke.allNodes(), nodes_created)
|
||||
|
||||
self._set_created_connections(placeholder)
|
||||
|
||||
elif placeholder.data["siblings"]:
|
||||
# create copies of placeholder siblings for the new created nodes,
|
||||
# set their inputs and outputs and update all nodes positions and
|
||||
# dimensions and siblings names
|
||||
|
||||
siblings = get_nodes_by_names(placeholder.data["siblings"])
|
||||
refresh_nodes(siblings)
|
||||
copies = self._create_sib_copies(placeholder)
|
||||
new_nodes = list(copies.values()) # copies nodes
|
||||
self._update_nodes(new_nodes, nodes_created)
|
||||
placeholder_node.removeKnob(placeholder_node.knob("siblings"))
|
||||
new_nodes_name = get_names_from_nodes(new_nodes)
|
||||
imprint(placeholder_node, {"siblings": new_nodes_name})
|
||||
self._set_copies_connections(placeholder, copies)
|
||||
|
||||
self._update_nodes(
|
||||
nuke.allNodes(),
|
||||
new_nodes + nodes_created,
|
||||
20
|
||||
)
|
||||
|
||||
new_siblings = get_names_from_nodes(new_nodes)
|
||||
placeholder.data["siblings"] = new_siblings
|
||||
|
||||
else:
|
||||
# if the placeholder doesn't have siblings, the created
|
||||
# nodes will be placed in a free space
|
||||
|
||||
xpointer, ypointer = find_free_space_to_paste_nodes(
|
||||
nodes_created, direction="bottom", offset=200
|
||||
)
|
||||
node = nuke.createNode("NoOp")
|
||||
reset_selection()
|
||||
nuke.delete(node)
|
||||
for node in nodes_created:
|
||||
xpos = (node.xpos() - min_x) + xpointer
|
||||
ypos = (node.ypos() - min_y) + ypointer
|
||||
node.setXYpos(xpos, ypos)
|
||||
|
||||
placeholder.data["nb_children"] += 1
|
||||
reset_selection()
|
||||
|
||||
# go back to root group
|
||||
nuke.root().begin()
|
||||
|
||||
def _move_to_placeholder_group(self, placeholder, nodes_created):
|
||||
"""
|
||||
opening the placeholder's group and copying created nodes in it.
|
||||
|
||||
Returns :
|
||||
nodes_created (list): the new list of pasted nodes
|
||||
"""
|
||||
groups_name = placeholder.data["group_name"]
|
||||
reset_selection()
|
||||
select_nodes(nodes_created)
|
||||
if groups_name:
|
||||
with node_tempfile() as filepath:
|
||||
nuke.nodeCopy(filepath)
|
||||
for node in nuke.selectedNodes():
|
||||
nuke.delete(node)
|
||||
group = nuke.toNode(groups_name)
|
||||
group.begin()
|
||||
nuke.nodePaste(filepath)
|
||||
nodes_created = nuke.selectedNodes()
|
||||
return nodes_created
|
||||
|
||||
def _fix_z_order(self, placeholder):
|
||||
"""Fix the problem of z_order when a backdrop is create."""
|
||||
|
||||
nodes_created = placeholder.data["last_created"]
|
||||
created_backdrops = []
|
||||
bd_orders = set()
|
||||
for node in nodes_created:
|
||||
if isinstance(node, nuke.BackdropNode):
|
||||
created_backdrops.append(node)
|
||||
bd_orders.add(node.knob("z_order").getValue())
|
||||
|
||||
if not bd_orders:
|
||||
return
|
||||
|
||||
sib_orders = set()
|
||||
for node_name in placeholder.data["siblings"]:
|
||||
node = nuke.toNode(node_name)
|
||||
if isinstance(node, nuke.BackdropNode):
|
||||
sib_orders.add(node.knob("z_order").getValue())
|
||||
|
||||
if not sib_orders:
|
||||
return
|
||||
|
||||
min_order = min(bd_orders)
|
||||
max_order = max(sib_orders)
|
||||
for backdrop_node in created_backdrops:
|
||||
z_order = backdrop_node.knob("z_order").getValue()
|
||||
backdrop_node.knob("z_order").setValue(
|
||||
z_order + max_order - min_order + 1)
|
||||
|
||||
def _imprint_siblings(self, placeholder):
|
||||
"""
|
||||
- add siblings names to placeholder attributes (nodes created with it)
|
||||
- add Id to the attributes of all the other nodes
|
||||
"""
|
||||
|
||||
created_nodes = placeholder.data["last_created"]
|
||||
created_nodes_set = set(created_nodes)
|
||||
|
||||
for node in created_nodes:
|
||||
node_knobs = node.knobs()
|
||||
|
||||
if (
|
||||
"is_placeholder" not in node_knobs
|
||||
or (
|
||||
"is_placeholder" in node_knobs
|
||||
and node.knob("is_placeholder").value()
|
||||
)
|
||||
):
|
||||
siblings = list(created_nodes_set - {node})
|
||||
siblings_name = get_names_from_nodes(siblings)
|
||||
siblings = {"siblings": siblings_name}
|
||||
imprint(node, siblings)
|
||||
|
||||
def _imprint_inits(self):
|
||||
"""Add initial positions and dimensions to the attributes"""
|
||||
|
||||
for node in nuke.allNodes():
|
||||
refresh_node(node)
|
||||
imprint(node, {"x_init": node.xpos(), "y_init": node.ypos()})
|
||||
node.knob("x_init").setVisible(False)
|
||||
node.knob("y_init").setVisible(False)
|
||||
width = node.screenWidth()
|
||||
height = node.screenHeight()
|
||||
if "bdwidth" in node.knobs():
|
||||
imprint(node, {"w_init": width, "h_init": height})
|
||||
node.knob("w_init").setVisible(False)
|
||||
node.knob("h_init").setVisible(False)
|
||||
refresh_node(node)
|
||||
|
||||
def _update_nodes(
|
||||
self, placeholder, nodes, considered_nodes, offset_y=None
|
||||
):
|
||||
"""Adjust backdrop nodes dimensions and positions.
|
||||
|
||||
Considering some nodes sizes.
|
||||
|
||||
Args:
placeholder (PlaceholderItem): placeholder item being processed
nodes (list): list of nodes to update
considered_nodes (list): list of nodes to consider while updating
positions and dimensions
offset_y (int): distance between copies
"""
|
||||
|
||||
placeholder_node = nuke.toNode(placeholder.scene_identifier)
|
||||
|
||||
min_x, min_y, max_x, max_y = get_extreme_positions(considered_nodes)
|
||||
|
||||
diff_x = diff_y = 0
|
||||
contained_nodes = [] # for backdrops
|
||||
|
||||
if offset_y is None:
|
||||
width_ph = placeholder_node.screenWidth()
|
||||
height_ph = placeholder_node.screenHeight()
|
||||
diff_y = max_y - min_y - height_ph
|
||||
diff_x = max_x - min_x - width_ph
|
||||
contained_nodes = [placeholder_node]
|
||||
min_x = placeholder_node.xpos()
|
||||
min_y = placeholder_node.ypos()
|
||||
else:
|
||||
siblings = get_nodes_by_names(placeholder.data["siblings"])
|
||||
minX, _, maxX, _ = get_extreme_positions(siblings)
|
||||
diff_y = max_y - min_y + 20
|
||||
diff_x = abs(max_x - min_x - maxX + minX)
|
||||
contained_nodes = considered_nodes
|
||||
|
||||
if diff_y <= 0 and diff_x <= 0:
|
||||
return
|
||||
|
||||
for node in nodes:
|
||||
refresh_node(node)
|
||||
|
||||
if (
|
||||
node == placeholder_node
|
||||
or node in considered_nodes
|
||||
):
|
||||
continue
|
||||
|
||||
if (
|
||||
not isinstance(node, nuke.BackdropNode)
|
||||
or (
|
||||
isinstance(node, nuke.BackdropNode)
|
||||
and not set(contained_nodes) <= set(node.getNodes())
|
||||
)
|
||||
):
|
||||
if offset_y is None and node.xpos() >= min_x:
|
||||
node.setXpos(node.xpos() + diff_x)
|
||||
|
||||
if node.ypos() >= min_y:
|
||||
node.setYpos(node.ypos() + diff_y)
|
||||
|
||||
else:
|
||||
width = node.screenWidth()
|
||||
height = node.screenHeight()
|
||||
node.knob("bdwidth").setValue(width + diff_x)
|
||||
node.knob("bdheight").setValue(height + diff_y)
|
||||
|
||||
refresh_node(node)
|
||||
|
||||
def _set_created_connections(self, placeholder):
|
||||
"""
|
||||
set inputs and outputs of created nodes"""
|
||||
|
||||
placeholder_node = nuke.toNode(placeholder.scene_identifier)
|
||||
input_node, output_node = get_group_io_nodes(
|
||||
placeholder.data["last_created"]
|
||||
)
|
||||
for node in placeholder_node.dependent():
|
||||
for idx in range(node.inputs()):
|
||||
if node.input(idx) == placeholder_node and output_node:
|
||||
node.setInput(idx, output_node)
|
||||
|
||||
for node in placeholder_node.dependencies():
|
||||
for idx in range(placeholder_node.inputs()):
|
||||
if placeholder_node.input(idx) == node and input_node:
|
||||
input_node.setInput(0, node)
|
||||
|
||||
def _create_sib_copies(self, placeholder):
|
||||
""" creating copies of the palce_holder siblings (the ones who were
|
||||
created with it) for the new nodes added
|
||||
|
||||
Returns :
|
||||
copies (dict) : with copied nodes names and their copies
|
||||
"""
|
||||
|
||||
copies = {}
|
||||
siblings = get_nodes_by_names(placeholder.data["siblings"])
|
||||
for node in siblings:
|
||||
new_node = duplicate_node(node)
|
||||
|
||||
x_init = int(new_node.knob("x_init").getValue())
|
||||
y_init = int(new_node.knob("y_init").getValue())
|
||||
new_node.setXYpos(x_init, y_init)
|
||||
if isinstance(new_node, nuke.BackdropNode):
|
||||
w_init = new_node.knob("w_init").getValue()
|
||||
h_init = new_node.knob("h_init").getValue()
|
||||
new_node.knob("bdwidth").setValue(w_init)
|
||||
new_node.knob("bdheight").setValue(h_init)
|
||||
refresh_node(node)
|
||||
|
||||
if "repre_id" in node.knobs().keys():
|
||||
node.removeKnob(node.knob("repre_id"))
|
||||
copies[node.name()] = new_node
|
||||
return copies
|
||||
|
||||
def _set_copies_connections(self, placeholder, copies):
|
||||
"""Set inputs and outputs of the copies.
|
||||
|
||||
Args:
|
||||
copies (dict): Copied nodes by their names.
|
||||
"""
|
||||
|
||||
last_input, last_output = get_group_io_nodes(
|
||||
placeholder.data["last_created"]
|
||||
)
|
||||
siblings = get_nodes_by_names(placeholder.data["siblings"])
|
||||
siblings_input, siblings_output = get_group_io_nodes(siblings)
|
||||
copy_input = copies[siblings_input.name()]
|
||||
copy_output = copies[siblings_output.name()]
|
||||
|
||||
for node_init in siblings:
|
||||
if node_init == siblings_output:
|
||||
continue
|
||||
|
||||
node_copy = copies[node_init.name()]
|
||||
for node in node_init.dependent():
|
||||
for idx in range(node.inputs()):
|
||||
if node.input(idx) != node_init:
|
||||
continue
|
||||
|
||||
if node in siblings:
|
||||
copies[node.name()].setInput(idx, node_copy)
|
||||
else:
|
||||
last_input.setInput(0, node_copy)
|
||||
|
||||
for node in node_init.dependencies():
|
||||
for idx in range(node_init.inputs()):
|
||||
if node_init.input(idx) != node:
|
||||
continue
|
||||
|
||||
if node_init == siblings_input:
|
||||
copy_input.setInput(idx, node)
|
||||
elif node in siblings:
|
||||
node_copy.setInput(idx, copies[node.name()])
|
||||
else:
|
||||
node_copy.setInput(idx, last_output)
|
||||
|
||||
siblings_input.setInput(0, copy_output)
|
||||
|
|
@ -0,0 +1,455 @@
|
|||
import nuke
|
||||
|
||||
from ayon_core.pipeline.workfile.workfile_template_builder import (
|
||||
LoadPlaceholderItem,
|
||||
PlaceholderLoadMixin,
|
||||
)
|
||||
from ayon_core.hosts.nuke.api.lib import (
|
||||
find_free_space_to_paste_nodes,
|
||||
get_extreme_positions,
|
||||
get_group_io_nodes,
|
||||
imprint,
|
||||
refresh_node,
|
||||
refresh_nodes,
|
||||
reset_selection,
|
||||
get_names_from_nodes,
|
||||
get_nodes_by_names,
|
||||
select_nodes,
|
||||
duplicate_node,
|
||||
node_tempfile,
|
||||
)
|
||||
from ayon_core.hosts.nuke.api.workfile_template_builder import (
|
||||
NukePlaceholderPlugin
|
||||
)
|
||||
|
||||
|
||||
class NukePlaceholderLoadPlugin(NukePlaceholderPlugin, PlaceholderLoadMixin):
|
||||
identifier = "nuke.load"
|
||||
label = "Nuke load"
|
||||
|
||||
def _parse_placeholder_node_data(self, node):
|
||||
placeholder_data = super(
|
||||
NukePlaceholderLoadPlugin, self
|
||||
)._parse_placeholder_node_data(node)
|
||||
|
||||
node_knobs = node.knobs()
|
||||
nb_children = 0
|
||||
if "nb_children" in node_knobs:
|
||||
nb_children = int(node_knobs["nb_children"].getValue())
|
||||
placeholder_data["nb_children"] = nb_children
|
||||
|
||||
siblings = []
|
||||
if "siblings" in node_knobs:
|
||||
siblings = node_knobs["siblings"].values()
|
||||
placeholder_data["siblings"] = siblings
|
||||
|
||||
node_full_name = node.fullName()
|
||||
placeholder_data["group_name"] = node_full_name.rpartition(".")[0]
|
||||
placeholder_data["last_loaded"] = []
|
||||
placeholder_data["delete"] = False
|
||||
return placeholder_data
|
||||
|
||||
def _get_loaded_repre_ids(self):
|
||||
loaded_representation_ids = self.builder.get_shared_populate_data(
|
||||
"loaded_representation_ids"
|
||||
)
|
||||
if loaded_representation_ids is None:
|
||||
loaded_representation_ids = set()
|
||||
for node in nuke.allNodes():
|
||||
if "repre_id" in node.knobs():
|
||||
loaded_representation_ids.add(
|
||||
node.knob("repre_id").getValue()
|
||||
)
|
||||
|
||||
self.builder.set_shared_populate_data(
|
||||
"loaded_representation_ids", loaded_representation_ids
|
||||
)
|
||||
return loaded_representation_ids
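A minimal standalone sketch of the caching pattern used in '_get_loaded_repre_ids'; the 'builder' object is assumed to expose the shared populate data API exactly as used above.

def get_cached_repre_ids(builder, collect_ids):
    # Reuse ids cached by a previous placeholder, collect them only once.
    cached = builder.get_shared_populate_data("loaded_representation_ids")
    if cached is None:
        cached = set(collect_ids())
        builder.set_shared_populate_data("loaded_representation_ids", cached)
    return cached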
|
||||
|
||||
def _before_placeholder_load(self, placeholder):
|
||||
placeholder.data["nodes_init"] = nuke.allNodes()
|
||||
|
||||
def _before_repre_load(self, placeholder, representation):
|
||||
placeholder.data["last_repre_id"] = representation["id"]
|
||||
|
||||
def collect_placeholders(self):
|
||||
output = []
|
||||
scene_placeholders = self._collect_scene_placeholders()
|
||||
for node_name, node in scene_placeholders.items():
|
||||
plugin_identifier_knob = node.knob("plugin_identifier")
|
||||
if (
|
||||
plugin_identifier_knob is None
|
||||
or plugin_identifier_knob.getValue() != self.identifier
|
||||
):
|
||||
continue
|
||||
|
||||
placeholder_data = self._parse_placeholder_node_data(node)
|
||||
# TODO: do data validations and maybe upgrades if they are invalid
|
||||
output.append(
|
||||
LoadPlaceholderItem(node_name, placeholder_data, self)
|
||||
)
|
||||
|
||||
return output
|
||||
|
||||
def populate_placeholder(self, placeholder):
|
||||
self.populate_load_placeholder(placeholder)
|
||||
|
||||
def repopulate_placeholder(self, placeholder):
|
||||
repre_ids = self._get_loaded_repre_ids()
|
||||
self.populate_load_placeholder(placeholder, repre_ids)
|
||||
|
||||
def get_placeholder_options(self, options=None):
|
||||
return self.get_load_plugin_options(options)
|
||||
|
||||
def post_placeholder_process(self, placeholder, failed):
|
||||
"""Cleanup placeholder after load of its corresponding representations.
|
||||
|
||||
Args:
|
||||
placeholder (PlaceholderItem): Item which was just used to load
|
||||
representation.
|
||||
failed (bool): Loading of representation failed.
|
||||
"""
|
||||
# deselect all selected nodes
|
||||
placeholder_node = nuke.toNode(placeholder.scene_identifier)
|
||||
|
||||
# getting the latest nodes added
|
||||
# TODO get from shared populate data!
|
||||
nodes_init = placeholder.data["nodes_init"]
|
||||
nodes_loaded = list(set(nuke.allNodes()) - set(nodes_init))
|
||||
self.log.debug("Loaded nodes: {}".format(nodes_loaded))
|
||||
if not nodes_loaded:
|
||||
return
|
||||
|
||||
placeholder.data["delete"] = True
|
||||
|
||||
nodes_loaded = self._move_to_placeholder_group(
|
||||
placeholder, nodes_loaded
|
||||
)
|
||||
placeholder.data["last_loaded"] = nodes_loaded
|
||||
refresh_nodes(nodes_loaded)
|
||||
|
||||
# positioning of the loaded nodes
|
||||
min_x, min_y, _, _ = get_extreme_positions(nodes_loaded)
|
||||
for node in nodes_loaded:
|
||||
xpos = (node.xpos() - min_x) + placeholder_node.xpos()
|
||||
ypos = (node.ypos() - min_y) + placeholder_node.ypos()
|
||||
node.setXYpos(xpos, ypos)
|
||||
refresh_nodes(nodes_loaded)
|
||||
|
||||
# fix the problem of z_order for backdrops
|
||||
self._fix_z_order(placeholder)
|
||||
|
||||
if placeholder.data.get("keep_placeholder"):
|
||||
self._imprint_siblings(placeholder)
|
||||
|
||||
if placeholder.data["nb_children"] == 0:
|
||||
# save initial nodes positions and dimensions, update them
|
||||
# and set inputs and outputs of loaded nodes
|
||||
if placeholder.data.get("keep_placeholder"):
|
||||
self._imprint_inits()
|
||||
self._update_nodes(placeholder, nuke.allNodes(), nodes_loaded)
|
||||
|
||||
self._set_loaded_connections(placeholder)
|
||||
|
||||
elif placeholder.data["siblings"]:
|
||||
# create copies of placeholder siblings for the new loaded nodes,
|
||||
# set their inputs and outputs and update all nodes positions and
|
||||
# dimensions and siblings names
|
||||
|
||||
siblings = get_nodes_by_names(placeholder.data["siblings"])
|
||||
refresh_nodes(siblings)
|
||||
copies = self._create_sib_copies(placeholder)
|
||||
new_nodes = list(copies.values()) # copies nodes
|
||||
self._update_nodes(new_nodes, nodes_loaded)
|
||||
placeholder_node.removeKnob(placeholder_node.knob("siblings"))
|
||||
new_nodes_name = get_names_from_nodes(new_nodes)
|
||||
imprint(placeholder_node, {"siblings": new_nodes_name})
|
||||
self._set_copies_connections(placeholder, copies)
|
||||
|
||||
self._update_nodes(
|
||||
nuke.allNodes(),
|
||||
new_nodes + nodes_loaded,
|
||||
20
|
||||
)
|
||||
|
||||
new_siblings = get_names_from_nodes(new_nodes)
|
||||
placeholder.data["siblings"] = new_siblings
|
||||
|
||||
else:
|
||||
# if the placeholder doesn't have siblings, the loaded
|
||||
# nodes will be placed in a free space
|
||||
|
||||
xpointer, ypointer = find_free_space_to_paste_nodes(
|
||||
nodes_loaded, direction="bottom", offset=200
|
||||
)
|
||||
node = nuke.createNode("NoOp")
|
||||
reset_selection()
|
||||
nuke.delete(node)
|
||||
for node in nodes_loaded:
|
||||
xpos = (node.xpos() - min_x) + xpointer
|
||||
ypos = (node.ypos() - min_y) + ypointer
|
||||
node.setXYpos(xpos, ypos)
|
||||
|
||||
placeholder.data["nb_children"] += 1
|
||||
reset_selection()
|
||||
|
||||
# go back to root group
|
||||
nuke.root().begin()
|
||||
|
||||
def _move_to_placeholder_group(self, placeholder, nodes_loaded):
|
||||
"""
|
||||
opening the placeholder's group and copying loaded nodes in it.
|
||||
|
||||
Returns :
|
||||
nodes_loaded (list): the new list of pasted nodes
|
||||
"""
|
||||
|
||||
groups_name = placeholder.data["group_name"]
|
||||
reset_selection()
|
||||
select_nodes(nodes_loaded)
|
||||
if groups_name:
|
||||
with node_tempfile() as filepath:
|
||||
nuke.nodeCopy(filepath)
|
||||
for node in nuke.selectedNodes():
|
||||
nuke.delete(node)
|
||||
group = nuke.toNode(groups_name)
|
||||
group.begin()
|
||||
nuke.nodePaste(filepath)
|
||||
nodes_loaded = nuke.selectedNodes()
|
||||
return nodes_loaded
|
||||
|
||||
def _fix_z_order(self, placeholder):
|
||||
"""Fix the problem of z_order when a backdrop is loaded."""
|
||||
|
||||
nodes_loaded = placeholder.data["last_loaded"]
|
||||
loaded_backdrops = []
|
||||
bd_orders = set()
|
||||
for node in nodes_loaded:
|
||||
if isinstance(node, nuke.BackdropNode):
|
||||
loaded_backdrops.append(node)
|
||||
bd_orders.add(node.knob("z_order").getValue())
|
||||
|
||||
if not bd_orders:
|
||||
return
|
||||
|
||||
sib_orders = set()
|
||||
for node_name in placeholder.data["siblings"]:
|
||||
node = nuke.toNode(node_name)
|
||||
if isinstance(node, nuke.BackdropNode):
|
||||
sib_orders.add(node.knob("z_order").getValue())
|
||||
|
||||
if not sib_orders:
|
||||
return
|
||||
|
||||
min_order = min(bd_orders)
|
||||
max_order = max(sib_orders)
|
||||
for backdrop_node in loaded_backdrops:
|
||||
z_order = backdrop_node.knob("z_order").getValue()
|
||||
backdrop_node.knob("z_order").setValue(
|
||||
z_order + max_order - min_order + 1)
|
||||
|
||||
def _imprint_siblings(self, placeholder):
|
||||
"""
|
||||
- add siblings names to placeholder attributes (nodes loaded with it)
|
||||
- add Id to the attributes of all the other nodes
|
||||
"""
|
||||
|
||||
loaded_nodes = placeholder.data["last_loaded"]
|
||||
loaded_nodes_set = set(loaded_nodes)
|
||||
data = {"repre_id": str(placeholder.data["last_repre_id"])}
|
||||
|
||||
for node in loaded_nodes:
|
||||
node_knobs = node.knobs()
|
||||
if "builder_type" not in node_knobs:
|
||||
# save the id of representation for all imported nodes
|
||||
imprint(node, data)
|
||||
node.knob("repre_id").setVisible(False)
|
||||
refresh_node(node)
|
||||
continue
|
||||
|
||||
if (
|
||||
"is_placeholder" not in node_knobs
|
||||
or (
|
||||
"is_placeholder" in node_knobs
|
||||
and node.knob("is_placeholder").value()
|
||||
)
|
||||
):
|
||||
siblings = list(loaded_nodes_set - {node})
|
||||
siblings_name = get_names_from_nodes(siblings)
|
||||
siblings = {"siblings": siblings_name}
|
||||
imprint(node, siblings)
|
||||
|
||||
def _imprint_inits(self):
|
||||
"""Add initial positions and dimensions to the attributes"""
|
||||
|
||||
for node in nuke.allNodes():
|
||||
refresh_node(node)
|
||||
imprint(node, {"x_init": node.xpos(), "y_init": node.ypos()})
|
||||
node.knob("x_init").setVisible(False)
|
||||
node.knob("y_init").setVisible(False)
|
||||
width = node.screenWidth()
|
||||
height = node.screenHeight()
|
||||
if "bdwidth" in node.knobs():
|
||||
imprint(node, {"w_init": width, "h_init": height})
|
||||
node.knob("w_init").setVisible(False)
|
||||
node.knob("h_init").setVisible(False)
|
||||
refresh_node(node)
|
||||
|
||||
def _update_nodes(
|
||||
self, placeholder, nodes, considered_nodes, offset_y=None
|
||||
):
|
||||
"""Adjust backdrop nodes dimensions and positions.
|
||||
|
||||
Considering some nodes sizes.
|
||||
|
||||
Args:
placeholder (PlaceholderItem): placeholder item being processed
nodes (list): list of nodes to update
considered_nodes (list): list of nodes to consider while updating
positions and dimensions
offset_y (int): distance between copies
"""
|
||||
|
||||
placeholder_node = nuke.toNode(placeholder.scene_identifier)
|
||||
|
||||
min_x, min_y, max_x, max_y = get_extreme_positions(considered_nodes)
|
||||
|
||||
diff_x = diff_y = 0
|
||||
contained_nodes = [] # for backdrops
|
||||
|
||||
if offset_y is None:
|
||||
width_ph = placeholder_node.screenWidth()
|
||||
height_ph = placeholder_node.screenHeight()
|
||||
diff_y = max_y - min_y - height_ph
|
||||
diff_x = max_x - min_x - width_ph
|
||||
contained_nodes = [placeholder_node]
|
||||
min_x = placeholder_node.xpos()
|
||||
min_y = placeholder_node.ypos()
|
||||
else:
|
||||
siblings = get_nodes_by_names(placeholder.data["siblings"])
|
||||
minX, _, maxX, _ = get_extreme_positions(siblings)
|
||||
diff_y = max_y - min_y + 20
|
||||
diff_x = abs(max_x - min_x - maxX + minX)
|
||||
contained_nodes = considered_nodes
|
||||
|
||||
if diff_y <= 0 and diff_x <= 0:
|
||||
return
|
||||
|
||||
for node in nodes:
|
||||
refresh_node(node)
|
||||
|
||||
if (
|
||||
node == placeholder_node
|
||||
or node in considered_nodes
|
||||
):
|
||||
continue
|
||||
|
||||
if (
|
||||
not isinstance(node, nuke.BackdropNode)
|
||||
or (
|
||||
isinstance(node, nuke.BackdropNode)
|
||||
and not set(contained_nodes) <= set(node.getNodes())
|
||||
)
|
||||
):
|
||||
if offset_y is None and node.xpos() >= min_x:
|
||||
node.setXpos(node.xpos() + diff_x)
|
||||
|
||||
if node.ypos() >= min_y:
|
||||
node.setYpos(node.ypos() + diff_y)
|
||||
|
||||
else:
|
||||
width = node.screenWidth()
|
||||
height = node.screenHeight()
|
||||
node.knob("bdwidth").setValue(width + diff_x)
|
||||
node.knob("bdheight").setValue(height + diff_y)
|
||||
|
||||
refresh_node(node)
|
||||
|
||||
def _set_loaded_connections(self, placeholder):
|
||||
"""
|
||||
set inputs and outputs of loaded nodes"""
|
||||
|
||||
placeholder_node = nuke.toNode(placeholder.scene_identifier)
|
||||
input_node, output_node = get_group_io_nodes(
|
||||
placeholder.data["last_loaded"]
|
||||
)
|
||||
for node in placeholder_node.dependent():
|
||||
for idx in range(node.inputs()):
|
||||
if node.input(idx) == placeholder_node and output_node:
|
||||
node.setInput(idx, output_node)
|
||||
|
||||
for node in placeholder_node.dependencies():
|
||||
for idx in range(placeholder_node.inputs()):
|
||||
if placeholder_node.input(idx) == node and input_node:
|
||||
input_node.setInput(0, node)
|
||||
|
||||
def _create_sib_copies(self, placeholder):
|
||||
""" creating copies of the palce_holder siblings (the ones who were
|
||||
loaded with it) for the new nodes added
|
||||
|
||||
Returns :
|
||||
copies (dict) : with copied nodes names and their copies
|
||||
"""
|
||||
|
||||
copies = {}
|
||||
siblings = get_nodes_by_names(placeholder.data["siblings"])
|
||||
for node in siblings:
|
||||
new_node = duplicate_node(node)
|
||||
|
||||
x_init = int(new_node.knob("x_init").getValue())
|
||||
y_init = int(new_node.knob("y_init").getValue())
|
||||
new_node.setXYpos(x_init, y_init)
|
||||
if isinstance(new_node, nuke.BackdropNode):
|
||||
w_init = new_node.knob("w_init").getValue()
|
||||
h_init = new_node.knob("h_init").getValue()
|
||||
new_node.knob("bdwidth").setValue(w_init)
|
||||
new_node.knob("bdheight").setValue(h_init)
|
||||
refresh_node(node)
|
||||
|
||||
if "repre_id" in node.knobs().keys():
|
||||
node.removeKnob(node.knob("repre_id"))
|
||||
copies[node.name()] = new_node
|
||||
return copies
|
||||
|
||||
def _set_copies_connections(self, placeholder, copies):
|
||||
"""Set inputs and outputs of the copies.
|
||||
|
||||
Args:
|
||||
copies (dict): Copied nodes by their names.
|
||||
"""
|
||||
|
||||
last_input, last_output = get_group_io_nodes(
|
||||
placeholder.data["last_loaded"]
|
||||
)
|
||||
siblings = get_nodes_by_names(placeholder.data["siblings"])
|
||||
siblings_input, siblings_output = get_group_io_nodes(siblings)
|
||||
copy_input = copies[siblings_input.name()]
|
||||
copy_output = copies[siblings_output.name()]
|
||||
|
||||
for node_init in siblings:
|
||||
if node_init == siblings_output:
|
||||
continue
|
||||
|
||||
node_copy = copies[node_init.name()]
|
||||
for node in node_init.dependent():
|
||||
for idx in range(node.inputs()):
|
||||
if node.input(idx) != node_init:
|
||||
continue
|
||||
|
||||
if node in siblings:
|
||||
copies[node.name()].setInput(idx, node_copy)
|
||||
else:
|
||||
last_input.setInput(0, node_copy)
|
||||
|
||||
for node in node_init.dependencies():
|
||||
for idx in range(node_init.inputs()):
|
||||
if node_init.input(idx) != node:
|
||||
continue
|
||||
|
||||
if node_init == siblings_input:
|
||||
copy_input.setInput(idx, node)
|
||||
elif node in siblings:
|
||||
node_copy.setInput(idx, copies[node.name()])
|
||||
else:
|
||||
node_copy.setInput(idx, last_output)
|
||||
|
||||
siblings_input.setInput(0, copy_output)
|
||||
|
|
@ -35,8 +35,12 @@ class ImageCreator(Creator):
|
|||
create_empty_group = False
|
||||
|
||||
stub = api.stub() # only after PS is up
|
||||
top_level_selected_items = stub.get_selected_layers()
|
||||
if pre_create_data.get("use_selection"):
|
||||
try:
|
||||
top_level_selected_items = stub.get_selected_layers()
|
||||
except ValueError:
|
||||
raise CreatorError("Cannot group locked Background layer!")
|
||||
|
||||
only_single_item_selected = len(top_level_selected_items) == 1
|
||||
if (
|
||||
only_single_item_selected or
|
||||
|
|
@ -50,11 +54,12 @@ class ImageCreator(Creator):
|
|||
group = stub.group_selected_layers(product_name_from_ui)
|
||||
groups_to_create.append(group)
|
||||
else:
|
||||
stub.select_layers(stub.get_layers())
|
||||
try:
|
||||
stub.select_layers(stub.get_layers())
|
||||
group = stub.group_selected_layers(product_name_from_ui)
|
||||
except:
|
||||
except ValueError:
|
||||
raise CreatorError("Cannot group locked Background layer!")
|
||||
|
||||
groups_to_create.append(group)
|
||||
|
||||
# create empty group if nothing selected
|
||||
|
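A small sketch of the guarded grouping pattern introduced above; 'stub' stands in for the Photoshop stub and the CreatorError import path is assumed.

from ayon_core.pipeline.create import CreatorError  # assumed import path


def group_layers_safely(stub, product_name):
    try:
        stub.select_layers(stub.get_layers())
        return stub.group_selected_layers(product_name)
    except ValueError:
        # Photoshop refuses to group a locked Background layer.
        raise CreatorError("Cannot group locked Background layer!")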
|
|
|||
|
|
@ -1,5 +1,3 @@
|
|||
import os
|
||||
|
||||
import pyblish.api
|
||||
import pyblish.util
|
||||
|
||||
|
|
|
|||
|
|
@ -10,9 +10,13 @@ class CollectFrameDataFromAssetEntity(pyblish.api.InstancePlugin):
|
|||
|
||||
order = pyblish.api.CollectorOrder + 0.491
|
||||
label = "Collect Missing Frame Data From Folder"
|
||||
families = ["plate", "pointcache",
|
||||
"vdbcache", "online",
|
||||
"render"]
|
||||
families = [
|
||||
"plate",
|
||||
"pointcache",
|
||||
"vdbcache",
|
||||
"online",
|
||||
"render",
|
||||
]
|
||||
hosts = ["traypublisher"]
|
||||
|
||||
def process(self, instance):
|
||||
|
|
@ -22,16 +26,26 @@ class CollectFrameDataFromAssetEntity(pyblish.api.InstancePlugin):
|
|||
"frameStart",
|
||||
"frameEnd",
|
||||
"handleStart",
|
||||
"handleEnd"
|
||||
"handleEnd",
|
||||
):
|
||||
if key not in instance.data:
|
||||
missing_keys.append(key)
|
||||
|
||||
# Skip the logic if all keys are already collected.
|
||||
# NOTE: In editorial is not 'folderEntity' filled, so it would crash
|
||||
# even if we don't need it.
|
||||
if not missing_keys:
|
||||
return
|
||||
|
||||
keys_set = []
|
||||
folder_attributes = instance.data["folderEntity"]["attrib"]
|
||||
for key in missing_keys:
|
||||
if key in folder_attributes:
|
||||
instance.data[key] = folder_attributes[key]
|
||||
keys_set.append(key)
|
||||
|
||||
if keys_set:
|
||||
self.log.debug(f"Frame range data {keys_set} "
|
||||
"has been collected from folder entity.")
|
||||
self.log.debug(
|
||||
f"Frame range data {keys_set} "
|
||||
"has been collected from folder entity."
|
||||
)
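The collector above only fills frame keys that are missing on the instance; a standalone sketch of that fill logic with plain dictionaries standing in for pyblish data.

FRAME_KEYS = ("frameStart", "frameEnd", "handleStart", "handleEnd")


def fill_missing_frame_data(instance_data, folder_attributes):
    # Copy missing frame range keys from folder attributes to instance data.
    keys_set = []
    for key in FRAME_KEYS:
        if key not in instance_data and key in folder_attributes:
            instance_data[key] = folder_attributes[key]
            keys_set.append(key)
    return keys_set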
|
||||
|
|
@ -260,11 +260,11 @@ class UEProjectGenerationWorker(UEWorker):
|
|||
self.failed.emit(msg, return_code)
|
||||
raise RuntimeError(msg)
|
||||
|
||||
# ensure we have PySide2 installed in engine
|
||||
# ensure we have PySide2/6 installed in engine
|
||||
|
||||
self.progress.emit(0)
|
||||
self.stage_begin.emit(
|
||||
(f"Checking PySide2 installation... {stage_count} "
|
||||
(f"Checking Qt bindings installation... {stage_count} "
|
||||
f" out of {stage_count}"))
|
||||
python_path = None
|
||||
if platform.system().lower() == "windows":
|
||||
|
|
@ -287,11 +287,30 @@ class UEProjectGenerationWorker(UEWorker):
|
|||
msg = f"Unreal Python not found at {python_path}"
|
||||
self.failed.emit(msg, 1)
|
||||
raise RuntimeError(msg)
|
||||
pyside_cmd = [python_path.as_posix(),
|
||||
"-m",
|
||||
"pip",
|
||||
"install",
|
||||
"pyside2"]
|
||||
|
||||
pyside_version = "PySide2"
|
||||
ue_version = self.ue_version.split(".")
|
||||
if int(ue_version[0]) == 5 and int(ue_version[1]) >= 4:
|
||||
# Use PySide6 6.6.3 because 6.7.0 had a bug
|
||||
# - 'QPushButton' can't be added to 'QBoxLayout'
|
||||
pyside_version = "PySide6==6.6.3"
|
||||
|
||||
site_packages_prefix = python_path.parent.as_posix()
|
||||
|
||||
pyside_cmd = [
|
||||
python_path.as_posix(),
|
||||
"-m", "pip",
|
||||
"install",
|
||||
"--ignore-installed",
|
||||
pyside_version,
|
||||
|
||||
]
|
||||
|
||||
if platform.system().lower() == "windows":
|
||||
pyside_cmd += ["--target", site_packages_prefix]
|
||||
|
||||
print(f"--- Installing {pyside_version} ...")
|
||||
print(" ".join(pyside_cmd))
|
||||
|
||||
pyside_install = subprocess.Popen(pyside_cmd,
|
||||
stdout=subprocess.PIPE,
|
||||
|
|
@ -306,8 +325,8 @@ class UEProjectGenerationWorker(UEWorker):
|
|||
return_code = pyside_install.wait()
|
||||
|
||||
if return_code and return_code != 0:
|
||||
msg = ("Failed to create the project! "
|
||||
"The installation of PySide2 has failed!")
|
||||
msg = (f"Failed to create the project! {return_code} "
|
||||
f"The installation of {pyside_version} has failed!: {pyside_install}")
|
||||
self.failed.emit(msg, return_code)
|
||||
raise RuntimeError(msg)
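Roughly, for Unreal 5.4+ on Windows the block above ends up running a command like the sketch below; the executable and target paths are placeholders, not real installation paths.

import subprocess

python_exe = "C:/UE_5.4/Engine/Binaries/ThirdParty/Python3/Win64/python.exe"
site_packages = "C:/UE_5.4/Engine/Binaries/ThirdParty/Python3/Win64/Lib/site-packages"

cmd = [
    python_exe, "-m", "pip", "install",
    "--ignore-installed", "PySide6==6.6.3",
    # '--target' is only appended on Windows in the code above.
    "--target", site_packages,
]
subprocess.check_call(cmd)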
|
||||
|
||||
|
|
|
|||
|
|
@ -27,6 +27,10 @@ from .local_settings import (
|
|||
get_openpype_username,
|
||||
)
|
||||
from .ayon_connection import initialize_ayon_connection
|
||||
from .cache import (
|
||||
CacheItem,
|
||||
NestedCacheItem,
|
||||
)
|
||||
from .events import (
|
||||
emit_event,
|
||||
register_event_callback
|
||||
|
|
@ -135,6 +139,7 @@ from .path_tools import (
|
|||
)
|
||||
|
||||
from .ayon_info import (
|
||||
is_in_ayon_launcher_process,
|
||||
is_running_from_build,
|
||||
is_using_ayon_console,
|
||||
is_staging_enabled,
|
||||
|
|
@ -157,6 +162,9 @@ __all__ = [
|
|||
|
||||
"initialize_ayon_connection",
|
||||
|
||||
"CacheItem",
|
||||
"NestedCacheItem",
|
||||
|
||||
"emit_event",
|
||||
"register_event_callback",
|
||||
|
||||
|
|
@ -241,6 +249,7 @@ __all__ = [
|
|||
|
||||
"Logger",
|
||||
|
||||
"is_in_ayon_launcher_process",
|
||||
"is_running_from_build",
|
||||
"is_using_ayon_console",
|
||||
"is_staging_enabled",
|
||||
|
|
|
|||
|
|
@ -1,4 +1,5 @@
|
|||
import os
|
||||
import sys
|
||||
import json
|
||||
import datetime
|
||||
import platform
|
||||
|
|
@ -25,6 +26,18 @@ def get_ayon_launcher_version():
|
|||
return content["__version__"]
|
||||
|
||||
|
||||
def is_in_ayon_launcher_process():
|
||||
"""Determine if current process is running from AYON launcher.
|
||||
|
||||
Returns:
|
||||
bool: True if running from AYON launcher.
|
||||
|
||||
"""
|
||||
ayon_executable_path = os.path.normpath(os.environ["AYON_EXECUTABLE"])
|
||||
executable_path = os.path.normpath(sys.executable)
|
||||
return ayon_executable_path == executable_path
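Usage sketch for the helper above, assuming it is re-exported from 'ayon_core.lib' as the updated '__init__.py' in this commit suggests.

from ayon_core.lib import is_in_ayon_launcher_process

if is_in_ayon_launcher_process():
    print("Running inside the AYON launcher executable.")
else:
    print("Running from a different Python interpreter.")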
|
||||
|
||||
|
||||
def is_running_from_build():
|
||||
"""Determine if current process is running from build or code.
|
||||
|
||||
|
|
|
|||
250
client/ayon_core/lib/cache.py
Normal file
250
client/ayon_core/lib/cache.py
Normal file
|
|
@ -0,0 +1,250 @@
|
|||
import time
|
||||
import collections
|
||||
|
||||
InitInfo = collections.namedtuple(
|
||||
"InitInfo",
|
||||
["default_factory", "lifetime"]
|
||||
)
|
||||
|
||||
|
||||
def _default_factory_func():
|
||||
return None
|
||||
|
||||
|
||||
class CacheItem:
|
||||
"""Simple cache item with lifetime and default factory for default value.
|
||||
|
||||
Default factory should return default value that is used on init
|
||||
and on reset.
|
||||
|
||||
Args:
|
||||
default_factory (Optional[callable]): Function that returns default
|
||||
value used on init and on reset.
|
||||
lifetime (Optional[int]): Lifetime of the cache data in seconds.
|
||||
Default lifetime is 120 seconds.
|
||||
|
||||
"""
|
||||
def __init__(self, default_factory=None, lifetime=None):
|
||||
if lifetime is None:
|
||||
lifetime = 120
|
||||
self._lifetime = lifetime
|
||||
self._last_update = None
|
||||
if default_factory is None:
|
||||
default_factory = _default_factory_func
|
||||
self._default_factory = default_factory
|
||||
self._data = default_factory()
|
||||
|
||||
@property
|
||||
def is_valid(self):
|
||||
"""Is cache valid to use.
|
||||
|
||||
Return:
|
||||
bool: True if cache is valid, False otherwise.
|
||||
|
||||
"""
|
||||
if self._last_update is None:
|
||||
return False
|
||||
|
||||
return (time.time() - self._last_update) < self._lifetime
|
||||
|
||||
def set_lifetime(self, lifetime):
|
||||
"""Change lifetime of cache item.
|
||||
|
||||
Args:
|
||||
lifetime (int): Lifetime of the cache data in seconds.
|
||||
"""
|
||||
|
||||
self._lifetime = lifetime
|
||||
|
||||
def set_invalid(self):
|
||||
"""Set cache as invalid."""
|
||||
|
||||
self._last_update = None
|
||||
|
||||
def reset(self):
|
||||
"""Set cache as invalid and reset data."""
|
||||
|
||||
self._last_update = None
|
||||
self._data = self._default_factory()
|
||||
|
||||
def get_data(self):
|
||||
"""Receive cached data.
|
||||
|
||||
Returns:
|
||||
Any: Any data that are cached.
|
||||
|
||||
"""
|
||||
return self._data
|
||||
|
||||
def update_data(self, data):
|
||||
"""Update cache data.
|
||||
|
||||
Args:
|
||||
data (Any): Any data that are cached.
|
||||
|
||||
"""
|
||||
self._data = data
|
||||
self._last_update = time.time()
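A short usage sketch for 'CacheItem': cache an expensive lookup for 30 seconds; 'fetch_settings' is an assumed callable doing the expensive work.

from ayon_core.lib.cache import CacheItem

settings_cache = CacheItem(default_factory=dict, lifetime=30)


def get_settings(fetch_settings):
    # Refresh the cached value only once its lifetime has expired.
    if not settings_cache.is_valid:
        settings_cache.update_data(fetch_settings())
    return settings_cache.get_data()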
|
||||
|
||||
|
||||
class NestedCacheItem:
|
||||
"""Helper for cached items stored in nested structure.
|
||||
|
||||
Example:
|
||||
>>> cache = NestedCacheItem(levels=2, default_factory=lambda: 0)
|
||||
>>> cache["a"]["b"].is_valid
|
||||
False
|
||||
>>> cache["a"]["b"].get_data()
|
||||
0
|
||||
>>> cache["a"]["b"] = 1
|
||||
>>> cache["a"]["b"].is_valid
|
||||
True
|
||||
>>> cache["a"]["b"].get_data()
|
||||
1
|
||||
>>> cache.reset()
|
||||
>>> cache["a"]["b"].is_valid
|
||||
False
|
||||
|
||||
Args:
|
||||
levels (int): Number of nested levels where read cache is stored.
|
||||
default_factory (Optional[callable]): Function that returns default
|
||||
value used on init and on reset.
|
||||
lifetime (Optional[int]): Lifetime of the cache data in seconds.
|
||||
Default value is based on default value of 'CacheItem'.
|
||||
_init_info (Optional[InitInfo]): Private argument. Init info for
|
||||
nested cache where created from parent item.
|
||||
|
||||
"""
|
||||
def __init__(
|
||||
self, levels=1, default_factory=None, lifetime=None, _init_info=None
|
||||
):
|
||||
if levels < 1:
|
||||
raise ValueError("Nested levels must be greater than 0")
|
||||
self._data_by_key = {}
|
||||
if _init_info is None:
|
||||
_init_info = InitInfo(default_factory, lifetime)
|
||||
self._init_info = _init_info
|
||||
self._levels = levels
|
||||
|
||||
def __getitem__(self, key):
|
||||
"""Get cached data.
|
||||
|
||||
Args:
|
||||
key (str): Key of the cache item.
|
||||
|
||||
Returns:
|
||||
Union[NestedCacheItem, CacheItem]: Cache item.
|
||||
|
||||
"""
|
||||
cache = self._data_by_key.get(key)
|
||||
if cache is None:
|
||||
if self._levels > 1:
|
||||
cache = NestedCacheItem(
|
||||
levels=self._levels - 1,
|
||||
_init_info=self._init_info
|
||||
)
|
||||
else:
|
||||
cache = CacheItem(
|
||||
self._init_info.default_factory,
|
||||
self._init_info.lifetime
|
||||
)
|
||||
self._data_by_key[key] = cache
|
||||
return cache
|
||||
|
||||
def __setitem__(self, key, value):
|
||||
"""Update cached data.
|
||||
|
||||
Args:
|
||||
key (str): Key of the cache item.
|
||||
value (Any): Any data that are cached.
|
||||
|
||||
"""
|
||||
if self._levels > 1:
|
||||
raise AttributeError((
|
||||
"{} does not support '__setitem__'. Lower nested level by {}"
|
||||
).format(self.__class__.__name__, self._levels - 1))
|
||||
cache = self[key]
|
||||
cache.update_data(value)
|
||||
|
||||
def get(self, key):
|
||||
"""Get cached data.
|
||||
|
||||
Args:
|
||||
key (str): Key of the cache item.
|
||||
|
||||
Returns:
|
||||
Union[NestedCacheItem, CacheItem]: Cache item.
|
||||
|
||||
"""
|
||||
return self[key]
|
||||
|
||||
def cached_count(self):
|
||||
"""Amount of cached items.
|
||||
|
||||
Returns:
|
||||
int: Amount of cached items.
|
||||
|
||||
"""
|
||||
return len(self._data_by_key)
|
||||
|
||||
def clear_key(self, key):
|
||||
"""Clear cached item by key.
|
||||
|
||||
Args:
|
||||
key (str): Key of the cache item.
|
||||
|
||||
"""
|
||||
self._data_by_key.pop(key, None)
|
||||
|
||||
def clear_invalid(self):
|
||||
"""Clear all invalid cache items.
|
||||
|
||||
Note:
|
||||
To clear all cache items use 'reset'.
|
||||
|
||||
"""
|
||||
changed = {}
|
||||
children_are_nested = self._levels > 1
|
||||
for key, cache in tuple(self._data_by_key.items()):
|
||||
if children_are_nested:
|
||||
output = cache.clear_invalid()
|
||||
if output:
|
||||
changed[key] = output
|
||||
if not cache.cached_count():
|
||||
self._data_by_key.pop(key)
|
||||
elif not cache.is_valid:
|
||||
changed[key] = cache.get_data()
|
||||
self._data_by_key.pop(key)
|
||||
return changed
|
||||
|
||||
def reset(self):
|
||||
"""Reset cache.
|
||||
|
||||
Note:
|
||||
To clear only invalid cache items use 'clear_invalid'.
|
||||
|
||||
"""
|
||||
self._data_by_key = {}
|
||||
|
||||
def set_lifetime(self, lifetime):
|
||||
"""Change lifetime of all children cache items.
|
||||
|
||||
Args:
|
||||
lifetime (int): Lifetime of the cache data in seconds.
|
||||
|
||||
"""
|
||||
# namedtuple fields cannot be assigned, create an updated copy instead
self._init_info = self._init_info._replace(lifetime=lifetime)
|
||||
for cache in self._data_by_key.values():
|
||||
cache.set_lifetime(lifetime)
|
||||
|
||||
@property
|
||||
def is_valid(self):
|
||||
"""Raise reasonable error when called on wrong level.
|
||||
|
||||
Raises:
|
||||
AttributeError: If called on nested cache item.
|
||||
|
||||
"""
|
||||
raise AttributeError((
|
||||
"{} does not support 'is_valid'. Lower nested level by '{}'"
|
||||
).format(self.__class__.__name__, self._levels))
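A sketch of maintaining a two-level 'NestedCacheItem', for example keyed by project name and folder id; the keys are illustrative.

from ayon_core.lib.cache import NestedCacheItem

folders_cache = NestedCacheItem(levels=2, default_factory=dict, lifetime=60)


def get_folder_cache(project_name, folder_id):
    return folders_cache[project_name][folder_id]


def cleanup_folder_cache():
    # Drops expired entries and returns their data, keyed the same way.
    return folders_cache.clear_invalid()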
|
||||
|
|
@ -1,6 +1,8 @@
|
|||
from .deadline_module import DeadlineModule
|
||||
from .version import __version__
|
||||
|
||||
|
||||
__all__ = (
|
||||
"DeadlineModule",
|
||||
"__version__"
|
||||
)
|
||||
|
|
|
|||
|
|
@ -49,6 +49,10 @@ def requests_post(*args, **kwargs):
|
|||
if 'verify' not in kwargs:
|
||||
kwargs['verify'] = False if os.getenv("OPENPYPE_DONT_VERIFY_SSL",
|
||||
True) else True # noqa
|
||||
|
||||
auth = kwargs.get("auth")
|
||||
if auth:
|
||||
kwargs["auth"] = tuple(auth) # explicit cast to tuple
|
||||
# add 10sec timeout before bailing out
|
||||
kwargs['timeout'] = 10
|
||||
return requests.post(*args, **kwargs)
|
||||
|
|
@ -70,6 +74,9 @@ def requests_get(*args, **kwargs):
|
|||
if 'verify' not in kwargs:
|
||||
kwargs['verify'] = False if os.getenv("OPENPYPE_DONT_VERIFY_SSL",
|
||||
True) else True # noqa
|
||||
auth = kwargs.get("auth")
|
||||
if auth:
|
||||
kwargs["auth"] = tuple(auth)
|
||||
# add 10sec timeout before bailing out
|
||||
kwargs['timeout'] = 10
|
||||
return requests.get(*args, **kwargs)
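Usage sketch for the wrapper above; the webservice URL and credentials are placeholders. 'verify' and the 10 second timeout are filled in by the wrapper when not passed explicitly.

# Assumes the 'requests_get' wrapper defined above is in scope (same module).
response = requests_get(
    "http://deadline-webservice:8082/api/pools?NamesOnly=true",
    auth=("deadline_user", "secret"),
)
pools = response.json()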
|
||||
|
|
@ -434,9 +441,7 @@ class AbstractSubmitDeadline(pyblish.api.InstancePlugin,
|
|||
"""Plugin entry point."""
|
||||
self._instance = instance
|
||||
context = instance.context
|
||||
self._deadline_url = context.data.get("defaultDeadline")
|
||||
self._deadline_url = instance.data.get(
|
||||
"deadlineUrl", self._deadline_url)
|
||||
self._deadline_url = instance.data["deadline"]["url"]
|
||||
|
||||
assert self._deadline_url, "Requires Deadline Webservice URL"
|
||||
|
||||
|
|
@ -460,7 +465,8 @@ class AbstractSubmitDeadline(pyblish.api.InstancePlugin,
|
|||
self.plugin_info = self.get_plugin_info()
|
||||
self.aux_files = self.get_aux_files()
|
||||
|
||||
job_id = self.process_submission()
|
||||
auth = instance.data["deadline"]["auth"]
|
||||
job_id = self.process_submission(auth)
|
||||
self.log.info("Submitted job to Deadline: {}.".format(job_id))
|
||||
|
||||
# TODO: Find a way that's more generic and not render type specific
|
||||
|
|
@ -473,10 +479,10 @@ class AbstractSubmitDeadline(pyblish.api.InstancePlugin,
|
|||
job_info=render_job_info,
|
||||
plugin_info=render_plugin_info
|
||||
)
|
||||
render_job_id = self.submit(payload)
|
||||
render_job_id = self.submit(payload, auth)
|
||||
self.log.info("Render job id: %s", render_job_id)
|
||||
|
||||
def process_submission(self):
|
||||
def process_submission(self, auth=None):
|
||||
"""Process data for submission.
|
||||
|
||||
This takes Deadline JobInfo, PluginInfo, AuxFile, creates payload
|
||||
|
|
@ -487,7 +493,7 @@ class AbstractSubmitDeadline(pyblish.api.InstancePlugin,
|
|||
|
||||
"""
|
||||
payload = self.assemble_payload()
|
||||
return self.submit(payload)
|
||||
return self.submit(payload, auth)
|
||||
|
||||
@abstractmethod
|
||||
def get_job_info(self):
|
||||
|
|
@ -577,7 +583,7 @@ class AbstractSubmitDeadline(pyblish.api.InstancePlugin,
|
|||
"AuxFiles": aux_files or self.aux_files
|
||||
}
|
||||
|
||||
def submit(self, payload):
|
||||
def submit(self, payload, auth):
|
||||
"""Submit payload to Deadline API end-point.
|
||||
|
||||
This takes payload in the form of JSON file and POST it to
|
||||
|
|
@ -585,6 +591,7 @@ class AbstractSubmitDeadline(pyblish.api.InstancePlugin,
|
|||
|
||||
Args:
|
||||
payload (dict): dict to become json in deadline submission.
|
||||
auth (tuple): (username, password)
|
||||
|
||||
Returns:
|
||||
str: resulting Deadline job id.
|
||||
|
|
@ -594,7 +601,8 @@ class AbstractSubmitDeadline(pyblish.api.InstancePlugin,
|
|||
|
||||
"""
|
||||
url = "{}/api/jobs".format(self._deadline_url)
|
||||
response = requests_post(url, json=payload)
|
||||
response = requests_post(url, json=payload,
|
||||
auth=auth)
|
||||
if not response.ok:
|
||||
self.log.error("Submission failed!")
|
||||
self.log.error(response.status_code)
|
||||
|
|
|
|||
|
|
@@ -19,23 +19,23 @@ class DeadlineModule(AYONAddon, IPluginPaths):
def initialize(self, studio_settings):
# This module is always enabled
deadline_urls = {}
deadline_servers_info = {}
enabled = self.name in studio_settings
if enabled:
deadline_settings = studio_settings[self.name]
deadline_urls = {
url_item["name"]: url_item["value"]
deadline_servers_info = {
url_item["name"]: url_item
for url_item in deadline_settings["deadline_urls"]
}

if enabled and not deadline_urls:
if enabled and not deadline_servers_info:
enabled = False
self.log.warning((
"Deadline Webservice URLs are not specified. Disabling addon."
))

self.enabled = enabled
self.deadline_urls = deadline_urls
self.deadline_servers_info = deadline_servers_info

def get_plugin_paths(self):
"""Deadline plugin paths."""

@@ -45,13 +45,15 @@ class DeadlineModule(AYONAddon, IPluginPaths):
}

@staticmethod
def get_deadline_pools(webservice, log=None):
def get_deadline_pools(webservice, auth=None, log=None):
"""Get pools from Deadline.
Args:
webservice (str): Server url.
log (Logger)
auth (Optional[Tuple[str, str]]): Tuple containing username,
    password
log (Optional[Logger]): Logger to log errors to, if provided.
Returns:
list: Pools.
List[str]: Pools.
Throws:
RuntimeError: If deadline webservice is unreachable.

@@ -63,7 +65,10 @@ class DeadlineModule(AYONAddon, IPluginPaths):
argument = "{}/api/pools?NamesOnly=true".format(webservice)
try:
response = requests_get(argument)
kwargs = {}
if auth:
kwargs["auth"] = auth
response = requests_get(argument, **kwargs)
except requests.exceptions.ConnectionError as exc:
msg = 'Cannot connect to DL web service {}'.format(webservice)
log.error(msg)
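[Editor's note] A hedged usage sketch (not part of this diff) of the updated get_deadline_pools signature; the webservice URL and credentials below are placeholder values.

from ayon_core.modules.deadline.deadline_module import DeadlineModule

pools = DeadlineModule.get_deadline_pools(
    "http://localhost:8082",
    auth=("artist", "secret"),  # pass None when authentication is not required
)
print(pools)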
@ -13,17 +13,45 @@ class CollectDeadlineServerFromInstance(pyblish.api.InstancePlugin):
|
|||
"""Collect Deadline Webservice URL from instance."""
|
||||
|
||||
# Run before collect_render.
|
||||
order = pyblish.api.CollectorOrder + 0.005
|
||||
order = pyblish.api.CollectorOrder + 0.225
|
||||
label = "Deadline Webservice from the Instance"
|
||||
families = ["rendering", "renderlayer"]
|
||||
hosts = ["maya"]
|
||||
targets = ["local"]
|
||||
families = ["render",
|
||||
"rendering",
|
||||
"render.farm",
|
||||
"renderFarm",
|
||||
"renderlayer",
|
||||
"maxrender",
|
||||
"usdrender",
|
||||
"redshift_rop",
|
||||
"arnold_rop",
|
||||
"mantra_rop",
|
||||
"karma_rop",
|
||||
"vray_rop",
|
||||
"publish.hou",
|
||||
"image"] # for Fusion
|
||||
|
||||
def process(self, instance):
|
||||
instance.data["deadlineUrl"] = self._collect_deadline_url(instance)
|
||||
instance.data["deadlineUrl"] = \
|
||||
instance.data["deadlineUrl"].strip().rstrip("/")
|
||||
if not instance.data.get("farm"):
|
||||
self.log.debug("Should not be processed on farm, skipping.")
|
||||
return
|
||||
|
||||
if not instance.data.get("deadline"):
|
||||
instance.data["deadline"] = {}
|
||||
|
||||
# todo: separate logic should be removed, all hosts should have same
|
||||
host_name = instance.context.data["hostName"]
|
||||
if host_name == "maya":
|
||||
deadline_url = self._collect_deadline_url(instance)
|
||||
else:
|
||||
deadline_url = (instance.data.get("deadlineUrl") or # backwards
|
||||
instance.data.get("deadline", {}).get("url"))
|
||||
if deadline_url:
|
||||
instance.data["deadline"]["url"] = deadline_url.strip().rstrip("/")
|
||||
else:
|
||||
instance.data["deadline"]["url"] = instance.context.data["deadline"]["defaultUrl"] # noqa
|
||||
self.log.debug(
|
||||
"Using {} for submission.".format(instance.data["deadlineUrl"]))
|
||||
"Using {} for submission".format(instance.data["deadline"]["url"]))
|
||||
|
||||
def _collect_deadline_url(self, render_instance):
|
||||
# type: (pyblish.api.Instance) -> str
|
||||
|
|
@ -49,13 +77,13 @@ class CollectDeadlineServerFromInstance(pyblish.api.InstancePlugin):
|
|||
["project_settings"]
|
||||
["deadline"]
|
||||
)
|
||||
|
||||
default_server = render_instance.context.data["defaultDeadline"]
|
||||
default_server_url = (render_instance.context.data["deadline"]
|
||||
["defaultUrl"])
|
||||
# QUESTION How and where is this is set? Should be removed?
|
||||
instance_server = render_instance.data.get("deadlineServers")
|
||||
if not instance_server:
|
||||
self.log.debug("Using default server.")
|
||||
return default_server
|
||||
return default_server_url
|
||||
|
||||
# Get instance server as sting.
|
||||
if isinstance(instance_server, int):
|
||||
|
|
@ -66,7 +94,7 @@ class CollectDeadlineServerFromInstance(pyblish.api.InstancePlugin):
|
|||
|
||||
default_servers = {
|
||||
url_item["name"]: url_item["value"]
|
||||
for url_item in deadline_settings["deadline_urls"]
|
||||
for url_item in deadline_settings["deadline_servers_info"]
|
||||
}
|
||||
project_servers = (
|
||||
render_instance.context.data
|
||||
|
|
|
|||
|
|
@ -18,10 +18,9 @@ class CollectDefaultDeadlineServer(pyblish.api.ContextPlugin):
|
|||
"""
|
||||
|
||||
# Run before collect_deadline_server_instance.
|
||||
order = pyblish.api.CollectorOrder + 0.0025
|
||||
order = pyblish.api.CollectorOrder + 0.200
|
||||
label = "Default Deadline Webservice"
|
||||
|
||||
pass_mongo_url = False
|
||||
targets = ["local"]
|
||||
|
||||
def process(self, context):
|
||||
try:
|
||||
|
|
@ -33,15 +32,17 @@ class CollectDefaultDeadlineServer(pyblish.api.ContextPlugin):
|
|||
deadline_settings = context.data["project_settings"]["deadline"]
|
||||
deadline_server_name = deadline_settings["deadline_server"]
|
||||
|
||||
deadline_webservice = None
|
||||
dl_server_info = None
|
||||
if deadline_server_name:
|
||||
deadline_webservice = deadline_module.deadline_urls.get(
|
||||
dl_server_info = deadline_module.deadline_servers_info.get(
|
||||
deadline_server_name)
|
||||
|
||||
default_deadline_webservice = deadline_module.deadline_urls["default"]
|
||||
deadline_webservice = (
|
||||
deadline_webservice
|
||||
or default_deadline_webservice
|
||||
)
|
||||
if dl_server_info:
|
||||
deadline_url = dl_server_info["value"]
|
||||
else:
|
||||
default_dl_server_info = deadline_module.deadline_servers_info[0]
|
||||
deadline_url = default_dl_server_info["value"]
|
||||
|
||||
context.data["defaultDeadline"] = deadline_webservice.strip().rstrip("/") # noqa
|
||||
context.data["deadline"] = {}
|
||||
context.data["deadline"]["defaultUrl"] = (
|
||||
deadline_url.strip().rstrip("/"))
|
||||
|
|
|
|||
|
|
@@ -0,0 +1,89 @@
# -*- coding: utf-8 -*-
"""Collect user credentials

Requires:
    context -> project_settings
    instance.data["deadline"]["url"]

Provides:
    instance.data["deadline"] -> require_authentication (bool)
    instance.data["deadline"] -> auth (tuple (str, str)) -
        (username, password) or None
"""
import pyblish.api

from ayon_api import get_server_api_connection
from ayon_core.modules.deadline.deadline_module import DeadlineModule
from ayon_core.modules.deadline import __version__


class CollectDeadlineUserCredentials(pyblish.api.InstancePlugin):
    """Collects user name and password for artist if DL requires authentication
    """
    order = pyblish.api.CollectorOrder + 0.250
    label = "Collect Deadline User Credentials"

    targets = ["local"]
    hosts = ["aftereffects",
             "blender",
             "fusion",
             "harmony",
             "nuke",
             "maya",
             "max",
             "houdini"]

    families = ["render",
                "rendering",
                "render.farm",
                "renderFarm",
                "renderlayer",
                "maxrender",
                "usdrender",
                "redshift_rop",
                "arnold_rop",
                "mantra_rop",
                "karma_rop",
                "vray_rop",
                "publish.hou"]

    def process(self, instance):
        if not instance.data.get("farm"):
            self.log.debug("Should not be processed on farm, skipping.")
            return

        collected_deadline_url = instance.data["deadline"]["url"]
        if not collected_deadline_url:
            raise ValueError("Instance doesn't have '[deadline][url]'.")
        context_data = instance.context.data
        deadline_settings = context_data["project_settings"]["deadline"]

        deadline_server_name = None
        # deadline url might be set directly from instance, need to find
        # metadata for it
        for deadline_info in deadline_settings["deadline_urls"]:
            dl_settings_url = deadline_info["value"].strip().rstrip("/")
            if dl_settings_url == collected_deadline_url:
                deadline_server_name = deadline_info["name"]
                break

        if not deadline_server_name:
            raise ValueError(f"Collected {collected_deadline_url} doesn't "
                             "match any site configured in Studio Settings")

        instance.data["deadline"]["require_authentication"] = (
            deadline_info["require_authentication"]
        )
        instance.data["deadline"]["auth"] = None

        if not deadline_info["require_authentication"]:
            return
        # TODO import 'get_addon_site_settings' when available
        # in public 'ayon_api'
        local_settings = get_server_api_connection().get_addon_site_settings(
            DeadlineModule.name, __version__)
        local_settings = local_settings["local_settings"]
        for server_info in local_settings:
            if deadline_server_name == server_info["server_name"]:
                instance.data["deadline"]["auth"] = (server_info["username"],
                                                     server_info["password"])
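[Editor's note] The credential lookup in the collector above reduces to matching the configured server name against the artist's addon site settings. A standalone sketch (not part of this diff) with placeholder values:

def find_deadline_auth(local_settings, server_name):
    # 'local_settings' mirrors get_addon_site_settings(...)["local_settings"]
    for server_info in local_settings:
        if server_info["server_name"] == server_name:
            return (server_info["username"], server_info["password"])
    return None

auth = find_deadline_auth(
    [{"server_name": "default", "username": "artist", "password": "secret"}],
    "default",
)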
@@ -0,0 +1,17 @@
<?xml version="1.0" encoding="UTF-8"?>
<root>
    <error id="main">
        <title>Deadline Authentication</title>
        <description>
## Deadline authentication is required

This project's Settings specify that Deadline requires authentication.

### How to repair?

Please go to Ayon Server > Site Settings and provide your Deadline username and password.
In some cases the password may be empty if Deadline is configured to allow that. Ask your administrator.

        </description>
    </error>
</root>
@ -174,7 +174,8 @@ class BlenderSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline,
|
|||
instance.data["toBeRenderedOn"] = "deadline"
|
||||
|
||||
payload = self.assemble_payload()
|
||||
return self.submit(payload)
|
||||
return self.submit(payload,
|
||||
auth=instance.data["deadline"]["auth"])
|
||||
|
||||
def from_published_scene(self):
|
||||
"""
|
||||
|
|
|
|||
|
|
@ -2,9 +2,10 @@ import os
|
|||
import re
|
||||
import json
|
||||
import getpass
|
||||
import requests
|
||||
import pyblish.api
|
||||
|
||||
from openpype_modules.deadline.abstract_submit_deadline import requests_post
|
||||
|
||||
|
||||
class CelactionSubmitDeadline(pyblish.api.InstancePlugin):
|
||||
"""Submit CelAction2D scene to Deadline
|
||||
|
|
@ -30,11 +31,7 @@ class CelactionSubmitDeadline(pyblish.api.InstancePlugin):
|
|||
|
||||
context = instance.context
|
||||
|
||||
# get default deadline webservice url from deadline module
|
||||
deadline_url = instance.context.data["defaultDeadline"]
|
||||
# if custom one is set in instance, use that
|
||||
if instance.data.get("deadlineUrl"):
|
||||
deadline_url = instance.data.get("deadlineUrl")
|
||||
deadline_url = instance.data["deadline"]["url"]
|
||||
assert deadline_url, "Requires Deadline Webservice URL"
|
||||
|
||||
self.deadline_url = "{}/api/jobs".format(deadline_url)
|
||||
|
|
@ -197,7 +194,8 @@ class CelactionSubmitDeadline(pyblish.api.InstancePlugin):
|
|||
self.log.debug("__ expectedFiles: `{}`".format(
|
||||
instance.data["expectedFiles"]))
|
||||
|
||||
response = requests.post(self.deadline_url, json=payload)
|
||||
response = requests_post(self.deadline_url, json=payload,
|
||||
auth=instance.data["deadline"]["require_authentication"])
|
||||
|
||||
if not response.ok:
|
||||
self.log.error(
|
||||
|
|
|
|||
|
|
@ -2,17 +2,13 @@ import os
|
|||
import json
|
||||
import getpass
|
||||
|
||||
import requests
|
||||
|
||||
import pyblish.api
|
||||
|
||||
from openpype_modules.deadline.abstract_submit_deadline import requests_post
|
||||
from ayon_core.pipeline.publish import (
|
||||
AYONPyblishPluginMixin
|
||||
)
|
||||
from ayon_core.lib import (
|
||||
BoolDef,
|
||||
NumberDef,
|
||||
)
|
||||
from ayon_core.lib import NumberDef
|
||||
|
||||
|
||||
class FusionSubmitDeadline(
|
||||
|
|
@ -64,11 +60,6 @@ class FusionSubmitDeadline(
|
|||
decimals=0,
|
||||
minimum=1,
|
||||
maximum=10
|
||||
),
|
||||
BoolDef(
|
||||
"suspend_publish",
|
||||
default=False,
|
||||
label="Suspend publish"
|
||||
)
|
||||
]
|
||||
|
||||
|
|
@ -80,10 +71,6 @@ class FusionSubmitDeadline(
|
|||
attribute_values = self.get_attr_values_from_data(
|
||||
instance.data)
|
||||
|
||||
# add suspend_publish attributeValue to instance data
|
||||
instance.data["suspend_publish"] = attribute_values[
|
||||
"suspend_publish"]
|
||||
|
||||
context = instance.context
|
||||
|
||||
key = "__hasRun{}".format(self.__class__.__name__)
|
||||
|
|
@ -94,11 +81,7 @@ class FusionSubmitDeadline(
|
|||
|
||||
from ayon_core.hosts.fusion.api.lib import get_frame_path
|
||||
|
||||
# get default deadline webservice url from deadline module
|
||||
deadline_url = instance.context.data["defaultDeadline"]
|
||||
# if custom one is set in instance, use that
|
||||
if instance.data.get("deadlineUrl"):
|
||||
deadline_url = instance.data.get("deadlineUrl")
|
||||
deadline_url = instance.data["deadline"]["url"]
|
||||
assert deadline_url, "Requires Deadline Webservice URL"
|
||||
|
||||
# Collect all saver instances in context that are to be rendered
|
||||
|
|
@ -258,7 +241,8 @@ class FusionSubmitDeadline(
|
|||
|
||||
# E.g. http://192.168.0.1:8082/api/jobs
|
||||
url = "{}/api/jobs".format(deadline_url)
|
||||
response = requests.post(url, json=payload)
|
||||
auth = instance.data["deadline"]["auth"]
|
||||
response = requests_post(url, json=payload, auth=auth)
|
||||
if not response.ok:
|
||||
raise Exception(response.text)
|
||||
|
||||
|
|
|
|||
|
|
@ -10,7 +10,6 @@ from openpype_modules.deadline import abstract_submit_deadline
|
|||
from openpype_modules.deadline.abstract_submit_deadline import DeadlineJobInfo
|
||||
from ayon_core.lib import (
|
||||
is_in_tests,
|
||||
BoolDef,
|
||||
TextDef,
|
||||
NumberDef
|
||||
)
|
||||
|
|
@ -90,11 +89,6 @@ class HoudiniSubmitDeadline(
|
|||
@classmethod
|
||||
def get_attribute_defs(cls):
|
||||
return [
|
||||
BoolDef(
|
||||
"suspend_publish",
|
||||
default=False,
|
||||
label="Suspend publish"
|
||||
),
|
||||
NumberDef(
|
||||
"priority",
|
||||
label="Priority",
|
||||
|
|
|
|||
|
|
@ -187,11 +187,13 @@ class MaxSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline,
|
|||
payload_data, project_settings)
|
||||
job_infos, plugin_infos = payload
|
||||
for job_info, plugin_info in zip(job_infos, plugin_infos):
|
||||
self.submit(self.assemble_payload(job_info, plugin_info))
|
||||
self.submit(self.assemble_payload(job_info, plugin_info),
|
||||
instance.data["deadline"]["auth"])
|
||||
else:
|
||||
payload = self._use_published_name(payload_data, project_settings)
|
||||
job_info, plugin_info = payload
|
||||
self.submit(self.assemble_payload(job_info, plugin_info))
|
||||
self.submit(self.assemble_payload(job_info, plugin_info),
|
||||
instance.data["deadline"]["auth"])
|
||||
|
||||
def _use_published_name(self, data, project_settings):
|
||||
# Not all hosts can import these modules.
|
||||
|
|
|
|||
|
|
@ -292,7 +292,7 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline,
|
|||
|
||||
return plugin_payload
|
||||
|
||||
def process_submission(self):
|
||||
def process_submission(self, auth=None):
|
||||
from maya import cmds
|
||||
instance = self._instance
|
||||
|
||||
|
|
@ -332,7 +332,8 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline,
|
|||
if "vrayscene" in instance.data["families"]:
|
||||
self.log.debug("Submitting V-Ray scene render..")
|
||||
vray_export_payload = self._get_vray_export_payload(payload_data)
|
||||
export_job = self.submit(vray_export_payload)
|
||||
export_job = self.submit(vray_export_payload,
|
||||
instance.data["deadline"]["auth"])
|
||||
|
||||
payload = self._get_vray_render_payload(payload_data)
|
||||
|
||||
|
|
@ -351,7 +352,8 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline,
|
|||
else:
|
||||
# Submit main render job
|
||||
job_info, plugin_info = payload
|
||||
self.submit(self.assemble_payload(job_info, plugin_info))
|
||||
self.submit(self.assemble_payload(job_info, plugin_info),
|
||||
instance.data["deadline"]["auth"])
|
||||
|
||||
def _tile_render(self, payload):
|
||||
"""Submit as tile render per frame with dependent assembly jobs."""
|
||||
|
|
@ -451,7 +453,8 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline,
|
|||
# Submit frame tile jobs
|
||||
frame_tile_job_id = {}
|
||||
for frame, tile_job_payload in frame_payloads.items():
|
||||
job_id = self.submit(tile_job_payload)
|
||||
job_id = self.submit(tile_job_payload,
|
||||
instance.data["deadline"]["auth"])
|
||||
frame_tile_job_id[frame] = job_id
|
||||
|
||||
# Define assembly payloads
|
||||
|
|
@ -559,7 +562,8 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline,
|
|||
"submitting assembly job {} of {}".format(i + 1,
|
||||
num_assemblies)
|
||||
)
|
||||
assembly_job_id = self.submit(payload)
|
||||
assembly_job_id = self.submit(payload,
|
||||
instance.data["deadline"]["auth"])
|
||||
assembly_job_ids.append(assembly_job_id)
|
||||
|
||||
instance.data["assemblySubmissionJobs"] = assembly_job_ids
|
||||
|
|
|
|||
|
|
@ -4,9 +4,9 @@ import json
|
|||
import getpass
|
||||
from datetime import datetime
|
||||
|
||||
import requests
|
||||
import pyblish.api
|
||||
|
||||
from openpype_modules.deadline.abstract_submit_deadline import requests_post
|
||||
from ayon_core.pipeline.publish import (
|
||||
AYONPyblishPluginMixin
|
||||
)
|
||||
|
|
@ -76,11 +76,6 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin,
|
|||
default=cls.use_gpu,
|
||||
label="Use GPU"
|
||||
),
|
||||
BoolDef(
|
||||
"suspend_publish",
|
||||
default=False,
|
||||
label="Suspend publish"
|
||||
),
|
||||
BoolDef(
|
||||
"workfile_dependency",
|
||||
default=cls.workfile_dependency,
|
||||
|
|
@ -100,20 +95,12 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin,
|
|||
instance.data["attributeValues"] = self.get_attr_values_from_data(
|
||||
instance.data)
|
||||
|
||||
# add suspend_publish attributeValue to instance data
|
||||
instance.data["suspend_publish"] = instance.data["attributeValues"][
|
||||
"suspend_publish"]
|
||||
|
||||
families = instance.data["families"]
|
||||
|
||||
node = instance.data["transientData"]["node"]
|
||||
context = instance.context
|
||||
|
||||
# get default deadline webservice url from deadline module
|
||||
deadline_url = instance.context.data["defaultDeadline"]
|
||||
# if custom one is set in instance, use that
|
||||
if instance.data.get("deadlineUrl"):
|
||||
deadline_url = instance.data.get("deadlineUrl")
|
||||
deadline_url = instance.data["deadline"]["url"]
|
||||
assert deadline_url, "Requires Deadline Webservice URL"
|
||||
|
||||
self.deadline_url = "{}/api/jobs".format(deadline_url)
|
||||
|
|
@ -436,7 +423,9 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin,
|
|||
|
||||
self.log.debug("__ expectedFiles: `{}`".format(
|
||||
instance.data["expectedFiles"]))
|
||||
response = requests.post(self.deadline_url, json=payload, timeout=10)
|
||||
auth = instance.data["deadline"]["auth"]
|
||||
response = requests_post(self.deadline_url, json=payload, timeout=10,
|
||||
auth=auth)
|
||||
|
||||
if not response.ok:
|
||||
raise Exception(response.text)
|
||||
|
|
|
|||
|
|
@ -5,10 +5,10 @@ import json
|
|||
import re
|
||||
from copy import deepcopy
|
||||
|
||||
import requests
|
||||
import ayon_api
|
||||
import pyblish.api
|
||||
|
||||
from openpype_modules.deadline.abstract_submit_deadline import requests_post
|
||||
from ayon_core.pipeline import publish
|
||||
from ayon_core.lib import EnumDef, is_in_tests
|
||||
from ayon_core.pipeline.version_start import get_versioning_start
|
||||
|
|
@ -147,9 +147,6 @@ class ProcessSubmittedCacheJobOnFarm(pyblish.api.InstancePlugin,
|
|||
|
||||
instance_settings = self.get_attr_values_from_data(instance.data)
|
||||
initial_status = instance_settings.get("publishJobState", "Active")
|
||||
# TODO: Remove this backwards compatibility of `suspend_publish`
|
||||
if instance.data.get("suspend_publish"):
|
||||
initial_status = "Suspended"
|
||||
|
||||
args = [
|
||||
"--headless",
|
||||
|
|
@ -212,7 +209,9 @@ class ProcessSubmittedCacheJobOnFarm(pyblish.api.InstancePlugin,
|
|||
self.log.debug("Submitting Deadline publish job ...")
|
||||
|
||||
url = "{}/api/jobs".format(self.deadline_url)
|
||||
response = requests.post(url, json=payload, timeout=10)
|
||||
auth = instance.data["deadline"]["auth"]
|
||||
response = requests_post(url, json=payload, timeout=10,
|
||||
auth=auth)
|
||||
if not response.ok:
|
||||
raise Exception(response.text)
|
||||
|
||||
|
|
@ -344,11 +343,7 @@ class ProcessSubmittedCacheJobOnFarm(pyblish.api.InstancePlugin,
|
|||
|
||||
deadline_publish_job_id = None
|
||||
if submission_type == "deadline":
|
||||
# get default deadline webservice url from deadline module
|
||||
self.deadline_url = instance.context.data["defaultDeadline"]
|
||||
# if custom one is set in instance, use that
|
||||
if instance.data.get("deadlineUrl"):
|
||||
self.deadline_url = instance.data.get("deadlineUrl")
|
||||
self.deadline_url = instance.data["deadline"]["url"]
|
||||
assert self.deadline_url, "Requires Deadline Webservice URL"
|
||||
|
||||
deadline_publish_job_id = \
|
||||
|
|
@ -356,7 +351,9 @@ class ProcessSubmittedCacheJobOnFarm(pyblish.api.InstancePlugin,
|
|||
|
||||
# Inject deadline url to instances.
|
||||
for inst in instances:
|
||||
inst["deadlineUrl"] = self.deadline_url
|
||||
if "deadline" not in inst:
|
||||
inst["deadline"] = {}
|
||||
inst["deadline"] = instance.data["deadline"]
|
||||
|
||||
# publish job file
|
||||
publish_job = {
|
||||
|
|
|
|||
|
|
@ -5,11 +5,11 @@ import json
|
|||
import re
|
||||
from copy import deepcopy
|
||||
|
||||
import requests
|
||||
import clique
|
||||
import ayon_api
|
||||
import pyblish.api
|
||||
|
||||
from openpype_modules.deadline.abstract_submit_deadline import requests_post
|
||||
from ayon_core.pipeline import publish
|
||||
from ayon_core.lib import EnumDef, is_in_tests
|
||||
from ayon_core.pipeline.version_start import get_versioning_start
|
||||
|
|
@ -88,9 +88,9 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin,
|
|||
hosts = ["fusion", "max", "maya", "nuke", "houdini",
|
||||
"celaction", "aftereffects", "harmony", "blender"]
|
||||
|
||||
families = ["render.farm", "render.frames_farm",
|
||||
"prerender.farm", "prerender.frames_farm",
|
||||
"renderlayer", "imagesequence",
|
||||
families = ["render", "render.farm", "render.frames_farm",
|
||||
"prerender", "prerender.farm", "prerender.frames_farm",
|
||||
"renderlayer", "imagesequence", "image",
|
||||
"vrayscene", "maxrender",
|
||||
"arnold_rop", "mantra_rop",
|
||||
"karma_rop", "vray_rop",
|
||||
|
|
@ -224,9 +224,6 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin,
|
|||
|
||||
instance_settings = self.get_attr_values_from_data(instance.data)
|
||||
initial_status = instance_settings.get("publishJobState", "Active")
|
||||
# TODO: Remove this backwards compatibility of `suspend_publish`
|
||||
if instance.data.get("suspend_publish"):
|
||||
initial_status = "Suspended"
|
||||
|
||||
args = [
|
||||
"--headless",
|
||||
|
|
@ -306,7 +303,9 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin,
|
|||
self.log.debug("Submitting Deadline publish job ...")
|
||||
|
||||
url = "{}/api/jobs".format(self.deadline_url)
|
||||
response = requests.post(url, json=payload, timeout=10)
|
||||
auth = instance.data["deadline"]["auth"]
|
||||
response = requests_post(url, json=payload, timeout=10,
|
||||
auth=auth)
|
||||
if not response.ok:
|
||||
raise Exception(response.text)
|
||||
|
||||
|
|
@ -314,7 +313,6 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin,
|
|||
|
||||
return deadline_publish_job_id
|
||||
|
||||
|
||||
def process(self, instance):
|
||||
# type: (pyblish.api.Instance) -> None
|
||||
"""Process plugin.
|
||||
|
|
@ -461,18 +459,15 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin,
|
|||
}
|
||||
|
||||
# get default deadline webservice url from deadline module
|
||||
self.deadline_url = instance.context.data["defaultDeadline"]
|
||||
# if custom one is set in instance, use that
|
||||
if instance.data.get("deadlineUrl"):
|
||||
self.deadline_url = instance.data.get("deadlineUrl")
|
||||
self.deadline_url = instance.data["deadline"]["url"]
|
||||
assert self.deadline_url, "Requires Deadline Webservice URL"
|
||||
|
||||
deadline_publish_job_id = \
|
||||
self._submit_deadline_post_job(instance, render_job, instances)
|
||||
|
||||
# Inject deadline url to instances.
|
||||
# Inject deadline url to instances to query DL for job id for overrides
|
||||
for inst in instances:
|
||||
inst["deadlineUrl"] = self.deadline_url
|
||||
inst["deadline"] = instance.data["deadline"]
|
||||
|
||||
# publish job file
|
||||
publish_job = {
|
||||
|
|
|
|||
|
|
@ -1,5 +1,7 @@
|
|||
import pyblish.api
|
||||
|
||||
from ayon_core.pipeline import PublishXmlValidationError
|
||||
|
||||
from openpype_modules.deadline.abstract_submit_deadline import requests_get
|
||||
|
||||
|
||||
|
|
@ -8,27 +10,42 @@ class ValidateDeadlineConnection(pyblish.api.InstancePlugin):
|
|||
|
||||
label = "Validate Deadline Web Service"
|
||||
order = pyblish.api.ValidatorOrder
|
||||
hosts = ["maya", "nuke"]
|
||||
families = ["renderlayer", "render"]
|
||||
hosts = ["maya", "nuke", "aftereffects", "harmony", "fusion"]
|
||||
families = ["renderlayer", "render", "render.farm"]
|
||||
|
||||
# cache
|
||||
responses = {}
|
||||
|
||||
def process(self, instance):
|
||||
# get default deadline webservice url from deadline module
|
||||
deadline_url = instance.context.data["defaultDeadline"]
|
||||
# if custom one is set in instance, use that
|
||||
if instance.data.get("deadlineUrl"):
|
||||
deadline_url = instance.data.get("deadlineUrl")
|
||||
self.log.debug(
|
||||
"We have deadline URL on instance {}".format(deadline_url)
|
||||
)
|
||||
if not instance.data.get("farm"):
|
||||
self.log.debug("Should not be processed on farm, skipping.")
|
||||
return
|
||||
|
||||
deadline_url = instance.data["deadline"]["url"]
|
||||
assert deadline_url, "Requires Deadline Webservice URL"
|
||||
|
||||
kwargs = {}
|
||||
if instance.data["deadline"]["require_authentication"]:
|
||||
auth = instance.data["deadline"]["auth"]
|
||||
kwargs["auth"] = auth
|
||||
|
||||
if not auth[0]:
|
||||
raise PublishXmlValidationError(
|
||||
self,
|
||||
"Deadline requires authentication. "
|
||||
"At least username is required to be set in "
|
||||
"Site Settings.")
|
||||
|
||||
if deadline_url not in self.responses:
|
||||
self.responses[deadline_url] = requests_get(deadline_url)
|
||||
self.responses[deadline_url] = requests_get(deadline_url, **kwargs)
|
||||
|
||||
response = self.responses[deadline_url]
|
||||
if response.status_code == 401:
|
||||
raise PublishXmlValidationError(
|
||||
self,
|
||||
"Deadline requires authentication. "
|
||||
"Provided credentials are not working. "
|
||||
"Please change them in Site Settings")
|
||||
assert response.ok, "Response must be ok"
|
||||
assert response.text.startswith("Deadline Web Service "), (
|
||||
"Web service did not respond with 'Deadline Web Service'"
|
||||
|
|
|
|||
|
|
@ -37,8 +37,9 @@ class ValidateDeadlinePools(OptionalPyblishPluginMixin,
|
|||
self.log.debug("Skipping local instance.")
|
||||
return
|
||||
|
||||
deadline_url = self.get_deadline_url(instance)
|
||||
pools = self.get_pools(deadline_url)
|
||||
deadline_url = instance.data["deadline"]["url"]
|
||||
pools = self.get_pools(deadline_url,
|
||||
instance.data["deadline"].get("auth"))
|
||||
|
||||
invalid_pools = {}
|
||||
primary_pool = instance.data.get("primaryPool")
|
||||
|
|
@ -61,22 +62,18 @@ class ValidateDeadlinePools(OptionalPyblishPluginMixin,
|
|||
formatting_data={"pools_str": ", ".join(pools)}
|
||||
)
|
||||
|
||||
def get_deadline_url(self, instance):
|
||||
# get default deadline webservice url from deadline module
|
||||
deadline_url = instance.context.data["defaultDeadline"]
|
||||
if instance.data.get("deadlineUrl"):
|
||||
# if custom one is set in instance, use that
|
||||
deadline_url = instance.data.get("deadlineUrl")
|
||||
return deadline_url
|
||||
|
||||
def get_pools(self, deadline_url):
|
||||
def get_pools(self, deadline_url, auth):
|
||||
if deadline_url not in self.pools_per_url:
|
||||
self.log.debug(
|
||||
"Querying available pools for Deadline url: {}".format(
|
||||
deadline_url)
|
||||
)
|
||||
pools = DeadlineModule.get_deadline_pools(deadline_url,
|
||||
auth=auth,
|
||||
log=self.log)
|
||||
# some DL return "none" as a pool name
|
||||
if "none" not in pools:
|
||||
pools.append("none")
|
||||
self.log.info("Available pools: {}".format(pools))
|
||||
self.pools_per_url[deadline_url] = pools
|
||||
|
||||
|
|
|
|||
|
|
@ -199,16 +199,16 @@ class ValidateExpectedFiles(pyblish.api.InstancePlugin):
|
|||
(dict): Job info from Deadline
|
||||
|
||||
"""
|
||||
# get default deadline webservice url from deadline module
|
||||
deadline_url = instance.context.data["defaultDeadline"]
|
||||
# if custom one is set in instance, use that
|
||||
if instance.data.get("deadlineUrl"):
|
||||
deadline_url = instance.data.get("deadlineUrl")
|
||||
deadline_url = instance.data["deadline"]["url"]
|
||||
assert deadline_url, "Requires Deadline Webservice URL"
|
||||
|
||||
url = "{}/api/jobs?JobID={}".format(deadline_url, job_id)
|
||||
try:
|
||||
response = requests_get(url)
|
||||
kwargs = {}
|
||||
auth = instance.data["deadline"]["auth"]
|
||||
if auth:
|
||||
kwargs["auth"] = auth
|
||||
response = requests_get(url, **kwargs)
|
||||
except requests.exceptions.ConnectionError:
|
||||
self.log.error("Deadline is not accessible at "
|
||||
"{}".format(deadline_url))
|
||||
|
|
|
|||
1  client/ayon_core/modules/deadline/version.py  Normal file
@@ -0,0 +1 @@
__version__ = "0.1.10"
@ -97,6 +97,15 @@ from .context_tools import (
|
|||
get_current_folder_path,
|
||||
get_current_task_name
|
||||
)
|
||||
|
||||
from .workfile import (
|
||||
discover_workfile_build_plugins,
|
||||
register_workfile_build_plugin,
|
||||
deregister_workfile_build_plugin,
|
||||
register_workfile_build_plugin_path,
|
||||
deregister_workfile_build_plugin_path,
|
||||
)
|
||||
|
||||
install = install_host
|
||||
uninstall = uninstall_host
|
||||
|
||||
|
|
@ -198,6 +207,13 @@ __all__ = (
|
|||
"get_current_folder_path",
|
||||
"get_current_task_name",
|
||||
|
||||
# Workfile templates
|
||||
"discover_workfile_build_plugins",
|
||||
"register_workfile_build_plugin",
|
||||
"deregister_workfile_build_plugin",
|
||||
"register_workfile_build_plugin_path",
|
||||
"deregister_workfile_build_plugin_path",
|
||||
|
||||
# Backwards compatible function names
|
||||
"install",
|
||||
"uninstall",
|
||||
|
|
|
|||
|
|
@ -3,11 +3,16 @@ import re
|
|||
import copy
|
||||
import platform
|
||||
import collections
|
||||
import time
|
||||
|
||||
import ayon_api
|
||||
|
||||
from ayon_core.lib import Logger, get_local_site_id, StringTemplate
|
||||
from ayon_core.lib import (
|
||||
Logger,
|
||||
get_local_site_id,
|
||||
StringTemplate,
|
||||
CacheItem,
|
||||
NestedCacheItem,
|
||||
)
|
||||
from ayon_core.addon import AddonsManager
|
||||
|
||||
from .exceptions import RootCombinationError, ProjectNotSet
|
||||
|
|
@ -397,62 +402,11 @@ class BaseAnatomy(object):
|
|||
)
|
||||
|
||||
|
||||
class CacheItem:
|
||||
"""Helper to cache data.
|
||||
|
||||
Helper does not handle refresh of data and does not mark data as outdated.
|
||||
Who uses the object should check of outdated state on his own will.
|
||||
"""
|
||||
|
||||
default_lifetime = 10
|
||||
|
||||
def __init__(self, lifetime=None):
|
||||
self._data = None
|
||||
self._cached = None
|
||||
self._lifetime = lifetime or self.default_lifetime
|
||||
|
||||
@property
|
||||
def data(self):
|
||||
"""Cached data/object.
|
||||
|
||||
Returns:
|
||||
Any: Whatever was cached.
|
||||
"""
|
||||
|
||||
return self._data
|
||||
|
||||
@property
|
||||
def is_outdated(self):
|
||||
"""Item has outdated cache.
|
||||
|
||||
Lifetime of cache item expired or was not yet set.
|
||||
|
||||
Returns:
|
||||
bool: Item is outdated.
|
||||
"""
|
||||
|
||||
if self._cached is None:
|
||||
return True
|
||||
return (time.time() - self._cached) > self._lifetime
|
||||
|
||||
def update_data(self, data):
|
||||
"""Update cache of data.
|
||||
|
||||
Args:
|
||||
data (Any): Data to cache.
|
||||
"""
|
||||
|
||||
self._data = data
|
||||
self._cached = time.time()
|
||||
|
||||
|
||||
class Anatomy(BaseAnatomy):
|
||||
_sitesync_addon_cache = CacheItem()
|
||||
_project_cache = collections.defaultdict(CacheItem)
|
||||
_default_site_id_cache = collections.defaultdict(CacheItem)
|
||||
_root_overrides_cache = collections.defaultdict(
|
||||
lambda: collections.defaultdict(CacheItem)
|
||||
)
|
||||
_project_cache = NestedCacheItem(lifetime=10)
|
||||
_sitesync_addon_cache = CacheItem(lifetime=60)
|
||||
_default_site_id_cache = NestedCacheItem(lifetime=60)
|
||||
_root_overrides_cache = NestedCacheItem(2, lifetime=60)
|
||||
|
||||
def __init__(
|
||||
self, project_name=None, site_name=None, project_entity=None
|
||||
|
|
@ -477,18 +431,18 @@ class Anatomy(BaseAnatomy):
|
|||
@classmethod
|
||||
def get_project_entity_from_cache(cls, project_name):
|
||||
project_cache = cls._project_cache[project_name]
|
||||
if project_cache.is_outdated:
|
||||
if not project_cache.is_valid:
|
||||
project_cache.update_data(ayon_api.get_project(project_name))
|
||||
return copy.deepcopy(project_cache.data)
|
||||
return copy.deepcopy(project_cache.get_data())
|
||||
|
||||
@classmethod
|
||||
def get_sitesync_addon(cls):
|
||||
if cls._sitesync_addon_cache.is_outdated:
|
||||
if not cls._sitesync_addon_cache.is_valid:
|
||||
manager = AddonsManager()
|
||||
cls._sitesync_addon_cache.update_data(
|
||||
manager.get_enabled_addon("sitesync")
|
||||
)
|
||||
return cls._sitesync_addon_cache.data
|
||||
return cls._sitesync_addon_cache.get_data()
|
||||
|
||||
@classmethod
|
||||
def _get_studio_roots_overrides(cls, project_name):
|
||||
|
|
@ -533,14 +487,14 @@ class Anatomy(BaseAnatomy):
|
|||
elif not site_name:
|
||||
# Use sync server to receive active site name
|
||||
project_cache = cls._default_site_id_cache[project_name]
|
||||
if project_cache.is_outdated:
|
||||
if not project_cache.is_valid:
|
||||
project_cache.update_data(
|
||||
sitesync_addon.get_active_site_type(project_name)
|
||||
)
|
||||
site_name = project_cache.data
|
||||
site_name = project_cache.get_data()
|
||||
|
||||
site_cache = cls._root_overrides_cache[project_name][site_name]
|
||||
if site_cache.is_outdated:
|
||||
if not site_cache.is_valid:
|
||||
if site_name == "studio":
|
||||
# Handle studio root overrides without sync server
|
||||
# - studio root overrides can be done even without sync server
|
||||
|
|
@ -553,4 +507,4 @@ class Anatomy(BaseAnatomy):
|
|||
project_name, site_name
|
||||
)
|
||||
site_cache.update_data(roots_overrides)
|
||||
return site_cache.data
|
||||
return site_cache.get_data()
|
||||
|
|
|
|||
|
|
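[Editor's note] A sketch (not part of this diff) of the cache access pattern the Anatomy class now uses, assuming the CacheItem/NestedCacheItem API from ayon_core.lib shown above (is_valid, update_data, get_data); 'fetch_project' is a placeholder.

from ayon_core.lib import NestedCacheItem

_project_cache = NestedCacheItem(lifetime=10)

def get_cached_project(project_name, fetch_project):
    cache = _project_cache[project_name]
    if not cache.is_valid:
        cache.update_data(fetch_project(project_name))
    return cache.get_data()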
@ -1987,12 +1987,12 @@ class CreateContext:
|
|||
"Folder '{}' was not found".format(folder_path)
|
||||
)
|
||||
|
||||
task_name = None
|
||||
if task_entity is None:
|
||||
task_name = self.get_current_task_name()
|
||||
task_entity = ayon_api.get_task_by_name(
|
||||
project_name, folder_entity["id"], task_name
|
||||
)
|
||||
current_task_name = self.get_current_task_name()
|
||||
if current_task_name:
|
||||
task_entity = ayon_api.get_task_by_name(
|
||||
project_name, folder_entity["id"], current_task_name
|
||||
)
|
||||
|
||||
if pre_create_data is None:
|
||||
pre_create_data = {}
|
||||
|
|
@ -2018,7 +2018,7 @@ class CreateContext:
|
|||
|
||||
instance_data = {
|
||||
"folderPath": folder_entity["path"],
|
||||
"task": task_name,
|
||||
"task": task_entity["name"] if task_entity else None,
|
||||
"productType": creator.product_type,
|
||||
"variant": variant
|
||||
}
|
||||
|
|
@ -2053,7 +2053,7 @@ class CreateContext:
|
|||
exc_info = sys.exc_info()
|
||||
self.log.warning(error_message.format(identifier, exc_info[1]))
|
||||
|
||||
except:
|
||||
except: # noqa: E722
|
||||
add_traceback = True
|
||||
exc_info = sys.exc_info()
|
||||
self.log.warning(
|
||||
|
|
@ -2163,7 +2163,7 @@ class CreateContext:
|
|||
exc_info = sys.exc_info()
|
||||
self.log.warning(error_message.format(identifier, exc_info[1]))
|
||||
|
||||
except:
|
||||
except: # noqa: E722
|
||||
failed = True
|
||||
add_traceback = True
|
||||
exc_info = sys.exc_info()
|
||||
|
|
@ -2197,7 +2197,7 @@ class CreateContext:
|
|||
try:
|
||||
convertor.find_instances()
|
||||
|
||||
except:
|
||||
except: # noqa: E722
|
||||
failed_info.append(
|
||||
prepare_failed_convertor_operation_info(
|
||||
convertor.identifier, sys.exc_info()
|
||||
|
|
@ -2373,7 +2373,7 @@ class CreateContext:
|
|||
exc_info = sys.exc_info()
|
||||
self.log.warning(error_message.format(identifier, exc_info[1]))
|
||||
|
||||
except:
|
||||
except: # noqa: E722
|
||||
failed = True
|
||||
add_traceback = True
|
||||
exc_info = sys.exc_info()
|
||||
|
|
@ -2440,7 +2440,7 @@ class CreateContext:
|
|||
error_message.format(identifier, exc_info[1])
|
||||
)
|
||||
|
||||
except:
|
||||
except: # noqa: E722
|
||||
failed = True
|
||||
add_traceback = True
|
||||
exc_info = sys.exc_info()
|
||||
|
|
@ -2546,7 +2546,7 @@ class CreateContext:
|
|||
try:
|
||||
self.run_convertor(convertor_identifier)
|
||||
|
||||
except:
|
||||
except: # noqa: E722
|
||||
failed_info.append(
|
||||
prepare_failed_convertor_operation_info(
|
||||
convertor_identifier, sys.exc_info()
|
||||
|
|
|
|||
|
|
@ -225,6 +225,7 @@ def create_skeleton_instance(
|
|||
instance_skeleton_data = {
|
||||
"productType": product_type,
|
||||
"productName": data["productName"],
|
||||
"task": data["task"],
|
||||
"families": families,
|
||||
"folderPath": data["folderPath"],
|
||||
"frameStart": time_data.start,
|
||||
|
|
|
|||
|
|
@ -80,6 +80,7 @@ class RenderInstance(object):
|
|||
anatomyData = attr.ib(default=None)
|
||||
outputDir = attr.ib(default=None)
|
||||
context = attr.ib(default=None)
|
||||
deadline = attr.ib(default=None)
|
||||
|
||||
# The source instance the data of this render instance should merge into
|
||||
source_instance = attr.ib(default=None, type=pyblish.api.Instance)
|
||||
|
|
@ -215,13 +216,12 @@ class AbstractCollectRender(pyblish.api.ContextPlugin):
|
|||
|
||||
# add additional data
|
||||
data = self.add_additional_data(data)
|
||||
render_instance_dict = attr.asdict(render_instance)
|
||||
|
||||
# Merge into source instance if provided, otherwise create instance
|
||||
instance = render_instance_dict.pop("source_instance", None)
|
||||
instance = render_instance.source_instance
|
||||
if instance is None:
|
||||
instance = context.create_instance(render_instance.name)
|
||||
|
||||
render_instance_dict = attr.asdict(render_instance)
|
||||
instance.data.update(render_instance_dict)
|
||||
instance.data.update(data)
|
||||
|
||||
|
|
|
|||
|
|
@ -73,8 +73,8 @@ def get_folder_template_data(folder_entity, project_name):
|
|||
- 'parent' - direct parent name, project name used if is under
|
||||
project
|
||||
|
||||
Required document fields:
|
||||
Folder: 'path' -> Plan to require: 'folderType'
|
||||
Required entity fields:
|
||||
Folder: 'path', 'folderType'
|
||||
|
||||
Args:
|
||||
folder_entity (Dict[str, Any]): Folder entity.
|
||||
|
|
@ -101,6 +101,8 @@ def get_folder_template_data(folder_entity, project_name):
|
|||
return {
|
||||
"folder": {
|
||||
"name": folder_name,
|
||||
"type": folder_entity["folderType"],
|
||||
"path": path,
|
||||
},
|
||||
"asset": folder_name,
|
||||
"hierarchy": hierarchy,
|
||||
|
|
|
|||
263
client/ayon_core/pipeline/thumbnails.py
Normal file
263
client/ayon_core/pipeline/thumbnails.py
Normal file
|
|
@ -0,0 +1,263 @@
|
|||
import os
|
||||
import time
|
||||
import collections
|
||||
|
||||
import ayon_api
|
||||
|
||||
from ayon_core.lib.local_settings import get_ayon_appdirs
|
||||
|
||||
|
||||
FileInfo = collections.namedtuple(
|
||||
"FileInfo",
|
||||
("path", "size", "modification_time")
|
||||
)
|
||||
|
||||
|
||||
class ThumbnailsCache:
|
||||
"""Cache of thumbnails on local storage.
|
||||
|
||||
Thumbnails are cached to appdirs to predefined directory. Each project has
|
||||
own subfolder with thumbnails -> that's because each project has own
|
||||
thumbnail id validation and file names are thumbnail ids with matching
|
||||
extension. Extensions are predefined (.png and .jpeg).
|
||||
|
||||
Cache has cleanup mechanism which is triggered on initialized by default.
|
||||
|
||||
The cleanup has 2 levels:
|
||||
1. soft cleanup which remove all files that are older then 'days_alive'
|
||||
2. max size cleanup which remove all files until the thumbnails folder
|
||||
contains less then 'max_filesize'
|
||||
- this is time consuming so it's not triggered automatically
|
||||
|
||||
Args:
|
||||
cleanup (bool): Trigger soft cleanup (Cleanup expired thumbnails).
|
||||
"""
|
||||
|
||||
# Lifetime of thumbnails (in seconds)
|
||||
# - default 3 days
|
||||
days_alive = 3
|
||||
# Max size of thumbnail directory (in bytes)
|
||||
# - default 2 Gb
|
||||
max_filesize = 2 * 1024 * 1024 * 1024
|
||||
|
||||
def __init__(self, cleanup=True):
|
||||
self._thumbnails_dir = None
|
||||
self._days_alive_secs = self.days_alive * 24 * 60 * 60
|
||||
if cleanup:
|
||||
self.cleanup()
|
||||
|
||||
def get_thumbnails_dir(self):
|
||||
"""Root directory where thumbnails are stored.
|
||||
|
||||
Returns:
|
||||
str: Path to thumbnails root.
|
||||
"""
|
||||
|
||||
if self._thumbnails_dir is None:
|
||||
self._thumbnails_dir = get_ayon_appdirs("thumbnails")
|
||||
return self._thumbnails_dir
|
||||
|
||||
thumbnails_dir = property(get_thumbnails_dir)
|
||||
|
||||
def get_thumbnails_dir_file_info(self):
|
||||
"""Get information about all files in thumbnails directory.
|
||||
|
||||
Returns:
|
||||
List[FileInfo]: List of file information about all files.
|
||||
"""
|
||||
|
||||
thumbnails_dir = self.thumbnails_dir
|
||||
files_info = []
|
||||
if not os.path.exists(thumbnails_dir):
|
||||
return files_info
|
||||
|
||||
for root, _, filenames in os.walk(thumbnails_dir):
|
||||
for filename in filenames:
|
||||
path = os.path.join(root, filename)
|
||||
files_info.append(FileInfo(
|
||||
path, os.path.getsize(path), os.path.getmtime(path)
|
||||
))
|
||||
return files_info
|
||||
|
||||
def get_thumbnails_dir_size(self, files_info=None):
|
||||
"""Got full size of thumbnail directory.
|
||||
|
||||
Args:
|
||||
files_info (List[FileInfo]): Prepared file information about
|
||||
files in thumbnail directory.
|
||||
|
||||
Returns:
|
||||
int: File size of all files in thumbnail directory.
|
||||
"""
|
||||
|
||||
if files_info is None:
|
||||
files_info = self.get_thumbnails_dir_file_info()
|
||||
|
||||
if not files_info:
|
||||
return 0
|
||||
|
||||
return sum(
|
||||
file_info.size
|
||||
for file_info in files_info
|
||||
)
|
||||
|
||||
def cleanup(self, check_max_size=False):
|
||||
"""Cleanup thumbnails directory.
|
||||
|
||||
Args:
|
||||
check_max_size (bool): Also cleanup files to match max size of
|
||||
thumbnails directory.
|
||||
"""
|
||||
|
||||
thumbnails_dir = self.get_thumbnails_dir()
|
||||
# Skip if thumbnails dir does not exist yet
|
||||
if not os.path.exists(thumbnails_dir):
|
||||
return
|
||||
|
||||
self._soft_cleanup(thumbnails_dir)
|
||||
if check_max_size:
|
||||
self._max_size_cleanup(thumbnails_dir)
|
||||
|
||||
def _soft_cleanup(self, thumbnails_dir):
|
||||
current_time = time.time()
|
||||
for root, _, filenames in os.walk(thumbnails_dir):
|
||||
for filename in filenames:
|
||||
path = os.path.join(root, filename)
|
||||
modification_time = os.path.getmtime(path)
|
||||
if current_time - modification_time > self._days_alive_secs:
|
||||
os.remove(path)
|
||||
|
||||
def _max_size_cleanup(self, thumbnails_dir):
|
||||
files_info = self.get_thumbnails_dir_file_info()
|
||||
size = self.get_thumbnails_dir_size(files_info)
|
||||
if size < self.max_filesize:
|
||||
return
|
||||
|
||||
sorted_file_info = collections.deque(
|
||||
sorted(files_info, key=lambda item: item.modification_time)
|
||||
)
|
||||
diff = size - self.max_filesize
|
||||
while diff > 0:
|
||||
if not sorted_file_info:
|
||||
break
|
||||
|
||||
file_info = sorted_file_info.popleft()
|
||||
diff -= file_info.size
|
||||
os.remove(file_info.path)
|
||||
|
||||
def get_thumbnail_filepath(self, project_name, thumbnail_id):
|
||||
"""Get thumbnail by thumbnail id.
|
||||
|
||||
Args:
|
||||
project_name (str): Name of project.
|
||||
thumbnail_id (str): Thumbnail id.
|
||||
|
||||
Returns:
|
||||
Union[str, None]: Path to thumbnail image or None if thumbnail
|
||||
is not cached yet.
|
||||
"""
|
||||
|
||||
if not thumbnail_id:
|
||||
return None
|
||||
|
||||
for ext in (
|
||||
".png",
|
||||
".jpeg",
|
||||
):
|
||||
filepath = os.path.join(
|
||||
self.thumbnails_dir, project_name, thumbnail_id + ext
|
||||
)
|
||||
if os.path.exists(filepath):
|
||||
return filepath
|
||||
return None
|
||||
|
||||
def get_project_dir(self, project_name):
|
||||
"""Path to root directory for specific project.
|
||||
|
||||
Args:
|
||||
project_name (str): Name of project for which root directory path
|
||||
should be returned.
|
||||
|
||||
Returns:
|
||||
str: Path to root of project's thumbnails.
|
||||
"""
|
||||
|
||||
return os.path.join(self.thumbnails_dir, project_name)
|
||||
|
||||
def make_sure_project_dir_exists(self, project_name):
|
||||
project_dir = self.get_project_dir(project_name)
|
||||
if not os.path.exists(project_dir):
|
||||
os.makedirs(project_dir)
|
||||
return project_dir
|
||||
|
||||
def store_thumbnail(self, project_name, thumbnail_id, content, mime_type):
|
||||
"""Store thumbnail to cache folder.
|
||||
|
||||
Args:
|
||||
project_name (str): Project where the thumbnail belong to.
|
||||
thumbnail_id (str): Thumbnail id.
|
||||
content (bytes): Byte content of thumbnail file.
|
||||
mime_type (str): Type of content.
|
||||
|
||||
Returns:
|
||||
str: Path to cached thumbnail image file.
|
||||
"""
|
||||
|
||||
if mime_type == "image/png":
|
||||
ext = ".png"
|
||||
elif mime_type == "image/jpeg":
|
||||
ext = ".jpeg"
|
||||
else:
|
||||
raise ValueError(
|
||||
"Unknown mime type for thumbnail \"{}\"".format(mime_type))
|
||||
|
||||
project_dir = self.make_sure_project_dir_exists(project_name)
|
||||
thumbnail_path = os.path.join(project_dir, thumbnail_id + ext)
|
||||
with open(thumbnail_path, "wb") as stream:
|
||||
stream.write(content)
|
||||
|
||||
current_time = time.time()
|
||||
os.utime(thumbnail_path, (current_time, current_time))
|
||||
|
||||
return thumbnail_path
|
||||
|
||||
|
||||
class _CacheItems:
|
||||
thumbnails_cache = ThumbnailsCache()
|
||||
|
||||
|
||||
def get_thumbnail_path(project_name, thumbnail_id):
|
||||
"""Get path to thumbnail image.
|
||||
|
||||
Args:
|
||||
project_name (str): Project where thumbnail belongs to.
|
||||
thumbnail_id (Union[str, None]): Thumbnail id.
|
||||
|
||||
Returns:
|
||||
Union[str, None]: Path to thumbnail image or None if thumbnail
|
||||
id is not valid or thumbnail was not possible to receive.
|
||||
|
||||
"""
|
||||
if not thumbnail_id:
|
||||
return None
|
||||
|
||||
filepath = _CacheItems.thumbnails_cache.get_thumbnail_filepath(
|
||||
project_name, thumbnail_id
|
||||
)
|
||||
if filepath is not None:
|
||||
return filepath
|
||||
|
||||
# 'ayon_api' had a bug, public function
|
||||
# 'get_thumbnail_by_id' did not return output of
|
||||
# 'ServerAPI' method.
|
||||
con = ayon_api.get_server_api_connection()
|
||||
result = con.get_thumbnail_by_id(project_name, thumbnail_id)
|
||||
|
||||
if result is not None and result.is_valid:
|
||||
return _CacheItems.thumbnails_cache.store_thumbnail(
|
||||
project_name,
|
||||
thumbnail_id,
|
||||
result.content,
|
||||
result.content_type
|
||||
)
|
||||
return None
|
||||
|
|
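[Editor's note] A usage sketch (not part of this diff) for the new thumbnail cache module added above; the project name and thumbnail id are placeholders.

from ayon_core.pipeline.thumbnails import get_thumbnail_path

path = get_thumbnail_path("my_project", "0123456789abcdef0123456789abcdef")
if path:
    print("Thumbnail cached at", path)
else:
    print("Thumbnail not cached and not available on server")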
@ -21,6 +21,15 @@ from .utils import (
|
|||
from .build_workfile import BuildWorkfile
|
||||
|
||||
|
||||
from .workfile_template_builder import (
|
||||
discover_workfile_build_plugins,
|
||||
register_workfile_build_plugin,
|
||||
deregister_workfile_build_plugin,
|
||||
register_workfile_build_plugin_path,
|
||||
deregister_workfile_build_plugin_path,
|
||||
)
|
||||
|
||||
|
||||
__all__ = (
|
||||
"get_workfile_template_key_from_context",
|
||||
"get_workfile_template_key",
|
||||
|
|
@ -39,4 +48,10 @@ __all__ = (
|
|||
"should_open_workfiles_tool_on_launch",
|
||||
|
||||
"BuildWorkfile",
|
||||
|
||||
"discover_workfile_build_plugins",
|
||||
"register_workfile_build_plugin",
|
||||
"deregister_workfile_build_plugin",
|
||||
"register_workfile_build_plugin_path",
|
||||
"deregister_workfile_build_plugin_path",
|
||||
)
|
||||
|
|
|
|||
|
|
@ -36,6 +36,7 @@ from ayon_core.lib import (
|
|||
filter_profiles,
|
||||
attribute_definitions,
|
||||
)
|
||||
from ayon_core.lib.events import EventSystem, EventCallback, Event
|
||||
from ayon_core.lib.attribute_definitions import get_attributes_keys
|
||||
from ayon_core.pipeline import Anatomy
|
||||
from ayon_core.pipeline.load import (
|
||||
|
|
@ -43,6 +44,13 @@ from ayon_core.pipeline.load import (
|
|||
get_representation_contexts,
|
||||
load_with_repre_context,
|
||||
)
|
||||
from ayon_core.pipeline.plugin_discover import (
|
||||
discover,
|
||||
register_plugin,
|
||||
register_plugin_path,
|
||||
deregister_plugin,
|
||||
deregister_plugin_path
|
||||
)
|
||||
|
||||
from ayon_core.pipeline.create import (
|
||||
discover_legacy_creator_plugins,
|
||||
|
|
@ -124,6 +132,8 @@ class AbstractTemplateBuilder(object):
|
|||
self._current_task_entity = _NOT_SET
|
||||
self._linked_folder_entities = _NOT_SET
|
||||
|
||||
self._event_system = EventSystem()
|
||||
|
||||
@property
|
||||
def project_name(self):
|
||||
if isinstance(self._host, HostBase):
|
||||
|
|
@ -211,10 +221,14 @@ class AbstractTemplateBuilder(object):
|
|||
Returns:
|
||||
List[PlaceholderPlugin]: Plugin classes available for host.
|
||||
"""
|
||||
plugins = []
|
||||
|
||||
# Backwards compatibility
|
||||
if hasattr(self._host, "get_workfile_build_placeholder_plugins"):
|
||||
return self._host.get_workfile_build_placeholder_plugins()
|
||||
return []
|
||||
|
||||
plugins.extend(discover(PlaceholderPlugin))
|
||||
return plugins
|
||||
|
||||
@property
|
||||
def host(self):
|
||||
|
|
@ -257,6 +271,8 @@ class AbstractTemplateBuilder(object):
|
|||
|
||||
self._project_settings = None
|
||||
|
||||
self._event_system = EventSystem()
|
||||
|
||||
self.clear_shared_data()
|
||||
self.clear_shared_populate_data()
|
||||
|
||||
|
|
@ -735,6 +751,16 @@ class AbstractTemplateBuilder(object):
|
|||
|
||||
placeholder.set_finished()
|
||||
|
||||
# Trigger on_depth_processed event
|
||||
self.emit_event(
|
||||
topic="template.depth_processed",
|
||||
data={
|
||||
"depth": iter_counter,
|
||||
"placeholders_by_scene_id": placeholder_by_scene_id
|
||||
},
|
||||
source="builder"
|
||||
)
|
||||
|
||||
# Clear shared data before getting new placeholders
|
||||
self.clear_shared_populate_data()
|
||||
|
||||
|
|
@ -753,6 +779,16 @@ class AbstractTemplateBuilder(object):
|
|||
placeholder_by_scene_id[identifier] = placeholder
|
||||
placeholders.append(placeholder)
|
||||
|
||||
# Trigger on_finished event
|
||||
self.emit_event(
|
||||
topic="template.finished",
|
||||
data={
|
||||
"depth": iter_counter,
|
||||
"placeholders_by_scene_id": placeholder_by_scene_id,
|
||||
},
|
||||
source="builder"
|
||||
)
|
||||
|
||||
self.refresh()
|
||||
|
||||
def _get_build_profiles(self):
|
||||
|
|
@ -880,6 +916,30 @@ class AbstractTemplateBuilder(object):
|
|||
"create_first_version": create_first_version
|
||||
}
|
||||
|
||||
def emit_event(self, topic, data=None, source=None) -> Event:
|
||||
return self._event_system.emit(topic, data, source)
|
||||
|
||||
def add_event_callback(self, topic, callback, order=None):
|
||||
return self._event_system.add_callback(topic, callback, order=order)
|
||||
|
||||
def add_on_finished_callback(
|
||||
self, callback, order=None
|
||||
) -> EventCallback:
|
||||
return self.add_event_callback(
|
||||
topic="template.finished",
|
||||
callback=callback,
|
||||
order=order
|
||||
)
|
||||
|
||||
def add_on_depth_processed_callback(
|
||||
self, callback, order=None
|
||||
) -> EventCallback:
|
||||
return self.add_event_callback(
|
||||
topic="template.depth_processed",
|
||||
callback=callback,
|
||||
order=order
|
||||
)
|
||||
|
||||
|
||||
@six.add_metaclass(ABCMeta)
|
||||
class PlaceholderPlugin(object):
|
||||
|
|
@ -1912,3 +1972,23 @@ class CreatePlaceholderItem(PlaceholderItem):
|
|||
|
||||
def create_failed(self, creator_data):
|
||||
self._failed_created_publish_instances.append(creator_data)
|
||||
|
||||
|
||||
def discover_workfile_build_plugins(*args, **kwargs):
|
||||
return discover(PlaceholderPlugin, *args, **kwargs)
|
||||
|
||||
|
||||
def register_workfile_build_plugin(plugin: PlaceholderPlugin):
|
||||
register_plugin(PlaceholderPlugin, plugin)
|
||||
|
||||
|
||||
def deregister_workfile_build_plugin(plugin: PlaceholderPlugin):
|
||||
deregister_plugin(PlaceholderPlugin, plugin)
|
||||
|
||||
|
||||
def register_workfile_build_plugin_path(path: str):
|
||||
register_plugin_path(PlaceholderPlugin, path)
|
||||
|
||||
|
||||
def deregister_workfile_build_plugin_path(path: str):
|
||||
deregister_plugin_path(PlaceholderPlugin, path)
|
||||
|
|
|
|||
|
|
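[Editor's note] A rough sketch (not part of this diff) of how a host can use the new workfile-build plugin registration and builder events; the plugin directory path is a placeholder and 'builder' stands for a host-specific AbstractTemplateBuilder instance.

from ayon_core.pipeline import register_workfile_build_plugin_path

register_workfile_build_plugin_path("/path/to/host/plugins/workfile_build")

def _on_finished(event):
    # event.data carries "depth" and "placeholders_by_scene_id"
    print("Workfile template populated in", event.data["depth"], "passes")

# builder.add_on_finished_callback(_on_finished)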
@ -33,6 +33,7 @@ import collections
|
|||
import pyblish.api
|
||||
import ayon_api
|
||||
|
||||
from ayon_core.pipeline.template_data import get_folder_template_data
|
||||
from ayon_core.pipeline.version_start import get_versioning_start
|
||||
|
||||
|
||||
|
|
@ -383,24 +384,11 @@ class CollectAnatomyInstanceData(pyblish.api.ContextPlugin):
|
|||
# - 'folder', 'hierarchy', 'parent', 'folder'
|
||||
folder_entity = instance.data.get("folderEntity")
|
||||
if folder_entity:
|
||||
folder_name = folder_entity["name"]
|
||||
folder_path = folder_entity["path"]
|
||||
hierarchy_parts = folder_path.split("/")
|
||||
hierarchy_parts.pop(0)
|
||||
hierarchy_parts.pop(-1)
|
||||
parent_name = project_entity["name"]
|
||||
if hierarchy_parts:
|
||||
parent_name = hierarchy_parts[-1]
|
||||
|
||||
hierarchy = "/".join(hierarchy_parts)
|
||||
anatomy_data.update({
|
||||
"asset": folder_name,
|
||||
"hierarchy": hierarchy,
|
||||
"parent": parent_name,
|
||||
"folder": {
|
||||
"name": folder_name,
|
||||
},
|
||||
})
|
||||
folder_data = get_folder_template_data(
|
||||
folder_entity,
|
||||
project_entity["name"]
|
||||
)
|
||||
anatomy_data.update(folder_data)
|
||||
return
|
||||
|
||||
if instance.data.get("newAssetPublishing"):
|
||||
|
|
@ -418,6 +406,11 @@ class CollectAnatomyInstanceData(pyblish.api.ContextPlugin):
|
|||
"parent": parent_name,
|
||||
"folder": {
|
||||
"name": folder_name,
|
||||
"path": instance.data["folderPath"],
|
||||
# TODO get folder type from hierarchy
|
||||
# Using 'Shot' is current default behavior of editorial
|
||||
# (or 'newAssetPublishing') publishing.
|
||||
"type": "Shot",
|
||||
},
|
||||
})
|
||||
|
||||
|
|
|
|||
|
|
@ -121,6 +121,7 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
|
|||
"setdress",
|
||||
"layout",
|
||||
"ass",
|
||||
"assProxy",
|
||||
"vdbcache",
|
||||
"scene",
|
||||
"vrayproxy",
|
||||
|
|
|
|||
Some files were not shown because too many files have changed in this diff.