Merge branch 'develop' into enhancement/AY-5356_CSV-publisher-adding-relative-and-absolute-paths

Commit 286ebbced4 by Jakub Trllo, 2024-06-26 16:24:06 +02:00, committed by GitHub
GPG key ID: B5690EEEBB952194 (no known key found for this signature in database)
67 changed files with 768 additions and 1015 deletions


@ -2,6 +2,8 @@
from .cache import CacheItem, NestedCacheItem
from .projects import (
StatusItem,
StatusStates,
ProjectItem,
ProjectsModel,
PROJECTS_MODEL_SENDER,
@ -21,6 +23,8 @@ __all__ = (
"CacheItem",
"NestedCacheItem",
"StatusItem",
"StatusStates",
"ProjectItem",
"ProjectsModel",
"PROJECTS_MODEL_SENDER",


@ -1,3 +1,4 @@
import time
import collections
import contextlib
from abc import ABCMeta, abstractmethod
@ -535,13 +536,15 @@ class HierarchyModel(object):
def _refresh_tasks_cache(self, project_name, folder_id, sender=None):
if folder_id in self._tasks_refreshing:
while folder_id in self._tasks_refreshing:
time.sleep(0.01)
return
cache = self._task_items[project_name][folder_id]
with self._task_refresh_event_manager(
project_name, folder_id, sender
):
task_items = self._query_tasks(project_name, folder_id)
self._task_items[project_name][folder_id] = task_items
cache.update_data(self._query_tasks(project_name, folder_id))
def _query_tasks(self, project_name, folder_id):
tasks = list(ayon_api.get_tasks(

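The hunk above makes concurrent task-cache refreshes wait for the one already in flight instead of querying twice. A minimal standalone sketch of that guard pattern, with illustrative names that are not part of the diff:

import time

class TaskCacheSketch:
    """Illustrative only: first caller queries, later callers wait and reuse."""

    def __init__(self):
        self._refreshing = set()
        self._cache = {}

    def refresh(self, folder_id, query):
        if folder_id in self._refreshing:
            # Another caller is already refreshing this folder; block until
            # it finishes, then reuse the freshly updated cache.
            while folder_id in self._refreshing:
                time.sleep(0.01)
            return
        self._refreshing.add(folder_id)
        try:
            self._cache[folder_id] = query(folder_id)
        finally:
            self._refreshing.discard(folder_id)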

@ -1,8 +1,8 @@
import contextlib
from abc import ABCMeta, abstractmethod
from abc import ABC, abstractmethod
from typing import Dict, Any
import ayon_api
import six
from ayon_core.style import get_default_entity_icon_color
from ayon_core.lib import CacheItem, NestedCacheItem
@ -10,8 +10,14 @@ from ayon_core.lib import CacheItem, NestedCacheItem
PROJECTS_MODEL_SENDER = "projects.model"
@six.add_metaclass(ABCMeta)
class AbstractHierarchyController:
class StatusStates:
not_started = "not_started"
in_progress = "in_progress"
done = "done"
blocked = "blocked"
class AbstractHierarchyController(ABC):
@abstractmethod
def emit_event(self, topic, data, source):
pass
@ -25,18 +31,24 @@ class StatusItem:
color (str): Status color in hex ("#434a56").
short (str): Short status name ("NRD").
icon (str): Icon name in MaterialIcons ("fiber_new").
state (Literal["not_started", "in_progress", "done", "blocked"]):
Status state.
state (str): Status state.
"""
def __init__(self, name, color, short, icon, state):
self.name = name
self.color = color
self.short = short
self.icon = icon
self.state = state
def __init__(
self,
name: str,
color: str,
short: str,
icon: str,
state: str
):
self.name: str = name
self.color: str = color
self.short: str = short
self.icon: str = icon
self.state: str = state
def to_data(self):
def to_data(self) -> Dict[str, Any]:
return {
"name": self.name,
"color": self.color,
@ -89,7 +101,7 @@ class FolderTypeItem:
def from_project_item(cls, folder_type_data):
return cls(
name=folder_type_data["name"],
short=folder_type_data["shortName"],
short=folder_type_data.get("shortName", ""),
icon=folder_type_data["icon"],
)
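
For illustration, a hedged usage sketch of the new StatusStates constants together with StatusItem (the values below are made up, not from the diff):

from ayon_core.tools.common_models import StatusItem, StatusStates

# Example values only; real status items come from the project's statuses.
approved = StatusItem(
    name="Approved",
    color="#00f0b4",
    short="APP",
    icon="task_alt",
    state=StatusStates.done,
)
if approved.state == StatusStates.done:
    print(approved.to_data())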


@ -3,14 +3,13 @@ import uuid
import ayon_api
from ayon_core.lib import NestedCacheItem, CacheItem
from ayon_core.lib.events import QueuedEventSystem
from ayon_core.pipeline import Anatomy, get_current_context
from ayon_core.host import ILoadHost
from ayon_core.tools.common_models import (
ProjectsModel,
HierarchyModel,
NestedCacheItem,
CacheItem,
ThumbnailsModel,
)


@ -217,7 +217,9 @@ class InventoryModel(QtGui.QStandardItemModel):
version_label = format_version(version_item.version)
is_hero = version_item.version < 0
is_latest = version_item.is_latest
if not is_latest:
# TODO maybe use different colors for last approved and last
# version? Or don't care about color at all?
if not is_latest and not version_item.is_last_approved:
version_color = self.OUTDATED_COLOR
status_name = version_item.status


@ -3,7 +3,9 @@ import collections
import ayon_api
from ayon_api.graphql import GraphQlQuery
from ayon_core.host import ILoadHost
from ayon_core.tools.common_models.projects import StatusStates
# --- Implementation that should be in ayon-python-api ---
@ -149,26 +151,35 @@ class RepresentationInfo:
class VersionItem:
def __init__(self, version_id, product_id, version, status, is_latest):
self.version = version
self.version_id = version_id
self.product_id = product_id
self.version = version
self.status = status
self.is_latest = is_latest
def __init__(
self,
version_id: str,
product_id: str,
version: int,
status: str,
is_latest: bool,
is_last_approved: bool,
):
self.version_id: str = version_id
self.product_id: str = product_id
self.version: int = version
self.status: str = status
self.is_latest: bool = is_latest
self.is_last_approved: bool = is_last_approved
@property
def is_hero(self):
return self.version < 0
@classmethod
def from_entity(cls, version_entity, is_latest):
def from_entity(cls, version_entity, is_latest, is_last_approved):
return cls(
version_id=version_entity["id"],
product_id=version_entity["productId"],
version=version_entity["version"],
status=version_entity["status"],
is_latest=is_latest,
is_last_approved=is_last_approved,
)
@ -275,6 +286,11 @@ class ContainersModel:
if product_id not in self._version_items_by_product_id
}
if missing_ids:
status_items_by_name = {
status_item.name: status_item
for status_item in self._controller.get_project_status_items()
}
def version_sorted(entity):
return entity["version"]
@ -300,9 +316,21 @@ class ContainersModel:
version_entities_by_product_id.items()
):
last_version = abs(version_entities[-1]["version"])
last_approved_id = None
for version_entity in version_entities:
status_item = status_items_by_name.get(
version_entity["status"]
)
if status_item is None:
continue
if status_item.state == StatusStates.done:
last_approved_id = version_entity["id"]
version_items_by_id = {
entity["id"]: VersionItem.from_entity(
entity, abs(entity["version"]) == last_version
entity,
abs(entity["version"]) == last_version,
entity["id"] == last_approved_id
)
for entity in version_entities
}
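
The last-approved lookup above can be read as a small helper; this sketch mirrors the loop for clarity (the real model keeps it inline, and the function name is illustrative):

from ayon_core.tools.common_models.projects import StatusStates

def find_last_approved_id(version_entities, status_items_by_name):
    """Return id of the newest version whose status state is 'done'.

    Assumes 'version_entities' are sorted by version, as in the hunk above.
    """
    last_approved_id = None
    for version_entity in version_entities:
        status_item = status_items_by_name.get(version_entity["status"])
        if status_item is None:
            continue
        if status_item.state == StatusStates.done:
            last_approved_id = version_entity["id"]
    return last_approved_id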


@ -233,19 +233,38 @@ class SceneInventoryView(QtWidgets.QTreeView):
has_outdated = False
has_loaded_hero_versions = False
has_available_hero_version = False
for version_items_by_id in version_items_by_product_id.values():
has_outdated_approved = False
last_version_by_product_id = {}
for product_id, version_items_by_id in (
version_items_by_product_id.items()
):
_has_outdated_approved = False
_last_approved_version_item = None
for version_item in version_items_by_id.values():
if version_item.is_hero:
has_available_hero_version = True
elif version_item.is_last_approved:
_last_approved_version_item = version_item
_has_outdated_approved = True
if version_item.version_id not in version_ids:
continue
if version_item.is_hero:
has_loaded_hero_versions = True
elif not version_item.is_latest:
has_outdated = True
if (
_has_outdated_approved
and _last_approved_version_item is not None
):
last_version_by_product_id[product_id] = (
_last_approved_version_item
)
has_outdated_approved = True
switch_to_versioned = None
if has_loaded_hero_versions:
update_icon = qtawesome.icon(
@ -261,6 +280,42 @@ class SceneInventoryView(QtWidgets.QTreeView):
lambda: self._on_switch_to_versioned(item_ids)
)
update_to_last_approved_action = None
approved_version_by_item_id = {}
if has_outdated_approved:
for container_item in container_items_by_id.values():
repre_id = container_item.representation_id
repre_info = repre_info_by_id.get(repre_id)
if not repre_info or not repre_info.is_valid:
continue
version_item = last_version_by_product_id.get(
repre_info.product_id
)
if (
version_item is None
or version_item.version_id == repre_info.version_id
):
continue
approved_version_by_item_id[container_item.item_id] = (
version_item.version
)
if approved_version_by_item_id:
update_icon = qtawesome.icon(
"fa.angle-double-up",
color="#00f0b4"
)
update_to_last_approved_action = QtWidgets.QAction(
update_icon,
"Update to last approved",
menu
)
update_to_last_approved_action.triggered.connect(
lambda: self._update_containers_to_approved_versions(
approved_version_by_item_id
)
)
update_to_latest_action = None
if has_outdated or has_loaded_hero_versions:
update_icon = qtawesome.icon(
@ -299,7 +354,9 @@ class SceneInventoryView(QtWidgets.QTreeView):
# set version
set_version_action = None
if active_repre_id is not None:
set_version_icon = qtawesome.icon("fa.hashtag", color=DEFAULT_COLOR)
set_version_icon = qtawesome.icon(
"fa.hashtag", color=DEFAULT_COLOR
)
set_version_action = QtWidgets.QAction(
set_version_icon,
"Set version",
@ -323,6 +380,9 @@ class SceneInventoryView(QtWidgets.QTreeView):
if switch_to_versioned:
menu.addAction(switch_to_versioned)
if update_to_last_approved_action:
menu.addAction(update_to_last_approved_action)
if update_to_latest_action:
menu.addAction(update_to_latest_action)
@ -970,3 +1030,24 @@ class SceneInventoryView(QtWidgets.QTreeView):
"""
versions = [version for _ in range(len(item_ids))]
self._update_containers(item_ids, versions)
def _update_containers_to_approved_versions(
self, approved_version_by_item_id
):
"""Helper to update items to given version (or version per item)
If at least one item is specified this will always try to refresh
the inventory even if errors occurred on any of the items.
Arguments:
approved_version_by_item_id (Dict[str, int]): Version to set by
item id.
"""
versions = []
item_ids = []
for item_id, version in approved_version_by_item_id.items():
item_ids.append(item_id)
versions.append(version)
self._update_containers(item_ids, versions)


@ -192,7 +192,11 @@ class FoldersQtModel(QtGui.QStandardItemModel):
or thread_id != self._current_refresh_thread.id
):
return
folder_items, folder_type_items = thread.get_result()
if thread.failed:
# TODO visualize that refresh failed
folder_items, folder_type_items = {}, {}
else:
folder_items, folder_type_items = thread.get_result()
self._fill_items(folder_items, folder_type_items)
self._current_refresh_thread = None


@ -2,7 +2,9 @@ import os
import sys
import contextlib
import collections
import traceback
from functools import partial
from typing import Union, Any
from qtpy import QtWidgets, QtCore, QtGui
import qtawesome
@ -425,26 +427,39 @@ class RefreshThread(QtCore.QThread):
self._id = thread_id
self._callback = partial(func, *args, **kwargs)
self._exception = None
self._traceback = None
self._result = None
self.finished.connect(self._on_finish_callback)
@property
def id(self):
def id(self) -> str:
return self._id
@property
def failed(self):
def failed(self) -> bool:
return self._exception is not None
def run(self):
try:
self._result = self._callback()
except Exception as exc:
exc_type, exc_value, exc_traceback = sys.exc_info()
err_traceback = "".join(traceback.format_exception(
exc_type, exc_value, exc_traceback
))
print(err_traceback)
self._traceback = err_traceback
self._exception = exc
def get_result(self):
def get_result(self) -> Any:
return self._result
def get_exception(self) -> Union[BaseException, None]:
return self._exception
def get_traceback(self) -> Union[str, None]:
return self._traceback
def _on_finish_callback(self):
"""Trigger custom signal with thread id.

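A hedged sketch of how a caller can consume the extended RefreshThread API shown above (the handler name and the print-based logging are illustrative):

def _on_refresh_finished(thread):
    # Illustrative consumer of RefreshThread.failed / get_exception /
    # get_traceback / get_result.
    if thread.failed:
        print("Refresh failed:", thread.get_exception())
        print(thread.get_traceback())
        return None
    return thread.get_result()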

@ -191,12 +191,12 @@ class TasksQtModel(QtGui.QStandardItemModel):
def _thread_getter(self, project_name, folder_id):
task_items = self._controller.get_task_items(
project_name, folder_id
project_name, folder_id, sender=TASKS_MODEL_SENDER_NAME
)
task_type_items = {}
if hasattr(self._controller, "get_task_type_items"):
task_type_items = self._controller.get_task_type_items(
project_name
project_name, sender=TASKS_MODEL_SENDER_NAME
)
return task_items, task_type_items
@ -580,6 +580,7 @@ class TasksWidget(QtWidgets.QWidget):
return
if expected_data is None:
expected_data = self._controller.get_expected_selection_data()
folder_data = expected_data.get("folder")
task_data = expected_data.get("task")
if (


@ -130,7 +130,10 @@ def main(title="Scripts", parent=None, objectName=None):
# Register control + shift callback to add to shelf (maya behavior)
modifiers = QtCore.Qt.ControlModifier | QtCore.Qt.ShiftModifier
menu.register_callback(int(modifiers), to_shelf)
if int(cmds.about(version=True)) <= 2025:
modifiers = int(modifiers)
menu.register_callback(modifiers, to_shelf)
menu.register_callback(0, register_repeat_last)


@ -1,3 +1,3 @@
# -*- coding: utf-8 -*-
"""Package declaring AYON core addon version."""
__version__ = "0.3.3-dev.1"
__version__ = "0.4.1-dev.1"


@ -1,6 +1,6 @@
name = "core"
title = "Core"
version = "0.3.3-dev.1"
version = "0.4.1-dev.1"
client_dir = "ayon_core"


@ -448,6 +448,17 @@ DEFAULT_TOOLS_VALUES = {
"task_types": [],
"tasks": [],
"template": "SK_{folder[name]}{variant}"
},
{
"product_types": [
"hda"
],
"hosts": [
"houdini"
],
"task_types": [],
"tasks": [],
"template": "{folder[name]}_{variant}"
}
],
"filter_creator_profiles": []

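The new profile maps Houdini hda products to the "{folder[name]}_{variant}" template. Product names are resolved through get_product_name with more data than shown here; this snippet only illustrates how the template string itself formats (values are hypothetical):

template = "{folder[name]}_{variant}"
print(template.format(folder={"name": "chairProp"}, variant="Main"))
# -> chairProp_Main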

@ -92,7 +92,7 @@ class AEPlaceholderPlugin(PlaceholderPlugin):
return None, None
def _collect_scene_placeholders(self):
"""" Cache placeholder data to shared data.
"""Cache placeholder data to shared data.
Returns:
(list) of dicts
"""


@ -94,7 +94,7 @@ class RenderCreator(Creator):
new_instance.creator_attributes["farm"] = use_farm
review = pre_create_data["mark_for_review"]
new_instance. creator_attributes["mark_for_review"] = review
new_instance.creator_attributes["mark_for_review"] = review
api.get_stub().imprint(new_instance.id,
new_instance.data_to_store())


@ -23,8 +23,16 @@ class AERenderInstance(RenderInstance):
class CollectAERender(publish.AbstractCollectRender):
"""Prepares RenderInstance.
order = pyblish.api.CollectorOrder + 0.100
RenderInstance is meant to replace simple dictionaries to provide code
assist and typing. (Currently used only in AE, Harmony though.)
This must run after `collect_review`, but before Deadline plugins (which
should be run only on renderable instances.)
"""
order = pyblish.api.CollectorOrder + 0.125
label = "Collect After Effects Render Layers"
hosts = ["aftereffects"]
@ -173,24 +181,25 @@ class CollectAERender(publish.AbstractCollectRender):
_, ext = os.path.splitext(os.path.basename(file_name))
ext = ext.replace('.', '')
version_str = "v{:03d}".format(render_instance.version)
if "#" not in file_name: # single frame (mov)W
path = os.path.join(base_dir, "{}_{}_{}.{}".format(
render_instance.folderPath,
if "#" not in file_name: # single frame (mov)
file_name = "{}_{}.{}".format(
render_instance.productName,
version_str,
ext
))
expected_files.append(path)
)
file_path = os.path.join(base_dir, file_name)
expected_files.append(file_path)
else:
for frame in range(start, end + 1):
path = os.path.join(base_dir, "{}_{}_{}.{}.{}".format(
render_instance.folderPath,
file_name = "{}_{}.{}.{}".format(
render_instance.productName,
version_str,
str(frame).zfill(self.padding_width),
ext
))
expected_files.append(path)
)
file_path = os.path.join(base_dir, file_name)
expected_files.append(file_path)
return expected_files
def _get_output_dir(self, render_instance):


@ -1,3 +1,3 @@
# -*- coding: utf-8 -*-
"""Package declaring AYON addon 'aftereffects' version."""
__version__ = "0.2.1"
__version__ = "0.2.2"


@ -1,6 +1,6 @@
name = "aftereffects"
title = "AfterEffects"
version = "0.2.1"
version = "0.2.2"
client_dir = "ayon_aftereffects"


@ -0,0 +1,36 @@
from pathlib import Path
import pyblish.api
import bpy
class CollectFileDependencies(pyblish.api.ContextPlugin):
"""Gather all files referenced in this scene."""
label = "Collect File Dependencies"
order = pyblish.api.CollectorOrder - 0.49
hosts = ["blender"]
families = ["render"]
@classmethod
def apply_settings(cls, project_settings):
# Disable plug-in if not used for deadline submission anyway
settings = project_settings["deadline"]["publish"]["BlenderSubmitDeadline"] # noqa
cls.enabled = settings.get("asset_dependencies", True)
def process(self, context):
dependencies = set()
# Add alembic files as dependencies
for cache in bpy.data.cache_files:
dependencies.add(
Path(bpy.path.abspath(cache.filepath)).resolve().as_posix())
# Add image files as dependencies
for image in bpy.data.images:
if image.filepath:
dependencies.add(Path(
bpy.path.abspath(image.filepath)).resolve().as_posix())
context.data["fileDependencies"] = list(dependencies)


@ -83,7 +83,7 @@ class ExtractThumbnail(plugin.BlenderExtractor):
instance.data["representations"].append(representation)
def _fix_output_path(self, filepath):
""""Workaround to return correct filepath.
"""Workaround to return correct filepath.
To workaround this we just glob.glob() for any file extensions and
assume the latest modified file is the correct file and return it.


@ -1,3 +1,3 @@
# -*- coding: utf-8 -*-
"""Package declaring AYON addon 'blender' version."""
__version__ = "0.2.0"
__version__ = "0.2.1"


@ -1,6 +1,6 @@
name = "blender"
title = "Blender"
version = "0.2.0"
version = "0.2.1"
client_dir = "ayon_blender"


@ -6,5 +6,5 @@ FARM_FAMILIES = [
"vrayscene", "maxrender",
"arnold_rop", "mantra_rop",
"karma_rop", "vray_rop", "redshift_rop",
"renderFarm", "usrender", "publish.hou"
"renderFarm", "usdrender", "publish.hou"
]


@ -57,12 +57,11 @@ class AfterEffectsSubmitDeadline(
dln_job_info.Plugin = "AfterEffects"
dln_job_info.UserName = context.data.get(
"deadlineUser", getpass.getuser())
if self._instance.data["frameEnd"] > self._instance.data["frameStart"]:
# Deadline requires integers in frame range
frame_range = "{}-{}".format(
int(round(self._instance.data["frameStart"])),
int(round(self._instance.data["frameEnd"])))
dln_job_info.Frames = frame_range
# Deadline requires integers in frame range
frame_range = "{}-{}".format(
int(round(self._instance.data["frameStart"])),
int(round(self._instance.data["frameEnd"])))
dln_job_info.Frames = frame_range
dln_job_info.Priority = self.priority
dln_job_info.Pool = self._instance.data.get("primaryPool")


@ -162,7 +162,7 @@ class BlenderSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline,
return plugin_payload
def process_submission(self):
def process_submission(self, auth=None):
instance = self._instance
expected_files = instance.data["expectedFiles"]


@ -1,3 +1,3 @@
# -*- coding: utf-8 -*-
"""Package declaring AYON addon 'deadline' version."""
__version__ = "0.2.0"
__version__ = "0.2.2"


@ -1,6 +1,6 @@
name = "deadline"
title = "Deadline"
version = "0.2.0"
version = "0.2.2"
client_dir = "ayon_deadline"


@ -270,6 +270,7 @@ class BlenderSubmitDeadlineModel(BaseSettingsModel):
optional: bool = SettingsField(title="Optional")
active: bool = SettingsField(title="Active")
use_published: bool = SettingsField(title="Use Published scene")
asset_dependencies: bool = SettingsField(title="Use Asset dependencies")
priority: int = SettingsField(title="Priority")
chunk_size: int = SettingsField(title="Frame per Task")
group: str = SettingsField("", title="Group Name")
@ -413,6 +414,7 @@ DEFAULT_DEADLINE_PLUGINS_SETTINGS = {
"optional": False,
"active": True,
"use_published": True,
"asset_dependencies": True,
"priority": 50,
"chunk_size": 10,
"group": "none",


@ -4,10 +4,6 @@ from .pipeline import (
containerise
)
from .plugin import (
Creator,
)
from .lib import (
lsattr,
lsattrs,
@ -23,8 +19,6 @@ __all__ = [
"ls",
"containerise",
"Creator",
# Utility functions
"lsattr",
"lsattrs",


@ -148,89 +148,6 @@ def validate_fps():
return True
def create_remote_publish_node(force=True):
"""Function to create a remote publish node in /out
This is a hacked "Shell" node that does *nothing* except for triggering
`colorbleed.lib.publish_remote()` as pre-render script.
All default attributes of the Shell node are hidden to the Artist to
avoid confusion.
Additionally some custom attributes are added that can be collected
by a Collector to set specific settings for the publish, e.g. whether
to separate the jobs per instance or process in one single job.
"""
cmd = "import colorbleed.lib; colorbleed.lib.publish_remote()"
existing = hou.node("/out/REMOTE_PUBLISH")
if existing:
if force:
log.warning("Removing existing '/out/REMOTE_PUBLISH' node..")
existing.destroy()
else:
raise RuntimeError("Node already exists /out/REMOTE_PUBLISH. "
"Please remove manually or set `force` to "
"True.")
# Create the shell node
out = hou.node("/out")
node = out.createNode("shell", node_name="REMOTE_PUBLISH")
node.moveToGoodPosition()
# Set color make it stand out (avalon/pyblish color)
node.setColor(hou.Color(0.439, 0.709, 0.933))
# Set the pre-render script
node.setParms({
"prerender": cmd,
"lprerender": "python" # command language
})
# Lock the attributes to ensure artists won't easily mess things up.
node.parm("prerender").lock(True)
node.parm("lprerender").lock(True)
# Lock up the actual shell command
command_parm = node.parm("command")
command_parm.set("")
command_parm.lock(True)
shellexec_parm = node.parm("shellexec")
shellexec_parm.set(False)
shellexec_parm.lock(True)
# Get the node's parm template group so we can customize it
template = node.parmTemplateGroup()
# Hide default tabs
template.hideFolder("Shell", True)
template.hideFolder("Scripts", True)
# Hide default settings
template.hide("execute", True)
template.hide("renderdialog", True)
template.hide("trange", True)
template.hide("f", True)
template.hide("take", True)
# Add custom settings to this node.
parm_folder = hou.FolderParmTemplate("folder", "Submission Settings")
# Separate Jobs per Instance
parm = hou.ToggleParmTemplate(name="separateJobPerInstance",
label="Separate Job per Instance",
default_value=False)
parm_folder.addParmTemplate(parm)
# Add our custom Submission Settings folder
template.append(parm_folder)
# Apply template back to the node
node.setParmTemplateGroup(template)
def render_rop(ropnode):
"""Render ROP node utility for Publishing.
@ -1038,17 +955,25 @@ def add_self_publish_button(node):
node.setParmTemplateGroup(template)
def get_scene_viewer():
def get_scene_viewer(visible_only=True):
"""
Return an instance of a visible viewport.
There may be many, some could be closed, any visible are current
Arguments:
visible_only (Optional[bool]): Only return viewers that currently
are the active tab (and hence are visible).
Returns:
Optional[hou.SceneViewer]: A scene viewer, if any.
"""
panes = hou.ui.paneTabs()
panes = [x for x in panes if x.type() == hou.paneTabType.SceneViewer]
if visible_only:
return next((pane for pane in panes if pane.isCurrentTab()), None)
panes = sorted(panes, key=lambda x: x.isCurrentTab())
if panes:
return panes[-1]
@ -1067,12 +992,10 @@ def sceneview_snapshot(
So, it's capable of generating snapshots image sequence.
It works in different Houdini context e.g. Objects, Solaris
Example:
This is how the function can be used::
from ayon_houdini.api import lib
sceneview = hou.ui.paneTabOfType(hou.paneTabType.SceneViewer)
lib.sceneview_snapshot(sceneview)
Example::
>>> from ayon_houdini.api import lib
>>> sceneview = hou.ui.paneTabOfType(hou.paneTabType.SceneViewer)
>>> lib.sceneview_snapshot(sceneview)
Notes:
.png output will render poorly, so use .jpg.


@ -6,7 +6,7 @@ import logging
import hou # noqa
from ayon_core.host import HostBase, IWorkfileHost, ILoadHost, IPublishHost
from ayon_core.tools.utils import host_tools
import pyblish.api
from ayon_core.pipeline import (
@ -23,6 +23,7 @@ from ayon_houdini.api import lib, shelves, creator_node_shelves
from ayon_core.lib import (
register_event_callback,
emit_event,
env_value_to_bool,
)
@ -85,10 +86,9 @@ class HoudiniHost(HostBase, IWorkfileHost, ILoadHost, IPublishHost):
# initialization during start up delays Houdini UI by minutes
# making it extremely slow to launch.
hdefereval.executeDeferred(shelves.generate_shelves)
if not IS_HEADLESS:
import hdefereval # noqa, hdefereval is only available in ui mode
hdefereval.executeDeferred(creator_node_shelves.install)
if env_value_to_bool("AYON_WORKFILE_TOOL_ON_START"):
hdefereval.executeDeferred(lambda: host_tools.show_workfiles(parent=hou.qt.mainWindow()))
def workfile_has_unsaved_changes(self):
return hou.hipFile.hasUnsavedChanges()
@ -221,12 +221,8 @@ def containerise(name,
"""
# Ensure AVALON_CONTAINERS subnet exists
subnet = hou.node(AVALON_CONTAINERS)
if subnet is None:
obj_network = hou.node("/obj")
subnet = obj_network.createNode("subnet",
node_name="AVALON_CONTAINERS")
# Get AVALON_CONTAINERS subnet
subnet = get_or_create_avalon_container()
# Create proper container name
container_name = "{}_{}".format(name, suffix or "CON")
@ -401,6 +397,18 @@ def on_new():
_enforce_start_frame()
def get_or_create_avalon_container() -> "hou.OpNode":
avalon_container = hou.node(AVALON_CONTAINERS)
if avalon_container:
return avalon_container
parent_path, name = AVALON_CONTAINERS.rsplit("/", 1)
parent = hou.node(parent_path)
return parent.createNode(
"subnet", node_name=name
)
def _set_context_settings():
"""Apply the project settings from the project definition


@ -10,8 +10,7 @@ import hou
import pyblish.api
from ayon_core.pipeline import (
CreatorError,
LegacyCreator,
Creator as NewCreator,
Creator,
CreatedInstance,
AYON_INSTANCE_ID,
AVALON_INSTANCE_ID,
@ -26,80 +25,6 @@ from .lib import imprint, read, lsattr, add_self_publish_button
SETTINGS_CATEGORY = "houdini"
class Creator(LegacyCreator):
"""Creator plugin to create instances in Houdini
To support the wide range of node types for render output (Alembic, VDB,
Mantra) the Creator needs a node type to create the correct instance
By default, if none is given, is `geometry`. An example of accepted node
types: geometry, alembic, ifd (mantra)
Please check the Houdini documentation for more node types.
Tip: to find the exact node type to create press the `i` left of the node
when hovering over a node. The information is visible under the name of
the node.
Deprecated:
This creator is deprecated and will be removed in future version.
"""
defaults = ['Main']
def __init__(self, *args, **kwargs):
super(Creator, self).__init__(*args, **kwargs)
self.nodes = []
def process(self):
"""This is the base functionality to create instances in Houdini
The selected nodes are stored in self to be used in an override method.
This is currently necessary in order to support the multiple output
types in Houdini which can only be rendered through their own node.
Default node type if none is given is `geometry`
It also makes it easier to apply custom settings per instance type
Example of override method for Alembic:
def process(self):
instance = super(CreateEpicNode, self, process()
# Set parameters for Alembic node
instance.setParms(
{"sop_path": "$HIP/%s.abc" % self.nodes[0]}
)
Returns:
hou.Node
"""
try:
if (self.options or {}).get("useSelection"):
self.nodes = hou.selectedNodes()
# Get the node type and remove it from the data, not needed
node_type = self.data.pop("node_type", None)
if node_type is None:
node_type = "geometry"
# Get out node
out = hou.node("/out")
instance = out.createNode(node_type, node_name=self.name)
instance.moveToGoodPosition()
imprint(instance, self.data)
self._process(instance)
except hou.Error as er:
six.reraise(
CreatorError,
CreatorError("Creator error: {}".format(er)),
sys.exc_info()[2])
class HoudiniCreatorBase(object):
@staticmethod
def cache_instance_data(shared_data):
@ -148,7 +73,11 @@ class HoudiniCreatorBase(object):
@staticmethod
def create_instance_node(
folder_path, node_name, parent, node_type="geometry"
folder_path,
node_name,
parent,
node_type="geometry",
pre_create_data=None
):
"""Create node representing instance.
@ -157,6 +86,7 @@ class HoudiniCreatorBase(object):
node_name (str): Name of the new node.
parent (str): Name of the parent node.
node_type (str, optional): Type of the node.
pre_create_data (Optional[Dict]): Pre create data.
Returns:
hou.Node: Newly created instance node.
@ -170,7 +100,7 @@ class HoudiniCreatorBase(object):
@six.add_metaclass(ABCMeta)
class HoudiniCreator(NewCreator, HoudiniCreatorBase):
class HoudiniCreator(Creator, HoudiniCreatorBase):
"""Base class for most of the Houdini creator plugins."""
selected_nodes = []
settings_name = None
@ -193,7 +123,12 @@ class HoudiniCreator(NewCreator, HoudiniCreatorBase):
folder_path = instance_data["folderPath"]
instance_node = self.create_instance_node(
folder_path, product_name, "/out", node_type)
folder_path,
product_name,
"/out",
node_type,
pre_create_data
)
self.customize_node_look(instance_node)


@ -1,13 +1,19 @@
# -*- coding: utf-8 -*-
"""Creator plugin for creating publishable Houdini Digital Assets."""
import ayon_api
import hou
from assettools import setToolSubmenu
import ayon_api
from ayon_core.pipeline import (
CreatorError,
get_current_project_name
)
from ayon_core.lib import (
get_ayon_username,
BoolDef
)
from ayon_houdini.api import plugin
import hou
class CreateHDA(plugin.HoudiniCreator):
@ -37,19 +43,38 @@ class CreateHDA(plugin.HoudiniCreator):
return product_name.lower() in existing_product_names_low
def create_instance_node(
self, folder_path, node_name, parent, node_type="geometry"
self,
folder_path,
node_name,
parent,
node_type="geometry",
pre_create_data=None
):
if pre_create_data is None:
pre_create_data = {}
parent_node = hou.node("/obj")
if self.selected_nodes:
# if we have `use selection` enabled, and we have some
# selected nodes ...
subnet = parent_node.collapseIntoSubnet(
self.selected_nodes,
subnet_name="{}_subnet".format(node_name))
subnet.moveToGoodPosition()
to_hda = subnet
if self.selected_nodes[0].type().name() == "subnet":
to_hda = self.selected_nodes[0]
to_hda.setName("{}_subnet".format(node_name), unique_name=True)
else:
parent_node = self.selected_nodes[0].parent()
subnet = parent_node.collapseIntoSubnet(
self.selected_nodes,
subnet_name="{}_subnet".format(node_name))
subnet.moveToGoodPosition()
to_hda = subnet
else:
# Use Obj as the default path
parent_node = hou.node("/obj")
# Find and return the NetworkEditor pane tab with the minimum index
pane = hou.ui.paneTabOfType(hou.paneTabType.NetworkEditor)
if isinstance(pane, hou.NetworkEditor):
# Use the NetworkEditor pane path as the parent path.
parent_node = pane.pwd()
to_hda = parent_node.createNode(
"subnet", node_name="{}_subnet".format(node_name))
if not to_hda.type().definition():
@ -71,7 +96,8 @@ class CreateHDA(plugin.HoudiniCreator):
hda_node = to_hda.createDigitalAsset(
name=type_name,
description=node_name,
hda_file_name="$HIP/{}.hda".format(node_name)
hda_file_name="$HIP/{}.hda".format(node_name),
ignore_external_references=True
)
hda_node.layoutChildren()
elif self._check_existing(folder_path, node_name):
@ -81,21 +107,92 @@ class CreateHDA(plugin.HoudiniCreator):
else:
hda_node = to_hda
hda_node.setName(node_name)
# If user tries to create the same HDA instance more than
# once, then all of them will have the same product name and
# point to the same hda_file_name. But, their node names will
# be incremented.
hda_node.setName(node_name, unique_name=True)
self.customize_node_look(hda_node)
# Set Custom settings.
hda_def = hda_node.type().definition()
if pre_create_data.get("set_user"):
hda_def.setUserInfo(get_ayon_username())
if pre_create_data.get("use_project"):
setToolSubmenu(hda_def, "AYON/{}".format(self.project_name))
return hda_node
def create(self, product_name, instance_data, pre_create_data):
instance_data.pop("active", None)
instance = super(CreateHDA, self).create(
return super(CreateHDA, self).create(
product_name,
instance_data,
pre_create_data)
return instance
def get_network_categories(self):
# Houdini allows creating sub-network nodes inside
# these categories.
# Therefore this plugin can work in these categories.
return [
hou.objNodeTypeCategory()
hou.chopNodeTypeCategory(),
hou.cop2NodeTypeCategory(),
hou.dopNodeTypeCategory(),
hou.ropNodeTypeCategory(),
hou.lopNodeTypeCategory(),
hou.objNodeTypeCategory(),
hou.sopNodeTypeCategory(),
hou.topNodeTypeCategory(),
hou.vopNodeTypeCategory()
]
def get_pre_create_attr_defs(self):
attrs = super(CreateHDA, self).get_pre_create_attr_defs()
return attrs + [
BoolDef("set_user",
tooltip="Set current user as the author of the HDA",
default=False,
label="Set Current User"),
BoolDef("use_project",
tooltip="Use project name as tab submenu path.\n"
"The location in TAB Menu will be\n"
"'AYON/project_name/your_HDA_name'",
default=True,
label="Use Project as menu entry"),
]
def get_dynamic_data(
self,
project_name,
folder_entity,
task_entity,
variant,
host_name,
instance
):
"""
Pass product name from product name templates as dynamic data.
"""
dynamic_data = super(CreateHDA, self).get_dynamic_data(
project_name,
folder_entity,
task_entity,
variant,
host_name,
instance
)
dynamic_data.update(
{
"asset": folder_entity["name"],
"folder": {
"label": folder_entity["label"],
"name": folder_entity["name"]
}
}
)
return dynamic_data


@ -1,55 +0,0 @@
# -*- coding: utf-8 -*-
"""Creator plugin for creating pointcache alembics."""
from ayon_houdini.api import plugin
from ayon_core.lib import BoolDef
class CreateMantraIFD(plugin.HoudiniCreator):
"""Mantra .ifd Archive"""
identifier = "io.openpype.creators.houdini.mantraifd"
label = "Mantra IFD"
product_type = "mantraifd"
icon = "gears"
def create(self, product_name, instance_data, pre_create_data):
import hou
instance_data.pop("active", None)
instance_data.update({"node_type": "ifd"})
creator_attributes = instance_data.setdefault(
"creator_attributes", dict())
creator_attributes["farm"] = pre_create_data["farm"]
instance = super(CreateMantraIFD, self).create(
product_name,
instance_data,
pre_create_data)
instance_node = hou.node(instance.get("instance_node"))
filepath = "{}{}".format(
hou.text.expandString("$HIP/pyblish/"),
"{}.$F4.ifd".format(product_name))
parms = {
# Render frame range
"trange": 1,
# Arnold ROP settings
"soho_diskfile": filepath,
"soho_outputmode": 1
}
instance_node.setParms(parms)
# Lock any parameters in this list
to_lock = ["soho_outputmode", "productType", "id"]
self.lock_parameters(instance_node, to_lock)
def get_instance_attr_defs(self):
return [
BoolDef("farm",
label="Submitting to Farm",
default=False)
]
def get_pre_create_attr_defs(self):
attrs = super().get_pre_create_attr_defs()
# Use same attributes as for instance attributes
return attrs + self.get_instance_attr_defs()


@ -1,8 +1,13 @@
# -*- coding: utf-8 -*-
import os
from ayon_core.pipeline import get_representation_path
import hou
from ayon_core.pipeline import (
get_representation_path,
AVALON_CONTAINER_ID
)
from ayon_core.pipeline.load import LoadError
from ayon_houdini.api import (
lib,
pipeline,
plugin
)
@ -19,42 +24,43 @@ class HdaLoader(plugin.HoudiniLoader):
color = "orange"
def load(self, context, name=None, namespace=None, data=None):
import hou
# Format file name, Houdini only wants forward slashes
file_path = self.filepath_from_context(context)
file_path = os.path.normpath(file_path)
file_path = file_path.replace("\\", "/")
# Get the root node
obj = hou.node("/obj")
namespace = namespace or context["folder"]["name"]
node_name = "{}_{}".format(namespace, name) if namespace else name
hou.hda.installFile(file_path)
# Get the type name from the HDA definition.
hda_defs = hou.hda.definitionsInFile(file_path)
if not hda_defs:
raise LoadError(f"No HDA definitions found in file: {file_path}")
type_name = hda_defs[0].nodeTypeName()
hda_node = obj.createNode(type_name, node_name)
parent_node = self._create_dedicated_parent_node(hda_defs[-1])
self[:] = [hda_node]
# Get the type name from the HDA definition.
type_name = hda_defs[-1].nodeTypeName()
hda_node = parent_node.createNode(type_name, node_name)
hda_node.moveToGoodPosition()
return pipeline.containerise(
node_name,
namespace,
[hda_node],
context,
self.__class__.__name__,
suffix="",
)
# Imprint it manually
data = {
"schema": "openpype:container-2.0",
"id": AVALON_CONTAINER_ID,
"name": node_name,
"namespace": namespace,
"loader": self.__class__.__name__,
"representation": context["representation"]["id"],
}
lib.imprint(hda_node, data)
return hda_node
def update(self, container, context):
import hou
repre_entity = context["representation"]
hda_node = container["node"]
@ -71,4 +77,45 @@ class HdaLoader(plugin.HoudiniLoader):
def remove(self, container):
node = container["node"]
parent = node.parent()
node.destroy()
if parent.path() == pipeline.AVALON_CONTAINERS:
return
# Remove parent if empty.
if not parent.children():
parent.destroy()
def _create_dedicated_parent_node(self, hda_def):
# Get the root node
parent_node = pipeline.get_or_create_avalon_container()
node = None
node_type = None
if hda_def.nodeTypeCategory() == hou.objNodeTypeCategory():
return parent_node
elif hda_def.nodeTypeCategory() == hou.chopNodeTypeCategory():
node_type, node_name = "chopnet", "MOTION"
elif hda_def.nodeTypeCategory() == hou.cop2NodeTypeCategory():
node_type, node_name = "cop2net", "IMAGES"
elif hda_def.nodeTypeCategory() == hou.dopNodeTypeCategory():
node_type, node_name = "dopnet", "DOPS"
elif hda_def.nodeTypeCategory() == hou.ropNodeTypeCategory():
node_type, node_name = "ropnet", "ROPS"
elif hda_def.nodeTypeCategory() == hou.lopNodeTypeCategory():
node_type, node_name = "lopnet", "LOPS"
elif hda_def.nodeTypeCategory() == hou.sopNodeTypeCategory():
node_type, node_name = "geo", "SOPS"
elif hda_def.nodeTypeCategory() == hou.topNodeTypeCategory():
node_type, node_name = "topnet", "TOPS"
# TODO: Create a dedicated parent node based on Vop Node vex context.
elif hda_def.nodeTypeCategory() == hou.vopNodeTypeCategory():
node_type, node_name = "matnet", "MATSandVOPS"
node = parent_node.node(node_name)
if not node:
node = parent_node.createNode(node_type, node_name)
node.moveToGoodPosition()
return node


@ -12,9 +12,7 @@ class CollectDataforCache(plugin.HoudiniInstancePlugin):
# Run after Collect Frames
order = pyblish.api.CollectorOrder + 0.11
families = ["ass", "pointcache",
"mantraifd", "redshiftproxy",
"vdbcache", "model"]
families = ["ass", "pointcache", "redshiftproxy", "vdbcache", "model"]
targets = ["local", "remote"]
label = "Collect Data for Cache"


@ -9,9 +9,7 @@ class CollectChunkSize(plugin.HoudiniInstancePlugin,
"""Collect chunk size for cache submission to Deadline."""
order = pyblish.api.CollectorOrder + 0.05
families = ["ass", "pointcache",
"vdbcache", "mantraifd",
"redshiftproxy", "model"]
families = ["ass", "pointcache", "vdbcache", "redshiftproxy", "model"]
targets = ["local", "remote"]
label = "Collect Chunk Size"
chunk_size = 999999


@ -15,9 +15,8 @@ class CollectFrames(plugin.HoudiniInstancePlugin):
# this plugin runs after CollectRopFrameRange
order = pyblish.api.CollectorOrder + 0.1
label = "Collect Frames"
families = ["vdbcache", "imagesequence", "ass",
"mantraifd", "redshiftproxy", "review",
"pointcache"]
families = ["camera", "vdbcache", "imagesequence", "ass",
"redshiftproxy", "review", "pointcache", "fbx"]
def process(self, instance):
@ -60,7 +59,10 @@ class CollectFrames(plugin.HoudiniInstancePlugin):
# todo: `frames` currently conflicts with "explicit frames" for a
# for a custom frame list. So this should be refactored.
instance.data.update({"frames": result})
instance.data.update({
"frames": result,
"stagingDir": os.path.dirname(output)
})
@staticmethod
def create_file_list(match, start_frame, end_frame):


@ -1,29 +0,0 @@
import hou
import pyblish.api
from ayon_core.pipeline.publish import RepairAction
from ayon_houdini.api import lib, plugin
class CollectRemotePublishSettings(plugin.HoudiniContextPlugin):
"""Collect custom settings of the Remote Publish node."""
order = pyblish.api.CollectorOrder
families = ["*"]
targets = ["deadline"]
label = "Remote Publish Submission Settings"
actions = [RepairAction]
def process(self, context):
node = hou.node("/out/REMOTE_PUBLISH")
if not node:
return
attributes = lib.read(node)
# Debug the settings we have collected
for key, value in sorted(attributes.items()):
self.log.debug("Collected %s: %s" % (key, value))
context.data.update(attributes)


@ -1,51 +0,0 @@
import os
import hou
import pyblish.api
from ayon_houdini.api import plugin
from ayon_houdini.api.lib import render_rop
class ExtractAlembic(plugin.HoudiniExtractorPlugin):
order = pyblish.api.ExtractorOrder
label = "Extract Alembic"
families = ["abc", "camera"]
targets = ["local", "remote"]
def process(self, instance):
if instance.data.get("farm"):
self.log.debug("Should be processed on farm, skipping.")
return
ropnode = hou.node(instance.data["instance_node"])
# Get the filename from the filename parameter
output = ropnode.evalParm("filename")
staging_dir = os.path.dirname(output)
instance.data["stagingDir"] = staging_dir
if instance.data.get("frames"):
# list of files
files = instance.data["frames"]
else:
# single file
files = os.path.basename(output)
# We run the render
self.log.info("Writing alembic '%s' to '%s'" % (files,
staging_dir))
render_rop(ropnode)
if "representations" not in instance.data:
instance.data["representations"] = []
representation = {
'name': 'abc',
'ext': 'abc',
'files': files,
"stagingDir": staging_dir,
}
instance.data["representations"].append(representation)


@ -1,63 +0,0 @@
import os
import hou
import pyblish.api
from ayon_houdini.api import plugin
from ayon_houdini.api.lib import render_rop
class ExtractAss(plugin.HoudiniExtractorPlugin):
order = pyblish.api.ExtractorOrder + 0.1
label = "Extract Ass"
families = ["ass"]
targets = ["local", "remote"]
def process(self, instance):
if instance.data.get("farm"):
self.log.debug("Should be processed on farm, skipping.")
return
ropnode = hou.node(instance.data["instance_node"])
# Get the filename from the filename parameter
# `.evalParm(parameter)` will make sure all tokens are resolved
output = ropnode.evalParm("ar_ass_file")
staging_dir = os.path.dirname(output)
instance.data["stagingDir"] = staging_dir
file_name = os.path.basename(output)
# We run the render
self.log.info("Writing ASS '%s' to '%s'" % (file_name, staging_dir))
render_rop(ropnode)
# Unfortunately user interrupting the extraction does not raise an
# error and thus still continues to the integrator. To capture that
# we make sure all files exist
files = instance.data["frames"]
missing = []
for file_name in files:
full_path = os.path.normpath(os.path.join(staging_dir, file_name))
if not os.path.exists(full_path):
missing.append(full_path)
if missing:
raise RuntimeError("Failed to complete Arnold ass extraction. "
"Missing output files: {}".format(missing))
if "representations" not in instance.data:
instance.data["representations"] = []
# Allow ass.gz extension as well
ext = "ass.gz" if file_name.endswith(".ass.gz") else "ass"
representation = {
'name': 'ass',
'ext': ext,
"files": files,
"stagingDir": staging_dir,
"frameStart": instance.data["frameStartHandle"],
"frameEnd": instance.data["frameEndHandle"],
}
instance.data["representations"].append(representation)


@ -1,51 +0,0 @@
import os
import hou
import pyblish.api
from ayon_houdini.api import lib, plugin
class ExtractBGEO(plugin.HoudiniExtractorPlugin):
order = pyblish.api.ExtractorOrder
label = "Extract BGEO"
families = ["bgeo"]
def process(self, instance):
if instance.data.get("farm"):
self.log.debug("Should be processed on farm, skipping.")
return
ropnode = hou.node(instance.data["instance_node"])
# Get the filename from the filename parameter
output = ropnode.evalParm("sopoutput")
staging_dir, file_name = os.path.split(output)
instance.data["stagingDir"] = staging_dir
# We run the render
self.log.info("Writing bgeo files '{}' to '{}'.".format(
file_name, staging_dir))
# write files
lib.render_rop(ropnode)
output = instance.data["frames"]
_, ext = lib.splitext(
output[0], allowed_multidot_extensions=[
".ass.gz", ".bgeo.sc", ".bgeo.gz",
".bgeo.lzma", ".bgeo.bz2"])
if "representations" not in instance.data:
instance.data["representations"] = []
representation = {
"name": "bgeo",
"ext": ext.lstrip("."),
"files": output,
"stagingDir": staging_dir,
"frameStart": instance.data["frameStartHandle"],
"frameEnd": instance.data["frameEndHandle"]
}
instance.data["representations"].append(representation)


@ -1,58 +0,0 @@
import os
import hou
import pyblish.api
from ayon_core.pipeline import publish
from ayon_houdini.api import plugin
from ayon_houdini.api.lib import render_rop, splitext
class ExtractComposite(plugin.HoudiniExtractorPlugin,
publish.ColormanagedPyblishPluginMixin):
order = pyblish.api.ExtractorOrder
label = "Extract Composite (Image Sequence)"
families = ["imagesequence"]
def process(self, instance):
ropnode = hou.node(instance.data["instance_node"])
# Get the filename from the copoutput parameter
# `.evalParm(parameter)` will make sure all tokens are resolved
output = ropnode.evalParm("copoutput")
staging_dir = os.path.dirname(output)
instance.data["stagingDir"] = staging_dir
file_name = os.path.basename(output)
self.log.info("Writing comp '%s' to '%s'" % (file_name, staging_dir))
render_rop(ropnode)
output = instance.data["frames"]
_, ext = splitext(output[0], [])
ext = ext.lstrip(".")
if "representations" not in instance.data:
instance.data["representations"] = []
representation = {
"name": ext,
"ext": ext,
"files": output,
"stagingDir": staging_dir,
"frameStart": instance.data["frameStartHandle"],
"frameEnd": instance.data["frameEndHandle"],
}
if ext.lower() == "exr":
# Inject colorspace with 'scene_linear' as that's the
# default Houdini working colorspace and all extracted
# OpenEXR images should be in that colorspace.
# https://www.sidefx.com/docs/houdini/render/linear.html#image-formats
self.set_representation_colorspace(
representation, instance.context,
colorspace="scene_linear"
)
instance.data["representations"].append(representation)


@ -1,51 +0,0 @@
# -*- coding: utf-8 -*-
"""Fbx Extractor for houdini. """
import os
import hou
import pyblish.api
from ayon_houdini.api import plugin
from ayon_houdini.api.lib import render_rop
class ExtractFBX(plugin.HoudiniExtractorPlugin):
label = "Extract FBX"
families = ["fbx"]
order = pyblish.api.ExtractorOrder + 0.1
def process(self, instance):
# get rop node
ropnode = hou.node(instance.data.get("instance_node"))
output_file = ropnode.evalParm("sopoutput")
# get staging_dir and file_name
staging_dir = os.path.normpath(os.path.dirname(output_file))
file_name = os.path.basename(output_file)
# render rop
self.log.debug("Writing FBX '%s' to '%s'", file_name, staging_dir)
render_rop(ropnode)
# prepare representation
representation = {
"name": "fbx",
"ext": "fbx",
"files": file_name,
"stagingDir": staging_dir
}
# A single frame may also be rendered without start/end frame.
if "frameStartHandle" in instance.data and "frameEndHandle" in instance.data: # noqa
representation["frameStart"] = instance.data["frameStartHandle"]
representation["frameEnd"] = instance.data["frameEndHandle"]
# set value type for 'representations' key to list
if "representations" not in instance.data:
instance.data["representations"] = []
# update instance data
instance.data["stagingDir"] = staging_dir
instance.data["representations"].append(representation)


@ -1,49 +0,0 @@
import os
import hou
import pyblish.api
from ayon_houdini.api import plugin
class ExtractMantraIFD(plugin.HoudiniExtractorPlugin):
order = pyblish.api.ExtractorOrder
label = "Extract Mantra ifd"
families = ["mantraifd"]
targets = ["local", "remote"]
def process(self, instance):
if instance.data.get("farm"):
self.log.debug("Should be processed on farm, skipping.")
return
ropnode = hou.node(instance.data.get("instance_node"))
output = ropnode.evalParm("soho_diskfile")
staging_dir = os.path.dirname(output)
instance.data["stagingDir"] = staging_dir
files = instance.data["frames"]
missing_frames = [
frame
for frame in instance.data["frames"]
if not os.path.exists(
os.path.normpath(os.path.join(staging_dir, frame)))
]
if missing_frames:
raise RuntimeError("Failed to complete Mantra ifd extraction. "
"Missing output files: {}".format(
missing_frames))
if "representations" not in instance.data:
instance.data["representations"] = []
representation = {
'name': 'ifd',
'ext': 'ifd',
'files': files,
"stagingDir": staging_dir,
"frameStart": instance.data["frameStart"],
"frameEnd": instance.data["frameEnd"],
}
instance.data["representations"].append(representation)


@ -1,69 +0,0 @@
import os
import hou
import pyblish.api
from ayon_core.pipeline import publish
from ayon_houdini.api import plugin
from ayon_houdini.api.lib import render_rop
class ExtractOpenGL(plugin.HoudiniExtractorPlugin,
publish.ColormanagedPyblishPluginMixin):
order = pyblish.api.ExtractorOrder - 0.01
label = "Extract OpenGL"
families = ["review"]
def process(self, instance):
ropnode = hou.node(instance.data.get("instance_node"))
# This plugin is triggered when marking render as reviewable.
# Therefore, this plugin will run on over wrong instances.
# TODO: Don't run this plugin on wrong instances.
# This plugin should run only on review product type
# with instance node of opengl type.
if ropnode.type().name() != "opengl":
self.log.debug("Skipping OpenGl extraction. Rop node {} "
"is not an OpenGl node.".format(ropnode.path()))
return
output = ropnode.evalParm("picture")
staging_dir = os.path.normpath(os.path.dirname(output))
instance.data["stagingDir"] = staging_dir
file_name = os.path.basename(output)
self.log.info("Extracting '%s' to '%s'" % (file_name,
staging_dir))
render_rop(ropnode)
output = instance.data["frames"]
tags = ["review"]
if not instance.data.get("keepImages"):
tags.append("delete")
representation = {
"name": instance.data["imageFormat"],
"ext": instance.data["imageFormat"],
"files": output,
"stagingDir": staging_dir,
"frameStart": instance.data["frameStartHandle"],
"frameEnd": instance.data["frameEndHandle"],
"tags": tags,
"preview": True,
"camera_name": instance.data.get("review_camera")
}
if ropnode.evalParm("colorcorrect") == 2: # OpenColorIO enabled
colorspace = ropnode.evalParm("ociocolorspace")
# inject colorspace data
self.set_representation_colorspace(
representation, instance.context,
colorspace=colorspace
)
if "representations" not in instance.data:
instance.data["representations"] = []
instance.data["representations"].append(representation)


@ -1,52 +0,0 @@
import os
import hou
import pyblish.api
from ayon_houdini.api import plugin
from ayon_houdini.api.lib import render_rop
class ExtractRedshiftProxy(plugin.HoudiniExtractorPlugin):
order = pyblish.api.ExtractorOrder + 0.1
label = "Extract Redshift Proxy"
families = ["redshiftproxy"]
targets = ["local", "remote"]
def process(self, instance):
if instance.data.get("farm"):
self.log.debug("Should be processed on farm, skipping.")
return
ropnode = hou.node(instance.data.get("instance_node"))
# Get the filename from the filename parameter
# `.evalParm(parameter)` will make sure all tokens are resolved
output = ropnode.evalParm("RS_archive_file")
staging_dir = os.path.normpath(os.path.dirname(output))
instance.data["stagingDir"] = staging_dir
file_name = os.path.basename(output)
self.log.info("Writing Redshift Proxy '%s' to '%s'" % (file_name,
staging_dir))
render_rop(ropnode)
output = instance.data["frames"]
if "representations" not in instance.data:
instance.data["representations"] = []
representation = {
"name": "rs",
"ext": "rs",
"files": output,
"stagingDir": staging_dir,
}
# A single frame may also be rendered without start/end frame.
if "frameStartHandle" in instance.data and "frameEndHandle" in instance.data: # noqa
representation["frameStart"] = instance.data["frameStartHandle"]
representation["frameEnd"] = instance.data["frameEndHandle"]
instance.data["representations"].append(representation)


@ -0,0 +1,150 @@
import os
import hou
import pyblish.api
from ayon_core.pipeline import publish
from ayon_houdini.api import plugin
from ayon_houdini.api.lib import render_rop, splitext
class ExtractROP(plugin.HoudiniExtractorPlugin):
"""Generic Extractor for any ROP node."""
label = "Extract ROP"
order = pyblish.api.ExtractorOrder
families = ["abc", "camera", "bgeo", "pointcache", "fbx",
"vdbcache", "ass", "redshiftproxy", "mantraifd"]
targets = ["local", "remote"]
def process(self, instance: pyblish.api.Instance):
if instance.data.get("farm"):
self.log.debug("Should be processed on farm, skipping.")
return
rop_node = hou.node(instance.data["instance_node"])
files = instance.data["frames"]
first_file = files[0] if isinstance(files, (list, tuple)) else files
_, ext = splitext(
first_file, allowed_multidot_extensions=[
".ass.gz", ".bgeo.sc", ".bgeo.gz",
".bgeo.lzma", ".bgeo.bz2"]
)
ext = ext.lstrip(".")
self.log.debug(f"Rendering {rop_node.path()} to {first_file}..")
render_rop(rop_node)
self.validate_expected_frames(instance)
# In some cases representation name is not the extension
# TODO: Preferably we remove this very specific naming
product_type = instance.data["productType"]
name = {
"bgeo": "bgeo",
"rs": "rs",
"ass": "ass"
}.get(product_type, ext)
representation = {
"name": name,
"ext": ext,
"files": instance.data["frames"],
"stagingDir": instance.data["stagingDir"],
"frameStart": instance.data["frameStartHandle"],
"frameEnd": instance.data["frameEndHandle"],
}
self.update_representation_data(instance, representation)
instance.data.setdefault("representations", []).append(representation)
def validate_expected_frames(self, instance: pyblish.api.Instance):
"""
Validate all expected files in `instance.data["frames"]` exist in
the staging directory.
"""
filenames = instance.data["frames"]
staging_dir = instance.data["stagingDir"]
if isinstance(filenames, str):
# Single frame
filenames = [filenames]
missing_filenames = [
filename for filename in filenames
if not os.path.isfile(os.path.join(staging_dir, filename))
]
if missing_filenames:
raise RuntimeError(f"Missing frames: {missing_filenames}")
def update_representation_data(self,
instance: pyblish.api.Instance,
representation: dict):
"""Allow subclass to override the representation data in-place"""
pass
class ExtractOpenGL(ExtractROP,
publish.ColormanagedPyblishPluginMixin):
order = pyblish.api.ExtractorOrder - 0.01
label = "Extract OpenGL"
families = ["review"]
def process(self, instance):
# This plugin is triggered when marking render as reviewable.
# Therefore, this plugin will run over wrong instances.
# TODO: Don't run this plugin on wrong instances.
# This plugin should run only on review product type
# with instance node of opengl type.
instance_node = instance.data.get("instance_node")
if not instance_node:
self.log.debug("Skipping instance without instance node.")
return
rop_node = hou.node(instance_node)
if rop_node.type().name() != "opengl":
self.log.debug("Skipping OpenGl extraction. Rop node {} "
"is not an OpenGl node.".format(rop_node.path()))
return
super(ExtractOpenGL, self).process(instance)
def update_representation_data(self,
instance: pyblish.api.Instance,
representation: dict):
tags = ["review"]
if not instance.data.get("keepImages"):
tags.append("delete")
representation.update({
# TODO: Avoid this override?
"name": instance.data["imageFormat"],
"ext": instance.data["imageFormat"],
"tags": tags,
"preview": True,
"camera_name": instance.data.get("review_camera")
})
class ExtractComposite(ExtractROP,
publish.ColormanagedPyblishPluginMixin):
label = "Extract Composite (Image Sequence)"
families = ["imagesequence"]
def update_representation_data(self,
instance: pyblish.api.Instance,
representation: dict):
if representation["ext"].lower() != "exr":
return
# Inject colorspace with 'scene_linear' as that's the
# default Houdini working colorspace and all extracted
# OpenEXR images should be in that colorspace.
# https://www.sidefx.com/docs/houdini/render/linear.html#image-formats
self.set_representation_colorspace(
representation, instance.context,
colorspace="scene_linear"
)
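
Following the ExtractOpenGL/ExtractComposite pattern above, a family-specific extractor in the same module now only needs to tweak the representation data. A hypothetical example with a made-up family (not part of this commit):

class ExtractMyCache(ExtractROP):
    label = "Extract My Cache"
    families = ["mycache"]

    def update_representation_data(self, instance, representation):
        # Rendering the ROP, frame validation and staging dir handling are
        # done by the ExtractROP base class; only adjust the data here.
        tags = representation.setdefault("tags", [])
        tags.append("mycache")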


@ -1,46 +0,0 @@
import os
import hou
import pyblish.api
from ayon_houdini.api import plugin
from ayon_houdini.api.lib import render_rop
class ExtractVDBCache(plugin.HoudiniExtractorPlugin):
order = pyblish.api.ExtractorOrder + 0.1
label = "Extract VDB Cache"
families = ["vdbcache"]
def process(self, instance):
if instance.data.get("farm"):
self.log.debug("Should be processed on farm, skipping.")
return
ropnode = hou.node(instance.data["instance_node"])
# Get the filename from the filename parameter
# `.evalParm(parameter)` will make sure all tokens are resolved
sop_output = ropnode.evalParm("sopoutput")
staging_dir = os.path.normpath(os.path.dirname(sop_output))
instance.data["stagingDir"] = staging_dir
file_name = os.path.basename(sop_output)
self.log.info("Writing VDB '%s' to '%s'" % (file_name, staging_dir))
render_rop(ropnode)
output = instance.data["frames"]
if "representations" not in instance.data:
instance.data["representations"] = []
representation = {
"name": "vdb",
"ext": "vdb",
"files": output,
"stagingDir": staging_dir,
"frameStart": instance.data["frameStartHandle"],
"frameEnd": instance.data["frameEndHandle"],
}
instance.data["representations"].append(representation)

View file

@ -1,50 +0,0 @@
# -*-coding: utf-8 -*-
import hou
import pyblish.api
from ayon_core.pipeline.publish import RepairContextAction
from ayon_core.pipeline import PublishValidationError
from ayon_houdini.api import lib, plugin
class ValidateRemotePublishOutNode(plugin.HoudiniContextPlugin):
"""Validate the remote publish out node exists for Deadline to trigger."""
order = pyblish.api.ValidatorOrder - 0.4
families = ["*"]
targets = ["deadline"]
label = "Remote Publish ROP node"
actions = [RepairContextAction]
def process(self, context):
cmd = "import colorbleed.lib; colorbleed.lib.publish_remote()"
node = hou.node("/out/REMOTE_PUBLISH")
if not node:
raise RuntimeError("Missing REMOTE_PUBLISH node.")
# Ensure it's a shell node with the pre-render script set correctly.
# The shell command it would trigger must also be completely empty
# (do nothing).
if node.type().name() != "shell":
self.raise_error("Must be shell ROP node")
if node.parm("command").eval() != "":
self.raise_error("Must have no command")
if node.parm("shellexec").eval():
self.raise_error("Must not execute in shell")
if node.parm("prerender").eval() != cmd:
self.raise_error("REMOTE_PUBLISH node does not have "
"correct prerender script.")
if node.parm("lprerender").eval() != "python":
self.raise_error("REMOTE_PUBLISH node prerender script "
"type not set to 'python'")
@classmethod
def repair(cls, context):
"""(Re)create the node if it fails to pass validation."""
lib.create_remote_publish_node(force=True)
def raise_error(self, message):
raise PublishValidationError(message)
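# Hedged sketch (not part of the changeset): roughly what a REMOTE_PUBLISH
# node that passes the checks above could look like when (re)created. The
# actual behavior of `lib.create_remote_publish_node` is assumed here; only
# the parm names and values come from the validator itself.
def _create_remote_publish_node_sketch():
    cmd = "import colorbleed.lib; colorbleed.lib.publish_remote()"
    out = hou.node("/out")
    node = out.createNode("shell", node_name="REMOTE_PUBLISH")
    node.parm("command").set("")           # must have no command
    node.parm("shellexec").set(False)      # must not execute in shell
    node.parm("prerender").set(cmd)        # pre-render script triggers publish
    node.parm("lprerender").set("python")  # script language must be python
    return node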

View file

@ -1,41 +0,0 @@
# -*- coding: utf-8 -*-
import hou
import pyblish.api
from ayon_core.pipeline.publish import RepairContextAction
from ayon_core.pipeline import PublishValidationError
from ayon_houdini.api import plugin
class ValidateRemotePublishEnabled(plugin.HoudiniContextPlugin):
"""Validate the remote publish node is *not* bypassed."""
order = pyblish.api.ValidatorOrder - 0.39
families = ["*"]
targets = ["deadline"]
label = "Remote Publish ROP enabled"
actions = [RepairContextAction]
def process(self, context):
node = hou.node("/out/REMOTE_PUBLISH")
if not node:
raise PublishValidationError(
"Missing REMOTE_PUBLISH node.", title=self.label)
if node.isBypassed():
raise PublishValidationError(
"REMOTE_PUBLISH must not be bypassed.", title=self.label)
@classmethod
def repair(cls, context):
"""(Re)create the node if it fails to pass validation."""
node = hou.node("/out/REMOTE_PUBLISH")
if not node:
raise PublishValidationError(
"Missing REMOTE_PUBLISH node.", title=cls.label)
cls.log.info("Disabling bypass on /out/REMOTE_PUBLISH")
node.bypass(False)

View file

@ -10,10 +10,9 @@ from ayon_core.pipeline.publish import (
ValidateContentsOrder,
RepairAction,
)
from ayon_core.pipeline.create import get_product_name
from ayon_houdini.api import plugin
from ayon_houdini.api.action import SelectInvalidAction
from ayon_core.pipeline.create import get_product_name
class FixProductNameAction(RepairAction):
@ -26,7 +25,7 @@ class ValidateSubsetName(plugin.HoudiniInstancePlugin,
"""
families = ["staticMesh"]
families = ["staticMesh", "hda"]
label = "Validate Product Name"
order = ValidateContentsOrder + 0.1
actions = [FixProductNameAction, SelectInvalidAction]
@ -67,7 +66,13 @@ class ValidateSubsetName(plugin.HoudiniInstancePlugin,
instance.context.data["hostName"],
instance.data["productType"],
variant=instance.data["variant"],
dynamic_data={"asset": folder_entity["name"]}
dynamic_data={
"asset": folder_entity["name"],
"folder": {
"label": folder_entity["label"],
"name": folder_entity["name"]
}
}
)
if instance.data.get("productName") != product_name:
@ -97,7 +102,13 @@ class ValidateSubsetName(plugin.HoudiniInstancePlugin,
instance.context.data["hostName"],
instance.data["productType"],
variant=instance.data["variant"],
dynamic_data={"asset": folder_entity["name"]}
dynamic_data={
"asset": folder_entity["name"],
"folder": {
"label": folder_entity["label"],
"name": folder_entity["name"]
}
}
)
instance.data["productName"] = product_name

View file

@ -71,6 +71,12 @@ class ValidateWorkfilePaths(
if param.node().type().name() not in cls.node_types:
continue
if param.keyframes():
# Calling `.unexpandedString()` below fails if the param has
# keyframes, so for now we skip those params. These occur
# e.g. in `filecache` nodes.
continue
if any(
v for v in cls.prohibited_vars
if v in param.unexpandedString()):

View file

@ -1,3 +1,3 @@
# -*- coding: utf-8 -*-
"""Package declaring AYON addon 'houdini' version."""
__version__ = "0.3.1"
__version__ = "0.3.7"

View file

@ -1,6 +1,6 @@
name = "houdini"
title = "Houdini"
version = "0.3.1"
version = "0.3.7"
client_dir = "ayon_houdini"

View file

@ -51,9 +51,6 @@ class CreatePluginsModel(BaseSettingsModel):
CreateKarmaROP: CreatorModel = SettingsField(
default_factory=CreatorModel,
title="Create Karma ROP")
CreateMantraIFD: CreatorModel = SettingsField(
default_factory=CreatorModel,
title="Create Mantra IFD")
CreateMantraROP: CreatorModel = SettingsField(
default_factory=CreatorModel,
title="Create Mantra ROP")
@ -119,10 +116,6 @@ DEFAULT_HOUDINI_CREATE_SETTINGS = {
"enabled": True,
"default_variants": ["Main"]
},
"CreateMantraIFD": {
"enabled": True,
"default_variants": ["Main"]
},
"CreateMantraROP": {
"enabled": True,
"default_variants": ["Main"]

View file

@ -12,7 +12,6 @@ from .pipeline import (
MayaHost,
)
from .plugin import (
Creator,
Loader
)
@ -45,7 +44,6 @@ __all__ = [
"containerise",
"MayaHost",
"Creator",
"Loader",
# Workfiles API

View file

@ -1721,7 +1721,7 @@ def is_valid_reference_node(reference_node):
Reference node 'reference_node' is not associated with a reference file.
Note that this does *not* check whether the reference node points to an
existing file. Instead it only returns whether maya considers it valid
existing file. Instead, it only returns whether maya considers it valid
and thus is not an unassociated reference node
Arguments:
@ -1731,9 +1731,18 @@ def is_valid_reference_node(reference_node):
bool: Whether reference node is a valid reference
"""
# Maya 2022 is missing `isValidReference`, so the check needs to be
# done in a different way.
if int(cmds.about(version=True)) < 2023:
try:
cmds.referenceQuery(reference_node, filename=True)
return True
except RuntimeError:
return False
sel = OpenMaya.MSelectionList()
sel.add(reference_node)
depend_node = sel.getDependNode(0)
return OpenMaya.MFnReference(depend_node).isValidReference()
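# Hedged usage sketch (illustration only): filter out unassociated
# reference nodes when iterating references in a scene; `cmds.ls` with
# type="reference" can also return shared or unknown reference nodes,
# which this helper rejects.
valid_references = [
    node for node in cmds.ls(type="reference")
    if is_valid_reference_node(node)
]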

View file

@ -15,10 +15,9 @@ from ayon_core.pipeline import (
Anatomy,
AutoCreator,
CreatedInstance,
Creator as NewCreator,
Creator,
CreatorError,
HiddenCreator,
LegacyCreator,
LoaderPlugin,
get_current_project_name,
get_representation_path,
@ -70,22 +69,6 @@ def get_reference_node_parents(*args, **kwargs):
return lib.get_reference_node_parents(*args, **kwargs)
class Creator(LegacyCreator):
defaults = ['Main']
def process(self):
nodes = list()
with lib.undo_chunk():
if (self.options or {}).get("useSelection"):
nodes = cmds.ls(selection=True)
instance = cmds.sets(nodes, name=self.name)
lib.imprint(instance, self.data)
return instance
@six.add_metaclass(ABCMeta)
class MayaCreatorBase(object):
@ -274,7 +257,7 @@ class MayaCreatorBase(object):
@six.add_metaclass(ABCMeta)
class MayaCreator(NewCreator, MayaCreatorBase):
class MayaCreator(Creator, MayaCreatorBase):
settings_category = "maya"
@ -381,7 +364,7 @@ def ensure_namespace(namespace):
return cmds.namespace(add=namespace)
class RenderlayerCreator(NewCreator, MayaCreatorBase):
class RenderlayerCreator(Creator, MayaCreatorBase):
"""Creator which creates an instance per renderlayer in the workfile.
Creates and manages a renderlayer product per renderLayer in the workfile.

View file

@ -9,11 +9,16 @@ class CreateSetDress(plugin.MayaCreator):
label = "Set Dress"
product_type = "setdress"
icon = "cubes"
exactSetMembersOnly = True
shader = True
default_variants = ["Main", "Anim"]
def get_instance_attr_defs(self):
return [
BoolDef("exactSetMembersOnly",
label="Exact Set Members Only",
default=True)
default=self.exactSetMembersOnly),
BoolDef("shader",
label="Include shader",
default=self.shader)
]
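# Hedged note (illustration only): the new `exactSetMembersOnly` and
# `shader` class attributes are assumed to be filled from project
# settings (see the CreateSetDressModel fields added further below),
# conceptually like:
#
# def apply_settings(self, project_settings):
#     settings = project_settings["maya"]["create"]["CreateSetDress"]
#     self.exactSetMembersOnly = settings["exactSetMembersOnly"]
#     self.shader = settings["shader"]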

View file

@ -0,0 +1,29 @@
import pyblish.api
from ayon_core.pipeline import OptionalPyblishPluginMixin
from ayon_maya.api import plugin
class CollectFbxModel(plugin.MayaInstancePlugin,
OptionalPyblishPluginMixin):
"""Collect Camera for FBX export."""
order = pyblish.api.CollectorOrder + 0.2
label = "Collect Fbx Model"
families = ["model"]
optional = True
def process(self, instance):
if not self.is_active(instance.data):
return
if not instance.data.get("families"):
instance.data["families"] = []
if "fbx" not in instance.data["families"]:
instance.data["families"].append("fbx")
for key in {
"bakeComplexAnimation", "bakeResampleAnimation",
"skins", "constraints", "lights"}:
instance.data[key] = False

View file

@ -1,11 +1,11 @@
# -*- coding: utf-8 -*-
"""Extract data as Maya scene (raw)."""
import os
import contextlib
from ayon_core.lib import BoolDef
from ayon_core.pipeline import AVALON_CONTAINER_ID, AYON_CONTAINER_ID
from ayon_core.pipeline.publish import AYONPyblishPluginMixin
from ayon_maya.api.lib import maintained_selection
from ayon_maya.api.lib import maintained_selection, shader
from ayon_maya.api import plugin
from maya import cmds
@ -88,17 +88,21 @@ class ExtractMayaSceneRaw(plugin.MayaExtractorPlugin, AYONPyblishPluginMixin):
)
with maintained_selection():
cmds.select(selection, noExpand=True)
cmds.file(path,
force=True,
typ="mayaAscii" if self.scene_type == "ma" else "mayaBinary", # noqa: E501
exportSelected=True,
preserveReferences=attribute_values[
"preserve_references"
],
constructionHistory=True,
shader=True,
constraints=True,
expressions=True)
with contextlib.ExitStack() as stack:
if not instance.data.get("shader", True):
# Work around a bug where exporting without shaders may import
# the geometry as 'green' due to the lack of any shader on import.
stack.enter_context(shader(selection, shadingEngine="initialShadingGroup"))
cmds.file(path,
force=True,
typ="mayaAscii" if self.scene_type == "ma" else "mayaBinary",
exportSelected=True,
preserveReferences=attribute_values["preserve_references"],
constructionHistory=True,
shader=instance.data.get("shader", True),
constraints=True,
expressions=True)
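# Hedged sketch (illustration only, simplified): `shader` from
# ayon_maya.api.lib is assumed to be a context manager that temporarily
# forces the nodes into the given shading group and restores the previous
# assignments afterwards, conceptually like:
#
# @contextlib.contextmanager
# def shader(nodes, shadingEngine="initialShadingGroup"):
#     shapes = cmds.ls(nodes, dag=True, shapes=True, long=True)
#     previous = {
#         shape: cmds.listConnections(shape, type="shadingEngine") or []
#         for shape in shapes
#     }
#     try:
#         cmds.sets(shapes, edit=True, forceElement=shadingEngine)
#         yield
#     finally:
#         for shape, engines in previous.items():
#             for engine in engines:
#                 cmds.sets(shape, edit=True, forceElement=engine)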
if "representations" not in instance.data:
instance.data["representations"] = []

View file

@ -1,3 +1,3 @@
# -*- coding: utf-8 -*-
"""Package declaring AYON addon 'maya' version."""
__version__ = "0.2.4"
__version__ = "0.2.8"

View file

@ -1,6 +1,6 @@
name = "maya"
title = "Maya"
version = "0.2.4"
version = "0.2.8"
client_dir = "ayon_maya"
ayon_required_addons = {

View file

@ -124,6 +124,14 @@ class CreateVrayProxyModel(BaseSettingsModel):
default_factory=list, title="Default Products")
class CreateSetDressModel(BaseSettingsModel):
enabled: bool = SettingsField(True)
exactSetMembersOnly: bool = SettingsField(title="Exact Set Members Only")
shader: bool = SettingsField(title="Include shader")
default_variants: list[str] = SettingsField(
default_factory=list, title="Default Products")
class CreateMultishotLayout(BasicCreatorModel):
shotParent: str = SettingsField(title="Shot Parent Folder")
groupLoadedAssets: bool = SettingsField(title="Group Loaded Assets")
@ -217,8 +225,8 @@ class CreatorsModel(BaseSettingsModel):
default_factory=BasicCreatorModel,
title="Create Rig"
)
CreateSetDress: BasicCreatorModel = SettingsField(
default_factory=BasicCreatorModel,
CreateSetDress: CreateSetDressModel = SettingsField(
default_factory=CreateSetDressModel,
title="Create Set Dress"
)
CreateVrayProxy: CreateVrayProxyModel = SettingsField(
@ -396,6 +404,8 @@ DEFAULT_CREATORS_SETTINGS = {
},
"CreateSetDress": {
"enabled": True,
"exactSetMembersOnly": True,
"shader": True,
"default_variants": [
"Main",
"Anim"

View file

@ -625,6 +625,10 @@ class PublishersModel(BaseSettingsModel):
default_factory=CollectFbxCameraModel,
title="Collect Camera for FBX export",
)
CollectFbxModel: BasicValidateModel = SettingsField(
default_factory=BasicValidateModel,
title="Collect Model for FBX export",
)
CollectGLTF: CollectGLTFModel = SettingsField(
default_factory=CollectGLTFModel,
title="Collect Assets for GLB/GLTF export"
@ -1047,6 +1051,11 @@ DEFAULT_PUBLISH_SETTINGS = {
"CollectFbxCamera": {
"enabled": False
},
"CollectFbxModel": {
"enabled": False,
"optional": True,
"active": True
},
"CollectGLTF": {
"enabled": False
},