Mirror of https://github.com/ynput/ayon-core.git (synced 2025-12-24 21:04:40 +01:00)

Merge branch 'develop' into enhancement/add-maya2025-support

Commit 3f28ed1624 - 65 changed files with 1152 additions and 500 deletions
@@ -12,7 +12,7 @@ from ayon_core.pipeline.publish import (
 import ayon_core.hosts.blender.api.action


-class ValidateMeshNoNegativeScale(pyblish.api.Validator,
+class ValidateMeshNoNegativeScale(pyblish.api.InstancePlugin,
                                   OptionalPyblishPluginMixin):
     """Ensure that meshes don't have a negative scale."""
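Many hunks in this commit make the same move: validators drop the legacy `pyblish.api.Validator` base class in favour of `pyblish.api.InstancePlugin` with an explicit order. A minimal sketch of the resulting shape, with `ValidateSomething` and `is_invalid` as hypothetical placeholders:

```python
import pyblish.api
from ayon_core.pipeline.publish import (
    OptionalPyblishPluginMixin,
    PublishValidationError,
)


class ValidateSomething(pyblish.api.InstancePlugin,
                        OptionalPyblishPluginMixin):
    """Hypothetical validator using the plain InstancePlugin base class."""

    # The Validator base class implied this order; InstancePlugin needs it
    # set explicitly.
    order = pyblish.api.ValidatorOrder
    families = ["model"]
    optional = True

    def process(self, instance):
        # OptionalPyblishPluginMixin provides the enable/disable toggle.
        if not self.is_active(instance.data):
            return
        invalid = [node for node in instance if self.is_invalid(node)]
        if invalid:
            raise PublishValidationError(
                "Invalid nodes found: {}".format(invalid))

    @staticmethod
    def is_invalid(node):
        # Placeholder check; real validators inspect host scene state here.
        return False
```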
@@ -3,11 +3,11 @@ import sys
 from pprint import pformat


-class CollectCelactionCliKwargs(pyblish.api.Collector):
+class CollectCelactionCliKwargs(pyblish.api.ContextPlugin):
     """ Collects all keyword arguments passed from the terminal """

     label = "Collect Celaction Cli Kwargs"
-    order = pyblish.api.Collector.order - 0.1
+    order = pyblish.api.CollectorOrder - 0.1

     def process(self, context):
         args = list(sys.argv[1:])
@@ -5,6 +5,8 @@ import contextlib

 from ayon_core.lib import Logger
+from ayon_core.pipeline import registered_host
+from ayon_core.pipeline.create import CreateContext
 from ayon_core.pipeline.context_tools import get_current_project_folder

 self = sys.modules[__name__]
@@ -52,9 +54,15 @@ def update_frame_range(start, end, comp=None, set_render_range=True,
     comp.SetAttrs(attrs)


-def set_current_context_framerange():
+def set_current_context_framerange(folder_entity=None):
     """Set Comp's frame range based on current folder."""
-    folder_entity = get_current_project_folder()
+    if folder_entity is None:
+        folder_entity = get_current_project_folder(
+            fields={"attrib.frameStart",
+                    "attrib.frameEnd",
+                    "attrib.handleStart",
+                    "attrib.handleEnd"})
+
     folder_attributes = folder_entity["attrib"]
     start = folder_attributes["frameStart"]
     end = folder_attributes["frameEnd"]
@@ -65,9 +73,24 @@ def set_current_context_framerange():
                        handle_end=handle_end)


-def set_current_context_resolution():
+def set_current_context_fps(folder_entity=None):
+    """Set Comp's frame rate (FPS) to based on current asset"""
+    if folder_entity is None:
+        folder_entity = get_current_project_folder(fields={"attrib.fps"})
+
+    fps = float(folder_entity["attrib"].get("fps", 24.0))
+    comp = get_current_comp()
+    comp.SetPrefs({
+        "Comp.FrameFormat.Rate": fps,
+    })
+
+
+def set_current_context_resolution(folder_entity=None):
     """Set Comp's resolution width x height default based on current folder"""
-    folder_entity = get_current_project_folder()
+    if folder_entity is None:
+        folder_entity = get_current_project_folder(
+            fields={"attrib.resolutionWidth", "attrib.resolutionHeight"})
+
     folder_attributes = folder_entity["attrib"]
     width = folder_attributes["resolutionWidth"]
     height = folder_attributes["resolutionHeight"]
@@ -285,3 +308,98 @@ def comp_lock_and_undo_chunk(
     finally:
         comp.Unlock()
         comp.EndUndo(keep_undo)
+
+
+def update_content_on_context_change():
+    """Update all Creator instances to current asset"""
+    host = registered_host()
+    context = host.get_current_context()
+
+    folder_path = context["folder_path"]
+    task = context["task_name"]
+
+    create_context = CreateContext(host, reset=True)
+
+    for instance in create_context.instances:
+        instance_folder_path = instance.get("folderPath")
+        if instance_folder_path and instance_folder_path != folder_path:
+            instance["folderPath"] = folder_path
+        instance_task = instance.get("task")
+        if instance_task and instance_task != task:
+            instance["task"] = task
+
+    create_context.save_changes()
+
+
+def prompt_reset_context():
+    """Prompt the user what context settings to reset.
+
+    This prompt is used on saving to a different task to allow the scene to
+    get matched to the new context.
+    """
+    # TODO: Cleanup this prototyped mess of imports and odd dialog
+    from ayon_core.tools.attribute_defs.dialog import (
+        AttributeDefinitionsDialog
+    )
+    from ayon_core.style import load_stylesheet
+    from ayon_core.lib import BoolDef, UILabelDef
+    from qtpy import QtWidgets, QtCore
+
+    definitions = [
+        UILabelDef(
+            label=(
+                "You are saving your workfile into a different folder or task."
+                "\n\n"
+                "Would you like to update some settings to the new context?\n"
+            )
+        ),
+        BoolDef(
+            "fps",
+            label="FPS",
+            tooltip="Reset Comp FPS",
+            default=True
+        ),
+        BoolDef(
+            "frame_range",
+            label="Frame Range",
+            tooltip="Reset Comp start and end frame ranges",
+            default=True
+        ),
+        BoolDef(
+            "resolution",
+            label="Comp Resolution",
+            tooltip="Reset Comp resolution",
+            default=True
+        ),
+        BoolDef(
+            "instances",
+            label="Publish instances",
+            tooltip="Update all publish instance's folder and task to match "
+                    "the new folder and task",
+            default=True
+        ),
+    ]
+
+    dialog = AttributeDefinitionsDialog(definitions)
+    dialog.setWindowFlags(
+        dialog.windowFlags() | QtCore.Qt.WindowStaysOnTopHint
+    )
+    dialog.setWindowTitle("Saving to different context.")
+    dialog.setStyleSheet(load_stylesheet())
+    if not dialog.exec_():
+        return None
+
+    options = dialog.get_values()
+    folder_entity = get_current_project_folder()
+    if options["frame_range"]:
+        set_current_context_framerange(folder_entity)
+
+    if options["fps"]:
+        set_current_context_fps(folder_entity)
+
+    if options["resolution"]:
+        set_current_context_resolution(folder_entity)
+
+    if options["instances"]:
+        update_content_on_context_change()
+
+    dialog.deleteLater()
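The Fusion helpers above now take an optional pre-fetched folder entity so one server query can feed all of them; a usage sketch under that assumption (the import path of the helpers is illustrative, the field names come from the hunks above):

```python
from ayon_core.pipeline.context_tools import get_current_project_folder

# Illustrative import path; the helpers live in the Fusion host's lib module.
from ayon_core.hosts.fusion.api.lib import (
    set_current_context_framerange,
    set_current_context_fps,
    set_current_context_resolution,
)

# One query for every attribute the three setters read.
folder_entity = get_current_project_folder(fields={
    "attrib.frameStart", "attrib.frameEnd",
    "attrib.handleStart", "attrib.handleEnd",
    "attrib.fps",
    "attrib.resolutionWidth", "attrib.resolutionHeight",
})

set_current_context_framerange(folder_entity)
set_current_context_fps(folder_entity)
set_current_context_resolution(folder_entity)
```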
@@ -5,6 +5,7 @@ import os
 import sys
 import logging
 import contextlib
+from pathlib import Path

 import pyblish.api
 from qtpy import QtCore
@@ -28,7 +29,8 @@ from ayon_core.tools.utils import host_tools

 from .lib import (
     get_current_comp,
-    validate_comp_prefs
+    validate_comp_prefs,
+    prompt_reset_context
 )

 log = Logger.get_logger(__name__)
@@ -40,6 +42,9 @@ LOAD_PATH = os.path.join(PLUGINS_DIR, "load")
 CREATE_PATH = os.path.join(PLUGINS_DIR, "create")
 INVENTORY_PATH = os.path.join(PLUGINS_DIR, "inventory")

+# Track whether the workfile tool is about to save
+_about_to_save = False
+

 class FusionLogHandler(logging.Handler):
     # Keep a reference to fusion's Print function (Remote Object)
@@ -103,8 +108,10 @@ class FusionHost(HostBase, IWorkfileHost, ILoadHost, IPublishHost):

         # Register events
         register_event_callback("open", on_after_open)
+        register_event_callback("workfile.save.before", before_workfile_save)
         register_event_callback("save", on_save)
         register_event_callback("new", on_new)
+        register_event_callback("taskChanged", on_task_changed)

     # region workfile io api
     def has_unsaved_changes(self):
@@ -168,6 +175,19 @@ def on_save(event):
     comp = event["sender"]
     validate_comp_prefs(comp)

+    # We are now starting the actual save directly
+    global _about_to_save
+    _about_to_save = False
+
+
+def on_task_changed():
+    global _about_to_save
+    print(f"Task changed: {_about_to_save}")
+    # TODO: Only do this if not headless
+    if _about_to_save:
+        # Let's prompt the user to update the context settings or not
+        prompt_reset_context()
+

 def on_after_open(event):
     comp = event["sender"]
@@ -201,6 +221,28 @@ def on_after_open(event):
         dialog.setStyleSheet(load_stylesheet())


+def before_workfile_save(event):
+    # Due to Fusion's external python process design we can't really
+    # detect whether the current Fusion environment matches the one the artists
+    # expects it to be. For example, our pipeline python process might
+    # have been shut down, and restarted - which will restart it to the
+    # environment Fusion started with; not necessarily where the artist
+    # is currently working.
+    # The `_about_to_save` var is used to detect context changes when
+    # saving into another asset. If we keep it False it will be ignored
+    # as context change. As such, before we change tasks we will only
+    # consider it the current filepath is within the currently known
+    # AVALON_WORKDIR. This way we avoid false positives of thinking it's
+    # saving to another context and instead sometimes just have false negatives
+    # where we fail to show the "Update on task change" prompt.
+    comp = get_current_comp()
+    filepath = comp.GetAttrs()["COMPS_FileName"]
+    workdir = os.environ.get("AYON_WORKDIR")
+    if Path(workdir) in Path(filepath).parents:
+        global _about_to_save
+        _about_to_save = True
+
+
 def ls():
     """List containers from active Fusion scene
@@ -337,7 +379,6 @@ class FusionEventHandler(QtCore.QObject):
         >>> handler = FusionEventHandler(parent=window)
         >>> handler.start()

-
     """
     ACTION_IDS = [
         "Comp_Save",
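The `_about_to_save` module flag wired to the `workfile.save.before`, `save`, and `taskChanged` events is a small latch, and the same pattern is mirrored in the Houdini and Maya hosts below. A self-contained sketch of the idea (function and event names mirror the hunks; the prompt body is a stand-in):

```python
# Minimal sketch of the "about to save" latch used by the host integrations.
_about_to_save = False


def before_workfile_save(event):
    # Arm the latch only when the workfile tool initiates the save.
    global _about_to_save
    _about_to_save = True


def on_task_changed():
    # A task change while the latch is armed means the user is saving into a
    # different context, so the host may prompt to update context settings.
    global _about_to_save
    if _about_to_save:
        prompt_reset_context()


def on_save(event):
    # The save went through; disarm so later task changes do not prompt.
    global _about_to_save
    _about_to_save = False


def prompt_reset_context():
    # Stand-in for the host-specific dialog shown in the hunks above.
    print("Would prompt to reset FPS / frame range / resolution here.")
```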
@@ -91,7 +91,7 @@ def create_interactive(creator_identifier, **kwargs):
     pane = stateutils.activePane(kwargs)
     if isinstance(pane, hou.NetworkEditor):
         pwd = pane.pwd()
-        project_name = context.get_current_project_name(),
+        project_name = context.get_current_project_name()
         folder_path = context.get_current_folder_path()
         task_name = context.get_current_task_name()
         folder_entity = ayon_api.get_folder_by_path(
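The Houdini fix above is a single removed character: a trailing comma that silently wrapped the project name in a tuple. A quick illustration:

```python
name = "my_project",   # trailing comma: name is the tuple ("my_project",)
name = "my_project"    # fixed: name is the plain string
```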
@@ -39,7 +39,7 @@ CREATE_PATH = os.path.join(PLUGINS_DIR, "create")
 INVENTORY_PATH = os.path.join(PLUGINS_DIR, "inventory")

 # Track whether the workfile tool is about to save
-ABOUT_TO_SAVE = False
+_about_to_save = False


 class HoudiniHost(HostBase, IWorkfileHost, ILoadHost, IPublishHost):
@@ -292,8 +292,8 @@ def ls():


 def before_workfile_save(event):
-    global ABOUT_TO_SAVE
-    ABOUT_TO_SAVE = True
+    global _about_to_save
+    _about_to_save = True


 def before_save():
@@ -307,18 +307,14 @@ def on_save():
     # update houdini vars
     lib.update_houdini_vars_context_dialog()

-    nodes = lib.get_id_required_nodes()
-    for node, new_id in lib.generate_ids(nodes):
-        lib.set_id(node, new_id, overwrite=False)
-
     # We are now starting the actual save directly
-    global ABOUT_TO_SAVE
-    ABOUT_TO_SAVE = False
+    global _about_to_save
+    _about_to_save = False


 def on_task_changed():
-    global ABOUT_TO_SAVE
-    if not IS_HEADLESS and ABOUT_TO_SAVE:
+    global _about_to_save
+    if not IS_HEADLESS and _about_to_save:
         # Let's prompt the user to update the context settings or not
         lib.prompt_reset_context()
@@ -4,7 +4,10 @@ from __future__ import absolute_import
 import pyblish.api
 import ayon_api

-from ayon_core.pipeline.publish import get_errored_instances_from_context
+from ayon_core.pipeline.publish import (
+    get_errored_instances_from_context,
+    get_errored_plugins_from_context
+)


 class GenerateUUIDsOnInvalidAction(pyblish.api.Action):
@@ -112,20 +115,25 @@ class SelectInvalidAction(pyblish.api.Action):
         except ImportError:
             raise ImportError("Current host is not Maya")

-        errored_instances = get_errored_instances_from_context(context,
-                                                               plugin=plugin)
-
         # Get the invalid nodes for the plug-ins
         self.log.info("Finding invalid nodes..")
         invalid = list()
-        for instance in errored_instances:
-            invalid_nodes = plugin.get_invalid(instance)
-            if invalid_nodes:
-                if isinstance(invalid_nodes, (list, tuple)):
-                    invalid.extend(invalid_nodes)
-                else:
-                    self.log.warning("Plug-in returned to be invalid, "
-                                     "but has no selectable nodes.")
+        if issubclass(plugin, pyblish.api.ContextPlugin):
+            errored_plugins = get_errored_plugins_from_context(context)
+            if plugin in errored_plugins:
+                invalid = plugin.get_invalid(context)
+        else:
+            errored_instances = get_errored_instances_from_context(
+                context, plugin=plugin
+            )
+            for instance in errored_instances:
+                invalid_nodes = plugin.get_invalid(instance)
+                if invalid_nodes:
+                    if isinstance(invalid_nodes, (list, tuple)):
+                        invalid.extend(invalid_nodes)
+                    else:
+                        self.log.warning("Plug-in returned to be invalid, "
+                                         "but has no selectable nodes.")

         # Ensure unique (process each node only once)
         invalid = list(set(invalid))
@@ -113,7 +113,9 @@ def override_toolbox_ui():
         annotation="Look Manager",
         label="Look Manager",
         image=os.path.join(icons, "lookmanager.png"),
-        command=show_look_assigner,
+        command=lambda: show_look_assigner(
+            parent=parent_widget
+        ),
         width=icon_size,
         height=icon_size,
         parent=parent
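Passing `command=lambda: show_look_assigner(parent=parent_widget)` instead of the bare function lets the toolbox button supply the parent widget at call time; `functools.partial` achieves the same thing. A small illustration with hypothetical names:

```python
import functools


def show_tool(parent=None):
    print("showing tool under parent:", parent)


parent_widget = object()  # stand-in for the real Qt widget

# Both forms defer the call and pre-bind the keyword argument.
command = lambda: show_tool(parent=parent_widget)
command_partial = functools.partial(show_tool, parent=parent_widget)

command()
command_partial()
```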
@@ -1876,18 +1876,9 @@ def list_looks(project_name, folder_id):
         list[dict[str, Any]]: List of look products.

     """
-    # # get all products with look leading in
-    # the name associated with the asset
-    # TODO this should probably look for product type 'look' instead of
-    #     checking product name that can not start with product type
-    product_entities = ayon_api.get_products(
-        project_name, folder_ids=[folder_id]
-    )
-    return [
-        product_entity
-        for product_entity in product_entities
-        if product_entity["name"].startswith("look")
-    ]
+    return list(ayon_api.get_products(
+        project_name, folder_ids=[folder_id], product_types={"look"}
+    ))


 def assign_look_by_version(nodes, version_id):
@@ -1906,12 +1897,15 @@ def assign_look_by_version(nodes, version_id):
     project_name = get_current_project_name()

     # Get representations of shader file and relationships
-    look_representation = ayon_api.get_representation_by_name(
-        project_name, "ma", version_id
-    )
-    json_representation = ayon_api.get_representation_by_name(
-        project_name, "json", version_id
-    )
+    representations = list(ayon_api.get_representations(
+        project_name=project_name,
+        representation_names={"ma", "json"},
+        version_ids=[version_id]
+    ))
+    look_representation = next(
+        repre for repre in representations if repre["name"] == "ma")
+    json_representation = next(
+        repre for repre in representations if repre["name"] == "json")

     # See if representation is already loaded, if so reuse it.
     host = registered_host()
@@ -1948,7 +1942,7 @@ def assign_look_by_version(nodes, version_id):
     apply_shaders(relationships, shader_nodes, nodes)


-def assign_look(nodes, product_name="lookDefault"):
+def assign_look(nodes, product_name="lookMain"):
     """Assigns a look to a node.

     Optimizes the nodes by grouping by folder id and finding
@@ -1981,14 +1975,10 @@ def assign_look(nodes, product_name="lookDefault"):
         product_entity["id"]
         for product_entity in product_entities_by_folder_id.values()
     }
-    last_version_entities = ayon_api.get_last_versions(
+    last_version_entities_by_product_id = ayon_api.get_last_versions(
         project_name,
         product_ids
     )
-    last_version_entities_by_product_id = {
-        last_version_entity["productId"]: last_version_entity
-        for last_version_entity in last_version_entities
-    }

     for folder_id, asset_nodes in grouped.items():
         product_entity = product_entities_by_folder_id.get(folder_id)
@@ -2651,31 +2641,114 @@ def reset_scene_resolution():
     set_scene_resolution(width, height, pixelAspect)


-def set_context_settings():
+def set_context_settings(
+    fps=True,
+    resolution=True,
+    frame_range=True,
+    colorspace=True
+):
     """Apply the project settings from the project definition

-    Settings can be overwritten by an folder if the folder.attrib contains
+    Settings can be overwritten by an asset if the asset.data contains
     any information regarding those settings.

-    Examples of settings:
-        fps
-        resolution
-        renderer
+    Args:
+        fps (bool): Whether to set the scene FPS.
+        resolution (bool): Whether to set the render resolution.
+        frame_range (bool): Whether to reset the time slide frame ranges.
+        colorspace (bool): Whether to reset the colorspace.

     Returns:
         None

     """
-    # Set project fps
-    set_scene_fps(get_fps_for_current_context())
+    if fps:
+        # Set project fps
+        set_scene_fps(get_fps_for_current_context())

-    reset_scene_resolution()
+    if resolution:
+        reset_scene_resolution()

     # Set frame range.
-    reset_frame_range()
+    if frame_range:
+        reset_frame_range(fps=False)

     # Set colorspace
-    set_colorspace()
+    if colorspace:
+        set_colorspace()
+
+
+def prompt_reset_context():
+    """Prompt the user what context settings to reset.
+
+    This prompt is used on saving to a different task to allow the scene to
+    get matched to the new context.
+    """
+    # TODO: Cleanup this prototyped mess of imports and odd dialog
+    from ayon_core.tools.attribute_defs.dialog import (
+        AttributeDefinitionsDialog
+    )
+    from ayon_core.style import load_stylesheet
+    from ayon_core.lib import BoolDef, UILabelDef
+
+    definitions = [
+        UILabelDef(
+            label=(
+                "You are saving your workfile into a different folder or task."
+                "\n\n"
+                "Would you like to update some settings to the new context?\n"
+            )
+        ),
+        BoolDef(
+            "fps",
+            label="FPS",
+            tooltip="Reset workfile FPS",
+            default=True
+        ),
+        BoolDef(
+            "frame_range",
+            label="Frame Range",
+            tooltip="Reset workfile start and end frame ranges",
+            default=True
+        ),
+        BoolDef(
+            "resolution",
+            label="Resolution",
+            tooltip="Reset workfile resolution",
+            default=True
+        ),
+        BoolDef(
+            "colorspace",
+            label="Colorspace",
+            tooltip="Reset workfile resolution",
+            default=True
+        ),
+        BoolDef(
+            "instances",
+            label="Publish instances",
+            tooltip="Update all publish instance's folder and task to match "
+                    "the new folder and task",
+            default=True
+        ),
+    ]
+
+    dialog = AttributeDefinitionsDialog(definitions)
+    dialog.setWindowTitle("Saving to different context.")
+    dialog.setStyleSheet(load_stylesheet())
+    if not dialog.exec_():
+        return None
+
+    options = dialog.get_values()
+    with suspended_refresh():
+        set_context_settings(
+            fps=options["fps"],
+            resolution=options["resolution"],
+            frame_range=options["frame_range"],
+            colorspace=options["colorspace"]
+        )
+        if options["instances"]:
+            update_content_on_context_change()
+
+    dialog.deleteLater()


 # Valid FPS
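The `assign_look` hunk drops the manual re-keying step because, judging from the change, `ayon_api.get_last_versions` already returns a mapping keyed by product id. A usage sketch under that assumption (project name and id are made up):

```python
import ayon_api

# Assumption based on the hunk above: the return value maps
# product id -> last version entity, so no manual re-keying is needed.
last_version_entities_by_product_id = ayon_api.get_last_versions(
    "my_project", ["0123456789abcdef0123"]
)
for product_id, version_entity in last_version_entities_by_product_id.items():
    print(product_id, version_entity["version"])
```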
@@ -67,6 +67,9 @@ INVENTORY_PATH = os.path.join(PLUGINS_DIR, "inventory")

 AVALON_CONTAINERS = ":AVALON_CONTAINERS"

+# Track whether the workfile tool is about to save
+_about_to_save = False
+

 class MayaHost(HostBase, IWorkfileHost, ILoadHost, IPublishHost):
     name = "maya"
@@ -581,6 +584,10 @@ def on_save():
     for node, new_id in lib.generate_ids(nodes):
         lib.set_id(node, new_id, overwrite=False)

+    # We are now starting the actual save directly
+    global _about_to_save
+    _about_to_save = False
+

 def on_open():
     """On scene open let's assume the containers have changed."""
@@ -650,6 +657,11 @@ def on_task_changed():
     lib.set_context_settings()
     lib.update_content_on_context_change()

+    global _about_to_save
+    if not lib.IS_HEADLESS and _about_to_save:
+        # Let's prompt the user to update the context settings or not
+        lib.prompt_reset_context()
+

 def before_workfile_open():
     if handle_workfile_locks():
@@ -664,6 +676,9 @@ def before_workfile_save(event):
     if workdir_path:
         create_workspace_mel(workdir_path, project_name)

+    global _about_to_save
+    _about_to_save = True
+

 def workfile_save_before_xgen(event):
     """Manage Xgen external files when switching context.
@@ -142,9 +142,21 @@ class ImagePlaneLoader(load.LoaderPlugin):
         with namespaced(namespace):
             # Create inside the namespace
             image_plane_transform, image_plane_shape = cmds.imagePlane(
-                fileName=context["representation"]["data"]["path"],
+                fileName=self.filepath_from_context(context),
                 camera=camera
             )

+            # Set colorspace
+            colorspace = self.get_colorspace(context["representation"])
+            if colorspace:
+                cmds.setAttr(
+                    "{}.ignoreColorSpaceFileRules".format(image_plane_shape),
+                    True
+                )
+                cmds.setAttr("{}.colorSpace".format(image_plane_shape),
+                             colorspace, type="string")
+
             # Set offset frame range
             start_frame = cmds.playbackOptions(query=True, min=True)
             end_frame = cmds.playbackOptions(query=True, max=True)
@@ -216,6 +228,15 @@ class ImagePlaneLoader(load.LoaderPlugin):
                      repre_entity["id"],
                      type="string")

+        colorspace = self.get_colorspace(repre_entity)
+        if colorspace:
+            cmds.setAttr(
+                "{}.ignoreColorSpaceFileRules".format(image_plane_shape),
+                True
+            )
+            cmds.setAttr("{}.colorSpace".format(image_plane_shape),
+                         colorspace, type="string")
+
         # Set frame range.
         start_frame = folder_entity["attrib"]["frameStart"]
         end_frame = folder_entity["attrib"]["frameEnd"]
@@ -243,3 +264,12 @@ class ImagePlaneLoader(load.LoaderPlugin):
                          deleteNamespaceContent=True)
         except RuntimeError:
             pass
+
+    def get_colorspace(self, representation):
+
+        data = representation.get("data", {}).get("colorspaceData", {})
+        if not data:
+            return
+
+        colorspace = data.get("colorspace")
+        return colorspace
@@ -1,24 +1,19 @@
 # -*- coding: utf-8 -*-
 """Collect render data.

-This collector will go through render layers in maya and prepare all data
-needed to create instances and their representations for submission and
-publishing on farm.
+This collector will go through renderlayer instances and prepare all data
+needed to detect the expected rendered files for a layer, with resolution,
+frame ranges and collects the data needed for publishing on the farm.

 Requires:
     instance    -> families
     instance    -> setMembers
     instance    -> folderPath

     context     -> currentFile
     context     -> workspaceDir
     context     -> user

 Optional:

 Provides:
     instance    -> label
     instance    -> productName
     instance    -> subset
     instance    -> attachTo
     instance    -> setMembers
     instance    -> publish
@@ -26,6 +21,8 @@ Provides:
     instance    -> frameEnd
     instance    -> byFrameStep
     instance    -> renderer
     instance    -> family
     instance    -> asset
     instance    -> time
     instance    -> author
     instance    -> source
@@ -71,8 +68,6 @@ class CollectMayaRender(pyblish.api.InstancePlugin):

         # TODO: Re-add force enable of workfile instance?
         # TODO: Re-add legacy layer support with LAYER_ prefix but in Creator
-        # TODO: Set and collect active state of RenderLayer in Creator using
-        #   renderlayer.isRenderable()
         context = instance.context

         layer = instance.data["transientData"]["layer"]
@@ -112,7 +107,13 @@ class CollectMayaRender(pyblish.api.InstancePlugin):
         except UnsupportedRendererException as exc:
             raise KnownPublishError(exc)
         render_products = layer_render_products.layer_data.products
-        assert render_products, "no render products generated"
+        if not render_products:
+            self.log.error(
+                "No render products generated for '%s'. You might not have "
+                "any render camera in the renderlayer or render end frame is "
+                "lower than start frame.",
+                instance.name
+            )
         expected_files = []
         multipart = False
         for product in render_products:
@@ -130,16 +131,21 @@ class CollectMayaRender(pyblish.api.InstancePlugin):
             })

         has_cameras = any(product.camera for product in render_products)
-        assert has_cameras, "No render cameras found."
-
-        self.log.debug("multipart: {}".format(
-            multipart))
-        assert expected_files, "no file names were generated, this is a bug"
-        self.log.debug(
-            "expected files: {}".format(
-                json.dumps(expected_files, indent=4, sort_keys=True)
-            )
-        )
+        if render_products and not has_cameras:
+            self.log.error(
+                "No render cameras found for: %s",
+                instance
+            )
+        if not expected_files:
+            self.log.warning(
+                "No file names were generated, this is a bug.")
+
+        for render_product in render_products:
+            self.log.debug(render_product)
+        self.log.debug("multipart: {}".format(multipart))
+        self.log.debug("expected files: {}".format(
+            json.dumps(expected_files, indent=4, sort_keys=True)
+        ))

         # if we want to attach render to product, check if we have AOV's
         # in expectedFiles. If so, raise error as we cannot attach AOV
@@ -151,14 +157,14 @@ class CollectMayaRender(pyblish.api.InstancePlugin):
             )

         # append full path
-        aov_dict = {}
         image_directory = os.path.join(
             cmds.workspace(query=True, rootDirectory=True),
             cmds.workspace(fileRuleEntry="images")
         )
         # replace relative paths with absolute. Render products are
         # returned as list of dictionaries.
-        publish_meta_path = None
+        publish_meta_path = "NOT-SET"
+        aov_dict = {}
         for aov in expected_files:
             full_paths = []
             aov_first_key = list(aov.keys())[0]
@@ -169,14 +175,6 @@ class CollectMayaRender(pyblish.api.InstancePlugin):
                 publish_meta_path = os.path.dirname(full_path)
             aov_dict[aov_first_key] = full_paths
         full_exp_files = [aov_dict]
         self.log.debug(full_exp_files)
-
-        if publish_meta_path is None:
-            raise KnownPublishError("Unable to detect any expected output "
-                                    "images for: {}. Make sure you have a "
-                                    "renderable camera and a valid frame "
-                                    "range set for your renderlayer."
-                                    "".format(instance.name))
-
         frame_start_render = int(self.get_render_attribute(
             "startFrame", layer=layer_name))
@@ -222,7 +220,8 @@ class CollectMayaRender(pyblish.api.InstancePlugin):
             common_publish_meta_path = "/" + common_publish_meta_path

         self.log.debug(
-            "Publish meta path: {}".format(common_publish_meta_path))
+            "Publish meta path: {}".format(common_publish_meta_path)
+        )

         # Get layer specific settings, might be overrides
         colorspace_data = lib.get_color_management_preferences()
@@ -5,7 +5,8 @@ from maya import cmds
 from ayon_core.pipeline import publish


-class ExtractGPUCache(publish.Extractor):
+class ExtractGPUCache(publish.Extractor,
+                      publish.OptionalPyblishPluginMixin):
     """Extract the content of the instance to a GPU cache file."""

     label = "GPU Cache"
@@ -20,6 +21,9 @@ class ExtractGPUCache(publish.Extractor):
     useBaseTessellation = True

     def process(self, instance):
+        if not self.is_active(instance.data):
+            return
+
         cmds.loadPlugin("gpuCache", quiet=True)

         staging_dir = self.staging_dir(instance)
@@ -26,6 +26,10 @@ class ExtractAlembic(publish.Extractor):
     families = ["pointcache", "model", "vrayproxy.alembic"]
     targets = ["local", "remote"]

+    # From settings
+    bake_attributes = []
+    bake_attribute_prefixes = []
+
     def process(self, instance):
         if instance.data.get("farm"):
             self.log.debug("Should be processed on farm, skipping.")
@@ -40,10 +44,12 @@ class ExtractAlembic(publish.Extractor):
         attrs = instance.data.get("attr", "").split(";")
         attrs = [value for value in attrs if value.strip()]
         attrs += instance.data.get("userDefinedAttributes", [])
+        attrs += self.bake_attributes
         attrs += ["cbId"]

         attr_prefixes = instance.data.get("attrPrefix", "").split(";")
         attr_prefixes = [value for value in attr_prefixes if value.strip()]
+        attr_prefixes += self.bake_attribute_prefixes

         self.log.debug("Extracting pointcache..")
         dirname = self.staging_dir(instance)
@@ -0,0 +1,29 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<root>
+<error id="main">
+<title>Shape IDs mismatch original shape</title>
+<description>## Shapes mismatch IDs with original shape
+
+Meshes are detected where the (deformed) mesh has a different `cbId` than
+the same mesh in its deformation history.
+These should normally be the same.
+
+### How to repair?
+
+By using the repair action the IDs from the shape in history will be
+copied to the deformed shape. For **animation** instances using the
+repair action is usually the correct fix.
+
+</description>
+<detail>
+### How does this happen?
+
+When a deformer is applied in the scene on a referenced mesh that had no
+deformers then Maya will create a new shape node for the mesh that
+does not have the original id. Then on scene save new ids get created for the
+meshes lacking a `cbId` and thus the mesh then has a different `cbId` than
+the mesh in the deformation history.
+
+</detail>
+</error>
+</root>
@@ -0,0 +1,33 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<root>
+<error id="main">
+<title>Non-Manifold Edges/Vertices</title>
+<description>## Non-Manifold Edges/Vertices
+
+Meshes found with non-manifold edges or vertices.
+
+### How to repair?
+
+Run select invalid to select the invalid components.
+
+You can also try the _cleanup matching polygons_ action which will perform a
+cleanup like Maya's `Mesh > Cleanup...` modeling tool.
+
+It is recommended to always select the invalid to see where the issue is
+because if you run any repair on it you will need to double check the topology
+is still like you wanted.
+
+</description>
+<detail>
+### What is non-manifold topology?
+
+_Non-manifold topology_ polygons have a configuration that cannot be unfolded
+into a continuous flat piece, for example:
+
+- Three or more faces share an edge
+- Two or more faces share a single vertex but no edge.
+- Adjacent faces have opposite normals
+
+</detail>
+</error>
+</root>
@@ -6,7 +6,7 @@ from ayon_core.hosts.maya.api import lib
 from ayon_core.pipeline.publish import (
     RepairAction,
     ValidateContentsOrder,
-    PublishValidationError,
+    PublishXmlValidationError,
     OptionalPyblishPluginMixin,
     get_plugin_settings,
     apply_plugin_settings_automatically
@@ -56,40 +56,39 @@ class ValidateOutRelatedNodeIds(pyblish.api.InstancePlugin,
         # if a deformer has been created on the shape
         invalid = self.get_invalid(instance)
         if invalid:
-            # TODO: Message formatting can be improved
-            raise PublishValidationError("Nodes found with mismatching "
-                                         "IDs: {0}".format(invalid),
-                                         title="Invalid node ids")
+            # Use the short names
+            invalid = cmds.ls(invalid)
+            invalid.sort()
+
+            # Construct a human-readable list
+            invalid = "\n".join("- {}".format(node) for node in invalid)
+
+            raise PublishXmlValidationError(
+                plugin=self,
+                message=(
+                    "Nodes have different IDs than their input "
+                    "history: \n{0}".format(invalid)
+                )
+            )

     @classmethod
     def get_invalid(cls, instance):
         """Get all nodes which do not match the criteria"""

         invalid = []
-        types_to_skip = ["locator"]
+        types = ["mesh", "nurbsCurve", "nurbsSurface"]

         # get asset id
         nodes = instance.data.get("out_hierarchy", instance[:])
-        for node in nodes:
+        for node in cmds.ls(nodes, type=types, long=True):

             # We only check when the node is *not* referenced
             if cmds.referenceQuery(node, isNodeReferenced=True):
                 continue

-            # Check if node is a shape as deformers only work on shapes
-            obj_type = cmds.objectType(node, isAType="shape")
-            if not obj_type:
-                continue
-
-            # Skip specific types
-            if cmds.objectType(node) in types_to_skip:
-                continue
-
             # Get the current id of the node
             node_id = lib.get_id(node)
             if not node_id:
                 invalid.append(node)
                 continue

             history_id = lib.get_id_from_sibling(node)
             if history_id is not None and node_id != history_id:
@@ -10,7 +10,7 @@ from ayon_core.pipeline.publish import (
 )


-class ValidateColorSets(pyblish.api.Validator,
+class ValidateColorSets(pyblish.api.InstancePlugin,
                         OptionalPyblishPluginMixin):
     """Validate all meshes in the instance have unlocked normals
@@ -47,10 +47,18 @@ class ValidateShadingEngine(pyblish.api.InstancePlugin,
                 shape, destination=True, type="shadingEngine"
             ) or []
             for shading_engine in shading_engines:
-                name = (
-                    cmds.listConnections(shading_engine + ".surfaceShader")[0]
-                    + "SG"
+                materials = cmds.listConnections(
+                    shading_engine + ".surfaceShader",
+                    source=True, destination=False
                 )
+                if not materials:
+                    cls.log.warning(
+                        "Shading engine '{}' has no material connected to its "
+                        ".surfaceShader attribute.".format(shading_engine))
+                    continue
+
+                material = materials[0]  # there should only ever be one input
+                name = material + "SG"
                 if shading_engine != name:
                     invalid.append(shading_engine)
@@ -10,7 +10,7 @@ from ayon_core.pipeline.publish import (
 )


-class ValidateMeshNgons(pyblish.api.Validator,
+class ValidateMeshNgons(pyblish.api.InstancePlugin,
                         OptionalPyblishPluginMixin):
     """Ensure that meshes don't have ngons
@@ -16,7 +16,7 @@ def _as_report_list(values, prefix="- ", suffix="\n"):
     return prefix + (suffix + prefix).join(values)


-class ValidateMeshNoNegativeScale(pyblish.api.Validator,
+class ValidateMeshNoNegativeScale(pyblish.api.InstancePlugin,
                                   OptionalPyblishPluginMixin):
     """Ensure that meshes don't have a negative scale.
@@ -1,14 +1,99 @@
-from maya import cmds
+from maya import cmds, mel

 import pyblish.api
 import ayon_core.hosts.maya.api.action
 from ayon_core.pipeline.publish import (
     ValidateMeshOrder,
-    PublishValidationError,
+    PublishXmlValidationError,
+    RepairAction,
     OptionalPyblishPluginMixin
 )


+def poly_cleanup(version=4,
+                 meshes=None,
+                 # Version 1
+                 all_meshes=False,
+                 select_only=False,
+                 history_on=True,
+                 quads=False,
+                 nsided=False,
+                 concave=False,
+                 holed=False,
+                 nonplanar=False,
+                 zeroGeom=False,
+                 zeroGeomTolerance=1e-05,
+                 zeroEdge=False,
+                 zeroEdgeTolerance=1e-05,
+                 zeroMap=False,
+                 zeroMapTolerance=1e-05,
+                 # Version 2
+                 shared_uvs=False,
+                 non_manifold=False,
+                 # Version 3
+                 lamina=False,
+                 # Version 4
+                 invalid_components=False):
+    """Wrapper around `polyCleanupArgList` mel command"""
+
+    # Get all inputs named as `dict` to easily do conversions and formatting
+    values = locals()
+
+    # Convert booleans to 1 or 0
+    for key in [
+        "all_meshes",
+        "select_only",
+        "history_on",
+        "quads",
+        "nsided",
+        "concave",
+        "holed",
+        "nonplanar",
+        "zeroGeom",
+        "zeroEdge",
+        "zeroMap",
+        "shared_uvs",
+        "non_manifold",
+        "lamina",
+        "invalid_components",
+    ]:
+        values[key] = 1 if values[key] else 0
+
+    cmd = (
+        'polyCleanupArgList {version} {{ '
+        '"{all_meshes}",'          # 0: All selectable meshes
+        '"{select_only}",'         # 1: Only perform a selection
+        '"{history_on}",'          # 2: Keep construction history
+        '"{quads}",'               # 3: Check for quads polys
+        '"{nsided}",'              # 4: Check for n-sides polys
+        '"{concave}",'             # 5: Check for concave polys
+        '"{holed}",'               # 6: Check for holed polys
+        '"{nonplanar}",'           # 7: Check for non-planar polys
+        '"{zeroGeom}",'            # 8: Check for 0 area faces
+        '"{zeroGeomTolerance}",'   # 9: Tolerance for face areas
+        '"{zeroEdge}",'            # 10: Check for 0 length edges
+        '"{zeroEdgeTolerance}",'   # 11: Tolerance for edge length
+        '"{zeroMap}",'             # 12: Check for 0 uv face area
+        '"{zeroMapTolerance}",'    # 13: Tolerance for uv face areas
+        '"{shared_uvs}",'          # 14: Unshare uvs that are shared
+                                   #     across vertices
+        '"{non_manifold}",'        # 15: Check for nonmanifold polys
+        '"{lamina}",'              # 16: Check for lamina polys
+        '"{invalid_components}"'   # 17: Remove invalid components
+        ' }};'.format(**values)
+    )
+
+    mel.eval("source polyCleanupArgList")
+    if not all_meshes and meshes:
+        # Allow to specify meshes to run over by selecting them
+        cmds.select(meshes, replace=True)
+    mel.eval(cmd)
+
+
+class CleanupMatchingPolygons(RepairAction):
+    label = "Cleanup matching polygons"
+
+
 def _as_report_list(values, prefix="- ", suffix="\n"):
     """Return list as bullet point list for a report"""
     if not values:
@@ -16,7 +101,7 @@ def _as_report_list(values, prefix="- ", suffix="\n"):
     return prefix + (suffix + prefix).join(values)


-class ValidateMeshNonManifold(pyblish.api.Validator,
+class ValidateMeshNonManifold(pyblish.api.InstancePlugin,
                               OptionalPyblishPluginMixin):
     """Ensure that meshes don't have non-manifold edges or vertices
@@ -29,7 +114,8 @@ class ValidateMeshNonManifold(pyblish.api.Validator,
     hosts = ['maya']
     families = ['model']
     label = 'Mesh Non-Manifold Edges/Vertices'
-    actions = [ayon_core.hosts.maya.api.action.SelectInvalidAction]
+    actions = [ayon_core.hosts.maya.api.action.SelectInvalidAction,
+               CleanupMatchingPolygons]
     optional = True

     @staticmethod
@@ -39,9 +125,11 @@ class ValidateMeshNonManifold(pyblish.api.Validator,

         invalid = []
         for mesh in meshes:
-            if (cmds.polyInfo(mesh, nonManifoldVertices=True) or
-                    cmds.polyInfo(mesh, nonManifoldEdges=True)):
-                invalid.append(mesh)
+            components = cmds.polyInfo(mesh,
+                                       nonManifoldVertices=True,
+                                       nonManifoldEdges=True)
+            if components:
+                invalid.extend(components)

         return invalid
@@ -49,12 +137,34 @@ class ValidateMeshNonManifold(pyblish.api.Validator,
         """Process all the nodes in the instance 'objectSet'"""
         if not self.is_active(instance.data):
             return

         invalid = self.get_invalid(instance)

         if invalid:
-            raise PublishValidationError(
-                "Meshes found with non-manifold edges/vertices:\n\n{0}".format(
-                    _as_report_list(sorted(invalid))
-                ),
-                title="Non-Manifold Edges/Vertices"
+            # Report only the meshes instead of all component indices
+            invalid_meshes = {
+                component.split(".", 1)[0] for component in invalid
+            }
+            invalid_meshes = _as_report_list(sorted(invalid_meshes))
+
+            raise PublishXmlValidationError(
+                plugin=self,
+                message=(
+                    "Meshes found with non-manifold "
+                    "edges/vertices:\n\n{0}".format(invalid_meshes)
+                )
             )
+
+    @classmethod
+    def repair(cls, instance):
+        invalid_components = cls.get_invalid(instance)
+        if not invalid_components:
+            cls.log.info("No invalid components found to cleanup.")
+            return
+
+        invalid_meshes = {
+            component.split(".", 1)[0] for component in invalid_components
+        }
+        poly_cleanup(meshes=list(invalid_meshes),
+                     select_only=True,
+                     non_manifold=True)
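The reworked `get_invalid` above collects component strings rather than whole meshes, which is what `cmds.polyInfo` returns when both non-manifold flags are passed; the report and the repair then recover the mesh names by splitting off the component suffix. A small sketch of that split (the node name is hypothetical):

```python
from maya import cmds

components = cmds.polyInfo("pCubeShape1",
                           nonManifoldVertices=True,
                           nonManifoldEdges=True) or []
# polyInfo yields strings such as "pCubeShape1.vtx[3]"; keep only the node
# part to report or repair per mesh.
invalid_meshes = {component.split(".", 1)[0] for component in components}
print(sorted(invalid_meshes))
```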
@@ -18,7 +18,7 @@ def _as_report_list(values, prefix="- ", suffix="\n"):
     return prefix + (suffix + prefix).join(values)


-class ValidateMeshNormalsUnlocked(pyblish.api.Validator,
+class ValidateMeshNormalsUnlocked(pyblish.api.InstancePlugin,
                                   OptionalPyblishPluginMixin):
     """Validate all meshes in the instance have unlocked normals
@@ -16,7 +16,7 @@ def _as_report_list(values, prefix="- ", suffix="\n"):
     return prefix + (suffix + prefix).join(values)


-class ValidateNoAnimation(pyblish.api.Validator,
+class ValidateNoAnimation(pyblish.api.InstancePlugin,
                           OptionalPyblishPluginMixin):
     """Ensure no keyframes on nodes in the Instance.
@@ -19,22 +19,17 @@ def _as_report_list(values, prefix="- ", suffix="\n"):

 def has_shape_children(node):
     # Check if any descendants
-    allDescendents = cmds.listRelatives(node,
-                                        allDescendents=True,
-                                        fullPath=True)
-    if not allDescendents:
+    all_descendents = cmds.listRelatives(node,
+                                         allDescendents=True,
+                                         fullPath=True)
+    if not all_descendents:
         return False

     # Check if there are any shapes at all
-    shapes = cmds.ls(allDescendents, shapes=True)
+    shapes = cmds.ls(all_descendents, shapes=True, noIntermediate=True)
     if not shapes:
         return False

-    # Check if all descendent shapes are intermediateObjects;
-    # if so we consider this node a null node and return False.
-    if all(cmds.getAttr('{0}.intermediateObject'.format(x)) for x in shapes):
-        return False
-
     return True
@@ -1,4 +1,5 @@
 import re
+import inspect

 import pyblish.api
 from maya import cmds
@@ -36,7 +37,10 @@ class ValidateRenderSingleCamera(pyblish.api.InstancePlugin,
             return
         invalid = self.get_invalid(instance)
         if invalid:
-            raise PublishValidationError("Invalid cameras for render.")
+            raise PublishValidationError(
+                "Invalid render cameras.",
+                description=self.get_description()
+            )

     @classmethod
     def get_invalid(cls, instance):
@@ -51,17 +55,30 @@ class ValidateRenderSingleCamera(pyblish.api.InstancePlugin,
             RenderSettings.get_image_prefix_attr(renderer)
         )

+        renderlayer = instance.data["renderlayer"]
         if len(cameras) > 1:
             if re.search(cls.R_CAMERA_TOKEN, file_prefix):
                 # if there is <Camera> token in prefix and we have more then
                 # 1 camera, all is ok.
                 return
-            cls.log.error("Multiple renderable cameras found for %s: %s " %
-                          (instance.data["setMembers"], cameras))
-            return [instance.data["setMembers"]] + cameras
+            cls.log.error(
+                "Multiple renderable cameras found for %s: %s ",
+                renderlayer, ", ".join(cameras))
+            return [renderlayer] + cameras

         elif len(cameras) < 1:
-            cls.log.error("No renderable cameras found for %s " %
-                          instance.data["setMembers"])
-            return [instance.data["setMembers"]]
+            cls.log.error("No renderable cameras found for %s ", renderlayer)
+            return [renderlayer]
+
+    def get_description(self):
+        return inspect.cleandoc(
+            """### Render Cameras Invalid
+
+            Your render cameras are misconfigured. You may have no render
+            camera set or have multiple cameras with a render filename
+            prefix that does not include the `<Camera>` token.
+
+            See the logs for more details about the cameras.
+
+            """
+        )
@@ -6,11 +6,12 @@ import ayon_core.hosts.maya.api.action
 from ayon_core.pipeline.publish import (
     RepairAction,
     ValidateMeshOrder,
+    PublishValidationError,
     OptionalPyblishPluginMixin
 )


-class ValidateShapeRenderStats(pyblish.api.Validator,
+class ValidateShapeRenderStats(pyblish.api.InstancePlugin,
                                OptionalPyblishPluginMixin):
     """Ensure all render stats are set to the default values."""
@@ -20,7 +21,6 @@ class ValidateShapeRenderStats(pyblish.api.Validator,
     label = 'Shape Default Render Stats'
     actions = [ayon_core.hosts.maya.api.action.SelectInvalidAction,
                RepairAction]
     optional = True

     defaults = {'castsShadows': 1,
                 'receiveShadows': 1,
@@ -37,14 +37,13 @@ class ValidateShapeRenderStats(pyblish.api.Validator,
         # It seems the "surfaceShape" and those derived from it have
         # `renderStat` attributes.
         shapes = cmds.ls(instance, long=True, type='surfaceShape')
-        invalid = []
+        invalid = set()
         for shape in shapes:
-            _iteritems = getattr(cls.defaults, "iteritems", cls.defaults.items)
-            for attr, default_value in _iteritems():
+            for attr, default_value in cls.defaults.items():
                 if cmds.attributeQuery(attr, node=shape, exists=True):
                     value = cmds.getAttr('{}.{}'.format(shape, attr))
                     if value != default_value:
-                        invalid.append(shape)
+                        invalid.add(shape)

         return invalid
@@ -52,17 +51,36 @@ class ValidateShapeRenderStats(pyblish.api.Validator,
         if not self.is_active(instance.data):
             return
         invalid = self.get_invalid(instance)
+        if not invalid:
+            return

-        if invalid:
-            raise ValueError("Shapes with non-default renderStats "
-                             "found: {0}".format(invalid))
+        defaults_str = "\n".join(
+            "- {}: {}\n".format(key, value)
+            for key, value in self.defaults.items()
+        )
+        description = (
+            "## Shape Default Render Stats\n"
+            "Shapes are detected with non-default render stats.\n\n"
+            "To ensure a model's shapes behave like a shape would by default "
+            "we require the render stats to have not been altered in "
+            "the published models.\n\n"
+            "### How to repair?\n"
+            "You can reset the default values on the shapes by using the "
+            "repair action."
+        )
+
+        raise PublishValidationError(
+            "Shapes with non-default renderStats "
+            "found: {0}".format(", ".join(sorted(invalid))),
+            description=description,
+            detail="The expected default values "
+                   "are:\n\n{}".format(defaults_str)
+        )

     @classmethod
     def repair(cls, instance):
         for shape in cls.get_invalid(instance):
-            _iteritems = getattr(cls.defaults, "iteritems", cls.defaults.items)
-            for attr, default_value in _iteritems():
+
+            for attr, default_value in cls.defaults.items():
                 if cmds.attributeQuery(attr, node=shape, exists=True):
                     plug = '{0}.{1}'.format(shape, attr)
                     value = cmds.getAttr(plug)
@@ -12,7 +12,7 @@ from ayon_core.pipeline.publish import (
 )


-class ValidateShapeZero(pyblish.api.Validator,
+class ValidateShapeZero(pyblish.api.InstancePlugin,
                         OptionalPyblishPluginMixin):
     """Shape components may not have any "tweak" values
@@ -1,5 +1,6 @@
-from maya import cmds
+import inspect

+from maya import cmds
 import pyblish.api

 import ayon_core.hosts.maya.api.action
@@ -10,7 +11,7 @@ from ayon_core.pipeline.publish import (
 )


-class ValidateTransformZero(pyblish.api.Validator,
+class ValidateTransformZero(pyblish.api.InstancePlugin,
                             OptionalPyblishPluginMixin):
     """Transforms can't have any values
@@ -57,7 +58,7 @@ class ValidateTransformZero(pyblish.api.Validator,
             if ('_LOC' in transform) or ('_loc' in transform):
                 continue
             mat = cmds.xform(transform, q=1, matrix=True, objectSpace=True)
-            if not all(abs(x-y) < cls._tolerance
+            if not all(abs(x - y) < cls._tolerance
                        for x, y in zip(cls._identity, mat)):
                 invalid.append(transform)
@@ -69,14 +70,24 @@ class ValidateTransformZero(pyblish.api.Validator,
             return
         invalid = self.get_invalid(instance)
         if invalid:
+
+            names = "<br>".join(
+                " - {}".format(node) for node in invalid
+            )
+
             raise PublishValidationError(
+                title="Transform Zero",
+                description=self.get_description(),
                 message="The model publish allows no transformations. You must"
                         " <b>freeze transformations</b> to continue.<br><br>"
-                        "Nodes found with transform values: "
+                        "Nodes found with transform values:<br>"
                         "{0}".format(names))
+
+    @staticmethod
+    def get_description():
+        return inspect.cleandoc("""### Transform can't have any values
+
+        The model publish allows no transformations.
+
+        You must **freeze transformations** to continue.
+
+        """)
@@ -9,7 +9,7 @@ from ayon_core.pipeline.publish import (
 )


-class ValidateUniqueNames(pyblish.api.Validator,
+class ValidateUniqueNames(pyblish.api.InstancePlugin,
                           OptionalPyblishPluginMixin):
     """transform names should be unique
@@ -10,7 +10,7 @@ from ayon_core.pipeline.publish import (
 )


-class ValidateYetiRigInputShapesInInstance(pyblish.api.Validator,
+class ValidateYetiRigInputShapesInInstance(pyblish.api.InstancePlugin,
                                            OptionalPyblishPluginMixin):
     """Validate if all input nodes are part of the instance's hierarchy"""
@@ -51,7 +51,7 @@ def assign_vrayproxy_shaders(vrayproxy, assignments):
         index += 1


-def vrayproxy_assign_look(vrayproxy, product_name="lookDefault"):
+def vrayproxy_assign_look(vrayproxy, product_name="lookMain"):
     # type: (str, str) -> None
     """Assign look to vray proxy.
@@ -389,7 +389,13 @@ def imprint(node, data, tab=None):

     """
     for knob in create_knobs(data, tab):
-        node.addKnob(knob)
+        # If knob name exists we set the value. Technically there could be
+        # multiple knobs with the same name, but the intent is not to have
+        # duplicated knobs so we do not account for that.
+        if knob.name() in node.knobs().keys():
+            node[knob.name()].setValue(knob.value())
+        else:
+            node.addKnob(knob)


 @deprecated
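With the change above, `imprint` becomes idempotent: re-imprinting a node updates existing knob values instead of stacking duplicate knobs. A usage sketch (the import path of the helper is illustrative):

```python
import nuke

# Illustrative import path for the helper patched above.
from ayon_core.hosts.nuke.api.lib import imprint

node = nuke.createNode("NoOp")
imprint(node, {"folderPath": "/assets/hero", "task": "compositing"})

# A second call now reuses the existing knobs and only updates their values.
imprint(node, {"task": "lighting"})
```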
@@ -2,10 +2,10 @@ import nuke
 import pyblish.api


-class ExtractScriptSave(pyblish.api.Extractor):
+class ExtractScriptSave(pyblish.api.InstancePlugin):
     """Save current Nuke workfile script"""
     label = 'Script Save'
-    order = pyblish.api.Extractor.order - 0.1
+    order = pyblish.api.ExtractorOrder - 0.1
     hosts = ['nuke']

     def process(self, instance):
@@ -18,7 +18,7 @@ class SubstanceLoadProjectMesh(load.LoaderPlugin):
     """Load mesh for project"""

     product_types = {"*"}
-    representations = ["abc", "fbx", "obj", "gltf"]
+    representations = ["abc", "fbx", "obj", "gltf", "usd", "usda", "usdc"]

     label = "Load mesh"
     order = -10
@@ -25,8 +25,9 @@ from ayon_core.hosts.tvpaint.lib import (
 )


-class ExtractSequence(pyblish.api.Extractor):
+class ExtractSequence(pyblish.api.InstancePlugin):
     label = "Extract Sequence"
+    order = pyblish.api.ExtractorOrder
     hosts = ["tvpaint"]
     families = ["review", "render"]
@@ -1,15 +1,5 @@
 # -*- coding: utf-8 -*-
 """Package helping with colorizing and formatting terminal output."""
-# ::
-#   //.  ...   ..      ///.     //.
-#  ///\\\ \\\   \\    ///\\\   ///
-# ///  \\  \\\   \\  ///  \\  /// //
-# \\\  //   \\\  //  \\\  //  \\\//  ./
-#  \\\//     \\\//    \\\//    \\\' //
-#   \\\       \\\      \\\      \\\//
-#    '''       '''      '''      '''
-#  ..---===[[ PyP3 Setup ]]===---...
-#
 import re
 import time
 import threading
@@ -45,7 +45,7 @@ ARRAY_TYPE_REGEX = re.compile(r"^(int|float|string)\[\d+\]$")

 IMAGE_EXTENSIONS = {
     ".ani", ".anim", ".apng", ".art", ".bmp", ".bpg", ".bsave",
-    ".cal", ".cin", ".cpc", ".cpt", ".dds", ".dpx", ".ecw", ".exr",
+    ".cal", ".cin", ".cpc", ".cpt", ".dds", ".dng", ".dpx", ".ecw", ".exr",
     ".fits", ".flic", ".flif", ".fpx", ".gif", ".hdri", ".hevc",
     ".icer", ".icns", ".ico", ".cur", ".ics", ".ilbm", ".jbig", ".jbig2",
     ".jng", ".jpeg", ".jpeg-ls", ".jpeg-hdr", ".2000", ".jpg",
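The set above is typically used for simple extension membership checks, which is why the new `.dng` entry matters; a small sketch of such a check (the set here is abbreviated):

```python
import os

IMAGE_EXTENSIONS = {".exr", ".dng", ".jpg", ".png"}  # abbreviated set


def is_image(path):
    # Extensions are stored lowercase with a leading dot, so normalize
    # before testing membership.
    return os.path.splitext(path)[1].lower() in IMAGE_EXTENSIONS


print(is_image("plate_0010.DNG"))  # True once ".dng" is in the set
```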
@@ -11,19 +11,17 @@ class ClockifyStart(LauncherAction):
     order = 500
     clockify_api = ClockifyAPI()

-    def is_compatible(self, session):
+    def is_compatible(self, selection):
         """Return whether the action is compatible with the session"""
-        if "AYON_TASK_NAME" in session:
-            return True
-        return False
+        return selection.is_task_selected

-    def process(self, session, **kwargs):
+    def process(self, selection, **kwargs):
         self.clockify_api.set_api()
         user_id = self.clockify_api.user_id
         workspace_id = self.clockify_api.workspace_id
-        project_name = session["AYON_PROJECT_NAME"]
-        folder_path = session["AYON_FOLDER_PATH"]
-        task_name = session["AYON_TASK_NAME"]
+        project_name = selection.project_name
+        folder_path = selection.folder_path
+        task_name = selection.task_name
         description = "/".join([folder_path.lstrip("/"), task_name])

         # fetch folder entity
@ -19,15 +19,18 @@ class ClockifySync(LauncherAction):
|
|||
order = 500
|
||||
clockify_api = ClockifyAPI()
|
||||
|
||||
def is_compatible(self, session):
|
||||
def is_compatible(self, selection):
|
||||
"""Check if there's some projects to sync"""
|
||||
if selection.is_project_selected:
|
||||
return True
|
||||
|
||||
try:
|
||||
next(ayon_api.get_projects())
|
||||
return True
|
||||
except StopIteration:
|
||||
return False
|
||||
|
||||
def process(self, session, **kwargs):
|
||||
def process(self, selection, **kwargs):
|
||||
self.clockify_api.set_api()
|
||||
workspace_id = self.clockify_api.workspace_id
|
||||
user_id = self.clockify_api.user_id
|
||||
|
|
@@ -37,10 +40,9 @@ class ClockifySync(LauncherAction):
|
|||
raise ClockifyPermissionsCheckFailed(
|
||||
"Current CLockify user is missing permissions for this action!"
|
||||
)
|
||||
project_name = session.get("AYON_PROJECT_NAME") or ""
|
||||
|
||||
if project_name.strip():
|
||||
projects_to_sync = [ayon_api.get_project(project_name)]
|
||||
if selection.is_project_selected:
|
||||
projects_to_sync = [selection.project_entity]
|
||||
else:
|
||||
projects_to_sync = ayon_api.get_projects()
|
||||
|
||||
|
|
|
|||
|
|
@@ -80,6 +80,8 @@ class AfterEffectsSubmitDeadline(
|
|||
"FTRACK_API_KEY",
|
||||
"FTRACK_API_USER",
|
||||
"FTRACK_SERVER",
|
||||
"AYON_BUNDLE_NAME",
|
||||
"AYON_DEFAULT_SETTINGS_VARIANT",
|
||||
"AYON_PROJECT_NAME",
|
||||
"AYON_FOLDER_PATH",
|
||||
"AYON_TASK_NAME",
|
||||
|
|
|
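The Deadline hunks in this area only extend allow-lists of environment variable names forwarded to the farm. A hedged sketch of the general pattern, assuming a simple copy of the allow-listed variables (the function name and structure below are not the submitter's actual code):

    import os

    ENVIRONMENT_KEYS = [
        "AYON_BUNDLE_NAME",
        "AYON_DEFAULT_SETTINGS_VARIANT",
        "AYON_PROJECT_NAME",
        "AYON_FOLDER_PATH",
        "AYON_TASK_NAME",
    ]


    def collect_job_environment(keys=ENVIRONMENT_KEYS):
        """Copy only allow-listed variables from the current environment."""
        return {key: os.environ[key] for key in keys if key in os.environ}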
|||
|
|
@@ -102,6 +102,8 @@ class BlenderSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline,
|
|||
"FTRACK_API_USER",
|
||||
"FTRACK_SERVER",
|
||||
"OPENPYPE_SG_USER",
|
||||
"AYON_BUNDLE_NAME",
|
||||
"AYON_DEFAULT_SETTINGS_VARIANT",
|
||||
"AYON_PROJECT_NAME",
|
||||
"AYON_FOLDER_PATH",
|
||||
"AYON_TASK_NAME",
|
||||
|
|
|
|||
|
|
@@ -225,6 +225,8 @@ class FusionSubmitDeadline(
|
|||
"FTRACK_API_KEY",
|
||||
"FTRACK_API_USER",
|
||||
"FTRACK_SERVER",
|
||||
"AYON_BUNDLE_NAME",
|
||||
"AYON_DEFAULT_SETTINGS_VARIANT",
|
||||
"AYON_PROJECT_NAME",
|
||||
"AYON_FOLDER_PATH",
|
||||
"AYON_TASK_NAME",
|
||||
|
|
|
|||
|
|
@@ -273,6 +273,8 @@ class HarmonySubmitDeadline(
|
|||
"FTRACK_API_KEY",
|
||||
"FTRACK_API_USER",
|
||||
"FTRACK_SERVER",
|
||||
"AYON_BUNDLE_NAME",
|
||||
"AYON_DEFAULT_SETTINGS_VARIANT",
|
||||
"AYON_PROJECT_NAME",
|
||||
"AYON_FOLDER_PATH",
|
||||
"AYON_TASK_NAME",
|
||||
|
|
|
|||
|
|
@@ -106,12 +106,14 @@ class MaxSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline,
|
|||
"FTRACK_API_USER",
|
||||
"FTRACK_SERVER",
|
||||
"OPENPYPE_SG_USER",
|
||||
"AYON_BUNDLE_NAME",
|
||||
"AYON_DEFAULT_SETTINGS_VARIANT",
|
||||
"AYON_PROJECT_NAME",
|
||||
"AYON_FOLDER_PATH",
|
||||
"AYON_TASK_NAME",
|
||||
"AYON_WORKDIR",
|
||||
"AYON_APP_NAME",
|
||||
"IS_TEST"
|
||||
"IS_TEST",
|
||||
]
|
||||
|
||||
environment = {
|
||||
|
|
|
|||
|
|
@@ -207,6 +207,8 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline,
|
|||
"FTRACK_API_USER",
|
||||
"FTRACK_SERVER",
|
||||
"OPENPYPE_SG_USER",
|
||||
"AYON_BUNDLE_NAME",
|
||||
"AYON_DEFAULT_SETTINGS_VARIANT",
|
||||
"AYON_PROJECT_NAME",
|
||||
"AYON_FOLDER_PATH",
|
||||
"AYON_TASK_NAME",
|
||||
|
|
|
|||
|
|
@@ -376,6 +376,8 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin,
|
|||
keys = [
|
||||
"PYTHONPATH",
|
||||
"PATH",
|
||||
"AYON_BUNDLE_NAME",
|
||||
"AYON_DEFAULT_SETTINGS_VARIANT",
|
||||
"AYON_PROJECT_NAME",
|
||||
"AYON_FOLDER_PATH",
|
||||
"AYON_TASK_NAME",
|
||||
|
|
@@ -388,7 +390,6 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin,
|
|||
"TOOL_ENV",
|
||||
"FOUNDRY_LICENSE",
|
||||
"OPENPYPE_SG_USER",
|
||||
"AYON_BUNDLE_NAME",
|
||||
]
|
||||
|
||||
# add allowed keys from preset if any
|
||||
|
|
|
|||
|
|
@@ -133,6 +133,9 @@ class ProcessSubmittedCacheJobOnFarm(pyblish.api.InstancePlugin,
|
|||
"AYON_RENDER_JOB": "0",
|
||||
"AYON_REMOTE_PUBLISH": "0",
|
||||
"AYON_BUNDLE_NAME": os.environ["AYON_BUNDLE_NAME"],
|
||||
"AYON_DEFAULT_SETTINGS_VARIANT": (
|
||||
os.environ["AYON_DEFAULT_SETTINGS_VARIANT"]
|
||||
),
|
||||
}
|
||||
|
||||
# add environments from self.environ_keys
|
||||
|
|
|
|||
|
|
@@ -210,6 +210,9 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin,
|
|||
"AYON_RENDER_JOB": "0",
|
||||
"AYON_REMOTE_PUBLISH": "0",
|
||||
"AYON_BUNDLE_NAME": os.environ["AYON_BUNDLE_NAME"],
|
||||
"AYON_DEFAULT_SETTINGS_VARIANT": (
|
||||
os.environ["AYON_DEFAULT_SETTINGS_VARIANT"]
|
||||
),
|
||||
}
|
||||
|
||||
# add environments from self.environ_keys
|
||||
|
|
|
|||
|
|
@@ -1,4 +1,8 @@
|
|||
import logging
|
||||
import warnings
|
||||
|
||||
import ayon_api
|
||||
|
||||
from ayon_core.pipeline.plugin_discover import (
|
||||
discover,
|
||||
register_plugin,
|
||||
|
|
@@ -10,6 +14,288 @@ from ayon_core.pipeline.plugin_discover import (
|
|||
from .load.utils import get_representation_path_from_context
|
||||
|
||||
|
||||
class LauncherActionSelection:
|
||||
"""Object helper to pass selection to actions.
|
||||
|
||||
Object support backwards compatibility for 'session' from OpenPype where
|
||||
environment variable keys were used to define selection.
|
||||
|
||||
Args:
|
||||
project_name (str): Selected project name.
|
||||
folder_id (str): Selected folder id.
|
||||
task_id (str): Selected task id.
|
||||
folder_path (Optional[str]): Selected folder path.
|
||||
task_name (Optional[str]): Selected task name.
|
||||
project_entity (Optional[dict[str, Any]]): Project entity.
|
||||
folder_entity (Optional[dict[str, Any]]): Folder entity.
|
||||
task_entity (Optional[dict[str, Any]]): Task entity.
|
||||
|
||||
"""
|
||||
def __init__(
|
||||
self,
|
||||
project_name,
|
||||
folder_id,
|
||||
task_id,
|
||||
folder_path=None,
|
||||
task_name=None,
|
||||
project_entity=None,
|
||||
folder_entity=None,
|
||||
task_entity=None
|
||||
):
|
||||
self._project_name = project_name
|
||||
self._folder_id = folder_id
|
||||
self._task_id = task_id
|
||||
|
||||
self._folder_path = folder_path
|
||||
self._task_name = task_name
|
||||
|
||||
self._project_entity = project_entity
|
||||
self._folder_entity = folder_entity
|
||||
self._task_entity = task_entity
|
||||
|
||||
def __getitem__(self, key):
|
||||
warnings.warn(
|
||||
(
|
||||
"Using deprecated access to selection data. Please use"
|
||||
" attributes and methods"
|
||||
" defined by 'LauncherActionSelection'."
|
||||
),
|
||||
category=DeprecationWarning
|
||||
)
|
||||
if key in {"AYON_PROJECT_NAME", "AVALON_PROJECT"}:
|
||||
return self.project_name
|
||||
if key in {"AYON_FOLDER_PATH", "AVALON_ASSET"}:
|
||||
return self.folder_path
|
||||
if key in {"AYON_TASK_NAME", "AVALON_TASK"}:
|
||||
return self.task_name
|
||||
raise KeyError(f"Key: {key} not found")
|
||||
|
||||
def __iter__(self):
|
||||
for key in self.keys():
|
||||
yield key
|
||||
|
||||
def __contains__(self, key):
|
||||
warnings.warn(
|
||||
(
|
||||
"Using deprecated access to selection data. Please use"
|
||||
" attributes and methods"
|
||||
" defined by 'LauncherActionSelection'."
|
||||
),
|
||||
category=DeprecationWarning
|
||||
)
|
||||
# Fake missing keys check for backwards compatibility
|
||||
if key in {
|
||||
"AYON_PROJECT_NAME",
|
||||
"AVALON_PROJECT",
|
||||
}:
|
||||
return self._project_name is not None
|
||||
if key in {
|
||||
"AYON_FOLDER_PATH",
|
||||
"AVALON_ASSET",
|
||||
}:
|
||||
return self._folder_id is not None
|
||||
if key in {
|
||||
"AYON_TASK_NAME",
|
||||
"AVALON_TASK",
|
||||
}:
|
||||
return self._task_id is not None
|
||||
return False
|
||||
|
||||
def get(self, key, default=None):
|
||||
"""
|
||||
|
||||
Deprecated:
|
||||
Added for backwards compatibility with older actions.
|
||||
|
||||
"""
|
||||
warnings.warn(
|
||||
(
|
||||
"Using deprecated access to selection data. Please use"
|
||||
" attributes and methods"
|
||||
" defined by 'LauncherActionSelection'."
|
||||
),
|
||||
category=DeprecationWarning
|
||||
)
|
||||
try:
|
||||
return self[key]
|
||||
except KeyError:
|
||||
return default
|
||||
|
||||
def items(self):
|
||||
"""
|
||||
|
||||
Deprecated:
|
||||
Added for backwards compatibility with older actions.
|
||||
|
||||
"""
|
||||
for key, value in (
|
||||
("AYON_PROJECT_NAME", self.project_name),
|
||||
("AYON_FOLDER_PATH", self.folder_path),
|
||||
("AYON_TASK_NAME", self.task_name),
|
||||
):
|
||||
if value is not None:
|
||||
yield (key, value)
|
||||
|
||||
def keys(self):
|
||||
"""
|
||||
|
||||
Deprecated:
|
||||
Added for backwards compatibility with older actions.
|
||||
|
||||
"""
|
||||
for key, _ in self.items():
|
||||
yield key
|
||||
|
||||
def values(self):
|
||||
"""
|
||||
|
||||
Deprecated:
|
||||
Added for backwards compatibility with older actions.
|
||||
|
||||
"""
|
||||
for _, value in self.items():
|
||||
yield value
|
||||
|
||||
def get_project_name(self):
|
||||
"""Selected project name.
|
||||
|
||||
Returns:
|
||||
Union[str, None]: Selected project name.
|
||||
|
||||
"""
|
||||
return self._project_name
|
||||
|
||||
def get_folder_id(self):
|
||||
"""Selected folder id.
|
||||
|
||||
Returns:
|
||||
Union[str, None]: Selected folder id.
|
||||
|
||||
"""
|
||||
return self._folder_id
|
||||
|
||||
def get_folder_path(self):
|
||||
"""Selected folder path.
|
||||
|
||||
Returns:
|
||||
Union[str, None]: Selected folder path.
|
||||
|
||||
"""
|
||||
if self._folder_id is None:
|
||||
return None
|
||||
if self._folder_path is None:
|
||||
self._folder_path = self.folder_entity["path"]
|
||||
return self._folder_path
|
||||
|
||||
def get_task_id(self):
|
||||
"""Selected task id.
|
||||
|
||||
Returns:
|
||||
Union[str, None]: Selected task id.
|
||||
|
||||
"""
|
||||
return self._task_id
|
||||
|
||||
def get_task_name(self):
|
||||
"""Selected task name.
|
||||
|
||||
Returns:
|
||||
Union[str, None]: Selected task name.
|
||||
|
||||
"""
|
||||
if self._task_id is None:
|
||||
return None
|
||||
if self._task_name is None:
|
||||
self._task_name = self.task_entity["name"]
|
||||
return self._task_name
|
||||
|
||||
def get_project_entity(self):
|
||||
"""Project entity for the selection.
|
||||
|
||||
Returns:
|
||||
Union[dict[str, Any], None]: Project entity.
|
||||
|
||||
"""
|
||||
if self._project_name is None:
|
||||
return None
|
||||
if self._project_entity is None:
|
||||
self._project_entity = ayon_api.get_project(self._project_name)
|
||||
return self._project_entity
|
||||
|
||||
def get_folder_entity(self):
|
||||
"""Folder entity for the selection.
|
||||
|
||||
Returns:
|
||||
Union[dict[str, Any], None]: Folder entity.
|
||||
|
||||
"""
|
||||
if self._project_name is None or self._folder_id is None:
|
||||
return None
|
||||
if self._folder_entity is None:
|
||||
self._folder_entity = ayon_api.get_folder_by_id(
|
||||
self._project_name, self._folder_id
|
||||
)
|
||||
return self._folder_entity
|
||||
|
||||
def get_task_entity(self):
|
||||
"""Task entity for the selection.
|
||||
|
||||
Returns:
|
||||
Union[dict[str, Any], None]: Task entity.
|
||||
|
||||
"""
|
||||
if (
|
||||
self._project_name is None
|
||||
or self._task_id is None
|
||||
):
|
||||
return None
|
||||
if self._task_entity is None:
|
||||
self._task_entity = ayon_api.get_task_by_id(
|
||||
self._project_name, self._task_id
|
||||
)
|
||||
return self._task_entity
|
||||
|
||||
@property
|
||||
def is_project_selected(self):
|
||||
"""Return whether a project is selected.
|
||||
|
||||
Returns:
|
||||
bool: Whether a project is selected.
|
||||
|
||||
"""
|
||||
return self._project_name is not None
|
||||
|
||||
@property
|
||||
def is_folder_selected(self):
|
||||
"""Return whether a folder is selected.
|
||||
|
||||
Returns:
|
||||
bool: Whether a folder is selected.
|
||||
|
||||
"""
|
||||
return self._folder_id is not None
|
||||
|
||||
@property
|
||||
def is_task_selected(self):
|
||||
"""Return whether a task is selected.
|
||||
|
||||
Returns:
|
||||
bool: Whether a task is selected.
|
||||
|
||||
"""
|
||||
return self._task_id is not None
|
||||
|
||||
project_name = property(get_project_name)
|
||||
folder_id = property(get_folder_id)
|
||||
task_id = property(get_task_id)
|
||||
folder_path = property(get_folder_path)
|
||||
task_name = property(get_task_name)
|
||||
|
||||
project_entity = property(get_project_entity)
|
||||
folder_entity = property(get_folder_entity)
|
||||
task_entity = property(get_task_entity)
|
||||
|
||||
|
||||
class LauncherAction(object):
|
||||
"""A custom action available"""
|
||||
name = None
|
||||
|
|
@@ -21,17 +307,23 @@ class LauncherAction(object):
|
|||
log = logging.getLogger("LauncherAction")
|
||||
log.propagate = True
|
||||
|
||||
def is_compatible(self, session):
|
||||
def is_compatible(self, selection):
|
||||
"""Return whether the class is compatible with the Session.
|
||||
|
||||
Args:
|
||||
session (dict[str, Union[str, None]]): Session data with
|
||||
AYON_PROJECT_NAME, AYON_FOLDER_PATH and AYON_TASK_NAME.
|
||||
"""
|
||||
selection (LauncherActionSelection): Data with selection.
|
||||
|
||||
"""
|
||||
return True
|
||||
|
||||
def process(self, session, **kwargs):
|
||||
def process(self, selection, **kwargs):
|
||||
"""Process the action.
|
||||
|
||||
Args:
|
||||
selection (LauncherActionSelection): Data with selection.
|
||||
**kwargs: Additional arguments.
|
||||
|
||||
"""
|
||||
pass
|
||||
|
||||
|
||||
|
|
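A minimal usage sketch of the new selection object in a custom action. The action class itself is hypothetical; the import path and the attributes used are the ones defined by LauncherActionSelection and LauncherAction above:

    from ayon_core.pipeline.actions import LauncherAction


    class PrintContextAction(LauncherAction):
        """Illustrative action relying on the selection object."""

        name = "print_context"
        label = "Print Context"

        def is_compatible(self, selection):
            # Only show the action when a task is selected.
            return selection.is_task_selected

        def process(self, selection, **kwargs):
            print(
                selection.project_name,
                selection.folder_path,
                selection.task_name,
            )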
|
|||
|
|
@@ -97,8 +97,8 @@ def install_host(host):
|
|||
"""Install `host` into the running Python session.
|
||||
|
||||
Args:
|
||||
host (module): A Python module containing the Avalon
|
||||
avalon host-interface.
|
||||
host (HostBase): A host interface object.
|
||||
|
||||
"""
|
||||
global _is_installed
|
||||
|
||||
|
|
@@ -154,6 +154,13 @@ def install_host(host):
|
|||
|
||||
|
||||
def install_ayon_plugins(project_name=None, host_name=None):
|
||||
"""Install AYON core plugins and make sure the core is initialized.
|
||||
|
||||
Args:
|
||||
project_name (Optional[str]): Name of project to install plugins for.
|
||||
host_name (Optional[str]): Name of host to install plugins for.
|
||||
|
||||
"""
|
||||
# Make sure global AYON connection has set site id and version
|
||||
# - this is necessary if 'install_host' is not called
|
||||
initialize_ayon_connection()
|
||||
|
|
@@ -223,6 +230,12 @@ def install_ayon_plugins(project_name=None, host_name=None):
|
|||
|
||||
|
||||
def install_openpype_plugins(project_name=None, host_name=None):
|
||||
"""Install AYON core plugins and make sure the core is initialized.
|
||||
|
||||
Deprecated:
|
||||
Use `install_ayon_plugins` instead.
|
||||
|
||||
"""
|
||||
install_ayon_plugins(project_name, host_name)
|
||||
|
||||
|
||||
|
|
@@ -281,47 +294,6 @@ def deregister_host():
|
|||
_registered_host["_"] = None
|
||||
|
||||
|
||||
def debug_host():
|
||||
"""A debug host, useful to debugging features that depend on a host"""
|
||||
|
||||
host = types.ModuleType("debugHost")
|
||||
|
||||
def ls():
|
||||
containers = [
|
||||
{
|
||||
"representation": "ee-ft-a-uuid1",
|
||||
"schema": "openpype:container-1.0",
|
||||
"name": "Bruce01",
|
||||
"objectName": "Bruce01_node",
|
||||
"namespace": "_bruce01_",
|
||||
"version": 3,
|
||||
},
|
||||
{
|
||||
"representation": "aa-bc-s-uuid2",
|
||||
"schema": "openpype:container-1.0",
|
||||
"name": "Bruce02",
|
||||
"objectName": "Bruce01_node",
|
||||
"namespace": "_bruce02_",
|
||||
"version": 2,
|
||||
}
|
||||
]
|
||||
|
||||
for container in containers:
|
||||
yield container
|
||||
|
||||
host.__dict__.update({
|
||||
"ls": ls,
|
||||
"open_file": lambda fname: None,
|
||||
"save_file": lambda fname: None,
|
||||
"current_file": lambda: os.path.expanduser("~/temp.txt"),
|
||||
"has_unsaved_changes": lambda: False,
|
||||
"work_root": lambda: os.path.expanduser("~/temp"),
|
||||
"file_extensions": lambda: ["txt"],
|
||||
})
|
||||
|
||||
return host
|
||||
|
||||
|
||||
def get_current_host_name():
|
||||
"""Current host name.
|
||||
|
||||
|
|
@@ -347,7 +319,8 @@ def get_global_context():
|
|||
Use 'get_current_context' to make sure you'll get current host integration
|
||||
context info.
|
||||
|
||||
Example:
|
||||
Example::
|
||||
|
||||
{
|
||||
"project_name": "Commercial",
|
||||
"folder_path": "Bunny",
|
||||
|
|
@@ -515,88 +488,13 @@ def get_current_context_template_data(settings=None):
|
|||
)
|
||||
|
||||
|
||||
def get_workdir_from_session(session=None, template_key=None):
|
||||
"""Template data for template fill from session keys.
|
||||
|
||||
Args:
|
||||
session (Union[Dict[str, str], None]): The Session to use. If not
|
||||
provided use the currently active global Session.
|
||||
template_key (str): Prepared template key from which workdir is
|
||||
calculated.
|
||||
|
||||
Returns:
|
||||
str: Workdir path.
|
||||
"""
|
||||
|
||||
if session is not None:
|
||||
project_name = session["AYON_PROJECT_NAME"]
|
||||
host_name = session["AYON_HOST_NAME"]
|
||||
else:
|
||||
project_name = get_current_project_name()
|
||||
host_name = get_current_host_name()
|
||||
template_data = get_template_data_from_session(session)
|
||||
|
||||
if not template_key:
|
||||
task_type = template_data["task"]["type"]
|
||||
template_key = get_workfile_template_key(
|
||||
project_name,
|
||||
task_type,
|
||||
host_name,
|
||||
)
|
||||
|
||||
anatomy = Anatomy(project_name)
|
||||
template_obj = anatomy.get_template_item("work", template_key, "directory")
|
||||
path = template_obj.format_strict(template_data)
|
||||
if path:
|
||||
path = os.path.normpath(path)
|
||||
return path
|
||||
|
||||
|
||||
def get_custom_workfile_template_from_session(
|
||||
session=None, project_settings=None
|
||||
):
|
||||
"""Filter and fill workfile template profiles by current context.
|
||||
|
||||
This function cab be used only inside host where context is set.
|
||||
|
||||
Args:
|
||||
session (Optional[Dict[str, str]]): Session from which are taken
|
||||
data.
|
||||
project_settings(Optional[Dict[str, Any]]): Project settings.
|
||||
|
||||
Returns:
|
||||
str: Path to template or None if none of profiles match current
|
||||
context. (Existence of formatted path is not validated.)
|
||||
"""
|
||||
|
||||
if session is not None:
|
||||
project_name = session["AYON_PROJECT_NAME"]
|
||||
folder_path = session["AYON_FOLDER_PATH"]
|
||||
task_name = session["AYON_TASK_NAME"]
|
||||
host_name = session["AYON_HOST_NAME"]
|
||||
else:
|
||||
context = get_current_context()
|
||||
project_name = context["project_name"]
|
||||
folder_path = context["folder_path"]
|
||||
task_name = context["task_name"]
|
||||
host_name = get_current_host_name()
|
||||
|
||||
return get_custom_workfile_template_by_string_context(
|
||||
project_name,
|
||||
folder_path,
|
||||
task_name,
|
||||
host_name,
|
||||
project_settings=project_settings
|
||||
)
|
||||
|
||||
|
||||
def get_current_context_custom_workfile_template(project_settings=None):
|
||||
"""Filter and fill workfile template profiles by current context.
|
||||
|
||||
This function can be used only inside host where context is set.
|
||||
This function can be used only inside host where current context is set.
|
||||
|
||||
Args:
|
||||
project_settings(Optional[Dict[str, Any]]): Project settings.
|
||||
project_settings (Optional[dict[str, Any]]): Project settings
|
||||
|
||||
Returns:
|
||||
str: Path to template or None if none of profiles match current
|
||||
|
|
|
|||
|
|
@@ -8,7 +8,7 @@ Discovers Creator plugins to be able create new instances and convert existing i
|
|||
|
||||
Publish plugins are loaded because they can also define attributes definitions. These are less product type specific To be able define attributes Publish plugin must inherit from `AYONPyblishPluginMixin` and must override `get_attribute_defs` class method which must return list of attribute definitions. Values of publish plugin definitions are stored per plugin name under `publish_attributes`. Also can override `convert_attribute_values` class method which gives ability to modify values on instance before are used in CreatedInstance. Method `convert_attribute_values` can be also used without `get_attribute_defs` to modify values when changing compatibility (remove metadata from instance because are irrelevant).
|
||||
|
||||
Possible attribute definitions can be found in `openpype/pipeline/lib/attribute_definitions.py`.
|
||||
Possible attribute definitions can be found in `ayon_core/lib/attribute_definitions.py`.
|
||||
|
||||
Except creating and removing instances are all changes not automatically propagated to host context (scene/workfile/...) to propagate changes call `save_changes` which trigger update of all instances in context using Creators implementation.
|
||||
|
||||
|
|
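The documentation paragraph above describes how publish plugins expose attribute definitions. A hedged sketch of the pattern it describes, assuming `BoolDef` is importable from `ayon_core.lib` and `AYONPyblishPluginMixin` from `ayon_core.pipeline.publish` (the class and attribute names below are illustrative, not taken from this diff):

    import pyblish.api
    from ayon_core.lib import BoolDef
    from ayon_core.pipeline.publish import AYONPyblishPluginMixin


    class CollectExampleOptions(pyblish.api.InstancePlugin,
                                AYONPyblishPluginMixin):
        """Illustrative plugin exposing a single attribute definition."""

        label = "Collect Example Options"
        order = pyblish.api.CollectorOrder

        @classmethod
        def get_attribute_defs(cls):
            # Values are stored per plugin name under "publish_attributes".
            return [
                BoolDef("use_handles", label="Use handles", default=True),
            ]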
|
|||
|
|
@@ -1,36 +0,0 @@
|
|||
import logging
|
||||
from ayon_core.pipeline import get_current_project_name
|
||||
|
||||
Session = {}
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
log.warning(
|
||||
"DEPRECATION WARNING: 'legacy_io' is deprecated and will be removed in"
|
||||
" future versions of ayon-core addon."
|
||||
"\nReading from Session won't give you updated information and changing"
|
||||
" values won't affect global state of a process."
|
||||
)
|
||||
|
||||
|
||||
def session_data_from_environment(context_keys=False):
|
||||
return {}
|
||||
|
||||
|
||||
def is_installed():
|
||||
return False
|
||||
|
||||
|
||||
def install():
|
||||
pass
|
||||
|
||||
|
||||
def uninstall():
|
||||
pass
|
||||
|
||||
|
||||
def active_project(*args, **kwargs):
|
||||
return get_current_project_name()
|
||||
|
||||
|
||||
def current_project(*args, **kwargs):
|
||||
return get_current_project_name()
|
||||
|
|
@@ -18,18 +18,14 @@ class OpenTaskPath(LauncherAction):
|
|||
icon = "folder-open"
|
||||
order = 500
|
||||
|
||||
def is_compatible(self, session):
|
||||
def is_compatible(self, selection):
|
||||
"""Return whether the action is compatible with the session"""
|
||||
return bool(session.get("AYON_FOLDER_PATH"))
|
||||
return selection.is_folder_selected
|
||||
|
||||
def process(self, session, **kwargs):
|
||||
def process(self, selection, **kwargs):
|
||||
from qtpy import QtCore, QtWidgets
|
||||
|
||||
project_name = session["AYON_PROJECT_NAME"]
|
||||
folder_path = session["AYON_FOLDER_PATH"]
|
||||
task_name = session.get("AYON_TASK_NAME", None)
|
||||
|
||||
path = self._get_workdir(project_name, folder_path, task_name)
|
||||
path = self._get_workdir(selection)
|
||||
if not path:
|
||||
return
|
||||
|
||||
|
|
@@ -60,16 +56,17 @@ class OpenTaskPath(LauncherAction):
|
|||
path = path.split(field, 1)[0]
|
||||
return path
|
||||
|
||||
def _get_workdir(self, project_name, folder_path, task_name):
|
||||
project_entity = ayon_api.get_project(project_name)
|
||||
folder_entity = ayon_api.get_folder_by_path(project_name, folder_path)
|
||||
task_entity = ayon_api.get_task_by_name(
|
||||
project_name, folder_entity["id"], task_name
|
||||
def _get_workdir(self, selection):
|
||||
data = get_template_data(
|
||||
selection.project_entity,
|
||||
selection.folder_entity,
|
||||
selection.task_entity
|
||||
)
|
||||
|
||||
data = get_template_data(project_entity, folder_entity, task_entity)
|
||||
|
||||
anatomy = Anatomy(project_name)
|
||||
anatomy = Anatomy(
|
||||
selection.project_name,
|
||||
project_entity=selection.project_entity
|
||||
)
|
||||
workdir = anatomy.get_template_item(
|
||||
"work", "default", "folder"
|
||||
).format(data)
|
||||
|
|
|
|||
|
|
@@ -194,6 +194,16 @@ class ExtractBurnin(publish.Extractor):
|
|||
).format(host_name, product_type, task_name, profile))
|
||||
return
|
||||
|
||||
burnins_per_repres = self._get_burnins_per_representations(
|
||||
instance, burnin_defs
|
||||
)
|
||||
if not burnins_per_repres:
|
||||
self.log.debug(
|
||||
"Skipped instance. No representations found matching a burnin"
|
||||
"definition in: %s", burnin_defs
|
||||
)
|
||||
return
|
||||
|
||||
burnin_options = self._get_burnin_options()
|
||||
|
||||
# Prepare basic data for processing
|
||||
|
|
@@ -204,9 +214,6 @@ class ExtractBurnin(publish.Extractor):
|
|||
|
||||
# Args that will execute the script
|
||||
executable_args = ["run", scriptpath]
|
||||
burnins_per_repres = self._get_burnins_per_representations(
|
||||
instance, burnin_defs
|
||||
)
|
||||
for repre, repre_burnin_defs in burnins_per_repres:
|
||||
# Create copy of `_burnin_data` and `_temp_data` for repre.
|
||||
burnin_data = copy.deepcopy(_burnin_data)
|
||||
|
|
|
|||
|
|
@@ -619,7 +619,7 @@ class ExtractReview(pyblish.api.InstancePlugin):
|
|||
# Prepare input and output filepaths
|
||||
self.input_output_paths(new_repre, output_def, temp_data)
|
||||
|
||||
# Set output frames len to 1 when ouput is single image
|
||||
# Set output frames len to 1 when output is single image
|
||||
if (
|
||||
temp_data["output_ext_is_image"]
|
||||
and not temp_data["output_is_sequence"]
|
||||
|
|
@@ -955,7 +955,7 @@ class ExtractReview(pyblish.api.InstancePlugin):
|
|||
|
||||
self.log.debug("New representation ext: `{}`".format(output_ext))
|
||||
|
||||
# Output is image file sequence witht frames
|
||||
# Output is image file sequence with frames
|
||||
output_ext_is_image = bool(output_ext in self.image_exts)
|
||||
output_is_sequence = bool(
|
||||
output_ext_is_image
|
||||
|
|
@@ -967,7 +967,7 @@ class ExtractReview(pyblish.api.InstancePlugin):
|
|||
frame_end = temp_data["output_frame_end"]
|
||||
|
||||
filename_base = "{}_{}".format(filename, filename_suffix)
|
||||
# Temporary tempalte for frame filling. Example output:
|
||||
# Temporary template for frame filling. Example output:
|
||||
# "basename.%04d.exr" when `frame_end` == 1001
|
||||
repr_file = "{}.%{:0>2}d.{}".format(
|
||||
filename_base, len(str(frame_end)), output_ext
|
||||
|
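The frame-padding logic in the hunk above can be verified in isolation; a tiny standalone example using made-up values:

    filename_base = "basename"
    output_ext = "exr"
    frame_end = 1001

    # Pad width follows the digit count of the last frame, minimum two digits.
    repr_file = "{}.%{:0>2}d.{}".format(
        filename_base, len(str(frame_end)), output_ext
    )
    print(repr_file)  # basename.%04d.exr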
|
@@ -997,7 +997,7 @@ class ExtractReview(pyblish.api.InstancePlugin):
|
|||
self.log.debug("Creating dir: {}".format(dst_staging_dir))
|
||||
os.makedirs(dst_staging_dir)
|
||||
|
||||
# Store stagingDir to representaion
|
||||
# Store stagingDir to representation
|
||||
new_repre["stagingDir"] = dst_staging_dir
|
||||
|
||||
# Store paths to temp data
|
||||
|
|
@@ -1228,16 +1228,6 @@ class ExtractReview(pyblish.api.InstancePlugin):
|
|||
reformat_in_baking = bool("reformated" in new_repre["tags"])
|
||||
self.log.debug("reformat_in_baking: `{}`".format(reformat_in_baking))
|
||||
|
||||
# Get instance data
|
||||
pixel_aspect = temp_data["pixel_aspect"]
|
||||
|
||||
if reformat_in_baking:
|
||||
self.log.debug((
|
||||
"Using resolution from input. It is already "
|
||||
"reformated from upstream process"
|
||||
))
|
||||
pixel_aspect = 1
|
||||
|
||||
# NOTE Skipped using instance's resolution
|
||||
full_input_path_single_file = temp_data["full_input_path_single_file"]
|
||||
try:
|
||||
|
|
@@ -1268,7 +1258,7 @@ class ExtractReview(pyblish.api.InstancePlugin):
|
|||
if reformat_in_baking:
|
||||
self.log.debug((
|
||||
"Using resolution from input. It is already "
|
||||
"reformated from upstream process"
|
||||
"reformatted from upstream process"
|
||||
))
|
||||
pixel_aspect = 1
|
||||
output_width = input_width
|
||||
|
|
@@ -1374,7 +1364,7 @@ class ExtractReview(pyblish.api.InstancePlugin):
|
|||
# Make sure output width and height is not an odd number
|
||||
# When this can happen:
|
||||
# - if output definition has set width and height with odd number
|
||||
# - `instance.data` contain width and height with odd numbeer
|
||||
# - `instance.data` contain width and height with odd number
|
||||
if output_width % 2 != 0:
|
||||
self.log.warning((
|
||||
"Converting output width from odd to even number. {} -> {}"
|
||||
|
|
@@ -1555,7 +1545,7 @@ class ExtractReview(pyblish.api.InstancePlugin):
|
|||
custom_tags (list): Custom Tags of processed representation.
|
||||
|
||||
Returns:
|
||||
list: Containg all output definitions matching entered tags.
|
||||
list: Containing all output definitions matching entered tags.
|
||||
"""
|
||||
|
||||
filtered_outputs = []
|
||||
|
|
@@ -1820,8 +1810,8 @@ class OverscanCrop:
|
|||
"""
|
||||
# crop=width:height:x:y - explicit start x, y position
|
||||
# crop=width:height - x, y are related to center by width/height
|
||||
# pad=width:heigth:x:y - explicit start x, y position
|
||||
# pad=width:heigth - x, y are set to 0 by default
|
||||
# pad=width:height:x:y - explicit start x, y position
|
||||
# pad=width:height - x, y are set to 0 by default
|
||||
|
||||
width = self.width()
|
||||
height = self.height()
|
||||
|
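The crop/pad comments corrected above refer to ffmpeg filter syntax; a short illustrative example of the resulting filter strings, with arbitrary numbers:

    width, height = 1920, 1080

    # Centered crop: x and y default to the centre of the input when omitted.
    crop_filter = "crop={}:{}".format(width, height)

    # Pad with an explicit offset; x and y default to 0 when omitted.
    pad_filter = "pad={}:{}:{}:{}".format(width + 100, height + 100, 50, 50)

    print(crop_filter)  # crop=1920:1080
    print(pad_filter)   # pad=2020:1180:50:50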
|
@@ -1869,7 +1859,7 @@ class OverscanCrop:
|
|||
# Replace "px" (and spaces before) with single space
|
||||
string_value = re.sub(r"([ ]+)?px", " ", string_value)
|
||||
string_value = re.sub(r"([ ]+)%", "%", string_value)
|
||||
# Make sure +/- sign at the beggining of string is next to number
|
||||
# Make sure +/- sign at the beginning of string is next to number
|
||||
string_value = re.sub(r"^([\+\-])[ ]+", "\g<1>", string_value)
|
||||
# Make sure +/- sign in the middle has zero spaces before number under
|
||||
# which belongs
|
||||
|
|
|
|||
|
|
@@ -10,7 +10,7 @@ Scene contains one or more outdated loaded containers, eg. versions loaded into
|
|||
### How to repair?
|
||||
|
||||
Use 'Scene Inventory' and update all highlighted old container to latest OR
|
||||
refresh Publish and switch 'Validate Containers' toggle on 'Options' tab.
|
||||
refresh Publish and switch 'Validate Containers' toggle on 'Context' tab.
|
||||
|
||||
WARNING: Skipping this validator will result in publishing (and probably rendering) old version of loaded assets.
|
||||
</description>
|
||||
|
|
|
|||
|
|
@@ -5,6 +5,7 @@ from ayon_core.lib import Logger, AYONSettingsRegistry
|
|||
from ayon_core.pipeline.actions import (
|
||||
discover_launcher_actions,
|
||||
LauncherAction,
|
||||
LauncherActionSelection,
|
||||
)
|
||||
from ayon_core.pipeline.workfile import should_use_last_workfile_on_launch
|
||||
|
||||
|
|
@@ -69,11 +70,6 @@ class ApplicationAction(LauncherAction):
|
|||
project_entities = {}
|
||||
|
||||
_log = None
|
||||
required_session_keys = (
|
||||
"AYON_PROJECT_NAME",
|
||||
"AYON_FOLDER_PATH",
|
||||
"AYON_TASK_NAME"
|
||||
)
|
||||
|
||||
@property
|
||||
def log(self):
|
||||
|
|
@@ -81,18 +77,16 @@ class ApplicationAction(LauncherAction):
|
|||
self._log = Logger.get_logger(self.__class__.__name__)
|
||||
return self._log
|
||||
|
||||
def is_compatible(self, session):
|
||||
for key in self.required_session_keys:
|
||||
if not session.get(key):
|
||||
return False
|
||||
def is_compatible(self, selection):
|
||||
if not selection.is_task_selected:
|
||||
return False
|
||||
|
||||
project_name = session["AYON_PROJECT_NAME"]
|
||||
project_entity = self.project_entities[project_name]
|
||||
project_entity = self.project_entities[selection.project_name]
|
||||
apps = project_entity["attrib"].get("applications")
|
||||
if not apps or self.application.full_name not in apps:
|
||||
return False
|
||||
|
||||
project_settings = self.project_settings[project_name]
|
||||
project_settings = self.project_settings[selection.project_name]
|
||||
only_available = project_settings["applications"]["only_available"]
|
||||
if only_available and not self.application.find_executable():
|
||||
return False
|
||||
|
|
@@ -112,7 +106,7 @@ class ApplicationAction(LauncherAction):
|
|||
dialog.setDetailedText(details)
|
||||
dialog.exec_()
|
||||
|
||||
def process(self, session, **kwargs):
|
||||
def process(self, selection, **kwargs):
|
||||
"""Process the full Application action"""
|
||||
|
||||
from ayon_core.lib import (
|
||||
|
|
@@ -120,14 +114,11 @@ class ApplicationAction(LauncherAction):
|
|||
ApplicationLaunchFailed,
|
||||
)
|
||||
|
||||
project_name = session["AYON_PROJECT_NAME"]
|
||||
folder_path = session["AYON_FOLDER_PATH"]
|
||||
task_name = session["AYON_TASK_NAME"]
|
||||
try:
|
||||
self.application.launch(
|
||||
project_name=project_name,
|
||||
folder_path=folder_path,
|
||||
task_name=task_name,
|
||||
project_name=selection.project_name,
|
||||
folder_path=selection.folder_path,
|
||||
task_name=selection.task_name,
|
||||
**self.data
|
||||
)
|
||||
|
||||
|
|
@@ -335,11 +326,11 @@ class ActionsModel:
|
|||
"""
|
||||
not_open_workfile_actions = self._get_no_last_workfile_for_context(
|
||||
project_name, folder_id, task_id)
|
||||
session = self._prepare_session(project_name, folder_id, task_id)
|
||||
selection = self._prepare_selection(project_name, folder_id, task_id)
|
||||
output = []
|
||||
action_items = self._get_action_items(project_name)
|
||||
for identifier, action in self._get_action_objects().items():
|
||||
if not action.is_compatible(session):
|
||||
if not action.is_compatible(selection):
|
||||
continue
|
||||
|
||||
action_item = action_items[identifier]
|
||||
|
|
@@ -374,7 +365,7 @@ class ActionsModel:
|
|||
)
|
||||
|
||||
def trigger_action(self, project_name, folder_id, task_id, identifier):
|
||||
session = self._prepare_session(project_name, folder_id, task_id)
|
||||
selection = self._prepare_selection(project_name, folder_id, task_id)
|
||||
failed = False
|
||||
error_message = None
|
||||
action_label = identifier
|
||||
|
|
@@ -403,7 +394,7 @@ class ActionsModel:
|
|||
)
|
||||
action.data["start_last_workfile"] = start_last_workfile
|
||||
|
||||
action.process(session)
|
||||
action.process(selection)
|
||||
except Exception as exc:
|
||||
self.log.warning("Action trigger failed.", exc_info=True)
|
||||
failed = True
|
||||
|
|
@@ -440,29 +431,8 @@ class ActionsModel:
|
|||
.get(task_id, {})
|
||||
)
|
||||
|
||||
def _prepare_session(self, project_name, folder_id, task_id):
|
||||
folder_path = None
|
||||
if folder_id:
|
||||
folder = self._controller.get_folder_entity(
|
||||
project_name, folder_id)
|
||||
if folder:
|
||||
folder_path = folder["path"]
|
||||
|
||||
task_name = None
|
||||
if task_id:
|
||||
task = self._controller.get_task_entity(project_name, task_id)
|
||||
if task:
|
||||
task_name = task["name"]
|
||||
|
||||
return {
|
||||
"AYON_PROJECT_NAME": project_name,
|
||||
"AYON_FOLDER_PATH": folder_path,
|
||||
"AYON_TASK_NAME": task_name,
|
||||
# Deprecated - kept for backwards compatibility
|
||||
"AVALON_PROJECT": project_name,
|
||||
"AVALON_ASSET": folder_path,
|
||||
"AVALON_TASK": task_name,
|
||||
}
|
||||
def _prepare_selection(self, project_name, folder_id, task_id):
|
||||
return LauncherActionSelection(project_name, folder_id, task_id)
|
||||
|
||||
def _get_discovered_action_classes(self):
|
||||
if self._discovered_actions is None:
|
||||
|
|
|
|||
|
|
@@ -1,33 +0,0 @@
|
|||
# TODO remove - kept for kitsu addon which imported it
|
||||
from qtpy import QtWidgets, QtCore, QtGui
|
||||
|
||||
|
||||
class PressHoverButton(QtWidgets.QPushButton):
|
||||
"""
|
||||
Deprecated:
|
||||
Use `openpype.tools.utils.PressHoverButton` instead.
|
||||
"""
|
||||
_mouse_pressed = False
|
||||
_mouse_hovered = False
|
||||
change_state = QtCore.Signal(bool)
|
||||
|
||||
def mousePressEvent(self, event):
|
||||
self._mouse_pressed = True
|
||||
self._mouse_hovered = True
|
||||
self.change_state.emit(self._mouse_hovered)
|
||||
super(PressHoverButton, self).mousePressEvent(event)
|
||||
|
||||
def mouseReleaseEvent(self, event):
|
||||
self._mouse_pressed = False
|
||||
self._mouse_hovered = False
|
||||
self.change_state.emit(self._mouse_hovered)
|
||||
super(PressHoverButton, self).mouseReleaseEvent(event)
|
||||
|
||||
def mouseMoveEvent(self, event):
|
||||
mouse_pos = self.mapFromGlobal(QtGui.QCursor.pos())
|
||||
under_mouse = self.rect().contains(mouse_pos)
|
||||
if under_mouse != self._mouse_hovered:
|
||||
self._mouse_hovered = under_mouse
|
||||
self.change_state.emit(self._mouse_hovered)
|
||||
|
||||
super(PressHoverButton, self).mouseMoveEvent(event)
|
||||
|
|
@@ -9,7 +9,7 @@ from ayon_server.settings import (
|
|||
task_types_enum,
|
||||
)
|
||||
|
||||
from ayon_server.types import ColorRGB_uint8, ColorRGBA_uint8
|
||||
from ayon_server.types import ColorRGBA_uint8
|
||||
|
||||
|
||||
class ValidateBaseModel(BaseSettingsModel):
|
||||
|
|
@@ -221,7 +221,12 @@ class OIIOToolArgumentsModel(BaseSettingsModel):
|
|||
|
||||
class ExtractOIIOTranscodeOutputModel(BaseSettingsModel):
|
||||
_layout = "expanded"
|
||||
name: str = SettingsField("", title="Name")
|
||||
name: str = SettingsField(
|
||||
"",
|
||||
title="Name",
|
||||
description="Output name (no space)",
|
||||
regex=r"[a-zA-Z0-9_]([a-zA-Z0-9_\.\-]*[a-zA-Z0-9_])?$",
|
||||
)
|
||||
extension: str = SettingsField("", title="Extension")
|
||||
transcoding_type: str = SettingsField(
|
||||
"colorspace",
|
||||
|
|
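The new name field above is constrained by a regex; a quick standalone check of which output names it accepts — only the pattern itself comes from the hunk above, the test values are made up:

    import re

    # Pattern from the settings field above.
    NAME_REGEX = re.compile(r"[a-zA-Z0-9_]([a-zA-Z0-9_\.\-]*[a-zA-Z0-9_])?$")

    for name in ("h264", "review_mp4", "bad name", "-leading"):
        # Names with spaces or a leading "-" do not match.
        print(name, bool(NAME_REGEX.match(name)))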
|
|||
|
|
@@ -299,6 +299,16 @@ class ExtractAlembicModel(BaseSettingsModel):
|
|||
families: list[str] = SettingsField(
|
||||
default_factory=list,
|
||||
title="Families")
|
||||
bake_attributes: list[str] = SettingsField(
|
||||
default_factory=list, title="Bake Attributes",
|
||||
description="List of attributes that will be included in the alembic "
|
||||
"export.",
|
||||
)
|
||||
bake_attribute_prefixes: list[str] = SettingsField(
|
||||
default_factory=list, title="Bake Attribute Prefixes",
|
||||
description="List of attribute prefixes for attributes that will be "
|
||||
"included in the alembic export.",
|
||||
)
|
||||
|
||||
|
||||
class ExtractObjModel(BaseSettingsModel):
|
||||
|
|
@@ -306,6 +316,12 @@ class ExtractObjModel(BaseSettingsModel):
|
|||
optional: bool = SettingsField(title="Optional")
|
||||
|
||||
|
||||
class ExtractModelModel(BaseSettingsModel):
|
||||
enabled: bool = SettingsField(title="Enabled")
|
||||
optional: bool = SettingsField(title="Optional")
|
||||
active: bool = SettingsField(title="Active")
|
||||
|
||||
|
||||
class ExtractMayaSceneRawModel(BaseSettingsModel):
|
||||
"""Add loaded instances to those published families:"""
|
||||
enabled: bool = SettingsField(title="ExtractMayaSceneRaw")
|
||||
|
|
@@ -362,7 +378,9 @@ class ExtractLookModel(BaseSettingsModel):
|
|||
|
||||
|
||||
class ExtractGPUCacheModel(BaseSettingsModel):
|
||||
enabled: bool = True
|
||||
enabled: bool = SettingsField(title="Enabled")
|
||||
optional: bool = SettingsField(title="Optional")
|
||||
active: bool = SettingsField(title="Active")
|
||||
families: list[str] = SettingsField(default_factory=list, title="Families")
|
||||
step: float = SettingsField(1.0, ge=1.0, title="Step")
|
||||
stepSave: int = SettingsField(1, ge=1, title="Step Save")
|
||||
|
|
@@ -789,6 +807,10 @@ class PublishersModel(BaseSettingsModel):
|
|||
default_factory=ExtractGPUCacheModel,
|
||||
title="Extract GPU Cache",
|
||||
)
|
||||
ExtractModel: ExtractModelModel = SettingsField(
|
||||
default_factory=ExtractModelModel,
|
||||
title="Extract Model (Maya Scene)"
|
||||
)
|
||||
|
||||
|
||||
DEFAULT_SUFFIX_NAMING = {
|
||||
|
|
@@ -1184,7 +1206,9 @@ DEFAULT_PUBLISH_SETTINGS = {
|
|||
"pointcache",
|
||||
"model",
|
||||
"vrayproxy.alembic"
|
||||
]
|
||||
],
|
||||
"bake_attributes": [],
|
||||
"bake_attribute_prefixes": []
|
||||
},
|
||||
"ExtractObj": {
|
||||
"enabled": False,
|
||||
|
|
@@ -1329,6 +1353,8 @@ DEFAULT_PUBLISH_SETTINGS = {
|
|||
},
|
||||
"ExtractGPUCache": {
|
||||
"enabled": False,
|
||||
"optional": False,
|
||||
"active": True,
|
||||
"families": [
|
||||
"model",
|
||||
"animation",
|
||||
|
|
@@ -1341,5 +1367,10 @@ DEFAULT_PUBLISH_SETTINGS = {
|
|||
"optimizeAnimationsForMotionBlur": True,
|
||||
"writeMaterials": True,
|
||||
"useBaseTessellation": True
|
||||
},
|
||||
"ExtractModel": {
|
||||
"enabled": True,
|
||||
"optional": True,
|
||||
"active": True,
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@@ -1,3 +1,3 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""Package declaring addon version."""
|
||||
__version__ = "0.1.11"
|
||||
__version__ = "0.1.13"
|
||||
|
|
|
|||
|
|
@@ -142,6 +142,7 @@ DEFAULT_SIMPLE_CREATORS = [
|
|||
"extensions": [
|
||||
".exr",
|
||||
".png",
|
||||
".dng",
|
||||
".dpx",
|
||||
".jpg",
|
||||
".tiff",
|
||||
|
|
@@ -165,6 +166,7 @@ DEFAULT_SIMPLE_CREATORS = [
|
|||
"extensions": [
|
||||
".exr",
|
||||
".png",
|
||||
".dng",
|
||||
".dpx",
|
||||
".jpg",
|
||||
".jpeg",
|
||||
|
|
@@ -215,6 +217,7 @@ DEFAULT_SIMPLE_CREATORS = [
|
|||
".exr",
|
||||
".jpg",
|
||||
".jpeg",
|
||||
".dng",
|
||||
".dpx",
|
||||
".bmp",
|
||||
".tif",
|
||||
|
|
|
|||
|
|
@@ -1,3 +1,3 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""Package declaring addon version."""
|
||||
__version__ = "0.1.3"
|
||||
__version__ = "0.1.4"
|
||||
|
|
|
|||