Mirror of https://github.com/ynput/ayon-core.git (synced 2025-12-25 05:14:40 +01:00)

Commit 4f021503f9: Merge branch 'develop' into enhancement/houdini_prompt_save_task_change

76 changed files with 353 additions and 247 deletions
@@ -1075,7 +1075,7 @@ class AddonsManager:
 """Print out report of time spent on addons initialization parts.

 Reporting is not automated must be implemented for each initialization
-part separatelly. Reports must be stored to `_report` attribute.
+part separately. Reports must be stored to `_report` attribute.
 Print is skipped if `_report` is empty.

 Attribute `_report` is dictionary where key is "label" describing

@@ -1267,7 +1267,7 @@ class TrayAddonsManager(AddonsManager):
 def add_doubleclick_callback(self, addon, callback):
 """Register doubleclick callbacks on tray icon.

-Currently there is no way how to determine which is launched. Name of
+Currently, there is no way how to determine which is launched. Name of
 callback can be defined with `doubleclick_callback` attribute.

 Missing feature how to define default callback.

@@ -191,7 +191,7 @@ def _process_app_events() -> Optional[float]:


 class LaunchQtApp(bpy.types.Operator):
-"""A Base class for opertors to launch a Qt app."""
+"""A Base class for operators to launch a Qt app."""

 _app: QtWidgets.QApplication
 _window = Union[QtWidgets.QDialog, ModuleType]

@@ -227,7 +227,7 @@ class BlendLoader(plugin.AssetLoader):
 obj.animation_data_create()
 obj.animation_data.action = actions[obj.name]

-# Restore the old data, but reset memebers, as they don't exist anymore
+# Restore the old data, but reset members, as they don't exist anymore
 # This avoids a crash, because the memory addresses of those members
 # are not valid anymore
 old_data["members"] = []

@@ -32,7 +32,7 @@ class ValidateDeadlinePublish(pyblish.api.InstancePlugin,
 tree = bpy.context.scene.node_tree
 output_type = "CompositorNodeOutputFile"
 output_node = None
-# Remove all output nodes that inlcude "AYON" in the name.
+# Remove all output nodes that include "AYON" in the name.
 # There should be only one.
 for node in tree.nodes:
 if node.bl_idname == output_type and "AYON" in node.name:

@@ -118,7 +118,7 @@ class CelactionPrelaunchHook(PreLaunchHook):
 def workfile_path(self):
 workfile_path = self.data["last_workfile_path"]

-# copy workfile from template if doesnt exist any on path
+# copy workfile from template if doesn't exist any on path
 if not os.path.exists(workfile_path):
 # TODO add ability to set different template workfile path via
 # settings
@@ -38,7 +38,7 @@ class CollectRenderPath(pyblish.api.InstancePlugin):
 render_path = r_template_item["path"].format_strict(anatomy_data)
 self.log.debug("__ render_path: `{}`".format(render_path))

-# create dir if it doesnt exists
+# create dir if it doesn't exists
 try:
 if not os.path.isdir(render_dir):
 os.makedirs(render_dir, exist_ok=True)

@@ -23,7 +23,7 @@ from .lib import (
 reset_segment_selection,
 get_segment_attributes,
 get_clips_in_reels,
-get_reformated_filename,
+get_reformatted_filename,
 get_frame_from_filename,
 get_padding_from_filename,
 maintained_object_duplication,

@@ -101,7 +101,7 @@ __all__ = [
 "reset_segment_selection",
 "get_segment_attributes",
 "get_clips_in_reels",
-"get_reformated_filename",
+"get_reformatted_filename",
 "get_frame_from_filename",
 "get_padding_from_filename",
 "maintained_object_duplication",

@@ -607,7 +607,7 @@ def get_clips_in_reels(project):
 return output_clips


-def get_reformated_filename(filename, padded=True):
+def get_reformatted_filename(filename, padded=True):
 """
 Return fixed python expression path

@@ -615,10 +615,10 @@ def get_reformated_filename(filename, padded=True):
 filename (str): file name

 Returns:
-type: string with reformated path
+type: string with reformatted path

 Example:
-get_reformated_filename("plate.1001.exr") > plate.%04d.exr
+get_reformatted_filename("plate.1001.exr") > plate.%04d.exr

 """
 found = FRAME_PATTERN.search(filename)

@@ -980,7 +980,7 @@ class MediaInfoFile(object):

 @property
 def file_pattern(self):
-"""Clips file patter
+"""Clips file pattern.

 Returns:
 str: file pattern. ex. file.[1-2].exr
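Aside on the get_reformatted_filename rename above: its docstring example turns an embedded frame number into a printf-style padding token ("plate.1001.exr" becomes "plate.%04d.exr"). The real helper depends on the module's FRAME_PATTERN regex, which this diff does not show, so the snippet below is only a minimal standalone sketch of the idea, assuming a simple "<name>.<frame>.<ext>" layout and a hypothetical function name.

import re

# Assumed pattern for illustration only; the module's FRAME_PATTERN may differ.
FRAME_RE = re.compile(r"\.(\d+)\.")


def to_frame_expression(filename, padded=True):
    """Replace the frame number with %0Nd (or %d when padded is False)."""
    match = FRAME_RE.search(filename)
    if not match:
        return filename
    token = "%0{}d".format(len(match.group(1))) if padded else "%d"
    return filename[:match.start(1)] + token + filename[match.end(1):]


print(to_frame_expression("plate.1001.exr"))         # plate.%04d.exr
print(to_frame_expression("plate.1001.exr", False))  # plate.%d.exr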
@@ -1018,7 +1018,7 @@ class OpenClipSolver(flib.MediaInfoFile):
 self.feed_version_name))
 else:
 self.log.debug("adding new track element ..")
-# create new track as it doesnt exists yet
+# create new track as it doesn't exist yet
 # set current version to feeds on tmp
 tmp_xml_feeds = tmp_xml_track.find('feeds')
 tmp_xml_feeds.set('currentVersion', self.feed_version_name)

@@ -256,7 +256,7 @@ def create_otio_reference(clip_data, fps=None):

 if not otio_ex_ref_item:
 dirname, file_name = os.path.split(path)
-file_name = utils.get_reformated_filename(file_name, padded=False)
+file_name = utils.get_reformatted_filename(file_name, padded=False)
 reformated_path = os.path.join(dirname, file_name)
 # in case old OTIO or video file create `ExternalReference`
 otio_ex_ref_item = otio.schema.ExternalReference(

@@ -21,7 +21,7 @@ def frames_to_seconds(frames, framerate):
 return otio.opentime.to_seconds(rt)


-def get_reformated_filename(filename, padded=True):
+def get_reformatted_filename(filename, padded=True):
 """
 Return fixed python expression path

@@ -29,10 +29,10 @@ def get_reformated_filename(filename, padded=True):
 filename (str): file name

 Returns:
-type: string with reformated path
+type: string with reformatted path

 Example:
-get_reformated_filename("plate.1001.exr") > plate.%04d.exr
+get_reformatted_filename("plate.1001.exr") > plate.%04d.exr

 """
 found = FRAME_PATTERN.search(filename)

@@ -17,7 +17,7 @@ class CreateShotClip(opfapi.Creator):
 presets = deepcopy(self.presets)
 gui_inputs = self.get_gui_inputs()

-# get key pares from presets and match it on ui inputs
+# get key pairs from presets and match it on ui inputs
 for k, v in gui_inputs.items():
 if v["type"] in ("dict", "section"):
 # nested dictionary (only one level allowed

@@ -236,7 +236,7 @@ class CreateShotClip(opfapi.Creator):
 "type": "QCheckBox",
 "label": "Source resolution",
 "target": "tag",
-"toolTip": "Is resloution taken from timeline or source?", # noqa
+"toolTip": "Is resolution taken from timeline or source?", # noqa
 "order": 4},
 }
 },

@@ -37,7 +37,7 @@ class CollectTimelineInstances(pyblish.api.ContextPlugin):
 self.otio_timeline = context.data["otioTimeline"]
 self.fps = context.data["fps"]

-# process all sellected
+# process all selected
 for segment in selected_segments:
 # get openpype tag data
 marker_data = opfapi.get_segment_data_marker(segment)

@@ -396,7 +396,7 @@ class FtrackEntityOperator:

 entity = session.query(query).first()

-# if entity doesnt exist then create one
+# if entity doesn't exist then create one
 if not entity:
 entity = self.create_ftrack_entity(
 session,
@@ -1,6 +1,11 @@
-from ayon_core.lib import EnumDef
+from ayon_core.lib import (
+UILabelDef,
+NumberDef,
+EnumDef
+)

 from ayon_core.hosts.fusion.api.plugin import GenericCreateSaver
+from ayon_core.hosts.fusion.api.lib import get_current_comp


 class CreateSaver(GenericCreateSaver):

@@ -45,6 +50,7 @@ class CreateSaver(GenericCreateSaver):
 self._get_reviewable_bool(),
 self._get_frame_range_enum(),
 self._get_image_format_enum(),
+*self._get_custom_frame_range_attribute_defs()
 ]
 return attr_defs


@@ -53,6 +59,7 @@ class CreateSaver(GenericCreateSaver):
 "current_folder": "Current Folder context",
 "render_range": "From render in/out",
 "comp_range": "From composition timeline",
+"custom_range": "Custom frame range",
 }

 return EnumDef(

@@ -61,3 +68,82 @@ class CreateSaver(GenericCreateSaver):
 label="Frame range source",
 default=self.default_frame_range_option
 )
+
+@staticmethod
+def _get_custom_frame_range_attribute_defs() -> list:
+
+# Define custom frame range defaults based on current comp
+# timeline settings (if a comp is currently open)
+comp = get_current_comp()
+if comp is not None:
+attrs = comp.GetAttrs()
+frame_defaults = {
+"frameStart": int(attrs["COMPN_GlobalStart"]),
+"frameEnd": int(attrs["COMPN_GlobalEnd"]),
+"handleStart": int(
+attrs["COMPN_RenderStart"] - attrs["COMPN_GlobalStart"]
+),
+"handleEnd": int(
+attrs["COMPN_GlobalEnd"] - attrs["COMPN_RenderEnd"]
+),
+}
+else:
+frame_defaults = {
+"frameStart": 1001,
+"frameEnd": 1100,
+"handleStart": 0,
+"handleEnd": 0
+}
+
+return [
+UILabelDef(
+label="<br><b>Custom Frame Range</b><br>"
+"<i>only used with 'Custom frame range' source</i>"
+),
+NumberDef(
+"custom_frameStart",
+label="Frame Start",
+default=frame_defaults["frameStart"],
+minimum=0,
+decimals=0,
+tooltip=(
+"Set the start frame for the export.\n"
+"Only used if frame range source is 'Custom frame range'."
+)
+),
+NumberDef(
+"custom_frameEnd",
+label="Frame End",
+default=frame_defaults["frameEnd"],
+minimum=0,
+decimals=0,
+tooltip=(
+"Set the end frame for the export.\n"
+"Only used if frame range source is 'Custom frame range'."
+)
+),
+NumberDef(
+"custom_handleStart",
+label="Handle Start",
+default=frame_defaults["handleStart"],
+minimum=0,
+decimals=0,
+tooltip=(
+"Set the start handles for the export, this will be "
+"added before the start frame.\n"
+"Only used if frame range source is 'Custom frame range'."
+)
+),
+NumberDef(
+"custom_handleEnd",
+label="Handle End",
+default=frame_defaults["handleEnd"],
+minimum=0,
+decimals=0,
+tooltip=(
+"Set the end handles for the export, this will be added "
+"after the end frame.\n"
+"Only used if frame range source is 'Custom frame range'."
+)
+)
+]
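The custom frame range attributes added above are consumed by the Fusion instance collector in the next hunk, which widens the publish range by the handle values. As a standalone illustration of that arithmetic only (the helper name and dict input are hypothetical, not part of the plugin):

def resolve_custom_range(attrs):
    """Return (start, end) including handles, mirroring the collector logic."""
    start = int(attrs["custom_frameStart"])
    end = int(attrs["custom_frameEnd"])
    handle_start = int(attrs["custom_handleStart"])
    handle_end = int(attrs["custom_handleEnd"])
    return start - handle_start, end + handle_end


# With the fallback defaults used when no comp is open (1001-1100, no handles):
print(resolve_custom_range({
    "custom_frameStart": 1001,
    "custom_frameEnd": 1100,
    "custom_handleStart": 0,
    "custom_handleEnd": 0,
}))  # (1001, 1100)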
@@ -57,6 +57,14 @@ class CollectInstanceData(pyblish.api.InstancePlugin):
 start_with_handle = comp_start
 end_with_handle = comp_end

+if frame_range_source == "custom_range":
+start = int(instance.data["custom_frameStart"])
+end = int(instance.data["custom_frameEnd"])
+handle_start = int(instance.data["custom_handleStart"])
+handle_end = int(instance.data["custom_handleEnd"])
+start_with_handle = start - handle_start
+end_with_handle = end + handle_end
+
 frame = instance.data["creator_attributes"].get("frame")
 # explicitly publishing only single frame
 if frame is not None:

@@ -568,7 +568,7 @@ def save_scene():
 """Save the Harmony scene safely.

 The built-in (to Avalon) background zip and moving of the Harmony scene
-folder, interfers with server/client communication by sending two requests
+folder, interferes with server/client communication by sending two requests
 at the same time. This only happens when sending "scene.saveAll()". This
 method prevents this double request and safely saves the scene.


@@ -166,7 +166,7 @@ def get_current_track(sequence, name, audio=False):
 Creates new if none is found.

 Args:
-sequence (hiero.core.Sequence): hiero sequene object
+sequence (hiero.core.Sequence): hiero sequence object
 name (str): name of track we want to return
 audio (bool)[optional]: switch to AudioTrack


@@ -846,8 +846,8 @@ def create_nuke_workfile_clips(nuke_workfiles, seq=None):
 [{
 'path': 'P:/Jakub_testy_pipeline/test_v01.nk',
 'name': 'test',
-'handleStart': 15, # added asymetrically to handles
-'handleEnd': 10, # added asymetrically to handles
+'handleStart': 15, # added asymmetrically to handles
+'handleEnd': 10, # added asymmetrically to handles
 "clipIn": 16,
 "frameStart": 991,
 "frameEnd": 1023,

@@ -1192,7 +1192,7 @@ def get_sequence_pattern_and_padding(file):

 Return:
 string: any matching sequence pattern
-int: padding of sequnce numbering
+int: padding of sequence numbering
 """
 foundall = re.findall(
 r"(#+)|(%\d+d)|(?<=[^a-zA-Z0-9])(\d+)(?=\.\w+$)", file)

@@ -90,7 +90,7 @@ def apply_transition(otio_track, otio_item, track):
 if isinstance(track, hiero.core.AudioTrack):
 kind = 'Audio'

-# Gather TrackItems involved in trasition
+# Gather TrackItems involved in transition
 item_in, item_out = get_neighboring_trackitems(
 otio_item,
 otio_track,

@@ -25,7 +25,7 @@ def get_reformated_path(path, padded=True):
 path (str): path url or simple file name

 Returns:
-type: string with reformated path
+type: string with reformatted path

 Example:
 get_reformated_path("plate.[0001-1008].exr") > plate.%04d.exr

@@ -90,7 +90,7 @@ def apply_transition(otio_track, otio_item, track):
 kind = "Audio"

 try:
-# Gather TrackItems involved in trasition
+# Gather TrackItems involved in transition
 item_in, item_out = get_neighboring_trackitems(
 otio_item,
 otio_track,

@@ -166,7 +166,7 @@ class CreateShotClip(phiero.Creator):
 "type": "QCheckBox",
 "label": "Source resolution",
 "target": "tag",
-"toolTip": "Is resloution taken from timeline or source?", # noqa
+"toolTip": "Is resolution taken from timeline or source?", # noqa
 "order": 4},
 }
 },

@@ -211,7 +211,7 @@ class CreateShotClip(phiero.Creator):
 presets = deepcopy(self.presets)
 gui_inputs = deepcopy(self.gui_inputs)

-# get key pares from presets and match it on ui inputs
+# get key pairs from presets and match it on ui inputs
 for k, v in gui_inputs.items():
 if v["type"] in ("dict", "section"):
 # nested dictionary (only one level allowed

@@ -43,7 +43,7 @@ class PrecollectInstances(pyblish.api.ContextPlugin):
 tracks_effect_items = self.collect_sub_track_items(all_tracks)
 context.data["tracksEffectItems"] = tracks_effect_items

-# process all sellected timeline track items
+# process all selected timeline track items
 for track_item in selected_timeline_items:
 data = {}
 clip_name = track_item.name()

@@ -62,7 +62,7 @@ class PrecollectInstances(pyblish.api.ContextPlugin):
 }:
 continue

-# get clips subtracks and anotations
+# get clips subtracks and annotations
 annotations = self.clip_annotations(source_clip)
 subtracks = self.clip_subtrack(track_item)
 self.log.debug("Annotations: {}".format(annotations))

@@ -439,10 +439,10 @@ class PrecollectInstances(pyblish.api.ContextPlugin):
 for item in subTrackItems:
 if "TimeWarp" in item.name():
 continue
-# avoid all anotation
+# avoid all annotation
 if isinstance(item, hiero.core.Annotation):
 continue
-# # avoid all not anaibled
+# avoid all disabled
 if not item.isEnabled():
 continue
 subtracks.append(item)
@@ -3,7 +3,6 @@ import sys
 import os
 import errno
 import re
-import uuid
 import logging
 import json
 from contextlib import contextmanager

@@ -44,84 +43,6 @@ def get_folder_fps(folder_entity=None):
 return folder_entity["attrib"]["fps"]


-def set_id(node, unique_id, overwrite=False):
-exists = node.parm("id")
-if not exists:
-imprint(node, {"id": unique_id})
-
-if not exists and overwrite:
-node.setParm("id", unique_id)
-
-
-def get_id(node):
-"""Get the `cbId` attribute of the given node.
-
-Args:
-node (hou.Node): the name of the node to retrieve the attribute from
-
-Returns:
-str: cbId attribute of the node.
-
-"""
-
-if node is not None:
-return node.parm("id")
-
-
-def generate_ids(nodes, folder_id=None):
-"""Returns new unique ids for the given nodes.
-
-Note: This does not assign the new ids, it only generates the values.
-
-To assign new ids using this method:
->>> nodes = ["a", "b", "c"]
->>> for node, id in generate_ids(nodes):
->>> set_id(node, id)
-
-To also override any existing values (and assign regenerated ids):
->>> nodes = ["a", "b", "c"]
->>> for node, id in generate_ids(nodes):
->>> set_id(node, id, overwrite=True)
-
-Args:
-nodes (list): List of nodes.
-folder_id (str): Folder id . Use current folder id if is ``None``.
-
-Returns:
-list: A list of (node, id) tuples.
-
-"""
-
-if folder_id is None:
-project_name = get_current_project_name()
-folder_path = get_current_folder_path()
-# Get folder id of current context folder
-folder_entity = ayon_api.get_folder_by_path(
-project_name, folder_path, fields={"id"}
-)
-if not folder_entity:
-raise ValueError("No current folder is set.")
-
-folder_id = folder_entity["id"]
-
-node_ids = []
-for node in nodes:
-_, uid = str(uuid.uuid4()).rsplit("-", 1)
-unique_id = "{}:{}".format(folder_id, uid)
-node_ids.append((node, unique_id))
-
-return node_ids
-
-
-def get_id_required_nodes():
-
-valid_types = ["geometry"]
-nodes = {n for n in hou.node("/out").children() if
-n.type().name() in valid_types}
-
-return list(nodes)


 def get_output_parameter(node):
 """Return the render output parameter of the given node


@@ -171,7 +171,7 @@ class HoudiniHost(HostBase, IWorkfileHost, ILoadHost, IPublishHost):
 if not op_ctx:
 op_ctx = self.create_context_node()

-lib.imprint(op_ctx, data)
+lib.imprint(op_ctx, data, update=True)

 def get_context_data(self):
 op_ctx = hou.node(CONTEXT_CONTAINER)
@@ -15,6 +15,7 @@ class CreateRedshiftROP(plugin.HoudiniCreator):
 product_type = "redshift_rop"
 icon = "magic"
 ext = "exr"
+multi_layered_mode = "No Multi-Layered EXR File"

 # Default to split export and render jobs
 split_render = True

@@ -55,25 +56,36 @@ class CreateRedshiftROP(plugin.HoudiniCreator):

 # Set the linked rop to the Redshift ROP
 ipr_rop.parm("linked_rop").set(instance_node.path())

 ext = pre_create_data.get("image_format")
-filepath = "{renders_dir}{product_name}/{product_name}.{fmt}".format(
-renders_dir=hou.text.expandString("$HIP/pyblish/renders/"),
-product_name=product_name,
-fmt="${aov}.$F4.{ext}".format(aov="AOV", ext=ext)
-)
+multi_layered_mode = pre_create_data.get("multi_layered_mode")
+
 ext_format_index = {"exr": 0, "tif": 1, "jpg": 2, "png": 3}
+multilayer_mode_index = {"No Multi-Layered EXR File": "1",
+"Full Multi-Layered EXR File": "2" }
+
+filepath = "{renders_dir}{product_name}/{product_name}.{fmt}".format(
+renders_dir=hou.text.expandString("$HIP/pyblish/renders/"),
+product_name=product_name,
+fmt="$AOV.$F4.{ext}".format(ext=ext)
+)
+
+if multilayer_mode_index[multi_layered_mode] == "1":
+multipart = False
+
+elif multilayer_mode_index[multi_layered_mode] == "2":
+multipart = True

 parms = {
 # Render frame range
 "trange": 1,
 # Redshift ROP settings
 "RS_outputFileNamePrefix": filepath,
-"RS_outputMultilayerMode": "1", # no multi-layered exr
 "RS_outputBeautyAOVSuffix": "beauty",
 "RS_outputFileFormat": ext_format_index[ext],
 }
+if ext == "exr":
+parms["RS_outputMultilayerMode"] = multilayer_mode_index[multi_layered_mode]
+parms["RS_aovMultipart"] = multipart

 if self.selected_nodes:
 # set up the render camera from the selected node

@@ -111,6 +123,11 @@ class CreateRedshiftROP(plugin.HoudiniCreator):
 image_format_enum = [
 "exr", "tif", "jpg", "png",
 ]
+multi_layered_mode = [
+"No Multi-Layered EXR File",
+"Full Multi-Layered EXR File"
+]
+

 return attrs + [
 BoolDef("farm",

@@ -122,5 +139,9 @@ class CreateRedshiftROP(plugin.HoudiniCreator):
 EnumDef("image_format",
 image_format_enum,
 default=self.ext,
-label="Image Format Options")
+label="Image Format Options"),
+EnumDef("multi_layered_mode",
+multi_layered_mode,
+default=self.multi_layered_mode,
+label="Multi-Layered EXR")
 ]
@@ -76,8 +76,8 @@ class SetFrameRangeWithHandlesLoader(load.LoaderPlugin):
 return

 # Include handles
-start -= version_data.get("handleStart", 0)
-end += version_data.get("handleEnd", 0)
+start -= version_attributes.get("handleStart", 0)
+end += version_attributes.get("handleEnd", 0)

 hou.playbar.setFrameRange(start, end)
 hou.playbar.setPlaybackRange(start, end)

@@ -60,15 +60,22 @@ class CollectRedshiftROPRenderProducts(pyblish.api.InstancePlugin):
 instance.data["ifdFile"] = beauty_export_product
 instance.data["exportFiles"] = list(export_products)

-# Default beauty AOV
+full_exr_mode = (rop.evalParm("RS_outputMultilayerMode") == "2")
+if full_exr_mode:
+# Ignore beauty suffix if full mode is enabled
+# As this is what the rop does.
+beauty_suffix = ""
+
+# Default beauty/main layer AOV
 beauty_product = self.get_render_product_name(
 prefix=default_prefix, suffix=beauty_suffix
 )
 render_products = [beauty_product]
 files_by_aov = {
-"_": self.generate_expected_files(instance,
-beauty_product)}
+beauty_suffix: self.generate_expected_files(instance,
+beauty_product)
+}

 aovs_rop = rop.parm("RS_aovGetFromNode").evalAsNode()
 if aovs_rop:
 rop = aovs_rop

@@ -89,11 +96,14 @@ class CollectRedshiftROPRenderProducts(pyblish.api.InstancePlugin):
 if not aov_prefix:
 aov_prefix = default_prefix

-aov_product = self.get_render_product_name(aov_prefix, aov_suffix)
-render_products.append(aov_product)
+if rop.parm(f"RS_aovID_{i}").evalAsString() == "CRYPTOMATTE" or \
+not full_exr_mode:
+
+aov_product = self.get_render_product_name(aov_prefix, aov_suffix)
+render_products.append(aov_product)

-files_by_aov[aov_suffix] = self.generate_expected_files(instance,
-aov_product) # noqa
+files_by_aov[aov_suffix] = self.generate_expected_files(instance,
+aov_product) # noqa

 for product in render_products:
 self.log.debug("Found render product: %s" % product)

@@ -121,7 +131,7 @@ class CollectRedshiftROPRenderProducts(pyblish.api.InstancePlugin):

 # When AOV is explicitly defined in prefix we just swap it out
 # directly with the AOV suffix to embed it.
-# Note: ${AOV} seems to be evaluated in the parameter as %AOV%
+# Note: '$AOV' seems to be evaluated in the parameter as '%AOV%'
 has_aov_in_prefix = "%AOV%" in prefix
 if has_aov_in_prefix:
 # It seems that when some special separator characters are present
@@ -131,7 +131,7 @@ def get_main_window():
 def suspended_refresh(suspend=True):
 """Suspend viewport refreshes

-cmds.ogs(pause=True) is a toggle so we cant pass False.
+cmds.ogs(pause=True) is a toggle so we can't pass False.
 """
 if IS_HEADLESS:
 yield

@@ -583,7 +583,7 @@ def pairwise(iterable):


 def collect_animation_defs(fps=False):
-"""Get the basic animation attribute defintions for the publisher.
+"""Get the basic animation attribute definitions for the publisher.

 Returns:
 OrderedDict

@@ -3834,7 +3834,7 @@ def get_color_management_output_transform():

 def image_info(file_path):
 # type: (str) -> dict
-"""Based on tha texture path, get its bit depth and format information.
+"""Based on the texture path, get its bit depth and format information.
 Take reference from makeTx.py in Arnold:
 ImageInfo(filename): Get Image Information for colorspace
 AiTextureGetFormat(filename): Get Texture Format

@@ -83,7 +83,7 @@ class MayaLegacyConvertor(ProductConvertorPlugin,
 ).format(product_type))
 continue

-creator_id = product_type_to_id[family]
+creator_id = product_type_to_id[product_type]
 creator = self.create_context.creators[creator_id]
 data["creator_identifier"] = creator_id


@@ -20,13 +20,6 @@ class CreateUnrealSkeletalMesh(plugin.MayaCreator):
 # Defined in settings
 joint_hints = set()

-def apply_settings(self, project_settings):
-"""Apply project settings to creator"""
-settings = (
-project_settings["maya"]["create"]["CreateUnrealSkeletalMesh"]
-)
-self.joint_hints = set(settings.get("joint_hints", []))
-
 def get_dynamic_data(
 self,
 project_name,

@@ -15,11 +15,6 @@ class CreateUnrealStaticMesh(plugin.MayaCreator):
 # Defined in settings
 collision_prefixes = []

-def apply_settings(self, project_settings):
-"""Apply project settings to creator"""
-settings = project_settings["maya"]["create"]["CreateUnrealStaticMesh"]
-self.collision_prefixes = settings["collision_prefixes"]
-
 def get_dynamic_data(
 self,
 project_name,
@@ -5,7 +5,7 @@ from ayon_core.hosts.maya.api import (
 from ayon_core.lib import NumberDef


-class CreateYetiCache(plugin.MayaCreator):
+class CreateUnrealYetiCache(plugin.MayaCreator):
 """Output for procedural plugin nodes of Yeti """

 identifier = "io.openpype.creators.maya.unrealyeticache"

@@ -314,7 +314,7 @@ class CollectMayaRender(pyblish.api.InstancePlugin):
 if not extend_frames:
 instance.data["overrideExistingFrame"] = False

-# Update the instace
+# Update the instance
 instance.data.update(data)

 @staticmethod

@@ -36,18 +36,18 @@ class ValidateSubsetName(pyblish.api.InstancePlugin):
 )

 if not isinstance(product_name, six.string_types):
-raise TypeError((
+raise PublishValidationError((
 "Instance product name must be string, got: {0} ({1})"
 ).format(product_name, type(product_name)))

 # Ensure is not empty product
 if not product_name:
-raise ValueError(
+raise PublishValidationError(
 "Instance product name is empty: {0}".format(product_name)
 )

 # Validate product characters
 if not validate_name(product_name):
-raise ValueError((
+raise PublishValidationError((
 "Instance product name contains invalid characters: {0}"
 ).format(product_name))

@@ -51,5 +51,5 @@ class ValidateMeshEmpty(pyblish.api.InstancePlugin):
 invalid = self.get_invalid(instance)
 if invalid:
 raise PublishValidationError(
-"Meshes found in instance without any vertices: %s" % invalid
+"Meshes found without any vertices: %s" % invalid
 )
@@ -2,7 +2,11 @@ from maya import cmds

 import pyblish.api
 import ayon_core.hosts.maya.api.action
-from ayon_core.pipeline.publish import ValidateMeshOrder, OptionalPyblishPluginMixin
+from ayon_core.pipeline.publish import (
+ValidateMeshOrder,
+OptionalPyblishPluginMixin,
+PublishValidationError
+)


 class ValidateMeshLaminaFaces(pyblish.api.InstancePlugin,

@@ -20,6 +24,16 @@ class ValidateMeshLaminaFaces(pyblish.api.InstancePlugin,
 actions = [ayon_core.hosts.maya.api.action.SelectInvalidAction]
 optional = True

+description = (
+"## Meshes with Lamina Faces\n"
+"Detected meshes with lamina faces. <b>Lamina faces</b> are faces "
+"that share all of their edges and thus are merged together on top of "
+"each other.\n\n"
+"### How to repair?\n"
+"You can repair them by using Maya's modeling tool `Mesh > Cleanup..` "
+"and select to cleanup matching polygons for lamina faces."
+)
+
 @staticmethod
 def get_invalid(instance):
 meshes = cmds.ls(instance, type='mesh', long=True)

@@ -36,5 +50,6 @@ class ValidateMeshLaminaFaces(pyblish.api.InstancePlugin,
 invalid = self.get_invalid(instance)

 if invalid:
-raise ValueError("Meshes found with lamina faces: "
-"{0}".format(invalid))
+raise PublishValidationError(
+"Meshes found with lamina faces: {0}".format(invalid),
+description=self.description)
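The Maya validator hunks above and below all apply the same conversion: bare ValueError / assert failures become PublishValidationError, optionally with a rich description so the publisher UI can show a repair hint. The following is a minimal hypothetical validator sketching that pattern; it is not one of the plugins touched by this commit.

import pyblish.api
from ayon_core.pipeline.publish import PublishValidationError


class ValidateExample(pyblish.api.InstancePlugin):
    """Example validator that reports failures as PublishValidationError."""

    order = pyblish.api.ValidatorOrder
    label = "Validate Example"

    description = (
        "## Example check\n"
        "Explain here what failed and how the artist can repair it."
    )

    def process(self, instance):
        invalid = []  # a real plugin collects offending nodes here
        if invalid:
            raise PublishValidationError(
                "Invalid nodes found: {0}".format(invalid),
                description=self.description,
            )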
@@ -5,7 +5,8 @@ import ayon_core.hosts.maya.api.action
 from ayon_core.hosts.maya.api import lib
 from ayon_core.pipeline.publish import (
 ValidateContentsOrder,
-OptionalPyblishPluginMixin
+OptionalPyblishPluginMixin,
+PublishValidationError
 )


@@ -27,6 +28,15 @@ class ValidateMeshNgons(pyblish.api.Validator,
 actions = [ayon_core.hosts.maya.api.action.SelectInvalidAction]
 optional = True

+description = (
+"## Meshes with NGONs Faces\n"
+"Detected meshes with NGON faces. **NGONS** are faces that "
+"with more than four sides.\n\n"
+"### How to repair?\n"
+"You can repair them by usings Maya's modeling tool Mesh > Cleanup.. "
+"and select to cleanup matching polygons for lamina faces."
+)
+
 @staticmethod
 def get_invalid(instance):


@@ -49,5 +59,6 @@ class ValidateMeshNgons(pyblish.api.Validator,

 invalid = self.get_invalid(instance)
 if invalid:
-raise ValueError("Meshes found with n-gons"
-"values: {0}".format(invalid))
+raise PublishValidationError(
+"Meshes found with n-gons: {0}".format(invalid),
+description=self.description)

@@ -107,8 +107,9 @@ class ValidateMeshShaderConnections(pyblish.api.InstancePlugin,
 invalid = self.get_invalid(instance)

 if invalid:
-raise PublishValidationError("Shapes found with invalid shader "
-"connections: {0}".format(invalid))
+raise PublishValidationError(
+"Shapes found with invalid shader connections: "
+"{0}".format(invalid))

 @staticmethod
 def get_invalid(instance):
@@ -6,7 +6,8 @@ from ayon_core.hosts.maya.api import lib
 from ayon_core.pipeline.publish import (
 RepairAction,
 ValidateMeshOrder,
-OptionalPyblishPluginMixin
+OptionalPyblishPluginMixin,
+PublishValidationError
 )


@@ -66,7 +67,7 @@ class ValidateMeshSingleUVSet(pyblish.api.InstancePlugin,
 if allowed:
 self.log.warning(message)
 else:
-raise ValueError(message)
+raise PublishValidationError(message)

 @classmethod
 def repair(cls, instance):

@@ -5,7 +5,8 @@ import ayon_core.hosts.maya.api.action
 from ayon_core.pipeline.publish import (
 RepairAction,
 ValidateMeshOrder,
-OptionalPyblishPluginMixin
+OptionalPyblishPluginMixin,
+PublishValidationError
 )


@@ -55,8 +56,8 @@ class ValidateMeshUVSetMap1(pyblish.api.InstancePlugin,

 invalid = self.get_invalid(instance)
 if invalid:
-raise ValueError("Meshes found without 'map1' "
-"UV set: {0}".format(invalid))
+raise PublishValidationError(
+"Meshes found without 'map1' UV set: {0}".format(invalid))

 @classmethod
 def repair(cls, instance):

@@ -5,10 +5,12 @@ import pyblish.api
 import ayon_core.hosts.maya.api.action
 from ayon_core.pipeline.publish import (
 ValidateContentsOrder,
-OptionalPyblishPluginMixin
-
+OptionalPyblishPluginMixin,
+PublishValidationError
 )
-class ValidateNodeNoGhosting(pyblish.api.InstancePlugin.
+
+
+class ValidateNodeNoGhosting(pyblish.api.InstancePlugin,
 OptionalPyblishPluginMixin):
 """Ensure nodes do not have ghosting enabled.


@@ -55,5 +57,5 @@ class ValidateNodeNoGhosting(pyblish.api.InstancePlugin.
 invalid = self.get_invalid(instance)

 if invalid:
-raise ValueError("Nodes with ghosting enabled found: "
-"{0}".format(invalid))
+raise PublishValidationError(
+"Nodes with ghosting enabled found: {0}".format(invalid))
@@ -1,5 +1,8 @@
 import pyblish.api
-from ayon_core.pipeline.publish import ValidateContentsOrder
+from ayon_core.pipeline.publish import (
+ValidateContentsOrder,
+PublishValidationError
+)


 class ValidateSetdressRoot(pyblish.api.InstancePlugin):

@@ -20,4 +23,6 @@ class ValidateSetdressRoot(pyblish.api.InstancePlugin):
 root = cmds.ls(set_member, assemblies=True, long=True)

 if not root or root[0] not in set_member:
-raise Exception("Setdress top root node is not being published.")
+raise PublishValidationError(
+"Setdress top root node is not being published."
+)

@@ -8,7 +8,8 @@ import ayon_core.hosts.maya.api.action
 from ayon_core.pipeline.publish import (
 ValidateContentsOrder,
 RepairAction,
-OptionalPyblishPluginMixin
+OptionalPyblishPluginMixin,
+PublishValidationError
 )


@@ -84,8 +85,8 @@ class ValidateShapeDefaultNames(pyblish.api.InstancePlugin,

 invalid = self.get_invalid(instance)
 if invalid:
-raise ValueError("Incorrectly named shapes "
-"found: {0}".format(invalid))
+raise PublishValidationError(
+"Incorrectly named shapes found: {0}".format(invalid))

 @classmethod
 def repair(cls, instance):

@@ -1,5 +1,8 @@
 import pyblish.api
-from ayon_core.pipeline.publish import ValidateContentsOrder
+from ayon_core.pipeline.publish import (
+ValidateContentsOrder,
+PublishValidationError
+)


 class ValidateSingleAssembly(pyblish.api.InstancePlugin):

@@ -30,7 +33,11 @@ class ValidateSingleAssembly(pyblish.api.InstancePlugin):
 # ensure unique (somehow `maya.cmds.ls` doesn't manage that)
 assemblies = set(assemblies)

-assert len(assemblies) > 0, (
-"One assembly required for: %s (currently empty?)" % instance)
-assert len(assemblies) < 2, (
-'Multiple assemblies found: %s' % assemblies)
+if len(assemblies) == 0:
+raise PublishValidationError(
+"One assembly required for: %s (currently empty?)" % instance
+)
+elif len(assemblies) > 1:
+raise PublishValidationError(
+'Multiple assemblies found: %s' % assemblies
+)
@@ -3,7 +3,11 @@ from maya import cmds
 import pyblish.api

 import ayon_core.hosts.maya.api.action
-from ayon_core.pipeline.publish import ValidateContentsOrder,OptionalPyblishPluginMixin
+from ayon_core.pipeline.publish import (
+ValidateContentsOrder,
+OptionalPyblishPluginMixin,
+PublishValidationError
+)


 class ValidateSkinclusterDeformerSet(pyblish.api.InstancePlugin,

@@ -30,8 +34,10 @@ class ValidateSkinclusterDeformerSet(pyblish.api.InstancePlugin,
 invalid = self.get_invalid(instance)

 if invalid:
-raise ValueError("Invalid skinCluster relationships "
-"found on meshes: {0}".format(invalid))
+raise PublishValidationError(
+"Invalid skinCluster relationships found on meshes: {0}"
+.format(invalid)
+)

 @classmethod
 def get_invalid(cls, instance):

@@ -4,9 +4,11 @@ import pyblish.api
 import ayon_core.hosts.maya.api.action
 from ayon_core.pipeline.publish import (
 ValidateContentsOrder,
-OptionalPyblishPluginMixin
+OptionalPyblishPluginMixin,
+PublishValidationError
 )

+
 class ValidateUniqueNames(pyblish.api.Validator,
 OptionalPyblishPluginMixin):
 """transform names should be unique

@@ -40,5 +42,5 @@ class ValidateUniqueNames(pyblish.api.Validator,
 return
 invalid = self.get_invalid(instance)
 if invalid:
-raise ValueError("Nodes found with none unique names. "
-"values: {0}".format(invalid))
+raise PublishValidationError(
+"Nodes found with non-unique names:\n{0}".format(invalid))
@@ -5,7 +5,8 @@ import pyblish.api

 from ayon_core.pipeline.publish import (
 ValidateMeshOrder,
-OptionalPyblishPluginMixin
+OptionalPyblishPluginMixin,
+PublishValidationError
 )
 import ayon_core.hosts.maya.api.action


@@ -26,8 +27,8 @@ class ValidateUnrealMeshTriangulated(pyblish.api.InstancePlugin,
 invalid = []
 meshes = cmds.ls(instance, type="mesh", long=True)
 for mesh in meshes:
-faces = cmds.polyEvaluate(mesh, f=True)
-tris = cmds.polyEvaluate(mesh, t=True)
+faces = cmds.polyEvaluate(mesh, face=True)
+tris = cmds.polyEvaluate(mesh, triangle=True)
 if faces != tris:
 invalid.append(mesh)


@@ -37,5 +38,5 @@ class ValidateUnrealMeshTriangulated(pyblish.api.InstancePlugin,
 if not self.is_active(instance.data):
 return
 invalid = self.get_invalid(instance)
-assert len(invalid) == 0, (
-"Found meshes without triangles")
+if invalid:
+raise PublishValidationError("Found meshes without triangles")

@@ -6,7 +6,8 @@ import pyblish.api
 from ayon_core.pipeline.publish import (
 ValidateContentsOrder,
 RepairAction,
-OptionalPyblishPluginMixin
+OptionalPyblishPluginMixin,
+PublishValidationError
 )


@@ -26,9 +27,10 @@ class ValidateUnrealUpAxis(pyblish.api.ContextPlugin,
 if not self.is_active(context.data):
 return

-assert cmds.upAxis(q=True, axis=True) == "z", (
-"Invalid axis set as up axis"
-)
+if cmds.upAxis(q=True, axis=True) != "z":
+raise PublishValidationError(
+"Invalid axis set as up axis"
+)

 @classmethod
 def repair(cls, instance):
@@ -34,8 +34,9 @@ class ValidateAlembicVisibleOnly(pyblish.api.InstancePlugin,
 invalid = self.get_invalid(instance)
 if invalid:
 start, end = self.get_frame_range(instance)
-raise PublishValidationError("No visible nodes found in "
-"frame range {}-{}.".format(start, end))
+raise PublishValidationError(
+f"No visible nodes found in frame range {start}-{end}."
+)

 @classmethod
 def get_invalid(cls, instance):

@@ -3,6 +3,7 @@ from maya import cmds

 from ayon_core.hosts.maya.api import lib
 from ayon_core.pipeline.publish import (
+KnownPublishError,
 PublishValidationError,
 RepairAction,
 ValidateContentsOrder,

@@ -35,11 +36,14 @@ class ValidateVRayDistributedRendering(pyblish.api.InstancePlugin,
 if not self.is_active(instance.data):
 return
 if instance.data.get("renderer") != "vray":
-# If not V-Ray ignore..
+# If not V-Ray, ignore
 return

 vray_settings = cmds.ls("vraySettings", type="VRaySettingsNode")
-assert vray_settings, "Please ensure a VRay Settings Node is present"
+if not vray_settings:
+raise KnownPublishError(
+"Please ensure a VRay Settings Node is present"
+)

 renderlayer = instance.data['renderlayer']


@@ -51,8 +55,8 @@ class ValidateVRayDistributedRendering(pyblish.api.InstancePlugin,
 # during batch mode we invalidate the instance
 if not lib.get_attr_in_layer(self.ignored_attr, layer=renderlayer):
 raise PublishValidationError(
-("Renderlayer has distributed rendering enabled "
-"but is not set to ignore in batch mode."))
+"Renderlayer has distributed rendering enabled "
+"but is not set to ignore in batch mode.")

 @classmethod
 def repair(cls, instance):
@@ -6,9 +6,11 @@ from maya import cmds

 from ayon_core.pipeline.publish import (
 RepairContextAction,
-OptionalPyblishPluginMixin
+OptionalPyblishPluginMixin,
+PublishValidationError
 )

+
 class ValidateVrayReferencedAOVs(pyblish.api.InstancePlugin,
 OptionalPyblishPluginMixin):
 """Validate whether the V-Ray Render Elements (AOVs) include references.

@@ -60,7 +62,7 @@ class ValidateVrayReferencedAOVs(pyblish.api.InstancePlugin,
 self.log.error((
 "'Use referenced' not enabled in Vray Render Settings."
 ))
-raise AssertionError("Invalid render settings")
+raise PublishValidationError("Invalid render settings")

 @classmethod
 def repair(cls, context):

@@ -1,7 +1,10 @@
 import pyblish.api

-from ayon_core.pipeline import KnownPublishError
-from ayon_core.pipeline.publish import OptionalPyblishPluginMixin
+from ayon_core.pipeline.publish import (
+OptionalPyblishPluginMixin,
+PublishValidationError
+)
+

 class ValidateVrayProxy(pyblish.api.InstancePlugin,
 OptionalPyblishPluginMixin):

@@ -17,18 +20,18 @@ class ValidateVrayProxy(pyblish.api.InstancePlugin,
 if not self.is_active(data):
 return
 if not data["setMembers"]:
-raise KnownPublishError(
-"'%s' is empty! This is a bug" % instance.name
+raise PublishValidationError(
+f"Instance '{instance.name}' is empty."
 )

 if data["animation"]:
 if data["frameEnd"] < data["frameStart"]:
-raise KnownPublishError(
+raise PublishValidationError(
 "End frame is smaller than start frame"
 )

 if not data["vrmesh"] and not data["alembic"]:
-raise KnownPublishError(
+raise PublishValidationError(
 "Both vrmesh and alembic are off. Needs at least one to"
 " publish."
 )
@@ -34,7 +34,7 @@ class ValidateXgen(pyblish.api.InstancePlugin):
 " Node type found: {}".format(node_type)
 )

-# Cant have inactive modifiers in collection cause Xgen will try and
+# Can't have inactive modifiers in collection cause Xgen will try and
 # look for them when loading.
 palette = instance.data["xgmPalette"].replace("|", "")
 inactive_modifiers = {}

@@ -3,9 +3,11 @@ from maya import cmds
 import pyblish.api
 from ayon_core.pipeline.publish import (
 ValidateContentsOrder,
+PublishValidationError,
 OptionalPyblishPluginMixin
 )

+
 class ValidateYetiRenderScriptCallbacks(pyblish.api.InstancePlugin,
 OptionalPyblishPluginMixin):
 """Check if the render script callbacks will be used during the rendering

@@ -45,8 +47,8 @@ class ValidateYetiRenderScriptCallbacks(pyblish.api.InstancePlugin,
 return
 invalid = self.get_invalid(instance)
 if invalid:
-raise ValueError("Invalid render callbacks found for '%s'!"
-% instance.name)
+raise PublishValidationError(
+f"Invalid render callbacks found for '{instance.name}'.")

 @classmethod
 def get_invalid(cls, instance):
@@ -1,3 +1,5 @@
+import inspect
+
 import pyblish.api
 import maya.cmds as cmds
 import ayon_core.hosts.maya.api.action

@@ -8,7 +10,6 @@ from ayon_core.pipeline.publish import (
 )


-
 class ValidateYetiRigCacheState(pyblish.api.InstancePlugin,
 OptionalPyblishPluginMixin):
 """Validate the I/O attributes of the node

@@ -32,7 +33,10 @@ class ValidateYetiRigCacheState(pyblish.api.InstancePlugin,
 return
 invalid = self.get_invalid(instance)
 if invalid:
-raise PublishValidationError("Nodes have incorrect I/O settings")
+raise PublishValidationError(
+"Nodes have incorrect I/O settings",
+description=inspect.getdoc(self)
+)

 @classmethod
 def get_invalid(cls, instance):
@@ -814,7 +814,7 @@ def on_script_load():

 def check_inventory_versions():
 """
-Actual version idetifier of Loaded containers
+Actual version identifier of Loaded containers

 Any time this function is run it will check all nodes and filter only
 Loader nodes for its version. It will get all versions from database

@@ -921,7 +921,7 @@ def writes_version_sync():

 for each in nuke.allNodes(filter="Write"):
 # check if the node is avalon tracked
-if _NODE_TAB_NAME not in each.knobs():
+if NODE_TAB_NAME not in each.knobs():
 continue

 avalon_knob_data = read_avalon_data(each)

@@ -2381,7 +2381,7 @@ def launch_workfiles_app():

 Context.workfiles_launched = True

-# get all imortant settings
+# get all important settings
 open_at_start = env_value_to_bool(
 env_key="AYON_WORKFILE_TOOL_ON_START",
 default=None)

@@ -910,7 +910,7 @@ class ExporterReviewMov(ExporterReview):
 self._connect_to_above_nodes(
 node, product_name, "Reposition node... `{}`"
 )
-# append reformated tag
+# append reformatted tag
 add_tags.append("reformated")

 # only create colorspace baking if toggled on

@@ -1114,7 +1114,7 @@ def convert_to_valid_instaces():
 transfer_data["active"] = (
 node["publish"].value())

-# add idetifier
+# add identifier
 transfer_data["creator_identifier"] = product_type_to_identifier(
 product_type
 )
@@ -19,7 +19,7 @@ class ImageFromSequenceLoader(photoshop.PhotoshopLoader):
 This loader will be triggered multiple times, but selected name will
 match only to proper path.

-Loader doesnt do containerization as there is currently no data model
+Loader doesn't do containerization as there is currently no data model
 of 'frame of rendered files' (only rendered sequence), update would be
 difficult.
 """

@@ -925,7 +925,7 @@ def get_reformated_path(path, padded=False, first=False):
 path (str): path url or simple file name

 Returns:
-type: string with reformated path
+type: string with reformatted path

 Example:
 get_reformated_path("plate.[0001-1008].exr") > plate.%04d.exr

@@ -25,7 +25,7 @@ def get_reformated_path(path, padded=True, first=False):
 path (str): path url or simple file name

 Returns:
-type: string with reformated path
+type: string with reformatted path

 Example:
 get_reformated_path("plate.[0001-1008].exr") > plate.%04d.exr

@@ -166,7 +166,7 @@ class CreateShotClip(plugin.Creator):
 "type": "QCheckBox",
 "label": "Source resolution",
 "target": "tag",
-"toolTip": "Is resloution taken from timeline or source?", # noqa
+"toolTip": "Is resolution taken from timeline or source?", # noqa
 "order": 4},
 }
 },

@@ -207,7 +207,7 @@ class CreateShotClip(plugin.Creator):
 presets = None

 def process(self):
-# get key pares from presets and match it on ui inputs
+# get key pairs from presets and match it on ui inputs
 for k, v in self.gui_inputs.items():
 if v["type"] in ("dict", "section"):
 # nested dictionary (only one level allowed
@@ -20,7 +20,7 @@ class ValidateFrameRange(OptionalPyblishPluginMixin,

 optional = True
 # published data might be sequence (.mov, .mp4) in that counting files
-# doesnt make sense
+# doesn't make sense
 check_extensions = ["exr", "dpx", "jpg", "jpeg", "png", "tiff", "tga",
 "gif", "svg"]
 skip_timelines_check = [] # skip for specific task names (regex)

@@ -125,7 +125,7 @@ class WindowCache:

 @classmethod
 def _before_show(cls):
-"""Create QApplication if does not exists yet."""
+"""Create QApplication if does not exist yet."""
 if not cls._first_show:
 return


@@ -524,7 +524,7 @@ def get_ayon_appdirs(*args):
 def get_local_site_id():
 """Get local site identifier.

-Identifier is created if does not exists yet.
+Identifier is created if does not exist yet.
 """
 # used for background syncing
 site_id = os.environ.get("AYON_SITE_ID")

@@ -102,7 +102,7 @@ class StringTemplate(object):
 """ Figure out with whole formatting.

 Separate advanced keys (*Like '{project[name]}') from string which must
-be formatted separatelly in case of missing or incomplete keys in data.
+be formatted separately in case of missing or incomplete keys in data.

 Args:
 data (dict): Containing keys to be filled into template.
@@ -198,7 +198,7 @@ class CreatePublishRoyalRenderJob(pyblish.api.InstancePlugin,

 priority = self.priority or instance.data.get("priority", 50)

-# rr requires absolut path or all jobs won't show up in rControl
+# rr requires absolute path or all jobs won't show up in rrControl
 abs_metadata_path = self.anatomy.fill_root(rootless_metadata_path)

 # command line set in E01__OpenPype__PublishJob.cfg, here only

@@ -529,7 +529,7 @@ class AttributeValues(object):
 Has dictionary like methods. Not all of them are allowed all the time.

 Args:
-attr_defs(AbstractAttrDef): Defintions of value type and properties.
+attr_defs(AbstractAttrDef): Definitions of value type and properties.
 values(dict): Values after possible conversion.
 origin_data(dict): Values loaded from host before conversion.
 """

@@ -347,7 +347,7 @@ class BaseCreator:

 Returns:
 str: Group label that can be used for grouping of instances in UI.
-Group label can be overriden by instance itself.
+Group label can be overridden by instance itself.
 """

 if self._cached_group_label is None:

@@ -607,18 +607,19 @@ class Creator(BaseCreator):
 """

 # GUI Purposes
-# - default_variants may not be used if `get_default_variants` is overriden
+# - default_variants may not be used if `get_default_variants`
+# is overridden
 default_variants = []

 # Default variant used in 'get_default_variant'
 _default_variant = None

 # Short description of product type
-# - may not be used if `get_description` is overriden
+# - may not be used if `get_description` is overridden
 description = None

 # Detailed description of product type for artists
-# - may not be used if `get_detail_description` is overriden
+# - may not be used if `get_detail_description` is overridden
 detailed_description = None

 # It does make sense to change context on creation
@@ -64,7 +64,7 @@ def convert_to_padded_path(path, padding):
 padding (int): number of padding

 Returns:
-type: string with reformated path
+type: string with reformatted path

 Example:
 convert_to_padded_path("plate.%d.exr") > plate.%04d.exr

@@ -116,7 +116,7 @@ class LoaderPlugin(list):
 def is_compatible_loader(cls, context):
 """Return whether a loader is compatible with a context.

-On override make sure it is overriden as class or static method.
+On override make sure it is overridden as class or static method.

 This checks the product type and the representation for the given
 loader plugin.

@@ -1865,7 +1865,7 @@ class PlaceholderCreateMixin(object):
 self.log.debug("Clean up of placeholder is not implemented.")

 def _before_instance_create(self, placeholder):
-"""Can be overriden. Is called before instance is created."""
+"""Can be overridden. Is called before instance is created."""

 pass


@@ -80,7 +80,7 @@ class ExtractOtioAudioTracks(pyblish.api.ContextPlugin):
 # create duration
 duration = (timeline_out_h - timeline_in_h) + 1

-# ffmpeg generate new file only if doesnt exists already
+# ffmpeg generate new file only if doesn't exists already
 if not recycling_file:
 # convert to seconds
 start_sec = float(timeline_in_h / fps)
@@ -112,7 +112,7 @@ class ThumbnailsCache:
 """

 thumbnails_dir = self.get_thumbnails_dir()
-# Skip if thumbnails dir does not exists yet
+# Skip if thumbnails dir does not exist yet
 if not os.path.exists(thumbnails_dir):
 return


@@ -39,7 +39,7 @@ def defer(delay, func):

 This aids in keeping the GUI responsive, but complicates logic
 when producing tests. To combat this, the environment variable ensures
-that every operation is synchonous.
+that every operation is synchronous.

 Arguments:
 delay (float): Delay multiplier; default 1, 0 means no delay

@@ -424,7 +424,7 @@ class ExtractReviewOutputDefModel(BaseSettingsModel):
 title="Scale pixel aspect",
 description=(
 "Rescale input when it's pixel aspect ratio is not 1."
-" Usefull for anamorph reviews."
+" Useful for anamorphic reviews."
 )
 )
 bg_color: ColorRGBA_uint8 = SettingsField(