Mirror of https://github.com/ynput/ayon-core.git
Merge branch 'develop' into feature/houdini_cleanup_after_publishing

Commit f5964559e6
6 changed files with 112 additions and 82 deletions
@@ -87,7 +87,9 @@ class IntegrateHeroVersion(
     ]
     # QUESTION/TODO this process should happen on server if crashed due to
     # permissions error on files (files were used or user didn't have perms)
-    # *but all other plugins must be sucessfully completed
+    # *but all other plugins must be successfully completed

+    use_hardlinks = False
+
     def process(self, instance):
         if not self.is_active(instance.data):
@@ -617,24 +619,32 @@ class IntegrateHeroVersion(

             self.log.debug("Folder already exists: \"{}\"".format(dirname))

+        if self.use_hardlinks:
+            # First try hardlink and copy if paths are cross drive
+            self.log.debug("Hardlinking file \"{}\" to \"{}\"".format(
+                src_path, dst_path
+            ))
+            try:
+                create_hard_link(src_path, dst_path)
+                # Return when successful
+                return
+
+            except OSError as exc:
+                # re-raise exception if different than
+                # EXDEV - cross drive path
+                # EINVAL - wrong format, must be NTFS
+                self.log.debug(
+                    "Hardlink failed with errno:'{}'".format(exc.errno))
+                if exc.errno not in [errno.EXDEV, errno.EINVAL]:
+                    raise
+
+            self.log.debug(
+                "Hardlinking failed, falling back to regular copy...")
+
         self.log.debug("Copying file \"{}\" to \"{}\"".format(
             src_path, dst_path
         ))

-        # First try hardlink and copy if paths are cross drive
-        try:
-            create_hard_link(src_path, dst_path)
-            # Return when successful
-            return
-
-        except OSError as exc:
-            # re-raise exception if different than
-            # EXDEV - cross drive path
-            # EINVAL - wrong format, must be NTFS
-            self.log.debug("Hardlink failed with errno:'{}'".format(exc.errno))
-            if exc.errno not in [errno.EXDEV, errno.EINVAL]:
-                raise
-
         shutil.copy(src_path, dst_path)

     def version_from_representations(self, project_name, repres):
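For context, the copy logic above boils down to a simple "hardlink first, copy on failure" pattern. The following is a minimal standalone sketch of that pattern, not the plugin itself; os.link stands in for AYON's create_hard_link helper and the function name and paths are hypothetical:

import errno
import os
import shutil


def link_or_copy(src_path, dst_path, use_hardlinks=True):
    """Try to hardlink dst_path to src_path, fall back to a plain copy."""
    if use_hardlinks:
        try:
            os.link(src_path, dst_path)
            return
        except OSError as exc:
            # EXDEV (cross-drive) and EINVAL (filesystem without hardlink
            # support) mean "fall back to copy"; anything else is re-raised.
            if exc.errno not in (errno.EXDEV, errno.EINVAL):
                raise
    shutil.copy(src_path, dst_path)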
@@ -743,6 +743,14 @@ class IntegrateHeroVersionModel(BaseSettingsModel):
     optional: bool = SettingsField(False, title="Optional")
     active: bool = SettingsField(True, title="Active")
     families: list[str] = SettingsField(default_factory=list, title="Families")
+    use_hardlinks: bool = SettingsField(
+        False, title="Use Hardlinks",
+        description="When enabled first try to make a hardlink of the version "
+                    "instead of a copy. This helps reduce disk usage, but may "
+                    "create issues.\nFor example there are known issues on "
+                    "Windows being unable to delete any of the hardlinks if "
+                    "any of the links is in use creating issues with updating "
+                    "hero versions.")


 class CleanUpModel(BaseSettingsModel):
@@ -1136,7 +1144,8 @@ DEFAULT_PUBLISH_VALUES = {
             "layout",
             "mayaScene",
             "simpleUnrealTexture"
-        ]
+        ],
+        "use_hardlinks": False
     },
     "CleanUp": {
         "paterns": [],
@@ -153,8 +153,8 @@ class FusionSubmitDeadlineModel(BaseSettingsModel):
     )
     group: str = SettingsField("", title="Group Name")
     plugin: str = SettingsField("Fusion",
-                                enum_resolver=fusion_deadline_plugin_enum,
-                                title="Deadline Plugin")
+        enum_resolver=fusion_deadline_plugin_enum,
+        title="Deadline Plugin")


 class NukeSubmitDeadlineModel(BaseSettingsModel):
@@ -375,11 +375,11 @@ class PublishPluginsModel(BaseSettingsModel):
         title="Nuke Submit to deadline")
     ProcessSubmittedCacheJobOnFarm: ProcessCacheJobFarmModel = SettingsField(
         default_factory=ProcessCacheJobFarmModel,
-        title="Process submitted cache Job on farm.",
-        section="Publish Jobs")
+        title="Process submitted cache Job on farm",
+        section="Publish Jobs")
     ProcessSubmittedJobOnFarm: ProcessSubmittedJobOnFarmModel = SettingsField(
         default_factory=ProcessSubmittedJobOnFarmModel,
-        title="Process submitted job on farm.")
+        title="Process submitted job on farm")


 DEFAULT_DEADLINE_PLUGINS_SETTINGS = {
@@ -1,10 +1,13 @@
 import tempfile
 import pyblish.api

+from ayon_core.pipeline import OptionalPyblishPluginMixin
 from ayon_houdini.api import lib, plugin
 from ayon_houdini.api.pipeline import IS_HEADLESS


-class ExtractActiveViewThumbnail(plugin.HoudiniExtractorPlugin):
+class ExtractActiveViewThumbnail(plugin.HoudiniExtractorPlugin,
+                                 OptionalPyblishPluginMixin):
     """Set instance thumbnail to a screengrab of current active viewport.

     This makes it so that if an instance does not have a thumbnail set yet that
@@ -17,6 +20,9 @@ class ExtractActiveViewThumbnail(plugin.HoudiniExtractorPlugin):
     families = ["workfile"]

     def process(self, instance):
+        if not self.is_active(instance.data):
+            return
+
         if IS_HEADLESS:
             self.log.debug(
                 "Skip extraction of active view thumbnail, due to being in"
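The same enable/disable pattern drives the new settings below: the plugin mixes in OptionalPyblishPluginMixin and exits early via is_active(). A minimal sketch of that shape, with a generic pyblish.api.InstancePlugin standing in for the Houdini extractor base class and a hypothetical class name and label:

import pyblish.api

from ayon_core.pipeline import OptionalPyblishPluginMixin


class ExampleOptionalExtractor(pyblish.api.InstancePlugin,
                               OptionalPyblishPluginMixin):
    """Hypothetical optional extractor mirroring the pattern above."""

    order = pyblish.api.ExtractorOrder
    label = "Example Optional Extractor"
    families = ["workfile"]
    optional = True

    def process(self, instance):
        # Skip entirely when the instance's toggle is disabled, mirroring
        # the enabled/optional/active fields exposed in the settings model.
        if not self.is_active(instance.data):
            return
        self.log.debug("Running optional extraction for %s", instance)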
@@ -31,6 +31,7 @@ class AOVFilterSubmodel(BaseSettingsModel):
         title="AOV regex"
     )


 class CollectLocalRenderInstancesModel(BaseSettingsModel):
+
     use_deadline_aov_filter: bool = SettingsField(
@@ -90,7 +91,7 @@ class ValidateWorkfilePathsModel(BaseSettingsModel):
     )


-class BasicValidateModel(BaseSettingsModel):
+class BasicEnabledStatesModel(BaseSettingsModel):
     enabled: bool = SettingsField(title="Enabled")
     optional: bool = SettingsField(title="Optional")
     active: bool = SettingsField(title="Active")
@@ -99,12 +100,12 @@ class BasicValidateModel(BaseSettingsModel):
 class PublishPluginsModel(BaseSettingsModel):
     CollectAssetHandles: CollectAssetHandlesModel = SettingsField(
         default_factory=CollectAssetHandlesModel,
-        title="Collect Asset Handles.",
+        title="Collect Asset Handles",
         section="Collectors"
     )
     CollectChunkSize: CollectChunkSizeModel = SettingsField(
         default_factory=CollectChunkSizeModel,
-        title="Collect Chunk Size."
+        title="Collect Chunk Size"
     )
     CollectFilesForCleaningUp:CollectFilesForCleaningUpModel = SettingsField(
         default_factory=BasicValidateModel,
@@ -112,27 +113,32 @@ class PublishPluginsModel(BaseSettingsModel):
     )
     CollectLocalRenderInstances: CollectLocalRenderInstancesModel = SettingsField(
         default_factory=CollectLocalRenderInstancesModel,
-        title="Collect Local Render Instances."
+        title="Collect Local Render Instances"
     )
-    ValidateInstanceInContextHoudini: BasicValidateModel = SettingsField(
-        default_factory=BasicValidateModel,
-        title="Validate Instance is in same Context.",
+    ValidateInstanceInContextHoudini: BasicEnabledStatesModel = SettingsField(
+        default_factory=BasicEnabledStatesModel,
+        title="Validate Instance is in same Context",
         section="Validators")
-    ValidateMeshIsStatic: BasicValidateModel = SettingsField(
-        default_factory=BasicValidateModel,
-        title="Validate Mesh is Static.")
-    ValidateReviewColorspace: BasicValidateModel = SettingsField(
-        default_factory=BasicValidateModel,
-        title="Validate Review Colorspace.")
-    ValidateSubsetName: BasicValidateModel = SettingsField(
-        default_factory=BasicValidateModel,
-        title="Validate Subset Name.")
-    ValidateUnrealStaticMeshName: BasicValidateModel = SettingsField(
-        default_factory=BasicValidateModel,
-        title="Validate Unreal Static Mesh Name.")
+    ValidateMeshIsStatic: BasicEnabledStatesModel = SettingsField(
+        default_factory=BasicEnabledStatesModel,
+        title="Validate Mesh is Static")
+    ValidateReviewColorspace: BasicEnabledStatesModel = SettingsField(
+        default_factory=BasicEnabledStatesModel,
+        title="Validate Review Colorspace")
+    ValidateSubsetName: BasicEnabledStatesModel = SettingsField(
+        default_factory=BasicEnabledStatesModel,
+        title="Validate Subset Name")
+    ValidateUnrealStaticMeshName: BasicEnabledStatesModel = SettingsField(
+        default_factory=BasicEnabledStatesModel,
+        title="Validate Unreal Static Mesh Name")
     ValidateWorkfilePaths: ValidateWorkfilePathsModel = SettingsField(
         default_factory=ValidateWorkfilePathsModel,
-        title="Validate workfile paths settings.")
+        title="Validate workfile paths settings")
+    ExtractActiveViewThumbnail: BasicEnabledStatesModel = SettingsField(
+        default_factory=BasicEnabledStatesModel,
+        title="Extract Active View Thumbnail",
+        section="Extractors"
+    )


 DEFAULT_HOUDINI_PUBLISH_SETTINGS = {
@@ -152,7 +158,7 @@ DEFAULT_HOUDINI_PUBLISH_SETTINGS = {
     },
     "CollectLocalRenderInstances": {
         "use_deadline_aov_filter": False,
-        "aov_filter" : {
+        "aov_filter": {
             "host_name": "houdini",
             "value": [
                 ".*([Bb]eauty).*"
@@ -195,5 +201,10 @@ DEFAULT_HOUDINI_PUBLISH_SETTINGS = {
             "$HIP",
             "$JOB"
         ]
-    }
+    },
+    "ExtractActiveViewThumbnail": {
+        "enabled": True,
+        "optional": False,
+        "active": True
+    }
 }
@@ -561,7 +561,7 @@ def read_avalon_data(node):
         node (nuke.Node): Nuke node object

     Returns:
-        list: A list of nuke.Knob object
+        Dict[str, nuke.Knob]: A dictionary of knob name to nuke.Knob objects

     """
     def compat_prefixed(knob_name):
@@ -613,7 +613,7 @@ def get_node_path(path, padding=4):
         path (str): The path to render to.

     Returns:
-        tuple: head, padding, tail (extension)
+        Tuple[str, int, str]: head, padding, tail (extension)

     Examples:
         >>> get_frame_path("test.exr")
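To illustrate the documented (head, padding, tail) return value, here is a hedged standalone sketch of that kind of frame-path split; split_frame_path is a hypothetical helper, not the library's get_node_path implementation:

import os
import re


def split_frame_path(path, padding=4):
    """Split "name.0001.exr" style paths into (head, padding, extension)."""
    filename, ext = os.path.splitext(path)
    match = re.search(r"\.(\d+)$", filename)
    if match:
        # A trailing frame number was found: its length defines the padding.
        padding = len(match.group(1))
        filename = filename[:match.start()]
    return filename, padding, ext


# split_frame_path("render.0001.exr") -> ("render", 4, ".exr")
# split_frame_path("test.exr")        -> ("test", 4, ".exr")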
@@ -655,8 +655,7 @@ def get_nuke_imageio_settings():


 def get_imageio_node_setting(node_class, plugin_name, product_name):
-    ''' Get preset data for dataflow (fileType, compression, bitDepth)
-    '''
+    """Get preset data for dataflow (fileType, compression, bitDepth)"""
     imageio_nodes = get_nuke_imageio_settings()["nodes"]
     required_nodes = imageio_nodes["required_nodes"]

@@ -686,8 +685,8 @@ def get_imageio_node_setting(node_class, plugin_name, product_name):
 def get_imageio_node_override_setting(
     node_class, plugin_name, product_name, knobs_settings
 ):
-    ''' Get imageio node overrides from settings
-    '''
+    """ Get imageio node overrides from settings
+    """
     imageio_nodes = get_nuke_imageio_settings()["nodes"]
     override_nodes = imageio_nodes["override_nodes"]

@@ -745,8 +744,7 @@ def get_imageio_node_override_setting(


 def get_imageio_input_colorspace(filename):
-    ''' Get input file colorspace based on regex in settings.
-    '''
+    """Get input file colorspace based on regex in settings."""
     imageio_regex_inputs = (
         get_nuke_imageio_settings()["regex_inputs"]["inputs"])

@@ -791,8 +789,7 @@ def get_view_process_node():


 def on_script_load():
-    ''' Callback for ffmpeg support
-    '''
+    """Callback for ffmpeg support"""
     if nuke.env["LINUX"]:
         nuke.tcl('load ffmpegReader')
         nuke.tcl('load ffmpegWriter')
@@ -815,7 +812,7 @@ def check_inventory_versions():
     # get all Loader nodes by avalon attribute metadata
     node_with_repre_id = []
     repre_ids = set()
-    # Find all containers and collect it's node and representation ids
+    # Find all containers and collect its node and representation ids
     for node in nuke.allNodes():
         container = parse_container(node)

@@ -896,8 +893,7 @@ def check_inventory_versions():


 def writes_version_sync():
-    ''' Callback synchronizing version of publishable write nodes
-    '''
+    """Callback synchronizing version of publishable write nodes"""
     try:
         rootVersion = get_version_from_path(nuke.root().name())
         padding = len(rootVersion)
@@ -934,8 +930,7 @@ def writes_version_sync():


 def version_up_script():
-    ''' Raising working script's version
-    '''
+    """Raising working script's version"""
     import nukescripts
     nukescripts.script_and_write_nodes_version_up()

@@ -957,14 +952,14 @@ def check_product_name_exists(nodes, product_name):


 def format_anatomy(data):
-    ''' Helping function for formatting of anatomy paths
+    """Helping function for formatting of anatomy paths

     Arguments:
         data (dict): dictionary with attributes used for formatting

     Return:
-        path (str)
-    '''
+        str: Formatted path.
+    """

     project_name = get_current_project_name()
     anatomy = Anatomy(project_name)
@@ -996,9 +991,8 @@ def format_anatomy(data):
     return anatomy.format(data)


-def script_name():
-    ''' Returns nuke script path
-    '''
+def script_name() -> str:
+    """Returns nuke script path"""
     return nuke.root().knob("name").value()


@@ -1100,7 +1094,7 @@ def create_write_node(
     linked_knobs=None,
     **kwargs
 ):
-    ''' Creating write node which is group node
+    """Creating write node which is group node

     Arguments:
         name (str): name of node
@@ -1134,8 +1128,8 @@ def create_write_node(

     Return:
-        node (obj): group node with avalon data as Knobs
-    '''
+        node (nuke.Node): group node with avalon data as Knobs
+    """
     # Ensure name does not contain any invalid characters.
     special_chars = re.escape("!@#$%^&*()=[]{}|\\;',.<>/?~+-")
     special_chars_regex = re.compile(f"[{special_chars}]")
@@ -1300,7 +1294,7 @@ def create_write_node(


 def set_node_knobs_from_settings(node, knob_settings, **kwargs):
-    """ Overriding knob values from settings
+    """Overriding knob values from settings

     Using `schema_nuke_knob_inputs` for knob type definitions.

@@ -1393,8 +1387,7 @@ def color_gui_to_int(color_gui):

 def create_backdrop(label="", color=None, layer=0,
                     nodes=None):
-    """
-    Create Backdrop node
+    """Create Backdrop node

     Arguments:
         color (str): nuke compatible string with color code
@@ -1402,6 +1395,9 @@ def create_backdrop(label="", color=None, layer=0,
         label (str): the message
         nodes (list): list of nodes to be wrapped into backdrop

+    Returns:
+        nuke.Node: The created backdrop node.
+
     """
     assert isinstance(nodes, list), "`nodes` should be a list of nodes"

@@ -1491,12 +1487,12 @@ class WorkfileSettings(object):
         return [n for n in self._nodes if filter in n.Class()]

     def set_viewers_colorspace(self, imageio_nuke):
-        ''' Adds correct colorspace to viewer
+        """Adds correct colorspace to viewer

         Arguments:
             imageio_nuke (dict): nuke colorspace configurations

-        '''
+        """
         filter_knobs = [
             "viewerProcess",
             "wipe_position",
@@ -1560,12 +1556,12 @@ class WorkfileSettings(object):
         return StringTemplate(display_view).format_strict(self.formatting_data)

     def set_root_colorspace(self, imageio_host):
-        ''' Adds correct colorspace to root
+        """Adds correct colorspace to root

         Arguments:
             imageio_host (dict): host colorspace configurations

-        '''
+        """
         config_data = get_current_context_imageio_config_preset()

         workfile_settings = imageio_host["workfile"]
@@ -1819,9 +1815,8 @@ Reopening Nuke should synchronize these paths and resolve any discrepancies.
         return new_path

     def set_writes_colorspace(self):
-        ''' Adds correct colorspace to write node dict
-
-        '''
+        """ Adds correct colorspace to write node dict
+        """
         for node in nuke.allNodes(filter="Group", group=self._root_node):
             log.info("Setting colorspace to `{}`".format(node.name()))

@@ -1943,8 +1938,8 @@ Reopening Nuke should synchronize these paths and resolve any discrepancies.
                 knobs["to"]))

     def set_colorspace(self):
-        ''' Setting colorspace following presets
-        '''
+        """ Setting colorspace following presets
+        """
         # get imageio
         nuke_colorspace = get_nuke_imageio_settings()

@@ -2152,9 +2147,8 @@ Reopening Nuke should synchronize these paths and resolve any discrepancies.


 def get_write_node_template_attr(node):
-    ''' Gets all defined data from presets
-
-    '''
+    """ Gets all defined data from presets
+    """

     # TODO: add identifiers to settings and rename settings key
     plugin_names_mapping = {