mirror of
https://github.com/ynput/ayon-core.git
synced 2025-12-24 21:04:40 +01:00
Merge pull request #5186 from tokejepsen/enhancement/OP-5265_Use-custom-staging-dir-function-for-Maya-renders
This commit is contained in:
commit
75e265198b
10 changed files with 201 additions and 31 deletions
|
|
@ -157,10 +157,10 @@ class CollectMayaRender(pyblish.api.InstancePlugin):
|
|||
|
||||
# append full path
|
||||
aov_dict = {}
|
||||
default_render_folder = context.data.get("project_settings")\
|
||||
.get("maya")\
|
||||
.get("RenderSettings")\
|
||||
.get("default_render_image_folder") or ""
|
||||
image_directory = os.path.join(
|
||||
cmds.workspace(query=True, rootDirectory=True),
|
||||
cmds.workspace(fileRuleEntry="images")
|
||||
)
|
||||
# replace relative paths with absolute. Render products are
|
||||
# returned as list of dictionaries.
|
||||
publish_meta_path = None
|
||||
|
|
@ -168,8 +168,7 @@ class CollectMayaRender(pyblish.api.InstancePlugin):
|
|||
full_paths = []
|
||||
aov_first_key = list(aov.keys())[0]
|
||||
for file in aov[aov_first_key]:
|
||||
full_path = os.path.join(workspace, default_render_folder,
|
||||
file)
|
||||
full_path = os.path.join(image_directory, file)
|
||||
full_path = full_path.replace("\\", "/")
|
||||
full_paths.append(full_path)
|
||||
publish_meta_path = os.path.dirname(full_path)
|
||||
|
|
|
|||
|
|
@ -107,7 +107,8 @@ class ExtractAlembic(publish.Extractor):
|
|||
}
|
||||
instance.data["representations"].append(representation)
|
||||
|
||||
instance.context.data["cleanupFullPaths"].append(path)
|
||||
if not instance.data.get("stagingDir_persistent", False):
|
||||
instance.context.data["cleanupFullPaths"].append(path)
|
||||
|
||||
self.log.debug("Extracted {} to {}".format(instance, dirname))
|
||||
|
||||
|
|
|
|||
|
|
@ -80,7 +80,8 @@ class ExtractProxyAlembic(publish.Extractor):
|
|||
}
|
||||
instance.data["representations"].append(representation)
|
||||
|
||||
instance.context.data["cleanupFullPaths"].append(path)
|
||||
if not instance.data.get("stagingDir_persistent", False):
|
||||
instance.context.data["cleanupFullPaths"].append(path)
|
||||
|
||||
self.log.debug("Extracted {} to {}".format(instance, dirname))
|
||||
# remove the bounding box
|
||||
|
|
|
|||
|
|
@ -92,7 +92,6 @@ class ExtractThumbnail(publish.Extractor):
|
|||
"Create temp directory {} for thumbnail".format(dst_staging)
|
||||
)
|
||||
# Store new staging to cleanup paths
|
||||
instance.context.data["cleanupFullPaths"].append(dst_staging)
|
||||
filename = "{0}".format(instance.name)
|
||||
path = os.path.join(dst_staging, filename)
|
||||
|
||||
|
|
|
|||
|
|
@ -1,4 +1,7 @@
|
|||
import os
|
||||
|
||||
import pyblish.api
|
||||
|
||||
from maya import cmds
|
||||
|
||||
from openpype.pipeline.publish import (
|
||||
|
|
@ -22,8 +25,12 @@ class ValidateRenderImageRule(pyblish.api.InstancePlugin):
|
|||
|
||||
def process(self, instance):
|
||||
|
||||
required_images_rule = self.get_default_render_image_folder(instance)
|
||||
current_images_rule = cmds.workspace(fileRuleEntry="images")
|
||||
required_images_rule = os.path.normpath(
|
||||
self.get_default_render_image_folder(instance)
|
||||
)
|
||||
current_images_rule = os.path.normpath(
|
||||
cmds.workspace(fileRuleEntry="images")
|
||||
)
|
||||
|
||||
if current_images_rule != required_images_rule:
|
||||
raise PublishValidationError(
|
||||
|
|
@ -42,8 +49,17 @@ class ValidateRenderImageRule(pyblish.api.InstancePlugin):
|
|||
cmds.workspace(fileRule=("images", required_images_rule))
|
||||
cmds.workspace(saveWorkspace=True)
|
||||
|
||||
@staticmethod
|
||||
def get_default_render_image_folder(instance):
|
||||
@classmethod
|
||||
def get_default_render_image_folder(cls, instance):
|
||||
staging_dir = instance.data.get("stagingDir")
|
||||
if staging_dir:
|
||||
cls.log.debug(
|
||||
"Staging dir found: \"{}\". Ignoring setting from "
|
||||
"`project_settings/maya/RenderSettings/"
|
||||
"default_render_image_folder`.".format(staging_dir)
|
||||
)
|
||||
return staging_dir
|
||||
|
||||
return instance.context.data.get('project_settings')\
|
||||
.get('maya') \
|
||||
.get('RenderSettings') \
|
||||
|
|
|
|||
|
|
@ -290,7 +290,6 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline,
|
|||
def process_submission(self):
|
||||
|
||||
instance = self._instance
|
||||
context = instance.context
|
||||
|
||||
filepath = self.scene_path # publish if `use_publish` else workfile
|
||||
|
||||
|
|
@ -306,13 +305,11 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline,
|
|||
self._patch_workfile()
|
||||
|
||||
# Gather needed data ------------------------------------------------
|
||||
workspace = context.data["workspaceDir"]
|
||||
default_render_file = instance.context.data.get('project_settings')\
|
||||
.get('maya')\
|
||||
.get('RenderSettings')\
|
||||
.get('default_render_image_folder')
|
||||
filename = os.path.basename(filepath)
|
||||
dirname = os.path.join(workspace, default_render_file)
|
||||
dirname = os.path.join(
|
||||
cmds.workspace(query=True, rootDirectory=True),
|
||||
cmds.workspace(fileRuleEntry="images")
|
||||
)
|
||||
|
||||
# Fill in common data to payload ------------------------------------
|
||||
# TODO: Replace these with collected data from CollectRender
|
||||
|
|
|
|||
|
|
@ -345,6 +345,151 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin,
|
|||
self.log.debug("Skipping local instance.")
|
||||
return
|
||||
|
||||
data = instance.data.copy()
|
||||
context = instance.context
|
||||
self.context = context
|
||||
self.anatomy = instance.context.data["anatomy"]
|
||||
|
||||
asset = data.get("asset") or context.data["asset"]
|
||||
subset = data.get("subset")
|
||||
|
||||
start = instance.data.get("frameStart")
|
||||
if start is None:
|
||||
start = context.data["frameStart"]
|
||||
|
||||
end = instance.data.get("frameEnd")
|
||||
if end is None:
|
||||
end = context.data["frameEnd"]
|
||||
|
||||
handle_start = instance.data.get("handleStart")
|
||||
if handle_start is None:
|
||||
handle_start = context.data["handleStart"]
|
||||
|
||||
handle_end = instance.data.get("handleEnd")
|
||||
if handle_end is None:
|
||||
handle_end = context.data["handleEnd"]
|
||||
|
||||
fps = instance.data.get("fps")
|
||||
if fps is None:
|
||||
fps = context.data["fps"]
|
||||
|
||||
if data.get("extendFrames", False):
|
||||
start, end = self._extend_frames(
|
||||
asset,
|
||||
subset,
|
||||
start,
|
||||
end,
|
||||
data["overrideExistingFrame"])
|
||||
|
||||
try:
|
||||
source = data["source"]
|
||||
except KeyError:
|
||||
source = context.data["currentFile"]
|
||||
|
||||
success, rootless_path = (
|
||||
self.anatomy.find_root_template_from_path(source)
|
||||
)
|
||||
if success:
|
||||
source = rootless_path
|
||||
|
||||
else:
|
||||
# `rootless_path` is not set to `source` if none of roots match
|
||||
self.log.warning((
|
||||
"Could not find root path for remapping \"{}\"."
|
||||
" This may cause issues."
|
||||
).format(source))
|
||||
|
||||
family = "render"
|
||||
if ("prerender" in instance.data["families"] or
|
||||
"prerender.farm" in instance.data["families"]):
|
||||
family = "prerender"
|
||||
families = [family]
|
||||
|
||||
# pass review to families if marked as review
|
||||
do_not_add_review = False
|
||||
if data.get("review"):
|
||||
families.append("review")
|
||||
elif data.get("review") is False:
|
||||
self.log.debug("Instance has review explicitly disabled.")
|
||||
do_not_add_review = True
|
||||
|
||||
instance_skeleton_data = {
|
||||
"family": family,
|
||||
"subset": subset,
|
||||
"families": families,
|
||||
"asset": asset,
|
||||
"frameStart": start,
|
||||
"frameEnd": end,
|
||||
"handleStart": handle_start,
|
||||
"handleEnd": handle_end,
|
||||
"frameStartHandle": start - handle_start,
|
||||
"frameEndHandle": end + handle_end,
|
||||
"comment": instance.data["comment"],
|
||||
"fps": fps,
|
||||
"source": source,
|
||||
"extendFrames": data.get("extendFrames"),
|
||||
"overrideExistingFrame": data.get("overrideExistingFrame"),
|
||||
"pixelAspect": data.get("pixelAspect", 1),
|
||||
"resolutionWidth": data.get("resolutionWidth", 1920),
|
||||
"resolutionHeight": data.get("resolutionHeight", 1080),
|
||||
"multipartExr": data.get("multipartExr", False),
|
||||
"jobBatchName": data.get("jobBatchName", ""),
|
||||
"useSequenceForReview": data.get("useSequenceForReview", True),
|
||||
# map inputVersions `ObjectId` -> `str` so json supports it
|
||||
"inputVersions": list(map(str, data.get("inputVersions", []))),
|
||||
"colorspace": instance.data.get("colorspace"),
|
||||
"stagingDir_persistent": instance.data.get(
|
||||
"stagingDir_persistent", False
|
||||
)
|
||||
}
|
||||
|
||||
# skip locking version if we are creating v01
|
||||
instance_version = instance.data.get("version") # take this if exists
|
||||
if instance_version != 1:
|
||||
instance_skeleton_data["version"] = instance_version
|
||||
|
||||
# transfer specific families from original instance to new render
|
||||
for item in self.families_transfer:
|
||||
if item in instance.data.get("families", []):
|
||||
instance_skeleton_data["families"] += [item]
|
||||
|
||||
# transfer specific properties from original instance based on
|
||||
# mapping dictionary `instance_transfer`
|
||||
for key, values in self.instance_transfer.items():
|
||||
if key in instance.data.get("families", []):
|
||||
for v in values:
|
||||
instance_skeleton_data[v] = instance.data.get(v)
|
||||
|
||||
# look into instance data if representations are not having any
|
||||
# which are having tag `publish_on_farm` and include them
|
||||
for repre in instance.data.get("representations", []):
|
||||
staging_dir = repre.get("stagingDir")
|
||||
if staging_dir:
|
||||
success, rootless_staging_dir = (
|
||||
self.anatomy.find_root_template_from_path(
|
||||
staging_dir
|
||||
)
|
||||
)
|
||||
if success:
|
||||
repre["stagingDir"] = rootless_staging_dir
|
||||
else:
|
||||
self.log.warning((
|
||||
"Could not find root path for remapping \"{}\"."
|
||||
" This may cause issues on farm."
|
||||
).format(staging_dir))
|
||||
repre["stagingDir"] = staging_dir
|
||||
|
||||
if "publish_on_farm" in repre.get("tags"):
|
||||
# create representations attribute of not there
|
||||
if "representations" not in instance_skeleton_data.keys():
|
||||
instance_skeleton_data["representations"] = []
|
||||
|
||||
instance_skeleton_data["representations"].append(repre)
|
||||
|
||||
instances = None
|
||||
assert data.get("expectedFiles"), ("Submission from old Pype version"
|
||||
" - missing expectedFiles")
|
||||
|
||||
anatomy = instance.context.data["anatomy"]
|
||||
|
||||
instance_skeleton_data = create_skeleton_instance(
|
||||
|
|
|
|||
|
|
@ -103,13 +103,16 @@ class CollectRenderedFiles(pyblish.api.ContextPlugin):
|
|||
|
||||
# stash render job id for later validation
|
||||
instance.data["render_job_id"] = data.get("job").get("_id")
|
||||
|
||||
staging_dir_persistent = instance.data.get(
|
||||
"stagingDir_persistent", False
|
||||
)
|
||||
representations = []
|
||||
for repre_data in instance_data.get("representations") or []:
|
||||
self._fill_staging_dir(repre_data, anatomy)
|
||||
representations.append(repre_data)
|
||||
|
||||
add_repre_files_for_cleanup(instance, repre_data)
|
||||
if not staging_dir_persistent:
|
||||
add_repre_files_for_cleanup(instance, repre_data)
|
||||
|
||||
instance.data["representations"] = representations
|
||||
|
||||
|
|
@ -124,6 +127,8 @@ class CollectRenderedFiles(pyblish.api.ContextPlugin):
|
|||
self.log.debug(
|
||||
f"Adding audio to instance: {instance.data['audio']}")
|
||||
|
||||
return staging_dir_persistent
|
||||
|
||||
def process(self, context):
|
||||
self._context = context
|
||||
|
||||
|
|
@ -160,9 +165,12 @@ class CollectRenderedFiles(pyblish.api.ContextPlugin):
|
|||
legacy_io.Session.update(session_data)
|
||||
os.environ.update(session_data)
|
||||
session_is_set = True
|
||||
self._process_path(data, anatomy)
|
||||
context.data["cleanupFullPaths"].append(path)
|
||||
context.data["cleanupEmptyDirs"].append(os.path.dirname(path))
|
||||
staging_dir_persistent = self._process_path(data, anatomy)
|
||||
if not staging_dir_persistent:
|
||||
context.data["cleanupFullPaths"].append(path)
|
||||
context.data["cleanupEmptyDirs"].append(
|
||||
os.path.dirname(path)
|
||||
)
|
||||
except Exception as e:
|
||||
self.log.error(e, exc_info=True)
|
||||
raise Exception("Error") from e
|
||||
|
|
|
|||
|
|
@ -12,7 +12,7 @@
|
|||
{
|
||||
"type": "text",
|
||||
"key": "default_render_image_folder",
|
||||
"label": "Default render image folder"
|
||||
"label": "Default render image folder. This setting can be\noverwritten by custom staging directory profile;\n\"project_settings/global/tools/publish\n/custom_staging_dir_profiles\"."
|
||||
},
|
||||
{
|
||||
"type": "boolean",
|
||||
|
|
|
|||
|
|
@ -192,7 +192,7 @@ A profile may generate multiple outputs from a single input. Each output must de
|
|||
- Nuke extractor settings path: `project_settings/nuke/publish/ExtractReviewDataMov/outputs/baking/add_custom_tags`
|
||||
- Filtering by input length. Input may be video, sequence or single image. It is possible that `.mp4` should be created only when input is video or sequence and to create review `.png` when input is single frame. In some cases the output should be created even if it's single frame or multi frame input.
|
||||
|
||||
|
||||
|
||||
### Extract Burnin
|
||||
|
||||
Plugin is responsible for adding burnins into review representations.
|
||||
|
|
@ -226,13 +226,13 @@ A burnin profile may set multiple burnin outputs from one input. The burnin's na
|
|||
| **Bottom Centered** | Bottom center content. | str | "{username}" |
|
||||
| **Bottom Right** | Bottom right corner content. | str | "{frame_start}-{current_frame}-{frame_end}" |
|
||||
|
||||
Each burnin profile can be configured with additional family filtering and can
|
||||
add additional tags to the burnin representation, these can be configured under
|
||||
Each burnin profile can be configured with additional family filtering and can
|
||||
add additional tags to the burnin representation, these can be configured under
|
||||
the profile's **Additional filtering** section.
|
||||
|
||||
:::note Filename suffix
|
||||
The filename suffix is appended to filename of the source representation. For
|
||||
example, if the source representation has suffix **"h264"** and the burnin
|
||||
The filename suffix is appended to filename of the source representation. For
|
||||
example, if the source representation has suffix **"h264"** and the burnin
|
||||
suffix is **"client"** then the final suffix is **"h264_client"**.
|
||||
:::
|
||||
|
||||
|
|
@ -343,6 +343,10 @@ One of the key advantages of this feature is that it allows users to choose the
|
|||
|
||||
In some cases, these DCCs (Nuke, Houdini, Maya) automatically add a rendering path during the creation stage, which is then used in publishing. Creators and extractors of such DCCs need to use these profiles to fill paths in DCC's nodes to use this functionality.
|
||||
|
||||
:::note
|
||||
Maya's setting `project_settings/maya/RenderSettings/default_render_image_folder` is overwritten by the custom staging dir.
|
||||
:::
|
||||
|
||||
The custom staging folder uses a path template configured in `project_anatomy/templates/others` with `transient` being a default example path that could be used. The template requires a 'folder' key for it to be usable as custom staging folder.
|
||||
|
||||
##### Known issues
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue