mirror of
https://github.com/ynput/ayon-core.git
synced 2025-12-26 13:52:15 +01:00
commit
f7968bb9d1
5 changed files with 326 additions and 8 deletions
|
|
@ -1,9 +1,10 @@
|
|||
import os
|
||||
import json
|
||||
import pprint
|
||||
import re
|
||||
|
||||
from avalon import api
|
||||
from avalon.vendor import requests
|
||||
from avalon import api, io
|
||||
from avalon.vendor import requests, clique
|
||||
|
||||
import pyblish.api
|
||||
|
||||
|
|
@ -23,6 +24,73 @@ def _get_script():
|
|||
return module_path
|
||||
|
||||
|
||||
# Logic to retrieve latest files concerning extendFrames
def get_latest_version(asset_name, subset_name, family):
    """Return the latest version document of a subset for an asset.

    Args:
        asset_name (str): name of the asset to look up.
        subset_name (str): name of the subset under that asset.
        family (str): family that must be present in the version's
            ``data.families``.

    Returns:
        dict: the highest-numbered matching version document, with
            ``name``, ``data.startFrame``, ``data.endFrame`` and
            ``parent`` projected.

    Raises:
        RuntimeError: when the asset, subset or version cannot be found.
    """

    # Get asset document (previously this was assigned back onto
    # `asset_name`, shadowing the string argument)
    asset = io.find_one({"type": "asset",
                         "name": asset_name},
                        projection={"name": True})
    if not asset:
        # Guard: a missing asset used to crash with a TypeError on
        # `asset_name["_id"]`
        raise RuntimeError("Asset '%s' not found in the "
                           "database" % asset_name)

    subset = io.find_one({"type": "subset",
                          "name": subset_name,
                          "parent": asset["_id"]},
                         projection={"_id": True, "name": True})

    # Check if subsets actually exists (pre-run check).
    # Raise instead of assert so the check survives `python -O`.
    if not subset:
        raise RuntimeError("No subsets found, please publish with "
                           "`extendFrames` off")

    # Get latest version (sorted on `name`, descending)
    version_projection = {"name": True,
                          "data.startFrame": True,
                          "data.endFrame": True,
                          "parent": True}

    version = io.find_one({"type": "version",
                           "parent": subset["_id"],
                           "data.families": family},
                          projection=version_projection,
                          sort=[("name", -1)])

    if version is None:
        raise RuntimeError("No version found, this is a bug")

    return version
|
||||
|
||||
|
||||
def get_resources(version, extension=None):
    """Get the files from the specific version.

    Args:
        version (dict): version document whose representation to list.
        extension (str, optional): when given, restrict the lookup to the
            representation with this name (used as the file extension).

    Returns:
        list: sorted, normalized paths of all files in the
            representation's directory.

    Raises:
        RuntimeError: when no matching representation exists.
    """
    query = {"type": "representation", "parent": version["_id"]}
    if extension:
        query["name"] = extension

    representation = io.find_one(query)
    # Raise instead of assert so the check survives `python -O`, and
    # include the query so the failure is actually debuggable
    if not representation:
        raise RuntimeError("No representation found for %s, "
                           "this is a bug" % query)

    directory = api.get_representation_path(representation)
    print("Source: ", directory)
    resources = sorted(os.path.normpath(os.path.join(directory, fname))
                       for fname in os.listdir(directory))

    return resources
|
||||
|
||||
|
||||
def get_resource_files(resources, frame_range, override=True):
    """Assemble `resources` into a single sequence and return its files.

    Args:
        resources (list): file paths belonging to one frame sequence.
        frame_range (iterable): frame numbers of the new render.
        override (bool): when True, drop every frame in `frame_range`
            from the existing sequence (it will be re-rendered).

    Returns:
        list: remaining file paths of the assembled collection.
    """

    collections, _ = clique.assemble(resources)
    assert len(collections) == 1, "Multiple collections found"
    collection = collections[0]

    # Remove any frames that the new render will produce itself
    if override:
        for index in frame_range:
            # discard() is a no-op for frames the collection lacks
            collection.indexes.discard(index)

    return list(collection)
|
||||
|
||||
|
||||
class SubmitDependentImageSequenceJobDeadline(pyblish.api.InstancePlugin):
|
||||
"""Submit image sequence publish jobs to Deadline.
|
||||
|
||||
|
|
@ -69,8 +137,9 @@ class SubmitDependentImageSequenceJobDeadline(pyblish.api.InstancePlugin):
|
|||
raise RuntimeError("Can't continue without valid deadline "
|
||||
"submission prior to this plug-in.")
|
||||
|
||||
subset = instance.data["subset"]
|
||||
state = instance.data.get("publishJobState", "Suspended")
|
||||
data = instance.data.copy()
|
||||
subset = data["subset"]
|
||||
state = data.get("publishJobState", "Suspended")
|
||||
job_name = "{batch} - {subset} [publish image sequence]".format(
|
||||
batch=job["Props"]["Name"],
|
||||
subset=subset
|
||||
|
|
@ -80,6 +149,7 @@ class SubmitDependentImageSequenceJobDeadline(pyblish.api.InstancePlugin):
|
|||
context = instance.context
|
||||
start = instance.data.get("startFrame", context.data["startFrame"])
|
||||
end = instance.data.get("endFrame", context.data["endFrame"])
|
||||
resources = []
|
||||
|
||||
# Add in regex for sequence filename
|
||||
# This assumes the output files start with subset name and ends with
|
||||
|
|
@ -93,8 +163,7 @@ class SubmitDependentImageSequenceJobDeadline(pyblish.api.InstancePlugin):
|
|||
ext=ext)
|
||||
|
||||
# Write metadata for publish job
|
||||
data = instance.data.copy()
|
||||
data.pop("deadlineSubmissionJob")
|
||||
render_job = data.pop("deadlineSubmissionJob")
|
||||
metadata = {
|
||||
"regex": regex,
|
||||
"startFrame": start,
|
||||
|
|
@ -114,6 +183,78 @@ class SubmitDependentImageSequenceJobDeadline(pyblish.api.InstancePlugin):
|
|||
if not os.path.isdir(output_dir):
|
||||
os.makedirs(output_dir)
|
||||
|
||||
if data.get("extendFrames", False):
|
||||
|
||||
family = "colorbleed.imagesequence"
|
||||
override = data["overrideExistingFrame"]
|
||||
|
||||
# override = data.get("overrideExistingFrame", False)
|
||||
out_file = render_job.get("OutFile")
|
||||
if not out_file:
|
||||
raise RuntimeError("OutFile not found in render job!")
|
||||
|
||||
extension = os.path.splitext(out_file[0])[1]
|
||||
_ext = extension[1:]
|
||||
|
||||
# Frame comparison
|
||||
prev_start = None
|
||||
prev_end = None
|
||||
resource_range = range(int(start), int(end)+1)
|
||||
|
||||
# Gather all the subset files (one subset per render pass!)
|
||||
subset_names = [data["subset"]]
|
||||
subset_names.extend(data.get("renderPasses", []))
|
||||
|
||||
for subset_name in subset_names:
|
||||
version = get_latest_version(asset_name=data["asset"],
|
||||
subset_name=subset_name,
|
||||
family=family)
|
||||
|
||||
# Set prev start / end frames for comparison
|
||||
if not prev_start and not prev_end:
|
||||
prev_start = version["data"]["startFrame"]
|
||||
prev_end = version["data"]["endFrame"]
|
||||
|
||||
subset_resources = get_resources(version, _ext)
|
||||
resource_files = get_resource_files(subset_resources,
|
||||
resource_range,
|
||||
override)
|
||||
|
||||
resources.extend(resource_files)
|
||||
|
||||
updated_start = min(start, prev_start)
|
||||
updated_end = max(end, prev_end)
|
||||
|
||||
# Update metadata and instance start / end frame
|
||||
self.log.info("Updating start / end frame : "
|
||||
"{} - {}".format(updated_start, updated_end))
|
||||
|
||||
# TODO : Improve logic to get new frame range for the
|
||||
# publish job (publish_filesequence.py)
|
||||
# The current approach is not following Pyblish logic which is based
|
||||
# on Collect / Validate / Extract.
|
||||
|
||||
# ---- Collect Plugins ---
|
||||
# Collect Extend Frames - Only run if extendFrames is toggled
|
||||
# # # Store in instance:
|
||||
# # # Previous rendered files per subset based on frames
|
||||
# # # --> Add to instance.data[resources]
|
||||
# # # Update publish frame range
|
||||
|
||||
# ---- Validate Plugins ---
|
||||
# Validate Extend Frames
|
||||
# # # Check if instance has the requirements to extend frames
|
||||
# There might have been some things which can be added to the list
|
||||
# Please do so when fixing this.
|
||||
|
||||
# Start frame
|
||||
metadata["startFrame"] = updated_start
|
||||
metadata["metadata"]["instance"]["startFrame"] = updated_start
|
||||
|
||||
# End frame
|
||||
metadata["endFrame"] = updated_end
|
||||
metadata["metadata"]["instance"]["endFrame"] = updated_end
|
||||
|
||||
metadata_filename = "{}_metadata.json".format(subset)
|
||||
metadata_path = os.path.join(output_dir, metadata_filename)
|
||||
with open(metadata_path, "w") as f:
|
||||
|
|
@ -159,3 +300,17 @@ class SubmitDependentImageSequenceJobDeadline(pyblish.api.InstancePlugin):
|
|||
response = requests.post(url, json=payload)
|
||||
if not response.ok:
|
||||
raise Exception(response.text)
|
||||
|
||||
# Copy files from previous render if extendFrame is True
|
||||
if data.get("extendFrames", False):
|
||||
|
||||
self.log.info("Preparing to copy ..")
|
||||
import shutil
|
||||
|
||||
dest_path = data["outputDir"]
|
||||
for source in resources:
|
||||
src_file = os.path.basename(source)
|
||||
dest = os.path.join(dest_path, src_file)
|
||||
shutil.copy(source, dest)
|
||||
|
||||
self.log.info("Finished copying %i files" % len(resources))
|
||||
|
|
|
|||
|
|
@ -25,6 +25,8 @@ class CreateRenderGlobals(avalon.maya.Creator):
|
|||
data = OrderedDict(**self.data)
|
||||
|
||||
data["suspendPublishJob"] = False
|
||||
data["extendFrames"] = False
|
||||
data["overrideExistingFrame"] = True
|
||||
data["includeDefaultRenderLayer"] = False
|
||||
data["useLegacyRenderLayers"] = True
|
||||
data["priority"] = 50
|
||||
|
|
|
|||
95
colorbleed/plugins/maya/publish/collect_render_layer_aovs.py
Normal file
95
colorbleed/plugins/maya/publish/collect_render_layer_aovs.py
Normal file
|
|
@ -0,0 +1,95 @@
|
|||
from maya import cmds
|
||||
|
||||
import pyblish.api
|
||||
|
||||
import colorbleed.maya.lib as lib
|
||||
|
||||
|
||||
class CollectRenderLayerAOVS(pyblish.api.InstancePlugin):
    """Collect all render layer's AOVs / Render Elements that are enabled.

    The passes are stored on the instance as ``renderPasses``, formatted
    as ``<render layer>.<pass name>``, e.g. ``CHAR.diffuse``.

    This collector is important to be able to Extend Frames.

    Technical information:
    Each renderer uses different logic to work with render passes.
    VRay - RenderElement
        Simple node connection to the actual renderLayer node

    Arnold - AOV:
        Uses its own render settings node and connects an aiOAV to it

    Redshift - AOV:
        Uses its own render settings node and RedshiftAOV node. It is not
        connected but all AOVs are enabled for all render layers by default.

    """

    order = pyblish.api.CollectorOrder + 0.01
    label = "Render Elements / AOVs"
    hosts = ["maya"]
    families = ["colorbleed.renderlayer"]

    def process(self, instance):

        # Check if Extend Frames is toggled.
        # Fix: `instance.data` is a mapping — the previous call form
        # `instance.data(...)` relies on a deprecated pyblish API and is
        # inconsistent with the `.get()` usage elsewhere in this repo.
        if not instance.data.get("extendFrames", False):
            return

        # Get renderer
        renderer = cmds.getAttr("defaultRenderGlobals.currentRenderer")

        self.log.info("Renderer found: {}".format(renderer))

        # Map renderer to the Maya node type that represents a pass
        rp_node_types = {"vray": "VRayRenderElement",
                         "arnold": "aiAOV",
                         "redshift": "RedshiftAOV"}

        if renderer not in rp_node_types:
            self.log.error("Unsupported renderer found: '{}'".format(renderer))
            return

        result = []

        # Collect all AOVs / Render Elements within the instance's layer
        with lib.renderlayer(instance.name):

            node_type = rp_node_types[renderer]
            render_elements = cmds.ls(type=node_type)

            # Check if AOVs / Render Elements are enabled
            for element in render_elements:
                enabled = cmds.getAttr("{}.enabled".format(element))
                if not enabled:
                    continue

                pass_name = self.get_pass_name(renderer, element)
                render_pass = "%s.%s" % (instance.name, pass_name)

                result.append(render_pass)

        self.log.info("Found {} render elements / AOVs for "
                      "'{}'".format(len(result), instance.name))

        instance.data["renderPasses"] = result

    def get_pass_name(self, renderer, node):
        """Return the output pass name of an AOV / Render Element node.

        Args:
            renderer (str): one of "vray", "arnold", "redshift".
            node (str): name of the AOV / Render Element node.

        Raises:
            RuntimeError: for any unsupported renderer.
        """

        if renderer == "vray":
            # VRay stores the pass name on a `vray_name_*` attribute
            vray_node_attr = next(attr for attr in cmds.listAttr(node)
                                  if attr.startswith("vray_name"))

            pass_type = vray_node_attr.rsplit("_", 1)[-1]
            if pass_type == "extratex":
                vray_node_attr = "vray_explicit_name_extratex"

            # Node type is in the attribute name but we need to check
            # the value of the attribute as it can be changed
            pass_name = cmds.getAttr("{}.{}".format(node, vray_node_attr))

        elif renderer in ["arnold", "redshift"]:
            pass_name = cmds.getAttr("{}.name".format(node))
        else:
            raise RuntimeError("Unsupported renderer: '{}'".format(renderer))

        return pass_name
|
||||
|
|
@ -22,8 +22,8 @@ class CollectMayaRenderlayers(pyblish.api.ContextPlugin):
|
|||
try:
|
||||
render_globals = cmds.ls("renderglobalsDefault")[0]
|
||||
except IndexError:
|
||||
self.log.info("Cannot collect renderlayers without "
|
||||
"renderGlobals node")
|
||||
self.log.error("Cannot collect renderlayers without "
|
||||
"renderGlobals node")
|
||||
return
|
||||
|
||||
# Get start and end frame
|
||||
|
|
@ -135,4 +135,14 @@ class CollectMayaRenderlayers(pyblish.api.ContextPlugin):
|
|||
state = "Suspended" if attributes["suspendPublishJob"] else "Active"
|
||||
options["publishJobState"] = state
|
||||
|
||||
# Override frames should be False if extendFrames is False. This is
|
||||
# to ensure it doesn't go off doing crazy unpredictable things
|
||||
override_frames = False
|
||||
extend_frames = attributes.get("extendFrames", False)
|
||||
if extend_frames:
|
||||
override_frames = attributes.get("overrideExistingFrame", False)
|
||||
|
||||
options["extendFrames"] = extend_frames
|
||||
options["overrideExistingFrame"] = override_frames
|
||||
|
||||
return options
|
||||
|
|
|
|||
56
colorbleed/plugins/maya/publish/validate_renderlayer_aovs.py
Normal file
56
colorbleed/plugins/maya/publish/validate_renderlayer_aovs.py
Normal file
|
|
@ -0,0 +1,56 @@
|
|||
import pyblish.api
|
||||
|
||||
from avalon import io
|
||||
import colorbleed.api
|
||||
|
||||
|
||||
class ValidateRenderLayerAOVs(pyblish.api.InstancePlugin):
    """Validate created AOVs / RenderElement is registered in the database

    Each render element is registered as a subset which is formatted based on
    the render layer and the render element, example:

        <render layer>.<render element>

    This translates to something like this:

        CHAR.diffuse

    This check is needed to ensure the render output is still complete

    """

    order = pyblish.api.ValidatorOrder + 0.1
    label = "Render Passes / AOVs Are Registered"
    hosts = ["maya"]
    families = ["colorbleed.renderlayer"]
    actions = [colorbleed.api.SelectInvalidAction]

    def process(self, instance):
        invalid = self.get_invalid(instance)
        if invalid:
            raise RuntimeError("Found unregistered subsets: {}".format(invalid))

    def get_invalid(self, instance):
        """Return the instance's render passes that have no subset."""

        invalid = []

        asset_name = instance.data["asset"]
        # Fix: local was previously misspelled `render_passses`
        render_passes = instance.data.get("renderPasses", [])
        for render_pass in render_passes:
            is_valid = self.validate_subset_registered(asset_name, render_pass)
            if not is_valid:
                invalid.append(render_pass)

        return invalid

    def validate_subset_registered(self, asset_name, subset_name):
        """Check if subset is registered in the database under the asset"""

        asset = io.find_one({"type": "asset", "name": asset_name})
        # Guard: a missing asset used to crash with a TypeError on
        # `asset["_id"]` instead of reporting the actual problem
        if not asset:
            raise RuntimeError("Asset '%s' not found in the "
                               "database" % asset_name)

        is_valid = io.find_one({"type": "subset",
                                "name": subset_name,
                                "parent": asset["_id"]})

        return is_valid
|
||||
|
||||
Loading…
Add table
Add a link
Reference in a new issue