Mirror of https://github.com/ynput/ayon-core.git (synced 2025-12-24 12:54:40 +01:00)

Commit 8b37628ea6: Merge branch 'develop' into enhancement/new-metadata-ids

# Conflicts:
#	client/ayon_core/hosts/photoshop/api/ws_stub.py

92 changed files with 611 additions and 430 deletions
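The change that recurs through almost every hunk below is a metadata-key rename: publish plugins stop reading and writing the legacy "asset" key on instance/context data and use "folderPath" instead. A minimal, hypothetical sketch of that pattern in a pyblish-style collector (the plugin name and surrounding logic are illustrative only, not code from this commit):

import pyblish.api


class CollectFolderPathSketch(pyblish.api.InstancePlugin):
    """Illustrative collector showing the "asset" -> "folderPath" rename."""

    order = pyblish.api.CollectorOrder
    label = "Collect Folder Path (sketch)"

    def process(self, instance):
        context = instance.context
        # Before this commit the value was stored under "asset":
        #     instance.data["asset"] = context.data["asset"]
        # After it, the same value travels under "folderPath":
        instance.data["folderPath"] = context.data["folderPath"]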
@@ -98,7 +98,7 @@ class CollectAERender(publish.AbstractCollectRender):
source=current_file,
label="{} - {}".format(subset_name, family),
subset=subset_name,
asset=inst.data["asset"],
folderPath=inst.data["folderPath"],
task=task_name,
attachTo=False,
setMembers='',

@@ -175,7 +175,7 @@ class CollectAERender(publish.AbstractCollectRender):
version_str = "v{:03d}".format(render_instance.version)
if "#" not in file_name: # single frame (mov)W
path = os.path.join(base_dir, "{}_{}_{}.{}".format(
render_instance.asset,
render_instance.folderPath,
render_instance.subset,
version_str,
ext

@@ -184,7 +184,7 @@ class CollectAERender(publish.AbstractCollectRender):
else:
for frame in range(start, end + 1):
path = os.path.join(base_dir, "{}_{}_{}.{}.{}".format(
render_instance.asset,
render_instance.folderPath,
render_instance.subset,
version_str,
str(frame).zfill(self.padding_width),
@@ -50,11 +50,11 @@ class CollectWorkfile(pyblish.api.ContextPlugin):
asset_entity = context.data["assetEntity"]
project_entity = context.data["projectEntity"]

asset_name = get_asset_name_identifier(asset_entity)
folder_path = get_asset_name_identifier(asset_entity)

instance_data = {
"active": True,
"asset": asset_name,
"folderPath": folder_path,
"task": task,
"frameStart": context.data['frameStart'],
"frameEnd": context.data['frameEnd'],
@@ -30,7 +30,7 @@ class ValidateInstanceAssetRepair(pyblish.api.Action):
for instance in instances:
data = stub.read(instance[0])

data["asset"] = get_current_asset_name()
data["folderPath"] = get_current_asset_name()
stub.imprint(instance[0].instance_id, data)

@@ -53,7 +53,7 @@ class ValidateInstanceAsset(pyblish.api.InstancePlugin):
order = ValidateContentsOrder

def process(self, instance):
instance_asset = instance.data["asset"]
instance_asset = instance.data["folderPath"]
current_asset = get_current_asset_name()
msg = (
f"Instance asset {instance_asset} is not the same "
@@ -47,6 +47,22 @@ def get_multilayer(settings):
["multilayer_exr"])


def get_renderer(settings):
"""Get renderer from blender settings."""

return (settings["blender"]
["RenderSettings"]
["renderer"])


def get_compositing(settings):
"""Get compositing from blender settings."""

return (settings["blender"]
["RenderSettings"]
["compositing"])


def get_render_product(output_path, name, aov_sep):
"""
Generate the path to the render product. Blender interprets the `#`
@@ -91,66 +107,120 @@ def set_render_format(ext, multilayer):
image_settings.file_format = "TIFF"


def set_render_passes(settings):
aov_list = (settings["blender"]
["RenderSettings"]
["aov_list"])
custom_passes = (settings["blender"]
["RenderSettings"]
["custom_passes"])
def set_render_passes(settings, renderer):
aov_list = set(settings["blender"]["RenderSettings"]["aov_list"])
custom_passes = settings["blender"]["RenderSettings"]["custom_passes"]

# Common passes for both renderers
vl = bpy.context.view_layer

# Data Passes
vl.use_pass_combined = "combined" in aov_list
vl.use_pass_z = "z" in aov_list
vl.use_pass_mist = "mist" in aov_list
vl.use_pass_normal = "normal" in aov_list

# Light Passes
vl.use_pass_diffuse_direct = "diffuse_light" in aov_list
vl.use_pass_diffuse_color = "diffuse_color" in aov_list
vl.use_pass_glossy_direct = "specular_light" in aov_list
vl.use_pass_glossy_color = "specular_color" in aov_list
vl.eevee.use_pass_volume_direct = "volume_light" in aov_list
vl.use_pass_emit = "emission" in aov_list
vl.use_pass_environment = "environment" in aov_list
vl.use_pass_shadow = "shadow" in aov_list
vl.use_pass_ambient_occlusion = "ao" in aov_list

cycles = vl.cycles
# Cryptomatte Passes
vl.use_pass_cryptomatte_object = "cryptomatte_object" in aov_list
vl.use_pass_cryptomatte_material = "cryptomatte_material" in aov_list
vl.use_pass_cryptomatte_asset = "cryptomatte_asset" in aov_list

cycles.denoising_store_passes = "denoising" in aov_list
cycles.use_pass_volume_direct = "volume_direct" in aov_list
cycles.use_pass_volume_indirect = "volume_indirect" in aov_list
if renderer == "BLENDER_EEVEE":
# Eevee exclusive passes
eevee = vl.eevee

# Light Passes
vl.use_pass_shadow = "shadow" in aov_list
eevee.use_pass_volume_direct = "volume_light" in aov_list

# Effects Passes
eevee.use_pass_bloom = "bloom" in aov_list
eevee.use_pass_transparent = "transparent" in aov_list

# Cryptomatte Passes
vl.use_pass_cryptomatte_accurate = "cryptomatte_accurate" in aov_list
elif renderer == "CYCLES":
# Cycles exclusive passes
cycles = vl.cycles

# Data Passes
vl.use_pass_position = "position" in aov_list
vl.use_pass_vector = "vector" in aov_list
vl.use_pass_uv = "uv" in aov_list
cycles.denoising_store_passes = "denoising" in aov_list
vl.use_pass_object_index = "object_index" in aov_list
vl.use_pass_material_index = "material_index" in aov_list
cycles.pass_debug_sample_count = "sample_count" in aov_list

# Light Passes
vl.use_pass_diffuse_indirect = "diffuse_indirect" in aov_list
vl.use_pass_glossy_indirect = "specular_indirect" in aov_list
vl.use_pass_transmission_direct = "transmission_direct" in aov_list
vl.use_pass_transmission_indirect = "transmission_indirect" in aov_list
vl.use_pass_transmission_color = "transmission_color" in aov_list
cycles.use_pass_volume_direct = "volume_light" in aov_list
cycles.use_pass_volume_indirect = "volume_indirect" in aov_list
cycles.use_pass_shadow_catcher = "shadow" in aov_list

aovs_names = [aov.name for aov in vl.aovs]
for cp in custom_passes:
cp_name = cp[0]
cp_name = cp["attribute"]
if cp_name not in aovs_names:
aov = vl.aovs.add()
aov.name = cp_name
else:
aov = vl.aovs[cp_name]
aov.type = cp[1].get("type", "VALUE")
aov.type = cp["value"]

return aov_list, custom_passes
return list(aov_list), custom_passes


def set_node_tree(output_path, name, aov_sep, ext, multilayer):
def _create_aov_slot(name, aov_sep, slots, rpass_name, multi_exr, output_path):
filename = f"{name}{aov_sep}{rpass_name}.####"
slot = slots.new(rpass_name if multi_exr else filename)
filepath = str(output_path / filename.lstrip("/"))

return slot, filepath


def set_node_tree(
output_path, render_product, name, aov_sep, ext, multilayer, compositing
):
# Set the scene to use the compositor node tree to render
bpy.context.scene.use_nodes = True

tree = bpy.context.scene.node_tree

# Get the Render Layers node
rl_node = None
comp_layer_type = "CompositorNodeRLayers"
output_type = "CompositorNodeOutputFile"
compositor_type = "CompositorNodeComposite"

# Get the Render Layer, Composite and the previous output nodes
render_layer_node = None
composite_node = None
old_output_node = None
for node in tree.nodes:
if node.bl_idname == "CompositorNodeRLayers":
rl_node = node
if node.bl_idname == comp_layer_type:
render_layer_node = node
elif node.bl_idname == compositor_type:
composite_node = node
elif node.bl_idname == output_type and "AYON" in node.name:
old_output_node = node
if render_layer_node and composite_node and old_output_node:
break

# If there's not a Render Layers node, we create it
if not rl_node:
rl_node = tree.nodes.new("CompositorNodeRLayers")
if not render_layer_node:
render_layer_node = tree.nodes.new(comp_layer_type)

# Get the enabled output sockets, that are the active passes for the
# render.

@@ -158,48 +228,81 @@ def set_node_tree(output_path, name, aov_sep, ext, multilayer):
exclude_sockets = ["Image", "Alpha", "Noisy Image"]
passes = [
socket
for socket in rl_node.outputs
for socket in render_layer_node.outputs
if socket.enabled and socket.name not in exclude_sockets
]

# Remove all output nodes
for node in tree.nodes:
if node.bl_idname == "CompositorNodeOutputFile":
tree.nodes.remove(node)

# Create a new output node
output = tree.nodes.new("CompositorNodeOutputFile")
output = tree.nodes.new(output_type)

image_settings = bpy.context.scene.render.image_settings
output.format.file_format = image_settings.file_format

slots = None

# In case of a multilayer exr, we don't need to use the output node,
# because the blender render already outputs a multilayer exr.
if ext == "exr" and multilayer:
output.layer_slots.clear()
return []
multi_exr = ext == "exr" and multilayer
slots = output.layer_slots if multi_exr else output.file_slots
output.base_path = render_product if multi_exr else str(output_path)

output.file_slots.clear()
output.base_path = str(output_path)
slots.clear()

aov_file_products = []

old_links = {
link.from_socket.name: link for link in tree.links
if link.to_node == old_output_node}

# Create a new socket for the beauty output
pass_name = "rgba" if multi_exr else "beauty"
slot, _ = _create_aov_slot(
name, aov_sep, slots, pass_name, multi_exr, output_path)
tree.links.new(render_layer_node.outputs["Image"], slot)

if compositing:
# Create a new socket for the composite output
pass_name = "composite"
comp_socket, filepath = _create_aov_slot(
name, aov_sep, slots, pass_name, multi_exr, output_path)
aov_file_products.append(("Composite", filepath))

# For each active render pass, we add a new socket to the output node
# and link it
for render_pass in passes:
filepath = f"{name}{aov_sep}{render_pass.name}.####"
for rpass in passes:
slot, filepath = _create_aov_slot(
name, aov_sep, slots, rpass.name, multi_exr, output_path)
aov_file_products.append((rpass.name, filepath))

output.file_slots.new(filepath)
# If the rpass was not connected with the old output node, we connect
# it with the new one.
if not old_links.get(rpass.name):
tree.links.new(rpass, slot)

filename = str(output_path / filepath.lstrip("/"))
for link in list(old_links.values()):
# Check if the socket is still available in the new output node.
socket = output.inputs.get(link.to_socket.name)
# If it is, we connect it with the new output node.
if socket:
tree.links.new(link.from_socket, socket)
# Then, we remove the old link.
tree.links.remove(link)

aov_file_products.append((render_pass.name, filename))
# If there's a composite node, we connect its input with the new output
if compositing and composite_node:
for link in tree.links:
if link.to_node == composite_node:
tree.links.new(link.from_socket, comp_socket)
break

node_input = output.inputs[-1]
if old_output_node:
output.location = old_output_node.location
tree.nodes.remove(old_output_node)

tree.links.new(render_pass, node_input)
output.name = "AYON File Output"
output.label = "AYON File Output"

return aov_file_products
return [] if multi_exr else aov_file_products


def imprint_render_settings(node, data):
@@ -228,17 +331,23 @@ def prepare_rendering(asset_group):
aov_sep = get_aov_separator(settings)
ext = get_image_format(settings)
multilayer = get_multilayer(settings)
renderer = get_renderer(settings)
compositing = get_compositing(settings)

set_render_format(ext, multilayer)
aov_list, custom_passes = set_render_passes(settings)
bpy.context.scene.render.engine = renderer
aov_list, custom_passes = set_render_passes(settings, renderer)

output_path = Path.joinpath(dirpath, render_folder, file_name)

render_product = get_render_product(output_path, name, aov_sep)
aov_file_product = set_node_tree(
output_path, name, aov_sep, ext, multilayer)
output_path, render_product, name, aov_sep,
ext, multilayer, compositing)

bpy.context.scene.render.filepath = render_product
# Clear the render filepath, so that the output is handled only by the
# output node in the compositor.
bpy.context.scene.render.filepath = ""

render_settings = {
"render_folder": render_folder,
@@ -22,7 +22,9 @@ class CreateAction(plugin.BaseCreator):
)

# Get instance name
name = plugin.prepare_scene_name(instance_data["asset"], subset_name)
name = plugin.prepare_scene_name(
instance_data["folderPath"], subset_name
)

if pre_create_data.get("use_selection"):
for obj in lib.get_selection():
@@ -1,8 +1,10 @@
"""Create render."""
import bpy

from ayon_core.lib import version_up
from ayon_core.hosts.blender.api import plugin
from ayon_core.hosts.blender.api.render_lib import prepare_rendering
from ayon_core.hosts.blender.api.workio import save_file


class CreateRenderlayer(plugin.BaseCreator):

@@ -37,6 +39,7 @@ class CreateRenderlayer(plugin.BaseCreator):
# settings. Even the validator to check that the file is saved will
# detect the file as saved, even if it isn't. The only solution for
# now it is to force the file to be saved.
bpy.ops.wm.save_as_mainfile(filepath=bpy.data.filepath)
filepath = version_up(bpy.data.filepath)
save_file(filepath, copy=False)

return collection
@@ -28,15 +28,27 @@ class ValidateDeadlinePublish(pyblish.api.InstancePlugin,
def process(self, instance):
if not self.is_active(instance.data):
return

tree = bpy.context.scene.node_tree
output_type = "CompositorNodeOutputFile"
output_node = None
# Remove all output nodes that inlcude "AYON" in the name.
# There should be only one.
for node in tree.nodes:
if node.bl_idname == output_type and "AYON" in node.name:
output_node = node
break
if not output_node:
raise PublishValidationError(
"No output node found in the compositor tree."
)
filepath = bpy.data.filepath
file = os.path.basename(filepath)
filename, ext = os.path.splitext(file)
if filename not in bpy.context.scene.render.filepath:
if filename not in output_node.base_path:
raise PublishValidationError(
"Render output folder "
"doesn't match the blender scene name! "
"Use Repair action to "
"fix the folder file path."
"Render output folder doesn't match the blender scene name! "
"Use Repair action to fix the folder file path."
)

@classmethod
@@ -22,7 +22,7 @@ class CollectCelactionInstances(pyblish.api.ContextPlugin):
asset_name = get_asset_name_identifier(asset_entity)

shared_instance_data = {
"asset": asset_name,
"folderPath": asset_name,
"frameStart": asset_entity["data"]["frameStart"],
"frameEnd": asset_entity["data"]["frameEnd"],
"handleStart": asset_entity["data"]["handleStart"],
@@ -34,7 +34,7 @@ class CollecTimelineOTIO(pyblish.api.ContextPlugin):
project_settings=context.data["project_settings"]
)

asset_name = get_asset_name_identifier(asset_doc)
folder_path = get_asset_name_identifier(asset_doc)

# adding otio timeline to context
with opfapi.maintained_segment_selection(sequence) as selected_seg:

@@ -42,7 +42,7 @@ class CollecTimelineOTIO(pyblish.api.ContextPlugin):
instance_data = {
"name": subset_name,
"asset": asset_name,
"folderPath": folder_path,
"subset": subset_name,
"family": "workfile",
"families": []
@@ -55,7 +55,7 @@ class ExtractProductResources(publish.Extractor):
# flame objects
segment = instance.data["item"]
asset_name = instance.data["asset"]
folder_path = instance.data["folderPath"]
segment_name = segment.name.get_value()
clip_path = instance.data["path"]
sequence_clip = instance.context.data["flameSequence"]

@@ -249,7 +249,7 @@ class ExtractProductResources(publish.Extractor):
out_mark = in_mark + source_duration_handles
exporting_clip = self.import_clip(clip_path)
exporting_clip.name.set_value("{}_{}".format(
asset_name, segment_name))
folder_path, segment_name))

# add xml tags modifications
modify_xml_data.update({
@@ -168,10 +168,10 @@ class IntegrateBatchGroup(pyblish.api.InstancePlugin):
handle_start = instance.data["handleStart"]
handle_end = instance.data["handleEnd"]
frame_duration = (frame_end - frame_start) + 1
asset_name = instance.data["asset"]
folder_path = instance.data["folderPath"]

task_name = task_data["name"]
batchgroup_name = "{}_{}".format(asset_name, task_name)
batchgroup_name = "{}_{}".format(folder_path, task_name)

batch_data = {
"shematic_reels": [
@@ -68,7 +68,7 @@ class CollectFusionRender(
source=current_file,
label=inst.data["label"],
subset=subset_name,
asset=inst.data["asset"],
folderPath=inst.data["folderPath"],
task=task_name,
attachTo=False,
setMembers='',
@@ -72,7 +72,7 @@ class FusionRenderLocal(
self.log.info(
"Rendered '{nm}' for asset '{ast}' under the task '{tsk}'".format(
nm=instance.data["name"],
ast=instance.data["asset"],
ast=instance.data["folderPath"],
tsk=instance.data["task"],
)
)
@@ -21,7 +21,9 @@ class ValidateUniqueSubsets(pyblish.api.ContextPlugin):
# Collect instances per subset per asset
instances_per_subset_asset = defaultdict(lambda: defaultdict(list))
for instance in context:
asset = instance.data.get("asset", context.data.get("asset"))
asset = instance.data.get(
"folderPath", context.data.get("folderPath")
)
subset = instance.data.get("subset", context.data.get("subset"))
instances_per_subset_asset[asset][subset].append(instance)
@@ -98,7 +98,7 @@ class CollectFarmRender(publish.AbstractCollectRender):
self_name = self.__class__.__name__

asset_name = context.data["asset"]
folder_path = context.data["folderPath"]

for node in context.data["allNodes"]:
data = harmony.read(node)

@@ -142,7 +142,7 @@ class CollectFarmRender(publish.AbstractCollectRender):
source=context.data["currentFile"],
label=node.split("/")[1],
subset=subset_name,
asset=asset_name,
folderPath=folder_path,
task=task_name,
attachTo=False,
setMembers=[node],
@@ -31,7 +31,7 @@ class CollectPalettes(pyblish.api.ContextPlugin):
if (not any([re.search(pattern, task_name)
for pattern in self.allowed_tasks])):
return
asset_name = context.data["asset"]
folder_path = context.data["folderPath"]

for name, id in palettes.items():
instance = context.create_instance(name)

@@ -39,7 +39,7 @@ class CollectPalettes(pyblish.api.ContextPlugin):
"id": id,
"family": "harmony.palette",
'families': [],
"asset": asset_name,
"folderPath": folder_path,
"subset": "{}{}".format("palette", name)
})
self.log.info(
@@ -36,5 +36,5 @@ class CollectWorkfile(pyblish.api.ContextPlugin):
"family": family,
"families": [family],
"representations": [],
"asset": context.data["asset"]
"folderPath": context.data["folderPath"]
})
@@ -27,9 +27,10 @@ class ValidateInstanceRepair(pyblish.api.Action):
# Apply pyblish.logic to get the instances for the plug-in
instances = pyblish.api.instances_by_plugin(failed, plugin)

folder_path = get_current_asset_name()
for instance in instances:
data = harmony.read(instance.data["setMembers"][0])
data["asset"] = get_current_asset_name()
data["folderPath"] = folder_path
harmony.imprint(instance.data["setMembers"][0], data)

@@ -42,7 +43,7 @@ class ValidateInstance(pyblish.api.InstancePlugin):
order = ValidateContentsOrder

def process(self, instance):
instance_asset = instance.data["asset"]
instance_asset = instance.data["folderPath"]
current_asset = get_current_asset_name()
msg = (
"Instance asset is not the same as current asset:"
@@ -102,7 +102,7 @@ class CollectFrameTagInstances(pyblish.api.ContextPlugin):
# first collect all available subset tag frames
subset_data = {}
context_asset_doc = context.data["assetEntity"]
context_asset_name = get_asset_name_identifier(context_asset_doc)
context_folder_path = get_asset_name_identifier(context_asset_doc)

for tag_data in sequence_tags:
frame = int(tag_data["start"])

@@ -120,7 +120,7 @@ class CollectFrameTagInstances(pyblish.api.ContextPlugin):
subset_data[subset] = {
"frames": [frame],
"format": tag_data["format"],
"asset": context_asset_name
"folderPath": context_folder_path
}
return subset_data

@@ -133,7 +133,7 @@ class CollectFrameTagInstances(pyblish.api.ContextPlugin):
"label": "{} {}".format(name, subset_data["frames"]),
"family": "image",
"families": ["frame"],
"asset": subset_data["asset"],
"folderPath": subset_data["folderPath"],
"subset": name,
"format": subset_data["format"],
"frames": subset_data["frames"]
@@ -57,7 +57,7 @@ class ExtractClipEffects(publish.Extractor):
"sourceStart", "sourceStartH", "sourceEnd", "sourceEndH",
"frameStart", "frameEnd",
"clipIn", "clipOut", "clipInH", "clipOutH",
"asset", "version"
"folderPath", "version"
]

# pass data to version
@@ -101,7 +101,7 @@ class PrecollectInstances(pyblish.api.ContextPlugin):
data.update({
"name": "{}_{}".format(asset, subset),
"label": label,
"asset": asset,
"folderPath": asset,
"asset_name": asset_name,
"item": track_item,
"families": families,

@@ -192,7 +192,7 @@ class PrecollectInstances(pyblish.api.ContextPlugin):
if not hierarchy_data:
return

asset = data["asset"]
asset = data["folderPath"]
asset_name = data["asset_name"]

# insert family into families

@@ -244,7 +244,6 @@ class PrecollectInstances(pyblish.api.ContextPlugin):
if not master_layer:
return

asset = data.get("asset")
item = data.get("item")
clip_name = item.name()

@@ -252,7 +251,7 @@ class PrecollectInstances(pyblish.api.ContextPlugin):
if not self.test_any_audio(item):
return

asset = data["asset"]
asset = data["folderPath"]
asset_name = data["asset_name"]

# insert family into families
@@ -17,7 +17,7 @@ class PrecollectWorkfile(pyblish.api.ContextPlugin):
order = pyblish.api.CollectorOrder - 0.491

def process(self, context):
asset = context.data["asset"]
asset = context.data["folderPath"]
asset_name = asset.split("/")[-1]

active_timeline = hiero.ui.activeSequence()

@@ -64,7 +64,7 @@ class PrecollectWorkfile(pyblish.api.ContextPlugin):
"label": "{} - {}Main".format(
asset, family),
"name": "{}_{}".format(asset_name, family),
"asset": context.data["asset"],
"folderPath": context.data["folderPath"],
# TODO use 'get_subset_name'
"subset": "{}{}Main".format(asset_name, family.capitalize()),
"item": project,
@@ -75,7 +75,7 @@ class CollectInstances(pyblish.api.ContextPlugin):
# Create nice name if the instance has a frame range.
label = data.get("name", node.name())
label += " (%s)" % data["asset"] # include asset in name
label += " (%s)" % data["folderPath"] # include folder in name

instance = context.create_instance(label)
@@ -55,7 +55,7 @@ class CollectUsdBootstrap(pyblish.api.InstancePlugin):
self.log.debug("Add bootstrap for: %s" % bootstrap)

project_name = instance.context.data["projectName"]
asset_name = instance.data["asset"]
asset_name = instance.data["folderPath"]
asset_doc = get_asset_by_name(project_name, asset_name)
assert asset_doc, "Asset must exist: %s" % asset_name

@@ -95,7 +95,7 @@ class CollectUsdBootstrap(pyblish.api.InstancePlugin):
new.data["optional"] = False

# Copy some data from the instance for which we bootstrap
for key in ["asset"]:
for key in ["folderPath"]:
new.data[key] = instance.data[key]

def _subset_exists(self, project_name, instance, subset_name, asset_doc):

@@ -107,7 +107,7 @@ class CollectUsdBootstrap(pyblish.api.InstancePlugin):
for inst in context:
if (
inst.data["subset"] == subset_name
and inst.data["asset"] == asset_doc_name
and inst.data["folderPath"] == asset_doc_name
):
return True
@@ -55,7 +55,7 @@ class CollectUsdLayers(pyblish.api.InstancePlugin):
layer_inst.data["families"] = [family]
layer_inst.data["subset"] = "__stub__"
layer_inst.data["label"] = label
layer_inst.data["asset"] = instance.data["asset"]
layer_inst.data["folderPath"] = instance.data["folderPath"]
layer_inst.data["instance_node"] = instance.data["instance_node"]
# include same USD ROP
layer_inst.append(rop_node)
@@ -285,7 +285,7 @@ class ExtractUSDLayered(publish.Extractor):
# to detect whether we should make this into a new publish
# version. If not, skip it.
asset = get_asset_by_name(
project_name, dependency.data["asset"], fields=["_id"]
project_name, dependency.data["folderPath"], fields=["_id"]
)
subset = get_subset_by_name(
project_name,
@@ -18,7 +18,7 @@ class ValidateUSDShadeModelExists(pyblish.api.InstancePlugin):
def process(self, instance):
project_name = instance.context.data["projectName"]
asset_name = instance.data["asset"]
asset_name = instance.data["folderPath"]
subset = instance.data["subset"]

# Assume shading variation starts after a dot separator
client/ayon_core/hosts/max/plugins/create/create_workfile.py (new file, 109 lines)

@@ -0,0 +1,109 @@
# -*- coding: utf-8 -*-
"""Creator plugin for creating workfiles."""
from ayon_core.pipeline import CreatedInstance, AutoCreator
from ayon_core.client import get_asset_by_name, get_asset_name_identifier
from ayon_core.hosts.max.api import plugin
from ayon_core.hosts.max.api.lib import read, imprint
from pymxs import runtime as rt


class CreateWorkfile(plugin.MaxCreatorBase, AutoCreator):
"""Workfile auto-creator."""
identifier = "io.ayon.creators.max.workfile"
label = "Workfile"
family = "workfile"
icon = "fa5.file"

default_variant = "Main"

def create(self):
variant = self.default_variant
current_instance = next(
(
instance for instance in self.create_context.instances
if instance.creator_identifier == self.identifier
), None)
project_name = self.project_name
asset_name = self.create_context.get_current_asset_name()
task_name = self.create_context.get_current_task_name()
host_name = self.create_context.host_name

if current_instance is None:
asset_doc = get_asset_by_name(project_name, asset_name)
subset_name = self.get_subset_name(
variant, task_name, asset_doc, project_name, host_name
)
data = {
"folderPath": asset_name,
"task": task_name,
"variant": variant
}

data.update(
self.get_dynamic_data(
variant, task_name, asset_doc,
project_name, host_name, current_instance)
)
self.log.info("Auto-creating workfile instance...")
instance_node = self.create_node(subset_name)
data["instance_node"] = instance_node.name
current_instance = CreatedInstance(
self.family, subset_name, data, self
)
self._add_instance_to_context(current_instance)
imprint(instance_node.name, current_instance.data)
elif (
current_instance["folderPath"] != asset_name
or current_instance["task"] != task_name
):
# Update instance context if is not the same
asset_doc = get_asset_by_name(project_name, asset_name)
subset_name = self.get_subset_name(
variant, task_name, asset_doc, project_name, host_name
)
asset_name = get_asset_name_identifier(asset_doc)

current_instance["folderPath"] = asset_name
current_instance["task"] = task_name
current_instance["subset"] = subset_name

def collect_instances(self):
self.cache_subsets(self.collection_shared_data)
for instance in self.collection_shared_data["max_cached_subsets"].get(self.identifier, []): # noqa
if not rt.getNodeByName(instance):
continue
created_instance = CreatedInstance.from_existing(
read(rt.GetNodeByName(instance)), self
)
self._add_instance_to_context(created_instance)

def update_instances(self, update_list):
for created_inst, _ in update_list:
instance_node = created_inst.get("instance_node")
imprint(
instance_node,
created_inst.data_to_store()
)

def remove_instances(self, instances):
"""Remove specified instance from the scene.

This is only removing `id` parameter so instance is no longer
instance, because it might contain valuable data for artist.

"""
for instance in instances:
instance_node = rt.GetNodeByName(
instance.data.get("instance_node"))
if instance_node:
rt.Delete(instance_node)

self._remove_instance_from_context(instance)

def create_node(self, subset_name):
if rt.getNodeByName(subset_name):
node = rt.getNodeByName(subset_name)
return node
node = rt.Container(name=subset_name)
node.isHidden = True
return node
@@ -0,0 +1,23 @@
import os
import pyblish.api

from pymxs import runtime as rt


class CollectCurrentFile(pyblish.api.ContextPlugin):
"""Inject the current working file."""

order = pyblish.api.CollectorOrder - 0.5
label = "Max Current File"
hosts = ['max']

def process(self, context):
"""Inject the current working file"""
folder = rt.maxFilePath
file = rt.maxFileName
if not folder or not file:
self.log.error("Scene is not saved.")
current_file = os.path.join(folder, file)

context.data["currentFile"] = current_file
self.log.debug("Scene path: {}".format(current_file))
@@ -12,7 +12,9 @@ class CollectMembers(pyblish.api.InstancePlugin):
hosts = ['max']

def process(self, instance):
if instance.data["family"] == "workfile":
self.log.debug("Skipping Collecting Members for workfile family.")
return
if instance.data.get("instance_node"):
container = rt.GetNodeByName(instance.data["instance_node"])
instance.data["members"] = [
@@ -94,7 +94,7 @@ class CollectRender(pyblish.api.InstancePlugin):
renderer = str(renderer_class).split(":")[0]
# also need to get the render dir for conversion
data = {
"asset": instance.data["asset"],
"folderPath": instance.data["folderPath"],
"subset": str(instance.name),
"publish": True,
"maxversion": str(get_max_version()),
@@ -6,57 +6,41 @@ import pyblish.api
from pymxs import runtime as rt


class CollectWorkfile(pyblish.api.ContextPlugin):
class CollectWorkfile(pyblish.api.InstancePlugin):
"""Inject the current working file into context"""

order = pyblish.api.CollectorOrder - 0.01
label = "Collect 3dsmax Workfile"
hosts = ['max']
families = ["workfile"]

def process(self, context):
def process(self, instance):
"""Inject the current working file."""
context = instance.context
folder = rt.maxFilePath
file = rt.maxFileName
if not folder or not file:
self.log.error("Scene is not saved.")
current_file = os.path.join(folder, file)

context.data['currentFile'] = current_file

filename, ext = os.path.splitext(file)

task = context.data["task"]
ext = os.path.splitext(file)[-1].lstrip(".")

data = {}

# create instance
instance = context.create_instance(name=filename)
subset = 'workfile' + task.capitalize()

data.update({
"subset": subset,
"asset": context.data["asset"],
"label": subset,
"publish": True,
"family": 'workfile',
"families": ['workfile'],
"setMembers": [current_file],
"frameStart": context.data['frameStart'],
"frameEnd": context.data['frameEnd'],
"handleStart": context.data['handleStart'],
"handleEnd": context.data['handleEnd']
"setMembers": context.data["currentFile"],
"frameStart": context.data["frameStart"],
"frameEnd": context.data["frameEnd"],
"handleStart": context.data["handleStart"],
"handleEnd": context.data["handleEnd"]
})

data['representations'] = [{
'name': ext.lstrip("."),
'ext': ext.lstrip("."),
'files': file,
data["representations"] = [{
"name": ext,
"ext": ext,
"files": file,
"stagingDir": folder,
}]

instance.data.update(data)

self.log.info('Collected instance: {}'.format(file))
self.log.info('Scene path: {}'.format(current_file))
self.log.info('staging Dir: {}'.format(folder))
self.log.info('subset: {}'.format(subset))
self.log.debug("Collected data: {}".format(data))
self.log.debug("Collected instance: {}".format(file))
self.log.debug("staging Dir: {}".format(folder))
@@ -15,7 +15,7 @@ class GenerateUUIDsOnInvalidAction(pyblish.api.Action):
receive new UUIDs are actually invalid.

Requires:
- instance.data["asset"]
- instance.data["folderPath"]

"""

@@ -78,7 +78,7 @@ class GenerateUUIDsOnInvalidAction(pyblish.api.Action):
# should be always available, but kept a way to query it by name.
asset_doc = instance.data.get("assetEntity")
if not asset_doc:
asset_name = instance.data["asset"]
asset_name = instance.data["folderPath"]
project_name = instance.context.data["projectName"]
self.log.info((
"Asset is not stored on instance."
@@ -24,7 +24,8 @@ from ayon_core.client import (
get_asset_by_name,
get_subsets,
get_last_versions,
get_representation_by_name
get_representation_by_name,
get_asset_name_identifier,
)
from ayon_core.settings import get_project_settings
from ayon_core.pipeline import (

@@ -3152,11 +3153,11 @@ def fix_incompatible_containers():

def update_content_on_context_change():
"""
This will update scene content to match new asset on context change
This will update scene content to match new folder on context change
"""
scene_sets = cmds.listSets(allSets=True)
asset_doc = get_current_project_asset()
new_asset = asset_doc["name"]
new_folder_path = get_asset_name_identifier(asset_doc)
new_data = asset_doc["data"]
for s in scene_sets:
try:

@@ -3165,10 +3166,14 @@ def update_content_on_context_change():
}:
attr = cmds.listAttr(s)
print(s)
if "asset" in attr:
print(" - setting asset to: [ {} ]".format(new_asset))
cmds.setAttr("{}.asset".format(s),
new_asset, type="string")
if "folderPath" in attr:
print(
" - setting folder to: [ {} ]".format(new_folder_path)
)
cmds.setAttr(
"{}.folderPath".format(s),
new_folder_path, type="string"
)
if "frameStart" in attr:
cmds.setAttr("{}.frameStart".format(s),
new_data["frameStart"])
@@ -307,7 +307,7 @@ class CollectMayaRender(pyblish.api.InstancePlugin):
_instance.data["version"] = context.data["version"]

# Define nice label
label = "{0} ({1})".format(layer_name, instance.data["asset"])
label = "{0} ({1})".format(layer_name, instance.data["folderPath"])
label += " [{0}-{1}]".format(
int(data["frameStartHandle"]), int(data["frameEndHandle"])
)
@@ -99,7 +99,7 @@ class CollectVrayScene(pyblish.api.InstancePlugin):
instance.data.update(data)

# Define nice label
label = "{0} ({1})".format(layer_name, instance.data["asset"])
label = "{0} ({1})".format(layer_name, instance.data["folderPath"])
label += " [{0}-{1}]".format(
int(data["frameStartHandle"]), int(data["frameEndHandle"])
)
@@ -61,7 +61,7 @@ class ExtractUnrealSkeletalMeshFbx(publish.Extractor):
# we rely on hierarchy under one root.
original_parent = to_extract[0].split("|")[1]

parent_node = instance.data.get("asset")
parent_node = instance.data.get("folderPath")
# this needs to be done for AYON
# WARNING: since AYON supports duplicity of asset names,
# this needs to be refactored throughout the pipeline.
@@ -37,7 +37,7 @@ class ValidateInstanceInContext(pyblish.api.InstancePlugin,
if not self.is_active(instance.data):
return

asset = instance.data.get("asset")
asset = instance.data.get("folderPath")
context_asset = self.get_context_asset(instance)
if asset != context_asset:
raise PublishValidationError(

@@ -74,4 +74,4 @@ class ValidateInstanceInContext(pyblish.api.InstancePlugin,
@staticmethod
def get_context_asset(instance):
return instance.context.data["asset"]
return instance.context.data["folderPath"]
@@ -51,7 +51,7 @@ class ValidateShaderName(pyblish.api.InstancePlugin,
descendants = cmds.ls(descendants, noIntermediate=True, long=True)
shapes = cmds.ls(descendants, type=["nurbsSurface", "mesh"], long=True)
asset_name = instance.data.get("asset")
asset_name = instance.data.get("folderPath")

# Check the number of connected shadingEngines per shape
regex_compile = re.compile(cls.regex)
@@ -23,10 +23,10 @@ class ValidateCorrectAssetContext(
current asset (shot). This validator checks if this is so. It is optional
so it can be disabled when needed.

Checking `asset` and `task` keys.
Checking `folderPath` and `task` keys.
"""
order = ValidateContentsOrder
label = "Validate asset context"
label = "Validate Folder context"
hosts = ["nuke"]
actions = [
RepairAction,

@@ -85,7 +85,7 @@ class ValidateCorrectAssetContext(
"""Get invalid keys from instance data and context data."""

invalid_keys = []
testing_keys = ["asset", "task"]
testing_keys = ["folderPath", "task"]
for _key in testing_keys:
if _key not in instance.data:
invalid_keys.append(_key)
@@ -120,7 +120,7 @@ class PhotoshopServerStub:
"subset":"imageBG",
"family":"image",
"id":"ayon.create.instance",
"asset":"Town",
"folderPath":"Town",
"uuid": "8"
}] - for created instances
OR

@@ -421,7 +421,7 @@ class PhotoshopServerStub:
example:
{"8":{"active":true,"subset":"imageBG",
"family":"image","id":"ayon.create.instance",
"asset":"Town"}}
"folderPath":"/Town"}}
8 is layer(group) id - used for deletion, update etc.
"""
res = self.websocketserver.call(self.client.call('Photoshop.read'))
@@ -28,7 +28,7 @@ class CollectAutoImage(pyblish.api.ContextPlugin):
task_name = context.data["task"]
host_name = context.data["hostName"]
asset_doc = context.data["assetEntity"]
asset_name = get_asset_name_identifier(asset_doc)
folder_path = get_asset_name_identifier(asset_doc)

auto_creator = proj_settings.get(
"photoshop", {}).get(

@@ -86,7 +86,7 @@ class CollectAutoImage(pyblish.api.ContextPlugin):
instance = context.create_instance(subset_name)
instance.data["family"] = family
instance.data["asset"] = asset_name
instance.data["folderPath"] = folder_path
instance.data["subset"] = subset_name
instance.data["ids"] = publishable_ids
instance.data["publish"] = True
@@ -67,7 +67,7 @@ class CollectAutoReview(pyblish.api.ContextPlugin):
host_name = context.data["hostName"]
asset_doc = context.data["assetEntity"]

asset_name = get_asset_name_identifier(asset_doc)
folder_path = get_asset_name_identifier(asset_doc)

subset_name = get_subset_name(
family,

@@ -87,7 +87,7 @@ class CollectAutoReview(pyblish.api.ContextPlugin):
"family": family,
"families": [],
"representations": [],
"asset": asset_name,
"folderPath": folder_path,
"publish": self.publish
})
@@ -71,7 +71,7 @@ class CollectAutoWorkfile(pyblish.api.ContextPlugin):
host_name = context.data["hostName"]
asset_doc = context.data["assetEntity"]

asset_name = get_asset_name_identifier(asset_doc)
folder_path = get_asset_name_identifier(asset_doc)
subset_name = get_subset_name(
family,
variant,

@@ -91,7 +91,7 @@ class CollectAutoWorkfile(pyblish.api.ContextPlugin):
"family": family,
"families": [],
"representations": [],
"asset": asset_name
"folderPath": folder_path
})

# creating representation
@@ -2,7 +2,7 @@

Provides:
context -> Loaded batch file.
- asset
- folderPath
- task (task name)
- taskType
- project_name

@@ -71,7 +71,7 @@ class CollectBatchData(pyblish.api.ContextPlugin):
os.environ["AYON_FOLDER_PATH"] = asset_name
os.environ["AYON_TASK_NAME"] = task_name

context.data["asset"] = asset_name
context.data["folderPath"] = asset_name
context.data["task"] = task_name
context.data["taskType"] = task_type
context.data["project_name"] = project_name
@@ -56,7 +56,7 @@ class CollectColorCodedInstances(pyblish.api.ContextPlugin):
existing_subset_names = self._get_existing_subset_names(context)

# from CollectBatchData
asset_name = context.data["asset"]
asset_name = context.data["folderPath"]
task_name = context.data["task"]
variant = context.data["variant"]
project_name = context.data["projectEntity"]["name"]

@@ -163,7 +163,7 @@ class CollectColorCodedInstances(pyblish.api.ContextPlugin):
instance = context.create_instance(layer.name)
instance.data["family"] = family
instance.data["publish"] = True
instance.data["asset"] = asset
instance.data["folderPath"] = asset
instance.data["task"] = task_name
instance.data["subset"] = subset
instance.data["layer"] = layer
@@ -31,7 +31,7 @@ class ValidateInstanceAssetRepair(pyblish.api.Action):
current_asset_name = get_current_asset_name()
for instance in instances:
data = stub.read(instance[0])
data["asset"] = current_asset_name
data["folderPath"] = current_asset_name
stub.imprint(instance[0], data)

@@ -54,7 +54,7 @@ class ValidateInstanceAsset(OptionalPyblishPluginMixin,
order = ValidateContentsOrder

def process(self, instance):
instance_asset = instance.data["asset"]
instance_asset = instance.data["folderPath"]
current_asset = get_current_asset_name()

if instance_asset != current_asset:
@@ -519,7 +519,7 @@ def imprint(timeline_item, data=None):

Examples:
data = {
'asset': 'sq020sh0280',
'folderPath': 'sq020sh0280',
'family': 'render',
'subset': 'subsetMain'
}
@@ -69,7 +69,7 @@ class PrecollectInstances(pyblish.api.ContextPlugin):
data.update({
"name": "{}_{}".format(asset, subset),
"label": "{} {}".format(asset, subset),
"asset": asset,
"folderPath": asset,
"item": timeline_item,
"publish": get_publish_attribute(timeline_item),
"fps": context.data["fps"],

@@ -127,7 +127,7 @@ class PrecollectInstances(pyblish.api.ContextPlugin):
if not hierarchy_data:
return

asset = data["asset"]
asset = data["folderPath"]
subset = "shotMain"

# insert family into families

@@ -137,7 +137,7 @@ class PrecollectInstances(pyblish.api.ContextPlugin):
"name": "{}_{}".format(asset, subset),
"label": "{} {}".format(asset, subset),
"subset": subset,
"asset": asset,
"folderPath": asset,
"family": family,
"families": [],
"publish": get_publish_attribute(timeline_item)
@@ -28,7 +28,7 @@ class PrecollectWorkfile(pyblish.api.ContextPlugin):
instance_data = {
"name": "{}_{}".format(asset_name, subset),
"label": "{} {}".format(current_asset_name, subset),
"asset": current_asset_name,
"folderPath": current_asset_name,
"subset": subset,
"item": project,
"family": "workfile",
@@ -27,8 +27,8 @@ class CollectTextureSet(pyblish.api.InstancePlugin):
config = self.get_export_config(instance)
asset_doc = get_asset_by_name(
project_name=instance.context.data["projectName"],
asset_name=instance.data["asset"]
instance.context.data["projectName"],
instance.data["folderPath"]
)

instance.data["exportConfig"] = config
@@ -53,7 +53,7 @@ class OnlineCreator(TrayPublishCreator):
# disable check for existing subset with the same name
"""
asset = get_asset_by_name(
self.project_name, instance_data["asset"], fields=["_id"])
self.project_name, instance_data["folderPath"], fields=["_id"])

if get_subset_by_name(
self.project_name, origin_basename, asset["_id"],
@@ -17,7 +17,7 @@ class CollectShotInstance(pyblish.api.InstancePlugin):
families = ["shot"]

SHARED_KEYS = [
"asset",
"folderPath",
"fps",
"handleStart",
"handleEnd",

@@ -132,7 +132,7 @@ class CollectShotInstance(pyblish.api.InstancePlugin):
"sourceIn": _cr_attrs["sourceIn"],
"sourceOut": _cr_attrs["sourceOut"],
"workfileFrameStart": workfile_start_frame,
"asset": _cr_attrs["folderPath"],
"folderPath": _cr_attrs["folderPath"],
}

def _solve_hierarchy_context(self, instance):

@@ -170,7 +170,7 @@ class CollectShotInstance(pyblish.api.InstancePlugin):
parents = instance.data.get('parents', [])

# Split by '/' for AYON where asset is a path
asset_name = instance.data["asset"].split("/")[-1]
asset_name = instance.data["folderPath"].split("/")[-1]
actual = {asset_name: in_info}

for parent in reversed(parents):
@@ -40,7 +40,7 @@ class ValidateExistingVersion(

formatting_data = {
"subset_name": subset_name,
"asset_name": instance.data["asset"],
"asset_name": instance.data["folderPath"],
"version": version
}
raise PublishXmlValidationError(
@@ -65,7 +65,7 @@ class CollectWorkfileData(pyblish.api.ContextPlugin):
# Collect and store current context to have reference
current_context = {
"project_name": context.data["projectName"],
"asset_name": context.data["asset"],
"asset_name": context.data["folderPath"],
"task_name": context.data["task"]
}
self.log.debug("Current context is: {}".format(current_context))

@@ -105,7 +105,7 @@ class CollectWorkfileData(pyblish.api.ContextPlugin):
))

# Store context asset name
context.data["asset"] = asset_name
context.data["folderPath"] = asset_name
context.data["task"] = task_name
self.log.info(
"Context is set to Asset: \"{}\" and Task: \"{}\"".format(
@@ -20,7 +20,7 @@ class FixAssetNames(pyblish.api.Action):
on = "failed"

def process(self, context, plugin):
context_asset_name = context.data["asset"]
context_asset_name = context.data["folderPath"]
old_instance_items = list_instances()
new_instance_items = []
for instance_item in old_instance_items:

@@ -51,9 +51,9 @@ class ValidateAssetName(
def process(self, context):
if not self.is_active(context.data):
return
context_asset_name = context.data["asset"]
context_asset_name = context.data["folderPath"]
for instance in context:
asset_name = instance.data.get("asset")
asset_name = instance.data.get("folderPath")
if asset_name and asset_name == context_asset_name:
continue
@@ -64,7 +64,7 @@ class CollectRenderInstances(pyblish.api.InstancePlugin):
new_data = new_instance.data

new_data["asset"] = seq_name
new_data["folderPath"] = seq_name
new_data["setMembers"] = seq_name
new_data["family"] = "render"
new_data["families"] = ["render", "review"]
@@ -44,7 +44,10 @@ class VrayRenderPluginInfo():
@attr.s
class RedshiftRenderPluginInfo():
SceneFile = attr.ib(default=None)
Version = attr.ib(default=None)
# Use "1" as the default Redshift version just because it
# default fallback version in Deadline's Redshift plugin
# if no version was specified
Version = attr.ib(default="1")


class HoudiniSubmitDeadline(
@@ -112,7 +112,7 @@ class ProcessSubmittedCacheJobOnFarm(pyblish.api.InstancePlugin,
output_dir = self._get_publish_folder(
anatomy,
deepcopy(instance.data["anatomyData"]),
instance.data.get("asset"),
instance.data.get("folderPath"),
instance.data["subset"],
instance.context,
instance.data["family"],

@@ -126,7 +126,7 @@ class ProcessSubmittedCacheJobOnFarm(pyblish.api.InstancePlugin,
environment = {
"AYON_PROJECT_NAME": instance.context.data["projectName"],
"AYON_FOLDER_PATH": instance.context.data["asset"],
"AYON_FOLDER_PATH": instance.context.data["folderPath"],
"AYON_TASK_NAME": instance.context.data["task"],
"AYON_USERNAME": instance.context.data["user"],
"AYON_LOG_NO_COLORS": "1",

@@ -359,7 +359,7 @@ class ProcessSubmittedCacheJobOnFarm(pyblish.api.InstancePlugin,
# publish job file
publish_job = {
"asset": instance_skeleton_data["asset"],
"folderPath": instance_skeleton_data["folderPath"],
"frameStart": instance_skeleton_data["frameStart"],
"frameEnd": instance_skeleton_data["frameEnd"],
"fps": instance_skeleton_data["fps"],
@@ -189,7 +189,7 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin,
output_dir = self._get_publish_folder(
anatomy,
deepcopy(instance.data["anatomyData"]),
instance.data.get("asset"),
instance.data.get("folderPath"),
instances[0]["subset"],
instance.context,
instances[0]["family"],

@@ -203,7 +203,7 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin,
environment = {
"AYON_PROJECT_NAME": instance.context.data["projectName"],
"AYON_FOLDER_PATH": instance.context.data["asset"],
"AYON_FOLDER_PATH": instance.context.data["folderPath"],
"AYON_TASK_NAME": instance.context.data["task"],
"AYON_USERNAME": instance.context.data["user"],
"AYON_LOG_NO_COLORS": "1",

@@ -475,7 +475,7 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin,
# publish job file
publish_job = {
"asset": instance_skeleton_data["asset"],
"folderPath": instance_skeleton_data["folderPath"],
"frameStart": instance_skeleton_data["frameStart"],
"frameEnd": instance_skeleton_data["frameEnd"],
"fps": instance_skeleton_data["fps"],
@@ -1,6 +1,6 @@
from .royal_render_module import RoyalRenderModule
from .addon import RoyalRenderAddon


__all__ = (
"RoyalRenderModule",
"RoyalRenderAddon",
)
client/ayon_core/modules/royalrender/addon.py (new file, 36 lines added)
@@ -0,0 +1,36 @@
+# -*- coding: utf-8 -*-
+"""Module providing support for Royal Render."""
+import os
+
+from ayon_core.addon import AYONAddon, IPluginPaths
+
+
+class RoyalRenderAddon(AYONAddon, IPluginPaths):
+    """Class providing basic Royal Render implementation logic."""
+    name = "royalrender"
+
+    # _rr_api = None
+    # @property
+    # def rr_api(self):
+    #     if not self._rr_api:
+    #         # import royal render modules
+    #         from .api import Api
+    #         self._rr_api = Api(self.settings)
+    #     return self._rr_api
+
+    def initialize(self, studio_settings):
+        # type: (dict) -> None
+        self.enabled = self.name in studio_settings
+
+    @staticmethod
+    def get_plugin_paths():
+        # type: () -> dict
+        """Royal Render plugin paths.
+
+        Returns:
+            dict: Dictionary of plugin paths for RR.
+        """
+        current_dir = os.path.dirname(os.path.abspath(__file__))
+        return {
+            "publish": [os.path.join(current_dir, "plugins", "publish")]
+        }
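As a rough usage sketch (assuming `ayon_core` is importable in the current environment), the new addon only has to expose its publish plugin directory and flip `enabled` when the addon appears in the studio bundle:

```python
from ayon_core.modules.royalrender import RoyalRenderAddon

# get_plugin_paths() is a staticmethod, so it can be inspected without
# constructing the addon through the AddonsManager.
paths = RoyalRenderAddon.get_plugin_paths()
print(paths["publish"])  # [".../modules/royalrender/plugins/publish"]
```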
@@ -108,9 +108,7 @@ class BaseCreateRoyalRenderJob(pyblish.api.InstancePlugin,

 context = instance.context

-self._rr_root = self._resolve_rr_path(context, instance.data.get(
-    "rrPathName"))  # noqa
-self.log.debug(self._rr_root)
+self._rr_root = instance.data.get("rrPathName")
 if not self._rr_root:
     raise KnownPublishError(
         ("Missing RoyalRender root. "

@@ -210,35 +208,6 @@ class BaseCreateRoyalRenderJob(pyblish.api.InstancePlugin,
     """Host specific mapping for RRJob"""
     raise NotImplementedError

-@staticmethod
-def _resolve_rr_path(context, rr_path_name):
-    # type: (pyblish.api.Context, str) -> str
-    rr_settings = (
-        context.data
-        ["system_settings"]
-        ["modules"]
-        ["royalrender"]
-    )
-    try:
-        default_servers = rr_settings["rr_paths"]
-        project_servers = (
-            context.data
-            ["project_settings"]
-            ["royalrender"]
-            ["rr_paths"]
-        )
-        rr_servers = {
-            k: default_servers[k]
-            for k in project_servers
-            if k in default_servers
-        }
-
-    except (AttributeError, KeyError):
-        # Handle situation were we had only one url for royal render.
-        return context.data["defaultRRPath"][platform.system().lower()]
-
-    return rr_servers[rr_path_name][platform.system().lower()]
-
 def expected_files(self, instance, path, start_frame, end_frame):
     """Get expected files.

@@ -350,7 +319,7 @@ class BaseCreateRoyalRenderJob(pyblish.api.InstancePlugin,

 add_kwargs = {
     "project": anatomy_data["project"]["name"],
-    "asset": instance.context.data["asset"],
+    "asset": instance.context.data["folderPath"],
     "task": anatomy_data["task"]["name"],
     "app": instance.context.data.get("appName"),
     "envgroup": "farm"
@@ -18,30 +18,24 @@ class CollectRRPathFromInstance(pyblish.api.InstancePlugin):
 def _collect_rr_path_name(instance):
     # type: (pyblish.api.Instance) -> str
     """Get Royal Render path name from render instance."""
-    rr_settings = (
-        instance.context.data
-        ["system_settings"]
-        ["modules"]
-        ["royalrender"]
-    )
-    if not instance.data.get("rrPaths"):
+    # TODO there are no "rrPaths" on instance, if Publisher should expose
+    # this (eg. artist could select specific server) it must be added
+    # to publisher
+    instance_rr_paths = instance.data.get("rrPaths")
+    if instance_rr_paths is None:
         return "default"
-    try:
-        default_servers = rr_settings["rr_paths"]
-        project_servers = (
-            instance.context.data
-            ["project_settings"]
-            ["royalrender"]
-            ["rr_paths"]
-        )
-        rr_servers = {
-            k: default_servers[k]
-            for k in project_servers
-            if k in default_servers
-        }
-
-    except (AttributeError, KeyError):
-        # Handle situation were we had only one url for royal render.
-        return rr_settings["rr_paths"]["default"]
+    rr_settings = instance.context.data["project_settings"]["royalrender"]
+    rr_paths = rr_settings["rr_paths"]
+    selected_paths = rr_settings["selected_rr_paths"]

-    return list(rr_servers.keys())[int(instance.data.get("rrPaths"))]
+    rr_servers = {
+        path_key
+        for path_key in selected_paths
+        if path_key in rr_paths
+    }
+    for instance_rr_path in instance_rr_paths:
+        if instance_rr_path in rr_servers:
+            return instance_rr_path
+    return "default"
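The rewritten collector resolves the path name purely from project settings. Below is a standalone re-statement of that selection logic with made-up settings values, just to show the fallback behaviour; it is not the plugin code itself:

```python
def pick_rr_path_name(instance_rr_paths, project_settings):
    """Mirror of the selection above, for illustration only."""
    if instance_rr_paths is None:
        return "default"
    rr_settings = project_settings["royalrender"]
    rr_paths = rr_settings["rr_paths"]
    selected_paths = rr_settings["selected_rr_paths"]
    rr_servers = {key for key in selected_paths if key in rr_paths}
    for candidate in instance_rr_paths:
        if candidate in rr_servers:
            return candidate
    return "default"


# Hypothetical project settings payload.
settings = {
    "royalrender": {
        "rr_paths": {"default": "/rr/root", "gpu_farm": "/rr/gpu"},
        "selected_rr_paths": ["gpu_farm"],
    }
}
print(pick_rr_path_name(["gpu_farm"], settings))  # "gpu_farm"
print(pick_rr_path_name(None, settings))          # "default"
```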
@@ -189,8 +189,8 @@ class CollectSequencesFromJob(pyblish.api.ContextPlugin):
 "family": families[0],  # backwards compatibility / pyblish
 "families": list(families),
 "subset": subset,
-"asset": data.get(
-    "asset", context.data["asset"]
+"folderPath": data.get(
+    "folderPath", context.data["folderPath"]
 ),
 "stagingDir": root,
 "frameStart": start,
@@ -132,7 +132,7 @@ class CreatePublishRoyalRenderJob(pyblish.api.InstancePlugin,

 # publish job file
 publish_job = {
-    "asset": instance_skeleton_data["asset"],
+    "folderPath": instance_skeleton_data["folderPath"],
     "frameStart": instance_skeleton_data["frameStart"],
     "frameEnd": instance_skeleton_data["frameEnd"],
     "fps": instance_skeleton_data["fps"],

@@ -180,7 +180,7 @@ class CreatePublishRoyalRenderJob(pyblish.api.InstancePlugin,

 environment = RREnvList({
     "AYON_PROJECT_NAME": anatomy_data["project"]["name"],
-    "AYON_FOLDER_PATH": instance.context.data["asset"],
+    "AYON_FOLDER_PATH": instance.context.data["folderPath"],
     "AYON_TASK_NAME": anatomy_data["task"]["name"],
     "AYON_USERNAME": anatomy_data["user"]
 })

@@ -216,7 +216,7 @@ class CreatePublishRoyalRenderJob(pyblish.api.InstancePlugin,
 SeqEnd=1,
 SeqStep=1,
 SeqFileOffset=0,
-Version=self._sanitize_version(os.environ.get("OPENPYPE_VERSION")),
+Version=os.environ["AYON_BUNDLE_NAME"],
 SceneName=abs_metadata_path,
 # command line arguments
 CustomAddCmdFlags=" ".join(args),

@@ -243,26 +243,3 @@ class CreatePublishRoyalRenderJob(pyblish.api.InstancePlugin,
 job.WaitForPreIDs += jobs_pre_ids

 return job
-
-def _sanitize_version(self, version):
-    """Returns version in format MAJOR.MINORPATCH
-
-    3.15.7-nightly.2 >> 3.157
-    """
-    VERSION_REGEX = re.compile(
-        r"(?P<major>0|[1-9]\d*)"
-        r"\.(?P<minor>0|[1-9]\d*)"
-        r"\.(?P<patch>0|[1-9]\d*)"
-        r"(?:-(?P<prerelease>[a-zA-Z\d\-.]*))?"
-        r"(?:\+(?P<buildmetadata>[a-zA-Z\d\-.]*))?"
-    )
-
-    valid_parts = VERSION_REGEX.findall(version)
-    if len(valid_parts) != 1:
-        # Return invalid version with filled 'origin' attribute
-        return version
-
-    # Unpack found version
-    major, minor, patch, pre, post = valid_parts[0]
-
-    return "{}.{}{}".format(major, minor, patch)
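With the job `Version` now taken directly from `AYON_BUNDLE_NAME`, the removed `_sanitize_version` helper is obsolete. For reference, its regex behaved exactly as the docstring describes; a quick standalone check (not part of the plugin):

```python
import re

VERSION_REGEX = re.compile(
    r"(?P<major>0|[1-9]\d*)"
    r"\.(?P<minor>0|[1-9]\d*)"
    r"\.(?P<patch>0|[1-9]\d*)"
    r"(?:-(?P<prerelease>[a-zA-Z\d\-.]*))?"
    r"(?:\+(?P<buildmetadata>[a-zA-Z\d\-.]*))?"
)

# The docstring example: "3.15.7-nightly.2" collapses to "3.157".
major, minor, patch, _pre, _build = VERSION_REGEX.findall("3.15.7-nightly.2")[0]
print("{}.{}{}".format(major, minor, patch))  # 3.157
```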
@@ -25,16 +25,6 @@ class SubmitJobsToRoyalRender(pyblish.api.ContextPlugin):
 self._submission_parameters = []

 def process(self, context):
-    rr_settings = (
-        context.data
-        ["system_settings"]
-        ["modules"]
-        ["royalrender"]
-    )
-
-    if rr_settings["enabled"] is not True:
-        self.log.warning("RoyalRender modules is disabled.")
-        return
-
     # iterate over all instances and try to find RRJobs
     jobs = []

@@ -51,7 +41,7 @@ class SubmitJobsToRoyalRender(pyblish.api.ContextPlugin):
 instance_rr_path = instance.data["rrPathName"]

 if jobs:
-    self._rr_root = self._resolve_rr_path(context, instance_rr_path)
+    self._rr_root = instance_rr_path
     if not self._rr_root:
         raise KnownPublishError(
             ("Missing RoyalRender root. "

@@ -100,32 +90,3 @@ class SubmitJobsToRoyalRender(pyblish.api.ContextPlugin):

 def get_submission_parameters(self):
     return [SubmitterParameter("RequiredMemory", "0")]

-@staticmethod
-def _resolve_rr_path(context, rr_path_name):
-    # type: (pyblish.api.Context, str) -> str
-    rr_settings = (
-        context.data
-        ["system_settings"]
-        ["modules"]
-        ["royalrender"]
-    )
-    try:
-        default_servers = rr_settings["rr_paths"]
-        project_servers = (
-            context.data
-            ["project_settings"]
-            ["royalrender"]
-            ["rr_paths"]
-        )
-        rr_servers = {
-            k: default_servers[k]
-            for k in project_servers
-            if k in default_servers
-        }
-
-    except (AttributeError, KeyError):
-        # Handle situation were we had only one url for royal render.
-        return context.data["defaultRRPath"][platform.system().lower()]
-
-    return rr_servers[rr_path_name][platform.system().lower()]
@@ -1,45 +0,0 @@
-# -*- coding: utf-8 -*-
-"""Module providing support for Royal Render."""
-import os
-import ayon_core.modules
-from ayon_core.modules import OpenPypeModule, IPluginPaths
-
-
-class RoyalRenderModule(OpenPypeModule, IPluginPaths):
-    """Class providing basic Royal Render implementation logic."""
-    name = "royalrender"
-
-    @property
-    def api(self):
-        if not self._api:
-            # import royal render modules
-            from . import api as rr_api
-            self._api = rr_api.Api(self.settings)
-
-        return self._api
-
-    def __init__(self, manager, settings):
-        # type: (ayon_core.addon.AddonsManager, dict) -> None
-        self.rr_paths = {}
-        self._api = None
-        self.settings = settings
-        super(RoyalRenderModule, self).__init__(manager, settings)
-
-    def initialize(self, module_settings):
-        # type: (dict) -> None
-        rr_settings = module_settings[self.name]
-        self.enabled = rr_settings["enabled"]
-        self.rr_paths = rr_settings.get("rr_paths")
-
-    @staticmethod
-    def get_plugin_paths():
-        # type: () -> dict
-        """Royal Render plugin paths.
-
-        Returns:
-            dict: Dictionary of plugin paths for RR.
-        """
-        current_dir = os.path.dirname(os.path.abspath(__file__))
-        return {
-            "publish": [os.path.join(current_dir, "plugins", "publish")]
-        }
@@ -24,7 +24,7 @@ class StartTimer(pyblish.api.ContextPlugin):
 return

 project_name = context.data["projectName"]
-asset_name = context.data.get("asset")
+asset_name = context.data.get("folderPath")
 task_name = context.data.get("task")
 if not project_name or not asset_name or not task_name:
     self.log.info((
@@ -48,7 +48,7 @@ Family tells how should be instance processed and subset what name will publishe
         <Plugin name>: {...},
         ...
     },
-    ## Additional data related to instance (`asset`, `task`, etc.)
+    ## Additional data related to instance (`folderPath`, `task`, etc.)
     ...
 }
 ```
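To make the documented structure concrete, here is an illustrative instance data payload after this rename; the key names follow the documentation above, while every value is made up:

```python
instance_data = {
    "id": "pyblish.avalon.instance",
    "family": "render",
    "subset": "renderMain",
    "publish_attributes": {
        # "<Plugin name>": {...},
    },
    # Additional data related to the instance
    "folderPath": "/shots/sq01/sh0010",   # formerly stored under "asset"
    "task": "compositing",
    "active": True,
}
```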
@@ -28,7 +28,7 @@ class LegacyCreator(object):
 log = logging.getLogger("LegacyCreator")
 log.propagate = True

-def __init__(self, name, asset, options=None, data=None):
+def __init__(self, name, folder_path, options=None, data=None):
     self.name = name  # For backwards compatibility
     self.options = options

@@ -37,7 +37,7 @@ class LegacyCreator(object):
 # TODO use 'AYON_INSTANCE_ID' when all hosts support it
 self.data["id"] = AVALON_INSTANCE_ID
 self.data["family"] = self.family
-self.data["asset"] = asset
+self.data["folderPath"] = folder_path
 self.data["subset"] = name
 self.data["active"] = True
@@ -32,7 +32,7 @@ def get_last_versions_for_instances(
 subset_names_by_asset_name = collections.defaultdict(set)
 instances_by_hierarchy = {}
 for instance in instances:
-    asset_name = instance.data.get("asset")
+    asset_name = instance.data.get("folderPath")
     subset_name = instance.subset_name
     if not asset_name or not subset_name:
         if use_value_for_missing:
@@ -197,7 +197,7 @@ def create_skeleton_instance(

 if data.get("extendFrames", False):
     time_data.start, time_data.end = extend_frames(
-        data["asset"],
+        data["folderPath"],
         data["subset"],
         time_data.start,
         time_data.end,

@@ -228,7 +228,7 @@ def create_skeleton_instance(
 "family": family,
 "subset": data["subset"],
 "families": families,
-"asset": data["asset"],
+"folderPath": data["folderPath"],
 "frameStart": time_data.start,
 "frameEnd": time_data.end,
 "handleStart": time_data.handle_start,

@@ -777,7 +777,7 @@ def create_skeleton_instance_cache(instance):

 if data.get("extendFrames", False):
     time_data.start, time_data.end = extend_frames(
-        data["asset"],
+        data["folderPath"],
         data["subset"],
         time_data.start,
         time_data.end,

@@ -805,7 +805,7 @@ def create_skeleton_instance_cache(instance):
 "family": family,
 "subset": data["subset"],
 "families": families,
-"asset": data["asset"],
+"folderPath": data["folderPath"],
 "frameStart": time_data.start,
 "frameEnd": time_data.end,
 "handleStart": time_data.handle_start,

@@ -1011,7 +1011,7 @@ def copy_extend_frames(instance, representation):
 version = get_last_version_by_subset_name(
     project_name,
     instance.data.get("subset"),
-    asset_name=instance.data.get("asset")
+    asset_name=instance.data.get("folderPath")
 )

 # get its files based on extension
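Taken together, the farm-publishing hunks above assemble a skeleton payload keyed by the new name. An illustrative minimal skeleton dict, restricted to keys visible in these hunks (all values are invented):

```python
instance_skeleton_data = {
    "family": "render",
    "subset": "renderMain",
    "families": ["render", "review"],
    "folderPath": "/shots/sq01/sh0010",   # renamed from "asset"
    "frameStart": 1001,
    "frameEnd": 1100,
    "handleStart": 0,
    "fps": 25.0,
}
```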
@@ -30,7 +30,7 @@ class RenderInstance(object):
 label = attr.ib()  # label to show in GUI
 subset = attr.ib()  # subset name
 task = attr.ib()  # task name
-asset = attr.ib()  # asset name
+folderPath = attr.ib()  # folder path
 attachTo = attr.ib()  # subset name to attach render to
 setMembers = attr.ib()  # list of nodes/members producing render output
 publish = attr.ib()  # bool, True to publish instance
@@ -3,7 +3,7 @@ Requires:
 context -> anatomyData
 context -> projectEntity
 context -> assetEntity
-instance -> asset
+instance -> folderPath
 instance -> subset
 instance -> family

@@ -68,7 +68,7 @@ class CollectAnatomyInstanceData(pyblish.api.ContextPlugin):
 instances_with_missing_asset_doc = collections.defaultdict(list)
 for instance in context:
     instance_asset_doc = instance.data.get("assetEntity")
-    _asset_name = instance.data["asset"]
+    _asset_name = instance.data["folderPath"]

     # There is possibility that assetEntity on instance is already set
     # which can happen in standalone publisher

@@ -296,7 +296,7 @@ class CollectAnatomyInstanceData(pyblish.api.ContextPlugin):
 if hierarchy:
     parent_name = hierarchy.split("/")[-1]

-asset_name = instance.data["asset"].split("/")[-1]
+asset_name = instance.data["folderPath"].split("/")[-1]
 anatomy_data.update({
     "asset": asset_name,
     "hierarchy": hierarchy,

@@ -337,7 +337,7 @@ class CollectAnatomyInstanceData(pyblish.api.ContextPlugin):

 # Try to find task data based on hierarchy context and asset name
 hierarchy_context = instance.context.data.get("hierarchyContext")
-asset_name = instance.data.get("asset")
+asset_name = instance.data.get("folderPath")
 if not hierarchy_context or not asset_name:
     return
@@ -66,7 +66,7 @@ class CollectAudio(pyblish.api.ContextPlugin):
 # Add audio to instance if exists.
 instances_by_asset_name = collections.defaultdict(list)
 for instance in filtered_instances:
-    asset_name = instance.data["asset"]
+    asset_name = instance.data["folderPath"]
     instances_by_asset_name[asset_name].append(instance)

 asset_names = set(instances_by_asset_name.keys())
@@ -25,7 +25,7 @@ class CollectContextEntities(pyblish.api.ContextPlugin):

 def process(self, context):
     project_name = context.data["projectName"]
-    asset_name = context.data["asset"]
+    asset_name = context.data["folderPath"]
     task_name = context.data["task"]

     project_entity = get_project(project_name)
@@ -1,7 +1,7 @@
 """
 Provides:
     context -> projectName (str)
-    context -> asset (str)
+    context -> folderPath (str)
     context -> task (str)
 """

@@ -21,7 +21,7 @@ class CollectCurrentContext(pyblish.api.ContextPlugin):
 def process(self, context):
     # Check if values are already set
     project_name = context.data.get("projectName")
-    asset_name = context.data.get("asset")
+    asset_name = context.data.get("folderPath")
     task_name = context.data.get("task")

     current_context = get_current_context()

@@ -29,13 +29,12 @@ class CollectCurrentContext(pyblish.api.ContextPlugin):
 context.data["projectName"] = current_context["project_name"]

 if not asset_name:
-    context.data["asset"] = current_context["asset_name"]
+    context.data["folderPath"] = current_context["asset_name"]

 if not task_name:
     context.data["task"] = current_context["task_name"]

 # QUESTION should we be explicit with keys? (the same on instances)
 # - 'asset' -> 'assetName'
 # - 'task' -> 'taskName'

 self.log.info((

@@ -45,6 +44,6 @@ class CollectCurrentContext(pyblish.api.ContextPlugin):
 "Task: {task_name}"
 ).format(
     project_name=context.data["projectName"],
-    asset_name=context.data["asset"],
+    asset_name=context.data["folderPath"],
     task_name=context.data["task"]
 ))
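Note that `get_current_context()` still reports the folder under the legacy `asset_name` key; the collector simply stores it under `folderPath`. A compact sketch of that mapping with invented values:

```python
# Invented context values; the key mapping mirrors the hunk above.
current_context = {
    "project_name": "demo_project",
    "asset_name": "/shots/sq01/sh0010",
    "task_name": "compositing",
}

context_data = {
    "projectName": current_context["project_name"],
    "folderPath": current_context["asset_name"],
    "task": current_context["task_name"],
}
print(context_data["folderPath"])  # "/shots/sq01/sh0010"
```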
@@ -41,7 +41,7 @@ class CollectFramesFixDef(
 instance.data["frames_to_fix"] = frames_to_fix

 subset_name = instance.data["subset"]
-asset_name = instance.data["asset"]
+asset_name = instance.data["folderPath"]

 project_entity = instance.data["projectEntity"]
 project_name = project_entity["name"]
@@ -38,7 +38,6 @@ class CollectFromCreateContext(pyblish.api.ContextPlugin):

 for created_instance in create_context.instances:
     instance_data = created_instance.data_to_store()
-    instance_data["asset"] = instance_data.pop("folderPath")
     if instance_data["active"]:
         thumbnail_path = thumbnail_paths_by_instance_id.get(
             created_instance.id

@@ -80,7 +79,7 @@ class CollectFromCreateContext(pyblish.api.ContextPlugin):
 instance = context.create_instance(subset)
 instance.data.update({
     "subset": subset,
-    "asset": in_data["asset"],
+    "folderPath": in_data["folderPath"],
     "task": in_data["task"],
     "label": in_data.get("label") or subset,
     "name": subset,
@@ -62,7 +62,7 @@ class CollectHierarchy(pyblish.api.ContextPlugin):
     "pixelAspect": instance.data["pixelAspect"]
 }
 # Split by '/' for AYON where asset is a path
-name = instance.data["asset"].split("/")[-1]
+name = instance.data["folderPath"].split("/")[-1]
 actual = {name: shot_data}

 for parent in reversed(instance.data["parents"]):
@@ -71,14 +71,19 @@ class CollectRenderedFiles(pyblish.api.ContextPlugin):
 """
 # validate basic necessary data
 data_err = "invalid json file - missing data"
-required = ["asset", "user", "comment",
+required = ["user", "comment",
             "job", "instances", "version"]
 assert all(elem in data.keys() for elem in required), data_err
+if "folderPath" not in data and "asset" not in data:
+    raise AssertionError(data_err)
+
+if "folderPath" not in data:
+    data["folderPath"] = data.pop("asset")

 # set context by first json file
 ctx = self._context.data

-ctx["asset"] = ctx.get("asset") or data.get("asset")
+ctx["folderPath"] = ctx.get("folderPath") or data.get("folderPath")
 ctx["intent"] = ctx.get("intent") or data.get("intent")
 ctx["comment"] = ctx.get("comment") or data.get("comment")
 ctx["user"] = ctx.get("user") or data.get("user")

@@ -87,7 +92,7 @@ class CollectRenderedFiles(pyblish.api.ContextPlugin):
 # basic sanity check to see if we are working in same context
 # if some other json file has different context, bail out.
 ctx_err = "inconsistent contexts in json files - %s"
-assert ctx.get("asset") == data.get("asset"), ctx_err % "asset"
+assert ctx.get("folderPath") == data.get("folderPath"), ctx_err % "folderPath"
 assert ctx.get("intent") == data.get("intent"), ctx_err % "intent"
 assert ctx.get("comment") == data.get("comment"), ctx_err % "comment"
 assert ctx.get("user") == data.get("user"), ctx_err % "user"
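The extra checks keep metadata JSON files written by older submitters publishable: an `asset` key is migrated to `folderPath` before the context comparison runs. A standalone sketch of that fallback with a made-up payload:

```python
def migrate_metadata(data):
    """Illustrative version of the fallback in the hunk above."""
    data_err = "invalid json file - missing data"
    required = ["user", "comment", "job", "instances", "version"]
    assert all(elem in data for elem in required), data_err
    if "folderPath" not in data and "asset" not in data:
        raise AssertionError(data_err)
    if "folderPath" not in data:
        data["folderPath"] = data.pop("asset")
    return data


legacy_payload = {
    "asset": "/shots/sq01/sh0010",  # written by an older submitter
    "user": "artist",
    "comment": "",
    "job": {},
    "instances": [],
    "version": 1,
}
print(migrate_metadata(legacy_payload)["folderPath"])  # "/shots/sq01/sh0010"
```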
@@ -45,7 +45,7 @@ class ExtractHierarchyToAYON(pyblish.api.ContextPlugin):
     continue

 # Skip if instance asset does not match
-instance_asset_name = instance.data.get("asset")
+instance_asset_name = instance.data.get("folderPath")
 instances_by_asset_name[instance_asset_name].append(instance)

 project_doc = context.data["projectEntity"]

@@ -189,7 +189,7 @@ class ExtractHierarchyToAYON(pyblish.api.ContextPlugin):
 active_folder_paths = set()
 for instance in context:
     if instance.data.get("publish") is not False:
-        active_folder_paths.add(instance.data.get("asset"))
+        active_folder_paths.add(instance.data.get("folderPath"))

 active_folder_paths.discard(None)
@@ -68,7 +68,7 @@ class ExtractOtioAudioTracks(pyblish.api.ContextPlugin):
 def add_audio_to_instances(self, audio_file, instances):
     created_files = []
     for inst in instances:
-        name = inst.data["asset"]
+        name = inst.data["folderPath"]

         recycling_file = [f for f in created_files if name in f]
@@ -196,12 +196,13 @@ class IntegrateThumbnailsAYON(pyblish.api.ContextPlugin):
 ))

 asset_entity = instance.data["assetEntity"]
+folder_path = instance.data["folderPath"]
 thumbnail_info_by_entity_id[asset_entity["_id"]] = {
     "thumbnail_id": thumbnail_id,
     "entity_type": "asset",
 }
-self.log.debug("Setting thumbnail for asset \"{}\" <{}>".format(
-    asset_entity["name"], version_id
+self.log.debug("Setting thumbnail for folder \"{}\" <{}>".format(
+    folder_path, version_id
 ))

 op_session = OperationsSession()
@@ -113,7 +113,7 @@ class ValidateEditorialAssetName(pyblish.api.ContextPlugin):
 def get_parents(self, context):
     return_dict = {}
     for instance in context:
-        asset = instance.data["asset"]
+        asset = instance.data["folderPath"]
         families = instance.data.get("families", []) + [
             instance.data["family"]
         ]
@@ -36,7 +36,7 @@ class ValidateSubsetUniqueness(pyblish.api.ContextPlugin):
     continue

 # Ignore instance without asset data
-asset = instance.data.get("asset")
+asset = instance.data.get("folderPath")
 if asset is None:
     self.log.warning("Instance found without `asset` data: "
                      "{}".format(instance.name))
@@ -62,34 +62,13 @@ def _convert_general(ayon_settings, output, default_settings):
 }


-def _convert_royalrender_system_settings(
-    ayon_settings, output, addon_versions, default_settings
-):
-    enabled = addon_versions.get("royalrender") is not None
-    rr_settings = default_settings["modules"]["royalrender"]
-    rr_settings["enabled"] = enabled
-    if enabled:
-        ayon_royalrender = ayon_settings["royalrender"]
-        rr_settings["rr_paths"] = {
-            item["name"]: item["value"]
-            for item in ayon_royalrender["rr_paths"]
-        }
-    output["modules"]["royalrender"] = rr_settings
-
-
 def _convert_modules_system(
     ayon_settings, output, addon_versions, default_settings
 ):
     # TODO add all modules
     # TODO add 'enabled' values
-    for func in (
-        _convert_royalrender_system_settings,
-    ):
-        func(ayon_settings, output, addon_versions, default_settings)
-
     for key in {
         "timers_manager",
         "clockify",
         "royalrender",
         "deadline",
     }:
         if addon_versions.get(key):
@@ -22,7 +22,8 @@
 "aov_separator": "underscore",
 "image_format": "exr",
 "multilayer_exr": true,
-"aov_list": [],
+"renderer": "CYCLES",
+"aov_list": ["combined"],
 "custom_passes": []
 },
 "workfile_builder": {
@@ -23,6 +23,13 @@ def image_format_enum():
 ]


+def renderers_enum():
+    return [
+        {"value": "CYCLES", "label": "Cycles"},
+        {"value": "BLENDER_EEVEE", "label": "Eevee"},
+    ]
+
+
 def aov_list_enum():
     return [
         {"value": "empty", "label": "< none >"},

@@ -30,18 +37,52 @@ def aov_list_enum():
 {"value": "z", "label": "Z"},
 {"value": "mist", "label": "Mist"},
 {"value": "normal", "label": "Normal"},
-{"value": "diffuse_light", "label": "Diffuse Light"},
+{"value": "position", "label": "Position (Cycles Only)"},
+{"value": "vector", "label": "Vector (Cycles Only)"},
+{"value": "uv", "label": "UV (Cycles Only)"},
+{"value": "denoising", "label": "Denoising Data (Cycles Only)"},
+{"value": "object_index", "label": "Object Index (Cycles Only)"},
+{"value": "material_index", "label": "Material Index (Cycles Only)"},
+{"value": "sample_count", "label": "Sample Count (Cycles Only)"},
+{"value": "diffuse_light", "label": "Diffuse Light/Direct"},
+{
+    "value": "diffuse_indirect",
+    "label": "Diffuse Indirect (Cycles Only)"
+},
 {"value": "diffuse_color", "label": "Diffuse Color"},
-{"value": "specular_light", "label": "Specular Light"},
-{"value": "specular_color", "label": "Specular Color"},
-{"value": "volume_light", "label": "Volume Light"},
+{"value": "specular_light", "label": "Specular (Glossy) Light/Direct"},
+{
+    "value": "specular_indirect",
+    "label": "Specular (Glossy) Indirect (Cycles Only)"
+},
+{"value": "specular_color", "label": "Specular (Glossy) Color"},
+{
+    "value": "transmission_light",
+    "label": "Transmission Light/Direct (Cycles Only)"
+},
+{
+    "value": "transmission_indirect",
+    "label": "Transmission Indirect (Cycles Only)"
+},
+{
+    "value": "transmission_color",
+    "label": "Transmission Color (Cycles Only)"
+},
+{"value": "volume_light", "label": "Volume Light/Direct"},
+{"value": "volume_indirect", "label": "Volume Indirect (Cycles Only)"},
 {"value": "emission", "label": "Emission"},
 {"value": "environment", "label": "Environment"},
-{"value": "shadow", "label": "Shadow"},
+{"value": "shadow", "label": "Shadow/Shadow Catcher"},
 {"value": "ao", "label": "Ambient Occlusion"},
-{"value": "denoising", "label": "Denoising"},
-{"value": "volume_direct", "label": "Direct Volumetric Scattering"},
-{"value": "volume_indirect", "label": "Indirect Volumetric Scattering"}
+{"value": "bloom", "label": "Bloom (Eevee Only)"},
+{"value": "transparent", "label": "Transparent (Eevee Only)"},
+{"value": "cryptomatte_object", "label": "Cryptomatte Object"},
+{"value": "cryptomatte_material", "label": "Cryptomatte Material"},
+{"value": "cryptomatte_asset", "label": "Cryptomatte Asset"},
+{
+    "value": "cryptomatte_accurate",
+    "label": "Cryptomatte Accurate Mode (Eevee Only)"
+},
 ]

@@ -81,6 +122,14 @@ class RenderSettingsModel(BaseSettingsModel):
 multilayer_exr: bool = SettingsField(
     title="Multilayer (EXR)"
 )
+renderer: str = SettingsField(
+    "CYCLES",
+    title="Renderer",
+    enum_resolver=renderers_enum
+)
+compositing: bool = SettingsField(
+    title="Enable Compositing"
+)
 aov_list: list[str] = SettingsField(
     default_factory=list,
     enum_resolver=aov_list_enum,

@@ -102,6 +151,8 @@ DEFAULT_RENDER_SETTINGS = {
 "aov_separator": "underscore",
 "image_format": "exr",
 "multilayer_exr": True,
-"aov_list": [],
+"renderer": "CYCLES",
+"compositing": True,
+"aov_list": ["combined"],
 "custom_passes": []
 }
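The expanded enum above is what gates valid `aov_list` values in the Blender render settings. A quick, hedged sanity check against a made-up settings payload, using a handful of values that appear in the hunks above:

```python
# A few values copied from the hunks above; the payload itself is invented.
allowed_aovs = {
    "combined", "z", "mist", "normal", "ao",
    "cryptomatte_object", "cryptomatte_material", "cryptomatte_asset",
}

render_settings = {
    "renderer": "CYCLES",
    "compositing": True,
    "aov_list": ["combined", "z", "cryptomatte_object"],
}
unknown = [aov for aov in render_settings["aov_list"] if aov not in allowed_aovs]
assert not unknown, "Unexpected AOV values: {}".format(unknown)
```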
@@ -1 +1 @@
-__version__ = "0.1.5"
+__version__ = "0.1.6"