Merge branch 'develop' into enhancement/OP-8268_AOV-filter-for-Ftrack-family

This commit is contained in:
Kayla Man 2024-02-22 17:23:47 +08:00 committed by GitHub
commit 5aeace284b
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
166 changed files with 1243 additions and 1618 deletions

View file

@ -181,6 +181,10 @@ class HostDirmap(object):
exclude_locals=False,
cached=False)
# TODO implement
# Dirmap is dependent on 'get_site_local_overrides' which
# is not implemented in AYON. The mapping should be received
# from sitesync addon.
active_overrides = get_site_local_overrides(
project_name, active_site)
remote_overrides = get_site_local_overrides(

View file

@ -9,6 +9,8 @@ from ayon_core.pipeline import (
register_loader_plugin_path,
register_creator_plugin_path,
AVALON_CONTAINER_ID,
AVALON_INSTANCE_ID,
AYON_INSTANCE_ID,
)
from ayon_core.hosts.aftereffects.api.workfile_template_builder import (
AEPlaceholderLoadPlugin,
@ -142,7 +144,9 @@ class AfterEffectsHost(HostBase, IWorkfileHost, ILoadHost, IPublishHost):
layers_meta = stub.get_metadata()
for instance in layers_meta:
if instance.get("id") == "pyblish.avalon.instance":
if instance.get("id") in {
AYON_INSTANCE_ID, AVALON_INSTANCE_ID
}:
instances.append(instance)
return instances

View file

@ -194,13 +194,13 @@ class RenderCreator(Creator):
name into created subset name.
Position of composition name could be set in
`project_settings/global/tools/creator/subset_name_profiles` with some
form of '{composition}' placeholder.
`project_settings/global/tools/creator/product_name_profiles` with
some form of '{composition}' placeholder.
Composition name will be used implicitly if multiple composition should
be handled at same time.
If {composition} placeholder is not us 'subset_name_profiles'
If {composition} placeholder is not used in 'product_name_profiles'
composition name will be capitalized and set at the end of subset name
if necessary.

View file

@ -98,7 +98,7 @@ class CollectAERender(publish.AbstractCollectRender):
source=current_file,
label="{} - {}".format(subset_name, family),
subset=subset_name,
asset=inst.data["asset"],
folderPath=inst.data["folderPath"],
task=task_name,
attachTo=False,
setMembers='',
@ -175,7 +175,7 @@ class CollectAERender(publish.AbstractCollectRender):
version_str = "v{:03d}".format(render_instance.version)
if "#" not in file_name:  # single frame (mov)
path = os.path.join(base_dir, "{}_{}_{}.{}".format(
render_instance.asset,
render_instance.folderPath,
render_instance.subset,
version_str,
ext
@ -184,7 +184,7 @@ class CollectAERender(publish.AbstractCollectRender):
else:
for frame in range(start, end + 1):
path = os.path.join(base_dir, "{}_{}_{}.{}.{}".format(
render_instance.asset,
render_instance.folderPath,
render_instance.subset,
version_str,
str(frame).zfill(self.padding_width),

View file

@ -50,11 +50,11 @@ class CollectWorkfile(pyblish.api.ContextPlugin):
asset_entity = context.data["assetEntity"]
project_entity = context.data["projectEntity"]
asset_name = get_asset_name_identifier(asset_entity)
folder_path = get_asset_name_identifier(asset_entity)
instance_data = {
"active": True,
"asset": asset_name,
"folderPath": folder_path,
"task": task,
"frameStart": context.data['frameStart'],
"frameEnd": context.data['frameEnd'],

View file

@ -30,7 +30,7 @@ class ValidateInstanceAssetRepair(pyblish.api.Action):
for instance in instances:
data = stub.read(instance[0])
data["asset"] = get_current_asset_name()
data["folderPath"] = get_current_asset_name()
stub.imprint(instance[0].instance_id, data)
@ -53,7 +53,7 @@ class ValidateInstanceAsset(pyblish.api.InstancePlugin):
order = ValidateContentsOrder
def process(self, instance):
instance_asset = instance.data["asset"]
instance_asset = instance.data["folderPath"]
current_asset = get_current_asset_name()
msg = (
f"Instance asset {instance_asset} is not the same "

View file

@ -26,6 +26,7 @@ from ayon_core.pipeline import (
deregister_loader_plugin_path,
deregister_creator_plugin_path,
AVALON_CONTAINER_ID,
AYON_CONTAINER_ID,
)
from ayon_core.lib import (
Logger,
@ -563,8 +564,9 @@ def ls() -> Iterator:
called containers.
"""
for container in lib.lsattr("id", AVALON_CONTAINER_ID):
yield parse_container(container)
for id_type in {AYON_CONTAINER_ID, AVALON_CONTAINER_ID}:
for container in lib.lsattr("id", id_type):
yield parse_container(container)
def publish():

View file

@ -10,6 +10,8 @@ from ayon_core.pipeline import (
Creator,
CreatedInstance,
LoaderPlugin,
AVALON_INSTANCE_ID,
AYON_INSTANCE_ID,
)
from ayon_core.lib import BoolDef
@ -193,7 +195,9 @@ class BaseCreator(Creator):
if not avalon_prop:
continue
if avalon_prop.get('id') != 'pyblish.avalon.instance':
if avalon_prop.get('id') not in {
AYON_INSTANCE_ID, AVALON_INSTANCE_ID
}:
continue
creator_id = avalon_prop.get('creator_identifier')
@ -352,7 +356,7 @@ class BaseCreator(Creator):
instance_data.update(
{
"id": "pyblish.avalon.instance",
"id": AVALON_INSTANCE_ID,
"creator_identifier": self.identifier,
"subset": subset_name,
}

View file

@ -47,6 +47,22 @@ def get_multilayer(settings):
["multilayer_exr"])
def get_renderer(settings):
"""Get renderer from blender settings."""
return (settings["blender"]
["RenderSettings"]
["renderer"])
def get_compositing(settings):
"""Get compositing from blender settings."""
return (settings["blender"]
["RenderSettings"]
["compositing"])
def get_render_product(output_path, name, aov_sep):
"""
Generate the path to the render product. Blender interprets the `#`
@ -91,66 +107,120 @@ def set_render_format(ext, multilayer):
image_settings.file_format = "TIFF"
def set_render_passes(settings):
aov_list = (settings["blender"]
["RenderSettings"]
["aov_list"])
custom_passes = (settings["blender"]
["RenderSettings"]
["custom_passes"])
def set_render_passes(settings, renderer):
aov_list = set(settings["blender"]["RenderSettings"]["aov_list"])
custom_passes = settings["blender"]["RenderSettings"]["custom_passes"]
# Common passes for both renderers
vl = bpy.context.view_layer
# Data Passes
vl.use_pass_combined = "combined" in aov_list
vl.use_pass_z = "z" in aov_list
vl.use_pass_mist = "mist" in aov_list
vl.use_pass_normal = "normal" in aov_list
# Light Passes
vl.use_pass_diffuse_direct = "diffuse_light" in aov_list
vl.use_pass_diffuse_color = "diffuse_color" in aov_list
vl.use_pass_glossy_direct = "specular_light" in aov_list
vl.use_pass_glossy_color = "specular_color" in aov_list
vl.eevee.use_pass_volume_direct = "volume_light" in aov_list
vl.use_pass_emit = "emission" in aov_list
vl.use_pass_environment = "environment" in aov_list
vl.use_pass_shadow = "shadow" in aov_list
vl.use_pass_ambient_occlusion = "ao" in aov_list
cycles = vl.cycles
# Cryptomatte Passes
vl.use_pass_cryptomatte_object = "cryptomatte_object" in aov_list
vl.use_pass_cryptomatte_material = "cryptomatte_material" in aov_list
vl.use_pass_cryptomatte_asset = "cryptomatte_asset" in aov_list
cycles.denoising_store_passes = "denoising" in aov_list
cycles.use_pass_volume_direct = "volume_direct" in aov_list
cycles.use_pass_volume_indirect = "volume_indirect" in aov_list
if renderer == "BLENDER_EEVEE":
# Eevee exclusive passes
eevee = vl.eevee
# Light Passes
vl.use_pass_shadow = "shadow" in aov_list
eevee.use_pass_volume_direct = "volume_light" in aov_list
# Effects Passes
eevee.use_pass_bloom = "bloom" in aov_list
eevee.use_pass_transparent = "transparent" in aov_list
# Cryptomatte Passes
vl.use_pass_cryptomatte_accurate = "cryptomatte_accurate" in aov_list
elif renderer == "CYCLES":
# Cycles exclusive passes
cycles = vl.cycles
# Data Passes
vl.use_pass_position = "position" in aov_list
vl.use_pass_vector = "vector" in aov_list
vl.use_pass_uv = "uv" in aov_list
cycles.denoising_store_passes = "denoising" in aov_list
vl.use_pass_object_index = "object_index" in aov_list
vl.use_pass_material_index = "material_index" in aov_list
cycles.pass_debug_sample_count = "sample_count" in aov_list
# Light Passes
vl.use_pass_diffuse_indirect = "diffuse_indirect" in aov_list
vl.use_pass_glossy_indirect = "specular_indirect" in aov_list
vl.use_pass_transmission_direct = "transmission_direct" in aov_list
vl.use_pass_transmission_indirect = "transmission_indirect" in aov_list
vl.use_pass_transmission_color = "transmission_color" in aov_list
cycles.use_pass_volume_direct = "volume_light" in aov_list
cycles.use_pass_volume_indirect = "volume_indirect" in aov_list
cycles.use_pass_shadow_catcher = "shadow" in aov_list
aovs_names = [aov.name for aov in vl.aovs]
for cp in custom_passes:
cp_name = cp[0]
cp_name = cp["attribute"]
if cp_name not in aovs_names:
aov = vl.aovs.add()
aov.name = cp_name
else:
aov = vl.aovs[cp_name]
aov.type = cp[1].get("type", "VALUE")
aov.type = cp["value"]
return aov_list, custom_passes
return list(aov_list), custom_passes
def set_node_tree(output_path, name, aov_sep, ext, multilayer):
def _create_aov_slot(name, aov_sep, slots, rpass_name, multi_exr, output_path):
filename = f"{name}{aov_sep}{rpass_name}.####"
slot = slots.new(rpass_name if multi_exr else filename)
filepath = str(output_path / filename.lstrip("/"))
return slot, filepath
def set_node_tree(
output_path, render_product, name, aov_sep, ext, multilayer, compositing
):
# Set the scene to use the compositor node tree to render
bpy.context.scene.use_nodes = True
tree = bpy.context.scene.node_tree
# Get the Render Layers node
rl_node = None
comp_layer_type = "CompositorNodeRLayers"
output_type = "CompositorNodeOutputFile"
compositor_type = "CompositorNodeComposite"
# Get the Render Layer, Composite and the previous output nodes
render_layer_node = None
composite_node = None
old_output_node = None
for node in tree.nodes:
if node.bl_idname == "CompositorNodeRLayers":
rl_node = node
if node.bl_idname == comp_layer_type:
render_layer_node = node
elif node.bl_idname == compositor_type:
composite_node = node
elif node.bl_idname == output_type and "AYON" in node.name:
old_output_node = node
if render_layer_node and composite_node and old_output_node:
break
# If there's not a Render Layers node, we create it
if not rl_node:
rl_node = tree.nodes.new("CompositorNodeRLayers")
if not render_layer_node:
render_layer_node = tree.nodes.new(comp_layer_type)
# Get the enabled output sockets, that are the active passes for the
# render.
@ -158,48 +228,81 @@ def set_node_tree(output_path, name, aov_sep, ext, multilayer):
exclude_sockets = ["Image", "Alpha", "Noisy Image"]
passes = [
socket
for socket in rl_node.outputs
for socket in render_layer_node.outputs
if socket.enabled and socket.name not in exclude_sockets
]
# Remove all output nodes
for node in tree.nodes:
if node.bl_idname == "CompositorNodeOutputFile":
tree.nodes.remove(node)
# Create a new output node
output = tree.nodes.new("CompositorNodeOutputFile")
output = tree.nodes.new(output_type)
image_settings = bpy.context.scene.render.image_settings
output.format.file_format = image_settings.file_format
slots = None
# In case of a multilayer exr, we don't need to use the output node,
# because the blender render already outputs a multilayer exr.
if ext == "exr" and multilayer:
output.layer_slots.clear()
return []
multi_exr = ext == "exr" and multilayer
slots = output.layer_slots if multi_exr else output.file_slots
output.base_path = render_product if multi_exr else str(output_path)
output.file_slots.clear()
output.base_path = str(output_path)
slots.clear()
aov_file_products = []
old_links = {
link.from_socket.name: link for link in tree.links
if link.to_node == old_output_node}
# Create a new socket for the beauty output
pass_name = "rgba" if multi_exr else "beauty"
slot, _ = _create_aov_slot(
name, aov_sep, slots, pass_name, multi_exr, output_path)
tree.links.new(render_layer_node.outputs["Image"], slot)
if compositing:
# Create a new socket for the composite output
pass_name = "composite"
comp_socket, filepath = _create_aov_slot(
name, aov_sep, slots, pass_name, multi_exr, output_path)
aov_file_products.append(("Composite", filepath))
# For each active render pass, we add a new socket to the output node
# and link it
for render_pass in passes:
filepath = f"{name}{aov_sep}{render_pass.name}.####"
for rpass in passes:
slot, filepath = _create_aov_slot(
name, aov_sep, slots, rpass.name, multi_exr, output_path)
aov_file_products.append((rpass.name, filepath))
output.file_slots.new(filepath)
# If the rpass was not connected with the old output node, we connect
# it with the new one.
if not old_links.get(rpass.name):
tree.links.new(rpass, slot)
filename = str(output_path / filepath.lstrip("/"))
for link in list(old_links.values()):
# Check if the socket is still available in the new output node.
socket = output.inputs.get(link.to_socket.name)
# If it is, we connect it with the new output node.
if socket:
tree.links.new(link.from_socket, socket)
# Then, we remove the old link.
tree.links.remove(link)
aov_file_products.append((render_pass.name, filename))
# If there's a composite node, we connect its input with the new output
if compositing and composite_node:
for link in tree.links:
if link.to_node == composite_node:
tree.links.new(link.from_socket, comp_socket)
break
node_input = output.inputs[-1]
if old_output_node:
output.location = old_output_node.location
tree.nodes.remove(old_output_node)
tree.links.new(render_pass, node_input)
output.name = "AYON File Output"
output.label = "AYON File Output"
return aov_file_products
return [] if multi_exr else aov_file_products
def imprint_render_settings(node, data):
@ -228,17 +331,23 @@ def prepare_rendering(asset_group):
aov_sep = get_aov_separator(settings)
ext = get_image_format(settings)
multilayer = get_multilayer(settings)
renderer = get_renderer(settings)
compositing = get_compositing(settings)
set_render_format(ext, multilayer)
aov_list, custom_passes = set_render_passes(settings)
bpy.context.scene.render.engine = renderer
aov_list, custom_passes = set_render_passes(settings, renderer)
output_path = Path.joinpath(dirpath, render_folder, file_name)
render_product = get_render_product(output_path, name, aov_sep)
aov_file_product = set_node_tree(
output_path, name, aov_sep, ext, multilayer)
output_path, render_product, name, aov_sep,
ext, multilayer, compositing)
bpy.context.scene.render.filepath = render_product
# Clear the render filepath, so that the output is handled only by the
# output node in the compositor.
bpy.context.scene.render.filepath = ""
render_settings = {
"render_folder": render_folder,

View file

@ -22,7 +22,9 @@ class CreateAction(plugin.BaseCreator):
)
# Get instance name
name = plugin.prepare_scene_name(instance_data["asset"], subset_name)
name = plugin.prepare_scene_name(
instance_data["folderPath"], subset_name
)
if pre_create_data.get("use_selection"):
for obj in lib.get_selection():

View file

@ -1,8 +1,10 @@
"""Create render."""
import bpy
from ayon_core.lib import version_up
from ayon_core.hosts.blender.api import plugin
from ayon_core.hosts.blender.api.render_lib import prepare_rendering
from ayon_core.hosts.blender.api.workio import save_file
class CreateRenderlayer(plugin.BaseCreator):
@ -37,6 +39,7 @@ class CreateRenderlayer(plugin.BaseCreator):
# settings. Even the validator to check that the file is saved will
# detect the file as saved, even if it isn't. The only solution for
# now it is to force the file to be saved.
bpy.ops.wm.save_as_mainfile(filepath=bpy.data.filepath)
filepath = version_up(bpy.data.filepath)
save_file(filepath, copy=False)
return collection

View file

@ -28,15 +28,27 @@ class ValidateDeadlinePublish(pyblish.api.InstancePlugin,
def process(self, instance):
if not self.is_active(instance.data):
return
tree = bpy.context.scene.node_tree
output_type = "CompositorNodeOutputFile"
output_node = None
# Remove all output nodes that include "AYON" in the name.
# There should be only one.
for node in tree.nodes:
if node.bl_idname == output_type and "AYON" in node.name:
output_node = node
break
if not output_node:
raise PublishValidationError(
"No output node found in the compositor tree."
)
filepath = bpy.data.filepath
file = os.path.basename(filepath)
filename, ext = os.path.splitext(file)
if filename not in bpy.context.scene.render.filepath:
if filename not in output_node.base_path:
raise PublishValidationError(
"Render output folder "
"doesn't match the blender scene name! "
"Use Repair action to "
"fix the folder file path."
"Render output folder doesn't match the blender scene name! "
"Use Repair action to fix the folder file path."
)
@classmethod

View file

@ -22,7 +22,7 @@ class CollectCelactionInstances(pyblish.api.ContextPlugin):
asset_name = get_asset_name_identifier(asset_entity)
shared_instance_data = {
"asset": asset_name,
"folderPath": asset_name,
"frameStart": asset_entity["data"]["frameStart"],
"frameEnd": asset_entity["data"]["frameEnd"],
"handleStart": asset_entity["data"]["handleStart"],

View file

@ -3,6 +3,7 @@ from types import NoneType
import pyblish
import ayon_core.hosts.flame.api as opfapi
from ayon_core.hosts.flame.otio import flame_export
from ayon_core.pipeline import AYON_INSTANCE_ID, AVALON_INSTANCE_ID
from ayon_core.pipeline.editorial import (
is_overlapping_otio_ranges,
get_media_range_with_retimes
@ -47,7 +48,9 @@ class CollectTimelineInstances(pyblish.api.ContextPlugin):
if not marker_data:
continue
if marker_data.get("id") != "pyblish.avalon.instance":
if marker_data.get("id") not in {
AYON_INSTANCE_ID, AVALON_INSTANCE_ID
}:
continue
self.log.debug("__ segment.name: {}".format(

View file

@ -34,7 +34,7 @@ class CollecTimelineOTIO(pyblish.api.ContextPlugin):
project_settings=context.data["project_settings"]
)
asset_name = get_asset_name_identifier(asset_doc)
folder_path = get_asset_name_identifier(asset_doc)
# adding otio timeline to context
with opfapi.maintained_segment_selection(sequence) as selected_seg:
@ -42,7 +42,7 @@ class CollecTimelineOTIO(pyblish.api.ContextPlugin):
instance_data = {
"name": subset_name,
"asset": asset_name,
"folderPath": folder_path,
"subset": subset_name,
"family": "workfile",
"families": []

View file

@ -55,7 +55,7 @@ class ExtractProductResources(publish.Extractor):
# flame objects
segment = instance.data["item"]
asset_name = instance.data["asset"]
folder_path = instance.data["folderPath"]
segment_name = segment.name.get_value()
clip_path = instance.data["path"]
sequence_clip = instance.context.data["flameSequence"]
@ -249,7 +249,7 @@ class ExtractProductResources(publish.Extractor):
out_mark = in_mark + source_duration_handles
exporting_clip = self.import_clip(clip_path)
exporting_clip.name.set_value("{}_{}".format(
asset_name, segment_name))
folder_path, segment_name))
# add xml tags modifications
modify_xml_data.update({

View file

@ -168,10 +168,10 @@ class IntegrateBatchGroup(pyblish.api.InstancePlugin):
handle_start = instance.data["handleStart"]
handle_end = instance.data["handleEnd"]
frame_duration = (frame_end - frame_start) + 1
asset_name = instance.data["asset"]
folder_path = instance.data["folderPath"]
task_name = task_data["name"]
batchgroup_name = "{}_{}".format(asset_name, task_name)
batchgroup_name = "{}_{}".format(folder_path, task_name)
batch_data = {
"shematic_reels": [

View file

@ -12,7 +12,9 @@ from ayon_core.lib import (
)
from ayon_core.pipeline import (
Creator,
CreatedInstance
CreatedInstance,
AVALON_INSTANCE_ID,
AYON_INSTANCE_ID,
)
@ -172,13 +174,13 @@ class GenericCreateSaver(Creator):
if not isinstance(data, dict):
return
required = {
"id": "pyblish.avalon.instance",
"creator_identifier": self.identifier,
}
for key, value in required.items():
if key not in data or data[key] != value:
return
if (
data.get("creator_identifier") != self.identifier
or data.get("id") not in {
AYON_INSTANCE_ID, AVALON_INSTANCE_ID
}
):
return
# Get active state from the actual tool state
attrs = tool.GetAttrs()

View file

@ -68,7 +68,7 @@ class CollectFusionRender(
source=current_file,
label=inst.data["label"],
subset=subset_name,
asset=inst.data["asset"],
folderPath=inst.data["folderPath"],
task=task_name,
attachTo=False,
setMembers='',

View file

@ -72,7 +72,7 @@ class FusionRenderLocal(
self.log.info(
"Rendered '{nm}' for asset '{ast}' under the task '{tsk}'".format(
nm=instance.data["name"],
ast=instance.data["asset"],
ast=instance.data["folderPath"],
tsk=instance.data["task"],
)
)

View file

@ -21,7 +21,9 @@ class ValidateUniqueSubsets(pyblish.api.ContextPlugin):
# Collect instances per subset per asset
instances_per_subset_asset = defaultdict(lambda: defaultdict(list))
for instance in context:
asset = instance.data.get("asset", context.data.get("asset"))
asset = instance.data.get(
"folderPath", context.data.get("folderPath")
)
subset = instance.data.get("subset", context.data.get("subset"))
instances_per_subset_asset[asset][subset].append(instance)

View file

@ -212,6 +212,7 @@ class CreateComposite(harmony.Creator):
The creator plugin can be configured to use other node types. For example here is a write node creator:
```python
from uuid import uuid4
import ayon_core.hosts.harmony.api as harmony
@ -242,6 +243,7 @@ class CreateRender(harmony.Creator):
#### Collector Plugin
```python
import pyblish.api
from ayon_core.pipeline import AYON_INSTANCE_ID, AVALON_INSTANCE_ID
import ayon_core.hosts.harmony.api as harmony
@ -252,7 +254,7 @@ class CollectInstances(pyblish.api.ContextPlugin):
a composite node and marked with a unique identifier;
Identifier:
id (str): "pyblish.avalon.instance"
id (str): "ayon.create.instance"
"""
label = "Instances"
@ -272,7 +274,7 @@ class CollectInstances(pyblish.api.ContextPlugin):
continue
# Skip containers.
if "container" in data["id"]:
if data["id"] not in {AYON_INSTANCE_ID, AVALON_INSTANCE_ID}:
continue
instance = context.create_instance(node.split("/")[-1])
@ -287,6 +289,7 @@ class CollectInstances(pyblish.api.ContextPlugin):
#### Extractor Plugin
```python
import os
from uuid import uuid4
import pyblish.api
import ayon_core.hosts.harmony.api as harmony
@ -418,6 +421,7 @@ class ExtractImage(pyblish.api.InstancePlugin):
#### Loader Plugin
```python
import os
from uuid import uuid4
import ayon_core.hosts.harmony.api as harmony

View file

@ -98,7 +98,7 @@ class CollectFarmRender(publish.AbstractCollectRender):
self_name = self.__class__.__name__
asset_name = context.data["asset"]
folder_path = context.data["folderPath"]
for node in context.data["allNodes"]:
data = harmony.read(node)
@ -142,7 +142,7 @@ class CollectFarmRender(publish.AbstractCollectRender):
source=context.data["currentFile"],
label=node.split("/")[1],
subset=subset_name,
asset=asset_name,
folderPath=folder_path,
task=task_name,
attachTo=False,
setMembers=[node],

View file

@ -13,7 +13,7 @@ class CollectInstances(pyblish.api.ContextPlugin):
a composite node and marked with a unique identifier.
Identifier:
id (str): "pyblish.avalon.instance"
id (str): "ayon.create.instance"
"""
label = "Instances"

View file

@ -31,7 +31,7 @@ class CollectPalettes(pyblish.api.ContextPlugin):
if (not any([re.search(pattern, task_name)
for pattern in self.allowed_tasks])):
return
asset_name = context.data["asset"]
folder_path = context.data["folderPath"]
for name, id in palettes.items():
instance = context.create_instance(name)
@ -39,7 +39,7 @@ class CollectPalettes(pyblish.api.ContextPlugin):
"id": id,
"family": "harmony.palette",
'families': [],
"asset": asset_name,
"folderPath": folder_path,
"subset": "{}{}".format("palette", name)
})
self.log.info(

View file

@ -36,5 +36,5 @@ class CollectWorkfile(pyblish.api.ContextPlugin):
"family": family,
"families": [family],
"representations": [],
"asset": context.data["asset"]
"folderPath": context.data["folderPath"]
})

View file

@ -27,9 +27,10 @@ class ValidateInstanceRepair(pyblish.api.Action):
# Apply pyblish.logic to get the instances for the plug-in
instances = pyblish.api.instances_by_plugin(failed, plugin)
folder_path = get_current_asset_name()
for instance in instances:
data = harmony.read(instance.data["setMembers"][0])
data["asset"] = get_current_asset_name()
data["folderPath"] = folder_path
harmony.imprint(instance.data["setMembers"][0], data)
@ -42,7 +43,7 @@ class ValidateInstance(pyblish.api.InstancePlugin):
order = ValidateContentsOrder
def process(self, instance):
instance_asset = instance.data["asset"]
instance_asset = instance.data["folderPath"]
current_asset = get_current_asset_name()
msg = (
"Instance asset is not the same as current asset:"

View file

@ -22,7 +22,12 @@ except ImportError:
from ayon_core.client import get_project
from ayon_core.settings import get_project_settings
from ayon_core.pipeline import Anatomy, get_current_project_name
from ayon_core.pipeline import (
Anatomy,
get_current_project_name,
AYON_INSTANCE_ID,
AVALON_INSTANCE_ID,
)
from ayon_core.pipeline.load import filter_containers
from ayon_core.lib import Logger
from . import tags
@ -1217,7 +1222,9 @@ def sync_clip_name_to_data_asset(track_items_list):
# ignore if no data on the clip or not publish instance
if not data:
continue
if data.get("id") != "pyblish.avalon.instance":
if data.get("id") not in {
AYON_INSTANCE_ID, AVALON_INSTANCE_ID
}:
continue
# fix data if wrong name

View file

@ -15,6 +15,7 @@ from ayon_core.pipeline import (
deregister_creator_plugin_path,
deregister_loader_plugin_path,
AVALON_CONTAINER_ID,
AYON_CONTAINER_ID,
)
from ayon_core.tools.utils import host_tools
from . import lib, menu, events
@ -158,7 +159,9 @@ def parse_container(item, validate=True):
def data_to_container(item, data):
if (
not data
or data.get("id") != "pyblish.avalon.container"
or data.get("id") not in {
AYON_CONTAINER_ID, AVALON_CONTAINER_ID
}
):
return

View file

@ -118,9 +118,9 @@ class CollectClipEffects(pyblish.api.InstancePlugin):
data["subset"] = name
data["family"] = family
data["families"] = [family]
data["name"] = data["subset"] + "_" + data["asset"]
data["name"] = data["subset"] + "_" + data["folderPath"]
data["label"] = "{} - {}".format(
data['asset'], data["subset"]
data["folderPath"], data["subset"]
)
data["effects"] = effects

View file

@ -102,7 +102,7 @@ class CollectFrameTagInstances(pyblish.api.ContextPlugin):
# first collect all available subset tag frames
subset_data = {}
context_asset_doc = context.data["assetEntity"]
context_asset_name = get_asset_name_identifier(context_asset_doc)
context_folder_path = get_asset_name_identifier(context_asset_doc)
for tag_data in sequence_tags:
frame = int(tag_data["start"])
@ -120,7 +120,7 @@ class CollectFrameTagInstances(pyblish.api.ContextPlugin):
subset_data[subset] = {
"frames": [frame],
"format": tag_data["format"],
"asset": context_asset_name
"folderPath": context_folder_path
}
return subset_data
@ -133,7 +133,7 @@ class CollectFrameTagInstances(pyblish.api.ContextPlugin):
"label": "{} {}".format(name, subset_data["frames"]),
"family": "image",
"families": ["frame"],
"asset": subset_data["asset"],
"folderPath": subset_data["folderPath"],
"subset": name,
"format": subset_data["format"],
"frames": subset_data["frames"]

View file

@ -57,7 +57,7 @@ class ExtractClipEffects(publish.Extractor):
"sourceStart", "sourceStartH", "sourceEnd", "sourceEndH",
"frameStart", "frameEnd",
"clipIn", "clipOut", "clipInH", "clipOutH",
"asset", "version"
"folderPath", "version"
]
# pass data to version

View file

@ -1,5 +1,6 @@
import pyblish
from ayon_core.pipeline import AYON_INSTANCE_ID, AVALON_INSTANCE_ID
from ayon_core.pipeline.editorial import is_overlapping_otio_ranges
from ayon_core.hosts.hiero import api as phiero
@ -56,7 +57,9 @@ class PrecollectInstances(pyblish.api.ContextPlugin):
if not tag_data:
continue
if tag_data.get("id") != "pyblish.avalon.instance":
if tag_data.get("id") not in {
AYON_INSTANCE_ID, AVALON_INSTANCE_ID
}:
continue
# get clips subtracks and annotations
@ -98,7 +101,7 @@ class PrecollectInstances(pyblish.api.ContextPlugin):
data.update({
"name": "{}_{}".format(asset, subset),
"label": label,
"asset": asset,
"folderPath": asset,
"asset_name": asset_name,
"item": track_item,
"families": families,
@ -189,7 +192,7 @@ class PrecollectInstances(pyblish.api.ContextPlugin):
if not hierarchy_data:
return
asset = data["asset"]
asset = data["folderPath"]
asset_name = data["asset_name"]
# insert family into families
@ -241,7 +244,6 @@ class PrecollectInstances(pyblish.api.ContextPlugin):
if not master_layer:
return
asset = data.get("asset")
item = data.get("item")
clip_name = item.name()
@ -249,7 +251,7 @@ class PrecollectInstances(pyblish.api.ContextPlugin):
if not self.test_any_audio(item):
return
asset = data["asset"]
asset = data["folderPath"]
asset_name = data["asset_name"]
# insert family into families

View file

@ -17,7 +17,7 @@ class PrecollectWorkfile(pyblish.api.ContextPlugin):
order = pyblish.api.CollectorOrder - 0.491
def process(self, context):
asset = context.data["asset"]
asset = context.data["folderPath"]
asset_name = asset.split("/")[-1]
active_timeline = hiero.ui.activeSequence()
@ -64,7 +64,7 @@ class PrecollectWorkfile(pyblish.api.ContextPlugin):
"label": "{} - {}Main".format(
asset, family),
"name": "{}_{}".format(asset_name, family),
"asset": context.data["asset"],
"folderPath": context.data["folderPath"],
# TODO use 'get_subset_name'
"subset": "{}{}Main".format(asset_name, family.capitalize()),
"item": project,

View file

@ -15,6 +15,7 @@ from ayon_core.pipeline import (
register_loader_plugin_path,
register_inventory_action_path,
AVALON_CONTAINER_ID,
AYON_CONTAINER_ID,
)
from ayon_core.pipeline.load import any_outdated_containers
from ayon_core.hosts.houdini import HOUDINI_HOST_DIR
@ -271,8 +272,11 @@ def parse_container(container):
def ls():
containers = []
for identifier in (AVALON_CONTAINER_ID,
"pyblish.mindbender.container"):
for identifier in (
AYON_CONTAINER_ID,
AVALON_CONTAINER_ID,
"pyblish.mindbender.container"
):
containers += lib.lsattr("id", identifier)
for container in sorted(containers,

View file

@ -11,7 +11,9 @@ from ayon_core.pipeline import (
CreatorError,
LegacyCreator,
Creator as NewCreator,
CreatedInstance
CreatedInstance,
AYON_INSTANCE_ID,
AVALON_INSTANCE_ID,
)
from ayon_core.lib import BoolDef
from .lib import imprint, read, lsattr, add_self_publish_button
@ -118,7 +120,10 @@ class HoudiniCreatorBase(object):
cache = dict()
cache_legacy = dict()
for node in lsattr("id", "pyblish.avalon.instance"):
nodes = []
for id_type in [AYON_INSTANCE_ID, AVALON_INSTANCE_ID]:
nodes.extend(lsattr("id", id_type))
for node in nodes:
creator_identifier_parm = node.parm("creator_identifier")
if creator_identifier_parm:

View file

@ -2,6 +2,7 @@ import hou
import pyblish.api
from ayon_core.pipeline import AYON_INSTANCE_ID, AVALON_INSTANCE_ID
from ayon_core.hosts.houdini.api import lib
@ -12,7 +13,7 @@ class CollectInstances(pyblish.api.ContextPlugin):
an specific node and marked with a unique identifier;
Identifier:
id (str): "pyblish.avalon.instance
id (str): "ayon.create.instance"
Specific node:
The specific node is important because it dictates in which way the
@ -44,7 +45,9 @@ class CollectInstances(pyblish.api.ContextPlugin):
if not node.parm("id"):
continue
if node.evalParm("id") != "pyblish.avalon.instance":
if node.evalParm("id") not in {
AYON_INSTANCE_ID, AVALON_INSTANCE_ID
}:
continue
# instance was created by new creator code, skip it as
@ -72,7 +75,7 @@ class CollectInstances(pyblish.api.ContextPlugin):
# Create nice name if the instance has a frame range.
label = data.get("name", node.name())
label += " (%s)" % data["asset"] # include asset in name
label += " (%s)" % data["folderPath"] # include folder in name
instance = context.create_instance(label)

View file

@ -2,7 +2,7 @@ import hou
import pyblish.api
from ayon_core.hosts.houdini.api import lib
import ayon_core.hosts.houdini.api.usd as hou_usdlib
import ayon_core.lib.usdlib as usdlib
from ayon_core.pipeline import usdlib
class CollectInstancesUsdLayered(pyblish.api.ContextPlugin):
@ -12,7 +12,7 @@ class CollectInstancesUsdLayered(pyblish.api.ContextPlugin):
layers remain set to 'publish' by the user.
This works differently from most of our Avalon instances in the pipeline.
As opposed to storing `pyblish.avalon.instance` as id on the node we store
As opposed to storing `ayon.create.instance` as id on the node we store
`pyblish.avalon.usdlayered`.
Additionally this instance has no need for storing family, asset, subset

View file

@ -5,7 +5,7 @@ from ayon_core.client import (
get_asset_by_name,
get_asset_name_identifier,
)
import ayon_core.lib.usdlib as usdlib
from ayon_core.pipeline import usdlib
class CollectUsdBootstrap(pyblish.api.InstancePlugin):
@ -55,7 +55,7 @@ class CollectUsdBootstrap(pyblish.api.InstancePlugin):
self.log.debug("Add bootstrap for: %s" % bootstrap)
project_name = instance.context.data["projectName"]
asset_name = instance.data["asset"]
asset_name = instance.data["folderPath"]
asset_doc = get_asset_by_name(project_name, asset_name)
assert asset_doc, "Asset must exist: %s" % asset_name
@ -95,7 +95,7 @@ class CollectUsdBootstrap(pyblish.api.InstancePlugin):
new.data["optional"] = False
# Copy some data from the instance for which we bootstrap
for key in ["asset"]:
for key in ["folderPath"]:
new.data[key] = instance.data[key]
def _subset_exists(self, project_name, instance, subset_name, asset_doc):
@ -107,7 +107,7 @@ class CollectUsdBootstrap(pyblish.api.InstancePlugin):
for inst in context:
if (
inst.data["subset"] == subset_name
and inst.data["asset"] == asset_doc_name
and inst.data["folderPath"] == asset_doc_name
):
return True

View file

@ -55,7 +55,7 @@ class CollectUsdLayers(pyblish.api.InstancePlugin):
layer_inst.data["families"] = [family]
layer_inst.data["subset"] = "__stub__"
layer_inst.data["label"] = label
layer_inst.data["asset"] = instance.data["asset"]
layer_inst.data["folderPath"] = instance.data["folderPath"]
layer_inst.data["instance_node"] = instance.data["instance_node"]
# include same USD ROP
layer_inst.append(rop_node)

View file

@ -285,7 +285,7 @@ class ExtractUSDLayered(publish.Extractor):
# to detect whether we should make this into a new publish
# version. If not, skip it.
asset = get_asset_by_name(
project_name, dependency.data["asset"], fields=["_id"]
project_name, dependency.data["folderPath"], fields=["_id"]
)
subset = get_subset_by_name(
project_name,

View file

@ -18,7 +18,7 @@ class ValidateUSDShadeModelExists(pyblish.api.InstancePlugin):
def process(self, instance):
project_name = instance.context.data["projectName"]
asset_name = instance.data["asset"]
asset_name = instance.data["folderPath"]
subset = instance.data["subset"]
# Assume shading variation starts after a dot separator

View file

@ -12,6 +12,7 @@ from ayon_core.pipeline import (
register_creator_plugin_path,
register_loader_plugin_path,
AVALON_CONTAINER_ID,
AYON_CONTAINER_ID,
)
from ayon_core.hosts.max.api.menu import OpenPypeMenu
from ayon_core.hosts.max.api import lib
@ -151,7 +152,9 @@ def ls() -> list:
objs = rt.objects
containers = [
obj for obj in objs
if rt.getUserProp(obj, "id") == AVALON_CONTAINER_ID
if rt.getUserProp(obj, "id") in {
AYON_CONTAINER_ID, AVALON_CONTAINER_ID
}
]
for container in sorted(containers, key=attrgetter("name")):

View file

@ -6,7 +6,13 @@ import six
from pymxs import runtime as rt
from ayon_core.lib import BoolDef
from ayon_core.pipeline import CreatedInstance, Creator, CreatorError
from ayon_core.pipeline import (
CreatedInstance,
Creator,
CreatorError,
AYON_INSTANCE_ID,
AVALON_INSTANCE_ID,
)
from .lib import imprint, lsattr, read
@ -162,7 +168,11 @@ class MaxCreatorBase(object):
return shared_data
shared_data["max_cached_subsets"] = {}
cached_instances = lsattr("id", "pyblish.avalon.instance")
cached_instances = []
for id_type in [AYON_INSTANCE_ID, AVALON_INSTANCE_ID]:
cached_instances.extend(lsattr("id", id_type))
for i in cached_instances:
creator_id = rt.GetUserProp(i, "creator_identifier")
if creator_id not in shared_data["max_cached_subsets"]:

View file

@ -0,0 +1,109 @@
# -*- coding: utf-8 -*-
"""Creator plugin for creating workfiles."""
from ayon_core.pipeline import CreatedInstance, AutoCreator
from ayon_core.client import get_asset_by_name, get_asset_name_identifier
from ayon_core.hosts.max.api import plugin
from ayon_core.hosts.max.api.lib import read, imprint
from pymxs import runtime as rt
class CreateWorkfile(plugin.MaxCreatorBase, AutoCreator):
"""Workfile auto-creator."""
identifier = "io.ayon.creators.max.workfile"
label = "Workfile"
family = "workfile"
icon = "fa5.file"
default_variant = "Main"
def create(self):
variant = self.default_variant
current_instance = next(
(
instance for instance in self.create_context.instances
if instance.creator_identifier == self.identifier
), None)
project_name = self.project_name
asset_name = self.create_context.get_current_asset_name()
task_name = self.create_context.get_current_task_name()
host_name = self.create_context.host_name
if current_instance is None:
asset_doc = get_asset_by_name(project_name, asset_name)
subset_name = self.get_subset_name(
variant, task_name, asset_doc, project_name, host_name
)
data = {
"folderPath": asset_name,
"task": task_name,
"variant": variant
}
data.update(
self.get_dynamic_data(
variant, task_name, asset_doc,
project_name, host_name, current_instance)
)
self.log.info("Auto-creating workfile instance...")
instance_node = self.create_node(subset_name)
data["instance_node"] = instance_node.name
current_instance = CreatedInstance(
self.family, subset_name, data, self
)
self._add_instance_to_context(current_instance)
imprint(instance_node.name, current_instance.data)
elif (
current_instance["folderPath"] != asset_name
or current_instance["task"] != task_name
):
# Update instance context if is not the same
asset_doc = get_asset_by_name(project_name, asset_name)
subset_name = self.get_subset_name(
variant, task_name, asset_doc, project_name, host_name
)
asset_name = get_asset_name_identifier(asset_doc)
current_instance["folderPath"] = asset_name
current_instance["task"] = task_name
current_instance["subset"] = subset_name
def collect_instances(self):
self.cache_subsets(self.collection_shared_data)
for instance in self.collection_shared_data["max_cached_subsets"].get(self.identifier, []): # noqa
if not rt.getNodeByName(instance):
continue
created_instance = CreatedInstance.from_existing(
read(rt.GetNodeByName(instance)), self
)
self._add_instance_to_context(created_instance)
def update_instances(self, update_list):
for created_inst, _ in update_list:
instance_node = created_inst.get("instance_node")
imprint(
instance_node,
created_inst.data_to_store()
)
def remove_instances(self, instances):
"""Remove specified instance from the scene.
This is only removing `id` parameter so instance is no longer
instance, because it might contain valuable data for artist.
"""
for instance in instances:
instance_node = rt.GetNodeByName(
instance.data.get("instance_node"))
if instance_node:
rt.Delete(instance_node)
self._remove_instance_from_context(instance)
def create_node(self, subset_name):
if rt.getNodeByName(subset_name):
node = rt.getNodeByName(subset_name)
return node
node = rt.Container(name=subset_name)
node.isHidden = True
return node

View file

@ -0,0 +1,23 @@
import os
import pyblish.api
from pymxs import runtime as rt
class CollectCurrentFile(pyblish.api.ContextPlugin):
"""Inject the current working file."""
order = pyblish.api.CollectorOrder - 0.5
label = "Max Current File"
hosts = ['max']
def process(self, context):
"""Inject the current working file"""
folder = rt.maxFilePath
file = rt.maxFileName
if not folder or not file:
self.log.error("Scene is not saved.")
current_file = os.path.join(folder, file)
context.data["currentFile"] = current_file
self.log.debug("Scene path: {}".format(current_file))

View file

@ -12,7 +12,9 @@ class CollectMembers(pyblish.api.InstancePlugin):
hosts = ['max']
def process(self, instance):
if instance.data["family"] == "workfile":
self.log.debug("Skipping Collecting Members for workfile family.")
return
if instance.data.get("instance_node"):
container = rt.GetNodeByName(instance.data["instance_node"])
instance.data["members"] = [

View file

@ -94,7 +94,7 @@ class CollectRender(pyblish.api.InstancePlugin):
renderer = str(renderer_class).split(":")[0]
# also need to get the render dir for conversion
data = {
"asset": instance.data["asset"],
"folderPath": instance.data["folderPath"],
"subset": str(instance.name),
"publish": True,
"maxversion": str(get_max_version()),

View file

@ -6,57 +6,41 @@ import pyblish.api
from pymxs import runtime as rt
class CollectWorkfile(pyblish.api.ContextPlugin):
class CollectWorkfile(pyblish.api.InstancePlugin):
"""Inject the current working file into context"""
order = pyblish.api.CollectorOrder - 0.01
label = "Collect 3dsmax Workfile"
hosts = ['max']
families = ["workfile"]
def process(self, context):
def process(self, instance):
"""Inject the current working file."""
context = instance.context
folder = rt.maxFilePath
file = rt.maxFileName
if not folder or not file:
self.log.error("Scene is not saved.")
current_file = os.path.join(folder, file)
context.data['currentFile'] = current_file
filename, ext = os.path.splitext(file)
task = context.data["task"]
ext = os.path.splitext(file)[-1].lstrip(".")
data = {}
# create instance
instance = context.create_instance(name=filename)
subset = 'workfile' + task.capitalize()
data.update({
"subset": subset,
"asset": context.data["asset"],
"label": subset,
"publish": True,
"family": 'workfile',
"families": ['workfile'],
"setMembers": [current_file],
"frameStart": context.data['frameStart'],
"frameEnd": context.data['frameEnd'],
"handleStart": context.data['handleStart'],
"handleEnd": context.data['handleEnd']
"setMembers": context.data["currentFile"],
"frameStart": context.data["frameStart"],
"frameEnd": context.data["frameEnd"],
"handleStart": context.data["handleStart"],
"handleEnd": context.data["handleEnd"]
})
data['representations'] = [{
'name': ext.lstrip("."),
'ext': ext.lstrip("."),
'files': file,
data["representations"] = [{
"name": ext,
"ext": ext,
"files": file,
"stagingDir": folder,
}]
instance.data.update(data)
self.log.info('Collected instance: {}'.format(file))
self.log.info('Scene path: {}'.format(current_file))
self.log.info('staging Dir: {}'.format(folder))
self.log.info('subset: {}'.format(subset))
self.log.debug("Collected data: {}".format(data))
self.log.debug("Collected instance: {}".format(file))
self.log.debug("staging Dir: {}".format(folder))

View file

@ -15,7 +15,7 @@ class GenerateUUIDsOnInvalidAction(pyblish.api.Action):
receive new UUIDs are actually invalid.
Requires:
- instance.data["asset"]
- instance.data["folderPath"]
"""
@ -78,7 +78,7 @@ class GenerateUUIDsOnInvalidAction(pyblish.api.Action):
# should be always available, but kept a way to query it by name.
asset_doc = instance.data.get("assetEntity")
if not asset_doc:
asset_name = instance.data["asset"]
asset_name = instance.data["folderPath"]
project_name = instance.context.data["projectName"]
self.log.info((
"Asset is not stored on instance."

View file

@ -38,25 +38,6 @@ class ToolWindows:
cls._windows[tool] = window
def edit_shader_definitions():
from qtpy import QtWidgets
from ayon_core.hosts.maya.api.shader_definition_editor import (
ShaderDefinitionsEditor
)
from ayon_core.tools.utils import qt_app_context
top_level_widgets = QtWidgets.QApplication.topLevelWidgets()
main_window = next(widget for widget in top_level_widgets
if widget.objectName() == "MayaWindow")
with qt_app_context():
window = ToolWindows.get_window("shader_definition_editor")
if not window:
window = ShaderDefinitionsEditor(parent=main_window)
ToolWindows.set_window("shader_definition_editor", window)
window.show()
def _resolution_from_document(doc):
if not doc or "data" not in doc:
print("Entered document is not valid. \"{}\"".format(str(doc)))

View file

@ -24,7 +24,8 @@ from ayon_core.client import (
get_asset_by_name,
get_subsets,
get_last_versions,
get_representation_by_name
get_representation_by_name,
get_asset_name_identifier,
)
from ayon_core.settings import get_project_settings
from ayon_core.pipeline import (
@ -35,7 +36,11 @@ from ayon_core.pipeline import (
loaders_from_representation,
get_representation_path,
load_container,
registered_host
registered_host,
AVALON_CONTAINER_ID,
AVALON_INSTANCE_ID,
AYON_INSTANCE_ID,
AYON_CONTAINER_ID,
)
from ayon_core.lib import NumberDef
from ayon_core.pipeline.context_tools import get_current_project_asset
@ -2100,7 +2105,7 @@ def get_related_sets(node):
"""Return objectSets that are relationships for a look for `node`.
Filters out based on:
- id attribute is NOT `pyblish.avalon.container`
- id attribute is NOT `AVALON_CONTAINER_ID`
- shapes and deformer shapes (alembic creates meshShapeDeformed)
- set name ends with any from a predefined list
- set in not in viewport set (isolate selected for example)
@ -2120,7 +2125,12 @@ def get_related_sets(node):
defaults = {"defaultLightSet", "defaultObjectSet"}
# Ids to ignore
ignored = {"pyblish.avalon.instance", "pyblish.avalon.container"}
ignored = {
AVALON_INSTANCE_ID,
AVALON_CONTAINER_ID,
AYON_INSTANCE_ID,
AYON_CONTAINER_ID,
}
view_sets = get_isolate_view_sets()
@ -3143,21 +3153,27 @@ def fix_incompatible_containers():
def update_content_on_context_change():
"""
This will update scene content to match new asset on context change
This will update scene content to match new folder on context change
"""
scene_sets = cmds.listSets(allSets=True)
asset_doc = get_current_project_asset()
new_asset = asset_doc["name"]
new_folder_path = get_asset_name_identifier(asset_doc)
new_data = asset_doc["data"]
for s in scene_sets:
try:
if cmds.getAttr("{}.id".format(s)) == "pyblish.avalon.instance":
if cmds.getAttr("{}.id".format(s)) in {
AYON_INSTANCE_ID, AVALON_INSTANCE_ID
}:
attr = cmds.listAttr(s)
print(s)
if "asset" in attr:
print(" - setting asset to: [ {} ]".format(new_asset))
cmds.setAttr("{}.asset".format(s),
new_asset, type="string")
if "folderPath" in attr:
print(
" - setting folder to: [ {} ]".format(new_folder_path)
)
cmds.setAttr(
"{}.folderPath".format(s),
new_folder_path, type="string"
)
if "frameStart" in attr:
cmds.setAttr("{}.frameStart".format(s),
new_data["frameStart"])

View file

@ -33,6 +33,7 @@ from ayon_core.pipeline import (
deregister_loader_plugin_path,
deregister_inventory_action_path,
deregister_creator_plugin_path,
AYON_CONTAINER_ID,
AVALON_CONTAINER_ID,
)
from ayon_core.pipeline.load import any_outdated_containers
@ -376,9 +377,11 @@ def _ls():
yield iterator.thisNode()
iterator.next()
ids = {AVALON_CONTAINER_ID,
# Backwards compatibility
"pyblish.mindbender.container"}
ids = {
AYON_CONTAINER_ID,
# Backwards compatibility
AVALON_CONTAINER_ID
}
# Iterate over all 'set' nodes in the scene to detect whether
# they have the avalon container ".id" attribute.

View file

@ -10,6 +10,9 @@ from maya.app.renderSetup.model import renderSetup
from ayon_core.lib import BoolDef, Logger
from ayon_core.settings import get_project_settings
from ayon_core.pipeline import (
AYON_INSTANCE_ID,
AYON_CONTAINER_ID,
AVALON_INSTANCE_ID,
AVALON_CONTAINER_ID,
Anatomy,
@ -110,7 +113,9 @@ class MayaCreatorBase(object):
for node in cmds.ls(type="objectSet"):
if _get_attr(node, attr="id") != "pyblish.avalon.instance":
if _get_attr(node, attr="id") not in {
AYON_INSTANCE_ID, AVALON_INSTANCE_ID
}:
continue
creator_id = _get_attr(node, attr="creator_identifier")
@ -992,5 +997,7 @@ class ReferenceLoader(Loader):
id_attr = "{}.id".format(node)
if not cmds.attributeQuery("id", node=node, exists=True):
continue
if cmds.getAttr(id_attr) == AVALON_CONTAINER_ID:
if cmds.getAttr(id_attr) not in {
AYON_CONTAINER_ID, AVALON_CONTAINER_ID
}:
cmds.sets(node, forceElement=container)

View file

@ -1,176 +0,0 @@
# -*- coding: utf-8 -*-
"""Editor for shader definitions.
Shader names are stored as simple text file over GridFS in mongodb.
"""
import os
from qtpy import QtWidgets, QtCore, QtGui
from ayon_core.client.mongo import OpenPypeMongoConnection
from ayon_core import resources
import gridfs
DEFINITION_FILENAME = "{}/maya/shader_definition.txt".format(
os.getenv("AYON_PROJECT_NAME"))
class ShaderDefinitionsEditor(QtWidgets.QWidget):
"""Widget serving as simple editor for shader name definitions."""
# name of the file used to store definitions
def __init__(self, parent=None):
super(ShaderDefinitionsEditor, self).__init__(parent)
self._mongo = OpenPypeMongoConnection.get_mongo_client()
self._gridfs = gridfs.GridFS(
self._mongo[os.getenv("OPENPYPE_DATABASE_NAME")])
self._editor = None
self._original_content = self._read_definition_file()
self.setObjectName("shaderDefinitionEditor")
self.setWindowTitle("OpenPype shader name definition editor")
icon = QtGui.QIcon(resources.get_ayon_icon_filepath())
self.setWindowIcon(icon)
self.setWindowFlags(QtCore.Qt.Window)
self.setParent(parent)
self.setAttribute(QtCore.Qt.WA_DeleteOnClose)
self.resize(750, 500)
self._setup_ui()
self._reload()
def _setup_ui(self):
"""Setup UI of Widget."""
layout = QtWidgets.QVBoxLayout(self)
label = QtWidgets.QLabel()
label.setText("Put shader names here - one name per line:")
layout.addWidget(label)
self._editor = QtWidgets.QPlainTextEdit()
self._editor.setStyleSheet("border: none;")
layout.addWidget(self._editor)
btn_layout = QtWidgets.QHBoxLayout()
save_btn = QtWidgets.QPushButton("Save")
save_btn.clicked.connect(self._save)
reload_btn = QtWidgets.QPushButton("Reload")
reload_btn.clicked.connect(self._reload)
exit_btn = QtWidgets.QPushButton("Exit")
exit_btn.clicked.connect(self._close)
btn_layout.addWidget(reload_btn)
btn_layout.addWidget(save_btn)
btn_layout.addWidget(exit_btn)
layout.addLayout(btn_layout)
def _read_definition_file(self, file=None):
"""Read definition file from database.
Args:
file (gridfs.grid_file.GridOut, Optional): File to read. If not
set, new query will be issued to find it.
Returns:
str: Content of the file or empty string if file doesn't exist.
"""
content = ""
if not file:
file = self._gridfs.find_one(
{"filename": DEFINITION_FILENAME})
if not file:
print(">>> [SNDE]: nothing in database yet")
return content
content = file.read()
file.close()
return content
def _write_definition_file(self, content, force=False):
"""Write content as definition to file in database.
Before file is written, check is made if its content has not
changed. If is changed, warning is issued to user if he wants
it to overwrite. Note: GridFs doesn't allow changing file content.
You need to delete existing file and create new one.
Args:
content (str): Content to write.
Raises:
ContentException: If file is changed in database while
editor is running.
"""
file = self._gridfs.find_one(
{"filename": DEFINITION_FILENAME})
if file:
content_check = self._read_definition_file(file)
if content == content_check:
print(">>> [SNDE]: content not changed")
return
if self._original_content != content_check:
if not force:
raise ContentException("Content changed")
print(">>> [SNDE]: overwriting data")
file.close()
self._gridfs.delete(file._id)
file = self._gridfs.new_file(
filename=DEFINITION_FILENAME,
content_type='text/plain',
encoding='utf-8')
file.write(content)
file.close()
QtCore.QTimer.singleShot(200, self._reset_style)
self._editor.setStyleSheet("border: 1px solid #33AF65;")
self._original_content = content
def _reset_style(self):
"""Reset editor style back.
Used to visually indicate save.
"""
self._editor.setStyleSheet("border: none;")
def _close(self):
self.hide()
def closeEvent(self, event):
event.ignore()
self.hide()
def _reload(self):
print(">>> [SNDE]: reloading")
self._set_content(self._read_definition_file())
def _save(self):
try:
self._write_definition_file(content=self._editor.toPlainText())
except ContentException:
# content has changed meanwhile
print(">>> [SNDE]: content has changed")
self._show_overwrite_warning()
def _set_content(self, content):
self._editor.setPlainText(content)
def _show_overwrite_warning(self):
reply = QtWidgets.QMessageBox.question(
self,
"Warning",
("Content you are editing was changed meanwhile in database.\n"
"Please, reload and solve the conflict."),
QtWidgets.QMessageBox.OK)
if reply == QtWidgets.QMessageBox.OK:
# do nothing
pass
class ContentException(Exception):
"""This is risen during save if file is changed in database."""
pass

View file

@ -2,7 +2,12 @@ import json
from maya import cmds
from ayon_core.pipeline import registered_host, get_current_asset_name
from ayon_core.pipeline import (
registered_host,
get_current_asset_name,
AYON_INSTANCE_ID,
AVALON_INSTANCE_ID,
)
from ayon_core.pipeline.workfile.workfile_template_builder import (
TemplateAlreadyImported,
AbstractTemplateBuilder,
@ -73,7 +78,9 @@ class MayaTemplateBuilder(AbstractTemplateBuilder):
for node in imported_sets:
if not cmds.attributeQuery("id", node=node, exists=True):
continue
if cmds.getAttr("{}.id".format(node)) != "pyblish.avalon.instance":
if cmds.getAttr("{}.id".format(node)) not in {
AYON_INSTANCE_ID, AVALON_INSTANCE_ID
}:
continue
if not cmds.attributeQuery("asset", node=node, exists=True):
continue

View file

@ -11,7 +11,7 @@ class CollectNewInstances(pyblish.api.InstancePlugin):
an objectSet and marked with a unique identifier;
Identifier:
id (str): "pyblish.avalon.instance"
id (str): "ayon.create.instance"
Limitations:
- Does not take into account nodes connected to those

View file

@ -307,7 +307,7 @@ class CollectMayaRender(pyblish.api.InstancePlugin):
_instance.data["version"] = context.data["version"]
# Define nice label
label = "{0} ({1})".format(layer_name, instance.data["asset"])
label = "{0} ({1})".format(layer_name, instance.data["folderPath"])
label += " [{0}-{1}]".format(
int(data["frameStartHandle"]), int(data["frameEndHandle"])
)

View file

@ -99,7 +99,7 @@ class CollectVrayScene(pyblish.api.InstancePlugin):
instance.data.update(data)
# Define nice label
label = "{0} ({1})".format(layer_name, instance.data["asset"])
label = "{0} ({1})".format(layer_name, instance.data["folderPath"])
label += " [{0}-{1}]".format(
int(data["frameStartHandle"]), int(data["frameEndHandle"])
)

View file

@ -5,7 +5,11 @@ import os
from maya import cmds
from ayon_core.hosts.maya.api.lib import maintained_selection
from ayon_core.pipeline import AVALON_CONTAINER_ID, publish
from ayon_core.pipeline import (
AYON_CONTAINER_ID,
AVALON_CONTAINER_ID,
publish,
)
from ayon_core.pipeline.publish import AYONPyblishPluginMixin
from ayon_core.lib import BoolDef
@ -136,7 +140,9 @@ class ExtractMayaSceneRaw(publish.Extractor, AYONPyblishPluginMixin):
continue
id_attr = "{}.id".format(obj_set)
if cmds.getAttr(id_attr) != AVALON_CONTAINER_ID:
if cmds.getAttr(id_attr) not in {
AYON_CONTAINER_ID, AVALON_CONTAINER_ID
}:
continue
set_content = set(cmds.sets(obj_set, query=True))

View file

@ -61,7 +61,7 @@ class ExtractUnrealSkeletalMeshFbx(publish.Extractor):
# we rely on hierarchy under one root.
original_parent = to_extract[0].split("|")[1]
parent_node = instance.data.get("asset")
parent_node = instance.data.get("folderPath")
# this needs to be done for AYON
# WARNING: since AYON supports duplicity of asset names,
# this needs to be refactored throughout the pipeline.

View file

@ -37,7 +37,7 @@ class ValidateInstanceInContext(pyblish.api.InstancePlugin,
if not self.is_active(instance.data):
return
asset = instance.data.get("asset")
asset = instance.data.get("folderPath")
context_asset = self.get_context_asset(instance)
if asset != context_asset:
raise PublishValidationError(
@ -74,4 +74,4 @@ class ValidateInstanceInContext(pyblish.api.InstancePlugin,
@staticmethod
def get_context_asset(instance):
return instance.context.data["asset"]
return instance.context.data["folderPath"]

View file

@ -1,161 +0,0 @@
# -*- coding: utf-8 -*-
"""Validate model nodes names."""
import os
import platform
import re
import gridfs
import pyblish.api
from maya import cmds
import ayon_core.hosts.maya.api.action
from ayon_core.client.mongo import OpenPypeMongoConnection
from ayon_core.hosts.maya.api.shader_definition_editor import (
DEFINITION_FILENAME)
from ayon_core.pipeline.publish import (
OptionalPyblishPluginMixin, PublishValidationError, ValidateContentsOrder)
class ValidateModelName(pyblish.api.InstancePlugin,
OptionalPyblishPluginMixin):
"""Validate name of model
starts with (somename)_###_(materialID)_GEO
materialID must be present in list
padding number doesn't have limit
"""
optional = True
order = ValidateContentsOrder
hosts = ["maya"]
families = ["model"]
label = "Model Name"
actions = [ayon_core.hosts.maya.api.action.SelectInvalidAction]
material_file = None
database_file = DEFINITION_FILENAME
@classmethod
def get_invalid(cls, instance):
"""Get invalid nodes."""
use_db = cls.database
def is_group(group_name):
"""Find out if supplied transform is group or not."""
try:
children = cmds.listRelatives(group_name, children=True)
for child in children:
if not cmds.ls(child, transforms=True):
return False
return True
except Exception:
return False
invalid = []
content_instance = instance.data.get("setMembers", None)
if not content_instance:
cls.log.error("Instance has no nodes!")
return True
pass
# validate top level group name
assemblies = cmds.ls(content_instance, assemblies=True, long=True)
if len(assemblies) != 1:
cls.log.error("Must have exactly one top group")
return assemblies or True
top_group = assemblies[0]
regex = cls.top_level_regex
r = re.compile(regex)
m = r.match(top_group)
project_name = instance.context.data["projectName"]
current_asset_name = instance.context.data["asset"]
if m is None:
cls.log.error("invalid name on: {}".format(top_group))
cls.log.error("name doesn't match regex {}".format(regex))
invalid.append(top_group)
else:
if "asset" in r.groupindex:
if m.group("asset") != current_asset_name:
cls.log.error("Invalid asset name in top level group.")
return top_group
if "subset" in r.groupindex:
if m.group("subset") != instance.data.get("subset"):
cls.log.error("Invalid subset name in top level group.")
return top_group
if "project" in r.groupindex:
if m.group("project") != project_name:
cls.log.error("Invalid project name in top level group.")
return top_group
descendants = cmds.listRelatives(content_instance,
allDescendents=True,
fullPath=True) or []
descendants = cmds.ls(descendants, noIntermediate=True, long=True)
trns = cmds.ls(descendants, long=False, type='transform')
# filter out groups
filtered = [node for node in trns if not is_group(node)]
# load shader list file as utf-8
shaders = []
if not use_db:
material_file = cls.material_file[platform.system().lower()]
if material_file:
if os.path.isfile(material_file):
shader_file = open(material_file, "r")
shaders = shader_file.readlines()
shader_file.close()
else:
cls.log.error("Missing shader name definition file.")
return True
else:
client = OpenPypeMongoConnection.get_mongo_client()
fs = gridfs.GridFS(client[os.getenv("OPENPYPE_DATABASE_NAME")])
shader_file = fs.find_one({"filename": cls.database_file})
if not shader_file:
cls.log.error("Missing shader name definition in database.")
return True
shaders = shader_file.read().splitlines()
shader_file.close()
# strip line endings from list
shaders = [s.rstrip() for s in shaders if s.rstrip()]
# compile regex for testing names
regex = cls.regex
r = re.compile(regex)
for obj in filtered:
cls.log.debug("testing: {}".format(obj))
m = r.match(obj)
if m is None:
cls.log.error("invalid name on: {}".format(obj))
invalid.append(obj)
else:
# if we have shader files and shader named group is in
# regex, test this group against names in shader file
if "shader" in r.groupindex and shaders:
try:
if not m.group('shader') in shaders:
cls.log.error(
"invalid materialID on: {0} ({1})".format(
obj, m.group('shader')))
invalid.append(obj)
except IndexError:
# shader named group doesn't match
cls.log.error(
"shader group doesn't match: {}".format(obj))
invalid.append(obj)
return invalid
def process(self, instance):
"""Plugin entry point."""
if not self.is_active(instance.data):
return
invalid = self.get_invalid(instance)
if invalid:
raise PublishValidationError(
"Model naming is invalid. See the log.")

View file

@ -51,7 +51,7 @@ class ValidateShaderName(pyblish.api.InstancePlugin,
descendants = cmds.ls(descendants, noIntermediate=True, long=True)
shapes = cmds.ls(descendants, type=["nurbsSurface", "mesh"], long=True)
asset_name = instance.data.get("asset")
asset_name = instance.data.get("folderPath")
# Check the number of connected shadingEngines per shape
regex_compile = re.compile(cls.regex)

View file

@ -45,6 +45,8 @@ from ayon_core.pipeline import (
get_current_host_name,
get_current_project_name,
get_current_asset_name,
AYON_INSTANCE_ID,
AVALON_INSTANCE_ID,
)
from ayon_core.pipeline.context_tools import (
get_custom_workfile_template_from_session
@ -2300,12 +2302,16 @@ Reopening Nuke should synchronize these paths and resolve any discrepancies.
# backward compatibility
# TODO: remove this once old avalon data api will be removed
avalon_knob_data
and avalon_knob_data.get("id") != "pyblish.avalon.instance"
and avalon_knob_data.get("id") not in {
AYON_INSTANCE_ID, AVALON_INSTANCE_ID
}
):
continue
elif (
node_data
and node_data.get("id") != "pyblish.avalon.instance"
and node_data.get("id") not in {
AYON_INSTANCE_ID, AVALON_INSTANCE_ID
}
):
continue

View file

@ -18,6 +18,8 @@ from ayon_core.pipeline import (
register_loader_plugin_path,
register_creator_plugin_path,
register_inventory_action_path,
AYON_INSTANCE_ID,
AVALON_INSTANCE_ID,
AVALON_CONTAINER_ID,
get_current_asset_name,
get_current_task_name,
@ -550,7 +552,9 @@ def list_instances(creator_id=None):
if not instance_data:
continue
if instance_data["id"] != "pyblish.avalon.instance":
if instance_data["id"] not in {
AYON_INSTANCE_ID, AVALON_INSTANCE_ID
}:
continue
if creator_id and instance_data["creator_identifier"] != creator_id:

View file

@ -19,7 +19,9 @@ from ayon_core.pipeline import (
CreatorError,
Creator as NewCreator,
CreatedInstance,
get_current_task_name
get_current_task_name,
AYON_INSTANCE_ID,
AVALON_INSTANCE_ID,
)
from ayon_core.pipeline.colorspace import (
get_display_view_colorspace_name,
@ -493,7 +495,7 @@ def get_colorspace_from_node(node):
def get_review_presets_config():
settings = get_current_project_settings()
review_profiles = (
settings["global"]
settings["core"]
["publish"]
["ExtractReview"]
["profiles"]
@ -1265,7 +1267,9 @@ def convert_to_valid_instaces():
if not avalon_knob_data:
continue
if avalon_knob_data["id"] != "pyblish.avalon.instance":
if avalon_knob_data["id"] not in {
AYON_INSTANCE_ID, AVALON_INSTANCE_ID
}:
continue
transfer_data.update({
@ -1348,7 +1352,9 @@ def _remove_old_knobs(node):
def exposed_write_knobs(settings, plugin_name, instance_node):
exposed_knobs = settings["nuke"]["create"][plugin_name]["exposed_knobs"]
exposed_knobs = settings["nuke"]["create"][plugin_name].get(
"exposed_knobs", []
)
if exposed_knobs:
instance_node.addKnob(nuke.Text_Knob('', 'Write Knobs'))
write_node = nuke.allNodes(group=instance_node, filter="Write")[0]

View file

@ -1,3 +1,4 @@
from ayon_core.pipeline import AYON_INSTANCE_ID, AVALON_INSTANCE_ID
from ayon_core.pipeline.create.creator_plugins import SubsetConvertorPlugin
from ayon_core.hosts.nuke.api.lib import (
INSTANCE_DATA_KNOB,
@ -34,7 +35,9 @@ class LegacyConverted(SubsetConvertorPlugin):
if not avalon_knob_data:
continue
if avalon_knob_data["id"] != "pyblish.avalon.instance":
if avalon_knob_data["id"] not in {
AYON_INSTANCE_ID, AVALON_INSTANCE_ID
}:
continue
# catch and break

View file

@ -23,10 +23,10 @@ class ValidateCorrectAssetContext(
current asset (shot). This validator checks if this is so. It is optional
so it can be disabled when needed.
Checking `asset` and `task` keys.
Checking `folderPath` and `task` keys.
"""
order = ValidateContentsOrder
label = "Validate asset context"
label = "Validate Folder context"
hosts = ["nuke"]
actions = [
RepairAction,
@ -85,7 +85,7 @@ class ValidateCorrectAssetContext(
"""Get invalid keys from instance data and context data."""
invalid_keys = []
testing_keys = ["asset", "task"]
testing_keys = ["folderPath", "task"]
for _key in testing_keys:
if _key not in instance.data:
invalid_keys.append(_key)

View file

@ -65,7 +65,7 @@ class ValidateExposedKnobs(
group_node = instance.data["transientData"]["node"]
nuke_settings = instance.context.data["project_settings"]["nuke"]
create_settings = nuke_settings["create"][plugin]
exposed_knobs = create_settings["exposed_knobs"]
exposed_knobs = create_settings.get("exposed_knobs", [])
unexposed_knobs = []
for knob in exposed_knobs:
if knob not in group_node.knobs():

View file

@ -75,7 +75,7 @@ class CollectInstances(pyblish.api.ContextPlugin):
an LayerSet and marked with a unique identifier;
Identifier:
id (str): "pyblish.avalon.instance"
id (str): "ayon.create.instance"
"""
label = "Instances"

View file

@ -9,6 +9,8 @@ from ayon_core.pipeline import (
register_loader_plugin_path,
register_creator_plugin_path,
AVALON_CONTAINER_ID,
AYON_INSTANCE_ID,
AVALON_INSTANCE_ID,
)
from ayon_core.host import (
@ -121,7 +123,9 @@ class PhotoshopHost(HostBase, IWorkfileHost, ILoadHost, IPublishHost):
layers_meta = stub.get_layers_metadata()
if layers_meta:
for instance in layers_meta:
if instance.get("id") == "pyblish.avalon.instance":
if instance.get("id") in {
AYON_INSTANCE_ID, AVALON_INSTANCE_ID
}:
instances.append(instance)
return instances

View file

@ -119,14 +119,14 @@ class PhotoshopServerStub:
"active":true,
"subset":"imageBG",
"family":"image",
"id":"pyblish.avalon.instance",
"asset":"Town",
"id":"ayon.create.instance",
"folderPath":"Town",
"uuid": "8"
}] - for created instances
OR
[{
"schema": "openpype:container-2.0",
"id": "pyblish.avalon.instance",
"id": "ayon.create.instance",
"name": "imageMG",
"namespace": "Jungle_imageMG_001",
"loader": "ImageLoader",
@ -420,8 +420,8 @@ class PhotoshopServerStub:
(list)
example:
{"8":{"active":true,"subset":"imageBG",
"family":"image","id":"pyblish.avalon.instance",
"asset":"Town"}}
"family":"image","id":"ayon.create.instance",
"folderPath":"/Town"}}
8 is layer(group) id - used for deletion, update etc.
"""
res = self.websocketserver.call(self.client.call('Photoshop.read'))

View file

@ -209,8 +209,8 @@ class ImageCreator(Creator):
'Use layer name in subset' will explicitly add layer name into subset
name. Position of this name is configurable in
`project_settings/global/tools/creator/subset_name_profiles`.
If layer placeholder ({layer}) is not used in `subset_name_profiles`
`project_settings/global/tools/creator/product_name_profiles`.
If layer placeholder ({layer}) is not used in `product_name_profiles`
but layer name should be used (set explicitly in UI or implicitly if
multiple images should be created), it is added in capitalized form
as a suffix to subset name.

View file

@ -28,7 +28,7 @@ class CollectAutoImage(pyblish.api.ContextPlugin):
task_name = context.data["task"]
host_name = context.data["hostName"]
asset_doc = context.data["assetEntity"]
asset_name = get_asset_name_identifier(asset_doc)
folder_path = get_asset_name_identifier(asset_doc)
auto_creator = proj_settings.get(
"photoshop", {}).get(
@ -86,7 +86,7 @@ class CollectAutoImage(pyblish.api.ContextPlugin):
instance = context.create_instance(subset_name)
instance.data["family"] = family
instance.data["asset"] = asset_name
instance.data["folderPath"] = folder_path
instance.data["subset"] = subset_name
instance.data["ids"] = publishable_ids
instance.data["publish"] = True

View file

@ -67,7 +67,7 @@ class CollectAutoReview(pyblish.api.ContextPlugin):
host_name = context.data["hostName"]
asset_doc = context.data["assetEntity"]
asset_name = get_asset_name_identifier(asset_doc)
folder_path = get_asset_name_identifier(asset_doc)
subset_name = get_subset_name(
family,
@ -87,7 +87,7 @@ class CollectAutoReview(pyblish.api.ContextPlugin):
"family": family,
"families": [],
"representations": [],
"asset": asset_name,
"folderPath": folder_path,
"publish": self.publish
})

View file

@ -71,7 +71,7 @@ class CollectAutoWorkfile(pyblish.api.ContextPlugin):
host_name = context.data["hostName"]
asset_doc = context.data["assetEntity"]
asset_name = get_asset_name_identifier(asset_doc)
folder_path = get_asset_name_identifier(asset_doc)
subset_name = get_subset_name(
family,
variant,
@ -91,7 +91,7 @@ class CollectAutoWorkfile(pyblish.api.ContextPlugin):
"family": family,
"families": [],
"representations": [],
"asset": asset_name
"folderPath": folder_path
})
# creating representation

View file

@ -2,7 +2,7 @@
Provides:
context -> Loaded batch file.
- asset
- folderPath
- task (task name)
- taskType
- project_name
@ -71,7 +71,7 @@ class CollectBatchData(pyblish.api.ContextPlugin):
os.environ["AYON_FOLDER_PATH"] = asset_name
os.environ["AYON_TASK_NAME"] = task_name
context.data["asset"] = asset_name
context.data["folderPath"] = asset_name
context.data["task"] = task_name
context.data["taskType"] = task_type
context.data["project_name"] = project_name

View file

@ -26,7 +26,7 @@ class CollectColorCodedInstances(pyblish.api.ContextPlugin):
only separate subsets per marked layer.
Identifier:
id (str): "pyblish.avalon.instance"
id (str): "ayon.create.instance"
"""
label = "Collect Color-coded Instances"
@ -56,7 +56,7 @@ class CollectColorCodedInstances(pyblish.api.ContextPlugin):
existing_subset_names = self._get_existing_subset_names(context)
# from CollectBatchData
asset_name = context.data["asset"]
asset_name = context.data["folderPath"]
task_name = context.data["task"]
variant = context.data["variant"]
project_name = context.data["projectEntity"]["name"]
@ -163,7 +163,7 @@ class CollectColorCodedInstances(pyblish.api.ContextPlugin):
instance = context.create_instance(layer.name)
instance.data["family"] = family
instance.data["publish"] = True
instance.data["asset"] = asset
instance.data["folderPath"] = asset
instance.data["task"] = task_name
instance.data["subset"] = subset
instance.data["layer"] = layer

View file

@ -31,7 +31,7 @@ class ValidateInstanceAssetRepair(pyblish.api.Action):
current_asset_name = get_current_asset_name()
for instance in instances:
data = stub.read(instance[0])
data["asset"] = current_asset_name
data["folderPath"] = current_asset_name
stub.imprint(instance[0], data)
@ -54,7 +54,7 @@ class ValidateInstanceAsset(OptionalPyblishPluginMixin,
order = ValidateContentsOrder
def process(self, instance):
instance_asset = instance.data["asset"]
instance_asset = instance.data["folderPath"]
current_asset = get_current_asset_name()
if instance_asset != current_asset:

View file

@ -519,7 +519,7 @@ def imprint(timeline_item, data=None):
Examples:
data = {
'asset': 'sq020sh0280',
'folderPath': 'sq020sh0280',
'family': 'render',
'subset': 'subsetMain'
}

View file

@ -2,6 +2,7 @@ from pprint import pformat
import pyblish
from ayon_core.pipeline import AYON_INSTANCE_ID, AVALON_INSTANCE_ID
from ayon_core.hosts.resolve.api.lib import (
get_current_timeline_items,
get_timeline_item_pype_tag,
@ -39,7 +40,9 @@ class PrecollectInstances(pyblish.api.ContextPlugin):
if not tag_data:
continue
if tag_data.get("id") != "pyblish.avalon.instance":
if tag_data.get("id") not in {
AYON_INSTANCE_ID, AVALON_INSTANCE_ID
}:
continue
media_pool_item = timeline_item.GetMediaPoolItem()
@ -66,7 +69,7 @@ class PrecollectInstances(pyblish.api.ContextPlugin):
data.update({
"name": "{}_{}".format(asset, subset),
"label": "{} {}".format(asset, subset),
"asset": asset,
"folderPath": asset,
"item": timeline_item,
"publish": get_publish_attribute(timeline_item),
"fps": context.data["fps"],
@ -124,7 +127,7 @@ class PrecollectInstances(pyblish.api.ContextPlugin):
if not hierarchy_data:
return
asset = data["asset"]
asset = data["folderPath"]
subset = "shotMain"
# insert family into families
@ -134,7 +137,7 @@ class PrecollectInstances(pyblish.api.ContextPlugin):
"name": "{}_{}".format(asset, subset),
"label": "{} {}".format(asset, subset),
"subset": subset,
"asset": asset,
"folderPath": asset,
"family": family,
"families": [],
"publish": get_publish_attribute(timeline_item)

View file

@ -28,7 +28,7 @@ class PrecollectWorkfile(pyblish.api.ContextPlugin):
instance_data = {
"name": "{}_{}".format(asset_name, subset),
"label": "{} {}".format(current_asset_name, subset),
"asset": current_asset_name,
"folderPath": current_asset_name,
"subset": subset,
"item": project,
"family": "workfile",

View file

@ -27,8 +27,8 @@ class CollectTextureSet(pyblish.api.InstancePlugin):
config = self.get_export_config(instance)
asset_doc = get_asset_by_name(
project_name=instance.context.data["projectName"],
asset_name=instance.data["asset"]
instance.context.data["projectName"],
instance.data["folderPath"]
)
instance.data["exportConfig"] = config

View file

@ -53,7 +53,7 @@ class OnlineCreator(TrayPublishCreator):
# disable check for existing subset with the same name
"""
asset = get_asset_by_name(
self.project_name, instance_data["asset"], fields=["_id"])
self.project_name, instance_data["folderPath"], fields=["_id"])
if get_subset_by_name(
self.project_name, origin_basename, asset["_id"],

View file

@ -17,7 +17,7 @@ class CollectShotInstance(pyblish.api.InstancePlugin):
families = ["shot"]
SHARED_KEYS = [
"asset",
"folderPath",
"fps",
"handleStart",
"handleEnd",
@ -132,7 +132,7 @@ class CollectShotInstance(pyblish.api.InstancePlugin):
"sourceIn": _cr_attrs["sourceIn"],
"sourceOut": _cr_attrs["sourceOut"],
"workfileFrameStart": workfile_start_frame,
"asset": _cr_attrs["folderPath"],
"folderPath": _cr_attrs["folderPath"],
}
def _solve_hierarchy_context(self, instance):
@ -170,7 +170,7 @@ class CollectShotInstance(pyblish.api.InstancePlugin):
parents = instance.data.get('parents', [])
# Split by '/' for AYON where asset is a path
asset_name = instance.data["asset"].split("/")[-1]
asset_name = instance.data["folderPath"].split("/")[-1]
actual = {asset_name: in_info}
for parent in reversed(parents):

View file

@ -40,7 +40,7 @@ class ValidateExistingVersion(
formatting_data = {
"subset_name": subset_name,
"asset_name": instance.data["asset"],
"asset_name": instance.data["folderPath"],
"version": version
}
raise PublishXmlValidationError(

View file

@ -52,7 +52,7 @@ expected that there are also keys `["instances0", "instances1"]`.
Workfile data looks like:
```
[avalon]
instances0=[{{__dq__}id{__dq__}: {__dq__}pyblish.avalon.instance{__dq__...
instances0=[{{__dq__}id{__dq__}: {__dq__}ayon.create.instance{__dq__...
instances1=...more data...
instances=2
```

View file

@ -65,7 +65,7 @@ class CollectWorkfileData(pyblish.api.ContextPlugin):
# Collect and store current context to have reference
current_context = {
"project_name": context.data["projectName"],
"asset_name": context.data["asset"],
"asset_name": context.data["folderPath"],
"task_name": context.data["task"]
}
self.log.debug("Current context is: {}".format(current_context))
@ -105,7 +105,7 @@ class CollectWorkfileData(pyblish.api.ContextPlugin):
))
# Store context asset name
context.data["asset"] = asset_name
context.data["folderPath"] = asset_name
context.data["task"] = task_name
self.log.info(
"Context is set to Asset: \"{}\" and Task: \"{}\"".format(

View file

@ -20,7 +20,7 @@ class FixAssetNames(pyblish.api.Action):
on = "failed"
def process(self, context, plugin):
context_asset_name = context.data["asset"]
context_asset_name = context.data["folderPath"]
old_instance_items = list_instances()
new_instance_items = []
for instance_item in old_instance_items:
@ -51,9 +51,9 @@ class ValidateAssetName(
def process(self, context):
if not self.is_active(context.data):
return
context_asset_name = context.data["asset"]
context_asset_name = context.data["folderPath"]
for instance in context:
asset_name = instance.data.get("asset")
asset_name = instance.data.get("folderPath")
if asset_name and asset_name == context_asset_name:
continue

View file

@ -64,7 +64,7 @@ class CollectRenderInstances(pyblish.api.InstancePlugin):
new_data = new_instance.data
new_data["asset"] = seq_name
new_data["folderPath"] = seq_name
new_data["setMembers"] = seq_name
new_data["family"] = "render"
new_data["families"] = ["render", "review"]

View file

@ -16,7 +16,6 @@ from ayon_core.client import get_asset_name_identifier
from ayon_core.settings import (
get_system_settings,
get_project_settings,
get_local_settings
)
from ayon_core.settings.constants import (
METADATA_KEYS,
@ -1528,16 +1527,17 @@ def prepare_app_environments(
# Use environments from local settings
filtered_local_envs = {}
system_settings = data["system_settings"]
whitelist_envs = system_settings["general"].get("local_env_white_list")
if whitelist_envs:
local_settings = get_local_settings()
local_envs = local_settings.get("environments") or {}
filtered_local_envs = {
key: value
for key, value in local_envs.items()
if key in whitelist_envs
}
# NOTE Overrides for environment variables are not implemented in AYON.
# system_settings = data["system_settings"]
# whitelist_envs = system_settings["general"].get("local_env_white_list")
# if whitelist_envs:
# local_settings = get_local_settings()
# local_envs = local_settings.get("environments") or {}
# filtered_local_envs = {
# key: value
# for key, value in local_envs.items()
# if key in whitelist_envs
# }
# Apply local environment variables for already existing values
for key, value in filtered_local_envs.items():
@ -1656,8 +1656,9 @@ def apply_project_environments_value(
if project_settings is None:
project_settings = get_project_settings(project_name)
env_value = project_settings["global"]["project_environments"]
env_value = project_settings["core"]["project_environments"]
if env_value:
env_value = json.loads(env_value)
parsed_value = parse_environments(env_value, env_group)
env.update(acre.compute(
_merge_env(parsed_value, env),
@ -1916,7 +1917,7 @@ def should_start_last_workfile(
project_settings = get_project_settings(project_name)
profiles = (
project_settings
["global"]
["core"]
["tools"]
["Workfiles"]
["last_workfile_on_startup"]
@ -1966,7 +1967,7 @@ def should_workfile_tool_start(
project_settings = get_project_settings(project_name)
profiles = (
project_settings
["global"]
["core"]
["tools"]
["Workfiles"]
["open_workfile_tool_on_startup"]

View file

@ -5,7 +5,6 @@ import platform
import getpass
import socket
from ayon_core.settings.lib import get_local_settings
from .execute import get_ayon_launcher_args
from .local_settings import get_local_site_id
@ -96,7 +95,6 @@ def get_all_current_info():
return {
"workstation": get_workstation_info(),
"env": os.environ.copy(),
"local_settings": get_local_settings(),
"ayon": get_ayon_info(),
}

View file

@ -1385,23 +1385,26 @@ def _get_image_dimensions(application, input_path, log):
def convert_color_values(application, color_value):
"""Get color mapping for ffmpeg and oiiotool.
Args:
application (str): Application for which command should be created.
color_value (list[int]): List of 8bit int values for RGBA.
color_value (tuple[int, int, int, float]): List of 8bit int values
for RGBA.
Returns:
str: ffmpeg returns hex string, oiiotool is string with floats.
"""
red, green, blue, alpha = color_value
if application == "ffmpeg":
return "{0:0>2X}{1:0>2X}{2:0>2X}@{3}".format(
red, green, blue, (alpha / 255.0)
red, green, blue, alpha
)
elif application == "oiiotool":
red = float(red / 255)
green = float(green / 255)
blue = float(blue / 255)
alpha = float(alpha / 255)
return "{0:.3f},{1:.3f},{2:.3f},{3:.3f}".format(
red, green, blue, alpha)

View file

@ -4,7 +4,7 @@ import six
import sys
from ayon_core.lib import requests_get, Logger
from ayon_core.modules import OpenPypeModule, IPluginPaths
from ayon_core.modules import AYONAddon, IPluginPaths
class DeadlineWebserviceError(Exception):
@ -13,28 +13,28 @@ class DeadlineWebserviceError(Exception):
"""
class DeadlineModule(OpenPypeModule, IPluginPaths):
class DeadlineModule(AYONAddon, IPluginPaths):
name = "deadline"
def __init__(self, manager, settings):
self.deadline_urls = {}
super(DeadlineModule, self).__init__(manager, settings)
def initialize(self, modules_settings):
def initialize(self, studio_settings):
# This module is always enabled
deadline_settings = modules_settings[self.name]
self.enabled = deadline_settings["enabled"]
deadline_url = deadline_settings.get("DEADLINE_REST_URL")
if deadline_url:
self.deadline_urls = {"default": deadline_url}
else:
self.deadline_urls = deadline_settings.get("deadline_urls") # noqa: E501
deadline_urls = {}
enabled = self.name in studio_settings
if enabled:
deadline_settings = studio_settings[self.name]
deadline_urls = {
url_item["name"]: url_item["value"]
for url_item in deadline_settings["deadline_urls"]
}
if not self.deadline_urls:
self.enabled = False
self.log.warning(("default Deadline Webservice URL "
"not specified. Disabling module."))
return
if enabled and not deadline_urls:
enabled = False
self.log.warning((
"Deadline Webservice URLs are not specified. Disabling addon."
))
self.enabled = enabled
self.deadline_urls = deadline_urls
def get_plugin_paths(self):
"""Deadline plugin paths."""

View file

@ -47,11 +47,11 @@ class CollectDeadlineServerFromInstance(pyblish.api.InstancePlugin):
deadline_settings = (
render_instance.context.data
["system_settings"]
["modules"]
["deadline"]
)
default_server = render_instance.context.data["defaultDeadline"]
# QUESTION How and where is this is set? Should be removed?
instance_server = render_instance.data.get("deadlineServers")
if not instance_server:
self.log.debug("Using default server.")
@ -64,7 +64,10 @@ class CollectDeadlineServerFromInstance(pyblish.api.InstancePlugin):
asString=True
)
default_servers = deadline_settings["deadline_urls"]
default_servers = {
url_item["name"]: url_item["value"]
for url_item in deadline_settings["deadline_urls"]
}
project_servers = (
render_instance.context.data
["project_settings"]

View file

@ -1,39 +0,0 @@
# -*- coding: utf-8 -*-
"""Collect instances that should be processed and published on DL.
"""
import os
import pyblish.api
from ayon_core.pipeline import PublishValidationError
class CollectDeadlinePublishableInstances(pyblish.api.InstancePlugin):
"""Collect instances that should be processed and published on DL.
Some long running publishes (not just renders) could be offloaded to DL,
this plugin compares theirs name against env variable, marks only
publishable by farm.
Triggered only when running only in headless mode, eg on a farm.
"""
order = pyblish.api.CollectorOrder + 0.499
label = "Collect Deadline Publishable Instance"
targets = ["remote"]
def process(self, instance):
self.log.debug("CollectDeadlinePublishableInstances")
publish_inst = os.environ.get("OPENPYPE_PUBLISH_SUBSET", '')
if not publish_inst:
raise PublishValidationError("OPENPYPE_PUBLISH_SUBSET env var "
"required for remote publishing")
subset_name = instance.data["subset"]
if subset_name == publish_inst:
self.log.debug("Publish {}".format(subset_name))
instance.data["publish"] = True
instance.data["farm"] = False
else:
self.log.debug("Skipping {}".format(subset_name))
instance.data["publish"] = False

View file

@ -44,7 +44,10 @@ class VrayRenderPluginInfo():
@attr.s
class RedshiftRenderPluginInfo():
SceneFile = attr.ib(default=None)
Version = attr.ib(default=None)
# Use "1" as the default Redshift version just because it
# default fallback version in Deadline's Redshift plugin
# if no version was specified
Version = attr.ib(default="1")
class HoudiniSubmitDeadline(

View file

@ -1,131 +0,0 @@
import os
import attr
from datetime import datetime
from ayon_core.pipeline import PublishXmlValidationError
from ayon_core.lib import is_in_tests
from openpype_modules.deadline import abstract_submit_deadline
from openpype_modules.deadline.abstract_submit_deadline import DeadlineJobInfo
import pyblish.api
@attr.s
class MayaPluginInfo(object):
Build = attr.ib(default=None) # Don't force build
StrictErrorChecking = attr.ib(default=True)
SceneFile = attr.ib(default=None) # Input scene
Version = attr.ib(default=None) # Mandatory for Deadline
ProjectPath = attr.ib(default=None)
ScriptJob = attr.ib(default=True)
ScriptFilename = attr.ib(default=None)
class MayaSubmitRemotePublishDeadline(
abstract_submit_deadline.AbstractSubmitDeadline):
"""Submit Maya scene to perform a local publish in Deadline.
Publishing in Deadline can be helpful for scenes that publish very slow.
This way it can process in the background on another machine without the
Artist having to wait for the publish to finish on their local machine.
Submission is done through the Deadline Web Service. DL then triggers
`openpype/scripts/remote_publish.py`.
Each publishable instance creates its own full publish job.
Different from `ProcessSubmittedJobOnFarm` which creates publish job
depending on metadata json containing context and instance data of
rendered files.
"""
label = "Submit Scene to Deadline"
order = pyblish.api.IntegratorOrder
hosts = ["maya"]
families = ["publish.farm"]
targets = ["local"]
def process(self, instance):
# Ensure no errors so far
if not (all(result["success"]
for result in instance.context.data["results"])):
raise PublishXmlValidationError("Publish process has errors")
if not instance.data["publish"]:
self.log.warning("No active instances found. "
"Skipping submission..")
return
super(MayaSubmitRemotePublishDeadline, self).process(instance)
def get_job_info(self):
instance = self._instance
context = instance.context
project_name = instance.context.data["projectName"]
scene = instance.context.data["currentFile"]
scenename = os.path.basename(scene)
job_name = "{scene} [PUBLISH]".format(scene=scenename)
batch_name = "{code} - {scene}".format(code=project_name,
scene=scenename)
if is_in_tests():
batch_name += datetime.now().strftime("%d%m%Y%H%M%S")
job_info = DeadlineJobInfo(Plugin="MayaBatch")
job_info.BatchName = batch_name
job_info.Name = job_name
job_info.UserName = context.data.get("user")
job_info.Comment = context.data.get("comment", "")
# use setting for publish job on farm, no reason to have it separately
project_settings = context.data["project_settings"]
deadline_publish_job_sett = project_settings["deadline"]["publish"]["ProcessSubmittedJobOnFarm"] # noqa
job_info.Department = deadline_publish_job_sett["deadline_department"]
job_info.ChunkSize = deadline_publish_job_sett["deadline_chunk_size"]
job_info.Priority = deadline_publish_job_sett["deadline_priority"]
job_info.Group = deadline_publish_job_sett["deadline_group"]
job_info.Pool = deadline_publish_job_sett["deadline_pool"]
# Include critical environment variables with submission + Session
keys = [
"FTRACK_API_USER",
"FTRACK_API_KEY",
"FTRACK_SERVER"
]
environment = {
key: os.environ[key]
for key in keys
if key in os.environ
}
environment["AYON_PROJECT_NAME"] = project_name
environment["AYON_FOLDER_PATH"] = instance.context.data["asset"]
environment["AYON_TASK_NAME"] = instance.context.data["task"]
environment["AYON_APP_NAME"] = os.environ.get("AYON_APP_NAME")
environment["OPENPYPE_PUBLISH_SUBSET"] = instance.data["subset"]
environment["AYON_LOG_NO_COLORS"] = "1"
environment["AYON_USERNAME"] = instance.context.data["user"]
environment["AYON_REMOTE_PUBLISH"] = "1"
for key, value in environment.items():
job_info.EnvironmentKeyValue[key] = value
def get_plugin_info(self):
# Not all hosts can import this module.
from maya import cmds
scene = self._instance.context.data["currentFile"]
plugin_info = MayaPluginInfo()
plugin_info.SceneFile = scene
plugin_info.ScriptFilename = "{OPENPYPE_REPOS_ROOT}/openpype/scripts/remote_publish.py" # noqa
plugin_info.Version = cmds.about(version=True)
plugin_info.ProjectPath = cmds.workspace(query=True,
rootDirectory=True)
return attr.asdict(plugin_info)

View file

@ -112,7 +112,7 @@ class ProcessSubmittedCacheJobOnFarm(pyblish.api.InstancePlugin,
output_dir = self._get_publish_folder(
anatomy,
deepcopy(instance.data["anatomyData"]),
instance.data.get("asset"),
instance.data.get("folderPath"),
instance.data["subset"],
instance.context,
instance.data["family"],
@ -126,7 +126,7 @@ class ProcessSubmittedCacheJobOnFarm(pyblish.api.InstancePlugin,
environment = {
"AYON_PROJECT_NAME": instance.context.data["projectName"],
"AYON_FOLDER_PATH": instance.context.data["asset"],
"AYON_FOLDER_PATH": instance.context.data["folderPath"],
"AYON_TASK_NAME": instance.context.data["task"],
"AYON_USERNAME": instance.context.data["user"],
"AYON_LOG_NO_COLORS": "1",
@ -359,7 +359,7 @@ class ProcessSubmittedCacheJobOnFarm(pyblish.api.InstancePlugin,
# publish job file
publish_job = {
"asset": instance_skeleton_data["asset"],
"folderPath": instance_skeleton_data["folderPath"],
"frameStart": instance_skeleton_data["frameStart"],
"frameEnd": instance_skeleton_data["frameEnd"],
"fps": instance_skeleton_data["fps"],

Some files were not shown because too many files have changed in this diff Show more