Mirror of https://github.com/ynput/ayon-core.git (synced 2025-12-25 05:14:40 +01:00)
Merge pull request #904 from pypeclub/feature/3.0_tvpaint_without_layer_ids

TVPaint without layer ids

Instances and containers now reference TVPaint layers by name instead of by layer id, since layer ids turned out not to be usable as unified identifiers across multiple workstations.

Commit bfaef217b5

8 changed files with 239 additions and 60 deletions
@@ -68,8 +68,8 @@ class CreateRenderPass(pipeline.Creator):
         self.data["render_layer"] = render_layer

         # Collect selected layer ids to be stored into instance
-        layer_ids = [layer["layer_id"] for layer in selected_layers]
-        self.data["layer_ids"] = layer_ids
+        layer_names = [layer["name"] for layer in selected_layers]
+        self.data["layer_names"] = layer_names

         # Replace `beauty` in beauty's subset name with entered name
         subset_name = self.subset_template.format(**{
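The creator hunk above swaps the stored identifier from TVPaint layer ids to layer names. A minimal sketch of the resulting instance data, using a hypothetical selected_layers list shaped like the records returned by lib.layers_data():

# Hypothetical layer records, shaped like lib.layers_data() output.
selected_layers = [
    {"layer_id": 4, "name": "L001_fill"},
    {"layer_id": 7, "name": "L002_line"},
]

data = {}
# After this PR the instance stores names, which survive reopening the
# workfile on another workstation; layer ids do not.
data["layer_names"] = [layer["name"] for layer in selected_layers]
assert data["layer_names"] == ["L001_fill", "L002_line"]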
@@ -1,3 +1,4 @@
+import collections
 from avalon.pipeline import get_representation_context
 from avalon.vendor import qargparse
 from avalon.tvpaint import lib, pipeline
@@ -15,7 +16,7 @@ class LoadImage(pipeline.Loader):
     color = "white"

     import_script = (
-        "filepath = \"{}\"\n"
+        "filepath = '\"'\"{}\"'\"'\n"
         "layer_name = \"{}\"\n"
         "tv_loadsequence filepath {}PARSE layer_id\n"
         "tv_layerrename layer_id layer_name"
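The one-line change to import_script wraps the filepath placeholder in extra quoting before the script is handed to george. A small sketch of what the formatted script looks like; the sample path and the empty third argument are assumptions, since the actual call site is not part of this hunk:

import_script = (
    "filepath = '\"'\"{}\"'\"'\n"
    "layer_name = \"{}\"\n"
    "tv_loadsequence filepath {}PARSE layer_id\n"
    "tv_layerrename layer_id layer_name"
)

# Hypothetical values; the real loader fills these from the context.
print(import_script.format("C:/renders/img.0001.png", "img", ""))
# First line of the output:
# filepath = '"'"C:/renders/img.0001.png"'"'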
@@ -92,30 +93,55 @@ class LoadImage(pipeline.Loader):
                 "Loading probably failed during execution of george script."
             )

-        layer_ids = [loaded_layer["layer_id"]]
+        layer_names = [loaded_layer["name"]]
         namespace = namespace or layer_name
         return pipeline.containerise(
             name=name,
             namespace=namespace,
-            layer_ids=layer_ids,
+            members=layer_names,
             context=context,
             loader=self.__class__.__name__
         )

-    def _remove_layers(self, layer_ids, layers=None):
-        if not layer_ids:
-            self.log.warning("Got empty layer ids list.")
+    def _remove_layers(self, layer_names=None, layer_ids=None, layers=None):
+        if not layer_names and not layer_ids:
+            self.log.warning("Got empty layer names list.")
             return

         if layers is None:
             layers = lib.layers_data()

         available_ids = set(layer["layer_id"] for layer in layers)
-        layer_ids_to_remove = []
-
-        for layer_id in layer_ids:
-            if layer_id in available_ids:
-                layer_ids_to_remove.append(layer_id)
+        if layer_ids is None:
+            # Backwards compatibility (layer ids were stored instead of names)
+            layer_names_are_ids = True
+            for layer_name in layer_names:
+                if (
+                    not isinstance(layer_name, int)
+                    and not layer_name.isnumeric()
+                ):
+                    layer_names_are_ids = False
+                    break
+
+            if layer_names_are_ids:
+                layer_ids = layer_names
+
+        layer_ids_to_remove = []
+        if layer_ids is not None:
+            for layer_id in layer_ids:
+                if layer_id in available_ids:
+                    layer_ids_to_remove.append(layer_id)
+
+        else:
+            layers_by_name = collections.defaultdict(list)
+            for layer in layers:
+                layers_by_name[layer["name"]].append(layer)
+
+            for layer_name in layer_names:
+                layers = layers_by_name[layer_name]
+                if len(layers) == 1:
+                    layer_ids_to_remove.append(layers[0]["layer_id"])

         if not layer_ids_to_remove:
             self.log.warning("No layers to delete.")
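_remove_layers now accepts either layer names or layer ids and has to guess which kind a legacy container stored. A standalone sketch of that heuristic, mirroring the loop above: values that are all ints or numeric strings are treated as legacy ids.

def members_look_like_ids(members):
    # Anything that is neither an int nor a purely numeric string
    # must be a layer name, so the whole list is treated as names.
    for member in members:
        if not isinstance(member, int) and not member.isnumeric():
            return False
    return True


assert members_look_like_ids([4, "7"])           # legacy layer ids
assert not members_look_like_ids(["L001_fill"])  # layer names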
@@ -128,16 +154,19 @@ class LoadImage(pipeline.Loader):
         george_script = "\n".join(george_script_lines)
         lib.execute_george_through_file(george_script)

-    def remove(self, container):
-        layer_ids = self.layer_ids_from_container(container)
-        self.log.warning("Layers to delete {}".format(layer_ids))
-        self._remove_layers(layer_ids)
-
     def _remove_container(self, container, members=None):
         if not container:
             return
+        representation = container["representation"]
+        members = self.get_members_from_container(container)
         current_containers = pipeline.ls()
         pop_idx = None
         for idx, cur_con in enumerate(current_containers):
-            cur_con_layer_ids = self.layer_ids_from_container(cur_con)
-            if cur_con_layer_ids == layer_ids:
+            cur_members = self.get_members_from_container(cur_con)
+            if (
+                cur_members == members
+                and cur_con["representation"] == representation
+            ):
                 pop_idx = idx
                 break
@@ -154,6 +183,12 @@ class LoadImage(pipeline.Loader):
             pipeline.SECTION_NAME_CONTAINERS, current_containers
         )

+    def remove(self, container):
+        members = self.get_members_from_container(container)
+        self.log.warning("Layers to delete {}".format(members))
+        self._remove_layers(members)
+        self._remove_container(container)
+
     def switch(self, container, representation):
         self.update(container, representation)
@@ -166,39 +201,41 @@ class LoadImage(pipeline.Loader):
         """
-        # Create new containers first
         context = get_representation_context(representation)
-        # Change `fname` to new representation
-        self.fname = self.filepath_from_context(context)

-        name = container["name"]
-        namespace = container["namespace"]
-        new_container = self.load(context, name, namespace, {})
-        new_layer_ids = self.layer_ids_from_container(new_container)

         # Get layer ids from previous container
-        old_layer_ids = self.layer_ids_from_container(container)
+        old_layer_names = self.get_members_from_container(container)

-        layers = lib.layers_data()
-        layers_by_id = {
-            layer["layer_id"]: layer
-            for layer in layers
-        }
+        # Backwards compatibility (layer ids were stored instead of names)
+        old_layers_are_ids = True
+        for name in old_layer_names:
+            if isinstance(name, int) or name.isnumeric():
+                continue
+            old_layers_are_ids = False
+            break

         old_layers = []
-        new_layers = []
-        for layer_id in old_layer_ids:
-            layer = layers_by_id.get(layer_id)
-            if layer:
-                old_layers.append(layer)
+        layers = lib.layers_data()
+        previous_layer_ids = set(layer["layer_id"] for layer in layers)
+        if old_layers_are_ids:
+            for layer in layers:
+                if layer["layer_id"] in old_layer_names:
+                    old_layers.append(layer)
+        else:
+            layers_by_name = collections.defaultdict(list)
+            for layer in layers:
+                layers_by_name[layer["name"]].append(layer)

-        for layer_id in new_layer_ids:
-            layer = layers_by_id.get(layer_id)
-            if layer:
-                new_layers.append(layer)
+            for layer_name in old_layer_names:
+                layers = layers_by_name[layer_name]
+                if len(layers) == 1:
+                    old_layers.append(layers[0])

         # Prepare few data
         new_start_position = None
         new_group_id = None
+        layer_ids_to_remove = set()
         for layer in old_layers:
+            layer_ids_to_remove.add(layer["layer_id"])
             position = layer["position"]
             group_id = layer["group_id"]
             if new_start_position is None:
@@ -213,6 +250,28 @@ class LoadImage(pipeline.Loader):
             elif new_group_id != group_id:
                 new_group_id = -1

+        # Remove old container
+        self._remove_container(container)
+        # Remove old layers
+        self._remove_layers(layer_ids=layer_ids_to_remove)
+
+        # Change `fname` to new representation
+        self.fname = self.filepath_from_context(context)
+
+        name = container["name"]
+        namespace = container["namespace"]
+        new_container = self.load(context, name, namespace, {})
+        new_layer_names = self.get_members_from_container(new_container)
+
+        layers = lib.layers_data()
+
+        new_layers = []
+        for layer in layers:
+            if layer["layer_id"] in previous_layer_ids:
+                continue
+            if layer["name"] in new_layer_names:
+                new_layers.append(layer)
+
         george_script_lines = []
         # Group new layers to same group as previous container layers had
         # - all old layers must be under same group
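Because a freshly loaded layer can share its name with an existing one, update() snapshots the layer ids present before loading and treats any id that appears afterwards as part of the new container. A minimal sketch of that detection with hypothetical before/after snapshots:

# Hypothetical lib.layers_data() snapshots taken before and after load.
layers_before = [
    {"layer_id": 1, "name": "bg"},
    {"layer_id": 2, "name": "img"},
]
layers_after = layers_before + [{"layer_id": 3, "name": "img"}]

previous_layer_ids = set(layer["layer_id"] for layer in layers_before)
new_layer_names = ["img"]  # members of the newly loaded container

# Ids are unreliable across workstations but stable within one session,
# so they can disambiguate duplicated names right after a load.
new_layers = [
    layer for layer in layers_after
    if layer["layer_id"] not in previous_layer_ids
    and layer["name"] in new_layer_names
]
assert new_layers == [{"layer_id": 3, "name": "img"}]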
@@ -246,6 +305,3 @@ class LoadImage(pipeline.Loader):
         if george_script_lines:
             george_script = "\n".join(george_script_lines)
             lib.execute_george_through_file(george_script)
-
-        # Remove old container
-        self.remove(container)
@@ -148,17 +148,44 @@ class CollectInstances(pyblish.api.ContextPlugin):
             ))

         layers_data = context.data["layersData"]
-        layers_by_id = {
-            layer["layer_id"]: layer
+        layers_by_name = {
+            layer["name"]: layer
             for layer in layers_data
         }

-        layer_ids = instance_data["layer_ids"]
+        if "layer_names" in instance_data:
+            layer_names = instance_data["layer_names"]
+        else:
+            # Backwards compatibility
+            # - not 100% working as it was found out that layer ids can't be
+            #   used as unified identifier across multiple workstations
+            layers_by_id = {
+                layer["id"]: layer
+                for layer in layers_data
+            }
+            layer_ids = instance_data["layer_ids"]
+            layer_names = []
+            for layer_id in layer_ids:
+                layer = layers_by_id.get(layer_id)
+                if layer:
+                    layer_names.append(layer["name"])
+
+            if not layer_names:
+                raise ValueError((
+                    "Metadata contain old way of storing layers information."
+                    " It is not possible to identify layers to publish with"
+                    " these data. Please remove Render Pass instances with"
+                    " Subset manager and use Creator tool to recreate them."
+                ))

         render_pass_layers = []
-        for layer_id in layer_ids:
-            layer = layers_by_id.get(layer_id)
+        for layer_name in layer_names:
+            layer = layers_by_name.get(layer_name)
+            # NOTE This is kind of validation before validators?
             if not layer:
-                self.log.warning(f"Layer with id {layer_id} was not found.")
+                self.log.warning(
+                    f"Layer with name {layer_name} was not found."
+                )
                 continue
+
             render_pass_layers.append(layer)
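Note that the layers_by_name comprehension above keeps only the last record for a duplicated layer name, which is exactly the ambiguity the new duplicated-names validator in this PR reports. A tiny illustration with hypothetical data:

layers_data = [
    {"layer_id": 1, "name": "img"},
    {"layer_id": 2, "name": "img"},  # duplicated name
]

# Same shape as the comprehension above: the last record wins silently.
layers_by_name = {layer["name"]: layer for layer in layers_data}
assert layers_by_name["img"]["layer_id"] == 2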
@@ -89,7 +89,15 @@ class CollectWorkfileData(pyblish.api.ContextPlugin):
         # Collect information about layers
         self.log.info("Collecting layers data from workfile")
         layers_data = lib.layers_data()
+        layers_by_name = {}
+        for layer in layers_data:
+            layer_name = layer["name"]
+            if layer_name not in layers_by_name:
+                layers_by_name[layer_name] = []
+            layers_by_name[layer_name].append(layer)
         context.data["layersData"] = layers_data
+        context.data["layersByName"] = layers_by_name
+
         self.log.debug(
             "Layers data:\"{}".format(json.dumps(layers_data, indent=4))
         )
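The grouping loop above is the plain-dict spelling of the collections.defaultdict(list) pattern the loader uses; a short sketch showing both forms build the same name-to-layers mapping:

import collections

layers_data = [
    {"layer_id": 1, "name": "img"},
    {"layer_id": 2, "name": "img"},
    {"layer_id": 3, "name": "bg"},
]

# Plain dict, as in the collector (setdefault is equivalent to the
# "if name not in" guard).
layers_by_name = {}
for layer in layers_data:
    layers_by_name.setdefault(layer["name"], []).append(layer)

# defaultdict, as in the loader.
grouped = collections.defaultdict(list)
for layer in layers_data:
    grouped[layer["name"]].append(layer)

assert layers_by_name == dict(grouped)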
@@ -62,17 +62,20 @@ class ExtractSequence(pyblish.api.Extractor):
             for layer in layers
             if layer["visible"]
         ]
-        layer_ids = [str(layer["layer_id"]) for layer in filtered_layers]
-        if not layer_ids:
+        layer_names = [str(layer["name"]) for layer in filtered_layers]
+        if not layer_names:
             self.log.info(
                 f"None of the layers from the instance"
                 " are visible. Extraction skipped."
             )
             return

+        joined_layer_names = ", ".join(
+            ["\"{}\"".format(name) for name in layer_names]
+        )
         self.log.debug(
-            "Instance has {} layers with ids: {}".format(
-                len(layer_ids), ", ".join(layer_ids)
+            "Instance has {} layers with names: {}".format(
+                len(layer_names), joined_layer_names
             )
         )
         # This is plugin attribe cleanup method
@@ -0,0 +1,43 @@
+import pyblish.api
+
+
+class ValidateLayersGroup(pyblish.api.InstancePlugin):
+    """Validate layer names for publishing are unique for whole workfile."""
+
+    label = "Validate Duplicated Layers Names"
+    order = pyblish.api.ValidatorOrder
+    families = ["renderPass"]
+
+    def process(self, instance):
+        # Prepare layers
+        layers_by_name = instance.context.data["layersByName"]
+
+        # Layers ids of an instance
+        layer_names = instance.data["layer_names"]
+
+        # Check if all layers from render pass are in right group
+        duplicated_layer_names = []
+        for layer_name in layer_names:
+            layers = layers_by_name.get(layer_name)
+            if len(layers) > 1:
+                duplicated_layer_names.append(layer_name)
+
+        # Everything is OK and skip exception
+        if not duplicated_layer_names:
+            return
+
+        layers_msg = ", ".join([
+            "\"{}\"".format(layer_name)
+            for layer_name in duplicated_layer_names
+        ])
+
+        # Raise an error
+        raise AssertionError(
+            (
+                "Layers have duplicated names for instance {}."
+                # Description what's wrong
+                " There are layers with same name and one of them is marked"
+                " for publishing so it is not possible to know which should"
+                " be published. Please look for layers with names: {}"
+            ).format(instance.data["label"], layers_msg)
+        )
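A quick standalone run of the same duplicate check, using a hypothetical layersByName mapping of the shape the workfile collector stores; unlike the plugin, the sketch defaults a missing name to an empty list instead of assuming the name exists:

layers_by_name = {
    "img": [{"layer_id": 1}, {"layer_id": 2}],  # duplicated name
    "bg": [{"layer_id": 3}],
}
layer_names = ["img", "bg"]

duplicated = [
    name for name in layer_names
    if len(layers_by_name.get(name, [])) > 1
]
assert duplicated == ["img"]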
@@ -0,0 +1,42 @@
+import pyblish.api
+
+
+class ValidateMissingLayers(pyblish.api.InstancePlugin):
+    """Validate existence of renderPass layers."""
+
+    label = "Validate Missing Layers Names"
+    order = pyblish.api.ValidatorOrder
+    families = ["renderPass"]
+
+    def process(self, instance):
+        # Prepare layers
+        layers_by_name = instance.context.data["layersByName"]
+
+        # Layers ids of an instance
+        layer_names = instance.data["layer_names"]
+
+        # Check if all layers from render pass are in right group
+        missing_layer_names = []
+        for layer_name in layer_names:
+            layers = layers_by_name.get(layer_name)
+            if not layers:
+                missing_layer_names.append(layer_name)
+
+        # Everything is OK and skip exception
+        if not missing_layer_names:
+            return
+
+        layers_msg = ", ".join([
+            "\"{}\"".format(layer_name)
+            for layer_name in missing_layer_names
+        ])
+
+        # Raise an error
+        raise AssertionError(
+            (
+                "Layers were not found by name for instance \"{}\"."
+                # Description what's wrong
+                " Layer names marked for publishing are not available"
+                " in layers list. Missing layer names: {}"
+            ).format(instance.data["label"], layers_msg)
+        )
@@ -9,25 +9,25 @@ class ValidateLayersGroup(pyblish.api.InstancePlugin):
     """

     label = "Validate Layers Group"
-    order = pyblish.api.ValidatorOrder
+    order = pyblish.api.ValidatorOrder + 0.1
     families = ["renderPass"]

     def process(self, instance):
         # Prepare layers
         layers_data = instance.context.data["layersData"]
-        layers_by_id = {
-            layer["layer_id"]: layer
+        layers_by_name = {
+            layer["name"]: layer
             for layer in layers_data
         }

         # Expected group id for instance layers
         group_id = instance.data["group_id"]
         # Layers ids of an instance
-        layer_ids = instance.data["layer_ids"]
+        layer_names = instance.data["layer_names"]
         # Check if all layers from render pass are in right group
         invalid_layers_by_group_id = collections.defaultdict(list)
-        for layer_id in layer_ids:
-            layer = layers_by_id.get(layer_id)
+        for layer_name in layer_names:
+            layer = layers_by_name.get(layer_name)
             _group_id = layer["group_id"]
             if _group_id != group_id:
                 invalid_layers_by_group_id[_group_id].append(layer)