Mirror of https://github.com/ynput/ayon-core.git (synced 2025-12-24 21:04:40 +01:00)

Merge pull request #2933 from pypeclub/feature/OP-2766_PS-to-new-publisher
Photoshop: New Publisher

Commit e6557c60ff: 25 changed files with 635 additions and 196 deletions
@@ -12,7 +12,10 @@ from .pipeline import (
remove_instance,
install,
uninstall,
containerise
containerise,
get_context_data,
update_context_data,
get_context_title
)
from .plugin import (
PhotoshopLoader,

@@ -43,6 +46,9 @@ __all__ = [
"install",
"uninstall",
"containerise",
"get_context_data",
"update_context_data",
"get_context_title",

# Plugin
"PhotoshopLoader",

@@ -8,6 +8,8 @@ from avalon import io
from openpype.api import Logger
from openpype.lib import register_event_callback
from openpype.pipeline import (
BaseCreator,
LegacyCreator,
register_loader_plugin_path,
register_creator_plugin_path,
deregister_loader_plugin_path,

@@ -149,13 +151,9 @@ def list_instances():
instances = []
layers_meta = stub.get_layers_metadata()
if layers_meta:
for key, instance in layers_meta.items():
schema = instance.get("schema")
if schema and "container" in schema:
continue

instance['uuid'] = key
instances.append(instance)
for instance in layers_meta:
if instance.get("id") == "pyblish.avalon.instance":
instances.append(instance)

return instances

@@ -176,11 +174,18 @@ def remove_instance(instance):
if not stub:
return

stub.remove_instance(instance.get("uuid"))
layer = stub.get_layer(instance.get("uuid"))
if layer:
stub.rename_layer(instance.get("uuid"),
layer.name.replace(stub.PUBLISH_ICON, ''))
inst_id = instance.get("instance_id") or instance.get("uuid")  # legacy
if not inst_id:
log.warning("No instance identifier for {}".format(instance))
return

stub.remove_instance(inst_id)

if instance.get("members"):
item = stub.get_layer(instance["members"][0])
if item:
stub.rename_layer(item.id,
item.name.replace(stub.PUBLISH_ICON, ''))


def _get_stub():

@@ -232,6 +237,34 @@ def containerise(
"members": [str(layer.id)]
}
stub = lib.stub()
stub.imprint(layer, data)
stub.imprint(layer.id, data)

return layer


def get_context_data():
"""Get stored values for context (validation enable/disable etc)"""
meta = _get_stub().get_layers_metadata()
for item in meta:
if item.get("id") == "publish_context":
item.pop("id")
return item

return {}


def update_context_data(data, changes):
"""Store value needed for context"""
item = data
item["id"] = "publish_context"
_get_stub().imprint(item["id"], item)


def get_context_title():
"""Returns title for Creator window"""
import avalon.api

project_name = avalon.api.Session["AVALON_PROJECT"]
asset_name = avalon.api.Session["AVALON_ASSET"]
task_name = avalon.api.Session["AVALON_TASK"]
return "{}/{}/{}".format(project_name, asset_name, task_name)
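The hunk above adds the host-level context hooks the New Publisher expects and exposes them on the Photoshop `api` package. A minimal sketch of how a publisher front end might round-trip context settings through them; this assumes a running Photoshop session with the OpenPype extension connected, and the stored key is an illustrative example, not part of the API:

```python
# Minimal sketch, assuming a live Photoshop connection.
from openpype.hosts.photoshop import api

context = api.get_context_data()            # {} on first run, stored dict afterwards
context["validate_naming_enabled"] = False  # example key only, not defined by the API
api.update_context_data(context, changes={})

print(api.get_context_title())              # e.g. "MyProject/MyAsset/compositing"
```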
@@ -27,6 +27,7 @@ class PSItem(object):
members = attr.ib(factory=list)
long_name = attr.ib(default=None)
color_code = attr.ib(default=None)  # color code of layer
instance_id = attr.ib(default=None)


class PhotoshopServerStub:

@@ -76,13 +77,31 @@ class PhotoshopServerStub:
layer: (PSItem)
layers_meta: full list from Headline (for performance in loops)
Returns:
(dict) of layer metadata stored in PS file

Example:
{
'id': 'pyblish.avalon.container',
'loader': 'ImageLoader',
'members': ['64'],
'name': 'imageMainMiddle',
'namespace': 'Hero_imageMainMiddle_001',
'representation': '6203dc91e80934d9f6ee7d96',
'schema': 'openpype:container-2.0'
}
"""
if layers_meta is None:
layers_meta = self.get_layers_metadata()

return layers_meta.get(str(layer.id))
for layer_meta in layers_meta:
layer_id = layer_meta.get("uuid")  # legacy
if layer_meta.get("members"):
layer_id = layer_meta["members"][0]
if str(layer.id) == str(layer_id):
return layer_meta
print("Unable to find layer metadata for {}".format(layer.id))

def imprint(self, layer, data, all_layers=None, layers_meta=None):
def imprint(self, item_id, data, all_layers=None, items_meta=None):
"""Save layer metadata to Headline field of active document

Stores metadata in format:

@@ -108,28 +127,37 @@ class PhotoshopServerStub:
}] - for loaded instances

Args:
layer (PSItem):
item_id (str):
data(string): json representation for single layer
all_layers (list of PSItem): for performance, could be
injected for usage in loop, if not, single call will be
triggered
layers_meta(string): json representation from Headline
items_meta(string): json representation from Headline
(for performance - provide only if imprint is in
loop - value should be same)
Returns: None
"""
if not layers_meta:
layers_meta = self.get_layers_metadata()
if not items_meta:
items_meta = self.get_layers_metadata()

# json.dumps writes integer values in a dictionary to string, so
# anticipating it here.
if str(layer.id) in layers_meta and layers_meta[str(layer.id)]:
if data:
layers_meta[str(layer.id)].update(data)
item_id = str(item_id)
is_new = True
result_meta = []
for item_meta in items_meta:
if ((item_meta.get('members') and
item_id == str(item_meta.get('members')[0])) or
item_meta.get("instance_id") == item_id):
is_new = False
if data:
item_meta.update(data)
result_meta.append(item_meta)
else:
layers_meta.pop(str(layer.id))
else:
layers_meta[str(layer.id)] = data
result_meta.append(item_meta)

if is_new:
result_meta.append(data)

# Ensure only valid ids are stored.
if not all_layers:

@@ -137,12 +165,14 @@ class PhotoshopServerStub:
layer_ids = [layer.id for layer in all_layers]
cleaned_data = []

for layer_id in layers_meta:
if int(layer_id) in layer_ids:
cleaned_data.append(layers_meta[layer_id])
for item in result_meta:
if item.get("members"):
if int(item["members"][0]) not in layer_ids:
continue

cleaned_data.append(item)

payload = json.dumps(cleaned_data, indent=4)

self.websocketserver.call(
self.client.call('Photoshop.imprint', payload=payload)
)
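The reworked `imprint` keys metadata on an item id (the instance id, or the first member layer id) instead of a PSItem, and accepts a pre-fetched `items_meta` for loops, as its docstring notes. A minimal usage sketch, assuming a connected Photoshop session; the ids and payload values below are illustrative only:

```python
# Minimal sketch: batch metadata updates reusing a single Headline read.
from openpype.hosts.photoshop import api

stub = api.stub()
items_meta = stub.get_layers_metadata()

updates = {
    "some-instance-id": {"subset": "imageMain"},   # example ids and values only
    "another-instance-id": {"active": False},
}
for item_id, data in updates.items():
    # items_meta is passed so the stub does not re-read the workfile each time
    stub.imprint(item_id, data, items_meta=items_meta)
```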
@@ -370,38 +400,27 @@ class PhotoshopServerStub:
(Headline accessible by File > File Info)

Returns:
(string): - json documents
(list)
example:
{"8":{"active":true,"subset":"imageBG",
"family":"image","id":"pyblish.avalon.instance",
"asset":"Town"}}
8 is layer(group) id - used for deletion, update etc.
"""
layers_data = {}
res = self.websocketserver.call(self.client.call('Photoshop.read'))
layers_data = []
try:
layers_data = json.loads(res)
if res:
layers_data = json.loads(res)
except json.decoder.JSONDecodeError:
pass
raise ValueError("{} cannot be parsed, recreate meta".format(res))
# format of metadata changed from {} to [] because of standardization
# keep current implementation logic as its working
if not isinstance(layers_data, dict):
temp_layers_meta = {}
for layer_meta in layers_data:
layer_id = layer_meta.get("uuid")
if not layer_id:
layer_id = layer_meta.get("members")[0]

temp_layers_meta[layer_id] = layer_meta
layers_data = temp_layers_meta
else:
# legacy version of metadata
if isinstance(layers_data, dict):
for layer_id, layer_meta in layers_data.items():
if layer_meta.get("schema") != "openpype:container-2.0":
layer_meta["uuid"] = str(layer_id)
else:
layer_meta["members"] = [str(layer_id)]

layers_data = list(layers_data.values())
return layers_data

def import_smart_object(self, path, layer_name, as_reference=False):
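This hunk migrates the Headline metadata from a dict keyed by layer id to a plain list; legacy dicts are converted on read by stamping `uuid` (instances) or `members` (containers). A small standalone sketch of that conversion, with made-up sample metadata:

```python
# Standalone sketch of the legacy dict -> list conversion performed on read.
# The sample metadata below is illustrative, not taken from a real workfile.
legacy = {
    "8": {"active": True, "subset": "imageBG", "family": "image",
          "id": "pyblish.avalon.instance", "asset": "Town"},
    "64": {"schema": "openpype:container-2.0", "loader": "ImageLoader",
           "name": "imageMainMiddle"},
}

converted = []
for layer_id, meta in legacy.items():
    if meta.get("schema") != "openpype:container-2.0":
        meta["uuid"] = str(layer_id)        # instance: keep legacy uuid
    else:
        meta["members"] = [str(layer_id)]   # container: layer becomes a member
    converted.append(meta)

print(converted)  # list form, as get_layers_metadata now returns
```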
@@ -472,11 +491,12 @@ class PhotoshopServerStub:
)

def remove_instance(self, instance_id):
cleaned_data = {}
cleaned_data = []

for key, instance in self.get_layers_metadata().items():
if key != instance_id:
cleaned_data[key] = instance
for item in self.get_layers_metadata():
inst_id = item.get("instance_id") or item.get("uuid")
if inst_id != instance_id:
cleaned_data.append(item)

payload = json.dumps(cleaned_data, indent=4)

@@ -528,6 +548,7 @@ class PhotoshopServerStub:
d.get('type'),
d.get('members'),
d.get('long_name'),
d.get("color_code")
d.get("color_code"),
d.get("instance_id")
))
return ret

@@ -1,99 +1,145 @@
from Qt import QtWidgets
from openpype.pipeline import LegacyCreator
from openpype.hosts.photoshop import api as photoshop
from avalon import api as avalon_api
from openpype.hosts.photoshop import api
from openpype.lib import BoolDef
from openpype.pipeline import (
Creator,
CreatedInstance
)


class CreateImage(LegacyCreator):
"""Image folder for publish."""

name = "imageDefault"
class ImageCreator(Creator):
"""Creates image instance for publishing."""
identifier = "image"
label = "Image"
family = "image"
defaults = ["Main"]
description = "Image creator"

def process(self):
groups = []
layers = []
create_group = False
def collect_instances(self):
for instance_data in api.list_instances():
# legacy instances have family=='image'
creator_id = (instance_data.get("creator_identifier") or
instance_data.get("family"))

stub = photoshop.stub()
if (self.options or {}).get("useSelection"):
multiple_instances = False
selection = stub.get_selected_layers()
self.log.info("selection {}".format(selection))
if len(selection) > 1:
# Ask user whether to create one image or image per selected
# item.
msg_box = QtWidgets.QMessageBox()
msg_box.setIcon(QtWidgets.QMessageBox.Warning)
msg_box.setText(
"Multiple layers selected."
"\nDo you want to make one image per layer?"
if creator_id == self.identifier:
instance_data = self._handle_legacy(instance_data)
layer = api.stub().get_layer(instance_data["members"][0])
instance_data["layer"] = layer
instance = CreatedInstance.from_existing(
instance_data, self
)
msg_box.setStandardButtons(
QtWidgets.QMessageBox.Yes |
QtWidgets.QMessageBox.No |
QtWidgets.QMessageBox.Cancel
)
ret = msg_box.exec_()
if ret == QtWidgets.QMessageBox.Yes:
multiple_instances = True
elif ret == QtWidgets.QMessageBox.Cancel:
return
self._add_instance_to_context(instance)

if multiple_instances:
for item in selection:
if item.group:
groups.append(item)
else:
layers.append(item)
def create(self, subset_name_from_ui, data, pre_create_data):
groups_to_create = []
top_layers_to_wrap = []
create_empty_group = False

stub = api.stub()  # only after PS is up
top_level_selected_items = stub.get_selected_layers()
if pre_create_data.get("use_selection"):
only_single_item_selected = len(top_level_selected_items) == 1
for selected_item in top_level_selected_items:
if (
only_single_item_selected or
pre_create_data.get("create_multiple")):
if selected_item.group:
groups_to_create.append(selected_item)
else:
top_layers_to_wrap.append(selected_item)
else:
group = stub.group_selected_layers(self.name)
groups.append(group)
group = stub.group_selected_layers(subset_name_from_ui)
groups_to_create.append(group)

elif len(selection) == 1:
# One selected item. Use group if its a LayerSet (group), else
# create a new group.
if selection[0].group:
groups.append(selection[0])
else:
layers.append(selection[0])
elif len(selection) == 0:
# No selection creates an empty group.
create_group = True
else:
group = stub.create_group(self.name)
groups.append(group)
if not groups_to_create and not top_layers_to_wrap:
group = stub.create_group(subset_name_from_ui)
groups_to_create.append(group)

if create_group:
group = stub.create_group(self.name)
groups.append(group)

for layer in layers:
# wrap each top level layer into separate new group
for layer in top_layers_to_wrap:
stub.select_layers([layer])
group = stub.group_selected_layers(layer.name)
groups.append(group)
groups_to_create.append(group)

creator_subset_name = self.data["subset"]
for group in groups:
long_names = []
group.name = group.name.replace(stub.PUBLISH_ICON, ''). \
replace(stub.LOADED_ICON, '')
creating_multiple_groups = len(groups_to_create) > 1
for group in groups_to_create:
subset_name = subset_name_from_ui  # reset to name from creator UI
layer_names_in_hierarchy = []
created_group_name = self._clean_highlights(stub, group.name)

subset_name = creator_subset_name
if len(groups) > 1:
if creating_multiple_groups:
# concatenate with layer name to differentiate subsets
subset_name += group.name.title().replace(" ", "")

if group.long_name:
for directory in group.long_name[::-1]:
name = directory.replace(stub.PUBLISH_ICON, '').\
replace(stub.LOADED_ICON, '')
long_names.append(name)
name = self._clean_highlights(stub, directory)
layer_names_in_hierarchy.append(name)

self.data.update({"subset": subset_name})
self.data.update({"uuid": str(group.id)})
self.data.update({"long_name": "_".join(long_names)})
stub.imprint(group, self.data)
data.update({"subset": subset_name})
data.update({"members": [str(group.id)]})
data.update({"long_name": "_".join(layer_names_in_hierarchy)})

new_instance = CreatedInstance(self.family, subset_name, data,
self)

stub.imprint(new_instance.get("instance_id"),
new_instance.data_to_store())
self._add_instance_to_context(new_instance)
# reusing existing group, need to rename afterwards
if not create_group:
stub.rename_layer(group.id, stub.PUBLISH_ICON + group.name)
if not create_empty_group:
stub.rename_layer(group.id,
stub.PUBLISH_ICON + created_group_name)

def update_instances(self, update_list):
self.log.debug("update_list:: {}".format(update_list))
for created_inst, _changes in update_list:
if created_inst.get("layer"):
# not storing PSItem layer to metadata
created_inst.pop("layer")
api.stub().imprint(created_inst.get("instance_id"),
created_inst.data_to_store())

def remove_instances(self, instances):
for instance in instances:
api.remove_instance(instance)
self._remove_instance_from_context(instance)

def get_default_variants(self):
return [
"Main"
]

def get_pre_create_attr_defs(self):
output = [
BoolDef("use_selection", default=True,
label="Create only for selected"),
BoolDef("create_multiple",
default=True,
label="Create separate instance for each selected")
]
return output

def get_detail_description(self):
return """Creator for Image instances"""

def _handle_legacy(self, instance_data):
"""Converts old instances to new format."""
if not instance_data.get("members"):
instance_data["members"] = [instance_data.get("uuid")]

if instance_data.get("uuid"):
# uuid not needed, replaced with unique instance_id
api.stub().remove_instance(instance_data.get("uuid"))
instance_data.pop("uuid")

if not instance_data.get("task"):
instance_data["task"] = avalon_api.Session.get("AVALON_TASK")

if not instance_data.get("variant"):
instance_data["variant"] = ''

return instance_data

def _clean_highlights(self, stub, item):
return item.replace(stub.PUBLISH_ICON, '').replace(stub.LOADED_ICON,
'')
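`_handle_legacy` upgrades instances created by the old creator so the New Publisher can load them. A rough illustration of the data shape before and after that conversion, with invented values; the real method additionally removes the old uuid-based record through the stub and fills `task` from the Avalon session:

```python
# Illustrative only: approximate shape of a legacy instance dict before and
# after a _handle_legacy-style conversion (all values are invented).
legacy_instance = {
    "id": "pyblish.avalon.instance",
    "family": "image",
    "subset": "imageMain",
    "uuid": "64",            # legacy layer/group id
}

converted = dict(legacy_instance)
converted["members"] = [converted.pop("uuid")]   # uuid becomes members[0]
converted.setdefault("task", "compositing")      # normally taken from AVALON_TASK
converted.setdefault("variant", "")

print(converted)
```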
openpype/hosts/photoshop/plugins/create/create_legacy_image.py (new file, 100 lines)
@@ -0,0 +1,100 @@
from Qt import QtWidgets
from openpype.pipeline import create
from openpype.hosts.photoshop import api as photoshop


class CreateImage(create.LegacyCreator):
"""Image folder for publish."""

name = "imageDefault"
label = "Image"
family = "image"
defaults = ["Main"]

def process(self):
groups = []
layers = []
create_group = False

stub = photoshop.stub()
if (self.options or {}).get("useSelection"):
multiple_instances = False
selection = stub.get_selected_layers()
self.log.info("selection {}".format(selection))
if len(selection) > 1:
# Ask user whether to create one image or image per selected
# item.
msg_box = QtWidgets.QMessageBox()
msg_box.setIcon(QtWidgets.QMessageBox.Warning)
msg_box.setText(
"Multiple layers selected."
"\nDo you want to make one image per layer?"
)
msg_box.setStandardButtons(
QtWidgets.QMessageBox.Yes |
QtWidgets.QMessageBox.No |
QtWidgets.QMessageBox.Cancel
)
ret = msg_box.exec_()
if ret == QtWidgets.QMessageBox.Yes:
multiple_instances = True
elif ret == QtWidgets.QMessageBox.Cancel:
return

if multiple_instances:
for item in selection:
if item.group:
groups.append(item)
else:
layers.append(item)
else:
group = stub.group_selected_layers(self.name)
groups.append(group)

elif len(selection) == 1:
# One selected item. Use group if its a LayerSet (group), else
# create a new group.
if selection[0].group:
groups.append(selection[0])
else:
layers.append(selection[0])
elif len(selection) == 0:
# No selection creates an empty group.
create_group = True
else:
group = stub.create_group(self.name)
groups.append(group)

if create_group:
group = stub.create_group(self.name)
groups.append(group)

for layer in layers:
stub.select_layers([layer])
group = stub.group_selected_layers(layer.name)
groups.append(group)

creator_subset_name = self.data["subset"]
for group in groups:
long_names = []
group.name = group.name.replace(stub.PUBLISH_ICON, ''). \
replace(stub.LOADED_ICON, '')

subset_name = creator_subset_name
if len(groups) > 1:
subset_name += group.name.title().replace(" ", "")

if group.long_name:
for directory in group.long_name[::-1]:
name = directory.replace(stub.PUBLISH_ICON, '').\
replace(stub.LOADED_ICON, '')
long_names.append(name)

self.data.update({"subset": subset_name})
self.data.update({"uuid": str(group.id)})
self.data.update({"members": [str(group.id)]})
self.data.update({"long_name": "_".join(long_names)})
stub.imprint(group, self.data)
# reusing existing group, need to rename afterwards
if not create_group:
stub.rename_layer(group.id, stub.PUBLISH_ICON + group.name)
openpype/hosts/photoshop/plugins/create/workfile_creator.py (new file, 73 lines)
@@ -0,0 +1,73 @@
from avalon import io

import openpype.hosts.photoshop.api as api
from openpype.pipeline import (
AutoCreator,
CreatedInstance
)


class PSWorkfileCreator(AutoCreator):
identifier = "workfile"
family = "workfile"

def get_instance_attr_defs(self):
return []

def collect_instances(self):
for instance_data in api.list_instances():
creator_id = instance_data.get("creator_identifier")
if creator_id == self.identifier:
subset_name = instance_data["subset"]
instance = CreatedInstance(
self.family, subset_name, instance_data, self
)
self._add_instance_to_context(instance)

def update_instances(self, update_list):
# nothing to change on workfiles
pass

def create(self, options=None):
existing_instance = None
for instance in self.create_context.instances:
if instance.family == self.family:
existing_instance = instance
break

variant = ''
project_name = io.Session["AVALON_PROJECT"]
asset_name = io.Session["AVALON_ASSET"]
task_name = io.Session["AVALON_TASK"]
host_name = io.Session["AVALON_APP"]
if existing_instance is None:
asset_doc = io.find_one({"type": "asset", "name": asset_name})
subset_name = self.get_subset_name(
variant, task_name, asset_doc, project_name, host_name
)
data = {
"asset": asset_name,
"task": task_name,
"variant": variant
}
data.update(self.get_dynamic_data(
variant, task_name, asset_doc, project_name, host_name
))

new_instance = CreatedInstance(
self.family, subset_name, data, self
)
self._add_instance_to_context(new_instance)
api.stub().imprint(new_instance.get("instance_id"),
new_instance.data_to_store())

elif (
existing_instance["asset"] != asset_name
or existing_instance["task"] != task_name
):
asset_doc = io.find_one({"type": "asset", "name": asset_name})
subset_name = self.get_subset_name(
variant, task_name, asset_doc, project_name, host_name
)
existing_instance["asset"] = asset_name
existing_instance["task"] = task_name
@@ -61,7 +61,7 @@ class ImageLoader(photoshop.PhotoshopLoader):
)

stub.imprint(
layer, {"representation": str(representation["_id"])}
layer.id, {"representation": str(representation["_id"])}
)

def remove(self, container):

@@ -73,7 +73,7 @@ class ImageLoader(photoshop.PhotoshopLoader):
stub = self.get_stub()

layer = container.pop("layer")
stub.imprint(layer, {})
stub.imprint(layer.id, {})
stub.delete_layer(layer.id)

def switch(self, container, representation):

@@ -61,7 +61,7 @@ class ReferenceLoader(photoshop.PhotoshopLoader):
)

stub.imprint(
layer, {"representation": str(representation["_id"])}
layer.id, {"representation": str(representation["_id"])}
)

def remove(self, container):

@@ -72,7 +72,7 @@ class ReferenceLoader(photoshop.PhotoshopLoader):
"""
stub = self.get_stub()
layer = container.pop("layer")
stub.imprint(layer, {})
stub.imprint(layer.id, {})
stub.delete_layer(layer.id)

def switch(self, container, representation):
@@ -1,3 +1,4 @@
import pprint
from avalon import api
import pyblish.api

@@ -9,8 +10,8 @@ from openpype.lib import prepare_template_data
class CollectInstances(pyblish.api.ContextPlugin):
"""Gather instances by LayerSet and file metadata

This collector takes into account assets that are associated with
an LayerSet and marked with a unique identifier;
Collects publishable instances from file metadata or enhance
already collected by creator (family == "image").

If no image instances are explicitly created, it looks if there is value
in `flatten_subset_template` (configurable in Settings), in that case it

@@ -20,7 +21,7 @@ class CollectInstances(pyblish.api.ContextPlugin):
id (str): "pyblish.avalon.instance"
"""

label = "Instances"
label = "Collect Instances"
order = pyblish.api.CollectorOrder
hosts = ["photoshop"]
families_mapping = {

@@ -30,42 +31,53 @@ class CollectInstances(pyblish.api.ContextPlugin):
flatten_subset_template = ""

def process(self, context):
instance_by_layer_id = {}
for instance in context:
if (
instance.data["family"] == "image" and
instance.data.get("members")):
layer_id = str(instance.data["members"][0])
instance_by_layer_id[layer_id] = instance

stub = photoshop.stub()
layers = stub.get_layers()
layer_items = stub.get_layers()
layers_meta = stub.get_layers_metadata()
instance_names = []

all_layer_ids = []
for layer in layers:
all_layer_ids.append(layer.id)
layer_data = stub.read(layer, layers_meta)
for layer_item in layer_items:
layer_meta_data = stub.read(layer_item, layers_meta)
all_layer_ids.append(layer_item.id)

# Skip layers without metadata.
if layer_data is None:
if layer_meta_data is None:
continue

# Skip containers.
if "container" in layer_data["id"]:
if "container" in layer_meta_data["id"]:
continue

# child_layers = [*layer.Layers]
# self.log.debug("child_layers {}".format(child_layers))
# if not child_layers:
# self.log.info("%s skipped, it was empty." % layer.Name)
# continue
# active might not be in legacy meta
if not layer_meta_data.get("active", True):
continue

instance = context.create_instance(layer_data["subset"])
instance.data["layer"] = layer
instance.data.update(layer_data)
instance = instance_by_layer_id.get(str(layer_item.id))
if instance is None:
instance = context.create_instance(layer_meta_data["subset"])

instance.data["layer"] = layer_item
instance.data.update(layer_meta_data)
instance.data["families"] = self.families_mapping[
layer_data["family"]
layer_meta_data["family"]
]
instance.data["publish"] = layer.visible
instance_names.append(layer_data["subset"])
instance.data["publish"] = layer_item.visible
instance_names.append(layer_meta_data["subset"])

# Produce diagnostic message for any graphical
# user interface interested in visualising it.
self.log.info("Found: \"%s\" " % instance.data["name"])
self.log.info("instance: {} ".format(instance.data))
self.log.info("instance: {} ".format(
pprint.pformat(instance.data, indent=4)))

if len(instance_names) != len(set(instance_names)):
self.log.warning("Duplicate instances found. " +
@@ -1,3 +1,11 @@
"""
Requires:
None

Provides:
instance     -> family ("review")
"""

import os

import pyblish.api

@@ -6,11 +14,16 @@ from openpype.lib import get_subset_name_with_asset_doc


class CollectReview(pyblish.api.ContextPlugin):
"""Gather the active document as review instance."""
"""Gather the active document as review instance.

Triggers once even if no 'image' is published as by defaults it creates
flatten image from a workfile.
"""

label = "Collect Review"
label = "Review"
order = pyblish.api.CollectorOrder + 0.1
hosts = ["photoshop"]
order = pyblish.api.CollectorOrder + 0.1

def process(self, context):
family = "review"

@@ -23,16 +36,13 @@ class CollectReview(pyblish.api.ContextPlugin):
host_name=context.data["hostName"]
)

file_path = context.data["currentFile"]
base_name = os.path.basename(file_path)

instance = context.create_instance(subset)
instance.data.update({
"subset": subset,
"label": base_name,
"name": base_name,
"label": subset,
"name": subset,
"family": family,
"families": ["ftrack"],
"families": [],
"representations": [],
"asset": os.environ["AVALON_ASSET"]
})
@@ -12,6 +12,13 @@ class CollectWorkfile(pyblish.api.ContextPlugin):
hosts = ["photoshop"]

def process(self, context):
existing_instance = None
for instance in context:
if instance.data["family"] == "workfile":
self.log.debug("Workfile instance found, won't create new")
existing_instance = instance
break

family = "workfile"
subset = get_subset_name_with_asset_doc(
family,

@@ -27,16 +34,19 @@ class CollectWorkfile(pyblish.api.ContextPlugin):
base_name = os.path.basename(file_path)

# Create instance
instance = context.create_instance(subset)
instance.data.update({
"subset": subset,
"label": base_name,
"name": base_name,
"family": family,
"families": [],
"representations": [],
"asset": os.environ["AVALON_ASSET"]
})
if existing_instance is None:
instance = context.create_instance(subset)
instance.data.update({
"subset": subset,
"label": base_name,
"name": base_name,
"family": family,
"families": [],
"representations": [],
"asset": os.environ["AVALON_ASSET"]
})
else:
instance = existing_instance

# creating representation
_, ext = os.path.splitext(file_path)
@@ -16,7 +16,6 @@ class ExtractImage(openpype.api.Extractor):
formats = ["png", "jpg"]

def process(self, instance):

staging_dir = self.staging_dir(instance)
self.log.info("Outputting image to {}".format(staging_dir))
@@ -0,0 +1,21 @@
<?xml version="1.0" encoding="UTF-8"?>
<root>
<error id="main">
<title>Subset name</title>
<description>
## Invalid subset or layer name

Subset or layer name cannot contain specific characters (spaces etc) which could cause issue when subset name is used in a published file name.
{msg}

### How to repair?

You can fix this with "repair" button on the right.
</description>
<detail>
### __Detailed Info__ (optional)

Not all characters are available in a file names on all OS. Wrong characters could be configured in Settings.
</detail>
</error>
</root>
@@ -0,0 +1,14 @@
<?xml version="1.0" encoding="UTF-8"?>
<root>
<error id="main">
<title>Subset not unique</title>
<description>
## Non unique subset name found

Non unique subset names: '{non_unique}'
### How to repair?

Remove offending instance, rename it to have unique name. Maybe layer name wasn't used for multiple instances?
</description>
</error>
</root>
@@ -2,6 +2,7 @@ import re

import pyblish.api
import openpype.api
from openpype.pipeline import PublishXmlValidationError
from openpype.hosts.photoshop import api as photoshop


@@ -22,33 +23,34 @@ class ValidateNamingRepair(pyblish.api.Action):
failed.append(result["instance"])

invalid_chars, replace_char = plugin.get_replace_chars()
self.log.info("{} --- {}".format(invalid_chars, replace_char))
self.log.debug("{} --- {}".format(invalid_chars, replace_char))

# Apply pyblish.logic to get the instances for the plug-in
instances = pyblish.api.instances_by_plugin(failed, plugin)
stub = photoshop.stub()
for instance in instances:
self.log.info("validate_naming instance {}".format(instance))
layer_item = instance.data["layer"]
metadata = stub.read(layer_item)
self.log.info("metadata instance {}".format(metadata))
layer_name = None
if metadata.get("uuid"):
layer_data = stub.get_layer(metadata["uuid"])
self.log.info("layer_data {}".format(layer_data))
if layer_data:
layer_name = re.sub(invalid_chars,
replace_char,
layer_data.name)
self.log.debug("validate_naming instance {}".format(instance))
current_layer_state = stub.get_layer(instance.data["layer"].id)
self.log.debug("current_layer{}".format(current_layer_state))

stub.rename_layer(instance.data["uuid"], layer_name)
layer_meta = stub.read(current_layer_state)
instance_id = (layer_meta.get("instance_id") or
layer_meta.get("uuid"))
if not instance_id:
self.log.warning("Unable to repair, cannot find layer")
continue

layer_name = re.sub(invalid_chars,
replace_char,
current_layer_state.name)

stub.rename_layer(current_layer_state.id, layer_name)

subset_name = re.sub(invalid_chars, replace_char,
instance.data["subset"])

layer_item.name = layer_name or subset_name
metadata["subset"] = subset_name
stub.imprint(layer_item, metadata)
layer_meta["subset"] = subset_name
stub.imprint(instance_id, layer_meta)

return True

@@ -73,11 +75,18 @@ class ValidateNaming(pyblish.api.InstancePlugin):
help_msg = ' Use Repair action (A) in Pyblish to fix it.'
msg = "Name \"{}\" is not allowed.{}".format(instance.data["name"],
help_msg)
assert not re.search(self.invalid_chars, instance.data["name"]), msg

formatting_data = {"msg": msg}
if re.search(self.invalid_chars, instance.data["name"]):
raise PublishXmlValidationError(self, msg,
formatting_data=formatting_data)

msg = "Subset \"{}\" is not allowed.{}".format(instance.data["subset"],
help_msg)
assert not re.search(self.invalid_chars, instance.data["subset"]), msg
formatting_data = {"msg": msg}
if re.search(self.invalid_chars, instance.data["subset"]):
raise PublishXmlValidationError(self, msg,
formatting_data=formatting_data)

@classmethod
def get_replace_chars(cls):
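The repair action above now resolves the layer from the instance's live layer state and rewrites both the layer name and the stored subset with `re.sub`. A tiny standalone sketch of that character replacement, using a hypothetical pattern; the real pattern and replacement character come from the plugin's Settings via `get_replace_chars()`:

```python
import re

# Hypothetical values; in production they come from plugin.get_replace_chars().
invalid_chars = r"[ /\\]"   # assumed pattern: spaces and slashes are invalid
replace_char = "_"

layer_name = "image Main/Middle"
print(re.sub(invalid_chars, replace_char, layer_name))  # image_Main_Middle
```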
@@ -1,6 +1,7 @@
import collections
import pyblish.api
import openpype.api
from openpype.pipeline import PublishXmlValidationError


class ValidateSubsetUniqueness(pyblish.api.ContextPlugin):

@@ -27,4 +28,10 @@ class ValidateSubsetUniqueness(pyblish.api.ContextPlugin):
if count > 1]
msg = ("Instance subset names {} are not unique. ".format(non_unique) +
"Remove duplicates via SubsetManager.")
assert not non_unique, msg
formatting_data = {
"non_unique": ",".join(non_unique)
}

if non_unique:
raise PublishXmlValidationError(self, msg,
formatting_data=formatting_data)
@@ -89,7 +89,9 @@ class BaseCreator:
@property
def log(self):
if self._log is None:
self._log = logging.getLogger(self.__class__.__name__)
from openpype.api import Logger

self._log = Logger.get_logger(self.__class__.__name__)
return self._log

def _add_instance_to_context(self, instance):
@@ -349,6 +349,18 @@
"tasks": [],
"add_ftrack_family": true,
"advanced_filtering": []
},
{
"hosts": [
"photoshop"
],
"families": [
"review"
],
"task_types": [],
"tasks": [],
"add_ftrack_family": true,
"advanced_filtering": []
}
]
},
@@ -111,3 +111,67 @@ You can switch to a previous version of the image or update to the latest.




### New Publisher

All previous screenshots came from the regular [pyblish](https://pyblish.com/) process; a different UI is also available. This process extends the existing implementation and adds new functionality.

To test this in Photoshop, the artist first needs to enable the experimental `New publisher` in Settings. (Tray > Settings > Experimental tools)


A new dialog opens after clicking the `Experimental tools` button in the OpenPype extension menu.


After you click this button, the following dialog shows up.



You can see the first instance, called `workfileYourTaskName`. (The name depends on the studio naming convention for Photoshop workfiles.) This instance is a so-called "automatic" instance:
it was created without any action by the artist. You shouldn't delete this instance, as it might hold values needed for future publishing, but you can choose to skip it
from publishing (by toggling the pill button inside the rectangle denoting the instance).

The New Publisher allows publishing into a different context: just click on a workfile instance, update `Variant`, `Asset` or `Task` in the form in the middle, and don't forget to click the 'Confirm' button.

Similarly to the old publishing approach, you need to create instances for everything you want to publish. Initiate this by clicking the '+' sign in the bottom left corner.



In this dialog you can select the family for the published layer or group. Currently only 'image' is implemented.

On the right-hand side you can see creator attributes:
- `Create only for selected` - mimics the `Use selected` option of the regular publish
- `Create separate instance for each selected` - whether a separate instance should be created for each layer when multiple layers are selected



Here you can see a newly created instance of the image family. (The name depends on the studio naming convention for the image family.) You can disable an instance from publishing in the same fashion as a workfile instance.
You can also delete an instance by selecting it and clicking the trashcan icon (next to the plus button on the bottom left).

Buttons on the bottom right are for:
- `Refresh publishing` - resets the publishing process to its starting position - useful if a previous publish failed, or you changed the configuration of a publish
- `Stop/pause publishing` - if you would like to pause the publishing process at any time
- `Validate` - if you would like to run only the collecting and validating phases (nothing will be published yet)
- `Publish` - the standard way to kick off the full publishing process

In the unfortunate case of an error during publishing, you will receive this kind of error dialog.



In this case the issue is that you are publishing two or more instances with the same subset name ('imageMaing'). If the error is recoverable by the artist, you should
see helpful information in the `How to repair?` section, or fix it automatically by clicking the 'Wrench' button on the right, if present.

If you would like to ask an admin or support for help, you can use any of the three buttons on the bottom left:
- `Copy report` - copies the full publishing log to the clipboard
- `Export and save report` - saves the log into a file for sending via mail or any communication tool
- `Show details` - switches to a more detailed list of published instances and plugins, similar to the old pyblish list

If you are able to fix the workfile yourself, use the first button on the right to reset the UI to its initial state before publish. (Click the `Publish` button to start again.)

The new publishing process should be backward compatible, e.g. if you have a workfile with instances created in the previous publishing approach, they will be translated automatically and
can be used right away.

If you create instances in the New Publisher, you cannot use them in the old approach though!

If you hit unexpected behaviour with old instances, contact support first; then you can try some steps to recover your publish: delete the instances in the New Publisher UI, or try `Subset manager` in the extension menu.
The nuclear option is to purge the workfile metadata in `File > File Info > Origin > Headline`. This is only for the most determined daredevils though!
BIN  website/docs/assets/artist_photoshop_new_publisher_instance.png (new file, 21 KiB)
BIN  (file name not shown in this view) (new file, 27 KiB)
BIN  (file name not shown in this view) (new file, 26 KiB)
BIN  website/docs/assets/artist_photoshop_new_publisher_workfile.png (new file, 22 KiB)
BIN  website/docs/assets/experimental_tools_menu.png (new file, 9.1 KiB)
BIN  website/docs/assets/experimental_tools_settings.png (new file, 8.3 KiB)