mirror of
https://github.com/ynput/ayon-core.git
synced 2025-12-25 05:14:40 +01:00
Merge pull request #3093 from pypeclub/chore/OP-3113_SAP-ExtractBGMainGroups
StandalonePublisher: removed Extract Background plugins
This commit is contained in:
commit
a1c0179a46
5 changed files with 2 additions and 733 deletions
|
|
@ -1,70 +0,0 @@
|
|||
import copy
|
||||
import pyblish.api
|
||||
from pprint import pformat
|
||||
|
||||
|
||||
class CollectBatchInstances(pyblish.api.InstancePlugin):
    """Expand a batch instance into one publish instance per preset subset.

    Each subset defined in the ``subsets`` preset for the incoming family
    spawns a new context instance carrying a deep copy of the source
    instance's data; the source batch instance is then removed from the
    context.
    """

    label = "Collect Batch Instances"
    order = pyblish.api.CollectorOrder + 0.489
    hosts = ["standalonepublisher"]
    families = ["background_batch"]

    # presets
    default_subset_task = {
        "background_batch": "background"
    }
    subsets = {
        "background_batch": {
            "backgroundLayout": {
                "task": "background",
                "family": "backgroundLayout"
            },
            "backgroundComp": {
                "task": "background",
                "family": "backgroundComp"
            },
            "workfileBackground": {
                "task": "background",
                "family": "workfile"
            }
        }
    }
    unchecked_by_default = []

    def process(self, instance):
        context = instance.context
        source_data = instance.data
        asset_name = source_data["asset"]
        family = source_data["family"]

        fallback_task = self.default_subset_task.get(family)
        for subset_name, subset_data in self.subsets[family].items():
            instance_name = f"{asset_name}_{subset_name}"

            # A task from the subset preset wins; otherwise use the
            # per-family default task.
            task_name = subset_data.get("task") or fallback_task

            # Spawn the new instance in the shared context.
            new_instance = context.create_instance(instance_name)

            # Deep-copy everything except the name so mutable values are
            # not shared between the spawned instances.
            for key, value in source_data.items():
                if key == "name":
                    continue
                new_instance.data[key] = copy.deepcopy(value)

            # Preset subset data overrides the copied values.
            new_instance.data.update(subset_data)

            new_instance.data["label"] = instance_name
            new_instance.data["subset"] = subset_name
            new_instance.data["task"] = task_name

            if subset_name in self.unchecked_by_default:
                new_instance.data["publish"] = False

            self.log.info(f"Created new instance: {instance_name}")
            self.log.debug(f"_ inst_data: {pformat(new_instance.data)}")

        # The batch instance is fully replaced by the spawned instances.
        context.remove(instance)
|
||||
|
|
@ -1,243 +0,0 @@
|
|||
import os
|
||||
import json
|
||||
import copy
|
||||
|
||||
import openpype.api
|
||||
from openpype.pipeline import legacy_io
|
||||
|
||||
PSDImage = None
|
||||
|
||||
|
||||
class ExtractBGForComp(openpype.api.Extractor):
    """Extract per-layer PNGs and a JSON manifest from a background PSD.

    Processes ``backgroundComp`` instances: exports each allowed top-level
    PSD group's layers as PNG files, writes a JSON description of the
    layer hierarchy, and replaces the PSD representation with the JSON
    representation. File copies are queued via ``instance.data["transfers"]``.
    """

    label = "Extract Background for Compositing"
    families = ["backgroundComp"]
    hosts = ["standalonepublisher"]

    # Family the instance is switched to by `redo_global_plugins`.
    new_instance_family = "background"

    # Presetable
    # Only top-level PSD groups with these names (case-insensitive) are
    # exported.
    allowed_group_names = [
        "OL", "BG", "MG", "FG", "SB", "UL", "SKY", "Field Guide", "Field_Guide",
        "ANIM"
    ]

    def process(self, instance):
        # Check if python module `psd_tools` is installed
        # NOTE: imported lazily and stored in the module-level `PSDImage`
        # so the plugin can be discovered even without psd-tools installed.
        try:
            global PSDImage
            from psd_tools import PSDImage
        except Exception:
            raise AssertionError(
                "BUG: Python module `psd-tools` is not installed!"
            )

        # Lowercase the preset names once so membership tests below can be
        # case-insensitive.
        self.allowed_group_names = [
            name.lower()
            for name in self.allowed_group_names
        ]

        # Re-apply family/version/anatomy data before extraction.
        self.redo_global_plugins(instance)

        repres = instance.data.get("representations")
        if not repres:
            self.log.info("There are no representations on instance.")
            return

        if not instance.data.get("transfers"):
            instance.data["transfers"] = []

        # Prepare staging dir
        staging_dir = self.staging_dir(instance)
        if not os.path.exists(staging_dir):
            os.makedirs(staging_dir)

        # Iterate a tuple copy because the list is mutated in the loop.
        for repre in tuple(repres):
            # Skip all files without .psd extension
            repre_ext = repre["ext"].lower()
            if repre_ext.startswith("."):
                repre_ext = repre_ext[1:]

            if repre_ext != "psd":
                continue

            # Prepare publish dir for transfers
            publish_dir = instance.data["publishDir"]

            # Prepare json filepath where extracted metadata are stored
            json_filename = "{}.json".format(instance.name)
            json_full_path = os.path.join(staging_dir, json_filename)

            self.log.debug(f"`staging_dir` is \"{staging_dir}\"")

            # Prepare new repre data
            new_repre = {
                "name": "json",
                "ext": "json",
                "files": json_filename,
                "stagingDir": staging_dir
            }

            # TODO add check of list
            # NOTE(review): assumes repre["files"] is a single filename,
            # not a sequence — confirm against collectors.
            psd_filename = repre["files"]
            psd_folder_path = repre["stagingDir"]
            psd_filepath = os.path.join(psd_folder_path, psd_filename)
            self.log.debug(f"psd_filepath: \"{psd_filepath}\"")
            psd_object = PSDImage.open(psd_filepath)

            json_data, transfers = self.export_compositing_images(
                psd_object, staging_dir, publish_dir
            )
            self.log.info("Json file path: {}".format(json_full_path))
            with open(json_full_path, "w") as json_filestream:
                json.dump(json_data, json_filestream, indent=4)

            # Queue file copies and swap the PSD repre for the JSON repre.
            instance.data["transfers"].extend(transfers)
            instance.data["representations"].remove(repre)
            instance.data["representations"].append(new_repre)

    def export_compositing_images(self, psd_object, output_dir, publish_dir):
        """Export allowed PSD groups' layers as PNGs.

        Returns a tuple of (json_data, transfers): `json_data` describes
        the exported hierarchy, `transfers` lists (src, dst) file pairs
        from `output_dir` to `publish_dir`.
        """
        json_data = {
            "__schema_version__": 1,
            "children": []
        }
        transfers = []
        for main_idx, main_layer in enumerate(psd_object):
            # Only visible, allowed group layers are exported.
            if (
                not main_layer.is_visible()
                or main_layer.name.lower() not in self.allowed_group_names
                or not main_layer.is_group
            ):
                continue

            export_layers = []
            layers_idx = 0
            for layer in main_layer:
                # TODO this way may be added also layers next to "ADJ"
                # "ADJ" groups are flattened: their children are exported
                # as if they were direct children of the main group.
                if layer.name.lower() == "adj":
                    for _layer in layer:
                        export_layers.append((layers_idx, _layer))
                        layers_idx += 1

                else:
                    export_layers.append((layers_idx, layer))
                    layers_idx += 1

            if not export_layers:
                continue

            main_layer_data = {
                "index": main_idx,
                "name": main_layer.name,
                "children": []
            }

            for layer_idx, layer in export_layers:
                # Zero-sized layers have no pixels to composite.
                has_size = layer.width > 0 and layer.height > 0
                if not has_size:
                    self.log.debug((
                        "Skipping layer \"{}\" because does "
                        "not have any content."
                    ).format(layer.name))
                    continue

                main_layer_name = main_layer.name.replace(" ", "_")
                layer_name = layer.name.replace(" ", "_")

                # e.g. "01_BG_02_Layer.png" (1-based, zero-padded indexes)
                filename = "{:0>2}_{}_{:0>2}_{}.png".format(
                    main_idx + 1, main_layer_name, layer_idx + 1, layer_name
                )
                layer_data = {
                    "index": layer_idx,
                    "name": layer.name,
                    "filename": filename
                }
                output_filepath = os.path.join(output_dir, filename)
                dst_filepath = os.path.join(publish_dir, filename)
                transfers.append((output_filepath, dst_filepath))

                # Composite against the full PSD viewbox so every PNG has
                # the same canvas size.
                pil_object = layer.composite(viewport=psd_object.viewbox)
                pil_object.save(output_filepath, "PNG")

                main_layer_data["children"].append(layer_data)

            if main_layer_data["children"]:
                json_data["children"].append(main_layer_data)

        return json_data, transfers

    def redo_global_plugins(self, instance):
        """Re-run family/version/anatomy logic for the switched family.

        Mutates instance data: sets `family`, `families`, `version`,
        `latestVersion`, `publishDir` and `resourcesDir`.
        """
        # TODO do this in collection phase
        # Copy `families` and check if `family` is not in current families
        families = instance.data.get("families") or list()
        if families:
            families = list(set(families))

        if self.new_instance_family in families:
            families.remove(self.new_instance_family)

        self.log.debug(
            "Setting new instance families {}".format(str(families))
        )
        instance.data["families"] = families

        # Override instance data with new information
        instance.data["family"] = self.new_instance_family

        subset_name = instance.data["anatomyData"]["subset"]
        asset_doc = instance.data["assetEntity"]
        latest_version = self.find_last_version(subset_name, asset_doc)
        version_number = 1
        if latest_version is not None:
            version_number += latest_version

        instance.data["latestVersion"] = latest_version
        instance.data["version"] = version_number

        # Same data apply to anatomy data
        instance.data["anatomyData"].update({
            "family": self.new_instance_family,
            "version": version_number
        })

        # Redo publish and resources dir
        anatomy = instance.context.data["anatomy"]
        template_data = copy.deepcopy(instance.data["anatomyData"])
        # Placeholder values — only the folder part of the filled template
        # is used below.
        template_data.update({
            "frame": "FRAME_TEMP",
            "representation": "TEMP"
        })
        anatomy_filled = anatomy.format(template_data)
        if "folder" in anatomy.templates["publish"]:
            publish_folder = anatomy_filled["publish"]["folder"]
        else:
            publish_folder = os.path.dirname(anatomy_filled["publish"]["path"])

        publish_folder = os.path.normpath(publish_folder)
        resources_folder = os.path.join(publish_folder, "resources")

        instance.data["publishDir"] = publish_folder
        instance.data["resourcesDir"] = resources_folder

        self.log.debug("publishDir: \"{}\"".format(publish_folder))
        self.log.debug("resourcesDir: \"{}\"".format(resources_folder))

    def find_last_version(self, subset_name, asset_doc):
        """Return the highest existing version number for the subset,
        or None when the subset (or any version) does not exist yet.
        """
        subset_doc = legacy_io.find_one({
            "type": "subset",
            "name": subset_name,
            "parent": asset_doc["_id"]
        })

        if subset_doc is None:
            self.log.debug("Subset entity does not exist yet.")
        else:
            # Sort by name descending to get the latest version document.
            version_doc = legacy_io.find_one(
                {
                    "type": "version",
                    "parent": subset_doc["_id"]
                },
                sort=[("name", -1)]
            )
            if version_doc:
                return int(version_doc["name"])
        return None
|
||||
|
|
@ -1,248 +0,0 @@
|
|||
import os
|
||||
import copy
|
||||
import json
|
||||
|
||||
import pyblish.api
|
||||
|
||||
import openpype.api
|
||||
from openpype.pipeline import legacy_io
|
||||
|
||||
PSDImage = None
|
||||
|
||||
|
||||
class ExtractBGMainGroups(openpype.api.Extractor):
    """Extract top-level PSD groups of a background layout as PNGs.

    Processes ``backgroundLayout`` instances: exports each allowed
    top-level PSD layer/group as a PNG (the "ANIM" group is exported
    per-child), writes a JSON manifest of the exported layers, and
    replaces the PSD representation with the JSON representation.
    File copies are queued via ``instance.data["transfers"]``.
    """

    label = "Extract Background Layout"
    order = pyblish.api.ExtractorOrder + 0.02
    families = ["backgroundLayout"]
    hosts = ["standalonepublisher"]

    # Family the instance is switched to by `redo_global_plugins`.
    new_instance_family = "background"

    # Presetable
    # Only top-level PSD layers with these names (case-insensitive) are
    # exported.
    allowed_group_names = [
        "OL", "BG", "MG", "FG", "UL", "SB", "SKY", "Field Guide", "Field_Guide",
        "ANIM"
    ]

    def process(self, instance):
        # Check if python module `psd_tools` is installed
        # NOTE: imported lazily and stored in the module-level `PSDImage`
        # so the plugin can be discovered even without psd-tools installed.
        try:
            global PSDImage
            from psd_tools import PSDImage
        except Exception:
            raise AssertionError(
                "BUG: Python module `psd-tools` is not installed!"
            )

        # Lowercase the preset names once so membership tests below can be
        # case-insensitive.
        self.allowed_group_names = [
            name.lower()
            for name in self.allowed_group_names
        ]

        # FIX: the original checked `representations` twice — once before
        # and once (verbatim) after `redo_global_plugins`, which does not
        # touch representations. A single check is sufficient.
        repres = instance.data.get("representations")
        if not repres:
            self.log.info("There are no representations on instance.")
            return

        # Re-apply family/version/anatomy data before extraction.
        self.redo_global_plugins(instance)

        if not instance.data.get("transfers"):
            instance.data["transfers"] = []

        # Prepare staging dir
        staging_dir = self.staging_dir(instance)
        if not os.path.exists(staging_dir):
            os.makedirs(staging_dir)

        # Prepare publish dir for transfers
        publish_dir = instance.data["publishDir"]

        # Iterate a tuple copy because the list is mutated in the loop.
        for repre in tuple(repres):
            # Skip all files without .psd extension
            repre_ext = repre["ext"].lower()
            if repre_ext.startswith("."):
                repre_ext = repre_ext[1:]

            if repre_ext != "psd":
                continue

            # Prepare json filepath where extracted metadata are stored
            json_filename = "{}.json".format(instance.name)
            json_full_path = os.path.join(staging_dir, json_filename)

            self.log.debug(f"`staging_dir` is \"{staging_dir}\"")

            # Prepare new repre data
            new_repre = {
                "name": "json",
                "ext": "json",
                "files": json_filename,
                "stagingDir": staging_dir
            }

            # TODO add check of list
            # NOTE(review): assumes repre["files"] is a single filename,
            # not a sequence — confirm against collectors.
            psd_filename = repre["files"]
            psd_folder_path = repre["stagingDir"]
            psd_filepath = os.path.join(psd_folder_path, psd_filename)
            self.log.debug(f"psd_filepath: \"{psd_filepath}\"")
            psd_object = PSDImage.open(psd_filepath)

            json_data, transfers = self.export_compositing_images(
                psd_object, staging_dir, publish_dir
            )
            self.log.info("Json file path: {}".format(json_full_path))
            with open(json_full_path, "w") as json_filestream:
                json.dump(json_data, json_filestream, indent=4)

            # Queue file copies and swap the PSD repre for the JSON repre.
            instance.data["transfers"].extend(transfers)
            instance.data["representations"].remove(repre)
            instance.data["representations"].append(new_repre)

    def export_compositing_images(self, psd_object, output_dir, publish_dir):
        """Export allowed top-level PSD layers as PNGs.

        Returns a tuple of (json_data, transfers): `json_data` describes
        the exported layers, `transfers` lists (src, dst) file pairs
        from `output_dir` to `publish_dir`.
        """
        json_data = {
            "__schema_version__": 1,
            "children": []
        }
        output_ext = ".png"

        # Collect (layer, filename) pairs first; files are written in a
        # second pass below.
        to_export = []
        for layer_idx, layer in enumerate(psd_object):
            layer_name = layer.name.replace(" ", "_")
            if (
                not layer.is_visible()
                or layer_name.lower() not in self.allowed_group_names
            ):
                continue

            # Zero-sized layers have no pixels to composite.
            has_size = layer.width > 0 and layer.height > 0
            if not has_size:
                self.log.debug((
                    "Skipping layer \"{}\" because does not have any content."
                ).format(layer.name))
                continue

            filebase = "{:0>2}_{}".format(layer_idx, layer_name)
            # The "ANIM" group is special-cased: each child layer is
            # exported separately and listed as a child in the manifest.
            if layer_name.lower() == "anim":
                if not layer.is_group:
                    self.log.warning("ANIM layer is not a group layer.")
                    continue

                children = []
                for anim_idx, anim_layer in enumerate(layer):
                    anim_layer_name = anim_layer.name.replace(" ", "_")
                    filename = "{}_{:0>2}_{}{}".format(
                        filebase, anim_idx, anim_layer_name, output_ext
                    )
                    children.append({
                        "index": anim_idx,
                        "name": anim_layer.name,
                        "filename": filename
                    })
                    to_export.append((anim_layer, filename))

                json_data["children"].append({
                    "index": layer_idx,
                    "name": layer.name,
                    "children": children
                })
                continue

            filename = filebase + output_ext
            json_data["children"].append({
                "index": layer_idx,
                "name": layer.name,
                "filename": filename
            })
            to_export.append((layer, filename))

        transfers = []
        for layer, filename in to_export:
            output_filepath = os.path.join(output_dir, filename)
            dst_filepath = os.path.join(publish_dir, filename)
            transfers.append((output_filepath, dst_filepath))

            # Composite against the full PSD viewbox so every PNG has the
            # same canvas size.
            pil_object = layer.composite(viewport=psd_object.viewbox)
            pil_object.save(output_filepath, "PNG")

        return json_data, transfers

    def redo_global_plugins(self, instance):
        """Re-run family/version/anatomy logic for the switched family.

        Mutates instance data: sets `family`, `families`, `version`,
        `latestVersion`, `publishDir` and `resourcesDir`.
        """
        # TODO do this in collection phase
        # Copy `families` and check if `family` is not in current families
        families = instance.data.get("families") or list()
        if families:
            families = list(set(families))

        if self.new_instance_family in families:
            families.remove(self.new_instance_family)

        self.log.debug(
            "Setting new instance families {}".format(str(families))
        )
        instance.data["families"] = families

        # Override instance data with new information
        instance.data["family"] = self.new_instance_family

        subset_name = instance.data["anatomyData"]["subset"]
        asset_doc = instance.data["assetEntity"]
        latest_version = self.find_last_version(subset_name, asset_doc)
        version_number = 1
        if latest_version is not None:
            version_number += latest_version

        instance.data["latestVersion"] = latest_version
        instance.data["version"] = version_number

        # Same data apply to anatomy data
        instance.data["anatomyData"].update({
            "family": self.new_instance_family,
            "version": version_number
        })

        # Redo publish and resources dir
        anatomy = instance.context.data["anatomy"]
        template_data = copy.deepcopy(instance.data["anatomyData"])
        # Placeholder values — only the folder part of the filled template
        # is used below.
        template_data.update({
            "frame": "FRAME_TEMP",
            "representation": "TEMP"
        })
        anatomy_filled = anatomy.format(template_data)
        if "folder" in anatomy.templates["publish"]:
            publish_folder = anatomy_filled["publish"]["folder"]
        else:
            publish_folder = os.path.dirname(anatomy_filled["publish"]["path"])

        publish_folder = os.path.normpath(publish_folder)
        resources_folder = os.path.join(publish_folder, "resources")

        instance.data["publishDir"] = publish_folder
        instance.data["resourcesDir"] = resources_folder

        self.log.debug("publishDir: \"{}\"".format(publish_folder))
        self.log.debug("resourcesDir: \"{}\"".format(resources_folder))

    def find_last_version(self, subset_name, asset_doc):
        """Return the highest existing version number for the subset,
        or None when the subset (or any version) does not exist yet.
        """
        subset_doc = legacy_io.find_one({
            "type": "subset",
            "name": subset_name,
            "parent": asset_doc["_id"]
        })

        if subset_doc is None:
            self.log.debug("Subset entity does not exist yet.")
        else:
            # Sort by name descending to get the latest version document.
            version_doc = legacy_io.find_one(
                {
                    "type": "version",
                    "parent": subset_doc["_id"]
                },
                sort=[("name", -1)]
            )
            if version_doc:
                return int(version_doc["name"])
        return None
|
||||
|
|
@ -1,171 +0,0 @@
|
|||
import os
|
||||
import copy
|
||||
import pyblish.api
|
||||
|
||||
import openpype.api
|
||||
from openpype.pipeline import legacy_io
|
||||
|
||||
PSDImage = None
|
||||
|
||||
|
||||
class ExtractImagesFromPSD(openpype.api.Extractor):
    """Spawn one `image` instance per allowed top-level PSD layer.

    For each allowed, visible top-level layer of an instance's PSD
    representation a new publish instance is created with a PNG
    representation of that layer; the source instance is then removed
    from the context.
    """

    # PLUGIN is not currently enabled because was decided to use different
    # approach
    enabled = False
    active = False
    label = "Extract Images from PSD"
    order = pyblish.api.ExtractorOrder + 0.02
    families = ["backgroundLayout"]
    hosts = ["standalonepublisher"]

    # Family assigned to each spawned instance.
    new_instance_family = "image"
    # Instance data keys that must NOT be copied onto spawned instances.
    ignored_instance_data_keys = ("name", "label", "stagingDir", "version")
    # Presetable
    # Only top-level PSD layers with these names (case-insensitive) are
    # extracted.
    allowed_group_names = [
        "OL", "BG", "MG", "FG", "UL", "SKY", "Field Guide", "Field_Guide",
        "ANIM"
    ]

    def process(self, instance):
        # Check if python module `psd_tools` is installed
        # NOTE: imported lazily and stored in the module-level `PSDImage`
        # so the plugin can be discovered even without psd-tools installed.
        try:
            global PSDImage
            from psd_tools import PSDImage
        except Exception:
            raise AssertionError(
                "BUG: Python module `psd-tools` is not installed!"
            )

        # Lowercase the preset names once so membership tests below can be
        # case-insensitive.
        self.allowed_group_names = [
            name.lower()
            for name in self.allowed_group_names
        ]
        repres = instance.data.get("representations")
        if not repres:
            self.log.info("There are no representations on instance.")
            return

        # Iterate a tuple copy to be safe against mutation during the loop.
        for repre in tuple(repres):
            # Skip all files without .psd extension
            repre_ext = repre["ext"].lower()
            if repre_ext.startswith("."):
                repre_ext = repre_ext[1:]

            if repre_ext != "psd":
                continue

            # TODO add check of list of "files" value
            # NOTE(review): assumes repre["files"] is a single filename,
            # not a sequence — confirm against collectors.
            psd_filename = repre["files"]
            psd_folder_path = repre["stagingDir"]
            psd_filepath = os.path.join(psd_folder_path, psd_filename)
            self.log.debug(f"psd_filepath: \"{psd_filepath}\"")
            psd_object = PSDImage.open(psd_filepath)

            self.create_new_instances(instance, psd_object)

        # Remove the instance from context
        instance.context.remove(instance)

    def create_new_instances(self, instance, psd_object):
        """Create one publish instance per allowed visible PSD layer.

        Each spawned instance gets copied source data (minus
        `ignored_instance_data_keys`), a computed subset/version and a
        single PNG representation composited from the layer.
        """
        asset_doc = instance.data["assetEntity"]
        for layer in psd_object:
            # Only visible layers with allowed names are extracted.
            if (
                not layer.is_visible()
                or layer.name.lower() not in self.allowed_group_names
            ):
                continue

            # Zero-sized layers have no pixels to composite.
            has_size = layer.width > 0 and layer.height > 0
            if not has_size:
                self.log.debug((
                    "Skipping layer \"{}\" because does "
                    "not have any content."
                ).format(layer.name))
                continue

            layer_name = layer.name.replace(" ", "_")
            instance_name = subset_name = f"image{layer_name}"
            self.log.info(
                f"Creating new instance with name \"{instance_name}\""
            )
            new_instance = instance.context.create_instance(instance_name)
            # Deep-copy source data so mutable values are not shared
            # between spawned instances.
            for key, value in instance.data.items():
                if key not in self.ignored_instance_data_keys:
                    new_instance.data[key] = copy.deepcopy(value)

            new_instance.data["label"] = " ".join(
                (new_instance.data["asset"], instance_name)
            )

            # Find latest version
            latest_version = self.find_last_version(subset_name, asset_doc)
            version_number = 1
            if latest_version is not None:
                version_number += latest_version

            self.log.info(
                "Next version of instance \"{}\" will be {}".format(
                    instance_name, version_number
                )
            )

            # Set family and subset
            new_instance.data["family"] = self.new_instance_family
            new_instance.data["subset"] = subset_name
            new_instance.data["version"] = version_number
            new_instance.data["latestVersion"] = latest_version

            new_instance.data["anatomyData"].update({
                "subset": subset_name,
                "family": self.new_instance_family,
                "version": version_number
            })

            # Copy `families` and check if `family` is not in current families
            families = new_instance.data.get("families") or list()
            if families:
                families = list(set(families))

            if self.new_instance_family in families:
                families.remove(self.new_instance_family)
            new_instance.data["families"] = families

            # Prepare staging dir for new instance
            staging_dir = self.staging_dir(new_instance)

            # Composite against the full PSD viewbox so every PNG has the
            # same canvas size.
            output_filename = "{}.png".format(layer_name)
            output_filepath = os.path.join(staging_dir, output_filename)
            pil_object = layer.composite(viewport=psd_object.viewbox)
            pil_object.save(output_filepath, "PNG")

            new_repre = {
                "name": "png",
                "ext": "png",
                "files": output_filename,
                "stagingDir": staging_dir
            }
            self.log.debug(
                "Creating new representation: {}".format(new_repre)
            )
            new_instance.data["representations"] = [new_repre]

    def find_last_version(self, subset_name, asset_doc):
        """Return the highest existing version number for the subset,
        or None when the subset (or any version) does not exist yet.
        """
        subset_doc = legacy_io.find_one({
            "type": "subset",
            "name": subset_name,
            "parent": asset_doc["_id"]
        })

        if subset_doc is None:
            self.log.debug("Subset entity does not exist yet.")
        else:
            # Sort by name descending to get the latest version document.
            version_doc = legacy_io.find_one(
                {
                    "type": "version",
                    "parent": subset_doc["_id"]
                },
                sort=[("name", -1)]
            )
            if version_doc:
                return int(version_doc["name"])
        return None
|
||||
3
setup.py
3
setup.py
|
|
@ -107,7 +107,8 @@ install_requires = [
|
|||
# Python defaults (cx_Freeze skip them by default)
|
||||
"dbm",
|
||||
"sqlite3",
|
||||
"dataclasses"
|
||||
"dataclasses",
|
||||
"timeit"
|
||||
]
|
||||
|
||||
includes = []
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue