mirror of https://github.com/ynput/ayon-core.git
synced 2025-12-24 21:04:40 +01:00
containerise all loaders
This commit is contained in:
parent 94b5b86cae
commit 7cee74677d
5 changed files with 36 additions and 33 deletions
@@ -40,3 +40,19 @@ class ImportAudioLoader(api.Loader):
         harmony.send(
             {"function": func, "args": [context["subset"]["name"], wav_file]}
         )
+
+        subset_name = context["subset"]["name"]
+
+        return harmony.containerise(
+            subset_name,
+            namespace,
+            subset_name,
+            context,
+            self.__class__.__name__
+        )
+
+    def update(self, container, representation):
+        pass
+
+    def remove(self, container):
+        pass
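Note: `harmony.containerise` is what turns a loaded subset into something the scene inventory can track. Judging from the calls in this diff (name, namespace, node, context, loader, and optionally `nodes`), it imprints container metadata on a node. The sketch below only illustrates that idea; the helper name, metadata keys, and key layout are assumptions, not the actual ayon-core implementation.

# Illustrative only -- not the actual ayon-core implementation.
def build_container_data(name, namespace, context, loader=None, nodes=None):
    """Assemble the metadata a containerise() call would imprint on a node."""
    return {
        "schema": "openpype:container-2.0",   # assumed schema identifier
        "id": "pyblish.avalon.container",     # assumed container marker
        "name": name,
        "namespace": namespace,
        "loader": str(loader),
        # Assumed key layout for the representation id in `context`.
        "representation": str(context["representation"]["_id"]),
        "nodes": nodes or [],
    }

# A loader would then imprint it, roughly as the hunks above do:
#     harmony.imprint(node, build_container_data(...))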
@@ -243,17 +243,12 @@ class BackgroundLoader(api.Loader):
 
         for child in data['children']:
             if child.get("filename"):
-                print(child["filename"])
                 layers.append(child["filename"])
             else:
                 for layer in child['children']:
                     if layer.get("filename"):
-                        print(layer["filename"])
                         layers.append(layer["filename"])
-
-        print(layers)
-        print(data)
 
         bg_folder = os.path.dirname(self.fname)
 
         subset_name = context["subset"]["name"]
@@ -263,8 +258,6 @@ class BackgroundLoader(api.Loader):
         for layer in sorted(layers):
             file_to_import = [os.path.join(bg_folder, layer).replace("\\", "/")]
 
-            print(f"FILE TO IMPORT: {file_to_import}")
-
             read_node = harmony.send(
                 {
                     "function": copy_files + import_files,
@@ -273,7 +266,6 @@ class BackgroundLoader(api.Loader):
             )["result"]
             container_nodes.append(read_node)
 
-
         return harmony.containerise(
             subset_name,
             namespace,
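For clarity, the loop in the first BackgroundLoader hunk flattens a two-level background description: a top-level child is either a layer carrying a "filename" or a group whose own children carry the filenames. A standalone sketch of that traversal, with hypothetical sample data (the real JSON comes from the published background representation):

def collect_layer_filenames(data):
    """Collect "filename" entries from a background description, one or two levels deep."""
    layers = []
    for child in data["children"]:
        if child.get("filename"):
            layers.append(child["filename"])
        else:
            # Groups: look one level deeper (slightly more defensive than the
            # loader, which indexes child['children'] directly).
            for layer in child.get("children", []):
                if layer.get("filename"):
                    layers.append(layer["filename"])
    return layers

# Hypothetical sample mirroring the structure the loader expects.
sample = {
    "children": [
        {"filename": "BG_sky.png"},
        {"children": [{"filename": "BG_trees.png"}, {"filename": "BG_rocks.png"}]},
    ]
}
print(sorted(collect_layer_filenames(sample)))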
@@ -230,7 +230,7 @@ class ImageSequenceLoader(api.Loader):
     """Load images
     Stores the imported asset in a container named after the asset.
     """
-    families = ["shot", "render", "image"]
+    families = ["shot", "render", "image", "plate"]
     representations = ["jpeg", "png", "jpg"]
 
     def load(self, context, name=None, namespace=None, data=None):
@@ -267,7 +267,8 @@ class ImageSequenceLoader(api.Loader):
             namespace,
             read_node,
             context,
-            self.__class__.__name__
+            self.__class__.__name__,
+            nodes=[read_node]
         )
 
     def update(self, container, representation):
@@ -336,6 +337,7 @@ class ImageSequenceLoader(api.Loader):
         harmony.send(
             {"function": func, "args": [node]}
         )
+        harmony.imprint(node, {}, remove=True)
 
     def switch(self, container, representation):
         self.update(container, representation)
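The ImageSequenceLoader change passes the created read node twice: positionally as the node to imprint, and as `nodes=[read_node]` so the container also records its member nodes. The snippet below only illustrates what a tool could do with that list once it reads the container back; the key names and sample values are assumptions, only the `nodes=[read_node]` intent comes from the diff.

# Hypothetical container data as a tool might read it back after this change.
container = {
    "name": "imageSequenceMain",
    "namespace": "sh0010_imageSequenceMain",
    "loader": "ImageSequenceLoader",
    "nodes": ["Top/sh0010_imageSequenceMain"],   # recorded via nodes=[read_node]
}

# update()/remove() can now act on every tracked member instead of a single node.
for member in container.get("nodes", []):
    print("would update/remove:", member)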
@@ -1,23 +0,0 @@
-import os
-
-import json
-
-file = r"G:\My Drive\pypeRoot\milo_s01\episodes\ml102\ml102_shots\ml102_sh0850\publish\image\imageForComp\v001\ml_ml102_sh0850_imageForComp_v001.json"
-
-with open(file) as json_file:
-    data = json.load(json_file)
-
-layers = list()
-
-for child in data['children']:
-    if child.get("filename"):
-        print(child["filename"])
-        layers.append(child["filename"])
-    else:
-        for layer in child['children']:
-            if layer.get("filename"):
-                print(layer["filename"])
-                layers.append(layer["filename"])
-
-for layer in sorted(layers):
-    print(layer)
@@ -35,6 +35,22 @@ class ImportTemplateLoader(api.Loader):
 
         shutil.rmtree(temp_dir)
 
+        subset_name = context["subset"]["name"]
+
+        return harmony.containerise(
+            subset_name,
+            namespace,
+            subset_name,
+            context,
+            self.__class__.__name__
+        )
+
+    def update(self, container, representation):
+        pass
+
+    def remove(self, container):
+        pass
+
 
 class ImportWorkfileLoader(ImportTemplateLoader):
     """Import workfiles."""
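ImportTemplateLoader follows the same containerise pattern as ImportAudioLoader; the `shutil.rmtree(temp_dir)` context line shows it stages the template in a temporary directory and removes it once the import is done. A generic sketch of that stage-then-clean-up pattern follows; the archive extraction step is a placeholder, not the loader's actual import code.

import shutil
import tempfile
import zipfile

def stage_and_cleanup(template_path):
    """Extract an archive into a temp dir, work with it, then remove the dir."""
    temp_dir = tempfile.mkdtemp()
    try:
        # Placeholder for the actual import step performed by the loader.
        with zipfile.ZipFile(template_path) as archive:
            archive.extractall(temp_dir)
        # ... hand the extracted files to the host here ...
    finally:
        # Matches the shutil.rmtree(temp_dir) seen in the hunk above.
        shutil.rmtree(temp_dir)

Using tempfile.TemporaryDirectory() as a context manager is an equivalent, slightly tidier way to guarantee the cleanup.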