Mirror of https://github.com/ynput/ayon-core.git (synced 2025-12-24 21:04:40 +01:00)

Merge pull request #1562 from simonebarbieri/feature/unreal-blender-materials

Commit ebfa0bb2a3
4 changed files with 419 additions and 0 deletions

openpype/hosts/blender/plugins/load/load_look.py (new file, 218 lines)
@@ -0,0 +1,218 @@
"""Load a model asset in Blender."""
|
||||
|
||||
from pathlib import Path
|
||||
from pprint import pformat
|
||||
from typing import Dict, List, Optional
|
||||
|
||||
import os
|
||||
import json
|
||||
import bpy
|
||||
|
||||
from avalon import api, blender
|
||||
import openpype.hosts.blender.api.plugin as plugin
|
||||
|
||||
|
||||
class BlendLookLoader(plugin.AssetLoader):
|
||||
"""Load models from a .blend file.
|
||||
|
||||
Because they come from a .blend file we can simply link the collection that
|
||||
contains the model. There is no further need to 'containerise' it.
|
||||
"""
|
||||
|
||||
families = ["look"]
|
||||
representations = ["json"]
|
||||
|
||||
label = "Load Look"
|
||||
icon = "code-fork"
|
||||
color = "orange"
|
||||
|
||||
def get_all_children(self, obj):
|
||||
children = list(obj.children)
|
||||
|
||||
for child in children:
|
||||
children.extend(child.children)
|
||||
|
||||
return children
|
||||
|
||||
def _process(self, libpath, container_name, objects):
|
||||
with open(libpath, "r") as fp:
|
||||
data = json.load(fp)
|
||||
|
||||
path = os.path.dirname(libpath)
|
||||
materials_path = f"{path}/resources"
|
||||
|
||||
materials = []
|
||||
|
||||
for entry in data:
|
||||
file = entry.get('fbx_filename')
|
||||
if file is None:
|
||||
continue
|
||||
|
||||
bpy.ops.import_scene.fbx(filepath=f"{materials_path}/{file}")
|
||||
|
||||
mesh = [o for o in bpy.context.scene.objects if o.select_get()][0]
|
||||
material = mesh.data.materials[0]
|
||||
material.name = f"{material.name}:{container_name}"
|
||||
|
||||
texture_file = entry.get('tga_filename')
|
||||
if texture_file:
|
||||
node_tree = material.node_tree
|
||||
pbsdf = node_tree.nodes['Principled BSDF']
|
||||
base_color = pbsdf.inputs[0]
|
||||
tex_node = base_color.links[0].from_node
|
||||
tex_node.image.filepath = f"{materials_path}/{texture_file}"
|
||||
|
||||
materials.append(material)
|
||||
|
||||
for obj in objects:
|
||||
for child in self.get_all_children(obj):
|
||||
mesh_name = child.name.split(':')[0]
|
||||
if mesh_name == material.name.split(':')[0]:
|
||||
child.data.materials.clear()
|
||||
child.data.materials.append(material)
|
||||
break
|
||||
|
||||
bpy.data.objects.remove(mesh)
|
||||
|
||||
return materials, objects
|
||||
|
||||
def process_asset(
|
||||
self, context: dict, name: str, namespace: Optional[str] = None,
|
||||
options: Optional[Dict] = None
|
||||
) -> Optional[List]:
|
||||
"""
|
||||
Arguments:
|
||||
name: Use pre-defined name
|
||||
namespace: Use pre-defined namespace
|
||||
context: Full parenthood of representation to load
|
||||
options: Additional settings dictionary
|
||||
"""
|
||||
|
||||
libpath = self.fname
|
||||
asset = context["asset"]["name"]
|
||||
subset = context["subset"]["name"]
|
||||
|
||||
lib_container = plugin.asset_name(
|
||||
asset, subset
|
||||
)
|
||||
unique_number = plugin.get_unique_number(
|
||||
asset, subset
|
||||
)
|
||||
namespace = namespace or f"{asset}_{unique_number}"
|
||||
container_name = plugin.asset_name(
|
||||
asset, subset, unique_number
|
||||
)
|
||||
|
||||
container = bpy.data.collections.new(lib_container)
|
||||
container.name = container_name
|
||||
blender.pipeline.containerise_existing(
|
||||
container,
|
||||
name,
|
||||
namespace,
|
||||
context,
|
||||
self.__class__.__name__,
|
||||
)
|
||||
|
||||
metadata = container.get(blender.pipeline.AVALON_PROPERTY)
|
||||
|
||||
metadata["libpath"] = libpath
|
||||
metadata["lib_container"] = lib_container
|
||||
|
||||
selected = [o for o in bpy.context.scene.objects if o.select_get()]
|
||||
|
||||
materials, objects = self._process(libpath, container_name, selected)
|
||||
|
||||
# Save the list of imported materials in the metadata container
|
||||
metadata["objects"] = objects
|
||||
metadata["materials"] = materials
|
||||
|
||||
metadata["parent"] = str(context["representation"]["parent"])
|
||||
metadata["family"] = context["representation"]["context"]["family"]
|
||||
|
||||
nodes = list(container.objects)
|
||||
nodes.append(container)
|
||||
self[:] = nodes
|
||||
return nodes
|
||||
|
||||
def update(self, container: Dict, representation: Dict):
|
||||
collection = bpy.data.collections.get(container["objectName"])
|
||||
libpath = Path(api.get_representation_path(representation))
|
||||
extension = libpath.suffix.lower()
|
||||
|
||||
self.log.info(
|
||||
"Container: %s\nRepresentation: %s",
|
||||
pformat(container, indent=2),
|
||||
pformat(representation, indent=2),
|
||||
)
|
||||
|
||||
assert collection, (
|
||||
f"The asset is not loaded: {container['objectName']}"
|
||||
)
|
||||
assert not (collection.children), (
|
||||
"Nested collections are not supported."
|
||||
)
|
||||
assert libpath, (
|
||||
"No existing library file found for {container['objectName']}"
|
||||
)
|
||||
assert libpath.is_file(), (
|
||||
f"The file doesn't exist: {libpath}"
|
||||
)
|
||||
assert extension in plugin.VALID_EXTENSIONS, (
|
||||
f"Unsupported file: {libpath}"
|
||||
)
|
||||
|
||||
collection_metadata = collection.get(blender.pipeline.AVALON_PROPERTY)
|
||||
collection_libpath = collection_metadata["libpath"]
|
||||
|
||||
normalized_collection_libpath = (
|
||||
str(Path(bpy.path.abspath(collection_libpath)).resolve())
|
||||
)
|
||||
normalized_libpath = (
|
||||
str(Path(bpy.path.abspath(str(libpath))).resolve())
|
||||
)
|
||||
self.log.debug(
|
||||
"normalized_collection_libpath:\n %s\nnormalized_libpath:\n %s",
|
||||
normalized_collection_libpath,
|
||||
normalized_libpath,
|
||||
)
|
||||
if normalized_collection_libpath == normalized_libpath:
|
||||
self.log.info("Library already loaded, not updating...")
|
||||
return
|
||||
|
||||
for obj in collection_metadata['objects']:
|
||||
for child in self.get_all_children(obj):
|
||||
child.data.materials.clear()
|
||||
|
||||
for material in collection_metadata['materials']:
|
||||
bpy.data.materials.remove(material)
|
||||
|
||||
namespace = collection_metadata['namespace']
|
||||
name = collection_metadata['name']
|
||||
|
||||
container_name = f"{namespace}_{name}"
|
||||
|
||||
materials, objects = self._process(
|
||||
libpath, container_name, collection_metadata['objects'])
|
||||
|
||||
collection_metadata["objects"] = objects
|
||||
collection_metadata["materials"] = materials
|
||||
collection_metadata["libpath"] = str(libpath)
|
||||
collection_metadata["representation"] = str(representation["_id"])
|
||||
|
||||
def remove(self, container: Dict) -> bool:
|
||||
collection = bpy.data.collections.get(container["objectName"])
|
||||
if not collection:
|
||||
return False
|
||||
|
||||
collection_metadata = collection.get(blender.pipeline.AVALON_PROPERTY)
|
||||
|
||||
for obj in collection_metadata['objects']:
|
||||
for child in self.get_all_children(obj):
|
||||
child.data.materials.clear()
|
||||
|
||||
for material in collection_metadata['materials']:
|
||||
bpy.data.materials.remove(material)
|
||||
|
||||
bpy.data.collections.remove(collection)
|
||||
|
||||
return True
|
||||
|
|
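
For reference, the JSON representation this loader consumes is the one written by the Unreal ExtractLook plugin further down in this PR. Based on the keys used there ('material', 'fbx_filename', 'tga_filename'), a minimal illustrative payload could look like the following sketch; all values are hypothetical.

# Illustrative look descriptor as read by BlendLookLoader._process().
# The FBX and TGA files are resolved inside a "resources" folder next to
# the JSON file.
example_look_data = [
    {
        "material": "Body",
        "fbx_filename": "lookMain_Body.fbx",          # carries the material
        "tga_filename": "lookMain_Body_texture.tga",  # optional base color texture
    },
    {
        # Entries without an 'fbx_filename' are skipped by the loader;
        # entries without a 'tga_filename' keep the texture path imported
        # from the FBX.
        "material": "Eyes",
        "fbx_filename": "lookMain_Eyes.fbx",
    },
]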

(modified file; 15 additions inside class ExtractFBX)

@@ -54,6 +54,14 @@ class ExtractFBX(openpype.api.Extractor):
        # We set the scale of the scene for the export
        scene.unit_settings.scale_length = 0.01

        new_materials = []

        for obj in collections[0].all_objects:
            if obj.type == 'MESH':
                mat = bpy.data.materials.new(obj.name)
                obj.data.materials.append(mat)
                new_materials.append(mat)

        # We export the fbx
        bpy.ops.export_scene.fbx(
            filepath=filepath,

@@ -66,6 +74,13 @@ class ExtractFBX(openpype.api.Extractor):

        scene.unit_settings.scale_length = old_scale

        for mat in new_materials:
            bpy.data.materials.remove(mat)

        for obj in collections[0].all_objects:
            if obj.type == 'MESH':
                obj.data.materials.pop()

        if "representations" not in instance.data:
            instance.data["representations"] = []
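
For context, the hunks above wrap the existing FBX export in temporary per-mesh materials, presumably so every exported mesh carries a material slot named after its object (which the Unreal-side look tools and the Blender loader above can then match by name); the temporary materials are removed again after the export. Below is a minimal standalone sketch of the same pattern; the function name and arguments are illustrative, not part of the PR.

import bpy


def export_with_placeholder_materials(collection, filepath):
    """Sketch only: give every mesh a material slot named after its object,
    export the FBX, then restore the original slots."""
    temp_materials = []
    for obj in collection.all_objects:
        if obj.type == 'MESH':
            mat = bpy.data.materials.new(obj.name)  # slot named after the object
            obj.data.materials.append(mat)
            temp_materials.append(mat)
    try:
        bpy.ops.export_scene.fbx(filepath=filepath)
    finally:
        # Drop the placeholder slots and the temporary material datablocks.
        for obj in collection.all_objects:
            if obj.type == 'MESH':
                obj.data.materials.pop()
        for mat in temp_materials:
            bpy.data.materials.remove(mat)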

openpype/hosts/unreal/plugins/create/create_look.py (new file, 66 lines)
@@ -0,0 +1,66 @@
import unreal
from openpype.hosts.unreal.api.plugin import Creator
from avalon.unreal import pipeline


class CreateLook(Creator):
    """Shader connections defining shape look"""

    name = "unrealLook"
    label = "Unreal - Look"
    family = "look"
    icon = "paint-brush"

    root = "/Game/Avalon/Assets"
    suffix = "_INS"

    def __init__(self, *args, **kwargs):
        super(CreateLook, self).__init__(*args, **kwargs)

    def process(self):
        name = self.data["subset"]

        selection = []
        if (self.options or {}).get("useSelection"):
            sel_objects = unreal.EditorUtilityLibrary.get_selected_assets()
            selection = [a.get_path_name() for a in sel_objects]

        # Create the folder
        path = f"{self.root}/{self.data['asset']}"
        new_name = pipeline.create_folder(path, name)
        full_path = f"{path}/{new_name}"

        # Create a new cube static mesh
        ar = unreal.AssetRegistryHelpers.get_asset_registry()
        cube = ar.get_asset_by_object_path("/Engine/BasicShapes/Cube.Cube")

        # Create the avalon publish instance object
        container_name = f"{name}{self.suffix}"
        pipeline.create_publish_instance(
            instance=container_name, path=full_path)

        # Get the mesh of the selected object
        original_mesh = ar.get_asset_by_object_path(selection[0]).get_asset()
        materials = original_mesh.get_editor_property('materials')

        self.data["members"] = []

        # Add the materials to the cube
        for material in materials:
            name = material.get_editor_property('material_slot_name')
            object_path = f"{full_path}/{name}.{name}"
            object = unreal.EditorAssetLibrary.duplicate_loaded_asset(
                cube.get_asset(), object_path
            )

            # Remove the default material of the cube object
            object.get_editor_property('static_materials').pop()

            object.add_material(
                material.get_editor_property('material_interface'))

            self.data["members"].append(object_path)

            unreal.EditorAssetLibrary.save_asset(object_path)

        pipeline.imprint(f"{full_path}/{container_name}", self.data)
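
To make the naming concrete, here is a small illustrative sketch of the content paths the creator builds; the asset, subset, and slot names are hypothetical, and pipeline.create_folder may return an adjusted folder name if one already exists.

# Hypothetical values, mirroring the f-strings used in CreateLook.process().
root = "/Game/Avalon/Assets"
asset = "hero"          # self.data['asset']
subset = "lookMain"     # self.data['subset']
slot = "Body"           # one material_slot_name from the selected mesh

path = f"{root}/{asset}"                    # /Game/Avalon/Assets/hero
full_path = f"{path}/{subset}"              # folder created for the look
cube_copy = f"{full_path}/{slot}.{slot}"    # one duplicated cube per material slot
instance = f"{full_path}/{subset}_INS"      # the avalon publish instance that gets imprinted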

openpype/hosts/unreal/plugins/publish/extract_look.py (new file, 120 lines)
@@ -0,0 +1,120 @@
import json
import os

import unreal
from unreal import MaterialEditingLibrary as mat_lib

import openpype.api


class ExtractLook(openpype.api.Extractor):
    """Extract look."""

    label = "Extract Look"
    hosts = ["unreal"]
    families = ["look"]
    optional = True

    def process(self, instance):
        # Define extract output file path
        stagingdir = self.staging_dir(instance)
        resources_dir = instance.data["resourcesDir"]

        ar = unreal.AssetRegistryHelpers.get_asset_registry()

        transfers = []

        json_data = []

        for member in instance:
            asset = ar.get_asset_by_object_path(member)
            object = asset.get_asset()

            name = asset.get_editor_property('asset_name')

            json_element = {'material': str(name)}

            material_obj = object.get_editor_property('static_materials')[0]
            material = material_obj.material_interface

            # Only the texture plugged into the material's Base Color input
            # is exported.
            base_color = mat_lib.get_material_property_input_node(
                material, unreal.MaterialProperty.MP_BASE_COLOR)

            base_color_name = base_color.get_editor_property('parameter_name')

            texture = mat_lib.get_material_default_texture_parameter_value(
                material, base_color_name)

            if texture:
                # Export Texture
                tga_filename = f"{instance.name}_{name}_texture.tga"

                tga_exporter = unreal.TextureExporterTGA()

                tga_export_task = unreal.AssetExportTask()

                tga_export_task.set_editor_property('exporter', tga_exporter)
                tga_export_task.set_editor_property('automated', True)
                tga_export_task.set_editor_property('object', texture)
                tga_export_task.set_editor_property(
                    'filename', f"{stagingdir}/{tga_filename}")
                tga_export_task.set_editor_property('prompt', False)
                tga_export_task.set_editor_property('selected', False)

                unreal.Exporter.run_asset_export_task(tga_export_task)

                json_element['tga_filename'] = tga_filename

                transfers.append((
                    f"{stagingdir}/{tga_filename}",
                    f"{resources_dir}/{tga_filename}"))

            # Export the member static mesh (carrying the material
            # assignment) as FBX.
            fbx_filename = f"{instance.name}_{name}.fbx"

            fbx_exporter = unreal.StaticMeshExporterFBX()
            fbx_exporter.set_editor_property('text', False)

            options = unreal.FbxExportOption()
            options.set_editor_property('ascii', False)
            options.set_editor_property('collision', False)

            task = unreal.AssetExportTask()
            task.set_editor_property('exporter', fbx_exporter)
            task.set_editor_property('options', options)
            task.set_editor_property('automated', True)
            task.set_editor_property('object', object)
            task.set_editor_property(
                'filename', f"{stagingdir}/{fbx_filename}")
            task.set_editor_property('prompt', False)
            task.set_editor_property('selected', False)

            unreal.Exporter.run_asset_export_task(task)

            json_element['fbx_filename'] = fbx_filename

            transfers.append((
                f"{stagingdir}/{fbx_filename}",
                f"{resources_dir}/{fbx_filename}"))

            json_data.append(json_element)

        json_filename = f"{instance.name}.json"
        json_path = os.path.join(stagingdir, json_filename)

        with open(json_path, "w+") as file:
            json.dump(json_data, fp=file, indent=2)

        if "transfers" not in instance.data:
            instance.data["transfers"] = []
        if "representations" not in instance.data:
            instance.data["representations"] = []

        json_representation = {
            'name': 'json',
            'ext': 'json',
            'files': json_filename,
            "stagingDir": stagingdir,
        }

        instance.data["representations"].append(json_representation)
        instance.data["transfers"].extend(transfers)
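
To show how the two ends of this PR line up, a small illustrative sketch of the staged filenames; the instance and slot names are hypothetical.

# Hypothetical names, mirroring the f-strings in ExtractLook.process().
instance_name = "lookMain"   # instance.name
slot_name = "Body"           # asset_name of one duplicated cube

tga_filename = f"{instance_name}_{slot_name}_texture.tga"  # lookMain_Body_texture.tga
fbx_filename = f"{instance_name}_{slot_name}.fbx"          # lookMain_Body.fbx
json_filename = f"{instance_name}.json"                    # lookMain.json

# The transfers copy the .tga/.fbx files into instance.data["resourcesDir"];
# BlendLookLoader._process() resolves them relative to the published JSON as
# f"{os.path.dirname(libpath)}/resources/<filename>", so the resources
# directory is assumed to end up next to the JSON representation.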