Mirror of https://github.com/ynput/ayon-core.git (synced 2026-01-01 08:24:53 +01:00)
preliminary copy of blender integration from sonar

Commit: ccbef04605
Parent: 061e0056e5
11 changed files with 690 additions and 0 deletions
35  pype/plugins/blender/create/submarine_model.py  Normal file
@@ -0,0 +1,35 @@
"""Create a model asset."""

import bpy

import sonar.blender
from avalon import api
from avalon.blender import Creator, lib


class CreateModel(Creator):
    """Polygonal static geometry"""

    name = "model_default"
    label = "Model"
    family = "model"
    icon = "cube"

    def process(self):
        asset = self.data["asset"]
        subset = self.data["subset"]
        name = sonar.blender.plugin.model_name(asset, subset)
        collection = bpy.data.collections.new(name=name)
        bpy.context.scene.collection.children.link(collection)
        self.data['task'] = api.Session.get('AVALON_TASK')
        lib.imprint(collection, self.data)

        if (self.options or {}).get("useSelection"):
            for obj in bpy.context.selected_objects:
                collection.objects.link(obj)

        if bpy.data.workspaces.get('Modeling'):
            bpy.context.window.workspace = bpy.data.workspaces['Modeling']

        return collection
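Illustrative sketch (not part of the diff): the data that lib.imprint() writes onto the new collection is what the publish collector (collect_model.py, further down) reads back through AVALON_PROPERTY. Assuming the same property key, the imprinted metadata can be inspected like this; the function name is made up for the example.

import bpy
from avalon.blender.pipeline import AVALON_PROPERTY

def list_imprinted_collections():
    # Print the imprinted asset/subset/task for every collection in the scene.
    for collection in bpy.data.collections:
        data = collection.get(AVALON_PROPERTY)
        if data:
            print(collection.name, data.get("asset"), data.get("subset"), data.get("task"))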
264  pype/plugins/blender/load/submarine_model.py  Normal file
@@ -0,0 +1,264 @@
"""Load a model asset in Blender."""

import logging
from pathlib import Path
from pprint import pformat
from typing import Dict, List, Optional

import avalon.blender.pipeline
import bpy
import pype.blender
from avalon import api

logger = logging.getLogger("pype").getChild("blender").getChild("load_model")


class BlendModelLoader(pype.blender.AssetLoader):
    """Load models from a .blend file.

    Because they come from a .blend file we can simply link the collection
    that contains the model. There is no further need to 'containerise' it.

    Warning:
        Loading the same asset more than once is not properly supported at
        the moment.
    """

    families = ["model"]
    representations = ["blend"]

    label = "Link Model"
    icon = "code-fork"
    color = "orange"

    @staticmethod
    def _get_lib_collection(name: str, libpath: Path) -> Optional[bpy.types.Collection]:
        """Find the collection(s) with name, loaded from libpath.

        Note:
            It is assumed that only 1 matching collection is found.
        """
        for collection in bpy.data.collections:
            if collection.name != name:
                continue
            if collection.library is None:
                continue
            if not collection.library.filepath:
                continue
            collection_lib_path = str(
                Path(bpy.path.abspath(collection.library.filepath)).resolve())
            normalized_libpath = str(
                Path(bpy.path.abspath(str(libpath))).resolve())
            if collection_lib_path == normalized_libpath:
                return collection
        return None

    @staticmethod
    def _collection_contains_object(
            collection: bpy.types.Collection, object: bpy.types.Object) -> bool:
        """Check if the collection contains the object."""
        for obj in collection.objects:
            if obj == object:
                return True
        return False

    def process_asset(self,
                      context: dict,
                      name: str,
                      namespace: Optional[str] = None,
                      options: Optional[Dict] = None) -> Optional[List]:
        """
        Arguments:
            name: Use pre-defined name
            namespace: Use pre-defined namespace
            context: Full parenthood of representation to load
            options: Additional settings dictionary
        """
        libpath = self.fname
        asset = context["asset"]["name"]
        subset = context["subset"]["name"]
        lib_container = pype.blender.plugin.model_name(asset, subset)
        container_name = pype.blender.plugin.model_name(asset, subset, namespace)
        relative = bpy.context.preferences.filepaths.use_relative_paths

        with bpy.data.libraries.load(libpath, link=True, relative=relative) as (_, data_to):
            data_to.collections = [lib_container]

        scene = bpy.context.scene
        instance_empty = bpy.data.objects.new(container_name, None)
        if not instance_empty.get("avalon"):
            instance_empty["avalon"] = dict()
        avalon_info = instance_empty["avalon"]
        avalon_info.update({"container_name": container_name})
        scene.collection.objects.link(instance_empty)
        instance_empty.instance_type = 'COLLECTION'
        container = bpy.data.collections[lib_container]
        container.name = container_name
        instance_empty.instance_collection = container
        container.make_local()
        avalon.blender.pipeline.containerise_existing(
            container,
            name,
            namespace,
            context,
            self.__class__.__name__,
        )

        nodes = list(container.objects)
        nodes.append(container)
        nodes.append(instance_empty)
        self[:] = nodes
        return nodes

    def update(self, container: Dict, representation: Dict):
        """Update the loaded asset.

        This will remove all objects of the current collection, load the new
        ones and add them to the collection.
        If the objects of the collection are used in another collection they
        will not be removed, only unlinked. Normally this should not be the
        case though.

        Warning:
            No nested collections are supported at the moment!
        """
        collection = bpy.data.collections.get(container["objectName"])
        libpath = Path(api.get_representation_path(representation))
        extension = libpath.suffix.lower()

        logger.debug(
            "Container: %s\nRepresentation: %s",
            pformat(container, indent=2),
            pformat(representation, indent=2),
        )

        assert collection, f"The asset is not loaded: {container['objectName']}"
        assert not (collection.children), "Nested collections are not supported."
        assert libpath, f"No existing library file found for {container['objectName']}"
        assert libpath.is_file(), f"The file doesn't exist: {libpath}"
        assert extension in pype.blender.plugin.VALID_EXTENSIONS, f"Unsupported file: {libpath}"

        collection_libpath = self._get_library_from_container(collection).filepath
        normalized_collection_libpath = str(
            Path(bpy.path.abspath(collection_libpath)).resolve())
        normalized_libpath = str(
            Path(bpy.path.abspath(str(libpath))).resolve())
        logger.debug(
            "normalized_collection_libpath:\n  %s\nnormalized_libpath:\n  %s",
            normalized_collection_libpath,
            normalized_libpath,
        )
        if normalized_collection_libpath == normalized_libpath:
            logger.info("Library already loaded, not updating...")
            return

        # Let Blender's garbage collection take care of removing the library
        # itself after removing the objects.
        objects_to_remove = set()
        collection_objects = list()
        collection_objects[:] = collection.objects
        for obj in collection_objects:
            # Unlink every object
            collection.objects.unlink(obj)
            remove_obj = True
            for coll in [coll for coll in bpy.data.collections if coll != collection]:
                if coll.objects and self._collection_contains_object(coll, obj):
                    remove_obj = False
            if remove_obj:
                objects_to_remove.add(obj)
        for obj in objects_to_remove:
            # Only delete objects that are not used elsewhere
            bpy.data.objects.remove(obj)

        instance_empties = [
            obj for obj in collection.users_dupli_group if obj.name in collection.name]
        if instance_empties:
            instance_empty = instance_empties[0]
            container_name = instance_empty["avalon"]["container_name"]
            relative = bpy.context.preferences.filepaths.use_relative_paths
            with bpy.data.libraries.load(str(libpath), link=True, relative=relative) as (_, data_to):
                data_to.collections = [container_name]
            new_collection = self._get_lib_collection(container_name, libpath)
            if new_collection is None:
                raise ValueError(f"A matching collection '{container_name}' "
                                 f"should have been found in: {libpath}")
            for obj in new_collection.objects:
                collection.objects.link(obj)
            bpy.data.collections.remove(new_collection)
            # Update the representation on the collection
            avalon_prop = collection[avalon.blender.pipeline.AVALON_PROPERTY]
            avalon_prop["representation"] = str(representation["_id"])

    def remove(self, container: Dict) -> bool:
        """Remove an existing container from a Blender scene.

        Arguments:
            container (avalon-core:container-1.0): Container to remove,
                from `host.ls()`.

        Returns:
            bool: Whether the container was deleted.

        Warning:
            No nested collections are supported at the moment!
        """
        collection = bpy.data.collections.get(container["objectName"])
        if not collection:
            return False
        assert not (collection.children), "Nested collections are not supported."
        instance_parents = list(collection.users_dupli_group)
        instance_objects = list(collection.objects)
        for obj in instance_objects + instance_parents:
            bpy.data.objects.remove(obj)
        bpy.data.collections.remove(collection)

        return True


class CacheModelLoader(pype.blender.AssetLoader):
    """Load cache models.

    Stores the imported asset in a collection named after the asset.

    Note:
        At least for now it only supports Alembic files.
    """

    families = ["model"]
    representations = ["abc"]

    label = "Link Model"
    icon = "code-fork"
    color = "orange"

    def process_asset(self,
                      context: dict,
                      name: str,
                      namespace: Optional[str] = None,
                      options: Optional[Dict] = None) -> Optional[List]:
        """
        Arguments:
            name: Use pre-defined name
            namespace: Use pre-defined namespace
            context: Full parenthood of representation to load
            options: Additional settings dictionary
        """
        raise NotImplementedError("Loading of Alembic files is not yet implemented.")
        # TODO (jasper): implement Alembic import.

        libpath = self.fname
        asset = context["asset"]["name"]
        subset = context["subset"]["name"]
        # TODO (jasper): evaluate use of namespace which is 'alien' to Blender.
        lib_container = container_name = pype.blender.plugin.model_name(asset, subset, namespace)
        relative = bpy.context.preferences.filepaths.use_relative_paths

        with bpy.data.libraries.load(libpath, link=True, relative=relative) as (data_from, data_to):
            data_to.collections = [lib_container]

        scene = bpy.context.scene
        instance_empty = bpy.data.objects.new(container_name, None)
        scene.collection.objects.link(instance_empty)
        instance_empty.instance_type = 'COLLECTION'
        collection = bpy.data.collections[lib_container]
        collection.name = container_name
        instance_empty.instance_collection = collection

        nodes = list(collection.objects)
        nodes.append(collection)
        nodes.append(instance_empty)
        self[:] = nodes
        return nodes
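Illustrative sketch (not part of the diff): the BlendModelLoader docstring notes that a .blend model only needs to be linked. Distilled from process_asset() above, the bare link-and-instance pattern looks like this; the function name is made up and no avalon containerisation is done.

import bpy

def link_collection(libpath: str, collection_name: str) -> bpy.types.Object:
    # Link one named collection from an external .blend file.
    relative = bpy.context.preferences.filepaths.use_relative_paths
    with bpy.data.libraries.load(libpath, link=True, relative=relative) as (_, data_to):
        data_to.collections = [collection_name]
    # Instance the linked collection in the current scene via an empty.
    empty = bpy.data.objects.new(collection_name, None)
    bpy.context.scene.collection.objects.link(empty)
    empty.instance_type = 'COLLECTION'
    empty.instance_collection = bpy.data.collections[collection_name]
    return empty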
16  pype/plugins/blender/publish/collect_current_file.py  Normal file
@@ -0,0 +1,16 @@
import bpy

import pyblish.api


class CollectBlenderCurrentFile(pyblish.api.ContextPlugin):
    """Inject the current working file into context"""

    order = pyblish.api.CollectorOrder - 0.5
    label = "Blender Current File"
    hosts = ['blender']

    def process(self, context):
        """Inject the current working file"""
        current_file = bpy.data.filepath
        context.data['currentFile'] = current_file
52  pype/plugins/blender/publish/collect_model.py  Normal file
@@ -0,0 +1,52 @@
import typing
from typing import Generator

import bpy

import avalon.api
import pyblish.api
from avalon.blender.pipeline import AVALON_PROPERTY


class CollectModel(pyblish.api.ContextPlugin):
    """Collect the data of a model."""

    hosts = ["blender"]
    label = "Collect Model"
    order = pyblish.api.CollectorOrder

    @staticmethod
    def get_model_collections() -> Generator:
        """Return all 'model' collections.

        Check if the family is 'model' and if it doesn't have the
        representation set. If the representation is set, it is a loaded model
        and we don't want to publish it.
        """
        for collection in bpy.data.collections:
            avalon_prop = collection.get(AVALON_PROPERTY) or dict()
            if (avalon_prop.get('family') == 'model'
                    and not avalon_prop.get('representation')):
                yield collection

    def process(self, context):
        """Collect the models from the current Blender scene."""
        collections = self.get_model_collections()
        for collection in collections:
            avalon_prop = collection[AVALON_PROPERTY]
            asset = avalon_prop['asset']
            family = avalon_prop['family']
            subset = avalon_prop['subset']
            task = avalon_prop['task']
            name = f"{asset}_{subset}"
            instance = context.create_instance(
                name=name,
                family=family,
                subset=subset,
                asset=asset,
                task=task,
            )
            members = list(collection.objects)
            members.append(collection)
            instance[:] = members
            self.log.debug(instance.data)
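Illustrative sketch (not part of the diff): the collector above only yields collections whose imprinted data has family 'model' and no representation. An example of such a property dict is shown below; the keys come from the code above, the values are made up.

example_avalon_prop = {
    "asset": "submarine",        # illustrative value
    "family": "model",
    "subset": "modelDefault",    # illustrative value
    "task": "modeling",          # illustrative value
    # no "representation" key: collections that carry one were loaded from a
    # published version and are skipped by get_model_collections().
}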
34  pype/plugins/blender/publish/extract_model.py  Normal file
@@ -0,0 +1,34 @@
from pathlib import Path
import avalon.blender.workio

import sonar.api


class ExtractModel(sonar.api.Extractor):
    """Extract as model."""

    label = "Model"
    hosts = ["blender"]
    families = ["model"]
    optional = True

    def process(self, instance):
        # Define extract output file path
        stagingdir = Path(self.staging_dir(instance))
        filename = f"{instance.name}.blend"
        filepath = str(stagingdir / filename)

        # Perform extraction
        self.log.info("Performing extraction..")

        # Just save the file to a temporary location. At least for now it's no
        # problem to have (possibly) extra stuff in the file.
        avalon.blender.workio.save_file(filepath, copy=True)

        # Store reference for integration
        if "files" not in instance.data:
            instance.data["files"] = list()

        instance.data["files"].append(filename)

        self.log.info("Extracted instance '%s' to: %s", instance.name, filepath)
47  pype/plugins/blender/publish/validate_mesh_has_uv.py  Normal file
@@ -0,0 +1,47 @@
from typing import List

import bpy

import pyblish.api
import sonar.blender.action


class ValidateMeshHasUvs(pyblish.api.InstancePlugin):
    """Validate that the current mesh has UVs."""

    order = pyblish.api.ValidatorOrder
    hosts = ["blender"]
    families = ["model"]
    category = "geometry"
    label = "Mesh Has UVs"
    actions = [sonar.blender.action.SelectInvalidAction]
    optional = True

    @staticmethod
    def has_uvs(obj: bpy.types.Object) -> bool:
        """Check if an object has UVs."""
        if not obj.data.uv_layers:
            return False
        for uv_layer in obj.data.uv_layers:
            for polygon in obj.data.polygons:
                for loop_index in polygon.loop_indices:
                    if not uv_layer.data[loop_index].uv:
                        return False

        return True

    @classmethod
    def get_invalid(cls, instance) -> List:
        invalid = []
        # TODO (jasper): only check objects in the collection that will be published?
        for obj in [obj for obj in bpy.data.objects if obj.type == 'MESH']:
            # Make sure we are in object mode.
            bpy.ops.object.mode_set(mode='OBJECT')
            if not cls.has_uvs(obj):
                invalid.append(obj)
        return invalid

    def process(self, instance):
        invalid = self.get_invalid(instance)
        if invalid:
            raise RuntimeError(f"Meshes found in instance without valid UVs: {invalid}")
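Illustrative sketch (not part of the diff): the plugin's own has_uvs() check can be run interactively to preview which selected meshes would fail validation. It assumes ValidateMeshHasUvs is defined or importable in the current session; the function name is made up.

import bpy

def preview_missing_uvs():
    # Collect selected mesh objects that the validator's check would reject.
    missing = [
        obj.name
        for obj in bpy.context.selected_objects
        if obj.type == 'MESH' and not ValidateMeshHasUvs.has_uvs(obj)
    ]
    print("Selected meshes without UVs:", missing)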
@@ -0,0 +1,31 @@
from typing import List

import bpy

import pyblish.api
import sonar.blender.action


class ValidateMeshNoNegativeScale(pyblish.api.Validator):
    """Ensure that meshes don't have a negative scale."""

    order = pyblish.api.ValidatorOrder
    hosts = ["blender"]
    families = ["model"]
    label = "Mesh No Negative Scale"
    actions = [sonar.blender.action.SelectInvalidAction]

    @staticmethod
    def get_invalid(instance) -> List:
        invalid = []
        # TODO (jasper): only check objects in the collection that will be published?
        for obj in [obj for obj in bpy.data.objects if obj.type == 'MESH']:
            if any(v < 0 for v in obj.scale):
                invalid.append(obj)

        return invalid

    def process(self, instance):
        invalid = self.get_invalid(instance)
        if invalid:
            raise RuntimeError(f"Meshes found in instance with negative scale: {invalid}")
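Illustrative sketch (not part of the diff): one common way to clear a negative scale flagged by this validator is to apply the scale on the offending objects. Note that this bakes the mirroring into the mesh data, so normals may need recalculating afterwards; the function name is made up.

import bpy

def apply_scale_on_selected():
    # Apply only the scale component of the selected objects' transforms.
    bpy.ops.object.mode_set(mode='OBJECT')
    bpy.ops.object.transform_apply(location=False, rotation=False, scale=True)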