diff --git a/pype/hosts/blender/plugin.py b/pype/hosts/blender/plugin.py
index 07080a86c4..d0b81148c3 100644
--- a/pype/hosts/blender/plugin.py
+++ b/pype/hosts/blender/plugin.py
@@ -7,7 +7,7 @@ import bpy
from avalon import api
-VALID_EXTENSIONS = [".blend"]
+VALID_EXTENSIONS = [".blend", ".json"]
def asset_name(
@@ -29,15 +29,19 @@ def get_unique_number(
c for c in bpy.data.collections
if c.name == 'AVALON_CONTAINERS'
]
- loaded_assets = []
+ containers = []
+ # First, add the children of avalon containers
for c in avalon_containers:
- loaded_assets.extend(c.children)
- collections_names = [
- c.name for c in loaded_assets
+ containers.extend(c.children)
+ # then keep looping to include all the children
+ for c in containers:
+ containers.extend(c.children)
+ container_names = [
+ c.name for c in containers
]
count = 1
name = f"{asset}_{count:0>2}_{subset}_CON"
- while name in collections_names:
+ while name in container_names:
count += 1
name = f"{asset}_{count:0>2}_{subset}_CON"
return f"{count:0>2}"
@@ -59,20 +63,20 @@ def create_blender_context(active: Optional[bpy.types.Object] = None,
if not isinstance(selected, list):
selected = [selected]
+ override_context = bpy.context.copy()
+
for win in bpy.context.window_manager.windows:
for area in win.screen.areas:
if area.type == 'VIEW_3D':
for region in area.regions:
if region.type == 'WINDOW':
- override_context = {
- 'window': win,
- 'screen': win.screen,
- 'area': area,
- 'region': region,
- 'scene': bpy.context.scene,
- 'active_object': active,
- 'selected_objects': selected
- }
+ override_context['window'] = win
+ override_context['screen'] = win.screen
+ override_context['area'] = area
+ override_context['region'] = region
+ override_context['scene'] = bpy.context.scene
+ override_context['active_object'] = active
+ override_context['selected_objects'] = selected
return override_context
raise Exception("Could not create a custom Blender context.")
@@ -175,7 +179,17 @@ class AssetLoader(api.Loader):
# just re-using the collection
assert Path(self.fname).exists(), f"{self.fname} doesn't exist."
- self.process_asset(
+ asset = context["asset"]["name"]
+ subset = context["subset"]["name"]
+ unique_number = get_unique_number(
+ asset, subset
+ )
+ namespace = namespace or f"{asset}_{unique_number}"
+ name = name or asset_name(
+ asset, subset, unique_number
+ )
+
+ nodes = self.process_asset(
context=context,
name=name,
namespace=namespace,
@@ -183,25 +197,24 @@ class AssetLoader(api.Loader):
)
# Only containerise if anything was loaded by the Loader.
- nodes = self[:]
if not nodes:
return None
# Only containerise if it's not already a collection from a .blend file.
- representation = context["representation"]["name"]
- if representation != "blend":
- from avalon.blender.pipeline import containerise
- return containerise(
- name=name,
- namespace=namespace,
- nodes=nodes,
- context=context,
- loader=self.__class__.__name__,
- )
+ # representation = context["representation"]["name"]
+ # if representation != "blend":
+ # from avalon.blender.pipeline import containerise
+ # return containerise(
+ # name=name,
+ # namespace=namespace,
+ # nodes=nodes,
+ # context=context,
+ # loader=self.__class__.__name__,
+ # )
asset = context["asset"]["name"]
subset = context["subset"]["name"]
- instance_name = asset_name(asset, subset, namespace)
+ instance_name = asset_name(asset, subset, unique_number) + '_CON'
return self._get_instance_collection(instance_name, nodes)
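Note on the nested-container walk in `get_unique_number` above: extending a
list while iterating it visits every descendant exactly once (a breadth-first
walk), and it terminates because Blender collection hierarchies are acyclic.
A minimal standalone sketch of the pattern, with a hypothetical Node class
standing in for bpy.types.Collection:

    class Node:  # hypothetical stand-in for bpy.types.Collection
        def __init__(self, name, children=None):
            self.name = name
            self.children = children or []

    root = Node("AVALON_CONTAINERS", [Node("a", [Node("a_01")]), Node("b")])
    containers = list(root.children)
    for c in containers:
        # Appending during iteration queues children for later visits.
        containers.extend(c.children)
    print([c.name for c in containers])  # ['a', 'b', 'a_01']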
diff --git a/pype/hosts/harmony/js/PypeHarmony.js b/pype/hosts/harmony/js/PypeHarmony.js
index 504bcc9ba2..a98dbd52cd 100644
--- a/pype/hosts/harmony/js/PypeHarmony.js
+++ b/pype/hosts/harmony/js/PypeHarmony.js
@@ -1,7 +1,12 @@
+/* global include */
// ***************************************************************************
// * Pype Harmony Host *
// ***************************************************************************
+var LD_OPENHARMONY_PATH = System.getenv('LIB_OPENHARMONY_PATH');
+include(LD_OPENHARMONY_PATH + '/openHarmony.js');
+this.__proto__['$'] = $;
+
/**
* @namespace
diff --git a/pype/hosts/harmony/js/loaders/ImageSequenceLoader.js b/pype/hosts/harmony/js/loaders/ImageSequenceLoader.js
index 7801f65cdd..3e2c853146 100644
--- a/pype/hosts/harmony/js/loaders/ImageSequenceLoader.js
+++ b/pype/hosts/harmony/js/loaders/ImageSequenceLoader.js
@@ -24,13 +24,42 @@ var ImageSequenceLoader = function() {
};
+ImageSequenceLoader.getCurrentGroup = function () {
+ var doc = $.scn;
+ var nodeView = '';
+    for (var i = 0; i < 200; i++) {
+        nodeView = 'View' + (i);
+        if (view.type(nodeView) == 'Node View') {
+            break;
+        }
+        // Reset so the check below fails when no Node View was found.
+        nodeView = '';
+    }
+
+ if (!nodeView) {
+ $.alert('You must have a Node View open!',
+ 'No Node View is currently open!\n' +
+ 'Open a Node View and Try Again.',
+ 'OK!');
+ return;
+ }
+
+    var currentGroup = doc.$node(view.group(nodeView));
+
+ return currentGroup.path;
+};
+
+
/**
* Get unique column name.
* @function
* @param {string} columnPrefix Column name.
* @return {string} Unique column name.
*/
-ImageSequenceLoader.prototype.getUniqueColumnName = function(columnPrefix) {
+ImageSequenceLoader.getUniqueColumnName = function(columnPrefix) {
var suffix = 0;
// finds if unique name for a column
var columnName = columnPrefix;
@@ -63,6 +92,12 @@ ImageSequenceLoader.prototype.getUniqueColumnName = function(columnPrefix) {
* ];
*/
ImageSequenceLoader.prototype.importFiles = function(args) {
+    var PNGTransparencyMode = 0; // Premultiplied with Black
+    var TGATransparencyMode = 0; // Premultiplied with Black
+    var SGITransparencyMode = 0; // Premultiplied with Black
+    var LayeredPSDTransparencyMode = 1; // Straight
+    var FlatPSDTransparencyMode = 2; // Premultiplied with White
+
var doc = $.scn;
var files = args[0];
var asset = args[1];
@@ -78,20 +113,8 @@ ImageSequenceLoader.prototype.importFiles = function(args) {
}
// Get the current group
- var nodeViewWidget = $.app.getWidgetByName('Node View');
- if (!nodeViewWidget) {
- $.alert('You must have a Node View open!', 'No Node View!', 'OK!');
- return;
- }
+ var currentGroup = doc.$node(ImageSequenceLoader.getCurrentGroup());
- nodeViewWidget.setFocus();
- var nodeView = view.currentView();
- var currentGroup = null;
- if (!nodeView) {
- currentGroup = doc.root;
- } else {
- currentGroup = doc.$node(view.group(nodeView));
- }
// Get a unique iterative name for the container read node
var num = 0;
var name = '';
@@ -99,7 +122,6 @@ ImageSequenceLoader.prototype.importFiles = function(args) {
name = asset + '_' + (num++) + '_' + subset;
} while (currentGroup.getNodeByName(name) != null);
-
extension = filename.substr(pos+1).toLowerCase();
if (extension == 'jpeg') {
extension = 'jpg';
@@ -123,7 +145,7 @@ ImageSequenceLoader.prototype.importFiles = function(args) {
return null; // no read to add.
}
- var uniqueColumnName = this.getUniqueColumnName(name);
+ var uniqueColumnName = ImageSequenceLoader.getUniqueColumnName(name);
column.add(uniqueColumnName, 'DRAWING');
column.setElementIdOfDrawing(uniqueColumnName, elemId);
var read = node.add(currentGroup, name, 'READ', 0, 0, 0);
@@ -139,19 +161,19 @@ ImageSequenceLoader.prototype.importFiles = function(args) {
read, frame.current(), 'applyMatteToColor'
);
if (extension === 'png') {
- transparencyModeAttr.setValue(this.PNGTransparencyMode);
+ transparencyModeAttr.setValue(PNGTransparencyMode);
}
if (extension === 'tga') {
- transparencyModeAttr.setValue(this.TGATransparencyMode);
+ transparencyModeAttr.setValue(TGATransparencyMode);
}
if (extension === 'sgi') {
- transparencyModeAttr.setValue(this.SGITransparencyMode);
+ transparencyModeAttr.setValue(SGITransparencyMode);
}
if (extension === 'psd') {
- transparencyModeAttr.setValue(this.FlatPSDTransparencyMode);
+ transparencyModeAttr.setValue(FlatPSDTransparencyMode);
}
if (extension === 'jpg') {
- transparencyModeAttr.setValue(this.LayeredPSDTransparencyMode);
+ transparencyModeAttr.setValue(LayeredPSDTransparencyMode);
}
var drawingFilePath;
diff --git a/pype/hosts/maya/expected_files.py b/pype/hosts/maya/expected_files.py
index 07b3f94aa0..d39e5fa204 100644
--- a/pype/hosts/maya/expected_files.py
+++ b/pype/hosts/maya/expected_files.py
@@ -627,6 +627,11 @@ class ExpectedFilesVray(AExpectedFiles):
if default_ext == "exr (multichannel)" or default_ext == "exr (deep)":
default_ext = "exr"
+ # add beauty as default
+ enabled_aovs.append(
+ (u"beauty", default_ext)
+ )
+
# handle aovs from references
use_ref_aovs = self.render_instance.data.get(
"vrayUseReferencedAovs", False) or False
@@ -653,9 +658,7 @@ class ExpectedFilesVray(AExpectedFiles):
# todo: find how vray set format for AOVs
enabled_aovs.append(
(self._get_vray_aov_name(aov), default_ext))
- enabled_aovs.append(
- (u"beauty", default_ext)
- )
+
return enabled_aovs
def _get_vray_aov_name(self, node):
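The effect of moving the beauty append above the referenced-AOV handling is
purely about ordering: "beauty" now always leads the list. A simplified
sketch of the resulting shape (AOV names illustrative, not from the patch):

    def build_enabled_aovs(default_ext, aov_names):
        enabled_aovs = [(u"beauty", default_ext)]  # beauty is now first
        enabled_aovs.extend((name, default_ext) for name in aov_names)
        return enabled_aovs

    assert build_enabled_aovs("exr", ["diffuse"])[0] == ("beauty", "exr")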
diff --git a/pype/modules/ftrack/events/action_push_frame_values_to_task.py b/pype/modules/ftrack/events/action_push_frame_values_to_task.py
index 3a538b57eb..de61728a62 100644
--- a/pype/modules/ftrack/events/action_push_frame_values_to_task.py
+++ b/pype/modules/ftrack/events/action_push_frame_values_to_task.py
@@ -4,16 +4,46 @@ import ftrack_api
from pype.modules.ftrack.lib import ServerAction
-class PushFrameValuesToTaskAction(ServerAction):
- """Action for testing purpose or as base for new actions."""
+class PushHierValuesToNonHier(ServerAction):
+ """Action push hierarchical custom attribute values to non hierarchical.
- # Ignore event handler by default
- ignore_me = True
+    The hierarchical values are also pushed to the entities' tasks.
- identifier = "admin.push_frame_values_to_task"
+ Action has 3 configurable attributes:
+    - `role_list`: List of user roles that can discover the action.
+    - `interest_attributes`: Keys of the custom attributes whose values will
+    be pushed. Each key must exist both as a hierarchical custom attribute
+    and as an attribute on the specific object type (entity type).
+    - `interest_entity_types`: Entity types whose hierarchical values will
+    be pushed to the object type's custom attributes.
+
+ EXAMPLE:
+ * Before action
+ |_ Project
+ |_ Shot1
+ - hierarchical custom attribute value: `frameStart`: 1001
+ - custom attribute for `Shot`: frameStart: 1
+ |_ Task1
+ - hierarchical custom attribute value: `frameStart`: 10
+ - custom attribute for `Task`: frameStart: 0
+
+ * After action
+ |_ Project
+ |_ Shot1
+ - hierarchical custom attribute value: `frameStart`: 1001
+ - custom attribute for `Shot`: frameStart: 1001
+ |_ Task1
+ - hierarchical custom attribute value: `frameStart`: 1001
+ - custom attribute for `Task`: frameStart: 1001
+ """
+
+ identifier = "admin.push_hier_values_to_non_hier"
label = "Pype Admin"
- variant = "- Push Frame values to Task"
+ variant = "- Push Hierarchical values To Non-Hierarchical"
+ hierarchy_entities_query = (
+ "select id, parent_id from TypedContext where project_id is \"{}\""
+ )
entities_query = (
"select id, name, parent_id, link from TypedContext"
" where project_id is \"{}\" and object_type_id in ({})"
@@ -28,20 +58,17 @@ class PushFrameValuesToTaskAction(ServerAction):
" where entity_id in ({}) and configuration_id in ({})"
)
- pushing_entity_types = {"Shot"}
- hierarchical_custom_attribute_keys = {"frameStart", "frameEnd"}
- custom_attribute_mapping = {
- "frameStart": "fstart",
- "frameEnd": "fend"
- }
- role_list = {"Pypeclub", "Administrator", "Project Manager"}
+ # configurable
+ interest_entity_types = ["Shot"]
+ interest_attributes = ["frameStart", "frameEnd"]
+ role_list = ["Pypeclub", "Administrator", "Project Manager"]
def discover(self, session, entities, event):
""" Validation """
# Check if selection is valid
for ent in event["data"]["selection"]:
# Ignore entities that are not tasks or projects
- if ent["entityType"].lower() == "show":
+ if ent["entityType"].lower() in ("task", "show"):
return True
return False
@@ -61,8 +88,7 @@ class PushFrameValuesToTaskAction(ServerAction):
session.commit()
try:
- project_entity = self.get_project_from_entity(entities[0])
- result = self.propagate_values(session, project_entity, event)
+ result = self.propagate_values(session, entities)
job["status"] = "done"
session.commit()
@@ -85,165 +111,193 @@ class PushFrameValuesToTaskAction(ServerAction):
job["status"] = "failed"
session.commit()
- def task_attributes(self, session):
- task_object_type = session.query(
- "ObjectType where name is \"Task\""
- ).one()
+ def attrs_configurations(self, session, object_ids):
+ attrs = session.query(self.cust_attrs_query.format(
+ self.join_query_keys(self.interest_attributes),
+ self.join_query_keys(object_ids)
+ )).all()
- hier_attr_names = list(
- self.custom_attribute_mapping.keys()
- )
- entity_type_specific_names = list(
- self.custom_attribute_mapping.values()
- )
- joined_keys = self.join_keys(
- hier_attr_names + entity_type_specific_names
- )
- attribute_entities = session.query(
- self.cust_attrs_query.format(joined_keys)
- ).all()
-
- hier_attrs = []
- task_attrs = {}
- for attr in attribute_entities:
- attr_key = attr["key"]
+ output = {}
+        hierarchical = []
+ for attr in attrs:
if attr["is_hierarchical"]:
- if attr_key in hier_attr_names:
- hier_attrs.append(attr)
- elif attr["object_type_id"] == task_object_type["id"]:
- if attr_key in entity_type_specific_names:
- task_attrs[attr_key] = attr["id"]
- return task_attrs, hier_attrs
+                hierarchical.append(attr)
+ continue
+ obj_id = attr["object_type_id"]
+ if obj_id not in output:
+ output[obj_id] = []
+ output[obj_id].append(attr)
+        return output, hierarchical
- def join_keys(self, items):
- return ",".join(["\"{}\"".format(item) for item in items])
+ def propagate_values(self, session, selected_entities):
+ project_entity = self.get_project_from_entity(selected_entities[0])
+ selected_ids = [entity["id"] for entity in selected_entities]
- def propagate_values(self, session, project_entity, event):
self.log.debug("Querying project's entities \"{}\".".format(
project_entity["full_name"]
))
- pushing_entity_types = tuple(
+ interest_entity_types = tuple(
ent_type.lower()
- for ent_type in self.pushing_entity_types
+ for ent_type in self.interest_entity_types
)
- destination_object_types = []
all_object_types = session.query("ObjectType").all()
- for object_type in all_object_types:
- lowered_name = object_type["name"].lower()
- if (
- lowered_name == "task"
- or lowered_name in pushing_entity_types
- ):
- destination_object_types.append(object_type)
+ object_types_by_low_name = {
+ object_type["name"].lower(): object_type
+ for object_type in all_object_types
+ }
- destination_object_type_ids = tuple(
+ task_object_type = object_types_by_low_name["task"]
+ destination_object_types = [task_object_type]
+ for ent_type in interest_entity_types:
+ obj_type = object_types_by_low_name.get(ent_type)
+ if obj_type and obj_type not in destination_object_types:
+ destination_object_types.append(obj_type)
+
+ destination_object_type_ids = set(
obj_type["id"]
for obj_type in destination_object_types
)
+
+ # Find custom attributes definitions
+ attrs_by_obj_id, hier_attrs = self.attrs_configurations(
+ session, destination_object_type_ids
+ )
+ # Filter destination object types if they have any object specific
+ # custom attribute
+ for obj_id in tuple(destination_object_type_ids):
+ if obj_id not in attrs_by_obj_id:
+ destination_object_type_ids.remove(obj_id)
+
+ if not destination_object_type_ids:
+            # TODO: report that there are no matching custom attributes
+ return {
+ "success": True,
+ "message": "Nothing has changed."
+ }
+
entities = session.query(self.entities_query.format(
project_entity["id"],
- self.join_keys(destination_object_type_ids)
+ self.join_query_keys(destination_object_type_ids)
)).all()
- entities_by_id = {
- entity["id"]: entity
- for entity in entities
+ self.log.debug("Preparing whole project hierarchy by ids.")
+ parent_id_by_entity_id = self.all_hierarchy_ids(
+ session, project_entity
+ )
+ filtered_entities = self.filter_entities_by_selection(
+ entities, selected_ids, parent_id_by_entity_id
+ )
+ entities_by_obj_id = {
+ obj_id: []
+ for obj_id in destination_object_type_ids
}
self.log.debug("Filtering Task entities.")
- task_entities_by_parent_id = collections.defaultdict(list)
- non_task_entities = []
+ focus_entity_ids = []
non_task_entity_ids = []
- for entity in entities:
- if entity.entity_type.lower() != "task":
- non_task_entities.append(entity)
- non_task_entity_ids.append(entity["id"])
- continue
+ task_entity_ids = []
+ for entity in filtered_entities:
+ entity_id = entity["id"]
+ focus_entity_ids.append(entity_id)
+ if entity.entity_type.lower() == "task":
+ task_entity_ids.append(entity_id)
+ else:
+ non_task_entity_ids.append(entity_id)
- parent_id = entity["parent_id"]
- if parent_id in entities_by_id:
- task_entities_by_parent_id[parent_id].append(entity)
+ obj_id = entity["object_type_id"]
+ entities_by_obj_id[obj_id].append(entity_id)
- task_attr_id_by_keys, hier_attrs = self.task_attributes(session)
+ if not non_task_entity_ids:
+ return {
+ "success": True,
+ "message": "Nothing to do in your selection."
+ }
- self.log.debug("Getting Custom attribute values from tasks' parents.")
+ self.log.debug("Getting Hierarchical custom attribute values parents.")
hier_values_by_entity_id = self.get_hier_values(
session,
hier_attrs,
- non_task_entity_ids
+ non_task_entity_ids,
+ parent_id_by_entity_id
)
self.log.debug("Setting parents' values to task.")
- task_missing_keys = self.set_task_attr_values(
+ self.set_task_attr_values(
session,
- task_entities_by_parent_id,
+ hier_attrs,
+ task_entity_ids,
hier_values_by_entity_id,
- task_attr_id_by_keys
+ parent_id_by_entity_id
)
self.log.debug("Setting values to entities themselves.")
- missing_keys_by_object_name = self.push_values_to_entities(
+ self.push_values_to_entities(
session,
- non_task_entities,
+ entities_by_obj_id,
+ attrs_by_obj_id,
hier_values_by_entity_id
)
- if task_missing_keys:
- missing_keys_by_object_name["Task"] = task_missing_keys
- if missing_keys_by_object_name:
- self.report(missing_keys_by_object_name, event)
+
return True
- def report(self, missing_keys_by_object_name, event):
- splitter = {"type": "label", "value": "---"}
+ def all_hierarchy_ids(self, session, project_entity):
+ parent_id_by_entity_id = {}
- title = "Push Custom Attribute values report:"
-
- items = []
- items.append({
- "type": "label",
- "value": "# Pushing values was not complete"
- })
- items.append({
- "type": "label",
- "value": (
- "
It was due to missing custom"
- " attribute configurations for specific entity type/s."
- " These configurations are not created automatically.
"
- )
- })
-
- log_message_items = []
- log_message_item_template = (
- "Entity type \"{}\" does not have created Custom Attribute/s: {}"
+ hierarchy_entities = session.query(
+ self.hierarchy_entities_query.format(project_entity["id"])
)
- for object_name, missing_attr_names in (
- missing_keys_by_object_name.items()
- ):
- log_message_items.append(log_message_item_template.format(
- object_name, self.join_keys(missing_attr_names)
- ))
+ for hierarchy_entity in hierarchy_entities:
+ entity_id = hierarchy_entity["id"]
+ parent_id = hierarchy_entity["parent_id"]
+ parent_id_by_entity_id[entity_id] = parent_id
+ return parent_id_by_entity_id
- items.append(splitter)
- items.append({
- "type": "label",
- "value": "## Entity type: {}".format(object_name)
- })
+ def filter_entities_by_selection(
+ self, entities, selected_ids, parent_id_by_entity_id
+ ):
+ filtered_entities = []
+ for entity in entities:
+ entity_id = entity["id"]
+ if entity_id in selected_ids:
+ filtered_entities.append(entity)
+ continue
- items.append({
- "type": "label",
- "value": "{}
".format("
".join(missing_attr_names))
- })
+ parent_id = entity["parent_id"]
+ while True:
+ if parent_id in selected_ids:
+ filtered_entities.append(entity)
+ break
- self.log.warning((
- "Couldn't finish pushing attribute values because"
- " few entity types miss Custom attribute configurations:\n{}"
- ).format("\n".join(log_message_items)))
+ parent_id = parent_id_by_entity_id.get(parent_id)
+ if parent_id is None:
+ break
- self.show_interface(items, title, event)
+ return filtered_entities
- def get_hier_values(self, session, hier_attrs, focus_entity_ids):
- joined_entity_ids = self.join_keys(focus_entity_ids)
- hier_attr_ids = self.join_keys(
+ def get_hier_values(
+ self,
+ session,
+ hier_attrs,
+ focus_entity_ids,
+ parent_id_by_entity_id
+ ):
+ all_ids_with_parents = set()
+ for entity_id in focus_entity_ids:
+ all_ids_with_parents.add(entity_id)
+ _entity_id = entity_id
+ while True:
+ parent_id = parent_id_by_entity_id.get(_entity_id)
+ if (
+ not parent_id
+ or parent_id in all_ids_with_parents
+ ):
+ break
+ all_ids_with_parents.add(parent_id)
+ _entity_id = parent_id
+
+ joined_entity_ids = self.join_query_keys(all_ids_with_parents)
+
+ hier_attr_ids = self.join_query_keys(
tuple(hier_attr["id"] for hier_attr in hier_attrs)
)
hier_attrs_key_by_id = {
@@ -262,120 +316,106 @@ class PushFrameValuesToTaskAction(ServerAction):
[values] = session._call(call_expr)
values_per_entity_id = {}
+ for entity_id in all_ids_with_parents:
+ values_per_entity_id[entity_id] = {}
+ for key in hier_attrs_key_by_id.values():
+ values_per_entity_id[entity_id][key] = None
+
for item in values["data"]:
entity_id = item["entity_id"]
key = hier_attrs_key_by_id[item["configuration_id"]]
- if entity_id not in values_per_entity_id:
- values_per_entity_id[entity_id] = {}
- value = item["value"]
- if value is not None:
- values_per_entity_id[entity_id][key] = value
+ values_per_entity_id[entity_id][key] = item["value"]
output = {}
for entity_id in focus_entity_ids:
- value = values_per_entity_id.get(entity_id)
- if value:
- output[entity_id] = value
+ output[entity_id] = {}
+ for key in hier_attrs_key_by_id.values():
+ value = values_per_entity_id[entity_id][key]
+ tried_ids = set()
+ if value is None:
+ tried_ids.add(entity_id)
+ _entity_id = entity_id
+ while value is None:
+ parent_id = parent_id_by_entity_id.get(_entity_id)
+ if not parent_id:
+ break
+ value = values_per_entity_id[parent_id][key]
+ if value is not None:
+ break
+ _entity_id = parent_id
+ tried_ids.add(parent_id)
+ if value is not None:
+ for ent_id in tried_ids:
+ values_per_entity_id[ent_id][key] = value
+
+ output[entity_id][key] = value
return output
def set_task_attr_values(
self,
session,
- task_entities_by_parent_id,
+ hier_attrs,
+ task_entity_ids,
hier_values_by_entity_id,
- task_attr_id_by_keys
+ parent_id_by_entity_id
):
- missing_keys = set()
- for parent_id, values in hier_values_by_entity_id.items():
- task_entities = task_entities_by_parent_id[parent_id]
- for hier_key, value in values.items():
- key = self.custom_attribute_mapping[hier_key]
- if key not in task_attr_id_by_keys:
- missing_keys.add(key)
- continue
+ hier_attr_id_by_key = {
+ attr["key"]: attr["id"]
+ for attr in hier_attrs
+ }
+ for task_id in task_entity_ids:
+            parent_id = parent_id_by_entity_id.get(task_id)
+ parent_values = hier_values_by_entity_id.get(parent_id)
+ if not parent_values:
+ continue
- for task_entity in task_entities:
- _entity_key = collections.OrderedDict({
- "configuration_id": task_attr_id_by_keys[key],
- "entity_id": task_entity["id"]
- })
+ hier_values_by_entity_id[task_id] = {}
+ for key, value in parent_values.items():
+ hier_values_by_entity_id[task_id][key] = value
+ configuration_id = hier_attr_id_by_key[key]
+ _entity_key = collections.OrderedDict({
+ "configuration_id": configuration_id,
+ "entity_id": task_id
+ })
- session.recorded_operations.push(
- ftrack_api.operation.UpdateEntityOperation(
- "ContextCustomAttributeValue",
- _entity_key,
- "value",
- ftrack_api.symbol.NOT_SET,
- value
- )
+ session.recorded_operations.push(
+ ftrack_api.operation.UpdateEntityOperation(
+ "ContextCustomAttributeValue",
+ _entity_key,
+ "value",
+ ftrack_api.symbol.NOT_SET,
+ value
)
+ )
session.commit()
- return missing_keys
-
def push_values_to_entities(
self,
session,
- non_task_entities,
+ entities_by_obj_id,
+ attrs_by_obj_id,
hier_values_by_entity_id
):
- object_types = session.query(
- "ObjectType where name in ({})".format(
- self.join_keys(self.pushing_entity_types)
- )
- ).all()
- object_type_names_by_id = {
- object_type["id"]: object_type["name"]
- for object_type in object_types
- }
- joined_keys = self.join_keys(
- self.custom_attribute_mapping.values()
- )
- attribute_entities = session.query(
- self.cust_attrs_query.format(joined_keys)
- ).all()
-
- attrs_by_obj_id = {}
- for attr in attribute_entities:
- if attr["is_hierarchical"]:
+ for object_id, entity_ids in entities_by_obj_id.items():
+ attrs = attrs_by_obj_id.get(object_id)
+ if not attrs or not entity_ids:
continue
- obj_id = attr["object_type_id"]
- if obj_id not in object_type_names_by_id:
- continue
-
- if obj_id not in attrs_by_obj_id:
- attrs_by_obj_id[obj_id] = {}
-
- attr_key = attr["key"]
- attrs_by_obj_id[obj_id][attr_key] = attr["id"]
-
- entities_by_obj_id = collections.defaultdict(list)
- for entity in non_task_entities:
- entities_by_obj_id[entity["object_type_id"]].append(entity)
-
- missing_keys_by_object_id = collections.defaultdict(set)
- for obj_type_id, attr_keys in attrs_by_obj_id.items():
- entities = entities_by_obj_id.get(obj_type_id)
- if not entities:
- continue
-
- for entity in entities:
- values = hier_values_by_entity_id.get(entity["id"])
- if not values:
- continue
-
- for hier_key, value in values.items():
- key = self.custom_attribute_mapping[hier_key]
- if key not in attr_keys:
- missing_keys_by_object_id[obj_type_id].add(key)
+ for attr in attrs:
+ for entity_id in entity_ids:
+ value = (
+ hier_values_by_entity_id
+ .get(entity_id, {})
+ .get(attr["key"])
+ )
+ if value is None:
continue
_entity_key = collections.OrderedDict({
- "configuration_id": attr_keys[key],
- "entity_id": entity["id"]
+ "configuration_id": attr["id"],
+ "entity_id": entity_id
})
session.recorded_operations.push(
@@ -389,13 +429,6 @@ class PushFrameValuesToTaskAction(ServerAction):
)
session.commit()
- missing_keys_by_object_name = {}
- for obj_id, missing_keys in missing_keys_by_object_id.items():
- obj_name = object_type_names_by_id[obj_id]
- missing_keys_by_object_name[obj_name] = missing_keys
-
- return missing_keys_by_object_name
-
def register(session, plugins_presets={}):
- PushFrameValuesToTaskAction(session, plugins_presets).register()
+ PushHierValuesToNonHier(session, plugins_presets).register()
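The core of `get_hier_values` above is an ancestor walk: a hierarchical
value that is unset on an entity falls back to the closest ancestor that has
one. A minimal sketch of that resolution, with plain dicts standing in for
the ftrack query results:

    def resolve_value(entity_id, key, values, parents):
        current = entity_id
        while current is not None:
            value = values.get(current, {}).get(key)
            if value is not None:
                return value
            current = parents.get(current)
        return None

    parents = {"task1": "shot1", "shot1": None}
    values = {"shot1": {"frameStart": 1001}, "task1": {}}
    print(resolve_value("task1", "frameStart", values, parents))  # 1001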
diff --git a/pype/modules/ftrack/lib/ftrack_base_handler.py b/pype/modules/ftrack/lib/ftrack_base_handler.py
index e928f2fb88..30efe0c99b 100644
--- a/pype/modules/ftrack/lib/ftrack_base_handler.py
+++ b/pype/modules/ftrack/lib/ftrack_base_handler.py
@@ -37,6 +37,11 @@ class BaseHandler(object):
preactions = []
role_list = []
+ @staticmethod
+ def join_query_keys(keys):
+ """Helper to join keys to query."""
+ return ",".join(["\"{}\"".format(key) for key in keys])
+
def __init__(self, session, plugins_presets=None):
'''Expects a ftrack_api.Session instance'''
self.log = Logger().get_logger(self.__class__.__name__)
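Usage of the new helper, for reference: it quotes each key and joins them
with commas, ready to be placed inside an `in (...)` clause of an ftrack
query:

    keys = ["frameStart", "frameEnd"]
    print(",".join(["\"{}\"".format(key) for key in keys]))
    # -> "frameStart","frameEnd"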
diff --git a/pype/plugins/blender/create/create_animation.py b/pype/plugins/blender/create/create_animation.py
index de74f9a358..acfd6ac1f3 100644
--- a/pype/plugins/blender/create/create_animation.py
+++ b/pype/plugins/blender/create/create_animation.py
@@ -2,12 +2,11 @@
import bpy
-from avalon import api
-from avalon.blender import Creator, lib
+from avalon import api, blender
import pype.hosts.blender.plugin
-class CreateAnimation(Creator):
+class CreateAnimation(blender.Creator):
"""Animation output for character rigs"""
name = "animationMain"
@@ -16,37 +15,16 @@ class CreateAnimation(Creator):
icon = "male"
def process(self):
-
asset = self.data["asset"]
subset = self.data["subset"]
name = pype.hosts.blender.plugin.asset_name(asset, subset)
collection = bpy.data.collections.new(name=name)
bpy.context.scene.collection.children.link(collection)
self.data['task'] = api.Session.get('AVALON_TASK')
- lib.imprint(collection, self.data)
-
- # Add the rig object and all the children meshes to
- # a set and link them all at the end to avoid duplicates.
- # Blender crashes if trying to link an object that is already linked.
- # This links automatically the children meshes if they were not
- # selected, and doesn't link them twice if they, insted,
- # were manually selected by the user.
- objects_to_link = set()
+ blender.lib.imprint(collection, self.data)
if (self.options or {}).get("useSelection"):
-
- for obj in lib.get_selection():
-
- objects_to_link.add(obj)
-
- if obj.type == 'ARMATURE':
-
- for subobj in obj.children:
-
- objects_to_link.add(subobj)
-
- for obj in objects_to_link:
-
- collection.objects.link(obj)
+ for obj in blender.lib.get_selection():
+ collection.objects.link(obj)
return collection
diff --git a/pype/plugins/blender/create/create_setdress.py b/pype/plugins/blender/create/create_setdress.py
new file mode 100644
index 0000000000..06acf716e5
--- /dev/null
+++ b/pype/plugins/blender/create/create_setdress.py
@@ -0,0 +1,24 @@
+import bpy
+
+from avalon import api, blender
+import pype.hosts.blender.plugin
+
+class CreateSetDress(blender.Creator):
+ """A grouped package of loaded content"""
+
+ name = "setdressMain"
+ label = "Set Dress"
+ family = "setdress"
+ icon = "cubes"
+ defaults = ["Main", "Anim"]
+
+ def process(self):
+ asset = self.data["asset"]
+ subset = self.data["subset"]
+ name = pype.hosts.blender.plugin.asset_name(asset, subset)
+ collection = bpy.data.collections.new(name=name)
+ bpy.context.scene.collection.children.link(collection)
+ self.data['task'] = api.Session.get('AVALON_TASK')
+ blender.lib.imprint(collection, self.data)
+
+ return collection
diff --git a/pype/plugins/blender/load/load_layout.py b/pype/plugins/blender/load/load_layout.py
index 2c8948dd48..c8ef73aea3 100644
--- a/pype/plugins/blender/load/load_layout.py
+++ b/pype/plugins/blender/load/load_layout.py
@@ -1,11 +1,15 @@
"""Load a layout in Blender."""
+import json
+import math
+
import logging
from pathlib import Path
from pprint import pformat
from typing import Dict, List, Optional
-from avalon import api, blender
+from avalon import api, blender, pipeline
import bpy
import pype.hosts.blender.plugin as plugin
@@ -150,8 +154,9 @@ class BlendLayoutLoader(plugin.AssetLoader):
# Save the list of objects in the metadata container
container_metadata["objects"] = obj_container.all_objects
- nodes = list(container.objects)
- nodes.append(container)
+ # nodes = list(container.objects)
+ # nodes.append(container)
+ nodes = [container]
self[:] = nodes
return nodes
@@ -192,7 +197,7 @@ class BlendLayoutLoader(plugin.AssetLoader):
assert libpath.is_file(), (
f"The file doesn't exist: {libpath}"
)
- assert extension in pype.hosts.blender.plugin.VALID_EXTENSIONS, (
+ assert extension in plugin.VALID_EXTENSIONS, (
f"Unsupported file: {libpath}"
)
@@ -271,3 +276,378 @@ class BlendLayoutLoader(plugin.AssetLoader):
bpy.data.collections.remove(collection)
return True
+
+
+class UnrealLayoutLoader(plugin.AssetLoader):
+ """Load layout published from Unreal."""
+
+ families = ["layout"]
+ representations = ["json"]
+
+ label = "Link Layout"
+ icon = "code-fork"
+ color = "orange"
+
+ def _remove_objects(self, objects):
+ for obj in list(objects):
+ if obj.type == 'ARMATURE':
+ bpy.data.armatures.remove(obj.data)
+ elif obj.type == 'MESH':
+ bpy.data.meshes.remove(obj.data)
+ elif obj.type == 'CAMERA':
+ bpy.data.cameras.remove(obj.data)
+ elif obj.type == 'CURVE':
+ bpy.data.curves.remove(obj.data)
+ else:
+ self.log.error(
+ f"Object {obj.name} of type {obj.type} not recognized.")
+
+    def _remove_collections(self, collection):
+        # Iterate over a copy: removing a child mutates collection.children.
+        for child in list(collection.children):
+            self._remove_collections(child)
+            bpy.data.collections.remove(child)
+
+ def _remove(self, layout_container):
+ layout_container_metadata = layout_container.get(
+ blender.pipeline.AVALON_PROPERTY)
+
+ if layout_container.children:
+ for child in layout_container.children:
+ child_container = child.get(blender.pipeline.AVALON_PROPERTY)
+ child_container['objectName'] = child.name
+ api.remove(child_container)
+
+ for c in bpy.data.collections:
+ metadata = c.get('avalon')
+            if metadata and metadata.get('id') == 'pyblish.avalon.instance':
+                dependencies = metadata.get('dependencies')
+                container_repre = layout_container_metadata.get(
+                    'representation')
+                if dependencies == container_repre:
+                    for child in list(c.children):
+ bpy.data.collections.remove(child)
+ bpy.data.collections.remove(c)
+ break
+
+ def _get_loader(self, loaders, family):
+ name = ""
+ if family == 'rig':
+ name = "BlendRigLoader"
+ elif family == 'model':
+ name = "BlendModelLoader"
+
+ if name == "":
+ return None
+
+ for loader in loaders:
+ if loader.__name__ == name:
+ return loader
+
+ return None
+
+ def set_transform(self, obj, transform):
+ location = transform.get('translation')
+ rotation = transform.get('rotation')
+ scale = transform.get('scale')
+
+ # Y position is inverted in sign because Unreal and Blender have the
+ # Y axis mirrored
+ obj.location = (
+ location.get('x'),
+ -location.get('y'),
+ location.get('z')
+ )
+ obj.rotation_euler = (
+ rotation.get('x'),
+ -rotation.get('y'),
+ -rotation.get('z')
+ )
+ obj.scale = (
+ scale.get('x'),
+ scale.get('y'),
+ scale.get('z')
+ )
+
+ def _process(
+ self, libpath, layout_container, container_name, representation,
+ actions, parent
+ ):
+ with open(libpath, "r") as fp:
+ data = json.load(fp)
+
+ scene = bpy.context.scene
+ layout_collection = bpy.data.collections.new(container_name)
+ scene.collection.children.link(layout_collection)
+
+ all_loaders = api.discover(api.Loader)
+
+ avalon_container = bpy.data.collections.get(
+ blender.pipeline.AVALON_CONTAINERS)
+
+ for element in data:
+ reference = element.get('reference')
+ family = element.get('family')
+
+ loaders = api.loaders_from_representation(all_loaders, reference)
+ loader = self._get_loader(loaders, family)
+
+ if not loader:
+ continue
+
+ instance_name = element.get('instance_name')
+
+ element_container = api.load(
+ loader,
+ reference,
+ namespace=instance_name
+ )
+
+ if not element_container:
+ continue
+
+ avalon_container.children.unlink(element_container)
+ layout_container.children.link(element_container)
+
+ element_metadata = element_container.get(
+ blender.pipeline.AVALON_PROPERTY)
+
+ # Unlink the object's collection from the scene collection and
+ # link it in the layout collection
+ element_collection = element_metadata.get('obj_container')
+ scene.collection.children.unlink(element_collection)
+ layout_collection.children.link(element_collection)
+
+ objects = element_metadata.get('objects')
+ element_metadata['instance_name'] = instance_name
+
+ objects_to_transform = []
+
+ if family == 'rig':
+ for o in objects:
+ if o.type == 'ARMATURE':
+ objects_to_transform.append(o)
+ # Create an animation subset for each rig
+ o.select_set(True)
+ asset = api.Session["AVALON_ASSET"]
+ c = api.create(
+ name="animation_" + element_collection.name,
+ asset=asset,
+ family="animation",
+ options={"useSelection": True},
+ data={"dependencies": representation})
+ scene.collection.children.unlink(c)
+ parent.children.link(c)
+ o.select_set(False)
+ break
+ elif family == 'model':
+ objects_to_transform = objects
+
+ for o in objects_to_transform:
+ self.set_transform(o, element.get('transform'))
+
+ if actions:
+ if o.type == 'ARMATURE':
+ action = actions.get(instance_name, None)
+
+ if action:
+ if o.animation_data is None:
+ o.animation_data_create()
+ o.animation_data.action = action
+
+ return layout_collection
+
+ def process_asset(self,
+ context: dict,
+ name: str,
+ namespace: Optional[str] = None,
+ options: Optional[Dict] = None):
+ """
+ Arguments:
+ name: Use pre-defined name
+ namespace: Use pre-defined namespace
+ context: Full parenthood of representation to load
+ options: Additional settings dictionary
+ """
+ libpath = self.fname
+ asset = context["asset"]["name"]
+ subset = context["subset"]["name"]
+ lib_container = plugin.asset_name(
+ asset, subset
+ )
+ unique_number = plugin.get_unique_number(
+ asset, subset
+ )
+ namespace = namespace or f"{asset}_{unique_number}"
+ container_name = plugin.asset_name(
+ asset, subset, unique_number
+ )
+
+ layout_container = bpy.data.collections.new(container_name)
+ blender.pipeline.containerise_existing(
+ layout_container,
+ name,
+ namespace,
+ context,
+ self.__class__.__name__,
+ )
+
+ container_metadata = layout_container.get(
+ blender.pipeline.AVALON_PROPERTY)
+
+ container_metadata["libpath"] = libpath
+ container_metadata["lib_container"] = lib_container
+
+ # Create a setdress subset to contain all the animation for all
+ # the rigs in the layout
+ parent = api.create(
+ name="animation",
+ asset=api.Session["AVALON_ASSET"],
+ family="setdress",
+ options={"useSelection": True},
+ data={"dependencies": str(context["representation"]["_id"])})
+
+ layout_collection = self._process(
+ libpath, layout_container, container_name,
+ str(context["representation"]["_id"]), None, parent)
+
+ container_metadata["obj_container"] = layout_collection
+
+ # Save the list of objects in the metadata container
+ container_metadata["objects"] = layout_collection.all_objects
+
+ nodes = [layout_container]
+ self[:] = nodes
+ return nodes
+
+ def update(self, container: Dict, representation: Dict):
+ """Update the loaded asset.
+
+ This will remove all objects of the current collection, load the new
+ ones and add them to the collection.
+ If the objects of the collection are used in another collection they
+ will not be removed, only unlinked. Normally this should not be the
+ case though.
+ """
+ layout_container = bpy.data.collections.get(
+ container["objectName"]
+ )
+ if not layout_container:
+ return False
+
+ libpath = Path(api.get_representation_path(representation))
+ extension = libpath.suffix.lower()
+
+ self.log.info(
+ "Container: %s\nRepresentation: %s",
+ pformat(container, indent=2),
+ pformat(representation, indent=2),
+ )
+
+ assert layout_container, (
+ f"The asset is not loaded: {container['objectName']}"
+ )
+ assert libpath, (
+ "No existing library file found for {container['objectName']}"
+ )
+ assert libpath.is_file(), (
+ f"The file doesn't exist: {libpath}"
+ )
+ assert extension in plugin.VALID_EXTENSIONS, (
+ f"Unsupported file: {libpath}"
+ )
+
+ layout_container_metadata = layout_container.get(
+ blender.pipeline.AVALON_PROPERTY)
+ collection_libpath = layout_container_metadata["libpath"]
+ lib_container = layout_container_metadata["lib_container"]
+ obj_container = plugin.get_local_collection_with_name(
+ layout_container_metadata["obj_container"].name
+ )
+ objects = obj_container.all_objects
+
+ container_name = obj_container.name
+
+ normalized_collection_libpath = (
+ str(Path(bpy.path.abspath(collection_libpath)).resolve())
+ )
+ normalized_libpath = (
+ str(Path(bpy.path.abspath(str(libpath))).resolve())
+ )
+ self.log.debug(
+ "normalized_collection_libpath:\n %s\nnormalized_libpath:\n %s",
+ normalized_collection_libpath,
+ normalized_libpath,
+ )
+ if normalized_collection_libpath == normalized_libpath:
+ self.log.info("Library already loaded, not updating...")
+ return
+
+ actions = {}
+
+ for obj in objects:
+ if obj.type == 'ARMATURE':
+ if obj.animation_data and obj.animation_data.action:
+ obj_cont_name = obj.get(
+ blender.pipeline.AVALON_PROPERTY).get('container_name')
+ obj_cont = plugin.get_local_collection_with_name(
+ obj_cont_name)
+ element_metadata = obj_cont.get(
+ blender.pipeline.AVALON_PROPERTY)
+ instance_name = element_metadata.get('instance_name')
+ actions[instance_name] = obj.animation_data.action
+
+ self._remove(layout_container)
+
+ bpy.data.collections.remove(obj_container)
+
+ parent = api.create(
+ name="animation",
+ asset=api.Session["AVALON_ASSET"],
+ family="setdress",
+ options={"useSelection": True},
+ data={"dependencies": str(representation["_id"])})
+
+ layout_collection = self._process(
+ libpath, layout_container, container_name,
+ str(representation["_id"]), actions, parent)
+
+ layout_container_metadata["obj_container"] = layout_collection
+ layout_container_metadata["objects"] = layout_collection.all_objects
+ layout_container_metadata["libpath"] = str(libpath)
+ layout_container_metadata["representation"] = str(
+ representation["_id"])
+
+ def remove(self, container: Dict) -> bool:
+ """Remove an existing container from a Blender scene.
+
+ Arguments:
+ container (avalon-core:container-1.0): Container to remove,
+ from `host.ls()`.
+
+ Returns:
+ bool: Whether the container was deleted.
+ """
+ layout_container = bpy.data.collections.get(
+ container["objectName"]
+ )
+ if not layout_container:
+ return False
+
+ layout_container_metadata = layout_container.get(
+ blender.pipeline.AVALON_PROPERTY)
+ obj_container = plugin.get_local_collection_with_name(
+ layout_container_metadata["obj_container"].name
+ )
+
+ self._remove(layout_container)
+
+ bpy.data.collections.remove(obj_container)
+ bpy.data.collections.remove(layout_container)
+
+ return True
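For reference, the Unreal-to-Blender mapping applied by `set_transform`: the
Y translation and the Y/Z rotations flip sign because Unreal uses a
left-handed coordinate system while Blender's is right-handed. A minimal
sketch with an illustrative element from the layout JSON (assuming the
rotation values already arrive in radians, since the loader applies them
directly):

    transform = {
        "translation": {"x": 1.0, "y": 2.0, "z": 0.5},
        "rotation": {"x": 0.0, "y": 0.3, "z": 1.2},
        "scale": {"x": 1.0, "y": 1.0, "z": 1.0},
    }
    location = (
        transform["translation"]["x"],
        -transform["translation"]["y"],
        transform["translation"]["z"],
    )
    rotation_euler = (
        transform["rotation"]["x"],
        -transform["rotation"]["y"],
        -transform["rotation"]["z"],
    )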
diff --git a/pype/plugins/blender/load/load_rig.py b/pype/plugins/blender/load/load_rig.py
index 7b60b20064..12017fdbb2 100644
--- a/pype/plugins/blender/load/load_rig.py
+++ b/pype/plugins/blender/load/load_rig.py
@@ -30,14 +30,28 @@ class BlendRigLoader(plugin.AssetLoader):
bpy.data.armatures.remove(obj.data)
elif obj.type == 'MESH':
bpy.data.meshes.remove(obj.data)
+ elif obj.type == 'CURVE':
+ bpy.data.curves.remove(obj.data)
for child in obj_container.children:
bpy.data.collections.remove(child)
bpy.data.collections.remove(obj_container)
+ def make_local_and_metadata(self, obj, collection_name):
+ local_obj = plugin.prepare_data(obj, collection_name)
+ plugin.prepare_data(local_obj.data, collection_name)
+
+ if not local_obj.get(blender.pipeline.AVALON_PROPERTY):
+ local_obj[blender.pipeline.AVALON_PROPERTY] = dict()
+
+ avalon_info = local_obj[blender.pipeline.AVALON_PROPERTY]
+ avalon_info.update({"container_name": collection_name + '_CON'})
+
+ return local_obj
+
def _process(
- self, libpath, lib_container, container_name,
+ self, libpath, lib_container, collection_name,
action, parent_collection
):
relative = bpy.context.preferences.filepaths.use_relative_paths
@@ -54,34 +68,53 @@ class BlendRigLoader(plugin.AssetLoader):
parent.children.link(bpy.data.collections[lib_container])
rig_container = parent.children[lib_container].make_local()
- rig_container.name = container_name
+ rig_container.name = collection_name
- meshes = []
+ objects = []
armatures = [
obj for obj in rig_container.objects
if obj.type == 'ARMATURE'
]
for child in rig_container.children:
- local_child = plugin.prepare_data(child, container_name)
- meshes.extend(local_child.objects)
+ local_child = plugin.prepare_data(child, collection_name)
+ objects.extend(local_child.objects)
- # Link meshes first, then armatures.
+ # for obj in bpy.data.objects:
+ # obj.select_set(False)
+
+ constraints = []
+
+ for armature in armatures:
+ for bone in armature.pose.bones:
+ for constraint in bone.constraints:
+ if hasattr(constraint, 'target'):
+ constraints.append(constraint)
+
+ # Link armatures after other objects.
# The armature is unparented for all the non-local meshes,
# when it is made local.
- for obj in meshes + armatures:
- local_obj = plugin.prepare_data(obj, container_name)
- plugin.prepare_data(local_obj.data, container_name)
-
- if not local_obj.get(blender.pipeline.AVALON_PROPERTY):
- local_obj[blender.pipeline.AVALON_PROPERTY] = dict()
-
- avalon_info = local_obj[blender.pipeline.AVALON_PROPERTY]
- avalon_info.update({"container_name": container_name})
-
- if local_obj.type == 'ARMATURE' and action is not None:
- local_obj.animation_data.action = action
-
+ for obj in objects:
+ local_obj = self.make_local_and_metadata(obj, collection_name)
+
+ if obj != local_obj:
+ for constraint in constraints:
+ if constraint.target == obj:
+ constraint.target = local_obj
+
+ for armature in armatures:
+ local_obj = self.make_local_and_metadata(armature, collection_name)
+
+ if action is not None:
+ local_obj.animation_data.action = action
+
+            # Re-link the drivers to the local object
+ if local_obj.data.animation_data:
+ for d in local_obj.data.animation_data.drivers:
+ for v in d.driver.variables:
+ for t in v.targets:
+ t.id = local_obj
+
rig_container.pop(blender.pipeline.AVALON_PROPERTY)
bpy.ops.object.select_all(action='DESELECT')
@@ -99,7 +132,6 @@ class BlendRigLoader(plugin.AssetLoader):
context: Full parenthood of representation to load
options: Additional settings dictionary
"""
-
libpath = self.fname
asset = context["asset"]["name"]
subset = context["subset"]["name"]
@@ -110,12 +142,11 @@ class BlendRigLoader(plugin.AssetLoader):
asset, subset
)
namespace = namespace or f"{asset}_{unique_number}"
- container_name = plugin.asset_name(
+ collection_name = plugin.asset_name(
asset, subset, unique_number
)
- container = bpy.data.collections.new(lib_container)
- container.name = container_name
+ container = bpy.data.collections.new(collection_name)
blender.pipeline.containerise_existing(
container,
name,
@@ -131,10 +162,9 @@ class BlendRigLoader(plugin.AssetLoader):
container_metadata["lib_container"] = lib_container
obj_container = self._process(
- libpath, lib_container, container_name, None, None)
+ libpath, lib_container, collection_name, None, None)
container_metadata["obj_container"] = obj_container
-
# Save the list of objects in the metadata container
container_metadata["objects"] = obj_container.all_objects
@@ -214,9 +244,9 @@ class BlendRigLoader(plugin.AssetLoader):
armatures = [obj for obj in objects if obj.type == 'ARMATURE']
assert(len(armatures) == 1)
- action = None
- if armatures[0].animation_data and armatures[0].animation_data.action:
- action = armatures[0].animation_data.action
+ action = None
+ if armatures[0].animation_data and armatures[0].animation_data.action:
+ action = armatures[0].animation_data.action
parent = plugin.get_parent_collection(obj_container)
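The driver re-link added above matters because making an armature local
leaves its data-block drivers pointing at the old linked ID. A condensed
sketch of the fix-up, mirroring the patch (names hypothetical):

    def retarget_drivers(armature_obj, local_obj):
        data = armature_obj.data
        if not (data.animation_data and data.animation_data.drivers):
            return
        for fcurve in data.animation_data.drivers:
            for variable in fcurve.driver.variables:
                for target in variable.targets:
                    # Point every driver variable at the local copy.
                    target.id = local_obj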
diff --git a/pype/plugins/blender/publish/extract_animation_collection.py b/pype/plugins/blender/publish/extract_animation_collection.py
new file mode 100644
index 0000000000..e5e0877280
--- /dev/null
+++ b/pype/plugins/blender/publish/extract_animation_collection.py
@@ -0,0 +1,56 @@
+import os
+import json
+
+import pype.api
+import pyblish.api
+
+import bpy
+
+class ExtractSetDress(pype.api.Extractor):
+ """Extract setdress."""
+
+ label = "Extract SetDress"
+ hosts = ["blender"]
+ families = ["setdress"]
+ optional = True
+ order = pyblish.api.ExtractorOrder + 0.1
+
+ def process(self, instance):
+ stagingdir = self.staging_dir(instance)
+
+ json_data = []
+
+ for i in instance.context:
+ collection = i.data.get('name')
+ container = None
+ for obj in bpy.data.collections[collection].objects:
+ if obj.type == 'ARMATURE':
+ container_name = obj.get('avalon').get('container_name')
+ container = bpy.data.collections[container_name]
+ if container:
+ json_dict = {}
+ json_dict['subset'] = i.data.get('subset')
+ json_dict['container'] = container.name
+ json_dict['instance_name'] = container.get('avalon').get('instance_name')
+ json_data.append(json_dict)
+
+ if "representations" not in instance.data:
+ instance.data["representations"] = []
+
+ json_filename = f"{instance.name}.json"
+ json_path = os.path.join(stagingdir, json_filename)
+
+ with open(json_path, "w+") as file:
+ json.dump(json_data, fp=file, indent=2)
+
+ json_representation = {
+ 'name': 'json',
+ 'ext': 'json',
+ 'files': json_filename,
+ "stagingDir": stagingdir,
+ }
+ instance.data["representations"].append(json_representation)
+
+ self.log.info("Extracted instance '{}' to: {}".format(
+ instance.name, json_representation))
+
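For context, the staged <instance>.json ends up as a list with one entry per
rig container found in the context (values illustrative):

    [
        {
            "subset": "animationMain",
            "container": "character01_01_rigMain_CON",
            "instance_name": "character01_01"
        }
    ]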
diff --git a/pype/plugins/blender/publish/extract_fbx_animation.py b/pype/plugins/blender/publish/extract_fbx_animation.py
index d51c641e9c..9c421560f0 100644
--- a/pype/plugins/blender/publish/extract_fbx_animation.py
+++ b/pype/plugins/blender/publish/extract_fbx_animation.py
@@ -17,14 +17,10 @@ class ExtractAnimationFBX(pype.api.Extractor):
def process(self, instance):
# Define extract output file path
-
stagingdir = self.staging_dir(instance)
- filename = f"{instance.name}.fbx"
- filepath = os.path.join(stagingdir, filename)
context = bpy.context
scene = context.scene
- view_layer = context.view_layer
# Perform extraction
self.log.info("Performing extraction..")
@@ -35,22 +31,6 @@ class ExtractAnimationFBX(pype.api.Extractor):
assert len(collections) == 1, "There should be one and only one " \
"collection collected for this asset"
- old_active_layer_collection = view_layer.active_layer_collection
-
- layers = view_layer.layer_collection.children
-
- # Get the layer collection from the collection we need to export.
- # This is needed because in Blender you can only set the active
- # collection with the layer collection, and there is no way to get
- # the layer collection from the collection
- # (but there is the vice versa).
- layer_collections = [
- layer for layer in layers if layer.collection == collections[0]]
-
- assert len(layer_collections) == 1
-
- view_layer.active_layer_collection = layer_collections[0]
-
old_scale = scene.unit_settings.scale_length
# We set the scale of the scene for the export
@@ -59,6 +39,15 @@ class ExtractAnimationFBX(pype.api.Extractor):
armatures = [
obj for obj in collections[0].objects if obj.type == 'ARMATURE']
+        assert len(armatures) == 1, "There should be one and only one " \
+ "armature collected for this asset"
+
+ armature = armatures[0]
+
+ armature_name = armature.name
+ original_name = armature_name.split(':')[0]
+ armature.name = original_name
+
object_action_pairs = []
original_actions = []
@@ -66,23 +55,23 @@ class ExtractAnimationFBX(pype.api.Extractor):
ending_frames = []
# For each armature, we make a copy of the current action
- for obj in armatures:
+ curr_action = None
+ copy_action = None
- curr_action = None
- copy_action = None
+ if armature.animation_data and armature.animation_data.action:
+ curr_action = armature.animation_data.action
+ copy_action = curr_action.copy()
- if obj.animation_data and obj.animation_data.action:
+ curr_frame_range = curr_action.frame_range
- curr_action = obj.animation_data.action
- copy_action = curr_action.copy()
+ starting_frames.append(curr_frame_range[0])
+ ending_frames.append(curr_frame_range[1])
+ else:
+ self.log.info("Object have no animation.")
+ return
- curr_frame_range = curr_action.frame_range
-
- starting_frames.append(curr_frame_range[0])
- ending_frames.append(curr_frame_range[1])
-
- object_action_pairs.append((obj, copy_action))
- original_actions.append(curr_action)
+ object_action_pairs.append((armature, copy_action))
+ original_actions.append(curr_action)
# We compute the starting and ending frames
max_frame = min(starting_frames)
@@ -96,44 +85,52 @@ class ExtractAnimationFBX(pype.api.Extractor):
do_clean=False
)
- # We export the fbx
+ for obj in bpy.data.objects:
+ obj.select_set(False)
+
+ armature.select_set(True)
+ fbx_filename = f"{instance.name}_{armature.name}.fbx"
+ filepath = os.path.join(stagingdir, fbx_filename)
+
+ override = bpy.context.copy()
+ override['selected_objects'] = [armature]
bpy.ops.export_scene.fbx(
+ override,
filepath=filepath,
- use_active_collection=True,
+ use_selection=True,
bake_anim_use_nla_strips=False,
bake_anim_use_all_actions=False,
- add_leaf_bones=False
+ add_leaf_bones=False,
+ armature_nodetype='ROOT',
+ object_types={'ARMATURE'}
)
-
- view_layer.active_layer_collection = old_active_layer_collection
+ armature.name = armature_name
+ armature.select_set(False)
scene.unit_settings.scale_length = old_scale
# We delete the baked action and set the original one back
for i in range(0, len(object_action_pairs)):
-
pair = object_action_pairs[i]
action = original_actions[i]
if action:
-
pair[0].animation_data.action = action
if pair[1]:
-
pair[1].user_clear()
bpy.data.actions.remove(pair[1])
if "representations" not in instance.data:
instance.data["representations"] = []
- representation = {
+ fbx_representation = {
'name': 'fbx',
'ext': 'fbx',
- 'files': filename,
+ 'files': fbx_filename,
"stagingDir": stagingdir,
}
- instance.data["representations"].append(representation)
+ instance.data["representations"].append(fbx_representation)
- self.log.info("Extracted instance '%s' to: %s",
- instance.name, representation)
+ self.log.info("Extracted instance '{}' to: {}".format(
+ instance.name, fbx_representation))
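The export now relies on a context override plus `use_selection` instead of
the active layer collection. A minimal sketch of that export call (Blender
2.8x positional-override style; the object name is assumed):

    import bpy

    override = bpy.context.copy()
    override['selected_objects'] = [bpy.data.objects['Armature']]
    bpy.ops.export_scene.fbx(
        override,
        filepath='/tmp/out.fbx',
        use_selection=True,          # export only the overridden selection
        object_types={'ARMATURE'},   # skip meshes, lights, cameras, ...
    )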
diff --git a/pype/plugins/blender/publish/integrate_animation.py b/pype/plugins/blender/publish/integrate_animation.py
new file mode 100644
index 0000000000..90e94a4aac
--- /dev/null
+++ b/pype/plugins/blender/publish/integrate_animation.py
@@ -0,0 +1,49 @@
+import json
+
+from avalon import io
+import pyblish.api
+
+
+class IntegrateAnimation(pyblish.api.InstancePlugin):
+ """Generate a JSON file for animation."""
+
+ label = "Integrate Animation"
+ order = pyblish.api.IntegratorOrder + 0.1
+ optional = True
+ hosts = ["blender"]
+ families = ["setdress"]
+
+ def process(self, instance):
+ self.log.info("Integrate Animation")
+
+ representation = instance.data.get('representations')[0]
+ json_path = representation.get('publishedFiles')[0]
+
+ with open(json_path, "r") as file:
+ data = json.load(file)
+
+ # Update the json file for the setdress to add the published
+ # representations of the animations
+ for json_dict in data:
+ i = None
+ for elem in instance.context:
+ if elem.data.get('subset') == json_dict['subset']:
+ i = elem
+ break
+ if not i:
+ continue
+ rep = None
+ pub_repr = i.data.get('published_representations')
+ for elem in pub_repr:
+ if pub_repr.get(elem).get('representation').get('name') == "fbx":
+ rep = pub_repr.get(elem)
+ break
+ if not rep:
+ continue
+ obj_id = rep.get('representation').get('_id')
+
+ if obj_id:
+ json_dict['_id'] = str(obj_id)
+
+ with open(json_path, "w") as file:
+ json.dump(data, fp=file, indent=2)
diff --git a/pype/plugins/global/publish/extract_scanline_exr.py b/pype/plugins/global/publish/extract_scanline_exr.py
index ca62476ab2..9c3073d61d 100644
--- a/pype/plugins/global/publish/extract_scanline_exr.py
+++ b/pype/plugins/global/publish/extract_scanline_exr.py
@@ -46,6 +46,10 @@ class ExtractScanlineExr(pyblish.api.InstancePlugin):
stagingdir = os.path.normpath(repre.get("stagingDir"))
oiio_tool_path = os.getenv("PYPE_OIIO_PATH", "")
+ if not os.path.exists(oiio_tool_path):
+ self.log.error(
+ "OIIO tool not found in {}".format(oiio_tool_path))
+ raise AssertionError("OIIO tool not found")
for file in input_files:
@@ -53,14 +57,11 @@ class ExtractScanlineExr(pyblish.api.InstancePlugin):
temp_name = os.path.join(stagingdir, "__{}".format(file))
# move original render to temp location
shutil.move(original_name, temp_name)
- oiio_cmd = []
- oiio_cmd.append(oiio_tool_path)
- oiio_cmd.append(
- os.path.join(stagingdir, temp_name)
- )
- oiio_cmd.append("--scanline")
- oiio_cmd.append("-o")
- oiio_cmd.append(os.path.join(stagingdir, original_name))
+ oiio_cmd = [
+ oiio_tool_path,
+ os.path.join(stagingdir, temp_name), "--scanline", "-o",
+ os.path.join(stagingdir, original_name)
+ ]
subprocess_exr = " ".join(oiio_cmd)
self.log.info(f"running: {subprocess_exr}")
diff --git a/pype/plugins/harmony/load/load_palette.py b/pype/plugins/harmony/load/load_palette.py
index fd3f99b06d..5dd5171aa1 100644
--- a/pype/plugins/harmony/load/load_palette.py
+++ b/pype/plugins/harmony/load/load_palette.py
@@ -7,7 +7,7 @@ from avalon import api, harmony
class ImportPaletteLoader(api.Loader):
"""Import palettes."""
- families = ["palette"]
+ families = ["palette", "harmony.palette"]
representations = ["plt"]
label = "Import Palette"
diff --git a/pype/plugins/maya/publish/submit_maya_deadline.py b/pype/plugins/maya/publish/submit_maya_deadline.py
index 0ae19cbb81..1354e3d512 100644
--- a/pype/plugins/maya/publish/submit_maya_deadline.py
+++ b/pype/plugins/maya/publish/submit_maya_deadline.py
@@ -42,7 +42,7 @@ from pype.hosts.maya import lib
# /products/deadline/8.0/1_User%20Manual/manual
# /manual-submission.html#job-info-file-options
-payload_skeleton = {
+payload_skeleton_template = {
"JobInfo": {
"BatchName": None, # Top-level group name
"Name": None, # Job name, as seen in Monitor
@@ -268,6 +268,7 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin):
"""Plugin entry point."""
instance.data["toBeRenderedOn"] = "deadline"
self._instance = instance
+ self.payload_skeleton = copy.deepcopy(payload_skeleton_template)
self._deadline_url = os.environ.get(
"DEADLINE_REST_URL", "http://localhost:8082")
assert self._deadline_url, "Requires DEADLINE_REST_URL"
@@ -388,32 +389,32 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin):
self.log.info("- {}: {}".format(k, v))
self.log.info("-" * 20)
- frame_pattern = payload_skeleton["JobInfo"]["Frames"]
- payload_skeleton["JobInfo"]["Frames"] = frame_pattern.format(
+ frame_pattern = self.payload_skeleton["JobInfo"]["Frames"]
+ self.payload_skeleton["JobInfo"]["Frames"] = frame_pattern.format(
start=int(self._instance.data["frameStartHandle"]),
end=int(self._instance.data["frameEndHandle"]),
step=int(self._instance.data["byFrameStep"]))
- payload_skeleton["JobInfo"]["Plugin"] = self._instance.data.get(
+ self.payload_skeleton["JobInfo"]["Plugin"] = self._instance.data.get(
"mayaRenderPlugin", "MayaPype")
- payload_skeleton["JobInfo"]["BatchName"] = filename
+ self.payload_skeleton["JobInfo"]["BatchName"] = filename
# Job name, as seen in Monitor
- payload_skeleton["JobInfo"]["Name"] = jobname
+ self.payload_skeleton["JobInfo"]["Name"] = jobname
# Arbitrary username, for visualisation in Monitor
- payload_skeleton["JobInfo"]["UserName"] = deadline_user
+ self.payload_skeleton["JobInfo"]["UserName"] = deadline_user
# Set job priority
- payload_skeleton["JobInfo"]["Priority"] = self._instance.data.get(
+ self.payload_skeleton["JobInfo"]["Priority"] = self._instance.data.get(
"priority", 50)
# Optional, enable double-click to preview rendered
# frames from Deadline Monitor
- payload_skeleton["JobInfo"]["OutputDirectory0"] = \
+ self.payload_skeleton["JobInfo"]["OutputDirectory0"] = \
os.path.dirname(output_filename_0).replace("\\", "/")
- payload_skeleton["JobInfo"]["OutputFilename0"] = \
+ self.payload_skeleton["JobInfo"]["OutputFilename0"] = \
output_filename_0.replace("\\", "/")
- payload_skeleton["JobInfo"]["Comment"] = comment
- payload_skeleton["PluginInfo"]["RenderLayer"] = renderlayer
+ self.payload_skeleton["JobInfo"]["Comment"] = comment
+ self.payload_skeleton["PluginInfo"]["RenderLayer"] = renderlayer
# Adding file dependencies.
dependencies = instance.context.data["fileDependencies"]
@@ -421,7 +422,7 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin):
if self.asset_dependencies:
for dependency in dependencies:
key = "AssetDependency" + str(dependencies.index(dependency))
- payload_skeleton["JobInfo"][key] = dependency
+ self.payload_skeleton["JobInfo"][key] = dependency
# Handle environments -----------------------------------------------
# We need those to pass them to pype for it to set correct context
@@ -441,7 +442,7 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin):
if key in os.environ}, **api.Session)
environment["PYPE_LOG_NO_COLORS"] = "1"
environment["PYPE_MAYA_VERSION"] = cmds.about(v=True)
- payload_skeleton["JobInfo"].update({
+ self.payload_skeleton["JobInfo"].update({
"EnvironmentKeyValue%d" % index: "{key}={value}".format(
key=key,
value=environment[key]
@@ -449,9 +450,9 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin):
})
# Add options from RenderGlobals-------------------------------------
render_globals = instance.data.get("renderGlobals", {})
- payload_skeleton["JobInfo"].update(render_globals)
+ self.payload_skeleton["JobInfo"].update(render_globals)
- # Submit preceeding export jobs -------------------------------------
+ # Submit preceding export jobs -------------------------------------
export_job = None
assert not all(x in instance.data["families"]
for x in ['vrayscene', 'assscene']), (
@@ -731,7 +732,7 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin):
instance.data["deadlineSubmissionJob"] = response.json()
def _get_maya_payload(self, data):
- payload = copy.deepcopy(payload_skeleton)
+ payload = copy.deepcopy(self.payload_skeleton)
if not self.asset_dependencies:
job_info_ext = {}
@@ -765,7 +766,7 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin):
return payload
def _get_vray_export_payload(self, data):
- payload = copy.deepcopy(payload_skeleton)
+ payload = copy.deepcopy(self.payload_skeleton)
vray_settings = cmds.ls(type="VRaySettingsNode")
node = vray_settings[0]
template = cmds.getAttr("{}.vrscene_filename".format(node))
@@ -816,7 +817,7 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin):
script = os.path.normpath(module_path)
- payload = copy.deepcopy(payload_skeleton)
+ payload = copy.deepcopy(self.payload_skeleton)
job_info_ext = {
# Job name, as seen in Monitor
"Name": "Export {} [{}-{}]".format(
@@ -870,7 +871,7 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin):
return payload
def _get_vray_render_payload(self, data):
- payload = copy.deepcopy(payload_skeleton)
+ payload = copy.deepcopy(self.payload_skeleton)
vray_settings = cmds.ls(type="VRaySettingsNode")
node = vray_settings[0]
template = cmds.getAttr("{}.vrscene_filename".format(node))
@@ -905,7 +906,7 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin):
return payload
def _get_arnold_render_payload(self, data):
- payload = copy.deepcopy(payload_skeleton)
+ payload = copy.deepcopy(self.payload_skeleton)
ass_file, _ = os.path.splitext(data["output_filename_0"])
first_file = ass_file + ".ass"
job_info_ext = {
diff --git a/pype/plugins/tvpaint/publish/collect_workfile_data.py b/pype/plugins/tvpaint/publish/collect_workfile_data.py
index c6179b76cf..6af659297d 100644
--- a/pype/plugins/tvpaint/publish/collect_workfile_data.py
+++ b/pype/plugins/tvpaint/publish/collect_workfile_data.py
@@ -6,10 +6,41 @@ import avalon.api
from avalon.tvpaint import pipeline, lib
+class ResetTVPaintWorkfileMetadata(pyblish.api.Action):
+ """Fix invalid metadata in workfile."""
+ label = "Reset invalid workfile metadata"
+ on = "failed"
+
+ def process(self, context, plugin):
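+        # Default values to reset each metadata section to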
+ metadata_keys = {
+ pipeline.SECTION_NAME_CONTEXT: {},
+ pipeline.SECTION_NAME_INSTANCES: [],
+ pipeline.SECTION_NAME_CONTAINERS: []
+ }
+ for metadata_key, default in metadata_keys.items():
+ json_string = pipeline.get_workfile_metadata_string(metadata_key)
+ if not json_string:
+ continue
+
+            try:
+                json.loads(json_string)
+                continue
+ except Exception:
+ self.log.warning(
+ (
+ "Couldn't parse metadata from key \"{}\"."
+ " Will reset to default value \"{}\"."
+ " Loaded value was: {}"
+ ).format(metadata_key, default, json_string),
+ exc_info=True
+ )
+ pipeline.write_workfile_metadata(metadata_key, default)
+
+
class CollectWorkfileData(pyblish.api.ContextPlugin):
label = "Collect Workfile Data"
order = pyblish.api.CollectorOrder - 1.01
hosts = ["tvpaint"]
+ actions = [ResetTVPaintWorkfileMetadata]
def process(self, context):
current_project_id = lib.execute_george("tv_projectcurrentid")
diff --git a/pype/plugins/unreal/create/create_layout.py b/pype/plugins/unreal/create/create_layout.py
new file mode 100644
index 0000000000..82cef43cee
--- /dev/null
+++ b/pype/plugins/unreal/create/create_layout.py
@@ -0,0 +1,42 @@
+from unreal import EditorLevelLibrary as ell
+from pype.hosts.unreal.plugin import Creator
+from avalon.unreal import (
+ instantiate,
+)
+
+
+class CreateLayout(Creator):
+ """Layout output for character rigs"""
+
+ name = "layoutMain"
+ label = "Layout"
+ family = "layout"
+ icon = "cubes"
+
+ root = "/Game"
+ suffix = "_INS"
+
+ def __init__(self, *args, **kwargs):
+ super(CreateLayout, self).__init__(*args, **kwargs)
+
+ def process(self):
+ data = self.data
+
+ name = data["subset"]
+
+ selection = []
+ # if (self.options or {}).get("useSelection"):
+ # sel_objects = unreal.EditorUtilityLibrary.get_selected_assets()
+ # selection = [a.get_path_name() for a in sel_objects]
+
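+        # Store the path of the level the instance is created in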
+ data["level"] = ell.get_editor_world().get_path_name()
+
+ data["members"] = []
+
+ if (self.options or {}).get("useSelection"):
+ # Set as members the selected actors
+ for actor in ell.get_selected_level_actors():
+ data["members"].append("{}.{}".format(
+ actor.get_outer().get_name(), actor.get_name()))
+
+ instantiate(self.root, name, data, selection, self.suffix)
diff --git a/pype/plugins/unreal/load/load_animation.py b/pype/plugins/unreal/load/load_animation.py
new file mode 100644
index 0000000000..5e106788ce
--- /dev/null
+++ b/pype/plugins/unreal/load/load_animation.py
@@ -0,0 +1,204 @@
+import os
+
+from avalon import api, pipeline
+from avalon.unreal import lib
+from avalon.unreal import pipeline as unreal_pipeline
+import unreal
+
+
+class AnimationFBXLoader(api.Loader):
+ """Load Unreal SkeletalMesh from FBX"""
+
+ families = ["animation"]
+ label = "Import FBX Animation"
+ representations = ["fbx"]
+ icon = "cube"
+ color = "orange"
+
+ def load(self, context, name, namespace, options=None):
+ """
+ Load and containerise representation into Content Browser.
+
+        This is a two-step process. First, the FBX is imported to a temporary
+        path, and then `containerise()` is called on it. This moves all the
+        content to a new directory, creates an AssetContainer there and
+        imprints it with metadata, marking the path as a container.
+
+ Args:
+ context (dict): application context
+ name (str): subset name
+            namespace (str): in Unreal this is basically the path to the
+                             container. It is not passed here, so the
+                             namespace is set by `containerise()`, because
+                             only then is the real path known.
+            options (dict): Loader options. Here they carry the name of the
+                            actor the animation will be applied to; without
+                            them the import cannot be automated.
+
+ Returns:
+ list(str): list of container content
+ """
+
+ # Create directory for asset and avalon container
+ root = "/Game/Avalon/Assets"
+ asset = context.get('asset').get('name')
+ suffix = "_CON"
+ if asset:
+ asset_name = "{}_{}".format(asset, name)
+ else:
+ asset_name = "{}".format(name)
+
+ tools = unreal.AssetToolsHelpers().get_asset_tools()
+ asset_dir, container_name = tools.create_unique_asset_name(
+ "{}/{}/{}".format(root, asset, name), suffix="")
+
+ container_name += suffix
+
+ unreal.EditorAssetLibrary.make_directory(asset_dir)
+
+ automated = False
+ actor = None
+
+ task = unreal.AssetImportTask()
+ task.options = unreal.FbxImportUI()
+
+ # If there are no options, the process cannot be automated
+ if options:
+ automated = True
+ actor_name = 'PersistentLevel.' + options.get('instance_name')
+ actor = unreal.EditorLevelLibrary.get_actor_reference(actor_name)
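+            # Reuse the skeleton of the actor the animation is applied to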
+ skeleton = actor.skeletal_mesh_component.skeletal_mesh.skeleton
+ task.options.set_editor_property('skeleton', skeleton)
+
+ if not actor:
+ return None
+
+ task.set_editor_property('filename', self.fname)
+ task.set_editor_property('destination_path', asset_dir)
+ task.set_editor_property('destination_name', asset_name)
+ task.set_editor_property('replace_existing', False)
+ task.set_editor_property('automated', automated)
+ task.set_editor_property('save', False)
+
+ # set import options here
+ task.options.set_editor_property(
+ 'automated_import_should_detect_type', True)
+ task.options.set_editor_property(
+ 'original_import_type', unreal.FBXImportType.FBXIT_ANIMATION)
+ task.options.set_editor_property('import_mesh', False)
+ task.options.set_editor_property('import_animations', True)
+
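+        # Import only the skinning weights, not the mesh geometry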
+ task.options.skeletal_mesh_import_data.set_editor_property(
+ 'import_content_type',
+ unreal.FBXImportContentType.FBXICT_SKINNING_WEIGHTS
+ )
+
+ unreal.AssetToolsHelpers.get_asset_tools().import_asset_tasks([task])
+
+ # Create Asset Container
+ lib.create_avalon_container(
+ container=container_name, path=asset_dir)
+
+ data = {
+ "schema": "avalon-core:container-2.0",
+ "id": pipeline.AVALON_CONTAINER_ID,
+ "asset": asset,
+ "namespace": asset_dir,
+ "container_name": container_name,
+ "asset_name": asset_name,
+ "loader": str(self.__class__.__name__),
+ "representation": context["representation"]["_id"],
+ "parent": context["representation"]["parent"],
+ "family": context["representation"]["context"]["family"]
+ }
+ unreal_pipeline.imprint(
+ "{}/{}".format(asset_dir, container_name), data)
+
+ asset_content = unreal.EditorAssetLibrary.list_assets(
+ asset_dir, recursive=True, include_folder=True
+ )
+
+ animation = None
+
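+        # Find the first AnimSequence among the imported assets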
+ for a in asset_content:
+ unreal.EditorAssetLibrary.save_asset(a)
+ imported_asset_data = unreal.EditorAssetLibrary.find_asset_data(a)
+ imported_asset = unreal.AssetRegistryHelpers.get_asset(
+ imported_asset_data)
+ if imported_asset.__class__ == unreal.AnimSequence:
+ animation = imported_asset
+ break
+
+ if animation:
+ animation.set_editor_property('enable_root_motion', True)
+ actor.skeletal_mesh_component.set_editor_property(
+ 'animation_mode', unreal.AnimationMode.ANIMATION_SINGLE_NODE)
+ actor.skeletal_mesh_component.animation_data.set_editor_property(
+ 'anim_to_play', animation)
+
+ return asset_content
+
+ def update(self, container, representation):
+ name = container["asset_name"]
+ source_path = api.get_representation_path(representation)
+ destination_path = container["namespace"]
+
+ task = unreal.AssetImportTask()
+ task.options = unreal.FbxImportUI()
+
+ task.set_editor_property('filename', source_path)
+ task.set_editor_property('destination_path', destination_path)
+        task.set_editor_property('destination_name', name)
+ task.set_editor_property('replace_existing', True)
+ task.set_editor_property('automated', True)
+ task.set_editor_property('save', False)
+
+ # set import options here
+ task.options.set_editor_property(
+ 'automated_import_should_detect_type', True)
+ task.options.set_editor_property(
+ 'original_import_type', unreal.FBXImportType.FBXIT_ANIMATION)
+ task.options.set_editor_property('import_mesh', False)
+ task.options.set_editor_property('import_animations', True)
+
+ task.options.skeletal_mesh_import_data.set_editor_property(
+ 'import_content_type',
+ unreal.FBXImportContentType.FBXICT_SKINNING_WEIGHTS
+ )
+
+ skeletal_mesh = unreal.EditorAssetLibrary.load_asset(
+ container.get('namespace') + "/" + container.get('asset_name'))
+ skeleton = skeletal_mesh.get_editor_property('skeleton')
+ task.options.set_editor_property('skeleton', skeleton)
+
+ # do import fbx and replace existing data
+ unreal.AssetToolsHelpers.get_asset_tools().import_asset_tasks([task])
+ container_path = "{}/{}".format(container["namespace"],
+ container["objectName"])
+ # update metadata
+ unreal_pipeline.imprint(
+ container_path,
+ {
+ "representation": str(representation["_id"]),
+ "parent": str(representation["parent"])
+ })
+
+ asset_content = unreal.EditorAssetLibrary.list_assets(
+ destination_path, recursive=True, include_folder=True
+ )
+
+ for a in asset_content:
+ unreal.EditorAssetLibrary.save_asset(a)
+
+ def remove(self, container):
+ path = container["namespace"]
+ parent_path = os.path.dirname(path)
+
+ unreal.EditorAssetLibrary.delete_directory(path)
+
+ asset_content = unreal.EditorAssetLibrary.list_assets(
+ parent_path, recursive=False
+ )
+
+ if len(asset_content) == 0:
+ unreal.EditorAssetLibrary.delete_directory(parent_path)
diff --git a/pype/plugins/unreal/load/load_rig.py b/pype/plugins/unreal/load/load_rig.py
new file mode 100644
index 0000000000..56351e388b
--- /dev/null
+++ b/pype/plugins/unreal/load/load_rig.py
@@ -0,0 +1,191 @@
+import os
+
+from avalon import api, pipeline
+from avalon.unreal import lib
+from avalon.unreal import pipeline as unreal_pipeline
+import unreal
+
+
+class SkeletalMeshFBXLoader(api.Loader):
+ """Load Unreal SkeletalMesh from FBX"""
+
+ families = ["rig"]
+ label = "Import FBX Skeletal Mesh"
+ representations = ["fbx"]
+ icon = "cube"
+ color = "orange"
+
+ def load(self, context, name, namespace, data):
+ """
+ Load and containerise representation into Content Browser.
+
+        This is a two-step process. First, the FBX is imported to a temporary
+        path, and then `containerise()` is called on it. This moves all the
+        content to a new directory, creates an AssetContainer there and
+        imprints it with metadata, marking the path as a container.
+
+ Args:
+ context (dict): application context
+ name (str): subset name
+            namespace (str): in Unreal this is basically the path to the
+                             container. It is not passed here, so the
+                             namespace is set by `containerise()`, because
+                             only then is the real path known.
+            data (dict): The data to be imprinted. It is not used at the
+                         moment; data are imprinted by `containerise()`.
+
+ Returns:
+ list(str): list of container content
+ """
+
+ # Create directory for asset and avalon container
+ root = "/Game/Avalon/Assets"
+ asset = context.get('asset').get('name')
+ suffix = "_CON"
+ if asset:
+ asset_name = "{}_{}".format(asset, name)
+ else:
+ asset_name = "{}".format(name)
+
+ tools = unreal.AssetToolsHelpers().get_asset_tools()
+ asset_dir, container_name = tools.create_unique_asset_name(
+ "{}/{}/{}".format(root, asset, name), suffix="")
+
+ container_name += suffix
+
+ unreal.EditorAssetLibrary.make_directory(asset_dir)
+
+ task = unreal.AssetImportTask()
+
+ task.set_editor_property('filename', self.fname)
+ task.set_editor_property('destination_path', asset_dir)
+ task.set_editor_property('destination_name', asset_name)
+ task.set_editor_property('replace_existing', False)
+ task.set_editor_property('automated', True)
+ task.set_editor_property('save', False)
+
+ # set import options here
+ options = unreal.FbxImportUI()
+ options.set_editor_property('import_as_skeletal', True)
+ options.set_editor_property('import_animations', False)
+ options.set_editor_property('import_mesh', True)
+ options.set_editor_property('import_materials', True)
+ options.set_editor_property('import_textures', True)
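+        # With no skeleton set, Unreal creates a new one during the import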
+ options.set_editor_property('skeleton', None)
+ options.set_editor_property('create_physics_asset', False)
+
+ options.set_editor_property('mesh_type_to_import',
+ unreal.FBXImportType.FBXIT_SKELETAL_MESH)
+
+ options.skeletal_mesh_import_data.set_editor_property(
+ 'import_content_type',
+ unreal.FBXImportContentType.FBXICT_ALL
+ )
+ # set to import normals, otherwise Unreal will compute them
+ # and it will take a long time, depending on the size of the mesh
+ options.skeletal_mesh_import_data.set_editor_property(
+ 'normal_import_method',
+ unreal.FBXNormalImportMethod.FBXNIM_IMPORT_NORMALS
+ )
+
+ task.options = options
+ unreal.AssetToolsHelpers.get_asset_tools().import_asset_tasks([task]) # noqa: E501
+
+ # Create Asset Container
+ lib.create_avalon_container(
+ container=container_name, path=asset_dir)
+
+ data = {
+ "schema": "avalon-core:container-2.0",
+ "id": pipeline.AVALON_CONTAINER_ID,
+ "asset": asset,
+ "namespace": asset_dir,
+ "container_name": container_name,
+ "asset_name": asset_name,
+ "loader": str(self.__class__.__name__),
+ "representation": context["representation"]["_id"],
+ "parent": context["representation"]["parent"],
+ "family": context["representation"]["context"]["family"]
+ }
+ unreal_pipeline.imprint(
+ "{}/{}".format(asset_dir, container_name), data)
+
+ asset_content = unreal.EditorAssetLibrary.list_assets(
+ asset_dir, recursive=True, include_folder=True
+ )
+
+ for a in asset_content:
+ unreal.EditorAssetLibrary.save_asset(a)
+
+ return asset_content
+
+ def update(self, container, representation):
+ name = container["asset_name"]
+ source_path = api.get_representation_path(representation)
+ destination_path = container["namespace"]
+
+ task = unreal.AssetImportTask()
+
+ task.set_editor_property('filename', source_path)
+ task.set_editor_property('destination_path', destination_path)
+ task.set_editor_property('destination_name', name)
+ task.set_editor_property('replace_existing', True)
+ task.set_editor_property('automated', True)
+ task.set_editor_property('save', True)
+
+ # set import options here
+ options = unreal.FbxImportUI()
+ options.set_editor_property('import_as_skeletal', True)
+ options.set_editor_property('import_animations', False)
+ options.set_editor_property('import_mesh', True)
+ options.set_editor_property('import_materials', True)
+ options.set_editor_property('import_textures', True)
+ options.set_editor_property('skeleton', None)
+ options.set_editor_property('create_physics_asset', False)
+
+ options.set_editor_property('mesh_type_to_import',
+ unreal.FBXImportType.FBXIT_SKELETAL_MESH)
+
+ options.skeletal_mesh_import_data.set_editor_property(
+ 'import_content_type',
+ unreal.FBXImportContentType.FBXICT_ALL
+ )
+ # set to import normals, otherwise Unreal will compute them
+ # and it will take a long time, depending on the size of the mesh
+ options.skeletal_mesh_import_data.set_editor_property(
+ 'normal_import_method',
+ unreal.FBXNormalImportMethod.FBXNIM_IMPORT_NORMALS
+ )
+
+ task.options = options
+ # do import fbx and replace existing data
+ unreal.AssetToolsHelpers.get_asset_tools().import_asset_tasks([task]) # noqa: E501
+ container_path = "{}/{}".format(container["namespace"],
+ container["objectName"])
+ # update metadata
+ unreal_pipeline.imprint(
+ container_path,
+ {
+ "representation": str(representation["_id"]),
+ "parent": str(representation["parent"])
+ })
+
+ asset_content = unreal.EditorAssetLibrary.list_assets(
+ destination_path, recursive=True, include_folder=True
+ )
+
+ for a in asset_content:
+ unreal.EditorAssetLibrary.save_asset(a)
+
+ def remove(self, container):
+ path = container["namespace"]
+ parent_path = os.path.dirname(path)
+
+ unreal.EditorAssetLibrary.delete_directory(path)
+
+ asset_content = unreal.EditorAssetLibrary.list_assets(
+ parent_path, recursive=False
+ )
+
+ if len(asset_content) == 0:
+ unreal.EditorAssetLibrary.delete_directory(parent_path)
diff --git a/pype/plugins/unreal/load/load_setdress.py b/pype/plugins/unreal/load/load_setdress.py
new file mode 100644
index 0000000000..08330e349b
--- /dev/null
+++ b/pype/plugins/unreal/load/load_setdress.py
@@ -0,0 +1,127 @@
+import json
+
+from avalon import api
+import unreal
+
+
+class AnimationCollectionLoader(api.Loader):
+ """Load Unreal SkeletalMesh from FBX"""
+
+ families = ["setdress"]
+ representations = ["json"]
+
+ label = "Load Animation Collection"
+ icon = "cube"
+ color = "orange"
+
+ def load(self, context, name, namespace, options):
+ from avalon import api, pipeline
+ from avalon.unreal import lib
+ from avalon.unreal import pipeline as unreal_pipeline
+ import unreal
+
+ # Create directory for asset and avalon container
+ root = "/Game/Avalon/Assets"
+ asset = context.get('asset').get('name')
+ suffix = "_CON"
+
+ tools = unreal.AssetToolsHelpers().get_asset_tools()
+ asset_dir, container_name = tools.create_unique_asset_name(
+ "{}/{}".format(root, asset), suffix="")
+
+ container_name += suffix
+
+ unreal.EditorAssetLibrary.make_directory(asset_dir)
+
+ libpath = self.fname
+
+ with open(libpath, "r") as fp:
+ data = json.load(fp)
+
+ all_loaders = api.discover(api.Loader)
+
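+        # Load every animation referenced in the JSON with the FBX loader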
+ for element in data:
+ reference = element.get('_id')
+
+ loaders = api.loaders_from_representation(all_loaders, reference)
+ loader = None
+            for ldr in loaders:
+                if ldr.__name__ == "AnimationFBXLoader":
+                    loader = ldr
+ break
+
+ if not loader:
+ continue
+
+ instance_name = element.get('instance_name')
+
+ api.load(
+ loader,
+ reference,
+ namespace=instance_name,
+ options=element
+ )
+
+ # Create Asset Container
+ lib.create_avalon_container(
+ container=container_name, path=asset_dir)
+
+ data = {
+ "schema": "avalon-core:container-2.0",
+ "id": pipeline.AVALON_CONTAINER_ID,
+ "asset": asset,
+ "namespace": asset_dir,
+ "container_name": container_name,
+ "loader": str(self.__class__.__name__),
+ "representation": context["representation"]["_id"],
+ "parent": context["representation"]["parent"],
+ "family": context["representation"]["context"]["family"]
+ }
+ unreal_pipeline.imprint(
+ "{}/{}".format(asset_dir, container_name), data)
+
+ asset_content = unreal.EditorAssetLibrary.list_assets(
+ asset_dir, recursive=True, include_folder=True
+ )
+
+ return asset_content
+
+ def update(self, container, representation):
+ from avalon import api, io
+ from avalon.unreal import pipeline
+
+ source_path = api.get_representation_path(representation)
+
+ with open(source_path, "r") as fp:
+ data = json.load(fp)
+
+ animation_containers = [
+ i for i in pipeline.ls() if
+ i.get('asset') == container.get('asset') and
+ i.get('family') == 'animation']
+
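+        # Update each animation container to the version listed in the JSON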
+ for element in data:
+ new_version = io.find_one({"_id": io.ObjectId(element.get('_id'))})
+ new_version_number = new_version.get('context').get('version')
+ anim_container = None
+ for i in animation_containers:
+ if i.get('container_name') == (element.get('subset') + "_CON"):
+ anim_container = i
+ break
+ if not anim_container:
+ continue
+
+ api.update(anim_container, new_version_number)
+
+ container_path = "{}/{}".format(container["namespace"],
+ container["objectName"])
+ # update metadata
+ pipeline.imprint(
+ container_path,
+ {
+ "representation": str(representation["_id"]),
+ "parent": str(representation["parent"])
+ })
+
+ def remove(self, container):
+ unreal.EditorAssetLibrary.delete_directory(container["namespace"])
diff --git a/pype/plugins/unreal/load/load_staticmeshfbx.py b/pype/plugins/unreal/load/load_staticmeshfbx.py
index 4c27f9aa92..149bafcacc 100644
--- a/pype/plugins/unreal/load/load_staticmeshfbx.py
+++ b/pype/plugins/unreal/load/load_staticmeshfbx.py
@@ -1,12 +1,16 @@
-from avalon import api
+import os
+
+from avalon import api, pipeline
from avalon import unreal as avalon_unreal
+from avalon.unreal import lib
+from avalon.unreal import pipeline as unreal_pipeline
import unreal
class StaticMeshFBXLoader(api.Loader):
"""Load Unreal StaticMesh from FBX"""
- families = ["unrealStaticMesh"]
+ families = ["model", "unrealStaticMesh"]
label = "Import FBX Static Mesh"
representations = ["fbx"]
icon = "cube"
@@ -35,67 +39,119 @@ class StaticMeshFBXLoader(api.Loader):
list(str): list of container content
"""
- tools = unreal.AssetToolsHelpers().get_asset_tools()
- temp_dir, temp_name = tools.create_unique_asset_name(
- "/Game/{}".format(name), "_TMP"
- )
+ # Create directory for asset and avalon container
+ root = "/Game/Avalon/Assets"
+ asset = context.get('asset').get('name')
+ suffix = "_CON"
+ if asset:
+ asset_name = "{}_{}".format(asset, name)
+ else:
+ asset_name = "{}".format(name)
- unreal.EditorAssetLibrary.make_directory(temp_dir)
+ tools = unreal.AssetToolsHelpers().get_asset_tools()
+ asset_dir, container_name = tools.create_unique_asset_name(
+ "{}/{}/{}".format(root, asset, name), suffix="")
+
+ container_name += suffix
+
+ unreal.EditorAssetLibrary.make_directory(asset_dir)
task = unreal.AssetImportTask()
- task.filename = self.fname
- task.destination_path = temp_dir
- task.destination_name = name
- task.replace_existing = False
- task.automated = True
- task.save = True
+ task.set_editor_property('filename', self.fname)
+ task.set_editor_property('destination_path', asset_dir)
+ task.set_editor_property('destination_name', asset_name)
+ task.set_editor_property('replace_existing', False)
+ task.set_editor_property('automated', True)
+ task.set_editor_property('save', True)
# set import options here
- task.options = unreal.FbxImportUI()
- task.options.import_animations = False
+ options = unreal.FbxImportUI()
+ options.set_editor_property(
+ 'automated_import_should_detect_type', False)
+ options.set_editor_property('import_animations', False)
+ task.options = options
unreal.AssetToolsHelpers.get_asset_tools().import_asset_tasks([task]) # noqa: E501
- imported_assets = unreal.EditorAssetLibrary.list_assets(
- temp_dir, recursive=True, include_folder=True
- )
- new_dir = avalon_unreal.containerise(
- name, namespace, imported_assets, context, self.__class__.__name__)
+ # Create Asset Container
+ lib.create_avalon_container(
+ container=container_name, path=asset_dir)
+
+ data = {
+ "schema": "avalon-core:container-2.0",
+ "id": pipeline.AVALON_CONTAINER_ID,
+ "asset": asset,
+ "namespace": asset_dir,
+ "container_name": container_name,
+ "asset_name": asset_name,
+ "loader": str(self.__class__.__name__),
+ "representation": context["representation"]["_id"],
+ "parent": context["representation"]["parent"],
+ "family": context["representation"]["context"]["family"]
+ }
+ unreal_pipeline.imprint(
+ "{}/{}".format(asset_dir, container_name), data)
asset_content = unreal.EditorAssetLibrary.list_assets(
- new_dir, recursive=True, include_folder=True
+ asset_dir, recursive=True, include_folder=True
)
- unreal.EditorAssetLibrary.delete_directory(temp_dir)
+ for a in asset_content:
+ unreal.EditorAssetLibrary.save_asset(a)
return asset_content
def update(self, container, representation):
- node = container["objectName"]
+ name = container["name"]
source_path = api.get_representation_path(representation)
destination_path = container["namespace"]
task = unreal.AssetImportTask()
- task.filename = source_path
- task.destination_path = destination_path
+ task.set_editor_property('filename', source_path)
+ task.set_editor_property('destination_path', destination_path)
# strip suffix
- task.destination_name = node[:-4]
- task.replace_existing = True
- task.automated = True
- task.save = True
+ task.set_editor_property('destination_name', name)
+ task.set_editor_property('replace_existing', True)
+ task.set_editor_property('automated', True)
+ task.set_editor_property('save', True)
- task.options = unreal.FbxImportUI()
- task.options.import_animations = False
+ # set import options here
+ options = unreal.FbxImportUI()
+ options.set_editor_property(
+ 'automated_import_should_detect_type', False)
+ options.set_editor_property('import_animations', False)
+ task.options = options
# do import fbx and replace existing data
unreal.AssetToolsHelpers.get_asset_tools().import_asset_tasks([task])
container_path = "{}/{}".format(container["namespace"],
container["objectName"])
# update metadata
- avalon_unreal.imprint(
- container_path, {"_id": str(representation["_id"])})
+ unreal_pipeline.imprint(
+ container_path,
+ {
+ "representation": str(representation["_id"]),
+ "parent": str(representation["parent"])
+ })
+
+ asset_content = unreal.EditorAssetLibrary.list_assets(
+ destination_path, recursive=True, include_folder=True
+ )
+
+ for a in asset_content:
+ unreal.EditorAssetLibrary.save_asset(a)
def remove(self, container):
- unreal.EditorAssetLibrary.delete_directory(container["namespace"])
+ path = container["namespace"]
+ parent_path = os.path.dirname(path)
+
+ unreal.EditorAssetLibrary.delete_directory(path)
+
+ asset_content = unreal.EditorAssetLibrary.list_assets(
+ parent_path, recursive=False
+ )
+
+ if len(asset_content) == 0:
+ unreal.EditorAssetLibrary.delete_directory(parent_path)
diff --git a/pype/plugins/unreal/publish/collect_current_file.py b/pype/plugins/unreal/publish/collect_current_file.py
new file mode 100644
index 0000000000..4e828933bb
--- /dev/null
+++ b/pype/plugins/unreal/publish/collect_current_file.py
@@ -0,0 +1,19 @@
+import unreal
+
+import pyblish.api
+
+
+class CollectUnrealCurrentFile(pyblish.api.ContextPlugin):
+ """Inject the current working file into context"""
+
+ order = pyblish.api.CollectorOrder - 0.5
+ label = "Unreal Current File"
+ hosts = ['unreal']
+
+ def process(self, context):
+ """Inject the current working file"""
+        current_file = unreal.Paths.get_project_file_path()
+
+        assert current_file != '', "Current file is empty. " \
+            "Save the file before continuing."
+
+        context.data['currentFile'] = current_file
diff --git a/pype/plugins/unreal/publish/collect_instances.py b/pype/plugins/unreal/publish/collect_instances.py
index 766a73028c..62676f9938 100644
--- a/pype/plugins/unreal/publish/collect_instances.py
+++ b/pype/plugins/unreal/publish/collect_instances.py
@@ -1,5 +1,5 @@
+import ast
import unreal
-
import pyblish.api
@@ -35,13 +35,10 @@ class CollectInstances(pyblish.api.ContextPlugin):
)
# content of container
- members = unreal.EditorAssetLibrary.list_assets(
- asset.get_path_name(), recursive=True, include_folder=True
- )
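+        # Members are imprinted as a stringified list, so parse it back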
+ members = ast.literal_eval(data.get("members"))
self.log.debug(members)
self.log.debug(asset.get_path_name())
# remove instance container
- members.remove(asset.get_path_name())
self.log.info("Creating instance for {}".format(asset.get_name()))
instance = context.create_instance(asset.get_name())
@@ -50,6 +47,8 @@ class CollectInstances(pyblish.api.ContextPlugin):
# Store the exact members of the object set
instance.data["setMembers"] = members
instance.data["families"] = [data.get("family")]
+ instance.data["level"] = data.get("level")
+ instance.data["parent"] = data.get("parent")
label = "{0} ({1})".format(asset.get_name()[:-4],
data["asset"])
diff --git a/pype/plugins/unreal/publish/extract_layout.py b/pype/plugins/unreal/publish/extract_layout.py
new file mode 100644
index 0000000000..6345b8da51
--- /dev/null
+++ b/pype/plugins/unreal/publish/extract_layout.py
@@ -0,0 +1,113 @@
+import os
+import json
+import math
+
+import unreal
+from unreal import EditorLevelLibrary as ell
+from unreal import EditorAssetLibrary as eal
+
+import pype.api
+from avalon import io
+
+
+class ExtractLayout(pype.api.Extractor):
+ """Extract a layout."""
+
+ label = "Extract Layout"
+ hosts = ["unreal"]
+ families = ["layout"]
+ optional = True
+
+ def process(self, instance):
+ # Define extract output file path
+ stagingdir = self.staging_dir(instance)
+
+ # Perform extraction
+ self.log.info("Performing extraction..")
+
+ # Check if the loaded level is the same of the instance
+ current_level = ell.get_editor_world().get_path_name()
+ assert current_level == instance.data.get("level"), \
+ "Wrong level loaded"
+
+ json_data = []
+
+ for member in instance[:]:
+ actor = ell.get_actor_reference(member)
+ mesh = None
+
+            # Check the type of the mesh
+ if actor.get_class().get_name() == 'SkeletalMeshActor':
+ mesh = actor.skeletal_mesh_component.skeletal_mesh
+ elif actor.get_class().get_name() == 'StaticMeshActor':
+ mesh = actor.static_mesh_component.static_mesh
+
+ if mesh:
+                # Search for the Asset Container the object belongs to
+                path = unreal.Paths.get_path(mesh.get_path_name())
+                ar_filter = unreal.ARFilter(
+                    class_names=["AssetContainer"], package_paths=[path])
+                ar = unreal.AssetRegistryHelpers.get_asset_registry()
+                try:
+                    asset_container = ar.get_assets(ar_filter)[0].get_asset()
+ except IndexError:
+ self.log.error("AssetContainer not found.")
+ return
+
+ parent = eal.get_metadata_tag(asset_container, "parent")
+ family = eal.get_metadata_tag(asset_container, "family")
+
+ self.log.info("Parent: {}".format(parent))
+ blend = io.find_one(
+ {
+ "type": "representation",
+ "parent": io.ObjectId(parent),
+ "name": "blend"
+ },
+ projection={"_id": True})
+ blend_id = blend["_id"]
+
+ json_element = {}
+ json_element["reference"] = str(blend_id)
+ json_element["family"] = family
+ json_element["instance_name"] = actor.get_name()
+ json_element["asset_name"] = mesh.get_name()
+ import_data = mesh.get_editor_property("asset_import_data")
+ json_element["file_path"] = import_data.get_first_filename()
+                transform = actor.get_actor_transform()
+                rotation = transform.rotation.euler()
+
+                json_element["transform"] = {
+                    "translation": {
+                        "x": transform.translation.x,
+                        "y": transform.translation.y,
+                        "z": transform.translation.z
+                    },
+                    "rotation": {
+                        "x": math.radians(rotation.x),
+                        "y": math.radians(rotation.y),
+                        "z": math.radians(rotation.z)
+                    },
+ "scale": {
+ "x": transform.scale3d.x,
+ "y": transform.scale3d.y,
+ "z": transform.scale3d.z
+ }
+ }
+ json_data.append(json_element)
+
+ json_filename = "{}.json".format(instance.name)
+ json_path = os.path.join(stagingdir, json_filename)
+
+ with open(json_path, "w+") as file:
+ json.dump(json_data, fp=file, indent=2)
+
+ if "representations" not in instance.data:
+ instance.data["representations"] = []
+
+ json_representation = {
+ 'name': 'json',
+ 'ext': 'json',
+ 'files': json_filename,
+ "stagingDir": stagingdir,
+ }
+ instance.data["representations"].append(json_representation)
diff --git a/pype/version.py b/pype/version.py
index abe7e03a96..fc92f01ee6 100644
--- a/pype/version.py
+++ b/pype/version.py
@@ -1 +1 @@
-__version__ = "2.14.2"
+__version__ = "2.14.3"
diff --git a/setup/nuke/nuke_path/write_to_read.py b/setup/nuke/nuke_path/write_to_read.py
index 7ea9220ad8..58985947bd 100644
--- a/setup/nuke/nuke_path/write_to_read.py
+++ b/setup/nuke/nuke_path/write_to_read.py
@@ -76,7 +76,7 @@ def evaluate_filepath_new(k_value, k_eval, project_dir, first_frame):
def create_read_node(ndata, comp_start):
- read = nuke.createNode('Read', 'file ' + ndata['filepath'])
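+    # Quote the path so file names containing spaces parse correctly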
+ read = nuke.createNode('Read', 'file "' + ndata['filepath'] + '"')
read.knob('colorspace').setValue(int(ndata['colorspace']))
read.knob('raw').setValue(ndata['rawdata'])
read.knob('first').setValue(int(ndata['firstframe']))