[Automated] Merged develop into main

This commit is contained in:
pypebot 2022-12-02 14:25:09 +01:00 committed by GitHub
commit 14fb3b23cd
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
5 changed files with 313 additions and 191 deletions

View file

@ -1,5 +1,27 @@
# Changelog
## [3.14.8](https://github.com/pypeclub/OpenPype/tree/3.14.8)
[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.14.7...3.14.8)
**🚀 Enhancements**
- General: Refactored extract hierarchy plugin [\#4139](https://github.com/pypeclub/OpenPype/pull/4139)
- General: Find executable enhancement [\#4137](https://github.com/pypeclub/OpenPype/pull/4137)
- Ftrack: Reset session before instance processing [\#4129](https://github.com/pypeclub/OpenPype/pull/4129)
- Ftrack: Editorial asset sync issue [\#4126](https://github.com/pypeclub/OpenPype/pull/4126)
- Deadline: Build version resolving [\#4115](https://github.com/pypeclub/OpenPype/pull/4115)
- Houdini: New Publisher [\#3046](https://github.com/pypeclub/OpenPype/pull/3046)
- Fix: Standalone Publish Directories [\#4148](https://github.com/pypeclub/OpenPype/pull/4148)
**🐛 Bug fixes**
- Ftrack: Fix occasional double parents issue [\#4153](https://github.com/pypeclub/OpenPype/pull/4153)
- General: Maketx executable issue [\#4136](https://github.com/pypeclub/OpenPype/pull/4136)
- Maya: Looks - add all connections [\#4135](https://github.com/pypeclub/OpenPype/pull/4135)
- General: Fix variable check in collect anatomy instance data [\#4117](https://github.com/pypeclub/OpenPype/pull/4117)
## [3.14.7](https://github.com/pypeclub/OpenPype/tree/3.14.7)
[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.14.6...3.14.7)

View file

@ -1,5 +1,25 @@
# Changelog
## [3.14.8](https://github.com/pypeclub/OpenPype/tree/3.14.8)
[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.14.7...3.14.8)
**🚀 Enhancements**
- General: Refactored extract hierarchy plugin [\#4139](https://github.com/pypeclub/OpenPype/pull/4139)
- General: Find executable enhancement [\#4137](https://github.com/pypeclub/OpenPype/pull/4137)
- Ftrack: Reset session before instance processing [\#4129](https://github.com/pypeclub/OpenPype/pull/4129)
- Ftrack: Editorial asset sync issue [\#4126](https://github.com/pypeclub/OpenPype/pull/4126)
- Deadline: Build version resolving [\#4115](https://github.com/pypeclub/OpenPype/pull/4115)
- Houdini: New Publisher [\#3046](https://github.com/pypeclub/OpenPype/pull/3046)
- Fix: Standalone Publish Directories [\#4148](https://github.com/pypeclub/OpenPype/pull/4148)
**🐛 Bug fixes**
- Ftrack: Fix occasional double parents issue [\#4153](https://github.com/pypeclub/OpenPype/pull/4153)
- General: Maketx executable issue [\#4136](https://github.com/pypeclub/OpenPype/pull/4136)
- Maya: Looks - add all connections [\#4135](https://github.com/pypeclub/OpenPype/pull/4135)
- General: Fix variable check in collect anatomy instance data [\#4117](https://github.com/pypeclub/OpenPype/pull/4117)
## [3.14.7](https://github.com/pypeclub/OpenPype/tree/3.14.7)

View file

@ -1556,7 +1556,7 @@ class SyncEntitiesFactory:
deleted_entities.append(mongo_id)
av_ent = self.avalon_ents_by_id[mongo_id]
av_ent_path_items = [p for p in av_ent["data"]["parents"]]
av_ent_path_items = list(av_ent["data"]["parents"])
av_ent_path_items.append(av_ent["name"])
self.log.debug("Deleted <{}>".format("/".join(av_ent_path_items)))
@ -1855,7 +1855,7 @@ class SyncEntitiesFactory:
_vis_par = _avalon_ent["data"]["visualParent"]
_name = _avalon_ent["name"]
if _name in self.all_ftrack_names:
av_ent_path_items = _avalon_ent["data"]["parents"]
av_ent_path_items = list(_avalon_ent["data"]["parents"])
av_ent_path_items.append(_name)
av_ent_path = "/".join(av_ent_path_items)
# TODO report
@ -1997,7 +1997,7 @@ class SyncEntitiesFactory:
{"_id": mongo_id},
item
))
av_ent_path_items = item["data"]["parents"]
av_ent_path_items = list(item["data"]["parents"])
av_ent_path_items.append(item["name"])
av_ent_path = "/".join(av_ent_path_items)
self.log.debug(
@ -2110,6 +2110,7 @@ class SyncEntitiesFactory:
entity_dict = self.entities_dict[ftrack_id]
final_parents = entity_dict["final_entity"]["data"]["parents"]
if archived_by_id:
# if is changeable then unarchive (nothing to check here)
if self.changeability_by_mongo_id[mongo_id]:
@ -2123,10 +2124,8 @@ class SyncEntitiesFactory:
archived_name = archived_by_id["name"]
if (
archived_name != entity_dict["name"] or
archived_parents != entity_dict["final_entity"]["data"][
"parents"
]
archived_name != entity_dict["name"]
or archived_parents != final_parents
):
return None
@ -2136,11 +2135,7 @@ class SyncEntitiesFactory:
for archived in archived_by_name:
mongo_id = str(archived["_id"])
archived_parents = archived.get("data", {}).get("parents")
if (
archived_parents == entity_dict["final_entity"]["data"][
"parents"
]
):
if archived_parents == final_parents:
return mongo_id
# Secondly try to find more close to current ftrack entity
@ -2350,8 +2345,7 @@ class SyncEntitiesFactory:
continue
changed = True
parents = [par for par in _parents]
hierarchy = "/".join(parents)
parents = list(_parents)
self.entities_dict[ftrack_id][
"final_entity"]["data"]["parents"] = parents

View file

@ -36,10 +36,35 @@ class IntegrateFtrackApi(pyblish.api.InstancePlugin):
return
context = instance.context
session = context.data["ftrackSession"]
task_entity, parent_entity = self.get_instance_entities(
instance, context)
if parent_entity is None:
self.log.info((
"Skipping ftrack integration. Instance \"{}\" does not"
" have specified ftrack entities."
).format(str(instance)))
return
session = context.data["ftrackSession"]
# Reset session operations and reconfigure locations
session.recorded_operations.clear()
session._configure_locations()
try:
self.integrate_to_ftrack(
session,
instance,
task_entity,
parent_entity,
component_list
)
except Exception:
session.reset()
raise
def get_instance_entities(self, instance, context):
parent_entity = None
default_asset_name = None
# If instance has set "ftrackEntity" or "ftrackTask" then use them from
# instance. Even if they are set to None. If they are set to None it
# has a reason. (like has different context)
@ -52,15 +77,21 @@ class IntegrateFtrackApi(pyblish.api.InstancePlugin):
parent_entity = context.data.get("ftrackEntity")
if task_entity:
default_asset_name = task_entity["name"]
parent_entity = task_entity["parent"]
if parent_entity is None:
self.log.info((
"Skipping ftrack integration. Instance \"{}\" does not"
" have specified ftrack entities."
).format(str(instance)))
return
return task_entity, parent_entity
def integrate_to_ftrack(
self,
session,
instance,
task_entity,
parent_entity,
component_list
):
default_asset_name = None
if task_entity:
default_asset_name = task_entity["name"]
if not default_asset_name:
default_asset_name = parent_entity["name"]
@ -186,13 +217,7 @@ class IntegrateFtrackApi(pyblish.api.InstancePlugin):
self.log.info("Setting task status to \"{}\"".format(status_name))
task_entity["status"] = status
try:
session.commit()
except Exception:
tp, value, tb = sys.exc_info()
session.rollback()
session._configure_locations()
six.reraise(tp, value, tb)
session.commit()
def _fill_component_locations(self, session, component_list):
components_by_location_name = collections.defaultdict(list)
@ -495,13 +520,7 @@ class IntegrateFtrackApi(pyblish.api.InstancePlugin):
session.delete(member)
del(member)
try:
session.commit()
except Exception:
tp, value, tb = sys.exc_info()
session.rollback()
session._configure_locations()
six.reraise(tp, value, tb)
session.commit()
# Reset members in memory
if "members" in component_entity.keys():
@ -617,13 +636,7 @@ class IntegrateFtrackApi(pyblish.api.InstancePlugin):
)
else:
# Commit changes.
try:
session.commit()
except Exception:
tp, value, tb = sys.exc_info()
session.rollback()
session._configure_locations()
six.reraise(tp, value, tb)
session.commit()
def _create_components(self, session, asset_versions_data_by_id):
for item in asset_versions_data_by_id.values():

View file

@ -1,9 +1,8 @@
import collections
from copy import deepcopy
import pyblish.api
from openpype.client import (
get_project,
get_asset_by_id,
get_asset_by_name,
get_assets,
get_archived_assets
)
from openpype.pipeline import legacy_io
@ -17,7 +16,6 @@ class ExtractHierarchyToAvalon(pyblish.api.ContextPlugin):
families = ["clip", "shot"]
def process(self, context):
# processing starts here
if "hierarchyContext" not in context.data:
self.log.info("skipping IntegrateHierarchyToAvalon")
return
@ -25,161 +23,236 @@ class ExtractHierarchyToAvalon(pyblish.api.ContextPlugin):
if not legacy_io.Session:
legacy_io.install()
project_name = legacy_io.active_project()
hierarchy_context = self._get_active_assets(context)
self.log.debug("__ hierarchy_context: {}".format(hierarchy_context))
self.project = None
self.import_to_avalon(context, project_name, hierarchy_context)
project_name = context.data["projectName"]
asset_names = self.extract_asset_names(hierarchy_context)
asset_docs_by_name = {}
for asset_doc in get_assets(project_name, asset_names=asset_names):
name = asset_doc["name"]
asset_docs_by_name[name] = asset_doc
archived_asset_docs_by_name = collections.defaultdict(list)
for asset_doc in get_archived_assets(
project_name, asset_names=asset_names
):
name = asset_doc["name"]
archived_asset_docs_by_name[name].append(asset_doc)
project_doc = None
hierarchy_queue = collections.deque()
for name, data in hierarchy_context.items():
hierarchy_queue.append((name, data, None))
while hierarchy_queue:
item = hierarchy_queue.popleft()
name, entity_data, parent = item
def import_to_avalon(
self,
context,
project_name,
input_data,
parent=None,
):
for name in input_data:
self.log.info("input_data[name]: {}".format(input_data[name]))
entity_data = input_data[name]
entity_type = entity_data["entity_type"]
data = {}
data["entityType"] = entity_type
# Custom attributes.
for k, val in entity_data.get("custom_attributes", {}).items():
data[k] = val
if entity_type.lower() != "project":
data["inputs"] = entity_data.get("inputs", [])
# Tasks.
tasks = entity_data.get("tasks", {})
if tasks is not None or len(tasks) > 0:
data["tasks"] = tasks
parents = []
visualParent = None
# do not store project"s id as visualParent
if self.project is not None:
if self.project["_id"] != parent["_id"]:
visualParent = parent["_id"]
parents.extend(
parent.get("data", {}).get("parents", [])
)
parents.append(parent["name"])
data["visualParent"] = visualParent
data["parents"] = parents
update_data = True
# Process project
if entity_type.lower() == "project":
entity = get_project(project_name)
# TODO: should be in validator?
assert (entity is not None), "Did not find project in DB"
# get data from already existing project
cur_entity_data = entity.get("data") or {}
cur_entity_data.update(data)
data = cur_entity_data
self.project = entity
# Raise error if project or parent are not set
elif self.project is None or parent is None:
raise AssertionError(
"Collected items are not in right order!"
new_parent = project_doc = self.sync_project(
context,
entity_data
)
# Else process assset
else:
entity = get_asset_by_name(project_name, name)
if entity:
# Do not override data, only update
cur_entity_data = entity.get("data") or {}
entity_tasks = cur_entity_data["tasks"] or {}
# create tasks as dict by default
if not entity_tasks:
cur_entity_data["tasks"] = entity_tasks
new_tasks = data.pop("tasks", {})
if "tasks" not in cur_entity_data and not new_tasks:
continue
for task_name in new_tasks:
if task_name in entity_tasks.keys():
continue
cur_entity_data["tasks"][task_name] = new_tasks[
task_name]
cur_entity_data.update(data)
data = cur_entity_data
else:
# Skip updating data
update_data = False
archived_entities = get_archived_assets(
project_name,
asset_names=[name]
)
unarchive_entity = None
for archived_entity in archived_entities:
archived_parents = (
archived_entity
.get("data", {})
.get("parents")
)
if data["parents"] == archived_parents:
unarchive_entity = archived_entity
break
if unarchive_entity is None:
# Create entity if doesn"t exist
entity = self.create_avalon_asset(
name, data
)
else:
# Unarchive if entity was archived
entity = self.unarchive_entity(unarchive_entity, data)
new_parent = self.sync_asset(
name,
entity_data,
parent,
project_doc,
asset_docs_by_name,
archived_asset_docs_by_name
)
# make sure all relative instances have correct avalon data
self._set_avalon_data_to_relative_instances(
context,
project_name,
entity
new_parent
)
if update_data:
# Update entity data with input data
legacy_io.update_many(
{"_id": entity["_id"]},
{"$set": {"data": data}}
children = entity_data.get("childs")
if not children:
continue
for child_name, child_data in children.items():
hierarchy_queue.append((child_name, child_data, new_parent))
def extract_asset_names(self, hierarchy_context):
    """Collect every asset name present in a hierarchy context.

    Walks the nested structure iteratively (breadth-first) and gathers
    the names of all entities whose entity type is not "project".

    Args:
        hierarchy_context (Dict[str, Any]): Nested hierarchy structure.

    Returns:
        Set[str]: All asset names from the hierarchy structure.
    """

    asset_names = set()
    # Seed the queue with the top-level (name, data) pairs
    entity_queue = collections.deque(hierarchy_context.items())
    while entity_queue:
        entity_name, entity_data = entity_queue.popleft()
        # The project entry itself is not an asset
        if entity_data["entity_type"].lower() != "project":
            asset_names.add(entity_name)
        # Enqueue children (leaf entities have no "childs" key)
        sub_entities = entity_data.get("childs") or {}
        entity_queue.extend(sub_entities.items())
    return asset_names
def sync_project(self, context, entity_data):
    """Synchronize project document attributes with hierarchy data.

    Compares the entity type and custom attributes from 'entity_data'
    against the project document stored on the publish context and
    writes only the changed values to the database.

    Args:
        context (pyblish.api.Context): Publish context holding
            "projectEntity" in its data.
        entity_data (Dict[str, Any]): Hierarchy data of the project
            ("entity_type" and optional "custom_attributes").

    Returns:
        Dict[str, Any]: The (possibly updated) project document.
    """

    project_doc = context.data["projectEntity"]
    # Make sure the document has a "data" field to compare against
    if "data" not in project_doc:
        project_doc["data"] = {}
    current_data = project_doc["data"]

    changes = {}
    entity_type = entity_data["entity_type"]
    if current_data.get("entityType") != entity_type:
        # Fix: write to the same nested field that was compared
        # ("data.entityType") and keep the in-memory document in sync.
        # Previously the change was recorded under a top-level
        # "entityType" key while the comparison read "data.entityType",
        # so the $set targeted a different field and the same change
        # was re-detected on every run.
        changes["data.entityType"] = entity_type
        current_data["entityType"] = entity_type

    # Custom attributes.
    attributes = entity_data.get("custom_attributes") or {}
    for key, value in attributes.items():
        if key not in current_data or current_data[key] != value:
            update_key = "data.{}".format(key)
            changes[update_key] = value
            current_data[key] = value

    if changes:
        # Update entity data with input data
        legacy_io.update_one(
            {"_id": project_doc["_id"]},
            {"$set": changes}
        )
    return project_doc
def sync_asset(
self,
asset_name,
entity_data,
parent,
project,
asset_docs_by_name,
archived_asset_docs_by_name
):
# Prepare data for new asset or for update comparison
data = {
"entityType": entity_data["entity_type"]
}
# Custom attributes.
attributes = entity_data.get("custom_attributes") or {}
for key, value in attributes.items():
data[key] = value
data["inputs"] = entity_data.get("inputs") or []
# Parents and visual parent are empty if parent is project
parents = []
parent_id = None
if project["_id"] != parent["_id"]:
parent_id = parent["_id"]
# Use parent's parents as source value
parents.extend(parent["data"]["parents"])
# Add parent's name to parents
parents.append(parent["name"])
data["visualParent"] = parent_id
data["parents"] = parents
asset_doc = asset_docs_by_name.get(asset_name)
# --- Create/Unarchive asset and end ---
if not asset_doc:
# Just use tasks from entity data as they are
# - this is different from the case when tasks are updated
data["tasks"] = entity_data.get("tasks") or {}
archived_asset_doc = None
for archived_entity in archived_asset_docs_by_name[asset_name]:
archived_parents = (
archived_entity
.get("data", {})
.get("parents")
)
if data["parents"] == archived_parents:
archived_asset_doc = archived_entity
break
# Create entity if doesn't exist
if archived_asset_doc is None:
return self.create_avalon_asset(
asset_name, data, project
)
if "childs" in entity_data:
self.import_to_avalon(
context, project_name, entity_data["childs"], entity
)
return self.unarchive_entity(
archived_asset_doc, data, project
)
def unarchive_entity(self, entity, data):
# --- Update existing asset ---
# Make sure current entity has "data" key
if "data" not in asset_doc:
asset_doc["data"] = {}
cur_entity_data = asset_doc["data"]
cur_entity_tasks = cur_entity_data.get("tasks") or {}
# Tasks
data["tasks"] = {}
new_tasks = entity_data.get("tasks") or {}
for task_name, task_info in new_tasks.items():
task_info = deepcopy(task_info)
if task_name in cur_entity_tasks:
src_task_info = deepcopy(cur_entity_tasks[task_name])
src_task_info.update(task_info)
task_info = src_task_info
data["tasks"][task_name] = task_info
changes = {}
for key, value in data.items():
if key not in cur_entity_data or value != cur_entity_data[key]:
update_key = "data.{}".format(key)
changes[update_key] = value
cur_entity_data[key] = value
# Update asset in database if necessary
if changes:
# Update entity data with input data
legacy_io.update_one(
{"_id": asset_doc["_id"]},
{"$set": changes}
)
return asset_doc
def unarchive_entity(self, archived_doc, data, project):
# Unarchived asset should not use same data
new_entity = {
"_id": entity["_id"],
asset_doc = {
"_id": archived_doc["_id"],
"schema": "openpype:asset-3.0",
"name": entity["name"],
"parent": self.project["_id"],
"name": archived_doc["name"],
"parent": project["_id"],
"type": "asset",
"data": data
}
legacy_io.replace_one(
{"_id": entity["_id"]},
new_entity
{"_id": archived_doc["_id"]},
asset_doc
)
return new_entity
return asset_doc
def create_avalon_asset(self, name, data):
def create_avalon_asset(self, name, data, project):
asset_doc = {
"schema": "openpype:asset-3.0",
"name": name,
"parent": self.project["_id"],
"parent": project["_id"],
"type": "asset",
"data": data
}
@ -194,27 +267,27 @@ class ExtractHierarchyToAvalon(pyblish.api.ContextPlugin):
project_name,
asset_doc
):
asset_name = asset_doc["name"]
new_parents = asset_doc["data"]["parents"]
hierarchy = "/".join(new_parents)
parent_name = project_name
if new_parents:
parent_name = new_parents[-1]
for instance in context:
# Skip instance if has filled asset entity
if instance.data.get("assetEntity"):
# Skip if instance asset does not match
instance_asset_name = instance.data.get("asset")
if asset_name != instance_asset_name:
continue
asset_name = asset_doc["name"]
inst_asset_name = instance.data["asset"]
if asset_name == inst_asset_name:
instance.data["assetEntity"] = asset_doc
instance_asset_doc = instance.data.get("assetEntity")
# Update asset entity with new possible changes of asset document
instance.data["assetEntity"] = asset_doc
# get parenting data
parents = asset_doc["data"].get("parents") or list()
# equire only relative parent
parent_name = project_name
if parents:
parent_name = parents[-1]
# update avalon data on instance
# Update anatomy data if asset was not set on instance
if not instance_asset_doc:
instance.data["anatomyData"].update({
"hierarchy": "/".join(parents),
"hierarchy": hierarchy,
"task": {},
"parent": parent_name
})
@ -241,7 +314,7 @@ class ExtractHierarchyToAvalon(pyblish.api.ContextPlugin):
hierarchy_context = context.data["hierarchyContext"]
active_assets = []
# filter only the active publishing insatnces
# filter only the active publishing instances
for instance in context:
if instance.data.get("publish") is False:
continue