Mirror of https://github.com/ynput/ayon-core.git (synced 2025-12-24 21:04:40 +01:00)
Merge branch 'develop' into feature/PYPE-662_datetime_data_publish
Commit b3f506f364
5 changed files with 100 additions and 12 deletions
@@ -99,6 +99,7 @@ class DeleteAssetSubset(BaseAction):
 
         # Filter event even more (skip task entities)
         # - task entities are not relevant for avalon
+        entity_mapping = {}
        for entity in entities:
             ftrack_id = entity["id"]
             if ftrack_id not in ftrack_ids:
@@ -107,6 +108,8 @@ class DeleteAssetSubset(BaseAction):
             if entity.entity_type.lower() == "task":
                 ftrack_ids.remove(ftrack_id)
 
+            entity_mapping[ftrack_id] = entity
+
         if not ftrack_ids:
             # It is bug if this happens!
             return {
@@ -122,11 +125,41 @@ class DeleteAssetSubset(BaseAction):
         project_name = project["full_name"]
         self.dbcon.Session["AVALON_PROJECT"] = project_name
 
-        selected_av_entities = self.dbcon.find({
+        selected_av_entities = list(self.dbcon.find({
             "type": "asset",
             "data.ftrackId": {"$in": ftrack_ids}
-        })
-        selected_av_entities = [ent for ent in selected_av_entities]
+        }))
+        found_without_ftrack_id = {}
+        if len(selected_av_entities) != len(ftrack_ids):
+            found_ftrack_ids = [
+                ent["data"]["ftrackId"] for ent in selected_av_entities
+            ]
+            for ftrack_id, entity in entity_mapping.items():
+                if ftrack_id in found_ftrack_ids:
+                    continue
+
+                av_ents_by_name = list(self.dbcon.find({
+                    "type": "asset",
+                    "name": entity["name"]
+                }))
+                if not av_ents_by_name:
+                    continue
+
+                ent_path_items = [ent["name"] for ent in entity["link"]]
+                parents = ent_path_items[1:len(ent_path_items)-1:]
+                # TODO we should say to user that
+                # few of them are missing in avalon
+                for av_ent in av_ents_by_name:
+                    if av_ent["data"]["parents"] != parents:
+                        continue
+
+                    # TODO we should say to user that found entity
+                    # with same name does not match same ftrack id?
+                    if "ftrackId" not in av_ent["data"]:
+                        selected_av_entities.append(av_ent)
+                        found_without_ftrack_id[str(av_ent["_id"])] = ftrack_id
+                        break
 
         if not selected_av_entities:
             return {
                 "success": False,
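
The block above adds a fallback for entities whose avalon documents are missing "data.ftrackId": assets are looked up by name and their stored "data.parents" is compared against a parent list rebuilt from the ftrack entity's "link"; matches are appended to the selection and recorded in found_without_ftrack_id. A standalone sketch of how that parent list is derived, with invented link items:

    # Drop the project (first item) and the entity itself (last item),
    # keeping only the names in between.
    entity_link = [
        {"name": "MyProject"},
        {"name": "shots"},
        {"name": "sh010"},
    ]
    ent_path_items = [item["name"] for item in entity_link]
    parents = ent_path_items[1:len(ent_path_items) - 1]
    print(parents)  # ['shots']
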
@@ -155,7 +188,8 @@ class DeleteAssetSubset(BaseAction):
             "created_at": datetime.now(),
             "project_name": project_name,
             "subset_ids_by_name": {},
-            "subset_ids_by_parent": {}
+            "subset_ids_by_parent": {},
+            "without_ftrack_id": found_without_ftrack_id
         }
 
         id_item = {
@@ -413,14 +447,21 @@ class DeleteAssetSubset(BaseAction):
         asset_ids_to_archive = []
         ftrack_ids_to_delete = []
         if len(assets_to_delete) > 0:
+            map_av_ftrack_id = spec_data["without_ftrack_id"]
             # Prepare data when deleting whole avalon asset
             avalon_assets = self.dbcon.find({"type": "asset"})
             avalon_assets_by_parent = collections.defaultdict(list)
             for asset in avalon_assets:
+                asset_id = asset["_id"]
                 parent_id = asset["data"]["visualParent"]
                 avalon_assets_by_parent[parent_id].append(asset)
-                if asset["_id"] in assets_to_delete:
-                    ftrack_id = asset["data"]["ftrackId"]
+                if asset_id in assets_to_delete:
+                    ftrack_id = map_av_ftrack_id.get(str(asset_id))
+                    if not ftrack_id:
+                        ftrack_id = asset["data"].get("ftrackId")
+
+                    if not ftrack_id:
+                        continue
                     ftrack_ids_to_delete.append(ftrack_id)
 
         children_queue = Queue()
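
With the hunk above, the delete path resolves an asset's ftrack id from the "without_ftrack_id" mapping stored in spec_data first and only then from the asset document itself, and it skips assets that have neither instead of failing on a direct asset["data"]["ftrackId"] lookup. A minimal sketch of that resolution order, with made-up sample data:

    map_av_ftrack_id = {"5d2f9e2c1a2b3c4d5e6f7a8b": "ftrack-entity-id"}
    asset = {"_id": "5d2f9e2c1a2b3c4d5e6f7a8b", "data": {}}

    ftrack_id = map_av_ftrack_id.get(str(asset["_id"]))
    if not ftrack_id:
        ftrack_id = asset["data"].get("ftrackId")

    if not ftrack_id:
        # Neither the mapping nor the asset document knows the id: skip it.
        pass
    print(ftrack_id)  # 'ftrack-entity-id', taken from the mapping
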
@@ -1445,7 +1445,7 @@ class SyncToAvalonEvent(BaseEvent):
             .get("name", {})
             .get("new")
         )
-        avalon_ent_by_name = self.avalon_ents_by_name.get(name)
+        avalon_ent_by_name = self.avalon_ents_by_name.get(name) or {}
        avalon_ent_by_name_ftrack_id = (
             avalon_ent_by_name
             .get("data", {})
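
The "or {}" guard matters because dict.get returns None when the changed name is not in the cache, and the chained .get("data", {}) would then raise AttributeError. Illustrative snippet with an empty, made-up cache:

    avalon_ents_by_name = {}
    name = "sh010"

    entity = avalon_ents_by_name.get(name)        # None for an unknown name
    # entity.get("data", {})                      # would raise AttributeError

    entity = avalon_ents_by_name.get(name) or {}  # {} instead of None
    ftrack_id = entity.get("data", {}).get("ftrackId")
    print(ftrack_id)  # None, and no exception
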
@@ -78,6 +78,8 @@ class CollectTemplates(pyblish.api.InstancePlugin):
         if hierarchy:
+            # hierarchy = os.path.sep.join(hierarchy)
+            hierarchy = os.path.join(*hierarchy)
         else:
             hierarchy = ""
 
         template_data = {"root": api.Session["AVALON_PROJECTS"],
                          "project": {"name": project_name,
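
os.path.join(*hierarchy) yields the same platform-native separator as the commented-out os.path.sep.join(hierarchy); both need a non-empty list, which the surrounding "if hierarchy:" already guarantees. A quick comparison with an example list (not data from the plugin):

    import os

    hierarchy = ["shots", "sq01", "sh010"]

    print(os.path.sep.join(hierarchy))  # 'shots/sq01/sh010' on POSIX
    print(os.path.join(*hierarchy))     # same path, joined element by element
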
@@ -111,8 +111,15 @@ class LoadMov(api.Loader):
         if namespace is None:
             namespace = context['asset']['name']
 
-        file = self.fname.replace("\\", "/")
-        log.info("file: {}\n".format(self.fname))
+        file = self.fname
+
+        if not file:
+            repr_id = context["representation"]["_id"]
+            log.warning(
+                "Representation id `{}` is failing to load".format(repr_id))
+            return
+
+        file = file.replace("\\", "/")
 
         read_name = "Read_{0}_{1}_{2}".format(
             repr_cont["asset"],
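
The loaders now validate the resolved path before normalizing backslashes: an empty self.fname logs a warning with the representation id and aborts instead of creating a Read node that points at nothing. The same guard is repeated in the update paths further down. A self-contained sketch of the pattern, where log, fname and the representation dict stand in for the loader's attributes:

    import logging

    logging.basicConfig()
    log = logging.getLogger(__name__)

    def resolve_file(fname, representation):
        file = fname
        if not file:
            repr_id = representation["_id"]
            log.warning(
                "Representation id `{}` is failing to load".format(repr_id))
            return None
        return file.replace("\\", "/")

    print(resolve_file("C:\\renders\\shot.mov", {"_id": "abc123"}))  # 'C:/renders/shot.mov'
    print(resolve_file("", {"_id": "abc123"}))                       # None, warning logged
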
@@ -200,7 +207,15 @@ class LoadMov(api.Loader):
 
         assert node.Class() == "Read", "Must be Read"
 
-        file = self.fname.replace("\\", "/")
+        file = self.fname
+
+        if not file:
+            repr_id = representation["_id"]
+            log.warning(
+                "Representation id `{}` is failing to load".format(repr_id))
+            return
+
+        file = file.replace("\\", "/")
 
         # Get start frame from version data
         version = io.find_one({
@@ -263,6 +278,19 @@ class LoadMov(api.Loader):
         if colorspace:
             node["colorspace"].setValue(str(colorspace))
 
+        # load nuke presets for Read's colorspace
+        read_clrs_presets = presets.get_colorspace_preset().get(
+            "nuke", {}).get("read", {})
+
+        # check if any colorspace presets for read is mathing
+        preset_clrsp = next((read_clrs_presets[k]
+                             for k in read_clrs_presets
+                             if bool(re.search(k, file))),
+                            None)
+        if preset_clrsp is not None:
+            node["colorspace"].setValue(str(preset_clrsp))
+
+
         updated_dict = {}
         updated_dict.update({
             "representation": str(representation["_id"]),
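
The colorspace lookup added above treats each key of the "read" preset dict as a regular expression and applies the value of the first key that matches the file path; when several keys match, the dict's iteration order decides which one wins. Sketch with an invented preset dict:

    import re

    read_clrs_presets = {
        r"\.exr$": "linear",
        r"\.mov$": "sRGB",
    }
    file = "/path/to/plate_v001.mov"

    preset_clrsp = next(
        (read_clrs_presets[k] for k in read_clrs_presets if re.search(k, file)),
        None
    )
    print(preset_clrsp)  # 'sRGB'
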
@@ -107,7 +107,15 @@ class LoadSequence(api.Loader):
         first -= self.handle_start
         last += self.handle_end
 
-        file = self.fname.replace("\\", "/")
+        file = self.fname
+
+        if not file:
+            repr_id = context["representation"]["_id"]
+            log.warning(
+                "Representation id `{}` is failing to load".format(repr_id))
+            return
+
+        file = file.replace("\\", "/")
 
         repr_cont = context["representation"]["context"]
         if "#" not in file:
@@ -229,7 +237,16 @@ class LoadSequence(api.Loader):
         assert node.Class() == "Read", "Must be Read"
 
         repr_cont = representation["context"]
-        file = self.fname.replace("\\", "/")
+
+        file = self.fname
+
+        if not file:
+            repr_id = representation["_id"]
+            log.warning(
+                "Representation id `{}` is failing to load".format(repr_id))
+            return
+
+        file = file.replace("\\", "/")
 
         if "#" not in file:
             frame = repr_cont.get("frame")