From 186b29340f33b56b774f8adf066a4794126a49d0 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 20 Feb 2020 14:13:19 +0100 Subject: [PATCH 01/43] added master version implementation to outdated check --- pype/lib.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/pype/lib.py b/pype/lib.py index 2235efa2f4..796fe4f11f 100644 --- a/pype/lib.py +++ b/pype/lib.py @@ -173,6 +173,8 @@ def is_latest(representation): """ version = io.find_one({"_id": representation['parent']}) + if version["type"] == "master_version": + return True # Get highest version under the parent highest_version = io.find_one({ From 6ff31dd9a232414b1537126b81caa669f4aea076 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 20 Feb 2020 17:04:12 +0100 Subject: [PATCH 02/43] integrate_new also stores anatomy data to published_representations --- pype/plugins/global/publish/integrate_new.py | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/pype/plugins/global/publish/integrate_new.py b/pype/plugins/global/publish/integrate_new.py index a2343ce8a9..18e492796a 100644 --- a/pype/plugins/global/publish/integrate_new.py +++ b/pype/plugins/global/publish/integrate_new.py @@ -255,6 +255,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): if 'transfers' not in instance.data: instance.data['transfers'] = [] + published_representations = {} for idx, repre in enumerate(instance.data["representations"]): # create template data for Anatomy template_data = copy.deepcopy(anatomy_data) @@ -448,6 +449,10 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): self.log.debug("__ destination_list: {}".format(destination_list)) instance.data['destination_list'] = destination_list representations.append(representation) + published_representations[repre_id] = { + "representation": representation, + "anatomy_data": template_data + } self.log.debug("__ representations: {}".format(representations)) # Remove old representations if there are any (before insertion of new) @@ -462,7 +467,9 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): self.log.debug("__ represNAME: {}".format(rep['name'])) self.log.debug("__ represPATH: {}".format(rep['published_path'])) io.insert_many(representations) - instance.data["published_representations"] = representations + instance.data["published_representations"] = ( + published_representations + ) # self.log.debug("Representation: {}".format(representations)) self.log.info("Registered {} items".format(len(representations))) From ceac303221fdc96d66d16e2137ed44dc9e384bbc Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 20 Feb 2020 17:04:50 +0100 Subject: [PATCH 03/43] integrate thumbnails do not raise error but log warnings --- pype/plugins/global/publish/integrate_thumbnail.py | 13 ++++++++----- 1 file changed, 8 insertions(+), 5 deletions(-) diff --git a/pype/plugins/global/publish/integrate_thumbnail.py b/pype/plugins/global/publish/integrate_thumbnail.py index b623fa9072..5361c8aadb 100644 --- a/pype/plugins/global/publish/integrate_thumbnail.py +++ b/pype/plugins/global/publish/integrate_thumbnail.py @@ -21,14 +21,16 @@ class IntegrateThumbnails(pyblish.api.InstancePlugin): def process(self, instance): if not os.environ.get("AVALON_THUMBNAIL_ROOT"): - self.log.info("AVALON_THUMBNAIL_ROOT is not set." - " Skipping thumbnail integration.") + self.log.warning( + "AVALON_THUMBNAIL_ROOT is not set." + " Skipping thumbnail integration." 
+ ) return published_repres = instance.data.get("published_representations") if not published_repres: self.log.debug( - "There are not published representation ids on the instance." + "There are not published representations on the instance." ) return @@ -36,10 +38,11 @@ class IntegrateThumbnails(pyblish.api.InstancePlugin): anatomy = instance.context.data["anatomy"] if "publish" not in anatomy.templates: - raise AssertionError("Anatomy does not have set publish key!") + self.warning("Anatomy does not have set publish key!") + return if "thumbnail" not in anatomy.templates["publish"]: - raise AssertionError(( + self.warning(( "There is not set \"thumbnail\" template for project \"{}\"" ).format(project_name)) From 20d6893e1dbaa19fcd9282ffccfc039896016222 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 20 Feb 2020 17:05:48 +0100 Subject: [PATCH 04/43] integrate thumbnail uses new anatomy feature --- pype/plugins/global/publish/integrate_thumbnail.py | 13 ++++--------- 1 file changed, 4 insertions(+), 9 deletions(-) diff --git a/pype/plugins/global/publish/integrate_thumbnail.py b/pype/plugins/global/publish/integrate_thumbnail.py index 5361c8aadb..78929713da 100644 --- a/pype/plugins/global/publish/integrate_thumbnail.py +++ b/pype/plugins/global/publish/integrate_thumbnail.py @@ -92,15 +92,9 @@ class IntegrateThumbnails(pyblish.api.InstancePlugin): }) anatomy_filled = anatomy.format(template_data) - final_path = anatomy_filled.get("publish", {}).get("thumbnail") - if not final_path: - raise AssertionError(( - "Anatomy template was not filled with entered data" - "\nTemplate: {} " - "\nData: {}" - ).format(thumbnail_template, str(template_data))) + template_filled = anatomy_filled["publish"]["thumbnail"] - dst_full_path = os.path.normpath(final_path) + dst_full_path = os.path.normpath(str(template_filled)) self.log.debug( "Copying file .. 
{} -> {}".format(src_full_path, dst_full_path) ) @@ -118,13 +112,14 @@ class IntegrateThumbnails(pyblish.api.InstancePlugin): template_data.pop("_id") template_data.pop("thumbnail_root") + repre_context = template_filled.used_values thumbnail_entity = { "_id": thumbnail_id, "type": "thumbnail", "schema": "pype:thumbnail-1.0", "data": { "template": thumbnail_template, - "template_data": template_data + "template_data": repre_context } } # Create thumbnail entity From edf48c01491568f4291de59d47370200cca32ac2 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 20 Feb 2020 17:06:57 +0100 Subject: [PATCH 05/43] added required keys for anatomy data to thumbnail context --- .../global/publish/integrate_thumbnail.py | 21 ++++++++++++++----- 1 file changed, 16 insertions(+), 5 deletions(-) diff --git a/pype/plugins/global/publish/integrate_thumbnail.py b/pype/plugins/global/publish/integrate_thumbnail.py index 78929713da..75755ccb64 100644 --- a/pype/plugins/global/publish/integrate_thumbnail.py +++ b/pype/plugins/global/publish/integrate_thumbnail.py @@ -18,6 +18,10 @@ class IntegrateThumbnails(pyblish.api.InstancePlugin): order = pyblish.api.IntegratorOrder + 0.01 families = ["review"] + required_context_keys = [ + "project", "asset", "task", "subset", "version" + ] + def process(self, instance): if not os.environ.get("AVALON_THUMBNAIL_ROOT"): @@ -45,10 +49,7 @@ class IntegrateThumbnails(pyblish.api.InstancePlugin): self.warning(( "There is not set \"thumbnail\" template for project \"{}\"" ).format(project_name)) - - thumbnail_template = anatomy.templates["publish"]["thumbnail"] - - io.install() + return thumb_repre = None for repre in published_repres: @@ -62,6 +63,10 @@ class IntegrateThumbnails(pyblish.api.InstancePlugin): ) return + io.install() + + thumbnail_template = anatomy.templates["publish"]["thumbnail"] + version = io.find_one({"_id": thumb_repre["parent"]}) if not version: raise AssertionError( @@ -83,7 +88,7 @@ class IntegrateThumbnails(pyblish.api.InstancePlugin): thumbnail_id = ObjectId() # Prepare anatomy template fill data - template_data = copy.deepcopy(thumb_repre["context"]) + template_data = copy.deepcopy(thumb_repre_anatomy_data) template_data.update({ "_id": str(thumbnail_id), "thumbnail_root": os.environ.get("AVALON_THUMBNAIL_ROOT"), @@ -113,6 +118,12 @@ class IntegrateThumbnails(pyblish.api.InstancePlugin): template_data.pop("thumbnail_root") repre_context = template_filled.used_values + for key in self.required_context_keys: + value = template_data.get(key) + if not value: + continue + repre_context[key] = template_data[key] + thumbnail_entity = { "_id": thumbnail_id, "type": "thumbnail", From 4a3bf303d4170dceba4331ae9fdfe070f0ad5436 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 20 Feb 2020 17:07:18 +0100 Subject: [PATCH 06/43] integrate thumbnails use new structure of published representations --- pype/plugins/global/publish/integrate_thumbnail.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/pype/plugins/global/publish/integrate_thumbnail.py b/pype/plugins/global/publish/integrate_thumbnail.py index 75755ccb64..0bb34eab58 100644 --- a/pype/plugins/global/publish/integrate_thumbnail.py +++ b/pype/plugins/global/publish/integrate_thumbnail.py @@ -52,9 +52,12 @@ class IntegrateThumbnails(pyblish.api.InstancePlugin): return thumb_repre = None - for repre in published_repres: + thumb_repre_anatomy_data = None + for repre_info in published_repres.values(): + repre = repre_info["representation"] if repre["name"].lower() == "thumbnail": 
thumb_repre = repre
+                thumb_repre_anatomy_data = repre_info["anatomy_data"]
                 break
 
         if not thumb_repre:

From a3e847bc21ad6498c7f10494115e765ff1e48214 Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Thu, 20 Feb 2020 18:29:05 +0100
Subject: [PATCH 07/43] store more information into published repres

---
 pype/plugins/global/publish/integrate_new.py | 7 ++++++-
 1 file changed, 6 insertions(+), 1 deletion(-)

diff --git a/pype/plugins/global/publish/integrate_new.py b/pype/plugins/global/publish/integrate_new.py
index 18e492796a..fe2bcbff33 100644
--- a/pype/plugins/global/publish/integrate_new.py
+++ b/pype/plugins/global/publish/integrate_new.py
@@ -234,6 +234,8 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
             bulk_writes
         )
 
+        version = io.find_one({"_id": version_id})
+
         existing_repres = list(io.find({
             "parent": version_id,
             "type": "archived_representation"
@@ -451,7 +453,10 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
             representations.append(representation)
             published_representations[repre_id] = {
                 "representation": representation,
-                "anatomy_data": template_data
+                "anatomy_data": template_data,
+                # TODO probably should store subset and version to instance
+                "subset_entity": subset,
+                "version_entity": version
             }
 
         self.log.debug("__ representations: {}".format(representations))

From 2abe39ef9d8f6e75aa92a3d145f909aeba4d8c16 Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Thu, 20 Feb 2020 18:29:19 +0100
Subject: [PATCH 08/43] initial commit for integrate master version

---
 .../publish/integrate_master_version.py | 129 ++++++++++++++++++
 1 file changed, 129 insertions(+)
 create mode 100644 pype/plugins/global/publish/integrate_master_version.py

diff --git a/pype/plugins/global/publish/integrate_master_version.py b/pype/plugins/global/publish/integrate_master_version.py
new file mode 100644
index 0000000000..efd01dd07c
--- /dev/null
+++ b/pype/plugins/global/publish/integrate_master_version.py
@@ -0,0 +1,129 @@
+import os
+import logging
+import shutil
+
+import errno
+import pyblish.api
+from avalon import api, io
+from avalon.vendor import filelink
+
+
+log = logging.getLogger(__name__)
+
+
+class IntegrateMasterVersion(pyblish.api.InstancePlugin):
+    label = "Integrate Master Version"
+    # Must happen after IntegrateNew
+    order = pyblish.api.IntegratorOrder + 0.1
+
+    ignored_representation_names = []
+
+    def process(self, instance):
+        published_repres = instance.data.get("published_representations")
+        if not published_repres:
+            self.log.debug(
+                "There are not published representations on the instance."
+            )
+            return
+
+        project_name = api.Session["AVALON_PROJECT"]
+
+        # TODO raise error if master not set?
+        anatomy = instance.context.data["anatomy"]
+        if "publish" not in anatomy.templates:
+            self.warning("Anatomy does not have set publish key!")
+            return
+
+        if "master" not in anatomy.templates["publish"]:
+            self.warning((
+                "There is not set \"master\" template for project \"{}\""
+            ).format(project_name))
+            return
+
+        version_entity = None
+
+        filtered_repre_ids = []
+        for repre_id, repre_info in published_repres.items():
+            repre = repre_info["representation"]
+            if version_entity is None:
+                version_entity = repre_info.get("version_entity")
+
+            if repre["name"].lower() in self.ignored_representation_names:
+                filtered_repre_ids.append(repre_id)
+
+        for repre_id in filtered_repre_ids:
+            published_repres.pop(repre_id, None)
+
+        if not published_repres:
+            self.log.debug(
+                "All published representations were filtered by name."
+            )
+            return
+
+        if version_entity is None:
+            version_entity = (
+                self.version_from_representations(published_repres)
+            )
+
+        if not version_entity:
+            self.log.warning("Can't find origin version in database.")
+            return
+
+        cur_master_version, cur_master_repres = (
+            self.current_master_ents(version_entity)
+        )
+
+        cur_master_repres_by_name = {
+            repre["name"].lower(): repre for repre in cur_master_repres
+        }
+
+        if cur_master_version:
+            cur_master_version_id = cur_master_version["_id"]
+        else:
+            cur_master_version_id = io.ObjectId()
+
+        new_master_version = {
+            "_id": cur_master_version_id,
+            "version_id": version_entity["_id"],
+            "parent": version_entity["parent"],
+            "type": "master_version",
+            "schema": "pype:master_version-1.0"
+        }
+
+        repres_to_replace = {}
+        for repre_id, repre_info in published_repres.items():
+            repre = repre_info["representation"]
+            repre_name_low = repre["name"].lower()
+            if repre_name_low in cur_master_repres_by_name:
+                repres_to_replace[repre_id] = (
+                    cur_master_repres_by_name.pop(repre_name_low)
+                )
+
+        if cur_master_version:
+            io.replace_one(
+                {"_id": new_master_version["_id"]},
+                new_master_version
+            )
+        else:
+            io.insert_one(new_master_version)
+
+    def version_from_representations(self, repres):
+        for repre in repres:
+            version = io.find_one({"_id": repre["parent"]})
+            if version:
+                return version
+
+    def current_master_ents(self, version):
+        master_version = io.find_one({
+            "parent": version["parent"],
+            "type": "master_version"
+        })
+
+        if not master_version:
+            return (None, [])
+
+        master_repres = list(io.find({
+            "parent": master_version["_id"],
+            "type": "representation"
+        }))
+        return (master_version, master_repres)

From c06a4c337beb85df6e3a2bb18538ccb8a36c3f35 Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Thu, 20 Feb 2020 18:59:30 +0100
Subject: [PATCH 09/43] initial master version schema

---
 schema/master_version-1.0.json | 41 ++++++++++++++++++++++++++++++++++
 1 file changed, 41 insertions(+)
 create mode 100644 schema/master_version-1.0.json

diff --git a/schema/master_version-1.0.json b/schema/master_version-1.0.json
new file mode 100644
index 0000000000..173a076537
--- /dev/null
+++ b/schema/master_version-1.0.json
@@ -0,0 +1,41 @@
+{
+    "$schema": "http://json-schema.org/draft-04/schema#",
+
+    "title": "pype:master_version-1.0",
+    "description": "Master version of asset",
+
+    "type": "object",
+
+    "additionalProperties": true,
+
+    "required": [
+        "schema",
+        "type",
+        "parent"
+    ],
+
+    "properties": {
+        "_id": {
+            "description": "Document's id (database will create its own if not entered)",
+            "type": "ObjectId",
+            "example": "592c33475f8c1b064c4d1696"
+        },
+        "schema": {
+            "description": "The schema associated with this document",
+            "type": "string",
+            "enum": ["avalon-core:master_version-1.0", "pype:master_version-1.0"],
+            "example": "pype:master_version-1.0"
+        },
+        "type": {
+            "description": "The type of document",
+            "type": "string",
+            "enum": ["master_version"],
+            "example": "master_version"
+        },
+        "parent": {
+            "description": "Unique identifier to parent document",
+            "type": "ObjectId",
+            "example": "592c33475f8c1b064c4d1696"
+        }
+    }
+}

From 34438fcc42a47076ae7bc089eebb99fa02c081f0 Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Fri, 21 Feb 2020 19:17:29 +0100
Subject: [PATCH 10/43] seems to look like it may work once

---
 .../publish/integrate_master_version.py | 260 ++++++++++++++++--
 1 file changed, 231 insertions(+), 29 deletions(-)

diff --git a/pype/plugins/global/publish/integrate_master_version.py
b/pype/plugins/global/publish/integrate_master_version.py index efd01dd07c..6991978a24 100644 --- a/pype/plugins/global/publish/integrate_master_version.py +++ b/pype/plugins/global/publish/integrate_master_version.py @@ -1,10 +1,10 @@ import os +import copy import logging -import shutil -import errno +from pymongo import InsertOne, ReplaceOne import pyblish.api -from avalon import api, io +from avalon import api, io, pipeline from avalon.vendor import filelink @@ -40,13 +40,15 @@ class IntegrateMasterVersion(pyblish.api.InstancePlugin): ).format(project_name)) return - version_entity = None + master_template = anatomy.templates["publish"]["master"] + + src_version_entity = None filtered_repre_ids = [] for repre_id, repre_info in published_repres.items(): repre = repre_info["representation"] - if version_entity is None: - version_entity = repre_info.get("version_entity") + if src_version_entity is None: + src_version_entity = repre_info.get("version_entity") if repre["name"].lower() in self.ignored_representation_names: filtered_repre_ids.append(repre_id) @@ -60,52 +62,252 @@ class IntegrateMasterVersion(pyblish.api.InstancePlugin): ) return - if version_entity is None: - version_entity = ( + if src_version_entity is None: + src_version_entity = ( self.version_from_representations(published_repres) ) - if not version_entity: + if not src_version_entity: self.log.warning("Can't find origin version in database.") return - cur_master_version, cur_master_repres = ( - self.current_master_ents(version_entity) + old_version, old_repres = ( + self.current_master_ents(src_version_entity) ) - cur_master_repres_by_name = { - repre["name"].lower(): repre for repre in cur_master_repres + old_repres_by_name = { + repre["name"].lower(): repre for repre in old_repres } - if cur_master_version: - cur_master_version_id = cur_master_version["_id"] + if old_version: + new_version_id = old_version["_id"] else: - cur_master_version_id = io.ObjectId() + new_version_id = io.ObjectId() new_master_version = { - "_id": cur_master_version_id, - "version_id": version_entity["_id"], - "parent": version_entity["parent"], + "_id": new_version_id, + "version_id": src_version_entity["_id"], + "parent": src_version_entity["parent"], "type": "master_version", "schema": "pype:master_version-1.0" } - repres_to_replace = {} + bulk_writes = [] + + if old_version: + bulk_writes.append( + ReplaceOne( + {"_id": new_master_version["_id"]}, + new_master_version + ) + ) + else: + bulk_writes.append( + InsertOne(new_master_version) + ) + + # Separate old representations into `to replace` and `to delete` + old_repres_to_replace = {} + old_repres_to_delete = {} for repre_id, repre_info in published_repres.items(): repre = repre_info["representation"] repre_name_low = repre["name"].lower() - if repre_name_low in cur_master_repres_by_name: - repres_to_replace[repre_id] = ( - cur_master_repres_by_name.pop(repre_name_low) + if repre_name_low in old_repres_by_name: + old_repres_to_replace[repre_name_low] = ( + old_repres_by_name.pop(repre_name_low) + ) + else: + old_repres_to_delete[repre_name_low] = ( + old_repres_by_name.pop(repre_name_low) ) - if cur_master_version: - io.replace_one( - {"_id": new_master_version["_id"]}, - new_master_version + archived_repres = list(io.find({ + # Check what is type of archived representation + "type": "archived_repsentation", + "parent": new_version_id + })) + archived_repres_by_name = {} + for repre in archived_repres: + repre_name_low = repre["name"].lower() + archived_repres_by_name[repre_name_low] = 
repre + + self.delete_repre_files(old_repres) + + for repre_id, repre_info in published_repres.items(): + repre = copy.deepcopy(repre_info["representation"]) + repre_name_low = repre["name"].lower() + + repre["parent"] = new_master_version["_id"] + # TODO change repre data and context (new anatomy) + # TODO hardlink files + + # Replace current representation + if repre_name_low in old_repres_to_replace: + old_repre = old_repres_to_replace.pop(repre_name_low) + repre["_id"] = old_repre["_id"] + bulk_writes.append( + ReplaceOne( + {"_id": old_repre["_id"]}, + repre + ) + ) + + # Unarchive representation + elif repre_name_low in archived_repres_by_name: + archived_repre = archived_repres_by_name.pop(repre_name_low) + old_id = archived_repre["old_id"] + repre["_id"] = old_id + bulk_writes.append( + ReplaceOne( + {"old_id": old_id}, + repre + ) + ) + + # Create representation + else: + repre["_id"] = io.ObjectId() + bulk_writes.append( + InsertOne(repre) + ) + + # Archive not replaced old representations + for repre_name_low, repre in old_repres_to_delete.items(): + # TODO delete their files + + # Replace archived representation (This is backup) + # - should not happen to have both repre and archived repre + if repre_name_low in archived_repres_by_name: + archived_repre = archived_repres_by_name.pop(repre_name_low) + repre["old_id"] = repre["_id"] + repre["_id"] = archived_repre["_id"] + repre["type"] = archived_repre["type"] + bulk_writes.append( + ReplaceOne( + {"_id": archived_repre["_id"]}, + repre + ) + ) + + else: + repre["old_id"] = repre["_id"] + repre["_id"] = io.ObjectId() + repre["type"] = "archived_representation" + bulk_writes.append( + InsertOne(repre) + ) + + if bulk_writes: + pass + + def delete_repre_files(self, repres): + if not repres: + return + + frame_splitter = "_-_FRAME_-_" + files_to_delete = [] + for repre in repres: + is_sequence = False + if "frame" in repre["context"]: + repre["context"]["frame"] = frame_splitter + is_sequence = True + + template = repre["data"]["template"] + context = repre["context"] + context["root"] = api.registered_root() + path = pipeline.format_template_with_optional_keys( + context, template ) - else: - io.insert_one(new_master_version) + path = os.path.normpath(path) + if not is_sequence: + if os.path.exists(path): + files_to_delete.append(path) + continue + + dirpath = os.path.dirname(path) + file_start = None + file_end = None + file_items = path.split(frame_splitter) + if len(file_items) == 0: + continue + elif len(file_items) == 1: + if path.startswith(frame_splitter): + file_end = file_items[0] + else: + file_start = file_items[1] + + elif len(file_items) == 2: + file_start, file_end = file_items + + else: + raise ValueError(( + "Representation template has `frame` key " + "more than once inside." + )) + + for file_name in os.listdir(dirpath): + check_name = str(file_name) + if file_start and not check_name.startswith(file_start): + continue + check_name.replace(file_start, "") + + if file_end and not check_name.endswith(file_end): + continue + check_name.replace(file_end, "") + + # File does not have frame + if not check_name: + continue + + files_to_delete.append(os.path.join(dirpath, file_name)) + + renamed_files = [] + failed = False + for file_path in files_to_delete: + # TODO too robust for testing - should be easier in future + _rename_path = file_path + ".BACKUP" + rename_path = None + max_index = 200 + cur_index = 1 + while True: + if max_index >= cur_index: + raise Exception(( + "Max while loop index reached! 
Can't make backup" + " for previous master version." + )) + break + + if not os.path.exists(_rename_path): + rename_path = _rename_path + break + + try: + os.remove(_rename_path) + except Exception: + _rename_path = file_path + ".BACKUP{}".format( + str(cur_index) + ) + cur_index += 1 + + try: + args = (file_path, rename_path) + os.rename(*args) + renamed_files.append(args) + except Exception: + failed = True + break + + if failed: + for dst_name, src_name in renamed_files: + os.rename(src_name, dst_name) + + raise AssertionError(( + "Could not create master version because it is not possible" + " to replace current master files." + )) + + for _, renamed_path in renamed_files: + os.remove(renamed_path) def version_from_representations(self, repres): for repre in repres: From 36c35dbf8433be8600eb9041c0d9e4d9c2fc8953 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Mon, 24 Feb 2020 14:06:41 +0100 Subject: [PATCH 11/43] store all published files per representation --- pype/plugins/global/publish/integrate_new.py | 20 ++++++++++++-------- 1 file changed, 12 insertions(+), 8 deletions(-) diff --git a/pype/plugins/global/publish/integrate_new.py b/pype/plugins/global/publish/integrate_new.py index fe2bcbff33..8ef027bb93 100644 --- a/pype/plugins/global/publish/integrate_new.py +++ b/pype/plugins/global/publish/integrate_new.py @@ -259,6 +259,8 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): published_representations = {} for idx, repre in enumerate(instance.data["representations"]): + published_files = [] + # create template data for Anatomy template_data = copy.deepcopy(anatomy_data) if intent is not None: @@ -364,16 +366,12 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): self.log.debug("source: {}".format(src)) instance.data["transfers"].append([src, dst]) + published_files.append(dst) + # for adding first frame into db if not dst_start_frame: dst_start_frame = dst_padding - dst = "{0}{1}{2}".format( - dst_head, - dst_start_frame, - dst_tail).replace("..", ".") - repre['published_path'] = self.unc_convert(dst) - else: # Single file # _______ @@ -402,9 +400,12 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): instance.data["transfers"].append([src, dst]) - repre['published_path'] = self.unc_convert(dst) + published_files.append(dst) + self.log.debug("__ dst: {}".format(dst)) + repre["publishedFiles"] = published_files + for key in self.db_representation_context_keys: value = template_data.get(key) if not value: @@ -454,6 +455,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): published_representations[repre_id] = { "representation": representation, "anatomy_data": template_data, + "published_files": published_files, # TODO prabably should store subset and version to instance "subset_entity": subset, "version_entity": version @@ -470,7 +472,9 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): self.log.debug("__ representations: {}".format(representations)) for rep in instance.data["representations"]: self.log.debug("__ represNAME: {}".format(rep['name'])) - self.log.debug("__ represPATH: {}".format(rep['published_path'])) + self.log.debug("__ represPATH:\n{}".format( + ",\n".join(rep['publishedFiles']) + )) io.insert_many(representations) instance.data["published_representations"] = ( published_representations From 2a128b0956f23a03cc844b8d8e7fbf379c7ac7bd Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Mon, 24 Feb 2020 14:06:58 +0100 Subject: [PATCH 12/43] added first version of file mapping --- .../publish/integrate_master_version.py | 67 +++++++++++++++++-- 1 
file changed, 60 insertions(+), 7 deletions(-) diff --git a/pype/plugins/global/publish/integrate_master_version.py b/pype/plugins/global/publish/integrate_master_version.py index 6991978a24..a93226ae18 100644 --- a/pype/plugins/global/publish/integrate_master_version.py +++ b/pype/plugins/global/publish/integrate_master_version.py @@ -1,6 +1,7 @@ import os import copy import logging +import clique from pymongo import InsertOne, ReplaceOne import pyblish.api @@ -40,8 +41,6 @@ class IntegrateMasterVersion(pyblish.api.InstancePlugin): ).format(project_name)) return - master_template = anatomy.templates["publish"]["master"] - src_version_entity = None filtered_repre_ids = [] @@ -133,13 +132,21 @@ class IntegrateMasterVersion(pyblish.api.InstancePlugin): self.delete_repre_files(old_repres) - for repre_id, repre_info in published_repres.items(): - repre = copy.deepcopy(repre_info["representation"]) - repre_name_low = repre["name"].lower() + master_template = anatomy.templates["publish"]["master"] + src_to_dst_file_paths = [] + for repre_id, repre_info in published_repres.items(): + + # Skip if new repre does not have published repre files + published_files = repre_info["published_files"] + if len(published_files) == 0: + continue + + # Prepare new repre + repre = copy.deepcopy(repre_info["representation"]) repre["parent"] = new_master_version["_id"] - # TODO change repre data and context (new anatomy) - # TODO hardlink files + + repre_name_low = repre["name"].lower() # Replace current representation if repre_name_low in old_repres_to_replace: @@ -171,6 +178,52 @@ class IntegrateMasterVersion(pyblish.api.InstancePlugin): InsertOne(repre) ) + # TODO change repre data and context (new anatomy) + # TODO hardlink files + + # Prepare anatomy data + anatomy_data = repre_info["anatomy_data"] + anatomy_data.pop("version", None) + + if len(published_files) == 1: + anatomy_filled = anatomy.format(anatomy_data) + template_filled = anatomy_filled["publish"]["master"] + src_to_dst_file_paths.append( + (published_files[0], template_filled) + ) + continue + + collections, remainders = clique.assemble(published_files) + if remainders or not collections or len(collections) > 1: + raise Exception(( + "Integrity error. Files of published representation" + " is combination of frame collections and single files." 
+ )) + + src_col = collections[0] + + # Get filled path to repre context + anatomy_filled = anatomy.format(anatomy_data) + template_filled = anatomy_filled["publish"]["master"] + + # Get head and tail for collection + frame_splitter = "_-_FRAME_SPLIT_-_" + anatomy_data["frame"] = frame_splitter + _anatomy_filled = anatomy.format(anatomy_data) + _template_filled = _anatomy_filled["publish"]["master"] + head, tail = _template_filled.split(frame_splitter) + padding = ( + anatomy.templates["render"]["padding"] + ) + + dst_col = clique.Collection(head=head, padding=padding, tail=tail) + dst_col.indexes.clear() + dst_col.indexes.update(src_col.indexes) + for src_file, dst_file in zip(src_col, dst_col): + src_to_dst_file_paths.append( + (src_file, dst_file) + ) + # Archive not replaced old representations for repre_name_low, repre in old_repres_to_delete.items(): # TODO delete their files From 1a3463c78c5f10f013766991142bbd94708077d4 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Mon, 24 Feb 2020 16:40:42 +0100 Subject: [PATCH 13/43] representation context and data are replaced with new data --- .../publish/integrate_master_version.py | 42 +++++++++++++------ 1 file changed, 30 insertions(+), 12 deletions(-) diff --git a/pype/plugins/global/publish/integrate_master_version.py b/pype/plugins/global/publish/integrate_master_version.py index a93226ae18..b508404d77 100644 --- a/pype/plugins/global/publish/integrate_master_version.py +++ b/pype/plugins/global/publish/integrate_master_version.py @@ -2,6 +2,7 @@ import os import copy import logging import clique +import errno from pymongo import InsertOne, ReplaceOne import pyblish.api @@ -18,6 +19,10 @@ class IntegrateMasterVersion(pyblish.api.InstancePlugin): order = pyblish.api.IntegratorOrder + 0.1 ignored_representation_names = [] + db_representation_context_keys = [ + "project", "asset", "task", "subset", "representation", + "family", "hierarchy", "task", "username" + ] def process(self, instance): published_repres = instance.data.get("published_representations") @@ -142,9 +147,34 @@ class IntegrateMasterVersion(pyblish.api.InstancePlugin): if len(published_files) == 0: continue + # Prepare anatomy data + anatomy_data = repre_info["anatomy_data"] + anatomy_data.pop("version", None) + + # Get filled path to repre context + anatomy_filled = anatomy.format(anatomy_data) + template_filled = anatomy_filled["publish"]["master"] + + repre_data = { + "path": str(template_filled), + "template": master_template + } + repre_context = template_filled.used_values + for key in self.db_representation_context_keys: + if ( + key in repre_context or + key not in anatomy_data + ): + continue + + repre_context[key] = anatomy_data[key] + + # TODO change repre data and context (new anatomy) # Prepare new repre repre = copy.deepcopy(repre_info["representation"]) repre["parent"] = new_master_version["_id"] + repre["context"] = repre_context + repre["data"] = repre_data repre_name_low = repre["name"].lower() @@ -178,16 +208,8 @@ class IntegrateMasterVersion(pyblish.api.InstancePlugin): InsertOne(repre) ) - # TODO change repre data and context (new anatomy) # TODO hardlink files - - # Prepare anatomy data - anatomy_data = repre_info["anatomy_data"] - anatomy_data.pop("version", None) - if len(published_files) == 1: - anatomy_filled = anatomy.format(anatomy_data) - template_filled = anatomy_filled["publish"]["master"] src_to_dst_file_paths.append( (published_files[0], template_filled) ) @@ -202,10 +224,6 @@ class IntegrateMasterVersion(pyblish.api.InstancePlugin): src_col = 
collections[0] - # Get filled path to repre context - anatomy_filled = anatomy.format(anatomy_data) - template_filled = anatomy_filled["publish"]["master"] - # Get head and tail for collection frame_splitter = "_-_FRAME_SPLIT_-_" anatomy_data["frame"] = frame_splitter From 84dceb42afda2753a74f9e86b5b1aa10aa748b0a Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Mon, 24 Feb 2020 16:41:04 +0100 Subject: [PATCH 14/43] added reate hardlink and path root checker --- .../publish/integrate_master_version.py | 81 +++++++++++++++++++ 1 file changed, 81 insertions(+) diff --git a/pype/plugins/global/publish/integrate_master_version.py b/pype/plugins/global/publish/integrate_master_version.py index b508404d77..1ec0bd00dd 100644 --- a/pype/plugins/global/publish/integrate_master_version.py +++ b/pype/plugins/global/publish/integrate_master_version.py @@ -242,6 +242,10 @@ class IntegrateMasterVersion(pyblish.api.InstancePlugin): (src_file, dst_file) ) + # TODO should we *only* create hardlinks? + for src_path, dst_path in src_to_dst_file_paths: + self.create_hardlink(src_path, dst_path) + # Archive not replaced old representations for repre_name_low, repre in old_repres_to_delete.items(): # TODO delete their files @@ -271,6 +275,83 @@ class IntegrateMasterVersion(pyblish.api.InstancePlugin): if bulk_writes: pass + def create_hardlink(self, src_path, dst_path): + dst_path = self.path_root_check(dst_path) + src_path = self.path_root_check(src_path) + + dirname = os.path.dirname(dst_path) + + try: + os.makedirs(dirname) + except OSError as exc: + if exc.errno != errno.EEXIST: + self.log.error("An unexpected error occurred.", exc_info=True) + raise + + filelink.create(src_path, dst_path, filelink.HARDLINK) + + def path_root_check(self, path): + normalized_path = os.path.normpath(path) + forward_slash_path = normalized_path.replace("\\", "/") + + drive, _path = os.path.splitdrive(normalized_path) + if os.path.exists(drive + "/"): + self.log.debug( + "Drive \"{}\" exist. Nothing to change.".format(drive) + ) + return normalized_path + + path_env_key = "PYPE_STUDIO_PROJECTS_PATH" + mount_env_key = "PYPE_STUDIO_PROJECTS_MOUNT" + missing_envs = [] + if path_env_key not in os.environ: + missing_envs.append(path_env_key) + + if mount_env_key not in os.environ: + missing_envs.append(mount_env_key) + + if missing_envs: + _add_s = "" + if len(missing_envs) > 1: + _add_s = "s" + + self.log.warning(( + "Can't replace MOUNT drive path to UNC path due to missing" + " environment variable{}: `{}`. This may cause issues during" + " publishing process." 
+            ).format(_add_s, ", ".join(missing_envs)))
+
+            return normalized_path
+
+        unc_root = os.environ[path_env_key].replace("\\", "/")
+        mount_root = os.environ[mount_env_key].replace("\\", "/")
+
+        # --- Remove slashes at the end of mount and unc roots ---
+        while unc_root.endswith("/"):
+            unc_root = unc_root[:-1]
+
+        while mount_root.endswith("/"):
+            mount_root = mount_root[:-1]
+        # ---
+
+        if forward_slash_path.startswith(unc_root):
+            self.log.debug((
+                "Path already starts with UNC root: \"{}\""
+            ).format(unc_root))
+            return normalized_path
+
+        if not forward_slash_path.startswith(mount_root):
+            self.log.warning((
+                "Path do not start with MOUNT root \"{}\" "
+                "set in environment variable \"{}\""
+            ).format(unc_root, mount_env_key))
+            return normalized_path
+
+        # Replace Mount root with Unc root
+        path = unc_root + forward_slash_path[len(mount_root):]
+
+        return os.path.normpath(path)
+
     def delete_repre_files(self, repres):
         if not repres:
             return

From 12e95ef32c883f44f0b1136e3023065a97b469dc Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Mon, 24 Feb 2020 17:24:40 +0100
Subject: [PATCH 15/43] published_path moved back due to integrity errors connected with removing

---
 pype/plugins/global/publish/integrate_new.py | 13 +++++++++----
 1 file changed, 9 insertions(+), 4 deletions(-)

diff --git a/pype/plugins/global/publish/integrate_new.py b/pype/plugins/global/publish/integrate_new.py
index 779a498451..f8cde10aed 100644
--- a/pype/plugins/global/publish/integrate_new.py
+++ b/pype/plugins/global/publish/integrate_new.py
@@ -373,6 +373,13 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
                 if not dst_start_frame:
                     dst_start_frame = dst_padding
 
+                dst = "{0}{1}{2}".format(
+                    dst_head,
+                    dst_start_frame,
+                    dst_tail
+                ).replace("..", ".")
+                repre['published_path'] = self.unc_convert(dst)
+
             else:
                 # Single file
                 #  _______
@@ -402,7 +409,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
                 instance.data["transfers"].append([src, dst])
 
                 published_files.append(dst)
-
+                repre['published_path'] = self.unc_convert(dst)
                 self.log.debug("__ dst: {}".format(dst))
 
             repre["publishedFiles"] = published_files
@@ -473,9 +480,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
         self.log.debug("__ representations: {}".format(representations))
         for rep in instance.data["representations"]:
             self.log.debug("__ represNAME: {}".format(rep['name']))
-            self.log.debug("__ represPATH:\n{}".format(
-                ",\n".join(rep['publishedFiles'])
-            ))
+            self.log.debug("__ represPATH: {}".format(rep['published_path']))
         io.insert_many(representations)
         instance.data["published_representations"] = (
             published_representations

From 22e8c301467f3f35e1d39daed855b646602c8633 Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Mon, 24 Feb 2020 17:26:17 +0100
Subject: [PATCH 16/43] fixed old repres to delete variable

---
 pype/plugins/global/publish/integrate_master_version.py | 7 +++----
 1 file changed, 3 insertions(+), 4 deletions(-)

diff --git a/pype/plugins/global/publish/integrate_master_version.py b/pype/plugins/global/publish/integrate_master_version.py
index 1ec0bd00dd..3df74f4e28 100644
--- a/pype/plugins/global/publish/integrate_master_version.py
+++ b/pype/plugins/global/publish/integrate_master_version.py
@@ -120,10 +120,9 @@ class IntegrateMasterVersion(pyblish.api.InstancePlugin):
                 old_repres_to_replace[repre_name_low] = (
                     old_repres_by_name.pop(repre_name_low)
                 )
-            else:
-                old_repres_to_delete[repre_name_low] = (
-                    old_repres_by_name.pop(repre_name_low)
-                )
+
+        if old_repres_by_name:
+            old_repres_to_delete = old_repres_by_name
archived_repres = list(io.find({ # Check what is type of archived representation From 50bff7fcc0b968c9113d3980e6b656b57d4c32f1 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Mon, 24 Feb 2020 17:26:45 +0100 Subject: [PATCH 17/43] bulk is actually written to database --- pype/plugins/global/publish/integrate_master_version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pype/plugins/global/publish/integrate_master_version.py b/pype/plugins/global/publish/integrate_master_version.py index 3df74f4e28..ea97f3d779 100644 --- a/pype/plugins/global/publish/integrate_master_version.py +++ b/pype/plugins/global/publish/integrate_master_version.py @@ -272,7 +272,7 @@ class IntegrateMasterVersion(pyblish.api.InstancePlugin): ) if bulk_writes: - pass + io._database[io.Session["AVALON_PROJECT"]].bulk_write(bulk_writes) def create_hardlink(self, src_path, dst_path): dst_path = self.path_root_check(dst_path) From 1c098196e69496aa8237b06ebcd36fe34d4db74b Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Mon, 24 Feb 2020 17:46:05 +0100 Subject: [PATCH 18/43] added few debug logs --- .../publish/integrate_master_version.py | 37 +++++++++++++++++-- 1 file changed, 34 insertions(+), 3 deletions(-) diff --git a/pype/plugins/global/publish/integrate_master_version.py b/pype/plugins/global/publish/integrate_master_version.py index ea97f3d779..390c86afce 100644 --- a/pype/plugins/global/publish/integrate_master_version.py +++ b/pype/plugins/global/publish/integrate_master_version.py @@ -25,6 +25,11 @@ class IntegrateMasterVersion(pyblish.api.InstancePlugin): ] def process(self, instance): + self.log.debug( + "Integrate of Master version for subset `{}` begins.".format( + instance.data.get("subset", str(instance)) + ) + ) published_repres = instance.data.get("published_representations") if not published_repres: self.log.debug( @@ -37,15 +42,21 @@ class IntegrateMasterVersion(pyblish.api.InstancePlugin): # TODO raise error if master not set? anatomy = instance.context.data["anatomy"] if "publish" not in anatomy.templates: - self.warning("Anatomy does not have set publish key!") + self.log.warning("Anatomy does not have set publish key!") return if "master" not in anatomy.templates["publish"]: - self.warning(( + self.log.warning(( "There is not set \"master\" template for project \"{}\"" ).format(project_name)) return + master_template = anatomy.templates["publish"]["master"] + + self.log.debug("`Master` template check was successful. `{}`".format( + master_template + )) + src_version_entity = None filtered_repre_ids = [] @@ -55,6 +66,11 @@ class IntegrateMasterVersion(pyblish.api.InstancePlugin): src_version_entity = repre_info.get("version_entity") if repre["name"].lower() in self.ignored_representation_names: + self.log.debug( + "Filtering representation with name: `{}`".format( + repre["name"].lower() + ) + ) filtered_repre_ids.append(repre_id) for repre_id in filtered_repre_ids: @@ -67,12 +83,19 @@ class IntegrateMasterVersion(pyblish.api.InstancePlugin): return if src_version_entity is None: + self.log.debug(( + "Published version entity was not sent in representation data." + " Querying entity from database." + )) src_version_entity = ( self.version_from_representations(published_repres) ) if not src_version_entity: - self.log.warning("Can't find origin version in database.") + self.log.warning(( + "Can't find origin version in database." + " Skipping Master version publish." 
+ )) return old_version, old_repres = ( @@ -99,6 +122,7 @@ class IntegrateMasterVersion(pyblish.api.InstancePlugin): bulk_writes = [] if old_version: + self.log.debug("Replacing old master version.") bulk_writes.append( ReplaceOne( {"_id": new_master_version["_id"]}, @@ -106,6 +130,7 @@ class IntegrateMasterVersion(pyblish.api.InstancePlugin): ) ) else: + self.log.debug("Creating first master version.") bulk_writes.append( InsertOne(new_master_version) ) @@ -282,11 +307,17 @@ class IntegrateMasterVersion(pyblish.api.InstancePlugin): try: os.makedirs(dirname) + self.log.debug("Folder created: \"{}\"".format(dirname)) except OSError as exc: if exc.errno != errno.EEXIST: self.log.error("An unexpected error occurred.", exc_info=True) raise + self.log.debug("Folder already exists: \"{}\"".format(dirname)) + + self.log.debug("Copying file \"{}\" to \"{}\"".format( + src_path, dst_path + )) filelink.create(src_path, dst_path, filelink.HARDLINK) def path_root_check(self, path): From 03b252556de70a121a273b5acc0474b33c035327 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Mon, 24 Feb 2020 18:19:12 +0100 Subject: [PATCH 19/43] keep only one master_template variable --- pype/plugins/global/publish/integrate_master_version.py | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/pype/plugins/global/publish/integrate_master_version.py b/pype/plugins/global/publish/integrate_master_version.py index 390c86afce..dc15ff2d8d 100644 --- a/pype/plugins/global/publish/integrate_master_version.py +++ b/pype/plugins/global/publish/integrate_master_version.py @@ -161,8 +161,6 @@ class IntegrateMasterVersion(pyblish.api.InstancePlugin): self.delete_repre_files(old_repres) - master_template = anatomy.templates["publish"]["master"] - src_to_dst_file_paths = [] for repre_id, repre_info in published_repres.items(): @@ -193,7 +191,6 @@ class IntegrateMasterVersion(pyblish.api.InstancePlugin): repre_context[key] = anatomy_data[key] - # TODO change repre data and context (new anatomy) # Prepare new repre repre = copy.deepcopy(repre_info["representation"]) repre["parent"] = new_master_version["_id"] @@ -232,7 +229,7 @@ class IntegrateMasterVersion(pyblish.api.InstancePlugin): InsertOne(repre) ) - # TODO hardlink files + # Prepare paths of source and destination files if len(published_files) == 1: src_to_dst_file_paths.append( (published_files[0], template_filled) From 236da4f8849042b65d0192e7f0749638891b3870 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Mon, 24 Feb 2020 18:21:54 +0100 Subject: [PATCH 20/43] fixed backup file handling --- .../publish/integrate_master_version.py | 45 +++++++++++++++---- 1 file changed, 37 insertions(+), 8 deletions(-) diff --git a/pype/plugins/global/publish/integrate_master_version.py b/pype/plugins/global/publish/integrate_master_version.py index dc15ff2d8d..d98767cbfd 100644 --- a/pype/plugins/global/publish/integrate_master_version.py +++ b/pype/plugins/global/publish/integrate_master_version.py @@ -263,7 +263,9 @@ class IntegrateMasterVersion(pyblish.api.InstancePlugin): (src_file, dst_file) ) + # Copy(hardlink) paths of source and destination files # TODO should we *only* create hardlinks? 
+ # TODO less logs about drives for src_path, dst_path in src_to_dst_file_paths: self.create_hardlink(src_path, dst_path) @@ -443,17 +445,24 @@ class IntegrateMasterVersion(pyblish.api.InstancePlugin): renamed_files = [] failed = False for file_path in files_to_delete: - # TODO too robust for testing - should be easier in future - _rename_path = file_path + ".BACKUP" - rename_path = None - max_index = 200 - cur_index = 1 - while True: - if max_index >= cur_index: - raise Exception(( + self.log.debug( + "Preparing file for deletion: `{}`".format(file_path) + ) + rename_path = file_path + ".BACKUP" + + max_index = 10 + cur_index = 0 + _rename_path = None + while os.path.exists(rename_path): + if _rename_path is None: + _rename_path = rename_path + + if cur_index >= max_index: + self.log.warning(( "Max while loop index reached! Can't make backup" " for previous master version." )) + failed = True break if not os.path.exists(_rename_path): @@ -462,21 +471,41 @@ class IntegrateMasterVersion(pyblish.api.InstancePlugin): try: os.remove(_rename_path) + self.log.debug( + "Deleted old backup file: \"{}\"".format(_rename_path) + ) except Exception: + self.log.warning( + "Could not delete old backup file \"{}\".".format( + _rename_path + ), + exc_info=True + ) _rename_path = file_path + ".BACKUP{}".format( str(cur_index) ) cur_index += 1 + # Skip if any already failed + if failed: + break + try: args = (file_path, rename_path) os.rename(*args) renamed_files.append(args) except Exception: + self.log.warning( + "Could not rename file `{}` to `{}`".format( + file_path, rename_path + ), + exc_info=True + ) failed = True break if failed: + # Rename back old renamed files for dst_name, src_name in renamed_files: os.rename(src_name, dst_name) From 390acf4eb394496486b7127b1fa9e75d01fececc Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Mon, 24 Feb 2020 18:24:42 +0100 Subject: [PATCH 21/43] addde important TODO --- pype/plugins/global/publish/integrate_master_version.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/pype/plugins/global/publish/integrate_master_version.py b/pype/plugins/global/publish/integrate_master_version.py index d98767cbfd..be6602ac13 100644 --- a/pype/plugins/global/publish/integrate_master_version.py +++ b/pype/plugins/global/publish/integrate_master_version.py @@ -317,6 +317,8 @@ class IntegrateMasterVersion(pyblish.api.InstancePlugin): self.log.debug("Copying file \"{}\" to \"{}\"".format( src_path, dst_path )) + # TODO check if file exists!!! 
+ # - uncomplete publish may cause that file already exists filelink.create(src_path, dst_path, filelink.HARDLINK) def path_root_check(self, path): From ae1b102f4756545ee670972e059c2db96603d712 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Mon, 24 Feb 2020 18:39:52 +0100 Subject: [PATCH 22/43] begin and ending logs have 3 symbol start --- .../publish/integrate_master_version.py | 24 ++++++++++++------- 1 file changed, 15 insertions(+), 9 deletions(-) diff --git a/pype/plugins/global/publish/integrate_master_version.py b/pype/plugins/global/publish/integrate_master_version.py index be6602ac13..de2cedc2d7 100644 --- a/pype/plugins/global/publish/integrate_master_version.py +++ b/pype/plugins/global/publish/integrate_master_version.py @@ -26,14 +26,14 @@ class IntegrateMasterVersion(pyblish.api.InstancePlugin): def process(self, instance): self.log.debug( - "Integrate of Master version for subset `{}` begins.".format( + "--- Integration of Master version for subset `{}` begins.".format( instance.data.get("subset", str(instance)) ) ) published_repres = instance.data.get("published_representations") if not published_repres: self.log.debug( - "There are not published representations on the instance." + "*** There are not published representations on the instance." ) return @@ -42,12 +42,12 @@ class IntegrateMasterVersion(pyblish.api.InstancePlugin): # TODO raise error if master not set? anatomy = instance.context.data["anatomy"] if "publish" not in anatomy.templates: - self.log.warning("Anatomy does not have set publish key!") + self.log.warning("!!! Anatomy does not have set publish key!") return if "master" not in anatomy.templates["publish"]: self.log.warning(( - "There is not set \"master\" template for project \"{}\"" + "!!! There is not set \"master\" template for project \"{}\"" ).format(project_name)) return @@ -78,7 +78,7 @@ class IntegrateMasterVersion(pyblish.api.InstancePlugin): if not published_repres: self.log.debug( - "All published representations were filtered by name." + "*** All published representations were filtered by name." ) return @@ -93,7 +93,7 @@ class IntegrateMasterVersion(pyblish.api.InstancePlugin): if not src_version_entity: self.log.warning(( - "Can't find origin version in database." + "!!! Can't find origin version in database." " Skipping Master version publish." )) return @@ -241,7 +241,8 @@ class IntegrateMasterVersion(pyblish.api.InstancePlugin): raise Exception(( "Integrity error. Files of published representation" " is combination of frame collections and single files." - )) + "Collections: `{}` Single files: `{}`" + ).format(str(collections), str(remainders))) src_col = collections[0] @@ -266,13 +267,12 @@ class IntegrateMasterVersion(pyblish.api.InstancePlugin): # Copy(hardlink) paths of source and destination files # TODO should we *only* create hardlinks? # TODO less logs about drives + # TODO should we keep files for deletion until this is successful? 
for src_path, dst_path in src_to_dst_file_paths: self.create_hardlink(src_path, dst_path) # Archive not replaced old representations for repre_name_low, repre in old_repres_to_delete.items(): - # TODO delete their files - # Replace archived representation (This is backup) # - should not happen to have both repre and archived repre if repre_name_low in archived_repres_by_name: @@ -298,6 +298,12 @@ class IntegrateMasterVersion(pyblish.api.InstancePlugin): if bulk_writes: io._database[io.Session["AVALON_PROJECT"]].bulk_write(bulk_writes) + self.log.debug(( + "--- End of Master version integration for subset `{}`." + ).format( + instance.data.get("subset", str(instance)) + )) + def create_hardlink(self, src_path, dst_path): dst_path = self.path_root_check(dst_path) src_path = self.path_root_check(src_path) From e5108a6e37e66fa3ffcf78df7a4872b1a0f517cd Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Mon, 24 Feb 2020 18:40:40 +0100 Subject: [PATCH 23/43] reduced logs about drive remapping --- .../publish/integrate_master_version.py | 32 ++++++++++++------- 1 file changed, 20 insertions(+), 12 deletions(-) diff --git a/pype/plugins/global/publish/integrate_master_version.py b/pype/plugins/global/publish/integrate_master_version.py index de2cedc2d7..a32c94b43e 100644 --- a/pype/plugins/global/publish/integrate_master_version.py +++ b/pype/plugins/global/publish/integrate_master_version.py @@ -264,9 +264,10 @@ class IntegrateMasterVersion(pyblish.api.InstancePlugin): (src_file, dst_file) ) + self.path_checks = [] + # Copy(hardlink) paths of source and destination files # TODO should we *only* create hardlinks? - # TODO less logs about drives # TODO should we keep files for deletion until this is successful? for src_path, dst_path in src_to_dst_file_paths: self.create_hardlink(src_path, dst_path) @@ -333,9 +334,13 @@ class IntegrateMasterVersion(pyblish.api.InstancePlugin): drive, _path = os.path.splitdrive(normalized_path) if os.path.exists(drive + "/"): - self.log.debug( - "Drive \"{}\" exist. Nothing to change.".format(drive) - ) + key = "drive_check{}".format(drive) + if key not in self.path_checks: + self.log.debug( + "Drive \"{}\" exist. Nothing to change.".format(drive) + ) + self.path_checks.append(key) + return normalized_path path_env_key = "PYPE_STUDIO_PROJECTS_PATH" @@ -348,15 +353,18 @@ class IntegrateMasterVersion(pyblish.api.InstancePlugin): missing_envs.append(mount_env_key) if missing_envs: - _add_s = "" - if len(missing_envs) > 1: - _add_s = "s" + key = "missing_envs" + if key not in self.path_checks: + self.path_checks.append(key) + _add_s = "" + if len(missing_envs) > 1: + _add_s = "s" - self.log.warning(( - "Can't replace MOUNT drive path to UNC path due to missing" - " environment variable{}: `{}`. This may cause issues during" - " publishing process." - ).format(_add_s, ", ".join(missing_envs))) + self.log.warning(( + "Can't replace MOUNT drive path to UNC path due to missing" + " environment variable{}: `{}`. This may cause issues" + " during publishing process." 
+ ).format(_add_s, ", ".join(missing_envs))) return normalized_path From 58ca4399a1db3264b1af054a80c2d2a96a44c5ce Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Mon, 24 Feb 2020 18:45:24 +0100 Subject: [PATCH 24/43] removed unused log --- pype/plugins/global/publish/integrate_master_version.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/pype/plugins/global/publish/integrate_master_version.py b/pype/plugins/global/publish/integrate_master_version.py index a32c94b43e..f767a312d6 100644 --- a/pype/plugins/global/publish/integrate_master_version.py +++ b/pype/plugins/global/publish/integrate_master_version.py @@ -10,20 +10,21 @@ from avalon import api, io, pipeline from avalon.vendor import filelink -log = logging.getLogger(__name__) - - class IntegrateMasterVersion(pyblish.api.InstancePlugin): label = "Integrate Master Version" # Must happen after IntegrateNew order = pyblish.api.IntegratorOrder + 0.1 + # Can specify representation names that will be ignored (lower case) ignored_representation_names = [] db_representation_context_keys = [ "project", "asset", "task", "subset", "representation", "family", "hierarchy", "task", "username" ] - + # TODO add family filtering + # QUESTION/TODO this process should happen on server if crashed due to + # permissions error on files (files were used or user didn't have perms) + # *but all other plugins must be sucessfully completed def process(self, instance): self.log.debug( "--- Integration of Master version for subset `{}` begins.".format( From 4e833a4f44153988fe90e342c15d72873def3b89 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 25 Feb 2020 14:05:45 +0100 Subject: [PATCH 25/43] master version do not rename each file but whole pusblish folder, also is used master.path anatomy instead of publish.master --- .../publish/integrate_master_version.py | 551 +++++++++--------- 1 file changed, 283 insertions(+), 268 deletions(-) diff --git a/pype/plugins/global/publish/integrate_master_version.py b/pype/plugins/global/publish/integrate_master_version.py index f767a312d6..42c93db7e9 100644 --- a/pype/plugins/global/publish/integrate_master_version.py +++ b/pype/plugins/global/publish/integrate_master_version.py @@ -1,8 +1,8 @@ import os import copy -import logging import clique import errno +import shutil from pymongo import InsertOne, ReplaceOne import pyblish.api @@ -25,6 +25,7 @@ class IntegrateMasterVersion(pyblish.api.InstancePlugin): # QUESTION/TODO this process should happen on server if crashed due to # permissions error on files (files were used or user didn't have perms) # *but all other plugins must be sucessfully completed + def process(self, instance): self.log.debug( "--- Integration of Master version for subset `{}` begins.".format( @@ -42,24 +43,25 @@ class IntegrateMasterVersion(pyblish.api.InstancePlugin): # TODO raise error if master not set? anatomy = instance.context.data["anatomy"] - if "publish" not in anatomy.templates: - self.log.warning("!!! Anatomy does not have set publish key!") + if "master" not in anatomy.templates: + self.log.warning("!!! Anatomy does not have set `master` key!") return - if "master" not in anatomy.templates["publish"]: + if "path" not in anatomy.templates["master"]: self.log.warning(( - "!!! There is not set \"master\" template for project \"{}\"" + "!!! There is not set `path` template in `master` anatomy" + " for project \"{}\"." 
).format(project_name)) return - master_template = anatomy.templates["publish"]["master"] - + master_template = anatomy.templates["master"]["path"] self.log.debug("`Master` template check was successful. `{}`".format( master_template )) - src_version_entity = None + master_publish_dir = self.get_publish_dir(instance) + src_version_entity = None filtered_repre_ids = [] for repre_id, repre_info in published_repres.items(): repre = repre_info["representation"] @@ -99,6 +101,47 @@ class IntegrateMasterVersion(pyblish.api.InstancePlugin): )) return + all_copied_files = [] + transfers = instance.data.get("transfers", list()) + for src, dst in transfers: + dst = os.path.normpath(dst) + if dst not in all_copied_files: + all_copied_files.append(dst) + + hardlinks = instance.data.get("hardlinks", list()) + for src, dst in hardlinks: + dst = os.path.normpath(dst) + if dst not in all_copied_files: + all_copied_files.append(dst) + + all_repre_file_paths = [] + for repre_info in published_repres: + published_files = repre_info.get("published_files") or [] + for file_path in published_files: + file_path = os.path.normpath(file_path) + if file_path not in all_repre_file_paths: + all_repre_file_paths.append(file_path) + + # TODO this is not best practice of getting resources for publish + # WARNING due to this we must remove all files from master publish dir + instance_publish_dir = os.path.normpath( + instance.data["publishDir"] + ) + other_file_paths_mapping = [] + for file_path in all_copied_files: + # Check if it is from publishDir + if not file_path.startswith(instance_publish_dir): + continue + + if file_path in all_repre_file_paths: + continue + + dst_filepath = file_path.replace( + instance_publish_dir, master_publish_dir + ) + other_file_paths_mapping.append((file_path, dst_filepath)) + + # Current version old_version, old_repres = ( self.current_master_ents(src_version_entity) ) @@ -120,6 +163,7 @@ class IntegrateMasterVersion(pyblish.api.InstancePlugin): "schema": "pype:master_version-1.0" } + # Don't make changes in database until everything is O.K. bulk_writes = [] if old_version: @@ -160,145 +204,212 @@ class IntegrateMasterVersion(pyblish.api.InstancePlugin): repre_name_low = repre["name"].lower() archived_repres_by_name[repre_name_low] = repre - self.delete_repre_files(old_repres) + if os.path.exists(master_publish_dir): + backup_master_publish_dir = master_publish_dir + ".BACKUP" + max_idx = 10 + idx = 0 + _backup_master_publish_dir = backup_master_publish_dir + while os.path.exists(_backup_master_publish_dir): + self.log.debug(( + "Backup folder already exists." + " Trying to remove \"{}\"" + ).format(_backup_master_publish_dir)) - src_to_dst_file_paths = [] - for repre_id, repre_info in published_repres.items(): + try: + shutil.rmtree(_backup_master_publish_dir) + backup_master_publish_dir = _backup_master_publish_dir + break + except Exception: + self.log.info(( + "Could not remove previous backup folder." 
+ " Trying to add index to folder name" + )) - # Skip if new repre does not have published repre files - published_files = repre_info["published_files"] - if len(published_files) == 0: - continue + _backup_master_publish_dir = ( + backup_master_publish_dir + str(idx) + ) + if not os.path.exists(_backup_master_publish_dir): + backup_master_publish_dir = _backup_master_publish_dir + break - # Prepare anatomy data - anatomy_data = repre_info["anatomy_data"] - anatomy_data.pop("version", None) + if idx > max_idx: + raise AssertionError(( + "Backup folders are fully occupied to max index \"{}\"" + ).format(max_idx)) + break - # Get filled path to repre context - anatomy_filled = anatomy.format(anatomy_data) - template_filled = anatomy_filled["publish"]["master"] + idx += 1 - repre_data = { - "path": str(template_filled), - "template": master_template - } - repre_context = template_filled.used_values - for key in self.db_representation_context_keys: - if ( - key in repre_context or - key not in anatomy_data - ): + self.log.debug("Backup folder path is \"{}\"".format( + backup_master_publish_dir + )) + try: + os.rename(master_publish_dir, backup_master_publish_dir) + except PermissionError: + raise AssertionError(( + "Could not create master version because it is not" + " possible to replace current master files." + )) + try: + src_to_dst_file_paths = [] + for repre_id, repre_info in published_repres.items(): + + # Skip if new repre does not have published repre files + published_files = repre_info["published_files"] + if len(published_files) == 0: continue - repre_context[key] = anatomy_data[key] + # Prepare anatomy data + anatomy_data = repre_info["anatomy_data"] + anatomy_data.pop("version", None) - # Prepare new repre - repre = copy.deepcopy(repre_info["representation"]) - repre["parent"] = new_master_version["_id"] - repre["context"] = repre_context - repre["data"] = repre_data + # Get filled path to repre context + anatomy_filled = anatomy.format(anatomy_data) + template_filled = anatomy_filled["publish"]["master"] - repre_name_low = repre["name"].lower() + repre_data = { + "path": str(template_filled), + "template": master_template + } + repre_context = template_filled.used_values + for key in self.db_representation_context_keys: + if ( + key in repre_context or + key not in anatomy_data + ): + continue - # Replace current representation - if repre_name_low in old_repres_to_replace: - old_repre = old_repres_to_replace.pop(repre_name_low) - repre["_id"] = old_repre["_id"] - bulk_writes.append( - ReplaceOne( - {"_id": old_repre["_id"]}, - repre + repre_context[key] = anatomy_data[key] + + # Prepare new repre + repre = copy.deepcopy(repre_info["representation"]) + repre["parent"] = new_master_version["_id"] + repre["context"] = repre_context + repre["data"] = repre_data + + repre_name_low = repre["name"].lower() + + # Replace current representation + if repre_name_low in old_repres_to_replace: + old_repre = old_repres_to_replace.pop(repre_name_low) + repre["_id"] = old_repre["_id"] + bulk_writes.append( + ReplaceOne( + {"_id": old_repre["_id"]}, + repre + ) ) - ) - # Unarchive representation - elif repre_name_low in archived_repres_by_name: - archived_repre = archived_repres_by_name.pop(repre_name_low) - old_id = archived_repre["old_id"] - repre["_id"] = old_id - bulk_writes.append( - ReplaceOne( - {"old_id": old_id}, - repre + # Unarchive representation + elif repre_name_low in archived_repres_by_name: + archived_repre = archived_repres_by_name.pop( + repre_name_low ) - ) - - # Create 
representation - else: - repre["_id"] = io.ObjectId() - bulk_writes.append( - InsertOne(repre) - ) - - # Prepare paths of source and destination files - if len(published_files) == 1: - src_to_dst_file_paths.append( - (published_files[0], template_filled) - ) - continue - - collections, remainders = clique.assemble(published_files) - if remainders or not collections or len(collections) > 1: - raise Exception(( - "Integrity error. Files of published representation" - " is combination of frame collections and single files." - "Collections: `{}` Single files: `{}`" - ).format(str(collections), str(remainders))) - - src_col = collections[0] - - # Get head and tail for collection - frame_splitter = "_-_FRAME_SPLIT_-_" - anatomy_data["frame"] = frame_splitter - _anatomy_filled = anatomy.format(anatomy_data) - _template_filled = _anatomy_filled["publish"]["master"] - head, tail = _template_filled.split(frame_splitter) - padding = ( - anatomy.templates["render"]["padding"] - ) - - dst_col = clique.Collection(head=head, padding=padding, tail=tail) - dst_col.indexes.clear() - dst_col.indexes.update(src_col.indexes) - for src_file, dst_file in zip(src_col, dst_col): - src_to_dst_file_paths.append( - (src_file, dst_file) - ) - - self.path_checks = [] - - # Copy(hardlink) paths of source and destination files - # TODO should we *only* create hardlinks? - # TODO should we keep files for deletion until this is successful? - for src_path, dst_path in src_to_dst_file_paths: - self.create_hardlink(src_path, dst_path) - - # Archive not replaced old representations - for repre_name_low, repre in old_repres_to_delete.items(): - # Replace archived representation (This is backup) - # - should not happen to have both repre and archived repre - if repre_name_low in archived_repres_by_name: - archived_repre = archived_repres_by_name.pop(repre_name_low) - repre["old_id"] = repre["_id"] - repre["_id"] = archived_repre["_id"] - repre["type"] = archived_repre["type"] - bulk_writes.append( - ReplaceOne( - {"_id": archived_repre["_id"]}, - repre + old_id = archived_repre["old_id"] + repre["_id"] = old_id + bulk_writes.append( + ReplaceOne( + {"old_id": old_id}, + repre + ) ) + + # Create representation + else: + repre["_id"] = io.ObjectId() + bulk_writes.append( + InsertOne(repre) + ) + + # Prepare paths of source and destination files + if len(published_files) == 1: + src_to_dst_file_paths.append( + (published_files[0], template_filled) + ) + continue + + collections, remainders = clique.assemble(published_files) + if remainders or not collections or len(collections) > 1: + raise Exception(( + "Integrity error. Files of published representation " + "is combination of frame collections and single files." 
+ "Collections: `{}` Single files: `{}`" + ).format(str(collections), str(remainders))) + + src_col = collections[0] + + # Get head and tail for collection + frame_splitter = "_-_FRAME_SPLIT_-_" + anatomy_data["frame"] = frame_splitter + _anatomy_filled = anatomy.format(anatomy_data) + _template_filled = _anatomy_filled["master"]["path"] + head, tail = _template_filled.split(frame_splitter) + padding = ( + anatomy.templates["render"]["padding"] ) - else: - repre["old_id"] = repre["_id"] - repre["_id"] = io.ObjectId() - repre["type"] = "archived_representation" - bulk_writes.append( - InsertOne(repre) + dst_col = clique.Collection( + head=head, padding=padding, tail=tail + ) + dst_col.indexes.clear() + dst_col.indexes.update(src_col.indexes) + for src_file, dst_file in zip(src_col, dst_col): + src_to_dst_file_paths.append( + (src_file, dst_file) + ) + + self.path_checks = [] + + # Copy(hardlink) paths of source and destination files + # TODO should we *only* create hardlinks? + # TODO should we keep files for deletion until this is successful? + for src_path, dst_path in src_to_dst_file_paths: + self.create_hardlink(src_path, dst_path) + + for src_path, dst_path in other_file_paths_mapping: + self.create_hardlink(src_path, dst_path) + + # Archive not replaced old representations + for repre_name_low, repre in old_repres_to_delete.items(): + # Replace archived representation (This is backup) + # - should not happen to have both repre and archived repre + if repre_name_low in archived_repres_by_name: + archived_repre = archived_repres_by_name.pop( + repre_name_low + ) + repre["old_id"] = repre["_id"] + repre["_id"] = archived_repre["_id"] + repre["type"] = archived_repre["type"] + bulk_writes.append( + ReplaceOne( + {"_id": archived_repre["_id"]}, + repre + ) + ) + + else: + repre["old_id"] = repre["_id"] + repre["_id"] = io.ObjectId() + repre["type"] = "archived_representation" + bulk_writes.append( + InsertOne(repre) + ) + + if bulk_writes: + io._database[io.Session["AVALON_PROJECT"]].bulk_write( + bulk_writes ) - if bulk_writes: - io._database[io.Session["AVALON_PROJECT"]].bulk_write(bulk_writes) + # Remove backuped previous master + shutil.rmtree(backup_master_publish_dir) + + except Exception: + os.rename(backup_master_publish_dir, master_publish_dir) + self.log.error(( + "!!! Creating of Master version failed." + " Previous master version maybe lost some data!" + )) + raise self.log.debug(( "--- End of Master version integration for subset `{}`." 
@@ -306,7 +417,49 @@ class IntegrateMasterVersion(pyblish.api.InstancePlugin): instance.data.get("subset", str(instance)) )) + def get_all_files_from_path(self, path): + files = [] + for (dir_path, dir_names, file_names) in os.walk(path): + for file_name in file_names: + _path = os.path.join(dir_path, file_name) + files.append(_path) + return files + + def get_publish_dir(self, instance): + anatomy = instance.context.data["anatomy"] + template_data = copy.deepcopy(instance.data["anatomyData"]) + + if "folder" in anatomy.templates["master"]: + anatomy_filled = anatomy.format(template_data) + publish_folder = anatomy_filled["master"]["folder"] + else: + # This is for cases of Deprecated anatomy without `folder` + # TODO remove when all clients have solved this issue + template_data.update({ + "frame": "FRAME_TEMP", + "representation": "TEMP" + }) + anatomy_filled = anatomy.format(template_data) + # solve deprecated situation when `folder` key is not underneath + # `publish` anatomy + project_name = api.Session["AVALON_PROJECT"] + self.log.warning(( + "Deprecation warning: Anatomy does not have set `folder`" + " key underneath `publish` (in global of for project `{}`)." + ).format(project_name)) + + file_path = anatomy_filled["master"]["path"] + # Directory + publish_folder = os.path.dirname(file_path) + + publish_folder = os.path.normpath(publish_folder) + + self.log.debug("Master publish dir: \"{}\"".format(publish_folder)) + + return publish_folder + def create_hardlink(self, src_path, dst_path): + # TODO check drives if are the same to check if cas hardlink dst_path = self.path_root_check(dst_path) src_path = self.path_root_check(src_path) @@ -314,7 +467,7 @@ class IntegrateMasterVersion(pyblish.api.InstancePlugin): try: os.makedirs(dirname) - self.log.debug("Folder created: \"{}\"".format(dirname)) + self.log.debug("Folder(s) created: \"{}\"".format(dirname)) except OSError as exc: if exc.errno != errno.EEXIST: self.log.error("An unexpected error occurred.", exc_info=True) @@ -325,8 +478,6 @@ class IntegrateMasterVersion(pyblish.api.InstancePlugin): self.log.debug("Copying file \"{}\" to \"{}\"".format( src_path, dst_path )) - # TODO check if file exists!!! - # - uncomplete publish may cause that file already exists filelink.create(src_path, dst_path, filelink.HARDLINK) def path_root_check(self, path): @@ -398,142 +549,6 @@ class IntegrateMasterVersion(pyblish.api.InstancePlugin): return os.path.normpath(path) - def delete_repre_files(self, repres): - if not repres: - return - - frame_splitter = "_-_FRAME_-_" - files_to_delete = [] - for repre in repres: - is_sequence = False - if "frame" in repre["context"]: - repre["context"]["frame"] = frame_splitter - is_sequence = True - - template = repre["data"]["template"] - context = repre["context"] - context["root"] = api.registered_root() - path = pipeline.format_template_with_optional_keys( - context, template - ) - path = os.path.normpath(path) - if not is_sequence: - if os.path.exists(path): - files_to_delete.append(path) - continue - - dirpath = os.path.dirname(path) - file_start = None - file_end = None - file_items = path.split(frame_splitter) - if len(file_items) == 0: - continue - elif len(file_items) == 1: - if path.startswith(frame_splitter): - file_end = file_items[0] - else: - file_start = file_items[1] - - elif len(file_items) == 2: - file_start, file_end = file_items - - else: - raise ValueError(( - "Representation template has `frame` key " - "more than once inside." 
- )) - - for file_name in os.listdir(dirpath): - check_name = str(file_name) - if file_start and not check_name.startswith(file_start): - continue - check_name.replace(file_start, "") - - if file_end and not check_name.endswith(file_end): - continue - check_name.replace(file_end, "") - - # File does not have frame - if not check_name: - continue - - files_to_delete.append(os.path.join(dirpath, file_name)) - - renamed_files = [] - failed = False - for file_path in files_to_delete: - self.log.debug( - "Preparing file for deletion: `{}`".format(file_path) - ) - rename_path = file_path + ".BACKUP" - - max_index = 10 - cur_index = 0 - _rename_path = None - while os.path.exists(rename_path): - if _rename_path is None: - _rename_path = rename_path - - if cur_index >= max_index: - self.log.warning(( - "Max while loop index reached! Can't make backup" - " for previous master version." - )) - failed = True - break - - if not os.path.exists(_rename_path): - rename_path = _rename_path - break - - try: - os.remove(_rename_path) - self.log.debug( - "Deleted old backup file: \"{}\"".format(_rename_path) - ) - except Exception: - self.log.warning( - "Could not delete old backup file \"{}\".".format( - _rename_path - ), - exc_info=True - ) - _rename_path = file_path + ".BACKUP{}".format( - str(cur_index) - ) - cur_index += 1 - - # Skip if any already failed - if failed: - break - - try: - args = (file_path, rename_path) - os.rename(*args) - renamed_files.append(args) - except Exception: - self.log.warning( - "Could not rename file `{}` to `{}`".format( - file_path, rename_path - ), - exc_info=True - ) - failed = True - break - - if failed: - # Rename back old renamed files - for dst_name, src_name in renamed_files: - os.rename(src_name, dst_name) - - raise AssertionError(( - "Could not create master version because it is not possible" - " to replace current master files." 
- )) - - for _, renamed_path in renamed_files: - os.remove(renamed_path) - def version_from_representations(self, repres): for repre in repres: version = io.find_one({"_id": repre["parent"]}) From 685edf184383dd7e6cc75f1b29568622522aa001 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 25 Feb 2020 14:12:00 +0100 Subject: [PATCH 26/43] few minor fixes --- pype/plugins/global/publish/integrate_master_version.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pype/plugins/global/publish/integrate_master_version.py b/pype/plugins/global/publish/integrate_master_version.py index 42c93db7e9..f2769a436e 100644 --- a/pype/plugins/global/publish/integrate_master_version.py +++ b/pype/plugins/global/publish/integrate_master_version.py @@ -115,7 +115,7 @@ class IntegrateMasterVersion(pyblish.api.InstancePlugin): all_copied_files.append(dst) all_repre_file_paths = [] - for repre_info in published_repres: + for repre_info in published_repres.values(): published_files = repre_info.get("published_files") or [] for file_path in published_files: file_path = os.path.normpath(file_path) @@ -265,7 +265,7 @@ class IntegrateMasterVersion(pyblish.api.InstancePlugin): # Get filled path to repre context anatomy_filled = anatomy.format(anatomy_data) - template_filled = anatomy_filled["publish"]["master"] + template_filled = anatomy_filled["master"]["path"] repre_data = { "path": str(template_filled), From 4ced37437b3d875b24fbfe7fb8c613d45ab7d6f5 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 25 Feb 2020 14:26:23 +0100 Subject: [PATCH 27/43] create_hardlink changed to copy_file - can handle if paths are cross drives --- .../publish/integrate_master_version.py | 23 +++++++++++++++---- 1 file changed, 18 insertions(+), 5 deletions(-) diff --git a/pype/plugins/global/publish/integrate_master_version.py b/pype/plugins/global/publish/integrate_master_version.py index f2769a436e..2a23abfbec 100644 --- a/pype/plugins/global/publish/integrate_master_version.py +++ b/pype/plugins/global/publish/integrate_master_version.py @@ -364,10 +364,10 @@ class IntegrateMasterVersion(pyblish.api.InstancePlugin): # TODO should we *only* create hardlinks? # TODO should we keep files for deletion until this is successful? for src_path, dst_path in src_to_dst_file_paths: - self.create_hardlink(src_path, dst_path) + self.copy_file(src_path, dst_path) for src_path, dst_path in other_file_paths_mapping: - self.create_hardlink(src_path, dst_path) + self.copy_file(src_path, dst_path) # Archive not replaced old representations for repre_name_low, repre in old_repres_to_delete.items(): @@ -412,7 +412,8 @@ class IntegrateMasterVersion(pyblish.api.InstancePlugin): raise self.log.debug(( - "--- End of Master version integration for subset `{}`." + "--- Master version integration for subset `{}`" + " seems to be successful." 
).format( instance.data.get("subset", str(instance)) )) @@ -458,7 +459,7 @@ class IntegrateMasterVersion(pyblish.api.InstancePlugin): return publish_folder - def create_hardlink(self, src_path, dst_path): + def copy_file(self, src_path, dst_path): # TODO check drives if are the same to check if cas hardlink dst_path = self.path_root_check(dst_path) src_path = self.path_root_check(src_path) @@ -478,7 +479,19 @@ class IntegrateMasterVersion(pyblish.api.InstancePlugin): self.log.debug("Copying file \"{}\" to \"{}\"".format( src_path, dst_path )) - filelink.create(src_path, dst_path, filelink.HARDLINK) + + # First try hardlink and copy if paths are cross drive + try: + filelink.create(src_path, dst_path, filelink.HARDLINK) + # Return when successful + return + + except OSError as exc: + # re-raise exception if different than cross drive path + if exc.errno != errno.EXDEV: + raise + + shutil.copy(src_path, dst_path) def path_root_check(self, path): normalized_path = os.path.normpath(path) From 666041c9c94aa94bcb0f43460b5af957799b39a9 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 25 Feb 2020 14:54:59 +0100 Subject: [PATCH 28/43] added schema validation and fixed master version schema --- .../plugins/global/publish/integrate_master_version.py | 7 +++++-- schema/master_version-1.0.json | 10 ++++------ 2 files changed, 9 insertions(+), 8 deletions(-) diff --git a/pype/plugins/global/publish/integrate_master_version.py b/pype/plugins/global/publish/integrate_master_version.py index 2a23abfbec..715d99c1c8 100644 --- a/pype/plugins/global/publish/integrate_master_version.py +++ b/pype/plugins/global/publish/integrate_master_version.py @@ -6,7 +6,7 @@ import shutil from pymongo import InsertOne, ReplaceOne import pyblish.api -from avalon import api, io, pipeline +from avalon import api, io, schema from avalon.vendor import filelink @@ -162,6 +162,7 @@ class IntegrateMasterVersion(pyblish.api.InstancePlugin): "type": "master_version", "schema": "pype:master_version-1.0" } + schema.validate(new_master_version) # Don't make changes in database until everything is O.K. 
bulk_writes = [] @@ -286,9 +287,11 @@ class IntegrateMasterVersion(pyblish.api.InstancePlugin): repre["parent"] = new_master_version["_id"] repre["context"] = repre_context repre["data"] = repre_data + repre.pop("_id", None) + + schema.validate(repre) repre_name_low = repre["name"].lower() - # Replace current representation if repre_name_low in old_repres_to_replace: old_repre = old_repres_to_replace.pop(repre_name_low) diff --git a/schema/master_version-1.0.json b/schema/master_version-1.0.json index 173a076537..991594648b 100644 --- a/schema/master_version-1.0.json +++ b/schema/master_version-1.0.json @@ -17,14 +17,13 @@ "properties": { "_id": { "description": "Document's id (database will create it's if not entered)", - "type": "ObjectId", - "example": "592c33475f8c1b064c4d1696" + "example": "ObjectId(592c33475f8c1b064c4d1696)" }, "schema": { "description": "The schema associated with this document", "type": "string", - "enum": ["avalon-core:master_version-3.0", "pype:master_version-3.0"], - "example": "pype:master_version-3.0" + "enum": ["avalon-core:master_version-1.0", "pype:master_version-1.0"], + "example": "pype:master_version-1.0" }, "type": { "description": "The type of document", @@ -34,8 +33,7 @@ }, "parent": { "description": "Unique identifier to parent document", - "type": "ObjectId", - "example": "592c33475f8c1b064c4d1696" + "example": "ObjectId(592c33475f8c1b064c4d1697)" } } } From 877b9e8885dc431775e716f4d96c4766b966335d Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 17 Mar 2020 17:27:09 +0100 Subject: [PATCH 29/43] feat(nuke): publish baked mov with preset colorspace --- pype/nuke/lib.py | 22 ++++++++++++++----- .../nuke/publish/extract_review_data_mov.py | 7 +++++- 2 files changed, 22 insertions(+), 7 deletions(-) diff --git a/pype/nuke/lib.py b/pype/nuke/lib.py index 8e241dad16..f8284d18dd 100644 --- a/pype/nuke/lib.py +++ b/pype/nuke/lib.py @@ -1564,10 +1564,9 @@ class ExporterReviewMov(ExporterReview): self.nodes = {} # deal with now lut defined in viewer lut - if hasattr(klass, "viewer_lut_raw"): - self.viewer_lut_raw = klass.viewer_lut_raw - else: - self.viewer_lut_raw = False + self.viewer_lut_raw = klass.viewer_lut_raw + self.bake_colorspace_fallback = klass.bake_colorspace_fallback + self.bake_colorspace_main = klass.bake_colorspace_main self.name = name or "baked" self.ext = ext or "mov" @@ -1628,8 +1627,19 @@ class ExporterReviewMov(ExporterReview): self.log.debug("ViewProcess... `{}`".format(self._temp_nodes)) if not self.viewer_lut_raw: - # OCIODisplay node - dag_node = nuke.createNode("OCIODisplay") + colorspace = self.bake_colorspace_main \ + or self.bake_colorspace_fallback + + self.log.debug("_ colorspace... 
`{}`".format(colorspace)) + + if colorspace: + # OCIOColorSpace with controled output + dag_node = nuke.createNode("OCIOColorSpace") + dag_node["out_colorspace"].setValue(str(colorspace)) + else: + # OCIODisplay + dag_node = nuke.createNode("OCIODisplay") + # connect dag_node.setInput(0, self.previous_node) self._temp_nodes.append(dag_node) diff --git a/pype/plugins/nuke/publish/extract_review_data_mov.py b/pype/plugins/nuke/publish/extract_review_data_mov.py index 8b204680a7..1c6efafcfe 100644 --- a/pype/plugins/nuke/publish/extract_review_data_mov.py +++ b/pype/plugins/nuke/publish/extract_review_data_mov.py @@ -3,7 +3,7 @@ import pyblish.api from avalon.nuke import lib as anlib from pype.nuke import lib as pnlib import pype - +reload(pnlib) class ExtractReviewDataMov(pype.api.Extractor): """Extracts movie and thumbnail with baked in luts @@ -18,6 +18,11 @@ class ExtractReviewDataMov(pype.api.Extractor): families = ["review", "render", "render.local"] hosts = ["nuke"] + # presets + viewer_lut_raw = None + bake_colorspace_fallback = None + bake_colorspace_main = None + def process(self, instance): families = instance.data["families"] self.log.info("Creating staging dir...") From 94ce045fec85e82d45e8887cd3bbb7f6d717ee12 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 17 Mar 2020 18:32:59 +0100 Subject: [PATCH 30/43] fix(nuke): fallback approach of defining colorspace --- pype/nuke/lib.py | 19 +++++++++++++------ 1 file changed, 13 insertions(+), 6 deletions(-) diff --git a/pype/nuke/lib.py b/pype/nuke/lib.py index f8284d18dd..446f9af6a3 100644 --- a/pype/nuke/lib.py +++ b/pype/nuke/lib.py @@ -1627,15 +1627,22 @@ class ExporterReviewMov(ExporterReview): self.log.debug("ViewProcess... `{}`".format(self._temp_nodes)) if not self.viewer_lut_raw: - colorspace = self.bake_colorspace_main \ - or self.bake_colorspace_fallback + colorspaces = [ + self.bake_colorspace_main, self.bake_colorspace_fallback + ] - self.log.debug("_ colorspace... `{}`".format(colorspace)) - - if colorspace: + if any(colorspaces): # OCIOColorSpace with controled output dag_node = nuke.createNode("OCIOColorSpace") - dag_node["out_colorspace"].setValue(str(colorspace)) + for c in colorspaces: + test = dag_node["out_colorspace"].setValue(str(c)) + if test: + self.log.info( + "Baking in colorspace... 
`{}`".format(c)) + break + + if not test: + dag_node = nuke.createNode("OCIODisplay") else: # OCIODisplay dag_node = nuke.createNode("OCIODisplay") From c7aba1564aab06258841581cd423047c838e4a53 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Wed, 18 Mar 2020 19:14:08 +0100 Subject: [PATCH 31/43] group AOVs from maya render --- .../global/publish/submit_publish_job.py | 5 +++-- pype/plugins/maya/publish/collect_render.py | 20 +++++++++++-------- 2 files changed, 15 insertions(+), 10 deletions(-) diff --git a/pype/plugins/global/publish/submit_publish_job.py b/pype/plugins/global/publish/submit_publish_job.py index dcf19ae32c..e517198ba2 100644 --- a/pype/plugins/global/publish/submit_publish_job.py +++ b/pype/plugins/global/publish/submit_publish_job.py @@ -170,7 +170,7 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin): "review": ["lutPath"], "render.farm": ["bakeScriptPath", "bakeRenderPath", "bakeWriteNodeName", "version"] - } + } # list of family names to transfer to new family if present families_transfer = ["render3d", "render2d", "ftrack", "slate"] @@ -276,7 +276,7 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin): # if override remove all frames we are expecting to be rendered # so we'll copy only those missing from current render if instance.data.get("overrideExistingFrame"): - for frame in range(start, end+1): + for frame in range(start, end + 1): if frame not in r_col.indexes: continue r_col.indexes.remove(frame) @@ -366,6 +366,7 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin): new_instance = copy(instance_data) new_instance["subset"] = subset_name + new_instance["group"] = aov ext = cols[0].tail.lstrip(".") diff --git a/pype/plugins/maya/publish/collect_render.py b/pype/plugins/maya/publish/collect_render.py index be3878e6bd..8d74d242b3 100644 --- a/pype/plugins/maya/publish/collect_render.py +++ b/pype/plugins/maya/publish/collect_render.py @@ -211,19 +211,23 @@ class CollectMayaRender(pyblish.api.ContextPlugin): "attachTo": attachTo, "setMembers": layer_name, "publish": True, - "frameStart": int(context.data["assetEntity"]['data']['frameStart']), - "frameEnd": int(context.data["assetEntity"]['data']['frameEnd']), - "frameStartHandle": int(self.get_render_attribute("startFrame", - layer=layer_name)), - "frameEndHandle": int(self.get_render_attribute("endFrame", - layer=layer_name)), + "frameStart": int( + context.data["assetEntity"]['data']['frameStart']), + "frameEnd": int( + context.data["assetEntity"]['data']['frameEnd']), + "frameStartHandle": int( + self.get_render_attribute("startFrame", layer=layer_name)), + "frameEndHandle": int( + self.get_render_attribute("endFrame", layer=layer_name)), "byFrameStep": int( self.get_render_attribute("byFrameStep", layer=layer_name)), "renderer": self.get_render_attribute("currentRenderer", layer=layer_name), - "handleStart": int(context.data["assetEntity"]['data']['handleStart']), - "handleEnd": int(context.data["assetEntity"]['data']['handleEnd']), + "handleStart": int( + context.data["assetEntity"]['data']['handleStart']), + "handleEnd": int( + context.data["assetEntity"]['data']['handleEnd']), # instance subset "family": "renderlayer", From c73059869ec088f93dc2082e3a896f397e196bf5 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 19 Mar 2020 10:34:37 +0100 Subject: [PATCH 32/43] grammar fixes --- pype/plugins/global/publish/integrate_thumbnail.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/pype/plugins/global/publish/integrate_thumbnail.py 
b/pype/plugins/global/publish/integrate_thumbnail.py index 0bb34eab58..97122d2c39 100644 --- a/pype/plugins/global/publish/integrate_thumbnail.py +++ b/pype/plugins/global/publish/integrate_thumbnail.py @@ -34,7 +34,7 @@ class IntegrateThumbnails(pyblish.api.InstancePlugin): published_repres = instance.data.get("published_representations") if not published_repres: self.log.debug( - "There are not published representations on the instance." + "There are no published representations on the instance." ) return @@ -42,12 +42,12 @@ class IntegrateThumbnails(pyblish.api.InstancePlugin): anatomy = instance.context.data["anatomy"] if "publish" not in anatomy.templates: - self.warning("Anatomy does not have set publish key!") + self.log.warning("Anatomy is missing the \"publish\" key!") return if "thumbnail" not in anatomy.templates["publish"]: - self.warning(( - "There is not set \"thumbnail\" template for project \"{}\"" + self.log.warning(( + "There is no \"thumbnail\" template set for the project \"{}\"" ).format(project_name)) return From 1ac0961f3abc10d4aa963b253b9eb177dbb9b3be Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 19 Mar 2020 10:36:37 +0100 Subject: [PATCH 33/43] added missing version_id to master version schema --- schema/master_version-1.0.json | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/schema/master_version-1.0.json b/schema/master_version-1.0.json index 991594648b..9dff570b3a 100644 --- a/schema/master_version-1.0.json +++ b/schema/master_version-1.0.json @@ -9,6 +9,7 @@ "additionalProperties": true, "required": [ + "version_id", "schema", "type", "parent" @@ -19,6 +20,10 @@ "description": "Document's id (database will create it's if not entered)", "example": "ObjectId(592c33475f8c1b064c4d1696)" }, + "version_id": { + "description": "The version ID from which it was created", + "example": "ObjectId(592c33475f8c1b064c4d1695)" + }, "schema": { "description": "The schema associated with this document", "type": "string", From a364b90ae330d2a3691cb219e8f5df83497c5373 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 19 Mar 2020 10:50:48 +0100 Subject: [PATCH 34/43] integrate_new store subset and version entity to instance.data --- pype/plugins/global/publish/integrate_master_version.py | 5 +---- pype/plugins/global/publish/integrate_new.py | 7 +++---- 2 files changed, 4 insertions(+), 8 deletions(-) diff --git a/pype/plugins/global/publish/integrate_master_version.py b/pype/plugins/global/publish/integrate_master_version.py index 715d99c1c8..1cee7d1f24 100644 --- a/pype/plugins/global/publish/integrate_master_version.py +++ b/pype/plugins/global/publish/integrate_master_version.py @@ -61,13 +61,10 @@ class IntegrateMasterVersion(pyblish.api.InstancePlugin): master_publish_dir = self.get_publish_dir(instance) - src_version_entity = None + src_version_entity = instance.data.get("versionEntity") filtered_repre_ids = [] for repre_id, repre_info in published_repres.items(): repre = repre_info["representation"] - if src_version_entity is None: - src_version_entity = repre_info.get("version_entity") - if repre["name"].lower() in self.ignored_representation_names: self.log.debug( "Filtering representation with name: `{}`".format( diff --git a/pype/plugins/global/publish/integrate_new.py b/pype/plugins/global/publish/integrate_new.py index 8c27ccfa84..71a045a004 100644 --- a/pype/plugins/global/publish/integrate_new.py +++ b/pype/plugins/global/publish/integrate_new.py @@ -162,6 +162,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): ) subset = 
self.get_subset(asset_entity, instance) + instance.data["subsetEntity"] = subset version_number = instance.data["version"] self.log.debug("Next version: v{}".format(version_number)) @@ -237,6 +238,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): ) version = io.find_one({"_id": version_id}) + instance.data["versionEntity"] = version existing_repres = list(io.find({ "parent": version_id, @@ -463,10 +465,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): published_representations[repre_id] = { "representation": representation, "anatomy_data": template_data, - "published_files": published_files, - # TODO prabably should store subset and version to instance - "subset_entity": subset, - "version_entity": version + "published_files": published_files } self.log.debug("__ representations: {}".format(representations)) From 7d614c616daf65f5a384e549fa2202d3b3f5079e Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 19 Mar 2020 11:10:43 +0100 Subject: [PATCH 35/43] fixed bugs in itegrate master version when publishing first version --- pype/plugins/global/publish/integrate_master_version.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/pype/plugins/global/publish/integrate_master_version.py b/pype/plugins/global/publish/integrate_master_version.py index 1cee7d1f24..4600a95aa4 100644 --- a/pype/plugins/global/publish/integrate_master_version.py +++ b/pype/plugins/global/publish/integrate_master_version.py @@ -202,6 +202,7 @@ class IntegrateMasterVersion(pyblish.api.InstancePlugin): repre_name_low = repre["name"].lower() archived_repres_by_name[repre_name_low] = repre + backup_master_publish_dir = str(master_publish_dir) if os.path.exists(master_publish_dir): backup_master_publish_dir = master_publish_dir + ".BACKUP" max_idx = 10 @@ -401,10 +402,12 @@ class IntegrateMasterVersion(pyblish.api.InstancePlugin): ) # Remove backuped previous master - shutil.rmtree(backup_master_publish_dir) + if os.path.exists(backup_master_publish_dir): + shutil.rmtree(backup_master_publish_dir) except Exception: - os.rename(backup_master_publish_dir, master_publish_dir) + if os.path.exists(backup_master_publish_dir): + os.rename(backup_master_publish_dir, master_publish_dir) self.log.error(( "!!! Creating of Master version failed." " Previous master version maybe lost some data!" 
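The two fixes above, together with PATCH 25, converge on a single rename-then-restore pattern around the master publish folder: move any existing folder aside as a ".BACKUP", run the integration, drop the backup on success, and put it back on failure. A minimal standalone sketch of that pattern, assuming illustrative names (`with_master_dir_backup` and the `integrate` callable are not part of the plugin itself):

import os
import shutil


def with_master_dir_backup(master_dir, integrate):
    """Run `integrate` while keeping the previous master folder as a backup."""
    backup_dir = None
    if os.path.exists(master_dir):
        # Move the current master aside instead of deleting it up front
        backup_dir = master_dir + ".BACKUP"
        if os.path.exists(backup_dir):
            shutil.rmtree(backup_dir)
        os.rename(master_dir, backup_dir)

    try:
        integrate()
    except Exception:
        # Discard whatever was partially written and restore the backup
        if os.path.exists(master_dir):
            shutil.rmtree(master_dir)
        if backup_dir is not None:
            os.rename(backup_dir, master_dir)
        raise

    # Success: the previous master version is no longer needed
    if backup_dir is not None and os.path.exists(backup_dir):
        shutil.rmtree(backup_dir)

The plugin additionally retries with indexed backup names when an old ".BACKUP" folder cannot be removed; the sketch keeps only the core control flow.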
From 2b384bcfca2a9e6d80b9a564445b010407f1a9a2 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 19 Mar 2020 11:12:31 +0100 Subject: [PATCH 36/43] more specific validations of previous fix --- .../global/publish/integrate_master_version.py | 12 +++++++++--- 1 file changed, 9 insertions(+), 3 deletions(-) diff --git a/pype/plugins/global/publish/integrate_master_version.py b/pype/plugins/global/publish/integrate_master_version.py index 4600a95aa4..0eba275407 100644 --- a/pype/plugins/global/publish/integrate_master_version.py +++ b/pype/plugins/global/publish/integrate_master_version.py @@ -202,7 +202,7 @@ class IntegrateMasterVersion(pyblish.api.InstancePlugin): repre_name_low = repre["name"].lower() archived_repres_by_name[repre_name_low] = repre - backup_master_publish_dir = str(master_publish_dir) + backup_master_publish_dir = None if os.path.exists(master_publish_dir): backup_master_publish_dir = master_publish_dir + ".BACKUP" max_idx = 10 @@ -402,11 +402,17 @@ class IntegrateMasterVersion(pyblish.api.InstancePlugin): ) # Remove backuped previous master - if os.path.exists(backup_master_publish_dir): + if ( + backup_master_publish_dir is not None and + os.path.exists(backup_master_publish_dir) + ): shutil.rmtree(backup_master_publish_dir) except Exception: - if os.path.exists(backup_master_publish_dir): + if ( + backup_master_publish_dir is not None and + os.path.exists(backup_master_publish_dir) + ): os.rename(backup_master_publish_dir, master_publish_dir) self.log.error(( "!!! Creating of Master version failed." From f579e8467027c30da160be55365357c18c5993b2 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Thu, 19 Mar 2020 12:53:45 +0100 Subject: [PATCH 37/43] fix to subsetGroup --- pype/plugins/global/publish/submit_publish_job.py | 2 +- pype/scripts/otio_burnin.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/pype/plugins/global/publish/submit_publish_job.py b/pype/plugins/global/publish/submit_publish_job.py index e517198ba2..2914203578 100644 --- a/pype/plugins/global/publish/submit_publish_job.py +++ b/pype/plugins/global/publish/submit_publish_job.py @@ -366,7 +366,7 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin): new_instance = copy(instance_data) new_instance["subset"] = subset_name - new_instance["group"] = aov + new_instance["subsetGroup"] = aov ext = cols[0].tail.lstrip(".") diff --git a/pype/scripts/otio_burnin.py b/pype/scripts/otio_burnin.py index 8d0b925089..8b52216968 100644 --- a/pype/scripts/otio_burnin.py +++ b/pype/scripts/otio_burnin.py @@ -296,7 +296,7 @@ class ModifiedBurnins(ffmpeg_burnins.Burnins): args=args, overwrite=overwrite ) - print(command) + # print(command) proc = subprocess.Popen(command, shell=True) proc.communicate() From a6ec2060ccff6117cf39ab5768094a4109e4cb1d Mon Sep 17 00:00:00 2001 From: Milan Kolar Date: Thu, 19 Mar 2020 19:29:54 +0100 Subject: [PATCH 38/43] filter master version to only some families --- pype/plugins/global/publish/integrate_master_version.py | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/pype/plugins/global/publish/integrate_master_version.py b/pype/plugins/global/publish/integrate_master_version.py index 0eba275407..16aa0dd23d 100644 --- a/pype/plugins/global/publish/integrate_master_version.py +++ b/pype/plugins/global/publish/integrate_master_version.py @@ -15,6 +15,15 @@ class IntegrateMasterVersion(pyblish.api.InstancePlugin): # Must happen after IntegrateNew order = pyblish.api.IntegratorOrder + 0.1 + optional = True + + families = ["model", + "rig", + "setdress", 
+ "look", + "pointcache", + "animation"] + # Can specify representation names that will be ignored (lower case) ignored_representation_names = [] db_representation_context_keys = [ From fe8a71f97a271a6a07079c1947130f3fee207b18 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Thu, 19 Mar 2020 21:16:40 +0100 Subject: [PATCH 39/43] grouping by layer name --- pype/plugins/global/publish/submit_publish_job.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pype/plugins/global/publish/submit_publish_job.py b/pype/plugins/global/publish/submit_publish_job.py index 2914203578..9c556f3512 100644 --- a/pype/plugins/global/publish/submit_publish_job.py +++ b/pype/plugins/global/publish/submit_publish_job.py @@ -366,7 +366,7 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin): new_instance = copy(instance_data) new_instance["subset"] = subset_name - new_instance["subsetGroup"] = aov + new_instance["subsetGroup"] = subset ext = cols[0].tail.lstrip(".") From 81ccb8d767f9eefeb5dc00eea9874330f0113976 Mon Sep 17 00:00:00 2001 From: Milan Kolar Date: Fri, 20 Mar 2020 08:59:09 +0100 Subject: [PATCH 40/43] stop hound from reporting line break before operator --- .flake8 | 1 + 1 file changed, 1 insertion(+) diff --git a/.flake8 b/.flake8 index 67ed2d77a3..f28d8cbfc3 100644 --- a/.flake8 +++ b/.flake8 @@ -1,6 +1,7 @@ [flake8] # ignore = D203 ignore = BLK100 +ignore = W504 max-line-length = 79 exclude = .git, From e8ac6ddbf9b89558601eee7fbc17533518c20b82 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Fri, 20 Mar 2020 11:32:21 +0100 Subject: [PATCH 41/43] fixed group name to include full subset name but '_AOV' --- pype/plugins/global/publish/submit_publish_job.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/pype/plugins/global/publish/submit_publish_job.py b/pype/plugins/global/publish/submit_publish_job.py index 9c556f3512..556132cd77 100644 --- a/pype/plugins/global/publish/submit_publish_job.py +++ b/pype/plugins/global/publish/submit_publish_job.py @@ -348,10 +348,11 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin): assert len(cols) == 1, "only one image sequence type is expected" # create subset name `familyTaskSubset_AOV` - subset_name = 'render{}{}{}{}_{}'.format( + group_name = 'render{}{}{}{}'.format( task[0].upper(), task[1:], - subset[0].upper(), subset[1:], - aov) + subset[0].upper(), subset[1:]) + + subset_name = '{}_{}'.format(group_name, aov) staging = os.path.dirname(list(cols[0])[0]) @@ -366,7 +367,7 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin): new_instance = copy(instance_data) new_instance["subset"] = subset_name - new_instance["subsetGroup"] = subset + new_instance["subsetGroup"] = group_name ext = cols[0].tail.lstrip(".") From ac178e9d5b5336d607fa37c0755329e4977c224e Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Fri, 20 Mar 2020 12:05:10 +0100 Subject: [PATCH 42/43] use right intent variable in integrate new --- pype/plugins/global/publish/integrate_new.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pype/plugins/global/publish/integrate_new.py b/pype/plugins/global/publish/integrate_new.py index ccfb3689e2..5052ae3aff 100644 --- a/pype/plugins/global/publish/integrate_new.py +++ b/pype/plugins/global/publish/integrate_new.py @@ -263,8 +263,8 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): for idx, repre in enumerate(instance.data["representations"]): # create template data for Anatomy template_data = copy.deepcopy(anatomy_data) - if intent is not None: - 
template_data["intent"] = intent + if intent_value is not None: + template_data["intent"] = intent_value resolution_width = repre.get("resolutionWidth") resolution_height = repre.get("resolutionHeight") From f2eb13e3ca595525cad4b7a9ec7a2b77cb566a9a Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Mon, 23 Mar 2020 14:57:25 +0100 Subject: [PATCH 43/43] hound cleanups --- .../publish/integrate_master_version.py | 22 ++++++++++--------- 1 file changed, 12 insertions(+), 10 deletions(-) diff --git a/pype/plugins/global/publish/integrate_master_version.py b/pype/plugins/global/publish/integrate_master_version.py index 16aa0dd23d..3c7838b708 100644 --- a/pype/plugins/global/publish/integrate_master_version.py +++ b/pype/plugins/global/publish/integrate_master_version.py @@ -17,12 +17,14 @@ class IntegrateMasterVersion(pyblish.api.InstancePlugin): optional = True - families = ["model", - "rig", - "setdress", - "look", - "pointcache", - "animation"] + families = [ + "model", + "rig", + "setdress", + "look", + "pointcache", + "animation" + ] # Can specify representation names that will be ignored (lower case) ignored_representation_names = [] @@ -109,13 +111,13 @@ class IntegrateMasterVersion(pyblish.api.InstancePlugin): all_copied_files = [] transfers = instance.data.get("transfers", list()) - for src, dst in transfers: + for dst in transfers.values(): dst = os.path.normpath(dst) if dst not in all_copied_files: all_copied_files.append(dst) hardlinks = instance.data.get("hardlinks", list()) - for src, dst in hardlinks: + for dst in hardlinks.values(): dst = os.path.normpath(dst) if dst not in all_copied_files: all_copied_files.append(dst) @@ -190,7 +192,7 @@ class IntegrateMasterVersion(pyblish.api.InstancePlugin): # Separate old representations into `to replace` and `to delete` old_repres_to_replace = {} old_repres_to_delete = {} - for repre_id, repre_info in published_repres.items(): + for repre_info in published_repres.values(): repre = repre_info["representation"] repre_name_low = repre["name"].lower() if repre_name_low in old_repres_by_name: @@ -260,7 +262,7 @@ class IntegrateMasterVersion(pyblish.api.InstancePlugin): )) try: src_to_dst_file_paths = [] - for repre_id, repre_info in published_repres.items(): + for repre_info in published_repres.values(): # Skip if new repre does not have published repre files published_files = repre_info["published_files"]